sequel-5.63.0/.ci.gemfile

# This file is only used for CI.
source 'http://rubygems.org'

gem 'minitest-hooks'
gem 'minitest-global_expectations'

# Plugin/Extension Dependencies
gem 'tzinfo'

if RUBY_VERSION < '2.1'
  gem 'nokogiri', '<1.7'
elsif RUBY_VERSION < '2.3'
  gem 'nokogiri', '<1.10'
elsif RUBY_VERSION < '2.4'
  gem 'nokogiri', '<1.11'
elsif RUBY_VERSION < '2.5'
  gem 'nokogiri', '<1.12'
elsif RUBY_VERSION < '2.6'
  gem 'nokogiri', '<1.13'
else
  gem 'nokogiri'
end

if RUBY_VERSION < '2.3'
  gem 'i18n', '<1.5'
end

if RUBY_VERSION < '2.2.0'
  gem 'activemodel', '<5.0.0'
  gem 'concurrent-ruby', '<1.1.10'
elsif RUBY_VERSION < '2.4.0'
  gem 'activemodel', '<6.0.0'
elsif RUBY_VERSION < '2.7.0'
  gem 'activemodel', '<7.0.0'
else
  gem 'activemodel'
end

if RUBY_VERSION < '3.1.0' && RUBY_VERSION >= '3.0.0'
  gem 'json', '2.5.1'
elsif RUBY_VERSION < '2.0.0'
  gem 'json', '<1.8.5'
elsif RUBY_VERSION < '2.3.0'
  gem 'json', '<2.6'
else
  gem 'json'
end

if RUBY_VERSION < '2.0.0'
  gem 'rake', '<10'
elsif RUBY_VERSION < '2.3.0'
  gem 'rake', '<13'
else
  gem 'rake'
end

if RUBY_VERSION < '2.4.0'
  # Until minitest 5.12.0 is fixed
  gem 'minitest', '5.11.3'
else
  gem 'minitest', '>= 5.7.0'
end

# MRI Adapter Dependencies
platforms :ruby do
  # Alternate between true and false based on the parity of the Ruby minor
  # version and the parity of the current day of the year.
  sequel_pg = RUBY_VERSION.split('.')[1].to_i.send(Time.now.yday.even? ? :even? : :odd?)

  gem "sqlite3"
  if RUBY_VERSION < '2.0.0'
    gem "pg", '<0.19.0'
    gem "mysql2", '<0.5'
  else
    gem "pg", RUBY_VERSION < '2.2.0' ? '<1.2.0' : '>0'
    gem "mysql2"
  end

  # Test current sequel_pg on half of the MRIs, and pure-ruby on the other half
  if sequel_pg
    gem 'sequel_pg', git: 'https://github.com/jeremyevans/sequel_pg', require: 'sequel'
  end
end

# JRuby Adapter Dependencies
platforms :jruby do
  if RUBY_VERSION < '2.4'
    gem 'racc', '<1.6'
  end

  gem 'jdbc-sqlite3'
  gem 'jdbc-mysql'
  gem 'jdbc-postgres'
end
sequel-5.63.0/.github/workflows/ci.yml

name: CI

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

permissions:
  contents: read

jobs:
  tests:
    runs-on: ubuntu-latest
    services:
      postgres:
        image: postgres:latest
        ports: ["5432:5432"]
        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5
        env:
          POSTGRES_PASSWORD: postgres
      mysql:
        image: mysql:latest
        env:
          MYSQL_ROOT_PASSWORD: root
          MYSQL_DATABASE: sequel_test
        ports: ["3306:3306"]
        options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3
    strategy:
      fail-fast: false
      matrix:
        ruby: [ "1.9.3", "2.0.0", 2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7, "3.0", 3.1, jruby-9.1, jruby-9.2, jruby-9.3, truffleruby-head ]
    name: ${{ matrix.ruby }}
    env:
      BUNDLE_GEMFILE: .ci.gemfile
    steps:
      - uses: actions/checkout@v2
      - run: sudo apt-get -yqq install libpq-dev libmysqlclient-dev
      - run: sudo apt-get -yqq install libxml2-dev libxslt-dev
        if: startsWith(matrix.ruby, 'truffleruby')
      - uses: ruby/setup-ruby@v1
        with:
          ruby-version: ${{ matrix.ruby }}
          bundler-cache: true
      - run: bundle exec rake spec_ci
        env:
          DEFAULT_DATABASE: 1
          MYSQL_ROOT_PASSWORD: 1
        continue-on-error: ${{ startsWith(matrix.ruby, 'truffleruby') }}

sequel-5.63.0/.gitignore

*.lock
*.rbc
*.swp
/coverage
/rdoc
/sequel-*.gem
/spec/bin-sequel-*
/spec/spec_config.rb
/www/public/*.html
/www/public/rdoc*

sequel-5.63.0/CHANGELOG

=== 5.63.0 (2022-12-01)

* Make validates_associated plugin avoid database type errors for non-integer association keys (jeremyevans) (#1968)
* Make tactical_eager_loading plugin work better with table inheritance plugins (rolftimmermans, jeremyevans) (#1962)
* Add support for pool_class: :timed_queue on Ruby 3.2+, using a Queue for available connections (jeremyevans)
* Allow :pool_class Database option to be specified as a string to more easily choose a different pool type (jeremyevans)
* Use compare_by_identity hashes for Thread-keyed hashes in threaded connection pools (jeremyevans)
* Skip use of JRuby workaround on JRuby 9.3.9.0+ in named_timezones extension as JRuby fixed the related bug (jeremyevans)

=== 5.62.0 (2022-11-01)

* Add back the pg_auto_parameterize extension for automatically using bound variables when using postgres adapter with pg driver (jeremyevans)
* Add pg_extended_integer_support extension for customizing behavior when literalizing a Ruby integer outside PostgreSQL bigint range (jeremyevans)
* Raise Postgres::IntegerOutsideBigintRange if attempting to literalize a Ruby integer outside PostgreSQL bigint range (jeremyevans)
* Add primary_key_lookup_check_values plugin for typecasting and checking primary key values during lookup (jeremyevans)
* Setup validation of minimum and maximum values for integer columns in auto_validations (jeremyevans)
* Add validates_max_value and validates_min_value to validation_helpers (jeremyevans)
* Include :min_value and :max_value schema entries for integer columns on most databases (jeremyevans)
* Don't wrap multi-inserts in a transaction when it's not required (shannoncole, jeremyevans) (#1945)
* Update mock PostgreSQL adapter to default to PostgreSQL 15 instead of PostgreSQL 14 (jeremyevans)
* Support fractional seconds in the named_timezones extension (jeremyevans) (#1943)
* Cache reflection datasets in the postgres adapter to improve performance (jeremyevans)
* Handle BC dates and timestamps in bound variables when using the pg_extended_date_support extension (jeremyevans)
* Correctly format hstore[] types in bound variables on PostgreSQL (jeremyevans)
* Fix corner case in eager loading where window function eager limit strategy is used, but row number entry is not removed (jeremyevans)
* Support server/shard specific :after_connect and :connect_sqls Database options (jeremyevans) (#1935)

=== 5.61.0 (2022-10-01)

* Make Database#foreign_key_list on PostgreSQL return results for partitioned tables (jeremyevans)
* Add Database#check_string_typecast_bytesize for checking bytesize of strings before typecasting (jeremyevans)
* Treat negative hexadecimal strings similar to positive hexadecimal strings when typecasting to integer (jeremyevans)
* Remove is_json and is_not_json methods from the pg_json_ops extension, as the support was removed in PostgreSQL 15 beta 4 (jeremyevans)
* Fix handling of timestamps before the date of calendar reform when using pg_extended_date_support extension on Ruby 3.2 (jeremyevans)

=== 5.60.1 (2022-09-02)

* Revert conversion of respond_to? to defined?, as it breaks with unused refinements on Ruby 2 (jeremyevans) (#1919)
=== 5.60.0 (2022-09-01)

* Support arbitrary expressions for date_arithmetic interval values on PostgreSQL 9.4+ (jeremyevans)
* Support native IS DISTINCT FROM on SQLite 3.39+ instead of emulating support in the is_distinct_from extension (jeremyevans)
* Support HAVING without GROUP BY on SQLite 3.39+ (jeremyevans)
* Convert most respond_to? calls to equivalent defined? for better performance (jeremyevans)

=== 5.59.0 (2022-08-01)

* Set :allow_eager association option to false for instance specific associations without eager loaders (jeremyevans)
* Add require_valid_schema plugin for checking that model classes have schema parsed as expected (jeremyevans)
* Model classes created from aliased expressions and literal strings no longer use the simple table optimization (jeremyevans)
* Model code that does not swallow connection errors will now also not swallow disconnect errors (jeremyevans) (#1892)
* Add is_json and is_not_json methods to the pg_json_ops extension, for the PostgreSQL 15+ IS [NOT] JSON operator (jeremyevans)
* Support :security_invoker view option on PostgreSQL 15+, for views where access uses permissions of user instead of owner (jeremyevans)
* Support :nulls_distinct index option on PostgreSQL 15+, for NULLS [NOT] DISTINCT (jeremyevans)
* Support sequel-postgres-pr driver in the postgres adapter (jeremyevans)

=== 5.58.0 (2022-07-01)

* Support :disable_split_materialized Database option on MySQL to work around optimizer bug in MariaDB 10.5+ affecting association tests (jeremyevans)
* Add Dataset#merge* methods to support MERGE statement on PostgreSQL 15+, MSSQL, Oracle, DB2, H2, HSQLDB, and Derby (jeremyevans)

=== 5.57.0 (2022-06-01)

* Make Database#create_function on PostgreSQL accept :parallel option (bananarne) (#1870)
* Add support for :on_update_current_timestamp column option on MySQL (jeremyevans)
* Add is_distinct_from extension with support for the IS DISTINCT FROM operator (jeremyevans)

=== 5.56.0 (2022-05-01)

* Make alter_table add_column/add_foreign_key methods support :index option to create an index on the column (jeremyevans)
* Support creation of STRICT tables on SQLite 3.37.0+ via create_table :strict option (jeremyevans)
* Add sqlite_json_ops extension for DSL support for JSON functions and operators added in SQLite 3.38.0 (jeremyevans)
* Recognize "INTEGER" type same as "integer" type in the schema dumper, helpful on SQLite 3.37.0+ (jeremyevans)

=== 5.55.0 (2022-04-01)

* Support :setup_regexp_function Database option in the sqlite adapter to allow the use of regexps when querying (jeremyevans)
* Add auto_restrict_eager_graph plugin for automatically disallowing eager_graph with associations needing but lacking graph options (jeremyevans)
* Fix placeholder literalizer optimization for dataset aggregate methods on a model dataset (belousovAV) (#1847, #1848)

=== 5.54.0 (2022-03-01)

* Add enum plugin for treating columns as enums in a model (jeremyevans) (#1839)

=== 5.53.0 (2022-02-01)

* Make Dataset#_sql_comment private when using the Database sql_comments extension (jeremyevans)
* Fix prepared statements in the mysql2 adapter to reuse native prepared statements (jeremyevans) (#1832)
* Support H2 version 2+ in the jdbc/h2 adapter (jeremyevans) (#1817)
* Work around active_support breaking subclasses plugin on Ruby <3.1 (jeremyevans) (#1816)
* Fix error handling if trying to setup column_encryption plugin without keys (jeremyevans) (#1815)

=== 5.52.0 (2022-01-01)

* Use Class#subclasses if available in the subclasses plugin, instead of a custom Model.subclasses accessor (jeremyevans)
* Add Model.descendants and .freeze_descendants to subclasses plugin (jeremyevans)
* Avoid use of deprecated Refinement#include on Ruby 3.1+ (jeremyevans)
* Add date_parse_input_handler extension for custom handling of input to date parsing methods (jeremyevans)
* Make postgres adapter respect Database#default_string_column_size (jeremyevans)
* Make pg_interval extension work with ActiveSupport 7.0 (jeremyevans)
* Make :ruby_default schema entry for type: :datetime respect Sequel.datetime_class (jeremyevans)
* Make alter_table drop_constraint have an effect on MySQL 8.0.19+ (jeremyevans)
* Make mysql adapter support ruby-mysql 3 API (jeremyevans) (#1795)
* Make mysql adapter no longer use connection's server_version, since it isn't accurate when using the ruby-mysql driver (jeremyevans)
* Add sql_comments plugin for automatically including comments on queries generated by model class, instance, and dataset methods (jeremyevans)
* Make sql_comments Database extension support Database#with_comments, for automatically including comments for queries executed inside the block (jeremyevans)
* Fix sql_comments extension to not modify cached SQL for a dataset (jeremyevans)

=== 5.51.0 (2021-12-01)

* Make eager loading via tactical_eager_loading no longer modify objects that already have a cached value for the association (jeremyevans)
* Make association cloning handle cases where clone association sets different :class option than cloned association (jeremyevans)
* Make column schema entries on MySQL include an :extra entry for the Extra column in DESCRIBE output (bschmeck) (#1791)
* Update mock PostgreSQL adapter to default to PostgreSQL 14 instead of PostgreSQL 9.5 (jeremyevans)
* Support Dataset#with_recursive :search and :cycle options on PostgreSQL 14+ for result ordering and cycle detection (jeremyevans)
* Avoid method redefined verbose mode warnings in lazy_attributes plugin (jeremyevans)

=== 5.50.0 (2021-11-01)

* Make Migrator :allow_missing_migration_files also allow down migrations where the current database version is greater than the last migration file version (francisconeves97) (#1789)
* Fix Model#freeze in composition, serialization, and serialization_modification_detection plugins to return self (jeremyevans) (#1788)
* Fix typecasting of lazy columns when using lazy_attributes plugin in model where dataset selects from subquery (jeremyevans)
* Add :before_preconnect Database option, for configuring extensions loaded via :preconnect_extensions (MarcPer, jeremyevans) (#1786)
* Change Dataset#columns! to use a LIMIT 0 query instead of a LIMIT 1 query (jeremyevans)
* Add sql_log_normalizer extension for normalizing logged SQL, helpful for analytics and sensitive data (jeremyevans)
* Add support for range_merge, multirange, and unnest, and PGMultiRange#op to pg_range_ops extension (jeremyevans)
* Add pg_multirange extension with support for PostgreSQL 14+ multirange types (jeremyevans)

=== 5.49.0 (2021-10-01)

* Switch block_given? usage to defined?(yield) (jeremyevans)
* Support table aliases for JOIN USING columns on PostgreSQL 14+ (jeremyevans)
* Support calling PostgreSQL procedures without arguments (jeremyevans)
* Support hstore subscripts in pg_hstore_ops on PostgreSQL 14+, for updating only part of an hstore value (jeremyevans)
* Support JSONB subscripts in pg_json_ops on PostgreSQL 14+, for updating only part of a JSONB value (jeremyevans)
* Support SQL::Expression#sequel_ast_transform for custom AST transforms on arbitrary expressions (jeremyevans)
* Add Database#create_trigger :replace option on PostgreSQL 14+ for CREATE OR REPLACE TRIGGER (jeremyevans)
* Make auto_validations plugin automatically setup no_null_byte validations (jeremyevans)
* Add Model#validates_no_null_byte to validation_helpers plugin (jeremyevans)

=== 5.48.0 (2021-09-01)

* Make the unused_associations plugin association reflection tracking work correctly when combining coverage runs (jeremyevans)
* Add Database#like_without_collate on MSSQL, to avoid using COLLATE on LIKE arguments, which can significantly improve performance (jeremyevans)
* Add Model::Errors#full_message private method for easiest i18n support for errors with multiple attributes (jeremyevans) (#1779)

=== 5.47.0 (2021-08-01)

* Make the unused_associations plugin track access to association reflections to determine whether associations are used (jeremyevans)
* Support :db option for join tables in {many,one}_through_many to use a separate query for each join table (jeremyevans)
* Support :join_table_db option for many_to_many/one_through_one associations, to use a separate query for the join table (jeremyevans)
* Support :allow_eager_graph and :allow_filtering_by association options (jeremyevans)
* Add Database#rename_tables on MySQL, for renaming multiple tables in a single call (nick96) (#1774)
* Support Dataset#returning on SQLite 3.35+ (jeremyevans)

=== 5.46.0 (2021-07-01)

* Add unused_associations plugin, for determining which associations and association methods are not used (jeremyevans)
* Make nil :setter/:adder/:remover/:clearer association options not create related methods (jeremyevans)

=== 5.45.0 (2021-06-01)

* Fix handling of NULL values in boolean columns in the ODBC adapter (jeremyevans) (#1765)
* Add auto_validations_constraint_validations_presence_message plugin for auto_validations/constraint_validations presence message integration (jeremyevans)
* Support Dataset#with :materialized option on SQLite 3.35+ for [NOT] MATERIALIZED (jeremyevans)
* Use ALTER TABLE DROP COLUMN for dropping columns on SQLite 3.35+ (jeremyevans)

=== 5.44.0 (2021-05-01)

* Add concurrent_eager_loading plugin, for eager loading multiple associations concurrently using separate threads (jeremyevans)
* Support :weeks as an interval unit in the date_arithmetic extension (jeremyevans) (#1759)
* Raise an exception if an interval hash with an unsupported key is passed in the date_arithmetic extension (jeremyevans) (#1759)
* Support dropping non-composite unique constraints on SQLite (jeremyevans) (#1755)

=== 5.43.0 (2021-04-01)

* Add column_encryption plugin, for encrypting column values (jeremyevans)

=== 5.42.0 (2021-03-01)

* Make the ado timestamp conversion proc a normal conversion proc that can be overridden similar to other conversion procs (jeremyevans)
* Add :reject_nil option to the nested_attributes method, to ignore calls where nil is passed as the associated object data (jeremyevans)
* Add async_thread_pool plugin for easier async usage with model classes and support for async destroy, with_pk, and with_pk! methods (jeremyevans)
* Add async_thread_pool Database extension for executing queries asynchronously using a thread pool (jeremyevans)
* Fix possible thread safety issue in Database#extension that could allow Module#extended to be called twice with the same Database instance (jeremyevans)
* Support cases where validations make modifications beyond setting errors in Model#freeze (jeremyevans)
* Add Model#to_json_data to the json_serializer plugin, returning a JSON data structure (jeremyevans)

=== 5.41.0 (2021-02-01)

* Have explicit :text option for a String column take priority over :size option on PostgreSQL (jeremyevans) (#1750)
* Support a :skip_invalid option in auto_validations plugin for not adding errors to a column that already has an error (jeremyevans)
* Support a :skip_invalid option in validation_helpers for not adding an error to a column that already has an error (jeremyevans)
* Support :adder, :remover, and :clearer association options that use keyword arguments in Ruby 2.7+ (jeremyevans)
* Make pg_interval use the same number of seconds per year and per month as ActiveSupport::Duration when using ActiveSupport 5.1+ (jeremyevans)

=== 5.40.0 (2021-01-01)

* Support UPDATE FROM syntax in SQLite 3.33.0+ (jeremyevans)
* Have pg_interval extension work with ActiveSupport 6.1 (jeremyevans)
* Have date_arithmetic extension work with ActiveSupport 6.1 (jeremyevans)
* Avoid method redefinition warnings in verbose warning mode (jeremyevans)

=== 5.39.0 (2020-12-01)

* Support :clustered option for primary key and unique constraints on Microsoft SQL Server (jeremyevans)
* Do not modify the size of binary columns when using set_column_allow_null on Microsoft SQL Server (jeremyevans) (#1736)
* Add a fork safety guide with more detail on how to use Sequel with libraries that fork (janko) (#1733)
* Make the roots_dataset method in the tree plugin work with queries using joins (jeremyevans) (#1731)
* Make Database#tables return partitioned tables on PostgreSQL 10+ (epoberezhny) (#1729, #1730)

=== 5.38.0 (2020-11-01)

* Do not add new Database instances to Sequel::DATABASES if the test connection fails (jeremyevans) (#1727)
* Support the newer com.mysql.cj.jdbc.Driver in the jdbc/mysql adapter (jeremyevans)
* Do not swallow disconnect errors in Database#create_or_replace_view or Database#create_table* on Oracle (jeremyevans)
* Only rescue non-disconnect Sequel::DatabaseErrors in Postgres::Database#server_version (jeremyevans) (#1724)
* Make the single_table_inheritance and prepared_statements plugins work if loaded into the same class (jeremyevans) (#1721)

=== 5.37.0 (2020-10-01)

* Recognize more unsigned decimal/float types in the schema dumper (akimd, jeremyevans) (#1720)
* Add Postgres::PGRow::{Array,Hash}Row#op to the pg_row_ops extension if the pg_row extension is loaded (jeremyevans)
* Add Model#column_previously_was and #column_previously_changed? to the dirty plugin (jeremyevans)
* Raise Migrator::Error if attempting to migrate down to a version where there are necessary migration files missing (jeremyevans) (#1716)

=== 5.36.0 (2020-09-01)

* Handle passing keyword arguments through class methods defined via Plugins.def_dataset_method on Ruby 2.7+ (jeremyevans)
* Handle passing keyword arguments through when loading plugins on Ruby 2.7+ (jeremyevans)
* Handle passing keyword arguments through migrations when defining custom Database methods that accept keywords on Ruby 2.7+ (jeremyevans)
* Handle passing keyword arguments through Dataset#query when using the query extension on Ruby 2.7+ (jeremyevans)
* Handle passing keyword arguments through the association proxy when using the association_proxies plugin on Ruby 2.7+ (jeremyevans)
* Handle passing keyword arguments through the class method to a method defined in dataset_module on Ruby 2.7+ (adam12) (#1713)
* Stream result sets in the odbc adapter for better performance and lower memory usage (sparrovv) (#1711)
* Add Postgres::JSONBOp#set_lax and #path_*_tz methods to the pg_json_ops extension for new jsonb functions added in PostgreSQL 13 (jeremyevans)
* Add Dataset#with_ties on PostgreSQL 13+ and Microsoft SQL Server to include rows with same order as final row (jeremyevans)
* Add a :current_schema option to Database#view_exists? (only defined on Oracle) to look in the current schema instead of non-system schemas (jeremyevans) (#1710)
* Recognize another disconnect error in the mysql and mysql2 adapters (jeremyevans) (#1706)

=== 5.35.0 (2020-08-01)

* Recognize another disconnect error in the oracle adapter (sterlzbd) (#1705)
* Consider all associations with :dataset options as instance-specific associations (jeremyevans)
* Make Model.finalize_associations not break with instance-specific associations (jeremyevans)
* Make association placeholder loader consider block if instance_specific: false association option is used (jeremyevans)
* Copy composite unique constraints when emulating alter table operations on SQLite (jeremyevans) (#1704)
* Add instance_specific_default plugin for setting default association :instance_specific value, or warning/raising for cases where it is not specified (jeremyevans)
* Make Model.plugin issue deprecation warning if loading plugin with arguments and block if plugin does not accept arguments/block (jeremyevans)
* Make validation_class_methods consider all :if, :allow_missing, :allow_nil, and :allow_blank settings, instead of just the first (jeremyevans)
* Include hash entries with nil keys in Dataset#to_dot output in to_dot extension (jeremyevans)
* Remove unneeded conditionals from plugins and extensions (jeremyevans)
* Fix exception class in run_transaction_hooks extension if calling run_after_{commit,rollback}_hooks outside of a transaction (jeremyevans)

=== 5.34.0 (2020-07-01)

* Make eager_graph work correctly if called with no associations (jeremyevans)
* Make :ruby eager limit strategy handle cases where there is no limit or offset (jeremyevans)
* Do not keep a reference to a Sequel::Database instance that raises an exception during initialization (jeremyevans)
* Make Database#pool.all_connections not yield for a single connection pool in disconnected state (jeremyevans)
* Raise an exception if trying to disconnect a server that doesn't exist in the sharded connection pools (jeremyevans)
* Support :refresh option when calling *_pks getter method in the association_pks plugin (jeremyevans)
* Support caching of repeated calls to *_pks getter method in the association_pks plugin using :cache_pks association option (jeremyevans)
* Add *_pks_dataset methods for one_to_many and many_to_many associations when using the association_pks plugin (jeremyevans)

=== 5.33.0 (2020-06-01)

* Support custom join types on a per-association basis when using eager_graph/association_join (jeremyevans)
* Support primary_key with type: :smallserial on PostgreSQL (j-a-m-l) (#1698)
* Add Database#current_timestamp_utc accessor on SQLite to keep CURRENT_* in UTC instead of converting to localtime (jeremyevans)

=== 5.32.0 (2020-05-01)

* Allow Database#create_table? to work with :partition_of option on PostgreSQL (jeremyevans) (#1690)
* Add fiber_concurrency extension, for using Fiber.current instead of Thread.current for checking out connections (jeremyevans)
* Move most Sequel singleton methods into a module that extends Sequel for easier overriding (jeremyevans)
* Fix method visibility issues in model, plugin, extension, and adapter code (jeremyevans)
* Avoid defining conversion procs for PostgreSQL inet/cidr types in pg_inet extension when using sequel_pg 1.13.0+ (jeremyevans)
* Add run_transaction_hooks Database extension, allowing for running the transaction hooks before commit/rollback, for use with transactional testing (jeremyevans)
* Recognize timestamp(N) with time zone type (isc) (#1684)

=== 5.31.0 (2020-04-01)

* Fix alter_table drop_constraint :primary_key option on SQLite for non-integer primary keys (jeremyevans)
* Add skip_saving_columns plugin, which supports columns to skip when saving, and skips generated columns by default (joeosburn, jeremyevans) (#1681, #1682)
* Add support for creating partitioned tables in PostgreSQL 10+ using :partition_by and :partition_of options (jeremyevans)
* Dump generated columns as generated columns when using the schema_dumper with :same_db option on PostgreSQL 12+ (jeremyevans) (#1680)
* Ignore defaults for generated columns by default when using the schema dumper (jeremyevans) (#1680)
* Include generated columns in schema on SQLite 3.31+ (jeremyevans)
* Add :generated schema entry on PostgreSQL 12+ and SQLite 3.31+ for whether the column is generated (jeremyevans)
* Add association_lazy_eager_option plugin for supporting :eager option for association method (jeremyevans)
* Add forbid_lazy_load plugin for forbidding lazy loading of associations, to help find N+1 issues (jeremyevans)

=== 5.30.0 (2020-03-01)

* Remove specs and old release notes from the gem to reduce gem size by over 40% (jeremyevans)
* When using Database#transaction :retry_on, call :before_retry option if retrying even if :num_retries is nil (jcmfernandes) (#1678)
* Support generated columns on SQLite 3.31+ using :generated_always_as and :generated_type options (jeremyevans)

=== 5.29.0 (2020-02-01)

* Recognize another disconnect error in the tinytds adapter (jeremyevans)
* Fix verbose warning in postgres adapter when using prepared statements and recent versions of ruby-pg (jeremyevans)
* Work correctly on Ruby 2.8+ by supporting second argument for initialize_clone (jeremyevans)
* Add empty_failure_backtraces plugin for empty backtraces for ValidationFailed and HookFailed exceptions, much faster on JRuby (jeremyevans)
* Add Dataset#json_serializer_opts to json_serializer plugin, allowing to set json_serializer options on a per-dataset basis (jeremyevans)

=== 5.28.0 (2020-01-01)

* Warn when calling Sequel::JDBC::Postgres::Dataset#with_fetch_size (jeremyevans) (#1665)
* Add exclude_or_null extension, for filtering datasets where the condition is false or NULL (jeremyevans)
* Add any_not_empty extension, for making Dataset#any? without a block mean !empty? (jeremyevans)

=== 5.27.0 (2019-12-01)

* Add Sequel::DEFAULT for a DEFAULT expression, useful for assigning to default values (jeremyevans)
* Make Postgres::ArrayOp#join in pg_array_ops extension work correctly on PostgreSQL <9.1 (jeremyevans)
* Make pg_enum extension work correctly on PostgreSQL 8.3-9.0 (jeremyevans)
* Emulate FILTER clause for aggregate functions using CASE on databases not supporting it directly (jeremyevans)
* Support ordering by NULLS FIRST/NULLS LAST without emulation on SQLite 3.30+ (jeremyevans)

=== 5.26.0 (2019-11-01)

* Recognize two additional foreign key constraint violation codes on MySQL 8.0.13+ (rianmcguire) (#1657)
* Support table aliases for single-table INSERT statements on PostgreSQL 9.5+ (jeremyevans) (#1656)
* Implement Sequel::Postgres::PGRange#hash so instances work correctly in hashes (jeremyevans) (#1648)
* Make dirty plugin work correctly with typecast_on_load plugin (jeremyevans) (#1647)
* Add support for :require_modification option when setting up nested_attributes (jeremyevans)
* Add support for SQL/JSON path expressions to the pg_json_ops extension, supported by PostgreSQL 12+ (jeremyevans)

=== 5.25.0 (2019-10-01)

* Fix Sequel::SQL::NumericMethods#coerce to not raise NoMethodError if super method is not defined (jeremyevans) (#1645)
* Allow setting a default for a column that already has a default on Microsoft SQL Server (jeremyevans)
* Fix keyword argument separation warnings on Ruby master branch in csv_serializer plugin (jeremyevans)
* Add association_multi_add_remove plugin for adding/removing multiple associated objects in a single method call (AlexWayfer, jeremyevans) (#1641, #1643)
* Make sharding plugin integrate with server_block extension (jeremyevans)

=== 5.24.0 (2019-09-01)

* Add Database#skip_logging? private method designed for extensions to force query timing even if no logger is present (adam12) (#1640)
* Allow a hostname specified in a defaults_file in the mysql2 adapter, by not explicitly setting :host (sapio-bdeamer) (#1638)
* Convert all database array types to Ruby arrays in the jdbc adapter (jeremyevans)
* Add static_cache_cache plugin for caching rows for static_cache models to a file to avoid database queries during model initialization (jeremyevans)
* Add :cache_file plugin option to pg_auto_constraint_validations plugin, for caching metadata to a file for faster initialization (jeremyevans)
* Support :unique_deferrable and :primary_key_deferrable column options (jeremyevans)
* Support :generated_always_as column option on PostgreSQL 12+ (jeremyevans)

=== 5.23.0 (2019-08-01)

* Work around a bug on jdbc-sqlite3 3.27.2.1 when parsing schema for tables with columns with default values (jeremyevans)
* Work around a bug in jdbc-sqlite3 3.27.2.1 in Database#foreign_key_list in the jdbc/sqlite3 adapter (jeremyevans)
* Make Dataset#execute* private methods respect explicit servers option, fixing Dataset#paged_each in the postgres adapter when sharding (jeremyevans) (#1632)
* Handle instances of subclasses of core classes when wrapping objects in the pg_json extension (jeremyevans) (#1631)
* Support :ansi Database option in the tinytds adapter (kenaniah) (#1629)
* Support cross-database and linked servers when parsing schema on Microsoft SQL Server (kenaniah) (#1629)
* Add insert_conflict plugin for automatically handling unique constraint conflicts when saving new model instances on PostgreSQL 9.5+ and SQLite 3.24.0+ (jeremyevans)
* Avoid errors when parsing schema in the mock sqlite adapter (jeremyevans)
* Avoid possible thread-safety issue in the timezones support (jeremyevans)
* Handle offsets when typecasting an array or hash to datetime when Sequel.datetime_class = Time (jeremyevans)
* Support Sequel.datetime_class = Time when using the named_timezones extension (jeremyevans)

=== 5.22.0 (2019-07-01)

* Fix Dataset#multi_insert and #import with return: :primary_key on MSSQL when the dataset has a row_proc (jeremyevans) (#1627)
* Support Dataset#with :materialized option on PostgreSQL 12 for [NOT] MATERIALIZED (jeremyevans)
* Make Database#primary_key_sequence work on tables without serial sequences on PostgreSQL 12 (jeremyevans)
* Support ruby 2.7+ startless ranges in the pg_range extension (jeremyevans)
* Support ruby 2.7+ startless, endless ranges in filters, using an always true condition for them (jeremyevans)
* Support ruby 2.7+ startless ranges in filters, using just a <= or < operator for them (jeremyevans)

=== 5.21.0 (2019-06-01)

* Recognize additional DatabaseLockTimeout errors in mysql and mysql2 adapters (jeremyevans)
* Disallow eager_graph of ancestors and descendants associations when using the rcte_tree plugin (jeremyevans)
* Make jdbc/mysql adapter work when using JRuby with Java 11 (jeremyevans)
* Support window function options :window, :exclude, and :frame :type=>:groups, :start, and :end on SQLite 3.28.0+ (jeremyevans)
* Make the server_block extension respect the :servers_hash Database option (jeremyevans)
* Typecast string input for json/jsonb types as JSON strings instead of parsing as JSON in the pg_json extension when Database#typecast_json_strings is set to true (jeremyevans)
* Wrap JSON primitives (string, number, true, false, nil) in the pg_json extension when Database#wrap_json_primitives is set to true (jeremyevans)
* Convert the Database :timeout option to an integer in the sqlite adapter (jeremyevans) (#1620)
* Improve performance in ado adapter using more efficient inner loop (jeremyevans)
* Improve performance in ado adapter using faster callables for type conversion (jeremyevans)
* Fix handling of decimal values in the ado adapter when using locales where the decimal separator is , and not . (jeremyevans) (#1619)

=== 5.20.0 (2019-05-01)

* Fix reversing of alter_table add_foreign_key when :type option is used (jeremyevans) (#1615)
* Switch from using instance_exec to define_method for model associations and in some plugins (jeremyevans)
* Fix Database#server_version when using mysql2 adapter with mysql driver on MariaDB 10+ database (v-kolesnikov) (#1614)
* Make one_to_one setter method handle models that use joined datasets (jeremyevans) (#1612)
* Make auto_validations plugin work with the class_table_inheritance plugin (jeremyevans) (#1611)
* Avoid use of instance_exec for PlaceholderLiteralString#with_dataset (jeremyevans)
* Recognize float unsigned database types as float (keeguon, jeremyevans) (#1609)
* Support :savepoint options to Database#{after_commit,after_rollback} for making the hooks handle savepoints (jeremyevans)
* Avoid use of instance_exec in association_dependencies plugin (jeremyevans)
* Add pg_auto_constraint_validation_override to the pg_auto_constraint_validations plugin, for customizing columns and error message per constraint (jeremyevans)
* Make Database#check_constraints on PostgreSQL also include constraints where the related columns are not known (jeremyevans)

=== 5.19.0 (2019-04-02)

* Use more optimized approach to merging hashes in ruby 2.5+ (jeremyevans)
* Use SQLite extended result codes when using ruby-sqlite3 1.4.0+ (jeremyevans)
* Recognize additional SQLite extended result codes in the shared sqlite adapter (jeremyevans)
* Add Database#rename_enum_value to the pg_enum extension (AlexWayfer) (#1603)
* Make Database#drop_table delete constraint validations metadata for that table if using the constraint_validations extension (jeremyevans)
* Speed up row fetching in the sqlite adapter (jeremyevans)
* Speed up row fetching and type conversion in the sqlanywhere adapter (jeremyevans)

=== 5.18.0 (2019-03-01)

* Use singleton .call methods on plain objects instead of procs/methods for faster type conversion (jeremyevans)
* Add Sequel::SQL::Blob.call to avoid indirection when converting values from the database (jeremyevans)
* Use while instead of each for inner loops in sqlite and jdbc adapters for better performance (jeremyevans)
* Make after_initialize plugin not make the argument to Model.call optional (jeremyevans)
* Allow Dataset#paged_each to be called without a block in the postgres and mysql2 adapters (jeremyevans)
* Remove flow-control exceptions in connection_expiration and connection_validator extensions (jeremyevans)
* Add throw_failures plugin for throwing ValidationFailed and HookFailed exceptions instead of raising them, up to 10x performance increase on JRuby (jeremyevans)
* Support tzinfo 2 in addition to tzinfo 1 in the named_timezones extension (jeremyevans) (#1596)

=== 5.17.0 (2019-02-01)

* Support skip_auto_validations instance method in auto_validations plugin (oldgreen, jeremyevans) (#1592)
* Support :preconnect_extensions Database option for loading extensions before :preconnect option (jeremyevans)
* Avoid usage of Proc.new with implicit block as ruby 2.7+ deprecates this behavior (jeremyevans)
* Allow Sequel[].as to be used for constructing aliases with eager_graph (e.g. Model.eager_graph(Sequel[:a].as(:b))) (jeremyevans) (#1588)
=== 5.16.0 (2019-01-02)

* Convert integer columns to bigint columns when copying SQLite databases to other databases using bin/sequel -C (jeremyevans) (#1584)
* Use nicer error messages for missing or empty migration directories (Lavode) (#1585)
* Make alter table emulation work correctly in SQLite 3.26.0+ (jeremyevans) (#1582)
* Do not unset new one_to_one associated objects' reciprocal associations before saving associated objects in the nested_attributes plugin (jeremyevans)
* Do not validate new one_to_one associated objects twice when saving in the nested_attributes plugin (jeremyevans)
* Fix :qualify_tables option to class_table_inheritance plugin to work correctly with subclasses of subclasses (benalavi) (#1581)
* Make class_table_inheritance plugin use the schema cache instead of sending a query to get columns for tables (kenaniah) (#1580)
* Remove loading of mysqlplus in the mysql adapter (jeremyevans)
* Make mysql adapter work correctly on ruby 2.6+ (jeremyevans)
* Add Database#rollback_on_exit to rollback transactions instead of committing them when exiting the transaction block (jeremyevans)
* Enable window functions in SQLite 3.26.0+ (jeremyevans)
* Do not override existing methods when creating Sequel::Model attribute getter/setter methods (jeremyevans) (#1578)
* Use parentheses for expressions being subscripted (e.g. (array_agg(column))[1]) (jeremyevans)

=== 5.15.0 (2018-12-01)

* Add :conn_str option in the postgres adapter for PostgreSQL connection strings, if the pg driver is used (graywolf) (#1572)
* Add :qualify_tables option to class_table_inheritance plugin to automatically qualify subclass tables with superclass qualifier (benalavi) (#1571)
* Access already allocated connections in a thread safe manner when checking out connections in the sharded threaded connection pool (jeremyevans)
* Automatically support datasets using qualified tables in the class_table_inheritance plugin without having to use the :alias option (benalavi) (#1565)
* Support rename_column without emulation on SQLite 3.25+ (jeremyevans)
* Do not remove currently cached many_to_one associated objects when changing the related foreign key value from nil to non-nil (jeremyevans)
* Do not validate new *_to_many associated objects twice when saving in the nested_attributes plugin (jeremyevans)
* Add Model#skip_validation_on_next_save! for skipping validation on next save call (jeremyevans)

=== 5.14.0 (2018-11-01)

* Drop defaulting the :port option to 5432 in the postgres adapter, so that setting the :service option in :driver_options works (jeremyevans) (#1558)
* Do not cache values for columns without parseable defaults when using :cache option in defaults_setter plugin (jeremyevans)
* Emulate NULLS FIRST/LAST ordering on databases that do not natively support it (jeremyevans)
* Do not modify boolean expressions created from string or array if string or array is modified (jeremyevans)
* Make roots and roots_dataset dataset methods instead of class methods in the tree plugin (JelF) (#1554)
* Do not cache dataset SQL if dataset uses subquery that cannot cache SQL (jeremyevans)
* Make Model#=== work correctly for models with composite primary keys (jeremyevans)
* Add Model#pk_equal? as a more descriptive name for Model#=== (AlexWayfer) (#1550)
* Do not push down expression inversion in cases where it may result in incorrect behavior (e.g. ANY/SOME/ALL operators) (jeremyevans) (#1549)
=== 5.13.0 (2018-10-01)

* Support :single_value type in prepared statements (rintaun) (#1547)
* Make Model.all in static_cache plugin accept a block (AlexWayfer, jeremyevans) (#1543)
* Add constant_sql_override extension for overriding SQL used for constants such as CURRENT_TIMESTAMP (celsworth) (#1538)
* Do not cache from_self datasets if options are given (jeremyevans)

=== 5.12.0 (2018-08-31)

* Make constraint_validations extension respect Database#constraint_validations_table setting (jeremyevans)
* Make Sequel.extension load files from gems (jeremyevans)
* Map clob prepared statement argument type to OCI8::CLOB in the oracle adapter (pipistrellka) (#1534)
* Make Model.load_cache public in the static_cache plugin (AlexWayfer) (#1533)
* Enable support for NOWAIT on MariaDB 10.3+ (jeremyevans)
* Enable support for INTERSECT and EXCEPT on MariaDB 10.3+ (jeremyevans)
* Make tactical_eager_loading plugin handle automatic eager loading for associated objects created by eager_graph (jeremyevans)
* Cache eager_graph loader to speed up subsequent loads from the same dataset (jeremyevans)
* Add caller_logging database extension to log callers before queries, useful during development (jeremyevans)
* Add Database#call_procedure in the postgres adapter for calling PostgreSQL 11+ procedures (jeremyevans)
* Add eager_graph_eager plugin for chaining eager association loads after eager_graph association loads (jeremyevans)
* Support using Dataset#eager_graph in eager load callback for associations using join tables (jeremyevans)
* Make Dataset#graph handle existing selections without determinable aliases by forcing a subselect (jeremyevans)
* Freeze prepared statement arguments before returning the prepared statement (jeremyevans)
* Refactor emulated prepared statement internals to use a placeholder literalizer (jeremyevans)

=== 5.11.0 (2018-08-01)

* Fix using the jdbc/sqlserver adapter on JRuby 9.2+ (jeremyevans)
* Fix dumping schema for numeric/decimal columns with default values, broken starting in 5.9.0 (jeremyevans)
* Recognize additional check constraint violations on certain versions of SQLite (jeremyevans)
* Use cached model instances for Model.first calls without an argument or with a single integer argument in the static_cache plugin (AlexWayfer) (#1529)
* Support ON CONFLICT clause for INSERT on SQLite 3.24+ (jeremyevans)
* Support Dataset#window for WINDOW clause on MySQL 8 and SQLAnywhere (jeremyevans)
* Enable window function support on SQLAnywhere (jeremyevans)
* Support using a hash as a window function :frame option value, with support for ROWS/RANGE/GROUPS, numeric offsets, and EXCLUDE (jeremyevans)
* Allow using set_column_default with a nil value to remove the default value for a column on MySQL when the column is NOT NULL (jeremyevans)

=== 5.10.0 (2018-07-01)

* Use input type casts when using the postgres adapter with pg 0.18+ to reduce string allocations for some primitive types used as prepared statement arguments (jeremyevans)
* Assume local time if database timezone not specified when handling BC timestamps on JRuby 9.2.0.0 in the pg_extended_date_support extension (jeremyevans)
* Fix parsing of timetz types in the jdbc/postgresql adapter (jeremyevans)
* Make SQLTime.parse respect SQLTime.date and Sequel.application_timezone (jeremyevans)
* Add :top as an option in the list plugin (celsworth) (#1526)
* Fix Model#{ancestors,descendants,self_and_siblings} in the tree plugin when custom parent/children association names are used (jeremyevans) (#1525)
* Treat read-only mode error as disconnect error on mysql and mysql2 adapters, for better behavior on AWS Aurora cluster (jeremyevans)
* Don't use cached placeholder literalizers in Dataset#{first,where_all,where_each,where_single_value} if argument is empty array or hash (jeremyevans)
* Support :tablespace option when adding tables, indexes, and materialized views on PostgreSQL (jeremyevans)
* Support :include option for indexes on PostgreSQL 11+ (jeremyevans)
* Allow the use of IN/NOT IN operators with set returning functions for Sequel::Model datasets (jeremyevans)
* Make many_to_pg_array associations in the pg_array_associations plugin work on PostgreSQL 11 (jeremyevans)
* Only load strscan library in pg_array extension if it is needed (jeremyevans)
* Don't remove related many_to_one associations from cache when setting column value to existing value for model instances that have not been persisted (jeremyevans) (#1521)
* Support ruby 2.6+ endless ranges in the pg_range extension (jeremyevans)
* Support ruby 2.6+ endless ranges in filters, using just a >= operator for them (jeremyevans)

=== 5.9.0 (2018-06-01)

* Support generated columns on MySQL 5.7+ and MariaDB 5.2+ (wjordan, jeremyevans) (#1517)
* Add escaped_like extension for creation of LIKE expressions with placeholders in the pattern without access to a dataset (jeremyevans)
* Modify jdbc adapter exception handling to work around ::NativeException deprecation in JRuby 9.2 (jeremyevans)
* Work around broken BC date handling in JRuby 9.2.0.0 (jeremyevans)
* Switch use of BigDecimal.new() to BigDecimal(), since the former is deprecated (jeremyevans)
* Add Sequel::VERSION_NUMBER for easier version comparisons (jeremyevans)
* Add Model.has_dataset? to determine if the model class has a dataset (AlexWayfer) (#1508)
* Support use of LIKE with ANY function on PostgreSQL by avoiding unnecessary use of ESCAPE syntax (jeremyevans)
* Disconnect connections left allocated by dead threads instead of returning the connections to the pool (jeremyevans)
* Make both threaded connection pools avoid disconnecting connections while holding the connection pool mutex (jeremyevans)
* Don't deadlock when disconnecting connections in the sharded_threaded connection pool when using connection_validator or connection_expiration extensions (jeremyevans)
* Don't modify hash argument passed in Model.nested_attributes in the nested_attributes plugin (jeremyevans)
* Avoid unnecessary hash creation in many places (jeremyevans)
* Fix duplicate objects in nested associations when eager_graphing cascaded many_to_one=>one_to_many associations (jeremyevans)

=== 5.8.0 (2018-05-01)

* Don't mark SQLAnywhere as supporting WITH in INSERT statement (jeremyevans)
* Support :search_path as a shard option on PostgreSQL (jeremyevans)
* Add Dataset#nowait for raising a Sequel::DatabaseLockTimeout when a locked row is encountered, supported on PostgreSQL, MySQL 8+, MSSQL, and Oracle (jeremyevans)
* Support Dataset#skip_locked on MySQL 8+ (jeremyevans)
* Make schema modification methods in the pg_enum extension work on a frozen Database object (jeremyevans)
* Support common table expressions and window functions on MySQL 8+ (jeremyevans)
* Ignore Dataset#explain :extended option on MySQL 5.7+, since extended output is then the MySQL default (jeremyevans)
* Work around REGEXP BINARY not working correctly on MySQL 8+ by using REGEXP_LIKE with the 'c' match_type (jeremyevans)
* Force correct column order in Database#foreign_key_list on MySQL (jeremyevans)
* Add ConnectionPool#connection_expiration_random_delay to connection_expiration extension, to avoid thundering herd if preallocating connections (hex2a, jeremyevans) (#1503)
* Emit deprecation warning in association_proxies plugin if using #filter on an association proxy, since behavior will change on ruby 2.6+ (utilum) (#1497)
* Handle multiple add_constraint calls and a set_column_null call in the same alter_table block on SQLite (jeremyevans) (#1498)
* Add Database#rename_enum to the pg_enum extension (AlexWayfer) (#1495)
* Make tactical_eager_loading plugin respect the :allow_eager association option (jeremyevans) (#1494)
* Add pg_auto_constraint_validations plugin, for automatically converting constraint violations to validation failures on PostgreSQL (jeremyevans)
* Don't make Model#_valid? public in the error_splitter plugin (jeremyevans)
* Support Database#indexes :include_partial option on PostgreSQL for including partial indexes (jeremyevans)
* Include more diagnostic information in Database#error_info on PostgreSQL (jeremyevans)
* Support Database#foreign_key_list :reverse option on PostgreSQL for parsing foreign key constraints that reference a given table (jeremyevans)
* Add Database#check_constraints on PostgreSQL for parsing CHECK constraints (jeremyevans)
* Don't use identity columns if :serial=>true or :type=>:serial|:bigserial column options are used (#1490) (jeremyevans)
* Cache Dataset#select_all datasets if no arguments are given (jeremyevans)
* Cache Dataset#returning datasets if no arguments are given (jeremyevans)
* Cache Dataset#qualify datasets if no argument is given (jeremyevans)
* Cache Dataset#lateral datasets (jeremyevans)
* Cache Dataset#from_self datasets if no options are given (jeremyevans)
* Cache Dataset#distinct datasets if no arguments or block is given (jeremyevans)

=== 5.7.0 (2018-04-01)

* Add Sequel.start_timer and .elapsed_seconds_since for more accurate elapsed time calculations on ruby 2.1+ (jeremyevans)
* Run Dataset#with_sql_{all,each,first,single_value} using a cached dataset to avoid clobbering the dataset's columns (jeremyevans)
* Add Database#convert_serial_to_identity on PostgreSQL 10.2+, which requires superuser access (jeremyevans)
* Fix Database#server_version when connecting to PostgreSQL 10.1+ in certain cases (jeremyevans)
* Free temporary clobs in the jdbc/oracle adapter to prevent a memory leak (jeremyevans) (#1482)
* Treat prepared statement errors due to changing types as disconnect errors in the postgres adapter (jeremyevans) (#1481)
* Add integer64 extension for treating Integer as a 64-bit integer when used as a generic type (jeremyevans)
* Make many_to_pg_array remove_all_* methods cast appropriately, to work correctly for non-integer types (jeremyevans)
* Fix array_type for pg_array_to_many and many_to_pg_array associations in pg_array_associations plugin (jeremyevans)
* Use identity columns instead of serial columns for primary keys on PostgreSQL 10.2+ (jeremyevans)
* Support :identity option when creating columns on PostgreSQL 10+ to create identity columns (jeremyevans)
* Add Dataset#overriding_{system,user}_value on PostgreSQL for use with PostgreSQL 10+ identity columns (jeremyevans)
* Set :auto_increment schema entry correctly for PostgreSQL 10+ identity columns (jeremyevans)

=== 5.6.0 (2018-03-01)

* Dedup :db_type strings in schema hashes on Ruby 2.5+ (jeremyevans)
* Make schema_caching extension work with :callable_default schema values (jeremyevans)
* Freeze string values in hashes returned by Database#schema when using the schema_caching extension (jeremyevans)
* Protect migration file loading with a mutex to not break when multiple threads load migration files simultaneously (jeremyevans)
* Respect identifier mangling rules when renaming columns on Microsoft SQL Server (jeremyevans)

=== 5.5.0 (2018-01-31)

* Make Database#copy_table in the postgres adapter handle errors that occur while processing rows (jeremyevans) (#1470)
* Cache results of changed_columns method in local variables in many places for better performance (jeremyevans)
* Make modification_detection plugin not break column change detection for new objects (jeremyevans) (#1468)
* Make pg_range extension set :ruby_default schema value for recognized range defaults (jeremyevans)
* Make pg_interval extension set :ruby_default schema value for recognized interval defaults (jeremyevans)
* Make pg_json extension set :callable_default schema value for empty json/jsonb array/hash defaults (jeremyevans)
* Make pg_inet extension set :ruby_default schema value for recognized inet/cidr defaults (jeremyevans)
* Make pg_hstore extension set :callable_default schema value for empty hstore defaults (jeremyevans)
* Make pg_array extension set :callable_default schema value for recognized empty array defaults (jeremyevans) (#1466)
* Make defaults_setter plugin prefer :callable_default db_schema values over :ruby_default db_schema values (jeremyevans)
* Add defaults_setter plugin :cache option for caching default values returned (jeremyevans)
* Freeze string values in hashes returned by Database#schema (jeremyevans)

=== 5.4.0 (2018-01-04)

* Enable fractional seconds in timestamps on DB2 (jeremyevans) (#1463)
* Don't attempt to insert a second time if insert_select runs a query that doesn't return results, which can happen when triggers are used (jeremyevans)
* Make Dataset#insert_select on PostgreSQL and MSSQL return false instead of nil if the INSERT query is sent to the database but returns no rows (jeremyevans)
* Add index_caching extension for caching calls to Database#indexes (kenaniah, jeremyevans) (#1461)
* Allow Database#indexes on SQLite, MSSQL, SQLAnywhere, and DB2 to handle SQL::Identifier values (jeremyevans)
* Add pg_timestamptz extension for using timestamptz (timestamp with time zone) as the default timestamp type (jeremyevans)
* Support Sequel.date_{add,sub} :cast option for setting cast type in date_arithmetic extension (jeremyevans)
* Optimize Database#synchronize implementation on ruby 2.5+ (jeremyevans)
* Add class_table_inheritance plugin :ignore_subclass_columns option (brianphillips) (#1459)
* Make Dataset#to_xml in xml_serializer work with eager_graphed datasets (jeremyevans)
* Make Dataset#to_json in json_serializer work with eager_graphed datasets (jeremyevans)
* Cache Dataset#nullify dataset in the null_dataset extension (chanks) (#1456)
* Add datetime_parse_to_time extension, for parsing timestamp strings without offsets using DateTime.parse.to_time (jeremyevans) (#1455)
* Add WHERE NULL filter for Dataset#where calls with no existing filter, no argument, and where the virtual row block returns nil (jeremyevans)

=== 5.3.0 (2017-12-01)

* Add logger to Database instance before making first connection in bin/sequel (jeremyevans)
* Drop support for PostgreSQL <8.1 in Database#indexes (jeremyevans)
* Add synchronize_sql extension, for checking out a connection around SQL generation (KJTsanaktsidis, jeremyevans) (#1451)
* Deprecate Dataset#where calls with no existing filter, no argument, and where the virtual row block returns nil (jeremyevans) (#1454)
* Add DatasetModule#reverse for simpler use of descending orders (jeremyevans)
* Support WITH clauses in subqueries on SQLite, but not in UNION/INTERSECT/EXCEPT (jeremyevans)
* Hoist WITH clauses to INSERT statement level if INSERT subquery uses a CTE on MSSQL (jeremyevans)
* Respect indislive and ignore indcheckxmin index attributes when using Database#indexes on PostgreSQL (jeremyevans)
* Explicitly disallow use of server-side prepared statements when using Dataset#call in the jdbc/postgresql adapter (jeremyevans) (#1448)
* Support common table expressions, window functions, dropping CHECK constraints, and recognizing CURRENT_DATE defaults on MariaDB 10.2+ (jeremyevans)
* Make Database#reset_primary_key_sequence work on PostgreSQL 10+ (jeremyevans)
* Support :connect_sqls Database option for easily issuing sql commands on all new connections (jeremyevans)
* Support :extensions Database option for loading extensions when initializing, useful in connection strings (jeremyevans)
* Avoid warning if trying to rollback after a commit or rollback raises an exception in the postgres adapter (jeremyevans)
* Support Date::Infinity values in the pg_extended_date_support extension (jeremyevans)

=== 5.2.0 (2017-10-27)

* Fix type conversion for smallint unsigned and integer unsigned types on jdbc/mysql (jeremyevans) (#1443)
* Add pg_extended_date_support extension, for handling infinite and BC dates/timestamps (jeremyevans)
* Do not ignore existing @dataset instance variable when subclassing Sequel::Model (bjmllr) (#1435)

=== 5.1.0 (2017-10-01)

* Make jdbc/h2 and jdbc/hsqldb adapters respect :foreign_key_constraint_name option when adding new foreign key column (jeremyevans)
* Do not issue unnecessary query for macaddr type oid when loading the pg_inet extension (jeltz) (#1423)
* Make alter_table add_foreign_key with a column symbol reversible when using the :foreign_key_constraint_name option (jeremyevans) (#1422)
* Do not raise an error if calling Model.freeze on a frozen model (jeremyevans) (#1421)
* Make Database#copy_into in the jdbc/postgresql adapter handle multi-byte strings (ckoenig) (#1416)
* Remove deprecated Model use_after_commit_rollback class and instance methods (jeremyevans)
* Remove deprecated Model.allowed_columns method in the base model support (jeremyevans)
* Remove deprecated Model.plugin_module_defined? private method (jeremyevans)
* Remove deprecated support for Model#_before_validation private method (jeremyevans)

=== 5.0.0 (2017-09-01)

* Make bin/sequel -M option always use base 10 (jeremyevans)
* Don't use savepoints when creating indexes inside a transaction on databases that don't support transactional schema modifications (jeremyevans) (#1407)
* Support :if_not_exists option when creating indexes on PostgreSQL 9.5+ (DyegoCosta) (#1405)
* Make threaded connection pools not block while connections are being made (jeremyevans)
* SQL::Expression#clone and #dup now return self, since all expressions should be frozen value objects (jeremyevans)
* Don't create empty arrays for unused association callbacks (jeremyevans)
* Cache association method name symbols instead of recomputing them every time (jeremyevans)
* Raise an exception if attempting to create a prepared statement using a dataset with a delayed evaluation (jeremyevans)
* Make ConnectionPool#size thread safe by using the pool mutex (jeremyevans)
* Use instance_exec instead of instance_eval when passing a block, to work with lambdas that accept no arguments (jeremyevans)
* Freeze SQL::StringAgg instances in string_agg extension (jeremyevans)
* Freeze SQL::DateAdd instances in date_arithmetic extension (jeremyevans)
* Freeze SQL::Expression.comparison_attrs (jeremyevans)
* Rename SQL::Subscript#f to #expression, keeping #f as an alias (jeremyevans)
* Require the :pool_class Database option be a class to use a custom connection pool (jeremyevans)
* Make the class_table_inheritance plugin raise an Error during update if any UPDATE query does not affect a single row (jeremyevans)
* Change most send calls to public_send unless calling private methods is expected (jeremyevans)
* Database schema and schema generator methods now return nil (jeremyevans)
* Model#validates_unique in the validation helpers plugin now defaults to only checking on new or modified values (jeremyevans)
* Deprecate Model#_before_validation (private_method), use Model#before_validation now (jeremyevans)
* Always run before/after/around validation hooks when saving, even when not validating the object (jeremyevans)
* Deprecate Model use_after_commit_rollback class and instance accessors (jeremyevans)
* Deprecate Model.allowed_columns reader (jeremyevans)
* Freeze internal constants that shouldn't be modified at runtime (jeremyevans)
* Attempt to connect to the database immediately when creating the Database instance (jeremyevans)
* Make association_pks plugin delay the setting of associated objects until the current object is saved by default (jeremyevans)
* Joined datasets used as model datasets are now automatically wrapped in a subquery (jeremyevans)
* Setting an invalid dataset for a model class now raises an exception by default (jeremyevans)
* Getting all values for newly created models now happens before calling after_create, instead of after (jeremyevans)
* Remove use of @was_new/@columns_updated instance variables when saving model objects (jeremyevans)
* Disable symbol splitting by default (jeremyevans)
* Make datasets frozen by default (jeremyevans)
* Drop support for ruby 1.8.7, minimum now is 1.9.2 (jeremyevans)
* Remove deprecated adapters, extensions, plugins, constants, and features (jeremyevans)

=== Older

See doc/CHANGELOG.old

sequel-5.63.0/CONTRIBUTING

Issue Guidelines
----------------

1) Issues should only be created for things that are definitely bugs. If you are not sure that the behavior is a bug, ask about it on GitHub Discussions or the sequel-talk Google Group. GitHub Issues should not be used as a help forum.
If you are not sure that the behavior is a bug, ask about it on GitHub Discussions or the sequel-talk Google Group. GitHub Issues should not be used as a help forum. 2) If you are sure it is a bug, then post a complete description of the issue, the simplest possible self-contained example showing the problem, the full backtrace of any exception, and for issues involving database queries, an SQL log. 3) Issues are generally closed as soon as the problem is considered fixed. However, discussion can still happen after the issue is closed, and the issue will be reopened if additional evidence is provided showing the issue still exists. Pull Request Guidelines ----------------------- 1) Try to include tests for all new features and substantial bug fixes. See the testing guide for details about testing Sequel. 2) Try to include documentation for all new features. In most cases this should include RDoc method documentation, but updates to the guides are also appropriate in some cases. 3) Follow the style conventions of the surrounding code. In most cases, this is standard ruby style. 4) Do not submit whitespace changes with code changes. Sequel is not pedantic about trailing whitespace, so if you have an editor that automatically strips trailing whitespace, you may want to turn that feature off. 5) All code in pull requests is assumed to be MIT licensed. Do not submit a pull request if that isn't the case. 6) Please do not submit pull requests for code that is not ready to be merged. Pull requests should not be used to "start a conversation" about a possible code change. If the pull request requires a conversation, that conversation should take place on GitHub Discussions or the sequel-talk Google Group. 7) Pull requests are generally closed as soon as it appears that the branch will not be merged. However, discussion about the code can still happen after the pull request is closed, and the pull request can be reopened if additional commits to the branch or other changes make it likely that it will be merged. Code of Conduct --------------- This code of conduct applies to all of the project's "collaborative space", which is defined as community communications channels, including the Google Group, GitHub project, and source code repository. 1) Participants must ensure that their language and actions are free of personal attacks and remarks disparaging to people or groups. 2) Behaviour which can be reasonably considered harassment will not be tolerated. 3) Discussion should be limited to the project and related technologies. You can report a violation of this code of conduct to the project maintainer, who will take appropriate action. sequel-5.63.0/Gemfile000066400000000000000000000000461434214120600143730ustar00rootroot00000000000000source 'https://rubygems.org' gemspec sequel-5.63.0/ISSUE_TEMPLATE.md000066400000000000000000000006001434214120600156010ustar00rootroot00000000000000Note: If you have a question about Sequel, would like help using Sequel, want to request a feature, or do anything else other than submit a bug report, please use GitHub Discussions or the sequel-talk Google Group. 
### Complete Description of Issue ### Simplest Possible Self-Contained Example Showing the Bug ### Full Backtrace of Exception (if any) ### SQL Log (if any) sequel-5.63.0/MIT-LICENSE000066400000000000000000000020741434214120600145370ustar00rootroot00000000000000Copyright (c) 2007-2008 Sharon Rosner Copyright (c) 2008-2022 Jeremy Evans Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. sequel-5.63.0/README.rdoc000066400000000000000000001033531434214120600147130ustar00rootroot00000000000000== Sequel: The Database Toolkit for Ruby Sequel is a simple, flexible, and powerful SQL database access toolkit for Ruby. * Sequel provides thread safety, connection pooling and a concise DSL for constructing SQL queries and table schemas. * Sequel includes a comprehensive ORM layer for mapping records to Ruby objects and handling associated records. * Sequel supports advanced database features such as prepared statements, bound variables, savepoints, two-phase commit, transaction isolation, primary/replica configurations, and database sharding. * Sequel currently has adapters for ADO, Amalgalite, IBM_DB, JDBC, MySQL, Mysql2, ODBC, Oracle, PostgreSQL, SQLAnywhere, SQLite3, and TinyTDS. == Resources Website :: https://sequel.jeremyevans.net RDoc Documentation :: https://sequel.jeremyevans.net/rdoc Source Code :: https://github.com/jeremyevans/sequel Bug tracking (GitHub Issues) :: https://github.com/jeremyevans/sequel/issues Discussion Forum (GitHub Discussions) :: https://github.com/jeremyevans/sequel/discussions Alternate Discussion Forum (sequel-talk Google Group) :: http://groups.google.com/group/sequel-talk If you have questions about how to use Sequel, please ask on GitHub Discussions or the sequel-talk Google Group. Only use the bug tracker to report bugs in Sequel, not to ask for help on using Sequel. To check out the source code: git clone git://github.com/jeremyevans/sequel.git === Contact If you have any comments or suggestions please post to the Google group.
== Installation gem install sequel == A Short Example require 'sequel' DB = Sequel.sqlite # memory database, requires sqlite3 DB.create_table :items do primary_key :id String :name Float :price end items = DB[:items] # Create a dataset # Populate the table items.insert(name: 'abc', price: rand * 100) items.insert(name: 'def', price: rand * 100) items.insert(name: 'ghi', price: rand * 100) # Print out the number of records puts "Item count: #{items.count}" # Print out the average price puts "The average price is: #{items.avg(:price)}" == The Sequel Console Sequel includes an IRB console for quick access to databases (usually referred to as bin/sequel). You can use it like this: sequel sqlite://test.db # test.db in current directory You get an IRB session with the Sequel::Database object stored in DB. In addition to providing an IRB shell (the default behavior), bin/sequel also has support for migrating databases, dumping schema migrations, and copying databases. See the {bin/sequel guide}[rdoc-ref:doc/bin_sequel.rdoc] for more details. == An Introduction Sequel is designed to take the hassle away from connecting to databases and manipulating them. Sequel deals with all the boring stuff like maintaining connections, formatting SQL correctly and fetching records so you can concentrate on your application. Sequel uses the concept of datasets to retrieve data. A Dataset object encapsulates an SQL query and supports chainability, letting you fetch data using a convenient Ruby DSL that is both concise and flexible. For example, the following one-liner returns the average GDP for countries in the middle east region: DB[:countries].where(region: 'Middle East').avg(:GDP) Which is equivalent to: SELECT avg(GDP) FROM countries WHERE region = 'Middle East' Since datasets retrieve records only when needed, they can be stored and later reused. Records are fetched as hashes, and are accessed using an +Enumerable+ interface: middle_east = DB[:countries].where(region: 'Middle East') middle_east.order(:name).each{|r| puts r[:name]} Sequel also offers convenience methods for extracting data from Datasets, such as an extended +map+ method: middle_east.map(:name) # => ['Egypt', 'Turkey', 'Israel', ...] middle_east.map([:id, :name]) # => [[1, 'Egypt'], [3, 'Turkey'], [2, 'Israel'], ...] 
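If you want the database itself to do the extraction, the +select_map+ method returns a similar array while only selecting the given column (a brief sketch; the result values shown here are hypothetical):

  middle_east.select_map(:name)
  # SELECT name FROM countries WHERE (region = 'Middle East')
  # => ['Egypt', 'Turkey', 'Israel', ...]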
Or getting results as a hash via +as_hash+, with one column as key and another as value: middle_east.as_hash(:name, :area) # => {'Israel' => 20000, 'Turkey' => 120000, ...} == Getting Started === Connecting to a database To connect to a database you simply provide Sequel.connect with a URL: require 'sequel' DB = Sequel.connect('sqlite://blog.db') # requires sqlite3 The connection URL can also include such stuff as the user name, password, and port: DB = Sequel.connect('postgres://user:password@host:port/database_name') # requires pg You can also specify optional parameters, such as the connection pool size, or loggers for logging SQL queries: DB = Sequel.connect("postgres://user:password@host:port/database_name", max_connections: 10, logger: Logger.new('log/db.log')) It is also possible to use a hash instead of a connection URL, but make sure to include the :adapter option in this case: DB = Sequel.connect(adapter: :postgres, user: 'user', password: 'password', host: 'host', port: port, database: 'database_name', max_connections: 10, logger: Logger.new('log/db.log')) You can specify a block to connect, which will disconnect from the database after it completes: Sequel.connect('postgres://user:password@host:port/database_name'){|db| db[:posts].delete} === The DB convention Throughout Sequel's documentation, you will see the +DB+ constant used to refer to the Sequel::Database instance you create. This reflects the recommendation that for an app with a single Sequel::Database instance, the Sequel convention is to store the instance in the +DB+ constant. This is just a convention, it's not required, but it is recommended. Note that some frameworks that use Sequel may create the Sequel::Database instance for you, and you might not know how to access it. In most cases, you can access the Sequel::Database instance through Sequel::Model.db. === Arbitrary SQL queries You can execute arbitrary SQL code using Database#run: DB.run("create table t (a text, b text)") DB.run("insert into t values ('a', 'b')") You can also create datasets based on raw SQL: dataset = DB['select id from items'] dataset.count # will return the number of records in the result set dataset.map(:id) # will return an array containing all values of the id column in the result set You can also fetch records with raw SQL through the dataset: DB['select * from items'].each do |row| p row end You can use placeholders in your SQL string as well: name = 'Jim' DB['select * from items where name = ?', name].each do |row| p row end === Getting Dataset Instances Datasets are the primary way records are retrieved and manipulated. They are generally created via the Database#from or Database#[] methods: posts = DB.from(:posts) posts = DB[:posts] # same Datasets will only fetch records when you tell them to. They can be manipulated to filter records, change ordering, join tables, etc. Datasets are always frozen, and they are safe to use by multiple threads concurrently. === Retrieving Records You can retrieve all records by using the +all+ method: posts.all # SELECT * FROM posts The +all+ method returns an array of hashes, where each hash corresponds to a record. 
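For example, with a couple of rows in the table, the result might look like this (the row values are hypothetical, shown only to illustrate the shape of the return value):

  posts.all
  # SELECT * FROM posts
  # => [{:id=>1, :title=>'hello world'}, {:id=>2, :title=>'second post'}]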
You can also iterate through records one at a time using +each+: posts.each{|row| p row} Or perform more advanced stuff: names_and_dates = posts.map([:name, :date]) old_posts, recent_posts = posts.partition{|r| r[:date] < Date.today - 7} You can also retrieve the first record in a dataset: posts.order(:id).first # SELECT * FROM posts ORDER BY id LIMIT 1 Note that you can get the first record in a dataset even if it isn't ordered: posts.first # SELECT * FROM posts LIMIT 1 If the dataset is ordered, you can also ask for the last record: posts.order(:stamp).last # SELECT * FROM posts ORDER BY stamp DESC LIMIT 1 You can also provide a filter when asking for a single record: posts.first(id: 1) # SELECT * FROM posts WHERE id = 1 LIMIT 1 Or retrieve a single value for a specific record: posts.where(id: 1).get(:name) # SELECT name FROM posts WHERE id = 1 LIMIT 1 === Filtering Records The most common way to filter records is to provide a hash of values to match to +where+: my_posts = posts.where(category: 'ruby', author: 'david') # WHERE ((category = 'ruby') AND (author = 'david')) You can also specify ranges: my_posts = posts.where(stamp: (Date.today - 14)..(Date.today - 7)) # WHERE ((stamp >= '2010-06-30') AND (stamp <= '2010-07-07')) Or arrays of values: my_posts = posts.where(category: ['ruby', 'postgres', 'linux']) # WHERE (category IN ('ruby', 'postgres', 'linux')) By passing a block to where, you can use expressions (this is fairly "magical"): my_posts = posts.where{stamp > Date.today << 1} # WHERE (stamp > '2010-06-14') my_posts = posts.where{stamp =~ Date.today} # WHERE (stamp = '2010-07-14') If you want to wrap the objects yourself, you can use expressions without the "magic": my_posts = posts.where(Sequel[:stamp] > Date.today << 1) # WHERE (stamp > '2010-06-14') my_posts = posts.where(Sequel[:stamp] =~ Date.today) # WHERE (stamp = '2010-07-14') Some databases such as PostgreSQL and MySQL also support filtering via Regexps: my_posts = posts.where(category: /ruby/i) # WHERE (category ~* 'ruby') You can also use an inverse filter via +exclude+: my_posts = posts.exclude(category: ['ruby', 'postgres', 'linux']) # WHERE (category NOT IN ('ruby', 'postgres', 'linux')) But note that this does a full inversion of the filter: my_posts = posts.exclude(category: ['ruby', 'postgres', 'linux'], id: 1) # WHERE ((category NOT IN ('ruby', 'postgres', 'linux')) OR (id != 1)) If at any point you want to use a custom SQL fragment for part of a query, you can do so via +Sequel.lit+: posts.where(Sequel.lit('stamp IS NOT NULL')) # WHERE (stamp IS NOT NULL) You can safely interpolate parameters into the custom SQL fragment by providing them as additional arguments: author_name = 'JKR' posts.where(Sequel.lit('(stamp < ?) AND (author != ?)', Date.today - 3, author_name)) # WHERE ((stamp < '2010-07-11') AND (author != 'JKR')) Datasets can also be used as subqueries: DB[:items].where(Sequel[:price] > DB[:items].select{avg(price) + 100}) # WHERE (price > (SELECT avg(price) + 100 FROM items)) After filtering, you can retrieve the matching records by using any of the retrieval methods: my_posts.each{|row| p row} See the {Dataset Filtering}[rdoc-ref:doc/dataset_filtering.rdoc] file for more details. === Security Designing apps with security in mind is a best practice. Please read the {Security Guide}[rdoc-ref:doc/security.rdoc] for details on security issues that you should be aware of when using Sequel. 
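One common rule from that guide is worth illustrating here: never interpolate untrusted input directly into a literal SQL fragment, since that allows SQL injection; pass it as a placeholder value instead (+user_input+ below is a hypothetical variable holding untrusted data):

  # Unsafe: the input becomes part of the SQL itself
  posts.where(Sequel.lit("title = '#{user_input}'"))

  # Safe: the input is passed as a placeholder value
  posts.where(Sequel.lit('title = ?', user_input))

The hash and block forms of +where+ shown above avoid the issue entirely, since Sequel literalizes those values for you.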
=== Summarizing Records Counting records is easy using +count+: posts.where(Sequel.like(:category, '%ruby%')).count # SELECT COUNT(*) FROM posts WHERE (category LIKE '%ruby%' ESCAPE '\') And you can also query maximum/minimum values via +max+ and +min+: max = DB[:history].max(:value) # SELECT max(value) FROM history min = DB[:history].min(:value) # SELECT min(value) FROM history Or calculate a sum or average via +sum+ and +avg+: sum = DB[:items].sum(:price) # SELECT sum(price) FROM items avg = DB[:items].avg(:price) # SELECT avg(price) FROM items === Ordering Records Ordering datasets is simple using +order+: posts.order(:stamp) # ORDER BY stamp posts.order(:stamp, :name) # ORDER BY stamp, name +order+ always overrides the existing order: posts.order(:stamp).order(:name) # ORDER BY name If you would like to add to the existing order, use +order_append+ or +order_prepend+: posts.order(:stamp).order_append(:name) # ORDER BY stamp, name posts.order(:stamp).order_prepend(:name) # ORDER BY name, stamp You can also specify descending order: posts.reverse_order(:stamp) # ORDER BY stamp DESC posts.order(Sequel.desc(:stamp)) # ORDER BY stamp DESC === Core Extensions Note the use of Sequel.desc(:stamp) in the above example. Much of Sequel's DSL uses this style, calling methods on the Sequel module that return SQL expression objects. Sequel also ships with a {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc] that integrates Sequel's DSL better into the Ruby language, allowing you to write: :stamp.desc instead of: Sequel.desc(:stamp) === Selecting Columns Selecting specific columns to be returned is also simple using +select+: posts.select(:stamp) # SELECT stamp FROM posts posts.select(:stamp, :name) # SELECT stamp, name FROM posts Like +order+, +select+ overrides an existing selection: posts.select(:stamp).select(:name) # SELECT name FROM posts As you might expect, there is an +order_append+ equivalent for +select+ called +select_append+: posts.select(:stamp).select_append(:name) # SELECT stamp, name FROM posts === Deleting Records Deleting records from the table is done with +delete+: posts.where(Sequel[:stamp] < Date.today - 3).delete # DELETE FROM posts WHERE (stamp < '2010-07-11') Be very careful when deleting, as +delete+ affects all rows in the dataset. Call +where+ first and +delete+ second: # DO THIS: posts.where(Sequel[:stamp] < Date.today - 7).delete # NOT THIS: posts.delete.where(Sequel[:stamp] < Date.today - 7) === Inserting Records Inserting records into the table is done with +insert+: posts.insert(category: 'ruby', author: 'david') # INSERT INTO posts (category, author) VALUES ('ruby', 'david') === Updating Records Updating records in the table is done with +update+: posts.where(Sequel[:stamp] < Date.today - 7).update(state: 'archived') # UPDATE posts SET state = 'archived' WHERE (stamp < '2010-07-07') You can provide arbitrary expressions when choosing what values to set: posts.where(Sequel[:stamp] < Date.today - 7).update(backup_number: Sequel[:backup_number] + 1) # UPDATE posts SET backup_number = (backup_number + 1) WHERE (stamp < '2010-07-07') As with +delete+, +update+ affects all rows in the dataset, so +where+ first, +update+ second: # DO THIS: posts.where(Sequel[:stamp] < Date.today - 7).update(state: 'archived') # NOT THIS: posts.update(state: 'archived').where(Sequel[:stamp] < Date.today - 7) === Merging records Merging records using the SQL MERGE statement is done using merge* methods. You use +merge_using+ to specify the merge source and join conditions.
You can use +merge_insert+, +merge_delete+, and/or +merge_update+ to set the INSERT, DELETE, and UPDATE clauses for the merge. +merge_insert+ takes the same arguments as +insert+, and +merge_update+ takes the same arguments as +update+. +merge_insert+, +merge_delete+, and +merge_update+ can all be called with blocks, to set the conditions for the related INSERT, DELETE, or UPDATE. Finally, after calling all of the other merge_* methods, you call +merge+ to run the MERGE statement on the database. ds = DB[:m1]. merge_using(:m2, i1: :i2). merge_insert(i1: :i2, a: Sequel[:b]+11). merge_delete{a > 30}. merge_update(i1: Sequel[:i1]+:i2+10, a: Sequel[:a]+:b+20) ds.merge # MERGE INTO m1 USING m2 ON (i1 = i2) # WHEN NOT MATCHED THEN INSERT (i1, a) VALUES (i2, (b + 11)) # WHEN MATCHED AND (a > 30) THEN DELETE # WHEN MATCHED THEN UPDATE SET i1 = (i1 + i2 + 10), a = (a + b + 20) === Transactions You can wrap a block of code in a database transaction using the Database#transaction method: DB.transaction do # BEGIN posts.insert(category: 'ruby', author: 'david') # INSERT posts.where(Sequel[:stamp] < Date.today - 7).update(state: 'archived') # UPDATE end # COMMIT If the block does not raise an exception, the transaction will be committed. If the block does raise an exception, the transaction will be rolled back, and the exception will be reraised. If you want to roll back the transaction and not raise an exception outside the block, you can raise the Sequel::Rollback exception inside the block: DB.transaction do # BEGIN posts.insert(category: 'ruby', author: 'david') # INSERT if posts.where('stamp < ?', Date.today - 7).update(state: 'archived') == 0 # UPDATE raise Sequel::Rollback end end # ROLLBACK === Joining Tables Sequel makes it easy to join tables: order_items = DB[:items].join(:order_items, item_id: :id).where(order_id: 1234) # SELECT * FROM items # INNER JOIN order_items ON (order_items.item_id = items.id) # WHERE (order_id = 1234) The important thing to note here is that item_id is automatically qualified with the table being joined, and id is automatically qualified with the last table joined. You can then do anything you like with the dataset: order_total = order_items.sum(:price) # SELECT sum(price) FROM items # INNER JOIN order_items ON (order_items.item_id = items.id) # WHERE (order_id = 1234) Note that the default selection in Sequel is *, which includes all columns in all joined tables. Because Sequel returns results as a hash keyed by column name symbols, if any tables have columns with the same name, this will clobber the columns in the returned hash. So when joining you are usually going to want to change the selection using +select+, +select_all+, and/or +select_append+. == Column references in Sequel Sequel expects column names to be specified using symbols. In addition, returned hashes always use symbols as their keys. This allows you to freely mix literal values and column references in many cases. For example, the two following lines produce equivalent SQL: items.where(x: 1) # SELECT * FROM items WHERE (x = 1) items.where(1 => :x) # SELECT * FROM items WHERE (1 = x) Ruby strings are generally treated as SQL strings: items.where(x: 'x') # SELECT * FROM items WHERE (x = 'x') === Qualifying identifiers (column/table names) An identifier in SQL is a name that represents a column, table, or schema.
The recommended way to qualify columns is to use Sequel[][] or +Sequel.qualify+ Sequel[:table][:column] Sequel.qualify(:table, :column) # table.column You can also qualify tables with schemas: Sequel[:schema][:table] # schema.table or use multi-level qualification: Sequel[:schema][:table][:column] # schema.table.column === Expression aliases You can alias identifiers using Sequel[].as or +Sequel.as+: Sequel[:column].as(:alias) Sequel.as(:column, :alias) # column AS alias You can use the Sequel.as method to alias arbitrary expressions, not just identifiers: Sequel.as(DB[:posts].select{max(id)}, :p) # (SELECT max(id) FROM posts) AS p And most Sequel expression objects support an +as+ method for aliasing: (Sequel[:column] + 2).as(:c_plus_2) # (column + 2) AS c_plus_2 == Sequel Models A model class wraps a dataset, and an instance of that class wraps a single record in the dataset. Model classes are defined as regular Ruby classes inheriting from Sequel::Model: DB = Sequel.connect('sqlite://blog.db') class Post < Sequel::Model end When a model class is created, it parses the schema in the table from the database, and automatically sets up accessor methods for all of the columns in the table (Sequel::Model implements the active record pattern). Sequel model classes assume that the table name is an underscored plural of the class name: Post.table_name # => :posts You can explicitly set the table name or even the dataset used: class Post < Sequel::Model(:my_posts); end # or: class Post < Sequel::Model(DB[:my_posts]); end If you pass a symbol to the Sequel::Model method, it assumes you are referring to the table with the same name. You can also call it with a dataset, which will set the defaults for all retrievals for that model: class Post < Sequel::Model(DB[:my_posts].where(category: 'ruby')); end class Post < Sequel::Model(DB[:my_posts].select(:id, :name).order(:date)); end === Model instances Model instances are identified by a primary key. Sequel queries the database to determine the primary key for each model. The Model.[] method can be used to fetch records by their primary key: post = Post[123] The +pk+ method is used to retrieve the record's primary key value: post.pk # => 123 If you want to override which column(s) to use as the primary key, you can use +set_primary_key+: class Post < Sequel::Model set_primary_key [:category, :title] end post = Post['ruby', 'hello world'] post.pk # => ['ruby', 'hello world'] You can also define a model class that does not have a primary key via +no_primary_key+, but then you lose the ability to easily update and delete records: Post.no_primary_key A single model instance can also be fetched by specifying a condition: post = Post.first(title: 'hello world') post = Post.first{num_comments < 10} The dataset for a model class returns rows of model instances instead of plain hashes: DB[:posts].first.class # => Hash Post.first.class # => Post === Acts like a dataset A model class forwards many methods to the underlying dataset. 
This means that you can use most of the +Dataset+ API to create customized queries that return model instances, e.g.: Post.where(category: 'ruby').each{|post| p post} You can also manipulate the records in the dataset: Post.where{num_comments < 7}.delete Post.where(Sequel.like(:title, /ruby/)).update(category: 'ruby') === Accessing record values A model instance stores its values as a hash with column symbol keys, which you can access directly via the +values+ method: post.values # => {:id => 123, :category => 'ruby', :title => 'hello world'} You can read the record values as object attributes, assuming the attribute names are valid columns in the model's dataset: post.id # => 123 post.title # => 'hello world' If the record's attribute names are not valid columns in the model's dataset (maybe because you used +select_append+ to add a computed value column), you can use Model#[] to access the values: post[:id] # => 123 post[:title] # => 'hello world' You can also modify record values using attribute setters or the []= method. post.title = 'hey there' post[:title] = 'hey there' That will just change the value for the object; it will not update the row in the database. To update the database row, call the +save+ method: post.save === Mass assignment You can also set the values for multiple columns in a single method call, using one of the mass-assignment methods. See the {mass assignment guide}[rdoc-ref:doc/mass_assignment.rdoc] for details. For example, +set+ updates the model's column values without saving: post.set(title: 'hey there', updated_by: 'foo') and +update+ updates the model's column values and then saves the changes to the database: post.update(title: 'hey there', updated_by: 'foo') === Creating new records New model instances can be created by calling Model.new, which returns a new model instance without updating the database: post = Post.new(title: 'hello world') You can save the record to the database later by calling +save+ on the model instance: post.save If you want to create a new record and save it to the database at the same time, you can use Model.create: post = Post.create(title: 'hello world') You can also supply a block to Model.new and Model.create: post = Post.new do |p| p.title = 'hello world' end post = Post.create{|p| p.title = 'hello world'} === Hooks You can execute custom code when creating, updating, or deleting records by defining hook methods. The +before_create+ and +after_create+ hook methods wrap record creation. The +before_update+ and +after_update+ hook methods wrap record updating. The +before_save+ and +after_save+ hook methods wrap record creation and updating. The +before_destroy+ and +after_destroy+ hook methods wrap destruction. The +before_validation+ and +after_validation+ hook methods wrap validation. Example: class Post < Sequel::Model def after_create super author.increase_post_count end def after_destroy super author.decrease_post_count end end Note the use of +super+ if you define your own hook methods. Almost all Sequel::Model class and instance methods (not just hook methods) can be overridden safely, but you have to make sure to call +super+ when doing so, otherwise you risk breaking things. For the example above, you should probably use a database trigger if you can. Hooks can be used for data integrity, but they will only enforce that integrity when you are modifying the database through model instances, and even then they are often subject to race conditions.
It's best to use database triggers and database constraints to enforce data integrity. === Deleting records You can delete individual records by calling +delete+ or +destroy+. The only difference between the two methods is that +destroy+ invokes +before_destroy+ and +after_destroy+ hook methods, while +delete+ does not: post.delete # => bypasses hooks post.destroy # => runs hooks Records can also be deleted en-masse by calling delete and destroy on the model's dataset. As stated above, you can specify filters for the deleted records: Post.where(category: 32).delete # => bypasses hooks Post.where(category: 32).destroy # => runs hooks Please note that if destroy is called, each record is deleted separately, but delete deletes all matching records with a single SQL query. === Associations Associations are used in order to specify relationships between model classes that reflect relationships between tables in the database, which are usually specified using foreign keys. You specify model associations via class methods: class Post < Sequel::Model many_to_one :author one_to_many :comments one_to_one :first_comment, class: :Comment, order: :id many_to_many :tags one_through_one :first_tag, class: :Tag, order: :name, right_key: :tag_id end +many_to_one+ and +one_to_one+ create a getter and setter for each model object: post = Post.create(name: 'hi!') post.author = Author.first(name: 'Sharon') post.author +one_to_many+ and +many_to_many+ create a getter method, a method for adding an object to the association, a method for removing an object from the association, and a method for removing all associated objects from the association: post = Post.create(name: 'hi!') post.comments comment = Comment.create(text: 'hi') post.add_comment(comment) post.remove_comment(comment) post.remove_all_comments tag = Tag.create(tag: 'interesting') post.add_tag(tag) post.remove_tag(tag) post.remove_all_tags Note that the remove_* and remove_all_* methods do not delete the object from the database, they merely disassociate the associated object from the receiver. All associations add a dataset method that can be used to further filter or reorder the returned objects, or modify all of them: # Delete all of this post's comments from the database post.comments_dataset.destroy # Return all tags related to this post with no subscribers, ordered by the tag's name post.tags_dataset.where(subscribers: 0).order(:name).all === Eager Loading Associations can be eagerly loaded via +eager+ and the :eager association option. Eager loading is used when loading a group of objects. It loads all associated objects for all of the current objects in one query, instead of using a separate query to get the associated objects for each current object. Eager loading requires that you retrieve all model objects at once via +all+ (instead of individually by +each+). Eager loading can be cascaded, loading association's associated objects. class Person < Sequel::Model one_to_many :posts, eager: [:tags] end class Post < Sequel::Model many_to_one :person one_to_many :replies many_to_many :tags end class Tag < Sequel::Model many_to_many :posts many_to_many :replies end class Reply < Sequel::Model many_to_one :person many_to_one :post many_to_many :tags end # Eager loading via .eager Post.eager(:person).all # eager is a dataset method, so it works with filters/orders/limits/etc. 
Post.where{topic > 'M'}.order(:date).limit(5).eager(:person).all person = Person.first # Eager loading via :eager (will eagerly load the tags for this person's posts) person.posts # These are equivalent Post.eager(:person, :tags).all Post.eager(:person).eager(:tags).all # Cascading via .eager Tag.eager(posts: :replies).all # Will also grab all associated posts' tags (because of :eager) Reply.eager(person: :posts).all # No depth limit (other than memory/stack), and will also grab posts' tags # Loads all people, their posts, their posts' tags, replies to those posts, # the person for each reply, the tag for each reply, and all posts and # replies that have that tag. Uses a total of 8 queries. Person.eager(posts: {replies: [:person, {tags: [:posts, :replies]}]}).all In addition to using +eager+, you can also use +eager_graph+, which will use a single query to get the object and all associated objects. This may be necessary if you want to filter or order the result set based on columns in associated tables. It works with cascading as well; the API is similar. Note that using +eager_graph+ to eagerly load multiple *_to_many associations will cause the result set to be a cartesian product, so you should be very careful with your filters when using it in that case. You can dynamically customize the eagerly loaded dataset by using a proc. This proc is passed the dataset used for eager loading, and should return a modified copy of that dataset: # Eagerly load only replies containing 'foo' Post.eager(replies: proc{|ds| ds.where(Sequel.like(text, '%foo%'))}).all This also works when using +eager_graph+, in which case the proc is called with the dataset to graph into the current dataset: Post.eager_graph(replies: proc{|ds| ds.where(Sequel.like(text, '%foo%'))}).all You can dynamically customize eager loads for both +eager+ and +eager_graph+ while also cascading, by making the value a single entry hash with the proc as a key, and the cascaded associations as the value: # Eagerly load only replies containing 'foo', and the person and tags for those replies Post.eager(replies: {proc{|ds| ds.where(Sequel.like(text, '%foo%'))} => [:person, :tags]}).all === Joining with Associations You can use the +association_join+ method to add a join to the model's dataset based on the association: Post.association_join(:author) # SELECT * FROM posts # INNER JOIN authors AS author ON (author.id = posts.author_id) This comes with variants for different join types: Post.association_left_join(:replies) # SELECT * FROM posts # LEFT JOIN replies ON (replies.post_id = posts.id) Similar to the eager loading methods, you can use multiple associations and nested associations: Post.association_join(:author, replies: :person).all # SELECT * FROM posts # INNER JOIN authors AS author ON (author.id = posts.author_id) # INNER JOIN replies ON (replies.post_id = posts.id) # INNER JOIN people AS person ON (person.id = replies.person_id) === Extending the underlying dataset The recommended way to implement table-wide logic is by defining methods on the dataset using +dataset_module+: class Post < Sequel::Model dataset_module do def with_few_comments where{num_comments < 30} end def clean_boring with_few_comments.delete end end end This allows you to have access to your model API from filtered datasets as well: Post.where(category: 'ruby').clean_boring # DELETE FROM posts WHERE ((category = 'ruby') AND (num_comments < 30)) Inside +dataset_module+ blocks, there are numerous methods that support easy creation of dataset methods.
Most of these methods are named after the dataset methods themselves, such as +select+, +order+, and +group+: class Post < Sequel::Model dataset_module do where(:with_few_comments, Sequel[:num_comments] < 30) select :with_title_and_date, :id, :title, :post_date order :by_post_date, :post_date limit :top10, 10 end end Post.with_few_comments.with_title_and_date.by_post_date.top10 # SELECT id, title, post_date # FROM posts # WHERE (num_comments < 30) # ORDER BY post_date # LIMIT 10 One advantage of using these methods inside dataset_module blocks, instead of defining methods manually, is that the created methods will generally cache the resulting values and result in better performance. === Model Validations You can define a +validate+ method for your model, which +save+ will check before attempting to save the model in the database. If an attribute of the model isn't valid, you should add an error message for that attribute to the model object's +errors+. If an object has any errors added by the validate method, +save+ will raise an error by default: class Post < Sequel::Model def validate super errors.add(:name, "can't be empty") if name.empty? errors.add(:written_on, "should be in the past") if written_on >= Time.now end end == Testing Sequel Please see the {testing guide}[rdoc-ref:doc/testing.rdoc] for recommendations on testing applications that use Sequel, as well as how to run the tests for Sequel itself. == Sequel Release Policy New major versions of Sequel do not have a defined release policy, but historically have occurred once every few years. New minor versions of Sequel are released around once a month near the start of the month. New tiny versions of Sequel are only released to address security issues or regressions in the most current release. == Ruby Support Policy Sequel fully supports the currently supported versions of Ruby (MRI) and JRuby. It may support unsupported versions of Ruby or JRuby, but such support may be dropped in any minor version if keeping it becomes a support issue. The minimum Ruby version required to run the current version of Sequel is 1.9.2, and the minimum JRuby version is 9.0.0.0. == Maintainer Jeremy Evans sequel-5.63.0/Rakefile000066400000000000000000000146501434214120600145530ustar00rootroot00000000000000require "rake" require "rake/clean" NAME = 'sequel' VERS = lambda do require File.expand_path("../lib/sequel/version", __FILE__) Sequel.version end CLEAN.include ["sequel-*.gem", "rdoc", "coverage", "www/public/*.html", "www/public/rdoc*", "spec/bin-sequel-*"] # Gem Packaging desc "Build sequel gem" task :package=>[:clean] do |p| sh %{#{FileUtils::RUBY} -S gem build sequel.gemspec} end ### Website desc "Make local version of website" task :website do sh %{#{FileUtils::RUBY} www/make_www.rb} end ### RDoc RDOC_DEFAULT_OPTS = ["--line-numbers", '--title', 'Sequel: The Database Toolkit for Ruby'] begin # Sequel uses hanna-nouveau for the website RDoc.
gem 'hanna-nouveau' RDOC_DEFAULT_OPTS.concat(['-f', 'hanna']) rescue Gem::LoadError end require "rdoc/task" RDOC_OPTS = RDOC_DEFAULT_OPTS + ['--main', 'README.rdoc'] RDoc::Task.new do |rdoc| rdoc.rdoc_dir = "rdoc" rdoc.options += RDOC_OPTS rdoc.rdoc_files.add %w"README.rdoc CHANGELOG MIT-LICENSE lib/**/*.rb doc/*.rdoc doc/release_notes/*.txt" end desc "Make rdoc for website" task :website_rdoc=>[:website_rdoc_main, :website_rdoc_adapters, :website_rdoc_plugins] RDoc::Task.new(:website_rdoc_main) do |rdoc| rdoc.rdoc_dir = "www/public/rdoc" rdoc.options += RDOC_OPTS + %w'--no-ignore-invalid' rdoc.rdoc_files.add %w"README.rdoc CHANGELOG doc/CHANGELOG.old MIT-LICENSE lib/*.rb lib/sequel/*.rb lib/sequel/{connection_pool,dataset,database,model}/*.rb doc/*.rdoc doc/release_notes/*.txt lib/sequel/extensions/migration.rb" end RDoc::Task.new(:website_rdoc_adapters) do |rdoc| rdoc.rdoc_dir = "www/public/rdoc-adapters" rdoc.options += RDOC_DEFAULT_OPTS + %w'--main Sequel --no-ignore-invalid' rdoc.rdoc_files.add %w"lib/sequel/adapters/**/*.rb" end RDoc::Task.new(:website_rdoc_plugins) do |rdoc| rdoc.rdoc_dir = "www/public/rdoc-plugins" rdoc.options += RDOC_DEFAULT_OPTS + %w'--main Sequel --no-ignore-invalid' rdoc.rdoc_files.add %w"lib/sequel/{extensions,plugins}/**/*.rb doc/core_*" end ### Specs run_spec = proc do |file| lib_dir = File.join(File.dirname(File.expand_path(__FILE__)), 'lib') rubylib = ENV['RUBYLIB'] ENV['RUBYLIB'] ? (ENV['RUBYLIB'] += ":#{lib_dir}") : (ENV['RUBYLIB'] = lib_dir) sh "#{FileUtils::RUBY} #{"-w" if RUBY_VERSION >= '3'} #{file}" ENV['RUBYLIB'] = rubylib end spec_task = proc do |description, name, file, coverage, visibility| desc description task name do run_spec.call(file) end if coverage desc "#{description} with coverage" task :"#{name}_cov" do ENV['COVERAGE'] = coverage == true ? 
'1' : coverage run_spec.call(file) ENV.delete('COVERAGE') end end if visibility desc "Run specs with method visibility checking" task :"#{name}_vis" do ENV['CHECK_METHOD_VISIBILITY'] = '1' run_spec.call(file) ENV.delete('CHECK_METHOD_VISIBILITY') end end end desc "Run the core, model, and extension/plugin specs" task :default => :spec desc "Run the core, model, and extension/plugin specs" task :spec => [:spec_core, :spec_model, :spec_plugin] desc "Run the core, model, and extension/plugin specs with warnings" task :spec_w => [:spec_core_w, :spec_model_w, :spec_plugin_w] spec_task.call("Run core and model specs together", :spec_core_model, 'spec/core_model_spec.rb', "core-model", false) spec_task.call("Run core specs", :spec_core, 'spec/core_spec.rb', false, false) spec_task.call("Run model specs", :spec_model, 'spec/model_spec.rb', false, false) spec_task.call("Run plugin/extension specs", :spec_plugin, 'spec/plugin_spec.rb', "plugin-extension", true) spec_task.call("Run bin/sequel specs", :spec_bin, 'spec/bin_spec.rb', 'bin', false) spec_task.call("Run core extensions specs", :spec_core_ext, 'spec/core_extensions_spec.rb', 'core-ext', true) spec_task.call("Run integration tests", :spec_integration, 'spec/adapter_spec.rb none', '1', true) %w'postgres sqlite mysql oracle mssql db2 sqlanywhere'.each do |adapter| spec_task.call("Run #{adapter} tests", :"spec_#{adapter}", "spec/adapter_spec.rb #{adapter}", adapter, true) end spec_task.call("Run model specs without the associations code", :_spec_model_no_assoc, 'spec/model_no_assoc_spec.rb', false, false) desc "Run model specs without the associations code" task :spec_model_no_assoc do ENV['SEQUEL_NO_ASSOCIATIONS'] = '1' Rake::Task['_spec_model_no_assoc'].invoke end desc "Run core/model/extension/plugin specs with coverage" task :spec_cov do Rake::Cleaner.cleanup_files(::Rake::FileList["coverage"]) ENV['SEQUEL_MERGE_COVERAGE'] = '1' Rake::Task['spec_bin_cov'].invoke Rake::Task['spec_core_model_cov'].invoke Rake::Task['spec_plugin_cov'].invoke Rake::Task['spec_core_ext_cov'].invoke ENV['NO_SEQUEL_PG'] = '1' Rake::Task['spec_postgres_cov'].invoke end task :spec_ci=>[:spec_core, :spec_model, :spec_plugin, :spec_core_ext] do mysql_host = "localhost" pg_database = "sequel_test" unless ENV["DEFAULT_DATABASE"] if ENV["MYSQL_ROOT_PASSWORD"] mysql_password = "&password=root" mysql_host= "127.0.0.1:3306" end if defined?(RUBY_ENGINE) && RUBY_ENGINE == 'jruby' ENV['SEQUEL_SQLITE_URL'] = "jdbc:sqlite::memory:" ENV['SEQUEL_POSTGRES_URL'] = "jdbc:postgresql://localhost/#{pg_database}?user=postgres&password=postgres" ENV['SEQUEL_MYSQL_URL'] = "jdbc:mysql://#{mysql_host}/sequel_test?user=root#{mysql_password}&useSSL=false&allowPublicKeyRetrieval=true" else ENV['SEQUEL_SQLITE_URL'] = "sqlite:/" ENV['SEQUEL_POSTGRES_URL'] = "postgres://localhost/#{pg_database}?user=postgres&password=postgres" ENV['SEQUEL_MYSQL_URL'] = "mysql2://#{mysql_host}/sequel_test?user=root#{mysql_password}&useSSL=false" end if RUBY_VERSION >= '2.3' Rake::Task['spec_postgres'].invoke end if RUBY_VERSION >= '2.4' Rake::Task['spec_sqlite'].invoke Rake::Task['spec_mysql'].invoke end end desc "Print Sequel version" task :version do puts VERS.call end desc "Check syntax of all .rb files" task :check_syntax do Dir['**/*.rb'].each{|file| print `#{FileUtils::RUBY} -c #{file} | fgrep -v "Syntax OK"`} end desc "Check documentation for plugin/extension files" task :check_plugin_doc do text = File.binread('www/pages/plugins.html.erb') skip = %w'before_after_save freeze_datasets from_block 
no_auto_literal_strings auto_validations_constraint_validations_presence_message' Dir['lib/sequel/{plugins,extensions}/*.rb'].map{|f| File.basename(f).sub('.rb', '') if File.size(f)}.sort.each do |f| puts f if !f.start_with?('_') && !skip.include?(f) && !text.include?(f) end end sequel-5.63.0/bin/000077500000000000000000000000001434214120600136505ustar00rootroot00000000000000sequel-5.63.0/bin/sequel000077500000000000000000000167061434214120600151040ustar00rootroot00000000000000#!/usr/bin/env ruby # frozen-string-literal: true require 'optparse' code = nil copy_databases = nil dump_migration = nil dump_schema = nil dump_indexes = nil env = nil migrate_dir = nil migrate_ver = nil backtrace = nil show_version = false test = true load_dirs = [] exclusive_options = [] loggers = [] options = OptionParser.new do |opts| opts.banner = "Sequel: The Database Toolkit for Ruby" opts.define_head "Usage: sequel [options] [file]" opts.separator "" opts.separator "Examples:" opts.separator " sequel sqlite://blog.db" opts.separator " sequel postgres://localhost/my_blog" opts.separator " sequel config/database.yml" opts.separator "" opts.separator "For more information see http://sequel.jeremyevans.net" opts.separator "" opts.separator "Options:" opts.on_tail("-h", "-?", "--help", "Show this message") do puts opts exit end opts.on("-c", "--code CODE", "run the given code and exit") do |v| code = v exclusive_options << :c end opts.on("-C", "--copy-databases", "copy one database to another") do copy_databases = true exclusive_options << :C end opts.on("-d", "--dump-migration", "print database migration to STDOUT") do dump_migration = true exclusive_options << :d end opts.on("-D", "--dump-migration-same-db", "print database migration to STDOUT without type translation") do dump_migration = :same_db exclusive_options << :D end opts.on("-e", "--env ENV", "use environment config for database") do |v| env = v end opts.on("-E", "--echo", "echo SQL statements") do require 'logger' loggers << Logger.new($stdout) end opts.on("-I", "--include dir", "specify $LOAD_PATH directory") do |v| $: << v end opts.on("-l", "--log logfile", "log SQL statements to log file") do |v| require 'logger' loggers << Logger.new(v) end opts.on("-L", "--load-dir DIR", "loads all *.rb under specified directory") do |v| load_dirs << v end opts.on("-m", "--migrate-directory DIR", "run the migrations in directory") do |v| migrate_dir = v exclusive_options << :m end opts.on("-M", "--migrate-version VER", "migrate the database to version given") do |v| migrate_ver = Integer(v, 10) end opts.on("-N", "--no-test-connection", "do not test the connection") do test = false end opts.on("-r", "--require LIB", "require the library, before executing your script") do |v| load_dirs << [v] end opts.on("-S", "--dump-schema filename", "dump the schema for all tables to the file") do |v| dump_schema = v exclusive_options << :S end opts.on("-t", "--trace", "Output the full backtrace if an exception is raised") do backtrace = true end opts.on_tail("-v", "--version", "Show version") do show_version = true end opts.on("-X", "--dump-indexes filename", "dump the index cache for all tables to the file") do |v| dump_indexes = v exclusive_options << :X end end opts = options opts.parse! db = ARGV.shift error_proc = lambda do |msg| $stderr.puts(msg) exit 1 end extra_proc = lambda do $stderr.puts("Warning: last #{ARGV.length} arguments ignored") unless ARGV.empty?
end error_proc["Error: Must specify -m if using -M"] if migrate_ver && !migrate_dir error_proc["Error: Cannot specify #{exclusive_options.map{|v| "-#{v}"}.join(' and ')} together"] if exclusive_options.length > 1 connect_proc = lambda do |database| db_opts = {:test=>test, :loggers=>loggers} if database.nil? || database.empty? Sequel.connect('mock:///', db_opts) elsif File.exist?(database) require 'yaml' env ||= "development" db_config = YAML.load_file(database) db_config = db_config[env] || db_config[env.to_sym] || db_config db_config.keys.each{|k| db_config[k.to_sym] = db_config.delete(k)} Sequel.connect(db_config, db_opts) else Sequel.connect(database, db_opts) end end begin $:.unshift(File.expand_path(File.join(File.dirname(__FILE__), '..', 'lib'))) require 'sequel' if show_version puts "sequel #{Sequel.version}" unless db || code exit end end DB = connect_proc[db] load_dirs.each{|d| d.is_a?(Array) ? require(d.first) : Dir["#{d}/**/*.rb"].each{|f| load(f)}} if migrate_dir extra_proc.call Sequel.extension :migration, :core_extensions Sequel::Migrator.apply(DB, migrate_dir, migrate_ver) exit end if dump_migration extra_proc.call DB.extension :schema_dumper puts DB.dump_schema_migration(:same_db=>dump_migration==:same_db) exit end if dump_schema extra_proc.call DB.extension :schema_caching DB.tables.each{|t| DB.schema(Sequel::SQL::Identifier.new(t))} DB.dump_schema_cache(dump_schema) exit end if dump_indexes extra_proc.call DB.extension :index_caching DB.tables.each{|t| DB.indexes(Sequel::SQL::Identifier.new(t))} DB.dump_index_cache(dump_indexes) exit end if copy_databases Sequel.extension :migration DB.extension :schema_dumper db2 = ARGV.shift error_proc["Error: Must specify database connection string or path to yaml file as second argument for database you want to copy to"] if db2.nil? || db2.empty? extra_proc.call start_time = Time.now TO_DB = connect_proc[db2] same_db = DB.database_type==TO_DB.database_type index_opts = {:same_db=>same_db} # :nocov: index_opts[:index_names] = :namespace if !DB.global_index_namespace? && TO_DB.global_index_namespace? 
# :nocov: if DB.database_type == :sqlite && !same_db # SQLite integer types allow 64-bit integers TO_DB.extension :integer64 end puts "Database connections successful" schema_migration = eval(DB.dump_schema_migration(:indexes=>false, :same_db=>same_db)) index_migration = eval(DB.dump_indexes_migration(index_opts)) fk_migration = eval(DB.dump_foreign_key_migration(:same_db=>same_db)) puts "Migrations dumped successfully" schema_migration.apply(TO_DB, :up) puts "Tables created" puts "Begin copying data" DB.transaction do TO_DB.transaction do all_status_lines = ENV['SEQUEL_BIN_STATUS_ALL_LINES'] DB.tables.each do |table| puts "Begin copying records for table: #{table}" time = Time.now to_ds = TO_DB.from(table) j = 0 DB.from(table).each do |record| to_ds.insert(record) j += 1 if Time.now - time > 5 || all_status_lines puts "Status: #{j} records copied" time = Time.now end end puts "Finished copying #{j} records for table: #{table}" end end end puts "Finished copying data" puts "Begin creating indexes" index_migration.apply(TO_DB, :up) puts "Finished creating indexes" puts "Begin adding foreign key constraints" fk_migration.apply(TO_DB, :up) puts "Finished adding foreign key constraints" if TO_DB.database_type == :postgres TO_DB.tables.each{|t| TO_DB.reset_primary_key_sequence(t)} puts "Primary key sequences reset successfully" end puts "Database copy finished in #{Time.now - start_time} seconds" exit end if code extra_proc.call eval(code) exit end rescue => e raise e if backtrace error_proc["Error: #{e.class}: #{e.message}\n#{e.backtrace.first}"] end if !ARGV.empty? ARGV.each{|v| load(v)} elsif !$stdin.isatty eval($stdin.read) # :nocov: else require 'irb' puts "Your database is stored in DB..." IRB.start end # :nocov: sequel-5.63.0/doc/000077500000000000000000000000001434214120600136455ustar00rootroot00000000000000sequel-5.63.0/doc/CHANGELOG.old000066400000000000000000011500651434214120600156440ustar00rootroot00000000000000=== 4.49.0 (2017-08-01) * Make dataset_associations plugin automatically alias tables when using many_through_many associations that join the same table multiple times (jeremyevans) * Deprecate using a :pool_class Database option that is not a class or a symbol for a supported pool class (jeremyevans) * Deprecate :eager_loading_predicate_key association option and association reflection method (jeremyevans) * Deprecate Model.serialized_columns in the serialization plugin (jeremyevans) * Deprecate Model.cti_columns in the class_table_inheritance plugin (jeremyevans) * Deprecate SQL::AliasedExpression#aliaz, use #alias instead (jeremyevans) * Deprecate SQL::Function#f, use #name instead (jeremyevans) * Deprecate treating cross join with conditions as inner join on MySQL (jeremyevans) * Deprecate ConnectionPool#created_count, use #size instead (jeremyevans) * Deprecate ConnectionPool::CONNECTION_POOL_MAP, use the :pool_class option to specify a non-default connection pool (jeremyevans) * Deprecate Sequel::IBMDB::Connection#prepared_statements= in the ibmdb adapter (jeremyevans) * Deprecate DEFAULT_OPTIONS in validation_helpers, override default_validation_helpers_options private method instead (jeremyevans) * Deprecate model association before callbacks returning false to cancel the action (jeremyevans) * Support native offset syntax on Oracle 12 (timon) (#1397) * Deprecate Dataset#nullify!
in the null_dataset extension (jeremyevans) * Deprecate Dataset#autoid=, #_fetch=, and #numrows= in the mock adapter (jeremyevans) * Deprecate loading plugins by requiring sequel_#{plugin} (jeremyevans) * Add Model.sti_class_from_sti_key in the single_table_inheritance plugin to get the appropriate class to use (Aryk) (#1396) * Make Sequel::Error#cause use #wrapped_exception if it exists on ruby 2.1+ (jeremyevans) * Make Dataset#where_all, #where_each, #where_single_value core dataset methods instead of just model dataset methods (jeremyevans) * Make Database#extend_datasets and Dataset#with_extend now use a Dataset::DatasetModule instance if given a block (jeremyevans) * Add Sequel::Dataset::DatasetModule, now a superclass of Sequel::Model::DatasetModule (jeremyevans) * Make composition plugin with :mapping option work correctly if Model#get_column_value is overridden (jeremyevans) * Support Dataset#paged_each :stream => false option on mysql2 to disable streaming (Aryk) (#1395) * Make datetimeoffset handling in the jdbc/sqlserver adapter work on more drivers (jeremyevans) * Make alter_table add_primary_key work correctly on H2 1.4+ (jeremyevans) * Support :sslrootcert Database option in the postgres adapter (dleavitt) (#1391) === 4.48.0 (2017-07-01) * Deprecate Model.<< (jeremyevans) * Deprecate Dataset#{and,exclude_where,range,interval}, move to sequel_4_dataset_methods extension (jeremyevans) * Make Database#indexes not include partial indexes on SQLite 3.8.8+ (jeremyevans) * Make Database#indexes include indexes created automatically from unique constraints on SQLite 3.8.8+ (jeremyevans) * Deprecate Sequel::Postgres::PG_TYPES, conversion procs should not be registered per-Database (jeremyevans) * Add Database#add_conversion_proc method on PostgreSQL for registering conversion procs (jeremyevans) * Deprecate unexpected values passed to Dataset#insert_conflict on SQLite (jeremyevans) * Deprecate Sequel::SqlAnywhere::Dataset#convert_smallint_to_bool= method (jeremyevans) * Deprecate Sequel::SqlAnywhere.convert_smallint_to_bool accessor (jeremyevans) * Use savepoints around index creation if creating table inside transaction if ignore_index_errors is used (jeremyevans) * Deprecate treating :natrual_inner join type on MySQL as NATURAL LEFT JOIN (jeremyevans) * Deprecate Dataset#mssql_unicode_strings= on Microsoft SQL Server (jeremyevans) * Preserve encoding when parsing PostgreSQL arrays (jeltz) (#1387) * Deprecate external modification of Sequel::JDBC::TypeConvertor (jeremyevans) * Deprecate Sequel::DB2.use_clob_as_blob accessor (jeremyevans) * Add Database#use_clob_as_blob accessor on DB2 (jeremyevans) * Deprecate SEQUEL_POSTGRES_USES_PG constant (jeremyevans) * Do not swallow original exception if exception is raised inside Database#copy_table on PostgreSQL (jeremyevans) * Deprecate Sequel::Postgres.client_min_messages and force_standard_strings accessors (jeremyevans) * Deprecate Sequel::Postgres.use_iso_date_format accessor (jeremyevans) * Do not allow connection in postgres adapter if postgres-pr driver is used and force_standard_strings is false (jeremyevans) * Drop support for ancient postgres driver in postgres adapter, now only pg and postgres-pr drivers are supported (jeremyevans) * Deprecate Sequel::MySQL.convert_invalid_date_time accessor (jeremyevans) * Deprecate Sequel::MySQL.convert_tinyint_to_bool accessor (jeremyevans) * Deprecate Sequel::MySQL.default_{charset,collate,engine} accessors (jeremyevans) * Add Database#default_{charset,collate,engine} accessors on MySQL 
(jeremyevans) * Make mock adapter thread safe (jeremyevans) * Deprecate Sequel::JDBC::Dataset#convert_types accessor (jeremyevans) * Add Dataset#with_convert_types in jdbc adapter (jeremyevans) * Deprecate Sequel::IBMDB::Dataset#convert_smallint_to_bool= method (jeremyevans) * Deprecate Sequel::IBMDB.convert_smallint_to_bool accessor (jeremyevans) * Add Database#convert_smallint_to_bool accessor in the ibmdb adapter (jeremyevans) * Deprecate sequel_3_dataset_methods extension (jeremyevans) * Deprecate query_literals extension (jeremyevans) * Deprecate using subtype conversion procs added after registering composite type in the pg_row extension (jeremyevans) * Don't try canceling copy in Database#copy_into if copier is not created yet (aakashAu) (#1384) * Deprecate global conversion procs added by pg_* extensions, when extension isn't loaded into Database instance (jeremyevans) * Deprecate Sequel::Postgres::PGRange.register in the pg_range extension (jeremyevans) * Deprecate Sequel::Postgres::PGArray.register in the pg_array extension (jeremyevans) * Deprecate Database#copy_conversion_procs (private method) on PostgreSQL (jeremyevans) * Deprecate Database#reset_conversion_procs on PostgreSQL (jeremyevans) * Deprecate meta_def extension (jeremyevans) * Make class_table_inheritance plugin with :alias option not use subquery for datasets that don't join (jeremyevans) * Deprecate hash_aliases extension (jeremyevans) * Deprecate filter_having extension (jeremyevans) * Deprecate empty_array_ignore_nulls extension (jeremyevans) * Deprecate Array#sql_array in the core_extensions extension (jeremyevans) * Make validation_helpers plugin :allow_blank option work correctly when the blank extension is not loaded (jeremyevans) * Make validation_class_methods plugin no longer require the blank extension (jeremyevans) * Clear cached associations when touching associations in the touch plugin (jeremyevans) * Make pg_array_associations model plugin load pg_array extension into database (jeremyevans) * Remove support for :strict option in nested_attributes plugin, use :unmatched_pk option instead (jeremyevans) * Make to_json class/dataset method in json_serializer plugin accept :instance_block option to pass block to Model#to_json (jeremyevans) * Make to_json methods in json_serializer plugin accept blocks that are used to transform values before serializing to JSON (jeremyevans) * Make Sequel.object_to_json pass block to #to_json (jeremyevans) * Deprecate identifier_columns plugin, not needed with Sequel.split_symbols = false (jeremyevans) * Make reloading column_conflicts plugin not remove existing conflict markings (jeremyevans) * Deprecate cti_base_model, cti_key, and cti_model_map class methods in class_table_inheritance plugin (jeremyevans) * Make Model.skip_auto_validations(:not_null) in the auto_validations plugin skip not null checks for columns with default values (jeremyevans) * Make Database#copy_into in jdbc/postgresql adapter respect :server option (jeremyevans) * Make #to_hash and #to_hash_groups handle options in the static_cache plugin, and add rename #to_hash to #as_hash (jeremyevans) * Rename Dataset#to_hash to #as_hash, and add #to_hash as an alias, to allow undefing #to_hash to fix ruby calling it implicitly (jeremyevans) (#1375) * Handle PG* constants deprecated in pg 0.21.0 in the postgres adapter (jeremyevans) (#1377, #1378) * Support :association_pks_use_associated_table association option in association_pks plugin (jeremyevans) * Make pg_hstore extension reset hstore conversion proc 
* Handle PG* constants deprecated in pg 0.21.0 in the postgres adapter (jeremyevans) (#1377, #1378)
* Support :association_pks_use_associated_table association option in association_pks plugin (jeremyevans)
* Make pg_hstore extension reset hstore conversion proc when running Database#reset_conversion_procs (jeremyevans)
* Fix incorrect SQL used for inserting into a CTI subclass sharing the primary table when using the :alias option (jeremyevans)

=== 4.47.0 (2017-06-01)

* Deprecate pg_typecast_on_load plugin, only useful on deprecated do and swift adapters (jeremyevans)
* Deprecate association_autoreloading and many_to_one_pk_lookup plugins, which were made the default model behavior in Sequel 4 (jeremyevans)
* Deprecate setting invalid datasets for models unless required_valid_table = false (jeremyevans)
* Make Model.require_valid_table = true not raise for datasets where Database#schema raises an error but Dataset#columns works (jeremyevans)
* Make Database#with_server in the server_block extension accept a second argument for a different read_only shard (jeremyevans) (#1355)
* Make schema_dumper extension handle Oracle 11g XE inclusion of not null in the db_type (StevenCregan, jeremyevans) (#1351)
* Add Model.default_association_type_options for changing default association options per association type (jeremyevans)
* Add :materialized option to Database#views on PostgreSQL to return materialized views (Blargel) (#1348)
* Make defaults_setter plugin inherit custom default values when subclassing (jeremyevans)

=== 4.46.0 (2017-05-01)

* Recognize additional disconnect error on MySQL (jeremyevans)
* Deconstantize dataset SQL generation, speeding up ruby 2.3+, slowing down earlier versions (jeremyevans)
* Deprecate calling Dataset#set_graph_aliases before Dataset#graph (jeremyevans)
* Don't swallow exception if there is an exception when rolling back a transaction when using :rollback=>:always option (jeremyevans)
* Deprecate passing 2 arguments to Database#alter_table (jeremyevans)
* Deprecate passing Schema::CreateTableGenerator instance as second argument to Database#create_table (jeremyevans)
* Deprecate Database::DatasetClass as a way for getting default dataset classes for datasets (jeremyevans)
* Deprecate SQLite pragma getting and setting methods (jeremyevans)
* Remove handling of EMULATED_FUNCTION_MAP from adapter dataset classes, override Dataset#native_function_name instead (jeremyevans)
* Deprecate {Integer,Timestamp}Migrator::DEFAULT_SCHEMA_{COLUMN,TABLE} (jeremyevans)
* Deprecate Database#jdbc_* methods for jdbc/db2 adapter Database instances (jeremyevans)
* Remove addition of Database#jdbc_* to JDBC::Database in jdbc/db2 adapter (jeremyevans)
* Deprecate many internal Database and Dataset string/regexp constants in core and included adapters (jeremyevans)
* Remove use of Fixnum in sqlanywhere shared adapter (jeremyevans)
* Deprecate Sequel::Schema::Generator constant, use Sequel::Schema::CreateTableGenerator instead (jeremyevans)
* Deprecate Database#log_yield (jeremyevans)
* Deprecate the set_overrides extension (jeremyevans)
* If passing an empty array or hash and a block to a filtering method, ignore the array or hash and just use the block (jeremyevans)
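  A minimal sketch of the new empty-argument behavior (table and column names
  hypothetical):

    DB[:albums].where({}){copies_sold > 10}
    # the empty hash is ignored, equivalent to:
    DB[:albums].where{copies_sold > 10}
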
* Deprecate ignoring explicit nil argument when there is no existing filter (jeremyevans)
* Deprecate ignoring explicit nil argument to filtering methods when passing a block (jeremyevans)
* Deprecate ignoring empty strings and other empty? arguments passed to the filtering methods without a block (jeremyevans)
* Deprecate calling filtering methods without an argument or a block (jeremyevans)
* Deprecate Sequel::VirtualRow#` to create literal SQL, use Sequel.lit instead (jeremyevans)
* Add auto_literal_strings extension for treating plain strings passed to filtering/update methods as literal SQL (jeremyevans)
* Deprecate automatically treating plain strings passed to filtering/update methods as literal SQL (jeremyevans)
* Passing a PlaceholderLiteralString to a filtering method now uses parentheses around the expression (jeremyevans)
* Make Dataset#full_text_search work on Microsoft SQL Server when no_auto_literal_strings extension is used (jeremyevans)
* Fix Database#disconnect when using the single connection pool without an active connection (jeremyevans) (#1339)
* Handle conversion of datetimeoffset values when using the jdbc/sqlserver adapter in some configurations (iaddict, jeremyevans) (#1338)
* Fix conversion of some time values when using the jdbc/sqlserver adapter in some configurations (iaddict, jeremyevans) (#1337)
* Use microsecond precision for time values on Microsoft SQL Server, instead of millisecond precision (jeremyevans)
* Add Dataset#sqltime_precision private method for adapters to use different precision for Sequel::SQLTime than Time and Date (jeremyevans)
* Use utc timezone in Sequel::SQLTime.create if Sequel.application_timezone is :utc (jeremyevans) (#1336)
* Include migration filename in message about migration file without a single migration (jmettraux) (#1334)
* Deprecate conversion of - to _ in adapter schemes (jeremyevans)
* Don't quote function names that are SQL::Identifiers, unless SQL::Function#quoted is used (jeremyevans)
* Deprecate splitting virtual row method names (jeremyevans)
* Deprecate passing blocks to virtual row methods, move to virtual_row_method_block extension (jeremyevans)
* Deprecate Sequel::SQL::Expression#sql_literal and #lit (jeremyevans)
* Don't issue deprecation warnings on ruby 1.8.7, as Sequel 5 is dropping support for it (jeremyevans)
* Deprecate Sequel::BasicObject#remove_methods! (jeremyevans)
* Deprecate sequel/no_core_ext file (jeremyevans)
* Deprecate model dataset #insert_sql accepting model instances (jeremyevans)
* Deprecate model dataset #join_table and #graph accepting model classes (jeremyevans)
* Support :alias option to class_table_inheritance plugin, wrapping subclass datasets in a subquery to fix ambiguous column issues (jeremyevans)
* Deprecate Model.set_allowed_columns and Model#{set_all,set_only,update_all,update_only}, move to whitelist security plugin (jeremyevans)
* Do not raise MassAssignmentRestriction when setting nested attributes and using the :fields option, only check for fields given (jeremyevans)
* Do not add class methods for private methods defined in dataset_module (jeremyevans)
* Deprecate Model.def_dataset_method and Model.subset, move to def_dataset_method plugin (jeremyevans)
* Deprecate Model.finder and Model.prepared_finder, move to finder plugin (jeremyevans)
* Deprecate calling Model.db= on a model with a dataset (jeremyevans)
* Deprecate splitting symbols to look for qualified/aliased identifiers (e.g. :table__column) (jeremyevans)
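  For illustration, the replacements for symbol splitting (names hypothetical):

    # Instead of the deprecated :albums__name for albums.name:
    Sequel.qualify(:albums, :name)
    Sequel[:albums][:name]   # equivalent shorthand
    # Instead of the deprecated :name___n for name AS n:
    Sequel[:name].as(:n)
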
* Allow optimized lookups and deletes for models using SQL::Identifier and SQL::QualifiedIdentifier values as the FROM table (jeremyevans)

=== 4.45.0 (2017-04-01)

* Correctly handle datasets with offsets but no limits used in compound datasets on MSSQL <2012 (jeremyevans)
* Correctly handle false values in the split_values plugin (bananarne) (#1333)
* Deprecate Dataset#dup/clone and Model.dup/clone (jeremyevans)
* Deprecate the schema and scissors plugins (jeremyevans)
* Deprecate Model.{lazy_attributes,nested_attributes,composition,serialization}_module accessors (jeremyevans)
* Deprecate Database#database_name on MySQL (jeremyevans)
* Deprecate Database#use on MySQL (jeremyevans)
* Make pg_hstore extension no longer update PG_NAMED_TYPES (jeremyevans)
* Deprecate Sequel::PG_NAMED_TYPES (jeremyevans)
* Add columns_updated plugin for making updated columns hash available in after_update and after_save hooks (jeremyevans)
* Deprecate accessing @columns_updated directly in model after_update and after_save hooks (jeremyevans)
* Deprecate Database#{add,remove}_servers when not using a sharded connection pool (jeremyevans)
* Deprecate Database#each_server (jeremyevans)
* Make Model#_valid? private method accept only an options hash (jeremyevans)
* Deprecate returning false from model before hooks to cancel the action, use Model#cancel_action (jeremyevans)
* Handle Model#cancel_action correctly in before hooks when Model#valid? is called (jeremyevans)
* Deprecate Sequel::BeforeHookFailed (jeremyevans)
* Deprecate passing multiple arguments as filter arguments when not using a conditions specifier (jeremyevans)
* Deprecate passing Procs as filter arguments, require they be passed as blocks (jeremyevans)
* Deprecate Sequel::Error::* exception class aliases (jeremyevans)
* Deprecate prepared_statements_associations and prepared_statements_with_pk plugins (jeremyevans)
* Deprecate Sequel::Unbinder, Sequel::UnbindDuplicate, and Dataset#unbind (jeremyevans)
* Deprecate calling Sequel::Qualifier with two arguments (jeremyevans)
* Add validation_contexts plugin for supporting custom contexts when validating (jeremyevans)
* Deprecate Sequel::Database.single_threaded singleton accessor (jeremyevans)
* Deprecate treating unrecognized prepared statement type as :select (jeremyevans)
* Deprecate Sequel.identifier_{in,out}put_method= and .quote_identifiers= singleton setters (jeremyevans)
* Deprecate Sequel::Database.identifier_{in,out}put_method and .quote_identifiers singleton accessors (jeremyevans)
* Deprecate loading the identifier_mangling by default, require it be loaded explicitly if needed (jeremyevans)
* Make Database#dump_{table_schema,schema_migration} in schema_dumper extension support :schema option (dadario) (#1328)
* Make Dataset#delete respect an existing limit on Microsoft SQL Server (jeremyevans)
* Add Dataset#skip_limit_check to mark a dataset as skipping the limit/offset check for updates and deletes (jeremyevans)
* Deprecate calling Dataset#{update/delete/truncate} on datasets with limits or offsets unless the database supports it (jeremyevans)
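  A hedged sketch of the limit/offset check and its escape hatch (table name
  hypothetical):

    DB[:logs].limit(10).delete                    # deprecated unless the
                                                  # database supports it
    DB[:logs].limit(10).skip_limit_check.delete   # explicitly opt out
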
* Add deprecation message for using association_pks setter method with :delay_pks=>true association option (jeremyevans)
* Add deprecation message for using association_pks setter method without :delay_pks association option (jeremyevans)
* Deprecate having duplicate column names in subclass tables when using the class_table_inheritance plugin (jeremyevans)
* Deprecate do (DataObjects), swift, and jdbc/as400 adapters (jeremyevans)
* Deprecate support for Cubrid, Firebird, Informix, and Progress databases (jeremyevans)
* The :proxy_argument option passed to association_proxies plugin block is now an empty hash if no arguments are passed to the association method (jeremyevans)
* Deprecate passing non-hash arguments to association methods (jeremyevans)
* Deprecate passing multiple arguments to association methods (jeremyevans)
* Deprecate model transaction hook methods (jeremyevans)
* Drop support for pg <0.8.0 in the postgres adapter (jeremyevans)
* Deprecate passing a block to Database#from (jeremyevans)
* Deprecate Sequel::Model::ANONYMOUS_MODEL_CLASSES{,_MUTEX} (jeremyevans)
* Deprecate Sequel.cache_anonymous_models and Sequel.cache_anonymous_models= (jeremyevans)
* Automatically use from_self when using a dataset as part of a compound if it has an offset but no limit (jeremyevans)
* Drop order on existing datasets when using Dataset#union/intersect/except on Microsoft SQL Server unless a limit or offset is used (jeremyevans)
* Deprecate dataset mutation (jeremyevans)
* Handle dumping of autoincrementing 64-bit integer primary key columns correctly when using :same_db option in the schema dumper (jeremyevans) (#1324)
* Add Model.dataset_module_class accessor, allowing plugins to support custom behavior in dataset_module blocks (jeremyevans)
* Make ORDER BY come after UNION/INTERSECT/EXCEPT on Microsoft SQL Server and SQLAnywhere (jeremyevans)
* Make Database#indexes on MySQL handle qualified identifiers (jeremyevans) (#1316)
* Add oracle support to the odbc adapter (samuel02) (#1315)

=== 4.44.0 (2017-03-01)

* Add where_all, where_each, where_single_value model dataset methods, optimized for frozen datasets (jeremyevans)
* Add eager method to dataset_module (jeremyevans)
* Add implicit_subquery extension, for implicitly using a subquery for datasets using raw SQL when calling dataset methods that modify SQL (jeremyevans)
* Make Dataset#from_self keep the columns from the current dataset if present (jeremyevans)
* Make SQL::ValueList#inspect show that it is a value list (jeremyevans)
* Make LiteralString#inspect show that it is a literal string (jeremyevans)
* Make Model::Associations::AssociationReflection#inspect show reflection class and guess at association definition line (jeremyevans)
* Make SQLTime#inspect show it is an SQLTime instance, and only the time component (jeremyevans)
* Make SQL::Blob#inspect show that it is a blob, the number of bytes, and some or all of the content (jeremyevans)
* Make plugins not modify the constant namespace for the model class that uses them (jeremyevans)
* Do not modify encoding of SQL::Blob instances in force_encoding plugin (jeremyevans)
* Add Model.freeze_descendents to subclasses plugin, for easier finalizing associations/freezing of descendent classes (jeremyevans)
* Add Model.finalize_associations method for finalizing associations, speeding up some association reflections methods almost 10x (jeremyevans)
* Implement Model.freeze such that it can be used in production (jeremyevans)
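  A minimal sketch of a production setup using these methods (assumes the
  subclasses plugin is loaded before any model subclasses are created):

    Sequel::Model.plugin :subclasses
    # ... define all model classes ...
    Sequel::Model.freeze_descendents  # finalize associations, freeze models
    DB.freeze
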
* Recognize another disconnect error in the jdbc/as400 adapter (perlun) (#1300)
* Correctly handle conversion of false values when typecasting PostgreSQL arrays (mistoo) (#1299)
* Raise error if the postgres adapter attempts to load an incompatible version of sequel_pg (mahlonsmith) (#1298)
* Fix jdbc adapter so basic_type_convertor_map is not shared between instances, work with Database#freeze (jeremyevans)

=== 4.43.0 (2017-02-01)

* Make jdbc/postgresql adapter work if pg_hstore extension is loaded first (jeremyevans) (#1296)
* Make prepared_statements_associations plugin work correctly on some instance specific associations (jeremyevans)
* Make prepared_statements plugin not use prepared statements in cases where it is probably slower (jeremyevans)
* Optimize Model#refresh similar to Model.with_pk (jeremyevans)
* Make Database#extension not attempt to load the same extension more than once (jeremyevans)
* Implement Database#freeze such that it can be used in production (jeremyevans)
* Freeze enum_labels in the pg_enum extension (jeremyevans)
* Handle Database#type_supported? thread-safely on PostgreSQL (jeremyevans)
* Handle primary_key_sequences thread-safely on Oracle (jeremyevans)
* Handle sharding better when using mysql2 native prepared statements (jeremyevans)
* Use thread-safe incrementor for mock adapter autoid handling (jeremyevans)
* Make Model#freeze not freeze associations hash until after validating the model instance (jeremyevans)
* Make prepared_statements_associations plugin work correctly when model object explicitly specifies server to use when also using sharding plugin (jeremyevans)
* Make prepared_statements_with_pk plugin work correctly when dataset explicitly specifies server to use (jeremyevans)
* Make prepared_statements plugin work correctly when model object explicitly specifies server to use (jeremyevans)
* Make dataset_module inherited to subclasses when using the single_table_inheritance plugin (jeremyevans) (#1284)
* Support use of SQLite result codes in the jdbc-sqlite adapter, if the jdbc sqlite driver supports them (flash-gordon, jeremyevans) (#1283)
* Make timestamp migrator handle key length limitations when using MySQL with InnoDB engine and utf8mb4 charset default (jeremyevans) (#1282)

=== 4.42.0 (2017-01-01)

* Handle eager load callbacks correctly for one_to_one associations with orders or offsets when window functions are not supported (jeremyevans)
* Raise Sequel::Error if using an :eager_limit dataset option when eager loading a singular association (jeremyevans)
* Replace internal uses of Dataset#select_more with #select_append to save a method call (jeremyevans)
* Make Dataset#order_append the primary method, and #order_more the alias, for similarity to #select_append and #select_more (jeremyevans)
* Replace internal uses of Dataset#filter with #where to save a method call (jeremyevans)
* Do not set :auto_increment in the schema information for integer columns that are part of a composite primary key on SQLite (jeremyevans)
* Use autoincrement setting on integer primary key columns when emulating table modification methods on SQLite (thenrio, jeremyevans) (#1277, #1278)
* Make the pagination extension work on frozen datasets (jeremyevans)
* Make Dataset#server work for frozen model datasets using the sharding plugin (jeremyevans)
* Make Dataset#nullify in the null_dataset extension work on frozen datasets (jeremyevans)
* Make Model#set_server work when using a frozen model dataset (jeremyevans)
* Make Dataset#ungraphed work on a frozen model dataset (jeremyevans)
* Add Dataset#with_{autoid,fetch,numrows} to the mock adapter, returning cloned datasets with the setting changed (jeremyevans)
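  For illustration, a sketch of the new mock dataset methods (values
  hypothetical):

    DB = Sequel.mock
    ds = DB[:albums].with_fetch(:id=>1, :name=>"RF")
    ds.all                               # => [{:id=>1, :name=>"RF"}]
    DB[:albums].with_numrows(3).delete   # => 3
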
* Make looser_typecasting extension handle the strict BigDecimal parsing introduced in ruby 2.4rc1 (jeremyevans)
* Make Database#{db,opts}= in the sequel_3_dataset_methods extension raise for frozen datasets (jeremyevans)
* Speed up repeated calls to Dataset#{interval,range} for frozen datasets using a cached placeholder literalizer (jeremyevans)
* Speed up repeated calls to Dataset#get with a single argument for frozen datasets using a cached placeholder literalizer (jeremyevans)
* Speed up repeated calls to Dataset#{first,last} with arguments/blocks for frozen datasets using a cached placeholder literalizer (jeremyevans)
* Speed up repeated calls to Dataset#{avg,min,max,sum} for frozen datasets using a cached placeholder literalizer (jeremyevans)
* Cache dataset returned by Dataset#skip_locked for frozen datasets (jeremyevans)
* Cache dataset returned by Dataset#for_update for frozen datasets (jeremyevans)
* Cache dataset returned by Dataset#un{filtered,grouped,limited,ordered} for frozen datasets (jeremyevans)
* Cache dataset returned by Dataset#reverse (no args) for frozen datasets (jeremyevans)
* Cache dataset returned by Dataset#invert for frozen datasets (jeremyevans)
* Speed up repeated calls to Dataset#count with an argument or block for frozen datasets using a cached placeholder literalizer (jeremyevans)
* Using :on_duplicate_columns=>:warn Database option with duplicate_columns_handler now prepends file/line to the warning message (jeremyevans)
* Move identifier mangling code to identifier_mangling extension, load by default unless using :identifier_mangling=>false Database option (jeremyevans)
* Allow Dataset#with_extend to accept a block and create a module with that block that the object is extended with (jeremyevans)
* Speed up repeated calls to with_pk on the same frozen model dataset using a cached placeholder literalizer (jeremyevans)
* Add dataset_module methods such as select and order that define dataset methods which support caching for frozen datasets (jeremyevans)
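  A minimal sketch of the new dataset_module methods (model and column names
  hypothetical):

    class Album < Sequel::Model
      dataset_module do
        order :by_name, :name
        select :essentials, :id, :name
        where :available, :deleted_at=>nil
      end
    end
    Album.available.by_name.essentials
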
* Cache subset datasets if they don't use blocks or procs for frozen model datasets (jeremyevans)
* Cache intermediate dataset used in Dataset#{last,paged_each} for frozen model datasets without an order (jeremyevans)
* Cache dataset returned by Dataset#naked for frozen datasets (jeremyevans)
* Cache intermediate dataset used in Dataset#last (no args) for frozen datasets (jeremyevans)
* Cache intermediate dataset used in Dataset#first (no args) and #single_record for frozen datasets (jeremyevans)
* Cache intermediate dataset used in Dataset#empty? for frozen datasets (jeremyevans)
* Cache intermediate dataset used in Dataset#count (no args) for frozen datasets (jeremyevans)
* Warn if :conditions option may be unexpectedly ignored during eager_graph/association_join (jeremyevans) (#1272)
* Cache SELECT and DELETE SQL for most frozen datasets (jeremyevans)
* Freeze most SQL::Expression objects and internal state by default (jeremyevans)
* Freeze Dataset::PlaceholderLiteralizer and Dataset::PlaceholderLiteralizer::Argument instances (jeremyevans)
* Freeze most dataset opts values to avoid unintentional modification (jeremyevans)
* Add Dataset#with_convert_smallint_to_bool on DB2, returning a clone with convert_smallint_to_bool set (jeremyevans)
* Make Dataset#freeze actually freeze the dataset on ruby 2.4+ (jeremyevans)
* Avoid using instance variables other than @opts for dataset data storage (jeremyevans)
* Add freeze_datasets extension, making all datasets for a given Database frozen (jeremyevans)
* Refactor prepared statement internals, using opts instead of instance variables (jeremyevans)
* Model.set_dataset now operates on a clone of the dataset given instead of modifying it, so it works with frozen datasets (jeremyevans)

=== 4.41.0 (2016-12-01)

* Add Dataset#with_mssql_unicode_strings on Microsoft SQL Server, returning a clone with mssql_unicode_strings set (jeremyevans)
* Add Dataset#with_identifier_output_method, returning a clone with identifier_output_method set (jeremyevans)
* Add Dataset#with_identifier_input_method, returning a clone with identifier_input_method set (jeremyevans)
* Add Dataset#with_quote_identifiers, returning a clone with quote_identifiers set (jeremyevans)
* Add Dataset#with_extend, returning a clone extended with given modules (jeremyevans)
* Add Dataset#with_row_proc, returning a clone with row_proc set (jeremyevans)
* Support use of SQL::AliasedExpressions as Model#to_json :include option keys in the json_serializer plugin (sensadrome) (#1269)
* Major improvements to type conversion in the ado adapter (vais, jeremyevans) (#1265)
* Avoid memory leak in ado adapter by closing result sets after yielding them (vais, jeremyevans) (#1259)
* Fix hook_class_methods plugin handling of commit hooks (jeremyevans)
* Make association dataset method correctly handle cases where key fields are nil (jeremyevans)
* Handle pure java exceptions that don't support message= when reraising the exception in the jdbc adapter (jeremyevans)
* Add support for :offset_strategy Database option on DB2, with :limit_offset and :offset_fetch values, to disable OFFSET emulation (#1254) (jeremyevans)
* Remove deprecated support for using Bignum class as a generic type (jeremyevans)

=== 4.40.0 (2016-10-28)

* Make column_select plugin not raise an exception if the model's table does not exist (jeremyevans)
* Make dataset_associations plugin correctly handle (many|one)_through_many associations with single join table (jeremyevans) (#1253)
* Add s extension, which adds Sequel::S module that includes private #S method for calling Sequel.expr, including use as refinement (jeremyevans)
* Add symbol_as and symbol_as_refinement extensions so that :column.as(:alias) is treated as Sequel.as(:column, :alias) (jeremyevans)
* Add symbol_aref and symbol_aref_refinement extensions so that :table[:column] is treated as Sequel.qualify(:table, :column) (jeremyevans)
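  For illustration, the behavior these extensions enable (names hypothetical):

    Sequel.extension :symbol_aref
    :albums[:id]    # same as Sequel.qualify(:albums, :id)

    Sequel.extension :symbol_as
    :name.as(:n)    # same as Sequel.as(:name, :n)
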
* Add Sequel.split_symbols=, to support the disabling of splitting symbols with double/triple underscores (jeremyevans)
* Make SQL::QualifiedIdentifier convert SQL::Identifier arguments to strings, fixing Sequel[:schema][:table] usage in schema methods (jeremyevans)
* Do not attempt to combine non-associative operators (jeremyevans) (#1246)
* Automatically add NOT NULL to columns when adding primary keys if the database doesn't handle it (jeremyevans)
* Make prepared_statements plugin correctly handle lookup on joined datasets (jeremyevans) (#1244)
* Make Database#tables with :qualify=>true option handle table names with double underscores correctly (jeremyevans) (#1241)
* Add SQL::Identifier#[] and SQL::QualifiedIdentifier#[] for creating qualified identifiers (jeremyevans)
* Add support for Dataset#insert_conflict :conflict_where option, for a predicate to use in ON CONFLICT clauses (chanks) (#1240)
* Freeze Dataset::NON_SQL_OPTIONS, add private Dataset#non_sql_options, fixing thread safety issues during require (jeremyevans)
* Make the callable returned by Database#rollback_checker thread safe (jeremyevans)
* Make lazy_attributes and dataset_associations plugins work if insert_returning_select plugin is loaded before on model with no dataset (jeremyevans)

=== 4.39.0 (2016-10-01)

* Make active_model plugin use rollback_checker instead of after_rollback hook (jeremyevans)
* Add Database#rollback_checker, which returns a proc that returns whether the in progress transaction is rolled back (jeremyevans)
* Add Sequel::Database.set_shared_adapter_scheme to allow external adapters to support the mock adapter (jeremyevans)
* Make hook_class_methods plugin not use after commit/rollback model hooks (jeremyevans)
* Support add_column :after and :first options on MySQL (AnthonyBobsin, jeremyevans) (#1234)
* Support ActiveSupport 5 in pg_interval extension when weeks/hours are used in ActiveSupport::Duration objects (chanks) (#1233)
* Support IntegerMigrator :relative option, for running only the specified number of migrations up or down (jeremyevans)
* Make the touch plugin also touch associations on create in addition to update and delete (jeremyevans)
* Add :allow_manual_update timestamps plugin option for not overriding a manually set update timestamp (jeremyevans)
* Add Sequel.[] as an alias to Sequel.expr, for easier expression creation (jeremyevans)
* Add PostgreSQL full_text_search :to_tsquery=>:phrase option, for using PostgreSQL 9.6+ full text search phrase searching (jeremyevans)
* Add JSONBOp#insert in pg_json_ops extension, for jsonb_insert support on PostgreSQL 9.6+ (jeremyevans)
* Support add_column :if_not_exists option on PostgreSQL 9.6+ (jeremyevans)

=== 4.38.0 (2016-09-01)

* Support :driver_options option when using the postgres adapter with pg driver (jeremyevans)
* Don't use after commit/rollback database hooks if the model instance methods are not overridden (jeremyevans)
* Add SQL::NumericMethods#coerce, allowing code such as Sequel.expr{1 - x} (jeremyevans)
* Support ** operator for exponentiation on expressions, similar to +, -, *, and / (jeremyevans)
* Add Sequel::SQLTime.date= to set the date used for SQLTime instances (jeremyevans)

=== 4.37.0 (2016-08-01)

* Add support for regular expression matching on Oracle 10g+ using REGEXP_LIKE (johndcaldwell) (#1221)
* Recognize an additional disconnect error in the postgres adapter (jeremyevans)
* Make connection pool remove connections for disconnect errors not raised as DatabaseDisconnectError (jeremyevans)
* Support mysql2 0.4+ native prepared statements and bound variables (jeremyevans)
* Add Database#values for VALUES support on SQLite 3.8.3+ (jeremyevans)
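  A hedged sketch of Database#values (row contents hypothetical):

    DB.values([[1, "a"], [2, "b"]]).all
    # VALUES (1, 'a'), (2, 'b')  -- approximate generated SQL
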
* Support create_view :columns option on SQLite 3.9.0+ (jeremyevans)
* Make migration reverser handle alter_table add_constraint using a hash as the first argument (soupmatt) (#1215)
* Make ASTTransformer handle Sequel.extract (jeremyevans) (#1213)

=== 4.36.0 (2016-07-01)

* Deprecate use of Bignum class as generic type, since the behavior will change in ruby 2.4 (jeremyevans)
* Don't hold connection pool mutex while disconnecting connections (jeremyevans)
* Don't hold references to disconnected connections in the connection_validator extension (jeremyevans)
* Don't overwrite existing connection_validation_timeout when loading connection_validator extension multiple times (jeremyevans)
* Add connection_expiration extension, for automatically removing connections open for too long (pdrakeweb) (#1208, #1209)
* Handle disconnection errors raised during string literalization in mysql2 and postgres adapters (jeremyevans)
* Add string_agg extension for aggregate string concatenation support on many databases (jeremyevans)
* Add SQL::Function#order for ordered aggregate functions (jeremyevans)
* Support operator validation in constraint_validations for <, <=, >, and >= operators with string and integer arguments (jeremyevans)
* Make validates_operator validation consider nil values invalid unless :allow_nil or similar option is used (jeremyevans)
* Close cursors for non-SELECT queries in the oracle adapter after execution, instead of waiting until GC (jeremyevans) (#1203)
* Add :class_namespace association option for setting default namespace for :class option given as symbol/string (jeremyevans)
* Add Sequel::Model.cache_anonymous_models accessor for changing caching on a per-model basis (jeremyevans)
* Add Sequel::Model.def_Model for adding a Model() method to a module, for easier use of namespaced models (jeremyevans)
* Add Sequel::Model::Model() for creating subclasses of Sequel::Model subclasses, instead of just Sequel::Model itself (jeremyevans)

=== 4.35.0 (2016-06-01)

* Add :headline option to PostgreSQL Dataset#full_text_search for adding an extract of the matched text to the SELECT list (jeremyevans)
* Make :rollback=>:always inside a transaction use a savepoint automatically if supported (jeremyevans) (#1193)
* Recognize bool type as boolean in the schema dumper (jeremyevans) (#1192)
* Make Dataset#to_hash and #to_hash_groups work correctly for model datasets doing eager loading (jeremyevans)
* Make delay_add_association plugin handle hashes and primary keys passed to add_* association methods (jeremyevans) (#1187)
* Treat :Bignum as a generic type, to support 64-bit integers on ruby 2.4+, where Bignum == Integer (jeremyevans)
* Add server_logging extension for including server/shard information when logging queries (jeremyevans)
* Add Database#log_connection_info, for including connection information when logging queries (jeremyevans)
* Add Dataset#skip_locked for skipping locked rows on PostgreSQL 9.5+, MSSQL, and Oracle (jeremyevans)
* Allow Sequel::Model#lock! to accept an optional lock style (petedmarsh) (#1183)
* Add sql_comments extension for setting SQL comments on queries (jeremyevans)
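  For illustration, a sketch of the sql_comments extension (table name and
  comment text hypothetical):

    ds = DB[:albums].extension(:sql_comments)
    ds.comment("Dashboard query").all
    # SELECT * FROM albums -- Dashboard query  (approximate SQL)
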
* Make Postgres::PGRange#cover? handle empty, unbounded, and exclusive beginning ranges (jeremyevans)
* Fix frozen string literal issues on JRuby 9.1.0.0 (jeremyevans)
* Allow json_serializer :include option with cascaded values to work correctly when used with association_proxies (jeremyevans)

=== 4.34.0 (2016-05-01)

* Add support for :dataset_associations_join association option to dataset_associations plugin, for making resulting datasets have appropriate joins (jeremyevans)
* Log the server a connection was attempted to in PoolTimeout exception messages in the sharded connection pool (jeremyevans)
* Log Database :name option in PoolTimeout exception messages (bigkevmcd, jeremyevans) (#1176)
* Add duplicate_columns_handler extension, for raising or warning if a dataset returns multiple columns with the same name (TSMMark, jeremyevans) (#1175)
* Support registering per-Database custom range types in the pg_range extension (steveh) (#1174)
* Support :preconnect=>:concurrently Database option for preconnecting in separate threads (kch, jeremyevans) (#1172)
* Make prepared_statements_safe plugin work correctly with CURRENT_DATE/CURRENT_TIMESTAMP defaults (jeremyevans) (#1168)
* Add validates_operator validation helper (petedmarsh) (#1170)
* Recognize additional unique constraint violation on Microsoft SQL Server (jeremyevans)
* Add :hash option to Dataset#(select|to)_hash(_groups)? methods for choosing object to populate (mwpastore) (#1167)

=== 4.33.0 (2016-04-01)

* Handle arbitrary objects passed as arguments to the association method (jeremyevans) (#1166)
* Handle array with multiple columns as Dataset#insert_conflict :target value on PostgreSQL (chanks) (#1165)
* Add Database#transaction :savepoint=>:only option, for only creating a savepoint if already inside a transaction (jeremyevans)
* Make Database#sequence_for_table on Oracle handle cases where the schema for a table cannot be determined (jeremyevans)
* The boolean_readers, boolean_subsets, and class_table_inheritance plugins no longer do blind rescues (jeremyevans) (#1162)
* Add Model.require_valid_table setting, if set to true doesn't swallow any errors for invalid tables (jeremyevans)
* Creating model classes inside a transaction when the table doesn't exist no longer rolls back the transaction on PostgreSQL (jeremyevans) (#1160)
* Sequel::Model no longer swallows many errors when subclassing or setting datasets (jeremyevans) (#1160)
* Handle altering column NULL settings for varchar(max) and text columns on MSSQL (Ilja Resch)
* Remove Sequel.firebird and Sequel.informix adapter methods (jeremyevans)
* Make graph_each extension handle result set splitting when using Dataset#first (jeremyevans)
* Allow raising Sequel::ValidationFailed and Sequel::HookFailed without an argument (jeremyevans)
* Allow schema_dumper to handle :qualify=>true option on PostgreSQL (jeremyevans)
* Allow foreign_key schema method to handle SQL::Identifier and SQL::QualifiedIdentifier as 2nd argument (jeremyevans)

=== 4.32.0 (2016-03-01)

* Use mutex for synchronizing access to association reflection cache on MRI (jeremyevans)
* Add Dataset#delete_from on MySQL, allowing deletions from multiple tables in a single query (jeremyevans) (#1146)
* Add no_auto_literal_strings extension, which makes SQL injection vulnerabilities less likely (jeremyevans)
* Add Model.default_association_options, for setting option defaults for all future associations (jeremyevans)
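  A hedged sketch of setting association option defaults (the specific option
  used is illustrative only):

    Sequel::Model.default_association_options[:read_only] = true
    # associations defined afterward default to :read_only=>true
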
* Support :association_pks_nil association option in association_pks setter for determining how to handle nil (jeremyevans)
* Make association_pks setter handle empty array correctly when :delay_pks is set (jeremyevans)
* Add a setter method for one_through_one associations (jeremyevans)
* Include :remarks entry in JDBC schema parsing output, containing comments on the column (olleolleolle) (#1143)
* Support :eager_reload and :eager options to associations in tactical_eager_loading plugin (jeremyevans)
* Make tactical_eager_loading not eager load if passing proc or block to association method (jeremyevans)
* Make eager_each plugin handle eager loading for Dataset#first and similar methods (jeremyevans)

=== 4.31.0 (2016-02-01)

* Convert types in association_pks setters before saving them, instead of just before running queries (jeremyevans)
* Use getField and getOID instead of field and oid in the jdbc/postgresql adapter to work around JRuby 9.0.5.0 regression (jeremyevans) (#1137)
* Support using PostgreSQL-specific types in bound variables in the jdbc/postgresql adapter (jeremyevans)
* Add support for running with --enable-frozen-string-literal on ruby 2.3 (jeremyevans)
* Make Database#disconnect in the oracle adapter work correctly on newer versions of oci8 (jeremyevans)
* Support parsing PostgreSQL arrays with explicit bounds (jeremyevans) (#1131)
* Raise an error if attempting to use a migration file not containing a single migration (jeremyevans) (#1127)
* Automatically set referenced key for self referential foreign key constraint for simple non-autoincrementing primary key on MySQL (jeremyevans) (#1126)

=== 4.30.0 (2016-01-04)

* Add Dataset#insert_conflict and #insert_ignore on SQLite for handling uniqueness violations (Sharpie) (#1121)
* Make Database#row_type in pg_row extension handle different formats of schema-qualified types (jeremyevans) (#1119)
* Add identifier_columns plugin for handling column names containing 2 or more consecutive underscores when saving (jeremyevans) (#1117)
* Support :eager_limit and :eager_limit_strategy dataset options in model eager loaders for per-call limits and strategies (chanks) (#1115)
* Allow IPv6 addresses in database URLs on ruby 1.9+ (hellvinz, jeremyevans) (#1113)
* Make Database#schema :db_type entries include sizes for string types on DB2 (jeremyevans)
* Make Database#schema :db_type entries include sizes for string and decimal types in the jdbc adapter's schema parsing (jeremyevans)
* Recognize another disconnect error in the tinytds adapter (jeremyevans)

=== 4.29.0 (2015-12-01)

* Add Model#json_serializer_opts method to json_serializer plugin, allowing for setting to_json defaults on per-instance basis (jeremyevans)
* Add uuid plugin for automatically setting UUID column when creating a model object (pdrakeweb, jeremyevans) (#1106)
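  A minimal sketch of the plugin (model name hypothetical; by default the
  value is stored in a uuid column):

    class Order < Sequel::Model
      plugin :uuid
    end
    Order.create.uuid   # populated automatically on create
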
* Allow the sqlanywhere adapter to work with sharding (jeremyevans)
* Support blobs as bound variables in the oracle adapter (jeremyevans) (#1104)
* Order by best results first when using the Database#full_text_search :rank option on PostgreSQL (chanks) (#1101)
* Run Database#table_exists? inside a savepoint if currently in a transaction and the database supports savepoints (jeremyevans) (#1100)
* Allow Database#transaction :retry_on option to work when using savepoints (jeremyevans)
* Allow for external adapters to implement Dataset#date_add_sql_append to integrate with the date_arithmetic extension (jeremyevans)
* Add Dataset#insert_empty_columns_values private method for easy overriding for databases that don't support INSERT with DEFAULT VALUES (jeremyevans)

=== 4.28.0 (2015-11-02)

* Add boolean_subsets plugin, which adds a subset for each boolean column (jeremyevans)
* Add subset_conditions plugin, which adds a method for each subset returning the filter conditions for the subset (jeremyevans)
* Make the list plugin work better with the auto_validations plugin when there is a validation on the position column (jeremyevans)
* Make to_csv for model datasets call instance methods, just like Model#to_csv, in the csv_serializer plugin (skrobul) (#1088)
* Raise Sequel::NoExistingObject instead of generic error if Model#refresh can't find the related row (jeremyevans)

=== 4.27.0 (2015-10-01)

* Don't stub Sequel.synchronize on MRI (YorickPeterse) (#1083)
* Make bin/sequel warn if given arguments that it doesn't use (jeremyevans)
* Fix the order of referenced composite keys returned by Database#foreign_key_list on PostgreSQL (jeremyevans) (#1081)
* Recognize another disconnect error in the jdbc/postgresql adapter (jeremyevans)
* In the active model plugin, make Model#persisted? return false if the transaction used for creation is rolled back (jeremyevans) (#1076)
* Use primary_key :keep_order option in the schema dumper if the auto incrementing column is not the first column in the table (jeremyevans)
* Set :auto_increment option correctly in the schema parser when the auto incrementing column is not the first column in the table (jeremyevans)
* Support :keep_order option to primary_key in schema generator, to not automatically make the primary key the first column (jeremyevans)
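  For illustration, a sketch of the :keep_order option (table and column names
  hypothetical):

    DB.create_table(:points) do
      Integer :x
      primary_key :id, :keep_order=>true   # id stays after x instead of
                                           # being moved to the first column
    end
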
* Add new jsonb/json functions and operators supported in PostgreSQL 9.5+ (jeremyevans)
* Add before_after_save plugin, for refreshing created objects and resetting modified flag before calling after_create/update/save hooks (jeremyevans)
* Add Dataset#single_record! and #single_value! which don't require cloning the receiver (jeremyevans)
* Dataset#with_sql_single_value now works correctly for model datasets (jeremyevans)
* Optimize Dataset#single_value and #with_sql_single_value to not create an unnecessary array (jeremyevans)
* Make postgres adapter work with postgres-pr 0.7.0 (jeremyevans) (#1074)

=== 4.26.0 (2015-09-01)

* Make Dataset#== not consider frozen status in determining equality (jeremyevans)
* Support :if_exists option to drop_column on PostgreSQL (jeremyevans)
* Add Dataset#grouping_sets to support GROUP BY GROUPING SETS on PostgreSQL 9.5+, MSSQL 2008+, Oracle, DB2, and SQLAnywhere (jeremyevans)
* Fix handling of Class.new(ModelClass){set_dataset :table} on ruby 1.8 (jeremyevans)
* Use range function constructors instead of casts for known range types in pg_range (jeremyevans) (#1066)
* Make class_table_inheritance plugin work without sti_key (jeremyevans)
* Detect additional disconnect errors when using the tinytds adapter (jeremyevans)
* Make offset emulation without order but with explicit selection handle ambiguous column names (jeremyevans)
* Allow preparing already prepared statements when emulating limits and/or offsets (jeremyevans)
* Have Sequel::NoMatchingRow exceptions record the dataset related to the exception (pedro, jeremyevans) (#1060)

=== 4.25.0 (2015-08-01)

* Add Dataset#insert_conflict on PostgreSQL 9.5+, for upsert/insert ignore support using INSERT ON CONFLICT (jeremyevans)
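  A hedged sketch of INSERT ON CONFLICT usage (table and column names
  hypothetical):

    DB[:albums].insert_conflict.insert(:name=>"RF")   # ON CONFLICT DO NOTHING
    DB[:albums].insert_conflict(
      :target=>:name,
      :update=>{:copies=>Sequel.qualify(:excluded, :copies)}
    ).insert(:name=>"RF", :copies=>1)                 # upsert
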
* Support Dataset#group_rollup and #group_cube on PostgreSQL 9.5+ (jeremyevans)
* Automatically REORG tables when altering when using jdbc/db2 (karlhe) (#1054)
* Recognize constraint violation exceptions on swift/sqlite (jeremyevans)
* Recognize another check constraint violation exception message on SQLite (jeremyevans)
* Allow =~ and !~ to be used on ComplexExpressions (janko-m) (#1050)
* Support case sensitive SQL Server 2012 in MSSQL metadata queries (knut2) (#1049)
* Add Dataset#group_append, for appending to the existing GROUP BY clause (YorickPeterse) (#1047)
* Add inverted_subsets plugin, for creating an inverted subset method for each subset (celsworth) (#1042)
* Make Dataset#for_update not use the :read_only database when the dataset is executed (jeremyevans) (#1041)
* Add singular_table_names plugin, for changing Sequel to not pluralize table names by default (jeremyevans)
* PreparedStatement#prepare now raises an Error (jeremyevans)
* Clear delayed association pks when refreshing an object (jeremyevans)
* Add empty_array_consider_nulls extension to make Sequel consider NULL values when using IN/NOT IN with an empty array (jeremyevans)
* Make Sequel default to ignoring NULL values when using IN/NOT IN with an empty array (jeremyevans)
* Remove the deprecated firebird and informix adapters (jeremyevans)
* Make :collate option when creating columns literalize non-String values on PostgreSQL (jeremyevans) (#1040)
* Make dirty plugin notice when serialized column is changed (celsworth) (#1039)
* Allow prepared statements to use RETURNING (jeremyevans) (#1036)

=== 4.24.0 (2015-07-01)

* Allow class_table_inheritance plugin to support subclasses that don't add additional columns (QuinnHarris, jeremyevans) (#1030)
* Add :columns option to update_refresh plugin, specifying the columns to include in the RETURNING clause (celsworth) (#1029)
* Use column symbol key for auto validation unique errors if the unique index is on a single column (jeremyevans)
* Allow :timeout option to Database#listen in the postgres adapter to be a callable object (celsworth) (#1028)
* Add pg_inet_ops extension, for DSL support for PostgreSQL inet/cidr operators and functions (celsworth, jeremyevans) (#1024)
* Support :*_opts options in auto_validations plugin, for setting options for the underlying validation methods (celsworth, jeremyevans) (#1026)
* Support :delay_pks association option in association_pks to delay setting of associated_pks until after saving (jeremyevans)
* Make jdbc subadapters work if they issue queries while the subadapter is being loaded (jeremyevans) (#1022)
* Handle 64-bit auto incrementing primary keys in jdbc subadapters (DougEverly) (#1018, #1019)
* Remove the deprecated db2 and dbi adapters (jeremyevans)
* Make auto_validations plugin use :from=>:values option to setup validations on the underlying columns (jeremyevans)
* Add :from=>:values option to validation_helpers methods, for getting values from the values hash instead of a method call (jeremyevans)

=== 4.23.0 (2015-06-01)

* Make dataset.call_sproc(:insert) work in the jdbc adapter (flash-gordon) (#1013)
* Add update_refresh plugin, for refreshing a model instance when updating (jeremyevans)
* Add delay_add_association plugin, for delaying add_* method calls on new objects until after saving the object (jeremyevans)
* Add validate_associated plugin, for validating associated objects when validating the current object (jeremyevans)
* Make Postgres::JSONBOp#[] and #get_text return JSONBOp instances (jeremyevans) (#1005)
* Remove the fdbsql, jdbc/fdbsql, and openbase adapters (jeremyevans)
* Database#transaction now returns block return value if :rollback=>:always is used (jeremyevans)
* Allow postgresql:// connection strings as aliases to postgres://, for compatibility with libpq (jeremyevans) (#1004)
* Make Model#move_to in the list plugin handle out-of-range targets without raising an exception (jeremyevans) (#1003)
* Make Database#add_named_conversion_proc on PostgreSQL handle conversion procs for enum types (celsworth) (#1002)

=== 4.22.0 (2015-05-01)

* Deprecate the db2, dbi, fdbsql, firebird, jdbc/fdbsql, informix, and openbase adapters (jeremyevans)
* Avoid hash allocations and rehashes (jeremyevans)
* Don't silently ignore :jdbc_properties Database option in jdbc adapter (jeremyevans)
* Make tree plugin set reciprocal association for children association correctly (lpil, jeremyevans) (#995)
* Add Sequel::MassAssignmentRestriction exception, raised for mass assignment errors in strict mode (jeremyevans) (#994)
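  For illustration, when the new exception is raised (model and column names
  hypothetical):

    begin
      Album.new(:id=>1)   # primary key is restricted by default
    rescue Sequel::MassAssignmentRestriction
      # raised in strict mode when setting a restricted column
    end
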
* Handle ODBC::SQL_BIT type as boolean in the odbc adapter, fixing boolean handling on odbc/mssql (jrgns) (#993)
* Make auto_validations plugin check :default entry instead of :ruby_default entry for checking existence of default value (jeremyevans) (#990)
* Adapters should now set :default schema option to nil when adapter can determine that the value is nil (jeremyevans)
* Do not add a schema :max_length entry for a varchar(max) column on MSSQL (jeremyevans)
* Allow :default value for PostgreSQL array columns to be a ruby array when using the pg_array extension (jeremyevans) (#989)
* Add csv_serializer plugin for serializing model objects to and from csv (bjmllr, jeremyevans) (#988)
* Make Dataset#to_hash and #to_hash_groups handle single array argument for model datasets (jeremyevans)
* Handle Model#cancel_action in association before hooks (jeremyevans)
* Use a condition variable instead of busy waiting in the threaded connection pools on ruby 1.9+ (jeremyevans)
* Use Symbol#to_proc instead of explicit blocks (jeremyevans)

=== 4.21.0 (2015-04-01)

* Support :tsquery and :tsvector options in Dataset#full_text_search on PostgreSQL, for using existing tsquery/tsvector expressions (jeremyevans)
* Fix TinyTds::Error being raised when trying to cancel a query on a closed connection in the tinytds adapter (jeremyevans)
* Add GenericExpression#!~ for inverting =~ on ruby 1.9 (similar to inverting a hash) (jeremyevans) (#979)
* Add GenericExpression#=~ for equality, inclusion, and pattern matching (similar to using a hash) (jeremyevans) (#979)
* Add Database#add_named_conversion_proc on PostgreSQL to make it easier to add conversion procs for types by name (jeremyevans)
* Make Sequel.pg_jsonb return JSONBOp instances instead of JSONOp instances when passed other than Array or Hash (jeremyevans) (#977)
* Demodulize default root name in json_serializer plugin (janko-m) (#968)
* Make Database#transaction work in after_commit/after_rollback blocks (jeremyevans)

=== 4.20.0 (2015-03-03)

* Restore the use of AUTOINCREMENT on SQLite (jeremyevans) (#965)
* Duplicate the associations hash when duplicating a model object (jeremyevans)
* Correctly apply association limit when eager loading with an eager block using default limit strategy on some databases (jeremyevans)
* Fix eager loading when using the :window_function limit strategy with an eager block and cascaded associations (jeremyevans)
* Add support for set_column_type :auto_increment=>true to add AUTO_INCREMENT to existing column on MySQL (jeremyevans) (#959)
* Add support for overriding the :instance_specific association option (jeremyevans)
* Recognize MSSQL bit type as boolean in the schema_dumper (jeremyevans)
* Skip eager loading queries if there are no matching keys (jeremyevans) (#952)
* Dataset#paged_each now returns an enumerator if not passed a block (jeremyevans)
* Use to_json :root option with string value as the JSON object key in the json_serializer plugin (jeremyevans)
* Allow create_enum in the pg_enum extension to be reversible in migrations (celsworth) (#951)
* Have swift adapter respect database and application timezone settings (asppsa, jeremyevans) (#946)
* Don't have the static cache plugin attempt to validate objects (jeremyevans)
* Make freeze not validate objects if their errors are already frozen (jeremyevans)
* Only use prepared statements for associations if caching association metadata (jeremyevans)
* Set parent association when loading descendants in the rcte_tree plugin (jeremyevans)
* Add Database#transaction :before_retry option, specifying a proc to call before retrying (uhoh-itsmaciek) (#941)

=== 4.19.0 (2015-02-01)

* Make jdbc/sqlanywhere correctly set :auto_increment entry in schema hashes (jeremyevans)
* Add Model#cancel_action for canceling actions in before hooks, instead of having the hooks return false (jeremyevans)
* Support not setting @@wait_timeout on MySQL via :timeout=>nil Database option (jeremyevans)
* Add accessed_columns plugin, recording which columns have been accessed for a model instance (jeremyevans)
* Use correct migration version when using IntegerMigrator with :allow_missing_migration_files (blerins) (#938)
* Make Dataset#union, #intersect, and #except automatically handle datasets with raw SQL (jeremyevans) (#934)
* Add column_conflicts plugin to automatically handle columns that conflict with method names (jeremyevans) (#929)
* Add Model#get_column_value and #set_column_value to get/set column values (jeremyevans) (#929)
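  A minimal sketch of the new accessors (the conflicting column name is
  hypothetical):

    class Car < Sequel::Model
      plugin :column_conflicts   # e.g. a column named "model"
    end
    car = Car.new
    car.set_column_value(:model=, "Roadster")
    car.get_column_value(:model)   # => "Roadster"
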
=== 4.18.0 (2015-01-02)

* Make Dataset#empty? work when the dataset is ordered by a non-column expression (pete) (#923)
* Fix passing a hash value to :eager association option (jeremyevans)
* Treat all PG::ConnectionBad exceptions as disconnect errors in the postgres adapter (jeremyevans)
* Add :auto_increment key to schema information for primary key columns (jeremyevans) (#919)
* Fix handling of schema qualified tables in many_through_many associations (jeremyevans)

=== 4.17.0 (2014-12-01)

* Fix handling of Sequel::SQL::Blob instances in bound variables in the postgres adapter (jeremyevans) (#917)
* Add :preconnect Database option for immediately creating the maximum number of connections (jeremyevans)
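  For illustration, a sketch of the :preconnect option (connection URL
  hypothetical):

    DB = Sequel.connect("postgres://localhost/mydb",
                        :max_connections=>10,
                        :preconnect=>true)   # open all 10 connections up front
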
* Support DB.pool.max_size for the single connection pools (jeremyevans)
* Work around regression in jdbc-sqlite3 3.8.7 where empty blobs are returned as nil (jeremyevans)
* Work around regression in jdbc-sqlite3 3.8.7 when using JDBC getDate method for date parsing (jeremyevans)
* Make Model#update_or_create return object if existing object exists but updates are not necessary (contentfree) (#916)
* Add Dataset#server? for conditionally setting a default server to use if no default is present (jeremyevans)
* Add Database#sharded? for determining if database uses sharding (jeremyevans)
* Fix server used by Dataset#insert_select on PostgreSQL (jeremyevans)
* Fix server used for deleting model instances when using sharding (jeremyevans)

=== 4.16.0 (2014-11-01)

* Make Database#create_table? and #create_join_table? not use IF NOT EXISTS if indexes are being added (jeremyevans) (#904)
* Dataset#distinct now accepts virtual row blocks (chanks) (#901)
* Recognize disconnect errors in the postgres adapter when SSL is used (jeremyevans) (#900)
* Stop converting '' default values to nil default values on MySQL (jeremyevans)
* Add Model#qualified_pk_hash, for returning a hash with qualified pk keys (jeremyevans)
* Make validates_unique use a qualified primary key if the dataset is joined (jeremyevans) (#895)
* Make Sequel::Model.cache_associations = false skip the database's schema cache when loading the schema (jeremyevans)
* Make Database#foreign_key_list work on Microsoft SQL Server 2005 (jeremyevans)
* Make create_table with :foreign option reversible on PostgreSQL (jeremyevans)
* Make drop_table with :foreign option on PostgreSQL drop a foreign table (johnnyt) (#892)

=== 4.15.0 (2014-10-01)

* Make AssociationReflection#reciprocal not raise error if associated class contains association with invalid associated class (jeremyevans)
* Make create_view(:view_name, dataset, :materialized=>true) reversible on PostgreSQL (jeremyevans)
* Add support for creating foreign tables on PostgreSQL using :foreign and :options create_table options (jeremyevans)
* Raise Error if a primary key is necessary to use an association, but the model doesn't have a primary key (jeremyevans)
* Make tactical_eager_loading plugin work for limited associations (jeremyevans)
* Add PlaceholderLiteralizer#with_dataset, for returning a new literalizer using a modified dataset (jeremyevans)
* Support active_model 4.2.0beta1 in the active_model plugin (jeremyevans)
* Make Dataset#insert in the informix adapter return last inserted id (jihwans) (#887)
* Support :nolog option in the informix adapter to disable transactions (jihwans) (#887)
* Remove optional argument for Postgres::{JSON,JSONB}Op#to_record and #to_recordset (jeremyevans)
* Add support for FoundationDB SQL Layer, via fdbsql and jdbc/fdbsql adapters (ScottDugas, jeremyevans) (#884)
* Work around bug in old versions of MySQL when schema dumping a table with multiple timestamp columns (jeremyevans) (#882)
* Support more array types by default in the pg_array extension, such as xml[] and uuid[] (jeremyevans)
* Add Sequel::Model.cache_associations accessor, which can be set to false to not cache association metadata (jeremyevans)
* Add split_values plugin, for moving noncolumn entries from the values hash into a separate hash (jeremyevans) (#868)

=== 4.14.0 (2014-09-01)

* Raise original exception if there is an exception raised when rolling back transaction/savepoint (jeremyevans) (#875)
* Allow delayed evaluation blocks to take dataset as an argument (jeremyevans)
* Allow more types as filter expressions, only specifically disallow Numeric/String expressions (jeremyevans)
* Remove objects from cached association array at time of nested_attributes call instead of waiting until save (jeremyevans)
* Handle composite primary keys when working around validation issues for one_to_(one|many) associations in nested_attributes plugin (jeremyevans) (#870)
* Recognize additional disconnect error in jdbc/jtds adapter (jeremyevans)
* Have association_join work with existing model selections (jeremyevans)
* Fix regression in class_table_inheritance plugin when lazily loading column in middle table (jeremyevans) (#862)
* Add cache_key_prefix method to caching plugin, which can be overridden for custom handling (pete) (#861)
* Add :when option to PostgreSQL create_trigger method, for adding a filter to the trigger (aschrab) (#860)
* Recognize an additional serialization failure on PostgreSQL (tmtm) (#857)

=== 4.13.0 (2014-08-01)

* Use copy constructors instead of overriding Model#dup and #clone (ged, jeremyevans) (#852)
* Fix handling of MySQL create_table foreign_key calls using :key option (mimperatore, jeremyevans) (#850)
* Handle another disconnection error in the postgres adapter (lbosque) (#848)
* Make list plugin update remaining positions after destroying an instance (ehq, jeremyevans) (#847)
* Unalias aliased tables in Dataset#insert (jeremyevans)
* Add insert_returning_select plugin, for setting up RETURNING for inserts for models selecting explicit columns (jeremyevans)
* Make Model#save use insert_select if the dataset used for inserting already uses returning (jeremyevans)
* Add Dataset#unqualified_column_for helper method, returning unqualified version of possibly qualified column (jeremyevans)
* Calling Dataset#returning when the Database does not support or emulate RETURNING now raises an Error (jeremyevans)
* Emulate RETURNING on Microsoft SQL Server using OUTPUT, as long as only simple column references are used (jeremyevans)
* Switch class_table_inheritance plugin to use JOIN ON instead of JOIN USING (jeremyevans)
* Qualify primary keys for models with joined datasets when looking up model instances by primary key (jeremyevans)
* Fix qualification of columns when Dataset#graph automatically wraps the initially graphed dataset in a subselect (jeremyevans)
* Make Dataset#joined_dataset? a public method (jeremyevans)
* Allow external jdbc, odbc, and do subadapters to be loaded automatically (jeremyevans)
* Recognize another disconnect error in the jdbc/mysql adapter (jeremyevans)
* Set primary keys correctly for models even if datasets select specific columns (jeremyevans)
* Add dataset_source_alias extension, for automatically aliasing datasets to their first source (jeremyevans)
* Use qualified columns in the lazy_attributes plugin (jeremyevans)
* Add column_select plugin, for using explicit column selections in model datasets (jeremyevans)
* Use associated model's existing selection for join associations if it consists solely of explicitly qualified columns (jeremyevans)
* Add round_timestamps extension for automatically rounding timestamp values to database precision before literalizing (jeremyevans)
* Make rake default task run plugin specs as well as core/model specs (jeremyevans)
* Use all_tables and all_views for Database#tables and #views on Oracle (jeremyevans)
* Use all_tab_cols instead of user_tab_cols for defaults parsing in the oracle adapter (jeremyevans)
* Fix recursive mutex locking issue on JRuby when using Sequel::Model(dataset) (jeremyevans) (#841)
* Make composition and serialization plugins support validations on underlying columns (jeremyevans)
* Fix regression in timestamps and table inheritance plugin where column values would not be saved if validation is skipped (jeremyevans) (#839)
* Add pg_enum extension, for dealing with PostgreSQL enums (jeremyevans)
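  A minimal sketch of the pg_enum extension (type, table, and column names
  hypothetical):

    DB.extension :pg_enum
    DB.create_enum(:mood, %w[sad ok happy])
    DB.create_table(:people) do
      primary_key :id
      mood :current_mood
    end
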
* Set version when using MySQL/SQLite emulation in the mock adapter (jeremyevans) * Add support for CUBRID/SQLAnywhere emulation to the mock adapter (jeremyevans) * Add support for the jsonb operators added in PostgreSQL 9.4 to the pg_json_ops extension (jeremyevans) * Add support for new json/jsonb functions added in PostgreSQL 9.4 to the pg_json_ops extension (jeremyevans) * Add support for the PostgreSQL 9.4+ jsonb type to the pg_json_ops extension (jeremyevans) * Add support for derived column lists to Sequel.as and SQL::AliasMethods#as (jeremyevans) * Support connecting to a DB2 catalog name in the ibmdb adapter (calh) (#821) * Fix warnings in some cases in the ibmdb adapter (calh) (#820) * Add SQL::Function#with_ordinality for creating set returning functions WITH ORDINALITY (jeremyevans) * Add SQL::Function#filter for creating filtered aggregate function calls (jeremyevans) * Add SQL::Function#within_group for creating ordered-set and hypothetical-set aggregate functions (jeremyevans) * Add SQL::Function#lateral, for creating set returning functions that will be preceded by LATERAL (jeremyevans) * Add SQL::Function#quoted and #unquoted methods, to enable/disable quoting of function names (jeremyevans) * Deprecate Dataset#{window,emulated,}_function_sql_append (jeremyevans) * Deprecate SQL::WindowFunction and SQL::EmulatedFunction classes, switch to using options on SQL::Function (jeremyevans) * Only modify changed_columns if deserialized value changes in the serialization plugin (jeremyevans) (#818) * Support PostgreSQL 9.4+ jsonb type in the pg_json extension (jeremyevans) * Allow Postgres::ArrayOp#unnest to accept arguments in the pg_array_ops extension (jeremyevans) * Add Postgres::ArrayOp#cardinality to the pg_array_ops extension (jeremyevans) * Add :check option to Database#create_view for WITH [LOCAL] CHECK OPTION support (jeremyevans) * Add :concurrently option to Database#refresh_view on PostgreSQL to support concurrent refresh of materialized views (jeremyevans) * Call the :after_connect Database option proc with both the connection and server/shard if it accepts 2 arguments (pedro, jeremyevans) (#813) * Make multiple plugins set values before validation instead of before create, works better with auto_validations (jeremyevans) * Support a default Dataset#import slice size, set to 500 on SQLite (jeremyevans) (#810) * Make :read_only transaction option be per-savepoint on PostgreSQL (jeremyevans) (#807) * Add :rank option to Dataset#full_text_search on PostgreSQL, to order by the ranking (jeremyevans) (#809) * Remove methods deprecated in 4.10.0 (jeremyevans) === 4.10.0 (2014-05-01) * Make Model.include API same as Module.include (ged) (#803) * Dataset::PlaceholderLiteralizer now handles DelayedEvaluations correctly (jeremyevans) * Refactor type conversion in the jdbc adapter, for up to a 20% speedup (jeremyevans) * Add Dataset#with_fetch_size to jdbc adapter, for setting fetch size for JDBC ResultSets (jeremyevans) * Default to a fetch_size of 100 in the jdbc/oracle adapter, similar to the oci8-based oracle adapter (jeremyevans) * Add Database#fetch_size accessor and :fetch_size option to jdbc adapter, for setting JDBC Statement fetch size (jeremyevans) * Automatically determine array type in pg_array_associations plugin, explicitly cast arrays in more places (jeremyevans, maccman) (#800) * Speed up Dataset#literal for symbols 60% by caching results, speeding up dataset literalization up to 40% or more (jeremyevans) * Speed up Sequel.split_symbol 10-20x by caching
results, speeding up dataset literalization up to 80% or more (jeremyevans) * Speed up dataset literalization for simple datasets by up to 100% (jeremyevans) * Support :fractional_seconds Database option on MySQL 5.6.5+ to support fractional seconds by default (jeremyevans) (#797) * Work around MySQL 5.6+ bug when combining DROP FOREIGN KEY and DROP INDEX in same ALTER TABLE statement (jeremyevans) * Make auto_validations plugin handle models that select from subqueries (jeremyevans) * Recognize additional disconnect errors in the postgres adapter (jeremyevans) * Make import/multi_insert insert multiple rows in a single query using a UNION on Oracle, DB2, and Firebird (jeremyevans) * Speed up association_pks many_to_many setter method by using Dataset#import (jeremyevans) * Add Model.prepared_finder, similar to .finder but using a prepared statement (jeremyevans) * Model.def_{add_method,association_dataset_methods,remove_methods} are now deprecated (jeremyevans) * Model.eager_loading_dataset and Model.apply_association_dataset_opts are now deprecated (jeremyevans) * Make prepared_statement_associations plugin handle one_through_one and one_through_many associations (jeremyevans) * Use placeholder literalizer for regular association loading for up to 85% speedup (jeremyevans) * Use placeholder literalizer for eager association loading for up to 20% speedup (jeremyevans) * Make Model#marshallable! work correctly when using the tactical_eager_loading plugin (jeremyevans) * Respect :foreign_key_constraint_name option when adding columns to existing table on MySQL (noah256) (#795) * AssociationReflection#association_dataset now handles joining tables if necessary (jeremyevans) * Support drop_view :if_exists option on SQLite, MySQL, H2, and HSQLDB (jeremyevans) (#793) * Support drop_table :if_exists option on HSQLDB (jeremyevans) * Add Database#transaction :auto_savepoint option, for automatically using a savepoint in nested transactions (jeremyevans) * Add :server_version Database option on Microsoft SQL Server, instead of querying the database for it (jeremyevans) * Support :correlated_subquery as an eager_graph and filter by associations limit strategy for one_to_* associations (jeremyevans) * Support named parameters in call_mssql_sproc on Microsoft SQL Server (y.zemlyanukhin, jeremyevans) (#792) * Handle placeholder literalizer arguments when emulating offsets (jeremyevans) * Don't attempt to emulate offsets if the dataset uses literal SQL (jeremyevans) * Use a UNION-based strategy by default to eagerly load limited associations (jeremyevans) * Support offsets without limits on MySQL, SQLite, H2, SQLAnywhere and CUBRID (jeremyevans) * Remove the install/uninstall rake tasks (jeremyevans) * Use INSERT VALUES with multiple rows for Dataset#import and #multi_insert on more databases (jeremyevans) * Support common table expressions (WITH clause) on SQLite >=3.8.3 (jeremyevans) === 4.9.0 (2014-04-01) * Recognize CHECK constraint violations on newer versions of SQLite (jeremyevans) * Do not attempt to eager load when calling Dataset#columns in the eager_each plugin (jeremyevans) * Support :driver option for jdbc adapter, for specifying driver class for cases where getConnection doesn't work (jeremyevans) (#785) * Massive speedup for PostgreSQL array parser (jeremyevans) (#788) * Add current_datetime_timestamp extension, for current Time/DateTime instances that are literalized as CURRENT_TIMESTAMP (jeremyevans) * Recognize additional unique constraint violations on SQLite (jeremyevans) (#782)
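Illustrative sketch of the Dataset#paged_each :strategy=>:filter option from this 4.9.0 block (listed below); the table and columns are hypothetical:

  # process a large table in chunks; :strategy=>:filter pages by
  # filtering on the order columns instead of using OFFSET
  DB[:albums].order(:id).paged_each(:rows_per_fetch=>500, :strategy=>:filter) do |album|
    # work with each row hash here
  end

* 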
Don't remove column value when validating nested attributes for one_to_* association where association foreign key is the model's primary key (jeremyevans) * Add Dataset#disable_insert_returning on PostgreSQL for skipping implicit use of RETURNING (jeremyevans) * Automatically optimize Model.[], .with_pk, and .with_pk! for models with composite keys (jeremyevans) * Automatically optimize Model.[] when called with a hash (jeremyevans) * Automatically optimize Model.find, .first, and .first! when called with a single argument (jeremyevans) * Add Model.finder for creating optimized finder methods using Dataset::PlaceholderLiteralizer (jeremyevans) * Add Dataset::PlaceholderLiteralizer optimization framework (jeremyevans) * Add Dataset#with_sql_{each,all,first,single_value,insert,update} optimized methods (jeremyevans) * Make pg_array extension use correct type when typecasting column values for smallint, oid, real, character, and varchar arrays (jeremyevans) * Make Database#column_schema_to_ruby_default a public method in the schema_dumper extension (jeremyevans) (#776) * Fix multiple corner cases in the eager_graph support (jeremyevans) (#771) * Use streaming to implement paging for Dataset#paged_each in the mysql2 adapter (jeremyevans) * Use a cursor to implement paging for Dataset#paged_each in the postgres adapter (jeremyevans) * Add Database#create_join_table? and #create_join_table! for consistency (jeremyevans) * Add Dataset#where_current_of to the postgres adapter for supporting updating rows based on a cursor's current position (jeremyevans) * Add Dataset#use_cursor :hold option in the postgres adapter for supporting cursor use outside of a transaction (jeremyevans) * Add Dataset#paged_each :strategy=>:filter option for increased performance (jeremyevans) === 4.8.0 (2014-03-01) * Add SQL::AliasedExpression#alias alias for #aliaz (jeremyevans) * Handle SQL::Identifier, SQL::QualifiedIdentifier, and SQL::AliasedExpression objects as first argument to Dataset#graph (jeremyevans) * Respect qualification and aliases in symbols passed as first argument to Dataset#graph (dividedmind) (#769) * Recognize new constraint violation error messages in SQLite 3.8.2+ (itswindtw) (#766) * Use limit strategy to correctly handle limited associations in the dataset_associations plugin (jeremyevans) * Handle issues in dataset_associations plugin when dataset uses unqualified identifiers for associations requiring joins (jeremyevans) * Handle fractional seconds in input timestamps in the odbc/mssql adapter (Ross Attrill, jeremyevans) * Return fractional seconds in timestamps in the odbc adapter (jeremyevans) * Support :plain and :phrase options to Dataset#full_text_search on PostgreSQL (jeremyevans) * Use limit strategy to correctly handle filtering by limited associations (jeremyevans) * Simplify queries used for filtering by associations with conditions (jeremyevans) * Use an eager limit strategy by default for *_one associations with orders (jeremyevans) * Support :limit_strategy eager_graph option, for specifying strategy used for limited associations in that eager graph (jeremyevans) * Add eager_graph_with_options to model datasets, for specifying options specific to the eager_graph call (jeremyevans) * Handle offsets on *_many associations when eager graphing when there are no associated results (jeremyevans) * Make Database#register_array_type work without existing scalar conversion proc in the pg_array extension (jeremyevans) * Handle presence validations on foreign keys in associated objects when 
creating new associated objects in the nested_attributes plugin (jeremyevans) * Respect offsets when eager graphing *_one associations (jeremyevans) * Add association_join to model datasets, for setting up joins based on associations (jeremyevans) * Add one_through_many association to many_through_many plugin, for only returning a single record (jeremyevans) * Add :graph_order association option, useful when :order needs to contain qualified identifiers (jeremyevans) * Add one_through_one association, similar to many_to_many but only returning a single record (jeremyevans) === 4.7.0 (2014-02-01) * Don't swallow underlying exception if there is an exception closing the cursor on PostgreSQL (jeremyevans) (#761) * Recognize primary key unique constraint violations on MSSQL and SQLAnywhere (jeremyevans) * Recognize composite unique constraint violations on SQLite (timcraft) (#758) * Make #* method without arguments on SQL::Function return a Function with * prepended to the arguments (jeremyevans) * Add #function to SQL::Identifier and SQL::QualifiedIdentifier, allowing for easy use of schema qualified functions or function names that need quoting (jeremyevans) * Add SQL::Function#distinct for easier creation of aggregate functions using DISTINCT (jeremyevans) * Add SQL::Function#over for easier creation of window functions (jeremyevans) * Don't clear validation instance_hooks until after a successful save (jeremyevans) * Support :raise_on_save_failure option for one_to_many, pg_array_to_many, and many_to_pg_array associations (jeremyevans) * Make SQLTime#to_s return a string in HH:MM:SS format, since it shouldn't include date information (jeremyevans) * Support the Database#tables :schema option in the jdbc adapter (robbiegill, jeremyevans) (#755) * Automatically rollback transactions in killed threads in ruby 2.0+ (chanks) (#752) * Add update_or_create plugin, for updating an object if it exists, or creating such an object if it does not (jeremyevans) * Make auto_validations uniqueness validations work correctly for STI subclasses (jeremyevans) * Support :dataset option to validates_unique validation (jeremyevans) === 4.6.0 (2014-01-02) * Add Database#call_mssql_sproc on MSSQL for calling stored procedures and handling output parameters (jrgns, jeremyevans) (#748) * Handle RuntimeErrors raised by oci8 in the oracle adapter (jeremyevans) * Support OFFSET/FETCH on Microsoft SQL Server 2012 (jeremyevans) * Support :server option for Database#{commit,rollback}_prepared_transaction on PostgreSQL, MySQL, and H2 (jeremyevans) (#743) * Do not attempt to eager load and raise an exception when doing Model.eager(...).naked.all (jeremyevans) * Recognize a couple additional disconnect errors in the jdbc/postgresql adapter (jeremyevans) (#742) === 4.5.0 (2013-12-02) * Support :on_commit=>(:drop|:delete_rows|:preserve_rows) options when creating temp tables on PostgreSQL (rosenfeld) (#737) * Make Dataset#insert work on PostgreSQL if the table name is a SQL::PlaceholderLiteralString (jeremyevans) (#736) * Copy unique constraints when emulating alter_table operations on SQLite (jeremyevans) (#735) * Don't return clob column values as SQL::Blob instances in the db2 and ibmdb adapters unless use_clob_as_blob is true (jeremyevans) * Make use_clob_as_blob false by default on DB2 (jeremyevans) * Fix usage of Sequel::SQL::Blob objects as prepared statement arguments in jdbc/db2 adapter when use_clob_as_blob is false (jeremyevans)
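Illustrative sketch of the update_or_create plugin added in 4.7.0 above (model and column names are hypothetical):

  Album.plugin :update_or_create
  # update the album named 'Hello' if it exists, otherwise create it
  Album.update_or_create(:name=>'Hello') do |album|
    album.num_copies_sold = 1000
  end

* Add mssql_optimistic_locking plugin, using a timestamp/rowversion column to protect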
against concurrent updates (pinx, jeremyevans) (#731) * Make Model.primary_key array immutable for composite keys (chanks) (#730) === 4.4.0 (2013-11-01) * Make Database#tables not show tables in the recycle bin on Oracle (jeremyevans) (#728) * Don't automatically order on all columns when emulating offsets for unordered datasets on DB2 (jeremyevans) * Improve PostgreSQL type support in the jdbc/postgresql adapter (jeremyevans) * Make offset emulation on Oracle work when using columns that can't be ordered (jeremyevans, sdeming) (#724, #725) * Make filter by associations support handle associations with :conditions or block (jeremyevans) * Make association cloning handle :block correctly for clones of clones (jeremyevans) * Make association cloning handle :eager_block option correctly (jeremyevans) * Make add_primary_key work on h2 (jeremyevans) * Add support for foreign key parsing on Oracle (jeremyevans) * Add support for foreign key parsing to the jdbc adapter (jeremyevans) * Make add_foreign_key work on HSQLDB (jeremyevans) * Add table_select plugin for selecting table.* instead of * for model datasets (jeremyevans) * Issue constraint_validation table deletes before inserts, so modifying constraint via drop/add in same alter_table block works (jeremyevans) * Support add_*/remove_*/remove_all_* pg_array_to_many association methods on unsaved model objects (jeremyevans) * Add Sybase SQLAnywhere support via new sqlanywhere and jdbc/sqlanywhere adapters (gditrick, jeremyevans) * Add Dataset#offset for setting the offset separately from the limit (Paul Henry, jeremyevans) (#717) === 4.3.0 (2013-10-02) * Fix literalization of empty blobs on MySQL (jeremyevans) (#715) * Ensure Dataset#page_count in pagination extension is at least one (jeremyevans) (#714) * Recognize another disconnect error in the jdbc/as400 adapter (jeremyevans) * Make Dataset#qualify and Sequel.delay work together (jeremyevans) * Recognize citext type as string on PostgreSQL (isc) (#710) * Support composite keys in the rcte_tree plugin (jeremyevans) * Support composite keys in the tree plugin (jeremyevans) * Make Migrator.migrator_class public (robertjpayne, jeremyevans) (#708) * Make PostgreSQL empty array literalization work correctly on PostgreSQL <8.4 (jeremyevans) * Add Sequel extensions guide (jeremyevans) * Add model plugins guide (jeremyevans) * Add error_sql Database extension, allowing DatabaseError#sql to return SQL query that caused underlying exception (jeremyevans) * Make Dataset#each_page in pagination extension return enumerator if no block is given (justinj) (#702) === 4.2.0 (2013-09-01) * Support custom :flags option in mysql2 adapter (jeremyevans) (#700) * Add implementations of Dataset#freeze and Dataset#dup (jeremyevans) * Add implementations of Model#dup and Model#clone (jeremyevans) * Don't have partial_indexes returned by Database#indexes on MSSQL 2008+ (jeremyevans) * Support partial indexes on SQLite 3.8.0+ (jeremyevans) * Add Database#supports_partial_indexes? 
to check for partial index support (mluu, jeremyevans) (#698) * The static_cache plugin now disallows saving/destroying if the :frozen=>false option is not used (jeremyevans) * Support :frozen=>false option in static_cache plugin, for having new instances returned instead of frozen cached instances (jeremyevans) * Add pg_static_cache_updater Database extension for listening for changes to tables and updating static_cache caches automatically (jeremyevans) * Add mssql_emulate_lateral_with_apply extension for emulating LATERAL queries using CROSS/OUTER APPLY (jeremyevans) * Support LATERAL queries via Dataset#lateral (jeremyevans) * Add pg_loose_count Database extension, for fast approximate counts of PostgreSQL tables (jeremyevans) * Add from_block Database extension, for having Database#from block affect FROM instead of WHERE (jeremyevans) * Support :cursor_name option in postgres adapter Dataset#use_cursor (heeringa, jeremyevans) (#696) * Fix placeholder literal strings when used with an empty placeholder hash (trydionel, jeremyevans) (#695) === 4.1.1 (2013-08-01) * Fix select_map, select_order_map, and single_value methods on eager_graphed datasets (jeremyevans) === 4.1.0 (2013-08-01) * Support :inherits option in Database#create_table on PostgreSQL, for table inheritance (jeremyevans) * Handle dropping indexes for schema qualified tables on PostgreSQL (jeremyevans) * Add Database#error_info on PostgreSQL 9.3+ if pg-0.16.0+ is used, to get a hash of metadata for a given database exception (jeremyevans) * Allow prepared_statements plugin to work with instance_filters and update_primary_key plugins (jeremyevans) * Support deferrable exclusion constraints on PostgreSQL using the :deferrable option (mfoody) (#687) * Make Database#run and #<< accept SQL::PlaceholderLiteralString values (jeremyevans) * Deprecate :driver option in odbc adapter since it appears to be broken (jeremyevans) * Support :drvconnect option in odbc adapter for supplying the ODBC connection string directly (jeremyevans) * Support mysql2 0.3.12+ result streaming via Dataset#stream (jeremyevans) * Convert Java::JavaUtil::HashMap to ruby Hash in jdbc adapter, for better handling of PostgreSQL hstore type (jeremyevans) (#686) * Raise NoMatchingRow if calling add_association with a primary key value that doesn't match an existing row (jeremyevans) * Allow PostgreSQL add_constraint to support :not_valid option (jeremyevans) * Allow CHECK constraints to have options by using an options hash as the constraint name (jeremyevans) * Correctly raise error when using an invalid virtual row block function call (jeremyevans) * Support REPLACE on SQLite via Dataset#replace and #multi_replace (etehtsea) (#681) === 4.0.0 (2013-07-01) * Correctly parse composite primary keys on SQLite 3.7.16+ (jeremyevans) * Recognize another disconnect error in the jdbc/oracle adapter (jeremyevans) * Add pg_json_ops extension for calling JSON functions and operators in PostgreSQL 9.3+ (jeremyevans) * Handle non-JSON plain strings, integers, and floats in PostgreSQL JSON columns in pg_json extension (jeremyevans) * Dataset#from now accepts virtual row blocks (jeremyevans) * Add Database#refresh_view on PostgreSQL to support refreshing materialized views (jeremyevans) * Support the Database#drop_view :if_exists option on PostgreSQL (jeremyevans) * Support the Database#{create,drop}_view :materialized option for creating materialized views in PostgreSQL 9.3+ (jeremyevans) * Support the Database#create_view :recursive option for creating recursive views in 
PostgreSQL 9.3+ (jeremyevans) * Support the Database#create_view :columns option for using explicit columns (jeremyevans) * Support the Database#create_schema :owner and :if_not_exists options on PostgreSQL (jeremyevans) * Support :index_type=>:gist option to create GIST full text indexes on PostgreSQL (jeremyevans) * Add Postgres::ArrayOp#replace for the array_replace function in PostgreSQL 9.3+ (jeremyevans) * Add Postgres::ArrayOp#remove for the array_remove function in PostgreSQL 9.3+ (jeremyevans) * Add Postgres::ArrayOp#hstore for creating hstores from arrays (jeremyevans) * Make Postgres::ArrayOp#[] return ArrayOp if given a range (jeremyevans) * Ensure that CHECK constraints are surrounded with parentheses (jeremyevans) * Ensure Dataset#unbind returned variable hash uses symbol keys (jeremyevans) * Add pg_array_associations plugin, for associations based on PostgreSQL arrays containing foreign keys (jeremyevans) * Add Sequel.deep_qualify, for easily doing a deep qualification (jeremyevans) * Enable use of window functions for limited eager loading by default (jeremyevans) * Handle offsets correctly when eager loading one_to_one associations (jeremyevans) * Raise exception for infinite and NaN floats on MySQL (jeremyevans) (#677) * Make dataset string literalization that requires database connection use dataset's chosen server (jeremyevans) * Make sure an offset without a limit is handled correctly when eager loading (jeremyevans) * Allow providing ranges as subscripts for array[start:end] (jeremyevans) * Prepare one_to_one associations in the prepared_statements_associations plugin (jeremyevans) * Use prepared statements when the association has :conditions in the prepared_statements_associations plugin (jeremyevans) * Fix prepared statement usage in some additional cases in the prepared_statements_associations plugin (jeremyevans) * Hex escape blob input on MySQL (jeremyevans) * Handle more disconnect errors when using the postgres adapter with the postgres-pr driver (jeremyevans) * Model#setter_methods private method now accepts 1 argument instead of 2 (jeremyevans) * Model#set_restricted and #update_restricted private methods now accept 2 arguments instead of 3 (jeremyevans) * ungraphed on an eager_graph dataset now resets the original row_proc (jeremyevans) * eager_graph now returns a naked dataset (jeremyevans) * All behavior deprecated in Sequel 3.48.0 has been removed (jeremyevans) * Make adapter/integration spec environment variables more consistent (jeremyevans) * Sequel no longer provides default databases for adapter/integration specs (jeremyevans) * Model#save no longer calls #_refresh internally (jeremyevans) * Model#set_all and #update_all can now update the primary key (jeremyevans) * Integrate many_to_one_pk_lookup and association_autoreloading plugins into main associations plugin (jeremyevans) * Make defaults_setter plugin operate in a lazy manner (jeremyevans) * Plugins now extend the model class with ClassMethods before including InstanceMethods (jeremyevans) * Remove Model::EMPTY_INSTANCE_VARIABLES (jeremyevans) * Model.raise_on_typecast_failure now defaults to false (jeremyevans) * Model#_save private method now only takes a single argument (jeremyevans) * Remove Dataset#columns_without_introspection from columns_introspection extension (jeremyevans) * Make boolean prepared statement arguments work on sqlite adapter when integer_booleans is true (jeremyevans) * Make Database#tables and #views reflect search_path on PostgreSQL (jeremyevans) * SQLite now 
defaults to true for integer_booleans and false for use_timestamp_timezones (jeremyevans) * Make the default value for most option hashes a shared frozen hash (jeremyevans) * Remove Sequel::NotImplemented exception (jeremyevans) * Automatically alias single expressions in Dataset#get, #select_map, and #select_order_map, to work around possible DoS issues (jeremyevans) * Use a connection queue instead of stack by default for threaded connection pools (jeremyevans) * Remove SQL::SQLArray alias for SQL::ValueList (jeremyevans) * Remove SQL::NoBooleanInputMethods empty module (jeremyevans) === 3.48.0 (2013-06-01) * Make named_timezones extension usable by databases allowing timezone strings to be given to Database#timezone= (jeremyevans) * Make Dataset#or just clone if given an empty argument (jeremyevans) * Deprecate using a mismatched number of placeholders and arguments in a placeholder literal string (jeremyevans) * Add Dataset#qualify_to and #qualify_to_first_source to sequel_3_dataset_methods extension (jeremyevans) * Add scissors plugin for Model.update, .delete, and .destroy (jeremyevans) * Validate against explicit nil values in NOT NULL columns with default values in the auto_validations plugin (jeremyevans) * Support :not_null=>:presence option for auto_validations plugin, for using presence validation for not null columns (jeremyevans) * Rename auto_validate_presence_columns to auto_validate_not_null_columns (jeremyevans) * Make pg_hstore_ops extension integrate with pg_array, pg_hstore, and pg_array_ops extensions (jeremyevans) * Add Sequel.json_parser_error_class and Sequel.object_to_json to allow the use of alternative JSON implementations (jeremyevans) (#662) * Deprecate JSON.create_id usage in the json_serializer plugin (jeremyevans) * Emulate offsets on Microsoft Access using reverse orders and total counts (jeremyevans) (#661) * Make ado adapter handle disconnecting an already disconnected connection (jeremyevans) * Deprecate parsing columns for the same table name in multiple schemas on jdbc (jeremyevans) * Allow association_proxies plugin to accept a block to give user control over which methods are proxied to the dataset (jeremyevans) (#660) * Deprecate calling Dataset#add_graph_aliases before #graph or #set_graph_aliases (jeremyevans) * Deprecate Model.add_graph_aliases, .insert_multiple, .query, .set_overrides, .set_defaults, .to_csv, and .paginate (jeremyevans) * Add guide for ordering code with Sequel (jeremyevans) * Deprecate Database#transaction :disconnect=>:retry option (jeremyevans) * Deprecate Model.set, .update, .delete, and .destroy (jeremyevans) * Deprecate Dataset#set (jeremyevans) * Add specs for bin/sequel (jeremyevans) * Make constraint_validations plugin reflect validations by column (jeremyevans) * Allow for per-model/per-validation type customization of validation options in constraint_validations plugin (jeremyevans) * Make Database#constraint_validations in the constraint_validations plugin have raw row values (jeremyevans) * Fix statement freeing in the ibmdb adapter (jeremyevans) * Make single and class table inheritance plugins handle usage of set_dataset in a subclass (jeremyevans) * Allow validates_schema_types in validation_helpers plugin to accept an options hash (jeremyevans) * Deprecate Model.set_primary_key taking multiple arguments (jeremyevans) * Make auto_validations plugin work with databases that don't support index parsing (jeremyevans) * Model classes will no longer call Database#schema if it isn't supported (jeremyevans)
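Illustrative sketch of the auto_validations :not_null=>:presence option above (the model name is hypothetical):

  # use presence validations (which also reject blank strings) instead of
  # plain not-null validations for NOT NULL columns
  Album.plugin :auto_validations, :not_null=>:presence

* Speed up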
Model.with_pk and with_pk! class methods (jeremyevans) * Speed up Dataset#clone when called without an argument (jeremyevans) * Deprecate Postgres::PGRangeOp#{starts_before,ends_after} (jeremyevans) * Deprecate global use of null_dataset, pagination, pretty_table, query, select_remove, schema_caching, schema_dumper, and to_dot extensions (jeremyevans) * Deprecate Dataset.introspect_all_columns in the columns_introspection extension (jeremyevans) * Add empty_array_ignore_nulls extension for ignoring null handling for IN/NOT with an empty array (jeremyevans) * Deprecate Sequel.empty_array_handle_nulls accessor (jeremyevans) * Deprecate Sequel.{k,ts,tsk}_require and Sequel.check_requiring_thread (jeremyevans) * Discontinue use of manual thread-safe requiring (jeremyevans) * Deprecate using an unsupported client_min_messages setting on PostgreSQL (jeremyevans) * Deprecate passing non-hash 4th argument to Dataset#join_table (jeremyevans) * Deprecate passing non-hash 2nd argument to Dataset#union/intersect/except (jeremyevans) * Deprecate one_to_many with :one_to_one option raising an error (jeremyevans) * Automatically typecast hash and array to string for string columns in the looser_typecasting extension (jeremyevans) * Deprecate automatic typecasting of hash and array to string for string columns (jeremyevans) * Deprecate :eager_loader and :eager_grapher association options getting passed 3 separate arguments (jeremyevans) * Deprecate validates_not_string (jeremyevans) * Deprecate casting via __type suffix for prepared type placeholders in the postgres adapter (jeremyevans) * Deprecate json_serializer's Model.json_create (jeremyevans) * Deprecate json_serializer from_json and xml_serializer from_xml :all_columns and :all_associations options (jeremyevans) * Deprecate passing an unsupported lock mode to Dataset#lock on PostgreSQL (jeremyevans) * Deprecate Model::InstanceMethods.class_attr_{overridable,reader} (jeremyevans) * Deprecate all methods in Dataset::PUBLIC_APPEND_METHODS except for literal, quote_identifier, quote_schema_table (jeremyevans) * Deprecate all methods in Dataset::PRIVATE_APPEND_METHODS (jeremyevans) * Deprecate Dataset.def_append_methods (jeremyevans) * Deprecate Dataset#table_ref_append (jeremyevans) * Deprecate SQL::Expression#to_s taking an argument and returning a literal SQL string (jeremyevans) * Deprecate creating Model class methods automatically from plugin public dataset methods (jeremyevans) * Add Sequel.cache_anonymous_models accessor (jeremyevans) * Deprecate Sequel::Model.cache_anonymous_models accessor (jeremyevans) * Deprecate identity_map plugin (jeremyevans) * Deprecate Model#set_values (jeremyevans) * Deprecate pg_auto_parameterize and pg_statement_cache extensions (jeremyevans) * Deprecate Model#pk_or_nil (jeremyevans) * Deprecate Model.print and Model.each_page (jeremyevans) * Deprecate Dataset checking that the Database implements the identifier mangling methods (jeremyevans) * Deprecate Database#reset_schema_utility_dataset private method (jeremyevans) * Speed up Database#fetch, #from, #select, and #get by using a cached dataset (jeremyevans) * Make sure adapters with subadapters have fully initialized database instances before calling Database.after_initialize (jeremyevans) * Set identifier mangling methods on Database initialization (jeremyevans) * Switch internal use of class variables to instance variables (jeremyevans) * Deprecate passing an options hash to Database#dataset or Dataset.new (jeremyevans) * Speed up Dataset#clone (jeremyevans) * 
Add sequel_3_dataset_methods extension for Dataset#[]=, #insert_multiple, #set, #to_csv, #db=, and #opts= (jeremyevans) * Deprecate Dataset#[]=, #insert_multiple, #to_csv, #db=, and #opts= (jeremyevans) * Add blacklist_security plugin for Model.restricted_columns, Model.set_restricted_columns, Model#set_except, and Model#update_except (jeremyevans) * Deprecate Model.restricted_columns, Model.set_restricted_columns, Model#set_except, and Model#update_except (jeremyevans) * Deprecate Database#default_schema (jeremyevans) * Deprecate Sequel::NotImplemented and defining methods that raise it (jeremyevans) * Add Database#supports_{index_parsing,foreign_key_parsing,table_listing,view_listing}? (jeremyevans) * Deprecate Sequel.virtual_row_instance_eval accessor (jeremyevans) * Deprecate sequel_core.rb and sequel_model.rb (jeremyevans) * Add graph_each extension for Dataset#graph_each (jeremyevans) * Deprecate Dataset#graph_each (jeremyevans) * Add set_overrides extension for Dataset#set_overrides and #set_defaults (jeremyevans) * Deprecate Dataset#set_overrides and #set_defaults (jeremyevans) * Deprecate Database#query in the informix adapter (jeremyevans) * Deprecate Database#do as an alias to execute/execute_dui in some adapters (jeremyevans) * Deprecate modifying initial Dataset hash if the hash wasn't provided as an argument (jeremyevans) * Make active_model plugin use an errors class with autovivification (jeremyevans) * Deprecate Model::Errors#[] autovivification (returning empty array when missing) (jeremyevans) * Add Model#errors_class private method for choosing the errors class on a per-model basis (jeremyevans) * Add after_initialize plugin for the after_initialize hook (jeremyevans) * Deprecate Model after_initialize hook (jeremyevans) * Deprecate passing two arguments to Model.new (jeremyevans) * Deprecate choosing reciprocal associations with conditions, blocks, or differing primary keys (jeremyevans) * Deprecate choosing first from ambiguous reciprocal associations (jeremyevans) * Deprecate validates_type allowing nil values by default (jeremyevans) * Deprecate the correlated_subquery eager limit strategy (jeremyevans) * Add hash_aliases extension for making Dataset#select and #from treat hashes as alias specifiers (jeremyevans) * Deprecate having Dataset#select and #from treat hashes as alias specifiers (jeremyevans) * Do not automatically convert virtual row block return values to arrays by some Dataset methods (jeremyevans) * Add filter_having extension for making Dataset#{and,filter,exclude,or} affect the HAVING clause if present (jeremyevans) * Deprecate Dataset#select_more meaning Dataset#select when called without an existing selection (jeremyevans) * Deprecate Dataset#and, #or, and #invert raising exceptions for no existing filter (jeremyevans) * Deprecate Dataset#{and,filter,exclude,or} affecting the HAVING clause (jeremyevans) * Deprecate passing explicit columns to update as separate arguments to Model#save (jeremyevans) * Allow specifying explicit columns to update in Model#save via the :columns option (jeremyevans) * Add ability to set the default for join_table's :qualify option via Dataset#default_join_table_qualification (jeremyevans) * Deprecate :root=>true meaning :root=>:both in the json_serializer (jeremyevans) * Deprecate core extension usage if the core_extensions have not been explicitly loaded (jeremyevans) * Deprecate Symbol#{[],<,<=,>,>=} methods when using the core_extensions (jeremyevans) * Add ruby18_symbol_extensions extension for the
Symbol#{[],<,<=,>,>=} methods (jeremyevans) === 3.47.0 (2013-05-01) * Don't fail for missing conversion proc in pg_typecast_on_load plugin (jeremyevans) * Rename PGRangeOp #starts_before and #ends_after to #ends_before and #starts_after (soupmatt) (#655) * Add Database#supports_schema_parsing? for checking for schema parsing support (jeremyevans) * Handle hstore[] types on PostgreSQL if using pg_array and pg_hstore extensions (jeremyevans) * Don't reset conversion procs when loading pg_* extensions (jeremyevans) * Handle domain types when parsing the schema on PostgreSQL (jeremyevans) * Handle domain types in composite types in the pg_row extension (jeremyevans) * Add Database.extension, for loading an extension into all future databases (jeremyevans) * Support a :search_path Database option for setting PostgreSQL search_path (jeremyevans) * Support a :convert_infinite_timestamps Database option in the postgres adapter (jeremyevans) * Support a :use_iso_date_format Database option in the postgres adapter, for per-Database specific behavior (jeremyevans) * Add Model.default_set_fields_options, for having a model-wide default setting (jeremyevans) * Make Model.map, .to_hash, and .to_hash_groups work without a query when using the static_cache plugin (jeremyevans) * Support :hash_dup and Proc Model inherited instance variable types (jeremyevans) * Handle aliased tables in the pg_row plugin (jeremyevans) * Add input_transformer plugin, for automatically transforming input to model column setters (jeremyevans) * Add auto_validations plugin, for automatically adding not null, type, and unique validations (jeremyevans) * Add validates_not_null to validation_helpers (jeremyevans) * Add :setter, :adder, :remover, and :clearer association options for overriding the default modification behavior (jeremyevans) * Add Database#register_array_type to the pg_array extension, for registering database-specific array types (jeremyevans) * Speed up fetching model instances when using update_primary_key plugin (jeremyevans) * In the update_primary_key plugin, if the primary key column changes, clear related associations (jeremyevans) * Add :allow_missing_migration_files option to migrators, for not raising if migration files are missing (bporterfield) (#652) * Fix race condition related to prepared_sql for newly prepared statements (jeremyevans) (#651) * Support :keep_reference=>false Database option for not adding reference to Sequel::DATABASES (jeremyevans) * Make Postgres::HStoreOp#- explicitly cast a string argument to text, to avoid PostgreSQL assuming it is an hstore (jeremyevans) * Add validates_schema_types validation for validating column values are instances of an appropriate class (jeremyevans) * Allow validates_type validation to accept an array of allowable classes (jeremyevans) * Add Database#schema_type_class for getting the ruby class or classes related to the type symbol (jeremyevans) * Add error_splitter plugin, for splitting multi-column errors into separate errors per column (jeremyevans) * Skip validates_unique validation if underlying columns are not valid (jeremyevans) * Allow Model#modified! to take an optional column argument and mark that column as being modified (jeremyevans) * Allow Model#modified?
to take an optional column argument and check if that column has been modified (jeremyevans) * Make Model.count not issue a database query if using the static_cache plugin (jeremyevans) * Handle more corner cases in the many_to_one_pk_lookup plugin (jeremyevans) * Handle database connection during initialization in jdbc adapter (jeremyevans) (#646) * Add Database.after_initialize, which takes a block and calls the block with each newly created Database instance (ged) (#641) * Add a guide detailing PostgreSQL-specific support (jeremyevans) * Make model plugins deal with frozen instances (jeremyevans) * Allow freezing of model instances for models without primary keys (jeremyevans) * Reflect constraint_validations extension :allow_nil=>true setting in the database constraints (jeremyevans) * Add Plugins.after_set_dataset for easily running code after set_dataset (jeremyevans) * Add Plugins.inherited_instance_variables for easily setting class instance variables when subclassing (jeremyevans) * Add Plugins.def_dataset_methods for easily defining class methods that call dataset methods (jeremyevans) * Make lazy_attributes plugin no longer depend on identity_map plugin (jeremyevans) * Make Dataset#get with an array of values handle case where no row is returned (jeremyevans) * Make caching plugin handle memcached API for deletes if ignore_exceptions option is used (rintaun) (#639) === 3.46.0 (2013-04-02) * Add Dataset#cross_apply and Dataset#outer_apply on Microsoft SQL Server (jeremyevans) * Speed up threaded connection pools when :connection_handling=>:queue is used (jeremyevans) * Allow external connection pool classes to be loaded automatically (jeremyevans) * Add Dataset#with_pk! for model datasets, like #with_pk, but raising instead of returning nil (jeremyevans) * Add Dataset#first!, like #first, but raising a Sequel::NoMatchingRow exception instead of returning nil (jeremyevans) * Dataset #select_map, #select_order_map, and #get no longer support a plain string inside an array of arguments (jeremyevans) * Escape ] characters in identifiers on Microsoft SQL Server (jeremyevans) * Add security guide (jeremyevans) * Make validates_type handle false values correctly (jeremyevans) (#636) * Have associations, composition, serialization, and dirty plugins clear caches in some additional cases (jeremyevans) (#635) * Add alter_table drop_foreign_key method for dropping foreign keys by column names (raxoft, jeremyevans) (#627) * Allow creating named column constraints via :*_constraint_name column options (jeremyevans) * Handle drop_constraint :type=>:primary_key on H2 (jeremyevans) * Handle infinite dates in the postgres adapter using Database#convert_infinite_timestamps (jeremyevans) * Make the looser_typecasting extension use looser typecasting for decimal columns as well as integers and floats (jeremyevans) * Do strict typecasting of decimal columns by default, similar to integer/float typecasting (jeremyevans) === 3.45.0 (2013-03-01) * Remove bad model typecasting of money type on PostgreSQL (jeremyevans) (#624) * Use simplecov instead of rcov for coverage testing on 1.9+ (jeremyevans) * Make the Database#quote_identifier method public (jeremyevans) * Make PostgreSQL metadata parsing handle tables with the same name in multiple schemas (jeremyevans) * Switch query extension to use a proxy instead of Object#extend (chanks, jeremyevans) * Remove Dataset#def_mutation_method instance method (jeremyevans)
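Illustrative sketch of the Dataset#escape_like addition in this 3.45.0 block (listed below), meant for safely interpolating user input into LIKE patterns; names are hypothetical:

  ds = DB[:albums]
  # escape_like escapes %, _, and \ so the input is matched literally
  term = ds.escape_like(user_input)
  ds.where(Sequel.like(:name, "%#{term}%")).all
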
* Make foreign key parsing on MySQL not pick up foreign keys in other databases (jeremyevans) * Allow per-instance overrides of Postgres.force_standard_strings and .client_min_messages (jeremyevans) (#618) * Add Sequel.tzinfo_disambiguator= to the named_timezones plugin for automatically handling TZInfo::AmbiguousTime exceptions (jeremyevans) (#616) * Add Dataset#escape_like, for escaping LIKE metacharacters (jeremyevans) (#614) * The LIKE operators now use an explicit ESCAPE '\' clause for similar behavior across databases (jeremyevans) * Make Database#tables and #views accept a :qualify option on PostgreSQL to return qualified identifiers (jeremyevans) * Make json_serializer and xml_serializer plugins secure by default (jeremyevans) * Address JSON.parse vulnerabilities (jeremyevans) * Fix Dataset#from_self! to no longer create a self-referential dataset (jeremyevans) * Use SQLSTATE or database error codes if available instead of regexp parsing for more specific DatabaseErrors (jeremyevans) * Add unlimited_update plugin to work around MySQL warning in replicated environments (jeremyevans) * Add the :retry_on and :num_retries transaction options for automatically retrying transactions (jeremyevans) * Raise serialization failures/deadlocks as Sequel::SerializationFailure exceptions (jeremyevans) * Support transaction isolation levels on Oracle and DB2 (jeremyevans) * Support transaction isolation levels when using the JDBC transaction support (jeremyevans) === 3.44.0 (2013-02-04) * Improve mysql2 adapter fetch speed by up to 50% when an identifier output method is used (jeremyevans) * Improve tinytds adapter fetch speed by up to 60% (jeremyevans) * Expand columns_introspection extension to consider cached schema values in the database (jeremyevans) * Expand columns_introspection extension to handle subselects (jeremyevans) * Have #last and #paged_each for model datasets order by the model's primary key by default (jeremyevans) * Improve emulated offset support to handle subqueries (jeremyevans) * Remove use of Object#extend from the eager_each plugin (jeremyevans) * Add support for temporary views on SQLite and PostgreSQL via the :temp option to create_view (chanks, jeremyevans) * Emulate Database#create_or_replace_view if not supported directly (jeremyevans) * Add Dataset#paged_each, for processing entire datasets without keeping all rows in memory (jeremyevans) * Add Sequel::ConstraintViolation exception class and subclasses for easier exception handling (jeremyevans) * Fix use of identity_map plugin with many_to_many associations with right composite keys (chanks) (#603) * Increase virtual row performance by using a shared VirtualRow instance (jeremyevans) * Allow the :dataset association option to accept the association reflection as an argument (jeremyevans) * Improve association method performance by caching intermediate dataset (jeremyevans) === 3.43.0 (2013-01-08) * Move the #meta_def support for Database, Dataset, and Model to the meta_def extension (jeremyevans) * Fix Database#copy_into on jdbc/postgres when an exception is raised (jeremyevans) * Add core_refinements extension, providing refinement versions of Sequel's core extensions (jeremyevans) * Make Database#copy_into raise a DatabaseError if the database signals an error in the postgres adapter (jeremyevans)
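Illustrative sketch of the date_arithmetic extension from this 3.43.0 block (listed below), which provides Sequel.date_add and Sequel.date_sub; the invoices table is hypothetical:

  Sequel.extension :date_arithmetic
  # database-independent interval arithmetic
  DB[:invoices].where{due_date < Sequel.date_add(Sequel::CURRENT_DATE, :days=>30)}.all
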
* Define respond_to_missing? where method_missing is defined and the object supports respond_to? (jeremyevans) * Allow lambda procs with 0 arity as virtual row blocks on ruby 1.9 (jeremyevans) * Handle schema-qualified row_types in the pg_array integration in the pg_row extension (jeremyevans) (#595) * Support default_schema when resetting primary key sequences on PostgreSQL (jeremyevans) (#596) * Allow treating tinyint(1) unsigned columns as booleans in the mysql adapters (jeremyevans) * Support the jdbc-hsqldb gem in the jdbc adapter, since it has been updated to 2.2.9 (jeremyevans) * Work with new jdbc-* gems that require manual driver loading (kares) (#598) * Cast blobs correctly on DB2 when use_clob_as_blob is false (mluu, jeremyevans) (#594) * Add date_arithmetic extension for database-independent date calculations (jeremyevans) * Make Database#schema handle [host.]database.schema.table qualified tables on Microsoft SQL Server (jeremyevans) * Add Dataset#split_qualifiers helper method for splitting a qualifier identifier into array of strings (jeremyevans) * Make Database#schema_and_table always return strings for the schema and table (jeremyevans) * Skip stripping of blob columns in the string_stripper plugin (jeremyevans) (#593) * Allow Dataset#get to take an array to return multiple values, similar to map/select_map (jeremyevans) * Default :prefetch_rows to 100 in the Oracle adapter (andrewhr) (#592) === 3.42.0 (2012-12-03) * If an exception occurs while committing a transaction, attempt to rollback (jeremyevans) * Support setting default string column sizes on a per-Database basis via default_string_column_size (jeremyevans) * Reset Model.instance_dataset when extending the model's dataset (jeremyevans) * Make the force_encoding plugin work with frozen strings (jeremyevans) * Add Database#do on PostgreSQL for using the DO anonymous code block execution statement (jeremyevans) * Remove Model.dataset_methods (jeremyevans) * Allow subset to be called inside a dataset_module block (jeremyevans) * Make Dataset#avg, #interval, #min, #max, #range, and #sum accept virtual row blocks (jeremyevans) * Make Dataset#count use a subselect when the dataset has an offset without a limit (jeremyevans) (#587) * Dump deferrable status of unique indexes on PostgreSQL (radford) (#583) * Extend deferrable constraint support to all types of constraints, not just foreign keys (radford, jeremyevans) (#583) * Support Database#copy_table and #copy_into on jdbc/postgres (bdon) (#580) * Make Dataset#update not use a limit (TOP) on Microsoft SQL Server 2000 (jeremyevans) (#578) === 3.41.0 (2012-11-01) * Add bin/sequel usage guide (jeremyevans) * Make Dataset#reverse and #reverse_order accept virtual row blocks (jeremyevans) * Add Sequel.delay for generic delayed evaluation (jeremyevans) * Make uniqueness validations correctly handle nil values (jeremyevans) * Support :unlogged option for create_table on PostgreSQL (JonathanTron) (#575) * Add ConnectionPool#pool_type to get the type of connection pool in use (jeremyevans) * Explicitly mark primary keys as NOT NULL on SQLite (jeremyevans) * Add support for renaming primary key columns on MySQL (jeremyevans) * Add connection_validator extension for automatically checking connections and transparently handling disconnects (jeremyevans)
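Illustrative sketch of Sequel.delay, added in 3.41.0 above, which defers evaluation of the block until each query is literalized; the table and column are hypothetical:

  # Date.today runs on each query, not when the dataset is defined
  recent = DB[:albums].where{release_date > Sequel.delay{Date.today - 7}}
  recent.all
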
* Add Database#valid_connection? for checking whether a given connection is valid (jeremyevans) * Make dataset.limit(nil, nil) reset offset as well as limit (jeremyevans) (#571) * Support IMMEDIATE/EXCLUSIVE/DEFERRED transaction modes on SQLite (Eric Wong) * Major change in the Database <-> ConnectionPool interface (jeremyevans) * Make touch plugin handle touching of many_*_many associations (jeremyevans) * Make single_table_inheritance plugin handle non-bijective mappings (hannesg) (#567) * Support foreign key parsing on MSSQL (munkyboy) (#564) * Include SQL::AliasMethods in most pg_* extension objects (treydempsey, jeremyevans) (#563) * Handle failure to create a prepared statement better in the postgres, mysql, and mysql2 adapters (jeremyevans) (#560) * Treat clob columns as strings instead of blobs (jeremyevans) === 3.40.0 (2012-09-26) * Add a cubrid adapter for accessing CUBRID databases via the cubrid gem (jeremyevans) * Add a jdbc/cubrid adapter for accessing CUBRID databases via JDBC on JRuby (jeremyevans) * Return OCI8::CLOB values as ruby Strings in the Oracle adapter (jeremyevans) * Use clob for String :text=>true types on Oracle, DB2, HSQLDB, and Derby (jeremyevans) (#555) * Allow marshalling of Sequel::Postgres::HStore (jeremyevans) (#556) * Quote channel identifier names when using LISTEN/NOTIFY on PostgreSQL (jeremyevans) * Handle nil values when formatting bound variable arguments in the pg_row extension (jeremyevans) (#548) * Handle nil values when parsing composite types in the pg_row extension (jeremyevans) (#548) * Add :disconnect=>:retry option to Database#transaction, for automatically retrying the transaction on disconnect (jeremyevans) * Greatly improved support on Microsoft Access (jeremyevans) * Support Database#{schema,tables,views,indexes,foreign_key_list} when using ado/access adapter (ericgj) (#545, #546) * Add ado/access adapter for accessing Microsoft Access via the ado adapter (jeremyevans) * Combine disconnect error detection for mysql and mysql2 adapters (jeremyevans) * Update the association_pks plugin to handle composite primary keys (chanks, jeremyevans) (#544) === 3.39.0 (2012-09-01) * Fix defaults_setter to set false default values (jeremyevans) * Fix serial sequence query in Database#primary_key_sequence on PostgreSQL (jeremyevans) (#538) * Add Database#copy_into when using postgres adapter with pg driver, for very fast inserts into tables (jeremyevans) * Combine multiple alter_table operations into a single query where possible on MySQL and PostgreSQL (jeremyevans) * Handle sets of alter_table operations on MySQL and MSSQL where later operations depend on earlier ones (jeremyevans) * Add constraint_validations plugin for automatic validations of constraints defined by the extension (jeremyevans) * Add constraint_validations extension for defining database constraints similar to validations (jeremyevans)
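Illustrative sketch of the constraint_validations extension added in 3.39.0 above (table name and validation choices are hypothetical):

  DB.extension :constraint_validations
  DB.create_constraint_validations_table  # metadata table, created once
  DB.create_table(:albums) do
    primary_key :id
    String :name
    validate do
      presence :name
      min_length 3, :name
    end
  end
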
* Add Database#supports_regexp? for checking for regular expression support (jeremyevans) * Add Sequel.trim for cross platform trim function (jeremyevans) * Add Sequel.char_length for cross platform char_length function (jeremyevans) * Fix caching of MySQL server version (hannesg) (#536) * Allow overriding the convert_tinyint_to_bool setting on a per-Dataset basis in the mysql and mysql2 adapters (jeremyevans) * Make ValidationFailed and HookFailed exceptions have a model method that returns the related model (jeremyevans) * Automatically wrap array arguments to most PGArrayOp methods in PGArrays (jeremyevans) * Add set_column_not_null to alter table generator for marking a column as not null (jeremyevans) * Default second argument of set_column_allow_null to true in alter table generator (jeremyevans) * Allow Dataset#count to take an argument or virtual row block (jeremyevans) * Attempt to recognize CURRENT_{DATE,TIMESTAMP} defaults and return them as Sequel::CURRENT_{DATE,TIMESTAMP} (jeremyevans) * Make dataset.insert(model) assume a single column if model uses the pg_row plugin (jeremyevans) * No longer handle model instances in plain (non-model) datasets when inserting (jeremyevans) * Use subselects for model classes as tables in join methods in model datasets if the model's dataset isn't a simple select (jeremyevans) * No longer handle model classes as tables in join/graph methods in plain (non-model) datasets (jeremyevans) * Make Time->DateTime and DateTime->Time typecasts retain fractional seconds on ruby 1.8 (jeremyevans) (#531) * Add bin/sequel -c support, for running code string instead of using an IRB prompt (jeremyevans) * Allow subclasses plugin to take a block, which is called with each subclass created (jeremyevans) * Add :where option to validates_unique, for custom uniqueness filters (jeremyevans) * Add :connection_handling=>:disconnect option for threaded connection pools (jeremyevans) * Add Postgres::PGRowOp#* for referencing the members of the composite type as separate columns (jeremyevans) * Make identity_map plugin work with models lacking a primary key (jeremyevans) * Recognize MySQL set type and default value (jeremyevans) (#529) === 3.38.0 (2012-08-01) * Sequel now recognizes the double(x, y) and double(x, y) unsigned MySQL types (Slike9, jeremyevans) (#528) * The swift subadapters now require swift-db-* instead of swift itself (deepfryed, jeremyevans) (#526) * Add :textsize option to tinytds adapter to override the default TEXTSIZE (jeremyevans, wardrop) (#525) * Support an output identifier method in the swift adapter (jeremyevans) * Add Model#to_hash as an alias to Model#values (jeremyevans) * When loading multiple pg_* extensions via Database#extension, only reset the conversion procs once (jeremyevans) * Don't allow model typecasting from string to postgres array, hstore, or composite types (jeremyevans) * Add pg_typecast_on_load plugin for converting advanced PostgreSQL types on load in the {jdbc,do,swift}/postgres adapters (jeremyevans) * Make all adapters that connect to PostgreSQL store type conversion procs (jeremyevans) * Add type oid to column schema on PostgreSQL (jeremyevans) * Add pg_row plugin, for using Sequel::Model classes to represent PostgreSQL row-valued/composite types (jeremyevans) * Add pg_row_ops extension for DSL support for PostgreSQL row-valued/composite types (jeremyevans) * Add pg_row extension for dealing with PostgreSQL row-valued/composite types (jeremyevans) * Allow custom registered array types in the pg_array extension to be Database instance specific
(jeremyevans) * Remove Sequel::SQL::IdentifierMethods (jeremyevans) * Don't have the schema_dumper extension produce code that relies on the core_extensions (jeremyevans) * Fix dropping of columns with constraints on Microsoft SQL Server (mluu, jeremyevans) (#515, #518) * Don't have pg_* extensions add methods to core classes unless the core_extensions extension is loaded (jeremyevans) * Use real boolean literals on derby 10.7+ (jeremyevans, matthauck) (#514) * Work around JRuby 1.6 ruby 1.9 mode bug in Time#nsec for Time prepared statement arguments on jdbc (jeremyevans) * Handle blob prepared statement arguments on jdbc/db2 and jdbc/oracle (jeremyevans) * Handle blob values in the swift adapter (jeremyevans) * Handle nil prepared statement arguments better on jdbc (jeremyevans) (#513) * Make SQL::Blob objects handle as, cast, and lit methods even if the core extensions are not loaded (jeremyevans) * Make #* with no arguments produce a ColumnAll for Identifier and QualifiedIdentifier (jeremyevans) * Sequel.expr(:symbol) now returns Identifier, QualifiedIdentifier, or AliasedExpression instead of Wrapper (jeremyevans) * Treat clob columns as string instead of blob on Derby (jeremyevans) (#509) === 3.37.0 (2012-07-02) * Allow specifying eager_graph alias base on a per-call basis using an AliasedExpression (jeremyevans) * Allow bin/sequel to respect multiple -l options for logging to multiple files (jeremyevans) * Correctly handle cases where SCOPE_IDENTITY is nil in the odbc/mssql adapter (stnoonan, jeremyevans) * Add pg_interval extension, for returning interval types as ActiveSupport::Duration instances (jeremyevans) * Save a new one_to_one associated object once instead of twice in the nested_attributes plugin (jeremyevans) * Don't add unnecessary filter condition when passing a new object to a one_to_one setter method (jeremyevans) * Differentiate between column references and method references in many_through_many associations (jeremyevans) * Use :qualify=>:deep option when joining tables in model association datasets (jeremyevans) * Support :qualify=>:deep option to Dataset#join_table to qualify subexpressions in the expression tree (jeremyevans) * Support :qualify=>false option to Dataset#join_table to not automatically qualify keys/values (jeremyevans) * Make filter by associations support use column references and method references correctly (jeremyevans) * Call super in list plugin before_create (jeremyevans) (#504) * Do not automatically cast String to text in pg_auto_parameterize extension (jeremyevans) * Support alter_table validate_constraint on PostgreSQL for validating constraints previously declared with NOT VALID (jeremyevans) * Support :not_valid option when adding foreign key constraints on PostgreSQL (jeremyevans) * Support exclusion constraints on PostgreSQL (jeremyevans) * Allow for overriding the create/alter table generators used per Database object (jeremyevans) * Make casting to Date/(Time/DateTime) use date/datetime functions on SQLite (jeremyevans) * Add pg_range_ops extension for DSL support for PostgreSQL range operators and functions (jeremyevans) * The json library is now required when running the plugin/extension specs (jeremyevans) * Use change migrations instead of up/down migrations in the schema_dumper (jeremyevans) * Dump unsigned integer columns with a check >= 0 constraint in the schema_dumper (stu314) * Switch the :key_hash entry to the association :eager_loader option to use the method symbol(s) instead of the column symbol(s) (jeremyevans) * Add
* Fix dumping of non-integer foreign key columns in the schema_dumper (jeremyevans) (#502)
* Allow the nested_attributes :fields option to be a proc that is called with the associated object (chanks) (#498)
* Add split_array_nil extension, for compiling :col=>[1, nil] to col IN (1) OR col IS NULL (jeremyevans)
* Add Database#extension and Dataset#extension for loading extension modules into objects automatically (jeremyevans)
* Respect an existing dataset limit when updating on Microsoft SQL Server (jeremyevans)
* Add pg_range extension, for dealing with PostgreSQL 9.2+ range types (jeremyevans)
* Make pg_array extension convert array members when typecasting Array to PGArray (jeremyevans)
* Make jdbc/postgres adapter convert array type elements (e.g. date[] arrays are returned as arrays of Date instances) (jeremyevans)
* Make the pg_inet extension handle inet[]/cidr[]/macaddr[] types when used with the pg_array extension (jeremyevans)
* Make the pg_json extension handle json[] type when used with the pg_array extension (jeremyevans)
* Fix schema parsing of h2 clob types (jeremyevans)
* Make the pg_array extension handle array types for scalar types handled by the native postgres adapter (jeremyevans)
* Generalize handling of array types in the pg_array extension, allowing easy support of custom array types (jeremyevans)
* Remove type conversion of int2vector and money types on PostgreSQL, since previous conversions were wrong (jeremyevans)
* Add eval_inspect extension, which makes Sequel::SQL::Expression#inspect attempt to return a string suitable for eval (jeremyevans)
* When emulating offset with ROW_NUMBER, default to ordering by all columns if no specific order is given (stnoonan, jeremyevans) (#490)
* Work around JRuby 1.6 ruby 1.9 mode bug in Time -> SQLTime conversion (jeremyevans)

=== 3.36.1 (2012-06-01)

* Fix jdbc adapter when DriverManager#getConnection fails (aportnov) (#488)

=== 3.36.0 (2012-06-01)

* Use Bignum generic type when dumping unsigned integer types that could potentially overflow 32-bit signed integer values (stu314)
* Support :transform option in the nested_attributes plugin, for automatically preprocessing input hashes (chanks)
* Support :unmatched_pk option in the nested_attributes plugin, can be set to :create for associated objects with natural keys (chanks)
* Support composite primary keys in the nested_attributes plugin (chanks)
* Allow Model#from_json in the json_serializer plugin to use set_fields if a :fields option is given (jeremyevans)
* Support :using option to set_column_type on PostgreSQL, to force a specific conversion from the old value to the new value (jeremyevans)
* Drop indexes in the reverse order that they were added in the schema dumper (jeremyevans)
* Add :index_names option to schema dumper method, can be set to false or :namespace (stu314, jeremyevans)
* Add Database#global_index_namespace? for checking if index namespace is global or per table (jeremyevans)
* Fix typecasting of time columns on jdbc/postgres, which previously could be off by a millisecond (jeremyevans)
* Add document explaining Sequel's object model (jeremyevans)
* Attempt to detect more disconnect errors in the mysql2 adapter (jeremyevans)
* Add is_current? and check_current to the migrators, for checking/raising if there are unapplied migrations (pvh, jeremyevans) (#487)
* Add a jdbc subadapter for the Progress database (Michael Gliwinski, jeremyevans)
* Add pg_inet extension, for working with PostgreSQL inet and cidr types (jeremyevans)
* Fix bug in model column setters when passing an object that raises an exception for ==('') (jeremyevans)
* Add eager_each plugin, which makes each on an eagerly loaded dataset do eager loading (jeremyevans)
* Fix bugs when parsing foreign keys for tables with explicit schema on PostgreSQL (jeremyevans)
* Remove Database#case_sensitive_like on SQLite (jeremyevans)
* Remove Database#single_value in the native sqlite adapter (jeremyevans)
* Make Dataset#get work with nil and false arguments (jeremyevans)
* Make json_serializer plugin respect :root=>:collection and :root=>:instance options (jeremyevans)
* Support savepoints in prepared transactions on MySQL 5.5.23+ (jeremyevans)
* Add pg_json extension, for working with PostgreSQL 9.2's new json type (jeremyevans)
* In the optimistic locking plugin, make refresh and save after a failed save work correctly (jeremyevans)
* Support partial indexes on Microsoft SQL Server 2008 (jeremyevans)
* Make Database#call pass blocks (jeremyevans)
* Support :each when preparing statements, useful for iterating over large datasets (jeremyevans)
* Support :if_exists and :cascade options when dropping indexes on PostgreSQL (jeremyevans)
* Support :concurrently option when adding and dropping indexes on PostgreSQL (jeremyevans)
* Make Database#transaction on PostgreSQL recognize :synchronous, :read_only, and :deferrable options (jeremyevans)
* Support :sql_mode option when connecting to MySQL (jeremyevans)
* Apply :timeout MySQL connection setting on do, jdbc, and swift adapters (jeremyevans)
* Don't set Sequel::Model.db automatically when creating an anonymous class with an associated database object (jeremyevans)
* Add :connection_handling=>:queue option to the threaded connection pools, may reduce chance of stale connections (jeremyevans) (#481)
* Handle JRuby 1.7 exception handling changes when connecting in the jdbc adapter (jeremyevans) (#477)
* Make *_to_one association setters be noops if you pass a value that is the same as the cached value (jeremyevans)
* Make Model#refresh return self when using dirty plugin (jeremyevans)

=== 3.35.0 (2012-05-01)

* Correctly handle parsing schema for tables in other databases on MySQL (jeremyevans)
* Add DSL support for the modulus operator (%), similar to the bitwise operators (jeremyevans)
* Fix possible thread-safety issues on non-GVL ruby implementations (jeremyevans)
* Allow truncation of multiple tables at the same time on PostgreSQL (jeremyevans)
* Allow truncate to take :cascade, :only, and :restart options on PostgreSQL (hgimenez, jeremyevans)
* Allow json and xml serializers to support :array option in class to_json method to serialize existing array of model instances (jeremyevans)
* Add dirty plugin, which saves the initial value of the column when the value is changed (jeremyevans)
* create_table now supports an :as option to create a table directly from the results of a query (jeremyevans)
* The :index option when creating columns in the schema generator can now be a hash of options passed to index (jeremyevans)
* Parsing the default column values in the oracle adapter no longer requires superuser privileges (Jason Hines)
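
A minimal sketch of the create_table :as option above (the items table and its created_at column are hypothetical):

  DB.create_table(:recent_items, :as=>DB[:items].where{created_at > Date.today - 7})
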
* Add Database#cache_schema to allow schema caching to be turned off, useful for development modes where models are reloaded (jeremyevans)
* Correctly handle errors that occur when rolling back transactions (jeremyevans)
* Recognize identity type in the schema dumper (jeremyevans) (#468)
* Don't assign instance variables to Java objects, for future JRuby 2.0 support (jeremyevans) (#466)
* Use date and timestamp formats that are multilanguage and not DATEFORMAT dependent on Microsoft SQL Server (jeremyevans)
* Add Database#log_exception, which logs when a query raises an exception, for easier overriding (jeremyevans) (#465)
* Make the migrators only use transactions by default if the database supports transactional DDL (jeremyevans)
* Add Database#supports_transactional_ddl? for checking if DDL statements can be rolled back in transactions (jeremyevans)
* Don't use auto parameterization when using cursors in the pg_auto_parameterize extension (jeremyevans) (#463)
* No longer escape backslashes in strings by default, fixes doubled backslashes on some adapters (jeremyevans)
* Escape backslash-carriage return-line feed in strings on Microsoft SQL Server (mluu, jeremyevans) (#462, #461)
* Remove Array#all_two_pairs? (jeremyevans)
* Remove Dataset#disable_insert_returning on PostgreSQL (jeremyevans)
* Remove support for PostgreSQL <8.2 (jeremyevans)
* Remove support for Ruby <1.8.7 (jeremyevans)

=== 3.34.1 (2012-04-02)

* Fix bug in optimization of primary key lookup (jeremyevans) (#460)

=== 3.34.0 (2012-04-02)

* Fix connection failures when connecting to PostgreSQL with newer versions of swift (jeremyevans)
* Fix using a bound variable for a limit in the ibmdb adapter on ruby 1.9 (jeremyevans)
* primary_key :column, :type=>Bignum now works correctly on H2 (jeremyevans)
* Add query_literals extension for treating regular strings like literal strings in select, group, and order methods (jeremyevans)
* Actually use RETURNING for deletes/updates on PostgreSQL 8.2-9.0 (jeremyevans)
* You can now require 'sequel/no_core_ext' to load Sequel without the core extensions (jeremyevans)
* The core extensions have now been made a real Sequel extension (still loaded by default) (jeremyevans)
* VirtualRow#` has been added for creating literal strings (jeremyevans)
* VirtualRow instances now have operator methods defined {+,-,*,/,&,|,~,>,<,>=,<=} (jeremyevans)
* Array#all_two_pairs? is now deprecated and will be removed after 3.34.0 is released (jeremyevans)
* All of Sequel's core extensions now have equivalent methods defined on the Sequel module (jeremyevans)
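
A minimal sketch of the Sequel module equivalents and VirtualRow operator methods above (the items table and price column are hypothetical):

  DB[:items].where(Sequel.expr(:price) > 100)   # no core extensions needed
  DB[:items].where{price > 100}                 # VirtualRow operator method
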
* Add Sequel.core_extensions? for checking if the core extensions are enabled (jeremyevans)
* Increase speed of Model#this by about 85% (jeremyevans)
* Increase speed of Model#delete and #destroy by about 75% for models with simple datasets (jeremyevans)
* Make nested_attributes plugin work when destroying/removing associated objects when strict_param_setting is true (r-stu31) (#455)
* Dataset#disable_insert_returning on PostgreSQL is now deprecated and will be removed after 3.34.0 is released (jeremyevans)
* Double speed of Model[pk] for models with simple datasets (most models) (jeremyevans)
* Support for ruby <1.8.7 and PostgreSQL <8.2 is now deprecated and will be removed after 3.34.0 is released (jeremyevans)
* Add select_remove extension which adds Dataset#select_remove for removing columns/expressions from a dataset selection (jeremyevans)
* Add static_cache plugin, for statically caching all model instances, useful for model tables that don't change (jeremyevans)
* Add Model#freeze implementation to get a working frozen model object (jeremyevans)
* Add many_to_one_pk_lookup plugin, for using a simple primary key lookup for many_to_one associations (great with caching) (jeremyevans)
* Use bigint type instead of integer for Bignum generic type on SQLite, except for auto incrementing primary keys (jeremyevans)
* Add Database#dump_foreign_key_migration for just dumping foreign key constraints to the schema dumper extension (jeremyevans)
* Dump foreign key constraints by default when using the schema dumper extension (jeremyevans)
* Don't raise an error when no indexes exist for a table when calling Database#indexes on the jdbc/sqlite adapter (jeremyevans)
* Copy composite foreign key constraints when emulating alter_table on SQLite (jeremyevans)
* Add Database#foreign_key_list for getting foreign key metadata for a given table on SQLite, MySQL, and PostgreSQL (jeremyevans)
* Add Dataset#to_hash_groups and #select_hash_groups for getting a hash with arrays of matching values (jeremyevans)
* Model#set_fields and #update_fields now respect :missing=>:skip and :missing=>:raise options for handling missing values (jeremyevans)
* The :on_update and :on_delete entries for foreign key can now take strings, which are used literally (jeremyevans)
* Add Database#convert_infinite_timestamps to the postgres adapter, can be set to :nil, :string, or :float (jeremyevans) (#454)
* Add Database#create_join_table and #drop_join_table for easily creating many-to-many join tables (jeremyevans)
* Fix Dataset#group_rollup/#group_cube on Microsoft SQL Server 2005 (jeremyevans)
* Add Dataset#explain on MySQL (jeremyevans)
* Change formatting and return value of Dataset#explain on SQLite (jeremyevans)
* Recognize unsigned tinyint types in the schema dumper (jeremyevans)
* Add null_dataset extension, for creating a dataset that never issues a database query (jeremyevans)
* Database#uri and #url now return nil if a connection string was not used when connecting (jeremyevans) (#453)
* Add schema_caching extension, to speed up loading a large number of models by loading cached schema information from a file (jeremyevans)
* Add Dataset#multi_replace on MySQL, allowing you to REPLACE multiple rows in a single query (danielb2) (#452)
* Double speed of Model#new with empty hash, and quadruple speed of Model#set with empty hash (jeremyevans)
* Allow SQL::QualifiedIdentifier objects to contain arbitrary Sequel expressions (jeremyevans)
* Add pg_hstore_ops extension, for easily calling PostgreSQL hstore functions and operators (jeremyevans)
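
A minimal sketch of pg_hstore_ops above, per the current extension docs (the items table and its hstore tags column are hypothetical):

  Sequel.extension :pg_hstore_ops
  tags = Sequel.hstore_op(:tags)
  DB[:items].where(tags.has_key?('color')).select(tags['color'])
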
* Add Sequel::SQL::Wrapper class for easier dealing with wrapper objects (jeremyevans)
* Add pg_hstore extension, for dealing with the PostgreSQL hstore (key/value table) type (jeremyevans)
* Add Database#type_supported? method on PostgreSQL for checking if the given type symbol/string is supported (jeremyevans)
* Convert Java::OrgPostgresqlUtil::PGobject instances to ruby strings in jdbc/postgres type conversion (jeremyevans)
* Allow PlaceholderLiteralString objects to store placeholder string as an array for improved performance (jeremyevans)
* Work around ruby-pg bugs 111 (Time/DateTime fractional seconds) and 112 ("\0" in bytea) in bound variable arguments (jeremyevans) (#450)
* Handle fractional seconds correctly for time type on jdbc/postgres (jeremyevans)
* Add pg_array_ops extension, for easily calling PostgreSQL array functions and operators (jeremyevans)
* Add SQL::Subscript#[] for using nested subscripts (accessing member of multi-dimensional array) (jeremyevans)
* Add Model.cache_anonymous_models accessor so you can disable the caching of classes created by Sequel::Model() (jeremyevans)
* Convert PostgreSQL JDBC arrays to Ruby arrays in the jdbc/postgres adapter (jeremyevans)
* The typecast_on_load extension now works correctly when saving new model objects when insert_select is enabled (jeremyevans)
* Add pg_array extension, for dealing with string and numeric PostgreSQL arrays (jeremyevans)
* Add Database#reset_conversion_procs to the postgres adapter, for use with extensions that modify default conversion procs (jeremyevans)
* Escape table and schema names when getting primary key or sequence information on PostgreSQL (jeremyevans)
* Escape identifiers when quoting on MySQL and SQLite (jeremyevans)
* Add Database#supports_drop_table_if_exists? for checking if DROP TABLE supports IF EXISTS (jeremyevans)
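
A minimal sketch of pg_array/pg_array_ops above, per the current extension docs (the items table and its text[] tags column are hypothetical):

  DB.extension :pg_array
  Sequel.extension :pg_array_ops
  tags = Sequel.pg_array_op(:tags)
  DB[:items].where(tags.contains(Sequel.pg_array(['ruby'])))  # tags @> ARRAY['ruby']
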
* Add Database#drop_table? for dropping a table if it already exists (jeremyevans)
* Log full SQL string by default for prepared statements created automatically by model prepared_statements* plugins (jeremyevans)
* Add ability for prepared statements to log full SQL string (jeremyevans)
* Add pg_statement_cache extension, for automatically preparing queries when using postgres adapter with pg driver (jeremyevans)
* Add pg_auto_parameterize extension, for automatically parameterizing queries when using postgres adapter with pg driver (jeremyevans)
* Add ConnectionPool#disconnection_proc= method, to modify disconnection_proc after the pool has been created (jeremyevans)
* Add ConnectionPool#after_connect= method, to modify after_connect proc after the pool has been created (jeremyevans)
* Add ConnectionPool#all_connections method, which yields all available connections in the pool (jeremyevans)

=== 3.33.0 (2012-03-01)

* Add ability to force or disable transactions completely in the migrators using the :use_transactions option (jeremyevans)
* Add ability to turn off transactions for migrations by calling no_transaction inside the Sequel.migration block (jeremyevans)
* Allow specifically choosing which migrator to use via TimestampMigrator.apply or IntegerMigrator.apply (jeremyevans)
* Add arbitrary_servers extension to allow the use of arbitrary servers/shards by providing a hash of options as the server (jeremyevans)
* Add server_block extension to scope database access inside the block to a specific default server/shard (jeremyevans)
* Respect :collate column option on MySQL (jeremyevans) (#445)
* Use Mysql2::Client::FOUND_ROWS to get accurate number of rows matched in the mysql2 adapter (jeremyevans)
* Use Mysql#info to get accurate number of rows matched in the mysql adapter (jeremyevans)
* Make mock adapter with specific SQL dialect use appropriate defaults for quoting identifiers (jeremyevans)
* Make list plugin automatically set position field value on creation if not already set (jeremyevans)
* Add Database#integer_booleans setting on SQLite to store booleans as integers (jeremyevans)
* Typecast columns stored as integers/floats in the SQLite adapter (jeremyevans)
* In the instance_hooks plugin, (before|after)_*_hook instance methods now return self (jeremyevans)
* Handle NaN, Infinity, and -Infinity floats on PostgreSQL (kf8a, jeremyevans) (#444)
* Support an :sslmode option when using the postgres adapter with the pg driver (jeremyevans)
* Add Database#create_schema and #drop_schema to the shared postgres adapter (tkellen, jeremyevans) (#440)
* Add Database#supports_savepoints_in_prepared_transactions?, false on MySQL >=5.5.12 (jeremyevans) (#437)
* Support an identifier output method in the mysql2 adapter (jeremyevans)
* Make foreign key creation work on MySQL with InnoDB engine without specifying :key option (jeremyevans)
* Allow disabling use of sudo with SUDO='' when running the rake install/uninstall tasks (jeremyevans) (#433)

=== 3.32.0 (2012-02-01)

* Make serialization_modification_detection plugin work correctly with new objects and after saving existing objects (jeremyevans) (#432)
* Make refreshes after model creation clear the deserialized values in the serialization plugin (jeremyevans)
* Add Dataset#update_ignore on MySQL, for using UPDATE IGNORE in queries (danielb2) (#429)
* Allow select_map/select_order_map to take both a column argument and a block (jeremyevans)
* Fix virtual row block handling in select_map/select_order_map if block returns an array (jeremyevans) (#428)
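
A minimal sketch of no_transaction in the migration DSL above (the index and table names are hypothetical):

  Sequel.migration do
    no_transaction   # run this migration outside a transaction
    up{run "CREATE INDEX CONCURRENTLY items_name_idx ON items (name)"}
    down{run "DROP INDEX items_name_idx"}
  end
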
* Add Sequel.empty_array_handle_nulls setting, can be set to false for possible better performance on some databases (jeremyevans)
* Change exclude(:b=>[]) to not return rows where b is NULL (jeremyevans) (#427)
* Support ActiveModel 3.2 in the active_model plugin, by adding support for to_partial_path (jeremyevans)
* Fix metadata methods (e.g. tables) on Oracle when custom identifier input methods are used (jeremyevans)
* Fix Database#indexes on DB2 (jeremyevans)
* Make DateTime/Time columns with Sequel::CURRENT_TIMESTAMP default values use the timestamp type on MySQL (jeremyevans)
* Wrap column default values in extra parens on SQLite, fixes some cases (jeremyevans)
* Make Database#indexes not include primary key indexes on Derby, HSQLDB, Oracle, and DB2 using the jdbc adapter (jeremyevans)
* Support Database#indexes in shared MSSQL adapter (jeremyevans)
* Support :include option when creating indexes on MSSQL, for storing column values in the index (crawlik) (#426)
* Make set_column_type not modify defaults and NULL/NOT NULL setting on MSSQL, H2, and SQLite (jeremyevans)
* Qualify identifiers when filtering/excluding by associations (jeremyevans)
* Make table_exists? better handle tables where you don't have permissions for all columns (jeremyevans) (#422)
* Using new association options, support associations based on columns that clash with ruby method names (jeremyevans) (#417)
* Add use_after_commit_rollback setting to models, can be turned off to allow model usage with prepared transactions (jeremyevans)
* Fix alter table emulation on SQLite when foreign keys reference the table being altered (jeremyevans)
* Fix progress shared adapter, broken since the dataset literalization refactoring (jeremyevans) (#414)
* Support :map and :to_hash prepared statement types (jeremyevans)
* Make Dataset#naked! work correctly (jeremyevans)
* Remove Dataset#paginate!, as it was broken (jeremyevans)
* Fix query extension to not break usage of #clone without arguments (jeremyevans) (#413)

=== 3.31.0 (2012-01-03)

* Dataset#from no longer handles :a__b__c___d as a.b.c AS d (jeremyevans)
* Support many_to_one associations with the same name as their column, using the :key_column option (jeremyevans)
* Add Model.def_column_alias for defining alias methods for columns (jeremyevans)
* Support :server option in Dataset#import and #multi_insert (jeremyevans)
* Respect existing RETURNING/OUTPUT clauses in #import/#multi_insert on PostgreSQL/MSSQL (jeremyevans)
* Support :return=>:primary_key option to Dataset#import and #multi_insert (jeremyevans)
* Correctly handle return value for Dataset#insert with column array and value array on PostgreSQL <8.2 (jeremyevans)
* Dataset#insert_multiple now returns an array of inserted primary keys (jeremyevans) (#408)
* Support RETURNING with DELETE and UPDATE on PostgreSQL 8.2+ (funny-falcon)
* Raise error if tables from two separate schema are detected when parsing the schema for a single table on PostgreSQL (jeremyevans)
* Handle clob types as string instead of blob on H2 (jeremyevans)
* Add database type support to the mock adapter, e.g. mock://postgres (jeremyevans)
* Allow creation of full text indexes on Microsoft SQL Server, but you need to provide a :key_index option (jeremyevans)
* Allow Dataset#full_text_search usage with prepared statements (jeremyevans)
* Make Dataset#exists use a PlaceholderLiteralString so it works with prepared statements (jeremyevans)
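
A minimal sketch of the mock adapter with a database type above (the items table is hypothetical):

  db = Sequel.connect('mock://postgres')
  db[:items].where(:id=>1).sql   # SQL generated using the PostgreSQL dialect
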
* Fix Dataset#empty? for datasets with offsets when offset support is emulated (jeremyevans)
* Add Dataset#group_rollup and #group_cube methods for GROUP BY ROLLUP and CUBE support (jeremyevans)
* Add support for custom serialization formats to the serialization plugin (jeremyevans)
* Support a :login_timeout option in the jdbc adapter (glebpom) (#406)

=== 3.30.0 (2011-12-01)

* Handle usage of on_duplicate_key_update in MySQL prepared statements (jeremyevans) (#404)
* Make after_commit and after_rollback respect :server option (jeremyevans) (#401)
* Respect :connect_timeout option in the postgres adapter when using pg (glebpom, jeremyevans) (#402)
* Make Dataset#destroy for model datasets respect dataset shard when using a transaction (jeremyevans)
* Make :server option to Model#save set the shard to use (jeremyevans)
* Move Model#set_server from the sharding plugin to the base plugin (jeremyevans)
* Add :graph_alias_base association option for setting base name to use for table aliases when eager graphing (jeremyevans)
* Make ILIKE work correctly on Microsoft SQL Server if database/column collation is case sensitive (jfirebaugh) (#398)
* When starting a new dataset graph, assume existing selection is the columns to select from the current table (jeremyevans)
* Allow specifying nanoseconds and offsets when converting a hash or array to a timestamp (jeremyevans, jfirebaugh) (#395)
* Improve performance when converting Java types to ruby types in the jdbc adapter (jeremyevans, jfirebaugh) (#395)
* Fix tinytds adapter if DB.identifier_output_method = nil (jeremyevans)
* Explicitly order by the row number column when emulating offsets (jfirebaugh) (#393)
* Fix Dataset#graph and #eager_graph modifying the receiver if the receiver is already graphed (jeremyevans) (#392)
* Change dataset literalization to an append-only-all-the-way-down design (jeremyevans)

=== 3.29.0 (2011-11-01)

* Allow Model.dataset_module to take a Module instance (jeremyevans)
* Apply Model.[] optimization in more cases (jeremyevans)
* Fix Model.[] optimization when dataset uses identifier_input_method different than database (jeremyevans)
* Work around pragma bug on jdbc/sqlite when emulating alter table support (jeremyevans)
* Database#<< and Dataset#<< now return self so they can be safely chained (jeremyevans)
* Fully support using an aliased table name as the :join_table option for a many_to_many association (jeremyevans)
* Make like case sensitive on SQLite and Microsoft SQL Server (use ilike for case insensitive matching) (jeremyevans)
* Add Database#extend_datasets for the equivalent of extending the Database object's datasets with a module (jeremyevans)
* Speed up Dataset #map, #to_hash, and related methods if an array of symbols is given (jeremyevans)
* Add Database#dataset_class for modifying the class used for datasets for a single Database object (jeremyevans)
* Plugins that override Model.load should be modified to override Model.call instead (jeremyevans)
* Speed up loading model objects from the database by up to 7-16% (jeremyevans)
* Create accessor methods for all columns in a model's table, even if the dataset doesn't select the columns (jeremyevans)
* Add mock adapter for better mocking of a database connection (jeremyevans)
* Have models pass their dataset instead of table name to Database#schema (jeremyevans)
* Allow Database#schema to take a dataset as the table argument, and use its identifier input/output methods (jeremyevans)
* Significant improvements to the db2 adapter (jeremyevans)
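
A minimal sketch of Database#extend_datasets above (the published method and posts table are hypothetical):

  DB.extend_datasets(Module.new do
    def published
      where(:published=>true)
    end
  end)
  DB[:posts].published.sql   # the method is available on all of DB's datasets
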
* Handle methods with names that can't be called directly in Model.def_dataset_method (jeremyevans)
* Add dataset_associations plugin for making dataset methods that return datasets of associated objects (jeremyevans)
* Don't allow Model.def_dataset_method to override private model methods (jeremyevans)
* Parse primary key information from system tables in the shared MSSQL adapter (jeremyevans)
* Fix handling of composite primary keys when emulating alter table operations on SQLite (jeremyevans)
* Emulate add_constraint and drop_constraint alter table operations on SQLite (jeremyevans)
* Apply the correct pragmas when connecting to SQLite via the Amalgalite and Swift adapters (jeremyevans)
* Fix bound variable usage for some types (e.g. Date) when used outside of prepared statements on SQLite (jeremyevans)
* Work around SQLite column naming bug when using subselects (jeremyevans)
* Make prepared_statements plugin work with adapters that require type specifiers for variable placeholders, such as oracle (jeremyevans)
* Add savepoint support to the generic JDBC transaction support (used by 6 jdbc subadapters) (jeremyevans)
* Add native prepared statement support to the oracle adapter (jeremyevans)
* Support sharding correctly by default when using transactions in model saving/destroying (jeremyevans)
* Add Database#in_transaction? method for checking if you are already in a transaction (jeremyevans)
* Add after_commit, after_rollback, after_destroy_commit, and after_destroy_rollback hooks to Model objects (jeremyevans)
* Add after_commit and after_rollback hooks to Database objects (jeremyevans) (#383)
* Support savepoints inside prepared transactions on MySQL (jeremyevans)
* Support opening transactions to multiple shards of the same Database object in the same Thread (jeremyevans)
* Add Sequel.transaction for running transactions on multiple databases at the same time (jeremyevans)
* Support :rollback => :always option in Database#transaction to always rollback the transaction (jeremyevans)
* Support :rollback => :reraise option in Database#transaction to reraise the Sequel::Rollback exception (jeremyevans)
* Add support for connecting to Apache Derby databases using the jdbc adapter (jeremyevans)
* Add support for connecting to HSQLDB databases using the jdbc adapter (jeremyevans)
* Fix inserting all default values into a table on DB2 (jeremyevans)
* Add :qualify option to many_to_one associations for whether to qualify the primary key column with the associated table (jeremyevans)
* Modify rcte_tree plugin to use column aliases if recursive CTEs require them (jeremyevans)
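
A minimal sketch of Sequel.transaction and the Database after_commit hook above (DB1/DB2 and the tables are hypothetical):

  Sequel.transaction([DB1, DB2]) do        # one transaction per database
    DB1[:accounts].where(:id=>1).update(:balance=>Sequel.expr(:balance) - 100)
    DB2[:audit_log].insert(:event=>'transfer')
  end
  DB1.transaction do
    DB1.after_commit{puts 'committed'}     # runs only if the transaction commits
  end
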
* Add Dataset#recursive_cte_requires_column_aliases? method to check if you must provide an argument list for a recursive CTE (jeremyevans)
* Much better support for Oracle in both the oci8-based oracle adapter and the jdbc oracle subadapter (jeremyevans)
* Handle CTEs in subselects in more places on databases that don't natively support CTEs in subselects (jeremyevans)
* Change Dataset#to_hash to not call the row_proc if 2 arguments are given (jeremyevans)
* Change Dataset#map to not call the row_proc if an argument is given (jeremyevans)
* Make Dataset#select_map and #select_order_map return an array of single element arrays if given an array with a single symbol (jeremyevans)
* Make Dataset#columns work correctly on jdbc, odbc, ado, and dbi adapters when using an emulated offset on MSSQL and DB2 (jeremyevans)
* Add Database#listen and #notify to the postgres adapter, for LISTEN and NOTIFY support (jeremyevans)
* Emulate the bitwise complement operator on h2 (jeremyevans)
* Fix improper handling of emulated bitwise operators with more than two arguments (jeremyevans)
* Allow convert_invalid_date_time to be set on a per-Database basis in the mysql adapter (jeremyevans)
* Allow convert_tinyint_to_bool to be set on a per-Database basis in the mysql and mysql2 adapters (jeremyevans)
* Allow per-Database override of the type conversion procs on the mysql, sqlite, and ibmdb adapters (jeremyevans)
* Add Database#timezone accessor, for overriding Sequel.database_timezone per Database object (jeremyevans)

=== 3.28.0 (2011-10-03)

* Add firebird jdbc subadapter (jeremyevans)
* Add SQLTime.create method for easier creation of SQLTime instances (jeremyevans)
* Make Dataset#with_pk use a qualified primary key, so it works correctly on joined datasets (jeremyevans)
* Support the :limit association option when using eager_graph (jeremyevans)
* Fix eager loading via eager_graph of one_to_one associations that match multiple associated objects and use order to pick the first one (jeremyevans)
* Make after_load association hooks apply when using eager_graph (jeremyevans)
* Make Dataset#with_sql treat a symbol given as the first argument as a method name to call to get the SQL (jeremyevans)
* Make Dataset #delete, #insert, #update return array of plain hashes if block not given and Dataset#returning is used (jeremyevans)
* Allow Dataset #map, #to_hash, #select_map, #select_order_map, and #select_hash to take arrays of columns instead of single columns (jeremyevans)
* Make Dataset #delete, #insert, #update yield plain hashes to a block if Dataset#returning is used (jeremyevans)
* Add Dataset#returning for setting the columns to return in INSERT/UPDATE/DELETE statements, used by PostgreSQL 9.1 (jeremyevans)
* Support WITH clause in INSERT/UPDATE/DELETE on PostgreSQL 9.1+ (jeremyevans)
* Add Database#copy_table for PostgreSQL COPY support when using the postgres adapter with pg (jeremyevans)
* Support CREATE TABLE IF NOT EXISTS on PostgreSQL 9.1+ (jeremyevans)
* Add support for Sequel::Model.default_eager_limit_strategy to set the default :eager_limit_strategy for *_many associations (jeremyevans)
* Add support for an :eager_limit_strategy => :correlated_subquery value for limiting using correlated subqueries (jeremyevans)
* Allow use of a dataset that uses the emulated offset support on MSSQL and DB2 in an IN subquery by using a nested subquery (jeremyevans)
* Allow use of a dataset that uses LIMIT in an IN subquery on MySQL by using a nested subquery (jeremyevans)
* Work around serious ActiveSupport bug in Time.=== that breaks literalization of Time values (jeremyevans)
* Speed up SQL operator methods by using module_eval instead of define_method (jeremyevans)
* Support sql_(boolean,number,string) methods on ComplexExpressions, allowing you to do (x + 1).sql_string + 'a' for (x + 1) || 'a' (jeremyevans)
* Don't disallow SQL expression creation based on types, leave that to the database server (jeremyevans)
* Make :column [&|] 1 use an SQL bitwise [&|] expression instead of a logical (AND|OR) expression (jeremyevans)
* Make :column + 'a' use an SQL string concatenation expression instead of an addition expression (jeremyevans)
* Fix :time typecasting from Time to SQLTime for fractional seconds on ruby 1.9 (jeremyevans)
* Have Dataset#select_append check supports_select_all_and_column? and select all from all FROM and JOIN tables if no columns selected (jeremyevans)
* Add Dataset#supports_select_all_and_column? for checking if you can do SELECT *, column (jeremyevans)
* Add support for an :eager_limit_strategy => :window_function value for limiting using window functions (jeremyevans)
* Add support for an :eager_limit_strategy => :distinct_on value for one_to_one associations for using DISTINCT ON (jeremyevans)
* Add support for an :eager_limit_strategy association option, for manual control over how limiting is done (jeremyevans)
* Add Dataset#supports_ordered_distinct_on? for checking if the dataset can use distinct on while respecting order (jeremyevans)
* Add support for the association :limit option when eager loading via .eager for *_many associations (jeremyevans)
* Add db2 jdbc subadapter (jeremyevans)
* Fix the db2 adapter so it actually works (jeremyevans)
* Add ibmdb adapter for accessing DB2 (roylez, jeremyevans) (#376)
* Add much better support for DB2 databases (roylez, jeremyevans) (#376)
* Handle SQL::AliasedExpressions and SQL::JoinClauses in Dataset#select_all (jeremyevans)
* Speed up type translation slightly in mysql, postgres, and sqlite adapters (jeremyevans)
* Add Dataset#supports_cte_in_subqueries? for checking whether database supports WITH in subqueries (jeremyevans)
* Allow Model.set_dataset to accept Sequel::LiteralString arguments as table names (jeremyevans)
* Association :after_load hooks in lazy loading are now called after the associated objects have been cached (jeremyevans)
* Emulate handling of extract on MSSQL, using datepart (jeremyevans)
* Emulate handling of extract on SQLite, but you need to set Database#use_timestamp_timezones = false (jeremyevans)
* Abstract handling of ComplexExpressionMethods#extract so that it can work on databases that don't implement extract (jeremyevans)
* Emulate xor operator on SQLite (jeremyevans)
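
A minimal sketch of the sql_* context methods above (x and the items table are hypothetical; no core extensions required):

  expr = (Sequel.expr(:x) + 1).sql_string + 'a'
  DB[:items].select(expr)   # SELECT ((x + 1) || 'a') FROM items
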
* Add Dataset#supports_where_true? for checking if the database supports WHERE true (or WHERE 1 if 1 is true) (jeremyevans)
* Fix eager loading via eager of one_to_one associations that match multiple associated objects and use order to pick the first one (jeremyevans)

=== 3.27.0 (2011-09-01)

* Add support for native prepared statements to the tinytds adapter (jeremyevans)
* Add support for native prepared statements and stored procedures to the mysql2 adapter (jeremyevans)
* Support dropping primary key, foreign key, and unique constraints on MySQL via the drop_constraint :type option (jeremyevans)
* Add Sequel::SQLTime class for handling SQL time columns (jeremyevans)
* Typecast DateTime objects to Date for date columns (jeremyevans)
* When typecasting Date objects to timestamps, make the resulting objects always have no fractional date components (jeremyevans)
* Add Model.dataset_module for simplifying many def_dataset_method calls (jeremyevans)
* Make prepared_statements_safe plugin work on classes without datasets (jeremyevans)
* Make Dataset#hash work correctly when referencing SQL::Expression instances (jeremyevans)
* Handle allowed mass assignment methods correctly when including modules in classes or extending instances with modules (jeremyevans)
* Fix Model#hash to work correctly with composite primary keys and with no primary key (jeremyevans)
* Model#exists? now returns false without issuing a query for new model objects (jeremyevans)

=== 3.26.0 (2011-08-01)

* Fix bug in default connection pool if a disconnect error is raised and the disconnection_proc also raises an error (jeremyevans)
* Disallow eager loading via eager of many_*_many associations with :eager_graph option (jeremyevans)
* Major speedup in dataset creation (jeremyevans)
* Replace internal implementation of eager_graph with much faster version (jeremyevans)
* Don't treat strings with leading zeros as octal format in the default typecasting (jeremyevans)
* Fix literalization of Date, Time, and DateTime values on Microsoft Access (jeremyevans)
* Fix handling of nil values with the pure-Java version of nokogiri in the xml_serializer plugin (jeremyevans)
* Make identity_map plugin work with standard eager loading of many_to_many and many_through_many associations (jeremyevans)
* Make create_table! only attempt to drop the table if it already exists (jeremyevans)
* Remove custom table_exists? implementations in the oracle and postgres adapters (jeremyevans)
* Handle another type of disconnection in the postgres adapter (jeremyevans)
* Handle disconnections in the ado adapter and do postgres subadapter (jeremyevans)
* Recognize disconnections when issuing BEGIN/ROLLBACK/COMMIT statements (jeremyevans) (#368)

=== 3.25.0 (2011-07-01)

* Work with tiny_tds-0.4.5 in the tinytds adapter, older versions are no longer supported (jeremyevans)
* Make association_pks plugin typecast provided values to integer if the primary key column type is integer (jeremyevans)
* Model.set_dataset now accepts Identifier, QualifiedIdentifier, and AliasedExpression arguments (jeremyevans)
* Fix handling of nil values in bound variables and prepared statement and stored procedure arguments in the jdbc adapter (jeremyevans, wei)
* Allow treating Datasets as Expressions, e.g. DB[:table1].select(:column1) > DB[:table2].select(:column2) (jeremyevans)
* No longer use CASCADE by default when dropping tables on PostgreSQL (jeremyevans)
* Support :cascade option to #drop_table, #drop_view, #drop_column, and #drop_constraint for using CASCADE (jeremyevans)
* If validation error messages are LiteralStrings, don't add the column name to them in Errors#full_messages (jeremyevans)
* Fix bug loading plugins on 1.9 where ::ClassMethods, ::InstanceMethods, or ::DatasetMethods is defined (jeremyevans)
* Add Dataset#exclude_where and Dataset#exclude_having methods, so you can force use of having or where clause (jeremyevans)
* Allow Dataset#select_all to take table name arguments and select all columns from each given table (jeremyevans)
* Add Dataset#select_group method, for selecting and grouping on the same columns (jeremyevans)
* Allow Dataset#group and Dataset#group_and_count to accept a virtual row block (jeremyevans)

=== 3.24.1 (2011-06-03)

* Ignore index creation errors if using create_table? with the IF NOT EXISTS syntax (jeremyevans) (#362)

=== 3.24.0 (2011-06-01)

* Add prepared_statements_association plugin, for using prepared statements by default for regular association loading (jeremyevans)
* Add prepared_statements_safe plugin, for making prepared statement use with models more safe (jeremyevans)
* Add prepared_statements_with_pk plugin, for using prepared statements for dataset lookups by primary key (jeremyevans)
* Fix bug in emulated prepared statement support not supporting nil or false as bound values (jeremyevans)
* Add Dataset#unbind for unbinding values from a dataset, for use with creating prepared statements (jeremyevans)
* Add prepared_statements plugin for using prepared statements for updates, inserts, deletes, and lookups by primary key (jeremyevans)
* Make Dataset#[] for model datasets consider a single integer argument as a lookup by primary key (jeremyevans)
* Add Dataset#with_pk for model datasets, for finding first record with matching primary key value (jeremyevans)
* Add defaults_setter plugin for setting default values when initializing model instances (jeremyevans)
* Add around hooks (e.g. around_save) to Sequel::Model (jeremyevans)
* Add Model#initialize_set private method to ease extension writing (jeremyevans)
* Only typecast bit fields to booleans on MSSQL, the MySQL bit type is a bitfield, not a boolean (jeremyevans)
* Set SQL_AUTO_IS_NULL=0 by default when connecting to MySQL via the swift and jdbc adapters (jeremyevans)
* Fix bug in multiple column IN/NOT IN emulation when a model dataset is used (jeremyevans)
* Add support for filtering and excluding by association datasets (jeremyevans)
* Fix literalization of boolean values in filters on SQLite and MSSQL (jeremyevans)
* Add support for filtering and excluding by multiple associations (jeremyevans)
* Add support for inverting some SQL::Constant instances such as TRUE, FALSE, NULL, and NOTNULL (jeremyevans)
* Add support for excluding by associations to model datasets (jeremyevans)
* The Sequel::Postgres.use_iso_date_format setting now only affects future Database objects (jeremyevans)
* Add Sequel::Postgres::PG_NAMED_TYPES hash for extensions to register type conversions for non-standard types (jeremyevans, pvh)
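
A minimal sketch of Dataset#with_pk above (Album is a hypothetical model):

  Album.dataset.with_pk(1)               # first album with primary key 1, or nil
  Album.where(:artist_id=>5).with_pk(1)  # nil unless album 1 matches the filter
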
* Make create_table? use IF NOT EXISTS instead of using SELECT to determine existence, if supported (jeremyevans)
* Fix bug in association_pks plugin when associated table has a different primary key column name (jfirebaugh)
* Fix limiting rows when connecting to DB2 (semmons99)
* Exclude columns from tables in the INFORMATION_SCHEMA when parsing table schema on JDBC (jeremyevans)
* Fix limiting rows when connecting to Microsoft Access (jeremyevans)
* Add Database#views for getting an array of symbols of view names for the database (jeremyevans, christian.michon)
* Make Database#tables no longer include view names on MySQL (jeremyevans)
* Convert Java CLOB objects to ruby strings when using the JDBC JTDS subadapter (christian.michon)
* If Thread#kill is called on a thread with an open transaction, roll the transaction back on ruby 1.8 and rubinius (jeremyevans)
* Split informix adapter into shared/specific parts, add JDBC informix subadapter (jeremyevans)

=== 3.23.0 (2011-05-02)

* Migrate issue tracker from Google Code to GitHub Issues (jeremyevans)
* Add support for filtering by associations to model datasets (jeremyevans)
* Don't call insert_select when saving a model that doesn't select all columns of the table (jeremyevans)
* Fix bug when using :select=>[] option for a many_to_many association (jeremyevans)
* Add a columns_introspection extension that attempts to skip database queries by introspecting selected columns (jeremyevans)
* When combining old integer migrations and new timestamp migrations, make sure old integer migrations are all applied first (jeremyevans)
* Support dynamic callbacks to customize regular association loading at query time (jeremyevans)
* Support cascading of eager loading with dynamic callbacks for both eager and eager_graph (jeremyevans)
* Make the xml_serializer plugin handle namespaced models by using __ instead of / as a separator (jeremyevans)
* Allow the :eager_grapher association proc to accept a single hash instead of 3 arguments (jfirebaugh)
* Support dynamic callbacks to customize eager loading at query time (jfirebaugh, jeremyevans)
* Fix bug in the identity_map plugin for many_to_one associations when the association reflection hadn't been filled in yet (funny-falcon)
* Add serialization_modification_detection plugin for detecting changes in serialized columns (jeremyevans) (#333)

=== 3.22.0 (2011-04-01)

* Add disconnect detection to tinytds adapter, though correct behavior may require an update to tiny_tds (cult_hero)
* Add Dataset/Database#mssql_unicode_strings accessor when connecting to MSSQL to control string literalization (semmons99, jeremyevans)
* Fix ODBC::Time instance handling in the odbc adapter (jeremyevans)
* Use Sequel.application_timezone when connecting in the oracle adapter to set the connection's session's timezone (jmthomas)
* In the ADO adapter, assume access to SQL Server if a :conn_string option is given that doesn't indicate Access/Jet (damir.si) (#332)
* Use the correct class when loading instances for descendants of model classes that use single table inheritance (jeremyevans)
* Support for COLLATE in column definitions (jfirebaugh)
* Don't use a schema when creating a temporary table (jeremyevans)
* Make migrator work correctly when a default_schema is set (jeremyevans) (#331)

=== 3.21.0 (2011-03-01)

* Make symbol splitting (:table__column___alias) work correctly for identifiers that are not in the \w character class (authorNari)
* Enable row locks in Oracle (authorNari)
* Prefer cover? over include? for validates_includes/validates_inclusion_of (jeremyevans)
* Make using NULL/NOT NULL, DEFAULT, and UNIQUE column options work correctly on H2 and possibly Oracle (jeremyevans)
* Make bin/sequel accept file arguments and work correctly when $stdin is not a tty (jeremyevans)
* Add support for -I and -r options to bin/sequel (jeremyevans)
* Sequel::Model.plugin can now be overridden just like the other Model methods (jeremyevans)
* Add tinytds adapter, the best way to connect to MSSQL from a C based ruby running on *nix (jeremyevans)
* Recognize bigint unsigned as a Bignum type in the schema dumper (gamespy-tech) (#327)
* Add Dataset#calc_found_rows for MySQL datasets (macks)
* Add association_autoreloading plugin for clearing association cache when foreign key value changes (jfirebaugh, jeremyevans)
* Fix join_table on MySQL ignoring the block (jfirebaugh)
* Transfer CTE WITH clauses in subselect to main query when joining on MSSQL (jfirebaugh)
* Make specs support both RSpec 1 and RSpec 2 (jeremyevans)
* Work with ruby-informix versions >= 0.7.3 in the informix adapter (jeremyevans) (#326)

=== 3.20.0 (2011-02-01)

* Allow a :partial option to Database#indexes on MySQL to include partial indexes (roland.swingler) (#324)
* Add a SQLite subadapter to the swift adapter, now that swift supports it (jeremyevans)
* Update swift adapter to support swift 0.8.1, older versions no longer supported (jeremyevans)
* Allow setting arbitrary JDBC properties in the jdbc adapter with the :jdbc_properties option (jeremyevans)
* Use a better error message if a validates_max_length validation is applied to a nil value (jeremyevans) (#322)
* Add some basic Microsoft Access support to the ado adapter, autoincrementing primary keys now work (jeremyevans)
* Make class_table_inheritance plugin handle subclass associations better (jeremyevans) (#320)

=== 3.19.0 (2011-01-03)

* Handle Date and DateTime types in prepared statements when using the jdbc adapter (jeremyevans)
* Handle Date, DateTime, Time, SQL::Blob, true, and false in prepared statements when using the SQLite adapter (jeremyevans)
* Use varbinary(max) instead of image for the generic blob type on MSSQL (jeremyevans)
* Close prepared statements when disconnecting when using SQLite (jeremyevans)
* Allow reflecting on validations in the validation_class_methods plugin (jeremyevans)
* Allow passing a primary key value to the add_* association method (gucki)
* When typecasting model column values, check the classes of the new and existing values (jeremyevans)
* Improve type translation performance in the postgres, mysql, and sqlite adapters by using methods instead of procs (jeremyevans)

=== 3.18.0 (2010-12-01)

* Allow the user to control how the connection pool deals with attempts to access shards that aren't configured (jeremyevans)
* Typecast columns when creating model objects from JSON in the json_serializer plugin (jeremyevans)
* When parsing the schema for a model that uses an aliased table, use the unaliased table name (jeremyevans)
* When emulating schema methods such as drop_column on SQLite, recreate applicable indexes on the recreated table (jeremyevans)
* Only remove hook pairs that have been run successfully in the instance_hooks plugin (jeremyevans)
* Add reversible migration support to the migration extension (jeremyevans)
* Add to_dot extension, for producing visualizations of Dataset abstract syntax trees with Graphviz (jeremyevans)
* Switch to using manual type translation in the SQLite adapter (jeremyevans)
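
A minimal sketch of the reversible migration support above (the table is hypothetical); a change block is reversed automatically when migrating down:

  Sequel.migration do
    change do
      create_table(:artists) do
        primary_key :id
        String :name, :null=>false
      end
    end
  end
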
* Support :read_timeout option in the native mysql adapter (tmm1)
* Support :connect_timeout option in the native mysql and mysql2 adapters (tmm1)

=== 3.17.0 (2010-11-05)

* Ensure that the optimistic locking plugin increments the lock column when using Model#modified! (jfirebaugh)
* Correctly handle nil values in the xml_serializer plugin, instead of converting them to empty strings (george.haff) (#313)
* Use a default wait_timeout that's allowed on Windows for the mysql and mysql2 adapters (jeremyevans) (#314)
* Add support for connecting to MySQL over SSL using the :sslca, :sslkey, and related options (jeremyevans)
* Fix Database#each_server when used with jdbc or do connection strings without separate :adapter option (jeremyevans) (#312)
* Much better support in the AS400 JDBC subadapter (bhauff)
* Allow cloning of many_through_many associations (gucki, jeremyevans)
* In the nested_attributes plugin, don't make unnecessary update calls to modify associated objects that are about to be deleted (jeremyevans, gucki)
* Allow Dataset#(add|set)_graph_aliases to accept as hash values symbols and arrays with a single element (jeremyevans)
* Add Database#views and #view_exists? to the Oracle adapter (gpheruson)
* Add Database#sql_log_level for changing the level at which SQL queries are logged (jeremyevans)
* Remove unintended use of prepared statements in swift adapter (jeremyevans)
* Fix logging in the swift PostgreSQL subadapter (jeremyevans)

=== 3.16.0 (2010-10-01)

* Support composite foreign keys for associations in the identity_map plugin (harukizaemon, jeremyevans) (#310)
* Handle INTERSECT and EXCEPT on Microsoft SQL Server 2005+ (jfirebaugh)
* Add :replace option to Database#create_language in the postgresql adapter (jeremyevans)
* Make rcte_tree plugin work when not all columns are selected (jeremyevans)
* Add swift adapter (jeremyevans)
* Fix literalization of DateTime objects on 1.9 for databases that support fractional seconds (jeremyevans)

=== 3.15.0 (2010-09-01)

* Make emulated alter_table tasks on SQLite correctly preserve foreign keys (DirtYiCE, jeremyevans)
* Add support for sequel_pg to the native postgres adapter when pg is used (jeremyevans)
* Make class MyModel < Sequel::Model(DB[:table]) reload safe (jeremyevans)
* Fix a possible error when using the do (DataObjects) adapter with postgres (jeremyevans)
* Handle a many_to_many :join_table option that uses an implicit alias (mluu, jeremyevans)
* Work around bug in Microsoft's SQL Server JDBC Adapter version 3.0 (jfirebaugh)
* Make eager graphing a model that uses an aliased table name work correctly (jeremyevans)
* Make class_table_inheritance plugin work with non integer primary keys on SQLite (jeremyevans, russm)
* Add :auto_increment field to column schema values on MySQL if the column is auto incrementing (dbd)
* Handle DSN-less ODBC connections better (Ricardo Ramalho)
* Exclude temporary tables when parsing the schema on PostgreSQL (jeremyevans) (#306)
* Add Mysql2 adapter (brianmario)
* Handle Mysql::Error exceptions when disconnecting in the MySQL adapter (jeremyevans)
* Make typecasting work correctly for attributes loaded lazily when using the lazy attributes plugin (jeremyevans)

=== 3.14.0 (2010-08-02)

* Handle OCIInvalidHandle errors when disconnecting in the Oracle adapter (jeremyevans)
* Allow calling Model.create_table, .create_table! and .create_table? with blocks containing the schema in the schema plugin (jfirebaugh)
* Fix handling of the :conditions option in the rcte plugin (mluu)
* Fix aggregate methods such as Dataset#sum and #avg on MSSQL on datasets with an order but no limit (mluu)
* Fix rename_table on MSSQL for case sensitive collations and schemas (mluu)
* Add a :single_root option to the tree plugin, for enforcing a single root value via a before_save hook (jfirebaugh)
* Add a Model#root? method to the tree plugin, for checking if the current node is a root (jfirebaugh)
* Add a :raise_on_failure option to Model#save to override the raise_on_save_failure setting (jfirebaugh)
* Handle class discriminator column names that are existing ruby method names in the single table inheritance plugin (jeremyevans)
* Fix times and datetimes when timezone support is used and you are loading a standard time when in daylight time or vice versa (gcampbell)
* Handle literalization of OCI8::CLOB objects in the native oracle adapter (jeremyevans)
* Raise a Sequel::Error instead of an ArgumentError if the migration current or target version does not exist (jeremyevans)
* Fix Database#schema on Oracle when the same table exists in multiple schemas (djwhitt)
* Fix Database#each_server when using a connection string to connect (jeremyevans)
* Make Model dataset's destroy method respect the model's use_transactions setting, instead of always using a transaction (jeremyevans)
* Add Database#adapter_scheme, for checking which adapter a Database uses (jeremyevans)
* Allow Dataset#grep to take :all_patterns, :all_columns, and :case_insensitive options (mighub, jeremyevans)

=== 3.13.0 (2010-07-01)

* Allow Model.find_or_create to take a block which is yielded the object to be created, if no object is found (zaius, jeremyevans)
* Make PlaceholderLiteralString a GenericExpression subclass (jeremyevans)
* Allow nil/NULL to be used as a CASE expression value (jeremyevans)
* Support bitwise operators on more databases (jeremyevans)
* Make PostgreSQL do bitwise xor instead of exponentiation for ^ operator (jeremyevans)
* Fix handling of tinyint(1) columns when connecting to MySQL via JDBC (jeremyevans)
* Handle arrays of two element arrays as filter hash values automatically (jeremyevans)
* Allow :frame option for windows to take a string that is used literally (jeremyevans)
* Support transaction isolation levels on PostgreSQL, MySQL, and MSSQL (jeremyevans)
* Support prepared transactions/two-phase commit on PostgreSQL, MySQL, and H2 (jeremyevans)
* Allow NULLS FIRST/LAST when ordering using the :nulls=>:first/:last option to asc and desc (jeremyevans)
* On PostgreSQL, if no :schema option is provided for #tables, #table_exists?, or #schema, assume all schemas except the default non-public ones (jeremyevans) (#305)
* Cache prepared statements when using the native sqlite driver, improving performance (jeremyevans)
* Add a Tree plugin for treating model objects as being part of a tree (jeremyevans, mwlang)
* Add a :methods_module association option, for choosing the module into which association methods are placed (jeremyevans)
* Add a List plugin for treating model objects as being part of a list (jeremyevans, aemadrid)
* Don't attempt to use class polymorphism in the class_table_inheritance plugin if no cti_key is defined (jeremyevans)
* Add a XmlSerializer plugin for serializing/deserializing model objects to/from XML (jeremyevans)
* Add a JsonSerializer plugin for serializing/deserializing model objects to/from JSON (jeremyevans)
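
A minimal sketch of Model.find_or_create with a block above (Album and its columns are hypothetical; the block runs only when creating):

  Album.find_or_create(:name=>'Old Ideas') do |album|
    album.released = false
  end
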
* Handle unsigned integers in the schema dumper (jeremyevans)

=== 3.12.1 (2010-06-09)

* Make :encoding option work on MySQL even if config file specifies different encoding (jeremyevans) (#300)

=== 3.12.0 (2010-06-01)

* Add a :deferrable option to foreign_key for creating deferrable foreign keys (hydrow)
* Add a :join_table_block many_to_many association option used by the add/remove/remove_all methods (jeremyevans)
* Add an AssociationPks plugin that adds association_pks and association_pks= methods for *_to_many associations (jeremyevans)
* Add an UpdatePrimaryKey plugin that allows you to update the primary key of a model object (jeremyevans)
* Add a SkipCreateRefresh plugin that skips the refresh when saving new model objects (jeremyevans)
* Add a StringStripper plugin that strips strings before assigning them to model attributes (jeremyevans)
* Allow the :eager_loader association proc to accept a single hash instead of 3 arguments (jeremyevans)
* Add a Dataset#order_append alias for order_more, for consistency with order_prepend (jeremyevans)
* Add a Dataset#order_prepend method that adds to the beginning of an existing order (jeremyevans)
* Add a Sequel::NotImplemented exception class, use instead of NotImplementedError (jeremyevans)
* Correctly handle more than 2 hierarchy levels in the single table inheritance plugin (jeremyevans)
* Allow using a custom column value<->class mapping with the single_table_inheritance plugin (jeremyevans, tmm1)
* Handle SQL::Identifiers in the schema_dumper extension (jeremyevans) (#304)
* Make sure certain alter table operations clear the schema correctly on MySQL (jeremyevans) (#301)
* Fix leak of JDBC Statement objects when using transactions on JDBC on databases that support savepoints (jeremyevans)
* Add DatabaseDisconnectError support to the ODBC adapter (Joshua Hansen)
* Make :encoding option work on MySQL in some cases where it was ignored (jeremyevans) (#300)
* Make Model::Errors#on always return nil if there are no errors on that attribute (jeremyevans)
* When using multiple plugins that add before hooks, the order that the hooks are called may have changed (jeremyevans)
* The hook_class_methods plugin no longer skips later after hooks if earlier after hooks return false (jeremyevans)
* Add Model#set_fields and update_fields, similar to set_only and update_only but ignoring other keys in the hash (jeremyevans)
* Add Model.qualified_primary_key_hash, similar to primary_key_hash but with qualified columns (jeremyevans)
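
A minimal sketch of the AssociationPks plugin above (Album and a many_to_many :tags association are hypothetical):

  Album.plugin :association_pks
  album = Album[1]
  album.tag_pks            # primary keys of the associated tags
  album.tag_pks = [2, 3]   # associate exactly tags 2 and 3
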

=== 3.11.0 (2010-05-03)

* Allow shared postgresql adapter to work with ruby 1.9 with the -Ku switch (golubev.pavel) (#298)
* Add support for connecting to MSSQL via JTDS in the JDBC adapter (jeremyevans)
* Support returning the number of rows updated/deleted on MSSQL when using the ADO adapter with an explicit :provider (jeremyevans)
* Support transactions in the ADO adapter if not using the default :provider (jeremyevans)
* Make Database#disconnect not raise an exception when using the unsharded single connection pool (jeremyevans)
* Attempt to handle JDBC connection problems in cases where driver auto loading doesn't work (e.g. Tomcat) (elskwid)
* Make native MySQL adapter's tinyint to boolean conversion only convert tinyint(1) columns and not larger tinyint columns (roland.swingler) (#294)
* Fix use of limit with distinct on Microsoft SQL Server (jeremyevans) (#297)
* Correctly swallow errors when using :ignore_index_errors in Database#create_table when using unsupported indexes (jeremyevans) (#295)
* Fix insert returning the autogenerated key when using the 5.1.12 MySQL JDBC driver (viking)
* Consider number/numeric/decimal columns with a 0 scale to be integer columns (e.g. numeric(10, 0)) (jeremyevans, QaDes)
* Fix Database#rename_table on Microsoft SQL Server (rohit.namjoshi) (#293)
* Add Dataset#provides_accurate_rows_matched?, for seeing if update and delete are likely to return correct numbers (jeremyevans)
* Add require_modification to Sequel::Model, for checking that model instance updating and deleting affects a single row (jeremyevans)
* Fix leak of ResultSets when getting metadata in the jdbc adapter (jrun)
* Make Dataset#filter and related methods just clone receiver if given an empty argument, such as {}, [], or '' (jeremyevans)
* Add instance_filters plugin, for adding arbitrary filters when updating/destroying the instance (jeremyevans)
* No longer create the #{plugin}_opts methods for plugins (jeremyevans)
* Support :auto_vacuum, :foreign_keys, :synchronous, and :temp_store Database options on SQLite, for thread-safe PRAGMA setting (jeremyevans)
* Add foreign_keys accessor to SQLite Database objects (enabled by default), which modifies the foreign_keys PRAGMA available in 3.6.19+ (jeremyevans)
* Add a Database#sqlite_version method when connecting to SQLite, used to determine feature support (jeremyevans)
* Fix rolling back transactions when connecting to Oracle via JDBC (jeremyevans)
* Fix syntax errors when connecting to MSSQL via the dbi adapter (jeremyevans) (#292)
* Add support for an :after_connect option when connecting, called with each new connection made (jeremyevans)
* Add support for a :test option when connecting to automatically test the connection (jeremyevans)
* Add Dataset#select_append, which always appends to the existing SELECTed columns (jeremyevans)
* Emulate DISTINCT ON on MySQL using GROUP BY (jeremyevans)
* Make MSSQL shared adapter emulate set_column_null alter table op better with types containing sizes (jeremyevans) (#291)
* Add :config_default_group and :config_local_infile options to the native MySQL adapter (jeremyevans)
* Add log_warn_duration attribute to Database, queries that take longer than it are logged at warn level (jeremyevans)
* Switch Database logging to use log_yield instead of log_info, queries that raise errors are now logged at error level (jeremyevans)
* Update active_model plugin to work with the ActiveModel::Lint 3.0.0beta2 specs (jeremyevans)
* Support JNDI connection strings in the JDBC adapter (jrun)
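
A minimal sketch of the instance_filters plugin added in 3.11.0 above; the Album model and column are hypothetical. The update only takes effect if the filter still matches the database row, and an error is raised otherwise:

  Album.plugin :instance_filters
  album = Album.first
  album.instance_filter(:in_stock => true)
  album.update(:name => 'New Name') # raises if the row no longer matches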

=== 3.10.0 (2010-04-02)

* Make one_to_one setter and *_to_many remove_all methods apply the association options (jeremyevans)
* Make nested_attributes plugin handle invalid many_to_one associations better (john_firebaugh)
* Remove private methods from Sequel::BasicObject on ruby 1.8 (i.e. most Kernel methods) (jeremyevans)
* Add Sequel::BasicObject.remove_methods!, useful on 1.8 if libraries required after Sequel add methods to Object (jeremyevans)
* Change Sequel.connect with a block to return the block's value (jonas11235)
* Add an rcte_tree plugin, which uses recursive common table expressions for loading trees stored as adjacency lists (jeremyevans)
* Make typecast_on_load plugin also typecast when refreshing the object (either explicitly or implicitly after creation) (jeremyevans)
* Fix schema parsing and dumping of tinyint columns when connecting to MySQL via the do adapter (ricardochimal)
* Fix transactions when connecting to Oracle via JDBC (jeremyevans)
* Fix plugin loading when plugin module name is the same as an already defined top level constant (jeremyevans)
* Add an AS400 JDBC subadapter (need jt400.jar in classpath) (jeremyevans, bhauff)
* Fix the emulated MSSQL offset support when core extensions are not used (jeremyevans)
* Make Sequel::BasicObject work correctly on Rubinius (kronos)
* Add the :eager_loader_key option to associations, useful for custom eager loaders (jeremyevans)
* Dataset#group_and_count no longer orders by the count (jeremyevans)
* Fix Dataset#limit on MSSQL 2000 (jeremyevans)
* Support eagerly loading nested associations when lazily loading *_to_one associations using the :eager option (jeremyevans)
* Fix the one_to_one setter to work with a nil argument (jeremyevans)
* Cache one_to_one associations like many_to_one associations instead of one_to_many associations (jeremyevans)
* Use the singular form for one_to_one association names instead of the plural form (john_firebaugh)
* Add real one_to_one associations, using the :one_to_one option of one_to_many is now an error (jeremyevans)
* Add Model#lock!, which uses Dataset#for_update to lock model rows (jeremyevans)
* Add Dataset#for_update as a standard dataset method (jeremyevans)
* Add composition plugin, similar to ActiveRecord's composed_of (jeremyevans)
* Combine multiple complex expressions for simpler SQL and object tree (jeremyevans)
* Add Dataset#first_source_table, for the unaliased version of the table for the first source (jeremyevans)
* Raise a more explicit error if attempting to use the sqlite adapter with sqlite3 instead of sqlite3-ruby (jeremyevans)
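
A minimal sketch of the rcte_tree plugin added in 3.10.0 above, assuming a nodes table using the plugin's default parent_id column:

  class Node < Sequel::Model
    plugin :rcte_tree
  end
  node = Node.first
  node.parent       # regular parent association
  node.children     # regular children association
  node.ancestors    # all ancestors, loaded with a recursive CTE
  node.descendants  # all descendants, loaded with a recursive CTE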

=== 3.9.0 (2010-03-04)

* Allow loading adapters and extensions from outside of the Sequel lib directory (jeremyevans)
* Make limit and offset work as bound variables in prepared statements (jeremyevans)
* In the single_table_inheritance plugin, handle case where the sti_key is nil or '' specially (jeremyevans) (#287)
* Handle IN/NOT IN with an empty array (jeremyevans)
* Emulate IN/NOT IN with multiple columns where the database doesn't support it and a dataset is given (jeremyevans)
* Add Dataset#unused_table_alias, for generating a table alias that has not yet been used in the query (jeremyevans)
* Support an empty database argument in bin/sequel, useful for testing things without a real database (jeremyevans)
* Support for schemas and aliases when eager graphing (jeremyevans)
* Handle using an SQL::Identifier as a 4th option to Dataset#join_table (jeremyevans)
* Move gem spec from Rakefile to a .gemspec file, for compatibility with gem build and builder (jeremyevans) (#285)
* Fix MSSQL 2005+ offset emulation on ruby 1.9 (jeremyevans)
* Make active_model plugin work with ActiveModel 3.0 beta Lint specs, now requires active_model (jeremyevans)
* Correctly create foreign key constraints on MySQL with the InnoDB engine, but you must specify the :key option (jeremyevans)
* Add an optimistic_locking plugin for models, similar to ActiveRecord's optimistic locking support (jeremyevans)
* Handle implicitly qualified symbols in UPDATE statements, useful for updating joined datasets (jeremyevans)
* Have schema_dumper extension pass options hash to Database#tables (jeremyevans) (#283)
* Make all internal uses of require thread-safe (jeremyevans)
* Refactor connection pool into 4 separate pools, increase performance for unsharded setups (jeremyevans)
* Change a couple instance_evaled lambdas into procs, for 1.9.2 compatibility (jeremyevans)
* Raise error message earlier if DISTINCT ON is used on SQLite (jeremyevans)
* Speed up prepared statements on SQLite (jeremyevans)
* Correctly handle ODBC timestamps when database_timezone is nil (jeremyevans)
* Add Sequel::ValidationFailed#errors (tmm1)
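
A minimal sketch of the optimistic_locking plugin added in 3.9.0 above, assuming the table has the plugin's default lock_version column; the model is hypothetical:

  Album.plugin :optimistic_locking
  a1 = Album[1]
  a2 = Album[1]
  a1.update(:name => 'A') # increments lock_version
  a2.update(:name => 'B') # raises, as the row was modified concurrently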

=== 3.8.0 (2010-01-04)

* Catch cases in the postgres adapter where exceptions weren't converted or raised appropriately (jeremyevans)
* Don't double escape backslashes in string literals in the mssql shared adapter (john_firebaugh)
* Fix order of ORDER and HAVING clauses in the mssql shared adapter (mluu)
* Add validates_type to the validation_helpers plugin (mluu)
* Attempt to detect database disconnects in the JDBC adapter (john_firebaugh)
* Add Sequel::SQL::Expression#==, so arbitrary expressions can be compared by value (dlee)
* Respect the :size option for the generic File type on MySQL to create tinyblob, mediumblob, and longblob (ibc)
* Don't use the OUTPUT clause on SQL Server versions that don't support it (pre-2005) (jeremyevans) (#281)
* Raise DatabaseConnectionErrors in the single-threaded connection pool if unable to connect (jeremyevans)
* Fix handling of non-existent server in single-threaded connection pool (jeremyevans)
* Default to using mysqlplus driver in the native mysql adapter, fall back to mysql driver (ibc, jeremyevans)
* Handle 64-bit integers in JDBC prepared statements (paulfras)
* Improve blob support when using the H2 JDBC subadapter (nullstyle, jeremyevans, paulfras)
* Add Database#each_server, which yields a new Database object for each server in the connection pool which is connected to only that server (jeremyevans)
* Add Dataset#each_server, which yields a dataset for each server in the connection pool which will execute on that server (jeremyevans)
* Remove meta_eval and metaclass private methods from Sequel::Metaprogramming (jeremyevans)
* Merge Dataset::FROM_SELF_KEEP_OPTS into Dataset::NON_SQL_OPTIONS (jeremyevans)
* Add Database#remove_servers for removing servers from the pool on the fly (jeremyevans)
* When disconnecting servers, if there are any connections to the server currently in use, schedule them to be disconnected (jeremyevans)
* Allow disconnecting specific server(s)/shard(s) in Database#disconnect via a :servers option (jeremyevans)
* Handle multiple statements in a single query in the native MySQL adapter in all cases, not just when selecting via Dataset#each (jeremyevans)
* In the boolean_readers plugin, don't raise an error if the model's columns can't be determined (jeremyevans)
* In the identity_map plugin, remove instances from the cache if they are deleted/destroyed (jeremyevans)
* Add Database#add_servers, for adding new servers/shards on the fly (chuckremes, jeremyevans)
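
A minimal sketch of Database#each_server added in 3.8.0 above; the maintenance SQL run on each shard is hypothetical:

  # Yields a separate Database object connected to each server/shard.
  DB.each_server do |db|
    db.run('ANALYZE')
  end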

=== 3.7.0 (2009-12-01)

* Add Dataset#sequence to the shared Oracle Adapter, for returning autogenerated primary key values on insert (jeremyevans) (#280)
* Bring support for modifying joined datasets into Sequel proper, supported on MySQL and PostgreSQL (jeremyevans)
* No longer use native autoreconnection in the mysql adapter (jeremyevans)
* Add NULL, NOTNULL, TRUE, SQLTRUE, FALSE, and SQLFALSE constants (jeremyevans)
* Add Dataset #select_map, #select_order_map, and #select_hash (jeremyevans)
* Make Dataset#group_and_count handle arguments other than Symbols (jeremyevans)
* Add :only_if_modified option to validates_unique method in validation_helpers plugin (jeremyevans)
* Allow specifying the dataset alias via :alias option when using union/intersect/except (jeremyevans)
* Allow Model#destroy to take an options hash and respect a :transaction option (john_firebaugh)
* If a transaction is being used, raise_on_save_failure is false, and a before hook returns false, rollback the transaction (john_firebaugh, jeremyevans)
* In the schema_dumper, explicitly specify the :type option if it isn't Integer (jeremyevans)
* On postgres, use bigserial type if :type=>Bignum is given as an option to primary_key (jeremyevans)
* Use READ_DEFAULT_GROUP in the mysql adapter to load the options in the client section of the my.cnf file (crohr)
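
A minimal sketch of the dataset methods added in 3.7.0 above; the table, columns, and return values are hypothetical:

  DB[:albums].select_map(:name)        # => ['GH', 'RF']
  DB[:albums].select_order_map(:name)  # same, ordered by the column
  DB[:albums].select_hash(:id, :name)  # => {1 => 'RF', 2 => 'GH'}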

=== 3.6.0 (2009-11-02)

* Make the MSSQL shared adapter correctly parse the column schema information for tables in the non-default database schema (rohit.namjoshi)
* Use save_changes instead of save when updating existing associated objects in the nested_attributes plugin (jeremyevans)
* Allow Model#save_changes to accept an options hash that is passed to save, so you can save changes without validating (jeremyevans)
* Make nested_attributes plugin add newly created objects to cached association array immediately (jeremyevans)
* Make add_ association method not add the associated object to the cached array if it's already there (jeremyevans)
* Add Model#modified! for explicitly marking an object as modified, so save_changes/update will run callbacks even if no columns have been modified (jeremyevans)
* Add support for a :fields option in the nested attributes plugin, and only allow updating of the fields specified (jeremyevans)
* Don't allow modifying keys related to the association when updating existing objects in the nested_attributes plugin (jeremyevans)
* Add associated_object_keys method to AssociationReflection objects, specifying the key(s) in the associated model table related to the association (jeremyevans)
* Support the memcached protocol in the caching plugin via the new :ignore_exceptions option (EppO, jeremyevans)
* Don't modify array with a string and placeholders passed to Dataset#filter or related methods (jeremyevans)
* Speed up Amalgalite adapter (copiousfreetime)
* Fix bound variables on PostgreSQL when using nil and potentially other values (jeremyevans)
* Allow easier overriding of default options used in the validation_helpers plugin (jeremyevans)
* Have Dataset#literal_other call sql_literal on the object if it responds to it (heda, michaeldiamond)
* Fix Dataset#explain in the amalgalite adapter (jeremyevans)
* Have Model.table_name respect table aliases (jeremyevans)
* Allow marshalling of saved model records after calling #marshallable! (jeremyevans)
* one_to_many association methods now make sure that the removed object is currently associated to the receiver (jeremyevans)
* Model association add_ and remove_ methods now have more descriptive error messages (jeremyevans)
* Model association add_ and remove_ methods now make sure passed object is of the correct class (jeremyevans)
* Model association remove_ methods now accept a primary key value and disassociate the associated model object (natewiger, jeremyevans)
* Model association add_ methods now accept a hash and create a new associated model object (natewiger, jeremyevans)
* Dataset#window for PostgreSQL datasets now respects previous windows (jeremyevans)
* Dataset#simple_select_all? now ignores options that don't affect the SQL being issued (jeremyevans)
* Account for table aliases in eager_graph (mluu)
* Add support for MSSQL clustered index creation (mluu)
* Implement insert_select in the MSSQL adapter via OUTPUT. Can be disabled via disable_insert_output. (jfirebaugh, mluu)
* Correct error handling when beginning a transaction fails (jfirebaugh, mluu)
* Correct JDBC binding for Time objects in prepared statements (jfirebaugh, jeremyevans)
* Emulate JOIN USING clause poorly using JOIN ON if the database doesn't support JOIN USING (e.g. MSSQL, H2) (jfirebaugh, jeremyevans)
* Support column aliases in Dataset#group_and_count (jfirebaugh)
* Support preparing insert statements of the form insert(1,2,3) and insert(columns, values) (jfirebaugh)
* Fix add_index for tables in non-default schema (jfirebaugh)
* Allow named placeholders in placeholder literal strings (jeremyevans)
* Allow the force_encoding plugin to work when refreshing (jeremyevans)
* Add Dataset#bind for setting bound variable values before calling #call (jeremyevans)
* Add additional join methods to Dataset: (cross|natural|(natural_)?(full|left|right))_join (jeremyevans)
* Fix use of dataset aggregate methods (e.g. sum) on limited/grouped/etc. datasets (jeremyevans)
* Clear changed_columns when saving new model objects with a database adapter that supports insert_select, such as postgres (jeremyevans)
* Fix Dataset#replace with default values on MySQL, and respect insert-related options (jeremyevans)
* Fix Dataset#lock on PostgreSQL (jeremyevans)
* Fix Dataset#explain on SQLite (jeremyevans)
* Add Dataset#use_cursor to the native postgres adapter, for processing large datasets (jeremyevans)
* Don't ignore Class.inherited in Sequel::Model.inherited (antage) (#277)
* Optimize JDBC::MySQL::DatabaseMethods#last_insert_id to prevent additional queries (tmm1)
* Fix use of MSSQL with ruby 1.9 (cult hero)
* Don't try to load associated objects when the current object has NULL for one of the key fields (jeremyevans)
* No longer require GROUP BY to use HAVING, except on SQLite (jeremyevans)
* Add emulated support for the lack of multiple column IN/NOT IN support in MSSQL and SQLite (jeremyevans)
* Add emulated support for #ilike on MSSQL and H2 (jeremyevans)
* Add a :distinct option for all associations, which uses the SQL DISTINCT clause (jeremyevans)
* Don't require :: prefix for constant lookups in instance_evaled virtual row blocks on ruby 1.9 (jeremyevans)
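
A minimal sketch of the association add_/remove_ behavior added in 3.6.0 above; the models are hypothetical:

  artist.add_album(:name => 'RF') # hash: creates and associates a new Album
  artist.remove_album(1)          # primary key: disassociates that Album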

=== 3.5.0 (2009-10-01)

* Correctly literalize timezones in timestamps when using Oracle (jeremyevans)
* Add class_table_inheritance plugin, supporting inheritance in the database using a table-per-model-class approach (jeremyevans)
* Allow easier overriding of model code to insert and update individual records (jeremyevans)
* Allow graphing to work on previously joined datasets, and eager graphing of models backed by joined datasets (jeremyevans)
* Fix MSSQL emulated offset support for datasets with row_procs (e.g. Model datasets) (jeremyevans)
* Support composite keys with set_primary_key when called with an array of multiple symbols (jeremyevans)
* Fix select_more and order_more to not affect receiver (tamas.denes, jeremyevans)
* Support composite keys in model associations, including many_through_many plugin support (jeremyevans)
* Add the force_encoding plugin for forcing encoding of strings for models (requires ruby 1.9) (jeremyevans)
* Support DataObjects 0.10 (previous DataObjects versions are now unsupported) (jeremyevans)
* Allow the user to specify the ADO connection string via the :conn_string option (jeremyevans)
* Add thread_local_timezones extension, allowing per-thread overrides of the global timezone settings (jeremyevans)
* Add named_timezones extension for using named timezones such as "America/Los_Angeles" using TZInfo (jeremyevans)
* Pass through unsigned/elements/size and other options when altering columns on MySQL (tmm1)
* Replace Dataset#virtual_row_block_call with Sequel.virtual_row (jeremyevans)
* Allow Dataset #delete, #update, and #insert to respect existing WITH clauses on MSSQL (dlee, jeremyevans)
* Add touch plugin, which adds Model#touch for updating an instance's timestamp, as well as touching associations when an instance is updated or destroyed (jeremyevans)
* Add sql_expr extension, which adds the sql_expr method to all objects, giving them easy access to Sequel's DSL (jeremyevans)
* Add active_model plugin, which gives Sequel::Model an ActiveModel compliant API, passes the ActiveModel::Lint tests (jeremyevans)
* Fix MySQL commands out of sync error when using queries with multiple result sets without retrieving all result sets (jeremyevans)
* Allow splitting of multiple result sets into separate arrays when using multiple statements in a single query in the native MySQL adapter (jeremyevans)
* Don't include primary key indexes when parsing MSSQL indexes on JDBC (jeremyevans)
* Make Dataset#insert_select return nil on PostgreSQL if disable_insert_returning is used (jeremyevans)
* Speed up execution of prepared statements with bound variables on MySQL (ibc@aliax.net)
* Add association_dependencies plugin, for deleting, destroying, or nullifying associated objects when destroying a model object (jeremyevans)
* Add :validate association option, set to false to not validate when implicitly saving associated objects (jeremyevans)
* Add subclasses plugin, for recording all of a model's subclasses and descendant classes (jeremyevans)
* Add looser_typecasting extension, for using .to_f and .to_i instead of Kernel.Float and Kernel.Integer when typecasting floats and integers (jeremyevans)
* Catch database errors when preparing statements or setting variable values when using the native MySQL adapter (jeremyevans)
* Add typecast_on_load plugin, for fixing bad database typecasting when loading model objects (jeremyevans)
* Detect more types of MySQL disconnection errors (jeremyevans)
* Add Sequel.convert_exception_class for wrapping exceptions (jeremyevans)
* Model#modified? now always considers new records as modified (jeremyevans)
* Typecast before checking current model attribute value, instead of after (jeremyevans)
* Don't attempt to use unparseable defaults as literals when dumping the schema for a MySQL database (jeremyevans)
* Handle MySQL enum defaults in the schema dumper (jeremyevans)
* Support Database#server_version on MSSQL (dlee, jeremyevans)
* Support updating and deleting joined datasets on MSSQL (jfirebaugh)
* Support the OUTPUT SQL clause on MSSQL delete, insert, and update statements (jfirebaugh)
* Refactor generation of delete, insert, select, and update statements (jfirebaugh, jeremyevans)
* Do a better job of parsing defaults on MSSQL (jfirebaugh)
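
A minimal sketch of the touch plugin added in 3.5.0 above; the model, timestamp column, and :associations option usage follow current Sequel documentation and the association name is hypothetical:

  Album.plugin :touch, :associations => :artist
  album.touch # sets updated_at, and touches the associated artist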

=== 3.4.0 (2009-09-02)

* Allow datasets without tables to work correctly on Oracle (mikegolod)
* Add #invert, #asc, and #desc to OrderedExpression (dlee)
* Allow validates_unique to take a block used to scope the uniqueness constraint (drfreeze, jeremyevans)
* Automatically save a new many_to_many associated object when associating the object via add_* (jeremyevans)
* Add a nested_attributes plugin for modifying associated objects directly through a model object (jeremyevans)
* Add an instance_hooks plugin for adding hooks to specific model instances (jeremyevans)
* Add a boolean_readers plugin for creating attribute? methods for boolean columns (jeremyevans)
* Add Dataset#ungrouped which removes existing grouping (jeremyevans)
* Make Dataset#group with nil or no arguments remove existing grouping (dlee)
* Fix using multiple emulated ALTER TABLE statements (e.g. drop_column) in a single alter_table block on SQLite (jeremyevans)
* Don't allow inserting on a grouped dataset or a dataset that selects from multiple tables (jeremyevans)
* Allow class Item < Sequel::Model(DB2) to work (jeremyevans)
* Add Dataset#truncate for truncating tables (jeremyevans)
* Add Database#run method for executing arbitrary SQL on a database (jeremyevans)
* Handle index parsing correctly for tables in a non-default schema on JDBC (jfirebaugh)
* Handle unique index parsing correctly when connecting to MSSQL via JDBC (jfirebaugh)
* Add support for converting Time/DateTime to local or UTC time upon storage, retrieval, or typecasting (jeremyevans)
* Accept a hash when typecasting values to date, time, and datetime types (jeremyevans)
* Make JDBC adapter prepared statements support booleans, blobs, and potentially any type of object (jfirebaugh)
* Refactor the inflection support and modify the default inflections (jeremyevans, dlee)
* Make the serialization and lazy_attribute plugins add accessor methods to modules included in the class (jeremyevans)
* Make Database#schema on JDBC include a :column_size entry specifying the maximum length/precision for the column (jfirebaugh)
* Make Database#schema on JDBC accept a :schema option (dlee)
* Fix Dataset#import when called with a dataset (jeremyevans)
* Give a much more descriptive error message if the mysql.rb driver is detected (jeremyevans)
* Make postgres adapter work with a modified postgres-pr that raises PGError (jeremyevans)
* Make ODBC adapter respect Sequel.datetime_class (jeremyevans)
* Add support for generic concepts of CURRENT_{DATE,TIME,TIMESTAMP} (jeremyevans)
* Add a timestamps plugin for automatically creating hooks for create and update timestamps (jeremyevans)
* Add support for serializing to json (derdewey)
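
A minimal sketch of the nested_attributes plugin added in 3.4.0 above; the models and columns are hypothetical:

  Artist.plugin :nested_attributes
  Artist.one_to_many :albums
  Artist.nested_attributes :albums
  artist = Artist.new(:name => 'YJM',
    :albums_attributes => [{:name => 'RF'}])
  artist.save # saves the artist and creates the associated album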

=== 3.3.0 (2009-08-03)

* Add an association_proxies plugin that uses proxies for associations (jeremyevans)
* Have the add/remove/remove_all methods take additional arguments and pass them to the internal methods (clivecrous)
* Move convert_tinyint_to_bool method from Sequel to Sequel::MySQL (jeremyevans)
* Model associations now default to associating to classes in the same scope (jeremyevans, nougad) (#274)
* Add Dataset#unlimited, similar to unfiltered and unordered (jeremyevans)
* Make Dataset#from_self take an options hash and respect an :alias option, giving the alias to use (Phrogz)
* Make the JDBC adapter accept a :convert_types option to turn off Java type conversion and double performance (jeremyevans)
* Slight increase in ConnectionPool performance (jeremyevans)
* SQL::WindowFunction can now be aliased/casted etc. just like SQL::Function (jeremyevans)
* Model#save no longer attempts to update primary key columns (jeremyevans)
* Sequel will now unescape values provided in connection strings (e.g. ado:///db?host=server%5cinstance) (jeremyevans)
* Significant improvements to the ODBC and ADO adapters in general (jeremyevans)
* The ADO adapter no longer attempts to use database transactions, since they never worked (jeremyevans)
* Much better support for Microsoft SQL Server using the ADO, ODBC, and JDBC adapters (jeremyevans)
* Support rename_column, set_column_null, set_column_type, and add_foreign_key on H2 (jeremyevans)
* Support adding a column with a primary key or unique constraint to an existing table on SQLite (jeremyevans)
* Support altering a column's type, null status, or default on SQLite (jeremyevans)
* Fix renaming a NOT NULL column without a default on MySQL (nougad, jeremyevans) (#273)
* Don't swallow DatabaseConnectionErrors when creating model subclasses (tommy.midttveit)
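
A minimal sketch of the Dataset#from_self :alias option added in 3.3.0 above; the table is hypothetical:

  DB[:albums].select(:name).from_self(:alias => :a1)
  # SELECT * FROM (SELECT name FROM albums) AS a1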

=== 3.2.0 (2009-07-02)

* In the STI plugin, don't overwrite the STI field if it is already set (jeremyevans)
* Add support for Common Table Expressions, which use the SQL WITH clause (jeremyevans)
* Add SQL::WindowFunction, expand virtual row blocks to support them and other constructions (jeremyevans)
* Add Model#autoincrementing_primary_key, for when the autoincrementing key isn't the same as the primary key (jeremyevans)
* Add Dataset#ungraphed, to remove the splitting of results into subhashes or associated records (jeremyevans)
* Support :opclass option for PostgreSQL indexes (tmi, jeremyevans)
* Make parsing of server's version more reliable for PostgreSQL (jeremyevans)
* Add Dataset#qualify, which is qualify_to with a first_source default (jeremyevans)
* Add :ruby_default to parsed schema information, which contains a ruby object representing the database default (jeremyevans)
* Fix changing a column's name, type, or null status on MySQL when column has a string default (jeremyevans)
* Remove Dataset#to_table_reference protected method, no longer used (jeremyevans)
* Fix thread-safety issue in stored procedure code (jeremyevans)
* Remove SavepointTransactions module, integrate into Database code (jeremyevans)
* Add supports_distinct_on? method (jeremyevans)
* Remove SQLStandardDateFormat, replace with requires_sql_standard_datetimes? method (jeremyevans)
* Remove UnsupportedIsTrue module, replace with supports_is_true? method (jeremyevans)
* Remove UnsupportedIntersectExcept(All)? modules, replace with methods (jeremyevans)
* Make Database#indexes work on PostgreSQL versions prior to 8.3 (tested on 7.4) (jeremyevans)
* Fix bin/sequel using a YAML file on 1.9 (jeremyevans)
* Allow connection pool options to be specified in connection string (jeremyevans)
* Handle :user and :password options in the JDBC adapter (jeremyevans)
* Fix warnings when using the ODBC adapter (jeremyevans)
* Add opening_databases.rdoc file for describing how to connect to a database (mwlang, jeremyevans)
* Significantly increase JDBC select performance (jeremyevans)
* Slightly increase SQLite select performance using the native adapter (jeremyevans)
* Majorly increase MySQL select performance using the native adapter (jeremyevans)
* Pass through unsigned/elements/size and other options when altering columns on MySQL (tmm1)
* Allow on_duplicate_key_update to affect Dataset#insert on MySQL (tmm1)
* Support using a given table and column to store schema versions, using new Migrator.run method (bougyman, jeremyevans)
* Fix foreign key table constraints on MySQL (jeremyevans)
* Remove Dataset#table_exists?, use Database#table_exists? instead (jeremyevans)
* Fix graphing of datasets with dataset sources (jeremyevans) (#271)
* Raise a Sequel::Error if Sequel.connect is called with something other than a Hash or String (jeremyevans) (#272)
* Add -N option to bin/sequel to not test the database connection (jeremyevans)
* Make Model.grep call Dataset#grep instead of Enumerable#grep (jeremyevans)
* Support the use of Regexp as first argument to StringExpression.like (jeremyevans)
* Fix Database#indexes on PostgreSQL when the schema used is a symbol (jeremyevans)
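
A minimal sketch of the common table expression support added in 3.2.0 above; the tables are hypothetical:

  DB[:t].with(:t, DB[:albums].where(:artist_id => 1))
  # WITH t AS (SELECT * FROM albums WHERE (artist_id = 1)) SELECT * FROM t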

=== 3.1.0 (2009-06-04)

* Require the classes match to consider an association a reciprocal (jeremyevans) (#270)
* Make Migrator work correctly with file names like 001_873465873465873465_some_name.rb (jeremyevans) (#267)
* Add Dataset#qualify_to and #qualify_to_first_source, for qualifying unqualified identifiers in the dataset (jeremyevans)
* Allow the use of #sql_subscript on most SQL::* objects, and support non-integer subscript values (jeremyevans)
* Add reflection.rdoc file which explains and gives examples of many of Sequel's reflection methods (jeremyevans)
* Add many_through_many plugin, allowing you to construct an association to multiple objects through multiple join tables (jeremyevans)
* Add the :cartesian_product_number option to associations, for specifying if they can cause a cartesian product (jeremyevans)
* Make :eager_graph association option work correctly when lazily loading many_to_many associations (jeremyevans)
* Make eager_unique_table_alias consider joined tables as well as tables in the FROM clause (jeremyevans)
* Make add_graph_aliases work correctly even if set_graph_aliases hasn't been used (jeremyevans)
* Fix using :conditions that are a placeholder string in an association (e.g. :conditions=>['a = ?', 42]) (jeremyevans)
* On MySQL, make Dataset#insert_ignore affect #insert as well as #multi_insert and #import (jeremyevans, tmm1)
* Add -t option to bin/sequel to output the full backtrace if an exception is raised (jeremyevans)
* Make schema_dumper extension ignore errors with indexes unless it is dumping in the database-specific type format (jeremyevans)
* Don't dump partial indexes in the MySQL adapter (jeremyevans)
* Add :ignore_index_errors option to Database#create_table and :ignore_errors option to Database#add_index (jeremyevans)
* Make graphing a complex dataset work correctly (jeremyevans)
* Fix MySQL command out of sync errors, disconnect from database if they occur (jeremyevans)
* In the schema_dumper extension, do a much better job of parsing defaults from the database (jeremyevans)
* On PostgreSQL, assume the public schema if one is not given and there is no default in Database#tables (jeremyevans)
* Ignore a :default value if creating a String :text=>true or File column on MySQL, since it doesn't support defaults on text/blob columns (jeremyevans)
* On PostgreSQL, do not raise an error when attempting to reset the primary key sequence for a table without a primary key (jeremyevans)
* Allow plugins to have a configure method that is called on every attempt to load them (jeremyevans)
* Attempting to load an already loaded plugin no longer calls the plugin's apply method (jeremyevans)
* Make plugin's plugin_opts methods return an array of arguments if multiple arguments were given, instead of just the first argument (jeremyevans)
* Keep track of loaded plugins at Model.plugins, allows plugins to depend on other plugins (jeremyevans)
* Make Dataset#insert on PostgreSQL work with static SQL (jeremyevans)
* Add lazy_attributes plugin, for creating attributes that can be lazily loaded from the database (jeremyevans)
* Add tactical_eager_loading plugin, similar to DataMapper's strategic eager loading (jeremyevans)
* Don't raise an error when loading a plugin with DatasetMethods where none of the methods are public (jeremyevans)
* Add identity_map plugin, for creating temporary thread-local identity maps with some caching (jeremyevans)
* Support savepoints when using MySQL and SQLite (jeremyevans)
* Add -C option to bin/sequel that copies one database to another (jeremyevans)
* In the schema_dumper extension, don't include defaults that contain literal strings unless the DBs are the same (jeremyevans)
* Only include valid non-partial indexes of simple column references in the PostgreSQL adapter (jeremyevans)
* Add -h option to bin/sequel for outputting the usage, alias for -? (jeremyevans)
* Add -d and -D options to bin/sequel for dumping schema migrations (jeremyevans)
* Support eager graphing for model tables that lack primary keys (jeremyevans)
* Add Model.create_table? to the schema plugin, similar to Database#create_table? (jeremyevans)
* Add Database#create_table?, which creates the table if it doesn't already exist (jeremyevans)
* Handle ordered and limited datasets correctly when using UNION, INTERSECT, or EXCEPT (jeremyevans)
* Fix unlikely threading bug with class level validations (jeremyevans)
* Make the schema_dumper extension dump tables in alphabetical order in migrations (jeremyevans)
* Add Sequel.extension method for loading extensions, so you don't have to use require (jeremyevans)
* Allow bin/sequel to respect multiple -L options instead of ignoring all but the last one (jeremyevans)
* Add :command_timeout and :provider options to ADO adapter (hgimenez)
* Fix exception messages when Sequel.string_to_* fail (jeremyevans)
* Fix String :type=>:text generic type in the Firebird adapter (wishdev)
* Add Sequel.amalgalite adapter method (jeremyevans)
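
A minimal sketch of the many_through_many plugin added in 3.1.0 above, using a hypothetical artists-to-tags path through three join tables:

  Artist.plugin :many_through_many
  Artist.many_through_many :tags,
    [[:albums_artists, :artist_id, :album_id],
     [:albums, :id, :id],
     [:albums_tags, :album_id, :tag_id]]
  artist.tags # tags on any album by this artist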

=== 3.0.0 (2009-05-04)

* Remove dead threads from connection pool if the pool is full and a connection is requested (jeremyevans)
* Add autoincrementing primary key support in the Oracle adapter, using a sequence and trigger (jeremyevans, Mike Golod)
* Make Model#save use the same server it uses for saving as for retrieving the saved record (jeremyevans)
* Add Database#database_type method, for identifying which type of database the object is connecting to (jeremyevans)
* Add ability to reset primary key sequences in the PostgreSQL adapter (jeremyevans)
* Fix parsing of non-simple sequence names (that contain uppercase, spaces, etc.) in the PostgreSQL adapter (jeremyevans)
* Support dumping indexes in the schema_dumper extension (jeremyevans)
* Add index parsing to PostgreSQL, MySQL, SQLite, and JDBC adapters (jeremyevans)
* Correctly quote SQL Array references, and handle qualified identifiers with them (e.g. :table__column.sql_subscript(1)) (jeremyevans)
* Allow dropping an index with a name different than the default name (jeremyevans)
* Allow Dataset#from to remove existing FROM tables when called without an argument, instead of raising an error later (jeremyevans)
* Fix string quoting on Oracle so it doesn't double backslashes (jeremyevans)
* Alias the count function call in Dataset#count, fixes use on MSSQL (akitaonrails, jeremyevans)
* Allow QualifiedIdentifiers to be qualified, to allow :column.qualify(:table).qualify(:schema) (jeremyevans)
* Allow :db_type=>'mssql' option to be respected when using the DBI adapter (akitaonrails)
* Add schema_dumper extension, for dumping schema of tables (jeremyevans)
* Allow generic database types specified as ruby types to take options (jeremyevans)
* Change Dataset#exclude to invert given hash argument, not negate it (jeremyevans)
* Make Dataset#filter and related methods treat multiple arguments more intuitively (jeremyevans)
* Fix full text searching with multiple search terms on MySQL (jeremyevans)
* Fix altering a column name, type, default, or NULL/NOT NULL status on MySQL (jeremyevans)
* Fix index type syntax on MySQL (jeremyevans)
* Add temporary table support, via :temp option to Database#create_table (EppO, jeremyevans)
* Add Amalgalite adapter (jeremyevans)
* Remove Sequel::Metaprogramming#metaattr_accessor and metaattr_reader (jeremyevans)
* Remove Dataset#irregular_function_sql (jeremyevans)
* Add Dataset#full_text_sql to the MySQL adapter (dusty)
* Fix schema type parsing of decimal types on MySQL (jeremyevans)
* Make Dataset#quote_identifier work with SQL::Identifiers (jeremyevans)
* Remove methods and features deprecated in 2.12.0 (jeremyevans)
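
A minimal sketch of the schema_dumper extension added in 3.0.0 above (shown loaded via Sequel.extension, which itself arrived in 3.1.0; earlier you could require the extension file directly):

  Sequel.extension :schema_dumper
  puts DB.dump_schema_migration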

=== 2.12.0 (2009-04-03)

* Deprecate Java::JavaSQL::Timestamp#usec (jeremyevans)
* Fix Model.[] optimization introduced in 2.11.0 for databases that don't use LIMIT (jacaetevha)
* Don't use the model association plugin if SEQUEL_NO_ASSOCIATIONS constant or environment variable is defined (jeremyevans)
* Don't require core_sql if SEQUEL_NO_CORE_EXTENSIONS constant or environment variable is defined (jeremyevans)
* Add validation_helpers model plugin, which adds instance level validation support similar to previously standard validations, with a different API (jeremyevans)
* Split multi_insert into 2 methods with separate APIs, multi_insert for hashes, import for arrays of columns and values (jeremyevans)
* Deprecate Dataset#transform and Model.serialize, add model serialization plugin (jeremyevans)
* Add multi_insert_update to the MySQL adapter, used for setting specific update behavior when an error occurs when using multi_insert (dusty)
* Add multi_insert_ignore to the MySQL adapter, used for skipping errors on row inserts when using multi_insert (dusty)
* Add Sequel::MySQL.convert_invalid_date_time accessor for dealing with dates like "0000-00-00" and times like "25:00:00" (jeremyevans, epugh)
* Eliminate internal dependence on core_sql extensions (jeremyevans)
* Deprecate Migration and Migrator, require 'sequel/extensions/migration' if you want them (jeremyevans)
* Denamespace Sequel::Error descendants (e.g. use Sequel::Rollback instead of Sequel::Error::Rollback) (jeremyevans)
* Deprecate Error::InvalidTransform, Error::NoExistingFilter, and Error::InvalidStatement (jeremyevans)
* Deprecate Dataset#[] when called without an argument, and Dataset#map when called with an argument and a block (jeremyevans)
* Fix aliasing columns in the JDBC adapter (per.melin) (#263)
* Make Database#rename_table remove the cached schema entry for the table (jeremyevans)
* Make Database schema sql methods private (jeremyevans)
* Deprecate Database #multi_threaded? and #logger (jeremyevans)
* Make Dataset#where always affect the WHERE clause (jeremyevans)
* Deprecate Object#blank? and related extensions, require 'sequel/extensions/blank' to get them back (jeremyevans)
* Move lib/sequel_core into lib/sequel and lib/sequel_model into lib/sequel/model (jeremyevans)
* Remove Sequel::Schema::SQL module, move methods into Sequel::Database (jeremyevans)
* Support creating and dropping schema qualified views (jeremyevans)
* Fix saving a newly inserted record in an after_create or after_save hook (jeremyevans)
* Deprecate Dataset#print and PrettyTable, require 'sequel/extensions/pretty_table' if you want them (jeremyevans)
* Deprecate Database#query and Dataset#query, require 'sequel/extensions/query' if you want them (jeremyevans)
* Deprecate Dataset#paginate and #each_page, require 'sequel/extensions/pagination' if you want them (jeremyevans)
* Fix ~{:bool_col=>true} and related inversions of boolean values (jeremyevans)
* Add disable_insert_returning method to PostgreSQL datasets, so they fall back to just using INSERT (jeremyevans)
* Don't use savepoints by default on PostgreSQL, use the :savepoint option to Database#transaction to use a savepoint (jeremyevans)
* Deprecate Database#transaction accepting a server symbol argument, use an options hash with the :server option (jeremyevans)
* Add Model.use_transactions for setting whether models should use transactions when destroying/saving records (jeremyevans, mjwillson)
* Deprecate Model::Validation::Errors, use Model::Errors (jeremyevans)
* Deprecate string inflection methods, require 'sequel/extensions/inflector' if you use them (jeremyevans)
* Deprecate Model validation class methods, override Model#validate instead or use Model.plugin validation_class_methods (jeremyevans)
* Deprecate Model schema methods, use Model.plugin :schema (jeremyevans)
* Deprecate Model hook class methods, use instance methods instead or Model.plugin :hook_class_methods (jeremyevans)
* Deprecate Model.set_sti_key, use Model.plugin :single_table_inheritance (jeremyevans)
* Deprecate Model.set_cache, use Model.plugin :caching (jeremyevans)
* Move most model instance methods into Model::InstanceMethods, for easier overriding of instance methods for all models (jeremyevans)
* Move most model class methods into Model::ClassMethods, for easier overriding of class methods for all models (jeremyevans)
* Deprecate String#to_date, #to_datetime, #to_time, and #to_sequel_time, use require 'sequel/extensions/string_date_time' if you want them (jeremyevans)
* Deprecate Array#extract_options! and Object#is_one_of? (jeremyevans)
* Deprecate Object#meta_def, #meta_eval, and #metaclass (jeremyevans)
* Deprecate Module#class_def, #class_attr_overridable, #class_attr_reader, #metaalias, #metaattr_reader, and #metaatt_accessor (jeremyevans)
* Speed up the calling of most column accessor methods, and reduce memory overhead of creating them (jeremyevans)
* Deprecate Model#set_restricted using Model#[] if no setter method exists, a symbol is used, and the columns are not set (jeremyevans)
* Deprecate Model#set_with_params and #update_with_params (jeremyevans)
* Deprecate Model#save!, use Model#save(:validate=>false) (jeremyevans)
* Deprecate Model#dataset (jeremyevans)
* Deprecate Model.is and Model.is_a, use Model.plugin for plugins (jeremyevans)
* Deprecate Model.str_columns, Model#str_columns, #set_values, #update_values (jeremyevans)
* Deprecate Model.delete_all, .destroy_all, .size, and .uniq (jeremyevans)
* Copy all current dataset options when calling Model.db= (jeremyevans)
* Deprecate Model.belongs_to, Model.has_many, and Model.has_and_belongs_to_many (jeremyevans)
* Remove SQL::SpecificExpression, have subclasses inherit from SQL::Expression instead (jeremyevans)
* Deprecate SQL::CastMethods#cast_as (jeremyevans)
* Deprecate calling Database#schema without a table argument (jeremyevans)
* Remove cached version of @db_schema in model instances to reduce memory and marshalling overhead (tmm1)
* Deprecate Dataset#quote_column_ref and Dataset#symbol_to_column_ref (jeremyevans)
* Deprecate Dataset#size and Dataset#uniq (jeremyevans)
* Deprecate passing options to Dataset#each, #all, #single_record, #single_value, #sql, #select_sql, #update, #update_sql, #delete, #delete_sql, and #exists (jeremyevans)
* Deprecate Dataset#[Integer] (jeremyevans)
* Deprecate Dataset#create_view and Dataset#create_or_replace_view (jeremyevans)
* Model datasets now have a model accessor that returns the related model (jeremyevans)
* Model datasets no longer have :models and :polymorphic_key options (jeremyevans)
* Deprecate Dataset.dataset_classes, Dataset#model_classes, Dataset#polymorphic_key, and Dataset#set_model (jeremyevans)
* Allow Database#get and Database#select to take a block (jeremyevans)
* Deprecate Database#>> (jeremyevans)
* Deprecate String#to_blob and Sequel::SQL::Blob#to_blob (jeremyevans)
* Deprecate use of Symbol#| for SQL array subscripts, add Symbol#sql_subscript (jeremyevans)
* Deprecate Symbol#to_column_ref (jeremyevans)
* Deprecate String#expr (jeremyevans)
* Deprecate Array#to_sql, String#to_sql, and String#split_sql (jeremyevans)
* Deprecate passing an array to Database#<< (jeremyevans)
* Deprecate Range#interval (jeremyevans)
* Deprecate Enumerable#send_each (jeremyevans)
* Deprecate Hash#key on ruby 1.8, change some SQLite adapter constants (jeremyevans)
* Deprecate Sequel.open, Sequel.use_parse_tree=?, and the upcase_identifier methods (jeremyevans)
* Deprecate virtual row blocks without block arguments, unless Sequel.virtual_row_instance_eval is enabled (jeremyevans)
* Support schema parsing in the Oracle adapter (jacaetevha)
* Allow virtual row blocks to be instance_evaled, add Sequel.virtual_row_instance_eval= (jeremyevans)
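
A minimal sketch of the validation_helpers plugin API added in 2.12.0 above; the model and columns are hypothetical:

  class Album < Sequel::Model
    plugin :validation_helpers
    def validate
      validates_presence [:name, :artist_id]
      validates_unique :name
    end
  end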

=== 2.11.0 (2009-03-02)

* Optimize Model.[] by using static sql when possible, for a 30-40% speed increase (jeremyevans)
* Add Dataset#with_sql, which returns a clone of the dataset with static SQL (jeremyevans)
* Refactor Dataset#literal so it doesn't need to be overridden in subadapters, for a 20-25% performance increase (jeremyevans)
* Remove SQL::IrregularFunction, no longer used internally (jeremyevans)
* Allow String#lit to take arguments and return a SQL::PlaceholderLiteralString (jeremyevans)
* Add Model#set_associated_object, used by the many_to_one setter method, for easier overriding (jeremyevans)
* Allow use of database independent types when casting (jeremyevans)
* Give association datasets knowledge of the model object that created them and the related association reflection (jeremyevans)
* Make Dataset#select, #select_more, #order, #order_more, and #get take a block that yields a SQL::VirtualRow, similar to #filter (jeremyevans)
* Fix stored procedures in MySQL adapter when multiple arguments are used (clivecrous)
* Add :conditions association option, for easier filtering of associated objects (jeremyevans)
* Add :clone association option, for making clones of existing associations (jeremyevans)
* Handle typecasting invalid date strings (and possibly other types) correctly (jeremyevans)
* Add :compress=>false option to MySQL adapter to turn off compression of client-server connection (tmm1)
* Set SQL_AUTO_IS_NULL=0 on MySQL connections, disable with :auto_is_null=>false (tmm1)
* Add :timeout option to MySQL adapter, default to 30 days (tmm1)
* Set MySQL encoding using Mysql#options so it works across reconnects (tmm1)
* Fully support blobs on SQLite (jeremyevans)
* Add String#to_sequel_blob, alias String#to_blob to that (jeremyevans)
* Fix default index names when a non-String or Symbol column is used (jeremyevans)
* Fix some ruby -w warnings (jeremyevans) (#259)
* Fix issues with default column values, table names, and quoting in the rename_column and drop_column support in shared SQLite adapter (jeremyevans)
* Add rename_column support to SQLite shared adapter (jmhodges)
* Add validates_inclusion_of validation (jdunphy)

=== 2.10.0 (2009-02-03)

* Don't use a default schema any longer in the shared PostgreSQL adapter (jeremyevans)
* Make Dataset#quote_identifier return LiteralStrings as-is (jeremyevans)
* Support symbol keys and unnested hashes in the sequel command line tool's yaml config support (jeremyevans)
* Add schema parsing support to the JDBC adapter (jeremyevans)
* Add per-database type translation support for schema changes, translating ruby classes to database specific types (jeremyevans)
* Add Sequel::DatabaseConnectionError, for indicating that Sequel wasn't able to connect to the database (jeremyevans)
* Add validates_not_string validation, useful in conjunction with raise_on_typecast_failure = false (jeremyevans)
* Don't modify Model#new? and Model#changed_columns when saving a record until after the after hooks have been run (tamas, jeremyevans)
* Database#quote_identifiers= now affects future schema modification statements, even if it is not used before one of the schema modification statements (jeremyevans)
* Fix literalization of blobs when using the PostgreSQL JDBC subadapter (jeremyevans)
* Fix literalization of date and time types when using the MySQL JDBC subadapter (jeremyevans)
* Convert some Java specific types to ruby types on output in the JDBC adapter (jeremyevans)
* Add Database#tables method to JDBC adapter (jeremyevans)
* Add H2 JDBC subadapter (logan_barnett, david_koontz, james_britt, jeremyevans)
* Add identifier_output_method, used for converting identifiers coming out of the database, replacing the lowercase support on some databases (jeremyevans)
* Add identifier_input_method, used for converting identifiers going into the database, replacing upcase_identifiers (jeremyevans)
* Add :allow_missing validation option, useful if the database provides a good default (jeremyevans)
* Fix literalization of SQL::Blobs in DataObjects and JDBC adapter's postgresql subadapters when ruby 1.9 is used (jeremyevans)
* When using standard strings in the postgres adapter with the postgres-pr driver, use custom string escaping to prevent errors (jeremyevans)
* Before hooks now run in reverse order of being added, so later ones are run first (tamas)
* Add Firebird adapter, requires Firebird ruby driver located at http://github.com/wishdev/fb (wishdev)
* Don't clobber the following Symbol instance methods when using ruby 1.9: [], <, <=, >, >= (jeremyevans)
* Quote the table name and the index for PostgreSQL index creation (jeremyevans)
* Add DataObjects adapter, supporting PostgreSQL, MySQL, and SQLite (jeremyevans)
* Add ability for Database#create_table to take options, support specifying MySQL engine, charset, and collate per table (pusewicz, jeremyevans)
* Add Model.add_hook_type class method, for adding your own hook types, mostly for use by plugin authors (pkondzior, jeremyevans)
* Add Sequel.version for getting the internal version of Sequel (pusewicz, jeremyevans)
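
A minimal sketch of the identifier conversion accessors added in 2.10.0 above, assuming a database that stores identifiers in uppercase:

  DB.identifier_input_method = :upcase    # folds identifiers going in
  DB.identifier_output_method = :downcase # folds identifiers coming out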

=== 2.9.0 (2009-01-12)

* Add -L option to sequel command line tool to load all .rb files in the given directory (pkondzior, jeremyevans)
* Fix Dataset#destroy for model datasets that can't handle nested queries (jeremyevans)
* Improve the error messages in parts of Sequel::Model (jeremyevans, pusewicz)
* Much better support for Dataset#{union,except,intersect}, allowing chaining and respecting order (jeremyevans)
* Default to logging only WARNING level messages when connecting to PostgreSQL (jeremyevans)
* Fix add_foreign_key for MySQL (jeremyevans, aphyr)
* Correctly literalize BigDecimal NaN and (+-)Infinity values (jeremyevans) (#256)
* Make Sequel raise an Error if you attempt to subclass Sequel::Model before setting up a database connection (jeremyevans)
* Add Sequel::BeforeHookFailed exception to be raised when a record fails because a before hook fails (bougyman)
* Add Sequel::ValidationFailed exception to be raised when a record fails because a validation fails (bougyman)
* Make Database#schema raise an error if given a table that doesn't exist (jeremyevans) (#255)
* Make Model#inspect call Model#inspect_values private method for easier overloading (bougyman)
* Add methods to create and drop functions, triggers, and procedural languages on PostgreSQL (jeremyevans)
* Fix Dataset#count when using UNION, EXCEPT, or INTERSECT (jeremyevans)
* Make SQLite keep table's primary key information when dropping columns (jmhodges)
* Support dropping indices on SQLite (jmhodges)

=== 2.8.0 (2008-12-05)

* Support drop column operations inside a transaction on sqlite (jeremyevans)
* Support literal strings with placeholders and subselects in prepared statements (jeremyevans)
* Have the connection pool remove disconnected connections when the adapter supports it (jeremyevans)
* Make Dataset#exists return a LiteralString (jeremyevans)
* Support multiple SQL statements in one query in the MySQL adapter (jeremyevans)
* Add stored procedure support for the MySQL and JDBC adapters (jeremyevans, krsgoss) (#252)
* Support options when altering a column's type (for changing enums, varchar size, etc.) (jeremyevans)
* Support AliasedExpressions in tables when using implicitly qualified arguments in joins (jeremyevans)
* Support Dataset#except on Oracle (jeremyevans)
* Raise errors when EXCEPT/INTERSECT is used when not supported (jeremyevans)
* Fix ordering of UNION, INTERSECT, and EXCEPT statements (jeremyevans) (#253)
* Support aliasing subselects in the Oracle adapter (jeremyevans)
* Add a subadapter for the Progress RDBMS to the ODBC adapter (:db_type=>'progress') (groveriffic) (#251)
* Make MySQL and Oracle adapters raise an Error if asked to do a SELECT DISTINCT ON (jeremyevans)
* Set standard_conforming_strings = ON by default when using PostgreSQL, turn off with Sequel::Postgres.force_standard_strings = false (jeremyevans) (#247)
* Fix Database#rename_table when using PostgreSQL (jeremyevans) (#248)
* Whether to upcase or quote identifiers can now be set separately, via Sequel.upcase_identifiers= or the :upcase_identifiers database option (jeremyevans)
* Support transactions in the ODBC adapter (dlee)
* Support multi_insert_sql and unicode string literals in MSSQL shared adapter (dlee)
* Make PostgreSQL use the default schema if parsing the schema for all tables at once, even if :schema=>nil option is used (jeremyevans)
* Make MySQL adapter not raise an error when giving an SQL::Identifier object to the schema modification methods such as create_table (jeremyevans)
* The keys of the hash returned by Database#schema without a table name are now quoted strings instead of symbols (jeremyevans)
* Make Database#schema handle implicit schemas on all databases and multiple identifier object types (jeremyevans)
* Remove Sequel.odbc_mssql method (jeremyevans) (#249)
* More optimization of Model#initialize (jeremyevans)
* Treat interval as its own type, not an integer type (jeremyevans)
* Allow use of implicitly qualified symbol as argument to Symbol#qualify (:a.qualify(:b__c)=>b.c.a), fixes model associations in different schemas (jeremyevans) (#246)

=== 2.7.1 (2008-11-04)

* Fix PostgreSQL Date optimization so that it doesn't reject dates like 11/03/2008 (jeremyevans)

=== 2.7.0 (2008-11-03)

* Transform AssociationReflection from a single class to a class hierarchy (jeremyevans)
* Optimize Date object creation in PostgreSQL adapter (jeremyevans)
* Allow easier creation of custom association types, though support for them may still be suboptimal (jeremyevans)
* Add :eager_grapher option to associations, which the user can use to override the default eager_graph code (jeremyevans)
* Associations are now inherited when a model class is subclassed (jeremyevans)
* Instance methods added by associations are now added to an anonymous module the class includes, allowing you to override them and use super (jeremyevans)
* Add #add_graph_aliases (select_more for graphs), and allow use of arbitrary expressions when graphing (jeremyevans)
* Fix a corner case where the wrong table name is used in eager_graph (jeremyevans)
* Make Dataset#join_table take an option hash instead of a table_alias argument, add support for :implicit_qualifier option (jeremyevans)
* Add :left_primary_key and :right_primary_key options to many_to_many associations (jeremyevans)
* Add :primary_key option to one_to_many and many_to_one associations (jeremyevans)
* Make after_load association callbacks take effect when eager loading via eager (jeremyevans)
* Add a :uniq association option to many_to_many associations (jeremyevans)
* Support using any expression as the argument to Symbol#like (jeremyevans)
* Much better support for multiple schemas in PostgreSQL (jeremyevans) (#243)
* The first argument to Model#initialize can no longer be nil, it must be a hash if it is given (jeremyevans)
* Remove Sequel::Model.lazy_load_schema= setting (jeremyevans)
* Lazily load model instance options such as raise_on_save_failure, for better performance (jeremyevans)
* Make Model::Validation::Errors more Rails-compatible (jeremyevans)
* Refactor model hooks for performance (jeremyevans)
* Major performance enhancement when fetching rows using PostgreSQL (jeremyevans)
* Don't typecast serialized columns in models (jeremyevans)
* Add Array#sql_array to handle ruby arrays of two element arrays as SQL arrays (jeremyevans) (#245)
* Add ComplexExpression#== and #eql?, for checking equality (rubymage) (#244)
* Allow full text search on PostgreSQL to include rows where a search column is NULL (jeremyevans)
* PostgreSQL full text search queries with multiple columns are joined with space to prevent joining border words to one (michalbugno)
* Don't modify a dataset's cached column information if calling #each with an option that modifies the columns (jeremyevans)
* The PostgreSQL adapter will now generally default to using a unix socket in /tmp if no host is specified, instead of a tcp socket to localhost (jeremyevans)
* Make Dataset#sql call Dataset#select_sql instead of being an alias, to allow for easier subclassing (jeremyevans)
* Split Oracle adapter into shared and unshared parts, so Oracle is better supported when using JDBC (jeremyevans)
* Fix automatic loading of Oracle driver when using JDBC adapter (bburton333) (#242)

=== 2.6.0 (2008-10-11)

* Make the sqlite adapter respect the Sequel.datetime_class setting, for timestamp and datetime types (jeremyevans)
* Enhance the CASE statement support to include an optional expression (jarredholman)
* Default to using the simple language if no language is specified for a full text index on PostgreSQL (michalbugno)
* Add Model.raise_on_typecast_failure=, which makes it possible to not raise errors on invalid typecasts (michalbugno)
* Add schema.rdoc file, which provides a brief description of the various parts of Sequel related to schema modification (jeremyevans)
* Fix constraint generation when not using a proc or interpolated string (jeremyevans)
* Make eager_graph respect associations' :order options (use :order_eager_graph=>false to disable) (jeremyevans)
* Cache negative lookup when eagerly loading many_to_one associations where no objects have an associated object (jeremyevans)
* Allow string keys to be used when using Dataset#multi_insert (jeremyevans)
* Fix join_table when doing the first join for a dataset where the first source is a dataset when using unqualified columns (jeremyevans)
* Fix a few corner cases in eager_graph (jeremyevans)
* Support transactions on MSSQL (jeremyevans)
* Use string literals in AS clauses on SQLite (jeremyevans) (#241)
* Add AlterTableGenerator#set_column_allow_null, for SET/DROP NOT NULL on columns (divoxx)
* Database#tables now works for MySQL databases using the JDBC adapter (jeremyevans)
* Database#drop_view can now take multiple arguments to drop multiple views at once (jeremyevans)
* Schema modification methods (e.g. drop_table, create_table!) now remove the cached schema entry (jeremyevans)
* Models can now determine their primary keys by looking at the schema (jeremyevans)
* No longer include :numeric_precision and :max_chars entries in the schema column hashes, use the :db_type entry instead (jeremyevans)
* Make schema parsing on PostgreSQL handle implicit schemas (e.g. schema(:schema__table)), so it works with models for tables outside the public schema (jeremyevans)
* Significantly speed up schema parsing on MySQL (jeremyevans)
* Include primary key information when parsing the schema (jeremyevans)
* Fix schema generation of composite foreign keys on MySQL (clivecrous, jeremyevans)

=== 2.5.0 (2008-09-03)

* Add Dataset #set_defaults and #set_overrides, used for scoping the values used in insert/update statements (jeremyevans)
* Allow Models to use the RETURNING clause when inserting records on PostgreSQL (jeremyevans)
* Raise Sequel::DatabaseError instead of generic Sequel::Error for database errors, don't swallow tracebacks (jeremyevans)
* Use INSERT ... RETURNING ... with PostgreSQL 8.2 and higher (jeremyevans)
* Make insert_sql, delete_sql, and update_sql respect the :sql option (jeremyevans)
* Default to converting 2 digit years, use Sequel.convert_two_digit_years = false to get back the old behavior (jeremyevans)
* Make the PostgreSQL adapter with the pg driver use async_exec, so it doesn't block the entire interpreter (jeremyevans)
* Make the schema generators support composite primary and foreign keys and unique constraints (jarredholman)
* Work with the 2008.08.17 version of the pg gem (erikh)
* Disallow abuse of SQL function syntax for types (use :type=>:varchar, :size=>255 instead of :type=>:varchar[255]) (jeremyevans)
* Quote index names when creating or dropping indexes (jeremyevans, SanityInAnarchy)
* Don't have column accessor methods override plugin instance methods (jeremyevans)
* Allow validation of multiple attributes at once, with built in support for uniqueness checking of multiple columns (jeremyevans)
* In PostgreSQL adapter, fix inserting a row with a primary key value inside a transaction (jeremyevans)
* Allow before_save and before_update to affect the columns saved by save_changes (jeremyevans)
* Make Dataset#single_value work when graphing, which fixes count and paginate on graphed datasets (jeremyevans)
Support transactions on MSSQL (jeremyevans) * Use string literals in AS clauses on SQLite (jeremyevans) (#241) * AlterTableGenerator#set_column_allow_null was added to SET/DROP NOT NULL for columns (divoxx) * Database#tables now works for MySQL databases using the JDBC adapter (jeremyevans) * Database#drop_view can now take multiple arguments to drop multiple views at once (jeremyevans) * Schema modification methods (e.g. drop_table, create_table!) now remove the cached schema entry (jeremyevans) * Models can now determine their primary keys by looking at the schema (jeremyevans) * No longer include :numeric_precision and :max_chars entries in the schema column hashes, use the :db_type entry instead (jeremyevans) * Make schema parsing on PostgreSQL handle implicit schemas (e.g. schema(:schema__table)), so it works with models for tables outside the public schema (jeremyevans) * Significantly speed up schema parsing on MySQL (jeremyevans) * Include primary key information when parsing the schema (jeremyevans) * Fix schema generation of composite foreign keys on MySQL (clivecrous, jeremyevans) === 2.5.0 (2008-09-03) * Add Dataset #set_defaults and #set_overrides, used for scoping the values used in insert/update statements (jeremyevans) * Allow Models to use the RETURNING clause when inserting records on PostgreSQL (jeremyevans) * Raise Sequel::DatabaseError instead of generic Sequel::Error for database errors, don't swallow tracebacks (jeremyevans) * Use INSERT ... RETURNING ... with PostgreSQL 8.2 and higher (jeremyevans) * Make insert_sql, delete_sql, and update_sql respect the :sql option (jeremyevans) * Default to converting 2 digit years, use Sequel.convert_two_digit_years = false to get back the old behavior (jeremyevans) * Make the PostgreSQL adapter with the pg driver use async_exec, so it doesn't block the entire interpreter (jeremyevans) * Make the schema generators support composite primary and foreign keys and unique constraints (jarredholman) * Work with the 2008.08.17 version of the pg gem (erikh) * Disallow abuse of SQL function syntax for types (use :type=>:varchar, :size=>255 instead of :type=>:varchar[255]) (jeremyevans) * Quote index names when creating or dropping indexes (jeremyevans, SanityInAnarchy) * Don't have column accessor methods override plugin instance methods (jeremyevans) * Allow validation of multiple attributes at once, with built in support for uniqueness checking of multiple columns (jeremyevans) * In PostgreSQL adapter, fix inserting a row with a primary key value inside a transaction (jeremyevans) * Allow before_save and before_update to affect the columns saved by save_changes (jeremyevans) * Make Dataset#single_value work when graphing, which fixes count and paginate on graphed datasets (jeremyevans) === 2.4.0 (2008-08-06) * Handle Java::JavaSql::Date type in the JDBC adapter (jeremyevans) * Add support for read-only slave/writable master databases and database sharding (jeremyevans) * Remove InvalidExpression, InvalidFilter, InvalidJoinType, and WorkerStop exceptions (jeremyevans) * Add prepared statement/bound variable support (jeremyevans) * Fix anonymous column names in the ADO adapter (nusco) * Remove odbc_mssql adapter, use :db_type=>'mssql' option instead (jeremyevans) * Split MSSQL specific syntax into separate file, usable by ADO and ODBC adapters (nusco, jeremyevans) === 2.3.0 (2008-07-25) * Enable almost full support for MySQL using JDBC (jeremyevans) * Fix ODBC adapter's conversion of ::ODBC::Time values (Michael Xavier) * Enable full 
support for SQLite-JDBC using the JDBC adapter (jeremyevans) * Minor changes to allow for full Ruby 1.9 compatibility (jeremyevans) * Make Database#disconnect work for the ADO adapter (spicyj) * Don't raise an exception in the ADO adapter if the dataset contains no records (nusco) * Enable almost full support of PostgreSQL-JDBC using the JDBC adapter (jeremyevans) * Remove Sequel::Worker (jeremyevans) * Make PostgreSQL adapter not raise an error when inserting records into a table without a primary key (jeremyevans) * Make Database.uri_to_options a private class method (jeremyevans) * Make JDBC load drivers automatically for PostgreSQL, MySQL, SQLite, Oracle, and MSSQL (jeremyevans) * Make Oracle adapter work with a nonstandard Oracle database port (pavel.lukin) * Typecast '' to nil by default for non-string non-blob columns, add typecast_empty_string_to_nil= model class and instance methods (jeremyevans) * Use a simpler select in Dataset#empty?, fixes use with MySQL (jeremyevans) * Add integration test suite, testing sequel against a real database, with nothing mocked (jeremyevans) * Make validates_length_of default tag depend on presence of options passed to it (jeremyevans) * Combine the directory structure for sequel_model and sequel_core, now there is going to be only one gem named sequel (jeremyevans) === 2.2.0 (2008-07-05) * Add :extend association option, extending the dataset with module(s) (jeremyevans) * Add :after_load association callback option, called after associated objects have been loaded from the database (jeremyevans) * Make validation methods support a :tag option, to work correctly with source reloading (jeremyevans) * Add :before_add, :after_add, :before_remove, :after_remove association callback options (jeremyevans) * Break many_to_one association setter method in two parts, for easier overriding (jeremyevans) * Model.validates_presence_of now considers false as present instead of absent (jeremyevans) * Add Model.raise_on_save_failure, raising errors on save failure instead of return false (now nil), default to true (jeremyevans) * Add :eager_loader association option, to specify code to be run when eager loading (jeremyevans) * Make :many_to_one associations support :dataset, :order, :limit association options, as well as block arguments (jeremyevans) * Add :dataset association option, which overrides the default base dataset to use (jeremyevans) * Add :eager_graph association option, works just like :eager except it uses #eager_graph (jeremyevans) * Add :graph_join_table_join_type association option (jeremyevans) * Add :graph_only_conditions and :graph_join_table_only_conditions association options (jeremyevans) * Add :graph_block and :graph_join_table_block association options (jeremyevans) * Set the model's dataset's columns in addition to the model's columns when loading the schema for a model (jeremyevans) * Make caching work correctly with subclasses (jeremyevans) * Add the Model.to_hash dataset method (jeremyevans) * Filter blocks now yield a SQL::VirtualRow argument, which is useful if another library defines operator methods on Symbol (jeremyevans) * Add Symbol#identifier method, to make x__a be treated as "x__a" instead of "x"."a" (jeremyevans) * Dataset#update no longer takes a block, please use a hash argument with the expression syntax instead (jeremyevans) * ParseTree support has been removed from Sequel (jeremyevans) * Database#drop_column is now supported in the SQLite adapter (abhay) * Tinyint columns can now be considered integers instead of 
booleans by setting Sequel.convert_tinyint_to_bool = false (samsouder) * Allow the use of URL parameters in connection strings (jeremyevans) * Ignore any previously selected columns when using Dataset#graph for the first time (jeremyevans) * Dataset#graph now accepts a block which is passed to join_table (jeremyevans) * Make Dataset#columns ignore any filtering, ordering, and distinct clauses (jeremyevans) * Use the safer connection-specific string escaping methods for PostgreSQL (jeremyevans) * Database#transaction now yields a connection when using the Postgres adapter, just like it does for other adapters (jeremyevans) * Dataset#count now works for a limited dataset (divoxx) * Database#add_index is now supported in the SQLite adapter (abhay) * Sequel's MySQL adapter should no longer conflict with ActiveRecord's use of MySQL (careo) * Treat Hash as expression instead of column alias when used in DISTINCT, ORDER BY, and GROUP BY clauses (jeremyevans) * PostgreSQL bytea fields are now fully supported (dlee) * For PostgreSQL, don't raise an error when assigning a value to a SERIAL PRIMARY KEY field when inserting records (jeremyevans) === 2.1.0 (2008-06-17) * Break association add_/remove_/remove_all_ methods into two parts, for easier overriding (jeremyevans) * Add Model.strict_param_setting, on by default, which raises errors if a missing/restricted method is called via new/set/update/etc. (jeremyevans) * Raise errors when using association methods on objects without valid primary keys (jeremyevans) * The model's primary key is a restricted column by default, Add model.unrestrict_primary_key to get the old behavior (jeremyevans) * Add Model.set_(allowed|restricted)_columns, which affect which columns create/new/set/update/etc. modify (jeremyevans) * Calls to Model.def_dataset_method with a block are cached and reapplied to the new dataset if set_dataset is called, even in a subclass (jeremyevans) * The :reciprocal option to associations should now be the symbol name of the reciprocal association, not an instance variable symbol (jeremyevans) * Add Model#associations, which is a hash holding a cache of associated objects, with each association being a separate key (jeremyevans) * Make all associations support a :graph_select option, specifying a column or array of columns to select when using eager_graph (jeremyevans) * Bring back Model#set and Model#update, now the same as Model#set_with_params and Model#update_with_params (jeremyevans) * Allow model datasets to call to_hash without any arguments, which allows easy creation of identity maps (jeremyevans) * Add Model.set_sti_key, for easily setting up single table inheritance (jeremyevans) * Make all associations support a :read_only option, which doesn't add methods that modify the database (jeremyevans) * Make *_to_many associations support a :limit option, for specifying a limit to the resulting records (and possibly an offset) (jeremyevans) * Make association block argument and :eager option affect the _dataset method (jeremyevans) * Add a :one_to_one option to one_to_many associations, which creates a getter and setter similar to many_to_one (a.k.a. 
has_one) (jeremyevans) * add_ and remove_ one_to_many association methods now raise an error if the passed object cannot be saved, instead of saving without validation (jeremyevans) * Add support for :if option on validations, using a symbol (specifying an instance method) or a proc (dtsato) * Support bitwise operators for NumericExpressions: &, |, ^, ~, <<, >> (jeremyevans) * No longer raise an error for Dataset#filter(true) or Dataset#filter(false) (jeremyevans) * Allow Dataset #filter, #or, #exclude and other methods that call them to use both the block and regular arguments (jeremyevans) * ParseTree support is now officially deprecated, use Sequel.use_parse_tree = false to use the expression (blockless) filters inside blocks (jeremyevans) * Remove :pool_reuse_connections ConnectionPool/Database option, MySQL users need to be careful with nested queries (jeremyevans) * Allow Dataset#graph :select option to take an array of columns to select (jeremyevans) * Allow Dataset#to_hash to be called with only one argument, allowing for easy creation of lookup tables for a single key (jeremyevans) * Allow join_table to accept a block providing the aliases and previous joins, that allows you to specify arbitrary conditions properly qualified (jeremyevans) * Support NATURAL, CROSS, and USING joins in join_table (jeremyevans) * Make sure HAVING comes before ORDER BY, per the SQL standard and at least MySQL, PostgreSQL, and SQLite (juco) * Add cast_numeric and cast_string methods for use in the Sequel DSL, that have default types and wrap the object in the correct class (jeremyevans) * Add Symbol#qualify, for adding a table/schema qualifier to a column/table name (jeremyevans) * Remove Module#metaprivate, since it duplicates the standard Module#private_class_method (jeremyevans) * Support the SQL CASE expression via Array#case and Hash#case (jeremyevans) * Support the SQL EXTRACT function: :date.extract(:year) (jeremyevans) * Convert numeric fields to BigDecimal in PostgreSQL adapter (jeremyevans) * Add :decimal fields to the schema parser (jeremyevans) * The expr argument in join table now allows the same argument as filter, so it can take a string or a blockless filter expression (brushbox, jeremyevans) * No longer assume the expr argument to join_table references the primary key column (jeremyevans) * Rename the Sequel.time_class setting to Sequel.datetime_class (jeremyevans) * Add savepoint/nesting support to postgresql transactions (elven) * Use the specified table alias when joining a dataset, instead of the automatically generated alias (brushbox) === 2.0.1 (2008-06-04) * Make the choice of Time or DateTime optional for typecasting :datetime types, default to Time (jeremyevans) * Reload database schema for table when calling Model.create_table (jeremyevans) * Have PostgreSQL money type use BigDecimal instead of Float (jeremyevans) * Have the PostgreSQL and MySQL adapters use the Sequel.time_class setting for datetime/timestamp types (jeremyevans) * Add Sequel.time_class and String#to_sequel_time, used for converting time values from the database to either Time (default) or DateTime (jeremyevans) * Make identifier quoting uppercase by default, to work better with the SQL standard, override in PostgreSQL (jeremyevans) (#232) * Add StringExpression#+, for simple SQL string concatenation (:x.sql_string + :y) (jeremyevans) * Make StringMethods.like to a case sensensitive search on MySQL (use ilike for the old behavior) (jeremyevans) * Add StringMethods.ilike, for case insensitive pattern matching 
(jeremyevans) * Refactor ComplexExpression into three subclasses and a few modules, so operators that don't make sense are not defined for the class (jeremyevans) === 2.0.0 (2008-06-01) * Comprehensive update of all documentation (jeremyevans) * Remove methods deprecated in 1.5.0 (jeremyevans) * Add typecasting on attribute assignment to Sequel::Model objects, optional but enabled by default (jeremyevans) * Returning false in one of the before_ hooks now causes the appropriate method(s) to immediately return false (jeremyevans) * Add remove_all_* association method for *_to_many associations, which removes the association with all currently associated objects (jeremyevans) * Add Model.lazy_load_schema=, when set to true, it loads the schema on first instantiation (jeremyevans) * Add before_validation and after_validation hooks, called whenever the model is validated (jeremyevans) * Add Model.default_foreign_key, a private class method that allows changing the default foreign key that Sequel will use in associations (jeremyevans) * Cache negative lookup when eagerly loading many_to_one associations (jeremyevans) * Make all associations support the :select option, not just many_to_many (jeremyevans) * Allow the use of blocks when eager loading, and add the :eager_block and :allow_eager association options for configuration (jeremyevans) * Add the :graph_join_type, :graph_conditions, and :graph_join_table_conditions association options, used when eager graphing (jeremyevans) * Add AssociationReflection class (subclass of Hash), to make calling a couple of private Model methods unnecessary (jeremyevans) * Change hook methods so that if a tag/method is specified it overwrites an existing hook block with the same tag/method (jeremyevans) * Refactor String inflection support, you must use String.inflections instead of Inflector.inflections now (jeremyevans) * Allow connection to ODBC-MSSQL via a URL (petersumskas) (#230) * Comprehensive update of all documentation, except for the block filters and adapters (jeremyevans) * Handle Date and DateTime value literalization correctly in adapters (jeremyevans) * Literalize DateTime values the same as Time values (jeremyevans) * MySQL tinyints are now returned as boolean values instead of integers (jeremyevans) * Set additional MySQL charset options required for creating tables and databases (tmm1) * Remove methods deprecated in 1.5.0 (jeremyevans) * Add Module#metaattr_accessor for creating attr_accessors for the metaclass (jeremyevans) * Add SQL string concatenation support to blockless filters, via Array#sql_string_join (jeremyevans) * Add Pagination#last_page? and Pagination#first_page? (apeiros) * Add limited column reflection support, tested on PostgreSQL, MySQL, and SQLite (jeremyevans) * Allow the use of :schema__table___table_alias syntax for tables, similar to the column support (jeremyevans) * Merge metaid gem into core_ext.rb and clean it up, so sequel now has no external dependencies (jeremyevans) * Add Dataset#as, so using a dataset as a column with an alias is not deprecated (jeremyevans) * Add Dataset#invert, which returns a dataset with inverted HAVING and WHERE clauses (jeremyevans) * Add blockless filter syntax support (jeremyevans) * Passing an array to Dataset#order and Dataset#select no longer works, you need to pass multiple arguments (jeremyevans) * You should use '?' 
instead of '(?)' when using interpolated strings with array arguments (jeremyevans) * Dataset.literal now surrounds the literalization of arrays with parentheses (jeremyevans) * Add echo option (back?) to sequel command line tool, via -E or --echo (jeremyevans) * Allow databases to have multiple loggers (jeremyevans) * The sequel command line tool now also accepts a path to a database config YAML file in addition to a URI (mtodd) * Major update of the postgresql adapter (jdavis, jeremyevans) (#225) * Make returning inside of a database transaction commit the transaction (ahoward, jeremyevans) * Dataset#to_table_reference is now protected, and it has a different API (jeremyevans) * Dataset#join_table and related functions now take an explicit optional table_alias argument, you can no longer include the table alias in the table argument (jeremyevans) * Aliased and/or qualified columns with embedded spaces can now be specified as symbols (jeremyevans) * When identifier quoting is enabled, the SQL standard double quote is used by default (jeremyevans) * When identifier quoting is enabled, quote tables as well as columns (jeremyevans) * Make identifier quoting optional, enabled by default (jeremyevans) * Allow Sequel::Database.connect and related methods to take a block that disconnects the database when the block finishes (jeremyevans) * Add Dataset#unfiltered, for removing filters from dataset (jeremyevans) * Add add_foreign_key and add_primary_key methods to the AlterTableGenerator (jeremyevans) * Allow migration files to have more than 3 digits (jeremyevans) * Add methods directly to Dataset instead of including modules (jeremyevans) * Make some Dataset instance methods private: invert_order, insert_default_values_sql (jeremyevans) * Don't add methods that depend on ParseTree unless you can load ParseTree (jeremyevans) * Don't wipeout the cached columns every time a dataset is cloned, but only on changes to :select, :sql, :from, or :join (jeremyevans) * Fix Oracle Adapter (yasushi.abe) * Fixed sqlite uri so that sqlite:// works just like file:// (2 slashes for a relative path, 3 for an absolute) (dlee) * Raise a Sequel::Error if an invalid limit or offset is used (jeremyevans) * Refactor and beef up Dataset#first and Dataset#last, with some change in functionality (jeremyevans) * Add String#to_datetime, for consistency (jeremyevans) * Fix Range#interval so that it returns 1 less for an exclusive range * Change SQLite adapter so it doesn't swallow exceptions other than SQLite3::Exception (such as Interrupt) (jeremyevans) * Change PostgreSQL and MySQL adapters to raise Sequel::Error instead of database specific errors if a database error occurs (jeremyevans) * Using a memory database with SQLite now defaults to a single connection, so all queries it uses run against the same database (jeremyevans) * Fix attempting to query MySQL using the same connection being used to concurrently execute another query (jeremyevans) * Add options to the connection pool to configure reusing connections and converting exceptions (jeremyevans) * Use the database driver provided string quoting methods for MySQL and SQLite (jeremyevans) (#223) * Add ColumnAll#==, for checking the equality of two ColumnAlls (jeremyevans) * Allow an array of arrays instead of a hash when specifying conditions (jeremyevans) * Add Sequel::DBI::Database#lowercase, for lowercasing column names (jamesearl) * Remove Dataset#extend_with_destroy, which may break code that uses Dataset#set_model directly and expects the destroy method to be 
added (jeremyevans) * Fix some issues when running on Ruby 1.9 (Zverok, jeremyevans) * Make the DBI adapter work (partially) with PostgreSQL (Seb) === 1.5.1 (2008-04-30) * Fix Dataset#eager_graph when not all objects have associated objects (jeremyevans) * Have Dataset#graph give a nil value instead of a hash with all nil values if no matching rows exist in the graphed table (jeremyevans) === 1.5.0 (2008-04-29) * Make the validation errors API compatible with Merb (Inviz) * Add validates_uniqueness_of, for protecting against duplicate entries in the database (neaf, jeremyevans) * Alias Model#dataset= to Model#set_dataset (tmm1) * Make some Model class methods private: def_hook_method, hooks, add_hook, plugin_module, plugin_gem (jeremyevans) * Add the eager! and eager_graph! mutation methods to model datasets (jeremyevans) * Remove Model.database_opened (jeremyevans) * Remove Model.super_dataset (jeremyevans) * Deprecate .create_with_params, .create_with, #set, #update, #update_with, and #new_record from Sequel::Model (jeremyevans) * Add Model.def_dataset_method, for defining methods on the model that reference methods on the dataset (jeremyevans) * Deprecate Model.method_missing, add dataset methods to Model via metaprogramming (jeremyevans) * Remove Model.join, so it is the same as Dataset#join (jeremyevans) * Use reciprocal associations for all types of associations in the getter/setter/add_/remove_ methods (jeremyevans) * Fix many_to_one associations to cache negative lookups (jeremyevans) * Change Model#=== to always be false if the primary key is nil (jeremyevans) * Add Model#hash, which should be unique for a given class and primary key (or values if primary key is nil) (jeremyevans) * Add Model#eql? as a alias to Model#== (jeremyevans) * Make Model#reload clear any cached associations (jeremyevans) * No longer depend on the assistance gem, merge the Inflector and Validations code (jeremyevans) * Add Model#set_with_params, which is Model#update_with_params without the save (jeremyevans) * Fix Model#destroy so that it returns self, not the result of after_destroy (jeremyevans) * Define Model column accessors in set_dataset, so they should always be avaiable, deprecate Model#method_missing (jeremyevans) * Add eager loading of associations via new sequel_core object graphing feature (jeremyevans) * Fix many_to_many associations with classes inside modules without an explicit join table (jeremyevans) * Allow creation of new records that don't have primary keys when the cache is on (jeremyevans) (#213) * Make Model#initialize, Model#set, and Model#update_with_params invulnerable to memory exhaustion (jeremyevans) (#210) * Add Model.str_columns, which gives a list of columns as frozen strings (jeremyevans) * Remove pretty_table.rb from sequel, since it is in sequel_core (jeremyevans) * Set a timeout in the Sqlite adapter, default to 5 seconds (hrvoje.marjanovic) (#218) * Document that calling Sequel::ODBC::Database#execute manually requires you to manually drop the returned object (jeremyevans) (#217) * Paginating an already paginated/limited dataset now raises an error (jeremyevans) * Add support for PostgreSQL partial indexes (dlee) * Added support for arbitrary index types (including spatial indexes) (dlee) * Quote column names in SQL generated for SQLite (tmm1) * Deprecate Object#rollback! 
(jeremyevans) * Make some Dataset methods private (qualified_column_name, column_list, table_ref, source_list) (jeremyevans) * Deprecate Dataset methods #set_options, #set_row_proc, #remove_row_proc, and #clone_merge (jeremyevans) * Add Symbol#*, a replacement for Symbol#all (jeremyevans) * Deprecate including ColumnMethods in Object, include it in Symbol, String, and Sequel::SQL::Expression (jeremyevans) * Deprecate Symbol#method_missing, and #AS, #DESC, #ASC, #ALL, and #all from ColumnMethods (jeremyevans) * Fix table joining in MySQL (jeremyevans) * Deprecate Sequel.method_missing and Object#Sequel, add real Sequel.adapter methods (jeremyevans) * Move dataset methods applicable only to paginated datasets into Sequel::Dataset::Pagination (jeremyevans) * Make Sequel::Dataset::Sequelizer methods private (jeremyevans) * Deprecate Dataset#method_missing, add real mutation methods (e.g. filter!) (jeremyevans) * Fix connecting to an MSSQL server via ODBC using domain user credentials (jeremyevans) (#216) * No longer depend on the assistance gem, merge in the ConnectionPool and .blank methods (jeremyevans) * No longer depend on ParseTree, RubyInline, or ruby2ruby, but you still need them if you want to use the block filters (jeremyevans) * Fix JDBC adapter by issuing index things start at 1 (pdamer) * Fix connecting to a database via the ADO adapter (now requires options instead of URI) (timuckun, jeremyevans) (#204) * Support storing microseconds in postgres timestamp fields (schnarch...@rootimage.msu.edu) (#215) * Allow joining of multiple datasets, by making the table alias different for each dataset joined (jeremyevans) * SECURITY: Fix backslash escaping of strings (dlee) * Add ability to create a graph of objects from a query, with the result split into corresponding tables (jeremyevans) (#113) * Add attr_accessor for dataset row_proc (jeremyevans) * Don't redefine Dataset#each when adding a transform or row_proc (jeremyevans) * Remove array_keys.rb from sequel_core, it was partially broken (since the arrays came from hashes), and redefined Dataset#each (jeremyevans) * Fix MySQL default values insert (matt.binary) (#196) * Fix ODBC adapter improperly escaping date and timestamp values (leo.borisenko) (#165) * Fix renaming columns on MySQL with type :varchar (jeremyevans) (#206) * Add Sequel::SQL::Function#==, for comparing SQL Functions (jeremyevans) (#209) * Update Informix adapter to work with Ruby/Informix 0.7.0 (gerardo.santana@gmail.com) * Remove sequel_core's knowledge of Sequel::Model (jeremyevans) * Use "\n" instead of $/ (since $/ can be redefined in ways we do not want) (jeremyevans) === 1.4.0 (2008-04-08) * Don't mark a column as changed unless the new value is different from the current value (tamas.denes, jeremyevans) (#203). * Switch gem name from "sequel_model" to just "sequel", which required large version bump (jeremyevans). * Add :select option to many_to_many associations, default to selecting only the associated model table and not the join table (jeremyevans) (#208). * Add :reciprocal one_to_many association option, for setting corresponding many_to_one instance variable (jeremyevans). * Add eager loading implementation (jeremyevans). * Change *_to_many associations so that the all associations are considered :cache=>true (jeremyevans). * Fix associations with block arguments and :cache=>true (jeremyevans). * Merge 3 mysql patches from the bugtracker (mvyver) (#200, #201, #202). * Merge 2 postgresql patches from the bugtracker (a...@mellowtone.co.jp) (#211, 212). 
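For a rough sense of the eager loading added in 1.4.0, here is a minimal sketch; Artist and Album are hypothetical models with a one_to_many association, and the spelling shown is the present-day one:

  # Loads all artists and all of their albums in 2 queries,
  # instead of 1 query for artists plus 1 per artist (N+1)
  artists = Artist.eager(:albums).all
  artists.each do |artist|
    artist.albums.each { |album| puts album.name }
  end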
* Allow overriding of default postgres spec database via ENV['SEQUEL_PG_SPEC_DB'] (jeremyevans).
* Allow using the Sequel::Model as the first argument in a dataset join selection (jeremyevans) (#170).
* Add simple callback mechanism to make model eager loading implementation easier (jeremyevans).
* Added Sequel::Error::InvalidOperation class for invalid operations (#198).
* Implemented MySQL::Database#server_version (#199).
* Added spec configuration for MySQL socket file.
* Fixed transform with array tuples in postgres adapter.
* Changed spec configuration to Database objects instead of URIs in order to support custom options for spec databases.
* Renamed schema files.
* Fixed Dataset#from to work correctly with SQL functions (#193).

=== Previous to 1.4.0, Sequel model and Sequel core versioning differed, see the bottom of this file for the changelog to Sequel model prior to 1.4.0.

=== 1.3 (2008-03-08)

* Added configuration file for running specs (#186).
* Changed Database#drop_index to accept fixed arity (#173).
* Changed column definition sql to put UNSIGNED constraint before unique in order to satisfy MySQL (#171).
* Enhanced MySQL adapter to support LOAD DATA LOCAL INFILE, added compress option for mysql connection by default (#172).
* Fixed bug when inserting hashes in array tuples mode.
* Changed SQLite adapter to catch RuntimeError raised when executing a statement and raise an Error::InvalidStatement with the offending SQL and error message (#188).
* Added Error::InvalidStatement class.
* Fixed Dataset#reverse to not raise for unordered dataset (#189).
* Added Dataset#unordered method and changed #order to remove order if nil is specified (#190).
* Fixed reversing order of ASC expression (#164).
* Added support for :null => true option when defining table columns (#192).
* Fixed Symbol#method_missing to accept variable arity (#185).

=== 1.2.1 (2008-02-29)

* Added add_constraint and drop_constraint functionality to Database#alter_table (#182).
* Enhanced Dataset#multi_insert to accept datasets (#179).
* Added MySQL::Database#use method for switching database (#180).
* Enhanced Database.uri_to_options to accept uri strings (#178).
* Added Dataset#columns! method that always makes a roundtrip to the DB (#177).
* Added new Dataset#each_page method that iterates over all pages in the result set (#175).
* Added Dataset#reverse alias to Dataset#reverse_order (#174).
* Fixed Dataset#transform_load and #transform_save to create a transformed copy of the supplied hash instead of transforming it in place (#184).
* Implemented MySQL::Dataset#replace (#163).

=== 1.2 (2008-02-15)

* Added support for :varchar[100] like type declarations in #create_table.
* Fixed #rename_column in mysql adapter to support types like varchar(255) (#159).
* Added support for order and limit in DELETE statement in MySQL adapter (#160).
* Added checks to Dataset#multi_insert to prevent work if no values are given (#162).
* Override ruby2ruby implementation of Proc#to_sexp which leaks memory (#161).
* Added log option, help for sequel script (#157).

=== 1.1 (2008-02-15)

* Fixed Dataset#join_table to support joining of datasets (#156).
* Changed Dataset#empty? to use EXISTS condition instead of counting records, for much better performance (#158).
* Implemented insertion of multiple records in a single statement for postgres adapter. This feature is available only in postgres 8.2 and newer.
* Implemented Postgres::Database#server_version.
* Implemented Database#get, short for dataset.get(...).
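Dataset#get (and the Database#get shorthand above) returns a single value rather than a row; a minimal sketch, assuming a hypothetical items table:

  price = DB[:items].filter(:name => 'widget').get(:price)
  # equivalent to:
  # DB[:items].filter(:name => 'widget').select(:price).first[:price]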
* Refactored Dataset#multi_insert, added #import alias, added support for calling #multi_insert using array of columns and array of value arrays (thanks David Lee).
* Implemented Dataset#get, a replacement for select(column).first[column].
* Implemented Dataset#grep method, poor man's text search.

=== 1.0.10 (2008-02-13)

* Fixed Dataset#group_and_count to work inside a query block (#152).
* Changed datasets with transforms to automatically transform hash filters (#155).
* Changed Marshal stock transform to use Base64 encoding with backward-compatibility to support existing marshaled values (#154).
* Added support for inserting multiple records in a single statement using #multi_insert in MySQL adapter (#153).
* Added support for :slice option (same as :commit_every) in Dataset#multi_insert.
* Changed Dataset#all to accept opts and iteration block.

=== 1.0.9 (2008-02-10)

* Implemented Dataset#inspect and Database#inspect (#151).
* Added full-text searching for odbc_mssql adapter (thanks Joseph Love).
* Added AlterTableGenerator#add_full_text_index method.
* Implemented full_text indexing and searching for PostgreSQL adapter (thanks David Lee).
* Implemented full_text indexing and searching for MySQL adapter (thanks David Lee).
* Fixed Dataset#insert_sql to work with array subscript references (thanks Jim Morris).

=== 1.0.8 (2008-02-08)

* Added support for multiple choices in string matching expressions (#147).
* Renamed Dataset#clone_merge to Dataset#clone, works with or without options for merging (#148).
* Fixed MySQL::Database#<< method to always free the result in order to allow multiple calls in a row (#149). Same also for PostgreSQL adapter.

=== 1.0.7 (2008-02-05)

* Added support for conditional filters (using if else statements) inside block filters (thanks Kee).

=== 1.0.6 (2008-02-05)

* Removed code pollution introduced in revs 814, 817 (really bad patch, IMO).
* Fixed joining datasets using aliased tables (#140).
* Added support for additional field types in postgresql adapter (#146).
* Added support for date field types in postgresql adapter (#145).
* Fixed Dataset#count to work correctly for grouped datasets (#144).
* Added Dataset#select_more, Dataset#order_more methods (#129).

=== 1.0.5 (2008-01-25)

* Added support for instantiating models by using the load constructor method.

=== 1.0.4.1 (2008-01-24)

* Fixed bin/sequel to require sequel_model if available.

=== 1.0.4 (2008-01-24)

* Added Dataset#select_all method.
* Changed ODBC::Database to support connection using driver and database name, also added support for untitled columns in ODBC::Dataset (thanks Leonid Borisenko).
* Fixed MySQL adapter to correctly format foreign key definitions (#123).
* Changed MySQL::Dataset to allow HAVING clause on ungrouped datasets, and put HAVING clause before ORDER BY clause (#133).
* Changed Dataset#group_and_count to accept multiple columns (#134).
* Fixed database spec to open YAML file in binary mode (#131).
* Cleaned up gem spec (#132).
* Added Dataset#table_exists? convenience method.

=== 1.0.3 (2008-01-17)

* Added support for UNSIGNED constraint, used in MySQL? (#127).
* Implemented constraint definitions inside Database#create_table (sketch below).
* Fixed postgres adapter to define PGconn#async_exec as alias to #exec if not defined (for pure-ruby postgres driver).
* Added String#to_date. Updated mysql adapter to use String#to_date for mysql date types (thanks drfreeze).

=== 1.0.2 (2008-01-14)

* Removed ConnectionPool, NumericExtensions. Added dependency on assistance.
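A minimal sketch of the constraint definitions inside create_table noted under 1.0.3 above, with a hypothetical items table (shown with a present-day flavor of the generator API):

  DB.create_table(:items) do
    primary_key :id
    text :name
    integer :price
    check 'price >= 0'   # CHECK constraint defined inline
  end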
=== 1.0.1 (2008-01-12)

* Changed postgres adapter to quote column references using double quotes.
* Applied patch for oracle adapter: fix behavior of limit and offset, transactions, #table_exists?, #tables and additional specs (thanks Liming Lian #122).
* Allow for additional filters on a grouped dataset (#119 and #120).
* Changed mysql adapter to default to localhost if :host option is not specified (#114).
* Refactored Sequelizer to use Proc#to_sexp (method provided by r2r).
* Enhanced Database.connect to accept options with string keys, so it can now accept options loaded from YAML files. Database.connect also automatically converts :username option into :user for compatibility with existing YAML configuration files for AR and DataMapper.

=== 1.0.0.1 (2008-01-03)

* Changed MySQL adapter to support specifying socket option.
* Added support for limiting and paginating datasets with fixed SQL, gotten with DB#fetch (thanks Ruy Diaz).
* Added new Dataset#from_self method that returns a dataset selecting from the original dataset.

=== 1.0 (2008-01-02)

* Removed deprecated adapter stubs.
* Removed Sequel::Model() stub.
* Changed name to sequel_core.
* 100% code coverage.
* Fixed error behavior when sequel_model is not available.
* Fixed error behavior when parse_tree or ruby2ruby are not available.

=== 0.5.0.2 (2008-01-01)

* Fixed String#to_time to raise error correctly for invalid time stamps.
* Improved code coverage - now at 99.2%.

=== 0.5.0.1 (2007-12-31)

* Added a stub for Sequel::Model that auto-loads sequel_model.
* Changed Sequel.method_missing and Database.adapter_class to raise AdapterNotFound if an adapter could not be loaded.
* Fixed behavior of error trap in sequel command line tool.

=== 0.5 (2007-12-30)

* Removed model code into separate sub-project. Rearranged trunk into core, model and model_plugins.

=== 0.4.5 (2007-12-25)

* Added rdoc for new alter_table functionality (#109).
* Fixed update_sql with array sub-item keys (#110).
* Refactored model specs.
* Added Model#update as alias to #set.
* Refactored validations code. Renamed Model.validations? into Model.has_validations?.
* Added initial Model validations (thanks Lance Carlson).
* Added Database#set_column_default method (thanks Jim Morris.)
* Removed warning on uninitialized @transform value (thanks Jim Morris).

=== 0.4.4.2 (2007-12-20)

* Fixed parsing errors in Ruby 1.9.
* Fixed sync problem in connection_pool_spec.
* Changed String#to_time to raise Error::InvalidValue if Time.parse fails.
* Refactored sequel error classes.

=== 0.4.4.1 (2007-12-19)

* Fixed schema generation code to use field quoting and support adapter-specific literalization of default values (#108).

=== 0.4.4 (2007-12-17)

* Implemented Database#rename_table (#104).
* Fixed drop_index in mysql adapter (#103).
* Added ALTER TABLE specs for postgres, sqlite and mysql adapters. Added custom alter_table behavior for sqlite and mysql adapters (#101, #102).
* Added direct Database API for altering tables.
* Added Database#alter_table method with support for adding, dropping, renaming, modifying columns and adding and dropping indexes (sketch below).
* Added #unique schema method for defining unique indexes (thanks Dado).
* Implemented unfolding of #each calls inside sequelizer blocks (thanks Jim Morris).

=== 0.4.3 (2007-12-15)

* Fixed Dataset#update to accept strings (#98).
* Fixed Model.[] to raise for boolean argument (#97).
* Added Database#add_index method (thanks coda.hale).
* Added error reporting for filtering on comparison not in a block (thanks Jim Morris).
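The alter_table API from 0.4.4 above, as a minimal sketch against a hypothetical items table:

  DB.alter_table(:items) do
    add_column :category, :text     # add a new column
    drop_column :legacy_code        # drop an existing column
    rename_column :cost, :price    # rename a column
    add_index :category            # add an index
  end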
* Added support for inline index definition (thanks Dado).
* Added Database#create_table! method for forcibly creating a table (thanks Dado).
* Added support for using Dataset#update with block.
* Changed subscript access to use | operator.
* Fixed subscript access in sequelizer.
* Added support for subscript access using Symbol#/ operator.

=== 0.4.2.2 (2007-12-10)

* Improved code coverage.
* Fixed Dataset#count to work properly with datasets with fixed SQL (when using #fetch).
* Added Model.create_with_params method that filters the given parameters according to the model's columns (thanks Aman Gupta).

=== 0.4.2.1 (2007-12-09)

* Refactored and fixed Dataset#reverse_order to work with field quoting (thanks Christian).
* Fixed problem with field quoting in insert statements.
* Changed sequelizer code to silently fail on any error when requiring parsetree and ruby2ruby.
* Added Database#create_view, #create_or_replace_view and #drop_view methods. Also implemented Dataset#create_view and #create_or_replace_view convenience methods.
* Keep DRY by re-using Model#[]= from method_missing.
* Added Model.fetch alias for DB.fetch.set_model(Model).

=== 0.4.2 (2007-12-07)

* Implemented Model#save_changes.
* Extended Model#save to accept specific columns to update.
* Implemented experimental JDBC adapter.
* Added adapter skeleton as starting point for new adapters.
* Cleaned-up adapters and moved automatic requiring of 'sequel' to adapter stubs.

=== 0.4.1.3 (2007-12-05)

* Better plugin conventions.
* Added experimental OpenBase adapter.
* Fixed Sequel.<adapter> methods to accept options hash as well as database name. Fixed Sequel.connect to accept options hash as well as URI (Wayne).

=== 0.4.1.2 (2007-12-04)

* Added release rake task (using RubyForge).
* Changed Model.is to accept variable arity.
* Implemented plugin loading for model classes.
* Fixed odbc-mssql and odbc adapters (thanks Dusty.)
* Implemented odbc-mssql adapter (thanks Dusty.)

=== 0.4.1.1 (2007-11-27)

* Fixed #first and #last functionality in Informix::Dataset (thanks Gerardo Santana).

=== 0.4.1 (2007-11-25)

* Put adapter files in lib/sequel/adapters. Requiring sequel/ is now deprecated. Users can now just require 'sequel' and adapters are automagically loaded (#93).

=== 0.4.0 (2007-11-24)

* Reorganized lib directory structure.
* Added support for dbi-xxx URI schemes (#86).
* Fixed problem in Database#uri where setting the password would raise an error (#87).
* Improved Dataset#insert_sql to correctly handle string keys (#92).
* Improved error-handling for worker threads. Errors are saved to an array and are accessible through #errors (#91).
* Dataset#uniq/distinct can now accept a column list for DISTINCT ON clauses.
* Fixed Model.all.
* Fixed literalization of strings with escape sequences in postgres adapter (#90).
* Added support for literalizing BigDecimal values (#89).
* Fixed column qualification for joined datasets (thanks Christian).
* Implemented experimental informix adapter.

=== 0.3.4.1 (2007-11-10)

* Changed Dataset#select_sql to support queries without a FROM clause.

=== 0.3.4 (2007-11-10)

* Fixed MySQL adapter to allow calling stored procedures (thanks Sebastian).
* Changed Dataset#each to always return self.
* Fixed SQL functions without arguments in block filters.
* Implemented super-cool Symbol#cast_as method (sketch below).
* Fixed error message in command-line tool if failed to load adapter (#85).
* Refactored code relating to column references for better extendibility (#88).
* Tiny fix to Model#run_hooks.
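The Symbol#cast_as method added in 0.3.4 wrapped a column reference in an SQL CAST; a minimal sketch with a hypothetical items table (the present-day spelling is Sequel.cast):

  DB[:items].select(:price.cast_as(:integer)).all
  # roughly: SELECT CAST(price AS integer) FROM items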
=== 0.3.3 (2007-11-04)

* Revised code to generate SQL statements without trailing semicolons.
* Added Sequel::Worker implementation of a simple worker thread for asynchronous execution.
* Added spec for Oracle adapter.
* Fixed Oracle adapter to format INSERT statements without semicolons (thanks Liming Lian).
* Renamed the Array#keys alias to Array#columns instead of Array#fields.
* Renamed FieldCompositionMethods as ColumnCompositionMethods.
* Implemented Sequel::NumericExtensions to provide stuff like 30.days.ago.

=== 0.3.2 (2007-11-01)

* Added #to_column_name as alias to #to_field_name, #column_title as alias to #field_title.
* Added Dataset#interval method for getting interval between minimum/maximum values for a column.
* Fixed Oracle::Database#execute (#84).
* Added group_and_count as general implementation for count_by_xxx.
* Added count_by magic method.
* Added Dataset#range method for getting the minimum/maximum values for a column.
* Fixed timestamp translation in SQLite adapter (#83).
* Experimental DB2 adapter.
* Added Dataset#set as alias to Dataset#update.
* Removed long deprecated expressions.rb code.
* Better documentation.
* Implemented Dataset magic methods: order_by_xxx, group_by_xxx, filter_by_xxx, all_by_xxx, first_by_xxx, last_by_xxx.
* Changed Model.create and Model.new to accept a block.

=== 0.3.1 (2007-10-30)

* Typo fixes (#79).
* Added require 'yaml' to dataset.rb (#78).
* Changed postgres adapter to use the ruby-postgres library's type conversion if available (#76).
* Fixed string literalization in mysql adapter for strings with comment backslashes in them (#75).
* Fixed ParseTree dependency to work with version 2.0.0 and later (#74).
* foreign_key definitions now accept :key option for specifying the remote key (#73).
* Fixed Model#method_missing to not raise error for columns not in the table but for which a value exists (#77).
* New documentation for Model.
* Implemented Oracle adapter based on ruby-oci8 library.
* Implemented Model#pk_hash. Is it really necessary?
* Deprecated Model#pkey. Implemented better Model#pk method.
* Specs and docs for Model.one_to_one, Model.one_to_many macros.

=== 0.3.0.1 (2007-10-20)

* Changed Database#fetch to return a modified dataset.

=== 0.3 (2007-10-20)

* Added stock transforms to Dataset#transform. Refactored Model.serialize.
* Added Database#logger= method for setting the database logger object.
* Fixed Model.[] to act as shortcut to Model.find when a hash is given (#71).
* Added support for old and new decimal types in MySQL adapter, and updated MYSQL_TYPES with MySQL 5.0 constants (#72).
* Implemented Database#disconnect method for all adapters.
* Fixed small bug in ArrayKeys module.
* Implemented model caching by primary key.
* Separated Model.find and Model.[] functionality. Model.find takes a filter. Model.[] is strictly for finding by primary keys.
* Enhanced Dataset#first to accept a filter block. Model#find can also now accept a filter block.
* Changed Database#[] to act as shortcut to #fetch if a string is given (sketch below).
* Renamed Database#each to #fetch. If no block is given, the method returns an enumerator.
* Changed Dataset#join methods to correctly literalize values in join conditions (#70).
* Fixed #filter with ranges to correctly literalize field names (#69).
* Implemented Database#each method for quickly retrieving records with arbitrary SQL (thanks Aman Gupta).
* Fixed bug in postgres adapter where a LiteralString would be literalized as a regular String.
* Fixed SQLite insert with subquery (#68).
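A minimal sketch of Database#fetch and the string shortcut through Database#[] noted above, assuming a hypothetical items table:

  DB.fetch('SELECT * FROM items') { |row| p row }
  DB['SELECT * FROM items WHERE price > 10'].all   # same mechanism via #[]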
* Reverted back to hashes as default mode. Added Sequel.use_array_tuples and Sequel.use_hash_tuples methods.
* Fixed problem with arrays with keys when using #delete.
* Implemented ArrayKeys as substitute for ArrayFields.
* Added Dataset#each_hash method.
* Rewrote SQLite::Database#transaction to use sqlite3-ruby library implementation of transactions.
* Fixed Model.destroy_all to work correctly in cases where no before_destroy hook is defined and an after_destroy hook is defined.
* Restored Model.has_hooks? implementation.
* Changed Database#<< to strip comments and whitespace only when an array is given.
* Changed Schema::Generator#primary_key to accept calls with the type argument omitted.
* Hooks can now be prepended or appended by choice.
* Changed Model.subset to define filter method on the underlying dataset instead of the model class.
* Fixed Dataset#transform to work with array fields.
* Added Dataset#to_csv method.
* PrettyTable can now extract column names from arrayfields.
* Converted ado, dbi, odbc adapters to use arrayfields instead of hashes.
* Fixed composite key support.
* Fixed Dataset#insert_sql, update_sql to support array fields.
* Converted sqlite, mysql, postgres adapters to use arrayfields instead of hashes.
* Extended Dataset#from to auto alias sub-queries.
* Extended Dataset#from to accept hash for aliasing tables.
* Added before_update, after_update hooks.

=== 0.2.1.1 (2007-10-07)

* Added Date literalization to sqlite adapter (#60).
* Changed Model.serialize to allow calling it after the class is defined (#59).
* Fixed after_create hooks to allow calling save inside the hook (#58).
* Fixed MySQL quoting of sql functions (#57).
* Implemented rollback! global method for cancelling transactions in progress.
* Fixed =~ operator in Sequelizer.
* Fixed ODBC::Dataset#fetch_rows (thanks Dusty).
* Renamed Model.recreate_table to create_table!. recreate_table is deprecated and will issue a warning (#56).

=== 0.2.1 (2007-09-24)

* Added default implementation of Model.primary_key_hash.
* Fixed Sequel::Model() to set dataset for inherited classes.
* Rewrote Model.serialize to use Dataset#transform.
* Implemented Dataset#transform.
* Added gem spec for Windows (without ParseTree dependency).
* Added support for dynamic strings in Sequelizer (#49).
* Query branch merged into trunk.
* Implemented self-changing methods.
* Add support for ternary operator to Sequelizer.
* Fixed sequelizer to evaluate expressions if they don't involve symbols or literal strings.
* Added protection against using #each, #delete, #insert, #update inside query blocks.
* Improved Model#method_missing to deal with invalid attributes.
* Implemented Dataset#query (sketch below).
* Added Dataset#group_by as alias for Dataset#group.
* Added Dataset#order_by as alias for Dataset#order.
* More model refactoring. Added support for composite keys.
* Added Dataset#empty? method (#46).
* Fixed Symbol#to_field_name to support names with numbers and upper-case characters (#45).
* Added install_no_doc rake task.
* Partial refactoring of model code.
* Refactored dataset-model association and added Dataset#set_row_filter method.
* Added support for case-sensitive regexps to mysql adapter.
* Changed mysql adapter to support encoding option as well.
* Added charset/encoding option to postgres adapter.
* Implemented Model.serialize (thanks Aman Gupta.)
* Changed Model.create to INSERT DEFAULT VALUES instead of (id) VALUES (null) (brings back #41.)
* Fixed Model.new to work without arguments.
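Dataset#query from 0.2.1 above builds a dataset imperatively inside a block; a minimal sketch with a hypothetical items table (present-day Sequel ships this as the query extension):

  ds = DB[:items].query do
    select :name, :price
    where :active => true
    order :name
  end
  ds.all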
* Added Model.no_primary_key method to allow models without primary keys.
* Added Model#this method (#42 thanks Duane Johnson).
* Fixed Dataset#insert_sql to use DEFAULT VALUES clause if argument is an empty hash.
* Fixed Model.create to work correctly when no argument is passed (#41).

=== 0.2.0.2 (2007-09-07)

* Dataset#insert can now accept subqueries.
* Changed Migrator.apply to return the version.
* Changed Sequel::Model() to cache intermediate classes so descendant classes can be reopened (#39).
* Added :charset option to MySQL adapter (#40).
* Fixed Dataset#exclude to add parens around NOT expression (#38).
* Fixed use of sub-queries with all comparison operators in block filters (#38).
* Fixed arithmetic expressions in block filters to not be literalized.
* Changed Symbol#method_missing to return LiteralString.
* Changed PrettyTable to right-align numbers.
* Fixed Model.create_table (thanks Duane Johnson.)

=== 0.2.0.1 (2007-09-04)

* Improved support for invoking methods with inline procs inside block filters.

=== 0.2.0 (2007-09-02)

* Fixed Model.drop_table (thanks Duane Johnson.)
* Dataset#each can now return rows for arbitrary SQL by specifying :sql option.
* Added spec for postgres adapter.
* Fixed Model.method_missing to work with new SQL generation.
* Fixed #compare_expr to support regexps.
* Fixed postgres, mysql adapters to support regexps.
* More specs for block filters. Updated README.
* Added support for globals and $X macros in block filters.
* Fixed Sequelizer to not fail if ParseTree or Ruby2Ruby gems are missing.
* Renamed String#expr into String#lit (#expr should be deprecated in future versions).
* Renamed Sequel::ExpressionString into LiteralString.
* Fixed Symbol#[] to return an ExpressionString, so as not to be literalized.
* Renamed Dataset::Expressions to Dataset::Sequelizer.
* Renamed Expressions#format_re_expression to match_expr.
* Renamed Expressions#format_eq_expression to compare_expr.
* Added support for Regexp in MySQL adapter.
* Refactored Regexp expressions into a separate #format_re_expression method.
* Added support for arithmetic in proc filters.
* Added support for nested proc expressions, more specs.
* Added support for SQL function using symbols, e.g. :sum[:x].
* Fixed deadlock bug in ConnectionPool.
* Removed deprecated old expressions.rb.
* Rewrote Proc filter feature using ParseTree.
* Added support for additional functions on columns using Symbol#method_missing.
* Added support for supplying filter block to DB#[] method, to allow stuff like DB[:nodes] {:path =~ /^icex1/}.

=== 0.1.9.12 (2007-08-26)

* Added spec for PrettyTable.
* Added specs for Schema::Generator and Model (#36 thanks technoweenie).
* Fixed Sequel::Model.set_schema (#36 thanks technoweenie.)
* Added support for no options on Schema::Generator#foreign_key (#36 thanks technoweenie.)
* Implemented (restored?) Schema::Generator#primary_key_name (#36 thanks technoweenie.)
* Better spec code coverage.

=== 0.1.9.11 (2007-08-24)

* Changed Dataset#set_model to allow supplying additional arguments to the model's initialize method (#35). Thanks Sunny Hirai.

=== 0.1.9.10 (2007-08-22)

* Changed schema generation code to generate separate statements for CREATE TABLE and each CREATE INDEX (#34).
* Refactored Dataset::SQL#field_name for better support of different field quoting standards by specific adapters.
* Added #current_page_record_count for paginated datasets (sketch below).
* Removed Database#literal and included Dataset::SQL instead.
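Paginated datasets from this era worked roughly as follows; a minimal sketch with a hypothetical items table (pagination lives in an extension in present-day Sequel):

  page = DB[:items].paginate(2, 25)   # page 2, 25 records per page
  page.current_page_record_count      # records on this page
  page.page_count                     # total pages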
* Sequel::Dataset::SQL#field_name can now take a hash (as well as #select and any method that uses #field_name) for aliasing column names. E.g. DB[:test].select(:_qqa => 'Date').sql #=> 'SELECT _qqa AS Date FROM test'.
* Moved SingleThreadedPool to lib/sequel/connection_pool.rb.
* Changed SQLite::Dataset to return affected rows for #delete and #update (#33).
* ADO adapter: Added use of Enumerable for Recordset#Fields, playing it safe and moving to the first row before getting results, and changing the auto_increment constant to work for MSSQL.

=== 0.1.9.9 (2007-08-18)

* New ADO adapter by cdcarter (#31).
* Added automatic column aliasing to #avg, #sum, #min and #max (#30).
* Fixed broken Sequel::DBI::Dataset#fetch_rows (#29 thanks cdcarter.)

=== 0.1.9.8 (2007-08-15)

* Fixed DBI adapter.

=== 0.1.9.7 (2007-08-15)

* Added support for executing batch statements in sqlite adapter.
* Changed #current_page_record_range to return 0..0 for an invalid page.
* Fixed joining of aliased tables.
* Improved Symbol#to_field_name to prevent false positives.
* Implemented Dataset#multi_insert with :commit_every option.
* More docs for Dataset#set_model.
* Implemented automatic creation of convenience methods for each adapter (e.g. Sequel.sqlite etc.)

=== 0.1.9.6 (2007-08-13)

* Refactored schema definition code. Gets rid of famous primary_key problem as well as other issues (e.g. issue #22).
* Added #pagination_record_count, #page_range and #current_page_record_range for paginated datasets.
* Changed MySQL adapter to automatically reconnect (issue #26).
* Changed Sequel() to accept variable arity.
* Added :elements option to column definition, in order to support ENUM and SET types.

=== 0.1.9.5 (2007-08-12)

* Fixed migration docs.
* Removed dependency on PGconn in Schema class.

=== 0.1.9.4 (2007-08-11)

* Added Sequel.dbi convenience method for using DBI connection strings to open DBI databases.

=== 0.1.9.3 (2007-08-10)

* Added support for specifying field size in schema definitions (thanks Florian Assmann.)
* Added migration code based on work by Florian Assmann.
* Reintroduced metaid dependency. No need to keep a local copy of it.

=== 0.1.9.2 (2007-07-24)

* Removed metaid dependency. Re-factored requires in lib/sequel.rb.

=== 0.1.9.1 (2007-07-22)

* Improved robustness of MySQL::Dataset#field_name.
* Added Sequel.single_threaded= convenience method.

=== 0.1.9 (2007-07-21)

* Fixed #update_sql and #insert_sql to support field quoting by calling #field_name.
* Implemented automatic data type conversion in mysql adapter.
* Added support for boolean literals in mysql adapter.
* Added support for ORDER and LIMIT clauses in UPDATE statements in mysql adapter.
* Implemented correct field quoting (using back-ticks) in mysql adapter.
* Wrote basic MySQL spec.
* Fixed MySQL::Dataset to return correct data types with symbols as hash keys.
* Removed dysfunctional MySQL::Database#transaction.
* Added support for single threaded operation.
* Fixed bug in Dataset#format_eq_expression where Range objects would not be literalized correctly.
* Added parens around postgres LIKE expressions using regexps.

=== 0.1.8 (2007-07-10)

* Implemented Dataset#columns for retrieving the columns in the result set.
* Updated Model with changes to how model-associated datasets work.
* Beefed-up specs. Coverage is now at 95.0%.
* Added support for polymorphic datasets.
* The adapter dataset interface was simplified and standardized. Only four methods need be overridden: fetch_rows, update, insert and delete.
* The Dataset class was refactored. The bulk of the dataset code was moved into separate modules.
* Renamed Dataset#hash_column to Dataset#to_hash.
* Added some common pragmas to sqlite adapter.
* Added Postgres::Dataset#analyze for EXPLAIN ANALYZE queries.
* Fixed broken Postgres::Dataset#explain.

=== 0.1.7

* Removed db.synchronize wrapping calls in sqlite adapter.
* Implemented Model.join method to restrict returned columns to the model table (thanks Pedro Gutierrez).
* Implemented Dataset#paginate method.
* Fixed after_destroy hook.
* Improved Dataset#first and #last to accept a filter hash.
* Added Dataset#[]= method.
* Added Sequel() convenience method.
* Fixed Dataset#first to include a LIMIT clause for a single record.
* Small fix to Postgres driver to return a primary_key value for the inserted record if it is specified in the insertion values (thanks Florian Assmann and Pedro Gutierrez).
* Fixed Symbol#DESC to support qualified notation (thanks Pedro Gutierrez).

=== 0.1.6

* Fixed Model#method_missing to raise for an invalid attribute.
* Fixed PrettyTable to print model objects (thanks snok.)
* Fixed ODBC timestamp conversion to return DateTime rather than Time object (thanks snok.)
* Fixed Model.method_missing (thanks snok.)
* Model.method_missing now creates stubs for calling Model.dataset methods. Methods like Model.each etc. are removed.
* Changed default join type to INNER JOIN (thanks snok.)
* Added support for literal expressions, e.g. DB[:items].filter(:col1 => 'col2 - 10'.expr).
* Added Dataset#and.
* SQLite adapter opens a memory DB if no database is specified, e.g. Sequel.open 'sqlite:/'.
* Added Dataset#or, pretty nifty.

=== 0.1.5

* Fixed Dataset#join to support multiple joins. Added #left_outer_join, #right_outer_join, #full_outer_join, #inner_join methods.

=== 0.1.4

* Added String#split_sql.
* Implemented Array#to_sql and String#to_sql. Database#to_sql can now take an array of strings and convert into an SQL string. Comments and excessive white-space are removed.
* Improved Schema generator to support data types as method names:

    DB.create_table :test do
      integer :abc
      text :def
      ...
    end

* Implemented ODBC adapter.

=== 0.1.3

* Implemented DBI adapter.
* Refactored database connection code. Now handled through Database#connect.

=== 0.1.2

* The first opened database is automatically assigned to Model.db.
* Removed SequelConnectionError. Exception class errors are converted to RuntimeError.
* Added support for UNION, INTERSECT and EXCEPT set operations.
* Fixed Dataset#single_record to return nil if no record is found.
* Updated specs to conform to RSpec 1.0.
* Added Model#find_or_create method (sketch below).
* Fixed MySQL::Dataset#query_single (thanks Dries Harnie.)
* Added Model.subset method. Fixed Model.filter and Model.exclude to accept blocks.
* Added Database#uri method.
* Refactored and removed deprecated code in postgres adapter.

=== 0.1.1

* More documentation for Dataset.
* Added Dataset#size as alias to Dataset#count.
* Changed Database#<< to call execute (instead of being an alias). Thus it will work for descendants as well.
* Fixed Sequel.open to accept variable arity.
* Refactored Model#refresh, Model.create. Removed Model#reload.
* Refactored Model hooks.
* Cleaned up Dataset API.

=== 0.1.0

* Changed Database#create_table to only accept a block. Nobody's gonna use the other way.
* Removed Dataset#[]= method. Too confusing and not really useful.
* Fixed ConnectionPool#hold to wrap exceptions only once.
* Renamed Dataset#where_list to Dataset#expression_list.
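Model#find_or_create from 0.1.2 above, as a minimal sketch with a hypothetical Item model:

  # Returns the matching record, inserting it first if it does not exist
  item = Item.find_or_create(:name => 'widget')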
* Added support for qualified fields in Proc expressions (e.g. filter {items.id == 1}.) * Added like? and in? Proc expression operators. * Added require 'date' in dataset.rb. Is this a 1.8.5 thing? * Refactored Dataset to use literal strings instead of format strings (slight performance improvement and better readability.) * Added support for literalizing Date objects. * Refactored literalization of Time objects. === 0.0.20 * Refactored Dataset where clause construction to use expressions. * Implemented Proc expressions (adapted from a great idea by Sam Smoot.) * Fixed Model#map. * Documentation for ConnectionPool. * Specs for Database. === 0.0.19 * More specs for Dataset. * Fixed Dataset#invert_order to work correctly with strings. * Fixed Model#== to check equality of values. * Added Model#exclude and Model#order. * Fixed Dataset#order and Dataset#group to behave correctly when supplied with qualified field name symbols. * Removed Database#literal. Shouldn't have been there. * Added SQLite::Dataset#explain. Returns an array of opcode hashes. * Specs for ConnectionPool. === 0.0.18 * Implemented SequelError and SequelConnectionError classes. ConnectionPool#hold now catches any connection errors and reraises them as SequelConnectionError. * Removed duplication in Database#[]. * :from and :select options are now always arrays (patch by Alex Bradbury.) * Fixed Dataset#exclude to work correctly (patch and specs by Alex Bradbury.) === 0.0.17 * Fixed Postgres::Database#tables to return table names as symbols (caused problem when using Database#table_exists?). * Fixed Dataset#from to have variable arity, like Dataset#select and Dataset#where (patch by Alex Bradbury.) * Added support for GROUP BY and HAVING clauses (patches by Alex Bradbury.) Refactored Dataset#filter. * More specs. * Refactored Dataset#where for better composability. * Added Dataset#[]= method. * Added support for DISTINCT and OFFSET clauses (patches by Alex Bradbury.) Dataset#limit now accepts ranges. Added Dataset#uniq and distinct methods. === 0.0.16 * More documentation. * Added support for subqueries in Dataset#literal. * Added support for Model.all_by_XXX methods through Model.method_missing. * Added basic SQL logging to Database. * Added Enumerable#send_each convenience method. * Changed Dataset#destroy to return the number of deleted records. === 0.0.15 * Improved Dataset#insert_sql to allow arrays as well as hashes. * Database#drop_table now accepts a list of table names. * Added Model#id to return the id column. === 0.0.14 * Fixed Model's attribute accessors (hopefully for the last time). * Changed Model.db and Model.db= to allow different databases for different model classes. * Fixed bug in aggregate methods (max, min, etc.) for datasets using record classes. === 0.0.13 * Fixed Model#method_missing to do both find, filter and attribute accessors. duh. * Fixed bug in Dataset#literal when quoting arrays of strings (thanks Douglas Koszerek.) === 0.0.12 * Model#save now correctly performs an INSERT for new objects. * Added Model#reload for reloading an object from the database. * Added Dataset#naked method for getting a version of a dataset that fetches records as hashes. * Implemented attribute accessors for column values ala ActiveRecord models. * Fixed filtering using nil values (e.g. dataset.filter(:parent_id => nil)). === 0.0.11 * Renamed Model.schema to Model.set_schema and Model.get_schema to Model.schema. * Improved Model class to allow descendants of model classes (thanks Pedro Gutierrez.)
* Removed require 'postgres' in schema.rb (thanks Douglas Koszerek.) === 0.0.10 * Added some examples. * Added Dataset#print method for pretty-printing tables. === 0.0.9 * Fixed Postgres::Database#tables and #locks methods. * Added PGconn#last_insert_id method that should support all 7.x and 8.x versions of PostgreSQL. * Added Dataset#exists method for EXISTS where clauses. * Changed behavior of Dataset#literal to regard symbols as field names. * Refactored and DRY'd Dataset#literal and overrides thereof. Added support for subqueries in where clause. === 0.0.8 * Fixed Dataset#reverse_order to provide chainability. This method can be called without arguments to invert the current order or with arguments to provide a descending order. * Fixed literal representation of literals in SQLite adapter (thanks Christian Neukirchen!) * Refactored insert code in Postgres adapter (in preparation for fetching the last insert id for pre-8.1 versions). === 0.0.7 * Fixed bug in Model.schema, duh! === 0.0.6 * Added Dataset#sql as alias to Dataset#select_sql. * Dataset#where and Dataset#exclude can now be used for refining dataset conditions, enabling stuff like posts.where(:title => 'abcdef').exclude(:user_id => 3). * Implemented Dataset#exclude method. * Added Sequel::Schema#auto_primary_key method for setting an automatic primary key to be added to every table definition. Changed the schema generator to not define a primary key by default. * Changed Sequel::Database#table_exists? to rely on the tables method if it is available. * Implemented SQLite::Database#tables. === 0.0.5 * Added Dataset#[] method. Refactored Model#find and Model#[]. * Renamed Pool#conn_maker to Pool#connection_proc. * Added automatic require 'sequel' to all adapters for convenience. === 0.0.4 * Added preliminary MySQL support. * Code cleanup. === 0.0.3 * Added Dataset#sum method. * Added support for exclusive ranges (thanks Christian Neukirchen.) * Added sequel console for quick'n'dirty access to databases. * Fixed small bug in Dataset#qualified_field_name for better join support. === 0.0.2 * Added Sequel.open as alias to Sequel.connect. * Refactored Dataset#where_equal_condition into Dataset#where_condition, allowing arrays and ranges, e.g. posts.filter(:stamp => (3.days.ago)..(1.day.ago)), or posts.filter(:category => ['ruby', 'postgres', 'linux']). * Added Model#[]= method for changing column values and Model#save method for saving them. * Added Dataset#destroy for deleting each record individually as support for models. Renamed Model#delete to Model#destroy (and Model#destroy_all) ala ActiveRecord. * Refactored Dataset#first and Dataset#last code. These methods can now accept the number of records to fetch. === 0.0.1 * More documentation for Dataset. * Renamed Database#query to Database#dataset. * Added Dataset#insert_multiple for inserting multiple records. * Added Dataset#<< as shorthand for inserting records. * Added Database#<< method for executing arbitrary SQL. * Imported Sequel code. == Sequel::Model CHANGELOG 0.1 - 0.5.0.2 === 0.5.0.2 (2008-03-12) * More fixes for Model.associate to accept strings and symbols as class references. === 0.5.0.1 (2008-03-09) * Fixed Model.associate to accept class and class name in :class option. === 0.5 (2008-03-08) * Merged new associations branch into trunk. * Rewrote RDoc for associations. * Added has_and_belongs_to_many alias for many_to_many. * Added support for optional dataset block. * Added :order option to order association datasets.
* Added :cache option to return and cache array of objects for association. * Changed one_to_many, many_to_many associations to return dataset by default. * Added has_many, belongs_to aliases. * Refactored associations code. * Added deprecations for old-style relations. * Completed specs for new associations code. * New associations code by Jeremy Evans (replaces relations code.) === 0.4.2 (2008-02-29) * Fixed one_to_many implicit key to work correctly for namespaced classes (#167). * Fixed Model.db= to affect the underlying dataset (#183). * Fixed Model.implicit_table_name to disregard namespaces. === 0.4.1 (2008-02-10) * Implemented Model#inspect (#151). * Changed Model#method_missing to short-circuit and bypass checking #columns if the values hash already contains the relevant column (#150). * Updated to reflect changes in sequel_core (Dataset#clone_merge renamed to Dataset#clone). === 0.4 (2008-02-05) * Fixed Model#set to work with string keys (#143). * Fixed Model.create to correctly initialize instances marked as new (#135). * Fixed Model#initialize to convert string keys into symbol keys. This also fixes problem with validating objects initialized with string keys (#136). === 0.3.3 (2008-01-25) * Finalized support for virtual attributes. === 0.3.2.1 (2008-01-24) * Fixed Model.dataset to correctly set the dataset if using implicit naming or inheriting the superclass dataset (thanks celldee). === 0.3.2 (2008-01-24) * Added Model#update_with_params method with support for virtual attributes and auto-filtering of unrelated parameters, and changed Model.create_with_params to support virtual attributes (#128). * Cleaned up gem spec (#132). * Removed validations code. Now relying on validations in assistance gem. === 0.3.1 (2008-01-21) * Changed Model.dataset to use inflector to pluralize the class name into the table name. Works in similar fashion to table names in AR or DM. === 0.3 (2008-01-18) * Implemented Validatable::Errors class. * Added Model#reload as alias to Model#refresh. * Changed Model.create to accept a block (#126). * Rewrote validations. * Fixed Model#initialize to accept nil values (#115). === 0.2 (2008-01-02) * Removed deprecated Model.recreate_table method. * Removed deprecated :class and :on options from one_to_many macro. * Removed deprecated :class option from one_to_one macro. * Removed deprecated Model#pkey method. * Changed dependency to sequel_core. * Removed examples from sequel core. * Additional specs. We're now at 100% coverage. * Refactored hooks code. Hooks are now inheritable, and can be defined by supplying a block or a method name, or by overriding the hook instance method. Hook chains can now be broken by returning false (#111, #112). === 0.1 (2007-12-30) * Moved model code from sequel into separate model sub-project. sequel-5.63.0/doc/advanced_associations.rdoc000066400000000000000000001077641434214120600210610ustar00rootroot00000000000000= Advanced Associations Sequel::Model's association support is powerful and flexible, but it can be difficult for new users to understand what the support enables. This guide shows off some of the more advanced Sequel::Model association features. You should probably review the {Model Associations Basics and Options guide}[rdoc-ref:doc/association_basics.rdoc] before reviewing this guide. == Sequel::Model Eager Loading Sequel::Model offers two different ways to perform eager loading, +eager+ and +eager_graph+. +eager+ uses an SQL query per association, +eager_graph+ uses a single SQL query containing JOINs. 
Assuming the following associations: Artist.one_to_many :albums Album.one_to_many :tracks Track.many_to_one :lyric Let's say you wanted to load all artists and eagerly load the related albums, tracks, and lyrics. Artist.eager(albums: {tracks: :lyric}) # 4 Queries: # SELECT * FROM artists; # SELECT * FROM albums WHERE (artist_id IN (...)); # SELECT * FROM tracks WHERE (album_id IN (...)); # SELECT * FROM lyrics WHERE (id IN (...)); Artist.eager_graph(albums: {tracks: :lyric}) # 1 Query: # SELECT artists.id, artists.name, ... # albums.id AS albums_id, albums.name AS albums_name, ... # tracks.id AS tracks_id, tracks.name AS tracks_name, ... # lyric.id AS lyric_id, ... # FROM artists # LEFT OUTER JOIN albums ON (albums.artist_id = artists.id) # LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) # LEFT OUTER JOIN lyrics AS lyric ON (lyric.id = tracks.lyric_id); In general, the recommendation is to use +eager+ unless you have a reason to use +eager_graph+. +eager_graph+ is needed when you want to reference columns in an associated table. For example, if you want to order the loading of returned artists based on the names of the albums, you cannot do: Artist.eager(albums: {tracks: :lyric}).order{albums[:name]} because the initial query Sequel will use would be: # SELECT * FROM artists ORDER BY albums.name; and +albums+ is not a valid qualifier in such a query. In this situation, you must use +eager_graph+: Artist.eager_graph(albums: {tracks: :lyric}).order{albums[:name]} Whether +eager+ or +eager_graph+ performs better is association and database dependent. If you are concerned about performance, you should try benchmarking both cases with appropriate data to see which performs better. === Mixing eager and eager_graph Sequel offers the ability to mix +eager+ and +eager_graph+ when loading results. This can be done at the main level by calling both +eager+ and +eager_graph+ on the same dataset: Album.eager(:artist).eager_graph(:tracks) # 2 Queries: # SELECT albums.id, albums.name, ... # tracks.id AS tracks_id, tracks.name AS tracks_name, ... # FROM albums # LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id); # SELECT * FROM artists WHERE (id IN (...)); You can also use +eager+ to load initial associations, and +eager_graph+ to load remaining associations, by using +eager_graph+ in an eager load callback: Artist.eager(albums: {tracks: proc{|ds| ds.eager_graph(:lyric)}}) # 3 Queries: # SELECT * FROM artists; # SELECT * FROM albums WHERE (artist_id IN (...)); # SELECT tracks.id, tracks.name, ... # lyric.id AS lyric_id, ... # FROM tracks # LEFT OUTER JOIN lyrics AS lyric ON (lyric.id = tracks.lyric_id) # WHERE (tracks.album_id IN (...)); Using the +eager_graph_eager+ plugin, you can use +eager_graph+ to load the initial associations, and +eager+ to load the remaining associations. When you call +eager_graph_eager+, you must specify the dependency chain at which to start the eager loading via +eager+: Artist.plugin :eager_graph_eager Artist.eager_graph(albums: :tracks).eager_graph_eager([:albums, :tracks], :lyric) # 2 Queries: # SELECT artists.id, artists.name, ... # albums.id AS albums_id, albums.name AS albums_name, ... # tracks.id AS tracks_id, tracks.name AS tracks_name, ...
# FROM artists # LEFT OUTER JOIN albums ON (albums.artist_id = artists.id) # LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id); # SELECT * FROM lyrics WHERE (id IN (...)); These two approaches can also be nested, with +eager+ -> +eager_graph+ -> +eager+: Album.plugin :eager_graph_eager Artist.eager(albums: proc{|ds| ds.eager_graph(:tracks).eager_graph_eager([:tracks], :lyric)}) # 3 Queries: # SELECT * FROM artists; # SELECT albums.id, albums.name, ... # tracks.id AS tracks_id, tracks.name AS tracks_name, ... # FROM albums # LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) # WHERE (albums.artist_id IN (...)); # SELECT * FROM lyrics WHERE (id IN (...)); Or with 2 separate +eager_graph+ queries: Artist.eager_graph(:albums).eager_graph_eager([:albums], tracks: proc{|ds| ds.eager_graph(:lyric)}) # 2 Queries: # SELECT artists.id, artists.name, ... # albums.id AS albums_id, albums.name AS albums_name, ... # FROM artists # LEFT OUTER JOIN albums ON (albums.artist_id = artists.id); # SELECT tracks.id, tracks.name, ... # lyric.id AS lyric_id, ... # FROM tracks # LEFT OUTER JOIN lyrics AS lyric ON (lyric.id = tracks.lyric_id) # WHERE (tracks.album_id IN (...)); == Sequel::Model Association Loading Options There are a bunch of advanced association options that are available to handle more complex cases. First we'll go over some of the simpler ones: All associations take a block that can be used to further filter/modify the default dataset: Artist.one_to_many :gold_albums, class: :Album do |ds| ds.where{copies_sold > 500000} end There's also an :eager_block option if you want to use a different block when eager loading via Dataset#eager. There are many options for changing how the association is eagerly loaded via Dataset#eager_graph: :graph_join_type :: The type of join to do (:inner, :left, :right) :graph_conditions :: Additional conditions to put on join (needs to be a hash or array of all two pairs). Automatically assumes unqualified symbols or first element of the pair to be columns of the associated model, and unqualified symbols of the second element of the pair to be columns of the current model. :graph_block :: A block passed to +join_table+, allowing you to specify conditions other than equality, or to use OR, or set up any arbitrary condition. The block is passed the associated table alias, current table alias, and an array of previous joins clause objects. :graph_only_conditions :: Use these conditions instead of the standard association conditions. This is necessary when you don't want to have an equal condition between the foreign key and primary key of the tables. You can also use this to have a JOIN USING (array of symbols), or a NATURAL or CROSS JOIN (nil, with the appropriate :graph_join_type).
These can be used like this: # Makes Artist.eager_graph(:required_albums).all not return artists that # don't have any albums Artist.one_to_many :required_albums, class: :Album, graph_join_type: :inner # Makes sure all returned albums have the active flag set Artist.one_to_many :active_albums, class: :Album, graph_conditions: {active: true} # Only returns albums that have sold more than 500,000 copies Artist.one_to_many :gold_albums, class: :Album, graph_block: proc{|j,lj,js| Sequel[j][:copies_sold] > 500000} # Handles the case where the tables are associated by a case insensitive name string Artist.one_to_many :albums, key: :artist_name, graph_only_conditions: nil, graph_block: proc{|j,lj,js| {Sequel.function(:lower, Sequel[j][:artist_name])=>Sequel.function(:lower, Sequel[lj][:name])}} # Handles the case where both key columns have the name artist_name, and you want to use # a JOIN USING Artist.one_to_many :albums, key: :artist_name, graph_only_conditions: [:artist_name] One advantage of using +eager_graph+ is that you can easily filter/order on columns in an associated table on a per-query basis, using regular Sequel dataset methods. For example, if you only want to retrieve artists who have albums that start with A, and eager load just those albums, ordered by the album names, you can do: artists = Artist. eager_graph(:albums). where{Sequel.like(albums[:name], 'A%')}. order{albums[:name]}. all For lazy loading (e.g. Model[1].association), the :dataset option can be used to specify an arbitrary dataset (one that uses different keys, multiple keys, joins to other tables, etc.). == Custom Eager Loaders For eager loading via +eager+, the :eager_loader option can be used to specify how to eagerly load a complex association. This is an extremely powerful option. Though it can often be verbose (compared to other things in Sequel), it allows you complete control over how to eagerly load associations for a group of objects. :eager_loader should be a proc that takes a single hash argument, which will have at least the following keys: :id_map :: A mapping of key values to arrays of current model instances, usage described below :rows :: An array of model objects :associations :: A hash of dependent associations to eagerly load :self :: The dataset that is doing the eager loading :eager_block :: A dynamic callback for this eager load. Since you are given all of the records, you can do things like filter on associations that are specified by multiple keys, or do multiple queries depending on the content of the records (which would be necessary for polymorphic associations). Inside the :eager_loader proc, you should get the related objects and populate the associations cache for all objects in the array of records. The hash of dependent associations is available for you to cascade the eager loading down multiple levels, but it is up to you to use it. The id_map is a performance enhancement that is used by the default association loaders and is also available to you. It is a hash whose keys are foreign/primary key values, and whose values are arrays of current model objects having the foreign/primary key value associated with the key. This may be hard to visualize, so I'll give an example.
Let's say you have the following associations: Album.many_to_one :artist Album.one_to_many :tracks and the following three albums in the database: album1 = Album.create(artist_id: 3) # id: 1 album2 = Album.create(artist_id: 3) # id: 2 album3 = Album.create(artist_id: 2) # id: 3 If you try to eager load this dataset: Album.eager(:artist, :tracks).all Then the id_map provided to the artist :eager_loader proc would be: {3=>[album1, album2], 2=>[album3]} The artist id_map contains a mapping of artist_id values to arrays of album objects. Since both album1 and album2 have the same artist_id, they are both in the array related to that key. album3 has a different artist_id, so it is in a different array. Eager loading of artists is done by looking for any artist having one of the keys in the hash: artists = Artist.where(id: id_map.keys).all When the artists are retrieved, you can iterate over them, find entries with matching keys, and manually associate them to the albums: artists.each do |artist| # Find related albums using the artist_id_map if albums = id_map[artist.id] # Iterate over the albums albums.each do |album| # Manually set the artist association for each album album.associations[:artist] = artist end end end The id_map provided to the tracks :eager_loader proc would be: {1=>[album1], 2=>[album2], 3=>[album3]} Now the id_map contains a mapping of id values to arrays of album objects (in this case each array only has a single object, because id is the primary key). So when looking for tracks to eagerly load, you only need to look for ones that have an album_id with one of the keys in the hash: tracks = Track.where(album_id: id_map.keys).all When the tracks are retrieved, you can iterate over them, find entries with matching keys, and manually associate them to the albums: tracks.each do |track| if albums = id_map[track.album_id] albums.each do |album| album.associations[:tracks] << track end end end === Two basic example eager loaders Putting the code in the above examples together, you almost have enough for a basic working eager loader. The main thing that is missing is that you need to set initial values for the eagerly loaded associations. For the artist association, you need to initialize the values to nil: # rows here is the :rows entry in the hash passed to the eager loader rows.each{|album| album.associations[:artist] = nil} For the tracks association, you set the initial value to an empty array: rows.each{|album| album.associations[:tracks] = []} These are done so that if an album currently being loaded doesn't have an associated artist or any associated tracks, the lack of them will be cached, so calling the artist or tracks method on the album will not do another database lookup.
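For example, after such an eager load, an album row that had no associated artist or tracks answers from the cache (a sketch; +album+ stands for one of the loaded rows):

  album.associations[:artist] # nil, set by the eager loader
  album.artist                # nil, no new database query
  album.tracks                # [], no new database query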
So putting everything together, the artist eager loader looks like: Album.many_to_one :artist, eager_loader: (proc do |eo_opts| eo_opts[:rows].each{|album| album.associations[:artist] = nil} id_map = eo_opts[:id_map] Artist.where(id: id_map.keys).all do |artist| if albums = id_map[artist.id] albums.each do |album| album.associations[:artist] = artist end end end end) and the tracks eager loader looks like: Album.one_to_many :tracks, eager_loader: (proc do |eo_opts| eo_opts[:rows].each{|album| album.associations[:tracks] = []} id_map = eo_opts[:id_map] Track.where(album_id: id_map.keys).all do |track| if albums = id_map[track.album_id] albums.each do |album| album.associations[:tracks] << track end end end end) Now, these are both overly simplistic eager loaders that don't respect cascaded associations or any of the association options. But hopefully they both provide simple examples that you can more easily build and learn from, as the custom eager loaders described later in this page are more complex. Basically, the eager loading steps can be broken down into: 1. Set default association values (nil/[]) for each of the current objects. 2. Return just the related associated objects by filtering the associated class to include only rows with keys present in the id_map. 3. Iterate over the returned associated objects, indexing into the id_map using the foreign/primary key value in the associated object to get current values associated to that specific object. 4. For each of those current values, update the cached association value to include that specific object. Using the :eager_loader proc, you should be able to eagerly load all associations that can be eagerly loaded, even if Sequel doesn't natively support such eager loading. == Limited Associations Sequel supports specifying limits and/or offsets for associations: Artist.one_to_many :first_10_albums, class: :Album, order: :release_date, limit: 10 For retrieving the associated objects for a single object, this just uses a LIMIT: artist.first_10_albums # SELECT * FROM albums WHERE (artist_id = 1) LIMIT 10 === Eager Loading via eager However, if you want to eagerly load an association, you must use a different approach. Sequel has 4 separate strategies for dealing with such cases. The default strategy used on all databases is a UNION-based approach, which will submit multiple subqueries in a UNION query: Artist.where(id: [1,2]).eager(:first_10_albums).all # SELECT * FROM (SELECT * FROM albums WHERE (artist_id = 1) LIMIT 10) UNION ALL # SELECT * FROM (SELECT * FROM albums WHERE (artist_id = 2) LIMIT 10) This is the fastest way to load the associated objects on most databases, as long as there is an index on albums.artist_id. Without an index it is probably the slowest approach, so make sure you have an index on the key columns. If you cannot add an index, you'll want to manually specify the :eager_limit_strategy option as shown below.
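If you can add one, an index on the key column can be created like this (a sketch, assuming the albums table from the examples above):

  DB.add_index :albums, :artist_id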
On PostgreSQL, for *_one associations that don't use an offset, you can choose to use the distinct on strategy: Artist.one_to_one :first_album, class: :Album, order: :release_date, eager_limit_strategy: :distinct_on Artist.where(id: [1,2]).eager(:first_album).all # SELECT DISTINCT ON (albums.artist_id) * # FROM albums # WHERE (albums.artist_id IN (1, 2)) # ORDER BY albums.artist_id, release_date Otherwise, if the database supports window functions, you can choose to use the window function strategy: Artist.one_to_many :first_10_albums, class: :Album, order: :release_date, limit: 10, eager_limit_strategy: :window_function Artist.where(id: [1,2]).eager(:first_10_albums).all # SELECT * FROM ( # SELECT *, row_number() OVER (PARTITION BY albums.artist_id ORDER BY release_date) AS x_sequel_row_number_x # FROM albums # WHERE (albums.artist_id IN (1, 2)) # ) AS t1 # WHERE (x_sequel_row_number_x <= 10) Alternatively, you can use the :ruby strategy, which will fall back to retrieving all records, and then will slice the resulting array to get the first 10 after retrieval. === Dynamic Eager Loading Limits If you need to eager load variable numbers of records (with limits that aren't known at the time of the association definition), Sequel supports an :eager_limit dataset option that can be defined in an eager loading callback: Artist.one_to_many :albums Artist.where(id: [1, 2]).eager(albums: lambda{|ds| ds.order(:release_date).clone(eager_limit: 3)}).all # SELECT * FROM ( # SELECT *, row_number() OVER (PARTITION BY albums.artist_id ORDER BY release_date) AS x_sequel_row_number_x # FROM albums # WHERE (albums.artist_id IN (1, 2)) # ) AS t1 # WHERE (x_sequel_row_number_x <= 3) You can also customize the :eager_limit_strategy on a case-by-case basis by passing in that option in the same way: Artist.where(id: [1, 2]).eager(albums: lambda{|ds| ds.order(:release_date).clone(eager_limit: 3, eager_limit_strategy: :ruby)}).all # SELECT * FROM albums WHERE (albums.artist_id IN (1, 2)) ORDER BY release_date The :eager_limit and :eager_limit_strategy options currently only work when eager loading via #eager, not with #eager_graph. === Eager Loading via eager_graph_with_options When eager loading an association via eager_graph (which uses JOINs), the situation is similar. While the UNION-based strategy cannot be used as you don't know the records being eagerly loaded in advance, Sequel can use a variant of the other 3 strategies. By default it retrieves all records and then does the array slice in ruby. As eager_graph does not support options, to use an eager_graph limit strategy you have to use the eager_graph_with_options method with the :limit_strategy option.
The :distinct_on strategy uses DISTINCT ON in a subquery and JOINs that subquery: Artist.eager_graph_with_options(:first_album, limit_strategy: :distinct_on).all # SELECT artists.id, artists.name, first_album.id AS first_album_id, # first_album.name AS first_album_name, first_album.artist_id, # first_album.release_date # FROM artists # LEFT OUTER JOIN ( # SELECT DISTINCT ON (albums.artist_id) * # FROM albums # ORDER BY albums.artist_id, release_date # ) AS first_album ON (first_album.artist_id = artists.id) The :window_function approach JOINs to a nested subquery using a window function: Artist.eager_graph_with_options(:first_10_albums, limit_strategy: :window_function).all # SELECT artists.id, artists.name, first_10_albums.id AS first_10_albums_id, # first_10_albums.name AS first_10_albums_name, first_10_albums.artist_id, # first_10_albums.release_date # FROM artists # LEFT OUTER JOIN ( # SELECT id, name, artist_id, release_date # FROM ( # SELECT *, row_number() OVER (PARTITION BY albums.artist_id ORDER BY release_date) AS x_sequel_row_number_x # FROM albums # ) AS t1 WHERE (x_sequel_row_number_x <= 10) # ) AS first_10_albums ON (first_10_albums.artist_id = artists.id) The :correlated_subquery approach JOINs to a nested subquery using a correlated subquery: Artist.eager_graph_with_options(:first_10_albums, limit_strategy: :correlated_subquery).all # SELECT artists.id, artists.name, first_10_albums.id AS first_10_albums_id, # first_10_albums.name AS first_10_albums_name, first_10_albums.artist_id, # first_10_albums.release_date # FROM artists # LEFT OUTER JOIN ( # SELECT * # FROM albums # WHERE albums.id IN ( # SELECT t1.id # FROM albums AS t1 # WHERE (t1.artist_id = albums.artist_id) # ORDER BY release_date # LIMIT 10 # ) # ) AS first_10_albums ON (first_10_albums.artist_id = artists.id) The reason that Sequel does not automatically use the :distinct_on, :window_function or :correlated_subquery strategy for eager_graph is that it can perform much worse than the default of just doing the array slicing in ruby. If you are only using eager_graph to return a few records, it may be cheaper to get all of their associated records and filter them in ruby as opposed to computing the set of limited associated records for all rows. It's recommended to only use an eager_graph limit strategy if you have benchmarked it against the default behavior and found it is faster for your use case. === Filtering By Associations In order to return correct results, Sequel automatically uses a limit strategy when filtering by limited associations, if the database supports it. As in the eager_graph case, the UNION-based strategy doesn't work. Unlike in the eager and eager_graph cases, the array slicing in ruby approach does not work; you must use an SQL-based strategy. Sequel will select an appropriate default strategy based on the database you are using, and you can override it using the :filter_limit_strategy option.
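For example, to force a particular strategy when filtering by a limited association, you can set it when defining the association (a sketch reusing the first_10_albums association from above; choose a strategy your database supports):

  Artist.one_to_many :first_10_albums, class: :Album, order: :release_date,
    limit: 10, filter_limit_strategy: :window_function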
The :distinct_on strategy: Artist.where(first_album: Album[1]).all # SELECT * # FROM artists # WHERE (artists.id IN ( # SELECT albums.artist_id # FROM albums # WHERE ((albums.artist_id IS NOT NULL) AND (albums.id IN ( # SELECT DISTINCT ON (albums.artist_id) albums.id # FROM albums # ORDER BY albums.artist_id, release_date # )) AND (albums.id = 1)))) The :window_function strategy: Artist.where(first_10_albums: Album[1]).all # SELECT * # FROM artists # WHERE (artists.id IN ( # SELECT albums.artist_id # FROM albums # WHERE ((albums.artist_id IS NOT NULL) AND (albums.id IN ( # SELECT id FROM ( # SELECT albums.id, row_number() OVER (PARTITION BY albums.artist_id ORDER BY release_date) AS x_sequel_row_number_x # FROM albums # ) AS t1 # WHERE (x_sequel_row_number_x <= 10) # )) AND (albums.id = 1)))) The :correlated_subquery strategy: Artist.where(first_10_albums: Album[1]).all # SELECT * # FROM artists # WHERE (artists.id IN ( # SELECT albums.artist_id # FROM albums # WHERE ((albums.artist_id IS NOT NULL) AND (albums.id IN ( # SELECT t1.id # FROM albums AS t1 # WHERE (t1.artist_id = albums.artist_id) # ORDER BY release_date # LIMIT 10 # )) AND (albums.id = 1)))) Note that filtering by limited associations does not work on MySQL, as MySQL does not support any of the strategies. It's also not supported when using composite keys on databases that don't support window functions and don't support multiple columns in IN. === Additional Association Types While the above examples for limited associations showed one_to_many and one_to_one associations, it's just because those are the simplest examples. Sequel supports all of the same features for many_to_many and one_through_one associations that are enabled by default, as well as the many_through_many and one_through_many associations that are added by the many_through_many plugin. == More advanced association examples === Association extensions All associations come with an association_dataset method that can be further filtered or otherwise modified: class Author < Sequel::Model one_to_many :authorships end Author.first.authorships_dataset.where{number < 10}.first You can extend a dataset with a module using the :extend association option. You can reference the model object that created the association dataset via the dataset's +model_object+ method, and the related association reflection via the dataset's +association_reflection+ method: module FindOrCreate def find_or_create(vals) first(vals) || model.create(vals.merge(association_reflection[:key]=>model_object.id)) end end class Author < Sequel::Model one_to_many :authorships, extend: FindOrCreate end Author.first.authorships_dataset.find_or_create(name: 'Blah', number: 10) === many_to_many associations through model tables The many_to_many association can be used even when the join table is a table used for a model. The only requirement is the join table has foreign keys to both the current model and the associated model. Anytime there is a one_to_many association from model A to model B, and model B has a many_to_one association to model C, you can use a many_to_many association from model A to model C.
class Author < Sequel::Model one_to_many :authorships many_to_many :books, join_table: :authorships end class Authorship < Sequel::Model many_to_one :author many_to_one :book end @author = Author.first @author.books === many_to_many for three-level associations You can even use a many_to_many association between model A and model C if model A has a one_to_many association to model B, and model B has a one_to_many association to model C. You just need to use the appropriate :right_key and :right_primary_key options. And in the reverse direction from model C to model A, you can use a one_through_one association using the :left_key and :left_primary_key options. class Firm < Sequel::Model one_to_many :clients many_to_many :invoices, join_table: :clients, right_key: :id, right_primary_key: :client_id end class Client < Sequel::Model many_to_one :firm one_to_many :invoices end class Invoice < Sequel::Model many_to_one :client one_through_one :firm, join_table: :clients, left_key: :id, left_primary_key: :client_id end Firm.first.invoices Invoice.first.firm To handle cases where there are multiple join tables, you can use the many_through_many plugin that ships with Sequel. === Polymorphic Associations Sequel discourages the use of polymorphic associations, which is the reason they are not supported by default. All polymorphic associations can be made non-polymorphic by using additional tables and/or columns instead of having a column containing the associated class name as a string. Polymorphic associations break referential integrity and are significantly more complex than non-polymorphic associations, so their use is not recommended unless you are stuck with an existing design that uses them. If you must use them, look for the sequel_polymorphic external plugin, as it makes using polymorphic associations in Sequel about as easy as it is in ActiveRecord. 
However, here's how they can be done using Sequel's custom associations (the sequel_polymorphic external plugin is just a generic version of this code): class Asset < Sequel::Model many_to_one :attachable, reciprocal: :assets, setter: (lambda do |attachable| self[:attachable_id] = (attachable.pk if attachable) self[:attachable_type] = (attachable.class.name if attachable) end), dataset: (proc do klass = attachable_type.constantize klass.where(klass.primary_key=>attachable_id) end), eager_loader: (lambda do |eo| id_map = {} eo[:rows].each do |asset| asset.associations[:attachable] = nil ((id_map[asset.attachable_type] ||= {})[asset.attachable_id] ||= []) << asset end id_map.each do |klass_name, id_map| klass = klass_name.constantize klass.where(klass.primary_key=>id_map.keys).all do |attach| id_map[attach.pk].each do |asset| asset.associations[:attachable] = attach end end end end) end class Post < Sequel::Model one_to_many :assets, key: :attachable_id, reciprocal: :attachable, conditions: {attachable_type: 'Post'}, adder: lambda{|asset| asset.update(attachable_id: pk, attachable_type: 'Post')}, remover: lambda{|asset| asset.update(attachable_id: nil, attachable_type: nil)}, clearer: lambda{assets_dataset.update(attachable_id: nil, attachable_type: nil)} end class Note < Sequel::Model one_to_many :assets, key: :attachable_id, reciprocal: :attachable, conditions: {attachable_type: 'Note'}, adder: lambda{|asset| asset.update(attachable_id: pk, attachable_type: 'Note')}, remover: lambda{|asset| asset.update(attachable_id: nil, attachable_type: nil)}, clearer: lambda{assets_dataset.update(attachable_id: nil, attachable_type: nil)} end @asset.attachable = @post @asset.attachable = @note === Joining on multiple keys Let's say you have two tables that are associated with each other with multiple keys. This can be handled using Sequel's built in composite key support for associations: # Both of these models have album_id, number, and disc_number fields. # All FavoriteTracks have an associated track, but not all tracks have an # associated favorite track class Track < Sequel::Model many_to_one :favorite_track, key: [:disc_number, :number, :album_id], primary_key: [:disc_number, :number, :album_id] end class FavoriteTrack < Sequel::Model one_to_one :tracks, key: [:disc_number, :number, :album_id], primary_key: [:disc_number, :number, :album_id] end === Tree - All Ancestors and Descendants Let's say you want to store a tree relationship in your database; it's pretty simple: class Node < Sequel::Model many_to_one :parent, class: self one_to_many :children, key: :parent_id, class: self end You can easily get a node's parent with node.parent, and a node's children with node.children. You can even eager load the relationship up to a certain depth: # Eager load three generations of children for a given node Node.where(id: 1).eager(children: {children: :children}).all.first # Load parents and grandparents for a group of nodes Node.where{id < 10}.eager(parent: :parent).all What if you want to get all ancestors up to the root node, or all descendants, without knowing the depth of the tree?
class Node < Sequel::Model many_to_one :ancestors, class: self, eager_loader: (lambda do |eo| # Handle cases where the root node has the same parent_id as primary_key # and also when it is NULL non_root_nodes = eo[:rows].reject do |n| if [nil, n.pk].include?(n.parent_id) # Make sure root nodes have their parent association set to nil n.associations[:parent] = nil true else false end end unless non_root_nodes.empty? id_map = {} # Create a map of parent_ids to nodes that have that parent id non_root_nodes.each{|n| (id_map[n.parent_id] ||= []) << n} # Doesn't cause an infinite loop, because when only the root node # is left, this is not called. Node.where(id: id_map.keys).eager(:ancestors).all do |node| # Populate the parent association for each node id_map[node.pk].each{|n| n.associations[:parent] = node} end end end) many_to_one :descendants, eager_loader: (lambda do |eo| id_map = {} eo[:rows].each do |n| # Initialize an empty array of child associations for each parent node n.associations[:children] = [] # Populate identity map of nodes id_map[n.pk] = n end # Doesn't cause an infinite loop, because the :eager_loader is not called # if no records are returned. Exclude id = parent_id to avoid infinite loop # if the root node is one of the returned records and it has parent_id = id # instead of parent_id = NULL. Node.where(parent_id: id_map.keys).exclude(id: :parent_id).eager(:descendants).all do |node| # Get the parent from the identity map parent = id_map[node.parent_id] # Set the child's parent association to the parent node.associations[:parent] = parent # Add the child association to the array of children in the parent parent.associations[:children] << node end end) end Note that Sequel ships with an rcte_tree plugin that does all of the above and more: class Node < Sequel::Model plugin :rcte_tree end === Joining multiple keys to a single key, through a third table Let's say you have a database of songs, lyrics, and artists. Each song may or may not have a lyric (most songs are instrumental). The lyric can be associated to an artist in each of four ways: composer, arranger, vocalist, or lyricist. These may all be the same, or they could all be different, and none of them are required. The songs table has a lyric_id field to associate it to the lyric, and the lyric table has four fields to associate it to the artist (composer_id, arranger_id, vocalist_id, and lyricist_id). What if you want to get all songs for a given artist, ordered by the song's name, with no duplicates? class Artist < Sequel::Model one_to_many :songs, order: Sequel[:songs][:name], dataset: proc{Song.select_all(:songs).join(:lyrics, id: :lyric_id, id=>[:composer_id, :arranger_id, :vocalist_id, :lyricist_id])}, eager_loader: (lambda do |eo| h = eo[:id_map] ids = h.keys eo[:rows].each{|r| r.associations[:songs] = []} Song.select_all(:songs). select_append{[lyrics[:composer_id], lyrics[:arranger_id], lyrics[:vocalist_id], lyrics[:lyricist_id]]}. join(:lyrics, id: :lyric_id){Sequel.or(composer_id: ids, arranger_id: ids, vocalist_id: ids, lyricist_id: ids)}.
order{songs[:name]}.all do |song| [:composer_id, :arranger_id, :vocalist_id, :lyricist_id].each do |x| recs = h[song.values.delete(x)] recs.each{|r| r.associations[:songs] << song} if recs end end eo[:rows].each{|r| r.associations[:songs].uniq!} end) end === Statistics Associations (Sum of Associated Table Column) In addition to getting associated records, you can use Sequel's association support to get aggregate information for columns in associated tables (sums, averages, etc.). Let's say you have a database with projects and tickets. A project can have many tickets, and each ticket has a number of hours associated with it. You can use the association support to create a Project association that gives the sum of hours for all associated tickets. class Project < Sequel::Model one_to_many :tickets many_to_one :ticket_hours, read_only: true, key: :id, dataset: proc{Ticket.where(project_id: id).select{sum(hours).as(hours)}}, eager_loader: (lambda do |eo| eo[:rows].each{|p| p.associations[:ticket_hours] = nil} Ticket.where(project_id: eo[:id_map].keys). select_group(:project_id). select_append{sum(hours).as(hours)}. all do |t| p = eo[:id_map][t.values.delete(:project_id)].first p.associations[:ticket_hours] = t end end) # The association method returns a Ticket object with a single aggregate # sum-of-hours value, but you want it to return an Integer/Float of just the # sum of hours, so you call super and return just the sum-of-hours value. # This works for both lazy loading and eager loading. def ticket_hours if s = super s[:hours] end end end class Ticket < Sequel::Model many_to_one :project end Note that it is often better to use a sum cache instead of this approach. You can implement a sum cache using +after_create+, +after_update+, and +after_delete+ hooks, or preferably using a database trigger. sequel-5.63.0/doc/association_basics.rdoc000066400000000000000000002047361434214120600203720ustar00rootroot00000000000000= Association Basics This guide is based on http://guides.rubyonrails.org/association_basics.html == Why Associations? Associations exist to simplify code that deals with related rows in separate database tables. Without associations, if you had classes such as: class Artist < Sequel::Model end class Album < Sequel::Model end And you wanted to get all of the albums for a given artist (assuming each album was associated with only one artist): Album.where(artist_id: @artist.id).all Or maybe you want to add an album for a given artist: Album.create(artist_id: @artist.id, name: 'RF') With associations, you can make the above code simpler, by setting up associations between the two models: class Artist < Sequel::Model one_to_many :albums end class Album < Sequel::Model many_to_one :artist end Then, the code to retrieve albums related to the artist is simpler: @artist.albums As is the code to add a related album to an artist: @artist.add_album(name: 'RF') It also makes it easier to create queries that use joins based on the association: Artist.association_join(:albums) # SELECT * FROM artists # INNER JOIN albums ON (albums.artist_id = artists.id) == The Types of Associations Sequel has five different association types built in: * many_to_one * one_to_many * one_to_one * many_to_many * one_through_one It ships with additional association types via plugins. === many_to_one The many_to_one association is used when the table for the current class contains a foreign key that references the primary key in the table for the associated class. 
It is named 'many_to_one' because there can be many rows in the current table for each row in the associated table. # Database schema: # albums artists # :id /--> :id # :artist_id --/ :name # :name class Album # Uses singular form of associated model name many_to_one :artist end === one_to_many and one_to_one The one_to_many association is used when the table for the associated class contains a foreign key that references the primary key in the table for the current class. It is named 'one_to_many' because for each row in the current table there can be many rows in the associated table. The one_to_one association can be thought of as a subset of the one_to_many association, but where there can only be either 0 or 1 records in the associated table. This is useful if there is a unique constraint on the foreign key field in the associated table. It's also useful if you want to impose an order on the association and just want the first record returned. # Database schema: # artists albums # :id <----\ :id # :name \----- :artist_id # :name class Artist # Uses plural form of associated model name one_to_many :albums # Uses singular form of associated model name one_to_one :album end === many_to_many and one_through_one The many_to_many association allows each row in the current table to be associated to many rows in the associated table, and each row in the associated table to many rows in the current table, by using a join table to associate the two tables. The one_through_one association can be thought of as a subset of the many_to_many association, but where there can only be 0 or 1 records in the associated table. This is useful if there is a unique constraint on the foreign key in the join table that references the current table. It's also useful if you want to impose an order on the association and just want the first record returned. The one_through_one association is so named because it sets up a one-to-one association through a single join table. # Database schema: # albums # :id <----\ # :name \ albums_artists # \---- :album_id # artists /---- :artist_id # :id <-----/ # :name class Artist # Uses plural form of associated model name many_to_many :albums # Uses singular form of associated model name one_through_one :album end === Differences Between many_to_one and one_to_one If you want to set up a 1-1 relationship between two models, where the foreign key in one table references the associated table directly, you have to use many_to_one in one model, and one_to_one in the other model. How do you know which to use in which model? The simplest way to remember is that the model whose table has the foreign key uses many_to_one, and the other model uses one_to_one: # Database schema: # artists albums # :id <----\ :id # :name \----- :artist_id # :name class Artist one_to_one :album end class Album many_to_one :artist end == Most Common Options === :key The :key option must be used if the default column symbol that Sequel would use is not the correct column. For example: class Album # Assumes :key is :artist_id, based on association name of :artist many_to_one :artist end class Artist # Assumes :key is :artist_id, based on class name of Artist one_to_many :albums end However, if your schema looks like: # Database schema: # artists albums # :id <----\ :id # :name \----- :artistid # Note missing underscore # :name Then the default :key option will not be correct.
To fix this, you need to specify an explicit :key option: class Album many_to_one :artist, key: :artistid end class Artist one_to_many :albums, key: :artistid end For many_to_many associations, the :left_key and :right_key options can be used to specify the column names in the join table, and the :join_table option can be used to specify the name of the join table: # Database schema: # albums # :id <----\ # :name \ albumsartists # \---- :albumid # artists /---- :artistid # :id <-----/ # :name class Artist # Note that :left_key refers to the foreign key pointing to the # current table, and :right_key the foreign key pointing to the # associated table. many_to_many :albums, left_key: :artistid, right_key: :albumid, join_table: :albumsartists end class Album many_to_many :artists, left_key: :albumid, right_key: :artistid, join_table: :albumsartists end === :class If the class of the association cannot be guessed directly by looking at the association name, you need to specify it via the :class option. For example, if you have two separate foreign keys in the albums table that both point to the artists table, maybe to indicate one artist is the vocalist and one is the composer, you'd have to use the :class option: # Database schema: # artists albums # :id <----\ :id # :name \----- :vocalist_id # \---- :composer_id # :name class Album many_to_one :vocalist, class: :Artist many_to_one :composer, class: :Artist end class Artist one_to_many :vocalist_albums, class: :Album, key: :vocalist_id one_to_many :composer_albums, class: :Album, key: :composer_id end == Self-referential Associations Self-referential associations are easy to handle in Sequel. The simplest example is a tree structure: # Database schema: # nodes # :id <--\ # :parent_id ---/ # :name class Node many_to_one :parent, class: self one_to_many :children, key: :parent_id, class: self end For many_to_many self-referential associations, it's fairly similar. Here's an example of a directed graph: # Database schema: # nodes edges # :id <----------- :successor_id # :name \----- :predecessor_id class Node many_to_many :direct_predecessors, left_key: :successor_id, right_key: :predecessor_id, join_table: :edges, class: self many_to_many :direct_successors, right_key: :successor_id, left_key: :predecessor_id, join_table: :edges, class: self end == Methods Added When you create an association, it's going to add instance methods to the class related to the association. All associations are going to have an instance method added with the same name as the association: @artist.albums @album.artists many_to_one and one_to_one associations will also have a setter method added to change the associated object: @album.artist = Artist.create(name: 'YJM') many_to_many and one_to_many associations will have three methods added: add_* :: to associate an object to the current object remove_* :: to disassociate an object from the current object remove_all_* :: to dissociate all currently associated objects Examples: @artist.add_album(@album) @artist.remove_album(@album) @artist.remove_all_albums Note that the remove_all_* method does not call remove hooks defined on the association; it just issues a single query to the database.
If you want to remove all associated objects and call remove hooks, iterate over the array of associated objects and call remove_* for each: @artist.albums.each do |album| @artist.remove_album(album) end == Caching Associations are cached after being retrieved: @artist.album # Not cached - Database Query @artist.album # Cached - No Database Query @album.artists # Not cached - Database Query @album.artists # Cached - No Database Query You can choose to ignore the cached versions and do a database query to retrieve results by passing a reload: true option to the association method: @album.artists # Not cached - Database Query @album.artists # Cached - No Database Query @album.artists(reload: true) # Ignore cache - Database Query If you reload/refresh the object, it will automatically clear the associations cache for the object: @album.artists # Not cached - Database Query @album.artists # Cached - No Database Query @album.reload @album.artists # Not cached - Database Query If you want direct access to the associations cache, use the associations instance method: @album.associations # {} @album.associations[:artists] # nil @album.artists # [<Artist object>, ...] @album.associations[:artists] # [<Artist object>, ...] === Code Reloading When declaring associations, Sequel caches association metadata in the association reflection. If you're doing any code reloading that doesn't involve restarting the related process, you should disable caching of the association reflection, to avoid stale model classes still being referenced after reloading: Sequel::Model.cache_associations = false == Dataset Method In addition to the above methods, associations also add an instance method ending in +_dataset+ that returns a dataset representing the objects in the associated table: @album.artist_id # 10 @album.artist_dataset # SELECT * FROM artists WHERE (id = 10) LIMIT 1 @artist.id # 20 @artist.albums_dataset # SELECT * FROM albums WHERE (artist_id = 20) The association dataset is just like any other Sequel dataset, in that it can be further filtered, ordered, etc.: @artist.albums_dataset. where(Sequel.like(:name, 'A%')). order(:copies_sold). limit(10) # SELECT * FROM albums # WHERE ((artist_id = 20) AND (name LIKE 'A%' ESCAPE '\')) # ORDER BY copies_sold LIMIT 10 Records retrieved using the +_dataset+ method are not cached in the associations cache. @album.artists_dataset.all # [<Artist object>, ...] @album.associations[:artists] # nil == Dynamic Association Modification Similar to the +_dataset+ method, you can provide a block to the association method to customize the dataset that will be used to retrieve the records. So you can apply a filter in either of these two ways: @artist.albums_dataset.where(Sequel.like(:name, 'A%')) @artist.albums{|ds| ds.where(Sequel.like(:name, 'A%'))} While they both apply the same filter, using the +_dataset+ method does not apply any of the association callbacks or handle association reciprocals (see below for details about callbacks and reciprocals). Using a block instead handles all those things, and also caches its results in the associations cache (ignoring any previously cached value). == Filtering By Associations In addition to using the association method to get associated objects, you can also use associated objects in filters.
For example, to get all albums for a given artist, you would usually do: @artist.albums # or @artist.albums_dataset for a dataset You can also do the following: Album.where(artist: @artist).all # or leave off the .all for a dataset For filtering by a single association, this isn't very useful. However, unlike using the association method, using a filter allows you to filter by multiple associations: Album.where(artist: @artist, publisher: @publisher) This will return all albums by that artist and published by that publisher. This isn't possible using just the association method approach, though you can combine the approaches: @artist.albums_dataset.where(publisher: @publisher) This doesn't just work for +many_to_one+ associations; it also works for the other associations: Album.one_to_one :album_info # The album related to that AlbumInfo instance Album.where(album_info: AlbumInfo[2]) Album.one_to_many :tracks # The album related to that Track instance Album.where(tracks: Track[3]) Album.many_to_many :tags # All albums related to that Tag instance Album.where(tags: Tag[4]) Album.one_through_one :tag # All albums related to that Tag instance Album.where(tag: Tag[4]) Note that for +one_to_many+ and +many_to_many+ associations, you still use the plural form even though only a single model object is given. You can also exclude by associations: Album.exclude(artist: @artist).all This will return all albums not by that artist. You can also provide an array with multiple model objects: Album.where(artist: [@artist1, @artist2]).all Similar to using an array of integers or strings, this will return all albums whose artist is one of those two artists. You can also use +exclude+ if you want all albums not by either of those artists: Album.exclude(artist: [@artist1, @artist2]).all If you are using a +one_to_many+ or +many_to_many+ association, you may want to return records that match all of multiple associated records, instead of matching any of them. For example: Album.where(tags: [@tag1, @tag2]) This matches albums that are associated with either @tag1 or @tag2 or both. If you only want ones that are associated with both, you can use separate filter calls: Album.where(tags: @tag1).where(tags: @tag2) Or the array form of condition specifiers: Album.where([[:tags, @tag1], [:tags, @tag2]]) These will return albums associated with both @tag1 and @tag2. You can also provide a dataset value when filtering by associations: Album.where(artist: Artist.where(Sequel.like(:name, 'A%'))).all This will return all albums whose artist starts with 'A'. Like the other forms, this can be inverted: Album.exclude(artist: Artist.where(Sequel.like(:name, 'A%'))).all This will return all albums whose artist does not start with 'A'. Filtering by associations even works for associations that have conditions added via the :conditions option or a block: Album.many_to_many :popular_tags, clone: :tags do |ds| ds.where{times_used > 1000} end Album.where(popular_tags: [@tag1, @tag2]) This will return all albums whose popular tags would include at least one of those tags. Note that filtering by associations does not work for associations that use blocks with instance-specific code. == Name Collisions Because associations create instance methods, it's possible to override existing instance methods if you name an association the same as an existing method. For example, values and associations would be bad association names. == Database Schema Creating an association doesn't modify the database schema.
== Database Schema

Creating an association doesn't modify the database schema. Sequel assumes your associations reflect the existing database schema. If not, you should modify your schema before creating the associations.

=== many_to_one/one_to_many

For example, for the following model code:

  class Album
    many_to_one :artist
  end
  class Artist
    one_to_many :albums
  end

You probably want the following database schema:

  # albums             artists
  #  :id          /---> :id
  #  :artist_id --/     :name
  #  :name

Which could be created using the following Sequel code:

  DB.create_table(:artists) do
    # Primary key must be set explicitly
    primary_key :id
    String :name, null: false, unique: true
  end

  DB.create_table(:albums) do
    primary_key :id
    # Table that foreign key references needs to be set explicitly
    # for a database foreign key reference to be created.
    foreign_key :artist_id, :artists, null: false
    String :name, null: false, unique: true
  end

If you already had a schema such as:

  # Database schema:
  # albums    artists
  #  :id       :id
  #  :name     :name

Then you just need to add the column:

  DB.alter_table(:albums) do
    add_foreign_key :artist_id, :artists, null: false
  end

=== many_to_many

With many_to_many associations, the default join table for the association uses the sorted underscored names of both model classes. For example, with the following model code:

  class Album
    many_to_many :artists
  end
  class Artist
    many_to_many :albums
  end

The default join table name would be albums_artists, not artists_albums, because:

  ["artists", "albums"].sort.join('_')
  # "albums_artists"

Assume you already had the albums and artists tables created, and you just wanted to add an albums_artists join table to create the following schema:

  # Database schema:
  # albums
  #  :id <----\
  #  :name     \    albums_artists
  #             \---- :album_id
  # artists    /---- :artist_id
  #  :id <----/
  #  :name

You could use the following Sequel code:

  DB.create_join_table(album_id: :albums, artist_id: :artists)
  # or
  DB.create_table(:albums_artists) do
    foreign_key :album_id, :albums, null: false
    foreign_key :artist_id, :artists, null: false
    primary_key [:album_id, :artist_id]
    index [:artist_id, :album_id]
  end

== Association Scope

If you nest your Sequel::Model classes inside modules, then you should know that Sequel will only look in the same module for associations by default. So the following code will work fine:

  module App
    class Artist < Sequel::Model
      one_to_many :albums
    end

    class Album < Sequel::Model
      many_to_one :artist
    end
  end

However, if you enclose your model classes inside two different modules, things will not work by default:

  module App1
    class Artist < Sequel::Model
      one_to_many :albums
    end
  end

  module App2
    class Album < Sequel::Model
      many_to_one :artist
    end
  end

To fix this, you need to specify the full model class name using the :class option:

  module App1
    class Artist < Sequel::Model
      one_to_many :albums, class: "App2::Album"
    end
  end

  module App2
    class Album < Sequel::Model
      many_to_one :artist, class: "App1::Artist"
    end
  end

If both classes are in the same module, but the default class name used is not correct, you need to specify the full class name with the :class option:

  module App1
    class AlbumArtist < Sequel::Model
      one_to_many :albums
    end

    class Album < Sequel::Model
      many_to_one :artist, class: "App1::AlbumArtist"
    end
  end

== Method Details

In all of these methods, _association_ is replaced by the symbol you pass to the association.

=== _association_(opts={}) (e.g. albums)

For +many_to_one+ and +one_to_one+ associations, the _association_ method returns either the single object associated, or nil if no object is associated.
  @artist = @album.artist

For +one_to_many+ and +many_to_many+ associations, the _association_ method returns an array of associated objects, which may be empty if no objects are currently associated.

  @albums = @artist.albums

=== _association_=(object_to_associate) (e.g. artist=) [+many_to_one+ and +one_to_one+]

The _association_= method sets up an association of the passed object to the current object. For +many_to_one+ associations, this sets the foreign key for the current object to point to the associated object's primary key.

  @album.artist = @artist

For +one_to_one+ associations, this sets the foreign key of the associated object to the primary key value of the current object.

For +many_to_one+ associations, this does not save the current object. For +one_to_one+ associations, this does save the associated object.

=== add_association(object_to_associate) (e.g. add_album) [+one_to_many+ and +many_to_many+]

The add_association method associates the passed object to the current object. For +one_to_many+ associations, it sets the foreign key of the associated object to the primary key value of the current object, and saves the associated object. For +many_to_many+ associations, this inserts a row into the join table with the foreign keys set to the primary key values of the current and associated objects. Note that the singular form of the association name is used in this method.

  @artist.add_album(@album)

In addition to passing an actual associated object, you can pass a hash, and a new associated object will be created from it:

  @artist.add_album(name: 'RF') # creates Album object

The add_association method returns the new associated object:

  @album = @artist.add_album(name: 'RF')

Note that the add_* methods for +one_to_many+ associations persist the changes by saving the passed in (or newly created) object. However, to avoid silent failures of these methods, they explicitly raise exceptions even when raise_on_save_failure is false for the associated model. You can disable this behavior (i.e. return nil instead of raising exceptions on a save failure) by setting the raise_on_save_failure: false option for the association.

=== remove_association(object_to_disassociate) (e.g. remove_album) [+one_to_many+ and +many_to_many+]

The remove_association method disassociates the passed object from the current object. For +one_to_many+ associations, it sets the foreign key of the associated object to NULL, and saves the associated object. For +many_to_many+ associations, this deletes the matching row in the join table. Similar to the add_association method, the singular form of the association name is used in this method.

  @artist.remove_album(@album)

Note that this does not delete @album from the database, it only disassociates it from the @artist. To delete @album from the database:

  @album.destroy

The add_association and remove_association methods should be thought of as adding and removing from the association, not from the database.

In addition to passing the object directly to remove_association, you can also pass the associated object's primary key:

  @artist.remove_album(10)

This will look up the associated object using the key, and remove that album.

The remove_association method returns the now disassociated object:

  @album = @artist.remove_album(10)

=== remove_all_association (e.g. remove_all_albums) [+one_to_many+ and +many_to_many+]

The remove_all_association method disassociates all currently associated objects.
For +one_to_many+ associations, it sets the foreign key of all associated objects to NULL in a single query. For +many_to_many+ associations, this deletes all matching rows in the join table.

Unlike the add_association and remove_association methods, the plural form of the association name is used in this method.

The remove_all_association method returns the previously cached associated records, or nil if there were no cached associated records.

=== association_dataset (e.g. albums_dataset)

The association_dataset method returns a dataset that represents all associated objects. This dataset is like any other Sequel dataset, in that it can be filtered, ordered, etc.:

  ds = @artist.albums_dataset.where(Sequel.like(:name, 'A%')).order(:copies_sold)

Unlike most other Sequel datasets, association datasets have a couple of added methods:

  ds.model_object
  # @artist

  ds.association_reflection
  # same as Artist.association_reflection(:albums)

For more info on Sequel's reflection capabilities see the {Reflection page}[rdoc-ref:doc/reflection.rdoc].

== Overriding Method Behavior

Sequel is designed to be very flexible. If the default behavior of the association modification methods isn't what you desire, you can override the methods in your classes. However, you should be aware that for each of the association modification methods described, there is a private method that is preceded by an underscore that does the actual modification. The public method without the underscore handles caching and callbacks, and shouldn't be overridden by the user.

In addition to overriding the private method in your class, you can also use association options to change which method Sequel defines. The only difference between the two is that if you use an association option to change the method Sequel defines, you cannot call super to get the default behavior.

=== :setter (_association= method)

Let's say you want to set a specific field whenever associating an object using the association setter method. For example, let's say you have a file_under column for each album to tell you where to file it. If the album is associated with an artist, it should be filed under the artist's name and the album's name, otherwise it should just use the album's name.

  class Album < Sequel::Model
    many_to_one :artist, setter: (lambda do |artist|
      if artist
        self.artist_id = artist.id
        self.file_under = "#{artist.name}-#{name}"
      else
        self.artist_id = nil
        self.file_under = name
      end
    end)
  end

The above example is contrived, as you would generally use a before_save model hook to handle such a modification. However, if you only modify the album's artist using the artist= method, this approach may perform better.

=== :adder (\_add_association method)

Continuing with the same example, here's how you would handle the same case if you also wanted to handle the Artist#add_album method:

  class Artist < Sequel::Model
    one_to_many :albums, adder: (lambda do |album|
      album.update(artist_id: id, file_under: "#{name}-#{album.name}")
    end)
  end

You can set this to +nil+ to not create an add_association method.

=== :remover (\_remove_association method)

Continuing with the same example, here's how you would handle the same case if you also wanted to handle the Artist#remove_album method:

  class Artist < Sequel::Model
    one_to_many :albums, remover: (lambda do |album|
      album.update(artist_id: nil, file_under: album.name)
    end)
  end

You can set this to +nil+ to not create a remove_association method.
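With the custom adder and remover above, the association modification methods issue UPDATEs along these lines (a sketch; the literal values depend on your data):

  @artist.add_album(@album)
  # UPDATE albums SET artist_id = 1, file_under = 'SomeArtist-RF' WHERE (id = 2)

  @artist.remove_album(@album)
  # UPDATE albums SET artist_id = NULL, file_under = 'RF' WHERE (id = 2)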
=== :clearer (\_remove_all_association method)

Continuing with the same example, here's how you would handle the same case if you also wanted to handle the Artist#remove_all_albums method:

  class Artist < Sequel::Model
    one_to_many :albums, clearer: (lambda do
      # This is Dataset#update, not Model#update, so the file_under: :name
      # ends up being "SET file_under = name" in SQL.
      albums_dataset.update(artist_id: nil, file_under: :name)
    end)
  end

You can set this to +nil+ to not create a remove_all_association method.

=== :no_dataset_method

Setting this to true will result in the association_dataset method not being defined. This can save memory if you only use the association method and do not call the association_dataset method directly or indirectly.

=== :no_association_method

Setting this to true will result in the association method not being defined. This can save memory if you only use the association_dataset method and do not call the association method directly or indirectly.

== Association Options

Sequel's associations mostly share the same options. For ease of understanding, they are grouped here by section.

The defaults for any of these options can be set at the class level using Sequel::Model.default_association_options. To make associations read only by default:

  Sequel::Model.default_association_options[:read_only] = true

Many of these options are specific to particular association types, and the defaults can be set on a per association type basis. To make one_to_many associations read only by default:

  Sequel::Model.default_association_type_options[:one_to_many] = {read_only: true}

=== Association Dataset Modification Options

==== block

All association defining methods take a block that is passed the default dataset and should return a modified copy of the dataset to use for the association. For example, if you wanted an association that returns all albums of an artist that went gold (sold at least 500,000 copies):

  Artist.one_to_many :gold_albums, class: :Album do |ds|
    ds.where{copies_sold > 500000}
  end

The result of the block is cached as an optimization. One of the side effects of that is that if your block depends on external state, it won't work correctly unless you set up a delayed evaluation. For example:

  Artist.one_to_many :gold_albums, class: :Album do |ds|
    ds.where{copies_sold > $gold_limit}
  end

In this case if you change $gold_limit later, the changes won't affect the association. If you want to pick up changes to $gold_limit, you need to set up a delayed evaluation:

  Artist.one_to_many :gold_albums, class: :Album do |ds|
    ds.where{copies_sold > Sequel.delay{$gold_limit}}
  end

==== :class

This is the class of the associated objects that will be used. It's one of the most commonly used options. If it is not given, it guesses based on the name of the association, including considering the namespace of the current model. If a *_to_many association is used, this uses the singular form of the association name.
For example:

  Album.many_to_one :artist # guesses Artist
  Artist.one_to_many :albums # guesses Album
  Foo::Artist.one_to_many :albums # guesses Foo::Album

However, for more complex associations, especially ones that add additional filters beyond the foreign/primary key relationships, the default class guessed will be wrong:

  # guesses GoldAlbum
  Artist.one_to_many :gold_albums do |ds|
    ds.where{copies_sold > 500000}
  end

You can specify the :class option using the class itself, a Symbol, or a String:

  Album.many_to_one :artist, class: Artist   # Class
  Album.many_to_one :artist, class: :Artist  # Symbol
  Album.many_to_one :artist, class: "Artist" # String

If you are namespacing your models, and you need to specify the :class option, the path you give to the :class option should be the full path to the associated class including any namespaces:

  Foo::Album.many_to_one :artist # Uses Foo::Artist
  Foo::Album.many_to_one :artist, class: "Artist" # Uses Artist
  Foo::Album.many_to_one :artist, class: "Foo::Artist" # Uses Foo::Artist

==== :key

For +many_to_one+ associations, this is the foreign key in the current model's table that references the associated model's primary key, as a symbol. Defaults to :association_id.

  Album.many_to_one :artist, key: :artistid

For +one_to_one+ and +one_to_many+ associations, this is the foreign key in the associated model's table that references the current model's primary key, as a symbol. Defaults to :"#{self.name.underscore}_id".

  Artist.one_to_many :albums, key: :artistid

In both cases an array of symbols can be used for a composite key association:

  Apartment.many_to_one :building, key: [:city, :address]

==== :conditions

The conditions to use to filter the association, can be any argument passed to +where+. If you use a hash or an array of two element arrays, this will also be used as a filter when using eager_graph or association_join to load the association.

If you do not use a hash or array of two element arrays, you should use the :graph_conditions, :graph_only_conditions, or :graph_block option or you will not be able to use eager_graph or association_join with the association.

  Artist.one_to_many :good_albums, class: :Album, conditions: {good: true}
  @artist.good_albums
  # SELECT * FROM albums WHERE ((artist_id = 1) AND (good IS TRUE))

==== :order

The column(s) by which to order the association dataset. Can be a singular column or an array.

  Artist.one_to_many :albums_by_name, class: :Album, order: :name
  Artist.one_to_many :albums_by_num_tracks, class: :Album, order: [:num_tracks, :name]

==== :select

The columns to SELECT when loading the association. For most associations, it defaults to nil, so * is used. For +many_to_many+ associations, it defaults to the associated class's table_name.*, which means it doesn't include the columns from the join table. This is to prevent the common issue where the join table includes columns with the same name as columns in the associated table, in which case the joined table's columns would usually end up clobbering the values in the associated table.

If you want to include the join table attributes, you can use this option, but beware that the join table columns can clash with columns from the associated table, so you should alias any columns that have the same name in both the join table and the associated table.
Example:

  Artist.one_to_many :albums, select: [:id, :name]
  Album.many_to_many :tags, select: [Sequel[:tags].*, Sequel[:albums_tags][:number]]

==== :limit

Limit the number of records to the provided value:

  Artist.one_to_many :best_selling_albums, class: :Album, order: :copies_sold, limit: 5

Use an array with two elements for the value to specify a limit and an offset:

  Artist.one_to_many :next_best_selling_albums, class: :Album,
    order: :copies_sold, limit: [10, 5]
  # LIMIT 10 OFFSET 5

This probably doesn't make a lot of sense for *_to_one associations, though you could use it to specify an offset.

==== :join_table [+many_to_many+, +one_through_one+]

Name of table that includes the foreign keys to both the current model and the associated model, as a symbol. Defaults to the name of current model and name of associated model, pluralized, underscored, sorted, and joined with '_'. Here's an example of the defaults:

  Artist.many_to_many :albums, join_table: :albums_artists
  Album.many_to_many :artists, join_table: :albums_artists
  Person.many_to_many :colleges, join_table: :colleges_people

==== :left_key [+many_to_many+, +one_through_one+]

Foreign key in join table that points to current model's primary key, as a symbol. Defaults to :"#{model_name.underscore}_id".

  Album.many_to_many :tags, left_key: :album_id

Can use an array of symbols for a composite key association.

==== :right_key [+many_to_many+, +one_through_one+]

Foreign key in join table that points to associated model's primary key, as a symbol. Defaults to :"#{association_name.singularize}_id" for +many_to_many+ and :"#{association_name}_id" for +one_through_one+.

  Album.many_to_many :tags, right_key: :tag_id

Can use an array of symbols for a composite key association.

==== :distinct

Use the DISTINCT clause when selecting associating object, both when lazy loading and eager loading via eager (but not when using eager_graph).

This is most useful for many_to_many associations that use join tables that contain more than just the foreign keys, where you are storing additional information. For example, if you have a database of people, degree types, and colleges, and you want to return all people from a given college, you may want to use :distinct so that if a person has two separate degrees from the same college, they won't show up twice.

==== :clone

The :clone option clones an existing association, taking the options you specified for that association, and making a copy of them for this association. Other options provided by this association are then merged into the cloned options.

This is commonly used if you have a bunch of similar associations that you want to DRY up:

  one_to_many :english_verses, class: :LyricVerse, key: :lyricsongid,
    order: :number, conditions: {languageid: 1}
  one_to_many :romaji_verses, clone: :english_verses, conditions: {languageid: 2}
  one_to_many :japanese_verses, clone: :english_verses, conditions: {languageid: 3}

Note that for the final two associations, you didn't have to specify the :class, :key, or :order options, as they were copied by the :clone option. By specifying the :conditions option for the final two associations, it overrides the :conditions option of the first association, it doesn't attempt to merge them.

In addition to the options hash, the :clone option will copy a block argument from the existing association. If you want a cloned association to not have the same block as the association you are cloning from, specify the block: nil option in addition to the :clone option.
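For example, a sketch building on the verse associations above (the block on the source association is hypothetical):

  one_to_many :short_english_verses, clone: :english_verses do |ds|
    ds.where{length < 100}
  end
  # Copies the block from :short_english_verses:
  one_to_many :short_romaji_verses, clone: :short_english_verses, conditions: {languageid: 2}
  # Does not copy the block:
  one_to_many :all_romaji_verses, clone: :short_english_verses,
    conditions: {languageid: 2}, block: nil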
==== :dataset

This is generally only specified for custom associations that aren't based on primary/foreign key relationships. It should be a proc that is instance_execed to get the base dataset to use before the other options are applied. If the proc accepts an argument, it is passed the related association reflection.

For best performance, it's recommended that custom associations call the +associated_dataset+ method on the association reflection as the starting point for the dataset to return. The +associated_dataset+ method will return a dataset based on the associated class with most of the association options already applied, and the proc should return a modified copy of this dataset.

Here's an example of an association of songs to artists through lyrics, where the artist can perform any one of four tasks for the lyric:

  Artist.one_to_many :songs, dataset: (lambda do |r|
    r.associated_dataset.select_all(:songs).
      join(:lyrics, id: :lyricid, id=>[:composer_id, :arranger_id, :vocalist_id, :lyricist_id])
  end)

  Artist.first.songs_dataset
  # SELECT songs.* FROM songs
  # INNER JOIN lyrics ON ((lyrics.id = songs.lyricid)
  # AND (1 IN (composer_id, arranger_id, vocalist_id, lyricist_id)))

==== :extend

A module or array of modules to extend the dataset with. These are used to set up association extensions. For more information, please see the {Advanced Associations page}[rdoc-ref:doc/advanced_associations.rdoc].

==== :primary_key [+many_to_one+, +one_to_one+, +one_to_many+]

The column that the :key option references, as a symbol. For +many_to_one+ associations, this column is in the associated table. For +one_to_one+ and +one_to_many+ associations, this column is in the current table. In both cases, it defaults to the primary key of the table. Can use an array of symbols for a composite key association.

  Artist.set_primary_key :arid
  Artist.one_to_many :albums, primary_key: :arid
  Album.many_to_one :artist, primary_key: :arid

==== :left_primary_key [+many_to_many+, +one_through_one+]

Column in current table that :left_key option points to, as a symbol. Defaults to primary key of current table.

  Album.set_primary_key :alid
  Album.many_to_many :tags, left_primary_key: :alid

Can use an array of symbols for a composite key association.

==== :right_primary_key [+many_to_many+, +one_through_one+]

Column in associated table that :right_key points to, as a symbol. Defaults to primary key of the associated table.

  Tag.set_primary_key :tid
  Album.many_to_many :tags, right_primary_key: :tid

Can use an array of symbols for a composite key association.

==== :join_table_block [+many_to_many+, +one_through_one+]

A proc that can be used to modify the dataset used in the add/remove/remove_all methods. It's separate from the association block, as that is called on a join of the join table and the associated table, whereas this option just applies to the join table. It can be used to make sure that filters are used when deleting.

  Artist.many_to_many :lead_guitar_albums, class: :Album,
    join_table_block: (lambda do |ds|
      ds.where(instrument_id: 5)
    end)

==== :join_table_db [+many_to_many+, +one_through_one+]

A Sequel::Database to use for the join table. Specifying this option switches the loading to use a separate query for the join table. This is useful if the join table is not located in the same database as the associated table, or if the database account with access to the associated table doesn't have access to the join table.
For example, if the Album class uses a different Sequel::Database than the Artist class, and the join table is in the database that the Artist class uses:

  Artist.many_to_many :lead_guitar_albums, class: :Album, join_table_db: Artist.db

This option also affects the add/remove/remove_all methods, by changing which database is used for inserts/deletes from the join table (add/remove/remove_all defaults to use the current model's database instead of the associated model's database).

=== Callback Options

All callbacks can be specified as a Symbol, Proc, or array of both/either specifying a callback to call. Symbols are interpreted as instance methods that are called with the associated object. Procs are called with the receiver as the first argument and the associated object as the second argument. If an array is given, all of them are called in order.

Before callbacks are often used to check preconditions, and they can call Model#cancel_action to signal Sequel to abort the modification. If any before callback calls cancel_action, the remaining before callbacks are not called and the modification is aborted.

==== :before_add [+one_to_many+, +many_to_many+]

Called before adding an object to the association:

  class Artist
    # Don't allow adding an album to an artist if it has no tracks
    one_to_many :albums, before_add: lambda{|ar, al| ar.cancel_action if al.num_tracks == 0}
  end

==== :after_add [+one_to_many+, +many_to_many+]

Called after adding an object to the association:

  class Artist
    # Log all associations of albums to an audit logging table
    one_to_many :albums, after_add: :log_add_album

    private

    def log_add_album(album)
      DB[:audit_logs].insert(log: "Album #{album.inspect} associated to #{inspect}")
    end
  end

==== :before_remove [+one_to_many+, +many_to_many+]

Called before removing an object from the association using remove_association:

  class Artist
    # Don't allow removing a self-titled album
    one_to_many :albums, before_remove: lambda{|ar, al| ar.cancel_action if al.name == ar.name}
  end

This is not called when using remove_all_association.

==== :after_remove [+one_to_many+, +many_to_many+]

Called after removing an object from the association using remove_association:

  class Artist
    # Log all disassociations of albums to an audit logging table
    one_to_many :albums, after_remove: :log_remove_album

    private

    def log_remove_album(album)
      DB[:audit_logs].insert(log: "Album #{album.inspect} disassociated from #{inspect}")
    end
  end

This is not called when using remove_all_association.

==== :before_set [+many_to_one+, +one_to_one+]

Called before the _association= method is called to modify the objects:

  class Album
    # Don't associate the album with an artist if the year the album was
    # released is less than the year the artist/band started.
    many_to_one :artist, before_set: lambda{|al, ar| al.cancel_action if al.year < ar.year_started}
  end

==== :after_set [+many_to_one+, +one_to_one+]

Called after the _association= method is called to modify the objects:

  class Album
    # Log all disassociations of albums to an audit logging table
    many_to_one :artist, after_set: :log_artist_set

    private

    def log_artist_set(artist)
      DB[:audit_logs].insert(log: "Artist for album #{inspect} set to #{artist.inspect}")
    end
  end

==== :after_load

Called after retrieving the associated records from the database.

  class Artist
    # Cache all album names to a single string when retrieving the albums.
    one_to_many :albums, after_load: :cache_album_names

    attr_reader :album_names

    private

    def cache_album_names(albums)
      @album_names = albums.map(&:name).join(", ")
    end
  end

Generally used if you know you will always want a certain action done when retrieving the association.

For +one_to_many+ and +many_to_many+ associations, both the argument to symbol callbacks and the second argument to proc callbacks will be an array of associated objects instead of a single object.

==== :uniq [+many_to_many+]

Adds an after_load callback that makes the array of objects unique. In many cases, using the :distinct option is a better approach.

=== Eager Loading via eager (query per association) Options

==== :eager

The associations to eagerly load via eager when loading the associated object(s). This is useful for example if you always want to eagerly load dependent associations when loading this association.

For example, if you know that any time that you want to load an artist's albums, you are also going to want access to the album's tracks as well:

  # Eager load tracks when loading the albums
  Artist.one_to_many :albums, eager: :tracks

You can also use a hash or array to specify multiple dependent associations to eagerly load:

  # Eager load the albums' tracks and the tracks' tags when loading the albums
  Artist.one_to_many :albums, eager: {tracks: :tags}

  # Eager load the albums' tags and tracks when loading the albums
  Artist.one_to_many :albums, eager: [:tags, :tracks]

  # Eager load the albums' tags, tracks, and tracks' tags when loading the albums
  Artist.one_to_many :albums, eager: [:tags, {tracks: :tags}]

==== :eager_loader

A custom loader to use when eagerly loading associated objects via eager. For many details and examples of custom eager loaders, please see the {Advanced Associations guide}[rdoc-ref:doc/advanced_associations.rdoc].

==== :eager_loader_key

A symbol for the key column to use to populate the key hash for the eager loader. Generally does not need to be set manually, defaults to the key method used. Can be set to nil to not populate the key hash (better for performance if a custom eager loader does not use the key_hash).

==== :eager_block

If given, should be a proc to use instead of the association method block when eagerly loading. To not use a block when eager loading when one is used normally, set to nil. It's very uncommon to need this option.

=== Eager Loading via eager_graph (one query with joins) Options

==== :eager_graph

The associations to eagerly load via eager_graph when loading the associated object(s). This is useful for example if you always want to eagerly load dependent associations when loading this association, but you want to filter or order the association based on dependent associations:

  Artist.one_to_many :albums_with_short_tracks, class: :Album, eager_graph: :tracks do |ds|
    ds.where{tracks[:seconds] < 120}
  end

  Artist.one_to_many :albums_by_track_name, class: :Album, eager_graph: :tracks do |ds|
    ds.order{tracks[:name]}
  end

You can also use a hash or array of arguments for :eager_graph, similar to what the :eager option accepts.

==== :graph_conditions

The additional conditions to use on the SQL join when eagerly loading the association via eager_graph. Should be a hash or an array of two element arrays. If not specified, the :conditions option is used if it is a hash or array of two element arrays.

  Artist.one_to_many :active_albums, class: :Album, graph_conditions: {active: true}
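When eager graphing, the extra conditions end up in the JOIN's ON clause, along these lines (a sketch; exact column aliasing varies):

  Artist.eager_graph(:active_albums).all
  # SELECT artists.id, artists.name, active_albums.id AS active_albums_id, ...
  # FROM artists
  # LEFT OUTER JOIN albums AS active_albums
  #   ON ((active_albums.artist_id = artists.id)
  #   AND (active_albums.active IS TRUE))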
Note that these conditions on the association are in addition to the default conditions specified by the foreign/primary keys. If you want to replace the conditions specified by the foreign/primary keys, you need the :graph_only_conditions option.

==== :graph_block

The block to pass to Dataset#join_table when eagerly loading the association via eager_graph. This is useful to specify conditions that can't be specified in a hash or array of two element arrays.

  Artist.one_to_many :gold_albums, class: :Album,
    graph_block: proc{|j,lj,js| Sequel[j][:copies_sold] > 500000}

==== :graph_join_type

The type of SQL join to use when eagerly loading the association via eager_graph. Defaults to :left_outer. This is useful if you want to ensure that only artists that have albums are returned:

  Artist.one_to_many :albums, graph_join_type: :inner

  # Will exclude artists without an album
  Artist.eager_graph(:albums).all

==== :graph_select

A column or array of columns to select from the associated table when eagerly loading the association via eager_graph. Defaults to all columns in the associated table.

==== :graph_only_conditions

The conditions to use on the SQL join when eagerly loading the association via eager_graph, instead of the default conditions specified by the foreign/primary keys. This option causes the :graph_conditions option to be ignored.

This can be useful if the keys you are using are strings and you want to do a case insensitive comparison. For example, let's say that instead of integer keys, you used string keys based on the album or artist name, and that the album was associated to the artist by name. However, you weren't enforcing case sensitivity between the keys, so you still want to return albums where the artist's name differs in case:

  Artist.one_to_many :albums, key: :artist_name,
    graph_only_conditions: nil,
    graph_block: (proc do |j,lj,js|
      {Sequel.function(:lower, Sequel[j][:artist_name])=>
       Sequel.function(:lower, Sequel[lj][:name])}
    end)

Note how :graph_only_conditions is set to nil to ignore any existing conditions, and :graph_block is used to set up the case insensitive comparison.

Another case where :graph_only_conditions may be used is if you want to use a JOIN USING or NATURAL JOIN for the graph:

  # JOIN USING
  Artist.one_to_many :albums, key: :artist_name, graph_only_conditions: [:artist_name]

  # NATURAL JOIN
  Artist.one_to_many :albums, key: :artist_name, graph_only_conditions: nil,
    graph_join_type: :natural

==== :graph_alias_base

The base name to use for the table alias when eager graphing. Defaults to the name of the association. If the alias name has already been used in the query, Sequel will create a unique alias by appending a numeric suffix (e.g. alias_0, alias_1, ...) until the alias is unique.

This is mostly useful if you have associations with the same name in many models, and you want to be able to easily tell which table alias corresponds to which association when eagerly graphing multiple associations with the same name.

You can override this option on a per-eager_graph basis by specifying the association as an SQL::AliasedExpression instead of a symbol:

  Album.eager_graph(Sequel.as(:artist, :a))

==== :eager_grapher

Sets up a custom grapher to use when eager loading the objects via eager_graph. This is the eager_graph analogue to the :eager_loader option. This isn't generally needed, as one of the other eager_graph related association options is usually sufficient.
If specified, should be a proc that accepts a single hash argument, which will contain at least the following keys:

:callback :: A callback proc used to dynamically modify the dataset to graph into the current dataset, before such graphing is done. This is nil if no callback proc is used.
:implicit_qualifier :: The alias that was used for the current table (since you can cascade associations).
:join_type :: Override the join type to use when graphing.
:limit_strategy :: The limit strategy symbol to use when graphing (for limited associations only).
:self :: The dataset that is doing the eager loading.
:table_alias :: An alias to use for the table to graph for this association.

Example:

  Artist.one_to_many :self_title_albums, class: :Album,
    eager_grapher: (lambda do |eo|
      eo[:self].graph(:albums, {artist_id: :id, name: :name},
        table_alias: eo[:table_alias], implicit_qualifier: eo[:implicit_qualifier])
    end)

==== :order_eager_graph

Whether to add the order to the dataset's order when graphing via eager_graph. Defaults to true, so set to false to disable.

Sequel has to do some guess work when attempting to add the association's order to an eager_graphed dataset. In most cases it does so correctly, but if it has problems, you'll probably want to set this option to false.

==== :graph_order

Override the order added when using eager_graph, instead of using the one defined in :order. This is useful if :order contains qualified identifiers, as the qualifiers may not match the aliases automatically used by eager_graph. This should contain unqualified identifiers, and eager_graph will automatically qualify them with the appropriate alias.

==== :graph_join_table_conditions [+many_to_many+, +one_through_one+]

The additional conditions to use on the SQL join for the join table when eagerly loading the association via eager_graph. Should be a hash or an array of two element arrays.

Let's say you have a database of people, colleges, and a table called degrees_received that includes a string field specifying the name of the degree, and you want to eager load all colleges for people where the person has received a specific degree:

  Person.many_to_many :bs_degree_colleges, class: :College,
    join_table: :degrees_received,
    graph_join_table_conditions: {degree: 'BS'}

==== :graph_join_table_block [+many_to_many+, +one_through_one+]

The block to pass to join_table for the join table when eagerly loading the association via eager_graph. This is used for similar reasons as :graph_block, but is only used for +many_to_many+ associations when graphing the join table into the dataset. It's used in the same place as :graph_join_table_conditions but, like :graph_block, is needed for situations where the conditions can't be specified as a hash or array of two element arrays.

Let's say you have a database of people, colleges, and a table called degrees_received that includes a string field specifying the name of the degree, and you want to eager load all colleges for people where the person has received a bachelor's degree (degree starting with B):

  Person.many_to_many :bachelor_degree_colleges, class: :College,
    join_table: :degrees_received,
    graph_join_table_block: proc{|j,lj,js| Sequel[j][:degree].like('B%')}

This should be done when graphing the join table, instead of when graphing the final table, as :degree is a column of the join table.

==== :graph_join_table_join_type [+many_to_many+, +one_through_one+]

The type of SQL join to use for the join table when eagerly loading the association via eager_graph.
Defaults to the :graph_join_type option or :left_outer. This exists mainly for consistency in the unlikely case that you want to use a different join type when JOINing to the join table than you want to use for JOINing to the final table.

==== :graph_join_table_only_conditions [+many_to_many+, +one_through_one+]

The conditions to use on the SQL join for the join table when eagerly loading the association via eager_graph, instead of the default conditions specified by the foreign/primary keys. This option causes the :graph_join_table_conditions option to be ignored. This is only useful if you want to replace the default foreign/primary key conditions that Sequel would use when eagerly graphing.

=== Associations Based on SQL Expressions Options

Sequel's associations can work not just with columns, but also with arbitrary SQL expressions. For example, on PostgreSQL, you can store foreign keys to other tables in hstore, json, or jsonb columns, and Sequel can work with such constructs, including full support for eager loading.

There are actually two parts to supporting associations based on SQL expressions. First is you must have an instance method in the model that returns the value that the SQL expression would return. Second is you must have an SQL expression object. If Sequel has access to a model instance and needs to get the value of the expression, it calls the method to get the value. If Sequel does not have access to a model instance, but needs to use the SQL expression in a query, it will use the SQL expression object.

Below is an example storing foreign keys to other tables in a PostgreSQL jsonb column, using the +pg_json+ and +pg_json_ops+ extensions.

  # Example schema:
  # albums        artists
  #  :id    /----> :id
  #  :meta -/      :name
  #  :name

  class Album < Sequel::Model
    many_to_one :artist,
      key_column: Sequel.pg_jsonb(:meta)['artist_id'].cast(String).cast(Integer)

    def artist_id
      meta['artist_id'].to_i
    end
  end

  class Artist < Sequel::Model
    one_to_many :albums,
      key: Sequel.pg_jsonb(:meta)['artist_id'].cast(String).cast(Integer),
      key_method: :artist_id
  end

  # Example schema:
  # albums      albums_artists      artists
  #  :id <-----   :meta      ----->  :id
  #  :name                           :name

  class Album < Sequel::Model
    many_to_many :artists,
      left_key: Sequel.pg_jsonb(:meta)['album_id'].cast(String).cast(Integer),
      right_key: Sequel.pg_jsonb(:meta)['artist_id'].cast(String).cast(Integer)
  end

  class Artist < Sequel::Model
    many_to_many :albums,
      left_key: Sequel.pg_jsonb(:meta)['artist_id'].cast(String).cast(Integer),
      right_key: Sequel.pg_jsonb(:meta)['album_id'].cast(String).cast(Integer)
  end

==== :key_column [+many_to_one+]

Like the :key option, but :key references the method name, while :key_column references the underlying column/expression.

==== :primary_key_method [+many_to_one+]

Like the :primary_key option, but :primary_key references the column/expression name, while :primary_key_method references the method name.

==== :primary_key_column [+one_to_many+, +one_to_one+]

Like the :primary_key option, but :primary_key references the method name, while :primary_key_column references the underlying column/expression.

==== :key_method [+one_to_many+, +one_to_one+]

Like the :key option, but :key references the column/expression name, while :key_method references the method name.

==== :left_primary_key_column [+many_to_many+, +one_through_one+]

Like the :left_primary_key option, but :left_primary_key references the method name, while :left_primary_key_column references the underlying column/expression.
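For example, a hypothetical sketch (names invented for illustration) where the current table's matching value is an expression rather than a plain column:

  class Album < Sequel::Model
    many_to_many :tags,
      left_primary_key: :normalized_code,                      # instance method below
      left_primary_key_column: Sequel.function(:lower, :code)  # SQL expression

    # Must return the same value the SQL expression would return
    def normalized_code
      code.downcase
    end
  end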
==== :right_primary_key_method [+many_to_many+, +one_through_one+]

Like the :right_primary_key option, but :right_primary_key references the column/expression name, while :right_primary_key_method references the method name.

=== Advanced Options

==== :reciprocal

The symbol name of the reciprocal association, if it exists. By default, Sequel will try to determine it by looking at the associated model's associations for an association that matches the current association's key(s). Set to nil to not use a reciprocal.

Reciprocals are used in Sequel to modify the matching cached associations in associated objects when calling association methods on the current object. For example, when you retrieve objects in a one_to_many association, Sequel will automatically set the matching many_to_one association in the associated objects. The result of this is that code that does this:

  @artist.albums.each{|album| album.artist.name}

only does one database query, because when the @artist's albums are retrieved, the cached artist association for each album is set to @artist.

In addition to the one_to_many retrieval case, the association modification methods affect the reciprocals as well:

  # Sets the cached artist association for @album to @artist
  @artist.add_album(@album)

  # Sets the cached artist association for @album to nil
  @artist.remove_album(@album)

  # Sets the cached artist association to nil for the @artist's
  # cached albums association
  @artist.remove_all_albums

  # Removes @album from @artist1's cached albums association, and adds @album
  # to @artist2's cached albums association.
  @album.artist # @artist1
  @album.artist = @artist2

Sequel can usually guess the correct reciprocal, but if you have multiple associations to the same associated class that use the same keys, you may want to specify the :reciprocal option manually to ensure the correct one is used.

==== :read_only

For +many_to_one+ and +one_to_one+ associations, do not add a setter method. For +one_to_many+ and +many_to_many+, do not add the add_association, remove_association, or remove_all_association methods. If you are not using the association modification methods, setting this value to true will save memory.

==== :validate

Set to false to not validate when implicitly saving any associated object.

When using the +one_to_many+ association modification methods, the +one_to_one+ setter method, or creating a new object by passing a hash to the add_association method, Sequel will automatically save the object. If you don't want to validate objects when these implicit saves are done, the :validate option should be set to false.

==== :raise_on_save_failure [+one_to_many+ associations]

Set to false to not raise an exception when validation or a before hook fails when implicitly saving an associated object in the add_* or remove_* methods. This mirrors the raise_on_save_failure model setting, which these methods do not respect (by design). If you use this option, you must explicitly check all add_* and remove_* return values to see if they were successful.

==== :allow_eager

If set to false, you cannot load the association eagerly via eager or eager_graph.

  Artist.one_to_many :albums, allow_eager: false

  Artist.eager(:albums) # Raises Sequel::Error
  Artist.eager_graph(:albums) # Raises Sequel::Error

This is usually used if the association dataset depends on specific values in the model instance that would not be valid when eager loading for multiple instances.

==== :allow_eager_graph

If set to false, you cannot load the association eagerly via eager_graph.
  Artist.one_to_many :albums, allow_eager_graph: false

  Artist.eager(:albums) # Allowed
  Artist.eager_graph(:albums) # Raises Sequel::Error

This is useful if you still want to allow loading via eager, but do not want to allow loading via eager_graph, possibly because the association does not support joins.

==== :allow_filtering_by

If set to false, you cannot use the association when filtering.

  Artist.one_to_many :albums, allow_filtering_by: false

  Artist.where(albums: Album.where(name: 'A')).all # Raises Sequel::Error

This is useful if such filtering cannot work, such as when a subquery cannot be used because the necessary tables are not in the same database.

==== :instance_specific

This allows you to override the setting of whether the dataset contains instance specific code. If you are passing a block to the association, Sequel sets this to true by default, which disables some optimizations that would be invalid if the association is instance specific. If you know that the block does not contain instance specific code, you can set this to false to re-enable the optimizations. Instance specific code is most commonly calling model instance methods inside an association block, but also includes cases where the association block can return different values based on the runtime environment, such as calls to Time.now in the block. Associations that use the :dataset option are always considered instance specific, even if explicitly specified otherwise.

==== :cartesian_product_number

The number of joins completed by this association that could cause more than one row for each row in the current table (default: 0 for *_one associations, 1 for *_to_many associations).

This should only be modified in specific cases. For example, if you have a one_to_one association that can actually return more than one row (where the default association method will just return the first), or a many_to_many association where there is a unique index in the join table so that you know only one object will ever be associated through the association.

==== :class_namespace

If the :class option is specified as a symbol or string, the default namespace in which to look up the class. If the :class option is not specified as a symbol or string, this option is ignored. This namespace can be overridden by starting the string or symbol with ::.

  Foo::Album.many_to_one :artist, class: "Artist" # Uses Artist
  Foo::Album.many_to_one :artist, class: "Artist", class_namespace: 'Foo' # Uses Foo::Artist
  Foo::Album.many_to_one :artist, class: "Foo::Artist", class_namespace: 'Foo' # Uses Foo::Foo::Artist
  Foo::Album.many_to_one :artist, class: "::Artist", class_namespace: 'Foo' # Uses Artist
  Foo::Album.many_to_one :artist, class: "::Foo::Artist", class_namespace: 'Foo' # Uses Foo::Artist

==== :methods_module

The module that the methods created by the association will be placed into. Defaults to the module containing the model's columns. Any module given to this option is not included in the model's class automatically, so you are responsible for doing that manually.

This is only useful in rare cases, such as when a plugin that adds associations depends on another plugin that defines instance methods of the same name. In that case, the instance methods of the dependent plugin would override the association methods created by the main plugin.
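A minimal sketch of supplying your own module (remember that you must include it yourself):

  mod = Module.new
  Album.many_to_one :artist, methods_module: mod
  Album.include(mod) # the association methods are now available on Album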
==== :eager_limit_strategy

This setting determines what strategy to use for eager loading the associations that use the :limit setting to limit the number of returned records. You can't use LIMIT directly, since you want a limit for each group of associated records, not a LIMIT on the total number of records returned by the dataset.

In general, Sequel picks an appropriate strategy, so it is not usually necessary to specify a strategy. You can specify true for this option to have Sequel choose which strategy to use (this is the default). You can specify a symbol to manually choose a strategy. The available strategies are:

:union :: Uses one or more UNION queries with a subquery for each record you are eagerly loading for (this is the default strategy).
:distinct_on :: Uses DISTINCT ON to ensure only the first matching record is loaded (only used for one_*_one associations without offsets on PostgreSQL).
:window_function :: Uses a ROW_NUMBER window function to ensure the correctly limited/offset records are returned.
:ruby :: Uses ruby array slicing to emulate database limiting/offsetting.

==== :subqueries_per_union

The number of subqueries per union query to use when eager loading for a limited association using a union strategy. This defaults to 40, but the optimum number depends on the database in use and the latency between the database and the application.

==== :filter_limit_strategy

The strategy to use when filtering by limited associations. In general Sequel will choose either a :distinct_on, :window_function, or :correlated_subquery strategy based on the association type and what the database supports, but you can override that if necessary using this option.

= bin/sequel

bin/sequel is the name used to refer to the "sequel" command line tool that ships with the sequel gem. By default, bin/sequel provides an IRB shell with the +DB+ constant set to a Sequel::Database object created using the database connection string provided on the command line. For example, to connect to a new in-memory SQLite database using the sqlite adapter, you can use the following:

  sequel sqlite:/

This is very useful for quick testing of ideas, and does not affect the environment, since the in-memory SQLite database is destroyed when the program exits.

== Running from a git checkout

If you've installed the sequel gem, then just running "sequel" should load the program, since rubygems should place the sequel binary in your load path. However, if you want to run bin/sequel from the root of a repository checkout, you should probably do:

  ruby -I lib bin/sequel

== Choosing the Database to Connect to

=== Connection String

In general, you probably want to provide a connection string argument to bin/sequel, indicating the adapter and database connection information you want to use. For example:

  sequel sqlite:/
  sequel postgres://user:pass@host/database_name
  sequel mysql2://user:pass@host/database_name

See the {Connecting to a database guide}[rdoc-ref:doc/opening_databases.rdoc] for more details about and examples of connection strings.

=== YAML Connection File

Instead of specifying the database connection using a connection string, you can provide the path to a YAML configuration file containing the connection information. This YAML file can contain a single options hash, or it can contain a nested hash, where the top-level hash uses environment keys with hash values for each environment.
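For example, a nested configuration file might look like this (illustrative values; the keys are standard Sequel connection options):

  development:
    adapter: postgres
    host: localhost
    database: blog_development
    user: user
    password: secret
  production:
    adapter: postgres
    host: db.example.com
    database: blog_production
    user: user
    password: secret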
Using the -e option with a yaml connection file, you can choose which environment to use if using a nested hash:

  sequel -e production config/database.yml

Note that bin/sequel does not directly support ActiveRecord YAML configuration files, as they use different names for some options.

=== Mock Connection

If you don't provide a connection string or YAML connection file, Sequel will start with a mock database. The mock database allows you to play around with Sequel without any database at all, and can be useful if you just want to test things out and generate SQL without actually getting results from a database.

  sequel

Sequel also has the ability to use the mock adapter with database-specific syntax, allowing you to pretend you are connecting to a specific type of database without actually connecting to one. To do that, you need to use a connection string:

  sequel mock://postgres

== Not Just an IRB shell

bin/sequel is not just an IRB shell, it can also do far more.

=== Execute Code

bin/sequel can also be used to execute other ruby files with +DB+ preset to the database given on the command line:

  sequel postgres://host/database_name path/to/some_file.rb

On modern versions of Linux, this means that you can use bin/sequel in a shebang line:

  #!/path/to/bin/sequel postgres://host/database_name

If you want to quickly execute a small piece of ruby code, you can use the -c option:

  sequel -c "p DB.tables" postgres://host/database_name

Similarly, if data is piped into bin/sequel, it will be executed:

  echo "p DB.tables" | sequel postgres://host/database_name

=== Migrate Databases

With the -m option, Sequel will migrate the database given using the migration directory provided by -m:

  sequel -m /path/to/migrations/dir postgres://host/database

You can use the -M option to set the version to migrate to:

  sequel -m /path/to/migrations/dir -M 3 postgres://host/database

See the {migration guide}[rdoc-ref:doc/migration.rdoc] for more details about migrations.

=== Dump Schemas

Using the -d or -D options, Sequel will dump the database's schema in Sequel migration format to the standard output:

  sequel -d postgres://host/database

To save this information to a file, use a standard shell redirection:

  sequel -d postgres://host/database > /path/to/migrations/dir/001_base_schema.rb

The -d option dumps the migration in database-independent format, while the -D option dumps it in database-specific format.

Note that the support for dumping schema is fairly limited. It doesn't handle database views, functions, triggers, schemas, partial indexes, functional indexes, and many other things. You should probably use the database specific tools to handle those.

The -S option dumps the schema cache for all tables in the database, which can speed up the usage of Sequel with models when using the schema_caching extension. You should provide this option with the path to which to dump the schema:

  sequel -S /path/to/schema_cache.db postgres://host/database

=== Copy Databases

Using the -C option, Sequel can copy the contents of one database to another, even between different database types. Using this option, you provide two connection strings on the command line:

  sequel -C mysql://host1/database postgres://host2/database2

This copies the table structure, table data, indexes, and foreign keys from the MySQL database to the PostgreSQL database.

Note that the support for copying is fairly limited. It doesn't handle database views, functions, triggers, schemas, partial indexes, functional indexes, and many other things. Also, the data type conversion may not be exactly what you want. It is best designed for quick conversions and testing.
For serious production use, use the database's tools to copy databases for the same database type, and for different database types, use the Sequel API.

== Other Options

Other options not mentioned above are explained briefly here.

=== -E

-E logs all SQL queries to the standard output, so you can see all SQL that Sequel is sending the database.

=== -I include_directory

-I is similar to ruby -I, and specifies an additional $LOAD_PATH directory.

=== -l log_file

-l is similar to -E, but logs all SQL queries to the given file.

=== -L load_directory

-L loads all *.rb files under the given directory. This is usually used to load Sequel::Model classes into bin/sequel.

=== -N

-N skips testing the connection when creating the Database object. This is rarely needed.

=== -r require_lib

-r is similar to ruby -r, requiring the given library.

=== -t

-t tells bin/sequel to output full backtraces in the case of an error, which can aid in debugging.

=== -h

-h prints the usage information for bin/sequel.

=== -v

-v prints the Sequel version in use.

= Cheat Sheet

== Open a database

  require 'sequel'

  DB = Sequel.sqlite('my_blog.db')
  DB = Sequel.connect('postgres://user:password@localhost/my_db')
  DB = Sequel.postgres('my_db', user: 'user', password: 'password', host: 'localhost')
  DB = Sequel.ado('mydb')

== Open an SQLite memory database

Without a filename argument, the sqlite adapter will set up a new sqlite database in memory.

  DB = Sequel.sqlite

== Logging SQL statements

  require 'logger'
  DB = Sequel.sqlite(loggers: [Logger.new($stdout)])
  # or
  DB.loggers << Logger.new($stdout)

== Using raw SQL

  DB.run "CREATE TABLE users (name VARCHAR(255) NOT NULL, age INT(3) NOT NULL)"
  dataset = DB["SELECT age FROM users WHERE name = ?", name]
  dataset.map(:age)
  DB.fetch("SELECT name FROM users") do |row|
    p row[:name]
  end

== Create a dataset

  dataset = DB[:items]
  dataset = DB.from(:items)

== Most dataset methods are chainable

  dataset = DB[:managers].where(salary: 5000..10000).order(:name, :department)

== Insert rows

  dataset.insert(name: 'Sharon', grade: 50)

== Retrieve rows

  dataset.each{|r| p r}
  dataset.all # => [{...}, {...}, ...]
  dataset.first # => {...}
  dataset.last # => {...}

== Update/Delete rows

  dataset.exclude(:active).delete
  dataset.where{price < 100}.update(active: true)
  dataset.where(:active).update(price: Sequel[:price] * 0.90)

== Merge rows

  dataset.
    merge_using(:table, col1: :col2).
    merge_insert(col3: :col4).
    merge_delete{col5 > 30}.
    merge_update(col3: Sequel[:col3] + :col4)
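On databases that support MERGE, this generates roughly the following SQL (a sketch; exact syntax varies by database, and the target table is the dataset's table):

  # MERGE INTO items USING table ON (col1 = col2)
  # WHEN NOT MATCHED THEN INSERT (col3) VALUES (col4)
  # WHEN MATCHED AND (col5 > 30) THEN DELETE
  # WHEN MATCHED THEN UPDATE SET col3 = (col3 + col4)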
merge_update(col3: Sequel[:col3] + :col4) == Datasets are Enumerable dataset.map{|r| r[:name]} dataset.map(:name) # same as above dataset.inject(0){|sum, r| sum + r[:value]} dataset.sum(:value) # better == Filtering (see also {Dataset Filtering}[rdoc-ref:doc/dataset_filtering.rdoc]) === Equality dataset.where(name: 'abc') === Inequality dataset.where{value > 100} dataset.exclude{value <= 100} === Inclusion dataset.where(value: 50..100) dataset.where{(value >= 50) & (value <= 100)} dataset.where(value: [50,75,100]) dataset.where(id: other_dataset.select(:other_id)) === Subselects as scalar values dataset.where{price > dataset.select(avg(price) + 100)} === LIKE/Regexp DB[:items].where(Sequel.like(:name, 'AL%')) DB[:items].where(name: /^AL/) === AND/OR/NOT DB[:items].where{(x > 5) & (y > 10)} # SELECT * FROM items WHERE ((x > 5) AND (y > 10)) DB[:items].where(Sequel.or(x: 1, y: 2) & Sequel.~(z: 3)) # SELECT * FROM items WHERE (((x = 1) OR (y = 2)) AND (z != 3)) === Mathematical operators DB[:items].where{x + y > z} # SELECT * FROM items WHERE ((x + y) > z) DB[:items].where{price - 100 < avg(price)} # SELECT * FROM items WHERE ((price - 100) < avg(price)) === Raw SQL Fragments dataset.where(Sequel.lit('id= 1')) dataset.where(Sequel.lit('name = ?', 'abc')) dataset.where(Sequel.lit('value IN ?', [50,75,100])) dataset.where(Sequel.lit('price > (SELECT avg(price) + 100 FROM table)')) == Ordering dataset.order(:kind) # kind dataset.reverse(:kind) # kind DESC dataset.order(Sequel.desc(:kind), :name) # kind DESC, name == Limit/Offset dataset.limit(30) # LIMIT 30 dataset.limit(30, 10) # LIMIT 30 OFFSET 10 dataset.limit(30).offset(10) # LIMIT 30 OFFSET 10 == Joins DB[:items].left_outer_join(:categories, id: :category_id) # SELECT * FROM items # LEFT OUTER JOIN categories ON categories.id = items.category_id DB[:items].join(:categories, id: :category_id). 
join(:groups, id: Sequel[:items][:group_id]) # SELECT * FROM items # INNER JOIN categories ON categories.id = items.category_id # INNER JOIN groups ON groups.id = items.group_id == Aggregate functions methods dataset.count #=> record count dataset.max(:price) dataset.min(:price) dataset.avg(:price) dataset.sum(:stock) dataset.group_and_count(:category).all dataset.select_group(:category).select_append{avg(:price)} == SQL Functions / Literals dataset.update(updated_at: Sequel.function(:NOW)) dataset.update(updated_at: Sequel.lit('NOW()')) dataset.update(updated_at: Sequel.lit("DateValue('1/1/2001')")) dataset.update(updated_at: Sequel.function(:DateValue, '1/1/2001')) == Schema Manipulation DB.create_table :items do primary_key :id String :name, unique: true, null: false TrueClass :active, default: true foreign_key :category_id, :categories DateTime :created_at, default: Sequel::CURRENT_TIMESTAMP, index: true index [:category_id, :active] end DB.drop_table :items == Aliasing DB[:items].select(Sequel[:name].as(:item_name)) DB[:items].select(Sequel.as(:name, :item_name)) DB[:items].select{name.as(:item_name)} # SELECT name AS item_name FROM items DB[Sequel[:items].as(:items_table)].select{items_table[:name].as(:item_name)} # SELECT items_table.name AS item_name FROM items AS items_table == Transactions DB.transaction do # BEGIN dataset.insert(first_name: 'Inigo', last_name: 'Montoya') dataset.insert(first_name: 'Farm', last_name: 'Boy') end # COMMIT Transactions are reentrant: DB.transaction do # BEGIN DB.transaction do dataset.insert(first_name: 'Inigo', last_name: 'Montoya') end end # COMMIT Transactions are aborted if an error is raised: DB.transaction do # BEGIN raise "some error occurred" end # ROLLBACK issued and the error is re-raised Transactions can also be aborted by raising Sequel::Rollback: DB.transaction do # BEGIN raise(Sequel::Rollback) end # ROLLBACK issued and no error raised Savepoints can be used if the database supports it: DB.transaction do dataset.insert(first_name: 'Farm', last_name: 'Boy') # Inserted DB.transaction(savepoint: true) do # This savepoint is rolled back dataset.insert(first_name: 'Inigo', last_name: 'Montoya') # Not inserted raise(Sequel::Rollback) end dataset.insert(first_name: 'Prince', last_name: 'Humperdink') # Inserted end == Retrieving SQL dataset.sql # "SELECT * FROM items" dataset.insert_sql(a: 1) # "INSERT INTO items (a) VALUES (1)" dataset.update_sql(a: 1) # "UPDATE items SET a = 1" dataset.delete_sql # "DELETE FROM items" == Basic introspection dataset.columns # => [:id, :name, ...] DB.tables # => [:items, ...] DB.views # => [:new_items, ...] DB.schema(:items) # => [[:id, {:type=>:integer, ...}], [:name, {:type=>:string, ...}], ...] DB.indexes(:items) # => {:index_name => {:columns=>[:a], :unique=>false}, ...} DB.foreign_key_list(:items) # => [{:name=>:items_a_fk, :columns=>[:a], :key=>[:id], :table=>:other_table}, ...] sequel-5.63.0/doc/code_order.rdoc000066400000000000000000000074251434214120600166330ustar00rootroot00000000000000= Code Order In Sequel, the order in which code is executed during initialization is important. This guide provides the recommended way to order your Sequel code. Some of these guidelines are not strictly necessary, but others are, and this guide will be specific about which are strictly necessary. == Require Sequel This is sort of a no-brainer, but you need to require the library first. This is a strict requirement, none of the other code can be executed unless the library has been required first. 
Example: require 'sequel' == Add Global Extensions Global extensions are loaded with Sequel.extension, and affect other parts of Sequel or the general ruby environment. It's not necessary to load them first, but it is a recommended practice. Example: Sequel.extension :blank == Add Extensions Applied to All Databases/Datasets If you want database or datasets extensions applied to all databases and datasets, you must use Sequel::Database.extension to load the extension before connecting to a database. If you connect to a database before using Sequel::Database.extension, it will not have that extension loaded. Example: Sequel::Database.extension :columns_introspection == Connect to Databases Connecting to a database is required before running any queries against that database, or creating any datasets or models. You cannot create model classes without having a database object created first. The convention for an application with a single Database instance is to store that instance in a constant named DB. Example: DB = Sequel.connect('postgres://user:pass@host/database') == Add Extensions Specific to a Database or All Datasets in that Database If you want specific databases to use specific extensions, or have all datasets in that database use a specific extension, you need to load that extension into the database after creating it using Sequel::Database#extension. Example: DB.extension :pg_array == Configure Global Model Behavior If you want to change the configuration for all model classes, you must do so before loading your model classes, as configuration is copied into the subclass when model subclasses are created. Example: Sequel::Model.raise_on_save_failure = false == Add Global Model Plugins If you want to load a plugin into all models classes, you must do so before loading your model classes, as plugin specific data may need to be copied into the subclass when model subclasses are created. Example: Sequel::Model.plugin :prepared_statements == Load Model Classes After you have established a database connection, and configured your global model configuration and global plugins, you can load your model classes. It's recommended to have a separate file for each model class, unless the model classes are very simple. Example: Dir['./models/*.rb'].each{|f| require f} == Finalize Associations and Freeze Model Classes and Database After all the models have been setup, you can finalize the associations. This can speed up association reflection methods by doing a lookup in advance to find the associated class, and cache related association information in the association itself. Additionally, in production and testing, you should freeze the model classes and Database instance, so that you can detect unsafe runtime modification of the configuration: model_classes.each(&:finalize_associations) model_classes.each(&:freeze) DB.freeze The `subclasses` plugin can be used to keep track of all model classes that have been setup in your application. Finalizing their associations and freezing them can easily be achieved through the plugin: # Register the plugin before setting up the models Sequel::Model.plugin :subclasses # ... 
setup models

  # Now finalize associations & freeze models by calling the plugin:
  Sequel::Model.freeze_descendents

= Sequel's Core Extensions

== Background

Historically, Sequel added methods to many of the core classes, and usage of those methods was the primary and recommended way to use Sequel.  For example:

  DB[:table].select(:column.cast(Integer)). # Symbol#cast
    where(:column.like('A%')).              # Symbol#like
    order({1=>2}.case(0, :a))               # Hash#case

While Sequel never overrode any methods defined by ruby, it is possible that other libraries could define the same methods that Sequel defines, which could cause problems.  Also, some rubyists do not like using libraries that add methods to the core classes.

Alternatives for the core extension methods were added to Sequel, so the query above could be written as:

  DB[:table].select(Sequel.cast(:column, Integer)).
    where(Sequel.like(:column, 'A%')).
    order(Sequel.case({1=>2}, 0, :a))

or with virtual rows:

  DB[:table].select{column.cast(Integer)}.
    where{column.like('A%')}.
    order(Sequel.case({1=>2}, 0, :a))

Almost all of the core extension methods have a replacement on the Sequel module.  So it is now up to the user which style to use.  Using the methods on the Sequel module results in slightly more verbose code, but allows the code to work without modifications to the core classes.

== Issues

There is no recommendation on whether the core_extensions should be used or not.  It is very rare that any of the methods added by core_extensions actually causes a problem, but some of them can make it more difficult to find other problems.  For example, if you type:

  do_something if value | other_value

while meaning to type:

  do_something if value || other_value

and value is a Symbol, instead of a NoMethodError being raised because Symbol#| is not implemented by default, value | other_value will return a Sequel expression object, which the +if+ statement will treat as true, and do_something will be called.

== Usage

All of Sequel's extensions to the core classes are stored in Sequel's core_extensions extension, which you can load via:

  Sequel.extension :core_extensions

== No Internal Dependency

Sequel has no internal dependency on the core extensions.  This includes Sequel's core, Sequel::Model, and all plugins and extensions that ship with Sequel.  However, it is possible that external plugins and extensions will depend on the core extensions.  Such plugins and extensions should be updated so that they no longer depend on the core extensions.

== Refinements

Most of these extensions can be added on a per-file basis using refinements (if you are using Ruby 2.0+).  To use refinements, first load them:

  Sequel.extension :core_refinements

Then for each file where you want to use the refinements:

  using Sequel::CoreRefinements

== Core Extension Methods

This section will briefly describe all of the methods added to the core classes, and what the alternative method is that doesn't require the core extensions.

=== Symbol & String

==== as

Symbol#as and String#as return Sequel aliased expressions using the provided alias:

  :a.as(:b)  # SQL: a AS b
  'a'.as(:b) # SQL: 'a' AS b

Alternatives:

  Sequel[:a].as(:b)
  Sequel.as(:a, :b)

==== cast

Symbol#cast and String#cast return Sequel cast expressions for typecasting in the database:

  :a.cast(Integer)  # SQL: CAST(a AS integer)
  'a'.cast(Integer) # SQL: CAST('a' AS integer)

Alternatives:

  Sequel[:a].cast(Integer)
  Sequel.cast(:a, Integer)

==== cast_numeric

Symbol#cast_numeric and String#cast_numeric return Sequel cast expressions for typecasting in the database, defaulting to integers, where the returned expression is treated as a numeric value:

  :a.cast_numeric         # SQL: CAST(a AS integer)
  'a'.cast_numeric(Float) # SQL: CAST('a' AS double precision)

Alternative:

  Sequel[:a].cast_numeric
  Sequel.cast_numeric(:a)

==== cast_string

Symbol#cast_string and String#cast_string return Sequel cast expressions for typecasting in the database, defaulting to strings, where the returned expression is treated as a string value:

  :a.cast_string         # SQL: CAST(a AS varchar(255))
  'a'.cast_string(:text) # SQL: CAST('a' AS text)

Alternatives:

  Sequel[:a].cast_string
  Sequel.cast_string(:a)

=== Symbol

==== identifier

Symbol#identifier wraps the symbol in a Sequel identifier object.  If symbol splitting is enabled (no longer the default), it also makes sure the symbol will not be split.  If symbol splitting is disabled (the default), there is little reason to use this.

  :column.identifier # SQL: column

Alternatives:

  Sequel[:column]
  Sequel.identifier(:column)

==== asc

Symbol#asc is used to define an ascending order on a column.  It exists mostly for consistency with #desc, since ascending is the default order:

  :a.asc # SQL: a ASC

Alternatives:

  Sequel[:a].asc
  Sequel.asc(:a)

==== desc

Symbol#desc is used to define a descending order on a column.  The returned value is usually passed to one of the dataset order methods.

  :a.desc # SQL: a DESC

Alternatives:

  Sequel[:a].desc
  Sequel.desc(:a)

==== +, -, *, /

The standard mathematical operators are defined on Symbol, and return a Sequel numeric expression object representing the operation:

  :a + :b  # SQL: a + b
  :a - :b  # SQL: a - b
  :a * :b  # SQL: a * b
  :a / :b  # SQL: a / b
  :a ** :b # SQL: power(a, b)

Sequel also supports ruby's coercion protocols on symbols (note that this does not work when using refinements):

  1 + :b # SQL: 1 + b

Alternatives:

  Sequel[:a] + :b
  Sequel[:a] - :b
  Sequel[:a] * :b
  Sequel[:a] / :b
  Sequel[:a] ** :b
  Sequel.+(:a, :b)
  Sequel.-(:a, :b)
  Sequel.*(:a, :b)
  Sequel./(:a, :b)
  Sequel.**(:a, :b)

==== *

The * operator is overloaded on Symbol such that if it is called with no arguments, it represents a selection of all columns in the table:

  :a.* # SQL: a.*

Alternative:

  Sequel[:a].*

==== qualify

Symbol#qualify qualifies the identifier (e.g. a column) with another identifier (e.g. a table):

  :column.qualify(:table) # SQL: table.column

Alternative:

  Sequel[:table][:column]

Note the reversed order of the arguments.  For the Symbol#qualify method, the argument is the qualifier, while for Sequel[][], the first [] is the qualifier, and the second [] is the identifier.
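As a quick sanity check that the two forms are equivalent, you can compare their literalizations; a minimal sketch, assuming the core_extensions extension is loaded and using a mock database so no real connection is needed:

  Sequel.extension :core_extensions
  db = Sequel.mock  # mock database, only used here to generate SQL
  db.literal(:column.qualify(:table)) == db.literal(Sequel[:table][:column])
  # => true -- both forms literalize to the same qualified identifier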
==== like Symbol#like returns a case sensitive LIKE expression between the identifier and the given argument: :a.like('b%') # SQL: a LIKE 'b%' ESCAPE '\' Alternatives: Sequel[:a].like('b%') Sequel.like(:a, 'b%') ==== ilike Symbol#ilike returns a case insensitive LIKE expression between the identifier and the given argument: :a.ilike('b%') # SQL: a ILIKE 'b%' ESCAPE '\' Alternatives: Sequel[:a].ilike('b%') Sequel.ilike(:a, 'b%') ==== sql_subscript Symbol#sql_subscript returns a Sequel expression representing an SQL array access: :a.sql_subscript(1) # SQL: a[1] Alternatives: Sequel[:a].sql_subscript(1) Sequel.subscript(:a, 1) ==== extract Symbol#extract does a datetime part extraction from the receiver: :a.extract(:year) # SQL: extract(year FROM a) Alternatives: Sequel[:a].extract(:year) Sequel.extract(:year, :a) ==== sql_boolean, sql_number, sql_string These Symbol methods are used to force the treating of the object as a specific SQL type, instead of as a general SQL type. For example: :a.sql_boolean + 1 # NoMethodError :a.sql_number << 1 # SQL: a << 1 :a.sql_string + 'a' # SQL: a || 'a' Alternatives: Sequel[:a].sql_boolean Sequel[:a].sql_number Sequel[:a].sql_string ==== sql_function Symbol#sql_function returns an SQL function call expression object: :now.sql_function # SQL: now() :sum.sql_function(:a) # SQL: sum(a) :concat.sql_function(:a, :b) # SQL: concat(a, b) Alternatives: Sequel[:sum].function(:a) Sequel.function(:sum, :a) === String ==== lit String#lit creates a literal string, using placeholders if any arguments are given. Literal strings are not escaped, they are treated as SQL code, not as an SQL string: 'a'.lit # SQL: a '"a" = ?'.lit(1) # SQL: "a" = 1 Alternatives: Sequel.lit('a') Sequel.lit('a = ?', 1) ==== to_sequel_blob String#to_sequel_blob returns the string wrapper in Sequel blob object. Often blobs need to be handled differently than regular strings by the database adapters. "a\0".to_sequel_blob # SQL: X'6100' Alternative: Sequel.blob("a\0") === Hash, Array, & Symbol ==== ~ Array#~, Hash#~, and Symbol#~ treat the receiver as a conditions specifier, not matching all of the conditions: ~{a: 1, b: [2, 3]} # SQL: a != 1 OR b NOT IN (2, 3) ~[[:a, 1], [:b, [1, 2]]] # SQL: a != 1 OR b NOT IN (1, 2) Alternatives: ~Sequel[a: 1, b: [2, 3]] Sequel.~(a: 1, b: [2, 3]) === Hash & Array ==== case Array#case and Hash#case return an SQL CASE expression, where the keys are conditions and the values are results: {{a: [2,3]} => 1}.case(0) # SQL: CASE WHEN a IN (2, 3) THEN 1 ELSE 0 END [[{a: [2,3]}, 1]].case(0) # SQL: CASE WHEN a IN (2, 3) THEN 1 ELSE 0 END Alternative: Sequel.case({{a: [2,3]}=>1}, 0) ==== sql_expr Array#sql_expr and Hash#sql_expr treat the receiver as a conditions specifier, matching all of the conditions in the array. 
  {a: 1, b: [2, 3]}.sql_expr         # SQL: a = 1 AND b IN (2, 3)
  [[:a, 1], [:b, [2, 3]]].sql_expr   # SQL: a = 1 AND b IN (2, 3)

Alternative:

  Sequel[a: 1, b: [2, 3]]

==== sql_negate

Array#sql_negate and Hash#sql_negate treat the receiver as a conditions specifier, matching none of the conditions in the array:

  {a: 1, b: [2, 3]}.sql_negate        # SQL: a != 1 AND b NOT IN (2, 3)
  [[:a, 1], [:b, [2, 3]]].sql_negate  # SQL: a != 1 AND b NOT IN (2, 3)

Alternative:

  Sequel.negate(a: 1, b: [2, 3])

==== sql_or

Array#sql_or and Hash#sql_or treat the receiver as a conditions specifier, matching any of the conditions in the array:

  {a: 1, b: [2, 3]}.sql_or        # SQL: a = 1 OR b IN (2, 3)
  [[:a, 1], [:b, [2, 3]]].sql_or  # SQL: a = 1 OR b IN (2, 3)

Alternative:

  Sequel.or(a: 1, b: [2, 3])

=== Array

==== sql_value_list

Array#sql_value_list wraps the array in an array subclass, which Sequel will always treat as a value list and not a conditions specifier.  By default, Sequel treats arrays of two element arrays as a conditions specifier.

  DB[:a].where('(a, b) IN ?', [[1, 2], [3, 4]])
  # SQL: (a, b) IN ((1 = 2) AND (3 = 4))

  DB[:a].where('(a, b) IN ?', [[1, 2], [3, 4]].sql_value_list)
  # SQL: (a, b) IN ((1, 2), (3, 4))

Alternative:

  Sequel.value_list([[1, 2], [3, 4]])

==== sql_string_join

Array#sql_string_join joins all of the elements in the array in an SQL string concatenation expression:

  [:a].sql_string_join            # SQL: a
  [:a, :b].sql_string_join        # SQL: a || b
  [:a, 'b'].sql_string_join       # SQL: a || 'b'
  ['a', :b].sql_string_join(' ')  # SQL: 'a' || ' ' || b

Alternative:

  Sequel.join(['a', :b], ' ')

=== Hash & Symbol

==== &

Hash#& and Symbol#& return a Sequel boolean expression, matching the condition specified by the receiver and the condition specified by the given argument:

  :a & :b        # SQL: a AND b
  {a: 1} & :b    # SQL: a = 1 AND b
  {a: true} & :b # SQL: a IS TRUE AND b

Alternatives:

  Sequel[a: 1] & :b
  Sequel.&({a: 1}, :b)

==== |

Hash#| and Symbol#| return a Sequel boolean expression, matching the condition specified by the receiver or the condition specified by the given argument:

  :a | :b        # SQL: a OR b
  {a: 1} | :b    # SQL: a = 1 OR b
  {a: true} | :b # SQL: a IS TRUE OR b

Alternatives:

  Sequel[a: 1] | :b
  Sequel.|({a: 1}, :b)

= Dataset Basics

== Introduction

Datasets are the primary way Sequel uses to access the database.  While most database libraries have specific support for updating all records or only a single record, Sequel's ability to represent SQL queries themselves as datasets is what gives Sequel most of its power.  This document aims to give a basic introduction to datasets and how to use them.

== Basics

The most basic dataset is the simple selection of all columns in a table:

  ds = DB[:posts] # SELECT * FROM posts

Here, DB represents your Sequel::Database object, and ds is your dataset, with the SQL query it represents below it.

One of the core dataset ideas that should be understood is that datasets are frozen and use a functional style of modification, in which methods called on the dataset return modified copies of the dataset, they don't modify the dataset themselves:

  ds2 = ds.where(id: 1)
  ds2 # SELECT * FROM posts WHERE id = 1
  ds  # SELECT * FROM posts

Note how ds itself is not modified.  This is because ds.where returns a modified copy of ds, instead of modifying ds itself.
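Because datasets are frozen, this functional style is easy to verify for yourself; a minimal sketch, using the +DB+ and +ds+ from above and the public Dataset#opts accessor:

  ds = DB[:posts]
  ds.frozen?       # => true
  ds.where(id: 1)  # builds and returns a new dataset
  ds.opts[:where]  # => nil -- the receiver still has no WHERE clause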
This makes using datasets both thread safe and easy to chain:

  # Thread safe:
  100.times do |i|
    Thread.new do
      ds.where(id: i).first
    end
  end

  # Easy to chain:
  ds3 = ds.select(:id, :name).order(:name).where{id < 100}
  # SELECT id, name FROM posts WHERE id < 100 ORDER BY name

You don't really need to worry about thread safety, but chainability is core to how Sequel is generally used.  Almost all dataset methods that affect the SQL produced return modified copies of the receiving dataset.

Another important thing to realize is that dataset methods that return modified datasets do not execute the dataset's code on the database.  Only dataset methods that return or yield results will execute the code on the database:

  # No SQL queries sent:
  ds3 = ds.select(:id, :name).order(:name).where{id < 100}

  # Until you call a method that returns results
  results = ds3.all

One important consequence of this API style is that if you use a method chain that includes both methods that return modified copies and a method that executes the SQL, the method that executes the SQL should generally be the last method in the chain:

  # Good
  ds.select(:id, :name).order(:name).where{id < 100}.all

  # Bad
  ds.all.select(:id, :name).order(:name).where{id < 100}

This is because all will return an array of hashes, and +select+, +order+, and +where+ are dataset methods, not array methods.

== Methods

Most Dataset methods that users will use can be broken down into two types:

* Methods that return modified datasets
* Methods that execute code on the database

=== Methods that return modified datasets

Most dataset methods fall into this category, which can be further broken down by the clause they affect:

SELECT:: select, select_all, select_append, select_group, select_more
FROM:: from, from_self
JOIN:: join, left_join, right_join, full_join, natural_join, natural_left_join, natural_right_join, natural_full_join, cross_join, inner_join, left_outer_join, right_outer_join, full_outer_join, join_table
WHERE:: where, filter, exclude, or, grep, invert, unfiltered
GROUP:: group, group_by, group_and_count, group_append, select_group, ungrouped
HAVING:: having, exclude_having, invert, unfiltered
ORDER:: order, order_by, order_append, order_prepend, order_more, reverse, reverse_order, unordered
LIMIT/OFFSET:: limit, offset, unlimited
compounds:: union, intersect, except
locking:: for_update, lock_style
common table expressions:: with, with_recursive
other:: distinct, naked, qualify, server, with_sql

=== Methods that execute code on the database

Most other dataset methods commonly used will execute the dataset's SQL on the database:

SELECT (All Records):: all, each, map, as_hash, to_hash_groups, select_map, select_order_map, select_hash, select_hash_groups
SELECT (First Record):: first, last, [], single_record
SELECT (Single Value):: get, single_value
SELECT (Aggregates):: count, avg, max, min, sum
INSERT:: insert, <<, import, multi_insert
UPDATE:: update
DELETE:: delete
other:: columns, columns!, truncate

=== Other methods

See the Sequel::Dataset RDoc for other methods that are less commonly used.

= Dataset Filtering

Sequel is very flexible when it comes to filtering records.  You can specify your conditions as a hash of values to compare against, as ruby code that Sequel translates into SQL expressions, or as an SQL code fragment with optional parameters.
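All three styles can often express the same kind of condition.  As a quick illustration before the details below (assuming an +items+ dataset such as DB[:items]):

  items.where(category: 'ruby')                    # hash of values
  items.where{price < 100}                         # ruby expression (virtual row block)
  items.where(Sequel.lit('category = ?', 'ruby'))  # SQL fragment with a parameter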
== Filtering using a hash

If you just need to compare records against values, you can supply a hash:

  items.where(category: 'ruby').sql
  # "SELECT * FROM items WHERE (category = 'ruby')"

Sequel can check for null values:

  items.where(category: nil).sql
  # "SELECT * FROM items WHERE (category IS NULL)"

Or compare two columns:

  items.where{{x: some_table[:y]}}.sql
  # "SELECT * FROM items WHERE (x = some_table.y)"

And also compare against multiple values:

  items.where(category: ['ruby', 'perl']).sql
  # "SELECT * FROM items WHERE (category IN ('ruby', 'perl'))"

Ranges (both inclusive and exclusive) can also be used:

  items.where(price: 100..200).sql
  # "SELECT * FROM items WHERE (price >= 100 AND price <= 200)"

  items.where(price: 100...200).sql
  # "SELECT * FROM items WHERE (price >= 100 AND price < 200)"

== Filtering using an array

If you need to select multiple items from a dataset, you can supply an array:

  items.where(id: [1, 38, 47, 99]).sql
  # "SELECT * FROM items WHERE (id IN (1, 38, 47, 99))"

== Filtering using expressions

You can pass a block to where (referred to as a virtual row block), which is evaluated in a special context:

  items.where{price * 2 < 50}.sql
  # "SELECT * FROM items WHERE ((price * 2) < 50)"

This works for the standard inequality and arithmetic operators:

  items.where{price + 100 < 200}.sql
  # "SELECT * FROM items WHERE ((price + 100) < 200)"

  items.where{price - 100 > 200}.sql
  # "SELECT * FROM items WHERE ((price - 100) > 200)"

  items.where{price * 100 <= 200}.sql
  # "SELECT * FROM items WHERE ((price * 100) <= 200)"

  items.where{price / 100 >= 200}.sql
  # "SELECT * FROM items WHERE ((price / 100) >= 200)"

  items.where{price ** 2 >= 200}.sql
  # "SELECT * FROM items WHERE (power(price, 2) >= 200)"

You use the overloaded bitwise and (&) and or (|) operators to combine expressions:

  items.where{(price + 100 < 200) & (price * 100 <= 200)}.sql
  # "SELECT * FROM items WHERE (((price + 100) < 200) AND ((price * 100) <= 200))"

  items.where{(price - 100 > 200) | (price / 100 >= 200)}.sql
  # "SELECT * FROM items WHERE (((price - 100) > 200) OR ((price / 100) >= 200))"

To filter by equality, you use the standard hash, which can be combined with other expressions using Sequel.& and Sequel.|:

  items.where{Sequel.&({category: 'ruby'}, (price + 100 < 200))}.sql
  # "SELECT * FROM items WHERE ((category = 'ruby') AND ((price + 100) < 200))"

You can also use the =~ operator:

  items.where{(category =~ 'ruby') & (price + 100 < 200)}.sql
  # "SELECT * FROM items WHERE ((category = 'ruby') AND ((price + 100) < 200))"

This works with other hash values, such as arrays and ranges:

  items.where{Sequel.|({category: ['ruby', 'other']}, (price - 100 > 200))}.sql
  # "SELECT * FROM items WHERE ((category IN ('ruby', 'other')) OR ((price - 100) > 200))"

  items.where{(price =~ (100..200)) & :active}.sql
  # "SELECT * FROM items WHERE ((price >= 100 AND price <= 200) AND active)"

== Filtering using a custom filter string

If you wish to include an SQL fragment as part of a filter, you need to wrap it with +Sequel.lit+ to mark that it is literal SQL code, and pass it to the #where method:

  items.where(Sequel.lit('x < 10')).sql
  # "SELECT * FROM items WHERE x < 10"

In order to prevent SQL injection, you can replace literal values with question marks and supply the values as additional arguments to +Sequel.lit+:

  items.where(Sequel.lit('category = ?', 'ruby')).sql
  # "SELECT * FROM items WHERE category = 'ruby'"

You can also use placeholders with :placeholder and a hash of placeholder values:

  items.where(Sequel.lit('category = :category', category: "ruby")).sql
  # "SELECT * FROM items WHERE category = 'ruby'"

In order to combine AND and OR together, you have a few options:

  items.where(category: nil).or(category: "ruby")
  # SELECT * FROM items WHERE (category IS NULL) OR (category = 'ruby')

This won't work if you add other conditions:

  items.where(name: "Programming in Ruby").where(category: nil).or(category: 'ruby')
  # SELECT * FROM items WHERE ((name = 'Programming in Ruby') AND (category IS NULL)) OR (category = 'ruby')

The OR applies globally and not locally.  To fix this, use & and |:

  items.where(Sequel[name: "Programming in Ruby"] & (Sequel[category: nil] | Sequel[category: "ruby"]))
  # SELECT * FROM items WHERE ((name = 'Programming in Ruby') AND ((category IS NULL) OR (category = 'ruby')))

=== Specifying SQL functions

Sequel also allows you to specify functions by using the Sequel.function method:

  items.literal(Sequel.function(:avg, :price)) # "avg(price)"

If you are specifying a filter/selection/order, you can use a virtual row block:

  items.select{avg(price)}

=== Negating conditions

You can use the exclude method to exclude whole conditions:

  items.exclude(category: 'ruby').sql
  # "SELECT * FROM items WHERE (category != 'ruby')"

  items.exclude(:active).sql
  # "SELECT * FROM items WHERE NOT active"

  items.exclude{price / 100 >= 200}.sql
  # "SELECT * FROM items WHERE ((price / 100) < 200)"

To exclude only parts of conditions, you can use +where+ in combination with Sequel.~ or the ~ method on Sequel expressions:

  items.where{Sequel.&(Sequel.~(category: 'ruby'), (price + 100 < 200))}.sql
  # "SELECT * FROM items WHERE ((category != 'ruby') AND ((price + 100) < 200))"

  items.where{~(category =~ 'ruby') & (price + 100 < 200)}.sql
  # "SELECT * FROM items WHERE ((category != 'ruby') AND ((price + 100) < 200))"

You can also use the !~ method:

  items.where{(category !~ 'ruby') & (price + 100 < 200)}.sql
  # "SELECT * FROM items WHERE ((category != 'ruby') AND ((price + 100) < 200))"

=== Comparing against column references

You can also compare against other columns:

  items.where{credit > debit}.sql
  # "SELECT * FROM items WHERE (credit > debit)"

Or against SQL functions:

  items.where{price - 100 < max(price)}.sql
  # "SELECT * FROM items WHERE ((price - 100) < max(price))"

== String search functions

You can search SQL strings in a case sensitive manner using the Sequel.like method:

  items.where(Sequel.like(:name, 'Acme%')).sql
  # "SELECT * FROM items WHERE (name LIKE 'Acme%' ESCAPE '\')"

You can search SQL strings in a case insensitive manner using the Sequel.ilike method:

  items.where(Sequel.ilike(:name, 'Acme%')).sql
  # "SELECT * FROM items WHERE (name ILIKE 'Acme%' ESCAPE '\')"

You can specify a Regexp as a hash value (or like argument), but this will probably only work on PostgreSQL and MySQL:

  items.where(name: /Acme.*/).sql
  # "SELECT * FROM items WHERE (name ~ 'Acme.*')"

Like can also take more than one argument:

  items.where(Sequel.like(:name, 'Acme%', /Beta.*/)).sql
  # "SELECT * FROM items WHERE ((name LIKE 'Acme%' ESCAPE '\') OR (name ~ 'Beta.*'))"

== String concatenation

You can concatenate SQL strings using Sequel.join:

  items.where(Sequel.join([:name, :comment]).like('Jo%nice%')).sql
  # "SELECT * FROM items WHERE ((name || comment) LIKE 'Jo%nice%' ESCAPE '\')"

Sequel.join also takes a join argument:

  items.where(Sequel.join([:name, :comment], ':').like('John:%nice%')).sql
  # "SELECT * FROM items WHERE ((name || ':' || comment) LIKE 'John:%nice%' ESCAPE '\')"

== Filtering using sub-queries

Datasets can be used as subqueries.  Subqueries can be very useful for filtering records, and many times provide a simpler alternative to table joins.  Subqueries can be used in all forms of filters:

  refs = consumer_refs.where(:logged_in).select(:consumer_id)
  consumers.where(id: refs).sql
  # "SELECT * FROM consumers WHERE (id IN (SELECT consumer_id FROM consumer_refs WHERE logged_in))"

Note that if you are checking for the inclusion of a single column in a subselect, the subselect should only select a single column.

== Using OR instead of AND

By default, if you chain calls to +where+, the conditions get ANDed together.  If you want to use an OR for a condition, you can use the +or+ method:

  items.where(name: 'Food').or(vendor: 1).sql
  # "SELECT * FROM items WHERE ((name = 'Food') OR (vendor = 1))"

= Sequel Extensions

Sequel has an official extension system, for adding global, Database, and Dataset extensions.

== Global Extensions

Global extensions can add or modify the behavior of any part of Sequel.  Technically, they are not limited to affecting Sequel, as they can also modify code outside of Sequel (e.g. the blank extension).  However, extensions that modify things outside of Sequel generally do so only for backwards compatibility.

Global extensions are loaded via Sequel.extension:

  Sequel.extension :named_timezones

All this does is require the relevant extension from sequel/extensions/named_timezones somewhere in the ruby path.  Global extensions are just a simpler, consistent way to require code that modifies Sequel.

== Database Extensions

Database extensions should add or modify the behavior of a single Sequel::Database instance.  They are loaded via Sequel::Database#extension:

  DB.extension :server_block

The first thing that this does is load the relevant extension globally.  However, Database extensions should be structured in a way that loading the relevant extension globally just adds a module with the related behavior, it doesn't modify any other state.

After loading the extension globally, it modifies the related Sequel::Database object to modify its behavior, usually by extending it with a module.

If you want a Database extension loaded into all future Database instances, you can use Sequel::Database.extension:

  Sequel::Database.extension :server_block

All future Sequel::Database instances created afterward will then automatically have the server_block extension loaded.

== Dataset Extensions

Dataset extensions should add or modify the behavior of a single Sequel::Dataset instance.  They are loaded via Sequel::Dataset#extension.  Sequel::Dataset#extension returns a modified copy of the dataset that includes the extension (similar to how most dataset query methods work):

  ds = DB[:a].extension(:columns_introspection)

The first thing loading a Dataset extension does is load the relevant extension globally.  Similar to Database extensions, loading a Dataset extension globally should not affect state other than maybe adding a module.  After loading the extension globally, it returns a modified copy of the Sequel::Dataset with the extension loaded into it.
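As with other dataset query methods, the receiver itself is left untouched; only the returned copy has the extension loaded.  A minimal sketch (assuming any table :a):

  ds = DB[:a]
  ds2 = ds.extension(:columns_introspection)
  ds.equal?(ds2) # => false -- ds2 is a separate, modified copy of ds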
If you want to load an extension into all future datasets for a given Sequel::Database instance, you can also load it as a Database extension: DB.extension :columns_introspection Likewise, if you want to load an extension into all future datasets for all future databases, you can load it via Sequel::Database.extension: Sequel::Database.extension :columns_introspection == Creating Global Extensions If you want to create a global extension, you just need to store your code so that you can require it via sequel/extensions/extension_name. Then users can load it via: Sequel.extension :extension_name It is recommended you only create a global extension if what you want to do would not work as a Database or Dataset extension. == Creating Database Extensions Creating Database extensions is similar to global extensions in terms of creating the file. However, somewhere in the file, you need to call Sequel::Database.register_extension. Usually you would call this with the module that will be added to the related Sequel::Database instance when the extension is loaded. For example, the server_block extension uses something like: Sequel::Database.register_extension(:server_block, Sequel::ServerBlock) The first argument is the name of the extension as a symbol, and the second is the module. In some cases, just extending the Sequel::Database instance with a module is not sufficient. So Sequel::Database.register_extension also accepts a proc instead of a second argument. This proc is called with the Sequel::Database instance, and can then run any related code: Sequel::Database.register_extension(:arbitrary_servers){|db| db.pool.extend(Sequel::ArbitraryServers)} == Creating Dataset Extensions Creating Dataset extensions is very similar to creating Database extensions, but instead of calling Sequel::Database.register_extension, you call Sequel::Dataset.register_extension. In general, you would call this with the module that will be added to the related Sequel::Dataset instance when the extension is loaded. For example, the columns_introspection extension uses something like: Sequel::Dataset.register_extension(:columns_introspection, Sequel::ColumnsIntrospection) The first argument is the name of the extension as a symbol, and the second is the module. When you call the Sequel::Dataset.register_extension method with a module, it in turn calls Sequel::Database.register_extension and adds a Database extension that loads this Dataset extension into all future Datasets created from the Database. You can also call Sequel::Dataset.register_extension with a proc: Sequel::Dataset.register_extension(:extension_name){|ds| } Note that if you use a proc, a corresponding Database extension will not be created automatically (you can still call Sequel::Database.register_extension manually in this case). sequel-5.63.0/doc/fork_safety.rdoc000066400000000000000000000045411434214120600170360ustar00rootroot00000000000000= Fork Safety If you are forking or using a library that forks after you have created a Sequel::Database instance, then you must disconnect database connections before forking. If you don't do this, you can end up with child processes sharing database connections and all sorts of weird behavior, including crashes. 
Sequel will automatically create new connections on an as needed basis in the child processes, so you only need to do the following in the parent process: DB.disconnect Or if you have connections to multiple databases: Sequel::DATABASES.each(&:disconnect) == Puma When using the Puma web server in clustered mode (which is the default behavior in Puma 5+ when using multiple processes), you should disconnect inside the +before_fork+ hook in your Puma config: before_fork do Sequel::DATABASES.each(&:disconnect) end == Unicorn When using the Unicorn web server and preloading the application (+preload_app true+ in the Unicorn config), you should disconnect inside the +before_fork+ hook in the Unicorn config: before_fork do Sequel::DATABASES.each(&:disconnect) end == Passenger In Passenger web server, you should disconnect inside the +starting_worker_process+ event hook: if defined?(PhusionPassenger) PhusionPassenger.on_event(:starting_worker_process) do |forked| Sequel::DATABASES.each(&:disconnect) if forked end end Note that this disconnects after forking instead of before forking. Passenger does not offer a before fork hook. == Spring In Spring application preloader, you should disconnect inside the +after_fork+ hook: if defined?(Spring) Spring.after_fork do Sequel::DATABASES.each(&:disconnect) end end As the method indicates, this disconnects after forking instead of before forking. Spring does not offer a before fork hook. == Resque In Resque, you should disconnect inside the +before_fork+ hook: Resque.before_fork do |job| Sequel::DATABASES.each(&:disconnect) end == Parallel If you're using the Parallel gem with processes, you should disconnect before calling it: Sequel::DATABASES.each(&:disconnect) Parallel.map(['a','b','c'], in_processes: 3) { |one_letter| } == Other Libraries Calling fork For any other library that calls fork, you should disconnect before calling a method that forks: Sequel::DATABASES.each(&:disconnect) SomeLibrary.method_that_forks sequel-5.63.0/doc/mass_assignment.rdoc000066400000000000000000000104161434214120600177130ustar00rootroot00000000000000= Sequel::Model Mass Assignment Most Model methods that take a hash of attribute keys and values, including Model.new, Model.create, Model#set and Model#update are subject to Sequel's mass assignment rules. If you have an instance of a plain Sequel::Model class: class Post < Sequel::Model end post = Post.new and you call a mass assignment method with a hash: post.set(title: 'T', body: 'B') the mass assignment method will go through each key in the hash, append = to it to determine the setter method, and if the setter method is defined and access to it is not restricted, Sequel will call the setter method with the hash value. So if we assume that the posts table has title and body columns, what the above mass assignment call actually does is: post.title=('T') post.body=('B') By default, there are two types of setter methods that are restricted. The first is methods like typecast_on_assignment= and ==, which don't affect columns. These methods cannot be enabled for mass assignment. The second is primary key setters. So if you do: post = Post.new(id: 1) Sequel will raise a Sequel::MassAssignmentRestriction exception, since by default setting the primary key is not allowed. 
To enable use of primary key setters, you need to call +unrestrict_primary_key+ for that model:

  Post.unrestrict_primary_key

If you want to change mass assignment so it ignores attempts to access restricted setter methods, you can do:

  # Global default
  Sequel::Model.strict_param_setting = false

  # Class level
  Post.strict_param_setting = false

  # Instance level
  post.strict_param_setting = false

Since mass assignment by default allows modification of all column values except for primary key columns, it can be a security risk in some cases.  If you are dealing with untrusted input, you are generally going to want to restrict what should be updated.

Sequel has Model#set_fields and Model#update_fields methods, which are designed to be used with untrusted input.  These methods take two arguments, the untrusted hash as the first argument, and a trusted array of field names as the second argument:

  post.set_fields({title: 'T', body: 'B'}, [:title, :body])

Instead of looking at every key in the untrusted hash, +set_fields+ will iterate over the trusted field names, looking each up in the hash, and calling the setter method appropriately with the result.  +set_fields+ basically translates the above method call to:

  post.title=('T')
  post.body=('B')

By using this method, you can be sure that the mass assignment method only sets the fields you expect it to set.

Note that if one of the fields does not exist in the hash:

  post.set_fields({title: 'T'}, [:title, :body])

+set_fields+ will set the value to nil (the default hash value) by default, with behavior equivalent to:

  post.title=('T')
  post.body=(nil)

You can use the :missing option to +set_fields+ to change the behavior:

  post.set_fields({title: 'T'}, [:title, :body], missing: :skip)
  # only calls post.title=('T')

  post.set_fields({title: 'T'}, [:title, :body], missing: :raise)
  # raises Sequel::Error

If you want to set a model level default for the +set_fields+ options, you can use the +default_set_fields_options+ class accessor:

  # Global default
  Sequel::Model.default_set_fields_options[:missing] = :skip

  # Class level
  Post.default_set_fields_options[:missing] = :skip

Here's a table describing Sequel's default mass assignment methods:

Model.new(hash) :: Creates a new model instance, then calls Model#set(hash)
Model.create(hash) :: Calls Model.new(hash).save
Model#set(hash) :: Calls related setter method (unless access is restricted) for each key in the hash, then returns self
Model#update(hash) :: Calls set(hash).save_changes
Model#set_fields(hash, columns, options) :: For each column in columns, looks up related entry in hash, and calls the related setter method
Model#update_fields(hash, columns, options) :: Calls set_fields(hash, columns, options).save_changes

For backwards compatibility, Sequel also ships with whitelist_security and blacklist_security plugins that offer additional mass assignment methods, but it is recommended to use +set_fields+ or +update_fields+ for untrusted input, and the other methods for trusted input.

= Migrations

This guide is based on http://guides.rubyonrails.org/migrations.html

== Overview

Migrations make it easy to alter your database's schema in a systematic manner.  They make it easier to coordinate with other developers and make sure that all developers are using the same database schema.

Migrations are optional; you don't have to use them.
You can always just create the necessary database structure manually using Sequel's schema modification methods or another database tool. However, if you are dealing with other developers, you'll have to send them all of the changes you are making. Even if you aren't dealing with other developers, you generally have to make the schema changes in 3 places (development, testing, and production), and it's probably easier to use the migrations system to apply the schema changes than it is to keep track of the changes manually and execute them manually at the appropriate time. Sequel tracks which migrations you have already run, so to apply migrations you generally need to run Sequel's migrator with bin/sequel -m: sequel -m path/to/migrations postgres://host/database Migrations in Sequel use a DSL via the Sequel.migration method, and inside the DSL, use the Sequel::Database schema modification methods such as +create_table+ and +alter_table+. See the {schema modification guide}[rdoc-ref:doc/schema_modification.rdoc] for details on the schema modification methods you can use. == A Basic Migration Here is a fairly basic Sequel migration: Sequel.migration do up do create_table(:artists) do primary_key :id String :name, null: false end end down do drop_table(:artists) end end This migration has an +up+ block which adds an artist table with an integer primary key named id, and a varchar or text column (depending on the database) named +name+ that doesn't accept +NULL+ values. Migrations should include both up and +down+ blocks, with the +down+ block reversing the change made by up. However, if you never need to be able to migrate down, you can leave out the +down+ block. In this case, the +down+ block just reverses the changes made by up, dropping the table. You can simplify the migration given above by using a reversible migration with a +change+ block: Sequel.migration do change do create_table(:artists) do primary_key :id String :name, null: false end end end The +change+ block acts exactly like an +up+ block. The only difference is that it will attempt to create a +down+ block for you, assuming that it knows how to reverse the given migration. The +change+ block can usually correctly reverse the following methods: * +create_table+ * +create_join_table+ * +create_view+ * +add_column+ * +add_index+ * +rename_column+ * +rename_table+ * +alter_table+ (supporting the following methods in the +alter_table+ block): * +add_column+ * +add_constraint+ * +add_foreign_key+ (with a symbol, not an array) * +add_primary_key+ (with a symbol, not an array) * +add_index+ * +add_full_text_index+ * +add_spatial_index+ * +rename_column+ If you use any other methods, you should create your own +down+ block. In normal usage, when Sequel's migrator runs, it runs the +up+ blocks for all migrations that have not yet been applied. However, you can use the -M switch to specify the version to which to migrate, and if it is lower than the current version, Sequel will run the +down+ block on the appropriate migrations. You are not limited to creating tables inside a migration, you can alter existing tables as well as modify data. 
Let's say your artist database originally only included artists from Sacramento, CA, USA, but now you want to branch out and include artists in any city: Sequel.migration do up do add_column :artists, :location, String from(:artists).update(location: 'Sacramento') end down do drop_column :artists, :location end end This migration adds a +location+ column to the +artists+ table, and sets the +location+ column to 'Sacramento' for all existing artists. It doesn't use a default on the column, because future artists should not be assumed to come from Sacramento. In the +down+ block, it just drops the +location+ column from the +artists+ table, reversing the actions of the up block. Note that when updating the +artists+ table in the update, a plain dataset is used, from(:artists). This may look a little strange, but you need to be aware that inside an up or +down+ block in a migration, self always refers to the Sequel::Database object that the migration is being applied to. Since Database#from creates datasets, using from(:artists) inside the +up+ block creates a dataset on the database representing all columns in the +artists+ table, and updates it to set the +location+ column to 'Sacramento'. You should avoid referencing the Sequel::Database object directly in your migration, and always use self to reference it, otherwise you may run into problems. == The +migration+ extension The migration code is not technically part of the core of Sequel. It's not loaded by default as it is only useful in specific cases. It is one of the extensions that ship with Sequel, which receive the same level of support as Sequel's core. If you want to play with Sequel's migration tools without using the bin/sequel tool, you need to load the migration extension manually: Sequel.extension :migration == Schema methods Migrations themselves do not contain any schema modification methods, but they make it easy to call any of the Sequel::Database modification methods, of which there are many. The main ones are +create_table+ and +alter_table+, but Sequel also comes with numerous other schema modification methods, most of which are shortcuts for +alter_table+ (all of these methods are described in more detail in the {schema modification guide}[rdoc-ref:doc/schema_modification.rdoc]): * add_column * add_index * create_view * drop_column * drop_index * drop_table * drop_view * rename_table * rename_column * set_column_default * set_column_type These methods handle the vast majority of cross database schema modification SQL. If you need to drop down to SQL to execute some database specific code, you can use the +run+ method: Sequel.migration do up{run 'CREATE TRIGGER ...'} down{run 'DROP TRIGGER ...'} end In this case, we are using { and } instead of do and end to define the blocks. Just as before, the +run+ methods inside the blocks are called on the +Database+ object, which just executes the code on the underlying database. == Errors when running migrations Sequel attempts to run migrations inside of a transaction if the database supports transactional DDL statements. On the databases that don't support transactional DDL statements, if there is an error while running a migration, it will not rollback the previous schema changes made by the migration. In that case, you will need to update the database by hand. It's recommended to always run migrations on a test database and ensure they work before running them on any production database. == Transactions You can manually specify to use transactions on a per migration basis. 
For example, if you want to force transaction use for a particular migration, call the transaction method in the Sequel.migration block:

  Sequel.migration do
    transaction
    change do
      # ...
    end
  end

Likewise, you can disable transaction use via no_transaction:

  Sequel.migration do
    no_transaction
    change do
      # ...
    end
  end

This is necessary in some cases, such as when attempting to use CREATE INDEX CONCURRENTLY on PostgreSQL (which supports transactional schema changes, but not that statement inside a transaction).

You can also override the transactions setting at the migrator level, either by forcing transactions even if no_transaction is set, or by disabling transactions all together:

  # Force transaction use
  Sequel::Migrator.run(DB, '/path/to/migrations/dir', :use_transactions=>true)

  # Disable use of transactions
  Sequel::Migrator.run(DB, '/path/to/migrations/dir', :use_transactions=>false)

== Migration files

While you can create migration objects yourself and apply them manually, most of the benefit to using migrations comes from using Sequel's +Migrator+, which is what the bin/sequel -m switch does.  Sequel's +Migrator+ expects that each migration will be in a separate file in a specific directory.  The -m switch requires an argument be specified that is the path to the directory containing the migration files.  For example:

  sequel -m db/migrations postgres://localhost/sequel_test

will look in the db/migrations folder relative to the current directory, and run unapplied migrations on the PostgreSQL database sequel_test running on localhost.

== Two separate migrators

Sequel actually ships with two separate migrators.  One is the +IntegerMigrator+, the other is the +TimestampMigrator+.  They both have plusses and minuses:

=== +IntegerMigrator+

* Simpler, uses migration versions starting with 1
* Doesn't allow duplicate migrations
* Doesn't allow missing migrations by default
* Just stores the version of the last migration run
* Good for single developer or small teams with close communication
* Lower risk of undetected conflicting migrations
* Requires manual merging of simultaneous migrations

=== +TimestampMigrator+

* More complex, uses migration versions where the version should represent a timestamp
* Allows duplicate migrations (since you could have multiple in a given second)
* Allows missing migrations (since you obviously don't have one every second)
* Stores the file names of all applied migrations
* Good for large teams without close communication
* Higher risk of undetected conflicting migrations
* Does not require manual merging of simultaneous migrations

=== Filenames

In order for migration files to work with Sequel, they must be specified as follows:

  version_name.rb

where version is an integer and name is a string which should be a very brief description of what the migration does.  Each migration class should contain 1 and only 1 call to Sequel.migration.

=== +IntegerMigrator+ Filenames

These are valid migration names for the +IntegerMigrator+:

  1_create_artists.rb
  2_add_artist_location.rb

The only problem with this naming format is that if you have more than 9 migrations, the 10th one will look a bit odd:

  1_create_artists.rb
  2_add_artist_location.rb
  ...
  9_do_something.rb
  10_do_something_else.rb

For this reason, it's often best to start with 001 instead of 1, as that means you don't need to worry about that issue until the 1000th migration:

  001_create_artists.rb
  002_add_artist_location.rb
  ...
  009_do_something.rb
  010_do_something_else.rb

Migrations start at 1, not 0.  The migration version number 0 is important though, as it is used to mean that all migrations should be unapplied (i.e. all +down+ blocks run).  In Sequel, you can do that with:

  sequel -m db/migrations -M 0 postgres://localhost/sequel_test

=== +TimestampMigrator+ Filenames

With the +TimestampMigrator+, the version integer should represent a timestamp, though this isn't strictly required.

For example, for 5/10/2010 12:00:00pm, you could use any of the following formats:

  # Date
  20100510_create_artists.rb

  # Date and Time
  20100510120000_create_artists.rb

  # Unix Epoch Time Integer
  1273518000_create_artists.rb

The important thing is that all migration files should be in the same format, otherwise when you update, it'll be difficult to make sure migrations are applied in the correct order, as well as be difficult to unapply some of the affected migrations correctly.

The +TimestampMigrator+ will be used if any filename in the migrations directory has a version greater than 20000101.  Otherwise, the +IntegerMigrator+ will be used.

You can force the use of the +TimestampMigrator+ in the API by calling TimestampMigrator.apply instead of Migrator.apply.

=== How to choose

Basically, unless you need the features provided by the +TimestampMigrator+, stick with the +IntegerMigrator+, as it is simpler and makes it easier to detect possible errors.

For a single developer, the +TimestampMigrator+ has no real benefits, so I would always recommend the +IntegerMigrator+.  When dealing with multiple developers, it depends on the size of the development team, the team's communication level, and the level of overlap between developers.

Let's say Alice works on a new feature that requires a migration at the same time Bob works on a separate feature that requires an unrelated migration.  If both developers are committing to their own private repositories, when it comes time to merge, the +TimestampMigrator+ will not require any manual changes.  That's because Alice will have a migration such as 20100512_do_this.rb and Bob will have one such as 20100512_do_that.rb.

If the +IntegerMigrator+ was used, Alice would have 34_do_this.rb and Bob would have 34_do_that.rb.  When the +IntegerMigrator+ was then run, it would raise an exception due to the duplicate migration version.  The only way to fix it would be to renumber one of the two migrations, and have the affected developer manually modify their database.

So for unrelated migrations, the +TimestampMigrator+ works fine.  However, let's say that the migrations are related, in such a way that if Bob's is run first, Alice's will fail.  In this case, the +TimestampMigrator+ would not raise an error when Bob merges Alice's changes, since Bob ran his migration first.  However, it would raise an error when Alice runs Bob's migration, and could leave the database in an inconsistent state if the database doesn't support transactional schema changes.

With the +TimestampMigrator+, you are trading reliability for convenience.  That's possibly a valid trade, especially if simultaneous related schema changes by separate developers are unlikely, but you should give it some thought before using it.

== Ignoring missing migrations

In some cases, you may want to allow a migration in the database that does not exist in the filesystem (deploying to an older version of code without running a down migration when deploy auto-migrates, for example).  If required, you can pass allow_missing_migration_files: true as an option.
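For example, when running the migrator from Ruby code rather than bin/sequel, the option might be passed like this (a minimal sketch using the migration extension):

  Sequel.extension :migration
  Sequel::Migrator.run(DB, 'db/migrations', allow_missing_migration_files: true)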
This will stop errors from being raised if there are migrations in the database that do not exist in the filesystem. Note that the migrations themselves can still raise an error when using this option, if the database schema isn't in the state the migrations expect it to be in. In general, the allow_missing_migration_files: true option is very risky to use, and should only be used if it is absolutely necessary.

== Modifying existing migrations

Just don't do it.

In general, you should not modify any migration that has been run on the database and been committed to the source control repository, unless the migration contains an error that causes data loss. As long as it is possible to undo the migration without losing data, you should just add another migration that undoes the actions of the previous bad migration, and does the correct action afterward.

The main problem with modifying existing migrations is that you will have to manually modify any databases that ran the migration before it was modified. If you are a single developer, that may be an option, but certainly if you have multiple developers, it's a lot more work.

== Creating a migration

Sequel doesn't come with generators that create migrations for you. However, creating a migration is as simple as creating a file with the appropriate filename in your migrations directory that contains a Sequel.migration call. The minimal do-nothing migration is:

  Sequel.migration{}

However, the migrations you write should contain an +up+ block that does something, and a +down+ block that reverses the changes made by the +up+ block:

  Sequel.migration do
    up{}
    down{}
  end

or they should use the reversible migrations feature with a +change+ block:

  Sequel.migration do
    change{}
  end

== What to put in your migration's +down+ block

It's usually easy to determine what you should put in your migration's +up+ block, as it's whatever change you want to make to the database. The +down+ block is less obvious. In general, it should reverse the changes made by the +up+ block, which means it should execute the opposite of what the +up+ block does in the reverse order in which the +up+ block does it. Here's an example where you are switching from having a single artist per album to multiple artists per album:

  Sequel.migration do
    up do
      # Create albums_artists table
      create_join_table(album_id: :albums, artist_id: :artists)

      # Insert one row in the albums_artists table
      # for each row in the albums table where there
      # is an associated artist
      from(:albums_artists).insert([:album_id, :artist_id],
        from(:albums).select(:id, :artist_id).exclude(artist_id: nil))

      # Drop the now unnecessary column from the albums table
      drop_column :albums, :artist_id
    end

    down do
      # Add the foreign key column back to the artists table
      alter_table(:albums){add_foreign_key :artist_id, :artists}

      # If possible, associate each album with one of the artists
      # it was associated with. This loses information, but
      # there's no way around that.
      from(:albums).update(artist_id: from(:albums_artists).
        select{max(artist_id)}.
        where(album_id: Sequel[:albums][:id])
      )

      # Drop the albums_artists table
      drop_join_table(album_id: :albums, artist_id: :artists)
    end
  end

Note that the operations performed in the +down+ block are performed in the reverse order of how they are performed in the +up+ block. Also note how it isn't always possible to reverse exactly what was done in the +up+ block. You should try to do so as much as possible, but if you can't, you may want to have your +down+ block raise a Sequel::Error exception saying why the migration cannot be reverted.
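For example, here is a minimal sketch of an irreversible migration (the table and column names are illustrative):

  Sequel.migration do
    up do
      drop_column :albums, :artist_id
    end

    down do
      raise Sequel::Error, "cannot restore the data that was in albums.artist_id"
    end
  end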
== Running migrations

You can run migrations using the +sequel+ command line program that comes with Sequel. If you use the -m switch, +sequel+ will run the migrator instead of giving you an IRB session. The -m switch requires an argument that should be a path to a directory of migration files:

  sequel -m relative/path/to/migrations postgres://host/database
  sequel -m /absolute/path/to/migrations postgres://host/database

If you do not provide a -M switch, +sequel+ will migrate to the latest version in the directory. If you provide a -M switch, it should specify an integer version to which to migrate.

  # Migrate all the way down
  sequel -m db/migrations -M 0 postgres://host/database

  # Migrate to version 10 (IntegerMigrator style migrations)
  sequel -m db/migrations -M 10 postgres://host/database

  # Migrate to version 20100510 (TimestampMigrator migrations using YYYYMMDD)
  sequel -m db/migrations -M 20100510 postgres://host/database

Whether migrations use the +up+ or +down+ block depends on the version to which you are migrating. If you don't provide a -M switch, all unapplied migrations will be migrated up. If you provide a -M switch, it will depend on which migrations have already been applied. Applied migrations greater than that version will be migrated down, while unapplied migrations less than or equal to that version will be migrated up.

== Running migrations from a Rake task

You can also incorporate migrations into a Rakefile:

  namespace :db do
    desc "Run migrations"
    task :migrate, [:version] do |t, args|
      require "sequel/core"
      Sequel.extension :migration
      version = args[:version].to_i if args[:version]
      Sequel.connect(ENV.fetch("DATABASE_URL")) do |db|
        Sequel::Migrator.run(db, "db/migrations", target: version)
      end
    end
  end

To migrate to the latest version, run:

  rake db:migrate

This Rake task takes an optional argument specifying the target version. To migrate to version 42, run:

  rake db:migrate[42]

== Verbose migrations

By default, sequel -m operates as a well behaved command line utility should, printing out nothing if there is no error. If you want to see the SQL being executed during a migration, as well as the amount of time that each migration takes, you can use the -E option to +sequel+ to set up a +Database+ logger that logs to +STDOUT+. You can also log that same output to a file using the -l option with a log file name.
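For example (the paths, log file name, and database URL are illustrative):

  # Log SQL and timing information to STDOUT
  sequel -m db/migrations -E postgres://host/database

  # Log the same output to a file instead
  sequel -m db/migrations -l migration.log postgres://host/database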
If you want to include a logger in the rake task above, add a +:logger+ option when calling Sequel.connect:

  require "logger"
  Sequel.connect(ENV.fetch("DATABASE_URL"), logger: Logger.new($stderr))

== Using models in your migrations

Just don't do it.

It can be tempting to use models in your migrations, especially since it's easy to load them at the same time using the -L option to +sequel+. However, this ties your migrations to your models, and makes it so that changes in your models can break old migrations.

With Sequel, it should be easy to use plain datasets to accomplish pretty much anything you would want to accomplish in a migration. Even if you have to copy some code from a model method into a migration itself, it's better than having your migration use models and call model methods.

== Dumping the current schema as a migration

Sequel comes with a +schema_dumper+ extension that dumps the current schema of the database as a migration to +STDOUT+ (which you can redirect to a file using >). This is exposed in the +sequel+ command line tool with the -d and -D switches. -d dumps the schema in a database-independent format, while -D dumps the schema using a non-portable format, useful if you are using nonportable columns such as +inet+ in your database.

Let's say you have an existing database and want to create a migration that would recreate the database's schema:

  sequel -d postgres://host/database > db/migrations/001_start.rb

or using a non-portable format:

  sequel -D postgres://host/database > db/migrations/001_start.rb

The main difference between the two is that -d will use the type methods with the database independent ruby class types, while -D will use the +column+ method with string types.

You can take the migration created by the schema dumper to another computer with an empty database, and attempt to recreate the schema using:

  sequel -m db/migrations postgres://host/database

The schema_dumper extension is quite limited in what types of database objects it supports. In general, it only supports dumping tables, columns, primary key and foreign key constraints, and some indexes. It does not support most table options, CHECK constraints, partial indexes, database functions, triggers, security grants/revokes, and a wide variety of other useful database properties. Be aware of the limitations when using the schema_dumper extension. If you are dumping the schema to restore to the same database type, it is recommended to use your database's dump and restore programs instead of the schema_dumper extension.

== Checking for Current Migrations

In your application code, you may want to check that you are up to date with regard to migrations (i.e. you don't have any unapplied migrations). Sequel offers two separate methods to do that. The first is Sequel::Migrator.check_current. This method raises an exception if there are outstanding migrations that need to be run. The second is Sequel::Migrator.is_current?, which returns true if there are no outstanding migrations, and false if there are outstanding migrations.

If you want to ensure that your application code is up to date, you may want to add the following code after connecting to your database:

  Sequel.extension :migration
  Sequel::Migrator.check_current(DB, '/path/to/migrations')

This will cause your application to raise an error when you start it if you have any outstanding migrations.

== Old-style migration classes

Before the Sequel.migration DSL was introduced, Sequel used classes for migrations:

  Class.new(Sequel::Migration) do
    def up
    end

    def down
    end
  end

or:

  class DoSomething < Sequel::Migration
    def up
    end

    def down
    end
  end

This usage is discouraged in new code, but will continue to be supported indefinitely. It is not recommended to convert old-style migration classes to the Sequel.migration DSL, but it is recommended to use the Sequel.migration DSL for all new migrations.

== Database-specific migrations

While not a recommended practice, it is sometimes necessary to have parts of migrations be database-specific. You can use the Sequel::Database#database_type method to check which database the migration is being run on, and operate accordingly:

  Sequel.migration do
    up do
      if database_type == :mysql
        run 'MySQL specific code'
      else
        run 'Generic SQL code'
      end
    end

    down do
      if database_type == :mysql
        run 'MySQL specific code'
      else
        run 'Generic SQL code'
      end
    end
  end
== Using Database Extensions in Migrations

If you need to use database extensions in migrations (e.g. +:pg_enum+), you should load the extension in the up or down block as appropriate:

  Sequel.migration do
    up do
      extension :pg_enum
      # migration here
    end

    down do
      extension :pg_enum
      # migration here
    end
  end

= Model Dataset Method Design Guide

How you design your model dataset methods can significantly affect the flexibility of your API for your model classes, as well as the performance. The goal of this guide is to provide an example of how to design your model dataset methods for maximum flexibility and performance.

== Flexibility: Use Single Method Per Task

In general, it is recommended that you have a single method per task for maximum flexibility. For example, let's say you need to retrieve all albums released in a given year, ordered by number of units sold descending, and only care about the id, name, and number of units sold. One way to do this is to call the dataset methods directly in your application code (outside the model):

  Album.
    select(:id, :name, :copies_sold).
    where(release_year: params[:year].to_i).
    order(Sequel.desc(:copies_sold)).
    all

One issue with this design is that it ties you to your current database schema, and will make it necessary to change your application code if your schema changes. In general, it is better to encapsulate your code into a dataset method (or a class method, but a dataset method is more flexible):

  class Album < Sequel::Model
    dataset_module do
      def all_albums_released_in_year(year)
        select(:id, :name, :copies_sold).
        where(release_year: year).
        order(Sequel.desc(:copies_sold)).
        all
      end
    end
  end

Then your application code just needs to call your dataset method:

  Album.all_albums_released_in_year(params[:year].to_i)

The advantage of this approach is that you can change your schema at any point in the future, and you should only need to change your model code; you should never need to change other application code.

== Performance

After designing your dataset methods for flexibility, stop. Don't worry about performance until you need to worry about performance. However, assuming you have profiled your application and profiling shows you can benefit from optimizing the above method, you can then consider the performance impact of future design choices.

First, consider that the root cause of the performance issue may not be at the Sequel level; it may be in the database itself. Use +EXPLAIN+ or the equivalent to analyze the query plan for the query in use, and see if there is something you can do to optimize it, such as adding an appropriate index.

Second, assuming the performance issue is at the Sequel level, you need to understand that one of the best ways to improve performance in most ruby code is to reduce the number of objects allocated. Here is the above code with comments showing datasets allocated:

  def all_albums_released_in_year(year)
    select(:id, :name, :copies_sold).  # new dataset allocated
    where(release_year: year).         # new dataset allocated
    order(Sequel.desc(:copies_sold)).  # new dataset allocated
    all
  end

Third, you need to understand that Sequel has optimizations specifically designed to reduce the number of objects allocated, by caching intermediate datasets. Unfortunately, those optimizations do not apply in this case.
The reason for this is that +select+, +where+, and +order+ can potentially receive arbitrary arguments, and enabling caching for them could easily lead to unbounded cache size (denial of service due to memory exhaustion).

To allow intermediate dataset caching to work, you need to signal to Sequel that particular arguments to these methods should be cached, and you can do that by calling methods inside +dataset_module+ blocks such as +select+ and +order+. These methods will add dataset methods to the model that can cache the returned dataset to optimize performance. Here is an example using these methods:

  class Album < Sequel::Model
    dataset_module do
      select :with_name_and_units, :id, :name, :copies_sold
      order :by_units_sold, Sequel.desc(:copies_sold)

      def all_albums_released_in_year(year)
        with_name_and_units.
        by_units_sold.
        where(release_year: year).
        all
      end
    end
  end

Performance aside, this does provide a slightly nicer and more readable internal API, though naming such methods can be problematic.

By calling +select+ and +order+ here, Sequel expects that the created dataset methods may be called more than once on the same dataset, and it knows that the arguments to the underlying +select+ and +order+ methods are fixed, so it can cache the resulting datasets. Let's comment the above example with dataset allocations:

  def all_albums_released_in_year(year)
    with_name_and_units.        # cached dataset returned
    by_units_sold.              # cached dataset returned
    where(release_year: year).  # new dataset allocated
    all
  end

Note that the order of methods here is important. If you instead change the method chain to filter the dataset first, then no caching happens:

  def all_albums_released_in_year(year)
    where(release_year: year).  # new dataset allocated
    with_name_and_units.        # new dataset allocated
    by_units_sold.              # new dataset allocated
    all
  end

This is because any time a new, uncached dataset is returned by a dataset method, all subsequent methods in the method chain cannot benefit from caching.

Usually, when you are designing methods to process data based on user input, the user input affects the rows selected, and not the columns selected or the order in which the rows are returned. Sequel is aware of this and has dataset methods that specifically take user input (arguments), interpret it as a filter condition, and either:

* Return all matching rows in an array (+where_all+)
* Iterate over all matching rows (+where_each+)
* Return the first matching row (+first+)
* Return the first column in the first matching row, assuming only a single column is selected (+where_single_value+)

After calling these methods on a cached dataset a number of times (currently 3), Sequel will automatically build an optimized loader, cache it, and use it for future loads. So the above example changes to:

  def all_albums_released_in_year(year)
    with_name_and_units.           # cached dataset returned
    by_units_sold.                 # cached dataset returned
    where_all(release_year: year)  # cached loader used
  end

This can significantly improve performance, up to 3x for complex method chains that only return a few rows.

So the general advice on designing dataset methods for performance is:

* Use +dataset_module+ methods to create named dataset methods that return cached datasets
* If any filtering is to be done, have it done last using +where_all+, +where_each+, +first+, or +where_single_value+.

By following this advice, you can significantly increase the performance of your model dataset code.
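The same advice applies to the other loader-optimized methods. For example, here is a sketch of a hypothetical method that returns only the best-selling matching album, reusing the same cached datasets:

  def first_album_released_in_year(year)
    with_name_and_units.       # cached dataset returned
    by_units_sold.             # cached dataset returned
    first(release_year: year)  # cached loader used after a few calls
  end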
=== Further Increasing Performance

The best way to further increase performance at the Sequel level is to switch to using prepared statements. This does require more significant changes to the API. Here's an example using prepared statements:

  class Album < Sequel::Model
    ALBUMS_RELEASED_IN_YEAR = select(:id, :name, :copies_sold).
      where(release_year: :$year).
      order(Sequel.desc(:copies_sold)).
      prepare(:all, :all_albums_released_in_year)

    def self.all_albums_released_in_year(year)
      ALBUMS_RELEASED_IN_YEAR.call(year: year)
    end
  end

Note that when using prepared statements, you need to use a class method instead of a dataset method, as the SQL for the prepared statement must be fixed for the class. This limits the flexibility of the method, since you can no longer call it on arbitrary datasets of the class.

= Model Hooks

This guide is based on http://guides.rubyonrails.org/activerecord_validations_callbacks.html

== Overview

Model hooks are used to specify actions that occur at a given point in a model instance's lifecycle, such as before or after the model object is saved, created, updated, destroyed, or validated. There are also around hooks for all types, which wrap the before hooks, the behavior, and the after hooks.

== Basic Usage

Sequel::Model uses instance methods for hooks. To define a hook on a model, you just add an instance method to the model class:

  class Album < Sequel::Model
    def before_create
      self.created_at ||= Time.now
      super
    end
  end

The one important thing to note here is the call to +super+ inside the hook. Whenever you override one of Sequel::Model's methods, you should be calling +super+ to get the default behavior. Many of the plugins that ship with Sequel work by overriding the hook methods and calling +super+. If you use these plugins and override the hook methods but do not call +super+, it's likely the plugins will not work correctly.

== Available Hooks

Sequel calls hooks in the following order when saving/creating a new object (one that does not already exist in the database):

* +around_validation+
* +before_validation+
* +validate+ method called
* +after_validation+
* +around_save+
* +before_save+
* +around_create+
* +before_create+
* INSERT QUERY
* +after_create+
* +after_save+

Sequel calls hooks in the following order when saving an existing object:

* +around_validation+
* +before_validation+
* +validate+ method called
* +after_validation+
* +around_save+
* +before_save+
* +around_update+
* +before_update+
* UPDATE QUERY
* +after_update+
* +after_save+

Note that all of the hook calls are the same, except that +around_create+, +before_create+ and +after_create+ are used for a new object, and +around_update+, +before_update+ and +after_update+ are used for an existing object. Note that +around_save+, +before_save+, and +after_save+ are called in both cases.

Note that the validation hooks are not called if the validate: false option is passed to save. If you call Model#valid? manually, then only the validation hooks are called:

* +around_validation+
* +before_validation+
* +validate+ method called
* +after_validation+
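To summarize which hooks run in each case, here is a short sketch (assuming an album model instance):

  album.save                  # validation hooks, then save and create/update hooks
  album.save(validate: false) # only the save and create/update hooks
  album.valid?                # only the validation hooks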
Sequel calls hooks in the following order when destroying an existing object:

* +around_destroy+
* +before_destroy+
* DELETE QUERY
* +after_destroy+

Note that these hooks are only called when using Model#destroy; they are not called if you use Model#delete.

== Transaction-related Hooks

Sequel::Model no longer offers transaction hooks for model instances. However, you can use the database transaction hooks inside model +before_save+ and +after_save+ hooks:

  class Album < Sequel::Model
    def before_save
      db.after_rollback{rollback_action}
      super
    end

    def after_save
      super
      db.after_commit{commit_action}
    end
  end

== Running Hooks

Sequel does not provide a simple way to turn off the running of save/create/update hooks. If you attempt to save a model object, the save hooks are always called. All model instance methods that modify the database call save in some manner, so you can be sure that if you define the hooks, they will be called when you save the object.

However, you should note that there are plenty of ways to modify the database without saving a model object. One example is by using plain datasets, or one of the model's dataset methods:

  Album.where(name: 'RF').update(copies_sold: Sequel.+(:copies_sold, 1))
  # UPDATE albums SET copies_sold = copies_sold + 1 WHERE name = 'RF'

In this case, the +update+ method is called on the dataset returned by Album.where. Even if there is only a single object with the name RF, this will not call any hooks. If you want model hooks to be called, you need to make sure to operate on a model object:

  album = Album.first(name: 'RF')
  album.update(copies_sold: album.copies_sold + 1)
  # UPDATE albums SET copies_sold = 2 WHERE id = 1

For the destroy hooks, you need to make sure you call +destroy+ on the object:

  album.destroy # runs destroy hooks

== Skipping Hooks

Sequel makes it easy to skip destroy hooks by calling +delete+ instead of +destroy+:

  album.delete # does not run destroy hooks

However, skipping hooks is a bad idea in general and should be avoided. As mentioned above, Sequel doesn't allow you to turn off the running of save hooks. If you know what you are doing and really want to skip them, you need to drop down to the dataset level to do so. This can be done for a specific model object by using the +this+ method for a dataset that represents a single object:

  album.this # dataset

The +this+ dataset works just like any other dataset, so you can call +update+ on it to modify the underlying row:

  album.this.update(copies_sold: album.copies_sold + 1)

If you want to insert a row into the model's table without running the creation hooks, you can use Model.insert instead of Model.create:

  Album.insert(name: 'RF') # does not run hooks

== Canceling Actions in Hooks

Sometimes you want to cancel an action in a before hook, so the action is not performed. For example, you may want to not allow destroying or saving a record in certain cases. In those cases, you can call +cancel_action+ inside the before_* hook, which will stop processing the hook and will either raise a Sequel::HookFailed exception (the default), or return +nil+ (if +raise_on_save_failure+ is +false+). You can use this to implement validation-like behavior, that will run even if validations are skipped:

  class Album < Sequel::Model
    def before_save
      cancel_action if name == ''
      super
    end
  end

For around hooks, neglecting to call +super+ halts hook processing in the same way as calling +cancel_action+ in a before hook. It's probably a bad idea to use +cancel_action+ in after hooks, or after yielding in around hooks, since by then the main processing has already taken place.

By default, Sequel runs hooks other than validation hooks inside a transaction, so if you cancel the action by calling +cancel_action+ in any hook, Sequel will rollback the transaction. However, note that the implicit use of transactions when saving and destroying model objects is conditional (it depends on the model instance's +use_transactions+ setting and the :transaction option passed to save).
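Here is a sketch of how those settings interact (assuming an album model instance):

  album.use_transactions = false
  album.save                    # hooks run without a wrapping transaction
  album.save(transaction: true) # the :transaction option overrides the setting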
== Conditional Hooks

Sometimes you only want to take a certain action in a hook if the object meets a certain condition. For example, let's say you only want to make sure a timestamp is set when updating if the object is at a certain status level:

  class Album < Sequel::Model
    def before_update
      self.timestamp ||= Time.now if status_id > 3
      super
    end
  end

Note how this hook action is made conditional just by using the standard ruby +if+ conditional. Sequel makes it easy to handle conditional hook actions by using standard ruby conditionals inside the instance methods.

== Using Hooks in Multiple Classes

If you want all your model classes to use the same hook, you can just define that hook in Sequel::Model:

  class Sequel::Model
    def before_create
      self.created_at ||= Time.now
      super
    end
  end

Just remember to call +super+ whenever you override the method in a subclass. Note that +super+ is also used when overriding the hook in Sequel::Model itself. This is important because if you add any plugins to Sequel::Model itself, and you override a hook in Sequel::Model without calling +super+, the plugin may not work correctly.

If you don't want all classes to use the same hook, but want to reuse hooks in multiple classes, you should use a plugin or a simple module:

=== Plugin

  module SetCreatedAt
    module InstanceMethods
      def before_create
        self.created_at ||= Time.now
        super
      end
    end
  end
  Album.plugin(SetCreatedAt)
  Artist.plugin(SetCreatedAt)

=== Simple Module

  module SetCreatedAt
    def before_create
      self.created_at ||= Time.now
      super
    end
  end
  Album.send(:include, SetCreatedAt)
  Artist.send(:include, SetCreatedAt)

== +super+ Ordering

While it's not enforced anywhere, it's a good idea to make +super+ the last expression when you override a before hook, and the first expression when you override an after hook:

  class Album < Sequel::Model
    def before_save
      self.updated_at ||= Time.now
      super
    end

    def after_save
      super
      AuditLog.create(log: "Album #{name} created")
    end
  end

This allows the following general principles to be true:

* before hooks are run in reverse order of inclusion
* after hooks are run in order of inclusion

So if you define the same before hook in both a model and a plugin that the model uses, the hooks will be called in this order:

* model before hook
* plugin before hook
* plugin after hook
* model after hook

Again, Sequel does not enforce that, and you are free to call +super+ in an order other than the recommended one (just make sure that you call it).

== Around Hooks

Around hooks should only be used if you cannot accomplish the same results with before and after hooks. For example, if you want to catch database errors caused by the +INSERT+ or +UPDATE+ query when saving a model object and raise them as validation errors, you cannot use a before or after hook. You have to use an +around_save+ hook:

  class Album < Sequel::Model
    def around_save
      super
    rescue Sequel::DatabaseError => e
      # parse database error, set error on self, and reraise a Sequel::ValidationFailed
    end
  end

Likewise, let's say that upon retrieval, you associate an object with a file descriptor, and you want to ensure that the file descriptor is closed after the object is saved to the database. Let's assume you are always saving the object and you are not using validations.
You could not use an +after_save+ hook safely, since if the database raises an error, the +after_save+ method will not be called. In this case, an +around_save+ hook is also the correct choice:

  class Album < Sequel::Model
    def around_save
      super
    ensure
      @file_descriptor.close
    end
  end

== Hook related plugins

=== +instance_hooks+

Sequel also ships with an +instance_hooks+ plugin that allows you to define before and after hooks on a per instance basis. It's very useful as it allows you to delay action on an instance until before or after saving. This can be important if you want to modify a group of related objects together (which is how the +nested_attributes+ plugin uses +instance_hooks+).

=== +hook_class_methods+

While it's recommended to write your hooks as instance methods, Sequel ships with a +hook_class_methods+ plugin that allows you to define hooks via class methods. It exists mostly for legacy compatibility, but is still supported. However, it does not implement around hooks.

=== +after_initialize+

The after_initialize plugin adds an after_initialize hook, that is called for all model instances on creation (both new instances and instances retrieved from the database). It exists mostly for legacy compatibility, but it is still supported.

= Model Plugins

Sequel::Model (and Sequel in general) is designed around the idea of a small core, to which application-specific behavior can easily be added. Sequel::Model implements this design using a plugin system. Plugins are modules that include submodules for model class methods, model instance methods, and model dataset methods. All plugins can override the class, instance, and dataset methods added by earlier plugins, and call super to get the behavior before the plugin was added.

== Default Plugins

The Sequel::Model class is completely empty by default, in that it has no class methods or instance methods. Sequel::Model is itself a plugin, and it is the first plugin loaded, and it is loaded into itself (meta!). So methods in Sequel::Model::ClassMethods become Sequel::Model class methods, methods in Sequel::Model::InstanceMethods become Sequel::Model instance methods, and methods in Sequel::Model::DatasetMethods become Sequel::Model dataset methods. The Sequel::Model plugin is often referred to as the base plugin.

The Sequel::Model class also has the Sequel::Model::Associations plugin loaded by default, though it is possible to disable this.

== Loading Plugins

Loading a plugin into a model class is generally as simple as calling the Sequel::Model.plugin method with the name of the plugin, for example:

  Sequel::Model.plugin :subclasses

What this does is require the sequel/plugins/subclasses file, and then assume that that file defines the Sequel::Plugins::Subclasses plugin module.

It's possible to pass module instances to the plugin method to load plugins that are stored in arbitrary files or namespaces:

  Sequel::Model.plugin MyApp::Plugins::Foo

In the examples shown above, the plugin is loaded into Sequel::Model, which means it is loaded into all subclasses that are created afterward. With many plugins, you are not going to want to add them to Sequel::Model, but to a specific subclass:

  class Node < Sequel::Model
    plugin :tree
  end

This way, only Node and future subclasses of Node will have the tree plugin loaded.

== Plugin Arguments/Options

Some plugins require arguments and/or support options.
For example, the single_table_inheritance plugin requires an argument containing the column that specifies the class to use, and also supports options:

  class Employee < Sequel::Model
    plugin :single_table_inheritance, :type_id, model_map: {1=>:Staff, 2=>:Manager}
  end

You should read the documentation for the plugin to determine if it requires arguments and what, if any, options are supported.

== Creating Plugins

The simplest possible plugin is an empty module in a file stored in sequel/plugins/plugin_name somewhere in ruby's load path:

  module Sequel
    module Plugins
      module PluginName
      end
    end
  end

Well, technically, that's not the simplest possible plugin, but it is the simplest one you can load by name. The absolute simplest plugin would be an empty module:

  Sequel::Model.plugin Module.new

== Example Formatting

In general, loading plugins by module instead of by name is not recommended, so this guide will assume that plugins are loaded by name. For simplicity, we'll also use the following format for example plugin code (and assume a plugin named Foo stored in sequel/plugins/foo):

  module Sequel::Plugins::Foo
  end

This saves 4 lines per example. However, it's recommended that you use the nested example displayed earlier for production code.

The examples also assume that the following model class exists:

  class Bar < Sequel::Model
  end

== Adding Class Methods

If you want your plugin to add class methods to the model class it is loaded into, define a ClassMethods module under the plugin module:

  module Sequel::Plugins::Foo
    module ClassMethods
      def a
        1
      end
    end
  end

This allows a plugin user to do:

  Bar.plugin :foo
  Bar.a # => 1

== Adding Instance Methods

If you want your plugin to add instance methods to the model class it is loaded into, define an InstanceMethods module under the plugin module:

  module Sequel::Plugins::Foo
    module InstanceMethods
      def a
        1
      end
    end
  end

This allows a plugin user to do:

  Bar.plugin :foo
  Bar.new.a # => 1

== Adding Dataset Methods

If you want your plugin to add methods to the dataset of the model class it is loaded into, define a DatasetMethods module under the plugin module:

  module Sequel::Plugins::Foo
    module DatasetMethods
      def a
        1
      end
    end
  end

This allows a plugin user to do:

  Bar.plugin :foo
  Bar.dataset.a # => 1

== Calling super to get Previous Behavior

No matter if you are dealing with class, instance, or dataset methods, you can call super inside the method to get the previous behavior. This makes it easy to hook into the method, add your own behavior, but still get the previous behavior:

  module Sequel::Plugins::Foo
    module InstanceMethods
      def save
        if allow_saving?
          super
        else
          raise Sequel::Error, 'saving not allowed for this object'
        end
      end

      private

      def allow_saving?
        moon =~ /Waxing/
      end
    end
  end

== Running Code When the Plugin is Loaded

Some plugins require more than just adding methods. Any plugin that requires state is going to have to initialize that state and store it somewhere (generally in the model class itself). If you want to run code when a plugin is loaded (usually to initialize state, but possibly for other reasons), there are two methods you can define to do so.

The first method is apply, and it is called only the first time the plugin is loaded into the class, before it is loaded into the class. This is generally only used if a plugin depends on another plugin or for initializing state.
You define this method as a singleton method of the plugin module:

  module Sequel::Plugins::Foo
    def self.apply(model)
      model.instance_eval do
        plugin :plugin_that_foo_depends_on
        @foo_states = {}
      end
    end
  end

The other method is called configure, and it is called every time the plugin is loaded into the class, after it is loaded into the class:

  module Sequel::Plugins::Foo
    def self.configure(model)
      model.instance_eval do
        @foo_states[:initial] ||= :baz
      end
    end
  end

Note that in the configure method, you know apply has already been called at least once (so @foo_states will definitely exist).

If you want your plugin to take arguments and/or support options, you handle that by making your apply and configure methods take arguments and/or an options hash. For example, if you want the user to be able to set the initial state via an option, you can do:

  module Sequel::Plugins::Foo
    def self.apply(model, opts={})
      model.instance_eval do
        plugin :plugin_that_foo_depends_on
        @foo_states = {}
      end
    end

    def self.configure(model, opts={})
      model.instance_eval do
        @foo_states[:initial] = opts[:initial_state] || @foo_states[:initial] || :baz
      end
    end
  end

This allows a user of the plugin to do either of the following:

  Bar.plugin :foo
  Bar.plugin :foo, initial_state: :quux

If you want to require the initial state to be provided as an argument:

  module Sequel::Plugins::Foo
    def self.apply(model, initial_state)
      model.instance_eval do
        plugin :plugin_that_foo_depends_on
        @foo_states = {}
      end
    end

    def self.configure(model, initial_state)
      model.instance_eval do
        @foo_states[:initial] = initial_state
      end
    end
  end

This requires that the user of the plugin specify the argument:

  Bar.plugin :foo, :quux

In general you should only require plugin arguments if you absolutely must have a value and there is no good default.

== Handling Subclasses

Sequel::Model uses a copy-on-subclassing approach to model state. So instead of having a model subclass ask its superclass for a value if the subclass doesn't have the value defined, the value should be copied from the parent class to the subclass when the subclass is created. While this can be implemented by overriding the +inherited+ class method, there is an available shortcut that handles most cases:

  module Sequel::Plugins::Foo
    module ClassMethods
      Sequel::Plugins.inherited_instance_variables(self, :@foo_states => :dup)
    end
  end

Inside the ClassMethods submodule, you call the Sequel::Plugins.inherited_instance_variables method with the first argument being self. The second argument should be a hash describing how to copy the value from the parent class into the subclass. The keys of this hash are instance variable names, including the @ symbol (e.g. :@foo_states). The values of this hash describe how to copy it:

nil :: Use the value directly.
:dup :: Call dup on the value.
:hash_dup :: Create a new hash with the same keys, but a dup of all the values.
Proc :: An arbitrary proc that is called with the parent class value and should return the value to set into the subclass.

== Handling Changes to the Model's Dataset

In many plugins, if the model class changes the dataset, you need to change the state for the plugin. While you can do this by overriding the set_dataset class method, there is an available shortcut:

  module Sequel::Plugins::Foo
    module ClassMethods
      Sequel::Plugins.after_set_dataset(self, :set_foo_table)

      private

      def set_foo_table
        @foo_states[:table] = table_name
      end
    end
  end

With this code, any time the model's dataset changes, the state of the plugin will be updated to set the correct table name.
This is also called when creating a new model class with a dataset.

== Making Dataset Methods Callable as Class Methods

In some cases, when dataset methods are added, you want to also create a model class method that will call the dataset method, so you can write:

  Model.method

instead of:

  Model.dataset.method

There is an available shortcut that automatically creates the class methods:

  module Sequel::Plugins::Foo
    module ClassMethods
      Sequel::Plugins.def_dataset_methods(self, :quux)
    end

    module DatasetMethods
      def quux
        2
      end
    end
  end
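This allows a plugin user to call the method either way:

  Bar.plugin :foo
  Bar.dataset.quux # => 2
  Bar.quux # => 2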
= Stored Procedures in MSSQL

This guide documents the workaround implemented to allow executing stored procedures in MSSQL, as well as getting the value of output variables.

== Simple Execution

The following stored procedure is used as an example:

  CREATE PROCEDURE dbo.SequelTest(
    @Input varchar(25),
    @Output int OUTPUT
  )
  AS
  SET @Output = LEN(@Input)
  RETURN 0

Execute it as follows:

  DB.call_mssql_sproc(:SequelTest, {args: ['Input String', :output]})

Use the +:output+ symbol to denote an output variable. The result will contain a hash of the output variables, as well as the result code and number of affected rows:

  {:result => 0, :numrows => 1, :var1 => "1"}

Output variables will be strings by default. To specify their type, include the SQL type:

  DB.call_mssql_sproc(:SequelTest, {args: ['Input String', [:output, 'int']]})

Result:

  {:result => 0, :numrows => 1, :var1 => 1}

Output variables will be named +var#{n}+ where n is their zero indexed position in the parameter list. To name an output variable, include its name:

  DB.call_mssql_sproc(:SequelTest, {args: ['Input String', [:output, nil, 'Output']]})

Result:

  {:result => 0, :numrows => 1, :output => "1"}

= The Sequel Object Model

Sequel's dataset layer is mostly structured as a DSL, so it often obscures what actual objects are being used. For example, you don't usually create Sequel objects by calling #new on the object's class (other than Sequel::Model instances). However, just as almost everything in ruby is an object, all the methods you call in Sequel deal with objects behind the scenes.

In addition to the standard ruby types, there are four main types of Sequel-specific objects that you deal with when programming with Sequel:

* Sequel::Database
* Sequel::Dataset
* Sequel::Model
* Sequel::SQL::Expression (and subclasses)

== Sequel::Database

Sequel::Database is the main Sequel object that you deal with. It's usually created by the Sequel.connect method:

  DB = Sequel.connect('postgres://host/database')

A Sequel::Database object represents the database you are connecting to. Sequel::Database handles things like Sequel::Dataset creation,

  dataset = DB[:table]

schema modification,

  DB.create_table(:table) do
    primary_key :id
    String :name
  end

and transactions:

  DB.transaction do
    DB[:table].insert(column: value)
  end

Sequel::Database#literal can be used to take any object that Sequel handles and literalize the object to an SQL string fragment:

  DB.literal(DB[:table]) # (SELECT * FROM "table")

== Sequel::Dataset

Sequel::Dataset objects represent SQL queries. They are created from a Sequel::Database object:

  dataset = DB[:table]         # SELECT * FROM "table"
  dataset = DB.from(:table)    # SELECT * FROM "table"
  dataset = DB.select(:column) # SELECT "column"

Most Sequel::Dataset methods that do not execute queries return modified copies of the receiver, and the general way to build queries in Sequel is via a method chain:

  dataset = DB[:test].
    select(:column1, :column2).
    where(column3: 4).
    order(:column5)

Such a method chain is a more direct way of doing:

  dataset = DB[:test]
  dataset = dataset.select(:column1, :column2)
  dataset = dataset.where(column3: 4)
  dataset = dataset.order(:column5)

When you are ready to execute your query, you call one of the Sequel::Dataset action methods. For returning rows, you can do:

  dataset.first
  dataset.all
  dataset.each{|row| row}

For inserting, updating, or deleting rows, you can do:

  dataset.insert(column: value)
  dataset.update(column: value)
  dataset.delete

All datasets are related to their database object, which you can access via the Sequel::Dataset#db method:

  dataset.db # => DB

== Sequel::Model

Sequel::Model classes are wrappers around a particular Sequel::Dataset object that add custom behavior: custom behavior for the entire set of rows in the dataset (the model's class methods), custom behavior for a subset of rows in the dataset (the model's dataset methods), and custom behavior for single rows in the dataset (the model's instance methods).

Unlike most other Sequel objects, Sequel::Model classes and instances are generally created by the user using standard ruby syntax:

  class Album < Sequel::Model
  end
  album = Album.new

Model classes that use a non-default Database instance or table name generally use the Sequel::Model method to create the superclass:

  class Album < Sequel::Model(DB[:music_albums])
  end
  album = Album.new

All model classes are related to their Sequel::Dataset object, which you can access via the Sequel::Model.dataset method:

  Album.dataset # SELECT * FROM "albums"

Additionally, all model classes are related to their dataset's Sequel::Database object, which you can access via the Sequel::Model.db method:

  Album.db # => DB

== Standard Ruby Types

Where possible, Sequel uses ruby's standard types to represent SQL concepts. In the examples here, the text to the right side of the # sign is the output if you pass the left side to Sequel::Database#literal.
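For example, you can check these literalizations yourself in an interactive session. The output shown here is representative; exact identifier quoting varies by adapter:

  DB.literal(:column)              # => "\"column\""
  DB.literal('string')             # => "'string'"
  DB.literal(Date.new(2012, 5, 6)) # => "'2012-05-06'"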
=== Symbol

Ruby symbols represent SQL identifiers (tables, columns, schemas):

  :schema # "schema"
  :table  # "table"
  :column # "column"

=== Integer, Float, BigDecimal, String, Date, Time, DateTime

Ruby's Integer, Float, BigDecimal, String, Date, Time, and DateTime classes represent similar types in SQL:

  1                    # 1
  1.0                  # 1.0
  BigDecimal('1.0')    # 1.0
  "string"             # 'string'
  Date.new(2012, 5, 6) # '2012-05-06'
  Time.now             # '2012-05-06 10:20:30'
  DateTime.now         # '2012-05-06 10:20:30'

=== Hash

Sequel generally uses hash objects to represent equality:

  {column: 1} # ("column" = 1)

However, if you use an array as the hash value, it represents inclusion in the value list:

  {column: [1, 2, 3]} # ("column" IN (1, 2, 3))

You can also use a Sequel::Dataset instance as the hash value, which will be used to represent inclusion in the subselect:

  {column: DB[:table].select(:column)} # ("column" IN (SELECT "column" FROM "table"))

If you pass true, false, or nil as the hash value, it represents identity:

  {column: nil} # ("column" IS NULL)

If you pass a Range object, it will be used as the bounds for a greater than and less than operation:

  {column: 1..2}  # (("column" >= 1) AND ("column" <= 2))
  {column: 1...3} # (("column" >= 1) AND ("column" < 3))

If you pass a Regexp object as the value, it will be used as a regular expression operation if the database supports it:

  {column: /a.*b/} # ("column" ~ 'a.*b')

=== Array

Sequel generally treats arrays as an SQL value list:

  [1, 2, 3] # (1, 2, 3)

However, if all members of the array are arrays with two members, then the array is treated like a hash:

  [[:column, 1]] # ("column" = 1)

The advantage of using an array over a hash for such a case is that a hash cannot include multiple objects with the same key, while the array can.
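For example, here is a sketch of a condition that repeats the same key, which a hash could not represent (output follows the same conventions as above):

  [[:column, 1], [:column, 2]] # (("column" = 1) AND ("column" = 2))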
== Sequel::SQL::Expression (and subclasses)

If Sequel needs to represent an SQL concept that does not map directly to an existing ruby class, it will generally use a Sequel::SQL::Expression subclass to represent that concept.

Some of the examples below show examples that require the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc].

=== Sequel::LiteralString

Sequel::LiteralString is not actually a Sequel::SQL::Expression subclass. It is a subclass of String, but it is treated specially by Sequel, in that it is treated as literal SQL code, instead of as an SQL string that needs to be escaped:

  Sequel::LiteralString.new("co'de") # co'de

The following shortcuts exist for creating Sequel::LiteralString objects:

  Sequel.lit("co'de")
  "co'de".lit # core_extensions extension

=== Sequel::SQL::Blob

Sequel::SQL::Blob is also a String subclass, but it is treated as an SQL blob instead of an SQL string, as SQL blobs often have different literalization rules than SQL strings do:

  Sequel::SQL::Blob.new("blob")

The following shortcuts exist for creating Sequel::SQL::Blob objects:

  Sequel.blob("blob")
  "blob".to_sequel_blob # core_extensions extension

=== Sequel::SQLTime

Sequel::SQLTime is a Time subclass. However, it is treated specially by Sequel in that only the time component is literalized, not the date part. This type is used to represent SQL time types, which do not contain date information.

  Sequel::SQLTime.create(10, 20, 30) # "10:20:30"

=== Sequel::SQL::ValueList

Sequel::SQL::ValueList objects always represent SQL value lists. Most ruby arrays represent value lists in SQL, except that arrays of two-element arrays are treated similar to hashes. Such arrays can be wrapped in this class to ensure they are treated as value lists. This is important when doing a composite key IN lookup, which some databases support. Sequel::SQL::ValueList is an ::Array subclass with no additional behavior, so it can be instantiated like a normal array:

  Sequel::SQL::ValueList.new([[1, 2], [3, 4]]) # ((1, 2), (3, 4))

In general, you don't need to create Sequel::SQL::ValueList instances manually; they will be created automatically where they are required in most cases. The following shortcuts exist for creating Sequel::SQL::ValueList objects:

  Sequel.value_list([[1, 2], [3, 4]])
  [[1, 2], [3, 4]].sql_value_list # core_extensions extension

=== Sequel::SQL::Identifier

Sequel::SQL::Identifier objects represent single identifiers. The main reason for their existence is that they support many additional Sequel-specific methods that are not supported on plain symbols:

  Sequel::SQL::Identifier.new(:col) # "col"

The following shortcuts exist for creating Sequel::SQL::Identifier objects:

  Sequel[:column]
  Sequel.identifier(:column)
  :column.identifier # core_extensions extension

=== Sequel::SQL::QualifiedIdentifier

Sequel::SQL::QualifiedIdentifier objects represent qualified identifiers:

  Sequel::SQL::QualifiedIdentifier.new(:table, :column) # "table"."column"

The following shortcuts exist for creating Sequel::SQL::QualifiedIdentifier objects:

  Sequel[:table][:column]
  Sequel.qualify(:table, :column)
  :column.qualify(:table) # core_extensions extension

=== Sequel::SQL::AliasedExpression

Sequel::SQL::AliasedExpression objects represent aliased expressions in SQL. The alias is treated as an identifier, but the expression can be an arbitrary Sequel expression:

  Sequel::SQL::AliasedExpression.new(:column, :alias)
  # "column" AS "alias"

Derived column lists are also supported:

  Sequel::SQL::AliasedExpression.new(:table, :alias, [:column_alias1, :column_alias2])
  # "table" AS "alias"("column_alias1", "column_alias2")

The following shortcuts exist for creating Sequel::SQL::AliasedExpression objects:

  Sequel[:column].as(:alias)
  Sequel.as(:column, :alias)
  Sequel.as(:column, :alias, [:column_alias1, :column_alias2])
  :column.as(:alias) # core_extensions or symbol_as extension

=== Sequel::SQL::ComplexExpression

Sequel::SQL::ComplexExpression objects mostly represent SQL operations with arguments. There are separate subclasses for representing boolean operations such as AND and OR (Sequel::SQL::BooleanExpression), mathematical operations such as + and - (Sequel::SQL::NumericExpression), and string operations such as || and LIKE (Sequel::SQL::StringExpression).

  Sequel::SQL::BooleanExpression.new(:OR, :col1, :col2) # ("col1" OR "col2")
  Sequel::SQL::NumericExpression.new(:+, :column, 2) # ("column" + 2)
  Sequel::SQL::StringExpression.new(:"||", :column, "b") # ("column" || 'b')

There are many shortcuts for creating Sequel::SQL::ComplexExpression objects:

  Sequel.|(:col1, :col2)
  :col1 | :col2 # core_extensions extension

  Sequel.+(:column, 2)
  :column + 2 # core_extensions extension

  Sequel.join([:column, 'b'])
  :column + 'b' # core_extensions extension

=== Sequel::SQL::CaseExpression

Sequel::SQL::CaseExpression objects represent SQL CASE expressions, which represent branches in the database, similar to ruby case expressions.
Like ruby's case expressions, these case expressions can have an implicit value you are comparing against:

  Sequel::SQL::CaseExpression.new({2=>1}, 0, :a) # CASE "a" WHEN 2 THEN 1 ELSE 0 END

Or they can treat each condition separately:

  Sequel::SQL::CaseExpression.new({{a: 2}=>1}, 0) # CASE WHEN ("a" = 2) THEN 1 ELSE 0 END

In addition to providing a hash, you can also provide an array of two-element arrays:

  Sequel::SQL::CaseExpression.new([[2, 1]], 0, :a) # CASE "a" WHEN 2 THEN 1 ELSE 0 END

The following shortcuts exist for creating Sequel::SQL::CaseExpression objects:

  Sequel.case({2=>1}, 0, :a)
  Sequel.case({{a: 2}=>1}, 0)

  {2=>1}.case(0, :a) # core_extensions extension
  {{a: 2}=>1}.case(0) # core_extensions extension

=== Sequel::SQL::Cast

Sequel::SQL::Cast objects represent CAST expressions in SQL, which do explicit typecasting in the database. With Sequel, you provide the expression to typecast as well as the type to cast to. The type can either be a generic type, given as a ruby class:

  Sequel::SQL::Cast.new(:a, String) # CAST("a" AS text)

or a specific type, given as a symbol or string:

  Sequel::SQL::Cast.new(:a, :int4) # CAST("a" AS int4)

The following shortcuts exist for creating Sequel::SQL::Cast objects:

  Sequel.cast(:a, String)
  Sequel.cast(:a, :int4)

  :a.cast(String) # core_extensions extension
  :a.cast(:int4) # core_extensions extension

=== Sequel::SQL::ColumnAll

Sequel::SQL::ColumnAll objects represent the selection of all columns from a table:

  Sequel::SQL::ColumnAll.new(:table) # "table".*

The following shortcuts exist for creating Sequel::SQL::ColumnAll objects:

  Sequel[:table].*
  Sequel[:schema][:table].*
  :table.* # core_extensions extension

=== Sequel::SQL::Constant

Sequel::SQL::Constant objects represent constants or pseudo-constants in SQL, such as TRUE, NULL, and CURRENT_TIMESTAMP. These are not designed to be created or used by the end user, but some existing values are predefined under the Sequel namespace:

  Sequel::CURRENT_TIMESTAMP # CURRENT_TIMESTAMP

These objects are usually used as values in queries:

  DB[:table].insert(time: Sequel::CURRENT_TIMESTAMP)

=== Sequel::SQL::DelayedEvaluation

Sequel::SQL::DelayedEvaluation objects represent an evaluation that is delayed until query literalization.

  Sequel::SQL::DelayedEvaluation.new(proc{some_model.updated_at})

The following shortcut exists for creating Sequel::SQL::DelayedEvaluation objects:

  Sequel.delay{some_model.updated_at}

Note how Sequel.delay requires a block, while Sequel::SQL::DelayedEvaluation.new accepts a generic callable object.

Let's say you want a dataset for the objects updated more recently than some attribute of another object:

  ds = DB[:table].where{updated_at > some_model.updated_at}

The problem with the above query is that it evaluates "some_model.updated_at" statically, so if you change some_model.updated_at later, it won't affect this dataset. You can use Sequel.delay to fix this:

  ds = DB[:table].where{updated_at > Sequel.delay{some_model.updated_at}}

This will evaluate "some_model.updated_at" every time you literalize the dataset (usually every time it is executed).

=== Sequel::SQL::Function

Sequel::SQL::Function objects represent database function calls, which take a function name and any arguments:

  Sequel::SQL::Function.new(:func, :a, 2) # func("a", 2)

The following shortcuts exist for creating Sequel::SQL::Function objects:

  Sequel.function(:func, :a, 2)
  :func.sql_function(:a, 2) # core_extensions extension

=== Sequel::SQL::JoinClause

Sequel::SQL::JoinClause objects represent SQL JOIN clauses.
They are usually not created manually, as the Dataset join methods create them automatically.

=== Sequel::SQL::PlaceholderLiteralString

Sequel::SQL::PlaceholderLiteralString objects represent a literal SQL string with placeholders for variables. There are three types of these objects. The first type uses question marks with multiple placeholder value objects:

  Sequel::SQL::PlaceholderLiteralString.new('? = ?', [:a, 1]) # "a" = 1

The second uses named placeholders with colons and a hash of placeholder value objects:

  Sequel::SQL::PlaceholderLiteralString.new(':b = :v', [{b: :a, v: 1}]) # "a" = 1

The third uses an array instead of a string, with multiple placeholder objects, each one going in between the members of the array:

  Sequel::SQL::PlaceholderLiteralString.new(['', ' = '], [:a, 1]) # "a" = 1

For any of these three forms, you can also include a third argument for whether to include parentheses around the string:

  Sequel::SQL::PlaceholderLiteralString.new('? = ?', [:a, 1], true) # ("a" = 1)

The following shortcuts exist for creating Sequel::SQL::PlaceholderLiteralString objects:

  Sequel.lit('? = ?', :a, 1)
  Sequel.lit(':b = :v', b: :a, v: 1)
  Sequel.lit(['', ' = '], :a, 1)

  '? = ?'.lit(:a, 1) # core_extensions extension
  ':b = :v'.lit(b: :a, v: 1) # core_extensions extension

=== Sequel::SQL::OrderedExpression

Sequel::SQL::OrderedExpression objects represent ascending or descending sorts, used by the Dataset order methods. They take an expression, and whether to sort it ascending or descending:

  Sequel::SQL::OrderedExpression.new(:a) # "a" DESC
  Sequel::SQL::OrderedExpression.new(:a, false) # "a" ASC

Additionally, they take an options hash, which can be used to specify how nulls should be sorted:

  Sequel::SQL::OrderedExpression.new(:a, true, nulls: :first) # "a" DESC NULLS FIRST
  Sequel::SQL::OrderedExpression.new(:a, false, nulls: :last) # "a" ASC NULLS LAST

The following shortcuts exist for creating Sequel::SQL::OrderedExpression objects:

  Sequel.asc(:a)
  Sequel.desc(:a)
  Sequel.asc(:a, nulls: :first)
  Sequel.desc(:a, nulls: :last)

  :a.asc # core_extensions extension
  :a.desc # core_extensions extension
  :a.asc(nulls: :first) # core_extensions extension
  :a.desc(nulls: :last) # core_extensions extension

=== Sequel::SQL::Subscript

Sequel::SQL::Subscript objects represent SQL database array access. They take an expression and an array of indexes (or a range for an SQL array slice):

  Sequel::SQL::Subscript.new(:a, [1]) # "a"[1]
  Sequel::SQL::Subscript.new(:a, [1, 2]) # "a"[1, 2]
  Sequel::SQL::Subscript.new(:a, [1..2]) # "a"[1:2]

The following shortcuts exist for creating Sequel::SQL::Subscript objects:

  Sequel.subscript(:a, 1)
  Sequel.subscript(:a, 1, 2)
  Sequel.subscript(:a, 1..2)

  :a.sql_subscript(1) # core_extensions extension
  :a.sql_subscript(1, 2) # core_extensions extension
  :a.sql_subscript(1..2) # core_extensions extension

=== Sequel::SQL::VirtualRow

Sequel::SQL::VirtualRow is a BasicObject subclass that is the backbone behind the block expression support:

  DB[:table].where{a < 1}

In the above code, the block is instance-evaled inside a VirtualRow instance. These objects are usually not instantiated manually. See the {Virtual Row Guide}[rdoc-ref:doc/virtual_rows.rdoc] for details.

=== Sequel::SQL::Window

Sequel::SQL::Window objects represent the windows used by Sequel::SQL::Function.
These objects are usually not instantiated manually. See the {Virtual Row Guide}[rdoc-ref:doc/virtual_rows.rdoc] for details.

=== Sequel::SQL::Window

Sequel::SQL::Window objects represent the windows used by Sequel::SQL::Function. They use a hash-based API, supporting the :frame, :order, :partition, and :window options:

  Sequel::SQL::Window.new(order: :a) # (ORDER BY "a")
  Sequel::SQL::Window.new(partition: :a) # (PARTITION BY "a")

  Sequel::SQL::Window.new(partition: :a, frame: :all)
  # (PARTITION BY "a" ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)

=== Sequel::SQL::Wrapper

Sequel::SQL::Wrapper objects wrap arbitrary objects so that they can be used in Sequel expressions:

  o = Object.new
  def o.sql_literal_append(ds, sql)
    sql << "foo"
  end
  Sequel::SQL::Wrapper.new(o) # foo

The advantage of wrapping the object is that you can then call Sequel methods on the wrapper that would not be defined on the object itself:

  Sequel::SQL::Wrapper.new(o) + 1 # (foo + 1)

You can use the Sequel.[] method to wrap any object:

  Sequel[o]

However, note that this does not necessarily return a Sequel::SQL::Wrapper object; it may return a different class of object, such as a Sequel::SQL::ComplexExpression subclass object.

= Connecting to a database

All Sequel activity begins with connecting to a database, which creates a Sequel::Database object. The Database object is used to create datasets and execute queries. Sequel provides a powerful and flexible mechanism for connecting to databases. There are two main ways to establish database connections:

1. Using the Sequel.connect method
2. Using the specialized adapter method (Sequel.sqlite, Sequel.postgres, etc.)

The connection options needed depend on the adapter being used, though most adapters share the same basic connection options.

If you are only connecting to a single database, it is recommended that you store the database object in a constant named DB. This is not required, but it is the convention that most Sequel code uses.

== Using the Sequel.connect method

The connect method usually takes a well-formed URI, which is parsed into connection options needed to open the database connection. The scheme/protocol part of the URI is used to determine the adapter to use:

  DB = Sequel.connect('postgres://user:password@localhost/blog')
  # Uses the postgres adapter

You can use URI query parameters to specify options:

  DB = Sequel.connect('postgres://localhost/blog?user=user&password=password')

You can also pass an additional option hash with the connection string:

  DB = Sequel.connect('postgres://localhost/blog', user: 'user', password: 'password')

You can also just use an options hash without a connection string. If you do this, you must provide the adapter to use:

  DB = Sequel.connect(adapter: 'postgres', host: 'localhost', database: 'blog', user: 'user', password: 'password')

All of the above statements are equivalent.

== Using the specialized adapter method

The specialized adapter method is similar to Sequel.connect with an options hash, except that it automatically populates the :adapter option and assumes the first argument is the :database option, unless the first argument is a hash. So the following statements are equivalent to the previous statements.

  DB = Sequel.postgres('blog', host: 'localhost', user: 'user', password: 'password')
  DB = Sequel.postgres(host: 'localhost', user: 'user', password: 'password', database: 'blog')

Note that using an adapter method forces the use of the specified adapter, not a database type, even though some adapters have the same name as the database type. So if you want to connect to SQLite, for example, you can do so using the sqlite, amalgalite, and jdbc adapters. If you want to connect to SQLite on JRuby using the jdbc adapter, you should not use Sequel.sqlite, as that uses the C-based sqlite3 gem. Instead, Sequel.jdbc would be appropriate (though as mentioned below, using Sequel.connect is recommended instead of Sequel.jdbc).
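For example, a short sketch of connecting to SQLite on JRuby, using a jdbc connection string in the format covered in the jdbc section below:

  # Uses the jdbc adapter with its SQLite subadapter
  DB = Sequel.connect('jdbc:sqlite:blog.db')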
== Passing a block to either method

Both the Sequel.connect method and the specialized adapter methods take a block. If you provide a block to the method, Sequel will create a Database object and pass it as an argument to the block. When the block returns, Sequel will disconnect the database connection. For example:

  Sequel.connect('sqlite://blog.db'){|db| puts db[:users].count}

Note that if you do not pass a block to Sequel.connect, Sequel will automatically retain a reference to the object in the Sequel::DATABASES array. So calling +Sequel.connect+ multiple times (say once per request) can result in a memory leak. For any application where database access is needed for a long period of time, it's best to store the result of Sequel.connect in a constant, as recommended above.

== General connection options

These options are shared by all adapters unless otherwise noted.

:adapter :: The adapter to use
:database :: The name of the database to which to connect
:extensions :: Extensions to load into this Database instance. Can be a symbol, array of symbols, or string with extensions separated by commas. These extensions are loaded after connections are made by the :preconnect option.
:cache_schema :: Whether schema should be cached for this database (true by default)
:default_string_column_size :: The default size for string columns (255 by default)
:host :: The hostname of the database server to which to connect
:keep_reference :: Whether to keep a reference to the database in Sequel::DATABASES (true by default)
:logger :: A specific SQL logger to log to
:loggers :: An array of SQL loggers to log to
:log_connection_info :: Whether to include connection information in log messages (false by default)
:log_warn_duration :: The amount of seconds after which the queries are logged at :warn level
:password :: The password for the user account
:preconnect :: Whether to automatically make the maximum number of connections when setting up the pool. Can be set to "concurrently" to connect in parallel.
:preconnect_extensions :: Similar to the :extensions option, but loads the extensions before the connections are made by the :preconnect option.
:quote_identifiers :: Whether to quote identifiers.
:servers :: A hash with symbol keys and hash or proc values, used with primary/replica and sharded database configurations
:sql_log_level :: The level at which to issue queries to the loggers (:info by default)
:test :: Whether to test that a valid database connection can be made (true by default)
:user :: The user account name to use logging in

The following options can be specified and are passed to the database's internal connection pool.

:after_connect :: A callable object called after each new connection is made, with the connection object (and server argument if the callable accepts 2 arguments), useful for customizations that you want to apply to all connections (nil by default).
:connect_sqls :: An array of SQL strings to execute on each new connection, after :after_connect runs.
:max_connections :: The maximum size of the connection pool (4 connections by default on most databases)
:pool_timeout :: The number of seconds to wait if a connection cannot be acquired before raising an error (5 seconds by default)
:single_threaded :: Whether to use a single-threaded (non-thread safe) connection pool
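For example, a minimal sketch combining a few of the pool options (the session setting shown is only illustrative):

  DB = Sequel.connect('postgres://localhost/blog',
    max_connections: 10,
    connect_sqls: ["SET statement_timeout = '10s'"],
    after_connect: (proc do |conn|
      # conn is the driver-level connection object
    end))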
== Adapter specific connection options

The following sections explain the options and behavior specific to each adapter. If the library the adapter requires is different from the name of the adapter scheme, it is listed specifically, otherwise you can assume that it requires the library with the same name.

=== ado

Requires: win32ole

The ADO adapter provides connectivity to ADO databases in Windows. It relies on the WIN32OLE library, so it isn't usable on other operating systems (except possibly through WINE, but that's unlikely).

The following options are supported:

:command_timeout :: Sets the time in seconds to wait while attempting to execute a command before cancelling the attempt and generating an error. Specifically, it sets the ADO CommandTimeout property.
:driver :: The driver to use in the ADO connection string. If not provided, a default of "SQL Server" is used.
:conn_string :: The full ADO connection string. If this is provided, the usual options are ignored.
:provider :: Sets the Provider of this ADO connection (for example, "SQLOLEDB"). If you don't specify a provider, the default one used by WIN32OLE has major problems, such as creating a new native database connection for every query, which breaks things such as transactions and temporary tables.

Pay special attention to the :provider option, as without specifying a provider, many things will be broken. The SQLNCLI10 and SQLNCLI11 providers work well if you are connecting to Microsoft SQL Server, but it is not the default as it depends on those providers being installed.

Example connections:

  # SQL Server
  Sequel.connect('ado:///sequel_test?host=server%5cdb_instance')
  Sequel.connect('ado://user:password@server/database?host=server%5cdb_instance&provider=SQLNCLI10')
  # Access 2007
  Sequel.ado(conn_string: 'Provider=Microsoft.ACE.OLEDB.12.0;Data Source=drive:\\path\\filename.accdb')
  # Access 2000
  Sequel.ado(conn_string: 'Provider=Microsoft.Jet.OLEDB.4.0;Data Source=drive:\\path\\filename.mdb')
  # Excel 2000 (for table names, use a dollar after the sheet name, e.g. Sheet1$)
  Sequel.ado(conn_string: 'Provider=Microsoft.Jet.OLEDB.4.0;Data Source=drive:\\path\\filename.xls;Extended Properties=Excel 8.0;')

=== amalgalite

Amalgalite is a ruby extension that provides self contained access to SQLite, so you don't need to install SQLite separately. As amalgalite is a file backed database, the :host, :user, and :password options are not used.

:database :: The name of the database file
:timeout :: The busy timeout period given in milliseconds

Without a database argument, assumes a memory database, so you can do:

  Sequel.amalgalite

Handles paths in the connection string similar to the SQLite adapter, so see the sqlite section below for details.

=== ibmdb

Requires: ibm_db

This connects to DB2 using IBM_DB. This is the recommended adapter if you are using a C-based ruby to connect to DB2.

=== jdbc

Requires: java

Houses Sequel's JDBC support when running on JRuby. Support for individual database types is done using subadapters. There are currently subadapters for DB2, Derby, H2, HSQLDB, JTDS, MySQL, Oracle, PostgreSQL, SQLAnywhere, SQLite, and SQL Server.
For Derby, H2, HSQLDB, JTDS, MySQL, Postgres, and SQLite3, the adapters can use the appropriate jdbc-* gem; for the others, you need to have the .jar in your CLASSPATH or load the Java class manually before calling Sequel.connect.

Note that when using a JDBC adapter, the best way to use Sequel is via Sequel.connect using a connection string, NOT Sequel.jdbc. Use the JDBC connection string when connecting, which will be in a different format than the native connection string. The connection string should start with 'jdbc:'. For PostgreSQL, use 'jdbc:postgresql:', and for SQLite you do not need 2 preceding slashes for the database name (use no preceding slashes for a relative path, and one preceding slash for an absolute path).

Sequel does no preprocessing of JDBC connection strings, it passes them directly to JDBC. So if you have problems getting a connection string to work, look up the documentation for the JDBC driver.

The jdbc adapter does not handle common options such as +:host+, +:user+, and +:port+. If you must use a hash of options when connecting, provide the full JDBC connection string as the :uri option.

Example connection strings:

  jdbc:sqlite::memory:
  jdbc:postgresql://localhost/database?user=username
  jdbc:mysql://localhost/test?user=root&password=root&serverTimezone=UTC
  jdbc:h2:mem:
  jdbc:hsqldb:mem:mymemdb
  jdbc:derby:memory:myDb;create=true
  jdbc:sqlserver://localhost;database=sequel_test;integratedSecurity=true
  jdbc:jtds:sqlserver://localhost/sequel_test;user=sequel_test;password=sequel_test
  jdbc:oracle:thin:user/password@localhost:1521:database
  jdbc:db2://localhost:3700/database:user=user;password=password;
  jdbc:sqlanywhere://localhost?DBN=Test;UID=user;PWD=password

You can also use JNDI connection strings:

  jdbc:jndi:java:comp/env/jndi_resource_name

The following additional options are supported:

:convert_types :: If set to false, does not attempt to convert some Java types to ruby types. Setting to false roughly doubles performance when selecting large numbers of rows. Note that you can't provide this option inside the connection string (as that is passed directly to JDBC), you have to pass it as a separate option.
:driver :: Specify the Java driver class to use to connect to the database. This only has an effect if the database type is not recognized from the connection string, and only helps cases where java.sql.DriverManager.getConnection does not return a connection.
:login_timeout :: Set the login timeout on the JDBC connection (in seconds).
:jdbc_properties :: A hash for properties to set, skips the normal connection process of using java.sql.DriverManager.getConnection and tries the backup process of using driver.new.connect for the appropriate driver.

There are a few issues with specific jdbc driver gems:

jdbc-h2 :: jdbc-h2 versions greater than 1.3.175 have issues with ORDER BY not working correctly in some cases.
jdbc-mysql :: Depending on the configuration of the MySQL server, jdbc-mysql versions greater than 8 may complain about the server time zone being unrecognized. You can either use an older jdbc-mysql version, or you can specify the +serverTimezone+ option in the connection string, as shown in the example jdbc:mysql connection string above.

=== mysql

Requires: mysql

This should work with the mysql gem (C extension) and the ruby-mysql gem (pure ruby).

The following additional options are supported:

:auto_is_null :: If set to true, makes "WHERE primary_key IS NULL" select the last inserted id.
:charset :: Same as :encoding, :encoding takes precedence.
:compress :: Whether to compress data sent/received via the socket connection.
:config_default_group :: The default group to read from in the MySQL config file ("client" by default)
:config_local_infile :: If provided, sets the Mysql::OPT_LOCAL_INFILE option on the connection with the given value.
:disable_split_materialized :: Set split_materialized=off in the optimizer settings. Necessary to pass the associations integration tests in MariaDB 10.5+, due to an unfixed bug in the optimizer.
:encoding :: Specify the encoding/character set to use for the connection.
:fractional_seconds :: On MySQL 5.6.5+, this option is recognized and will include fractional seconds in time/timestamp values, as well as have the schema method create columns that can contain fractional seconds by default. This option is also supported on other adapters that connect to MySQL.
:socket :: Can be used to specify a Unix socket file to connect to instead of a TCP host and port.
:sql_mode :: Set the sql_mode(s) for a given connection. Can be a single symbol or string, or an array of symbols or strings (e.g. sql_mode: [:no_zero_date, :pipes_as_concat]).
:timeout :: Sets the wait_timeout for the connection, defaults to 1 month.
:read_timeout :: Set the timeout in seconds for reading back results to a query.
:connect_timeout :: Set the timeout in seconds before a connection attempt is abandoned (may not be supported when using MariaDB 10.2+ client libraries).

The :sslkey, :sslcert, :sslca, :sslcapath, and :sslcipher options (in that order) are passed to the Mysql#ssl_set method if either the :sslca or :sslkey option is given.

=== mysql2

This is a newer MySQL adapter that does typecasting in C, so it is often faster than the mysql adapter. The options given are passed to Mysql2::Client.new; see the mysql2 documentation for details on what options are supported. The :timeout, :auto_is_null, :sql_mode, and :disable_split_materialized options supported by the mysql adapter are also supported for the mysql2 adapter (and any other adapters connecting to mysql, such as the jdbc/mysql adapter).

=== odbc

The ODBC adapter allows you to connect to any database with the appropriate ODBC drivers installed. The :database option given should be the DSN (Data Source Name) from the ODBC configuration.

  Sequel.odbc('mydb', user: "user", password: "password")

The :host and :port options are not respected. The following additional options are supported:

:db_type :: Can be specified as 'mssql', 'progress', or 'db2' to use SQL syntax specific to those databases.
:drvconnect :: Can be given an ODBC connection string, and will use ODBC::Database#drvconnect to do the connection. Typical usage would be: Sequel.odbc(drvconnect: 'driver={...};...')

=== oracle

Requires: oci8

The following additional options are supported:

:autosequence :: Set to true to use Sequel's conventions to guess the sequence to use for datasets. False by default.
:prefetch_rows :: The number of rows to prefetch. Defaults to 100, a larger number can be specified and may improve performance when retrieving a large number of rows.
:privilege :: The Oracle privilege level.

=== postgres

Requires: pg (or sequel/postgres-pr or postgres-pr/postgres-compat if pg is not available)

The Sequel postgres adapter works with the pg, sequel-postgres-pr, jeremyevans-postgres-pr, and postgres-pr ruby libraries. The pg library is the best supported, as it supports real bound variables and prepared statements.
If the pg library is being used, Sequel will also attempt to load the sequel_pg library, which is a C extension that optimizes performance when Sequel is used with pg. All users of Sequel who use pg are encouraged to install sequel_pg. For users who want to use one of the postgres-pr libraries to avoid issues with C extensions, it is recommended to use sequel-postgres-pr.

The following additional options are supported:

:charset :: Same as :encoding, :encoding takes precedence
:convert_infinite_timestamps :: Whether infinite timestamps/dates should be converted on retrieval. By default, no conversion is done, so an error is raised if you attempt to retrieve an infinite timestamp/date. You can set this to :nil to convert to nil, :string to leave as a string, or :float to convert to an infinite float.
:conn_str :: Use a connection string (in the form host=x port=y ...). Ignores all other options, only supported with the pg library. See https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING and https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-PARAMKEYWORDS for the format and list of supported options.
:connect_timeout :: Set the number of seconds to wait for a connection (default 20, only respected if using the pg library).
:driver_options :: A hash of options to pass to the underlying driver (only respected if using the pg library)
:encoding :: Set the client_encoding to the given string
:notice_receiver :: A proc that will be called with the PGresult objects that have notice or warning messages. The default notice receiver just prints the messages to stderr, but this can be used to handle notice/warning messages differently (only respected if using the pg library).
:sslmode :: Set to 'disable', 'allow', 'prefer', 'require', 'verify-ca', or 'verify-full' to choose how to treat SSL (only respected if using the pg library)
:sslrootcert :: Specify the path to the root SSL certificate to use.
:search_path :: Set to the schema search_path. This can either be a single string containing the schemas separated by commas (for use via a URL: postgres:///?search_path=schema1,schema2), or it can be an array of strings (for use via an option: Sequel.postgres(search_path: ['schema1', 'schema2'])).
:use_iso_date_format :: This can be set to false to not force the ISO date format. Sequel forces it by default to allow for an optimization.

=== sqlanywhere

The sqlanywhere driver works off connection strings, so a connection string is built based on the url/options hash provided. The following additional options are respected:

:commlinks :: specify the CommLinks connection string option
:conn_string :: specify the connection string to use, ignoring all other options
:connection_name :: specify the ConnectionName connection string option
:encoding :: specify the CharSet connection string option

=== sqlite

Requires: sqlite3

As SQLite is a file-based database, the :host and :port options are ignored, and the :database option should be a path to the file.

Examples:

  # In Memory databases:
  Sequel.sqlite
  Sequel.connect('sqlite:/')
  Sequel.sqlite(':memory:')

  # Relative Path
  Sequel.sqlite('blog.db')
  Sequel.sqlite('./blog.db')
  Sequel.connect('sqlite://blog.db')

  # Absolute Path
  Sequel.sqlite('/var/sqlite/blog.db')
  Sequel.connect('sqlite:///var/sqlite/blog.db')

The following additional options are supported:

:readonly :: open database in read-only mode
:timeout :: the busy timeout to use in milliseconds (default: 5000).
:setup_regexp_function :: Whether to setup a REGEXP function in the underlying SQLite3::Database object. Doing so allows you to use regexp support in dataset expressions. Note that this creates a new Regexp object per call to the function, so it is not an efficient implementation.
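For example, a small sketch of enabling the regexp function (the table and column names are illustrative):

  DB = Sequel.sqlite('blog.db', setup_regexp_function: true)
  DB[:artists].where(name: /^A/).all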
Note that SQLite memory databases are restricted to a single connection by default. This is because SQLite does not allow multiple connections to a single memory database. For this reason, Sequel sets the maximum number of connections in the connection pool to 1 by default when an SQLite memory database is used. Attempts to force the use of more than 1 connection can result in weird behavior, since the connections will be to separate memory databases.

=== tinytds

Requires: tiny_tds

The connection options are passed directly to tiny_tds, except that the tiny_tds :username option is set to the Sequel :user option. If you want to use an entry in the freetds.conf file, you should specify the :dataserver option with that name as the value. Some other options that you may want to set are :login_timeout, :timeout, :tds_version, :azure, :appname, and :encoding, see the tiny_tds README for details.

Other Sequel specific options:

:ansi :: Set to true to enable the ANSI compatibility settings when connecting (ANSI_NULLS, ANSI_PADDING, ANSI_WARNINGS, ANSI_NULL_DFLT_ON, QUOTED_IDENTIFIER, CONCAT_NULL_YIELDS_NULL).
:server_version :: Override the server version to use (9000000 = SQL Server 2005). This also works on any other adapter that connects to Microsoft SQL Server.
:textsize :: Override the default TEXTSIZE setting for this connection. The FreeTDS default is small (around 64000 bytes), but can be set up to around 2GB. This should be specified as an integer. If you plan on setting large text or blob values via tinytds, you should use this option or modify your freetds.conf file.

= PostgreSQL-specific Support in Sequel

Sequel's core database and dataset functions are designed to support the features shared by most common SQL database implementations. However, Sequel's database adapters extend the core support to include support for database-specific features.

By far the most extensive database-specific support in Sequel is for PostgreSQL. This support is roughly broken into the following areas:

* Database Types
* DDL Support
* DML Support
* sequel_pg

Note that while this guide is extensive, it is not exhaustive. There are additional rarely used PostgreSQL features that Sequel supports which are not mentioned here.

== Adapter/Driver Specific Support

Some of this support depends on the specific adapter or underlying driver in use. postgres only will denote support specific to the postgres adapter (i.e. not available when connecting to PostgreSQL via the jdbc adapter). postgres/pg only will denote support specific to the postgres adapter when pg is used as the underlying driver (i.e. not available when using the postgres-pr driver).

== PostgreSQL-specific Database Type Support

Sequel's default support on PostgreSQL only includes common database types. However, Sequel ships with support for many PostgreSQL-specific types via extensions. In general, you load these extensions via Database#extension.
For example, to load support for arrays, you would do: DB.extension :pg_array The following PostgreSQL-specific type extensions are available: pg_array :: arrays (single and multidimensional, for any scalar type), as a ruby Array-like object pg_hstore :: hstore, as a ruby Hash-like object pg_inet :: inet/cidr, as ruby IPAddr objects pg_interval :: interval, as ActiveSupport::Duration objects pg_json :: json, as either ruby Array-like or Hash-like objects pg_range :: ranges (for any scalar type), as a ruby Range-like object pg_row :: row-valued/composite types, as a ruby Hash-like or Sequel::Model object In general, these extensions just add support for Database objects to return retrieved column values as the appropriate type and support for literalizing the objects correctly for use in an SQL string, or using them as bound variable values (postgres/pg and jdbc/postgres only). There are also type-specific extensions that make it easy to use database functions and operators related to the type. These extensions are: pg_array_ops :: array-related functions and operators pg_hstore_ops :: hstore-related functions and operators pg_json_ops :: json-related functions and operators pg_range_ops :: range-related functions and operators pg_row_ops :: row-valued/composite type syntax support These extensions aren't Database specific, they are global extensions, so you should load them via Sequel.extension, after loading support for the specific types into the Database instance: DB.extension :pg_array Sequel.extension :pg_array_ops With regard to common database types, please note that the generic String type is +text+ on PostgreSQL and not varchar(255) as it is on some other databases. +text+ is PostgreSQL's recommended type for storage of text data, and is more similar to Ruby's String type as it allows for unlimited length. If you want to set a maximum size for a text column, you must specify a :size option. This will use a varchar($size) type and impose a maximum size for the column. == PostgreSQL-specific DDL Support === Exclusion Constraints In +create_table+ blocks, you can use the +exclude+ method to set up exclusion constraints: DB.create_table(:table) do daterange :during exclude([[:during, '&&']], name: :table_during_excl) end # CREATE TABLE "table" ("during" daterange, # CONSTRAINT "table_during_excl" EXCLUDE USING gist ("during" WITH &&)) You can also add exclusion constraints in +alter_table+ blocks using add_exclusion_constraint: DB.alter_table(:table) do add_exclusion_constraint([[:during, '&&']], name: :table_during_excl) end # ALTER TABLE "table" ADD CONSTRAINT "table_during_excl" EXCLUDE USING gist ("during" WITH &&) === Adding Foreign Key and Check Constraints Without Initial Validation You can add a not_valid: true option when adding constraints to existing tables so that it doesn't check if all current rows are valid: DB.alter_table(:table) do # Assumes t_id column already exists add_foreign_key([:t_id], :table, not_valid: true, name: :table_fk) constraint({name: :col_123, not_valid: true}, col: [1,2,3]) end # ALTER TABLE "table" ADD CONSTRAINT "table_fk" FOREIGN KEY ("t_id") REFERENCES "table" NOT VALID # ALTER TABLE "table" ADD CONSTRAINT "col_123" CHECK (col IN (1, 2, 3)) NOT VALID Such constraints will be enforced for newly inserted and updated rows, but not for existing rows. 
After all existing rows have been fixed, you can validate the constraint: DB.alter_table(:table) do validate_constraint(:table_fk) validate_constraint(:col_123) end # ALTER TABLE "table" VALIDATE CONSTRAINT "table_fk" # ALTER TABLE "table" VALIDATE CONSTRAINT "col_123" === Creating Indexes Concurrently You can create indexes concurrently using the concurrently: true option: DB.add_index(:table, :t_id, concurrently: true) # CREATE INDEX CONCURRENTLY "table_t_id_index" ON "table" ("t_id") Similarly, you can drop indexes concurrently as well: DB.drop_index(:table, :t_id, concurrently: true) # DROP INDEX CONCURRENTLY "table_t_id_index" === Specific Conversions When Altering Column Types When altering a column type, PostgreSQL allows the user to specify how to do the conversion via a USING clause, and Sequel supports this using the :using option: DB.alter_table(:table) do # Assume unix_time column is stored as an integer, and you want to change it to timestamp set_column_type :unix_time, Time, using: (Sequel.cast('epoch', Time) + Sequel.cast('1 second', :interval) * :unix_time) end # ALTER TABLE "table" ALTER COLUMN "unix_time" TYPE timestamp # USING (CAST('epoch' AS timestamp) + (CAST('1 second' AS interval) * "unix_time")) === Creating Partitioned Tables PostgreSQL allows marking tables as partitioned tables, and adding partitions to such tables. Sequel offers support for this. You can create a partitioned table using the +:partition_by+ option and +:partition_type+ options (the default partition type is range partitioning): DB.create_table(:table1, partition_by: :column, partition_type: :range) do Integer :id Date :column end DB.create_table(:table2, partition_by: :column, partition_type: :list) do Integer :id String :column end DB.create_table(:table3, partition_by: :column, partition_type: :hash) do Integer :id Integer :column end To add partitions of other tables, you use the +:partition_of+ option. This option will use a custom DSL specific to partitioning other tables. For range partitioning, you can use the +from+ and +to+ methods to specify the inclusive beginning and exclusive ending of the range of the partition. You can call the +minvalue+ and +maxvalue+ methods to get the minimum and maximum values for the column(s) in the range, useful as arguments to +from+ and +to+: DB.create_table(:table1a, partition_of: :table1) do from minvalue to 0 end DB.create_table(:table1b, partition_of: :table1) do from 0 to 100 end DB.create_table(:table1c, partition_of: :table1) do from 100 to maxvalue end For list partitioning, you use the +values_in+ method. You can also use the +default+ method to mark a partition as the default partition: DB.create_table(:table2a, partition_of: :table2) do values_in 1, 2, 3 end DB.create_table(:table2b, partition_of: :table2) do values_in 4, 5, 6 end DB.create_table(:table2c, partition_of: :table2) do default end For hash partitioning, you use the +modulus+ and +remainder+ methods: DB.create_table(:table3a, partition_of: :table3) do modulus 3 remainder 0 end DB.create_table(:table3b, partition_of: :table3) do modulus 3 remainder 1 end DB.create_table(:table3c, partition_of: :table3) do modulus 3 remainder 2 end There is currently no support for using custom column or table constraints in partitions of other tables. Support may be added in the future. === Creating Unlogged Tables PostgreSQL allows users to create unlogged tables, which are faster but not crash safe. 
Sequel allows you to create an unlogged table by specifying the unlogged: true option to +create_table+:

  DB.create_table(:table, unlogged: true){Integer :i}
  # CREATE UNLOGGED TABLE "table" ("i" integer)

=== Creating Identity Columns

You can use the +:identity+ option when creating columns to mark them as identity columns. Identity columns are tied to a sequence for the default value. You can still override the default value for the column when inserting:

  DB.create_table(:table){Integer :id, identity: true}
  # CREATE TABLE "table" ("id" integer GENERATED BY DEFAULT AS IDENTITY)

If you want to disallow using a user provided value when inserting, you can mark the identity column using identity: :always:

  DB.create_table(:table){Integer :id, identity: :always}
  # CREATE TABLE "table" ("id" integer GENERATED ALWAYS AS IDENTITY)

=== Creating/Dropping Schemas, Languages, Functions, and Triggers

Sequel has built in support for creating and dropping PostgreSQL schemas, procedural languages, functions, and triggers:

  DB.create_schema(:s)
  # CREATE SCHEMA "s"
  DB.drop_schema(:s)
  # DROP SCHEMA "s"

  DB.create_language(:plperl)
  # CREATE LANGUAGE plperl
  DB.drop_language(:plperl)
  # DROP LANGUAGE plperl

  DB.create_function(:set_updated_at, <<-SQL, language: :plpgsql, returns: :trigger)
    BEGIN
      NEW.updated_at := CURRENT_TIMESTAMP;
      RETURN NEW;
    END;
  SQL
  # CREATE FUNCTION set_updated_at() RETURNS trigger LANGUAGE plpgsql AS '
  #   BEGIN
  #     NEW.updated_at := CURRENT_TIMESTAMP;
  #     RETURN NEW;
  #   END;'
  DB.drop_function(:set_updated_at)
  # DROP FUNCTION set_updated_at()

  DB.create_trigger(:table, :trg_updated_at, :set_updated_at, events: :update, each_row: true, when: {Sequel[:new][:updated_at] => Sequel[:old][:updated_at]})
  # CREATE TRIGGER trg_updated_at BEFORE UPDATE ON "table" FOR EACH ROW WHEN ("new"."updated_at" = "old"."updated_at") EXECUTE PROCEDURE set_updated_at()
  DB.drop_trigger(:table, :trg_updated_at)
  # DROP TRIGGER trg_updated_at ON "table"

However, you may want to consider just using Database#run with the necessary SQL code, at least for functions and triggers.
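For example, a minimal sketch of creating a similar function via Database#run (the SQL body here is only illustrative):

  DB.run <<-SQL
    CREATE OR REPLACE FUNCTION set_updated_at() RETURNS trigger LANGUAGE plpgsql AS $$
    BEGIN
      NEW.updated_at := CURRENT_TIMESTAMP;
      RETURN NEW;
    END;
    $$;
  SQL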
=== Parsing Check Constraints

Sequel has support for parsing CHECK constraints on PostgreSQL using Sequel::Database#check_constraints:

  DB.create_table(:foo) do
    Integer :i
    Integer :j
    constraint(:ic, Sequel[:i] > 2)
    constraint(:jc, Sequel[:j] > 2)
    constraint(:ijc, Sequel[:i] - Sequel[:j] > 2)
  end

  DB.check_constraints(:foo)
  # => {
  #  :ic=>{:definition=>"CHECK ((i > 2))", :columns=>[:i]},
  #  :jc=>{:definition=>"CHECK ((j > 2))", :columns=>[:j]},
  #  :ijc=>{:definition=>"CHECK (((i - j) > 2))", :columns=>[:i, :j]}
  # }

=== Parsing Foreign Key Constraints Referencing A Given Table

Sequel has support for parsing FOREIGN KEY constraints that reference a given table, using the +:reverse+ option to +foreign_key_list+:

  DB.create_table!(:a) do
    primary_key :id
    Integer :i
    Integer :j
    foreign_key :a_id, :a, foreign_key_constraint_name: :a_a
    unique [:i, :j]
  end
  DB.create_table!(:b) do
    foreign_key :a_id, :a, foreign_key_constraint_name: :a_a
    Integer :c
    Integer :d
    foreign_key [:c, :d], :a, key: [:j, :i], name: :a_c_d
  end

  DB.foreign_key_list(:a, reverse: true)
  # => [
  #  {:name=>:a_a, :columns=>[:a_id], :key=>[:id], :on_update=>:no_action, :on_delete=>:no_action, :deferrable=>false, :table=>:a, :schema=>:public},
  #  {:name=>:a_a, :columns=>[:a_id], :key=>[:id], :on_update=>:no_action, :on_delete=>:no_action, :deferrable=>false, :table=>:b, :schema=>:public},
  #  {:name=>:a_c_d, :columns=>[:c, :d], :key=>[:j, :i], :on_update=>:no_action, :on_delete=>:no_action, :deferrable=>false, :table=>:b, :schema=>:public}
  # ]

== PostgreSQL-specific DML Support

=== Returning Rows From Insert, Update, and Delete Statements

Sequel supports the ability to return rows from insert, update, and delete statements, via Dataset#returning:

  DB[:table].returning.insert
  # INSERT INTO "table" DEFAULT VALUES RETURNING *

  DB[:table].returning(:id).delete
  # DELETE FROM "table" RETURNING "id"

  DB[:table].returning(:id, Sequel.*(:id, :id).as(:idsq)).update(id: 2)
  # UPDATE "table" SET "id" = 2 RETURNING "id", ("id" * "id") AS "idsq"

When returning is used, instead of returning the number of rows affected (for update/delete) or the serial primary key value (for insert), it will return an array of hashes with the returning results.

=== VALUES Support

Sequel offers support for the +VALUES+ statement using Database#values:

  DB.values([[1,2],[2,3],[3,4]])
  # VALUES (1, 2), (2, 3), (3, 4)

  DB.values([[1,2],[2,3],[3,4]]).order(2, 1)
  # VALUES (1, 2), (2, 3), (3, 4) ORDER BY 2, 1

  DB.values([[1,2],[2,3],[3,4]]).order(2, 1).limit(1,2)
  # VALUES (1, 2), (2, 3), (3, 4) ORDER BY 2, 1 LIMIT 1 OFFSET 2

=== INSERT ON CONFLICT Support

Starting with PostgreSQL 9.5, you can do an upsert or ignore unique or exclusion constraint violations when inserting using Dataset#insert_conflict:

  DB[:table].insert_conflict.insert(a: 1, b: 2)
  # INSERT INTO TABLE (a, b) VALUES (1, 2)
  # ON CONFLICT DO NOTHING

For compatibility with Sequel's MySQL support, you can also use +insert_ignore+:

  DB[:table].insert_ignore.insert(a: 1, b: 2)
  # INSERT INTO TABLE (a, b) VALUES (1, 2)
  # ON CONFLICT DO NOTHING

You can pass a specific constraint name using +:constraint+, to only ignore a specific constraint violation:

  DB[:table].insert_conflict(constraint: :table_a_uidx).insert(a: 1, b: 2)
  # INSERT INTO TABLE (a, b) VALUES (1, 2)
  # ON CONFLICT ON CONSTRAINT table_a_uidx DO NOTHING
If the unique or exclusion constraint covers the whole table (e.g. it isn't a partial unique index), then you can just specify the column using the +:target+ option:

  DB[:table].insert_conflict(target: :a).insert(a: 1, b: 2)
  # INSERT INTO TABLE (a, b) VALUES (1, 2)
  # ON CONFLICT (a) DO NOTHING

If you want to update the existing row instead of ignoring the constraint violation, you can pass an +:update+ option with a hash of values to update. You must pass either the +:target+ or +:constraint+ options when passing the +:update+ option:

  DB[:table].insert_conflict(target: :a, update: {b: Sequel[:excluded][:b]}).insert(a: 1, b: 2)
  # INSERT INTO TABLE (a, b) VALUES (1, 2)
  # ON CONFLICT (a) DO UPDATE SET b = excluded.b

If you want to update existing rows using the current value of a column, you can build the desired calculation using Sequel[]:

  DB[:table]
    .insert_conflict(
      target: :a,
      update: {b: Sequel[:excluded][:b] + Sequel[:table][:a]}
    )
    .import([:a, :b], [ [1, 2] ])
  # INSERT INTO TABLE (a, b) VALUES (1, 2)
  # ON CONFLICT (a) DO UPDATE SET b = (excluded.b + table.a)

Additionally, if you only want to do the update in certain cases, you can specify an +:update_where+ option, which will be used as a filter. If the row doesn't match the conditions, the constraint violation will be ignored, but the row will not be updated:

  DB[:table].insert_conflict(constraint: :table_a_uidx,
    update: {b: Sequel[:excluded][:b]},
    update_where: {Sequel[:table][:status_id]=>1}).insert(a: 1, b: 2)
  # INSERT INTO TABLE (a, b) VALUES (1, 2)
  # ON CONFLICT ON CONSTRAINT table_a_uidx
  # DO UPDATE SET b = excluded.b WHERE (table.status_id = 1)

=== INSERT OVERRIDING SYSTEM|USER VALUE Support

PostgreSQL 10+ supports identity columns, which are designed to replace the serial columns previously used for autoincrementing primary keys. You can use Dataset#overriding_system_value and Dataset#overriding_user_value to use this new syntax:

  DB.create_table(:table){primary_key :id}

  # Ignore the given value for id, using the identity's sequence value.
  DB[:table].overriding_user_value.insert(id: 1)

  DB.create_table(:table){primary_key :id, identity: :always}

  # Force the use of the given value for id, because otherwise the insert will
  # raise an error, since GENERATED ALWAYS was used when creating the column.
  DB[:table].overriding_system_value.insert(id: 1)

=== Distinct On Specific Columns

Sequel allows passing columns to Dataset#distinct, which will make the dataset return rows that are distinct on just those columns:

  DB[:table].distinct(:id).all
  # SELECT DISTINCT ON ("id") * FROM "table"

=== JOIN USING table alias

Sequel allows passing an SQL::AliasedExpression to join table methods to use a USING join with a table alias for the USING columns:

  DB[:t1].join(:t2, Sequel.as([:c1, :c2], :alias))
  # SELECT * FROM "t1" INNER JOIN "t2" USING ("c1", "c2") AS "alias"

=== Calling PostgreSQL 11+ Procedures postgres only

PostgreSQL 11+ added support for procedures, which are different from the user defined functions that PostgreSQL has historically supported. These procedures are called via a special +CALL+ syntax, and Sequel supports them via Database#call_procedure:

  DB.call_procedure(:foo, 1, "bar")
  # CALL foo(1, 'bar')

Database#call_procedure will return a hash of return values if the procedure returns a result, or +nil+ if the procedure does not return a result.
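As a sketch (the procedure and table here are hypothetical; a plain SQL procedure like this returns no result, so call_procedure returns +nil+):

  DB.run <<-SQL
    CREATE PROCEDURE insert_foo(a integer)
    LANGUAGE SQL
    AS $$ INSERT INTO foo VALUES ($1) $$;
  SQL
  DB.call_procedure(:insert_foo, 1)
  # CALL insert_foo(1)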
=== Using a Cursor to Process Large Datasets postgres only The postgres adapter offers a Dataset#use_cursor method to process large result sets without keeping all rows in memory: DB[:table].use_cursor.each{|row| } # BEGIN; # DECLARE sequel_cursor NO SCROLL CURSOR WITHOUT HOLD FOR SELECT * FROM "table"; # FETCH FORWARD 1000 FROM sequel_cursor # FETCH FORWARD 1000 FROM sequel_cursor # ... # FETCH FORWARD 1000 FROM sequel_cursor # CLOSE sequel_cursor # COMMIT This support is used by default when using Dataset#paged_each. Using cursors, it is possible to update individual rows of a large dataset easily using the rows_per_fetch: 1 option in conjunction with Dataset#where_current_of. This is useful if the logic needed to update the rows exists in the application and not in the database: ds.use_cursor(rows_per_fetch: 1).each do |row| ds.where_current_of.update(col: new_col_value(row)) end === Truncate Modifiers Sequel supports PostgreSQL-specific truncate options: DB[:table].truncate(cascade: true, only: true, restart: true) # TRUNCATE TABLE ONLY "table" RESTART IDENTITY CASCADE === COPY Support postgres/pg and jdbc/postgres only PostgreSQL's COPY feature is pretty much the fastest way to get data in or out of the database. Sequel supports getting data out of the database via Database#copy_table, either for a specific table or for an arbitrary dataset: DB.copy_table(:table, format: :csv) # COPY "table" TO STDOUT (FORMAT csv) DB.copy_table(DB[:table], format: :csv) # COPY (SELECT * FROM "table") TO STDOUT (FORMAT csv) It supports putting data into the database via Database#copy_into: DB.copy_into(:table, format: :csv, columns: [:column1, :column2], data: "1,2\n2,3\n") # COPY "table"("column1", "column2") FROM STDIN (FORMAT csv) === Anonymous Function Execution You can execute anonymous functions using a database procedural language via Database#do (the plpgsql language is the default): DB.do <<-SQL DECLARE r record; BEGIN FOR r IN SELECT table_schema, table_name FROM information_schema.tables WHERE table_type = 'VIEW' AND table_schema = 'public' LOOP EXECUTE 'GRANT ALL ON ' || quote_ident(r.table_schema) || '.' || quote_ident(r.table_name) || ' TO webuser'; END LOOP; END; SQL === Listening On and Notifying Channels You can use Database#notify to send notification to channels: DB.notify(:channel) # NOTIFY "channel" postgres/pg only You can listen on channels via Database#listen. Note that this blocks until the listening thread is notified: DB.listen(:channel) # LISTEN "channel" # after notification received: # UNLISTEN * Note that +listen+ by default only listens for a single notification. If you want to loop and process notifications: DB.listen(:channel, loop: true){|channel| p channel} The +pg_static_cache_updater+ extension uses this support to automatically update the caches for models using the +static_cache+ plugin. Look at the documentation of that plugin for details. === Locking Tables Sequel makes it easy to lock tables, though it is generally better to let the database handle locking: DB[:table].lock('EXCLUSIVE') do DB[:table].insert(id: DB[:table].max(:id)+1) end # BEGIN; # LOCK TABLE "table" IN EXCLUSIVE MODE; # SELECT max("id") FROM "table" LIMIT 1; # INSERT INTO "table" ("id") VALUES (2) RETURNING NULL; # COMMIT; == Extended Error Info (postgres/pg only) If you run a query that raises a Sequel::DatabaseError, you can pass the exception object to Database#error_info, and that will return a hash with metadata regarding the error, such as the related table and column or constraint. 
  DB.create_table(:test1){primary_key :id}
  DB.create_table(:test2){primary_key :id; foreign_key :test1_id, :test1}
  DB[:test2].insert(test1_id: 1) rescue DB.error_info($!)
  # => {
  #  :schema=>"public",
  #  :table=>"test2",
  #  :column=>nil,
  #  :constraint=>"test2_test1_id_fkey",
  #  :type=>nil,
  #  :severity=>"ERROR",
  #  :sql_state=>"23503",
  #  :message_primary=>"insert or update on table \"test2\" violates foreign key constraint \"test2_test1_id_fkey\"",
  #  :message_detail=>"Key (test1_id)=(1) is not present in table \"test1\".",
  #  :message_hint=>nil,
  #  :statement_position=>nil,
  #  :internal_position=>nil,
  #  :internal_query=>nil,
  #  :source_file=>"ri_triggers.c",
  #  :source_line=>"3321",
  #  :source_function=>"ri_ReportViolation"
  # }

== sequel_pg (postgres/pg only)

When the postgres adapter is used with the pg driver, Sequel automatically checks for sequel_pg, and loads it if it is available. sequel_pg is a C extension that optimizes the fetching of rows, generally resulting in a ~2x speedup. It is highly recommended to install sequel_pg if you are using the postgres adapter with pg.

sequel_pg has additional optimizations when using the Dataset +map+, +as_hash+, +to_hash_groups+, +select_hash+, +select_hash_groups+, +select_map+, and +select_order_map+ methods, which avoids creating intermediate hashes and can add further speedups.

In addition to optimization, sequel_pg also adds streaming support if used on PostgreSQL 9.2+. Streaming support is similar to using a cursor, but it is faster and more transparent.

You can enable the streaming support:

  DB.extension(:pg_streaming)

Then you can stream individual datasets:

  DB[:table].stream.each{|row| }

Or stream all datasets by default:

  DB.stream_all_queries = true

When streaming is enabled, Dataset#paged_each will use streaming to implement paging.

= Prepared Statements and Bound Variables

Sequel has support for prepared statements and bound variables. No matter which database you are using, the Sequel prepared statement/bound variable API remains the same. There is native support for prepared statements/bound variables on the following adapters:

* ibmdb (prepared statements only)
* jdbc
* mysql (server prepared statements using literalized connection variables)
* mysql2 (full support on 0.4+, otherwise server prepared statements using literalized connection variables)
* oracle (requires type specifiers for nil/NULL values)
* postgres (when using the pg driver)
* sqlite
* tinytds

Support on other adapters is emulated.

You can use the prepared_statements model plugin to automatically use prepared statements for some common model actions such as saving or deleting a model instance, or looking up a model based on a primary key.

== Placeholders

Generally, when using prepared statements (and certainly when using bound variables), you need to put placeholders in your SQL to indicate where you want your bound arguments to appear. Database support and syntax vary significantly for placeholders (e.g. :name, $1, ?). Sequel abstracts all of that and allows you to specify placeholders by using the :$name format for placeholders, e.g.:

  ds = DB[:items].where(name: :$n)

You can use these placeholders in most places where you can use the value directly.
For example, if you want to use placeholders while also using raw SQL, you can do: ds = DB["SELECT * FROM items WHERE name = ?", :$n] == Bound Variables Using bound variables for this query is simple: ds.call(:select, n: 'Jim') This will do the equivalent of selecting records that have the name 'Jim'. It returns all records, and can take a block that is passed to Dataset#all. Deleting or returning the first record works similarly: ds.call(:first, n: 'Jim') # First record with name 'Jim' ds.call(:delete, n: 'Jim') # Delete records with name 'Jim' For inserting/updating records, you should also specify a value hash, which may itself contain placeholders: # Insert record with 'Jim', note that the previous filter is ignored ds.call(:insert, {n: 'Jim'}, name: :$n) # Change name to 'Bob' for all records with name of 'Jim' ds.call(:update, {n: 'Jim', new_n: 'Bob'}, name: :$new_n) == Prepared Statements Prepared statement support is similar to bound variable support, but you use Dataset#prepare with a name, and Dataset#call or Database#call later with the values: ds = DB[:items].where(name: :$n) ps = ds.prepare(:select, :select_by_name) ps.call(n: 'Jim') DB.call(:select_by_name, n: 'Jim') # same The Dataset#prepare method returns a prepared statement, and also stores a copy of the prepared statement in the database for later use. For insert and update queries, the hash to insert/update is passed to +prepare+: ps1 = DB[:items].prepare(:insert, :insert_with_name, name: :$n) ps1.call(n: 'Jim') DB.call(:insert_with_name, n: 'Jim') # same ds = DB[:items].where(name: :$n) ps2 = ds.prepare(:update, :update_name, name: :$new_n) ps2.call(n: 'Jim', new_n: 'Bob') DB.call(:update_name, n: 'Jim', new_n: 'Bob') # same == Implementation Issues Currently, creating a prepared statement uses Object#extend, which can hurt performance. For high performance applications, it's recommended to create all of your prepared statements upon application initialization, and not to create prepared statements dynamically at runtime. == Database support === PostgreSQL If you are using the postgres-pr driver, PostgreSQL uses the default emulated support. If you are using ruby-pg, there is native support for both prepared statements and bound variables. Prepared statements are always server side. === SQLite SQLite supports both prepared statements and bound variables. === MySQL/Mysql2 The MySQL and Mysql2 <0.4 ruby drivers do not support bound variables, so the bound variable methods are emulated. It uses server side prepared statements. Mysql2 0.4+ supports both prepared statements and bound variables. === JDBC JDBC supports both prepared statements and bound variables. Whether these are server side or client side depends on the JDBC driver. For PostgreSQL over JDBC, you can add the prepareThreshold=N parameter to the connection string, which will use a server side prepared statement after N calls to the prepared statement. === TinyTDS Uses the sp_executesql stored procedure with bound variables, since Microsoft SQL Server doesn't support true prepared statements. === IBM_DB DB2 supports both prepared statements and bound variables. === Oracle Oracle supports both prepared statements and bound variables. Prepared statements (OCI8::Cursor objects) are cached per connection. If you ever plan to use a nil/NULL value as a bound variable/prepared statement value, you must specify the type in the placeholder using a __* suffix. You can use any of the schema types that Sequel supports, such as :$name__string or :$num__integer. 
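For example, a sketch of a typed placeholder for a possibly-NULL value (the dataset and column are illustrative):

  ds = DB[:items].where(name: :$n__string)
  ds.call(:select, n: nil)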
Using blobs as bound variables is not currently supported.

=== All Others

Support is emulated.

= Querying in Sequel

This guide is based on http://guides.rubyonrails.org/active_record_querying.html

== Purpose of this Guide

Sequel is a flexible and powerful database library that supports a wide variety of different querying methods. This guide aims to be an introduction to Sequel's querying support.

While you can use raw SQL with Sequel, a large part of the advantage you get from using Sequel is Sequel's ability to abstract SQL from you and give you a pure-ruby interface. Sequel also ships with a {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc], which adds methods to core ruby types to work with Sequel.

== Retrieving Objects

Sequel provides a few separate methods for retrieving objects from the database. The underlying method is Sequel::Dataset#each, which yields each row as the Sequel::Database provides it. However, while Dataset#each can and often is used directly, in many cases there is a more convenient retrieval method you can use.

=== Sequel::Dataset

If you are new to Sequel and aren't familiar with Sequel's datasets, you should probably read the {"Dataset Basics" guide}[rdoc-ref:doc/dataset_basics.rdoc], then come back here.

=== Retrieving a Single Object

Sequel offers quite a few ways to retrieve a single object.

==== Using a Primary Key [Sequel::Model]

Sequel::Model.[] is the easiest method to use to find a model instance by its primary key value:

  # Find artist with primary key (id) 1
  artist = Artist[1]
  # SELECT * FROM artists WHERE (id = 1)
  # => #<Artist @values={:name=>"YJM", :id=>1}>

If there is no record with the given primary key, nil will be returned. If you want to raise an exception if no record is found, you can use Sequel::Model.with_pk!:

  artist = Artist.with_pk!(1)

==== Using +first+

If you want the first record in the dataset, Sequel::Dataset#first is probably the most obvious method to use:

  artist = Artist.first
  # SELECT * FROM artists LIMIT 1
  # => #<Artist @values={:name=>"YJM", :id=>1}>

Any arguments you pass to +first+ will be used as a filter:

  artist = Artist.first(name: 'YJM')
  # SELECT * FROM artists WHERE (name = 'YJM') LIMIT 1
  # => #<Artist @values={:name=>"YJM", :id=>1}>

  artist = Artist.first(Sequel.like(:name, 'Y%'))
  # SELECT * FROM artists WHERE (name LIKE 'Y%' ESCAPE '\') LIMIT 1
  # => #<Artist @values={:name=>"YJM", :id=>1}>

If there is no matching row, +first+ will return nil. If you want to raise an exception instead, use first!.

Sequel::Dataset#[] is basically an alias for +first+, except it requires an argument:

  DB[:artists][{name: 'YJM'}]
  # SELECT * FROM artists WHERE (name = 'YJM') LIMIT 1
  # => {:name=>"YJM", :id=>1}

Note that while Model.[] allows you to pass a primary key directly, Dataset#[] does not (unless it is a model dataset).

==== Using +last+

If you want the last record in the dataset, Sequel::Dataset#last is an obvious method to use. +last+ requires the dataset be ordered, unless the dataset is a model dataset, in which case +last+ will do a reverse order by the primary key field:

  artist = Artist.last
  # SELECT * FROM artists ORDER BY id DESC LIMIT 1
  # => #<Artist @values={:name=>"YJM", :id=>1}>

Note:

1. +last+ is equivalent to running a +reverse.first+ query, in other words it reverses the order of the dataset and then calls +first+. This is why +last+ raises a Sequel::Error when there is no order on a plain dataset - because it will provide the same record as +first+, and most users will find that confusing.
2. +last+ is not necessarily going to give you the last record in the dataset unless you give the dataset an unambiguous order.
3. +last+ will ignore +limit+ if chained together in a query because it sets a limit of 1 if no arguments are given.

==== Retrieving a Single Column Value

Sometimes, instead of wanting an entire row, you only want the value of a specific column. For this, Sequel::Dataset#get is the method you want:

  artist_name = Artist.get(:name)
  # SELECT name FROM artists LIMIT 1
  # => "YJM"

==== Retrieving Multiple Column Values

If you want the value for multiple columns, you can pass an array to Sequel::Dataset#get:

  artist_id, artist_name = Artist.get([:id, :name])
  # SELECT id, name FROM artists LIMIT 1
  # => [1, "YJM"]

=== Retrieving Multiple Objects

==== As an Array of Hashes or Model Objects

In many cases, you want an array of all of the rows associated with the dataset, in which case Sequel::Dataset#all is the method you want to use:

  artists = Artist.all
  # SELECT * FROM artists
  # => [#<Artist @values={:name=>"YJM", :id=>1}>,
  #     #<Artist @values={:name=>"AS", :id=>2}>]

==== Using an Enumerable Interface

Sequel::Dataset uses an Enumerable interface, so it provides a method named each that yields hashes or model objects as they are retrieved from the database:

  Artist.each{|x| p x.name}
  # SELECT * FROM artists
  "YJM"
  "AS"

This means that all of the methods in the Enumerable module are available, such as +map+:

  artist_names = Artist.map{|x| x.name}
  # SELECT * FROM artists
  # => ["YJM", "AS"]

==== As an Array of Column Values

Sequel also has an extended +map+ method that takes an argument. If you provide an argument to +map+, it will return an array of values for the given column. So the previous example can be handled more easily with:

  artist_names = Artist.map(:name)
  # SELECT * FROM artists
  # => ["YJM", "AS"]

One difference between these two ways of returning an array of values is that providing +map+ with an argument is really doing:

  artist_names = Artist.map{|x| x[:name]} # not x.name

Note that regardless of whether you provide +map+ with an argument, it does not modify the columns selected. If you only want to select a single column and return an array of the column values, you can use +select_map+:

  artist_names = Artist.select_map(:name)
  # SELECT name FROM artists
  # => ["YJM", "AS"]

It's also common to want to order such a map, so Sequel provides a +select_order_map+ method as well:

  artist_names = Artist.select_order_map(:name)
  # SELECT name FROM artists ORDER BY name
  # => ["AS", "YJM"]

In all of these cases, you can provide an array of column symbols and an array of arrays of values will be returned:

  artist_names = Artist.select_map([:id, :name])
  # SELECT id, name FROM artists
  # => [[1, "YJM"], [2, "AS"]]

==== As a Hash

Sequel makes it easy to take an SQL query and return it as a ruby hash, using the +as_hash+ method:

  artist_names = Artist.as_hash(:id, :name)
  # SELECT * FROM artists
  # => {1=>"YJM", 2=>"AS"}

As you can see, the +as_hash+ method uses the first symbol as the key and the second symbol as the value. So if you swap the two arguments the hash will have its keys and values transposed:

  artist_names = Artist.as_hash(:name, :id)
  # SELECT * FROM artists
  # => {"YJM"=>1, "AS"=>2}

Now what if you have multiple values for the same key? By default, +as_hash+ will just have the last matching value.
If you care about all matching values, use +to_hash_groups+, which makes the values of the hash an array of matching values, in the order they were received:

  artist_names = Artist.to_hash_groups(:name, :id)
  # SELECT * FROM artists
  # => {"YJM"=>[1, 10, ...], "AS"=>[2, 20, ...]}

If you only provide one argument to +as_hash+, it uses the entire hash or model object as the value:

  artist_names = DB[:artists].as_hash(:name)
  # SELECT * FROM artists
  # => {"YJM"=>{:id=>1, :name=>"YJM"}, "AS"=>{:id=>2, :name=>"AS"}}

and +to_hash_groups+ works similarly:

  artist_names = DB[:artists].to_hash_groups(:name)
  # SELECT * FROM artists
  # => {"YJM"=>[{:id=>1, :name=>"YJM"}, {:id=>10, :name=>"YJM"}], ...}

Model datasets have a +as_hash+ method that can be called without any arguments, in which case it will use the primary key as the key and the model object as the value.  This can be used to easily create an identity map:

  artist_names = Artist.as_hash
  # SELECT * FROM artists
  # => {1=>#<Artist @values={:id=>1, :name=>"YJM"}>,
  #     2=>#<Artist @values={:id=>2, :name=>"AS"}>}

There is no equivalent handling to +to_hash_groups+, since there would only be one matching record, as the primary key must be unique.

Note that +as_hash+ never modifies the columns selected.  However, just like Sequel has a +select_map+ method to modify the columns selected and return an array, Sequel also has a +select_hash+ method to modify the columns selected and return a hash:

  artist_names = Artist.select_hash(:name, :id)
  # SELECT name, id FROM artists
  # => {"YJM"=>1, "AS"=>2}

Likewise, +select_hash_groups+ also exists:

  artist_names = Artist.select_hash_groups(:name, :id)
  # SELECT name, id FROM artists
  # => {"YJM"=>[1, 10, ...], "AS"=>[2, 20, ...]}

== Modifying datasets

Note that the retrieval methods discussed above just return the row(s) included in the existing dataset.  In most cases, you aren't interested in every row in a table, but in a subset of the rows, based on some criteria.  In Sequel, filtering the dataset is generally done separately from retrieving the records.

There are really two types of dataset methods that you will be using:

1. Methods that return row(s), discussed above
2. Methods that return modified datasets, discussed below

Sequel datasets are frozen and use a method chaining, functional style API that returns modified datasets.  Let's start with a simple example.  This is a basic dataset that includes all records in the table +artists+:

  ds1 = DB[:artists]
  # SELECT * FROM artists

Let's say we are only interested in the artists whose names start with "A":

  ds2 = ds1.where(Sequel.like(:name, 'A%'))
  # SELECT * FROM artists WHERE (name LIKE 'A%' ESCAPE '\')

Here we see that +where+ returns a dataset that adds a +WHERE+ clause to the query.  It's important to note that +where+ does not modify the receiver:

  ds1
  # SELECT * FROM artists
  ds2
  # SELECT * FROM artists WHERE (name LIKE 'A%' ESCAPE '\')

In Sequel, dataset methods do not modify the dataset itself, so you can freely use the dataset in multiple places without worrying that its usage in one place will affect its usage in another place.  This is what is meant by a functional style API.

Let's say we only want to select the id and name columns, and that we want to order by name:

  ds3 = ds2.order(:name).select(:id, :name)
  # SELECT id, name FROM artists WHERE (name LIKE 'A%' ESCAPE '\') ORDER BY name

Note how you don't need to assign the returned value of order to a variable, and then call select on that.  Because order just returns a dataset, you can call select directly on the returned dataset.
This is what is meant by a method chaining API.

Also note how you can call methods that modify different clauses in any order.  In this case, the WHERE clause was added first, then the ORDER clause, then the SELECT clause was modified.  This makes for a flexible API, where you can modify any part of the query at any time.

== Filters

Filtering is probably the most common dataset modifying action done in Sequel.  Both the +where+ and +filter+ methods filter the dataset by modifying the dataset's WHERE clause.  Both accept a wide variety of input formats, discussed below.

=== Hashes

The most common format for providing filters is via a hash.  In general, Sequel treats conditions specified with a hash as equality, inclusion, or identity.  What type of condition is used depends on the values in the hash.

Unless Sequel has special support for the value's class, it uses a simple equality statement:

  Artist.where(id: 1)
  # SELECT * FROM artists WHERE (id = 1)
  Artist.where(name: 'YJM')
  # SELECT * FROM artists WHERE (name = 'YJM')

For arrays, Sequel uses the IN operator with a value list:

  Artist.where(id: [1, 2])
  # SELECT * FROM artists WHERE (id IN (1, 2))

For datasets, Sequel uses the IN operator with a subselect:

  Artist.where(id: Album.select(:artist_id))
  # SELECT * FROM artists WHERE (id IN (
  #   SELECT artist_id FROM albums))

For boolean values such as nil, true, and false, Sequel uses the IS operator:

  Artist.where(id: nil)
  # SELECT * FROM artists WHERE (id IS NULL)

For ranges, Sequel uses a pair of inequality statements:

  Artist.where(id: 1..5)
  # SELECT * FROM artists WHERE ((id >= 1) AND (id <= 5))

  Artist.where(id: 1...5)
  # SELECT * FROM artists WHERE ((id >= 1) AND (id < 5))

Finally, for regexps, Sequel uses an SQL regular expression.  Note that this is only supported by default on PostgreSQL and MySQL.  It can also be supported on SQLite when using the sqlite adapter with the :setup_regexp_function Database option.

  Artist.where(name: /JM$/)
  # SELECT * FROM artists WHERE (name ~ 'JM$')

If there are multiple entries in the hash, the filters are ANDed together:

  Artist.where(id: 1, name: /JM$/)
  # SELECT * FROM artists WHERE ((id = 1) AND (name ~ 'JM$'))

This works the same as if you used two separate +where+ calls:

  Artist.where(id: 1).where(name: /JM$/)
  # SELECT * FROM artists WHERE ((id = 1) AND (name ~ 'JM$'))

=== Array of Two Element Arrays

If you use an array of two element arrays, it is treated as a hash.  The only advantage to using an array of two element arrays is that it allows you to duplicate keys, so you can do:

  Artist.where([[:name, /JM$/], [:name, /^YJ/]])
  # SELECT * FROM artists WHERE ((name ~ 'JM$') AND (name ~ '^YJ'))

=== Virtual Row Blocks

If a block is passed to a filter, it is treated as a virtual row block:

  Artist.where{id > 5}
  # SELECT * FROM artists WHERE (id > 5)

You can learn more about virtual row blocks in the {"Virtual Rows" guide}[rdoc-ref:doc/virtual_rows.rdoc].
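Virtual row blocks can also combine multiple conditions with the & and | operators; here is a minimal sketch (the column names are hypothetical):

  Artist.where{(debut_year > 1980) & (num_albums >= 5)}
  # SELECT * FROM artists WHERE ((debut_year > 1980) AND (num_albums >= 5))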
You can provide both regular arguments and a block, in which case the results will be ANDed together:

  Artist.where(name: 'A'...'M'){id > 5}
  # SELECT * FROM artists WHERE ((name >= 'A') AND (name < 'M') AND (id > 5))

Using virtual row blocks, what you can do with a single entry hash or an array with a single two element array can also be done using the =~ method:

  Artist.where{id =~ 5}
  # SELECT * FROM artists WHERE (id = 5)

=== Symbols

If you have a boolean column in the database, and you want only true values, you can just provide the column symbol to filter:

  Artist.where(:retired)
  # SELECT * FROM artists WHERE retired

=== SQL::Expression

Sequel has a DSL that allows easily creating SQL expressions.  These SQL expressions are instances of subclasses of Sequel::SQL::Expression.  You've already seen an example earlier:

  Artist.where(Sequel.like(:name, 'Y%'))
  # SELECT * FROM artists WHERE (name LIKE 'Y%' ESCAPE '\')

In this case Sequel.like returns a Sequel::SQL::BooleanExpression object, which is used directly in the filter.

You can use the DSL to create arbitrarily complex expressions.  SQL::Expression objects can be created via singleton methods on the Sequel module.  The most common method is Sequel.[], which takes any object and wraps it in a SQL::Expression object.  In most cases, the SQL::Expression returned supports the & operator for +AND+, the | operator for +OR+, and the ~ operator for inversion:

  Artist.where(Sequel.like(:name, 'Y%') & (Sequel[{b: 1}] | Sequel.~(c: 3)))
  # SELECT * FROM artists WHERE ((name LIKE 'Y%' ESCAPE '\') AND ((b = 1) OR (c != 3)))

You can combine these expression operators with the virtual row support:

  Artist.where{(a > 1) & ~((b(c) < 1) | d)}
  # SELECT * FROM artists WHERE ((a > 1) AND (b(c) >= 1) AND NOT d)

Note the use of parentheses when using the & and | operators, as they have lower precedence than other operators.  The following will not work:

  Artist.where{a > 1 & ~(b(c) < 1 | d)}
  # Raises a TypeError

=== Strings with Placeholders

Assuming you want to get your hands dirty and use SQL fragments in filters, Sequel allows you to do so if you explicitly mark the strings as literal strings using +Sequel.lit+.  You can use placeholders in the string and pass arguments for the placeholders:

  Artist.where(Sequel.lit("name LIKE ?", 'Y%'))
  # SELECT * FROM artists WHERE (name LIKE 'Y%')

This is the most common type of placeholder, where each question mark is substituted with the next argument:

  Artist.where(Sequel.lit("name LIKE ? AND id = ?", 'Y%', 5))
  # SELECT * FROM artists WHERE (name LIKE 'Y%' AND id = 5)

You can also use named placeholders with a hash, where the named placeholders use colons before the placeholder names:

  Artist.where(Sequel.lit("name LIKE :name AND id = :id", name: 'Y%', id: 5))
  # SELECT * FROM artists WHERE (name LIKE 'Y%' AND id = 5)

You don't have to provide any placeholders if you don't want to:

  Artist.where(Sequel.lit("id = 2"))
  # SELECT * FROM artists WHERE id = 2

However, if you are using any untrusted input, you should definitely be using placeholders.  In general, unless you are hardcoding values in the strings, you should use placeholders.  You should never pass a string that has been built using interpolation, unless you are sure of what you are doing.

  Artist.where(Sequel.lit("id = #{params[:id]}")) # Don't do this!
  Artist.where(Sequel.lit("id = ?", params[:id])) # Do this instead
  Artist.where(id: params[:id].to_i) # Even better

=== Inverting

You may be wondering how to specify a not equals condition in Sequel, or the NOT IN operator.
Sequel has generic support for inverting conditions, so to write a not equals condition, you write an equals condition, and invert it:

  Artist.where(id: 5).invert
  # SELECT * FROM artists WHERE (id != 5)

Note that +invert+ inverts the entire filter:

  Artist.where(id: 5).where{name > 'A'}.invert
  # SELECT * FROM artists WHERE ((id != 5) OR (name <= 'A'))

In general, +invert+ is used rarely, since +exclude+ allows you to invert only specific filters:

  Artist.exclude(id: 5)
  # SELECT * FROM artists WHERE (id != 5)

  Artist.where(id: 5).exclude{name > 'A'}
  # SELECT * FROM artists WHERE ((id = 5) AND (name <= 'A'))

So to do a NOT IN with an array:

  Artist.exclude(id: [1, 2])
  # SELECT * FROM artists WHERE (id NOT IN (1, 2))

Or to use the NOT LIKE operator:

  Artist.exclude(Sequel.like(:name, '%J%'))
  # SELECT * FROM artists WHERE (name NOT LIKE '%J%' ESCAPE '\')

You can use Sequel.~ to negate expressions:

  Artist.where(Sequel.~(id: 5))
  # SELECT * FROM artists WHERE (id != 5)

On Sequel expression objects, you can use ~ to negate them:

  Artist.where(~Sequel.like(:name, '%J%'))
  # SELECT * FROM artists WHERE (name NOT LIKE '%J%' ESCAPE '\')

You can use !~ on Sequel expressions to create negated expressions:

  Artist.where{id !~ 5}
  # SELECT * FROM artists WHERE (id != 5)

=== Removing

To remove all existing filters, use +unfiltered+:

  Artist.where(id: 1).unfiltered
  # SELECT * FROM artists

== Ordering

Sequel offers quite a few methods to manipulate the SQL ORDER BY clause.  The most basic of these is +order+:

  Artist.order(:id)
  # SELECT * FROM artists ORDER BY id

You can specify multiple arguments to order by more than one column:

  Album.order(:artist_id, :id)
  # SELECT * FROM albums ORDER BY artist_id, id

Note that unlike +where+, +order+ replaces an existing order, it does not append to an existing order:

  Artist.order(:id).order(:name)
  # SELECT * FROM artists ORDER BY name

If you want to add a column to the end of the existing order:

  Artist.order(:id).order_append(:name)
  # SELECT * FROM artists ORDER BY id, name

If you want to add a column to the beginning of the existing order:

  Artist.order(:id).order_prepend(:name)
  # SELECT * FROM artists ORDER BY name, id

=== Reversing

Just like you can invert an existing filter, you can reverse an existing order, using +reverse+ without an order:

  Artist.order(:id).reverse
  # SELECT * FROM artists ORDER BY id DESC

Alternatively, you can provide reverse with the order:

  Artist.reverse(:id)
  # SELECT * FROM artists ORDER BY id DESC

To specify a single entry be reversed, Sequel.desc can be used:

  Artist.order(Sequel.desc(:id))
  # SELECT * FROM artists ORDER BY id DESC

This allows you to easily use both ascending and descending orders:

  Artist.order(:name, Sequel.desc(:id))
  # SELECT * FROM artists ORDER BY name, id DESC

=== Removing

Just like you can remove filters with +unfiltered+, you can remove orders with +unordered+:

  Artist.order(:name).unordered
  # SELECT * FROM artists

== Selected Columns

Sequel offers a few methods to manipulate the columns selected.  As you may be able to guess, the main method used is +select+:

  Artist.select(:id, :name)
  # SELECT id, name FROM artists

You just specify all of the columns that you are selecting as arguments to the method.

If you are dealing with model objects, you'll want to include the primary key if you want to update or destroy the object.  You'll also want to include any keys (primary or foreign) related to associations you plan to use.
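For example, here is a minimal sketch (assuming a hypothetical Album model with a many_to_one :artist association):

  # Keep the primary key so the object can be updated or destroyed,
  # and the foreign key so album.artist still works:
  album = Album.select(:id, :artist_id, :name).first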
If a column is not selected, and you attempt to access it, you will get nil:

  artist = Artist.select(:name).first
  # SELECT name FROM artists LIMIT 1

  artist[:id]
  # => nil

Like +order+, +select+ replaces the existing selected columns:

  Artist.select(:id).select(:name)
  # SELECT name FROM artists

To add to the existing selected columns, use +select_append+:

  Artist.select(:id).select_append(:name)
  # SELECT id, name FROM artists

To remove specifically selected columns, and default back to all columns, use +select_all+:

  Artist.select(:id).select_all
  # SELECT * FROM artists

To select all columns from a given table, provide an argument to +select_all+:

  Artist.select_all(:artists)
  # SELECT artists.* FROM artists

=== Distinct

To treat duplicate rows as a single row when retrieving the records, use +distinct+:

  Artist.distinct.select(:name)
  # SELECT DISTINCT name FROM artists

Note that DISTINCT is a separate SQL clause, it's not a function that you pass to select.

== Limit and Offset

You can limit the dataset to a given number of rows using +limit+:

  Artist.limit(5)
  # SELECT * FROM artists LIMIT 5

You can provide a second argument to +limit+ to specify an offset:

  Artist.limit(5, 10)
  # SELECT * FROM artists LIMIT 5 OFFSET 10

You can also call the +offset+ method separately:

  Artist.limit(5).offset(10)
  # SELECT * FROM artists LIMIT 5 OFFSET 10

Either of these would return the 11th through 15th records in the original dataset.

To remove a limit and offset from a dataset, use +unlimited+:

  Artist.limit(5, 10).unlimited
  # SELECT * FROM artists

== Grouping

The SQL GROUP BY clause is used to combine multiple rows based on the values of a given group of columns.

To modify the GROUP BY clause of the SQL statement, you use +group+:

  Album.group(:artist_id)
  # SELECT * FROM albums GROUP BY artist_id

You can remove an existing grouping using +ungrouped+:

  Album.group(:artist_id).ungrouped
  # SELECT * FROM albums

If you want to add a column to the end of the existing grouping columns:

  Album.group(:artist_id).group_append(:name)
  # SELECT * FROM albums GROUP BY artist_id, name

A common use of grouping is to count based on the number of grouped rows, and Sequel provides a +group_and_count+ method to make this easier:

  Album.group_and_count(:artist_id)
  # SELECT artist_id, count(*) AS count FROM albums GROUP BY artist_id

This will return the number of albums for each artist_id.

If you want to select and group on the same columns, you can use +select_group+:

  Album.select_group(:artist_id)
  # SELECT artist_id FROM albums GROUP BY artist_id

Usually you would add a +select_append+ call after that, to add some sort of aggregation:

  Album.select_group(:artist_id).select_append{sum(num_tracks).as(tracks)}
  # SELECT artist_id, sum(num_tracks) AS tracks FROM albums GROUP BY artist_id

== Having

The SQL HAVING clause is similar to the WHERE clause, except that it filters the results after the grouping has been applied, instead of before.  One possible use is if you only wanted to return artists who had at least 10 albums:

  Album.group_and_count(:artist_id).having{count.function.* >= 10}
  # SELECT artist_id, count(*) AS count FROM albums
  # GROUP BY artist_id HAVING (count(*) >= 10)

Both the WHERE clause and the HAVING clause are removed by +unfiltered+:

  Album.group_and_count(:artist_id).having{count.function.* >= 10}.
    where(Sequel.like(:name, 'A%')).unfiltered
  # SELECT artist_id, count(*) AS count FROM albums GROUP BY artist_id

== Joins

Sequel has support for many different SQL join types.
The underlying method used is +join_table+:

  Album.join_table(:inner, :artists, id: :artist_id)
  # SELECT * FROM albums
  # INNER JOIN artists ON (artists.id = albums.artist_id)

In most cases, you won't call +join_table+ directly, as Sequel provides shortcuts for all common (and most uncommon) join types.  For example +join+ does an inner join:

  Album.join(:artists, id: :artist_id)
  # SELECT * FROM albums
  # INNER JOIN artists ON (artists.id = albums.artist_id)

And +left_join+ does a LEFT JOIN:

  Album.left_join(:artists, id: :artist_id)
  # SELECT * FROM albums
  # LEFT JOIN artists ON (artists.id = albums.artist_id)

=== Table/Dataset to Join

For all of these specialized join methods, the first argument is generally the name of the table to which you are joining.  However, you can also provide a dataset, in which case a subselect is used:

  Album.join(Artist.where{name < 'A'}, id: :artist_id)
  # SELECT * FROM albums
  # INNER JOIN (SELECT * FROM artists WHERE (name < 'A')) AS t1
  # ON (t1.id = albums.artist_id)

=== Join Conditions

The second argument to the specialized join methods is the conditions to use when joining, which is similar to a filter expression, with a few minor exceptions.

==== Implicit Qualification

A hash used as the join conditions operates similarly to a filter, except that unqualified symbol keys are automatically qualified with the table from the first argument, and unqualified symbol values are automatically qualified with the last table joined (or the first table in the dataset if there hasn't been a previous join):

  Album.join(:artists, id: :artist_id)
  # SELECT * FROM albums
  # INNER JOIN artists ON (artists.id = albums.artist_id)

Note how the +id+ symbol is automatically qualified with +artists+, while the +artist_id+ symbol is automatically qualified with +albums+.

Because Sequel uses the last joined table for implicit qualifications of values, you can do things like:

  Album.join(:artists, id: :artist_id).
    join(:members, artist_id: :id)
  # SELECT * FROM albums
  # INNER JOIN artists ON (artists.id = albums.artist_id)
  # INNER JOIN members ON (members.artist_id = artists.id)

Note that when joining to the +members+ table, +artist_id+ is qualified with +members+ and +id+ is qualified with +artists+.

While a good default, implicit qualification is not always correct:

  Album.join(:artists, id: :artist_id).
    join(:tracks, album_id: :id)
  # SELECT * FROM albums
  # INNER JOIN artists ON (artists.id = albums.artist_id)
  # INNER JOIN tracks ON (tracks.album_id = artists.id)

Note here how +id+ is qualified with +artists+ instead of +albums+.  This is wrong as the foreign key tracks.album_id refers to albums.id, not artists.id.  To fix this, you need to explicitly qualify when joining:

  Album.join(:artists, id: :artist_id).
    join(:tracks, album_id: Sequel[:albums][:id])
  # SELECT * FROM albums
  # INNER JOIN artists ON (artists.id = albums.artist_id)
  # INNER JOIN tracks ON (tracks.album_id = albums.id)

Just like in filters, an array of two element arrays is treated the same as a hash, but allows for duplicate keys:

  Album.join(:artists, [[:id, :artist_id], [:id, 1..5]])
  # SELECT * FROM albums INNER JOIN artists
  # ON ((artists.id = albums.artist_id)
  # AND (artists.id >= 1) AND (artists.id <= 5))

And just like in the hash case, unqualified symbol elements in the array are implicitly qualified.

By default, Sequel only qualifies unqualified symbols in the conditions.  However, you can provide an options hash with a qualify: :deep option to do a deep qualification, which can qualify subexpressions.
For example, let's say you are doing a JOIN using case insensitive string comparison:

  Album.join(:artists, {Sequel.function(:lower, :name) =>
                        Sequel.function(:lower, :artist_name)},
             qualify: :deep)
  # SELECT * FROM albums INNER JOIN artists
  # ON (lower(artists.name) = lower(albums.artist_name))

Note how the arguments to lower were qualified correctly in both cases.

==== USING Joins

The most common type of join conditions is a JOIN ON, as displayed above.  However, the SQL standard allows for join conditions to be specified with JOIN USING, assuming the column name is the same in both tables.

For example, if instead of having a primary key column named +id+ in all of your tables, you use +artist_id+ in your +artists+ table and +album_id+ in your +albums+ table, you could do:

  Album.join(:artists, [:artist_id])
  # SELECT * FROM albums INNER JOIN artists USING (artist_id)

See here how you specify the USING columns as an array of symbols.

==== NATURAL Joins

NATURAL joins take it one step further than USING joins, by assuming that all columns with the same names in both tables should be used for joining:

  Album.natural_join(:artists)
  # SELECT * FROM albums NATURAL JOIN artists

In this case, you don't even need to specify any conditions.

==== Join Blocks

You can provide a block to any of the join methods that accept conditions.  This block should accept 3 arguments: the table alias for the table currently being joined, the table alias for the last table joined (or first table), and an array of previous Sequel::SQL::JoinClauses.

This allows you to qualify columns similar to how the implicit qualification works, without worrying about the specific aliases being used.  For example, let's say you wanted to join the albums and artists tables, but only want albums where the artist's name comes before the album's name.

  Album.join(:artists, id: :artist_id) do |j, lj, js|
    Sequel[j][:name] < Sequel[lj][:name]
  end
  # SELECT * FROM albums INNER JOIN artists
  # ON ((artists.id = albums.artist_id)
  # AND (artists.name < albums.name))

Because an inequality like this can't be expressed with a hash in Sequel join conditions, you need to use a block and qualify the tables manually.

== From

In general, the FROM table is the first clause populated when creating a dataset.  For a standard Sequel::Model, the dataset already has the FROM clause populated, and the most common way to create datasets is with the Database#[] method, which populates the FROM clause.

However, you can modify the tables you are selecting FROM using +from+:

  Album.from(:albums, :old_albums)
  # SELECT * FROM albums, old_albums

Be careful with this, as multiple tables in the FROM clause use a cross join by default, so the number of rows will be number of albums times the number of old albums.

Using multiple FROM tables and setting conditions in the WHERE clause is an old-school way of joining tables:

  DB.from(:albums, :artists).where{{artists[:id]=>albums[:artist_id]}}
  # SELECT * FROM albums, artists WHERE (artists.id = albums.artist_id)

=== Using the current dataset in a subselect

In some cases, you may want to wrap the current dataset in a subselect.
Here's an example using +from_self+:

  Album.order(:artist_id).limit(100).from_self.group(:artist_id)
  # SELECT * FROM (SELECT * FROM albums ORDER BY artist_id LIMIT 100)
  # AS t1 GROUP BY artist_id

This is different from the query without +from_self+:

  Album.order(:artist_id).limit(100).group(:artist_id)
  # SELECT * FROM albums GROUP BY artist_id ORDER BY artist_id LIMIT 100

Without +from_self+, you are doing the grouping, and limiting the number of grouped records returned to 100.  So assuming you have albums by more than 100 artists, you'll end up with 100 results.

With +from_self+, you are limiting the number of records before grouping.  So if the artist with the lowest id had 100 albums, you'd get 1 result, not 100.

== Locking for Update

Sequel allows you to easily add a FOR UPDATE clause to your queries so that the records returned can't be modified by another query until the current transaction commits.  You just use the +for_update+ dataset method when returning the rows:

  DB.transaction do
    album = Album.for_update.first(id: 1)
    # SELECT * FROM albums WHERE (id = 1) FOR UPDATE
    album.num_tracks += 1
    album.save
  end

This will ensure that no other connection modifies the row between when you select it and when the transaction ends.

=== Optimistic Locking

One of the model plugins that ships with Sequel is an optimistic locking plugin, which provides a database independent way to detect and raise an error if two different connections modify the same row.  It's useful for things like web forms where you cannot keep a transaction open while the user is looking at the form, because of the web's stateless nature.

== Custom SQL

Sequel makes it easy to use custom SQL for the query by providing it to the Database#[] method as a string:

  DB["SELECT * FROM artists"]
  # SELECT * FROM artists

You can also use the +with_sql+ dataset method to return a dataset that uses that exact SQL:

  DB[:albums].with_sql("SELECT * FROM artists")
  # SELECT * FROM artists

With either of these methods, you can use placeholders:

  DB["SELECT * FROM artists WHERE id = ?", 5]
  # SELECT * FROM artists WHERE id = 5

  DB[:albums].with_sql("SELECT * FROM artists WHERE id = :id", id: 5)
  # SELECT * FROM artists WHERE id = 5

Note that if you specify the dataset using custom SQL, you can still call the dataset modification methods, but in many cases they will appear to have no effect:

  DB["SELECT * FROM artists"].select(:name).order(:id)
  # SELECT * FROM artists

You can use the implicit_subquery extension to automatically wrap queries that use custom SQL in subqueries if a method is called that would modify the SQL:

  DB.extension :implicit_subquery
  DB["SELECT * FROM artists"].select(:name).order(:id)
  # SELECT name FROM (SELECT * FROM artists) AS t1 ORDER BY id

If you must drop down to using custom SQL, it's recommended that you only do so for specific parts of a query.  For example, if the reason you are using custom SQL is to use a custom operator in the database in the SELECT clause:

  DB["SELECT name, (foo !@# ?) AS baz FROM artists", 'bar']

it's better to use Sequel's DSL, and use a literal string for the custom operator:

  DB[:artists].select(:name, Sequel.lit("(foo !@# ?)", 'bar').as(:baz))

That way Sequel's method chaining still works, and it increases Sequel's ability to introspect the code.

== Checking for Records

If you just want to know whether the current dataset would return any rows, use empty?:

  Album.empty?
  # SELECT 1 FROM albums LIMIT 1
  # => false

  Album.where(id: 0).empty?
  # SELECT 1 FROM albums WHERE (id = 0) LIMIT 1
  # => true

  Album.where(Sequel.like(:name, 'R%')).empty?
  # SELECT 1 FROM albums WHERE (name LIKE 'R%' ESCAPE '\') LIMIT 1
  # => false

== Aggregate Calculations

The SQL standard defines a few helpful aggregate functions to get summary information about datasets, such as +count+, +sum+, +avg+, +min+, and +max+.  There are dataset methods for each of these aggregate functions.

+count+ just returns the number of records in the dataset.

  Album.count
  # SELECT count(*) AS count FROM albums LIMIT 1
  # => 2

If you pass an expression to count, it will return the number of records where that expression is not NULL:

  Album.count(:artist_id)
  # SELECT count(artist_id) AS count FROM albums LIMIT 1
  # => 1

The other methods take a column argument and call the aggregate function with the argument:

  Album.sum(:id)
  # SELECT sum(id) AS sum FROM albums LIMIT 1
  # => 3

  Album.avg(:id)
  # SELECT avg(id) AS avg FROM albums LIMIT 1
  # => 1.5

  Album.min(:id)
  # SELECT min(id) AS min FROM albums LIMIT 1
  # => 1

  Album.max(:id)
  # SELECT max(id) AS max FROM albums LIMIT 1
  # => 2

sequel-5.63.0/doc/reflection.rdoc

= Reflection

Sequel supports reflection information in multiple ways.

== Adapter in Use

You can get the adapter in use using Database#adapter_scheme:

  DB.adapter_scheme # :postgres, :jdbc, :odbc

== Database Connected To

In some cases, the adapter scheme will be the same as the database to which you are connecting.  However, many adapters support multiple databases.  You can use the Database#database_type method to get the type of database to which you are connecting:

  DB.database_type # :postgres, :h2, :mssql

== Tables in the Database

Database#tables gives an array of table name symbols:

  DB.tables # [:table1, :table2, :table3, ...]

== Views in the Database

Database#views gives an array of view name symbols:

  DB.views # [:view1, :view2, :view3, ...]

== Indexes on a table

Database#indexes takes a table name and gives a hash of index information.  Keys are index names, values are subhashes with the keys :columns and :unique :

  DB.indexes(:table1)
  # {:index1=>{:columns=>[:column1], :unique=>false},
  #  :index2=>{:columns=>[:column2, :column3], :unique=>true}}

Index information generally does not include partial indexes, functional indexes, or indexes on the primary key of the table.

== Foreign Key Information for a Table

Database#foreign_key_list takes a table name and gives an array of hashes of foreign key information:

  DB.foreign_key_list(:table1)
  # [{:columns=>[:column1], :table=>:referenced_table, :key=>[:referenced_column1]}]

At least the following entries will be present in the hash:

:columns :: An array of columns in the given table
:table :: The table referenced by the columns
:key :: An array of columns referenced (in the table specified by :table), but can be nil on certain adapters if the primary key is referenced.

The hash may also contain entries for:

:deferrable :: Whether the constraint is deferrable
:name :: The name of the constraint
:on_delete :: The action to take ON DELETE
:on_update :: The action to take ON UPDATE

== Column Information for a Table

Database#schema takes a table symbol and returns column information in an array with each element being an array with two elements.  The first element of the subarray is a column symbol, and the second element is a hash of information about that column.  The hash should include the following keys:

:allow_null :: Whether NULL/nil is an allowed value for this column.
               Used by the Sequel::Model typecasting code.
:db_type :: The type of column the database provided, as a string.  Used by the schema_dumper plugin for a more specific type translation.
:default :: The default value of the column, as either a string or nil.  Uses a database specific format.  Used by the schema_dumper plugin for converting to a ruby value.
:primary_key :: Whether this column is one of the primary key columns for the table.  Used by the Sequel::Model code to determine primary key columns.
:ruby_default :: The default value of the column as a ruby object, or nil if there is no default or the default could not be successfully parsed into a ruby object.
:type :: The type of column, as a symbol (e.g. :string).  Used by the Sequel::Model typecasting code.

Example:

  DB.schema(:table)
  # [[:column1, {:allow_null=>true, :db_type=>'varchar(255)', :default=>'blah', :primary_key=>false, :type=>:string}], ...]

== Column Information for a Model

Model#db_schema returns pretty much the same information, except it returns it as a hash with column keys instead of an array of two element arrays.

  Model.db_schema
  # {:column1=>{:allow_null=>true, :db_type=>'varchar(255)', :default=>'blah', :primary_key=>false, :type=>:string}, ...}

== Columns used by a dataset/model

Dataset#columns returns the columns of the current dataset as an array of symbols:

  DB[:table].columns # [:column1, :column2, :column3, ...]

Dataset#columns! does the same thing, except it ignores any cached value.  In general, the cached value should never be incorrect, unless the database schema is changed after the dataset is created.

  DB[:table].columns! # [:column1, :column2, :column3, ...]

Model.columns does the same thing as Dataset#columns, using the model's dataset:

  Model.columns # [:column1, :column2, :column3, ...]

== Associations Defined

Sequel::Model offers complete introspection capability for all associations.

You can get an array of association symbols with Model.associations:

  Model.associations # [:association1, :association2, ...]

You can get the association reflection for a single association via Model.association_reflection.  Association reflections are subclasses of hash:

  Model.association_reflection(:association1)
  # #<Sequel::Model::Associations::AssociationReflection ...>

You can get an array of all association reflections via Model.all_association_reflections:

  Model.all_association_reflections
  # [#<Sequel::Model::Associations::AssociationReflection ...>, ...]

Finally, you can get a hash of association reflections via Model.association_reflections:

  Model.association_reflections
  # {:association1=>#<Sequel::Model::Associations::AssociationReflection ...>, ...}

== Validations Defined

When using the validation_class_methods plugin, you can use the validation_reflections class method to get a hash with validation reflection information.  This returns a hash keyed on the column name symbol:

  Model.validation_reflections[:column]
  # => [[:presence, {}], [:length, {:maximum=>255, :message=>'is just too long'}]]

Similarly, when using the constraint_validations plugin, you can use the constraint_validation_reflections class method:

  Model.constraint_validation_reflections[:column]
  # => [[:presence, {}], [:max_length, {:argument=>255, :message=>'is just too long'}]]

sequel-5.63.0/doc/release_notes/1.0.txt

=== New code organization

Sequel is now divided into two parts: sequel_core and sequel_model.  These two parts are distributed as two separate gems.  The sequel gem bundles sequel_core and sequel_model together.
If you don't use Sequel::Model in your code, you can just install and use sequel_core.

=== New model hooks implementation

The hooks implementation has been rewritten from scratch, is much more robust and offers a few new features:

* More ways to define hooks: hooks can now be defined by supplying a block or a method name, or by overriding the hook instance method.

* Inheritable hooks: Hooks can now be inherited, which means that you can define general hooks in a model superclass, and use them in subclasses.  You can also define global hooks on Sequel::Model that will be invoked for all model classes.

* Hook chains can be broken by returning false from within the hook.

* New after_initialize hook, invoked after instance initialization.

* The hook invocation order can no longer be changed.  Hooks are invoked in order of definition, from the top of the class hierarchy (that is, from Sequel::Model) down to the specific class.

=== Miscellanea

* Removed deprecated adapter stubs, and all other deprecations in both sequel_core and sequel_model.

* Fixed String#to_time to raise error correctly for invalid time stamps.

* Fixed error behavior when parse_tree or ruby2ruby are not available.

sequel-5.63.0/doc/release_notes/1.1.txt

=== DRY Sequel models

With the new Sequel release you no longer need to explicitly specify the table name for each model class, assuming your model name is the singular of the table name (just like in ActiveRecord or DataMapper):

  class UglyBug < Sequel::Model
  end

  UglyBug.table_name #=> :ugly_bugs

=== New model validations and support for virtual attributes

Sequel models now include validation functionality which largely follows the validations offered in ActiveRecord.  Validations can be checked anytime by calling Model#valid?, with validation errors accessible through Model#errors:

  class Item < Sequel::Model
    validates_presence_of :name
  end

  my_item = Item.new
  my_item.valid? #=> false
  my_item.errors.full_messages #=> ["name is not present"]

The Model#save method has been changed to check for validity before saving.  If the model instance is not valid, the #save method returns false without saving the instance.  You can also bypass the validity test by calling Model#save! instead.

Model classes also now support virtual attributes, letting you assign values to any attribute (virtual or persistent) at initialization time:

  class User < Sequel::Model
    attr_accessor :password
  end

  u = User.new(:password => 'blah', ...)
  u.password #=> 'blah'

Also, virtual attributes can be validated just like persistent attributes.

=== Other changes (long list!)

* Added Model#reload as alias to Model#refresh.

* Changed Model.create to accept a block (#126).

* Fixed Model#initialize to accept nil values (#115).

* Added Model#update_with_params method with support for virtual attributes and auto-filtering of unrelated parameters, and changed Model.create_with_params to support virtual attributes (#128).

* Fixed Model.dataset to correctly set the dataset if using implicit naming or inheriting the superclass dataset (thanks celldee).

* Finalized support for virtual attributes.

* Fixed Model#set to work with string keys (#143).

* Fixed Model.create to correctly initialize instances marked as new (#135).

* Fixed Model#initialize to convert string keys into symbol keys.  This also fixes problem with validating objects initialized with string keys (#136).

* Added Dataset#table_exists? convenience method.
* Changed Dataset#group_and_count to accept multiple columns (#134).

* Added Dataset#select_all method.

* Added Dataset#select_more, Dataset#order_more methods (#129).

* Fixed Dataset#count to work correctly for grouped datasets (#144).

* Fixed joining datasets using aliased tables (#140).

* Added support for UNSIGNED constraint, used in MySQL (#127).

* Implemented constraint definitions inside Database#create_table.

* Enhanced Database.connect to accept options with string keys, so it can now accept options loaded from YAML files.  Database.connect also automatically converts :username option into :user for compatibility with existing YAML configuration files for AR and DataMapper.

* Changed ODBC::Database to support connection using driver and database name, also added support for untitled columns in ODBC::Dataset (thanks Leonid Borisenko).

* Changed MySQL adapter to support specifying socket option.

* Fixed MySQL adapter to correctly format foreign key definitions (#123).

* Changed MySQL::Dataset to allow HAVING clause on ungrouped datasets, and put HAVING clause before ORDER BY clause (#133).

* Changed mysql adapter to default to localhost if :host option is not specified (#114).

* Added String#to_date.  Updated mysql adapter to use String#to_date for mysql date types (thanks drfreeze).

* Fixed postgres adapter to define PGconn#async_exec as alias to #exec if not defined (for pure-ruby postgres driver).

* Changed postgres adapter to quote column references using double quotes.

* Applied patch for oracle adapter: fix behavior of limit and offset, transactions, #table_exists?, #tables and additional specs (thanks Liming Lian #122).

* Added support for additional field types in postgresql adapter (#146).

* Added support for date field types in postgresql adapter (#145).

* Added support for limiting and paginating datasets with fixed SQL, e.g. using Database#fetch.

* Added new Dataset#from_self method that returns a dataset selecting from the original dataset.

* Allow for additional filters on a grouped dataset (#119 and #120).

* Refactored Sequelizer to use Proc#to_sexp (method provided by r2r).

* Fixed bin/sequel to require sequel_model if available.

sequel-5.63.0/doc/release_notes/1.3.txt

=== Better model associations

The latest release of sequel_model includes a new associations functionality written by Jeremy Evans which replaces the old relations code in previous versions.  Please note that this version is not completely backward-compatible and you should therefore upgrade with caution.

The new implementation supports three kinds of relations: one_to_many, many_to_one and many_to_many, which correspond to has_many, belongs_to and has_and_belongs_to_many relations in ActiveRecord.  In fact, the new implementation includes aliases for ActiveRecord association macros and is basically compatible with ActiveRecord conventions.  It also supports DRY implicit class name references.
Here's a simple example:

  class Author < Sequel::Model
    has_many :books # equivalent to one_to_many
  end

  class Book < Sequel::Model
    belongs_to :author # equivalent to many_to_one
    has_and_belongs_to_many :categories # equivalent to many_to_many
  end

  class Category < Sequel::Model
    has_and_belongs_to_many :books
  end

These macros will create the following methods:

* Author#books, Author#add_book, Author#remove_book
* Book#author, Book#categories, Book#add_category, Book#remove_category
* Category#books, Category#add_book, Category#remove_book

Unlike ActiveRecord, one_to_many and many_to_many association methods return a dataset:

  a = Author[1234]
  a.books.sql #=> 'SELECT * FROM books WHERE (author_id = 1234)'

You can also tell Sequel to cache the association result set and return it as an array:

  class Author < Sequel::Model
    has_many :books, :cache => true
  end

  Author[1234].books.class #=> Array

You can of course bypass the defaults and specify class names and key names:

  class Node < Sequel::Model
    belongs_to :parent, :class => Node
    belongs_to :session, :key => :producer_id
  end

Another useful option is :order, which sets the order for the association dataset:

  class Author < Sequel::Model
    has_many :books, :order => :title
  end

  Author[1234].books.sql #=> 'SELECT * FROM books WHERE (author_id = 1234) ORDER BY title'

More information about associations can be found in the Sequel documentation.

=== Other changes

* Added configuration file for running specs (#186).

* Changed Database#drop_index to accept fixed arity (#173).

* Changed column definition sql to put UNSIGNED constraint before unique in order to satisfy MySQL (#171).

* Enhanced MySQL adapter to support load data local infile, added compress option for mysql connection by default (#172).

* Fixed bug when inserting hashes in array tuples mode.

* Changed SQLite adapter to catch RuntimeError raised when executing a statement and raise Error::InvalidStatement with the offending SQL and error message (#188).

* Fixed Dataset#reverse to not raise for unordered dataset (#189).

* Added Dataset#unordered method and changed #order to remove order if nil is specified (#190).

* Fixed reversing order of ASC expression (#164).

* Added support for :null => true option when defining table columns (#192).

* Fixed Symbol#method_missing to accept variable arity (#185).

sequel-5.63.0/doc/release_notes/1.4.0.txt

Eager loading for all types of associations:

  Artist.eager(:albums).all
  Album.eager(:artist, :genre, :tracks).all
  Album.eager(:artist).eager(:genre).eager(:tracks).all
  Album.filter(:year=>2008).eager(:artist).all

Eager loading supports cascading to an unlimited depth, and doesn't have any aliasing issues:

  Artist.eager(:albums=>:tracks).all
  Artist.eager(:albums=>{:tracks=>:genre}).all

Unfortunately, eager loading comes at the expense of a small amount of backward compatibility.  If you were using uncached associations (the default in sequel_model 0.5), they no longer work the same way.  Now, all associations act as if :cache=>true (which is now set for all associations, so if you wrote a tool that worked with both cached and uncached associations, it should still work).
One to many associations now populate the corresponding many to one instance variable (even when eagerly loaded):

  # Assuming: Album.one_to_many :tracks
  album = Album.first

  # The following code is only one query,
  # not a query for the album and one for each track
  album.tracks.each{|t| puts t.album.name}

ActiveRecord style has_many :through associations are now supported via many_to_many.  many_to_many will no longer select the entire result set, just the columns of the associated table (and not the join table), so it works for both has_and_belongs_to_many (simple join table) and has_many :through (join table model) scenarios.  If you want to include all or part of the join table attributes, see the :select option for many_to_many associations.

We reduced the number of gems from three (sequel, sequel_core, sequel_model) to two (sequel, sequel_core).  Basically, sequel_model is now just sequel, and the old sequel gem metapackage is no longer.  There isn't a reason to have a gem metapackage for two gems when one (sequel_model) depends on the other (sequel_core).  This required a version bump for the model part of sequel from 0.5.0.2 to 1.4.0 (since the previous sequel gem version was 1.3).

Sequel 1.4.0 has fixes for 11 tracker issues, including fixes to the MySQL and PostgreSQL adapters.

We have switched the source control repository for Sequel from Google Code (which uses subversion) to github (which uses git).  If you would like to contribute to Sequel, please fork the github repository, make your changes, and send a pull request.  As before, posting patches on the Google Code issue tracker is fine as well.

sequel-5.63.0/doc/release_notes/1.5.0.txt

You can now graph a dataset and have the result split into component tables:

  DB[:artists].graph(:albums, :artist_id=>:id).first
  # => {:artists=>{:id=>artists.id, :name=>artists.name},
  #     :albums=>{:id=>albums.id, :name=>albums.name, :artist_id=>albums.artist_id}}

This aliases columns if necessary so they don't stomp on each other, which is what usually happens if you just join the tables:

  DB[:artists].left_outer_join(:albums, :artist_id=>:id).first
  # => {:id=>(albums.id||artists.id), :name=>(albums.name||artists.name),
  #     :artist_id=>albums.artist_id}

Models can use graph as well, in which case the values will be model objects:

  Artist.graph(Album, :artist_id=>:id)
  # => {:artists=>#<Artist ...>, :albums=>#<Album ...>}

Models can now eager load via .eager_graph, which will load all the results and all associations in a single query.  This is necessary if you want to filter on columns in associated tables.  It works exactly the same way as .eager, and supports cascading of associations as well:

  # Artist.one_to_many :albums
  # Album.one_to_many :tracks
  # Track.many_to_one :genre

  Artist.eager_graph(:albums=>{:tracks=>:genre}).filter(
    :tracks_name=>"Firewire").all

This will give you all artists that have an album with a track named "Firewire", and calling .albums on one of those artists will only return albums that have a track named "Firewire", and calling .tracks on one of those albums will return only the track(s) named "Firewire".

You can use set_graph_aliases to select specific columns:

  DB[:artists].graph(:albums, :artist_id=>:id).set_graph_aliases(
    :artist_name=>[:artists, :name], :album_name=>[:albums, :name]).first
  # => {:artists=>{:name=>artists.name}, :albums=>{:name=>albums.name}}

You can use eager_graph with set_graph_aliases to have eager loading with control over the SELECT clause.
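For example, a minimal sketch combining the two (the aliases are illustrative):

  Artist.eager_graph(:albums).set_graph_aliases(
    :artist_name=>[:artists, :name],
    :album_name=>[:albums, :name]).all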
All associations now update their reciprocal associations whenever the association methods are used, so you don't need to refresh the association or model to have the reciprocal association updated:

  Album.many_to_one :band
  Band.one_to_many :albums

  # Note that all of these associations are cached,
  # so after the first access there are no additional
  # database queries to fetch associated records.

  # many_to_one setter adds to reciprocal association
  band1.albums # => []
  album1.band = band1
  band1.albums # => [album1]
  band2.albums # => []
  album1.band = band2
  band1.albums # => []
  band2.albums # => [album1]
  album1.band = band2
  band2.albums # => [album1]
  album1.band = nil
  band2.albums # => []

  # one_to_many add_* method sets reciprocal association
  # one_to_many remove_* method removes reciprocal association
  album1.band # => nil
  band1.add_album(album1)
  album1.band # => band1
  band2.add_album(album1)
  album1.band # => band2
  band2.remove_album(album1)
  album1.band # => nil

  Post.many_to_many :tags
  Tag.many_to_many :posts

  # many_to_many add_* method adds to reciprocal association
  # many_to_many remove_* method removes from reciprocal association
  post1.tags # => []
  tag1.posts # => []
  tag1.add_post(post1)
  post1.tags # => [tag1]
  tag1.posts # => [post1]
  tag1.remove_post(post1)
  post1.tags # => []
  tag1.posts # => []
  post1.add_tag(tag1)
  post1.tags # => [tag1]
  tag1.posts # => [post1]
  post1.remove_tag(tag1)
  post1.tags # => []
  tag1.posts # => []

The MySQL and PostgreSQL adapters now support index types:

  index :some_column, :type => :hash # or :spatial, :full_text, :rtree, etc.

Starting in Sequel 1.5.0, some methods are deprecated.  These methods will be removed in Sequel 2.0.0.  The deprecation framework is fairly flexible.  You can choose where the messages get sent:

  Sequel::Deprecation.deprecation_message_stream = STDERR # the default

  Sequel::Deprecation.deprecation_message_stream = \
    File.new('deprecation.txt', 'wb') # A file

  Sequel::Deprecation.deprecation_message_stream = nil # ignore the messages

You can even have all deprecation messages accompanied by a traceback, so you can see exactly where in your code you are using a deprecated method:

  Sequel::Deprecation.print_tracebacks = true

All deprecation methods come with a message telling you what alternative code will work.

In addition to deprecating some methods, we removed the ability to have arrays returned instead of hashes.  The array code still had debugging messages left in it, and we are not aware of anyone using it.  Hashes have been returned by default since Sequel 0.3.

We have also removed the Numeric date/time extensions (e.g. 3.days.ago).  The existing extensions were incomplete, better ones are provided elsewhere, and the extensions were not really related to Sequel's purpose.

Sequel no longer depends on ParseTree, RubyInline, or ruby2ruby.  They are still required to use the block filters.  Sequel's only gem dependency is on the tiny metaid.

Sequel 1.5.0 has fixes for 12 tracker issues, including fixes to the Informix, MySQL, ODBC, ADO, JDBC, Postgres, and SQLite adapters.

sequel-5.63.0/doc/release_notes/2.0.0.txt

Blockless Filter Expressions
----------------------------

Before 2.0.0, in order to specify complex SQL expressions, you either had to resort to writing the SQL yourself in a string or using an expression inside a block that was parsed by ParseTree.
Because ParseTree was required, only ruby 1.8.* was supported, and supporting other ruby versions (ruby 1.9, JRuby, Rubinius) would never be possible.

With 2.0.0, you no longer need to use a block to write complex SQL expressions.  The basics of the blockless filters are the usual arithmetic, inequality, and binary operators:

  +  = addition
  -  = subtraction
  *  = multiplication
  /  = division
  >  = greater than
  <  = less than
  >= = greater than or equal to
  <= = less than or equal to
  ~  = negation
  &  = AND
  |  = OR

You can use these operators on Symbols, LiteralStrings, and other Sequel::SQL::Expressions.  Note that there is no equal operator or not equal operator, to specify those, you use a Hash.

Here are some examples:

  # Ruby code => SQL WHERE clause
  :active => active
  ~:active => NOT active
  ~~:active => active
  ~~~:active => NOT active
  :is_true[] => is_true()
  ~:is_true[] => NOT is_true()
  :x > 100 => (x > 100)
  :x < 100.01 => (x < 100.01)
  :x <= 0 => (x <= 0)
  :x >= 1 => (x >= 1)
  ~(:x > 100) => (x <= 100)
  {:x => 100} => (x = 100)
  {:x => 'a'} => (x = 'a')
  {:x => nil} => (x IS NULL)
  ~{:x => 100} => (x != 100)
  ~{:x => 'a'} => (x != 'a')
  ~{:x => nil} => (x IS NOT NULL)
  {:x => /a/} => (x ~ 'blah') # Default, MySQL different
  ~{:x => /a/} => (x !~ 'blah') # Default, MySQL different
  :x.like('a') => (x LIKE 'a')
  ~:x.like('a') => (x NOT LIKE 'a')
  :x.like(/a/) => (x ~ 'a') # Default, MySQL different
  ~:x.like('a', /b/) => ((x NOT LIKE 'a') AND (x !~ 'b')) # Default
  ~{:x => 1..5} => ((x < 1) OR (x > 5))
  ~{:x => DB[:items].select(:i)} => (x NOT IN (SELECT i FROM items))
  ~{:x => [1,2,3]} => (x NOT IN (1, 2, 3))
  :x + 1 > 100 => ((x + 1) > 100)
  (:x * :y) < 100.01 => ((x * y) < 100.01)
  (:x - :y/2) >= 100 => ((x - (y / 2)) >= 100)
  (((:x - :y)/(:x + :y))*:z) <= 100 => ((((x - y) / (x + y)) * z) <= 100)
  ~((((:x - :y)/(:x + :y))*:z) <= 100) => ((((x - y) / (x + y)) * z) > 100)
  :x & :y => (x AND y)
  :x & :y & :z => ((x AND y) AND z)
  :x & {:y => :z} => (x AND (y = z))
  {:y => :z} & :x => ((y = z) AND x)
  {:x => :a} & {:y => :z} => ((x = a) AND (y = z))
  (:x > 200) & (:y < 200) => ((x > 200) AND (y < 200))
  :x | :y => (x OR y)
  :x | :y | :z => ((x OR y) OR z)
  :x | {:y => :z} => (x OR (y = z))
  {:y => :z} | :x => ((y = z) OR x)
  {:x => :a} | {:y => :z} => ((x = a) OR (y = z))
  (:x > 200) | (:y < 200) => ((x > 200) OR (y < 200))
  (:x | :y) & :z => ((x OR y) AND z)
  :x | (:y & :z) => (x OR (y AND z))
  (:x & :w) | (:y & :z) => ((x AND w) OR (y AND z))
  ~((:x | :y) & :z) => ((NOT x AND NOT y) OR NOT z)
  ~((:x & :w) | (:y & :z)) => ((NOT x OR NOT w) AND (NOT y OR NOT z))
  ~((:x > 200) | (:y & :z)) => ((x <= 200) AND (NOT y OR NOT z))
  ~('x'.lit + 1 > 100) => ((x + 1) <= 100)
  'x'.lit.like(/a/) => (x ~ 'a')

None of these require blocks, you can use any directly in a call to filter:

  DB[:items].filter((:price * :tax) - :discount > 100)
  # => SELECT * FROM items WHERE (((price * tax) - discount) > 100)

  DB[:items].filter(:active & ~:archived)
  # => SELECT * FROM items WHERE (active AND NOT archived)

SQL String Concatenation
------------------------

Sequel now has support for expressing SQL string concatenation in an easy way:

  [:name, :title].sql_string_join(" - ") # SQL: name || ' - ' || title

You can use this in selecting columns, creating filters, ordering datasets, and possibly elsewhere.
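For instance, a minimal sketch of ordering by the concatenated string (the table and columns are illustrative, and the exact parenthesization in the generated SQL may differ):

  DB[:items].order([:name, :title].sql_string_join(" - "))
  # SELECT * FROM items ORDER BY (name || ' - ' || title)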
Schema Reflection Support/Typecasting on Assignment
---------------------------------------------------

When used with PostgreSQL, MySQL, or SQLite, Sequel now has the ability to get information from the database's schema in regards to column types:

  DB.schema(:artist)
  => [[:id, {:type=>:integer, :db_type=>"integer", :max_chars=>0,
       :numeric_precision=>32, :allow_null=>false,
       :default=>"nextval('artist_id_seq'::regclass)"}],
      [:name, {:type=>:string, :default=>nil, :db_type=>"text",
       :numeric_precision=>0, :allow_null=>true, :max_chars=>0}]]

Models now use this information to typecast values on attribute assignment.  For example, if you have an integer column named number and a text (e.g. varchar) column named title:

  1.5.1:
    model.number = '1'
    model.number # => '1'
    model.title = 1
    model.title # => 1

  2.0.0:
    model.number = '1'
    model.number # => 1
    model.title = 1
    model.title # => '1'

Typecasting can be turned off on a global, per class, and per object basis:

  Sequel::Model.typecast_on_assignment = false # Global
  Album.typecast_on_assignment = false # Per Class
  Album.new.typecast_on_assignment = false # Per Object

Typecasting is somewhat strict, it does not allow obviously bogus data to be used:

  model.number = 'a' # Raises error

This is in contrast to how some other ORMs handle the situation:

  model.number = 'a'
  model.number # => 0

If Sequel is being used with a web framework and you want to display friendly error messages to the user, you should probably turn typecasting off and set up the necessary validations in your models.

Model Association Improvements
------------------------------

Associations can now be eagerly loaded even if they have a block, though the block should not rely on being evaluated in the context of an instance.  This allows you to filter on associations when eagerly loading:

  Artist.one_to_many :albums_with_10_tracks, :class=>:Album do |ds|
    ds.filter(:num_tracks => 10)
  end

  Artist.filter(:name.like('A%')).eager(:albums_with_10_tracks).all
  # SELECT * FROM artists WHERE (name LIKE 'A%')
  # SELECT albums.* FROM albums WHERE ((artist_id IN (...)) AND
  #   (num_tracks = 10))

Associations now have a remove_all_ method for removing all associated objects in a single query:

  Artist.many_to_many :albums

  Artist[1].remove_all_albums
  # DELETE FROM albums_artists WHERE artist_id = 1

  Artist.one_to_many :albums

  Artist[1].remove_all_albums
  # UPDATE albums SET artist_id = NULL WHERE artist_id = 1

All associations can specify a :select option to change which columns are selected.  Previously only many to many associations supported this.

The SQL used when eagerly loading through eager_graph can be modified via the :graph_join_type, :graph_conditions, and :graph_join_conditions options.

:graph_join_type changes the join type from the default of :left_outer.  This can be useful if you do not want any albums that don't have an artist in the result set:

  Album.many_to_one :artist, :graph_join_type=>:inner

  Album.eager_graph(:artist).sql
  # SELECT ... FROM albums INNER JOIN artists ...

:graph_conditions adds conditions on the join to the table you are joining, the eager_graph equivalent of an association block argument in eager.  It takes either a hash or an array where all elements are arrays of length two, similar to join_table, where key symbols specify columns in the joined table and value symbols specify columns in the last joined or primary table:

  Album.many_to_one :artist, :graph_conditions=>{:active=>true}

  Album.eager_graph(:artist).sql
  # SELECT ...
  # FROM albums LEFT OUTER JOIN artists ON ((artists.id =
  #   albums.artist_id) AND (artists.active = 't'))

:graph_join_table_conditions exists for many to many associations
only, and operates the same as :graph_conditions, except it specifies
a condition on the many to many join table instead of the associated
model's table.  This is necessary if the join table is also a model
table with other columns on which you may want to filter:

  Album.many_to_many :genres, :join_table=>:ag, \
    :graph_join_table_conditions=>{:active=>true}
  Album.eager_graph(:genres).sql
  # SELECT ... FROM albums LEFT OUTER JOIN ag ON ((ag.album_id =
  #   albums.id) AND (ag.active = 't')) LEFT OUTER JOIN genres ON
  #   (genres.id = ag.genre_id)

Other Small Improvements
------------------------

* Dataset#invert returns a dataset that matches all records not
  matching the current filter.
* Dataset#unfiltered returns a dataset that has any filters removed.
* Dataset#last_page? and Dataset#first_page? for paginated datasets.
* The sequel command line tool now supports an -E or --echo argument
  that logs all SQL to the standard output.  It also can take a path
  to a yaml file with database connection options, in addition to a
  database URL.
* Databases can now have multiple SQL loggers, so you can log to the
  standard output as well as a file.
* SQL identifiers (columns and tables) are now quoted by default (you
  can turn this off via Sequel.quote_identifiers = false if need be).
* Sequel.connect now takes an optional block that will disconnect the
  database when the block finishes.
* AlterTableGenerator now has add_primary_key and add_foreign_key
  methods.
* Running the specs without ParseTree installed skips the specs that
  require ParseTree.
* You can use an array of arrays instead of a hash when specifying
  conditions, which may be necessary in certain situations where you
  would be using the same hash key more than once.
* Almost all documentation for Sequel was updated for 2.0.0, so if
  you found Sequel documentation lacking before, check out the new
  RDoc pages.
* There have been many minor refactoring improvements; the code
  should now be easier to read and follow.
* Sequel now has no external dependencies.
* Sequel::Models now have before_validation and after_validation
  hooks.
* Sequel::Model hooks that return false cause the methods that call
  them (such as save) to return false.
* Sequel::Models can now load their schema on first instantiation,
  instead of when they are created, via
  Sequel::Model.lazy_load_schema=.  This is helpful for certain web
  frameworks that reload all models on every request.
* Hook methods that use blocks can now include an optional tag, which
  allows them to work well with web frameworks that load source files
  every time they are modified.

The PostgreSQL adapter has been rewritten and now supports ruby-pg.
There have also been improvements in the following adapters: DBI,
MySQL, SQLite, Oracle, and MSSQL.

All of the methods that have been deprecated in 1.5.0 have now been
removed.  If you want to upgrade to Sequel 2.0.0 from version 1.4.0
or previous, upgrade to 1.5.1 first, fix all of the deprecation
warnings that show up, and then upgrade to 2.0.0.

There were some backwards incompatible changes made in 2.0.0 beyond
the removal of deprecated methods.  These are:

* Inflector is no longer used, the inflection methods were moved
  directly into String (where they belong because inflections only
  make sense for strings).  So to override singularization or
  pluralization rules, use String.inflections instead of
  Inflector.inflections.
* MySQL tinyints are now returned as boolean values instead of
  integers.  MySQL doesn't have a boolean type, and usually it is
  recommended to use tinyint for a boolean column.
* You can no longer pass an array to Dataset#order or Dataset#select;
  you need to pass each argument separately (the * operator is your
  friend).
* You must use '?' instead of '(?)' when interpolating an array
  argument into a string (e.g. filter('x IN ?', [1,2,3])).
* You must pass an explicit table alias argument to join_table and
  related methods; you can no longer include the table alias inside
  the table argument.
* sqlite:// URLs now operate the same as file:// URLs (2 slashes for
  a relative path, 3 for an absolute path).

sequel-5.63.0/doc/release_notes/2.1.0.txt

Model Improvements
------------------

* one_to_many/many_to_many associations now support a :limit option,
  adding a limit/offset to the records returned.  This was possible
  before using a block, so it is just added for convenience.
* Associations now support a :read_only option, which doesn't create
  methods that modify the database.
* Associations now support a :graph_select option, which allows
  specifying the columns of associated models to include when using
  eager_graph.
* one_to_many associations now have a :one_to_one option.  When used
  it creates a getter and setter method similar to many_to_one.  This
  fills the same role as ActiveRecord's has_one, but it is
  implemented as a couple of convenience methods over one_to_many, so
  it still requires that you specify the association name as a
  plural.
* Model datasets now have to_hash augmented so that it can be called
  without any arguments, in which case it yields an identity map (a
  hash with keys being primary key values and values being model
  instances).
* The Model.set_sti_key method was added, for easily setting up
  single table inheritance.  It should be called only in the parent
  class.
* Calls to def_dataset_method with a block are now cached and
  reapplied to the new dataset if set_dataset is called afterward, or
  in a subclass.
* All validation methods can now be made conditional via an :if
  option, which takes either a symbol (which specifies an instance
  method) or a proc (which is instance_evaled).
* Model#set and Model#update have been added back; they are now
  aliases of #set_with_params and #update_with_params.
* Models now have set_only/set_except/update_only/update_except
  instance methods that take a hash (like you would provide to set or
  update) and additional arguments specifying which columns to allow
  or disallow.
* Models now have set_allowed_columns and set_restricted_columns
  methods, which operate similarly to ActiveRecord's attr_accessible
  and attr_protected.  It is recommended that you use set_only or
  update_only instead of these methods, though.  You can ignore the
  allowed or restricted columns by using #set_all or #update_all.
* The primary key column(s) is restricted by default.  To allow it to
  be set via new/set/update, use:

    Sequel::Model.unrestrict_primary_key # Global
    Artist.unrestrict_primary_key # Per Class

* It is now easy to override the one_to_many/many_to_many association
  methods that modify the database (add_/remove_/remove_all_), as
  they have been broken into two methods, one that handles the
  caching features and a private one (prepended with an _) that
  handles the database changes (and which you can easily override
  without worrying about the caching); a sketch follows this list.
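Here is a minimal sketch of that override pattern (the extra column
set in the update is hypothetical):

  class Artist < Sequel::Model
    one_to_many :albums

    private

    # add_album still handles the caching; only the database
    # modification is customized here
    def _add_album(album)
      album.update(:artist_id => pk, :added_on => Date.today)
    end
  end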
Table Joining
-------------

Dataset#join_table got a nice overhaul.  You can now use any join
type your database allows:

  DB[:artist].join_table(:natural, :albums)
  DB[:numbers].join_table(:cross, :numbers)

You can now specify the conditions as:

* String: "a.b = c.d" # ON a.b = c.d
* Expression: :x < :y # ON x < y
* Array of Symbols: [:x, :y, :z] # USING (x, y, z)
* nil # no conditions, used for NATURAL or CROSS joins

Dataset#join_table also takes a block that yields three arguments:

* join_table_alias - The alias/name of the table currently being
  joined
* last_join_table_alias - The alias name of the last table joined (if
  there was one) or the first FROM table (if not).
* joins - An array of JoinClause objects for all previous joins in
  the query.

Using the block you can specify conditions for complex joins without
needing to know in advance what table aliases will be used.

Expanded SQL Syntax Support
---------------------------

SQL Case statements are now supported directly using hashes or
arrays:

  {:x > 1 => 1}.case(0)
  # CASE WHEN x > 1 THEN 1 ELSE 0 END
  [[{:x=>1}, 0], [:x < 1, 1], [:x > 1, 2]].case(-1)
  # CASE WHEN x = 1 THEN 0 WHEN x < 1 THEN 1 WHEN x > 1 THEN 2 ELSE -1 END

You should use an array instead of a hash for multiple conditions
unless all conditions are orthogonal.

The SQL extract function has special syntax:

  EXTRACT(day FROM date)

This syntax is now supported via the following ruby code:

  :date.extract(:day)

Other Notable Changes
---------------------

* The sequel command line tool can now run migrations.  The -m option
  specifies the directory holding the migration files, and the -M
  option specifies the version to which to migrate.
* The PostgreSQL adapter supports nested transactions/savepoints.
* The schema parser now understands decimal fields, and will typecast
  to BigDecimal.
* PostgreSQL's numeric type is now recognized and returned as
  BigDecimal.
* HAVING now comes before ORDER BY, which most databases seem to
  prefer.  If your database wants HAVING after ORDER BY, please let
  us know.
* Symbol#qualify now exists, to specify the table name for a given
  symbol, similar to the use of #as to specify an alias.  This is
  mainly helpful in conjunction with the #join_table block, as that
  provides the table aliases to use to qualify the columns inside the
  block (see the sketch after this list).
* BitwiseMethods (&, |, ^, ~, <<, >>) have been added to the
  NumericExpression class, so you can do the following:

    (x + 1) ^ 10 # SQL: (x + 1) ^ 10
    ~(x + 1) # SQL: ~(x + 1)

  Usually, &, |, and ~ operate in a logical manner, but for
  NumericExpressions, they take on their usual bitwise meaning, since
  logical operations only make sense for booleans.
* #cast_numeric and #cast_string exist for Symbols, Strings, and
  other Sequel Expressions, which return the results casted and
  wrapped in either NumericExpression or StringExpression, so you can
  use the BitwiseMethods (&, |, ^, ~, <<, >>) or
  StringConcatenationMethods (+) directly.
* Dataset#to_hash can take only one argument, in which case it uses
  that argument to specify the key, and uses the entire hash for the
  value.
* Dataset#graph can now take an array of columns to select from the
  joined table via the :select option.
* Dataset#filter and similar methods now combine the block and
  regular argument conditions if both are given, instead of ignoring
  the regular argument conditions.
* Dataset#filter(false) can now be used to make sure that no records
  are returned.  Dataset#filter(true) also works, but it's a no-op.
  Before, these raised errors.
* Dataset#count does a subquery for a dataset using DISTINCT, since
  otherwise it would yield a count for the query without DISTINCT.
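Here is a minimal sketch combining the #join_table block with
Symbol#qualify (the tables and the join condition are hypothetical):

  DB[:artists].join_table(:inner, :albums) do |j, lj, joins|
    {:artist_id.qualify(j) => :id.qualify(lj)}
  end
  # SELECT * FROM artists INNER JOIN albums
  #   ON (albums.artist_id = artists.id)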
ParseTree Support Officially Deprecated
---------------------------------------

The support for ParseTree-based block filters has officially been
deprecated and will be removed in Sequel 2.2.  To use the expression
filters (which don't require ParseTree) inside blocks, use:

  SEQUEL_NO_PARSE_TREE = true
  require 'sequel'
  # OR
  require 'sequel'
  Sequel.use_parse_tree = false

This is the default if ParseTree cannot be loaded.  If ParseTree can
be loaded, it remains the default, in order not to immediately break
existing code.

With this set, you can use the expression filters inside of blocks:

  dataset.filter{((:x + 1) & 10 < :y) & :z}

That doesn't gain you all that much, but there are some methods that
feed block arguments into filter, such as the following:

  dataset.first(5){((:x + 1) & 10 < :y) & :z}

Which will get you the first 5 records matching the condition.

Backwards Incompatible Changes
------------------------------

* To change the datetime class used from Time to DateTime, you now
  use:

    Sequel.datetime_class = DateTime # instead of Sequel.time_class

* Models now raise errors if you try to access a missing or
  restricted method via new/set/update, instead of just silently
  skipping that parameter.  To get the old behavior:

    Sequel::Model.strict_param_setting = false

* The association_dataset method now takes into account the :eager
  option and the block argument, where it didn't before.  It also
  takes into account the new :limit option.
* Association methods now raise errors in most cases if the model
  doesn't have a valid primary key.
* Dataset#join_table used to allow a symbol as a conditions argument
  as a shortcut for a hash:

    DB[:artist].join(:albums, :artist_id)
    # ON albums.artist_id = artists.id

  With the changes to #join_table, this no longer works.  It would
  now be interpreted as a boolean column:

    DB[:artist].join(:albums, :artist_id)
    # ON artists.id

  Use the following slightly longer version for the old behavior:

    DB[:artist].join(:albums, :artist_id=>:id)
    # ON albums.artist_id = artists.id

* MySQL users need to be careful when upgrading, the following code
  will once again cause an error:

    DB[:artists].each{|artist| DB[:albums].each{|album| ...}}

  To fix it, change the code to:

    DB[:artists].all{|artist| DB[:albums].each{|album| ...}}

  The issue is the MySQL adapter doesn't release the database
  connection while running each, and the second call to each gets the
  same database connection (while the other query is still running),
  because it is in the same thread.  Using #all for the outside query
  ensures that the database connection is released before the block
  is called.

  The reason for this change was that the workaround provided for
  MySQL could potentially cause issues with transactions for all
  adapters.

* String#asc and String#desc are no longer defined, as ordering on a
  plain string column should be a no-op.  They are still defined on
  LiteralStrings.
* You can no longer abuse the SQL::Function syntax to use a table
  alias with specified columns (e.g. :table[:col1, :col2, :col3]) or
  to cast to types (e.g. :x.cast_as(:varchar[20])).  Use a
  LiteralString in both cases.

sequel-5.63.0/doc/release_notes/2.10.0.txt

New Supported Adapters and Databases
------------------------------------

* A DataObjects adapter was added that supports PostgreSQL, MySQL,
  and SQLite.
  DataObjects is the underlying database library used by DataMapper,
  and has potential performance advantages by doing all typecasting
  in C.

* A Firebird Adapter was added, it requires the modified Ruby Fb
  adapter found at http://github.com/wishdev/fb.

* An H2 JDBC subadapter was added, based on the code used in JotBot.
  H2 is an embeddable Java database, and may be preferable to using
  SQLite on JDBC because SQLite requires native code.

New Core Features
-----------------

* Sequel now has database independent migrations.  Before, column
  types in migrations were not translated per database, so it was
  difficult to set up a migration that worked on multiple databases.
  Sequel now accepts ruby classes as database types, in addition to
  symbols and strings.  If a ruby class is used, it is translated to
  the most appropriate database type.  Here is an example using all
  supported classes (with Sequel's default database type):

    DB.create_table(:cats) do
      primary_key :id, :type=>Integer # integer
      String :a # varchar(255)
      column :b, File # blob
      Fixnum :c # integer
      foreign_key :d, :other_table, :type=>Bignum # bigint
      Float :e # double precision
      BigDecimal :f # numeric
      Date :g # date
      DateTime :h # timestamp
      Time :i # timestamp
      Numeric :j # numeric
      TrueClass :k # boolean
      FalseClass :l # boolean
    end

  Type translations were tested on the PostgreSQL, MySQL, SQLite, and
  H2 databases.  The default translations should work OK for most
  databases, but there will probably be a type or two that doesn't
  work.  Please send in a patch if Sequel uses a column type that
  doesn't work on your database.

  Note that existing migrations still work fine, in most cases.  If
  you were using strings or symbols for types before, they should
  still work.  See the Backwards Compatibility section below for
  details.  Also note that this doesn't relate solely to migrations,
  as any database schema modification method that accepts types will
  accept one of the above classes.

* A ton of internal work was done to better support databases that
  fold unquoted identifiers to uppercase (which is the SQL standard).
  Sequel now allows you to set a method to call on identifiers going
  both into and out of the database.  The default is to downcase
  identifiers coming out, and upcase identifiers going in, though
  this is overridden by the PostgreSQL, MySQL, and SQLite adapters to
  not do anything (since they fold to lowercase by default).

  The settings are called identifier_input_method and
  identifier_output_method, and like most Sequel settings, they can
  be set globally, per database, or per dataset:

    # Global (use uppercase in ruby and lowercase in the database)
    Sequel.identifier_input_method = :downcase
    Sequel.identifier_output_method = :upcase

    # Per Database (use camelized names in the database, and
    # underscored names in ruby)
    DB.identifier_input_method = :camelize
    DB.identifier_output_method = :underscore

    # Per Dataset (obfuscate your database columns!)
    class String; def rot_13; tr('A-Za-z', 'N-ZA-Mn-za-m') end end
    ds = DB[:table]
    ds.identifier_input_method = :rot_13
    ds.identifier_output_method = :rot_13

* Schema parsing support was added to the JDBC adapter, using the
  JDBC metadata methods.  This means that models that use the JDBC
  adapter will typecast data in their column setters and
  automatically select the correct primary key column(s).  This is
  currently the only adapter that supports schema parsing when using
  an MSSQL or Oracle database.

* Database#create_table now takes options, which you can use to
  specify a MySQL engine, charset, and/or collation.
  You can also set a default engine, charset, and collation for MySQL
  to use:

    Sequel::MySQL.default_engine = 'InnoDB'
    Sequel::MySQL.default_charset = 'utf8'
    Sequel::MySQL.default_collate = 'utf8'

  The defaults will be used if the options are not provided.  If a
  default engine is set, you can specify :engine=>nil to not use it
  (same goes for charset and collate).  A sketch of the new options
  appears at the end of this section.

* The Sequel::DatabaseConnectionError exception class was added.  It
  is raised by the connection pool if there is an error attempting to
  instantiate a database connection.  Also, if the adapter returns
  nil instead of raising an error for faulty connection parameters,
  DatabaseConnectionError will be raised immediately, instead of the
  connection pool busy waiting until it gives up with a
  PoolTimeoutError.

* Database#tables is now supported on the JDBC adapter, returning an
  Array of table name symbols.

* Sequel now converts the following Java types returned by the JDBC
  adapter into ruby types: Java::JavaSQL::Timestamp,
  Java::JavaSQL::Time, Java::JavaSQL::Date,
  Java::JavaMath::BigDecimal, and Java::JavaIo::BufferedReader.

* When using the PostgreSQL adapter with the postgres-pr driver,
  Sequel will use a custom string escaping routine unless
  force_standard_strings = false.  This means that using Sequel's
  defaults, postgres-pr will correctly escape strings now.

* The SQLite adapter now returns float, real, and double precision
  columns as Floats.

* The SQLite adapter logs beginning, committing, and rolling back
  transactions.

* Sequel now has an internal version (before, the only way to tell
  the version was to look at the gem being used).  It is accessible
  at Sequel.version.
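Here is a minimal sketch of the new create_table options (the table,
column, and option values are hypothetical):

  DB.create_table(:posts, :engine => 'MyISAM', :charset => 'latin1',
                  :collate => 'latin1_general_ci') do
    primary_key :id
    String :title
  end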
New Model Features
------------------

* A new validates_not_string validation was added for Sequel Models.
  It is intended to be used with the raise_on_typecast_failure =
  false setting.  In this case, for a non-string database column, if
  there is a string value when the record is going to be saved, it is
  due to the fact that Sequel was not able to typecast the given data
  correctly (so it is almost certainly not valid).  This should make
  Sequel easier to use with web applications.

* An :allow_missing validation option was added to all standard
  validations.  This option skips the validation if the attribute is
  not in the object's values.  It is different from :allow_nil, which
  will skip the value if it is present but nil in the values.

  The intended use case for this option is when the database provides
  a good default.  If the attribute is not present in values, the
  database will use its default.  If the attribute is present in the
  values but equals nil, Sequel will attempt to insert it into the
  database as a NULL value, instead of using the database's default.
  If you don't want Sequel to insert a NULL value in the database,
  but you want the database to provide the default, this is the
  option to use.

* validates_each now accepts :allow_nil and :allow_blank options, so
  it is easier to create custom validations with the same options as
  the standard validations.

* Before_* hooks now run in the reverse order that they were added.
  The purpose of hooks is to wrap existing functionality, and making
  later before_* hooks run before previous before_* hooks is the
  correct behavior.

* You can now add your own hook types, via Model.add_hook_type.  This
  is intended for plugin use.  All of the standard hooks are now
  implemented using this method.

* The value of new? in an after_save hook now reflects the previous
  state of the model (so true for a create and false for an update),
  instead of always being false.  This makes it easier to have a
  complex after_save hook that still needs to differentiate between a
  newly created record and an updated record, without having to add
  separate after_create and after_update hooks.

* The value of changed_columns in an after_update hook now reflects
  the value before the update occurred, instead of usually being
  empty.  Previously, to have this functionality, you generally had
  to save the value to an instance variable in a before_update hook
  so you could reference it in the after_update hook.

Other Improvements
------------------

* Sequel no longer overwrites the following Symbol instance methods
  when running on ruby 1.9: [], <, <=, >, and >=.  One of Sequel's
  principles is that it does not override methods defined by ruby,
  and now that ruby 1.9 defines the above methods on Symbol, Sequel
  shouldn't be overwriting them.

  Sequel already provides a way to work around this issue when
  another library adds the same methods to Symbol that Sequel does.
  For example, you need to change the following:

    dataset.filter(:number > 1)
    dataset.filter(:number >= 2)
    dataset.filter(:name < 'M')
    dataset.filter(:name <= 'I')
    dataset.filter(:is_bool[:x])

  To:

    dataset.filter{|o| o.number > 1}
    dataset.filter{|o| o.number >= 2}
    dataset.filter{|o| o.name < 'M'}
    dataset.filter{|o| o.name <= 'I'}
    dataset.filter{|o| o.is_bool(:x)}

  The argument provided to the block is a Sequel::SQL::VirtualRow.
  This class uses method_missing so that any methods called on it
  return Sequel::SQL::Identifiers (if no arguments are provided) or
  Sequel::SQL::Functions (if arguments are provided).

  If you were using one of the above symbol methods outside of a
  filter, you can call sql_string, sql_number, or sql_function on the
  symbol.  So the following would also work:

    dataset.filter(:number.sql_number > 1)
    dataset.filter(:number.sql_number >= 2)
    dataset.filter(:name.sql_string < 'M')
    dataset.filter(:name.sql_string <= 'I')
    dataset.filter(:is_bool.sql_function(:x))

  Using the block argument makes for a nicer API, though, so I
  recommend using it when possible.

  Note that if you are running ruby 1.8 or jruby without the --1.9
  flag, you don't need to worry.  If you are running ruby 1.9 or
  jruby --1.9, or you plan to at some point in the future, you should
  inspect your code for existing uses of these methods.  Here are a
  couple command lines that should find most uses:

    # Find :symbol[]
    egrep -nr ':['\''"]?[a-zA-Z_0-9]*['\''"]?\[' *
    # Find :symbol (<|>|<=|>=)
    egrep -nr '[^:]:['\''"]?[a-zA-Z_0-9]*['\''"]? *[<>]=?' *

* Database#quote_identifiers now affects future schema modifications
  when using the database.  Previously, it only affected future
  schema modifications if a schema modification method had not yet
  been called.

* Literalization of Times and DateTimes is now correct when using the
  MySQL JDBC subadapter.

* Literalization of Blobs is now correct when using the PostgreSQL
  JDBC subadapter.

* Index and table names are quoted when creating indices in the
  PostgreSQL adapter.

* Dataset#delete was changed in the SQLite adapter to add a where
  clause that is always true, instead of doing an explicit count
  first and then deleting.  This is simpler, though it could
  potentially have performance implications.

* The sequel command line tool now supports symbol keys and unnested
  hashes in YAML files, so it should work with Merb's database.yml.
  It also includes the error class in the case of an error.
* The integration type tests were greatly expanded.  Generally,
  running the integration tests is a good way to determine how well
  your database is supported.

* Dataset#quote_identifier now returns LiteralStrings as-is, instead
  of treating them as regular strings.

* Sequel no longer modifies the MySQL::Result class when using the
  MySQL adapter.

Backwards Compatibility
-----------------------

* If you were previously using a database that returned uppercase
  identifiers, it will probably return lowercase identifiers by
  default now.  To get back the old behavior:

    DB.identifier_output_method = nil

* The module hierarchy under Sequel::SQL has changed.  Now, modules
  do not include other modules, and the following modules were
  removed since they would have been empty after removing the modules
  they included: Sequel::SQL::SpecificExpressionMethods and
  Sequel::SQL::GenericExpressionMethods.

* Sequel no longer assumes the public schema by default when
  connecting to PostgreSQL.  You can still set the default schema to
  use (even to public).

* The ability to load schema information for all tables at once was
  removed from the PostgreSQL adapter.  While it worked, it had some
  issues, and it was difficult to keep it working when some new
  features were used.  This ability wasn't exposed to the user, and
  was purely an optimization.  If you have any code like:

    DB.schema

  by itself after the Database object was instantiated, you should
  remove it.

* The Database#primary_key API changed in the PostgreSQL shared
  adapter; it now accepts an options hash with :server and :conn keys
  instead of a server symbol.  Also, quite a few private Database
  instance methods changed, as well as some constants in the
  AdapterMethods.

* It is possible that some migrations will break, though it is
  unlikely.  If you were using any of the classes mentioned above as
  a method inside a migration, it might be broken.  However, since
  String, Float, and Integer wouldn't have worked as methods before,
  it is unlikely that anyone used this.

* The meaning of #String, #Integer, and #Float inside
  Sequel::SQL::Generator (i.e. inside a Database#create_table block)
  has changed.  Before, these used to call private Kernel methods;
  now, they set up columns with the appropriate database type.

* The Database#lowercase method in the DBI adapter was removed, as
  its use case is now met by the identifier_output_method support.

* Database#uri is now aliased explicitly via a real method, to allow
  for easier subclassing.

* You can no longer pass nil as the second argument to
  Database#create_table.

sequel-5.63.0/doc/release_notes/2.11.0.txt

Optimizations
-------------

* Model.[] was optimized to use static SQL in cases where doing so
  should result in the same output.  This should result in a 30-40%
  performance increase.  Since this can be the most significant or
  only method call in a web application action, this has potential to
  significantly enhance the performance of web application code.
  In order for this optimization to have an effect, you need to make
  sure that you are calling set_dataset with a Symbol and not a
  Dataset object:

    # Optimized:
    class Foo < Sequel::Model; end
    class Foo < Sequel::Model(:foos); end
    class Foo < Sequel::Model
      set_dataset :foos
    end

    # Not Optimized, but otherwise equivalent:
    class Foo < Sequel::Model(Model.db[:foos]); end
    class Foo < Sequel::Model
      set_dataset db[:foos]
    end

* Dataset#literal was refactored for performance reasons to make
  overriding it in subclasses unnecessary.  The changes made result
  in a 20-25% performance increase.  Sequel can spend about 10% of
  its time in Dataset#literal, so this may be only a 2% overall
  performance improvement.

New Features
------------

* Association datasets now know about the model objects that created
  them, as well as the related association reflection.  This makes
  association extensions much more powerful.  For example, you can
  now create generic association extensions such as:

    module FindOrCreate
      def find_or_create(vals)
        first(vals) || association_reflection.associated_class. \
          create(vals.merge(association_reflection[:key]=> \
          model_object.id))
      end
    end

  The above will work for any standard one_to_many association:

    Artist.one_to_many :albums, :extend=>FindOrCreate
    # Create an album named Foo related to this artist,
    # unless such an album already exists
    Artist.first.albums_dataset.find_or_create(:name=>'Foo')

  Before, the only way to do the above was to use a closure inside
  the :dataset option proc, which couldn't be done generically for
  multiple associations.

* A :conditions association option was added, which allows simple
  filters to be set up without defining :graph_conditions and an
  association block:

    # 2.10.0
    one_to_many(:japanese_verses, :class=>:Verse, \
      :graph_conditions=>{:languageid=>3}) do |ds|
      ds.filter(:languageid=>3)
    end
    # 2.11.0
    one_to_many(:japanese_verses, :class=>:Verse, \
      :conditions=>{:languageid=>3})

* A :clone association option was added, which allows you to clone an
  existing association.  This is most useful when you are dealing
  with a legacy schema and had to define the same options redundantly
  for each type of association.  You can now do:

    many_to_many :deputies, :class=>:Employee, \
      :join_table=>:employeecurrentaudits, :left_key=>:currentauditid, \
      :right_key=>:employeeid, :order=>[:firstname, :lastname] do |ds|
      ds.filter(:active).filter(:capacity=>1)
    end
    many_to_many :project_managers, :clone=>:deputies do |ds|
      ds.filter(:active).filter(:capacity=>2)
    end
    many_to_many :team_leaders, :clone=>:deputies do |ds|
      ds.filter(:active).filter(:capacity=>3)
    end

  All of the above would use the same :class, :join_table, :left_key,
  :right_key, and :order options.  If you don't provide an
  association block, but you are cloning an association that has one,
  the cloned association's block is used.  You can use the
  :block=>nil option to not use a block even if the cloned
  association has a block.

* Dataset#select, #select_more, #order, #order_more, and #get all
  take a block that yields a Sequel::SQL::VirtualRow instance,
  similar to the behavior of filter.  This allows for the easier use
  of SQL functions on Ruby 1.9:

    # 2.10.0
    dataset.select(:prettify.sql_function(:name))
    # 2.11.0
    dataset.select{|o| o.prettify(:name)}

* String#lit can now accept arguments and return an SQL literal
  string.  This allows you to do things that were previously hard or
  at least unnecessarily verbose.  For example, you can now easily
  use the SQL standard SUBSTRING function:

    column = :user
    pattern = params[:pattern]
    dataset.select{|o| o.substring('? from ?'.lit(column, pattern))}

* A validates_inclusion_of validation method was added to Model.  You
  can provide a Range or an Array in the :in option to specify the
  allowed values:

    validates_inclusion_of :value, :in=>1..5
    validates_inclusion_of :weekday, :in=>%w'Monday Tuesday ...'

* Dataset#with_sql was added, which returns a copy of the dataset
  with static SQL.  This is useful if you want to keep the same
  row_proc/graph/transform/etc., but want to use your own custom SQL
  (a sketch follows this list).
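Here is a minimal sketch of #with_sql (the model and SQL string are
hypothetical); because the row_proc is kept, a model dataset still
returns model instances:

  ds = Album.dataset.with_sql('SELECT * FROM albums WHERE rating > 5')
  ds.all # => an array of Album instances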
Other Improvements
------------------

* You can now use Sequel's database independent types when casting:

    dataset.select(:number.cast(String))

  Among other things, the default cast types for cast_string and
  cast_numeric now work in the MySQL adapter.

* Model#set_associated_object was added.  The many_to_one association
  setter method calls it.  This allows you to easily override the
  association setters for all many_to_one associations of a class by
  modifying a single method.

* Typecasting invalid date strings now raises a
  Sequel::Error::InvalidValue instead of an ArgumentError, which
  means that you can use raise_on_typecast_failure = false and not
  have an error raised when an invalid date format is used.

* String#to_sequel_blob was added and should now be used instead of
  String#to_blob.  sqlite3-ruby defines String#to_blob differently,
  which could cause problems.

* Blob columns are now fully supported in the SQLite adapter, with
  the hex escape syntax being used for input, and returning columns
  of type Sequel::SQL::Blob on output.

* The SQLite adapter drop_column support is now significantly more
  robust.

* The SQLite adapter now supports rename_column.

* The MySQL adapter now supports stored procedures with multiple
  arguments.

* The MySQL adapter can now skip using a compressed connection to the
  server via the :compress=>false option.

* The MySQL adapter now sets a default timeout of 30 days for the
  database connection; you can change it via the :timeout option,
  which accepts a number of seconds.

* The MySQL adapter now sets SQL_AUTO_IS_NULL to false by default;
  you can use the :auto_is_null=>true option to not do this.

* The MySQL adapter now sets the encoding option on the database
  connection itself, so it works across reconnects.

* Sequel itself no longer uses String#lit or Symbol#* internally, so
  it shouldn't break if another library defines them.

* The default index name is now generated correctly if a non-String
  or Symbol column is used.

* Some ruby -w warnings have been fixed.

* INSERTs are now sent to the master database instead of the slave
  database(s) if using a master/slave database configuration and
  PostgreSQL 8.2+ or Firebird.

* DateTime literalization has been fixed in the Firebird adapter.

* Date literalization has been fixed in the H2 JDBC subadapter.

* Release notes for versions from 1.0 to the present are now included
  in the Sequel repository and the RDoc documentation, see
  http://sequel.rubyforge.org/rdoc/files/doc/release_notes/

Backwards Compatibility
-----------------------

* The optimization of Model.[] may break if you modify the model's
  dataset behind its back.  Always use Model.set_dataset if you want
  to change a Model's dataset.

* Sequel::Dataset::UnsupportedExceptIntersect and
  Sequel::Dataset::UnsupportedExceptIntersectAll will now only be
  defined if you are using an adapter that requires them.
* The private Model#cache_delete_unless_new method has been removed.

* Sequel::SQL::IrregularFunction was removed, as it was a bad hack
  that is not used by Sequel anymore.  Unless you were instantiating
  it directly or using a plugin/extension that did, this shouldn't
  affect you.  Using a Sequel::SQL::Function with a
  Sequel::SQL::PlaceholderLiteralString is recommended instead, see
  the substring example above.

sequel-5.63.0/doc/release_notes/2.12.0.txt

Overview
--------

Sequel 2.12 is really just a stepping stone to Sequel 3.0, which will
be released next month.  All major changes currently planned for 3.0
have been made in 2.12, but 2.12 contains many features that have
been deprecated and will be removed or moved into extensions or
plugins in 3.0.

Deprecation Logging
-------------------

If you use a deprecated method or feature, Sequel will by default
print a deprecation message and 10 lines of backtrace to standard
error to easily allow you to figure out which code needs to be
updated.  You can change where the deprecation messages go and how
many lines of backtrace are given using the following:

  # Log deprecation information to a file
  Sequel::Deprecation.output = File.open('deprecated.txt', 'wb')

  # Use 5 lines of backtrace when logging deprecation messages
  Sequel::Deprecation.backtraces = 5

  # Use all backtrace lines when logging deprecation messages
  Sequel::Deprecation.backtraces = true

  # Don't include backtraces in the deprecation logging
  Sequel::Deprecation.backtraces = false

  # Turn off all deprecation logging
  Sequel::Deprecation.output = nil

Deprecated Features Moving to Extensions
----------------------------------------

* Migrations are being moved into sequel/extensions/migration.  There
  isn't any reason that they should be loaded in normal use since
  they are used so rarely.  The sequel command line tool uses this
  extension to run the migrations.

* Adding the blank? method to all objects has been moved into
  sequel/extensions/blank.

* Dataset#print and Sequel::PrettyTable have been moved into
  sequel/extensions/pretty_table.

* Dataset#query and related methods have been moved into
  sequel/extensions/query.

* Dataset#paginate and related methods have been moved into
  sequel/extensions/pagination (see the sketch after this list).

* String inflection methods (e.g. "people".singularize) have been
  moved into sequel/extensions/inflector.

* String date/time conversion methods (e.g. '2000-01-01'.to_date)
  have been moved into sequel/extensions/string_date_time.
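Here is a minimal sketch of loading one of these extensions, assuming
the pagination extension keeps the current #paginate API:

  require 'sequel'
  require 'sequel/extensions/pagination'

  ds = DB[:items].paginate(1, 10) # page 1, 10 records per page
  ds.page_count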
Deprecated Model Features Moving to Plugins
-------------------------------------------

* Model validation class methods have been moved to a plugin.  Sequel
  users are encouraged to write their own validate instance method
  instead.  A new validation_helpers plugin has been added to make
  this easier, it's explained in the New Features section.  If you
  want to continue using the validation class methods:

    Sequel::Model.plugin :validation_class_methods

* Model hook class methods have been moved to a plugin.  Sequel users
  are encouraged to write their own hook instance methods, and call
  super to get hooks specified in superclasses or plugins.  If you
  want to continue using the hook class methods:

    Sequel::Model.plugin :hook_class_methods

* Model schema methods (e.g. Model.set_schema, Model.create_table,
  Model.drop_table) have been moved to a plugin.  The use of these
  methods has been discouraged for a long time.  If you want to use
  them:

    Sequel::Model.plugin :schema

* Model.set_sti_key has been moved to a plugin.  So you should
  change:

    MyModel.set_sti_key :key_column

  to:

    MyModel.plugin :single_table_inheritance, :key_column

* Model.set_cache has been moved to a plugin.  So you should change:

    MyModel.set_cache cache_store, opts

  to:

    MyModel.plugin :caching, cache_store, opts

* Model.serialize has been moved to a plugin.  So you should change:

    MyModel.serialize :column, :format=>:yaml

  to:

    MyModel.plugin :serialization, :yaml, :column

  Because the previous serialization support depended on dataset
  transforms, the new serialization support is implemented
  differently, and behavior may not be identical in all cases.
  However, this should be a drop-in replacement for most users.

Deprecated Features To Be Removed in Sequel 3.0
-----------------------------------------------

* Dataset#transform is deprecated without any replacement planned.
  It was announced on the Sequel mailing list that transforms would
  be removed unless someone said they needed them, and nobody said
  that they did.

* Dataset#multi_insert and Dataset#import are no longer aliases of
  each other.  Dataset#multi_insert now takes an array of hashes, and
  Dataset#import now takes an array of columns and an array of arrays
  of values.  Using multi_insert with import's API or vice-versa is
  deprecated (see the sketch after this list).

* Calling Dataset#[] with no arguments or an integer argument is
  deprecated.

* Calling Dataset#map with both an argument and a block is
  deprecated.

* Database#multi_threaded? and Database#logger are both deprecated.

* Calling Database#transaction with a symbol to specify which server
  to use is deprecated.  You should now call it with an option hash
  with a :server key.

* Array#extract_options! and Object#is_one_of? are both deprecated.

* The metaprogramming methods taken from metaid are deprecated and
  have been moved into Sequel::Metaprogramming.  If you want them
  available to specific objects/classes, just include or extend with
  Sequel::Metaprogramming.  If you want all objects to have access to
  the metaprogramming methods, install metaid.  Note that the
  class_def method from metaid doesn't exist in
  Sequel::Metaprogramming, since it really isn't different from
  define_method (except it is public instead of private).

* Module#class_attr_overridable, #class_attr_reader, and #metaalias
  are deprecated.

* Using Model#set or #update when the columns for the model are not
  set and you provide a hash with symbol keys is deprecated.
  Basically, you must have setter methods now for any columns used in
  #set or #update.

* Model#set_with_params and #update_with_params are deprecated, use
  #set and #update instead.

* Model#save! is deprecated, use #save(:validate=>false).

* Model.is and Model.is_a are deprecated, use Model.plugin.

* Model.str_columns, Model#str_columns, #set_values, and
  #update_values are deprecated.  You should use #set and #update
  instead of #set_values and #update_values, though they operate
  differently.

* Model.delete_all, Model.destroy_all, Model.size, and Model.uniq are
  deprecated, use .delete, .destroy, .count, and .distinct.

* Model.belongs_to, Model.has_many, and Model.has_and_belongs_to_many
  are deprecated, use .many_to_one, .one_to_many, and .many_to_many.

* Model#dataset is deprecated, use Model.dataset.

* SQL::CastMethods#cast_as is deprecated, use #cast.

* Calling Database#schema without a table argument is deprecated.

* Dataset#uniq is deprecated, use Dataset#distinct.

* Dataset#symbol_to_column_ref is deprecated, use #literal.
* Dataset#quote_column_ref is deprecated, use #quote_identifier.

* Dataset#size is deprecated, use #count.

* Passing options to Dataset#each, #all, #single_record,
  #single_value, #sql, #select_sql, #update, #update_sql, #delete,
  #delete_sql, and #exists is deprecated.  Modify the options first
  using clone or a related method, then call one of the above
  methods.

* Dataset#create_view and #create_or_replace_view are deprecated, use
  the database methods instead.

* Dataset.dataset_classes, #model_classes, #polymorphic_key, and
  #set_model are deprecated.

* Database#>> is deprecated.

* String#to_blob and SQL::Blob#to_blob are deprecated, use
  #to_sequel_blob.

* The use of Symbol#| to create array subscripts is deprecated, use
  Symbol#sql_subscript.

* Symbol#to_column_ref is deprecated, use Dataset#literal.

* String#expr is deprecated, use String#lit.

* Array#to_sql, String#to_sql, and String#split_sql are deprecated.

* Passing an array to Database#<< is deprecated.

* Range#interval is deprecated.

* Enumerable#send_each is deprecated.

* When using ruby 1.8, Hash#key is deprecated.

* Sequel.open is deprecated, use Sequel.connect.

* Sequel.use_parse_tree and Sequel.use_parse_tree= are deprecated.

* All upcase_identifier methods and the :upcase_identifiers database
  option are deprecated, use identifier_input_method = :upcase
  instead.

* Using a virtual row block without an argument is deprecated, see
  Sequel.virtual_row_instance_eval= under New Features.

* When using the JDBC adapter, Java::JavaSQL::Timestamp#usec is
  deprecated.  Sequel has returned Java::JavaSQL::Timestamp as
  DateTime or Time for a few versions, so this shouldn't affect most
  people.

* Sequel will no longer require bigdecimal/util, enumerator, or yaml
  in 3.0.  If you need them in your code, make sure you require them
  yourself.  Using features added by requiring these standard
  libraries will not bring up a deprecation warning, for obvious
  reasons.

* Sequel::Error::InvalidTransform, Sequel::Error::NoExistingFilter,
  and Sequel::Error::InvalidStatement exceptions will be removed in
  Sequel 3.0.  You will not get a deprecation message if you
  reference them in 2.12.

* Sequel::Model::Validation::Errors is deprecated, use
  Sequel::Model::Errors instead.  Referencing the old name will not
  bring up a deprecation message.
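Here is a minimal sketch of the now-separate multi_insert and import
APIs (the table and columns are hypothetical):

  # multi_insert takes an array of hashes
  DB[:albums].multi_insert([{:name => 'RF'}, {:name => 'GF'}])
  # import takes an array of columns and an array of arrays of values
  DB[:albums].import([:name, :copies], [['RF', 1000], ['GF', 3000]])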
New Features
------------

* Sequel.virtual_row_instance_eval= was added, which lets you give
  Sequel 2.12 the behavior that will be the standard in 3.0.  It
  changes blocks passed to Dataset#filter, #select, or #order that
  don't accept arguments (or accept any number of arguments) to
  instance eval the block in the context of a new VirtualRow instance
  instead of passing a new VirtualRow instance to the block.  It
  allows you to change code that looks like this:

    dataset.filter{|o| (o.number > 10) & (o.name > 'M')}

  to:

    dataset.filter{(number > 10) & (name > 'M')}

  When instance_eval is used, only local variables are available to
  the block.  Any calls to instance methods will be interpreted as
  calling VirtualRow#method_missing, which generates identifiers or
  functions.  When virtual_row_instance_eval is enabled, the
  following type of code will break:

    # amount is an instance method
    dataset.filter{:number + amount > 0}

  Just like this example, the only type of code that should break is
  when a virtual row block was used when it wasn't necessary (since
  it doesn't use the VirtualRow argument).

  When Sequel.virtual_row_instance_eval = false, using a virtual row
  block that doesn't accept an argument will cause a deprecation
  message.

  Here's a regular expression that should catch most places where you
  are using a virtual row block without an argument:

    egrep -nr '[^A-Za-z0-9_](filter|select|select_more|order|order_more|get|where|having|from|first|and|or|exclude|find|subset|constraint|check)( *(\([^)]*\) *)?){*[^|]' *

  An RDoc page explaining virtual row blocks was added as well.

* A validation_helpers model plugin was added that allows you to do
  validations similar to the old class level validations inside the
  Model#validate instance method.  The API has changed, but it's
  capable of most of the same validations.  It doesn't handle
  acceptance_of or confirmation_of validations, as those shouldn't be
  handled in the model.

    # Old class level validations
    validates_format_of :col, :with=>/.../
    validates_length_of :col, :maximum=>5
    validates_length_of :col, :minimum=>3
    validates_length_of :col, :is=>4
    validates_length_of :col, :within=>3..5
    validates_not_string :col
    validates_numericality_of :col
    validates_numericality_of :col, :only_integer=>true
    validates_presence_of :col
    validates_inclusion_of :col, :in=>[3, 4, 5]
    validates_uniqueness_of :col, :col2
    validates_uniqueness_of([:col, :col2])

    # New instance level validations
    def validate
      validates_format /.../, :col
      validates_max_length 5, :col
      validates_min_length 3, :col
      validates_exact_length 4, :col
      validates_length_range 3..5, :col
      validates_not_string :col
      validates_numeric :col
      validates_integer :col
      validates_presence :col
      validates_includes([3,4,5], :col)
      validates_unique :col, :col2
      validates_unique([:col, :col2])
    end

  Another change made is to specify the same type of validation on
  multiple attributes, you must use an array:

    # Old
    validates_length_of :name, :password, :within=>3..5

    # New
    def validate
      validates_length_range 3..5, [:name, :password]
    end

  The :message, :allow_blank, :allow_missing, and :allow_nil options
  are still respected.  The :tag option is not needed as instance
  level validations work with code reloading without workarounds.
  The :if option is also not needed for instance level validations:

    # Old
    validates_presence_of :name, :if=>:new?
    validates_presence_of :pass, :if=>proc{flag > 3}

    # New
    def validate
      validates_presence(:name) if new?
      validates_presence(:pass) if flag > 3
    end

  The validates_each also doesn't have an equivalent instance method,
  since it is much easier to just write your own validation:

    # Old
    validates_each(:date) do |o,a,v|
      o.errors.add(a, '...') unless v > Date.today
    end

    # New
    def validate
      errors.add(:date, '...') unless date > Date.today
    end

* MySQL adapter datasets now have on_duplicate_key_update and
  insert_ignore methods which modify the SQL used to support ON
  DUPLICATE KEY UPDATE and INSERT IGNORE syntax in multi_insert and
  import (a sketch follows this section).

* If you use the MySQL native adapter, you can set:

    Sequel::MySQL.convert_invalid_date_time = nil

  to return dates like "0000-00-00" and times like "25:00:00" as nil
  values instead of raising an error.  You can also set it to :string
  to return the values as strings.

* You can now use Sequel without modifying any core classes, by
  defining a SEQUEL_NO_CORE_EXTENSIONS constant or environment
  variable.  In 2.12, this may still add some deprecated methods to
  the core classes, but in 3.0 no methods will be added to the core
  classes if you use this.

* You can now use Sequel::Model without the associations
  implementation by defining a SEQUEL_NO_ASSOCIATIONS constant or
  environment variable.
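Here is a minimal sketch of the new MySQL dataset methods (the table
and columns are hypothetical):

  DB[:tags].insert_ignore.multi_insert([{:name => 'ruby'}])
  DB[:tags].on_duplicate_key_update(:count).import([:name, :count],
    [['ruby', 1], ['sql', 2]])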
Other Improvements
------------------

* Model column accessors have been made faster and the overhead of
  creating them has been reduced significantly.

* ~{:bool_col=>true} now generates a bool_col IS NOT TRUE filter
  instead of bool_col != TRUE.  This makes it return records with
  NULL values.  If you only want to have false records, you should
  use {:bool_col=>false}.  This works better with SQL's 3 valued
  boolean logic.

  It is slightly inconsistent with ~{:col=>1}, since that won't
  return values where col is NULL, but it gives the user the ability
  to create an IS [NOT] (TRUE|FALSE) filter, which Sequel previously
  did not support.  If you really want the old behavior, you can
  change it to ~{true=>:bool_col}.

* Model.use_transactions was added for setting whether model objects
  should use transactions when destroying or saving records.  Like
  most Sequel options, it's settable on a global, per model, and per
  object basis:

    Sequel::Model.use_transactions = false
    MyModel.use_transactions = true
    my_model.use_transactions = false

  You can also turn it on or off for specific save calls:

    my_model.save(:transaction=>true)

* The Oracle adapter now supports schema parsing.

* When using Model.db=, all current dataset options are copied to a
  new dataset created with the new db.

* Model::Errors#count was refactored to improve performance.

* Most exception classes that were located under Sequel::Error are
  now located directly under Sequel.  The old names are not
  deprecated (unless mentioned above), but their use is now
  discouraged.  The exceptions have the same name except that
  Sequel::Error::PoolTimeoutError changed to Sequel::PoolTimeout.

* Dataset#where now always affects the WHERE clause.  Before, it was
  just an alias of filter, so it modified the HAVING clause if the
  dataset already had a HAVING clause.

* The optimization of Model.[] introduced in 2.11.0 broke on
  databases that didn't support LIMIT.  The optimization now works on
  those databases.

* All of the RDoc documentation was reviewed and many updates were
  made, resulting in significantly improved documentation quality.

* Model.def_dataset_method now works when the model doesn't have an
  associated dataset, as it will add the method to a dataset given to
  set_dataset in the future.

* Database#get and #select now take a block that is passed to the
  dataset they create.

* You can disable the use of INSERT RETURNING in the shared
  PostgreSQL adapter using disable_insert_returning.  This is mostly
  useful if you are inserting a large number of records (see the
  sketch after this list).

* A bug relating to aliasing columns in the JDBC adapter has been
  fixed.

* Sequel can now create and drop schema-qualified views.

* Performance of Dataset#destroy for model datasets was improved.

* The specs now run on Rspec 1.2.

* Internal dependence on the methods that Sequel adds to core classes
  has been eliminated, any internal use of methods that Sequel adds
  to the core classes is now considered a bug.

* A possible bug where Database#rename_table would not remove a
  cached schema entry has been fixed.

* The Oracle and MySQL adapters now raise an error as soon as you
  call distinct on a dataset, instead of waiting until the SQL is
  generated.
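Here is a minimal sketch of that option (the table is hypothetical,
and disable_insert_returning is assumed to return a modified copy of
the dataset):

  ds = DB[:numbers].disable_insert_returning
  10000.times{|i| ds.insert(:number => i)}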
Backwards Compatibility
-----------------------

* Saving a newly inserted record in an after_create or after_save
  hook is no longer broken.  It broke in 2.10 as a side effect of
  allowing the hook to check whether or not the record was a new
  record.  The code has been changed so that a @was_new instance
  variable will be defined to true if the record was just created.
  Similarly, instead of not modifying changed_columns until after the
  after hooks run, a @columns_updated instance variable will be
  available in the after hooks that is a hash of exactly what
  attribute keys and values were used in the update.

  These changes break compatibility with 2.11.0 and 2.10.0, but
  restore compatibility with 2.9.0 and previous versions.

* PostgreSQL no longer uses savepoints for nested transactions by
  default.  If you want to use a savepoint, you have to pass the
  :savepoint option to the transaction method.  Using savepoints by
  default broke expectations when a method raised Rollback to
  rollback the transaction, and it only rolled back to the last
  savepoint.

* The anonymous model classes created by Sequel::Model() are now
  stored in Model::ANONYMOUS_MODEL_CLASSES instead of the @models
  class instance variable of the main module.

* The mappings of adapter schemes to classes are now stored in
  Sequel::ADAPTER_MAP instead of the Database @@adapters class
  variable.

* Model instances no longer contain a reference to their class's
  @db_schema.

* Database schema sql methods (e.g. alter_table_sql) are now private.

* Database#[] no longer accepts a block.  It's not possible to call
  it with a block in general usage, anyway.

* The Sequel::Schema::SQL module no longer exists, the methods it
  included were placed directly in the Sequel::Database class.

* The Sequel::SQL::SpecificExpression class has been removed,
  subclasses now inherit from Sequel::SQL::Expression.

* Sequel now requires its own files with an absolute path.

* The file hierarchy of the sequel library changed significantly.

sequel-5.63.0/doc/release_notes/2.2.0.txt

The Most Powerful and Flexible Associations of Any Ruby ORM
-----------------------------------------------------------

Sequel can now support any association type supported by
ActiveRecord, and many association types ActiveRecord doesn't
support.

Association callbacks (:before_add, :after_add, :before_remove,
:after_remove) have been added, and work for all association types.
Each of the callback options can be a Symbol specifying an instance
method that takes one argument (the associated object), or a Proc
that takes two arguments (the current object and the associated
object), or an array of Symbols and Procs.  Additionally, an
:after_load callback is available, which is run after loading the
associated record(s) from the database.  A sketch of the callback
options appears below.

Association extensions are now supported:

  module FindOrCreate
    def find_or_create(vals)
      first(vals) || create(vals)
    end
  end
  class Author < Sequel::Model
    one_to_many :authorships, :extend=>FindOrCreate
  end
  Author.first.authorships_dataset.find_or_create(:name=>'Bob')

Sequel has been able to support most has_many :through style
associations since 1.3, via many_to_many (since it doesn't break on
join tables that are also model tables, unlike ActiveRecord's
has_and_belongs_to_many).  Now it can also support has_many :through
style associations where it goes through a has_many association.

Sequel can now support polymorphic associations.  Polymorphic
associations are really a design flaw, so Sequel doesn't support them
directly, but the tools that Sequel gives you make them pretty easy
to implement.

Sequel can also support associations that ActiveRecord does not.  For
example, a belongs_to association where the column referenced in the
associated table is not the primary key, an association that depends
on multiple columns in each table, or even situations where the
association has a column in the primary table that can be referenced
by any of multiple columns in a second table that has a has_one style
association with the table you want to associate with.

Some of those associations can be supported for a single object using
custom SQL in ActiveRecord, but none are supported when eager
loading, nor do they allow further filtering.  Not only can all of
these cases be supported with Sequel::Model, all can be supported
with eager loading, and can allow for further filtering.  See
http://sequel.rubyforge.org/files/sequel/doc/advanced_associations_rdoc.html
for details and example code for all association types covered above.
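Here is a minimal sketch of the callback options described above (the
models and the callback bodies are hypothetical):

  class Artist < Sequel::Model
    one_to_many :albums,
      :before_add => :check_album,
      :after_add => proc{|artist, album| puts "added #{album.name}"}

    private

    # Called with the associated object before add_album modifies
    # the database
    def check_album(album)
      raise Sequel::Error, 'album must be named' if album.name.nil?
    end
  end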
For example, a belongs_to association where the column referenced in the associated table is not the primary key, an association that depends on multiple columns in each table, or even situations where the association has a column in the primary table that can be referenced by any of multiple columns in a second table that has a has_one style association with the table you want to associate with. Some of those associations can be supported for a single object using custom SQL in ActiveRecord, but none are supported when eager loading or allow further filtering. Not only can all of these cases be supported with Sequel::Model, all can be supported with eager loading, and can allow for further filtering. See http://sequel.rubyforge.org/files/sequel/doc/advanced_associations_rdoc.html for details and example code for all association types covered above. There have also been many additional options added for controlling eager loading via eager_graph. Every part of the SQL JOINs can now be controlled via one of the options, so you can use JOIN USING, NATURAL JOIN, or arbitrary JOIN ON conditions. Finally, just to show off the power that Sequel gives you when eager loading, here is example code that will eagerly load all descendants and ancestors in a tree structure, without knowing the depth of the tree: class Node < Sequel::Model set_schema do primary_key :id foreign_key :parent_id, :nodes end create_table many_to_one :parent one_to_many :children, :key=>:parent_id # Only useful when eager loading many_to_one :ancestors, :eager_loader=>(proc do |key_hash, nodes, associations| # Handle cases where the root node has the same parent_id as primary_key # and also when it is NULL non_root_nodes = nodes.reject do |n| if [nil, n.pk].include?(n.parent_id) # Make sure root nodes have their parent association set to nil n.associations[:parent] = nil true else false end end unless non_root_nodes.empty? id_map = {} # Create an map of parent_ids to nodes that have that parent id non_root_nodes.each{|n| (id_map[n.parent_id] ||= []) << n} # Doesn't cause an infinte loop, because when only the root node # is left, this is not called. Node.filter(Node.primary_key=>id_map.keys).eager(:ancestors).all do |node| # Populate the parent association for each node id_map[node.pk].each{|n| n.associations[:parent] = node} end end end) many_to_one :descendants, :eager_loader=>(proc do |key_hash, nodes, associations| id_map = {} nodes.each do |n| # Initialize an empty array of child associations for each parent node n.associations[:children] = [] # Populate identity map of nodes id_map[n.pk] = n end # Doesn't cause an infinite loop, because the :eager_loader is not called # if no records are returned. Exclude id = parent_id to avoid infinite loop # if the root note is one of the returned records and it has parent_id = id # instead of parent_id = NULL. Node.filter(:parent_id=>id_map.keys).exclude(:id=>:parent_id).eager(:descendants).all do |node| # Get the parent from the identity map parent = id_map[node.parent_id] # Set the child's parent association to the parent node.associations[:parent] = parent # Add the child association to the array of children in the parent parent.associations[:children] << node end end) end nodes = Node.filter(:id < 10).eager(:ancestors, :descendants).all New Adapter Features -------------------- * PostgreSQL bytea fields are now fully supported. * The PostgreSQL adapter now uses the safer connection-specific string escaping if you are using ruby-pg. 
* The SQLite adapter supports drop_column and add_index.

* You can now use URL parameters in the connection string, enabling
  you to connect to PostgreSQL via a socket using
  postgres://user:password@blah/database?host=/tmp

Other New Features
------------------

* Dataset#graph now takes a block which it passes to join_table.

* Symbol#identifier has been added, which can be used if another
  library defines the same operator(s) on Symbol that Sequel
  defines.

* Filter blocks now yield a VirtualRow instance, which can yield
  Identifiers, QualifiedIdentifiers, or Functions.  Like
  Symbol#identifier, this is useful if another library defines the
  same operator(s) on Symbol that Sequel defines.

* You can now call Model.to_hash to get an identity map for all
  rows (before this required Model.dataset.to_hash).

* A model that can get its column information from the schema will
  set it in the dataset, potentially saving many queries.

* Model.validates_presence_of now works correctly for boolean
  columns.

Notable Bug Fixes
-----------------

* Caching now works with Model subclasses.

* Model validation methods now work with source reloading.

* The PostgreSQL adapter no longer raises an Error if you try to
  insert a record with the primary key already specified.

* Sequel no longer messes with the native MySQL adapter, so you can
  use Sequel and ActiveRecord with MySQL in the same process.

* Dataset#count now works correctly for limited datasets.

* PostgreSQL Database#transaction method yields a connection,
  similar to the other adapters.

* Using a hash argument in #distinct, #order, or #group is treated
  as an expression instead of a column alias.

* Cloned datasets no longer ignore the existing columns unless it is
  necessary.

* The :quote_identifiers and :single_threaded Database options now
  work correctly.

Backwards Incompatible Changes
------------------------------

* ParseTree support, deprecated in 2.1.0, has been removed in 2.2.0.
  You should use the expression filter syntax instead, preferably
  without the block (though it can be used inside a block as well).
  This usually involves the following types of changes (a short
  additional sketch appears after this list):

    filter{:x == :y} => filter(:x => :y)
    filter{:x << :y} => filter(:x => :y)
    filter{:x && :y} => filter(:x & :y) # Don't forget about change
    filter{:x || :y} => filter(:x | :y) # in operator precedence
    filter{:x.like?('%blah%')} => filter(:x.like('%blah%'))
    filter do => filter((:x > 1) & (:y < 2))
      :x > 1
      :y < 2
    end

* Attempts to save an invalid Model instance will raise an error by
  default.  To revert to returning a nil value, use:

    Sequel::Model.raise_on_save_failure = false # Global
    Album.raise_on_save_failure = false # Class
    album = Album.new
    album.raise_on_save_failure = false # Instance

  Note that before, save would return false where now it returns nil
  if you disable raising on save failure.

* Dataset#update no longer takes a block, as its use of the block
  depended on ParseTree.  With the introduction of the expression
  syntax in 2.0.0, it's no longer necessary.  You should use a hash
  with an expression as the value instead:

    DB[:table].update(:column=>:column + 1)

* validates_presence_of now considers false as present instead of
  absent.  This is so it works with boolean columns.

* Dataset#graph ignores any previously selected columns when it is
  called for the first time.

* Dataset#columns ignores any filtering, ordering, or distinct
  clauses.  This shouldn't cause issues unless you were using SQL
  functions with side effects and expecting them to be called when
  columns was called (unlikely at best).
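As a brief illustration of the ParseTree migration mentioned in the
first item above, here is a minimal sketch using the blockless
expression syntax together with Symbol#like and Symbol#identifier
(the items table and its columns are hypothetical):

  # Blockless expression filters, replacing the removed ParseTree
  # block syntax
  DB[:items].filter(:price > 100)
  DB[:items].filter((:price > 100) & (:name.like('A%')))
  # If another library defines conflicting operators on Symbol, use
  # Symbol#identifier to get an explicit Sequel identifier object
  DB[:items].select(:name.identifier)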
One significant point of note is that the 2.2.0 release will be the
last release with both a sequel_core and sequel gem.  Starting with
2.3.0 they will be combined into one sequel gem.  You will still be
able to get just the sequel_core part by requiring 'sequel_core',
but they will be packaged together.
sequel-5.63.0/doc/release_notes/2.3.0.txt000066400000000000000000000064561434214120600177070ustar00rootroot00000000000000JRuby and Ruby 1.9 Officially Supported
---------------------------------------

Sequel now officially supports JRuby 1.1.3 and Ruby 1.9 (svn
revision 18194 at least).  When using JRuby with the JDBC adapter,
PostgreSQL, MySQL, and SQLite enjoy almost full support, though not
everything works the same as using the native adapter.  Depending on
what you are doing, it may make sense to use postgres-pr on JRuby
instead of PostgreSQL-JDBC.

To use the new JDBC support, the database connection string you give
Sequel is now passed directly to JDBC; here are a few examples:

  Sequel.connect('jdbc:postgresql://host/database?user=*&password=*')
  Sequel.connect('jdbc:mysql://host/database?user=*&password=*')
  Sequel.connect('jdbc:sqlite::memory:')
  Sequel.connect('jdbc:sqlite:relative/path.db')
  Sequel.connect('jdbc:sqlite:/absolute/path.db')

Single Gem
----------

Sequel is now distributed as a single gem named sequel, by combining
the previous sequel_core and sequel gems.  You can still just
"require 'sequel_core'" if you don't want the model functionality.

Database Adapter Improvements
-----------------------------

* Dataset#empty? now works using the MySQL adapter.

* The Oracle adapter now works with a nonstandard database port.

* The JDBC adapter should load JDBC drivers automatically for
  PostgreSQL, MySQL, SQLite, Oracle, and MSSQL.  For PostgreSQL,
  MySQL, and SQLite, the jdbc-* gem can be used, for the others, you
  must have the correct .jar in your CLASSPATH.

* The PostgreSQL adapter no longer raises an error when inserting
  records into a table without a primary key.

* Database#disconnect now works for the ADO adapter.

* The ADO adapter no longer raises an error if the dataset contains
  no records.

* The ODBC adapter no longer errors when converting ::ODBC::Time
  values.

Backwards Incompatible Changes
------------------------------

* Sequel::Worker has been removed.  There are no known users, and
  the specs caused problems on JRuby.

* Assigning an empty string to a non-string, non-blob model
  attribute converts it to nil by default.  You can use
  "Model.typecast_empty_string_to_nil = false" to get the old
  behavior.  This should make web development with Sequel
  significantly easier, hopefully at no expense to other uses.

* Database.uri_to_options is now a private class method.

* Model.create_table! now acts the same as Database.create_table!,
  dropping the table unconditionally and then creating it.  This was
  done for consistency.  If you are using Model.create_table! in
  production code, you should change it to
  "Model.create_table unless Model.table_exists?", otherwise you
  risk wiping out your production data.  I recommend you use the
  migration feature instead of Model.set_schema, as that handles
  altering existing tables.

Other Notable Changes
---------------------

* Using validates_length_of more than once on the same attribute
  with different options without a tag no longer causes the first
  use to be ignored.  This was a side effect of the validation tags
  added in 2.2.0.

* Other than the adapters, Sequel now has 100% code coverage (line
  coverage).

* Model#set* methods now return self.
* An integration test suite was added, testing Sequel against a live
  database with nothing mocked, which helped greatly when testing
  the new support for JDBC adapters.
sequel-5.63.0/doc/release_notes/2.4.0.txt000066400000000000000000000072431434214120600177050ustar00rootroot00000000000000Prepared Statements/Bound Variables
===================================

Sequel now supports prepared statements and bound variables.  No
matter which database you are using, Sequel uses exactly the same
API.  To specify placeholders, you use the :$placeholder syntax:

  ds = DB[:items].filter(:name=>:$n)

To use a bound variable:

  ds.call(:select, :n=>'Jim')

This will do the equivalent of selecting records that have the name
'Jim'.  In addition to :select, you can use :first or :delete.  There
is also support for bound variables when inserting or updating
records:

  ds.call(:update, {:n=>'Jim', :new_n=>'Bob'}, :name=>:$new_n)

Which will update all records that have the name 'Jim' to have the
name 'Bob'.

Prepared statement support is very similar to bound variable
support, except that the statement is first prepared with a name:

  ps = ds.prepare(:select, :select_by_name)

It is then called later with the bound arguments to use:

  ps.call(:n=>'Jim')
  DB.call(:select_by_name, :n=>'Jim') # same as above

For inserting or updating, the hash to use when inserting or
updating is given to prepare:

  ps2 = ds.prepare(:update, :update_name, :name=>:$new_n)
  ps2.call(:n=>'Jim', :new_n=>'Bob')

There is some level of native support for these features in the
PostgreSQL, MySQL, SQLite, and JDBC adapters.  For other adapters,
support is emulated, but it shouldn't be too difficult to add native
support for them.

For more details see:
http://sequel.rubyforge.org/rdoc/files/doc/prepared_statements_rdoc.html

Read-Only Slave/Writable Master and Database Sharding
=====================================================

Sequel now has built-in support for master/slave database
configurations, just by setting an option in Sequel.connect:

  DB=Sequel.connect('postgres://master_server/database', \
    :servers=>{:read_only=>{:host=>'slave_server'}})

That will use slave_server for SELECT queries and master_server for
other queries.  It's fairly easy to use multiple slaves or even
multiple masters; examples are included in the link below.

Sharding support requires some code other than the database
configuration, but is still fairly simple.  For example, to set up a
16 shard configuration based on a hex character:

  servers = {}
  (('0'..'9').to_a + ('a'..'f').to_a).each do |hex|
    servers[hex.to_sym] = {:host=>"hash_host_#{hex}"}
  end
  DB=Sequel.connect('postgres://hash_host/hashes', :servers=>servers)

To set which shard to use for a query, use the Dataset#server
method:

  DB[:hashes].server(:a).filter(:hash=>/31337/)

For more details see:
http://sequel.rubyforge.org/rdoc/files/doc/sharding_rdoc.html

Other Changes
=============

* The sequel.rubyforge.org website has a new design thanks to boof.
  The online RDoc is now located at
  http://sequel.rubyforge.org/rdoc.

* Support was added for anonymous column names in the ADO adapter.

* Better MSSQL support in the ADO, ODBC, and JDBC adapters.  The
  odbc_mssql adapter has been removed.  If you use MSSQL with ODBC,
  please use the odbc adapter with a :db_type=>'mssql' option.

* The following Sequel::Error exception subclasses were removed:
  InvalidExpression, InvalidFilter, InvalidJoinType, and WorkerStop.

* Documentation was added for the PostgreSQL, MySQL, SQLite, and
  JDBC adapters.

* Various internal interfaces were refactored.
For example, if you use an adapter not included with Sequel, it
probably won't work until you update it to the new internal API.

* Many low level methods (such as Database#transaction) now take an
  optional server argument to indicate which server to use.

* Model plugins that have a DatasetMethods module with non-public
  methods no longer have Model methods created that call those
  methods.
sequel-5.63.0/doc/release_notes/2.5.0.txt000066400000000000000000000110021434214120600176720ustar00rootroot00000000000000New Features
------------

* The values that are used to insert/update records can now be
  scoped similarly to how filter expressions can be scoped.
  set_defaults is used to set defaults which can be overridden, and
  set_overrides is used to set defaults which cannot be overridden:

    DB[:t].set_defaults(:x=>1).insert_sql
    # => INSERT INTO t (x) VALUES (1)
    DB[:t].set_defaults(:x=>1).insert_sql(:x=>2)
    # => INSERT INTO t (x) VALUES (2)
    DB[:t].set_defaults(:x=>1).insert_sql(:y=>2)
    # => INSERT INTO t (x, y) VALUES (1, 2)
    DB[:t].set_overrides(:x=>1).insert_sql(:x=>2)
    # => INSERT INTO t (x) VALUES (1)

  The difference between set_defaults and set_overrides is that with
  set_defaults, the last value takes precedence, while with
  set_overrides, the first value takes precedence.

* The schema generators now support creating and altering tables
  with composite primary and/or foreign keys:

    DB.create_table(:items) do
      integer :id
      text :name
      primary_key [:id, :name]
      foreign_key [:id, :name], :other_table, \
        :key=>[:item_id, :item_name]
    end

    DB.alter_table(:items) do
      add_primary_key [:id, :name]
      add_foreign_key [:id, :name], :other_table, \
        :key=>[:item_id, :item_name]
    end

* The AlterTableGenerator now supports unique constraints:

    DB.alter_table(:items) do
      add_unique_constraint [:aaa, :bbb, :ccc], :name => :con3
    end

* The schema generators now support ON UPDATE (previously, they only
  supported ON DELETE):

    DB.create_table(:items) do
      foreign_key :project_id, :projects, :on_update => :cascade
    end

* When connecting to a PostgreSQL server version 8.2 and higher,
  Sequel now uses the INSERT ... RETURNING ... syntax, which should
  speed up row inserts on PostgreSQL.  In addition, Sequel Models
  use RETURNING * to speed up model object creation.

* You can now validate multiple attributes at once.  This is useful
  if the combination of two or more attribute values is important,
  such as checking the uniqueness of multiple columns.
  validates_uniqueness_of now supports this directly:

    validates_uniqueness_of [:column1, :column2]

  This protects against the database having multiple rows with the
  same values for both :column1 and :column2.  This is different
  from:

    validates_uniqueness_of :column1, :column2

  Which checks that the value of column1 is unique in the table, and
  that the value of column2 is unique in the table (which is much
  more restrictive).

Other Improvements
------------------

* Dataset methods insert_sql, delete_sql, and update_sql respect the
  :sql option, allowing you to do things such as:

    ds = DB['INSERT INTO t (time) VALUES (CURRENT_TIMESTAMP)']
    ds.insert
    ds.insert

* The database adapters (at least MySQL, PostgreSQL, SQLite, and
  JDBC) generally raise Sequel::DatabaseError for database problems,
  making it easier to tell what is a true database error versus an
  error raised by Sequel itself.

* Sequel uses the async features of ruby-pg so that the entire
  interpreter is not blocked while waiting for the results of
  queries.

* Sequel now supports the 2008.08.17 version of ruby-pg.
* MSSQL support has been improved when using the ODBC and ADO
  adapters.

* Index names are now quoted when creating or dropping indexes.

* Automatically generated column accessor methods no longer override
  instance methods specified by plugins.

* Inserting a row with an already specified primary key inside a
  transaction now works correctly when using PostgreSQL.

* before_save and before_update hooks now work as expected when
  using save_changes.

* count and paginate now work correctly on graphed datasets.

Backwards Compatibility
-----------------------

* The SQLite adapter now raises Sequel::DatabaseError instead of
  Sequel::Error::InvalidStatement whenever an SQLite3::Exception is
  raised by the SQLite3 driver.

* Date and DateTime conversions now convert 2 digit years.  To
  revert to the previous behavior:

    Sequel.convert_two_digit_years = false

  Note that Ruby 1.8 and 1.9 handle Date parsing differently, so
  there is no backwards compatibility change for Ruby 1.9.  However,
  this also means that the MM/DD/YY date syntax commonly used in the
  United States is not always parsed correctly on Ruby 1.9, greatly
  limiting the use of 2 digit year conversion.

* You can no longer abuse the SQL function syntax for specifying
  database types.  For example, you must change:

    :type=>:varchar[255]

  to:

    :type=>:varchar, :size=>255
sequel-5.63.0/doc/release_notes/2.6.0.txt000066400000000000000000000140541434214120600177050ustar00rootroot00000000000000New Features
------------

* Schema parsing was refactored, resulting in a huge speedup when
  using MySQL.  MySQL now uses the DESCRIBE statement instead of the
  INFORMATION_SCHEMA.  PostgreSQL now uses the pg_* system catalogs
  instead of the INFORMATION_SCHEMA.

* The schema information now includes the :primary_key field.
  Models now use this field to automatically determine the primary
  key for a table, so it no longer needs to be specified
  explicitly.  Models even handle the composite primary key case.

* The raise_on_typecast_failure switch was added, with it being true
  by default (so no change in behavior).  This allows the user to
  silently ignore errors when typecasting fails, at the global,
  class, and instance levels.

    Sequel::Model.raise_on_typecast_failure = false # Global
    Artist.raise_on_typecast_failure = true # Class
    artist = Artist.new
    artist.raise_on_typecast_failure = false # Instance

    Album.raise_on_typecast_failure = true
    Album.new(:numtracks=>'a') # => raises Sequel::Error::InvalidValue
    Album.raise_on_typecast_failure = false
    Album.new(:numtracks=>'a') # => #<Album @values={:numtracks=>"a"}>

* Associations' orders are now respected when eager loading via
  eager_graph.  Sequel will qualify the columns in the order with
  the alias being used, so you can have overlapping columns when
  eager loading multiple associations.

    Artist.one_to_many :albums, :order=>:name
    Album.one_to_many :tracks, :order=>:number
    Artist.order(:artists__name).eager_graph(:albums=>:tracks).sql
    # => ...
    #    ORDER BY artists.name, albums.name, tracks.number

* The support for CASE expressions has been enhanced by allowing the
  use of an optional expression:

    {1=>2}.case(0, :x)
    # => CASE x WHEN 1 THEN 2 ELSE 0 END
    [[:a, 1], [:b, 2], [:c, 3]].case(4, :y)
    # => CASE y WHEN a THEN 1 WHEN b THEN 2 WHEN c THEN 3 ELSE 4 END

  Previously, to get something equivalent to this, you had to do:

    {{:x=>1}=>2}.case(0)
    # => CASE WHEN (x = 1) THEN 2 ELSE 0 END
    [[{:y=>:a}, 1], [{:y=>:b}, 2], [{:y=>:c}, 3]].case(4)
    # => CASE WHEN (y = a) THEN 1 WHEN (y = b) THEN 2 WHEN (y = c)
    #    THEN 3 ELSE 4 END

* You can now change the NULL/NOT NULL value of an existing column
  using the set_column_allow_null method.

    # Set NOT NULL
    DB.alter_table(:artists){set_column_allow_null :name, false}
    # Set NULL
    DB.alter_table(:artists){set_column_allow_null :name, true}

* You can now get the schema information for a table in a non-public
  schema in PostgreSQL using the implicit :schema__table syntax.
  Before, the :schema option had to be given explicitly to
  Database#schema.  This allows models to get schema information for
  tables outside the public schema.

* Transactions are now supported on MSSQL.

* Dataset#tables now returns all tables in the database for MySQL
  databases accessed via JDBC.

* Database#drop_view can now drop multiple views at once.

Other Improvements
------------------

* The SQLite adapter now respects the Sequel.datetime_class option
  for timestamp and datetime columns.

* Adding a unique constraint no longer explicitly creates a unique
  index.  If you want a unique index, use index :unique=>true.

* If no language is specified when creating a full text index on
  PostgreSQL, the simple language is assumed.

* Errors when typecasting fails are now Sequel::Error::InvalidValue
  instead of the more generic Sequel::Error.

* Specifying constraints now works correctly for all types of
  arguments.  Previously, it did not work unless a block or
  interpolated string was used.

* Loading an association with the same name as a table in the FROM
  clause no longer causes an error.

* When eagerly loading many_to_one associations where no objects
  have an associated object, the negative lookup is now cached.

* String keys can now be used with Dataset#multi_insert, just like
  they can be used for Dataset#insert.

* Dataset#join_table now generates the correct SQL when doing the
  first join to a dataset where the first source is a dataset, when
  an unqualified column is used in the conditions.

* Cascading associations after *_to_many associations can now be
  eagerly loaded via eager_graph.

* Eagerly loading *_to_many associations that are cascaded behind a
  many_to_one association now have their duplicates removed if a
  cartesian product join is done.

* The SQLite adapter now uses string literals in all of the AS
  clauses.  While the SQL standard specifies that identifiers should
  be used, SQLite documentation explicitly states that string
  literals are expected (though it generally works with identifiers
  by converting them implicitly).

* Database methods that modify the schema now remove the cached
  schema entry.

* The hash keys that Database#schema returns when no table is
  requested are now always supposed to be symbols.

* The generation of SQL for composite foreign keys on MySQL has been
  fixed.

* A schema.rdoc file was added to the documentation explaining the
  various parts of Sequel related to schema generation and
  modification and how they interact
  (http://sequel.rubyforge.org/rdoc/files/doc/schema_rdoc.html).
* The RDoc template for the website was changed from the default
  template to the hanna template.

Backwards Compatibility
-----------------------

* The :numeric_precision and :max_chars schema entries have been
  removed.  Use the :db_type entry to determine this information, if
  available.

* The SQLite adapter used to always return Time instances for
  timestamp types, even if Sequel.datetime_class was DateTime.  For
  datetime types it always returned a DateTime instance.  It now
  returns an instance of Sequel.datetime_class in both cases.

* It's possible that the inclusion of associations' orders when
  eager loading via eager_graph could cause problems.  You can use
  the :order_eager_graph=>false option to not use the :order option
  when eager loading via :eager_graph.

* There were small changes in SQL creation where the AS keyword is
  now used explicitly.  These should have no effect, but could break
  tests for explicit SQL.
sequel-5.63.0/doc/release_notes/2.7.0.txt000066400000000000000000000145431434214120600177110ustar00rootroot00000000000000Performance Optimizations
-------------------------

* Fetching a large number of records with the PostgreSQL adapter is
  significantly faster (up to 3-4 times faster than before).

* Instantiating model objects has been made much faster, as many
  options (such as raise_on_save_failure) are now lazily loaded, and
  hook methods are now much faster if no hooks have been defined for
  that type of hook.

New Association Options
-----------------------

* The :eager_grapher option has been added allowing you to supply
  your own block to implement eager loading via eager_graph.

* many_to_one and one_to_many associations now have a :primary_key
  option, specifying the name of the column that the :key option
  references.

* many_to_many associations now have :left_primary_key and
  :right_primary_key options, specifying the columns that :left_key
  and :right_key reference, respectively.

* many_to_many associations now have a :uniq option, that adds an
  :after_load callback that makes the returned array of objects
  unique.

Other New Features
------------------

* Dataset#set_graph_aliases now allows you to supply a third
  argument for each column you want graphed into the dataset,
  allowing you to use arbitrary SQL expressions that are graphed
  into the correct table:

    ds.set_graph_aliases!(:a=>[:b, :c], :d=>[:e, :f, 42])
    # SELECT b.c AS a, 42 AS d FROM ...
    ds.first # => {:b=>{:c=>?}, :e=>{:f=>42}}

* Dataset#add_graph_aliases was added, that adds additional graph
  aliases instead of replacing the existing ones (as
  #set_graph_aliases does).  It's basically the equivalent of
  select_more for graphs.

* Dataset#join_table changed its final argument from a symbol
  specifying a table name to an option hash (with backwards
  compatibility kept), and adds support for an :implicit_qualifier
  option, which it uses instead of the last joined table to qualify
  columns.

* Association's :after_load callbacks are now called when eager
  loading via eager (but not when eager loading via eager_graph).

* Any expression can now be used as the argument to Symbol#like,
  which means that you can pattern match columns to other columns.
  Before, it always transformed the argument to a string.

    :a.like(:b)
    # 2.6.0: a LIKE 'b'
    # 2.7.0: a LIKE b

* Array#sql_array was added, allowing you to specify that an array
  in ruby be treated like an array in SQL.
  This is true anyway, except for arrays of all two pairs, which are
  treated like hashes, for specifying multiple conditions with the
  same key:

    DB[:foo].filter([:a,:b] => [[1,2],[3,4]].sql_array)
    # => SELECT * FROM foo WHERE ((a, b) IN ((1, 2), (3, 4)))

* ComplexExpression#== and #sql? were added, allowing for easier
  testing.

* Full text searching on PostgreSQL now joins multiple columns with
  a space, to prevent joining border words, and it works when there
  is a match in one column but the other column is NULL.

Other Improvements
------------------

* Instance methods added by creating associations are added to an
  anonymous module included by the class, so they can be overridden
  in the class while still allowing the use of super to get the
  default behavior (this is similar to column accessor methods).

* Many improvements were added to support using multiple schemas in
  PostgreSQL.

* Model::Validation::Errors objects are now more compatible with
  Rails, by adding a #count method and making #on return nil if
  there are no error messages for that attribute.

* Serialized columns in models are no longer typecast.

* Associations are now inherited when a model class is subclassed.

* Many improvements were made that should make adding custom
  association types easier.

* A corner case in eager_graph where the wrong table name would be
  used to qualify a column name has been fixed.

* Dataset's cached column information is no longer modified if #each
  is called with an option that modifies the columns.

* You should now be able to connect to Oracle via the JDBC adapter,
  and with the same support it has when using the oracle adapter.

* Model.association_reflections is now a public method, so you can
  grab a hash of all association reflections at once (keyed by
  association name symbol).

* The :encoding/:charset option now works in the PostgreSQL adapter
  if the postgres-pr driver is used.

* The numeric(x,y) type is now interpreted as decimal.

Backwards Compatibility
-----------------------

* The first argument to Model#initialize must be a hash; you can no
  longer use nil.  For example, the following code will break if
  :album is not in params:

    Album.new(params[:album])

  Additionally, Model#initialize does not call the block if the
  second argument is true.

* The Sequel::Model.lazy_load_schema setting was removed.  It should
  no longer be necessary now that schema loading is relatively
  speedy, and schemas can be loaded at startup and cached.

* The PostgreSQL adapter will default to using a unix socket in /tmp
  if no host is specified.  Before, a TCP/IP socket to localhost was
  used if no host was specified.  This change makes Sequel operate
  similarly to the PostgreSQL command line tools.

* The ASSOCIATION_TYPES constant has changed from an array to a hash
  and it has been moved.  The RECIPROCAL_ASSOCIATIONS constant has
  been removed.  This is unlikely to matter unless you were using
  custom association types.

* The PostgreSQL adapter now sets the PostgreSQL DateStyle, in order
  to implement an optimization.  To turn this off, set
  Sequel::Postgres.use_iso_date_format = false.

* When using the PostgreSQL adapter, in many places the schema is
  specified explicitly.  If you do not specify a schema, a default
  one is used (public by default).  If you use a schema other than
  public for your work, use Database#default_schema= to set it.  For
  any table outside of the default schema, you should specify the
  schema explicitly, even if it is in the PostgreSQL search_path.
* Model::Validation::Errors#on now returns nil instead of [] if
  there are no errors for an attribute.

* Hooks added to a superclass after a subclass has been created no
  longer have an effect on the subclass.

* The Postgres.string_to_bool method has been removed.

* PostgreSQL full text searching now always defaults to using the
  simple dictionary.  If you want to use another dictionary, it must
  be specified explicitly, both when searching and when creating a
  full text index.
sequel-5.63.0/doc/release_notes/2.8.0.txt000066400000000000000000000143701434214120600177100ustar00rootroot00000000000000New Features
------------

* Sequel now supports database stored procedures similar to its
  support for prepared statements.  The API is as follows:

    DB[:table].call_sproc(:select, :mysp, 'param1', 'param2')
    # or
    sp = DB[:table].prepare_sproc(:select, :mysp)
    sp.call('param1', 'param2')
    sp.call('param3', 'param4')

  This works with Model datasets as well, allowing them to return
  model objects:

    Album.call_sproc(:select, :new_albums)
    #=> [#<Album ...>, #<Album ...>]

  You can call a stored procedure directly on the Database object if
  you want to, but the results and API are adapter dependent, and
  you definitely shouldn't do it if the stored procedure returns
  rows:

    DB.call_sproc(:mysp, :args=>['param1', 'param2'])

  Currently, the MySQL and JDBC adapters support stored procedures.
  Other adapters may support them in a future version.

* The connection pool code can now remove connections if the adapter
  raises a Sequel::DatabaseDisconnectError indicating that the
  connection has been lost.  When a query is attempted and the
  adapter raises this error, the connection pool removes the
  connection from the pool, and reraises the error.  The Oracle and
  PostgreSQL adapters currently support this, and other adapters may
  support it in a future version.

* Whether to upcase or quote identifiers can now be set separately.
  Previously, upcasing was done when quoting except when using
  SQLite, PostgreSQL, or MySQL.  Now, you can turn upcasing off
  while still quoting.  This may be necessary if you are using a
  MSSQL database that has lower case table names that conflict with
  reserved words.  It also allows you to uppercase identifiers when
  using SQLite, PostgreSQL, or MySQL, which may be beneficial in
  certain cases.  To turn upcasing on or off:

    # Global
    Sequel.upcase_identifiers = true
    # Database
    DB = Sequel.connect("postgres://...", :upcase_identifiers=>true)
    DB.upcase_identifiers = false
    # Dataset
    ds = DB[:items]
    ds.upcase_identifiers = true

* Options are now supported when altering a column's type:

    DB.alter_table(:items) do
      set_column_type :score, :integer, :unsigned=>true
      set_column_type :score, :varchar, :size=>30
      set_column_type :score, :enum, :elements=>['a', 'b']
    end

* Standard conforming strings are now turned on by default in the
  PostgreSQL adapter.  This makes PostgreSQL not interpret backslash
  escapes.  This is the PostgreSQL recommended setting, which will
  be the default setting in a future version of PostgreSQL.  If you
  don't want to force the use of standard strings, use:

    Sequel::Postgres.force_standard_strings = false

  You need to do that after you call Sequel.connect but before you
  use the database for anything, since that setting is set on
  initial connection.

* Sequel now raises an error if you attempt to use EXCEPT [ALL] or
  INTERSECT [ALL] on a database that doesn't support it.

* Sequel now raises an error if you attempt to use DISTINCT ON with
  MySQL or Oracle, which don't support it.
* A subadapter for the Progress RDBMS was added to the ODBC adapter.
  To connect to a Progress database, use the :db_type=>'progress'
  option.  This adapter targets Progress 9.

* The ODBC adapter now supports transactions.

* The MSSQL shared adapter now supports multi_insert (for inserting
  multiple rows at once), and unicode string literals.

Other Improvements
------------------

* There were many improvements related to using schemas in
  databases.  Using schema-qualified tables should work in most if
  not all cases now.  Model associations, getting the schema, joins,
  and many other parts of Sequel were modified to allow the use of
  schema-qualified tables.

* You can now use literal strings with placeholders as well as
  subselects when using prepared statements.  For example, the
  following all work now:

    DB[:items].filter("id = ?", :$i).call(:select, :i=>1)
    DB[:items].filter(:id=>DB[:items].select(:id)\
      .filter(:id=>:$i)).call(:select, :i=>1)
    DB["SELECT * FROM items WHERE id = ?", :$i].call(:select, :i=>1)

* Model#initialize received a few more micro-optimizations.

* Model#refresh now clears the changed columns as well as the
  associations.

* You can now drop columns inside a transaction when using SQLite.

* You can now submit multiple SQL queries at once in the MySQL
  adapter:

    DB['SELECT 1; SELECT 2'].all
    #=> [{:"1"=>1, :"2"=>2}]

  This may fix issues if you've seen a MySQL "commands out of sync"
  message.  Note that this doesn't work if you are connecting to
  MySQL via JDBC.

* You can now use AliasedExpressions directly in table names given
  to join_table:

    DB.from(:i.as(:j)).join(:k.as(:l), :a=>:b)
    #=> ... FROM i AS j INNER JOIN k AS l ON (l.a = j.b)

* Database#rename_table once again works on PostgreSQL.  It was
  broken in 2.7.0.

* The interval type is now treated as its own type.  It was
  previously treated as an integer type.

* Subselects are now aliased correctly when using Oracle.

* UNION, INTERSECT, and EXCEPT statements now appear before ORDER
  and LIMIT on most databases.  If you use these constructs, please
  test and make sure that they work correctly with your database.

* The SQL EXCEPT clause now works on Oracle, which uses MINUS
  instead.

* Dataset#exists now returns a LiteralString, to make it easier to
  use.

* The Sequel.odbc_mssql method was removed, as the odbc_mssql
  adapter was removed in a previous version.  Instead, use:

    Sequel.odbc(..., :db_type=>'mssql')

Backwards Compatibility
-----------------------

* The hash returned by Database#schema when no table name is
  provided uses quoted strings instead of symbols as keys.  The hash
  has a default proc, so using the symbol will return the same value
  as before, but if you use each to iterate through the hash, the
  keys will be different.  This was necessary to handle
  schema-qualified tables.

* Database#table_exists? no longer checks the output of
  Database#tables.  If the table exists in the schema, it returns
  true, otherwise, it does a query.  This was necessary because
  table_exists? accepts multiple formats for table names and
  Database#tables returns an array of symbols.

* When getting the schema on PostgreSQL, the default schema is now
  used even if the :schema=>nil option is used.
sequel-5.63.0/doc/release_notes/2.9.0.txt000066400000000000000000000072531434214120600177130ustar00rootroot00000000000000New Features
------------

* Compound SQL statement (i.e. UNION, EXCEPT, and INTERSECT) support
  is much improved.  Chaining compound statement calls no longer
  wipes out previous compound statement calls of the same type.
  Also, the ordering of the compound statements is no longer fixed
  per adapter; it now reflects the order they were called on the
  object.  For example, the following now work as expected:

    ds1.union(ds2).union(ds3)
    ds1.except(ds2).except(ds3)
    ds1.intersect(ds2).intersect(ds3)
    ds1.union(ds2).except(ds3)
    ds1.except(ds2).intersect(ds3)
    ds1.intersect(ds2).union(ds3)

* Exception classes ValidationFailure and BeforeHookFailure were
  added so it is easier to catch a failed validation.  These are
  both subclasses of Sequel::Error, so there shouldn't be any
  backwards compatibility issues.  Error messages are also improved,
  as the ValidationFailure message is a string containing all
  validation failures and the BeforeHookFailure message contains
  which hook type caused the failure (i.e. before_save,
  before_create, or before_validate).  A short sketch of catching a
  failed validation appears at the end of these notes.

* The sequel command line tool now has a -L option to load all files
  in the given directory.  This is mainly useful for loading a
  directory of model files.  The files are loaded after the database
  connection is set up.

* Methods to create and drop database functions, triggers, and
  procedural languages were added to the PostgreSQL adapter.

Other Improvements
------------------

* Database#schema now raises an error if you pass a table that
  doesn't exist.  Before, some adapters would return an empty
  schema.  The bigger problem with this is that it made
  table_exists? return the wrong value, since it looks at the
  Database's schema.  Generally, this bug would show up in the
  following code:

    class Blah < Sequel::Model
    end
    Blah.table_exists? # True even if blahs is not a table

* AlterTableGenerator#add_foreign_key now works for MySQL.

* Error messages in model association methods that add/remove an
  associated object are now more descriptive.

* Dataset#destroy for model datasets now works with databases that
  can't handle nested queries.  However, it now loads all model
  objects being destroyed before attempting to destroy any of them.

* Dataset#count now works correctly for compound SQL statements
  (i.e. UNION, EXCEPT, and INTERSECT).

* BigDecimal NaN and (+/-)Infinity values are now literalized
  correctly.  Database support for this is hit or miss.  SQLite will
  work correctly, PostgreSQL raises an error if you try to store an
  infinite value in a numeric column (though it works for float
  columns), and MySQL converts all three to 0.

* The SQLite adapter no longer loses primary key information when
  dropping columns.

* The SQLite adapter now supports dropping indexes.

* A bug in the MSSQL adapter's literalization of LiteralStrings has
  been fixed.

* The literalization of blobs on PostgreSQL (bytea columns) has been
  fixed.

* Sequel now raises an error if you attempt to subclass
  Sequel::Model before setting up a database connection.

* The native postgresql adapter has been changed to only log client
  messages of level WARNING by default.  You can modify this via:

    Sequel::Postgres.client_min_messages = nil # Use Server Default
    Sequel::Postgres.client_min_messages = :notice # Use NOTICE level

* Model#inspect now calls Model#inspect_values for easier
  overloading.

Backwards Compatibility
-----------------------

* The API to Model#save_failure (a private method) was changed to
  remove the second argument.

* SQLite columns with type numeric, decimal, or money are now
  returned as BigDecimal values.  Before, they were probably
  returned as strings.
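To illustrate catching a failed validation with the new exception
classes, here is a minimal sketch (the Album model and its
validations are hypothetical, and the exact constant path of
ValidationFailure is an assumption based on the other Sequel::Error
subclasses of this era):

  album = Album.new
  begin
    album.save
  rescue Sequel::Error::ValidationFailure => e
    # assumed constant path; e.message contains all of the
    # validation failure messages
    puts e.message
  end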
sequel-5.63.0/doc/release_notes/3.0.0.txt000066400000000000000000000203601434214120600176750ustar00rootroot00000000000000Deprecated Methods/Features Removed
-----------------------------------

Methods and features that were deprecated in 2.12.0 have been
removed in 3.0.0.  Many features were moved into plugins or
extensions, so in many cases you just need to require an extension
or use Model.plugin and not make any changes to your code.  See the
2.12.0 release notes for the list of methods/features deprecated in
2.12.0.

If you are upgrading from a previous 2.x release, please upgrade to
2.12.0 first, fix your code to remove all deprecation warnings, and
then upgrade to 3.0.0.

New Adapter
-----------

* Sequel now has an Amalgalite adapter.  Amalgalite is a ruby
  extension that embeds SQLite without requiring a separate SQLite
  installation.  The adapter is functionally complete but
  significantly slower than the native SQLite adapter.

New Features
------------

* The JDBC, PostgreSQL, MySQL, and SQLite adapters all now have a
  Database#indexes method that returns indexes for a given table:

    DB.indexes(:songs)
    => {:songs_name_index=>{:unique=>true, :columns=>[:name]},
        :songs_lyricid_index=>{:unique=>false, :columns=>[:lyricid]}}

* A schema_dumper extension was added to Sequel.  It supports
  dumping the schema of a table (including indexes) as a string
  that can be evaluated in the context of a Database object to
  create the table.  It also supports dumping all tables in the
  database as a string containing a Migration subclass that will
  rebuild the database.

    require 'sequel/extensions/schema_dumper'
    DB.dump_table_schema(:table)
    DB.dump_schema_migration
    DB.dump_schema_migration(:same_db=>true)
    DB.dump_schema_migration(:indexes=>false)
    DB.dump_indexes_migration

  The :same_db option causes Sequel to not translate column types
  to generic column types.  By default, the migration created will
  use generic types so it will run on other databases.  However, if
  you only want to support a single database, using the :same_db
  option will make the migration use the exact database type parsed
  from the database.

  The :indexes=>false option causes indexes not to be included in
  the migration.  The dump_indexes_migration can be used to create a
  separate migration with the indexes.  This can be useful if you
  plan on loading a lot of data right after creating the tables,
  since it is faster to add indexes after the data has been added.

* Using options with the generic database types is now supported to
  a limited extent.  For example, the following code now works:

    DB.create_table(:table) do
      String :a, :size=>50 # varchar(50)
      String :b, :text=>true # text
      String :c, :fixed=>true, :size=>30 # char(30)
      Time :ts # timestamp
      Time :t, :only_time=>true # time
    end

* Using Dataset#filter and related methods with multiple arguments
  now works much more intuitively:

    # 2.12.0
    dataset.filter(:a, :b=>1) # a IS NULL AND (b = 1) IS NULL
    # 3.0.0
    dataset.filter(:a, :b=>1) # a AND b = 1

* You can now create temporary tables by passing the :temp=>true
  option to Database#create_table.

* The Oracle shared adapter now supports emulation of
  autoincrementing primary keys by creating a sequence and a
  trigger, similar to how the Firebird adapter works.

* The Database#database_type method was added that returns a symbol
  specifying the database type being used.  This can be different
  than Database.adapter_scheme if you are using an adapter like JDBC
  that allows connecting to multiple different types of databases.
* Database#drop_index and related methods now support an options
  hash that respects the :name option, so they can now be used to
  drop an index that doesn't use the default index name.

* The PostgreSQL shared adapter now supports a
  Database#reset_primary_key_sequence method to reset the primary
  key sequence for a given table, based on code from ActiveRecord.

* SQL::QualifiedIdentifiers can now be qualified, allowing you to
  do:

    :column.qualify(:table).qualify(:schema)

* Using the :db_type=>'mssql' option with the DBI adapter will now
  load the MSSQL support.

* The MySQL shared adapter now supports Dataset#full_text_sql, which
  you can use in queries like the following:

    ds.select(:table.*, ds.full_text_sql(:column, 'value').as(:ft))

Other Improvements
------------------

* Sequel will now release connections from the connection pool
  automatically if they are held by a dead thread.  This can happen
  if you are using MRI 1.8 and you are heavily multithreaded or you
  call Thread#exit! or a similar method explicitly.  Those methods
  skip the execution of ensure blocks which normally release the
  connections when the threads exit.

* Model#save will now always use the same server when refreshing
  data after an insert.  This fixes an issue when Sequel's
  master/slave database support is used with models.

* SQL Array references are now quoted correctly, so code like this
  now works:

    :table__column.sql_subscript(1)

* The PostgreSQL shared adapter now handles sequences that need to
  be quoted correctly (previously these were quoted twice).

* String quoting on Oracle no longer doubles backslashes.

* Database#count now works correctly when used on MSSQL when using
  an adapter that doesn't handle unnamed columns.

* Full text searching in the MySQL adapter now works correctly when
  multiple search terms are used.

* Altering a column's name, type, default, or NULL/NOT NULL status
  on MySQL now keeps other relevant column information.  For
  example, if you alter a column's type, it'll keep an existing
  default.  This functionality isn't complete; there may be other
  column information that is lost.

* Fix creation of an index with a given type on MySQL, since MySQL's
  documentation lies.

* The schema parser now handles decimal types with size specifiers,
  fixing use on MySQL.

* Dataset#quote_identifier now works correctly when given an
  SQL::Identifier.  This allows you to do:

    dataset.select{sum(hours).as(hours)}

Backwards Compatibility
-----------------------

* Sequel will now use instance_eval on all virtual row blocks
  without an argument.  This can lead to much nicer code:

    dataset.filter{(number > 10) & (name > 'M')}
    # WHERE number > 10 AND name > 'M'

  2.12.0 raised a deprecation warning if you used a virtual row
  block without an argument and you hadn't set
  Sequel.virtual_row_instance_eval = true.

* Dataset#exclude now inverts the given argument, instead of
  negating it.  This only changes its behavior if it is called with
  a hash or array of all two pairs that have more than one element.

    # 2.12.0
    dataset.exclude(:a=>1, :b=>1) # a != 1 AND b != 1
    # 3.0.0
    dataset.exclude(:a=>1, :b=>1) # a != 1 OR b != 1

  This was done for consistency, since exclude would only negate a
  hash if it was given an argument, it would invert the same hash if
  you used a block:

    # 2.12.0
    dataset.exclude{{:a=>1, :b=>1}} # a != 1 OR b != 1

  If you want the previous behavior, change the code to the
  following:

    dataset.filter({:a=>1, :b=>1}.sql_negate)

* As noted above, the methods/features deprecated in 2.12.0 were
  removed.
* The private Dataset#select_*_sql methods now only take a single
  argument, the SQL string being built.

* Dataset#from when called without arguments would previously cause
  an error to be raised when the SQL string is generated.  Now it
  causes no FROM clause to be used, similar to how Dataset#select
  with no arguments causes SELECT * to be used.

* The internals of the generic type support and the schema
  generators were changed significantly, which could have some
  fallout in terms of old migrations breaking if they used the
  generic types and were relying on some undocumented behavior (such
  as using Integer as a type with the :unsigned option).

* The Firebird adapter no longer translates the text database
  specific type.  Use the following instead:

    String :column, :text=>true

* The MySQL shared adapter used to use the timestamp type for Time,
  now it uses datetime.  This is because the timestamp type cannot
  represent everything that the ruby Time class can represent.

* Metaprogramming#metaattr_accessor and #metaattr_reader methods
  were removed.

* Dataset#irregular_function_sql was removed.
sequel-5.63.0/doc/release_notes/3.1.0.txt000066400000000000000000000375761434214120600177160ustar00rootroot00000000000000New Plugins
-----------

3 new plugins were added that implement features supported by
DataMapper: identity_map, tactical_eager_loading, and
lazy_attributes.  These plugins don't add any real new features,
since you can do most of what they allow before simply by being a
little more explicit in your Sequel code.  However, some people
prefer a less explicit approach that uses a bit more magic, and now
Sequel can accommodate them.

* The identity_map plugin allows you to create a 1-1 correspondence
  of model objects to database rows via a temporary thread-local
  identity map.  It makes the following statement true:

    Sequel::Model.with_identity_map do
      Album.filter{(id > 0) & (id < 2)}.first.object_id == \
        Album.first(:id=>1).object_id
    end

  As the code above implies, you need to use the with_identity_map
  method with a block to use the identity mapping feature.

  By itself, identity maps don't offer much, but Sequel uses them as
  a cache when looking up objects by primary key or looking up
  many_to_one associated objects.  Basically, it can be used as a
  performance enhancer, and it also allows the support of the
  lazy_attributes plugin.

  The identity_map plugin is expected to be most useful in web
  applications.  With that in mind, here's a Rack middleware that
  wraps each request in a with_identity_map call, so the
  identity_map features are available inside the web app:

    Sequel::Model.plugin :identity_map
    class SequelIdentityMap
      def initialize(app)
        @app = app
      end

      def call(env)
        Sequel::Model.with_identity_map{@app.call(env)}
      end
    end

* The tactical_eager_loading plugin allows you to eagerly load an
  association for all models retrieved in the same group whenever
  one of the models accesses the association:

    # 2 queries total
    Album.filter{id<100}.all do |a|
      a.artists
    end

  In order for this to work correctly, you must use Dataset#all to
  load the records; you cannot iterate over them via Dataset#each.
  This is because eager loading requires that you have all records
  in advance, and when using Dataset#each you cannot know about
  later records in the dataset.

  Before, you could just be explicit about the associations you
  needed and make sure to eagerly load them using eager before
  calling Dataset#all.
* The lazy_attributes plugin builds on the identity_map and
  tactical_eager_loading plugins and allows you to create attributes
  that are lazily loaded from the database:

    Album.plugin :lazy_attributes, :review

  This will remove the :review attribute from being selected by
  default.  If you try to access the attribute after it is selected,
  it'll retrieve the value from the database.  If the object was
  retrieved with a group of other objects and an identity map is in
  use, it'll retrieve the lazy attribute for the entire group of
  objects at once, similar to the tactical_eager_loading plugin:

    # 2 queries total
    Sequel::Model.with_identity_map do
      Album.filter{id<100}.all do |a|
        a.review
      end
    end

  Before, you could just set the default selected columns for a
  model to not include the lazy attributes, and just use select_more
  to add them to any query where the resulting model objects will
  need the attributes.

* A many_through_many plugin was also added.  This very powerful
  plugin allows you to create associations to multiple objects
  through multiple join tables.  Here are some examples:

    # Assume the following many to many associations:
    Artist.many_to_many :albums
    Album.many_to_many :tags

    # Same as Artist.many_to_many :albums
    Artist.many_through_many :albums,
      [[:albums_artists, :artist_id, :album_id]]

    # All tags associated to any album this artist is associated to
    Artist.many_through_many :tags,
      [[:albums_artists, :artist_id, :album_id],
       [:albums, :id, :id],
       [:albums_tags, :album_id, :tag_id]]

    # All artists associated to any album this artist is associated
    # to
    Artist.many_through_many :artists,
      [[:albums_artists, :artist_id, :album_id],
       [:albums, :id, :id],
       [:albums_artists, :album_id, :artist_id]]

    # All albums by artists that are associated to any album this
    # artist is associated to
    Artist.many_through_many :artist_albums,
      [[:albums_artists, :artist_id, :album_id],
       [:albums, :id, :id],
       [:albums_artists, :album_id, :artist_id],
       [:artists, :id, :id],
       [:albums_artists, :artist_id, :album_id]]

  Basically, for each join table between this model and the
  associated model, you use an array with a join table name, left
  key name (key closer to this model), and right key name (key
  closer to the associated model).

  In usual Sequel fashion, this association type works not just for
  single objects, but it can also be eagerly loaded via eager or
  eager_graph.  There are numerous additional configuration options;
  please see the RDoc for details.

New bin/sequel Features
-----------------------

The bin/sequel command line tool now supports the following options:

* -C: Copies one database to another.  You must specify two database
  arguments.  Works similarly to Taps, copying the table schema,
  then the table data, then creating the indexes.

* -d: Dump the schema of the database in the database-independent
  migration format.

* -D: Dump the schema of the database in the database-specific
  migration format.

* -h: Display the help

* -t: Output the full backtrace if an exception is raised

The bin/sequel tool is now better about checking which options can
be used together.  It also now supports using the -L option multiple
times and having it load model files from multiple directory trees.

New Features
------------

* Dataset#qualify_to and #qualify_to_first_source were added.  They
  allow you to qualify unqualified columns in the current dataset to
  the given table or the first source.  This can be used to join a
  dataset that has unqualified columns to a new table which has
  columns with the same name.
  For example, take this dataset:

    ds = DB[:albums].select(:name).order(:name).filter(:id=>1)
    # SELECT name FROM albums WHERE (id = 1) ORDER BY name

  Let's say you want to join it to the artists table:

    ds2 = ds.join(:artists, :id=>:artist_id)
    # SELECT name FROM albums
    # INNER JOIN artists ON (artists.id = albums.artist_id)
    # WHERE (id = 1) ORDER BY name

  That's going to give you an error, as the artists table already
  has columns named id and name.  This new feature allows you to do
  the following:

    ds2 = ds.qualify_to_first_source.join(:artists, :id=>:artist_id)
    # SELECT albums.name FROM albums
    # INNER JOIN artists ON (artists.id = albums.artist_id)
    # WHERE (albums.id = 1) ORDER BY albums.name

  By doing this, all unqualified columns are qualified, so you get a
  usable query.  This is expected to be most useful for users that
  have a default order or filter on their models and want to join
  the model to another table.  Before you had to replace the
  filters, selection, etc. manually, or use qualified columns by
  default even though they weren't needed in most cases.

* Savepoints are now supported using SQLite and MySQL, assuming you
  are using a database version that supports them.  You need to pass
  the :savepoint option to Database#transaction to use a savepoint
  (a short sketch appears after this list).

* Model plugins can now depend on other plugins, simply by calling
  the Model.plugin method inside the plugin's apply method:

    module LazyAttributes
      def self.apply(model)
        model.plugin :tactical_eager_loading
      end
    end

* Model.plugin now takes a block which is passed to the plugin's
  apply and configure method (see Backwards Compatibility section
  for more information on the configure method).

* You can see which plugins are loaded for a model by using
  Model.plugins.

* You can use the Sequel.extension method to load extensions:

    Sequel.extension :pagination, :query

  This will only load extensions that ship with Sequel, unlike the
  Model.plugin method which will also load external plugins.

* You can now use Database#create_table? to create the table if it
  doesn't already exist (a very common need, it seems).  The schema
  plugin now supports Model.create_table? as well.

* #sql_subscript is now an allowed method on most SQL expression
  objects that Sequel generates.  Also, arguments to #sql_subscript
  can now be other expressions instead of just integers.

* Associations can now take a :cartesian_product_number option,
  which can be used to tell Sequel whether to turn on duplicate
  object detection when eagerly loading objects through eager_graph.
  This number should be 0 if the association can never create
  multiple rows for each row in the current table, 1 if it can
  create multiple rows for each row in the current table, and 2 if
  the association itself causes a cartesian product.

* On MySQL, Dataset#insert_ignore now affects #insert as well as
  multi_insert and import.

* Database#create_table now supports an :ignore_index_errors option,
  and Database#add_index now supports an :ignore_errors option.
  These are used by the schema_dumper when dumping a database schema
  to be restored on another database type, since indexes aren't
  usually required for proper operation and some indexes can't be
  transferred.

* The ADO adapter now takes a :provider option, which can be used to
  set the provider.

* The ADO adapter now takes a :command_timeout option, which tells
  the connection how long to wait before giving up and raising an
  exception.

* The Sequel.amalgalite adapter method was added.  Like the
  Sequel.sqlite method, you can call it with no arguments to get an
  in memory database.
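Here is a minimal sketch of the savepoint support mentioned in the
list above (the accounts table is hypothetical, and it assumes that
raising Sequel::Error::Rollback inside the savepoint block rolls
back only that savepoint):

  DB.transaction do                     # BEGIN
    DB[:accounts].insert(:name=>'outer')
    DB.transaction(:savepoint=>true) do # SAVEPOINT
      DB[:accounts].insert(:name=>'inner')
      raise Sequel::Error::Rollback     # rolls back to the savepoint
    end
  end                                   # COMMIT; the outer insert remains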
Other Improvements
------------------

* MySQL "commands out of sync" errors should no longer occur unless
  you are nesting queries (calling Dataset#each inside
  Dataset#each).  A bug dating at least to 2007 and possibly since
  the initial creation of the Sequel MySQL adapter was the cause.
  Before, SQL that caused a result set that was sent using a method
  where Sequel doesn't yield a result set would cause the "commands
  out of sync" error on the following query.  For example, the
  following code would cause the error:

    DB << "SHOW DATABASES"

  If for some reason a "commands out of sync" error does occur,
  Sequel will disconnect the connection from the connection pool, so
  it won't continually stay in the pool and raise errors every time
  it is used.

* The schema_dumper extension is much better about parsing defaults
  from the database.  It can now correctly parse most defaults on
  MySQL, SQLite, and PostgreSQL databases.  It no longer includes
  defaults that it can't parse to a ruby object unless a
  database-specific dump is requested.

* The schema_dumper extension now dumps tables in alphabetical
  order.

* Ordered and limited datasets are now handled correctly when using
  union, intersect, and except.  Also, union, intersect, and except
  now always return a from_self dataset, so further limiting,
  filtering, and ordering of them now works as expected.

* Dataset#graph now works correctly with a complex dataset without
  having to use from_self.  Before, code like the following didn't
  do what was expected:

    DB[:albums].
      graph(DB[:artists].filter{name > 'M'}, :id=>:artist_id)

  Before, the filter on DB[:artists] would be dropped.  Now, Sequel
  correctly uses a subselect.

* You can now specify serialization formats per column in the
  serialization plugin, either by calling the plugin multiple times
  or by using the new serialize_attributes method:

    Album.plugin :serialization
    Album.serialize_attributes :marshal, :review
    Album.serialize_attributes :yaml, :name
    Album.serialization_map # {:name=>:yaml, :review=>:marshal}

  The public API for the serialization plugin is still backwards
  compatible, but the internals have changed slightly to support
  this new feature.

* You can now use eager_graph to eagerly load associations for
  models that lack primary keys.

* The :eager_graph association option now works when lazily-loading
  many_to_many associations.

* Dataset#add_graph_aliases now works correctly even if
  set_graph_aliases hasn't been used.

* The PostgreSQL Database#tables method now assumes the public
  schema if a schema is not given and there is no default_schema.

* The PostgreSQL Database#indexes method no longer returns partial
  indexes or functional indexes.

* The MySQL Database#indexes method no longer returns indexes on
  partial columns (prefix indexes).

* Default values for String :text=>true and File columns on MySQL
  are ignored, since MySQL doesn't support them.  They are not
  ignored if you use text and blob, since then you are using the
  database-specific syntax and Sequel doesn't do translation when
  the database-specific syntax is used.

* On PostgreSQL, attempting to reset the primary key sequence for a
  table without a primary key no longer causes an error.

* Using a placeholder string in an association's :conditions option
  now works correctly (e.g. :conditions=>['n = ?', 1])

* An error is no longer raised if you attempt to load a plugin that
  has a DatasetMethods module but no public dataset methods.

* The check for dataset[n] where n is an integer was fixed.  It now
  raises an error instead of returning a limited dataset.
* On PostgreSQL, Dataset#insert with static SQL now works correctly.

* A reflection.rdoc file was added giving an overview of Sequel's reflection support.

* The Migrator now works correctly with file names like 001_12312412_file_name.rb.

* The association code now requires that the classes match when looking for a reciprocal association.

* An unlikely threading bug (race condition) was possible when using the validation_class_methods plugin. The plugin was refactored and now uses a mutex to avoid the issue. One of the refactoring changes makes it so that you can no longer use a class level validation inside a Class.new block (since inherited isn't called until the block finishes).

* The exception messages when Sequel.string_to_* fail have been fixed.

* The String :text=>true generic database type has been fixed when using the Firebird adapter.

Backwards Compatibility
-----------------------

* A plugin's apply method is now only called the first time a plugin is loaded. Plugins can now have a configure method that is called every time the plugin is loaded, and is always called after the instance methods, class methods, and dataset method submodules have been added to the model. This is different from apply, which is called before the submodules are loaded.

  If you are a plugin author, please check your implementation to make sure this doesn't cause problems for you. If you have questions, please post on the Sequel mailing list.

  This new plugin feature will make certain things a lot easier, and it should be mostly backwards compatible. However, if a plugin was previously expected to be loaded multiple times with the apply method called each time, it will no longer work correctly.

* The plugin_opts methods defined now include multiple args in an array if multiple args are given. Before, the plugin_opts methods just returned the first argument.

* Database#table_exists? no longer checks the cached schema information. By default, it will always do a database query (unless overridden in an adapter). This shouldn't affect the results, but if you were using the method a lot and expecting it to use cached information, it doesn't have the same performance characteristics.

* The internal storage of the :select option for datasets has changed. You can no longer use a hash as a way of aliasing columns. Dataset#select now does the translation from the hash to SQL::AliasedExpression instances. Basically, if you were using Dataset#clone directly with a :select option with hashes for aliasing, you should switch to using Dataset#select or changing the hashes to AliasedExpressions yourself.

sequel-5.63.0/doc/release_notes/3.10.0.txt

New Features
------------

* A real one_to_one association was added to Sequel, replacing the previous :one_to_one option of the one_to_many association. This is a fully backwards incompatible change, any code that uses the :one_to_one option of one_to_many will be broken in Sequel 3.10.0, as that option now raises an exception. Keeping backwards compatibility was not possible, as even the name of the association needs to be changed.
  Here are the code changes you need to make:

  * The association definition needs to change from one_to_many to one_to_one, with no :one_to_one option, and with the association name changed from the plural form to the singular form:

      # Before
      Lyric.one_to_many :songs, :one_to_one=>true
      # After
      Lyric.one_to_one :song

  * All usage of the association when eager loading or when getting reflections needs to use the new singular association name:

      # Before
      Lyric.eager(:songs).all
      Lyric.eager_graph(:songs).all
      Lyric.association_reflection(:songs)
      # After
      Lyric.eager(:song).all
      Lyric.eager_graph(:song).all
      Lyric.association_reflection(:song)

  Any Sequel plugins or extensions that deal with the internals of associations need to be made aware of the one_to_one association, and how it is different than one_to_many's previous :one_to_one option. Here are some internal changes that may affect you:

  * one_to_one associations are now cached like many_to_one associations instead of like one_to_many associations. So the cache includes the associated object or nil, instead of an array. Note that this change means that all custom :eager_loader options for one_to_one associations need to change to use this new caching scheme.

  * The one_to_one association setter method is now handled similarly to the many_to_one setter method, instead of using the internal one_to_many association add method.

  * Instead of raising an error when multiple rows are returned, one_to_one associations now use limit(1) to only return a single row.

  There were some other fixes made during these changes:

  * The one_to_one setter now accepts nil to disassociate the record. Previously, this raised an error.

  * If the one_to_one association already had a separate object associated, and you assigned a different object in the setter method, Sequel now disassociates the old object before associating the new object, fixing some potential issues if there is a UNIQUE constraint on the foreign key column.

  * Using the many_to_one association setter where the reciprocal association is a one_to_one association with a currently different cached associated object no longer raises an exception.

  * The nested_attributes and association_dependencies plugins both now correctly handle one_to_one associations.

  If you need any help migrating, please post on the Sequel Google Group or ask in the #sequel IRC channel.

* Both many_to_one and one_to_one associations now use before_set and after_set callbacks instead of trying to make the one_to_many and many_to_many associations' (before|after)_(add|remove) callbacks work. This change makes the code simpler, makes writing callbacks easier, and no longer requires Sequel to send a query to the database to get the currently associated object in the many_to_one association setter method (you can still do so manually in a before_set callback if you want to).

* Dataset#for_update was added as a default dataset method. Previously, it was only supported on PostgreSQL. It has been tested to work on PostgreSQL, MySQL, SQLite (where it is ignored), H2, and MSSQL.

* Dataset#lock_style was added as a backbone for Dataset#for_update, but allowing you to specify custom lock styles. These can either be symbols recognized by the adapters, or strings which are treated as literal SQL.

* Model#lock! was added, which uses Dataset#for_update to lock model rows for specific instances. Combined with the Dataset#for_update, Sequel now has an equivalent to ActiveRecord's pessimistic locking support.
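  For example, here is a minimal sketch of the new locking methods (the items table and Item model are hypothetical):

    DB.transaction do
      # Lock the matching rows until the transaction finishes
      items = DB[:items].filter(:active=>true).for_update.all

      # Model#lock! reloads the instance's row using FOR UPDATE
      item = Item[1]
      item.lock!
      item.update(:name=>'new name')
    end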
* A composition plugin was added, giving similar functionality to ActiveRecord's composed_of.

  The composition plugin allows you to easily define getter and setter instance methods for a class where the backing data is composed of other getters and decomposed to other setters. A simple example of this is when you have a database table with separate columns for year, month, and day, but where you want to deal with Date objects in your ruby code. This can be handled with:

    Model.composition :date, :mapping=>[:year, :month, :day]

  The :mapping option is optional, but if not used, you need to define custom composition and decomposition procs via the :composer and :decomposer options.

  Note that when using the composition object, you should not modify the underlying columns if you are also instantiating the composition, as otherwise the composition object values will override any underlying columns when the object is saved.

* An rcte_tree plugin was added, which uses recursive common table expressions to load all ancestors and descendants in a single query. If your database supports recursive common table expressions (PostgreSQL 8.4+, MSSQL 2005+, newer versions of Firebird), using recursive common table expressions to load all ancestors and descendants is significantly faster than storing trees as nested sets and using nested set queries. Usage:

    Model.plugin :rcte_tree

    # Lazy loading
    model = Model.first
    model.parent
    model.children
    model.ancestors # Populates :parent association as well
    model.descendants # Populates :children association as well

    # Eager loading - also populates the :parent and children
    # associations for all ancestors and descendants
    Model.filter(:id=>[1, 2]).eager(:ancestors, :descendants).all

    # Eager loading children and grandchildren
    Model.filter(:id=>[1, 2]).eager(:descendants=>2).all

    # Eager loading children, grandchildren, and great grandchildren
    Model.filter(:id=>[1, 2]).eager(:descendants=>3).all

* Dataset#first_source_table was added, giving you the unaliased version of the table for the first source.

* Add Sequel::BasicObject.remove_methods!, useful on ruby 1.8 if you require other libraries after Sequel that add methods to Object. For example, if YAML is required after sequel, then the following will raise an error:

    DB[:a].filter{x > y}

  because YAML adds the y method to all objects. Now, you can call Sequel::BasicObject.remove_methods!, which will remove those methods from Sequel::BasicObject, allowing them to be used as intended in the above DSL.

* Sequel associations now accept an :eager_loader_key option, which can be useful for associations to specify the column to use for the key_hash for custom :eager_loaders.

* A JDBC subadapter for the AS400 database was added.

Other Improvements
------------------

* The one_to_one setter method and the one_to_many and many_to_many remove_all methods now apply the association options (such as filters) on the appropriate dataset:

    Artist.one_to_many :good_albums, :class=>:Album,
      :conditions=>{:good=>true}
    a = Artist[10]
    a.remove_all_good_albums
    # Before: WHERE artist_id = 10
    # After: WHERE artist_id = 10 AND good IS TRUE

* Plugin loading now works correctly when the plugin module name is the same name as an already defined top level constant. This means that the active_model plugin should now work correctly if you require active_model before loading the Sequel plugin.

* The nested_attributes plugin now preserves nested attributes for *_to_one associations on validation failures.
* Transactions now work correctly on Oracle when using the JDBC adapter.

* Dataset#limit once again works correctly on MSSQL 2000. It was broken in Sequel 3.9.0.

* many_to_one associations now use limit(1) to ensure only one record is returned. If you don't want this (because maybe you are using the :eager_graph association option), you need to set the :key option to nil and use a custom :dataset option.

* many_to_one and one_to_many associations now work correctly with the association :eager option to eagerly load associations specified by :eager when lazy loading the association.

* The typecast_on_load plugin now correctly handles reloading/refreshing the object, both explicitly and implicitly on object creation.

* The schema parser and dumper now return tinyint columns as booleans when connecting to mysql using the do adapter, since DataObjects now returns the columns as booleans.

* The schema dumper now deals better with unusual or database specific primary key types when using the :same_db option.

* On ruby 1.8, Sequel::BasicObject now undefs private methods in addition to public and protected methods. So the following code now works as expected:

    DB[:a].filter{x > p} # WHERE x > p

* Sequel.connect with a block now returns the value of the block:

    max_price = Sequel.connect('sqlite://items.db') do |db|
      db[:items].max(:price)
    end

* MSSQL emulated offset support now works correctly when Sequel's core extensions are not loaded.

* Sequel::BasicObject now works correctly on rubinius, and almost all Sequel specs now pass on rubinius.

* The nested_attributes plugin now uses a better exception message when no matching associated object is found.

* Sequel now raises a more informative error if you attempt to use the native sqlite adapter with the sqlite3 gem instead of the sqlite3-ruby gem.

* Multiple complex expressions with the same operator are now combined for simpler SQL:

    DB[:a].filter(:a=>1, :b=>2).filter(:c=>3)
    # Before: (((a = 1) AND (b = 2)) AND (c = 3))
    # After: ((a = 1) AND (b = 2) AND (c = 3))

* The Sequel::Model dataset methods (class methods proxied to the model's dataset) and the Sequel::Dataset mutation methods (methods that have a ! counterpart to modify the object in place) have both been updated to use new dataset methods added in recent versions.

Backwards Compatibility
-----------------------

* The :one_to_one option of the one_to_many associations now raises an exception. Please see the section above about the new real one_to_one association.

* The change to apply the association options to the one_to_many and many_to_many remove_all methods has the potential to break some code that uses the remove_all method on associations that use association options. This is especially true for many_to_many associations, as filters in many_to_many associations will often reference columns in the associated table, while the dataset used in the remove_all method only contains the join table. Such cases should be handled by manually overriding the _remove_all association instance method in the class. It was determined that it was better to issue possibly invalid queries than to issue queries that make unexpected modifications.

* Dataset#group_and_count no longer orders the dataset by the count. Since it returns a modified dataset, if you want to order the dataset, just call order on the returned dataset.

* many_to_one associations now require a working :class option. Previously, if you provided a custom :dataset option, a working :class option was not required in some cases.
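  For example, the following hypothetical sketch shows an association with a custom :dataset option that now also needs a working :class option:

    Album.many_to_one :artist, :class=>:Artist,
      :dataset=>proc{Artist.filter(:id=>artist_id)}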
* The MSSQL shared adapter dataset methods switched from using the :table_options internal option key to using the :lock internal option key.

sequel-5.63.0/doc/release_notes/3.11.0.txt

= New Features

* A few new features were added to query logging. Sequel now includes execution time when logging queries. Queries that raise exceptions are now logged at ERROR level. You can now set the log_warn_duration attribute on the Database instance and queries that take longer than that will be logged at WARN level. By using different log levels, you can now only log queries that raise errors, or only log queries that take a long time.

    # The default - Log all successful queries at INFO level
    DB.log_warn_duration = nil

    # Log all successful queries at WARN level
    DB.log_warn_duration = 0

    # Log successful queries that take the database more than half a
    # second at WARN level, other successful queries at INFO level
    DB.log_warn_duration = 0.5

  All adapters included with Sequel have been modified to support the new logging API. The previous API is still available, so any external adapters should still work, though switching to the new logging API is encouraged.

* Sequel::Model now has a require_modification flag. If not set explicitly, it is enabled by default if the dataset provides an accurate number of rows matched by an update or delete statement. When this setting is enabled, Sequel will raise an exception if you attempt to update or delete a model object and it doesn't end up affecting exactly one row. For example:

    DB.create_table(:as){primary_key :id}
    class A < Sequel::Model; end
    a = A.create

    # delete object from database
    a.delete

    a.require_modification = false
    a.save # no error!
    a.delete # no error!

    a.require_modification = true
    a.save # Sequel::NoExistingObject exception raised
    a.delete # Sequel::NoExistingObject exception raised

  Like many other Sequel::Model settings, this can be set on a global, per class, and per instance level:

    Sequel::Model.require_modification = false # global
    Album.require_modification = true # class
    album.require_modification = false # instance

* An instance_filters plugin was added to the list of built in plugins, allowing you to add arbitrary filters when updating or destroying an instance. This allows you to continue using models when previously you would have had to drop down to using datasets to get the desired behavior:

    class Item < Sequel::Model
      plugin :instance_filters
    end

    # These are two separate objects that represent the same
    # database row.
    i1 = Item.first(:id=>1, :delete_allowed=>false)
    i2 = Item.first(:id=>1, :delete_allowed=>false)

    # Add an instance filter to the object. This filter is in effect
    # until the object is successfully updated or deleted.
    i1.instance_filter(:delete_allowed=>true)

    # Attempting to delete the object where the filter doesn't
    # match any rows raises an error.
    i1.delete # raises Sequel::Error

    # The other object that represents the same row has no
    # instance filters, and can be updated normally.
    i2.update(:delete_allowed=>true)

    # Even though the filter is still in effect, since the
    # database row has been updated to allow deleting,
    # delete now works.
    i1.delete

* An :after_connect database option is now supported. If provided, the option value should be a proc that takes a single argument.
  It will be called with the underlying connection object before the connection object is added to the connection pool, allowing you to set per connection options in a thread-safe manner. This is useful for customizations you want set on every connection that Sequel doesn't already support. For example, on PostgreSQL if you wanted to set the schema search_path on every connection:

    DB = Sequel.postgres('dbname', :after_connect=>(proc do |conn|
      conn.execute('SET search_path TO schema1,schema2')
    end))

* A :test database option is now supported. If set to true, it automatically calls test_connection to make sure a connection can be made before returning a Database instance. For backwards compatibility reasons, this is not set to true by default, but it is possible that the default will change in a future version of Sequel.

* The Dataset#select_append method was added, which always appends to the existing selected columns. It operates identically to select_more, except in the case that no columns are currently selected:

    ds = DB[:a]
    # SELECT * FROM a
    ds.select_more({:id=>DB[:b].select(:a_id)}.as(:in_b))
    # SELECT id IN (SELECT a_id FROM b) AS in_b FROM a
    ds.select_append({:id=>DB[:b].select(:a_id)}.as(:in_b))
    # SELECT *, id IN (SELECT a_id FROM b) AS in_b FROM a

* The Dataset#provides_accurate_rows_matched? method was added which allows you to see if the dataset will return the actual number of rows matched/affected by an update or delete call.

* Sequel will now emulate DISTINCT ON support using GROUP BY on MySQL. On MySQL, GROUP BY is similar to DISTINCT ON, except that the order of returned rows is not deterministic.

* Support for connecting to Microsoft SQL Server using the JTDS JDBC driver was added to the jdbc adapter.

* JNDI connection strings are now supported in the JDBC adapter.

* The JDBC adapter should now work in situations where driver auto-loading has problems, such as when using Tomcat or Trinidad.

* Sequel's JDBC adapter schema parsing now supports a :scale option, useful for numeric/decimal columns.

* Sequel's schema parsing on Microsoft SQL Server now supports :column_size and :scale options.

* When connecting to SQLite, a Database#sqlite_version method is available that gives you the SQLite version as an integer (e.g. 30613 for 3.6.13).

= Other Improvements

* Sequel no longer raises an error if you give Dataset#filter or related method an empty argument such as {}, [], or ''. This allows code such as the following to work:

    h = {}
    h[:name] = name if name
    h[:number] = number if number
    ds = ds.filter(h)

  Before, this would raise an error if both name and number were nil.

* Numeric and decimal columns with a 0 scale are now treated as integer columns by the model typecasting code, since such columns cannot store non-integer values.

* Calling Database#disconnect when using the single threaded connection pool no longer raises an error if there is no current connection.

* When using the :ignore_index_errors options to Database#create_table, correctly swallow errors raised by Sequel due to the adapter not supporting the given index type.

* The JDBC adapter no longer leaks ResultSets when retrieving metadata.

* You can now connect to PostgreSQL when using ruby 1.9 with the -Ku switch.

* When using the native MySQL adapter, only tinyint(1) columns are now returned as booleans when using the convert_tinyint_to_bool setting (the default). Previously, all tinyint columns would be converted to booleans if the setting was enabled.
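  If you don't want any tinyint columns converted to booleans, the conversion can still be disabled entirely (a sketch, assuming the module-level setting used by the native MySQL adapter in this era of Sequel):

    Sequel::MySQL.convert_tinyint_to_bool = false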
* Correctly handle inserts returning the autogenerated keys when using MySQL JDBC Driver version 5.1.12 with the jdbc adapter.

* The native MySQL adapter now supports :config_default_group and :config_local_infile options.

* When connecting to SQLite, you can provide the :auto_vacuum, :foreign_keys, :synchronous, and :temp_store options for making the appropriate PRAGMA setting on the database in a thread-safe manner. The previous thread-unsafe PRAGMA setting methods are available, but their use is discouraged.

* Sequel will not enable savepoints when connecting to SQLite unless the version is 3.6.8 or greater.

* Using limit with distinct now works correctly on Microsoft SQL Server.

* Database#rename_table now works correctly on Microsoft SQL Server.

* If you specify an explicit :provider when using the ADO adapter, transactions will now work correctly. The default :provider uses a new native connection for each query, so it cannot work with transactions, or things like temporary tables.

* If you specify an explicit :provider when connecting to Microsoft SQL Server using the ADO adapter (e.g. SQLNCLI10 or SQLNCLI), Sequel is now able to provide an accurate number of rows modified and deleted.

* Using set_column_allow_null with a decimal column with a precision and scale now works correctly when connecting to Microsoft SQL Server.

* You can now connect to Microsoft SQL Server using the dbi adapter.

* Sequel now recognizes the NUMBER database type as a synonym for NUMERIC and DECIMAL, which may help some Oracle users.

* Transactions can now be rolled back correctly when connecting to Oracle via JDBC.

* The active_model plugin now supports ActiveModel 3.0.0beta2.

* Many documentation improvements were made, including the addition of a dataset basics guide, an association basics guide, an expanded virtual row guide, and the separation of the Sequel::Dataset RDoc page into sections. Additionally, the RDoc class/method documentation now contains links to the appropriate guides.

= Backwards Compatibility

* When connecting to SQLite, Sequel now automatically sets the foreign_keys PRAGMA to true, which will make SQLite 3.6.19+ use database enforced foreign key constraints. If you do not want the database to enforce the foreign key constraints, you should use the :foreign_keys=>false option when connecting to the database.

* Sequel no longer creates #{plugin_name}_opts class, instance, and dataset methods for each plugin loaded. No built-in plugin used them, and I couldn't find an external plugin that did either.

* The Model#associations method is no longer available if the default Associations plugin is not loaded due to the SEQUEL_NO_ASSOCIATIONS constant or environment variable being set.

* DISTINCT ON support is turned off by default, and only enabled when using PostgreSQL, since that appears to be the only database that supports it. Previously, it was enabled by default and most common adapters turned it off.

sequel-5.63.0/doc/release_notes/3.12.0.txt

= Migration Changes

* A TimestampMigrator has been added to Sequel, and is automatically used if any migration has a version greater than 20000100. This migrator operates similarly to the default ActiveRecord migrator, in that it allows missing migrations.
  It differs from the ActiveRecord migrator in that it supports migrations with the same timestamp/version as well as a wide variety of timestamp formats (though the ActiveRecord default of YYYYMMDDHHMMSS is recommended and should be used in portable code).

  Sequel still defaults to the old migrator, but you can use the new one without making changes to your old migrations. Just make sure your new migration starts with a version greater than 20000100, and Sequel will automatically convert the previous schema table to the new format.

* A new migration DSL was added:

    Sequel.migration do
      up do
      end

      down do
      end
    end

  The old style of using a Sequel::Migration subclass is still supported, but it is recommended that new code use the new DSL.

* The default migrator also had significant issues fixed. First, it now saves the migration version after each migration, instead of after all migrations, which means Sequel won't attempt to apply already applied migrations if there was previously an error when applying multiple migrations at once on a database that didn't support transactional schema modification. Second, duplicate migration versions in the default migrator now raise an exception, as do missing migration versions. Neither should happen when using the default migrator, which requires consecutive integer versions, similar to the old ActiveRecord migrator.

* Execution times for migrations are now logged to the database's loggers.

= New Plugins

* A sharding plugin has been added that allows model objects to work well with sharded databases. When using it, model objects know which shard they were retrieved from, so when you save the object, it is saved back to that shard. The sharding plugin also works with associations, so associated records are retrieved from the same shard the main object was retrieved from. The sharding plugin also works with both methods of eager loading, and provides methods that you can use to create objects on specific shards.

* An update_primary_key plugin has been added that allows Sequel to work correctly if you modify the primary key of a model object. This should not be necessary if you are using surrogate keys, but if your database uses natural primary keys which can change, this should be helpful.

* An association_pks plugin has been added that adds association_pks and association_pks= methods to model objects for both one_to_many and many_to_many associations. The association_pks method returns an array of primary key values for the associated objects, and the association_pks= method modifies the database to ensure that the object is only associated to the objects specified by the array of primary keys provided to it.

* A string_stripper plugin has been added that strips all strings that are assigned to attribute values. This is useful for web applications where you want to easily remove leading and trailing whitespace in form entries before storing them in the database.

* A skip_create_refresh plugin has been added that skips the refresh after you save a new model object. On most databases, Sequel refreshes the model object after inserting it in order to get values for all of the columns. For performance reasons, you can use this plugin to skip the refresh if it isn't necessary for you.

= Other New Features

* Sequel::Model#set_fields and update_fields were added. These methods have a similar API to set_only and update_only, but they operate differently.
  While set_only and update_only operate over the hash, these methods operate over the array of fields, so they don't raise errors if the hash contains fields not in the array:

    params = {:a=>1, :b=>2, :c=>3}
    album = Album[1]

    # raises Error because :a is not in the fields
    album.set_only(params, [:b, :c])

    # Just sets the value of album.b and album.c
    album.set_fields(params, [:b, :c])

  Other than handling entries in the hash that aren't in the array, set_fields and update_fields also handle entries not in the hash differently:

    # Doesn't modify the object, since the hash is empty
    album.set_only({}, [:b, :c])

    # Sets album.b and album.c to nil, since they aren't in the hash
    album.set_fields({}, [:b, :c])

* The :eager_loader association option has a new API, though the previous API still works. Instead of accepting three arguments, it can now accept a single hash argument, which will use the :key_hash, :rows, and :associations keys for the previous three arguments. The hash will also contain a :self key whose value is the dataset doing the eager load, which was not possible to determine using the old API.

* Sequel::SQL::Expression#hash has been added so that the objects are now safe to use as hash keys.

* A Dataset#order_prepend method has been added allowing you to prepend to an existing order. This is useful if you want to modify a dataset's order such that it first orders by the columns you provide, but for any rows where the columns you provide are equal, uses the existing order to further order the dataset:

    ds.order(:albums__name).order_prepend(:artists__name)
    # ORDER BY artists.name, albums.name

* When creating foreign key columns, you can now use a :deferrable option to set up a foreign key constraint that is not checked until the end of the transaction:

    DB.create_table(:albums) do
      primary_key :id
      String :name
      foreign_key :artist_id, :artists, :deferrable=>true
    end

* many_to_many associations now support a :join_table_block option that is used by the add/remove/remove_all methods. It can modify the dataset to ensure that certain columns are included when inserting or to add a filter so that only certain records are deleted. It's useful if you have a many_to_many association that is filtered to only a subset of the matching rows in the join table.

* The single_table_inheritance plugin now supports :model_map and :key_map options to set up a custom mapping of column values to model classes. For simple situations such as when you are mapping integer values to certain classes, a :model_map hash is sufficient:

    Employee.plugin :single_table_inheritance, :type_id,
      :model_map=>{1=>:Staff, 2=>:Manager}

  Here the :model_map keys are type_id column values, and the :model_map values are symbols or strings specifying class names. For more complex conditions, you can use a pair of procs:

    Employee.plugin :single_table_inheritance, :type_name,
      :model_map=>proc{|v| v.reverse},
      :key_map=>proc{|klass| klass.name.reverse}

  Here the type_name column is a string column holding the reverse of the class's name.

* The single_table_inheritance plugin now correctly sets up subclass filters for middle tables in a class hierarchy with more than 2 levels. For example, with this code:

    class Employee < Sequel::Model; end
    Employee.plugin :single_table_inheritance, :kind
    class Manager < Employee; end
    class Executive < Manager; end

  Sequel previously would not return Executives if you used Manager.all. It now correctly recognizes subclasses so that it will return both Managers and Executives.
* Sequel::Model.qualified_primary_key_hash has been added, giving you a hash that can be used for filtering. It is similar to primary_key_hash, but it qualifies the keys with the model's table. It's useful if you have joined the table to another table that has columns with the same name, but you want to only look for a single model object in that dataset.

* For consistency, you can now use Dataset#order_append as an alias for order_more.

= Other Improvements

* Sequel now correctly removes schema entries when altering tables. Previously, some adapters that had to query the existing schema when altering tables resulted in the previous schema being cached.

* Sequel::Model::Errors#on now always returns nil if there are no errors on the attribute. Previously, it would return an empty array in certain cases. Additionally, Sequel::Model::Errors#empty? now returns true if there are no errors, where in certain cases it would return false even if there were no errors.

* The schema_dumper extension now works with tables specified as Sequel::SQL::Identifiers.

* Sequel now recognizes the timestamp(N) with(out) time zone column type.

* The lazy_attributes plugin no longer requires the core extensions to work correctly.

* DatabaseDisconnectError support has been added to the ODBC adapter, allowing Sequel to detect disconnects and remove the connection from the connection pool.

* A leak of JDBC statement objects when using transactions was fixed in the jdbc adapter.

* The jdbc adapter now gives a nicer error message if you use a connection string that it doesn't recognize and there is an error when connecting.

* Temporary table creation was fixed on Microsoft SQL Server, but it is not recommended as it changes the name of the table. If you use Microsoft SQL Server, you should prefix your temporary table names with # and use the regular create table method.

* A large number of guides were added to Sequel to make it easier for new and existing users to learn more about Sequel. The following guides were added:

  * Querying in Sequel
  * Migration and Schema Modification
  * Model Hooks
  * Model Validations
  * Sequel for SQL Users
  * Sequel for ActiveRecord Users

* RDoc section support was added to Sequel::Database, making the method documentation easier to read.

= Backwards Compatibility

* Sequel::Database now defines the indexes and tables methods, even if the adapter does not implement them, similar to how connect and execute are defined. Previously, you could use respond_to? to check if the adapter supported them, now they raise Sequel::NotImplemented if the database adapter does not implement them.

* Sequel used to raise NotImplementedError in certain default definitions of methods inside Sequel::Database and Sequel::Dataset, when the methods were supposed to be overridden in subclasses. Sequel now uses a Sequel::NotImplemented exception class for these exceptions, which is a subclass of Sequel::Error.
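  For example (a minimal sketch), you can now rescue the specific exception class when probing for adapter capabilities:

    begin
      DB.tables
    rescue Sequel::NotImplemented
      # this adapter doesn't implement table listing
    end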
  Now, when overriding before hooks in plugins, Sequel always does actions before calling super, and when overriding after hooks in plugins, Sequel always does actions after calling super.

* The hook_class_methods plugin no longer skips later after hooks if a previous after hook returns false. That behavior now only occurs for before hooks.

* Sequel now only removes primary key values when updating objects if you are saving the entire object and you have not modified the values of the primary keys. Previously, Sequel would remove primary key values when updating even if you specified the primary key column specifically or the primary key column was modified and you used save_changes/update.

* Sequel now uses explicit methods instead of aliases for certain methods. This should only affect you if for example you overrode Dataset#group to do one thing and wanted Dataset#group_by to do the default action. Now, Dataset#group_by, and methods like it, are explicit methods that just call the methods they previously aliased. This also means that if you were overriding Dataset#group and explicitly aliasing group_by to it, you no longer need the alias.

* The single_table_inheritance plugin now uses IN instead of = for subclass filters. This could lead to poor performance if the database has a very bad query planner.

* The private transaction_statement_object method was removed from the JDBC adapter, and Sequel will no longer check for the presence of the method in the transaction code.

* The Sequel::Migrator object is now a class instead of a module, and has been pretty much rewritten. If you were using any methods of it besides apply and run, they no longer work.

sequel-5.63.0/doc/release_notes/3.13.0.txt

= New Plugins

* A json_serializer plugin was added that allows you to serialize model instances or datasets to JSON using to_json. It requires the json library. The API was modeled on ActiveRecord's JSON serialization support. You can use :only and :except options to specify the columns included, :include to specify associations to include, as well as pass options to nested associations using a hash. In addition to serializing to JSON, it also adds support for parsing JSON to model objects via JSON.parse or #from_json.

* An xml_serializer plugin was added that allows you to serialize model instances or datasets to XML. It requires the nokogiri library. It has a similar API to the json_serializer plugin, using to_xml instead of to_json, and the from_xml class method instead of JSON.parse.

* A tree plugin was added that allows you to treat Sequel::Model objects as being part of a tree. It provides similar features to rcte_tree, but works on databases that don't support recursive common table expressions. In addition to the standard parent and children associations, it provides instance methods to get the ancestors, descendants, and siblings of the given tree node, and class methods to get the roots of the tree.

* A list plugin was added that allows you to treat Sequel::Model objects as being part of a list. This adds instance methods to get the next and prev items in the list, or to move the item to a specific place in the list. You can specify that all rows in the table belong to the same list, or specify arbitrary scopes so that the same table can contain many separate lists.
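  As a minimal sketch of the list plugin (the :scope option and the move_to method are assumptions based on the description above):

    class TodoItem < Sequel::Model
      plugin :list, :scope=>:user_id # a separate list per user

    end

    item = TodoItem.first
    item.next       # next item in this user's list
    item.prev       # previous item in this user's list
    item.move_to(3) # move the item to position 3 in its list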
* Sequel now supports prepared transactions/two-phase commit on PostgreSQL, MySQL, and H2. You can specify that you want to use prepared transactions using the :prepare option which should be some transaction id string: DB.transaction(:prepare=>'some string') do ... end Assuming that no exceptions are raised in the transaction block, Sequel will prepare the transaction. You can then commit the transaction later: DB.commit_prepared_transaction('some string') If you need to rollback the prepared transaction, you can do so as well: DB.rollback_prepared_transaction('some string') * Sequel now supports customizable transaction isolation levels on PostgreSQL, MySQL, and Microsoft SQL Server. You can specify the transaction isolation level to use for any transaction using the :isolation option with an :uncommitted, :committed, :repeatable, or :serializable value: DB.transaction(:isolation=>:serializable) do ... end You can also set the default isolation level for transactions via the transaction_isolation_level Database attribute: DB.transaction_isolation_level = :committed If you are connecting to Microsoft SQL Server, it is recommended that you set a default transaction isolation level if you plan on using this feature. * You can specify a NULLS FIRST/LAST ordering by using the :nulls=>:first/:last option to asc and desc: Album.filter(:release_date.desc(:nulls=>:first), :name.asc(:nulls=>:last)) # ORDER BY release_date DESC NULLS FIRST, # name ASC NULLS LAST This syntax is supported by PostgreSQL 8.3+, Firebird 1.5+, Oracle, and probably some other databases as well, and makes it possible for the user to specify whether NULL values should sort before or after other values. * Sequel::Model.find_or_create now accepts a block that is a yielded a new model object to be created if an existing model object is not found. Node.find_or_create(:name=>'A'){|i| i.parent_id = 4} * The :frame option for windows and window functions can now be a string that is used literally in the SQL. This is necessary if you want to specify a custom frame, such as one that uses a specific number of rows preceding or following. * Savepoints are now supported on H2. * A :methods_module association option was added, allowing you to specify the module into which association instance methods are placed. By default, it uses the module containing the column accessor methods. = Other Improvements * The :encoding option for the native MySQL adapter should now work correctly in all cases. This fix was included in 3.12.1. * Sequel now handles arrays of two element arrays automatically when using them as the value of a filter hash: DB[a].filter([:a, :b]=>[[1, 2], [3, 4]]) Previously, you had to call .sql_array on the array in order to tell Sequel that it was a value list and not a conditions specifier. * Sequel no longer attempts to use class polymorphism in the class_table_inheritance plugin if you don't specify a cti_key. * When using the native SQLite adapter, prepared statements are now cached per connection for increased performance. Previously, Sequel prepared a new statement for every query. * tinyint(1) columns are now handled as booleans when connecting to MySQL via JDBC. * On PostgreSQL, if no :schema option is provided for Database#tables, #table_exists?, or #schema, and no default_schema is used, assume all schemas except the default non-public ones. Previously, it assumed the public schema for tables and table_exists?, but did not assume any schema for #schema. 
  This fixes issues if you use table names that overlap with table names in the information_schema, such as domains. It's still recommended that you specify a default_schema if you are using a schema other than public.

* Unsigned integers are now handled correctly in the schema dumper.

* Sequel::SQL::PlaceholderLiteralString is now a GenericExpression subclass, allowing you to treat it like most other Sequel expression objects:

    '(a || ?)'.lit(:b).like('Test%')
    # ((a || b) LIKE 'Test%')

* Sequel now supports the bitwise shift operators (<< and >>) on Microsoft SQL Server by emulating them.

* Sequel now supports most bitwise operators (&, |, ^, <<, >>) on H2 by emulating them. The bitwise complement operator is not yet supported.

* Sequel now logs the SQL queries that are sent when connecting to MySQL.

* If a plugin cannot be loaded, Sequel now gives a more detailed error message.

= Backwards Compatibility

* Array#sql_array and the Sequel::SQL::SQLArray class are now considered deprecated. Use the Array#sql_value_list and the Sequel::SQL::ValueList class instead. SQLArray is now just an alias for ValueList, but it now is an Array subclass instead of a Sequel::SQL::Expression subclass.

* Using the ruby bitwise xor operator (^) on PostgreSQL now uses PostgreSQL's bitwise xor operator (#) instead of PostgreSQL's exponentiation operator (^). If you want exponentiation, use the power function.

* Using the ruby bitwise complement operator (~) on MySQL now returns a signed integer instead of an unsigned integer, for better compatibility with other databases.

* Using nil as a case expression value (the 2nd argument to Hash#case and Array#case) will now use NULL as the case expression value, instead of omitting the case expression value:

    # 3.12.0
    {1=>2}.case(0, nil)
    # CASE WHEN 1 THEN 2 ELSE 0 END

    # 3.13.0
    {1=>2}.case(0, nil)
    # CASE NULL WHEN 1 THEN 2 ELSE 0 END

  In general, you would never use nil explicitly, but the new behavior makes more sense if you have a variable that might be nil:

    parent_id = Node[1].parent_id
    {1=>2}.case(0, parent_id)

  If parent_id IS NULL/nil, then previously Sequel would have generated unexpected SQL. If you don't want a case expression value to be used, do not pass a second argument to #case.

* Some internal transaction methods now take an optional options hash, so if you have a custom adapter, you will need to make changes.

* Some internal association methods now take an optional options hash.

* Some Rakefile task names were modified in the name of consistency:

    spec_coverage -> spec_cov
    integration -> spec_integration
    integration_cov -> spec_integration_cov

sequel-5.63.0/doc/release_notes/3.14.0.txt

= New Features

* Dataset#grep now accepts :all_patterns, :all_columns, and :case_insensitive options. Previously, grep would use a case sensitive search where it would match if any pattern matched any column.
  These three options give you more control over how the pattern matching will work:

    dataset.grep([:a, :b], %w'%test% foo')
    # WHERE ((a LIKE '%test%') OR (a LIKE 'foo')
    #   OR (b LIKE '%test%') OR (b LIKE 'foo'))

    dataset.grep([:a, :b], %w'%foo% %bar%', :all_patterns=>true)
    # WHERE (((a LIKE '%foo%') OR (b LIKE '%foo%'))
    #   AND ((a LIKE '%bar%') OR (b LIKE '%bar%')))

    dataset.grep([:a, :b], %w'%foo% %bar%', :all_columns=>true)
    # WHERE (((a LIKE '%foo%') OR (a LIKE '%bar%'))
    #   AND ((b LIKE '%foo%') OR (b LIKE '%bar%')))

    dataset.grep([:a, :b], %w'%foo% %bar%', :all_patterns=>true, :all_columns=>true)
    # WHERE ((a LIKE '%foo%') AND (b LIKE '%foo%')
    #   AND (a LIKE '%bar%') AND (b LIKE '%bar%'))

    dataset.grep([:a, :b], %w'%test% foo', :case_insensitive=>true)
    # WHERE ((a ILIKE '%test%') OR (a ILIKE 'foo')
    #   OR (b ILIKE '%test%') OR (b ILIKE 'foo'))

* When using the schema plugin, you can now provide a block to the create_table methods to set the schema and create the table in the same call:

    class Artist < Sequel::Model
      create_table do
        primary_key :id
        String :name
      end
    end

* The tree plugin now accepts a :single_root option, which uses a before_save hook to attempt to ensure that there is only a single root in the tree. It also adds a Model.root method to get the single root of the tree.

* The tree plugin now adds a Model#root? instance method to check if the current node is a root of the tree.

* Model#save now takes a :raise_on_failure option which will override the object's raise_on_save_failure setting. This makes it easier to get the desired behavior (raise or just return false) in library code without using a begin/ensure block.

* The Database#adapter_scheme instance method was added, which operates the same as the class method.

* Sequel now handles the literalization of OCI8::CLOB objects in the Oracle adapter.

= Other Improvements

* When using the timezone support, Sequel will now correctly load times and datetimes in standard time when the current timezone is in daylight time, or vice versa. Previously, if you tried to load a time or datetime in December when in July in a timezone that used daylight time, it would be off by an hour.

* The rcte_tree plugin now works correctly when a :conditions option is used.

* The single_table_inheritance plugin now works correctly when the class discriminator column has the same name as an existing ruby method (such as type).

* Database#each_server now works correctly when a connection string is used to connect, instead of an options hash.

* Model#destroy now respects the object's use_transactions setting, instead of always using a transaction.

* Model#exists? now uses a simpler and faster query.

* Sequel now handles the aggregate methods such as count and sum correctly on Microsoft SQL Server when using an ordered dataset with a clause such as DISTINCT or GROUP and without a limit.

* Sequel now handles rename_table correctly on Microsoft SQL Server when using a case sensitive collation, or when qualifying the table with a schema.

* Sequel now parses the schema correctly on Oracle when the same table name is used in multiple schemas.

* Sequel now handles OCIInvalidHandle errors when disconnecting in the Oracle adapter.

* Sequel now raises a Sequel::Error instead of an ArgumentError if the current or target migration version does not exist.

* When a mismatched number of composite keys are used in associations, Sequel now uses a more detailed error message.

* Significant improvements were made to the Dataset and Model RDoc documentation.
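As an example of the new :raise_on_failure option described above (a minimal sketch, assuming an album with failing validations):

    album.save(:raise_on_failure=>false)
    # returns a falsy value on hook/validation failure,
    # instead of raising an exception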
= Backwards Compatibility

* Model#valid? now must accept an optional options hash.

* The Model#save_failure private method was renamed to raise_hook_failure.

* The LOCAL_DATETIME_OFFSET_SECS and LOCAL_DATETIME_OFFSET constants have been removed from the Sequel module.

* Sequel now uses obj.to_json instead of JSON.generate(obj). This shouldn't affect backwards compatibility, but did fix a bug in certain cases.

sequel-5.63.0/doc/release_notes/3.15.0.txt

= Performance Enhancements

* A mysql2 adapter was added to Sequel. It offers a large (2-6x) performance increase over the standard mysql adapter. In order to use it, you need to install mysql2, and change your connection strings to use mysql2:// instead of mysql://.

* Support for sequel_pg was added to the native postgres adapter, when pg is being used as the backend. sequel_pg also offers a large (2-6x) performance increase over the default row fetching code that the Sequel postgres adapter uses. In order to use it, you just need to install sequel_pg, and the postgres adapter will pick it up automatically.

* Mass assignment has been made about 10x faster by caching the allowed setter methods in the model.

= Other Improvements

* The following construct is now safe to use in environments that reload code without unloading existing constants:

    class MyModel < Sequel::Model(DB[:table])
    end

  Previously, this would raise a superclass mismatch TypeError.

* Sequel now handles the case where both an implicit and an explicit table alias are given to join_table, preferring the explicit alias. This can happen if you are using models with aliased table names and eager graphing them. Previously, this would result in invalid SQL, with both aliases being used.

* You can use an aliased table for the :join_table option of a many_to_many association.

* The active_model plugin now supports the final release of ActiveModel 3.0.0.

* Typecasting now works correctly for attributes loaded lazily when using the lazy_attributes plugin.

* The class_table_inheritance plugin now works with non-integer primary keys on SQLite.

* Temporary tables are now ignored when parsing the schema on PostgreSQL.

* On MySQL, an :auto_increment key with a true value is added to the Database#schema output hash if the related column is auto incrementing.

* The mysql adapter now handles Mysql::Error exceptions raised when disconnecting.

* On SQLite, emulated alter_table commands that require dropping the table now preserve the foreign key information, if SQLite foreign key support is enabled (it is by default).

* DSN-less connections now work correctly in more cases in the ODBC adapter.

* A workaround has been added for a bug in the Microsoft SQL Server JDBC Driver 3.0, involving it incorrectly returning a smallint instead of a char type for the IS_AUTOINCREMENT metadata value.

* A bug in the error handling when connecting to PostgreSQL using the do (DataObjects) adapter has been fixed.

= Backwards Compatibility

* The caching of allowed mass assignment methods can result in the incorrect exception class being raised if you manually undefine instance setter methods in the model class. If you do this, you need to clear the setter methods cache manually:

    MyModel.clear_setter_methods_cache

sequel-5.63.0/doc/release_notes/3.16.0.txt

= New Adapter

* A swift adapter was added to Sequel.
  Swift is a relatively new ruby database library, built on top of a relatively new backend called dbic++. While not yet considered production ready, it is very fast.

  The swift adapter is about 33% faster and 40% more memory efficient for selects than the postgres adapter using pg with sequel_pg, though it is slower and less memory efficient for inserts and updates.

  Sequel's swift adapter currently supports only PostgreSQL and MySQL, but support for other databases will probably be added in the future.

= Other Improvements

* Sequel now correctly literalizes DateTime objects on ruby 1.9 for databases that support fractional seconds.

* The identity_map plugin now handles composite keys in many_to_one associations.

* The rcte_tree plugin now works when the model's dataset does not select all columns. This can happen when using the lazy_attributes plugin on the same model.

* Sequel now supports INTERSECT and EXCEPT on Microsoft SQL Server 2005+.

* The Database#create_language method in the shared PostgreSQL adapter now accepts a :replace option to replace the currently loaded procedural language if it already exists. This option is ignored for PostgreSQL versions before 9.0.

* The identity_map plugin now handles cases where the plugin is loaded separately by two different models.

= Backwards Compatibility

* While not technically backwards compatibility related, it was discovered that the identity_map plugin is incompatible with the standard eager loading of many_to_many and many_through_many associations. If you want to eagerly load those associations and use the identity_map plugin, you should use eager_graph instead of eager.

sequel-5.63.0/doc/release_notes/3.17.0.txt

= New Features

* You can now change the level at which Sequel logs SQL statements, by calling Database#sql_log_level= with the method name symbol. The default is still :info for backwards compatibility. Previously, you had to use a proxy logger to get similar capability.
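  For example:

    DB.sql_log_level = :debug
    DB[:artists].all # now logged at DEBUG level instead of INFO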
* Database#each_server now works correctly when using the jdbc and do adapters and a connection string without a separate :adapter option. * You can now clone many_through_many associations. * The default wait_timeout used by the mysql and mysql2 adapters was decreased slightly so that it works correctly with MySQL database servers that run on Windows. * Many improvements were made to the AS400 jdbc subadapter. * Many improvements were made to the swift adapter and subadapters. * Dataset#ungraphed now removes any cached graph aliases set with set_graph_aliases or add_graph_aliases. sequel-5.63.0/doc/release_notes/3.18.0.txt000066400000000000000000000100701434214120600177630ustar00rootroot00000000000000= New Features * Reversible migration support has been added: Sequel.migration do change do create_table(:artists) do primary_key :id String :name, :null=>false end end end The change block acts the same way as an up block, except that it automatically creates a down block that reverses the changes. So the above is equivalent to: Sequel.migration do up do create_table(:artists) do primary_key :id String :name, :null=>false end end down do drop_table :artists end end The following methods are supported in a change block: * create_table * add_column * add_index * rename_column * rename_table * alter_table (supporting the following methods): * add_column * add_constraint * add_foreign_key (with a symbol, not an array) * add_primary_key (with a symbol, not an array) * add_index * add_full_text_index * add_spatial_index * rename_column Use of an other method in a change block will result in the creation of a down block that raises an exception. * A to_dot extension has been added that adds a Dataset#to_dot method, which returns a string that can be used as input to the graphviz dot program in order to create visualizations of the dataset's abstract syntax tree. Examples: * http://sequel.jeremyevans.net/images/to_dot_simple.gif * http://sequel.jeremyevans.net/images/to_dot_complex.gif Both the to_dot extension and reversible migrations support were inspired by Aaron Patterson's recent work on ActiveRecord and ARel. * The user can now control how the connection pool handles attempts to access shards that haven't been configured. The default is still to assume the :default shard. However, you can specify a different shard using the :servers_hash option when connecting to the database: DB = Sequel.connect(..., :servers_hash=>Hash.new(:some_shard)) You can also use this feature to raise an exception if an unconfigured shard is used: DB = Sequel.connect(..., :servers_hash=>Hash.new{raise ...}) * The mysql and mysql2 adapters now both support the :read_timeout and :connect_timeout options. read_timeout is the timeout in seconds for reading back results of a query, and connect_timeout is the timeout in seconds before a connection attempt is abandoned. = Other Improvements * The json_serializer plugin will now typecast column values for columns with unrestricted setter methods when parsing JSON into model objects. It now also calls the getter method when creating the JSON, instead of directly taking the values from the underlying hash. * When parsing the schema for a model with an aliased table name, the unaliased table name is now used. * The SQLite adapter has been updated to not rely on the native type_translation support, since that will be removed in the next major version of sqlite3-ruby. 
= Other Improvements

* The json_serializer plugin will now typecast column values for columns with unrestricted setter methods when parsing JSON into model objects. It now also calls the getter method when creating the JSON, instead of directly taking the values from the underlying hash.

* When parsing the schema for a model with an aliased table name, the unaliased table name is now used.

* The SQLite adapter has been updated to not rely on the native type_translation support, since that will be removed in the next major version of sqlite3-ruby. Sequel now implements its own type translation in the sqlite adapter, similarly to how the mysql and postgres adapters handle type translation.

* On SQLite, when emulating natively unsupported schema methods such as drop_column, Sequel will now attempt to recreate applicable indexes on the underlying table.

* A more informative error message is now used when a connection attempt fails when using the jdbc adapter.

* method_missing is no longer removed from Sequel::BasicObject on ruby 1.8. This should improve compatibility in some cases with Rubinius.

= Backwards Compatibility

* On SQLite, Sequel no longer assumes that a plain integer in a datetime or timestamp field represents a unix epoch time.

* Previously, saving a model object that used the instance_hooks plugin removed all instance hooks. Now, only the applicable hooks are removed. So if you save a new object, the update instance hooks won't be removed. And if you save an existing object, delete instance hooks won't be removed.

* The private Dataset#identifier_list method has been moved into the SQLite adapter, since that is the only place it was used.

sequel-5.63.0/doc/release_notes/3.19.0.txt

= New Features

* The add_* association methods now accept a primary key, and associate the receiver to the associated model object with that primary key:

    artist.add_album(42)
    # equivalent to: artist.add_album(Album[42])

* The validation_class_methods plugin now has the ability to reflect on validations:

    Album.plugin :validation_class_methods
    Album.validates_acceptance_of(:a)
    Album.validation_reflections
    # => {:a=>[[:acceptance, {:tag=>:acceptance, :allow_nil=>true,
    #      :message=>"is not accepted", :accept=>"1"}]]}

= Other Improvements

* In the postgres, mysql, and sqlite adapters, typecasting now uses methods instead of procs. Since methods aren't closures (while procs are), this makes typecasting faster (up to 15%).

* When typecasting model column values, the classes of the new and existing values are checked in addition to the values themselves. Previously, if the new and existing values were equal (i.e. 1.0 and 1), it wouldn't update the value. Now, if the classes are different, it always updates the value.

* Date and DateTime objects are now handled correctly when using prepared statements/bound variables in the jdbc adapter.

* Date, DateTime, Time, true, false, and SQL::Blob objects are now handled correctly when using prepared statements/bound variables in the sqlite adapter.

* Sequel now uses varbinary(max) instead of image for the generic File (blob) type on Microsoft SQL Server. This makes it possible to use an SQL::Blob object as a prepared statement argument.

* Sequel now handles blobs better in the Amalgalite adapter.

* When disconnecting a connection using the sqlite adapter, all open prepared statements are now closed first. Previously, attempting to disconnect a connection with open prepared statements resulted in an error.

* The license file has been renamed from COPYING to MIT-LICENSE, to make it easier to determine at a glance which license is used.

= Backwards Compatibility

* Because Sequel switched the generic File type from image to varbinary(max) on Microsoft SQL Server, any migrations/schema modification methods that used the File type will now result in a different column type than before.

* The MYSQL_TYPE_PROCS, PG_TYPE_PROCS, and SQLITE_TYPE_PROCS constants have been removed from the mysql, postgres, and sqlite adapters, respectively.
The UNIX_EPOCH_TIME_FORMAT and FALSE_VALUES constants have also been removed from the sqlite adapter.

* Typecasting in the sqlite adapters now uses to_i and to_f instead of Integer() and Float() with rescues. If you put non-numeric data in numeric columns on SQLite, this could cause problems.

sequel-5.63.0/doc/release_notes/3.2.0.txt

New Features
------------

* Common table expressions (CTEs) are now supported. CTEs use the SQL WITH clause, and specify inline views that queries can use. They also support a recursive mode, where the CTE can recursively query its own output, allowing you to do things like load all branches for a given node in a plain tree structure.

  The standard with takes an alias and a dataset:

    DB[:vw].with(:vw, DB[:table].filter{col < 1})
    # WITH vw AS (SELECT * FROM table WHERE col < 1)
    # SELECT * FROM vw

  The recursive with takes an alias, a nonrecursive dataset, and a recursive dataset:

    DB[:vw].with_recursive(:vw, DB[:tree].filter(:id=>1),
      DB[:tree].join(:vw, :id=>:parent_id).
        select(:vw__id, :vw__parent_id))
    # WITH RECURSIVE vw AS (SELECT * FROM tree
    #     WHERE (id = 1)
    #   UNION ALL
    #     SELECT vw.id, vw.parent_id
    #     FROM tree
    #     INNER JOIN vw ON (vw.id = tree.parent_id))
    # SELECT * FROM vw

  CTEs are supported by Microsoft SQL Server 2005+, DB2 7+, Firebird 2.1+, Oracle 9+, and PostgreSQL 8.4+.

* SQL window functions are now supported, and a DSL has been added to ease their creation. Window functions act similarly to aggregate functions but operate on sliding ranges of rows.

  In virtual row blocks (blocks passed to filter, select, order, etc.) you can now provide a block to method calls to change the default behavior to create functions that weren't possible previously. The blocks aren't called, but their presence serves as a flag.

  What function is created depends on the arguments to the method:

  * If there are no arguments, an SQL::Function is created with the name of method used, and no arguments. Previously, it was not possible to create functions without arguments using the virtual row block DSL. Example:

      DB.dataset.select{version{}} # SELECT version()

  * If the first argument is :*, an SQL::Function is created with a single wildcard argument (*). This is mostly useful for count:

      DB[:t].select{count(:*){}} # SELECT count(*) FROM t

  * If the first argument is :distinct, an SQL::Function is created with the keyword DISTINCT prefacing all remaining arguments. This is useful for aggregate functions such as count:

      DB[:t].select{count(:distinct, col1){}}
      # SELECT count(DISTINCT col1) FROM t

  * If the first argument is :over, the second argument, if provided, should be a hash of options to pass to SQL::Window. The options hash can also contain :*=>true to use a wildcard argument as the function argument, or :args=>... to specify an array of arguments to use as the function arguments.

      DB[:t].select{rank(:over){}} # SELECT rank() OVER ()
      DB[:t].select{count(:over, :*=>true){}} # SELECT count(*) OVER ()
      DB[:t].select{sum(:over, :args=>col1, :partition=>col2, :order=>col3){}}
      # SELECT sum(col1) OVER (PARTITION BY col2 ORDER BY col3)

  PostgreSQL also supports named windows. Named windows can be specified by Dataset#window, and window functions can reference them using the :window option.

* Schema information for columns now includes a :ruby_default entry which contains a ruby object that represents the default given by the database (which is stored in :default).
Not all :default entries can be parsed into a :ruby_default, but if the schema_dumper extension previously supported it, it should work. * Methods to create compound datasets (union, intersect, except), now take an options hash instead of a true/false flag. The previous API is still supported, but switching to specifying the ALL setting using :all=>true is recommended. Additionally, you can now set :from_self=>false to not wrap the returned dataset in a "SELECT * FROM (...)". * Dataset#ungraphed was added that removes the graphing information from the dataset. This allows you to use Dataset#graph for the automatic aliasing, or #eager_graph for the automatic aliasing and joining, and then remove the graphing information so that the resulting objects will not be split into subhashes or associations. * There were some introspection methods added to Dataset to describe which capabilities that dataset does or does not support: supports_cte? supports_distinct_on? supports_intersect_except? supports_intersect_except_all? supports_window_functions? In addition to being available for the user to use, these are also used internally, so attempting to use a CTE on a dataset that doesn't support it will raise an Error. * Dataset#qualify was added, which is like qualify_to with a default of first_source. Additionally, qualify now affects PlaceholderLiteralStrings. It doesn't scan the string (as Sequel never attempts to parse SQL), but if you provide the column as a symbol placeholder argument, it will qualify it. * You can now specify the table and column Sequel::Migrator will use to record the current schema version. The new Migrator.run method must be used to use these new options. * The JDBC adapter now accepts :user and :password options, instead of requiring them to be specified in the connection string and handled by the JDBC driver. This should allow connections to Oracle using the Thin JDBC driver. * You can now specify the max_connections, pool_timeout, and single_threaded settings directly in the connection string: postgres:///database?single_threaded=t postgres:///database?max_connections=10&pool_timeout=20 * Dataset#on_duplicate_key_update now affects Dataset#insert when using MySQL. * You can now specify the :opclass option when creating PostgreSQL indexes. Currently, this only supports a single operator class for all columns. If you need different operator classes per column, please post on sequel-talk. * Model#autoincrementing_primary_key was added and can be used if the autoincrementing key isn't the same as the primary key. The only likely use for this is on MySQL MyISAM tables with composite primary keys where only one of the composite parts is autoincrementing. * You can now use database column values as search patterns and specify the text to search as a String or Regexp: String.send(:include, Sequel::SQL::StringMethods) Regexp.send(:include, Sequel::SQL::StringMethods) 'a'.like(:x) # ('a' LIKE x) /a/.like(:x) # ('a' ~ x) /a/i.like(:x) # ('a' ~* x) /a/.like(:x, 'b') # (('a' ~ x) OR ('a' ~ 'b')) * The Dataset#dataset_alias private method was added. It can be overridden if you have tables named t0, t1, etc., and want to make sure the default dataset aliases that Sequel uses do not clash with existing table names. * Sequel now raises an Error if you call Sequel.connect with something that is not a Hash or String. * bin/sequel now accepts a -N option to not test the database connection. 
* An opening_databases.rdoc file was added to the documentation directory, which should be a good introduction for new users about how to set up your Database connection.

Other Improvements
------------------

* MySQL native adapter SELECT is much faster than before, up to 75% faster.

* JDBC SELECT is about 10% faster than before. It's still much slower than the native adapters, due to conversion issues.

* bin/sequel now works with a YAML file on ruby 1.9.

* MySQL foreign key table constraints have been fixed.

* Database#indexes now works on PostgreSQL if the schema used is a Symbol. It also works on PostgreSQL versions all the way back to 7.4.

* Graphing of datasets with dataset sources has been fixed.

* Changing a column's name, type, or NULL status on MySQL now supports a much wider selection of column defaults.

* The stored procedure code is now thread-safe. Sequel is thread-safe in general, but due to a bug the previous stored procedure code was not thread-safe.

* The ODBC adapter now drops statements automatically instead of requiring the user to do so manually, making it more similar to other adapters.

* The single_table_inheritance plugin no longer overwrites the STI field if the field already has a value. This allows you to use create in the generic class to insert a value that will be returned as a subclass:

    Person.create(:kind => "Manager")

* When altering columns on MySQL, :unsigned, :elements, :size and other options given are no longer ignored.

* The PostgreSQL shared adapter's explain and analyze methods have been fixed; they had been broken in 3.0.

* Parsing of the server's version is more robust on PostgreSQL. It should now work correctly for 8.4 and 8.4rc1 type versions.

Backwards Compatibility
-----------------------

* Dataset#table_exists? has been removed, since it never worked perfectly. Use Database#table_exists? instead.

* Model.grep now calls Dataset#grep instead of Enumerable#grep. If you are using Model.grep, you need to modify your application.

* The MSSQL shared adapter previously used the :with option for storing the NOLOCK setting of the query. That option has been renamed to :table_options, since :with is now used for CTEs. This should not have an effect unless you were using the option manually.

* Previously, providing a block to method calls in virtual row blocks did not change behavior, where now it causes a different code path to be used. In both cases, the block is not evaluated, but that may change in a future version.

* Dataset#to_table_reference protected method was removed, as it was no longer used.

* The pool_timeout setting is now converted to an Integer, so if you used to pass in a Float, it no longer works the same way.

* Most files in adapters/utils have been removed, in favor of integrating the code directly into Database and Dataset. If you were previously checking for the UnsupportedIntersectExcept or related modules, use the Dataset introspection methods instead (e.g. supports_intersect_except?).

* If you were using the ODBC adapter and manually dropping returned statements, you should note that now statements are dropped automatically, and the execute method doesn't return a statement object.

* The MySQL adapter on_duplicate_key_update_sql is now a private method.

* If you were modifying the :from dataset option directly, note that Sequel now expects this option to be preprocessed. See the new implementation of Dataset#from for an idea of the changes required.

* Dataset#simple_select_all?
now returns false instead of true for a dataset that selects from another dataset. sequel-5.63.0/doc/release_notes/3.20.0.txt000066400000000000000000000027701434214120600177640ustar00rootroot00000000000000= New Features * The swift adapter now supports an SQLite subadapter. Use the :db_type => 'sqlite' option when connecting. You can use an in memory database with the following connection string: swift:///?database=:memory:&db_type=sqlite * Arbitrary JDBC properties can now be set in the JDBC adapter using the :jdbc_properties option when connecting. The value of this option should be a hash where keys and values are JDBC property keys and values. * Basic Microsoft Access support was added to the ado adapter. The creation of autoincrementing primary key integers now works, and identifiers are now quoted with []. * The Database#indexes method now supports a :partial option when connecting to MySQL, which makes it include partial indexes (which are usually excluded). = Other Improvements * The class_table_inheritance plugin now handles subclass associations better. Previously, the implicit eager loading code had issues when you called an association method that only existed in the subclass. * The error message used when a validates_max_length validation is applied to a nil column value has been improved. You can override the message yourself using the :nil_message option. * The read_timeout and connect_timeout options now work correctly in the mysql adapter. * Another MySQL disconnect error message is now recognized. = Backwards Compatibility * The swift adapter was upgraded to support swift 0.8.1. Older versions of swift are no longer supported. sequel-5.63.0/doc/release_notes/3.21.0.txt000066400000000000000000000066401434214120600177650ustar00rootroot00000000000000= New Features * A tinytds adapter was added, enabling Sequel users on a C-based ruby running on *nix easy access to Microsoft SQL Server. Previously, the best way to connect to Microsoft SQL Server from a C-based ruby on *nix was to use the ODBC adapter with unixodbc and freetds. However, setting that up is nontrivial, while setting up tinytds is very easy. Note that the tinytds adapter currently requires the git master branch of tiny_tds, but tiny_tds 0.4.0 should include the related changes. * An association_autoreloading plugin has been added to Sequel, which removes stale many_to_one associations from the cache when the associated foreign key setter is used to change the value of the foreign key. * bin/sequel now operates more like a standard *nix utility. If given a file on the command line after the connection string, it will assume that file has ruby code and load it. If stdin is not a tty, it will read from stdin and execute it as ruby code. For recent Linux users, this means you can have a shebang line such as: #!/usr/bin/sequel postgres://user:pass@host/db to create a self contained script. * bin/sequel now supports -r and -I options similar to ruby's -r and -I options. * MySQL datasets now have a calc_found_rows method that uses SQL_CALC_FOUND_ROWS, which provides a fast way to limit the number of results returned by a dataset while having an easy way to determine how many rows would have been returned if no limit was applied. = Other Improvements * The Sequel::Model.plugin method can now be overridden just like any other method. Previously, it was the only method that was defined directly on the class. This allows the creation of plugins that can modify the plugin system itself. 
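  A minimal sketch of such a plugin, one that logs every plugin loaded after it (the module name and output are purely illustrative):

    module PluginLogger
      module ClassMethods
        def plugin(*args)
          # runs before the normal plugin loading machinery
          puts "loading plugin: #{args.first}"
          super
        end
      end
    end

    Sequel::Model.plugin PluginLogger
    Sequel::Model.plugin :timestamps
    # prints "loading plugin: timestamps"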
* Symbol splitting (:table__column___alias) now works correctly for identifiers that include characters that aren't in [\w ]. Among other things, this means that identifiers with accented characters or even kanji characters can be used with symbol splitting.

* If cover? is defined, it is now used in preference to include? for the validates_includes/validates_inclusion_of validations. ruby 1.9 defines include? differently for some ranges and can be very slow, while cover? is similar to the 1.8 behavior of just checking the beginning and end of the range.

* The bin/sequel -L option now takes effect even if the -m, -C, -d, or -D options are used.

* The schema_dumper extension now recognizes the "bigint unsigned" type.

* On Microsoft SQL Server, if joining to a subselect that uses a common table expression, that common table expression is promoted to the main dataset. This allows most queries to work correctly, but is vulnerable to issues if both the current dataset and the joined dataset use common table expressions with the same name. Unfortunately, unlike PostgreSQL, Microsoft SQL Server does not allow common table expressions to occur in subselects.

* The NULL/NOT NULL, DEFAULT, and UNIQUE column options now use the proper order on H2 and Oracle, so they can now be used in conjunction with each other.

* Row locks are now enabled on Oracle.

* The join_table method on MySQL no longer ignores the block it was given.

* The informix adapter now supports ruby-informix version >= 0.7.3, while still being backwards compatible with older versions.

* The specs now run under both RSpec 2 and RSpec 1.

sequel-5.63.0/doc/release_notes/3.22.0.txt

= New Features

* Support COLLATE in column definitions. At least MySQL and Microsoft SQL Server support it, and PostgreSQL 9.1 should as well.

* When connecting to Microsoft SQL Server, you can use the mssql_unicode_strings accessor to turn off the default usage of unicode strings (N'') and use regular strings (''). This can improve performance, but changes the behavior. It's set to true by default for backwards compatibility. You can change it at both the dataset and database level:

    DB.mssql_unicode_strings = false # default for datasets
    dataset.mssql_unicode_strings = false # just this dataset

* In the oracle adapter, if Sequel.application_timezone is :utc, set the timezone for the connection to use the 00:00 timezone.

= Other Improvements

* In the single_table_inheritance plugin, correctly handle a multi-level class hierarchy so that loading instances from a middle level of the hierarchy can return instances of subclasses.

* Don't use a schema when creating a temporary table, even if default_schema is set.

* Fix the migrator when a default_schema is used.

* In the ado adapter, assume a connection to SQL Server if the :conn_string is given and doesn't indicate Access/Jet.

* Fix fetching rows in the tinytds adapter when the identifier_output_method is nil.

* The tinytds adapter now checks for disconnect errors, but it might not be reliable until the next release of tiny_tds.

* The odbc adapter now handles ODBC::Time instances correctly.

sequel-5.63.0/doc/release_notes/3.23.0.txt

= New Features

* Sequel now allows dynamic customization for eager loading. Previously, the parameters for eager loading were fixed at association creation time. Now, they can be modified at query time.
To dynamically modify an eager load, you use a hash with the proc as the value. For example, if you have this code: Artist.eager(:albums) And you only want to eagerly load albums where the id is greater than or equal to some number provided by the user, you do: min = params[:min].to_i Artist.eager(:albums=>proc{|ds| ds.where{id > min}}) This also works when eager loading via eager_graph: Artist.eager_graph(:albums=>proc{|ds| ds.where{id > min}}) For eager_graph, the dataset is the dataset to graph into the current dataset, and filtering it will result in an SQL query that joins to a subquery. You can also use dynamic customization while cascading to also eagerly load dependent associations, by making the hash value a single entry hash with a proc key and the value being the dependent associations to eagerly load. For example, if you want to eagerly load tracks for those albums: Artist.eager(:albums=>{proc{|ds| ds.where{id > min}}=>:tracks}) * Sequel also now allows dynamic customization for regular association loading. Previously, this was possible by using the association's dataset: albums = artist.albums_dataset.filter{id > min} However, then there was no handling of caching, callbacks, or reciprocals. For example: albums.each{|album| album.artist} Would issue one query per album to get the artist, because the reciprocal association was not set. Now you can provide a block to the association method: albums = artist.albums{|ds| ds.filter{id > min}} This block is called with the dataset used to retrieve the associated objects, and should return a modified version of that dataset. Note that ruby 1.8.6 doesn't allow blocks to take block arguments, so you have to pass the block as a separate proc argument to the association method if you are still using 1.8.6. * Sequel now supports filtering by associations. This wasn't previously supported as filtering is a dataset level feature and associations are a model level feature, and datasets do not depend on models. Now, model datasets have the ability to filter by associations. For example, to get all albums for a given artist, you could do: artist = Artist[1] Album.filter(:artist=>artist) Since the above can also be accomplished with: artist.albums this may not seem like a big improvement, but it allows you to filter on multiple associations simultaneously: Album.filter(:artist=>artist, :publisher=>publisher) For simple many_to_one associations, the above is just a simpler way to do: Album.filter(:artist_id=>artist.id, :publisher_id=>publisher.id) Sequel supports this for all association types, including many_to_many and many_through_many, where a subquery is used, and it also works when composite key associations are used: Album.filter(:artist=>artist, :tags=>tag) This will give you the albums for that artist that are also tagged with that tag. To provide multiple values for the same association, mostly useful for many_to_many associations, you can either use separate filter calls or specify the conditions as an array: Album.filter(:tags=>tag1).filter(:tags=>tag2) Album.filter([[:tags, tag1], [:tags, tag2]]) * A columns_introspection extension has been added that makes datasets attempt to guess their columns in some cases instead of issuing a database query. This can improve performance in cases where the columns are needed implicitly, such as graphing. After loading the extension, you can enable the support for specific datasets by extending them with Sequel::ColumnIntrospection. 
  To enable introspection for all datasets, use:

    Sequel::Dataset.introspect_all_columns

* A serialization_modification_detection plugin has been added. Previously, Sequel could not detect modifications made to serialized objects. It could detect modification if you assigned a new value:

    model.hash_column = model.hash_column.merge(:foo=>:bar)

  but not if you just modified the object directly:

    model.hash_columns[:foo] = :bar

  With this plugin, such modifications can be detected, at a potentially significant performance cost.

= Other Improvements

* When using a migration directory containing both older integer migrations and newer timestamp migrations, where some integer migrations have not been applied, make sure to apply the remaining integer migrations before the timestamp migrations. Previously, they could be applied out of order due to a lexicographic sort being used instead of a numeric sort.

* If a model does not select all columns from its table, the insert_select optimization is no longer used. Previously, creating a new model object for such a model could result in the object containing columns that the model does not select.

* You can now use :select=>[] as an option for many_to_many associations to select all columns from both the associated table and the join table. Previously, this raised an error and required you to use :select=>'*'.lit as a workaround. The default remains to select all columns in the associated table and none from the join table.

* The xml_serializer plugin now handles namespaced models by using __ instead of / as the namespace separator. Previously, / was used and caused problems as it is not valid XML.

* The :eager_grapher association option can now accept a proc that takes a single hash of options instead of a fixed 3 arguments. This is the recommended way going forward of writing custom :eager_graphers, and all of the internal ones have been converted. The previous way of using 3 arguments is still supported.

* A bug in the identity_map plugin for many_to_one associations without full association reflection information has been fixed.

* Sequel is now using GitHub Issues for issue tracking. Old issues have been migrated from Google Code.

= Backwards Compatibility

* The filter by associations support breaks backward compatibility for users who previously added an sql_literal instance method to Sequel::Model. Usually, that was done for reasons similar to, but inferior to, the filter by association support. The following code can be used as a temporary workaround until you can modify your program to use the new filter by associations support:

    Sequel::Model::Associations::DatasetMethods.
      send(:remove_method, :complex_expression_sql)

* The private Sequel::Model#_load_associated_objects method now takes an additional, optional options hash. Plugins that override that method need to be modified.

sequel-5.63.0/doc/release_notes/3.24.0.txt

= Prepared Statement Plugins

* The prepared_statements plugin makes Sequel::Model classes use prepared statements for creating, updating, and destroying model instances, as well as looking up model objects by primary key. With this plugin, all of the following will use prepared statements:

    Artist.plugin :prepared_statements
    Artist.create(:name=>'Foo')
    a = Artist[1]
    a.update(:name=>'Bar')
    a.destroy

* The prepared_statements_safe plugin reduces the number of prepared statements that can be created by doing two things.
  First, it makes the INSERT statements used when creating instances use as many columns as possible, setting specific values for all columns with parseable default values. Second, it changes save_changes to just use save, saving all columns instead of just the changed ones.

  The reason for this plugin is that Sequel's default behavior of using only the values specifically set when creating instances and having update only set changed columns by default can lead to a large number of prepared statements being created.

  For prepared statements to be used, each set of columns in the insert and update statements needs to have its own prepared statement. If you have a table with 1 primary key column and 4 other columns, you can have up to 2^4 = 16 prepared statements created, one for each subset of the 4 columns. If you have 1 primary key column and 20 other columns, there are over a million subsets, and you could hit your database limit for prepared statements (a denial of service attack).

  Using the prepared_statements_safe plugin mitigates this issue by reducing the number of columns that may or may not be present in the query, in many cases making sure that each model will only have a single INSERT and a single UPDATE prepared statement.

* The prepared_statements_associations plugin allows normal association method calls to use prepared statements if possible. For example:

    Artist.plugin :prepared_statements_associations
    Artist.one_to_many :albums
    Artist[1].albums

  Will use a prepared statement to return the albums for that artist. This plugin works for all supported association types. There are some associations (filtered and custom associations) for which Sequel cannot currently use a prepared statement reliably; for those, Sequel will use a regular query.

* The prepared_statements_with_pk plugin allows the new Dataset#with_pk method (explained below) to use prepared statements. For example:

    Artist.plugin :prepared_statements_with_pk
    Artist.filter(...).with_pk(1)

  Will use a prepared statement for this query. The most benefit from prepared statements comes from queries that are expensive to parse and plan but quick to execute, so using this plugin with a complex filter can in certain cases yield significant performance improvements.

  However, this plugin should be considered unsafe as it is possible that it will create an unbounded number of prepared statements. It extracts parameters from the dataset using Dataset#unbind (explained below), so if your code has conditions that vary per query but that Dataset#unbind does not handle, an unbounded number of prepared statements can be created. For example:

    Artist.filter(:a=>params[:b].to_i).with_pk(1)
    Artist.exclude{a > params[:b].to_i}.with_pk(1)

  are safe, but:

    Artist.filter(:a=>[1, params[:b].to_i]).with_pk(1)
    Artist.exclude{a > params[:b].to_i + 2}.with_pk(1)

  are not. For queries that are not safe, Dataset#with_pk should not be used with this plugin; you should switch to looking up by primary key manually (for a regular query):

    Artist.filter(:a=>[1, params[:b].to_i])[:id=>1]

  or using the prepared statement API to create a custom prepared statement:

    # PS = {}
    PS[:name] ||= Artist.filter(:a=>[1, :$b], :id=>:$id).
      prepare(:select, :name)
    PS[:name].call(:b=>params[:b].to_i, :id=>1)

= Other New Features

* Filtering by associations got a lot more powerful.
  Sequel 3.23.0 introduced filtering by associations:

    Album.filter(:artist=>artist)

  This capability is much expanded in 3.24.0, allowing you to exclude by associations:

    Album.exclude(:artist=>artist)

  This will match all albums not by that artist.

  You can also filter or exclude by multiple associated objects:

    Album.filter(:artist=>[artist1, artist2])
    Album.exclude(:artist=>[artist1, artist2])

  The filtered dataset will match all albums by either of those two artists, and the excluded dataset will match all albums not by either of those two artists.

  You can also filter or exclude by using a model dataset:

    Album.filter(:artist=>Artist.filter(:name.like('A%'))).all
    Album.exclude(:artist=>Artist.filter(:name.like('A%'))).all

  Here the filtered dataset will match all albums where the associated artist has a name that begins with A, and the excluded dataset will match all albums where the associated artist does not have a name that begins with A.

  All of these types of filtering and excluding work with all of the association types that ship with Sequel, even the many_through_many plugin.

* Sequel now supports around hooks, which wrap the related before hook, behavior, and after hook. Like other Sequel hooks, these are implemented as instance methods. For example, if you wanted to log DatabaseErrors raised during save:

    class Artist < Sequel::Model
      def around_save
        super
      rescue Sequel::DatabaseError => e
        # log the error
        raise
      end
    end

  All around hooks should call super, not yield. If an around hook doesn't call super or yield, it is treated as a hook failure, similar to before hooks returning false.

  For around_validation, the return value of super should be whether the object is valid. For other around hooks, the return value of super is currently true, but it's possible that will change in the future.

* Dataset#with_pk has been added to model datasets, allowing you to find the object with the matching primary key:

    Artist.filter(:name.like('A%')).with_pk(1)

  This should make it easier to handle the common case where you want to find a particular object that is associated to another object:

    Artist[1].albums_dataset.with_pk(2)

  Before, there was no way to do that without manually specifying the primary key:

    Artist[1].albums_dataset[:id=>2]

  To use a composite primary key with with_pk, you have to provide an array:

    Artist[1].albums_dataset.with_pk([1, 2])

* Dataset#[] for model datasets will now call with_pk if given a single Integer argument. This makes the above case even easier:

    Artist[1].albums_dataset[2]

  Note that for backwards compatibility, this only works for single integer primary keys. If you have a composite primary key or a string/varchar primary key, you have to use with_pk.

* Dataset#unbind has been added, which allows you to take a dataset that uses static bound values and convert them to placeholders. Currently, the only cases handled are SQL::ComplexExpression objects that use a =, !=, <, >, <=, or >= operator where the first argument is a Symbol, SQL::Identifier, or SQL::QualifiedIdentifier, and the second argument is a Numeric, String, Date, or Time.
  Dataset#unbind returns a two element array, where the first element is a modified copy of the receiver, and the second element is a bound variable hash:

    ds, bv = DB[:table].filter(:a=>1).unbind
    ds # DB[:table].filter(:a=>:$a)
    bv # {:a=>1}

  The purpose of doing this is that you can then use prepare or call on the returned dataset with the returned bound variables:

    ds.call(:select, bv)
    # SELECT * FROM table WHERE (a = ?); [1]

    ps = ds.prepare(:select, :ps_name)
    # PREPARE ps_name AS SELECT * FROM table WHERE (a = ?)
    ps.call(bv)
    # EXECUTE ps_name(1)

  Basically, Dataset#unbind takes a specific statement and attempts to turn it into a generic statement, along with the placeholder values it extracted.

  Unfortunately, Dataset#unbind cannot handle all cases. For example:

    DB[:table].filter{a + 1 > 10}.unbind

  will not unbind any values. Also, if you have a query with multiple different values for a variable, it will raise an UnbindDuplicate exception:

    DB[:table].filter(:a=>1).or(:a=>2).unbind

* A defaults_setter plugin has been added that makes it easy to automatically set default values when creating new objects. This plugin makes Sequel::Model behave more like ActiveRecord in that new model instances (before saving) will have default values parsed from the database. Unlike ActiveRecord, only values with non-NULL defaults are set. Also, Sequel allows you to easily modify the default values used:

    Album.plugin :defaults_setter
    Album.new.values # {:copies_sold => 0}
    Album.default_values[:copies_sold] = 42
    Album.new.values # {:copies_sold => 42}

  Before, this was commonly done in an after_initialize hook, but that's slower as it is also called for model instances loaded from the database.

* A Database#views method has been added that returns an array of symbols representing view names in the database. This works just like Database#tables except it returns views.

* A Sequel::ASTTransformer class was added that makes it easy to write custom transformers of Sequel's internal abstract syntax trees. Dataset#qualify now uses a subclass of ASTTransformer to do its transformations, as does the new Dataset#unbind.

= Other Improvements

* Database#create_table? now uses a single query with IF NOT EXISTS if the database supports such syntax. Previously, it issued a SELECT query to determine table existence. Sequel currently supports this syntax on MySQL, H2, and SQLite 3.3.0+. The Database#supports_create_table_if_not_exists? method was added to allow users to determine whether this syntax is supported.

* Multiple column IN/NOT IN emulation now works correctly with model datasets (or other datasets that use a row_proc).

* You can now correctly invert SQL::Constant instances:

    Sequel::NULL # NULL
    ~Sequel::NULL # NOT NULL
    Sequel::TRUE # TRUE
    ~Sequel::TRUE # FALSE

* A bug in the association_pks plugin has been fixed in the case where the associated table had a different primary key column name than the current table.

* The emulated prepared statement support now supports nil and false as bound values.

* The to_dot extension was refactored for greater readability. The only change was a small fix in the display for SQL::Subscript instances.

* The Dataset#supports_insert_select? method is now available to let you know if the dataset supports insert_select. You should use this method instead of respond_to? for checking for insert_select support.
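  For example (table and column names hypothetical, assuming an integer id primary key):

    ds = DB[:items]
    if ds.supports_insert_select?
      # a single query that returns the inserted row, including defaults
      row = ds.insert_select(:name=>'foo')
    else
      # fall back to a separate lookup by the returned primary key
      row = ds[:id=>ds.insert(:name=>'foo')]
    end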
* Prepared statements/bound variable can now use a new :insert_select type for preparing a statement that will insert a row and return the row inserted, if the dataset supports insert_select. * The Model#initialize_set private method now exists for easier plugin writing. It is only called for new model objects, with the hash given to initialize. By default, it just calls set. * A small bug when creating anonymous subclasses of Sequel::Model on ruby 1.9 has been fixed. * If Thread#kill is used inside a transaction on ruby 1.8 or rubinius, the transaction is rolled back. This situation is not handled correctly on JRuby or ruby 1.9, and I'm not sure it's possible to handle correctly on those implementations. * The postgres adapter now supports the Sequel::Postgres::PG_NAMED_TYPES hash for associating conversion procs for custom types that don't necessarily have the same type oid on different databases. This hash uses symbol keys and proc values: Sequel::Postgres::PG_NAMED_TYPES[:interval] = proc{|v| ...} The conversion procs now use a separate hash per Database object instead of a hash shared across all Database objects. You can now modify the types for a particular Database object, but you have to use the type oid: DB.conversion_procs[42] = proc{|v| ...} * On SQLite and MSSQL, literalization of true and false values given directly to Dataset#filter has been fixed. So the following now works correctly on those databases: DB[:table].filter(true) DB[:table].filter(false) Unfortunately, because SQLite and MSSQL don't have a real boolean type, these will not work: DB[:table].filter{a & true} DB[:table].filter{a & false} You currently have to work around the issue by doing: DB[:table].filter{a & Sequel::TRUE} DB[:table].filter{a & Sequel::FALSE} It is possible that a future version of Sequel will remove the need for this workaround, but that requires having a separate literalization method specific to filters. * The MySQL bit type is no longer treated as a boolean. On MySQL, the bit type is a bitfield, which is very different than the MSSQL bit type, which is the closest thing to a boolean on MSSQL. * The bool database type is now recognized as a boolean. Some SQLite databases use bool, such as the ones used in Firefox. * SQL_AUTO_IS_NULL=0 is now set by default when connecting to MySQL using the swift or jdbc adapters. Previously, it was only set by default when using the mysql or mysql2 adapters. * Dataset#limit now works correctly on Access, using the TOP syntax. * Dataset#limit now works correctly on DB2, using the FETCH FIRST syntax. * The jdbc mssql subadapter was split into separate subadapters for sqlserver (using Microsoft's driver) and jtds (using the open source JTDS driver). * The jdbc jtds subadapter now supports converting Java CLOB objects to ruby strings. * Tables from the INFORMATION_SCHEMA are now ignored when parsing schema on JDBC. * The informix adapter has been split into shared/specific parts, and a jdbc informix subadapter has been added. * Dataset#insert_select now works correctly on MSSQL when the core extensions are disabled. * The sqlite adapter now logs when preparing a statement. * You no longer need to be a PostgreSQL superuser to run the postgres adapter specs. * The connection pool specs are now about 10 times faster and not subject to race conditions due to using Queues instead of sleeping. = Backwards Compatibility * Model#save no longer calls Model#valid?. It now calls the Model#_valid? private method that Model#valid? also calls. 
To mark a model instance invalid, you should override the Model#validate method and add validation errors to the object. * The BeforeHookFailure exception class has been renamed to HookFailure since hook failures can now be raised by around hooks that don't call super. BeforeHookFailure is now an alias to HookFailure, so no code should break, but you should update your code to reflect the new name. * Any custom argument mappers used for prepared statements now need to implement the prepared_arg? private instance method and have it return true. * If your databases uses bit as a boolean type and isn't MSSQL, it's possible that those columns will no longer be treated as booleans. Please report such an issue on the bugtracker. * It is possible that the filtering and excluding by association datasets will break backwards compatibility in some apps. This can only occur if you are using a symbol with the same name as an association with a model dataset whose model is the same as the associated class. As associations almost never have the same names as columns, this would require either aliasing or joining to another table. If for some reason this does break your app, you can work around it by changing the symbol to an SQL::Identifier or a literal string. * The Sequel::Postgres.use_iso_date_format= method now only affects future Database objects. * On MySQL, Database#tables no longer returns view names, it only returns table names. You have to use Database#views to get view names now. sequel-5.63.0/doc/release_notes/3.25.0.txt000066400000000000000000000061711434214120600177700ustar00rootroot00000000000000= New Features * drop_table, drop_view, drop_column, and drop_constraint all now support a :cascade option for using CASCADE. DB.drop_table(:tab, :cascade=>true) # DROP TABLE tab CASCADE DB.drop_column(:tab, :col, :cascade=>true) # ALTER TABLE tab DROP COLUMN col CASCADE A few databases support CASCADE for dropping tables and views, but only PostgreSQL appears to support it for columns and constraints. Using the :cascade option when the underlying database doesn't support it will probably result in a DatabaseError being raised. * You can now use datasets as expressions, allowing things such as: DB[:table1].select(:column1) > DB[:table2].select(:column2) # (SELECT column1 FROM table1) > (SELECT column2 FROM table2) DB[:table1].select(:column1).cast(Integer) # CAST((SELECT column1 FROM table1) AS integer) * Dataset#select_group has been added for grouping and selecting on the same columns. DB[:a].select_group(:b, :c) # SELECT b, c FROM a GROUP BY b, c * Dataset#exclude_where and #exclude_having methods have been added, allowing you to specify which clause to affect. #exclude's behavior is still to add to the HAVING clause if one is present, and use the WHERE clause otherwise. * Dataset#select_all now accepts optional arguments and will select all columns from those arguments if present: DB[:a].select_all(:a) # SELECT a.* FROM a DB.from(:a, :b).select_all(:a, :b) # SELECT a.*, b.* FROM a, b * Dataset#group and #group_and_count now both accept virtual row blocks: DB[:a].select(:b).group{c(d)} # SELECT b FROM a GROUP BY c(d) * If you use a LiteralString as a validation error message, Errors#full_messages will now not add the related column name to the start of the error message. * Model.set_dataset now accepts SQL::Identifier, SQL::QualifiedIdentifier, and SQL::AliasedExpression instances, treating them like Symbols. 
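  To tie a few of these together, here's a short sketch (table and columns hypothetical; the SQL shown is approximate):

    ds = DB[:albums].group(:artist_id).having{sum(:copies_sold) > 100}
    ds.exclude(:artist_id=>5)       # adds to HAVING, since one is present
    ds.exclude_where(:artist_id=>5) # always adds to WHERE
    # SELECT * FROM albums WHERE (artist_id != 5)
    #   GROUP BY artist_id HAVING (sum(copies_sold) > 100)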
= Other Improvements * The association_pks plugin's setter method will now automatically convert a given array of strings to an array of integers if the primary key field is an integer field, which should make it easier to use in web applications. * nil bound variable, prepared statement, and stored procedure arguments are now handled correctly in the JDBC adapter. * On 1.9, you can now load plugins even when ::ClassMethods, ::InstanceMethods, or ::DatasetMethods is defined. = Backwards Compatibility * The tinytds adapter now only works with tiny_tds 0.4.5 and greater. Also, if you were using the tinytds adapter with FreeTDS 0.91rc1, you need to upgrade to FreeTDS 0.91rc2 for it to work. Also, if you were referencing an entry in the freetds.conf file, you now need to specify it directly using the :dataserver option when connecting, the adapter no longer copies the :host option to the :dataserver option. * On postgresql, Sequel now no longer drops tables with CASCADE by default. You now have to use the :cascade option to drop_table if you want to use CASCADE. * The Database#drop_table_sql private method now takes an additional options hash argument. sequel-5.63.0/doc/release_notes/3.26.0.txt000066400000000000000000000074621434214120600177750ustar00rootroot00000000000000= Performance Enhancements * The internal implementation of eager_graph has been made 75% to 225% faster than before, with greater benefits to more complex graphs. * Dataset creation has been made much faster (2.5x on 1.8 and 4.4x on 1.9), and dataset cloning has been made significantly faster (40% on 1.8 and 20% on 1.9). = Other Improvements * Strings passed to setter methods for integer columns are no longer considered to be in octal format if they include leading zeroes. The previous behavior was never intended, but was a side effect of using Kernel#Integer. Strings with leading zeroes are now treated as decimal, and you can still use the 0x prefix to treat them as hexidecimal. If anyone was relying on the old octal behavior, let me know and I'll add an extension that restores the octal behavior. * The identity_map plugin now works with the standard eager loading of many_to_many and many_through_many associations. * Database#create_table! now only attempts to drop the table if it already exists. Previously, it attempted to drop the table unconditionally ignoring any errors, which resulted in misleading error messages if dropping the table raised an error caused by permissions or referential integrity issues. * The default connection pool now correctly handles the case where a disconnect error is raised and an exception is raised while running the disconnection proc. * Disconnection errors are now detected when issuing transaction statements such as BEGIN/ROLLBACK/COMMIT. Previously, these statements did not handle disconnect errors on most adapters. * More disconnection errors are now detected. Specifically, the ado adapter and do postgres subadapter now handle disconnect errors, and the postgres adapter handles more types of disconnect errors. * Database#table_exists? now always issues a query to select from the table, it no longer attempts to parse the schema to determine the information on PostgreSQL and Oracle. * Date, DateTime, and Time values are now literalized correctly on Microsoft Access. * Connecting with the mysql adapter with an options hash now works if the :port option is a string, which makes it easier to use when the connection information is stored in YAML. 
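  For example, options read from a YAML config where the port comes through as a string should now just work (file name and contents hypothetical):

    require 'yaml'
    conf = YAML.load_file('database.yml')
    # e.g. {'host'=>'db.example.com', 'port'=>'3306', 'database'=>'app'}
    DB = Sequel.mysql(conf['database'], :host=>conf['host'],
      :port=>conf['port'])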
* The xml_serializer plugin now works around a bug in pure-Java nokogiri regarding the handling of nil values.

* Nicer error messages are now used if there is an attempt to call an invalid or restricted setter method.

* The RDocs are now formatted with hanna-nouveau, which allows for section ordering, so the Database and Dataset RDoc pages are more friendly.

= Backwards Compatibility

* If you call a Dataset method such as #each on an eager_graphed dataset, you now get plain hashes that have column alias symbol keys and their values. Previously, you got a graphed response with table alias keys and model values. It's not wise to depend on the behavior; the only supported way of returning records when eager loading is to use #all.

* An error is now raised if you attempt to eager load via Dataset#eager a many_to_many association that includes an :eager_graph option. Previously, incorrect SQL would have been generated and an error raised by the database.

* Datasets are no longer guaranteed to have @row_proc, @identifier_input_method, and @identifier_output_method defined as instance variables. You should be using methods to access them anyway.

* Database#table_exists? on PostgreSQL no longer accepts an options hash. Previously, you could use a :schema option. You must now provide the schema inside the table argument (e.g. :schema__table).

* If you want to use the rdoc tasks in Sequel's Rakefile, and you are still using the hanna RDoc template with RDoc 2.3, you need to upgrade to using hanna-nouveau with RDoc 3.8+.

sequel-5.63.0/doc/release_notes/3.27.0.txt

= New Features

* Model.dataset_module has been added for easily adding methods to a model's dataset:

    Album.dataset_module do
      def with_name_like(x)
        filter(:name.like(x))
      end

      def selling_at_least(x)
        filter{copies_sold > x}
      end
    end
    Album.with_name_like('Foo%').selling_at_least(100000).all

  Previously, you could use def_dataset_method to accomplish the same thing. dataset_module is generally cleaner, plus you are using actual methods instead of blocks, so calling the methods is faster on some ruby implementations.

* Sequel now uses a Sequel::SQLTime class (a subclass of Time) when dealing with values for SQL time columns (which don't have a date component). These values are handled correctly when used in filters or insert/update statements (using only the time component), so Sequel can now successfully round trip values for time columns. Not all adapters support returning time column values as SQLTime instances, but the most common ones do.

* You can now drop foreign key, primary key, and unique constraints on MySQL by passing the :type=>(:foreign_key|:primary_key|:unique) option to Database#drop_constraint.

* The ODBC adapter now has initial support for the DB2 database; use the :db_type=>'db2' option to load the support.

= Other Improvements

* The mysql2 adapter now uses native prepared statements.

* The tinytds adapter now uses sp_executesql for prepared statements.

* DateTime and Time objects are now converted to Date objects when they are assigned to a date column in a Model instance.

* When converting a Date object to a DateTime object, the resulting DateTime object now has no fractional day components. Previously, depending on your timezone settings, it could have had fractional day components.

* The mysql2 adapter now supports stored procedures, as long as they don't return results.
* Mass assignment protection now handles including modules in model classes and extending model instances with modules. Previously, if you defined a setter method in a module, access to it may have been restricted.

* The prepared_statements_safe plugin now works on classes without datasets, so you can now do the following to load it for all models:

    Sequel::Model.plugin :prepared_statements_safe

* Dataset#hash now works correctly when handling SQL::Expression instances.

* Model#hash now correctly handles classes with no primary key or with a composite primary key.

* Model#exists? now always returns false for new model objects.

= Backwards Compatibility

* If you were previously setting primary key values manually for new model objects and then calling exists? to see if the instance is already in the database, you need to change your code from:

    model.exists?

  to:

    !model.this.get(1).nil?

sequel-5.63.0/doc/release_notes/3.28.0.txt

= New Adapter Support

* Sequel now has much better support for the DB2 database.

  * An ibmdb adapter has been added, and is the recommended adapter to use if you want to connect to DB2 from MRI.

  * A jdbc db2 subadapter has been added, allowing good DB2 support on JRuby.

  * The db2 adapter has been cleaned up substantially, and now works well, but it is still recommended that you switch to ibmdb if you are using the db2 adapter.

* The firebird adapter has been split into shared and specific parts, and quite a few fixes were made to it.

* A jdbc firebird subadapter has been added, allowing connection to firebird databases from JRuby.

= New PostgreSQL 9.1 Features

* Dataset#returning has been added for using the RETURNING clause on INSERT/UPDATE/DELETE queries. RETURNING allows such queries to return results in much the same way as a SELECT query works.

  When Dataset#returning is used, Dataset#insert, #update, and #delete now accept a block that is passed to Dataset#fetch_rows, which is yielded plain ruby hashes for each row inserted, updated, or deleted. If Dataset#returning is used and a block is not given to those methods, those methods will return an array of plain hashes for all rows inserted, updated, and deleted.

* Dataset#with_sql now treats a symbol as a first argument as a method name to call to get the SQL. The expected use case for this is with Dataset#returning and insert/update/delete:

    DB[:items].
      returning(:id).
      with_sql(:update_sql, :b => :b + 1).
      map(:id)

  Basically, it makes it easier to statically set the insert/update/delete SQL, and then be able to use the full dataset API for returning results.

  As mentioned above, using Dataset#returning with #insert, #update, and #delete yields plain hashes, so if you want to have the row_proc applied (e.g. you are using models), you need to use this method instead, since you can then call #each or #all to make sure the row_proc is called on all returned rows.

* Dataset#with (common table expressions) now affects INSERT/UPDATE/DELETE queries.

* Database#create_table? now uses CREATE TABLE IF NOT EXISTS on PostgreSQL 9.1.

= Other New Features

* The :limit option is now respected when eager loading via either eager or eager_graph. By default, Sequel will just do an array slice of the resulting ruby array, which gets the correct answer, but does not offer any performance improvements. Sequel also offers a new :eager_limit_strategy option for using more advanced query types that only load the related records from the database.
The available values for the :eager_limit_strategy option are: :window_function - This uses the row_number window function partitioned by the related key fields. It can only be used on databases that support window functions (PostgreSQL 8.4+, Microsoft SQL Server 2005+, DB2). :correlated_subquery - This uses a correlated subquery that is limited. It works on most databases except MySQL and DB2. You can provide a value of true as the option to have Sequel pick a strategy to use. Sequel will never use a correlated subquery for true, since in some cases it can perform worse than loading all related records and doing the array slice in ruby. If you want to enable an eager_limit_strategy globally, you can set Sequel::Model.default_eager_limit_strategy to a value, and all associations that use :limit will default to using that strategy. * one_to_one associations that do not represent true one-to-one database relationships, but represent one-to-many relationships where you are only returning the first object based on a given order are also now handled correctly when eager loading. Previously, eager loading such associations resulted in the last matching object being associated instead of the first matching object being associated. You can also use an :eager_limit_strategy for one_to_one associations. In addition to the :window_function and :correlated_subquery values, there is also a :distinct_on value that is available on PostgreSQL for using DISTINCT ON, which is the fastest strategy if you are using PostgreSQL. * Dataset#map, #to_hash, #select_map, #select_order_map, and #select_hash now accept arrays of symbols, and if given arrays of symbols, use arrays of results. For example: DB[:items].map([:id, :name]) # => [[1, 'foo'], [2, 'bar'], ...] DB[:items].to_hash([:id, :foo_id], [:name, :bar_id]) # => {[1, 3]=>['foo', 5], [2, 4]=>['bar', 6], ...} * For SQL expression objects where Sequel cannot deduce the type of the object, it now will consider the type of the argument when a &, |, or + operator is used. For example: :x & 1 Previously, this did "x AND 1", now it does "x & 1". Using a logical operator on an integer doesn't make sense, but it's possible people did so if the database uses 1/0 for true/false. Likewise: :x + 'foo' Previously, this did "x + 'foo'" (addition), now it does "x || 'foo'" (string concatenation). * The sql_string, sql_number, and sql_boolean methods are now available on SQL::ComplexExpressions, so you can do: (:x + 1).sql_string + ' foos' # (x + 1) || ' foos' Previously, there was not an easy way to generate such SQL expressions. * :after_load association hooks are now applied when using eager_graph. Previously, they were only applied when using eager, not when using eager_graph. * Database#copy_table has been added to the postgres adapter if pg is used as the underlying driver. It allows you to get very fast exports of table data in text or CSV format. It also accepts datasets, allowing fast exports of arbitrary queries in text or CSV format. * SQL extract support (:timestamp.extract(:year)) is now emulated on the databases that don't natively support it, such as SQLite, Microsoft SQL Server, and DB2. At least the following values are supported for extraction: :year, :month, :day, :hour, :minute, and :second. * The bitwise XOR operator is now emulated on SQLite. Previously, attempting to use it would cause the database to raise an error. * A Database#use_timestamp_timezones accessor has been added on SQLite. 
This allows you to turn off the use of timezones in timestamps by setting the value to false. This is necessary if you want to use the SQLite datetime functions, or the new ability to emulate extract. Note that this setting does not affect the current database content. To convert old databases to the new format, you'll have to resave all rows that have timestamps. At some point in the future, Sequel may default to not using timezones in timestamps by default on SQLite, so if you would like to rely on the current behavior, you should set this accessor to true now.

* Sequel now works around bugs in MySQL when using a subselect with a LIMIT by using a nested subselect.

* Sequel now works around issues in Microsoft SQL Server and DB2 when using a subselect with IN/NOT IN that uses the emulated offset support.

* The jdbc adapter now returns java.sql.Clob objects as Sequel::SQL::Blobs.

* Sequel now considers database clob types as the :blob schema type.

* Sequel::SQLTime.create has been added for more easily creating instances:

    Sequel::SQLTime.create(hour, minute, second, usec)

* Dataset#select_all now accepts SQL::AliasedExpression and SQL::JoinClause arguments and returns the appropriate SQL::ColumnAll value that selects all columns from the related table.

* Model.set_dataset now accepts Sequel::LiteralString objects that represent table names. This usage is not encouraged except in rare cases such as using a set returning function in PostgreSQL.

* Dataset#supports_cte? now takes an optional argument specifying the type of query (:insert, :update, :delete, :select). It defaults to :select.

* Dataset#supports_returning? has been added. It requires an argument specifying the type of query (:insert, :update, or :delete).

* Dataset#supports_cte_in_subqueries? has been added for checking for support for this ability. Apparently, only PostgreSQL currently supports this. For other adapters that support CTEs but not in subqueries, if a subquery with a CTE is used in a JOIN, the CTE is moved from the subquery to the main query.

* Dataset#supports_select_all_and_column has been added for seeing if "SELECT *, foo ..." style queries are supported. This is false on DB2, which doesn't allow such queries. When it is false, using select_append on a dataset that doesn't specifically select columns will now change the query to do "SELECT table.*, foo ..." instead, working around the limitation on DB2.

* Dataset#supports_ordered_distinct_on? has been added. Currently, this is only true on PostgreSQL. MySQL can emulate DISTINCT ON using GROUP BY, but it doesn't respect ORDER BY, so in some cases it cannot be used equivalently.

* Dataset#supports_where_true? has been added for checking for support of WHERE TRUE (or WHERE 1 if 1 is true). Not all databases support using such a construct, and on the databases that do not, you have to use WHERE (1 = 1) or something similar.

= Other Improvements

* Sequel 3.27.0 was negatively affected by a serious bug in ActiveSupport's Time.=== that has still not been fixed, which broke the literalization of Time objects. In spite of the bad precedent it sets, Sequel now avoids using Time.=== on a subclass of Time to work around this ActiveSupport bug.

* Dataset#with_pk now uses a qualified primary key instead of an unqualified primary key, which means it can now be used correctly after joining to a separate table.

* Association after_load hooks when lazy loading are now called after the association has been loaded, which allows them to change which records are cached.
  This makes the lazy load case more similar to the eager load case.

* The metaprogrammatically created methods that implement Sequel's DSL support have been made significantly faster by using module_eval instead of define_method.

* The type translation in the postgres, mysql, and sqlite adapters has been made faster by using Method objects that result in more direct processing.

* Typecasting values for time columns from Time values to Sequel::SQLTime values now correctly handles fractional seconds on ruby 1.9.

= Backwards Compatibility

* Dataset#insert_returning_sql has been changed to a private method in the PostgreSQL and Firebird adapters, and it operates differently than it did previously. The private #insert_returning_pk_sql and #insert_returning_select_sql methods have been removed.

* Dataset#with_pk no longer does some defensive checking for misuse of primary keys (e.g. providing a composite key when the model uses a single key). Previously, Sequel would raise an Error immediately; now such behavior is undefined, with the most likely behavior being the database raising an Error.

* The :alias_association_type_map and :alias_association_name_map settings have been removed from the :eager_graph dataset option, in favor of just storing the related association reflection.

* The internals of the db2 adapter have changed substantially; if you were relying on some of the private methods defined in it, you will probably have to modify your code.

* The firebird adapter was substantially modified, specifically parts related to insert returning autogenerated primary key values, so if you were previously using the adapter you should probably take more care than usual when testing your upgrade.

* The Dataset::WITH_SUPPORTED constant has been removed.

* The Dataset#supports_cte? method now accepts an optional argument. If you overrode this method, your overridden method now must accept an optional argument.

* If you were previously doing:

    :x & 1

  and wanting "x AND 1", you have to switch to:

    :x.sql_boolean & 1

  Likewise, if you were previously doing:

    :x + 'foo'

  and wanting "x + 'foo'", you need to switch to:

    :x.sql_number + 'foo'

* Sequel no longer does defensive type checking in the SQL expression support, as it was often more strict than the database and would not allow the creation of expressions that were valid for the database.

sequel-5.63.0/doc/release_notes/3.29.0.txt

= New Adapter Support

* Sequel now has much better support for Oracle, both in the ruby-oci8-based oracle adapter and in the jdbc/oracle adapter.

* Sequel now has much better support for connecting to HSQLDB using the jdbc adapter. This support does not work correctly with the jdbc-hsqldb gem, since the version it uses is too old. You'll need to load the .jar file manually until the gem is updated.

* Sequel now has much better support for connecting to Apache Derby databases using the jdbc adapter. This works with the jdbc-derby gem, but it's recommended you grab an updated .jar file as the jdbc-derby gem doesn't currently support truncate or booleans.

* The db2 adapter has had most of the remaining issues fixed, and can now run Sequel's test suite cleanly. It's still recommended that users switch to the ibmdb adapter if they are connecting to DB2.
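  For example, a minimal connection sketch (the host, port, database name, and credentials here are hypothetical placeholders, and the exact URL formats should be checked against the adapter documentation):

    # On MRI, using the ibmdb adapter:
    DB = Sequel.connect('ibmdb://user:password@host:50000/mydb')

    # On JRuby, using the jdbc db2 subadapter (property syntax per
    # IBM's JDBC driver):
    DB = Sequel.connect('jdbc:db2://host:50000/mydb:user=user;password=password;')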
* A mock adapter has been added which provides a mock Database object that allows you to easily set the returned rows, the number of rows modified by update/delete, and the autogenerating primary key integer for insert. It also allows you to set specific columns in the dataset when retrieving rows. The specs were full of partial implementations of mock adapters; this mock adapter is much more complete and offers full support for mocking transactions and database sharding. Example:

    DB = Sequel.mock(:fetch=>{:id=>1}, :numrows=>2, :autoid=>3)
    DB[:items].all # => [{:id => 1}]
    DB[:items].insert # => 3
    DB[:items].insert # => 4
    DB[:items].delete # => 2
    DB[:items].update(:id=>2) # => 2
    DB.sqls # => ['SELECT ...', 'INSERT ...', ...]

  In addition to being useful in the specs, the mock adapter is also used if you use bin/sequel without a database argument, which makes it much easier to play around with Sequel on the command line without being tied to a real database.

= New Transaction Features

* Database after_commit and after_rollback hooks have been added, allowing you to set procs that are called after the currently-in-effect transaction commits or rolls back. If the Database is not currently in a transaction, the after_commit proc is called immediately and the after_rollback proc is ignored.

* Model after_commit, after_rollback, after_destroy_commit, and after_destroy_rollback hooks have been added that use the new Database after_commit/after_rollback hook to execute code after commit or rollback.

* Database#transaction now supports a :rollback => :reraise option to reraise any Sequel::Rollback exceptions raised by the block.

* Database#transaction now supports a :rollback => :always option to always rollback the transaction, which is mostly useful when using transaction-based testing.

* Sequel.transaction has been added, allowing you to run simultaneous transactions on multiple Database objects:

    Sequel.transaction([DB1, DB2]){...}
    # similar to:
    DB1.transaction{DB2.transaction{...}}

  You can combine this with the :rollback => :always option to easily use multiple databases in the same test suite and make sure that changes are rolled back on all of them.

* Database#in_transaction? has been added so that users can detect whether the code is currently inside a transaction.

* The generic JDBC transaction support, used by 6 of Sequel's jdbc subadapters, now supports savepoints if the underlying JDBC driver supports savepoints.

= Other New Features

* A dataset_associations plugin has been added, allowing datasets to call association methods, which return datasets of rows in the associated table that are associated to rows in the current dataset.

    # Dataset of tracks from albums with name < 'M'
    # by artists with name > 'M'
    Artist.filter(:name > 'M').albums.filter(:name < 'M').tracks
    # SELECT * FROM tracks
    # WHERE (tracks.album_id IN (
    #   SELECT albums.id FROM albums
    #   WHERE ((albums.artist_id IN (
    #     SELECT artists.id FROM artists
    #     WHERE (name > 'M')))
    #   AND (name < 'M'))))

* Database#extend_datasets has been added, allowing you to do the equivalent of extending all of the database's future datasets with a module. For performance, it creates an anonymous subclass of the current dataset class and includes a module in it, and uses the subclass to create future datasets. Using this feature allows you to override any dataset method and call super, similar to how Sequel::Model plugins work.
  The method takes either a module:

    Sequel.extension :columns_introspection
    DB.extend_datasets(Sequel::ColumnsIntrospection)

  or a block that it uses to create an anonymous module:

    DB.extend_datasets do
      # Always select from table.* instead of *
      def from(*tables)
        ds = super
        if !@opts[:select] || @opts[:select].empty?
          ds = ds.select_all(*tables)
        end
        ds
      end
    end

* Database#<< and Dataset#<< now return self, which allows them to be used in chaining:

    DB << "UPDATE foo SET bar_id = NULL" << "DROP TABLE bars"
    DB[:foo] << {:bar_id=>0} << DB[:bars].select(:id)

* A Database#timezone accessor has been added, allowing you to override Sequel.database_timezone on a per-Database basis, which allows you to use two separate Database objects in the same process that have different timezones.

* You can now modify the type conversion procs on a per-Database basis when using the mysql, sqlite, and ibmdb adapters, by modifying the hash returned by Database#conversion_procs.

* Model.dataset_module now accepts a Module instance as an argument, and extends the model's dataset with that module.

* When using the postgres adapter with the pg driver, you can now use Database#listen to wait for notifications. All adapters that connect to postgres now support Database#notify to send notifications:

    # process 1
    DB.listen('foo') do |ev, pid, payload|
      ev # => 'foo'
      pid # => some Integer
      payload # => 'bar'
    end

    # process 2
    DB.notify('foo', :payload=>'bar')

* many_to_one associations now have a :qualify option that can be set to false to not qualify the primary key when loading the association. This shouldn't be necessary to use in most cases, but in some cases qualifying a primary key breaks certain queries (e.g. using JOIN USING on the same column on Oracle).

* Database#schema can now take a dataset as an argument if it just selects from a single table. If a dataset is provided, the schema parsing will use that dataset's identifier_input_method and identifier_output_method for the parsing, instead of the database's default. This makes it possible for Model classes to correctly get the table schema if they use a dataset whose identifier_(input|output)_method differs from the database default.

* On databases that support common table expressions (CTEs) but do not support CTE usage in subselects, Sequel now emulates support by moving CTEs from the subselect to the main select when using the Dataset from, from_self, with, with_recursive, union, intersect, and except methods.

* The bitwise complement operator is now emulated on H2.

* You can now set the convert_tinyint_to_bool setting on a per-Database basis in the mysql and mysql2 adapters.

* You can now set the convert_invalid_date_time setting on a per-Database basis in the mysql adapter.

* Database instances now have a dataset_class accessor that allows you to set which class is used when creating datasets. This is mostly used to implement the extend_datasets support, but it could be useful for other purposes.

* Dataset#unused_table_alias now accepts an optional 2nd argument, which should be an array of additional symbols that should be considered as already used.

* Dataset#requires_placeholder_type_specifiers? has been added to check if the dataset requires you use type specifiers for bound variable placeholders. The prepared_statements plugin now checks this setting and works correctly on adapters that set it to true, such as oracle.

* Dataset#recursive_cte_requires_column_aliases? has been added to check if you must provide a column list for a recursive CTE.
  The rcte_tree plugin now checks this setting and works correctly on databases that set it to true, such as Oracle and HSQLDB.

= Performance Improvements

* Numerous optimizations were made to loading model objects from the database, resulting in a 7-16% speedup. Model.call was added, and now .load is just an alias for .call. This allows you to make the model dataset's row_proc the model itself, instead of needing a separate block, which improves performance. While Model.load used to call .new (and therefore #initialize), Model.call uses .allocate/#set_values/#after_initialize for speed. This saves a method call or two, and skips setting the @new instance variable.

* Dataset#map, #to_hash, #select_map, #select_order_map, and #select_hash are now faster if any of the provided arguments are an array of symbols.

* The Model.[] optimization is now applied in more cases.

= Other Improvements

* Sequel now creates accessor methods for all columns in a model's table, even if the dataset doesn't select the columns. This has been the specified behavior for a while, but the spec was broken. This allows you to do:

    Model.dataset = DB[:table].select(:column1, :column2)
    Model.select_more(:column3).first.column3

* Model.def_dataset_method now correctly handles method names that can't be used directly (such as method names with spaces). This isn't so the method can be used with arbitrary user input, but it will allow safe creation of dataset methods that are derived from column names, which could contain spaces.

* Model.def_dataset_method no longer overrides private model methods.

* The optimization that Model.[] uses now works correctly if the model's dataset uses a different identifier_input_method than the database.

* Sharding is supported correctly by default for the transactions used by model objects. Previously, you had to use the sharding plugin to make sure the same shard was used for transactions as for the insert/update/delete statements.

* Sequel now fully supports using an aliased table for the :join_table option of a many_to_many association. The only real use case for an aliased :join_table option is when the join table is the same as the associated model table.

* A bug when eagerly loading a many_through_many association with composite keys where one of the join tables requires an alias has been fixed.

* Sequel's transaction internals have had substantial improvements. You can now open up simultaneous transactions on two separate shards of the same Database object in the same thread. The new design allows for future support of connection pools that aren't based on threads. Sequel no longer abuses thread-local variables to store savepoint state.

* Dataset#select_map and #select_order_map now return an array of single element arrays if given an array with a single entry as an argument. Previously, they returned an array of values, which wasn't consistent.

* Sequel's emulation of bitwise operators with more than 2 arguments now works on all adapters that use the emulation. The emulation was broken in 3.28.0 when more than 2 arguments were used on H2, DB2, Microsoft SQL Server, PostgreSQL, and SQLite.

* Dataset#columns now correctly handles the emulated offset support used on DB2, Oracle, and Microsoft SQL Server when using the jdbc, odbc, ado, and dbi adapters. Previously, Dataset#columns could contain the row number column, which wasn't in the hashes yielded by Dataset#each.

* Sequel can now parse primary key information on Microsoft SQL Server.
  Previously, the only adapter that supported this was the jdbc adapter, which uses the generic JDBC support. The shared mssql adapter now supports parsing the information directly from the database system tables. This means that if you are using Model objects with a Microsoft SQL Server database using the tinytds, odbc, or ado adapters, the model primary key information will be set automatically.

* Sequel's prepared statement support no longer defines singleton methods on the prepared statement objects.

* StringMethods#like is now case sensitive on SQLite and Microsoft SQL Server, making it more similar to other databases.

* Sequel now works around an SQLite column naming bug if you select columns qualified with the alias of a subselect without providing an alias for the column itself.

* Sequel now handles more bound variable types when using bound variables outside of prepared statements on SQLite.

* Sequel now works around a bug in certain versions of the JDBC/SQLite driver when emulating alter table support for operations such as drop_column.

* Sequel now emulates the add_constraint and drop_constraint alter table operations on SQLite, though the emulation has issues.

* Sequel now correctly handles composite primary keys when emulating alter_table operations on SQLite.

* Sequel now applies the correct PRAGMA statements by default when connecting to SQLite via the amalgalite and swift adapters.

* Sequel now supports using savepoints inside prepared transactions on MySQL.

* Sequel now closes JDBC ResultSet objects as soon as it is done using them, leading to potentially lower memory usage in the JDBC adapter, and fixes issues if you try to drop a table before GC has collected a related ResultSet.

* Sequel can now correctly insert all default values into a table on DB2. Before, this didn't work correctly if the table had more than one column.

* Another type of disconnection error is now recognized in the mysql2 adapter.

* Sequel now uses better error messages if you attempt to execute a prepared statement without a name using the postgres, mysql, and mysql2 adapters.

* Some small fixes have been made that allow Sequel to run better when $SAFE=1. However, Sequel is not officially supported using $SAFE > 0, so there could be many issues remaining.

* Sequel's core and model specs were cleaned up by using the mock adapter to eliminate a lot of redundant code.

* Sequel's integration tests were sped up considerably, halving the execution time on some adapters.

= Backwards Compatibility

* Because Model.load is now an alias for .call, plugins should no longer override load. Instead, they should override .call.

* Loading model objects from the database no longer calls Model#initialize. Instead, it calls Model.allocate, Model#set_values, and Model#after_initialize. So if you were overriding #initialize and expecting the changes to affect model objects loaded from the database, you need to change your code. Additionally, @new is no longer set to false for objects retrieved from the database, since setting it to false hurts performance. Model#new? still returns true or false, so this only affects you if you are checking the instance variables directly.

* Dataset#<< no longer returns the autogenerated primary key for the inserted row. As mentioned above, it now returns self to allow for chaining. If you were previously relying on the return value, switch from #<< to #insert.
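  For example (hypothetical table and column names):

    # 3.28 and earlier: #<< returned the autogenerated primary key
    id = DB[:items] << {:name=>'foo'}

    # 3.29: #<< returns self for chaining, so use #insert to get
    # the autogenerated primary key
    id = DB[:items].insert(:name=>'foo')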
* Dataset#map no longer calls the row_proc if given an argument, and Dataset#to_hash no longer calls the row_proc if given two arguments. This should only affect your code if you were using a row_proc that modified the content of the hash (e.g. Model#after_initialize). If you were relying on the old behavior, switch: dataset.map(:foo) # to dataset.map{|r| r[:foo]} dataset.to_hash(:foo, :bar) # to h = {} dataset.each{|r| h[r[:foo]] = r[:bar]} h * Model classes now need to have a dataset before you can define associations on them. * Model classes now pass their dataset to Database#schema, instead of their table name. * The :eager_block association option (which defaults to the association's block argument) is now called before the :eager_graph association option has been applied, instead of after. * The many_to_many association reflection :qualified_right_key entry is now a method named qualified_right_key. Switch any code using association_reflection[:qualified_right_key] to use association_reflection.qualified_right_key. * If you are using like on SQLite and Microsoft SQL Server and want it to be case insensitive, switch to using ilike: # Case sensitive DB[:foos].where(:name.like('bar%')) # Case insensitive DB[:foos].where(:name.ilike('bar%')) Sequel now sets the case_sensitive_like PRAGMA to true by default on SQLite. To set it to false instead, pass the :case_sensitive_like=>false option to the database when creating it. * Sequel's alter table emulation on SQLite now renames the current table then populates the replacement table, instead of populating the replacement table at a temporary name, dropping the current table, and then renaming the replacement table. * The strings 'n' and 'no' (case insensitive) when typecasted to boolean are now considered false values instead of true. * The transaction internals had extensive changes, if you have any code that depended on the transaction internals, it will probably require changes. * Using the Sequel::MySQL module settings for convert_tinyint_to_bool and convert_invalid_date_time now only affects future Database objects. You should switch to using the per-Database methods if you are currently using the Sequel::MySQL module methods. * The customized transaction support in the do (DataObjects) adapter was removed. All three subadapters (postgres, mysql, sqlite) of the do adapter implement their own transaction support, so this should have no effect unless you were using the do adapter with a different database type. * The oracle support changed dramatically, so if you were relying on the internals of the oracle support, you should take extra care when upgrading. = Advance Notice * The next release of Sequel will contain significant changes to how a dataset is literalized into an SQL string. If you have a custom plugin, extension, or adapter that overrides a method containing "literal", "sql", or "quote", or you make other modifications or extensions to how Sequel currently literalizes datasets to SQL, your code will likely need to be modified to support the next release. sequel-5.63.0/doc/release_notes/3.3.0.txt000066400000000000000000000167501434214120600177100ustar00rootroot00000000000000New Features ------------ * An association_proxies plugin has been added. 
This is not a full-blown proxy implementation, but it allows you to write code such as:

    artist.albums.filter{num_tracks > 10}

  Without the plugin, you have to call filter specifically on the association's dataset:

    artist.albums_dataset.filter{num_tracks > 10}

  The plugin works by proxying array methods to the array of associated objects, and all other methods to the association's dataset. This results in the following behavior:

    # Will load the associated objects (unless they are already
    # cached), and return the length of the array
    artist.albums.length

    # Will issue an SQL query with COUNT (even if the association
    # is already cached), and return the result
    artist.albums.count

* The add_*/remove_*/remove_all_* association methods now take additional arguments that are passed down to the _add_*/_remove_*/_remove_all_* methods. One of the things this allows you to do is update additional columns in join tables for many_to_many associations:

    class Album < Sequel::Model
      many_to_many :artists

      def _add_artist(artist, values={})
        DB[:albums_artists].
          insert(values.merge(:album_id=>id, :artist_id=>artist.id))
      end
    end

    album = Album[1]
    artist1 = Artist[2]
    artist2 = Artist[3]
    album.add_artist(artist1, :relationship=>'composer')
    album.add_artist(artist2, :relationship=>'arranger')

* The JDBC adapter now accepts a :convert_types option to turn off Java type conversion. The option is true by default for backwards compatibility and correctness, but can be set to false to double performance. The option can be set at the database and dataset levels:

    DB = Sequel.jdbc('jdbc:postgresql://host/database', :convert_types=>false)
    DB.convert_types = true
    ds = DB[:table]
    ds.convert_types = false

* Dataset#from_self now takes an option hash and respects an :alias option, giving the table alias to use.

* Dataset#unlimited was added, similar to unfiltered and unordered.

* SQL::WindowFunction is now a subclass of SQL::GenericExpression, so you can alias it and treat it like any other SQL::Function.

Other Improvements
------------------

* Microsoft SQL Server support is much, much better in Sequel 3.3.0 than in previous versions. Support is pretty good with the ODBC, ADO, and JDBC adapters, close to the level of support for PostgreSQL, MySQL, SQLite, and H2. Improvements are too numerous to list, but here are some highlights:

  * Dataset#insert now returns the primary key (identity field), so it can be used easier with models.

  * Transactions can now use savepoints (except on ADO).

  * Offsets are supported when using SQL Server 2005 or 2008, using a ROW_NUMBER window function. However, you must specify an order for your dataset (which you probably are already doing if you are using offsets).

  * Schema parsing has been implemented, though it doesn't support primary key parsing (except on JDBC, since the JDBC support is used there).

  * The SQL syntax Sequel uses is now much more compatible, and most schema modification methods and database types now work correctly.

* The ADO and ODBC adapters both work much better now. The ADO adapter no longer attempts to use transactions, since I've found that ADO does not give a stable native connection (and hence transactions weren't possible). I strongly recommend against using the ADO adapter in production.

* The H2 JDBC subadapter now supports rename_column, set_column_null, set_column_type, and add_foreign_key.

* Altering a column's type, null status, or default is now supported on SQLite. You can also add primary keys and unique columns.
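  A brief sketch of the newly supported SQLite alterations (the table and columns here are hypothetical):

    DB.alter_table(:items) do
      set_column_type :price, Float            # change the type
      set_column_allow_null :name, false       # change null status
      set_column_default :active, true         # change the default
      add_column :code, String, :unique=>true  # add a unique column
    end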
* Both the ADO and ODBC adapters now catch the native exception classes and raise Sequel::DatabaseErrors. * Model classes now default to associating to other classes in the same scope. This makes it easier to use namespaced models. * The schema parser and schema dumper now support the following types: nchar, nvarchar, ntext, smalldatetime, smallmoney, binary, and varbinary. * You can now specify the null status for a column using :allow_null in addition to :null. This is to make it easier to use the table creation methods with the results of the schema parser. * Renaming a NOT NULL column without a default now works on MySQL. * Model class initialization now raises an exception if there is a problem connecting to the database. * Connection pool performance has been increased slightly. * The literal_time method in the ODBC adapter has been fixed. * An unlikely but potential bug in the MySQL adapter has been fixed. Backwards Compatibility ----------------------- * The convert_tinyint_to_bool setting moved from the main Sequel module to the Sequel::MySQL module. The native MySQL adapter is the only adapter that converted tinyint columns to booleans when the rows are returned, so you can only use the setting with the native MySQL adapter. Additionally, the setting's behavior has changed. When parsing the schema, now only tinyint(1) columns are now considered as boolean, instead of all tinyint columns. This allows you to use tinyint(4) columns for storing small integers and tinyint(1) columns as booleans, and not have the schema parsing support consider the tinyint(4) columns as booleans. Unfortunately, due to limitations in the native MySQL driver, all tinyint column values are converted to booleans upon retrieval, not just tinyint(1) column values. Unfortunately, the previous Sequel behavior was to use the default tinyint size (tinyint(4)) when creating boolean columns (using the TrueClass or FalseClass generic types). If you were using the generic type support to create the columns, you should modify your database to change the column type from tinyint(4) to tinyint(1). If you use MySQL with tinyint columns, these changes have the potential to break applications. Care should be taken when upgrading if these changes apply to you. * Model classes now default to associating to other classes in the same scope. It's highly unlikely anyone was relying on the previous behavior, but if you have a model inside a module that you are associating to a model outside of a module, you now need to specify the associated class using the :class option. * Model#save no longer includes the primary key fields in the SET clause of the UPDATE query, only in the WHERE clause. I'm not sure if this affects backwards compatibility of production code, but it can break tests that expect specific SQL. * Behavior to handle empty identifiers has now been standardized. If any database adapter returns an empty identifier, Sequel will use 'untitled' as the identifier. This can break backwards compatibility if the adapter previously used another default and you were relying on that default. This was necessary to fix any possible "interning empty string" exceptions. * On MSSQL, Sequel now uses the datetime type instead of the timestamp type for generic DateTimes. It now uses bit for the TrueClass and FalseClass generic types, and image for the File generic type. 
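  To illustrate the generic type mapping described above (a hypothetical table definition):

    DB.create_table(:attachments) do
      DateTime :uploaded_at # datetime on MSSQL (previously timestamp)
      TrueClass :processed  # bit on MSSQL
      File :data            # image on MSSQL
    end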
* Sequel now unescapes URL parts: Sequel.connect(ado:///db?host=server%5cinstance) However, this can break backward compatibility if you previously expected it not to be unescaped. * The columns_for private SQLite Database method has been removed. sequel-5.63.0/doc/release_notes/3.30.0.txt000066400000000000000000000121061434214120600177570ustar00rootroot00000000000000= Dataset Literalization Refactoring * As warned about in the 3.29.0 release notes, dataset literalization has been completely refactored. It now uses an append-only design which is faster in all cases, about twice as fast for large objects and deeply nested structures, and over two orders of magnitude faster in some pathological cases. This change should not affect applications, but may affect custom extensions or adapters that dealt with literalization of objects. Most literalization methods now have a method with an _append suffix that does the actual literalization, which takes the sql string to append to as the first argument. If you were overriding a literalization method, you now probably need to override the _append version instead. If you have this literalization method: def foo_sql(bar) "BAR #{literal(bar.baz)}" end You need to change the code to: def foo_sql_append(sql, bar) sql << "BAR " literal_append(sql, bar.baz) end def foo_sql(bar) sql = "" foo_sql_append(sql, bar) sql end If you have questions about modifying your custom adapter or extension, please ask on the Google Group or the IRC channel. = New Features * Model#set_server has been added to the base support (it was previously only in the sharding plugin), which allows you to set the shard on which to save/delete the model instance: foo1.set_server(:server_a).save foo2.set_server(:server_a).destroy * Model#save now accepts a :server option that uses set_server to set the shard to use. Unlike most other #save options, this option persists past the end of the save. Previously, the :server option only affected the transaction code, it now affects the INSERT/UPDATE statement as well. * When initiating a new dataset graph, any existing selected columns is assumed to be the columns to select for the graph from the current/master table. Before, there was not a way to specify the columns to select from the current/master table. * A :graph_alias_base association option has been added, which is used to set the base alias name to use when eager graphing. This is mostly useful when cascading eager graphs to dependent associations, where multiple associations with the same name in different models are being graphed simultaneously. * You can now specify nanoseconds and a timezone offset when converting a hash or array to a timestamp. The nanoseconds and offset are the 7th and 8th entries in the array, and the :nanos and :offset entry in the hash. * The postgres adapter now respects a :connect_timeout option if you are using the pg driver. = Other Improvements * Type conversion of Java to Ruby types in the JDBC adapter has been made much faster, as conversion method lookup is now O(number of columns) instead of O(number of columns*number of rows). * Sequel::SQL::Blob literalization is now much faster on adapters that use hex encoding, by switching to String#unpack('H*'). * Database#after_commit and after_rollback now respect the :server option to set the server/shard to use. * Symbol splitting (e.g. for table__column) is now slightly faster. * All adapters now pass the dataset :limit/:offset value through Dataset#literal instead of using it verbatim. 
Note that Dataset#limit already called to_i on input strings, so this isn't a security issue. However, the previous code broke if you provided a Sequel-specific object (e.g. Sequel::SQL::Function) as the :limit/:offset value.

* Calling graph and eager_graph on an already graphed dataset no longer modifies the receiver.

* Model#set_server now correctly handles the case where @this is already loaded.

* Dataset#destroy for model datasets now uses the dataset's shard for transactions.

* When emulating offset support using ROW_NUMBER (on Microsoft SQL Server, DB2, and Oracle), explicitly order by the ROW_NUMBER result, as otherwise the results are not guaranteed to be ordered.

* Explicitly force a case insensitive collation when emulating ILIKE on Microsoft SQL Server. Previously, ILIKE could be case sensitive on Microsoft SQL Server if case sensitive collation was the database default.

* Using on_duplicate_key_update with prepared statements on MySQL now works correctly.

* The tinytds adapter now works correctly if the identifier_output_method is nil.

* The plugin/extension specs were cleaned up using the mock adapter.

= Backwards Compatibility

* In addition to the previously mentioned dataset literalization changes, any custom adapters that overrode *_clause_methods methods need to be modified to add a method that adds the SELECT/UPDATE/INSERT/DELETE. Previously, this was done by default, but due to common table expressions and the dataset literalization changes, a separate method is now needed.

* Dataset#on_duplicate_key_update_sql has been removed from the shared mysql adapter.

* The :columns dataset option used when inserting is no longer literalized in advance.

* Dataset#as_sql no longer takes an expression; it just takes the alias, and only adds the alias part.

sequel-5.63.0/doc/release_notes/3.31.0.txt

= New Features

* The serialization plugin can now support custom serialization formats, by supplying a serializer/deserializer pair of callable objects. You can also register custom deserializers via Sequel::Plugins::Serialization.register_format, so that they can be referenced by name. Example:

    Sequel::Plugins::Serialization.register_format(:reverse,
      lambda{|v| v.reverse}, lambda{|v| v.reverse})

    class User < Sequel::Model
      serialize_attributes :reverse, :password
    end

* Dataset#import and #multi_insert now support a :return=>:primary_key option. When this option is used, the methods return an array of primary key values, one for each inserted row. Usage of this option on MySQL requires that a separate query be issued per row (instead of the single query for all rows that MySQL would usually use).

* PostgreSQL can now use Dataset#returning in conjunction with import/multi_insert to set a custom column to return.

* Microsoft SQL Server can now use Dataset#output in conjunction with import/multi_insert to set a custom column to return.

* Dataset#import and #multi_insert now respect a :server option to set the server/shard on which to execute the queries. Additionally, options given to this method are also passed to Dataset#transaction.

* Dataset#insert_multiple now returns an array of inserted primary keys.

* Model.def_column_alias has been added to make it easy to create alias methods for columns. This is useful if you have a legacy database and want to create friendly method names for the underlying columns. Note that this alias only affects the setter and getter methods.
This does not affect the dataset level, so you still need to use the actual column names in dataset filters. * many_to_one associations can now have the same name as the related foreign key column, using the :key_column option. Use of this feature is not recommended, as it is much better to either rename the column or rename the association. Here's an example of usage: # Example schema: # albums artists # :id /--> :id # :artist --/ :name # :name class Album < Sequel::Model def_column_alias(:artist_id, :artist) many_to_one :artist, :key_column=>:artist end * The mock adapter can now mock out database types, by providing a shared adapter name as the host (e.g. mock://postgres). This emulation is not perfect, but in most cases it allows you to see what SQL Sequel would generate on a given database without needing to install the required database driver. * Sequel now supports creating full text indexes on Microsoft SQL Server. Before using it, you must have previously setup a default full text search catalog, and you need to provide a :key_index option with an index name symbol. * Dataset#group_rollup and #group_cube methods have been added for GROUP BY ROLLUP and GROUP BY CUBE support. These features are in a recent SQL standard, and they are supported to various degrees on Microsoft SQL Server, DB2, Oracle, MySQL, and Derby. * Dataset#full_text_search on Microsoft SQL Server now supports multiple search terms. * The jdbc adapter now supports a :login_timeout option, giving the timeout in seconds. = Other Improvements * Dataset#exists can now be used with prepared statement placeholders. * Dataset#full_text_search can now be used with prepared statement placeholders on PostgreSQL, MySQL, and Microsoft SQL Server. * If tables from two separate schema are detected when parsing the schema for a table on PostgreSQL, an error is now raised. Previously, no error was raised, which led to weird errors later, such as duplicate columns in a model's primary_key. * RETURNING is now supported with UPDATE/DELETE on PostgreSQL 8.2+. Previously, Sequel only supported it on 9.1+, but PostgreSQL introduced support for it in 8.2. * The shared postgres adapter now correctly handles the return value for Dataset#insert if you provide a separate column array and value array on PostgreSQL < 8.2. * Handle case in the PostgreSQL adapter where the server version cannot be determined via a query. * H2 clob types are now treated as string instead of as blob. Treating clob as blob breaks on H2, as it doesn't automatically hex-unescape the input for clobs as it does for blobs. * Dataset#empty? now works correctly when the dataset has an offset and offset support is being emulated. * The mock adapter no longer defaults to downcasing identifiers on output. = Backwards Compatibility * Dataset#exists now returns a PlaceholderLiteralString instead of a LiteralString, which could potentially break some code. If you would like a String returned, you can pass the returned object to Dataset#literal: dataset.literal(dataset.exists) * Dataset#from no longer handles :a__b__c___d as "a.b.c AS d". This was not the intended behavior, and nowhere else in Sequel is a symbol treated that way. Now, Dataset#from is consistent with the rest of Sequel, using "a.b__c AS d". This should only affect people in very rare cases, as most databases don't use three level qualified tables. One exception is Microsoft SQL Server, which can use three level qualified tables for cross-database access. 
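  To illustrate the change with the symbol mentioned above:

    DB[:a__b__c___d]
    # 3.30 and earlier: SELECT * FROM a.b.c AS d
    # 3.31:             SELECT * FROM a.b__c AS d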
* Previously, Dataset#insert_multiple returned an array of hashes, now it returns an array of primary key values. * Dataset#EXRACT_CLOSE in the shared sqlite adapter has been renamed to Dataset#EXTRACT_CLOSE. * Dataset::StoredProcedureMethods::SQL_QUERY_TYPE and Dataset::ArgumentMapper::SQL_QUERY_TYPE constants have been removed, as have related sql_query_type private methods. * The serialization plugin was significantly refactored. Model.serialization_map now contains a callable object instead of a Symbol, and Model.serialization_format has been removed. Model.define_serialized_attribute_accessors private method now takes two callable objects before the columns, instead of a single symbol. sequel-5.63.0/doc/release_notes/3.32.0.txt000066400000000000000000000173241434214120600177700ustar00rootroot00000000000000= New Features * Prepared statements now support :map and :to_hash prepared statement types. The main reason for this is that certain extensions (e.g. sequel_pg) optimize map/to_hash calls, and there previously was not a way to use prepared statements with the map/to_hash optimizations. * Sequel.empty_array_handle_nulls has been added to change how IN/NOT IN operations with an empty array are handled. See the Backwards Compatibility section for details. * 5 new association options have been added that allow you to define associations where the underlying columns clash with standard ruby method names: many_to_one :primary_key_method one_to_many :key_method one_to_many :primary_key_column many_to_many :left_primary_key_column many_to_many :right_primary_key_method Using these new options, you can now define associations that work correctly when the underlying primary/foreign key columns clash with existing ruby method names. See the RDoc for details. * A use_after_commit_rollback setting has been added to models. This defaults to true, but can be set to false for performance or to allow models to be used in prepared transactions (which don't support after_commit/after_rollback). * Dataset#update_ignore has been added when connecting to MySQL, enabling use of the UPDATE IGNORE syntax to skip updating a row if the update would cause a unique constraint to be violated. * Database#indexes is now supported when connecting to Microsoft SQL Server. * On Microsoft SQL Server, the :include option is now supported when creating indexes, for storing column values in the index, which can be used by the query optimizer. = Other Improvements * The filtering/excluding by associations code now uses qualified identifiers instead of unqualified identifiers, which allows it to avoid ambiguous column names if you are doing your own joins. * Virtual row blocks that return arrays are now handled correctly in Dataset#select_map/select_order_map. * Dataset#select_map/select_order_map can now take both a block argument as well as a regular argument. * Dataset#select_order_map now handles virtual row blocks that return ordered expressions. * Database#table_exists? should no longer generate false negatives if you only have permission to retrieve some column values but not all. Note that if you lack permission to SELECT from the table itself, table_exists? can still generate false negatives. * The active_model plugin now supports ActiveModel 3.2, by adding support for to_partial_path. * The serialization_modification_detection plugin now handles changed_columns correctly both for new objects and after saving objects. 
* The serialization plugin now clears the deserialized values when it does the automatic refresh after saving a new object, mostly for consistency. You can use the skip_create_refresh plugin to skip refreshing when creating a new model object. * Column default values are now wrapped in parentheses on SQLite, which fixes some cases such as when the default is an SQL function call. * Alter table emulation now works correctly on SQLite when foreign keys reference the table being altered. The emulation requires a renaming/deleting the existing table and creating a new table, which can break foreign key references. Sequel now disables the foreign key PRAGMA when altering tables, so SQLite won't track the table renames and break the foreign key relationships. * The set_column_type table alteration method no longer modifies default values and NULL/NOT NULL settings on Microsoft SQL Server, H2, and SQLite. * On MySQL, Time/DateTime columns now use the timestamp type if the default value is Sequel::CURRENT_TIMESTAMP, since it is currently impossible for MySQL to have a non-constant default for a datetime column (without using a workaround like a trigger). * Metadata methods such as tables, views, and view_exists? are now handled correctly on Oracle if custom identifier input methods are used. * Sequel now ignores errors that occur when attempting to get information on column defaults in Oracle (which can happen if you lack permission to the appropriate table). Previously, such errors would cause the schema parser to raise an error, now, the schema information is just returned without default information. * Database#indexes now skips the primary key index when connecting to DB2, Derby, HSQLDB, and Oracle via the jdbc adapter. * Database#indexes now works correctly on DB2. * The progress adapter has been fixed, it had been broken since the dataset literalization refactoring. * Dataset#naked! now works correctly. Previously, it just returned the receiver unmodified. * Dataset#paginate! has been removed, as it was broken. * The query extension no longer breaks Dataset#clone if an argument is not given. * Transaction related queries are no longer logged twice in the mock adapter. = Backwards Compatibility * Sequel's default handling of NOT IN operators with an empty array of values has changed, which can change which rows are returned for such queries. Previously, Sequel was inconsistent in that it tried to handle NULL values correctly in the IN case, but not in the NOT IN case. Now, it defaults to handling NULL values correctly in both cases: # 3.31.0 DB[:a].where(:b=>[]) # SELECT * FROM a WHERE (b != b) DB[:a].exclude(:b=>[]) # SELECT * FROM a WHERE (1 = 1) # 3.32.0 DB[:a].where(:b=>[]) # SELECT * FROM a WHERE (b != b) DB[:a].exclude(:b=>[]) # SELECT * FROM a WHERE (b = b) The important change in behavior is that in the NOT IN case, if the left hand argument is NULL, the filter returns NULL instead of true. This has the potential to change query results. "Correct" here is really an opinion and not a fact, as there are valid arguments for the alternative behavior: DB[:a].where(:b=>[]) # SELECT * FROM a WHERE (1 = 0) DB[:a].exclude(:b=>[]) # SELECT * FROM a WHERE (1 = 1) The difference is that the "correct" NULL behavior is more consistent with the non-empty array cases. 
For example, if b is NULL:

    # "Correct" NULL handling
    # Empty array: where(:b=>[])
    WHERE (b != b) # NULL
    WHERE (b = b) # NULL
    # Non-empty array: where(:b=>[1, 2])
    WHERE (b IN (1, 2)) # NULL
    WHERE (b NOT IN (1, 2)) # NULL

    # Static boolean handling
    # Empty array: where(:b=>[])
    WHERE (1 = 0) # false
    WHERE (1 = 1) # true
    # Non-empty array: where(:b=>[1, 2])
    WHERE (b IN (1, 2)) # NULL
    WHERE (b NOT IN (1, 2)) # NULL

Sequel chooses to default to behavior consistent with the non-empty array cases (similar to SQLAlchemy). However, there are two downsides to this handling. The first is that some databases with poor optimizers (e.g. MySQL) might do a full table scan with the default syntax. The second is that the static boolean handling may be generally preferable, if you believe that IN/NOT IN with an empty array should always be true or false and never NULL even if the left hand argument is NULL. As there really isn't a truly correct answer in this case, Sequel defaults to the "correct" NULL handling, and allows you to switch to the static boolean handling via:

    Sequel.empty_array_handle_nulls = false

This is currently a global setting; it may be made Database or Dataset specific later if requested. Also, it is possible the default will switch in the future, so if you care about a specific handling, you should set your own default.

* Database#table_exists? now only rescues Sequel::DatabaseErrors instead of StandardErrors, so it's possible it will raise errors instead of returning false on custom adapters that don't wrap their errors correctly.

sequel-5.63.0/doc/release_notes/3.33.0.txt

= New Features

* A server_block extension has been added that makes Sequel's sharding support easier to use by scoping database access inside the block to a given server/shard:

    Sequel.extension :server_block
    DB.extend Sequel::ServerBlock

    DB.with_server(:shard_1) do
      # All of these will execute against shard_1
      DB.tables
      DB[:table].all
      DB.run 'SOME SQL'
    end

* An arbitrary_servers extension has been added that extends Sequel's sharding support so that you can use arbitrary connection options instead of referencing an existing, predefined server/shard:

    Sequel.extension :arbitrary_servers
    DB.pool.extend Sequel::ArbitraryServers

    DB[:table].server(:host=>'foo', :database=>'bar').all

  You can use this extension in conjunction with the server_block extension:

    DB.with_server(:host=>'foo', :database=>'bar') do
      DB.synchronize do
        # All of these will execute on host foo, database bar
        DB.tables
        DB[:table].all
        DB.run 'SOME SQL'
      end
    end

  The combination of these two extensions makes it pretty easy to write a thread-safe Rack middleware that scopes each request to an arbitrary database.

* The sqlite adapter now supports an integer_booleans setting for using 1/0 for true/false values, instead of the 't'/'f' values used by default. As SQLite recommends using integers to store booleans, converting your existing database and enabling this setting is recommended, but for backwards compatibility it is set to false. You can convert your existing database by doing the following for each table/column that has booleans:

    DB[:table].update(:boolean_column=>{'t'=>1}.case(0, :boolean_column))

  The integer_booleans default setting may change in a future version of Sequel, so you should set it manually to false if you prefer the current default.

* You can now disable transaction use in migrations, in one of two ways.
  You generally only need to do this if you are using an SQL query inside a migration that is specifically not supported inside a transaction, such as CREATE INDEX CONCURRENTLY on PostgreSQL. The first way to disable transactions is on a per-migration basis by calling the no_transaction method inside the Sequel.migration block:

    Sequel.migration do
      no_transaction
      change do
        # ...
      end
    end

  That will make it so that a transaction is not used for that particular migration. The second way is passing the :use_transactions=>false option when calling Migrator.run (using the API), which will completely disable transactions for all migrations during the migrator run.

* The postgres adapter now respects an :sslmode option when using pg as the underlying driver, you can set the value of this option to disable, allow, prefer, or require.

* Database#create_schema and #drop_schema are now defined when connecting to PostgreSQL.

* Database#supports_savepoints_in_prepared_transactions? has been added for checking if savepoints are supported inside prepared transactions. This is true if both savepoints and prepared transactions are both supported, except on MySQL > 5.5.12 (due to MySQL bug 64374).

= Other Improvements

* The mysql and mysql2 adapters now both provide an accurate number of rows matched, so Sequel::Model usage on those adapters will now raise a NoExistingObject exception by default if you attempt to delete or update an instance that no longer exists in the database.

* Foreign key creation now works correctly without specifying the :key option when using MySQL with the InnoDB table engine. InnoDB requires that you list the column explicitly, even if you are referencing the primary key of the table, so if the :key option is not given, the database schema is introspected to find the primary key for the table. If you are attempting to create a table with a self-referential foreign key, it introspects the generator to get the primary key for the table.

* The sqlite adapter will now return 1/0 stored in boolean columns as true/false. It will convert dates stored as Integers/Floats to Date objects by assuming they represent the julian date. It will convert times stored as Integers/Floats to Sequel::SQLTime objects by assuming they represent a number of seconds. It will convert datetimes stored as Integers by assuming they represent a unix epoch time integer, and datetimes stored as Floats by assuming they represent the julian date (with fractional part representing the time of day). These changes make Sequel handle SQLite's recommendations for boolean/date/time storage.

* The instance_hooks plugin's (before|after)_*_hook methods now return self so they can be used in a method chain.

* The list plugin now automatically adds new entries to the end of the list when creating the entries, if the position field is not specifically set.

* An identifier_output_method is now respected in the mysql2 adapter.

* NaN/Infinity Float values are now quoted correctly for input on PostgreSQL, and the postgres adapter correctly handles them on retrieval from the database.

* The :collate column option is now respected when creating tables or altering columns on MySQL.

* You can now force use of the TimestampMigrator when the IntegerMigrator would be used by default by calling TimestampMigrator.apply or .run (see the sketch below).

* Mock adapter usage with a specific SQL dialect now uses the appropriate defaults for quoting identifiers.

* You can now disable the use of sudo in the rake install/uninstall tasks using the SUDO='' environment variable.
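  Minimal sketches of the migrator APIs mentioned in this list (the migration directory path is hypothetical):

    Sequel.extension :migration

    # Disable transactions for all migrations during this run:
    Sequel::Migrator.run(DB, 'db/migrations', :use_transactions=>false)

    # Force the TimestampMigrator even for integer-numbered migration
    # files, which would normally use the IntegerMigrator:
    Sequel::TimestampMigrator.run(DB, 'db/migrations')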
* A very misleading error message has been fixed when attempting to constantize an invalid string in the model inflector. = Backwards Compatibility * The sqlite adapter now typecasts columns that SQLite stores as INTEGER/REAL. Previously, it only typecasted columns that SQLite stored as TEXT/BLOB. For details about SQLite storage, see http://www.sqlite.org/datatype3.html. Any custom type conversion procs used with the sqlite adapter should be modified to work with Integer/Float objects in addition to String objects. sequel-5.63.0/doc/release_notes/3.34.0.txt000066400000000000000000000650731434214120600177760ustar00rootroot00000000000000= New PostgreSQL Extensions * A pg_array extension has been added, supporting PostgreSQL's numeric and string array types. Both single dimensional and multi-dimensional array types are supported. Array values are returned as instances of Sequel::Postgres::PGArray, which is a delegate class of Array. You can turn an existing array into a PGArray using Array#pg_array. If you are using arrays in model objects, you need to load support for that: DB.extend Sequel::Postgres::PGArray::DatabaseMethods This makes schema parsing and typecasting of array columns work correctly. This extension also allows you to use PGArray objects and arrays in bound variables when using the postgres adapter with pg. * A pg_hstore extension has been added, supporting PostgreSQL's hstore type, which is a simple hash with string keys and string or NULL values. hstore values are retrieved as instances of Sequel::Postgres::HStore, which is a delegate class of Hash. You can turn an existing hash into an hstore using Hash#hstore. If you are using hstores in model objects, you need to load support for that: DB.extend Sequel::Postgres::HStore::DatabaseMethods This makes schema parsing and typecasting of hstore columns work correctly. This extension also allows you to use HStore objects and hashes in bound variables when using the postgres adapter with pg. * A pg_array_ops extension has been added, making it easier to call PostgreSQL array operators and functions using plain ruby code. Examples: a = :array_column.pg_array a[1] # array_column[1] a[1][2] # array_column[1][2] a.push(1) # array_column || 1 a.unshift(1) # 1 || array_column a.any # ANY(array_column) a.join # array_to_string(array_column, '', NULL) If you are also using the pg_array extension, you can turn a PGArray object into a query object, which allows you to run operations on array literals: a = [1, 2].pg_array.op a.push(3) # ARRAY[1,2] || 3 * A pg_hstore_ops extension has been added, making it easier to call PostgreSQL hstore operators and functions using plain ruby code. Examples: h = :hstore_column.hstore h['a'] # hstore_column -> 'a' h.has_key?('a') # hstore_column ? 'a' h.keys # akeys(hstore_column) h.to_array # hstore_to_array(hstore_column) If you are also using the pg_hstore extension, you can turn an HStore object into a query object, which allows you to run operations on hstore literals: h = {'a' => 'b'}.hstore.op h[a] # '"a"=>"b"'::hstore -> 'a' * A pg_auto_parameterize extension has been added for automatically using bound variables for all queries. For example, it can take code such as: DB[:table].where(:column=>1) and do: SELECT * FROM table WHERE column = $1; -- [1] Note that automatically parameterizing queries is not generally faster unless the bound variables are large (i.e. long text/bytea values). 
  Also, there are multiple corner cases when automatically parameterizing queries, though most can be worked around by adding explicit casts.

* A pg_statement_cache extension has been added that works with the pg_auto_parameterize extension for automatically caching prepared statements and reusing them when using the postgres adapter with pg. The combination of these two extensions makes it possible to take an entire Sequel application and turn most or all of the queries into prepared statements.

  Note that these two extensions do not necessarily improve performance. For simple queries, they actually hurt performance. They do help for complex queries, but in all cases, it's faster to use Sequel's prepared statements API manually.

= Other New Extensions

* A query_literals extension has been added that makes the select, group, and order methods operate similarly to the filter methods, in that if they are given a regular string as their first argument, they treat it as a literal string, with additional arguments, if any, used as placeholder values. This extension allows you to write code such as:

    DB[:table].select('a, b, ?', 2).group('a, b').order('c')

    # Without query_literals:
    # SELECT 'a, b, ?', 2 FROM table GROUP BY 'a, b' ORDER BY 'c'

    # With query_literals:
    # SELECT a, b, 2 FROM table GROUP BY a, b ORDER BY c

  Sequel's default handling in this case is to use literal strings, which is generally not desired and on some databases not even valid syntax. In general, you'll probably want to use this extension for all of a database's datasets, which you can do via:

    Sequel.extension :query_literals
    DB.extend_datasets(Sequel::QueryLiterals)

  The next major version of Sequel (4.0.0) will probably integrate this extension into the core library.

* A select_remove extension has been added that adds Dataset#select_remove, for removing selected columns/expressions from a dataset:

    ds = DB[:table] # Assume table has columns a, b, and c

    ds.select_remove(:c)
    # SELECT a, b FROM table

    # Removal by column alias
    ds.select(:a, :b___c, :c___b).select_remove(:c)
    # SELECT a, c AS b FROM table

    # Removal by expression
    ds.select(:a, :b___c, :c___b).select_remove(:c___b)
    # SELECT a, b AS c FROM table

  This method makes it easier to select all columns except for the columns given. This is common in cases where a table has a few large columns that are expensive to retrieve. This method does have some corner cases, so read the documentation before using it.

* A schema_caching extension has been added that makes it possible for Database instances to dump the cached schema metadata to a marshalled file, and load the cached schema metadata from the file. This can be significantly faster than reparsing the schema from the database, especially for databases with high latency.

  bin/sequel -S has been added to dump the schema for the given database to a file, and DB.load_schema_cache(filename) can be used to populate the schema cache inside your application. This should be done after creating the Database object but before loading your model files.

  Note that Sequel does no checking to ensure that the cached schema currently reflects the state of the database. That is up to the application.

* A null_dataset extension has been added, which adds Dataset#nullify for creating a dataset that will not issue a database query. It implements the null object pattern for datasets, and is probably most useful in methods that must return a dataset, but can determine that such a dataset will never return a row.
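  For example, with the null_dataset extension loaded, a nullified dataset responds to the usual dataset methods without issuing queries (a minimal sketch; the table name is illustrative):

    ds = DB[:table].nullify
    ds.each{|row| p row} # block is never called, no query issued
    ds.all               # => []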
= New Plugins

* A static_cache plugin has been added, allowing you to cache a model statically. This plugin is useful for models whose tables do not change while the application is running, such as lookup tables. When using this plugin, the following methods will no longer require queries:

  * Primary key lookups (e.g. Model[1])
  * Model.all calls
  * Model.each calls
  * Model.map calls without an argument
  * Model.to_hash calls without an argument

  The statically cached model instances are frozen so they are not accidentally modified.

* A many_to_one_pk_lookup plugin has been added that changes the many_to_one association retrieval code to do a simple primary key lookup on the associated class in most cases. This results in significantly better performance, especially if the associated model is using a caching plugin (either caching or static_cache).

= Core Extension Replacements

* Most of Sequel's core extensions now have equivalent methods defined on the Sequel module:

    :column.as(:alias)        -> Sequel.as(:column, :alias)
    :column.asc               -> Sequel.asc(:column)
    :column.desc              -> Sequel.desc(:column)
    :column.cast(Integer)     -> Sequel.cast(:column, Integer)
    :column.cast_numeric      -> Sequel.cast_numeric(:column)
    :column.cast_string       -> Sequel.cast_string(:column)
    :column.extract(:year)    -> Sequel.extract(:year, :column)
    :column.identifier        -> Sequel.identifier(:column)
    :column.ilike('A%')       -> Sequel.ilike(:column, 'A%')
    :column.like('A%')        -> Sequel.like(:column, 'A%')
    :column.qualify(:table)   -> Sequel.qualify(:table, :column)
    :column.sql_subscript(1)  -> Sequel.subscript(:column, 1)
    :function.sql_function(1) -> Sequel.function(:function, 1)
    'some SQL'.lit            -> Sequel.lit('some SQL')
    'string'.to_sequel_blob   -> Sequel.blob('string')
    {:a=>1}.case(0)           -> Sequel.case({:a=>1}, 0)
    {:a=>1}.sql_negate        -> Sequel.negate(:a=>1)
    {:a=>1}.sql_or            -> Sequel.or(:a=>1)
    [[1, 2]].sql_value_list   -> Sequel.value_list([[1, 2]])
    [:a, :b].sql_string_join  -> Sequel.join([:a, :b])
    ~{:a=>1}                  -> Sequel.~(:a=>1)
    :a + 1                    -> Sequel.+(:a, 1)
    :a - 1                    -> Sequel.-(:a, 1)
    :a * 1                    -> Sequel.*(:a, 1)
    :a / 1                    -> Sequel./(:a, 1)
    :a & 1                    -> Sequel.&(:a, 1)
    :a | 1                    -> Sequel.|(:a, 1)

* You can now wrap any object in a Sequel expression using Sequel.expr. This is similar to the sql_expr extension, but without defining the sql_expr method on all objects:

    1.sql_expr -> Sequel.expr(1)

  The sql_expr extension now just has Object#sql_expr call Sequel.expr.

* Virtual Rows now have methods defined that handle the standard mathematical operators:

    select{|o| o.+(1, :a)} # SELECT (1 + a)

  the standard inequality operators:

    where{|o| o.>(2, :a)} # WHERE (2 > a)

  and the standard boolean operators:

    where{|o| o.&({:a=>1}, o.~(:b=>1))} # WHERE ((a = 1) AND (b != 1))

  Additionally, there is now direct support for creating literal strings in instance_evaled virtual row blocks using `:

    where{a > `some crazy SQL`} # WHERE (a > some crazy SQL)

  This doesn't override Kernel.`, since virtual rows use a BasicObject subclass. Previously, using ` would result in calling the SQL function named ` with the given string, which probably isn't valid syntax on most databases.

* You can now require 'sequel/no_core_ext' to load Sequel without the core extensions. The previous way of setting the SEQUEL_NO_CORE_EXTENSIONS constant or environment variable before loading Sequel still works.

* The core extensions have been moved from Sequel's core library into an extension that is loadable with Sequel.extension. This extension is still loaded by default for backwards compatibility.
  However, the next major version of Sequel will no longer load this extension by default (though it will still be available to load manually).

* You can now check if the core extensions have been loaded by using Sequel.core_extensions?.

= Foreign Keys in the Schema Dumper

* Database#foreign_key_list has been added that gives an array of foreign key constraints on the table. It is currently implemented on MySQL, PostgreSQL, and SQLite, and may be implemented on other database types in the future.

  Each entry in the return array is a hash, with at least the following keys present:

  :columns :: An array of columns in the given table
  :table :: The table referenced by the columns
  :key :: An array of columns referenced (in the table specified by :table), but can be nil on certain adapters if the primary key is referenced.

  The hash may also contain entries for:

  :deferrable :: Whether the constraint is deferrable
  :name :: The name of the constraint
  :on_delete :: The action to take ON DELETE
  :on_update :: The action to take ON UPDATE

* The schema_dumper extension now dumps foreign key constraints on databases that support Database#foreign_key_list.

  On such databases, dumping a schema migration will dump the tables in topological order, such that referenced tables always come before referencing tables. In case there is a circular dependency, Sequel breaks the dependency and adds separate foreign key constraints at the end of the migration. However, when a circular dependency is broken, the migration can probably not be migrated down.

  Foreign key constraints can also be dumped as a separate migration using Database#dump_foreign_key_migration, similar to how Database#dump_indexes_migration works.

* When using bin/sequel -C to copy databases, foreign key constraints are now copied if the source database supports Database#foreign_key_list.

= Other New Features

* Dataset#to_hash_groups and #select_hash_groups have been added. These methods are similar to #to_hash and #select_hash in that they return a hash, but hashes returned by *_hash_groups methods have arrays of all matching values, unlike the *_hash methods which just use the last matching value. Example:

    DB[:table].all
    # => [{:a=>1, :b=>2}, {:a=>1, :b=>3}, {:a=>2, :b=>4}]

    DB[:table].to_hash(:a, :b)
    # => {1=>3, 2=>4}

    DB[:table].to_hash_groups(:a, :b)
    # => {1=>[2, 3], 2=>[4]}

* Model#set_fields and #update_fields now accept :missing=>:skip and :missing=>:raise options, allowing them to be used in more cases. :missing=>:skip skips missing entries in the hash, instead of setting the field to the default hash value. :missing=>:raise raises an error for missing fields, similar to strict_param_setting = true. It's recommended that these options be used in new code in preference to #set_only and #update_only.

* Database#drop_table? has been added, for dropping tables if they already exist. This uses DROP TABLE IF EXISTS on the databases that support it. Database#supports_drop_table_if_exists? has been added for checking whether the database supports that syntax.

* Database#create_join_table has been added that allows easy creation of many_to_many join tables:

    DB.create_join_table(:album_id=>:albums, :artist_id=>:artists)

  This uses real foreign keys for both of the columns, uses a composite primary key of both of the columns, and adds an additional composite index of the columns in reverse order. The primary key and additional index should ensure that almost all operations on the join table can benefit from an index.
  In terms of customization, the values in the hash can be hashes themselves for column specific options, and an additional options hash can also be given to override some of the default settings.

  Database#drop_join_table also exists and takes the same options as create_join_table. It mostly exists to make it easy to reverse migrations that use create_join_table.

* Model#freeze has been added that freezes a model such that it works correctly in a read-only state. Before, it used the standard Object#freeze, which broke some things that should work, and allowed changes that shouldn't be allowed (like modifying the instance's values).

* ConnectionPool#all_connections has been added, which yields each available connection in the pool to the block. For threaded pools, it does not yield connections that are currently being used by other threads. When using this method, it is important to only operate on the yielded connection objects, and not make any modifications to the pool itself. The pool is also locked until the method returns.

* ConnectionPool#after_connect= has been added, allowing you to change a connection pool's after_connect proc after instantiating the pool.

* ConnectionPool#disconnection_proc= has been added, allowing you to change a connection pool's disconnection_proc after instantiating the pool.

* A Model.cache_anonymous_models accessor has been added, and can be set to false to disable the caching of classes created by Sequel::Model(). This caching is only useful if you want to reload the model's file without getting a superclass mismatch. This setting is true by default for backwards compatibility, but may be changed to false in a later version, so you should manually set it to true if you are using code reloading.

* Model.instance_dataset has been added for getting the dataset used for model instances (a naked dataset restricted to a single row).

* Dataset#with_sql_delete has been added for running the given SQL string as a delete and returning the number of rows modified. It's designed as a replacement for with_sql(sql).delete, which is slower as it requires cloning the dataset.

* The :on_update and :on_delete entries for foreign_key now accept string arguments which are used literally.

* Prepared statement objects now have a log_sql accessor that can be turned on to log the entire SQL statement instead of just the prepared statement name.

* Dataset#multi_replace has been added on MySQL. This is similar to multi_insert, but uses REPLACE instead of INSERT.

* Dataset#explain has been added to MySQL. You can use an :extended=>true option to use EXPLAIN EXTENDED.

* A Database#type_supported? method has been added on PostgreSQL to check if the database supports the given type:

    DB.type_supported?(:hstore)

* Database#reset_conversion_procs has been added to the postgres adapter, for use by extensions that modify the default conversion procs and want to have the database use the updated defaults.

* A Database#convert_infinite_timestamps accessor has been added to the postgres adapter, allowing you to return infinite timestamps as nil, a string, or a float.

* SQL::PlaceholderLiteralString objects can now use a placeholder array, where placeholder values are inserted between array elements. This is about 2.5-3x faster than using a string with ? placeholders, and allows usage of ? inside the array:

    Sequel.lit(["(", " ? ", ")"], 1, 2) # (1 ? 2)
* SQL::Subscript#[] has been added for accessing members of a multi-dimensional array:

    Sequel.subscript(:column, 1)[2][3] # column[1][2][3]

* SQL::Wrapper has been added for wrapping arbitrary objects in a Sequel expression object.

* SQL::QualifiedIdentifier objects can now contain arbitrary Sequel expressions. Before, they could only contain a few expression types. This makes it easier to add extensions to support PostgreSQL row-valued types.

= Performance Improvements

* Model.[] when called with a primary key has been made about 110% faster for most models by avoiding cloning datasets.

* Model.[] when called without arguments or with a single nil argument is much faster, as it now returns nil immediately instead of issuing a database query.

* Model#delete and Model#destroy have been made about 75% faster for most models by using a static SQL string.

* Model.new is now twice as fast when passed an empty hash.

* Model#set is now four times as fast when passed an empty hash.

* Model#this has been made about 85% faster by reducing the number of dataset clones needed from 3 to 1.

* Some proc activations have been removed, giving minor speedups when running on MRI.

= Other Improvements

* Database#uri and #url now return the connection string given to Sequel.connect. Previously, they tried to reconstruct the url using the database's options, but that didn't work well in corner cases.

* Database#inspect now shows the URL and/or options given when connecting to the database. Previously, it showed the URL, or all of the database's options if constructing the URL raised an error.

* Sequel no longer checks for prepared transactions support when using transactions unless a prepared transaction is specifically requested.

* The schema utility dataset cached in the Database object is now reset if you use Database#extend_datasets, ensuring that the new value will use the given extension.

* The prepared_statements* plugins now log the full SQL by default. Since the user doesn't choose the name of the prepared statements, it was often difficult to determine what SQL was actually run if you were only looking at a subsection of the SQL log.

* The nested_attributes plugin's delete/remove support now works correctly when a false value is given for _delete/_remove and strict_param_setting is true.

* The hook_class_methods and validation_class_methods plugins now work correctly when subclassing, if the subclass attempts to create instances inside Model.inherited.

* The caching plugin has been refactored. Model.cache_get_pk and cache_delete_pk have been added for retrieving/deleting from the cache by primary key. Model.cache_key is now a public method.

* The typecast_on_load plugin now works correctly when saving new model objects when insert_select is supported.

* In the sql_expr extension, nil.sql_expr is no longer treated as a boolean value. It is now treated as a value with generic type.

* The postgres adapter no longer issues a query to map type names to type oids if no named conversion procs have been registered.

* The postgres adapter now works around issues in ruby-pg by supporting fractional seconds for Time/DateTime values, and supporting SQL::Blob (bytea) values with embedded "\0" characters.

* The postgres adapter now supports pre-defining the PG_NAMED_TYPES and PG_TYPES constants. This is so extensions can define them, so they don't have to load the postgres adapter file first.
  If extensions need to use these constants, they should do:

    PG_NAMED_TYPES = {} unless defined?(PG_NAMED_TYPES)
    PG_TYPES = {} unless defined?(PG_TYPES)

  That way they work whether they are loaded before or after the postgres adapter.

* Sequel now correctly adds the RETURNING clause when building queries on PostgreSQL 8.2-9.0. Sequel 3.31.0 added support for returning values from delete/update queries in PostgreSQL 8.2-9.0, but didn't change the literalization code to use the RETURNING clause on those versions.

* The jdbc/postgres adapter now converts Java arrays (Java::OrgPostgresqlJdbc4::Jdbc4Array) to ruby arrays.

* Tables and schemas with embedded ' characters are now handled correctly when parsing primary keys and sequences on PostgreSQL.

* Identifiers are now escaped on MySQL and SQLite. Previously they were quoted, but internal ` characters were not doubled.

* Fractional seconds for the time type are now returned correctly on jdbc (assuming they are returned as java.sql.Time values by JDBC).

* Multiple changes were made to ensure that Sequel works correctly when the core extensions are not loaded.

* Composite foreign key constraints are now retained when emulating alter_table operations on SQLite. Previously, only single foreign key constraints were retained.

* An error is no longer raised when no indexes exist when calling Database#indexes on jdbc/sqlite.

* A possible SystemStackError has been fixed in the SQLite adapter, when trying to delete a dataset that uses a having clause and no where clause.

* ROLLUP/CUBE support now works correctly on Microsoft SQL Server 2005.

* Unsigned tinyint types are now recognized in the schema dumper.

* Using primary_key :column, :type=>Bignum now works correctly on H2. Previously, the column created was not autoincrementing.

* Using a bound variable for a limit is now supported in the ibmdb adapter on ruby 1.9.

* Connecting to PostgreSQL via the swift adapter has been fixed when using newer versions of swift.

* The mock adapter now handles calling the Database#execute methods directly (instead of via a dataset).

* The mock adapter now supports executing adapter-specific initialization code for each shared adapter. This has been used to fix some bugs when using the shared postgres adapter.

* The pretty_table extension has been split into two extensions, one that adds a method to Dataset and one that just adds the PrettyTable class. Also, PrettyTable.string has been added to get a string copy of the table.

* A spec_model_no_assoc task has been added for running model specs without the association plugin loaded. This is to check that the SEQUEL_NO_ASSOCIATIONS setting works correctly.

= Deprecated Features to be Removed in Sequel 3.35.0

* Ruby <1.8.7 support is now deprecated.

* PostgreSQL <8.2 support is now deprecated.

* Dataset#disable_insert_returning on PostgreSQL is now deprecated. Starting in 3.35.0, RETURNING will now always be used to get the primary key value when inserting.

* Array#all_two_pairs? is now deprecated. It was part of the core extensions, but the core extensions have been refactored to no longer require it. As it doesn't specifically relate to creating Sequel expression objects, it is being removed. The private Array#sql_expr_if_all_two_pairs method is deprecated as well.

= Other Backwards Compatibility Issues

* The generic Bignum type now uses bigint on SQLite, similar to other databases. The integer type was previously used.
  The only exception is for auto incrementing primary keys, which still use integer for Bignum, as SQLite doesn't support autoincrementing columns other than integer.

* On SQLite, Dataset#explain now returns a string, similar to PostgreSQL (and now MySQL).

* When using the JDBC adapter, Java::OrgPostgresqlUtil::PGobject objects are converted to ruby strings if the dataset is set to convert types (the default setting). This is to support the hstore extension, but it could have unforeseen effects if custom types were used.

* For PostgreSQL connection objects, #primary_key and #sequence now require their arguments are provided as already literalized strings. Note that these methods are being removed in the next version, because they will not be needed after PostgreSQL <8.2 support is dropped.

* Database#uri and #url now return a string or nil, but never raise an exception. Previously, they would either return a string or raise an exception.

* The Model @simple_pk and @simple_table instance variables should no longer be modified directly. Instead, the setter methods should be used.

* Model.primary_key_lookup should no longer be called with a nil value.

* Logging of prepared statements on some adapters has been changed slightly, so log parsers might need to be updated.

* Dataset#identifier_append and #table_ref_append no longer treat literal strings and blobs specially. Previously, they were treated as identifiers.

* Dataset#qualified_identifier_sql_append now takes 3 arguments, so any extensions that override it should be modified accordingly.

* Some internally used constants and private methods have been deleted:

    Database::CASCADE
    Database::NO_ACTION
    Database::SET_DEFAULTS
    Database::SET_NULL
    Database::RESTRICT
    Dataset::COLUMN_ALL

  or moved:

    MySQL::Dataset::AFFECTED_ROWS_RE -> MySQL::Database
    MySQL::Dataset#affected_rows -> MySQL::Database

* The sql_expr extension no longer creates the Sequel::SQL::GenericComplexExpression class.

sequel-5.63.0/doc/release_notes/3.35.0.txt000066400000000000000000000132461434214120600177700ustar00rootroot00000000000000

= New Features

* A dirty plugin has been added, which saves the initial value of the column when the column is changed, similar to ActiveModel::Dirty:

    artist.name                   # => 'Foo'
    artist.name = 'Bar'
    artist.initial_value(:name)   # 'Foo'
    artist.column_change(:name)   # ['Foo', 'Bar']
    artist.column_changes         # {:name => ['Foo', 'Bar']}
    artist.column_changed?(:name) # true
    artist.reset_column(:name)
    artist.name                   # => 'Foo'
    artist.column_changed?(:name) # false
    artist.update(:name=>'Bar')
    artist.column_changes         # => {}
    artist.previous_changes       # => {:name=>['Foo', 'Bar']}

* Database#create_table now respects an :as option to create a table based on the results of a query. The :as option value should either be an SQL string or a dataset.

    DB.create_table(:new_foos, :as=>DB[:foos].where(:new=>true))

* The json_serializer and xml_serializer plugins can now serialize arbitrary arrays of model objects by passing an :array option to the to_json class method. This works around an issue in ruby's JSON library where Array#to_json does not pass arguments given to it to the members of the array.

    Artist.to_json(:array=>[Artist[1]], :include=>:albums)

* You can now use the % (modulus) operator in the same way you can use the bitwise operators in Sequel:

    :column.sql_number % 1 # (column % 1)

* On PostgreSQL, you can now provide :only, :cascade, and :restart options to Dataset#truncate to use ONLY, CASCADE, and RESTART IDENTITY.
  Additionally, you can now truncate multiple tables at the same time:

    DB.from(:table1, :table2).truncate(:cascade=>true)

* The :index option when creating columns in the schema generator can now take a hash of index options:

    DB.create_table(:foo){Integer :bar, :index=>{:unique=>true}}

* A Database#cache_schema accessor has been added. It can be set to false to have the Database never cache schema results. This can be useful in Rails development mode, so that you don't need to restart a running server to have models pick up the new schema.

* Database#log_exception has been added for easier instrumentation. It is called with the exception and SQL query string for all queries that raise an exception.

* The Sequel.migration DSL now has a transaction method that forces transaction use for the given migration.

= Other Improvements

* Many theoretical thread-safety issues have been fixed for ruby implementations that don't use a global interpreter lock. Previously, Sequel relied on MRI's global interpreter lock for part of its thread safety; it now does manual locking in more places to avoid thread-safety issues on JRuby (and other ruby implementations without a global interpreter lock).

  No Sequel user ever reported a production error related to the previous thread-safety issues, and most of the issues fixed were so difficult to hit that even tests specifically designed to raise errors were unable to do so.

* Sequel.single_threaded = true now disables the mutex synchronization that enforces thread safety, for additional performance in single threaded mode.

* Sequel's migrators now only attempt to use transactions by default if the underlying database supports transactional DDL. SQLite does support transactional DDL, but Sequel will not use transactions for SQLite migrations, as it causes issues when emulating alter_table operations for tables with foreign keys.

* Errors that occur when rolling back database transactions are now handled correctly. Previously, the underlying exception was raised, it wasn't correctly wrapped in a Sequel::DatabaseError, and if it was due to a database disconnection, the connection wasn't removed from the pool.

* Sequel no longer sets ruby instance variables on java objects, fixing warnings on JRuby 1.7 and attempting to be forward compatible with JRuby 2.0.

* Sequel now uses date and timestamp formats that are multilanguage and not DATEFORMAT dependent on Microsoft SQL Server.

* Sequel now correctly escapes backslash-carriage return-line feed on Microsoft SQL Server.

* Parsing the column default values in the oracle adapter no longer requires database superuser privileges.

* Sequel now correctly handles parsing schema for tables in other databases on MySQL. Previously, it would always look in the current database.

* Sequel no longer doubles backslashes in strings by default. It now only does so on MySQL, since that is the only database that appears to use backslashes for escaping. This fixes issues with backslashes being doubled on some of the less commonly used adapters.

* The pg_auto_parameterize extension now works correctly when using cursors.

* Dataset#truncate now raises an Error if you attempt to do so on a dataset that uses HAVING. Previously, it only checked for WHERE.

* The schema dumper now recognizes the identity type.

= Backwards Compatibility

* Association reflections now store cached information in a separate subhash due to the thread-safety changes.
  Any code accessing an association reflection should always call the related method to get the cached data instead of checking for a specific location in the hash.

* Association reflection internals for many_through_many associations changed significantly, so any code that accesses the edge information in the reflection will need to be changed to use the new methods instead of accessing the old values directly.

* The features deprecated in 3.34.0 have now been removed:

  * Ruby <1.8.7 support
  * PostgreSQL <8.2 support
  * Dataset#disable_insert_returning on PostgreSQL
  * Array#all_two_pairs? and #sql_expr_if_all_two_pairs

sequel-5.63.0/doc/release_notes/3.36.0.txt000066400000000000000000000257441434214120600177750ustar00rootroot00000000000000

= New Features

* An eager_each plugin has been added, which automatically makes eagerly loaded datasets do eager loading if you call #each (or another Enumerable method) instead of #all.

  By default, if you call #each on an eager dataset, it will not do eager loading, and if you call #each on an eager_graph dataset, you will get plain hashes with columns from all joined tables instead of model objects. With this plugin, #each on both eager and eager_graph datasets will do eager loading.

* The nested attributes plugin now supports composite primary keys in associated records. Additionally, it now deals better with natural primary keys in associated records. There is a new :unmatched_pk option that can be set to :create if you want to create new associated records when the input hash contains primary key information that doesn't match one of the existing associated objects.

  The nested attributes plugin now also supports a :transform option. If given, this option is called with the parent object and the input hash given for each associated record passed into the nested attributes setter. The callable should return the hash of attributes to use.

* Model#from_json in the json_serializer plugin now takes an options hash and recognizes the :fields option. If the :fields option is given, it should be an array of field names, and set_fields is called with the array instead of using set. This allows you to easily filter which fields in the hash are set in the model instance. The entire options hash is also passed to set_fields if :fields is present, so you can additionally use the :missing => :raise or :missing => :skip options that set_fields supports.

* The Dataset#to_json method in the json_serializer plugin now respects :root=>:collection and :root=>:instance options. If :root=>:collection is given, only the collection is wrapped in a hash, and if :root=>:instance is given, only the instances are wrapped in a hash. For backwards compatibility, both the instances and collection are wrapped in a hash:

    Model.to_json(:root=>true)
    # {"models":[{"model":{"id":1}}]}

    Model.to_json(:root=>:collection)
    # {"models":[{"id":1}]}

    Model.to_json(:root=>:instance)
    # [{"model":{"id":1}}]

  Wrapping both the collection and instance in a root by default is probably an undesired behavior, so the default for :root=>true may change in the next major version of Sequel. Users who want the current behavior should switch to using :root=>:both.

* The schema_dumper extension now respects an :index_names option when dumping. This option can be set to false to never dump the index names. It can also be set to :namespace, in which case if the database does not have a global index namespace, it will automatically prefix the name of the index with the name of the table. Database#global_index_namespace? was added to check if the database uses a global index namespace.
  If false, index names are probably namespaced per table (MySQL, MSSQL, Oracle).

* :each is now a valid prepared statement type. This prepared statement type requires a block when you call the statement, and iterates over the records of the statement a row at a time. Previously, there wasn't a way to iterate over the records of a prepared statement a row at a time, since the :select and :all types collect all rows into an array before iterating over them.

* A :connection_handling=>:queue option is now respected for database objects, and changes the threaded connection pools to use a queue instead of a stack as the data structure for storing available connections. A queue does not perform as well as a stack, but reduces the likelihood of stale connections.

  It is possible that Sequel will change in the future from using a stack by default to using a queue by default, so any users who specifically desire a stack to be used should specify the :connection_handling=>:stack option.

* Sequel::Migrator now supports an is_current? class method to check if there are no outstanding migrations to apply. It also supports a check_current class method, which raises an exception if there are outstanding migrations to apply.

* A pg_json extension has been added, supporting PostgreSQL's 9.2 json type, similarly to the pg_array and pg_hstore extensions. Note that with the current PostgreSQL json code, the root object can be a string or number, but ruby's json library requires the root json value to be an object or array. So you will probably get an exception if you attempt to retrieve a PostgreSQL json value that ruby's JSON library won't parse.

* A pg_inet extension has been added, which automatically typecasts PostgreSQL inet and cidr types to ruby IPAddr objects on retrieval.

* Database#transaction on PostgreSQL now recognizes :read_only and :deferrable options, and can use them to set the READ ONLY and DEFERRABLE transaction flags. A :synchronous option is also recognized, which can be set to true, false, :local, or :remote_write, and sets the value of synchronous_commit just for that transaction.

* When adding and dropping indexes on PostgreSQL, a :concurrently option can be used to create or drop the index CONCURRENTLY, which doesn't require a full write table lock.

* When dropping indexes on PostgreSQL, :if_exists and :cascade options are now recognized.

* When using alter_table set_column_type on PostgreSQL, the :using option is respected, and can be used to force a specific conversion from the previous value to the new value with the USING syntax.

* On MySQL, you can now set an :sql_mode option when connecting. This can be a string or symbol or an array of them, and each should match one of MySQL's sql_modes. MySQL's default SQL mode is fairly loose, and using one of the strict sql modes is recommended, but for backwards compatibility, Sequel will not set a specific SQL mode by default. However, that may change in the next major version of Sequel, so to be forwards compatible you should set :sql_mode=>nil if you do not desire a strict SQL mode to be set automatically.

* Partial indexes are now supported on Microsoft SQL Server 2008 (SQL Server refers to them as filtered indexes). Attempting to use a partial index on an earlier version of SQL Server will result in the database raising an exception.

* A jdbc/progress adapter has been added, supporting the Progress database via the jdbc adapter.
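  As a brief illustration of a few of the PostgreSQL features above (the items table and name column are hypothetical):

    # Create an index without taking a full write table lock
    DB.add_index(:items, :name, :concurrently=>true)

    # Drop an index concurrently, only if it already exists
    DB.drop_index(:items, :name, :concurrently=>true, :if_exists=>true)

    # Run a read-only, deferrable transaction
    DB.transaction(:read_only=>true, :deferrable=>true){DB[:items].count}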
= Other Improvements

* Dataset#get now works correctly if you pass it a nil or false argument. Previously, it ignored the argument and used the block instead. If you want to use the block argument, you should not pass in a regular argument.

* Database#call now passes any blocks given to it to the underlying prepared statement object. Before, a passed block was ignored.

* Sequel::Model.db is no longer set automatically when creating an anonymous class with an associated database object. This fixes cases where a library would create namespaced models, and the database used by the library would be set as the default for the user's application code.

* Model *_to_one association setters are now no-ops if you pass a value that is the same as the cached value. This fixes issues with reciprocal associations getting reordered, and is better for performance.

  For cases where the old behavior is desired, the set_associated_object_if_same? method can be overridden to return true for object. If you are manually setting objects in the associations cache before calling the setter method, you may want to set that.

* The dirty plugin no longer affects the return value of refresh and lock!. Internal changes should now help ensure that plugins don't affect the return values of these methods.

* Sequel now supports JRuby 1.7's new exception handling, fixing exception handling when connecting in the jdbc adapter.

* When dumping unsigned integer types in the schema dumper, if the unsigned values could overflow a 32-bit signed integer type, the generic Bignum class is used as the type. This should fix issues when copying a database containing an unsigned 32-bit integer column with values between 2^31 and 2^32-1.

* In the optimistic_locking plugin, attempting to refresh and save after a failed save now works correctly. Before, the second save would never modify a row.

* Time types on jdbc/postgres are now typecast accurately on retrieval; before, they could be off by up to a millisecond due to floating point issues.

* Disconnect detection in the mysql2 adapter has been improved.

* The jdbc/mysql, do/mysql, and swift/mysql adapters all now support the :timeout option to set the MySQL wait_timeout.

* Savepoints in prepared transactions are now supported on MySQL 5.5.23+, since the bug that caused them to be unsupported starting in 5.5.13 has been fixed.

* Parsing foreign key metadata for tables with an explicit schema now works correctly on PostgreSQL.

* bin/sequel -C now namespaces indexes automatically when copying from a database without a global index namespace to a database with a global index namespace.

* Indexes are now dropped in the reverse order that they were added in the schema_dumper.

* The Model typecasting code works around bugs in objects where object.==('') would raise an exception instead of returning false.

* A better error message is used if an invalid JDBC URL is provided and the JDBC driver's new.connect method returns NULL.

* A document describing Sequel's object model has been added, describing the objects Sequel uses to represent SQL concepts.

* Most adapter specific options to Database methods are now mentioned in the main Database method RDoc.

= Backwards Compatibility

* The nested_attributes plugin internals changed significantly. If you were overriding one of the nested_attributes* private methods and calling super to get the default behavior, you may have to update your code.

* Database#case_sensitive_like has been removed on SQLite.
  This method never worked correctly; it always returned false, even if the case_sensitive_like PRAGMA was set. That's because SQLite doesn't offer a getter for this PRAGMA, only a setter. Note that Database#case_sensitive_like= still exists and works correctly.

* Database#single_value has been removed from the native SQLite adapter. This method was designed for internal use, and hasn't been used for some time. Any current users of the method should switch to Dataset#single_value.

* The private Database#defined_columns_for method in the SQLite adapter no longer takes an options hash.

* A couple jdbc/postgres adapter methods are now private. Previously, the jdbc/postgres adapter overrode some private superclass methods but left the methods public.

* When using the optimistic_locking plugin, refreshing inside a before_update method after calling super will now result in the lock checking being skipped.

* The private Model#_refresh no longer returns self, so external plugins should no longer rely on that behavior.

sequel-5.63.0/doc/release_notes/3.37.0.txt000066400000000000000000000340371434214120600177730ustar00rootroot00000000000000

= New Features

* Database#extension and Dataset#extension have been added and make it much easier to use extensions that just define modules, where you previously had to manually extend a Database or Dataset object with the module to get the extension's behavior. These methods operate similarly to model plugins, where you just specify the extension symbol, except that you can specify multiple extensions at once:

    DB.extension(:pg_array, :pg_hstore)

  For databases, these modify the Database itself (and potentially all of its datasets). Dataset#extension operates like other dataset methods, returning a modified clone of the dataset with the extension added:

    dataset = dataset.extension(:columns_introspection)

  Dataset#extension! has also been added for modifying the receiver instead of returning a clone.

  Not all extensions are usable by Database#extension or Dataset#extension; the extension has to have specific support for it. The following extensions support both Database#extension and Dataset#extension:

  * columns_introspection
  * query_literals
  * split_array_nil

  The following extensions support just Database#extension:

  * arbitrary_servers
  * looser_typecasting
  * pg_array
  * pg_auto_parameterize
  * pg_hstore
  * pg_inet
  * pg_interval
  * pg_json
  * pg_range
  * pg_statement_cache
  * server_block

  Any user that was loading these extensions with Sequel.extension and then manually extending objects with the extension's module is encouraged to switch to Database#extension and/or Dataset#extension.

* Dataset join methods now respect a :qualify=>:deep option to do deep qualification of expressions, allowing qualification of subexpressions in the expression tree. This can allow you to do things like:

    DB[:a].join(:b, {:c.cast(Integer)=>:d.cast(Integer)},
                :qualify=>:deep)
    # SELECT * FROM a INNER JOIN b
    #   ON (CAST(b.c AS INTEGER) = CAST(a.d AS INTEGER))

  For backwards compatibility, by default Sequel will only do automatic qualification if the arguments are simple symbols. This may change in a future version; if automatic qualification of only symbols is desired, switch to using :qualify=>:symbol. You can also choose to do no automatic qualification using the :qualify=>false option.

* All of Sequel's model associations now work with key expressions that are not simple column references, without creating a fully custom association.
  So you can create associations where the primary/foreign key values are stored in PostgreSQL array or hstore columns, for example.

* The pg_array extension has now been made more generic, so that it is easy to support array types for any scalar type that is currently supported. All scalar types that Sequel's postgres adapter supports now have corresponding array types supported in the pg_array extension. So if you load the pg_array extension and return a date array column, the returned values will be arrays of ruby Date objects.

  Other pg_* extensions that add support for PostgreSQL-specific scalar types now support array versions of those types if the pg_array extension is loaded first.

* A pg_range extension has been added, making it easy to deal with PostgreSQL 9.2+'s range types. As ruby's Range class does not support all PostgreSQL range type values (such as empty ranges, unbounded ranges, or ranges with an exclusive beginning), range types are returned as instances of Sequel::Postgres::PGRange, which has an API similar to Range. You can turn a PGRange into a Range using PGRange#to_range, assuming that the range type value does not use features that are incompatible with ruby's Range class.

  The pg_range extension supports all range types supported by default in PostgreSQL 9.2, and makes it easy to support custom range types.

* A pg_range_ops extension has been added, which adds DSL support for PostgreSQL range operators and functions, similar to the pg_array_ops and pg_hstore_ops extensions.

* A pg_interval extension has been added, which makes Sequel return PostgreSQL interval types as instances of ActiveSupport::Duration. This is useful if you want to take the interval value and use it in calculations in ruby (assuming you load the appropriate parts of ActiveSupport).

* A split_array_nil extension has been added, which changes how Sequel compiles IN/NOT IN expressions with arrays with nil values.

    where(:col=>[1, nil])
    # Default:
    #   WHERE (col IN (1, NULL))
    # with split_array_nil extension:
    #   WHERE ((col IN (1)) OR (col IS NULL))

    exclude(:col=>[1, nil])
    # Default:
    #   WHERE (col NOT IN (1, NULL))
    # with split_array_nil extension:
    #   WHERE ((col NOT IN (1)) AND (col IS NOT NULL))

* The nested_attributes plugin now allows the :fields option to be a proc, which is called with the associated object and should return an array of allowable fields.

* You can now specify the graph alias base when using eager_graph on a per-call basis. Previously, it could only be set on a per association basis. This is helpful if you have multiple associations to the same class, and are cascading the eager graph to dependent associations of that class for both of the associations.

  Previously, there was no way to manually give descriptive names to the tables in the cascaded associations, but you can now do so by passing the association as an Sequel::SQL::AliasedExpression instance instead of a plain Symbol. Here's a usage example:

    ds = Game.eager_graph(:winner=>:players.as(:winning_players),
                          :loser=>:players.as(:losing_players)).
      where(:winning_players__name=>'A', :losing_players__name=>'B')

* many_through_many associations now differentiate between column references and method references, by supporting the :left_primary_key_column and :right_primary_key_method options that many_to_many associations support.
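  As a hedged sketch of that last point (the model, join tables, and column names here are hypothetical), the new option lets you separate the method Sequel calls on the instance from the column it references in SQL:

    class Album < Sequel::Model
      plugin :many_through_many
      many_through_many :tags,
        [[:albums_artists, :album_id, :artist_id],
         [:artists_tags, :artist_id, :tag_id]],
        :left_primary_key=>:id_value,  # method called on Album instances
        :left_primary_key_column=>:id  # column referenced in the SQL
    end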
* Custom :eager_loader procs that accept a single hash argument now have an additional entry passed in the hash, :id_map, which is easier to use than the :key_hash entry (which is still present for backwards compatibility). Anyone with custom :eager_loader procs is encouraged to switch from using :key_hash to :id_map.

* You can now override the create_table/alter_table schema generators per database/adapter. This allows for database specific generator subclasses, which have methods for unique features for that database.

* You can now set up exclusion constraints on PostgreSQL using the create_table and alter_table schema generators:

    DB.create_table(:t) do
      ...
      exclude([[:col1, '&&'], [:col2, '=']])
      # EXCLUDE USING gist (col1 WITH &&, col2 WITH =)
    end

  One common use for exclusion constraints is to make sure that no two rows have overlapping values/ranges/circles.

* When adding foreign key constraints to an existing table on PostgreSQL, you can use the :not_valid option to mark the constraint as not yet valid. This will make it so that future changes to the table need to respect the foreign key constraint, but existing rows do not. After cleaning up the existing data, you can then use the alter_table validate_constraint method to mark the constraint as valid.

* An eval_inspect extension has been added that attempts to do the following for Sequel::SQL::Expression instances:

    eval(obj.inspect) == obj # => true

  There are a lot of cases that this extension does not handle, but it does a decent job in most cases. This is currently only used internally in a specific case in the schema_dumper extension.

= Other Improvements

* The filter by associations support now respects the method reference vs column reference distinction that other parts of the association code have respected since 3.32.0.

* In the nested_attributes plugin, new one_to_one associated values are saved once instead of twice. Previously it attempted to save them before they were associated to the current model object, which can violate some validations/constraints.

* When saving an associated object in the one_to_one association setter method, Sequel no longer adds an unnecessary filter condition when nullifying the foreign key for existing rows in the associated table.

* The list plugin's before_create method now calls super, which fixes usage when other plugins that define before_create are loaded before it.

* In the pg_array extension, when typecasting an Array to PGArray, a recursive map is done on the input array to convert each value in the input array to the expected type, using the typecasting method that would be used for the scalar value. For example, for model objects, where ids is an integer array column:

    model.set(:ids=>['1', '2']).ids.to_a # => [1, 2]

* The pg_array extension now correctly handles bytea arrays used in bound variables.

* The pg_array extension no longer uses the JSON-based parser for floating point types, since it doesn't handle NaN and Infinity values correctly.

* When typecasting in the pg_array extension, PGArray values are only returned verbatim if they have a matching database type. Otherwise, the underlying array is rewrapped in a new PGArray value with the correct database type.

* H2 clob types are now recognized as strings instead of blobs. Previously the code attempted to do this, but it didn't do so correctly.

* The jdbc/postgres adapter now converts scalar values of the array to the appropriate type.
  Previously, if you retrieved a date array, you got back a ruby array of JavaSQL::SQL::Date instances. Now, you get back a ruby array of ruby Date instances.

* The schema_dumper extension now dumps migrations as change migrations, instead of separate up/down migrations, resulting in simpler code.

* When dumping non-integer foreign keys in the schema dumper, an explicit type is now used. Previously, the column would have been dumped as an integer column.

* When dumping unsigned integer columns in the schema dumper, a column > 0 constraint is now added in the dumped migration.

* On Microsoft SQL Server, when updating a dataset with a limit, the limit is now respected.

* When emulating offset using the ROW_NUMBER window function, Sequel no longer requires that the dataset be ordered. If an order is not provided, it defaults to ordering on all of the columns in the dataset. If you want to override the default order used in such a case, you need to override the default_offset_order method for the dataset.

* On SQLite, casting to Date/Time/DateTime now calls an SQLite date/datetime function instead of using a cast, as SQLite treats such a cast as a cast to integer.

* When using JRuby 1.6 in ruby 1.9 mode and typecasting a time column, Sequel now works around a bug where Time#nsec is 0 even though Time#usec is not.

* The odbc/mssql adapter now correctly handles the case where SCOPE_IDENTITY returns NULL after an insert.

* bin/sequel now accepts multiple -l options for logging to multiple output files.

* In addition to Sequel's rigorous pre-push testing, Sequel now also uses TravisCI for continuous integration testing across a wider range of ruby implementations.

= Backwards Compatibility

* The keys in the :key_hash entry passed to the :eager_loader proc are now method references instead of column references. For most associations, they are the same thing, but for associations using the :key_column/:primary_key_column/:left_primary_key_column options, the values could be different.

  If you were using one of those options and had a custom eager_loader, you should switch from indexing into the :key_hash option to just using the :id_map option.

* The :key_hash entry passed to the :eager_loader proc is now no longer guaranteed to contain key maps for associations other than the one currently being eagerly loaded. Previously, it contained key maps for all associations that were being eagerly loaded. If you have a custom :eager_loader proc that accessed a key map for a separate association that was being loaded concurrently, you'll now have to build the key map manually if it doesn't exist.

* If you previously explicitly specified an :eager_loader_key option when defining an association, you may need to change it so that it is a method reference instead of a column reference, or possibly just omit the option.

* If you have a custom :eager_loader proc for an association where the default :eager_loader_key option references a method that the model does not respond to (or raises an exception), you may need to specify the :eager_loader_key=>nil option.

* In the pg_auto_parameterize extension, String values are no longer automatically casted to text. This is because the default type of a string literal in PostgreSQL is unknown, not text. This makes it much less likely to require manual casts, but has the potential to break existing code relying on the automatic cast to text. As a workaround, any query that can no longer be automatically parameterized after this change just needs to add manual casting to text.
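  For example (a minimal sketch with hypothetical table and column names), an explicit cast to text can be added like this:

    DB[:posts].where(:title=>Sequel.cast('hello', :text)).all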
* Sequel now raises an exception if you attempt to clone associations with different types, except if one type is one_to_many and the other is one_to_one. Cloning from other types was usually a bug, and raising an exception early will make it much easier to track such bugs down.

* When running the plugin/extension and PostgreSQL adapter specs, a json library is now required.

* The jdbc/postgres adapter array typecasting internals have been modified; if you were relying on the internals, you may need to update your code.

* The pg_array extension internals changed significantly. PGArray no longer has any subclasses by default, as parsing is now done in separate objects. Anyone relying on the pg_array internals will need to update their code.

* The postgres adapter no longer sets up type conversion of int2vector and money types, since in both cases the conversion was usually incorrect. These types will now be returned as strings. If you are relying on the conversion, you'll need to add your own custom type procs.

sequel-5.63.0/doc/release_notes/3.38.0.txt000066400000000000000000000207131434214120600177720ustar00rootroot00000000000000

= New Features

* A pg_row extension has been added that supports PostgreSQL's row-valued/composite types. You can register support for specific row types:

    DB.register_row_type(:address)

  Then you can create values of that row type:

    ad = DB.row_type(:address, ['555 Foo St.', 'Bar City', '98765'])
    # or
    ad = DB.row_type(:address, :street=>'555 Foo St.',
                     :city=>'Bar City', :zip=>'98765')

  Which you can use in your datasets:

    DB[:people].insert(:name=>'Me', :address=>ad)

  If you are using the native postgres adapter, when retrieving row type values, they will be returned as instances of the row type, which are hash-like objects:

    ad = DB[:people].get(:address)
    ad[:street] # => '555 Foo St.'
    ad[:city]   # => 'Bar City'
    ad[:zip]    # => '98765'

  If you are also using the pg_array extension, then arrays of composite types are supported automatically. Composite types can also include arrays of other types as well as other composite types, though recursive composite types are not allowed by PostgreSQL.

  Using arrays and composite types brings one of the benefits of document databases to PostgreSQL, allowing you to store nested structures inside a single row.

* A pg_row_ops extension has been added that adds DSL support for accessing members of row-valued/composite types. You first create a row op:

    r = Sequel.pg_row_op(:row_column)

  Then you can get DSL support for accessing members of that row_column via the #[] method:

    r[:a] # (row_column).a

  This works with composite types containing composite types:

    r[:a][:b] # ((row_column).a).b

  When used in conjunction with the pg_array_ops extension, there is support for composite types that include arrays, as well as arrays of composite types:

    r[1][:a] # (row_column[1]).a
    r[:a][1] # (row_column).a[1]

  The extension offers additional support for referencing a table's type when it contains a column with the same name, see the RDoc for details.

* A pg_row plugin has been added that works with the pg_row extension, and allows you to represent row-valued types as Sequel::Model objects (instead of the hash-like objects they use by default).
  In your model class, you load the plugin:

    class Address < Sequel::Model(:address)
      plugin :pg_row
    end

  Then you can use Address instances in your datasets:

    ad = Address.new(:street=>'555 Foo St.', :city=>'Bar City',
                     :zip=>'98765')
    DB[:people].insert(:name=>'Me', :address=>ad)

  And if you are using the native postgres adapter, the dataset will return the type as a model instance:

    ad = DB[:people].get(:address)
    ad.street # => '555 Foo St.'
    ad.city   # => 'Bar City'
    ad.zip    # => '98765'

* A pg_typecast_on_load plugin has been added. This plugin is designed for use with the jdbc/postgres, do/postgres, and swift/postgres adapters, and it is similar to the typecast_on_load plugin. However, while the typecast_on_load plugin uses setter methods, the pg_typecast_on_load plugin uses the same code that the native postgres adapter uses for typecasting.

* The tinytds adapter now supports a :textsize option to override the default TEXTSIZE setting. The FreeTDS default is fairly small (~64k), so if you want to use large blob or text columns, you should probably set this to a value larger than the largest text/blob you want to use.

* Sequel.expr when called with a symbol now splits the symbol and returns an Identifier, QualifiedIdentifier, or AliasedExpression, depending on the content of the symbol. Previously, it only wrapped the symbol using a Wrapper.

* Identifier#* and QualifiedIdentifier#* when called without any argument now represent a selection of all columns from the represented table:

    Sequel.expr(:table).*         # table.*
    Sequel.expr(:schema__table).* # schema.table.*

  This makes it easier to represent the selection of all columns in a table without using the core extensions.

* Model#values now has a Model#to_hash alias.

* SQL::Blob values now have as, cast, and lit methods even if the core extensions are not loaded.

= Other Improvements

* When loading multiple pg_* extensions into a Database instance, the conversion procs are only reset once instead of once per extension.

* All adapters that access PostgreSQL now store type conversion procs, similar to the native postgres adapter. This has been added to make it easier to write extensions that support advanced PostgreSQL types.

* Database#schema output on PostgreSQL now includes the type oid for each column.

* You can now register custom array types to specific Database instances, using the :type_procs and :typecast_methods_module options, so it is now possible to have custom array types without affecting global state.

* Dropping of columns with defaults now works correctly on Microsoft SQL Server. Before, it would fail as the related constraint was not dropped first.

* The MySQL type "double(x,y)" is now recognized as a float type.

* The jdbc/jtds and jdbc/derby adapters now handle nil prepared statement values in more cases.

* Blob prepared statement arguments are now handled correctly on jdbc/db2 and jdbc/oracle.

* Sequel now works around a Time#nsec bug in JRuby 1.6 ruby 1.9 mode when using Time values in prepared statements in the jdbc adapter.

* Java::JavaUtil::UUID types are now returned as ruby strings when converting types in the jdbc adapter.

* Real boolean literals are now used on derby 10.7+. On derby <10.7 Sequel still uses (1 = 1) and (1 != 1) for true and false. This allows you to use boolean columns with a true/false default on derby 10.7+.

* Clobs are now treated as string types instead of blobs on derby, since treating clob as blob doesn't work there.

* The swift adapter now supports an output identifier method.
* The swift adapter now returns blobs as SQL::Blob instances.

* The schema_dumper extension no longer produces code that requires the core extensions.

* All of Sequel's specs now run without the core extensions loaded, ensuring that none of the internals depend on the core extensions. The only exception is the specs for the core extensions themselves.

= Backwards Compatibility

* The pg_* extensions no longer modify core classes if the core_extensions extension is not loaded. All methods they added now have equivalent methods on the main Sequel module:

    Sequel.pg_array
    Sequel.pg_array_op
    Sequel.hstore
    Sequel.hstore_op
    Sequel.pg_json
    Sequel.pg_range
    Sequel.pg_range_op

* The Sequel::SQL::IdentifierMethods module has been removed. This module was only included in Symbol if the core_extensions were enabled. Since it only defined a single method, now the core extensions just define that method directly on Symbol.

* The swift adapter now requires swift-db-{postgres,mysql,sqlite3} gems instead of the swift gem. swift/postgres requires swift-db-postgres 0.2.0+, swift/sqlite requires swift-db-sqlite 0.1.2+, and swift/mysql requires swift-db-mysql.

* Sequel will no longer typecast a string to a PostgreSQL array or hstore column in a model column setter. This is because the parsers that Sequel uses were designed to support only PostgreSQL's output format. It's unlikely that a user would provide that format for typecasting, and while there aren't known security issues with the parsers, they were not designed to handle arbitrary user input, so typecasting from string is no longer allowed and will now raise an error.

  The only reason such typecasting was allowed in the first place was to work around issues in the jdbc/postgres, do/postgres, and swift/postgres adapters, using the typecast_on_load plugin. If you were previously using the typecast_on_load plugin for hstore or array columns, you need to switch to using the new pg_typecast_on_load plugin.

* The private get_conversion_procs method in the postgres adapter no longer accepts an argument.

* The Sequel::Postgres::PGArray::DatabaseMethods singleton define_array_typecast_method method has been removed. This method was designed for internal use.

* The change to make Sequel.expr split symbols can cause the following type of code to break:

    Sequel.expr(:column___alias).desc

  This is because expr now returns an AliasedExpression, which doesn't support the desc method. However, as you can't apply an order to an aliased expression, nobody should be relying on this.

sequel-5.63.0/doc/release_notes/3.39.0.txt

= New Features

* A constraint_validations extension and plugin have been added, which allow you to define validations when creating tables, which are enforced by database constraints, and have those validations be automatically discovered and used by your Sequel::Model classes.

  The extension is designed to be used in your migrations/schema modification code:

    DB.extension(:constraint_validations)
    DB.create_constraint_validations_table
    DB.create_table(:foos) do
      primary_key :id
      String :name
      validate do
        min_length 5, :name
      end
    end

  This creates a database CHECK constraint that ensures that the minimum length for the column is 5 characters. It also adds metadata about the validation to the sequel_constraint_validations table.
  To have the model class automatically create validations, just include the plugin in the model:

    class Foo < Sequel::Model
      plugin :constraint_validations
    end

  Note that MySQL does not enforce CHECK constraints (it parses but ignores them), so using the extension on MySQL does not actually enforce constraints at the database level, though it still does support the automatic model validations if the plugin is used.

* Dataset#count now takes an argument or a virtual row block, allowing you to do:

    DB[:table].count(:column_name)
    DB[:table].count{function_name(column1, column2)}

  When count is given an argument, instead of returning the total number of rows, it returns the number of rows where the argument has a non-NULL value.

* Database#copy_into has been added to the postgres adapter when the pg driver is being used, and can be used for very fast inserts into tables if you already have the input preformatted in PostgreSQL text or CSV format.

* set_column_not_null has been added to the alter table generator, for a nicer API:

    alter_table(:t){set_column_not_null :col}
    # instead of
    alter_table(:t){set_column_allow_null :col, false}

  Additionally, set_column_allow_null now defaults the second argument to true for a nicer API:

    alter_table(:t){set_column_allow_null :col}
    # instead of
    alter_table(:t){set_column_allow_null :col, true}

* Database#supports_regexp? has been added for checking if the database supports Regexp in filters. Currently, only MySQL and PostgreSQL support Regexps. Attempting to use a Regexp on a database that doesn't support it now raises an error when attempting to generate the SQL, instead of sending invalid SQL to the database.

* Sequel.char_length has been added for a cross platform char_length function (emulated when char_length is not supported natively by the database).

* Sequel.trim has been added for a cross platform trim function (emulated when trim is not supported natively by the database).

* ValidationFailed and HookFailed exceptions now have a model method that returns the model instance related to the exception. This makes it possible to use Model.create inside a begin/rescue block and get access to the underlying instance if there is a validation or before/around hook error.

* The subclasses plugin now accepts a block, which is called with each model class created. This is useful if you want to apply changes to classes created in the future instead of just existing classes.

* The validates_unique validation in the validation_helpers plugin now accepts a :where option for a custom uniqueness filter. Among other things this makes it easy to implement a case insensitive uniqueness validation on a case sensitive column.

* The threaded connection pools now support a :connection_handling=>:disconnect option, which makes them disconnect connections after use instead of returning them to the pool. This makes it possible to completely control connection lifetime using Database#synchronize.

* The pg_row_op extension now has support for PGRowOp#*, for referencing the members of the composite type as separate columns.

* MySQL's set type and default value are now recognized.

* bin/sequel now accepts a -c argument for running an arbitrary code string instead of using an IRB prompt.

= Other Improvements

* Sequel now parses current date/timestamp column defaults when parsing the schema for a table. The values will be returned as Sequel::CURRENT_DATE for date columns and Sequel::CURRENT_TIMESTAMP for timestamp columns.
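  For example, something like the following is expected for a table with a current timestamp default (a hedged sketch; the exact schema entries vary per adapter, and the table here is hypothetical):

    DB.create_table(:events){DateTime :at, :default=>Sequel::CURRENT_TIMESTAMP}
    DB.schema(:events).assoc(:at)[1][:ruby_default]
    # => Sequel::CURRENT_TIMESTAMP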
  The schema_dumper extension will work with these defaults, so if you dump the schema for a table with a column that uses a current timestamp default, the dumped schema will include the default.

  The defaults setter plugin also works with these changes, so that when new model objects are instantiated, they get the current Date/Time/DateTime values set.

* On MySQL and PostgreSQL, Sequel will now by default attempt to combine multiple alter_table operations into a single query where it believes it can do so correctly. This can potentially improve performance ~N times, where N is the number of alter table operations.

  This can change the SQL used for old migrations (though it shouldn't change the result), and is a potentially risky change. This may be disabled by default in future versions if it causes problems.

* The defaults_setter plugin now correctly sets false default values.

* The schema_dumper plugin now preserves fractional seconds in timestamp column defaults when dumping.

* Time->DateTime and DateTime->Time typecasts now retain fractional seconds on ruby 1.8.

* Array arguments passed to most PGArrayOp methods are now automatically wrapped in a PGArray. If you want to use this support, you need to make sure to load both the pg_array and pg_array_op extensions.

* Sequel now does a better job of finding the sequence for a given table on PostgreSQL, handling more corner cases. A small side effect of this is sometimes sequence names will be quoted.

* Some potential thread-safety issues when using Sequel with PostgreSQL on a non-GVL ruby implementation have been fixed.

* Sequel now correctly caches the server version query on MySQL.

* Sets of alter_table operations on MySQL and Microsoft SQL Server that require parsing the current database schema, where later alter_table operations depend on earlier ones, should now work correctly.

* You can now drop check constraints on tables on SQLite, though doing so drops all check constraints on the table, not only the specific check constraint given.

* The identity_map plugin no longer breaks if used with a model without a primary key.

* Sequel::SQL::NegativeBooleanConstant now inherits from Constant instead of BooleanConstant. This means that Sequel::NULL == Sequel::NOTNULL is now false instead of true.

* You can now override the convert_tinyint_to_bool settings on a per-Dataset basis in the mysql and mysql2 adapters, though the overriding is different depending on the adapter. Check the commit log for details.

* timestamp(N) types are now recognized as datetime, which should fix certain cases on Oracle.

* Dataset#insert now handles a single model instance argument as a single value if the model uses the pg_row plugin.

* When joining a model dataset using a model class as the table argument, a subselect is used unless the model is a simple select from the underlying table.

* The specs now clean up after themselves, dropping the tables that they create for testing.

= Backwards Compatibility

* The defaults_setter plugin's behavior changed due to the current date/timestamp support. Previously, it would not set a value for the column, since the default wasn't recognized. Therefore, the database would use the default value on insert, which would be the database's current timestamp.

  Now, the value is set to the current Date/Time/DateTime on model object instantiation, so the database wouldn't use the column default. Instead of the database's current timestamp on insert, the column value will be the application's current timestamp on model instantiation.
  Users who don't want this behavior can remove the default values in the model:

    Model.default_values.delete(:column_name)

* Plain (non-model) datasets no longer allow insert to accept a single model instance argument. Also, they no longer call values on a single argument if the object responds to it.

* Plain (non-model) datasets no longer accept model classes as tables in the join/graph methods. Also, they no longer call table_name on the argument if the object responds to it.

* The schema_dumper extension now requires the eval_inspect extension, which changes inspect output for Sequel::SQL::Expression objects.

* Custom adapters that override Database#alter_table_sql_list now need to make sure it returns an already flattened array.

* The identity_map_key method in the identity_map plugin now returns nil instead of a random string if the given pk is nil.

sequel-5.63.0/doc/release_notes/3.4.0.txt

New Plugins
-----------

* A nested_attributes plugin was added allowing you to modify associated objects directly through a model object, similar to ActiveRecord's Nested Attributes.

    Artist.plugin :nested_attributes
    Artist.one_to_many :albums
    Artist.nested_attributes :albums
    a = Artist.new(:name=>'YJM',
      :albums_attributes=>[{:name=>'RF'}, {:name=>'MO'}])
    # No database activity yet
    a.save # Saves artist and both albums
    a.albums.map{|x| x.name} # ['RF', 'MO']

  It takes most of the same options as ActiveRecord, as well as a few additional options:

  * :destroy - Allow destruction of nested records.

  * :limit - For *_to_many associations, a limit on the number of records that will be processed, to prevent denial of service attacks.

  * :remove - Allow disassociation of nested records (can remove the associated object from the parent object, but not destroy the associated object).

  * :strict - Set to false to not raise an error message if a primary key is provided in a record, but it doesn't match an existing associated object.

  If a block is provided, it is passed each nested attribute hash. If the hash should be ignored, the block should return anything except false or nil.

* A timestamps plugin was added for automatically adding before_create and before_update hooks for setting values on timestamp columns. There are a couple of existing external plugins that handle timestamps, but the implementations are suboptimal. The new built-in plugin supports the following options (with the default in parentheses):

  * :create - The field to hold the create timestamp (:created_at)

  * :force - Whether to overwrite an existing create timestamp (false)

  * :update - The field to hold the update timestamp (:updated_at)

  * :update_on_create - Whether to set the update timestamp to the create timestamp when creating (false)

* An instance_hooks plugin was added for adding hooks to specific model instances:

    obj = Model.new
    obj.after_save_hook{do_something}
    obj.save # calls do_something after the obj has been saved

  All of the standard hooks are supported, except for after_initialize. Instance level before hooks are executed in reverse order of addition before calling super. Instance level after hooks are executed in order of addition after calling super. If any of the instance level before hook blocks return false, no more instance level before hooks are called and false is returned.

  Instance level hooks are cleared when the object is saved successfully.

* A boolean_readers plugin was added for creating attribute? methods for boolean columns.
  This can provide a nicer API:

    obj = Model[1]
    obj.active  # Sequel default column reader
    obj.active? # Using the boolean_readers plugin

  You can provide a block when loading the plugin to change the criteria used to determine if the column is boolean:

    Sequel::Model.plugin(:boolean_readers) do |c|
      db_schema[c][:db_type] =~ /\Atinyint/
    end

  This may be useful if you are using MySQL and have some tinyint columns that represent booleans and others that represent integers. You can turn the convert_tinyint_to_bool setting off and use the attribute methods for the integer value and the attribute? methods for the boolean value.

Other New Features
------------------

* Sequel now has support for converting Time/DateTime to local or UTC time upon storage, retrieval, or typecasting.

  There are three different timezone settings:

  * Sequel.database_timezone - The timezone that timestamps use in the database. If the database returns a time without an offset, it is assumed to be in this timezone.

  * Sequel.typecast_timezone - Similar to database_timezone, but used for typecasting data from a source other than the database. This is currently only used by the model typecasting code.

  * Sequel.application_timezone - The timezone that the application wants to deal with. All Time/DateTime objects are converted into this timezone upon retrieval from the database.

  Unlike most things in Sequel, these are only global settings; you cannot change them per database. There are only three valid timezone settings:

  * nil (the default) - Don't do any timezone conversion. This is the historical behavior.

  * :local - Convert to local time/Consider time to be in local time.

  * :utc - Convert to UTC/Consider time to be in UTC.

  So if you want to store times in the database as UTC, but deal with them in local time in the application:

    Sequel.application_timezone = :local
    Sequel.database_timezone = :utc

  If you want to set all three timezones to the same value:

    Sequel.default_timezone = :utc

  There are three conversion methods that are called:

  * Sequel.database_to_application_timestamp - Called on time objects coming out of the database. If the object coming out of the database (usually a string) does not have an offset, assume it is already in the database_timezone. Return a Time/DateTime object (depending on Sequel.datetime_class), in the application_timezone.

  * Sequel.application_to_database_timestamp - Used when literalizing Time/DateTime objects into an SQL string. Converts the object to the database_timezone before literalizing them.

  * Sequel.typecast_to_application_timestamp - Called when typecasting objects for model datetime columns. If the object being typecasted does not already have an offset, assume it is already in the typecast_timezone. Return a Time/DateTime object (depending on Sequel.datetime_class), in the application_timezone.

  Sequel does not yet support named timezones or per thread modification of the timezone (for showing all timestamps in the current user's timezone). Extensions to support both features are planned for a future version.

* Dataset#truncate was added for truncating tables. Truncate allows for fast removal of all rows in a table.

* Sequel now supports typecasting a hash to date, time, and datetime types. This allows easy usage of Sequel with forms that split the entry of these database types into separate form fields.
  With this support, you can just have field names like:

    date[year]
    date[month]
    date[day]

  Rack will parse that into:

    {'date'=>{'year'=>?, 'month'=>?, 'day'=>?}}

  So then you can do:

    obj.date = params['date']
    # or
    obj.set(params)

* validates_unique now takes a block that can be used to scope the uniqueness constraint. This allows you to easily set up uniqueness validations that are only necessary in a given scope. For example, a validation on username, but only for active users (as inactive users are soft deleted but remain in the table). You just pass a block to validates_unique:

    validates_unique(:name){|ds| ds.filter(:active)}

* The serialization plugin now supports json.

* Sequel now supports generic concepts of CURRENT_{DATE,TIME,TIMESTAMP}. Most databases support these SQL concepts, but not all, and some implementations act differently.

  The Sequel::SQL::Constants module holds the three constants, which are instances of SQL::Constant, an SQL::GenericExpression subclass. This module is included in Sequel, so you can reference the constants more easily (e.g. Sequel::CURRENT_TIMESTAMP). It's separated out into a separate module so that you can just include that module in the top level scope, allowing you to reference the constants directly (e.g. CURRENT_TIMESTAMP).

    DB[:events].filter{date < ::Sequel::CURRENT_DATE}
    # or:
    include Sequel::SQL::Constants
    DB[:events].filter{date < ::CURRENT_DATE}

* Database#run was added for executing arbitrary SQL on a database. It's an alias for Database#<<, but it allows for a nicer API inside migrations, since you can now do:

    run 'SQL'

  instead of:

    self << 'SQL'

  You can also provide a :server option to run the SQL on the given server/shard:

    run 'SQL', :server=>:shard1

* Sequel::Model() can now take a database argument in addition to a symbol or dataset argument. If a database is given, it'll create an anonymous subclass attached to the given database. Other changes were made to allow the following code to work:

    class Item < Sequel::Model(DB2)
    end

  That will work correctly assuming a table named items in DB2.

* Dataset#ungrouped was added for removing a grouping from an existing dataset. Also, Dataset#group when called with no arguments or with a nil argument also removes any existing grouping instead of resulting in invalid SQL.

* Model#modified? was added, letting you know if the model has been modified. If the model hasn't been modified, calling Model#save_changes will do nothing.

* SQL::OrderedExpression now supports #asc, #desc, and #invert.

Other Improvements
------------------

* The serialization and lazy_attribute plugins now add accessor methods to a module included in the class, instead of to the model class itself. This allows the methods to be overridden in the class and work well with super, as well as for the plugins to work together on the same column. Make sure the lazy_attributes accessor is setup before the serialization accessor if you want to have a lazy serialized column.

* Calling the add_* method for a many_to_many association now saves the record if the record is new. This makes it operate more similarly to one_to_many associations. Previously, it raised an Error.

* Dataset#import now works correctly when called with a dataset. Previously, it generated incorrect SQL (see the example below).

* The JDBC adapter now converts byte arrays to/from SQL::Blob.

* The JDBC adapter now attempts to bind unknown types using setObject instead of raising, so it can work with native Java objects. It also binds boolean parameters correctly.
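  As mentioned in the Dataset#import improvement above, import can now be called with a dataset as the data source. A minimal sketch of that usage (table and column names here are hypothetical):

    # Insert rows selected from old_items into items using a single query
    DB[:items].import([:name, :price], DB[:old_items].select(:name, :price))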
* Using multiple emulated ALTER TABLE statements (such as drop_column) in a single alter_table block now works correctly on SQLite.

* Database#indexes now works on JDBC for tables in a non-default schema. It also now properly detects unique indexes on MSSQL.

* Database#schema on JDBC now accepts a :schema option. Also, returned schema hashes now include a :column_size entry specifying the maximum length/precision for the column, since the :db_type entry doesn't contain the information on JDBC.

* Datasets without tables now work correctly on Oracle, so things like DB.get(...) now work.

* A descriptive error message is given if you attempt to use Sequel with the mysql.rb driver (which Sequel doesn't support).

* The postgres adapter now works correctly with a modified postgres-pr that raises PGErrors instead of RuntimeErrors (e.g. http://github.com/jeremyevans/postgres-pr).

* You now get a Sequel::InvalidOperation instead of a NoMethodError if you attempt to update a dataset without a table.

* The inflection support has been modified to reduce code duplication.

Backwards Compatibility
-----------------------

* Sequel now includes fractional seconds in timestamps for all adapters except MySQL. It's possible that this may break timestamp columns for databases that are not regularly tested.

* Sequel now includes timezone values in timestamps on Microsoft SQL Server, Oracle, PostgreSQL and SQLite. The modification for SQLite is probably the biggest cause for concern, since SQLite stores times as text. If you have an SQLite database that uses timestamps and is accessed by something other than Sequel, you should make sure that it works with the timestamp format that Sequel now uses.

* The default timestamp format used by Sequel now uses a space instead of 'T' between the date and time parts, which could possibly affect some databases that are not regularly tested.

* Attempting to insert into a grouped dataset or a dataset that selects from multiple tables will now raise an Error. Previously, it would ignore any GROUP or JOIN settings and generate bad SQL if there were multiple FROM tables.

* Database#<< now always returns nil. Before, the return value was adapter dependent.

* ODBC::Time and ODBC::DateTime values are now converted to the Sequel.datetime_class. Before, ODBC::Time used Time and ODBC::DateTime used DateTime regardless of the Sequel.datetime_class setting.

* The default inflections were modified, fixing some obvious errors and possibly changing some existing inflections. Further changes to the default inflections are unlikely.

sequel-5.63.0/doc/release_notes/3.40.0.txt

= New Features

* Sequel now has vastly improved support for Microsoft Access.

* Sequel now supports the CUBRID database, with a cubrid adapter that uses the cubrid gem, and a jdbc/cubrid adapter for accessing CUBRID via JDBC on JRuby.

* The association_pks plugin now supports composite keys.

* Database#transaction now accepts a :disconnect=>:retry option, in which case it will automatically retry the block if it detects a disconnection. This is potentially dangerous, and should only be used if the entire block is idempotent. There is also no checking against an infinite retry loop.

* SQL::CaseExpression#with_merged_expression has been added, for converting a CaseExpression with an associated expression to one without an associated expression, by merging the expression into each condition.
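  A brief sketch of the conversion, using the Sequel.case constructor (the status column and values here are illustrative):

    ce = Sequel.case({1=>'a', 2=>'b'}, 'c', :status)
    # CASE status WHEN 1 THEN 'a' WHEN 2 THEN 'b' ELSE 'c' END
    ce.with_merged_expression
    # CASE WHEN (status = 1) THEN 'a' WHEN (status = 2) THEN 'b' ELSE 'c' END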
= Other Improvements

* Sequel now quotes arguments/columns in common table expressions.

* Sequel now handles nil values correctly in the pg_row extension.

* Sequel::Postgres::HStore instances can now be marshalled.

* Sequel now uses clob for String :text=>true types on databases that don't support a text type.

* On PostgreSQL, Sequel now quotes channel identifier names when using LISTEN/NOTIFY.

* On PostgreSQL, Sequel now correctly handles the case where named type conversion procs have been added before the Database object is instantiated.

* On DB2, Sequel now explicitly sets NOT NULL for unique constraint columns instead of foreign key columns. DB2 does not allow columns in unique constraints to be NULL, but does allow foreign key columns to be NULL.

* In the oracle adapter, clob values are now returned as ruby strings upon retrieval.

* Sequel now detects more types of disconnections in the postgres, mysql, and mysql2 adapters.

* If a database provides a default column value that isn't a ruby string, it is used directly as the ruby default, instead of causing the schema parsing to fail.

= Backwards Compatibility

* Code using Sequel's oracle adapter that expected clob values to be returned as OCI8::CLOB instances needs to be modified to work with ruby strings.

* Because Sequel now quotes column names in common table expressions, those names are now case sensitive, which could break certain poorly coded queries. Similar issues exist with the quoting of channel identifier names in LISTEN/NOTIFY on PostgreSQL.

* The private Database#requires_return_generated_keys? method has been removed from the jdbc adapter. Custom jdbc subadapters relying on this method should override the private Database#execute_statement_insert method instead to ensure that RETURN_GENERATED_KEYS is used for insert statements.

* The private Dataset#argument_list and #argument_list_append methods have been removed.

sequel-5.63.0/doc/release_notes/3.41.0.txt

= New Features

* A connection_validator extension has been added, which automatically determines if connections checked out from the pool are still valid. If they are not valid, the connection is disconnected and another connection is used automatically, transparent to user code.

  Checking if connections are valid requires a query, so this extension causes a performance hit. For that reason, connections are only checked by default if they have been inactive for more than a configured amount of time (1 hour by default). You can choose to validate connections on every checkout via:

    DB.pool.connection_validation_timeout = -1

  However, this can cause a substantial performance hit unless you are purposely using coarse connection checkouts via manual calls to Database#synchronize (for example, in a Rack middleware). Using coarse checkouts can greatly reduce the amount of concurrency that Sequel supports (for example, limiting the number of concurrent requests to the number of database connections), so this method is not without its tradeoffs.

* Sequel.delay has been added for a generic form of delayed evaluation. This method takes a block and delays evaluating it until query literalization.
  By default, Sequel evaluates most arguments immediately:

    foo = 1
    ds = DB[:bar].where(:baz=>foo)
    # SELECT * FROM bar WHERE (baz = 1)
    foo = 2
    ds
    # SELECT * FROM bar WHERE (baz = 1)

  Using Sequel.delay, you can delay the evaluation:

    foo = 1
    ds = DB[:bar].where(:baz=>Sequel.delay{foo})
    # SELECT * FROM bar WHERE (baz = 1)
    foo = 2
    ds
    # SELECT * FROM bar WHERE (baz = 2)

* Sequel now supports the :unlogged option when creating tables on PostgreSQL, to create an UNLOGGED table.

* On SQLite, Database#transaction now supports a :mode option for setting up IMMEDIATE/EXCLUSIVE SQLite transactions. Sequel also supports a Database#transaction_mode accessor for setting the default transaction mode on SQLite.

* Most pg_* extension objects (e.g. PGArray) now support the #as method for creating an SQL::AliasedExpression object.

* The single_table_inheritance plugin now supports non-bijective mappings. In lay terms, this means that a one-to-one mapping of column values to classes is no longer required. You can now have multiple column values that map to a single class in the :model_map option, and specify a :key_chooser option to choose which column value to use for the given model class.

* The touch plugin now handles the touching of many_to_many associations, and other associations that use joined datasets.

* ConnectionPool#pool_type has been added. It returns a symbol representing the type of connection pool in use (similar to Database#database_type).

* Database#valid_connection? has been added for checking if a given connection is still valid.

* Database#disconnect_connection is now part of the public API, and can be used to disconnect a given connection.

= Other Improvements

* Uniqueness validation now correctly handles nil values. Previously, it checked the underlying table for other rows where the column IS NULL, but that is incorrect behavior. Sequel's new (correct) behavior is to skip the uniqueness check if the column is nil.

* Foreign key parsing is now supported on Microsoft SQL Server.

* Dataset#reverse and #reverse_order now accept virtual row blocks.

* Changing the name of the primary key column, and possibly other schema changes on the primary key column, are now supported on MySQL.

* Primary key columns are now specifically marked as NOT NULL on SQLite, as non-integer primary keys on SQLite are not considered NOT NULL by default.

* Failure to create a native prepared statement is now handled better in the postgres, mysql, and mysql2 adapters.

* Firebird now emulates selecting data without an underlying table (e.g. DB.get(1)).

* Finding the name of the constraint that sets column defaults on Microsoft SQL Server now works correctly on JRuby 1.7.

* An additional type of disconnect error is now recognized in the jdbc/sqlserver adapter.

* Many adapters have been fixed so that they don't raise an exception if trying to disconnect an already disconnected connection.

* Many adapters have been fixed so that Database#log_connection_execute logs and executes the given SQL on the connection.

* Many adapters have been fixed so that Database#database_error_classes returns an array of database exception classes for that adapter.

* Database#log_exception now handles a nil exception message.

* Dataset#limit(nil, nil) now resets offset in addition to limit, but you should still use Dataset#unlimited instead.

* A bin/sequel usage guide has been added to the documentation.

= Backwards Compatibility

* Sequel now treats clob columns as strings instead of blobs (except on DB2 when use_clob_as_blob = true).
  This can make it so the values are returned as strings instead of SQL::Blob values. Since SQL::Blob is a String subclass, this generally will not affect user code unless you are passing the values as input to a separate blob column.

* The Database <-> ConnectionPool interface was completely changed. Sequel no longer supports custom connection procs or disconnection procs in the connection pools. The :disconnection_proc Database option is no longer respected, and blocks passed to Database.new are now ignored.

  This change should not be user-visible, but if you had any code that was monkeying with the connection pool internals, you may need to modify it.

* Code that was using the uniqueness check to also check for presence should add a separate check for presence. Such code was broken, as it only worked if there was already a NULL column value in the table. If you were relying on this broken behavior, you should clean up the NULL data in the column and then mark the database column as NOT NULL.

* If you have code that specifically abuses the fact that non-integer primary keys on SQLite allow NULL values by default, it will no longer work.

sequel-5.63.0/doc/release_notes/3.42.0.txt

= New Features

* Dataset#avg, #interval, #min, #max, #range, and #sum now accept virtual row blocks, allowing you to more easily get aggregate values of expressions based on the table:

    DB[:table].sum{some_function(column1, column2)} # => 134
    # SELECT sum(some_function(column1, column2)) FROM table

* Database#do has been added on PostgreSQL for using the DO anonymous code block execution statement.

* Model.dataset_module now uses a Module subclass, which allows you to call subset inside a dataset_module block, making it easier to consolidate dataset method code:

    class Album < Sequel::Model
      dataset_module do
        subset(:gold){copies_sold > 500000}
      end
    end

* Database#copy_table and #copy_into are now supported on jdbc/postgres.

* Sequel now supports deferred constraints on constraint types other than foreign keys. The only databases that appear to implement this are Oracle and PostgreSQL.

* Sequel now supports INITIALLY IMMEDIATE deferred constraints via the :deferrable=>:immediate constraint/column option.

* Sequel now supports setting the default size of string columns, via the default_string_column_size option or accessor. In some cases, Sequel's default string column size of 255 is too large (e.g. MySQL with utf8mb4 character set), and this allows you to change it.

= Other Improvements

* Dataset#count and other methods now use a subselect in the case where the dataset has an offset but no limit.

* If an error occurs while attempting to commit a transaction, Sequel now attempts to rollback the transaction. Some databases do this automatically, but not all. Among other things, this fixes issues with deferred foreign key constraint violations on SQLite.

* When extending a model's dataset, the model's instance_dataset is reset, ensuring that it will also be extended with the module.

* When passing an invalid argument to Dataset#filter, the exception message now includes the argument.

* The force_encoding plugin now works with frozen string values.

* Public methods added to a model dataset_module now have model class methods created for them even if the method was added outside of a dataset_module block.
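  For example, a public method defined in a dataset_module (whether inside the block as here, or added to the module afterward) should get a matching model class method (Album and its column are hypothetical):

    class Album < Sequel::Model
      dataset_module do
        def gold
          where{copies_sold > 500000}
        end
      end
    end

    Album.gold.sql
    # SELECT * FROM albums WHERE (copies_sold > 500000)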
* On PostgreSQL, Database#indexes now includes a :deferrable entry for each index hash, which will be true for unique indexes where the underlying constraint is deferrable.

* On Microsoft SQL Server 2000, Dataset#update no longer includes a limit (TOP), allowing it to work correctly.

= Backwards Compatibility

* Model.dataset_methods has been removed. This was used to store blocks for methods created via def_dataset_method and subset. The internals have been changed so that a dataset_module is always used in these cases, therefore there was no longer a reason for this method.

sequel-5.63.0/doc/release_notes/3.43.0.txt

= New Features

* A core_refinements extension has been added, which offers refinement versions of Sequel's core extensions. This requires the new experimental refinement support added in ruby 2.0, and allows you to use the Sequel DSL methods in a file without actually modifying the Symbol, String, Array, and Hash classes.

* A date_arithmetic extension has been added for performing database-independent date calculations (adding/subtracting an interval to/from a date):

    Sequel.extension :date_arithmetic
    e = Sequel.date_add(:date_column, :years=>1, :months=>2, :days=>3)
    DB[:table].where(e > Sequel::CURRENT_DATE)

  In addition to providing the interval as a hash, you can also provide it as an ActiveSupport::Duration object. This extension is supported on 11 database types.

* Dataset#get can now take an array of multiple expressions to get an array of values, similar to map/select_map:

    value1, value2 = DB[:table].get([:column1, :column2])

* Sequel can now handle [host.]database.schema.table qualified tables on Microsoft SQL Server. To implement this support, the split_qualifiers method has been added to Database and Dataset for taking a possibly qualified identifier and splitting it into an array of identifier strings.

* The string_stripper plugin now offers the ability to manually specify which columns to skip stripping for via Model.skip_string_stripping.

= Other Improvements

* The jdbc adapter now works with the new jdbc-* gems, which require a manual load_driver step that the older jdbc-* gems did not require.

* The string_stripper plugin no longer strips blob columns or values.

* Database#copy_into in both the postgres and jdbc/postgres adapters has been fixed to better handle exceptions.

* Dataset#hash and Model#hash are now significantly faster.

* Lambda procs with 0 arity can now be used as virtual row blocks on ruby 1.9. Previously, attempting to use a lambda proc with 0 arity as a virtual row block on ruby 1.9 would raise an exception.

* Schema-qualified composite types are now handled correctly in the pg_row extension.

* Database#reset_primary_key_sequence on PostgreSQL now works correctly when a default_schema is set.

* tinyint(1) unsigned columns on MySQL are now parsed as booleans instead of integers if converting tinyint to boolean.

* The jdbc adapter now supports the jdbc-hsqldb gem, so you can now install that instead of having to require the .jar manually.

* Blobs are now casted correctly on DB2 when the use_clob_as_blob setting is false.

* Oracle timestamptz types are now handled correctly in the jdbc/oracle adapter.

* Sequel now defaults to :prefetch_rows = 100 in the oracle adapter, which can significantly improve performance.

* Sequel now defines respond_to_missing? where method_missing is defined and the object also responds to respond_to?.
* Sequel::BasicObject now responds to instance_exec on ruby 1.8.

= Backwards Compatibility

* The meta_def method that was defined on Database, Dataset, and Model classes and instances has been moved to an extension named meta_def, and is no longer loaded by default. This method was previously used internally, and it wasn't designed for external use. If you have code that uses meta_def, you should now load the extension manually:

    Sequel.extension :meta_def

* The private _*_dataset_helper model association methods are no longer defined. The AssociationReflection#dataset_helper_method public method is also no longer defined.

* Dataset#schema_and_table now always returns strings (or nil). Before, in some cases it would return symbols.

* Using a conditions specifier array with Dataset#get no longer works due to the new multiple values support in Database#get. So code such as:

    DB[:table].get([[:a, 1], [:b, 2]])

  should be changed to:

    DB[:table].get(Sequel.expr([[:a, 1], [:b, 2]]))

sequel-5.63.0/doc/release_notes/3.44.0.txt

= New Features

* Dataset#paged_each has been added, for processing entire datasets without keeping all rows in memory, even if the underlying driver keeps all query results in memory. This is implemented using limits and offsets, and requires an order (model datasets use a default order by primary key). It defaults to fetching 1000 rows at a time, but that can be changed via the :rows_per_fetch option.

  This method is drop-in compatible with each. Previously, the pagination extension's each_page method could be used for a similar purpose, but users of each_page are now encouraged to switch to paged_each.

* Sequel now recognizes constraint violation exceptions on most databases, and will raise specific exceptions for different types of constraint violations, instead of the generic Sequel::DatabaseError:

  * Sequel::ConstraintViolation (generic superclass)
  * Sequel::CheckConstraintViolation
  * Sequel::NotNullConstraintViolation
  * Sequel::ForeignKeyConstraintViolation
  * Sequel::UniqueConstraintViolation
  * Sequel::Postgres::ExclusionConstraintViolation

* The :dataset association option can now accept an optional association reflection argument. Instead of doing:

    Album.one_to_many :artists, :dataset=>proc{Artist...}

  you can now do:

    Album.one_to_many :artists, :dataset=>proc{|r| r.associated_dataset...}

  This second form will perform better.

* Temporary views are now supported on PostgreSQL and SQLite using the :temp option to create_view.

= Other Improvements

* Row fetching speed in the tinytds adapter has been increased by up to 60%.

* Row fetching speed in the mysql2 adapter when using an identifier output method has been increased by up to 50%.

* On databases where offsets are emulated via the ROW_NUMBER window function (Oracle, DB2, Microsoft SQL Server), using an offset in a subselect is now supported. For example, the following code previously didn't work correctly with emulated offsets:

    # Second 5 rows ordered by column2 of the second 10 rows ordered
    # by column 1.
    DB[:table].order(:column1).limit(10, 10).
      from_self.order(:column2).limit(5, 5)

  Row processing speed has been increased slightly for all adapters that support databases where offsets are emulated.

* Association method performance has improved by caching an intermediate dataset. This can close to triple the performance of the association_dataset method, and increase the performance of the association method by close to 30%.
* Virtual Row performance has increased about 30% in the typical case by using a shared VirtualRow instance.

* Database#create_or_replace_view is now emulated on databases that don't support it directly by dropping the view before attempting to create it.

* The columns_introspection extension can now introspect for simple select * queries from subselects, and it can now use the cached schema information in the database for simple select * queries from tables.

* The identity_map plugin now works correctly with many-to-many right-side composite keys.

* Dataset#last for Model datasets now works even if you don't specify an order explicitly, giving the last entry by primary key. Note that Dataset#first for model datasets still does not order by default.

* The eager_each plugin no longer uses Object#extend at runtime.

* Database#remove_cached_schema is now thread-safe on non-GVL ruby implementations.

* Connection errors in the jdbc adapter now provide slightly more helpful messages.

* Sequel now uses the standard offset emulation code in the jdbc/as400 adapter, instead of custom offset emulation code specific to that adapter.

* Database#create_view with a dataset now works correctly when using the pg_auto_parameterize extension.

* Database#columns no longer calls the row_proc.

* Dataset#schema_and_table no longer turns a literal string into a non-literal string.

* The oracle adapter now works with a :prefetch_rows=>nil option, which explicitly disables prefetching.

* The mock mssql adapter now sets a server_version so that more parts of it work.

= Backwards Compatibility

* Offset emulation via ROW_NUMBER works by moving the query to a subselect that also selects from the ROW_NUMBER window function, and filtering on the ROW_NUMBER in the main query. Previously, the ROW_NUMBER was also present in the output columns, and some adapter code was needed to hide that fact.

  Now, the outer select selects all of the inner columns in the subselect except for the ROW_NUMBER, reducing the adapter code needed. This has the side effect of potentially requiring a query (or multiple queries for multiple subselects) to determine the columns to use. The columns_introspection extension may reduce the number of queries needed.

* The correlated_subquery eager limit strategy is no longer supported on Microsoft SQL Server for many_*_many associations. As the window_function eager limit strategy is supported there, there is no reason to use the correlated_subquery strategy.

* The public AssociationReflection#_dataset_method method has been removed.

* The private _*_dataset methods for associations (e.g. _albums_dataset) have been removed.

* The private Dataset#offset_returns_row_number_column? method has been removed.

* :conditions options for associations are now added to the association dataset before the foreign key filters, instead of after. This should have no effect unless you were introspecting the dataset's opts or sql and acting on it.

* The added abilities in the columns_introspection plugin to use cached schema for introspection can now cause it to return incorrect results if the table's schema has changed since it was cached by Sequel.

sequel-5.63.0/doc/release_notes/3.45.0.txt

= New Features

* Database#transaction now recognizes a :retry_on option, which should contain an exception class or array of exception classes. If the transaction raises one of the given exceptions, Sequel will automatically retry the transaction block.
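  For example (Sequel::SerializationFailure, described below, is a likely candidate; the table and the idempotent update here are hypothetical):

    DB.transaction(:retry_on=>Sequel::SerializationFailure) do
      DB[:accounts].where(:id=>1).update(:active=>true)
    end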
  It's a bad idea to use this option if the transaction block is not idempotent. By default, Sequel only retries the block 5 times, to protect against infinite looping. You can change the number of retries with the :num_retries option.

  Users of the :disconnect=>:retry option are encouraged to switch to :retry_on=>Sequel::DatabaseDisconnectError.

* Dataset#escape_like has been added for escaping LIKE metacharacters. This is designed for the case where part of the LIKE pattern is based on user input that should not treat the metacharacters specially.

* Serialization failures/deadlocks are now raised as Sequel::SerializationFailure exception instances. This exception class is a good candidate for the transaction :retry_on option.

* On PostgreSQL, you can now provide the :force_standard_strings and :client_min_messages Database options to override the defaults on a per-instance basis.

* On PostgreSQL, Database#tables and #views now recognizes a :qualify option, which if true will return qualified identifiers instead of plain symbols.

* Transaction isolation levels are now supported on Oracle, DB2, and all jdbc subadapters using the JDBC transaction support.

* Dataset.def_mutation_method now accepts a :module option for the module in which to define the methods (defaulting to self).

* An unlimited_update plugin has been added. Its sole purpose is to eliminate a MySQL warning in replicated environments, since by default Sequel::Model uses a LIMIT clause when updating on MySQL.

* The named_timezones extension now adds a Sequel.tzinfo_disambiguator accessor to automatically handle TZInfo::AmbiguousTime exceptions. This should be a callable object that accepts two arguments, a DateTime instance and an array of timezone periods, and returns the timezone period to use.

= Other Improvements

* Sequel now handles JSON securely, specifying the :create_additions=>false option when using JSON.parse. If you really want to get the old vulnerable behavior back, override Sequel.parse_json.

* The json_serializer and xml_serializer plugins are now secure by default. Before, the default behavior of these plugins allowed for round tripping, such that:

    Album.from_xml(album.to_xml) == album

  Unfortunately, that requires that the deserialization allow the setting of any column. Since the plugins also handle associations, you could also set any column in any associated object, even cascading to associated objects of those objects.

  The new default behavior only allows deserialization to set the same columns that mass-assignment would set, and not to handle associated objects at all by default. The following additional options are supported:

  :fields :: The specific fields to set (this was already supported by the json_serializer plugin).
  :associations :: The specific associations to handle.
  :all_columns :: The previous behavior of setting all columns.
  :all_associations :: The previous behavior of setting all associations.

  Since JSON parsing no longer deserializes into arbitrary ruby instances, from_json and array_from_json class methods have been added to the json_serializer plugin, for deserializing into model instances. These mirror the from_xml and array_from_xml class methods in the xml_serializer plugin.

  Note that the :all_columns and :all_associations options were only added to make backwards compatibility easier. It is likely they will be removed in Sequel 4, along with the json_create class method.
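  For example, with the json_serializer plugin, deserialization can be restricted to specific fields (the model, JSON input, and field names here are illustrative):

    album = Album.from_json(json_string, :fields=>%w'name release_date')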
* Sequel now attempts to use database specific error codes or SQLState codes instead of regexp parsing to determine if a more specific DatabaseError subclass should be used. This should make error handling faster and more robust.

* Sequel now uses ESCAPE '\' when using LIKE, for similar behavior across databases. Previously, no ESCAPE clause was used, so behavior differed across databases, with most not using escaping, and PostgreSQL, MySQL, and H2 defaulting to backslash as the escape character.

* The query extension has been reimplemented and now uses a proxy object instead of Object#extend.

* The :pool_timeout Database option now supports fractional seconds.

* Database#quote_identifier is now a public method.

* Metadata parsing (schema, indexes, foreign_key_list) on PostgreSQL now correctly handles the case where an unqualified table name is used and tables with that name exist in multiple schemas. It now picks the first matching table in the schema_search_path, instead of failing or returning results from all tables.

* Sequel::Model instances no longer attempt to typecast the money type on PostgreSQL, since the previous typecast didn't work correctly, and correct typecasting is locale-dependent.

* Sequel no longer picks up foreign keys for tables in other databases when using Database#foreign_key_list on MySQL.

* A warning when using the mysql2 3.12 beta has been eliminated.

* A warning has been eliminated when using the jdbc/oracle adapter on JRuby 1.7.

* Sequel's ilike emulation should now work by default on databases without specific syntax support.

* Dataset#from_self! no longer creates a self referential dataset.

* Coverage testing now uses simplecov instead of rcov on ruby 1.9+.

= Backwards Compatibility

* The switch to using JSON.parse :create_additions=>false means that if your app expected JSON to deserialize into arbitrary ruby objects, it is probably broken. You should update your application code to manually convert the deserialized hashes into the ruby objects you want.

  Note that it's not just this new version of Sequel that will cause that, older versions of Sequel will break in the same way if you update your JSON library to a version that is not vulnerable by default.

  This potentially affects the pg_json extension and serialization plugin if you were expecting the JSON stored in the database to be deserialized into arbitrary ruby objects.

  See the json_serializer/xml_serializer changes mentioned in the Other Improvements section.

* The reimplemented query extension is not completely backwards compatible. For example, inside a query block, self refers to the proxy object instead of a dataset, and calling methods that return rows no longer raises an exception.

* The metadata parsing methods on PostgreSQL no longer work with unqualified tables where the table is not in the schema search path. This makes metadata parsing consistent with how datasets operate. For tables outside the schema search path, you must qualify it before use now.

  Additionally, using a nonexistent table name will raise an exception instead of returning empty results in some cases.

* The Dataset#def_mutation_method instance method has been removed. This method added mutation methods directly on the dataset instance, which is generally not desired. Using the def_mutation_method class method with the :module option is now the recommended approach.
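  A minimal sketch of the recommended form (the module and method names here are made up; def_mutation_method defines a bang version of an existing dataset method):

    module DatasetBangMethods
    end

    # Defines DatasetBangMethods#paginate!, which calls #paginate and
    # modifies the receiver in place
    Sequel::Dataset.def_mutation_method(:paginate, :module=>DatasetBangMethods)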
* The switch to using ESCAPE for LIKE characters is backwards incompatible on databases that don't use escaping by default, when backslash is used in a LIKE pattern as a regular character. Now you have to double the backslash in the pattern.

* The Database#database_error_regexps private method now can return any enumerable yielding regexp/exception class pairs; it is no longer specified to return a hash.

sequel-5.63.0/doc/release_notes/3.46.0.txt

= New Features

* Dataset#first! has been added. This is identical to #first, except where #first would return nil due to no row matching, #first! raises a Sequel::NoMatchingRow exception. The main benefit here is that a standard exception class is now used, so external libraries can deal with these exceptions appropriately (such as web applications returning a 404 error).

* Dataset#with_pk! has been added to model datasets. Similar to #first!, this raises a Sequel::NoMatchingRow exception instead of returning nil if there is no matching row.

* A drop_foreign_key method has been added to the alter table generator:

    alter_table(:tab){drop_foreign_key :col}

  This relies on foreign_key_list working and including the name of the foreign key. Previously, you'd have to drop the foreign key constraint before dropping the column in some cases.

* Column constraints can now be named using :*_constraint_name options:

    create_table(:tab) do
      primary_key :id, :primary_key_constraint_name=>:pk_name
      foreign_key :t_id, :t, :foreign_key_constraint_name=>:fk_name,
        :unique=>true, :unique_constraint_name=>:uk_name
    end

  This makes it easier to name constraints, which has always been recommended as it makes it easier to drop such constraints in the future.

* On Microsoft SQL Server, Dataset#cross_apply and #outer_apply have been added to use CROSS/OUTER APPLY. These are useful if you want to join a table to the output of a function that takes the table as an argument.

= Other Improvements

* The connection pools are now faster when using the :connection_handling=>:queue option.

* External connection pool classes can now be loaded automatically by the :pool_class option.

* Database#each_server now raises if not given a block. Previously, it just leaked Database references.

* On Microsoft SQL Server, ] characters are now escaped correctly in identifiers.

* On PostgreSQL, infinite dates are also handled when using Database#convert_infinite_timestamps. Previously, infinite dates were incorrectly converted to 0000-01-01.

* The associations, composition, serialization, and dirty plugins now clear caches stored in the instance in some additional cases, such as when saving model instances when the dataset supports insert_select.

* Model#validates_type in the validation_helpers plugin now handles false values correctly.

* The string_stripper plugin has been fixed to not change the result of Model.set_dataset.

* You can now drop primary key constraints on H2, using:

    alter_table(:tab){drop_constraint :foo, :type=>:primary_key}

* The jdbc/as400 adapter has been fixed, it was broken starting in Sequel 3.44.0.

* A Security guide has been added explaining various security issues to think about when using Sequel.

= Backwards Compatibility

* The change to make the associations, composition, serialization, and dirty plugins clear caches after saving when the dataset supports insert_select can break code that expected the previous behavior.
  For example:

    artist = Artist[1]
    artist.has_albums # => false
    album = Album.new(:artist=>artist)
    def album.after_create
      super
      artist.update(:has_albums=>true)
    end
    album.save
    artist.has_albums # => false

  Such code should either refresh the artist after saving the album, or use album.artist.has_albums. You already had to do that if the dataset did not support insert_select; the impetus for this change was to make the behavior consistent.

* Decimal/numeric columns are now strictly typecast by default, similar to integer and real/double precision columns. If you want the previous loose typecasting for decimal/numeric columns, use the looser_typecasting extension.

* External adapters that called Database.set_adapter_scheme with a string should change to using a symbol.

* Dataset#select_map, #select_order_map, and #get now raise an exception if they are passed a plain string inside an array. If you do want to use a plain string, you now need to alias it:

    dataset.get([Sequel.as('string', :some_alias)])

= Sequel 4 Implementation Planning

* Sequel 4 implementation planning has begun. If you want to view and/or provide feedback on the implementation plan, see https://github.com/jeremyevans/sequel-4-plans

sequel-5.63.0/doc/release_notes/3.47.0.txt

= New Plugins

* An auto_validations plugin has been added, which automatically adds not null, type, and unique validations based on information obtained from parsing the database schema. If you don't require customization of the validation error message per column, this can significantly DRY up validation code. Currently this plugin requires that the database support index parsing; that restriction will be removed in Sequel 4.

* An input_transformer plugin has been added, for automatically running a transformation proc on all model column setter input before use. This is a generalization of the string_stripper plugin, allowing arbitrary modifications to the input.

* An error_splitter plugin has been added, for splitting validation errors applying to multiple columns into a separate validation error per column. This is useful if you want to include such errors when using Errors#on to get all errors on the column. In general, only uniqueness errors apply to multiple columns, so those are the only errors likely to be affected.

= Other New Features

* Database.extension has been added, allowing you to load an extension into all future databases. This is similar to loading a plugin into Sequel::Model itself. For example, if you want all Database instances to use the query_literals extension, run the following before creating your Database instances:

    Sequel::Database.extension :query_literals

* Database.after_initialize has been added for running a hook on all new databases created.

* Model.default_set_fields_options has been added, allowing you to set the default options for the #set_fields and #update_fields methods. This is useful if you want to make :missing=>:raise or :missing=>:skip the default behavior.

* The :setter, :adder, :remover, and :clearer association options have been added. These allow you to override the default implementation used to modify the association. :setter affects the *_to_one setter method, :adder the *_to_many add_* method, :remover the *_to_many remove_* method, and :clearer the *_to_many remove_all_* method.

  Previously, you had to override a private method to get the same behavior, this just offers a nicer API for that.
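  A minimal sketch of what an overridden adder might look like (the association and column names here are made up):

    Album.one_to_many :tracks,
      :adder=>proc{|track| track.update(:album_id=>id, :added_manually=>true)}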
* A :keep_reference Database option has been added. When set to
  false, a reference to the Database instance is not kept in
  Sequel::DATABASES. This is designed for Database instances created
  by libraries, so they don't accidentally get chosen as the default
  Sequel::Model database.

* Model#modified! now accepts a column and marks that column as
  changed. This is useful if you plan on mutating the column value as
  opposed to reassigning it.

* Model#modified? now accepts a column and returns whether the column
  has been changed.

* The migrators now support an :allow_missing_migration_files option,
  which makes them silently ignore errors related to missing
  migration files.

* validates_schema_types has been added to validation_helpers, which
  validates that the column values are instances of the expected ruby
  type for the given database schema type. This is a more robust
  version of the validates_not_string extension, and users of
  validates_not_string are encouraged to switch soon, as
  validates_not_string is going away in Sequel 4.

  validates_schema_type has been added to validation_class_methods,
  which performs the same validation, but it requires the columns be
  listed explicitly.

  validates_type in validation_helpers has been expanded to accept an
  array of allowable classes.

  Related to this is the addition of Database#schema_type_class for
  returning the type class(es) for the given schema type symbol.

* validates_not_null has been added to the validation_helpers plugin.
  This is similar to the validates_presence validation, but only
  checks for nil values, allowing empty/blank strings.

* In the caching plugin, when the :ignore_exceptions option is true,
  exceptions raised when deleting an object from the cache are now
  ignored correctly.

* On PostgreSQL, Sequel now supports a :search_path Database option
  to automatically set the client connection search_path. This allows
  you to control which schemas do not require qualification, and in
  which order to check schemas when referencing unqualified objects
  (see the sketch below). If you were using the default_schema
  setting, it is recommended that you switch to using :search_path
  instead.

* The pg_array extension can now register array types on a
  per-Database basis via Database#register_array_type. Previously,
  only global registration of array types was allowed. Additionally,
  when registering array types on a per-Database basis, the oids can
  be looked up automatically, making it possible to register array
  types with just a type name:

    DB.register_array_type(:interval)

* The pg_array extension now automatically creates conversion procs
  for array types of all named types used by the database. This means
  that if you use the pg_array and pg_hstore extensions, the hstore[]
  type is now handled correctly.

* The postgres adapter now supports :use_iso_date_format and
  :convert_infinite_timestamps Database options. Previously,
  use_iso_date_format was only a global setting, and
  convert_infinite_timestamps could only be set after initialization.

* Database#supports_schema_parsing? has been added to check if schema
  parsing via the Database#schema method is supported.

= Other Improvements

* A race condition related to prepared_sql for newly prepared
  statements has been fixed.

* Dataset#get now works correctly if given an array with multiple
  columns if there were no returned rows.

* The plugins that ship with Sequel now handle frozen model instances
  correctly.

* Freezing of model instances now works correctly for models without
  primary keys.
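  As a sketch of the :search_path option mentioned in the new
  features above (the connection URL and schema names here are
  hypothetical):

    DB = Sequel.connect('postgres://localhost/app_db',
      :search_path=>'app_schema,public')

  Unqualified tables are then looked up in app_schema first, falling
  back to public.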
* Database constraints added with the constraint_validations plugin
  now handle NULL values correctly if the :allow_nil=>true setting is
  used.

* The pagination, pretty_table, query, schema_caching, schema_dumper,
  and select_remove extensions can now be loaded by
  Database#extension. If you are loading them globally via
  Sequel.extension, switch to using Database#extension, since that
  will be required starting in Sequel 4.

* The lazy_attributes plugin no longer uses the identity_map plugin
  internally, and eager loading lazy attributes now works correctly
  without an active identity map.

* The many_to_one_pk_lookup plugin now handles many more corner
  cases, and should be safe to enable by default.

* The static_cache plugin now has optimized implementations of
  Model.map, .to_hash, and .to_hash_groups which work without a
  database query. Model.count without arguments has also been
  optimized to not require a database query.

* Fetching new records has been made faster when using the
  update_primary_key plugin, since it was changed to cache the
  primary key values lazily.

* When using the update_primary_key plugin, if the primary key
  changes, the associations cache of all non-many_to_one associations
  is now cleared (since those will likely be based on the primary
  key).

* The pg_typecast_on_load plugin no longer errors if given a column
  that doesn't have a matching oid conversion proc.

* Handling of domain types on PostgreSQL has been significantly
  improved. Domain type columns now have correct model typecasting,
  and the pg_row extension correctly sets up conversion procs for
  domain types inside composite types.

* Postgres::HStoreOp#- now automatically casts string input to text,
  so that PostgreSQL doesn't assume the string is an hstore.

* Postgres::PGRangeOp#starts_before and #ends_after have been renamed
  to #ends_before and #starts_after. The previous names were
  misleading. The old names are still available for backwards
  compatibility, but they will be removed in Sequel 4.

* The pg_row plugin now handles aliased tables correctly.

* Model#validate in the validation_class_methods plugin no longer
  skips validate methods in superclasses or previously loaded
  plugins.

* Loading the touch plugin into a model subclass after it has been
  loaded into a model superclass no longer ignores inherited touched
  associations.

* Sequel no longer resets the conversion procs for the Database
  instance when using Database#extension to load a pg_* extension
  that adds global conversion procs. Instead, the global conversion
  procs are added to the instance-specific conversion procs. The
  result of this is that manually added conversion procs will not be
  lost if an extension is loaded afterward.

* The jdbc adapter now references the driver class before loading
  subadapter specific code, which can fix issues if the database
  tries to connect on initialization (such as the jdbc/postgres
  adapter if the pg_hstore extension is loaded previously).

* A guide describing Sequel's support for advanced PostgreSQL
  features has been added.

= Backwards Compatibility

* If you have already used the constraint_validations plugin to
  create validations with the :allow_nil=>true option, you should
  drop and regenerate those constraints to ensure they handle NULL
  values correctly.

* The change to make PostgreSQL automatically handle domain types can
  break previous code that set up special conversions and typecasts
  per domain type.
  In the schema parsing, if you want to get the domain type
  information, it will be contained in the :db_domain_type and
  :domain_oid schema entries.

* Sequel::Postgres.use_iso_date_format is now only defined if you are
  using the postgres adapter. Previously, it could be defined when
  using other adapters with a pg_* extension, even though the setting
  had no effect in that case.

* The validation_class_methods plugin now copies validations into the
  subclass upon inheritance, instead of recursing into the superclass
  on validation. This makes it more similar to how all the other
  Sequel plugins work. However, it also means that if you add
  validations to a superclass after creating a subclass, the subclass
  won't have those validations. Additionally, if you skip superclass
  validations in a child class after creating a grandchild class, the
  grandchild class could still have the parent class's validations.

* The validates_unique validation in validation_helpers no longer
  attempts to do the uniqueness query if the underlying columns have
  validation errors. The reasoning behind this is that if the
  underlying columns are not valid, the uniqueness query can cause a
  DatabaseError.

* If you were passing strings in hstore format to
  Postgres::HStoreOp#-, you should manually cast them to hstore:

    hstore_op - Sequel.cast('a=>b', :hstore)

* The default validation error message for validates_type has been
  modified.

* Database#schema_column_type was made public accidentally by an
  adapter and a few extensions. That has been fixed, but if you were
  calling it with an explicit receiver and it happened to work by
  accident before, you'll need to update your code.

= Sequel 4 Implementation Planning

* Sequel 4 implementation work will begin shortly. All Sequel users
  are encouraged to read about the proposed changes and provide
  feedback on the implementation plan. For details, see
  https://github.com/jeremyevans/sequel-4-plans.

sequel-5.63.0/doc/release_notes/3.48.0.txt

= Deprecation Warnings

The main change in Sequel 3.48.0 is the deprecation of Sequel
features that will be modified, moved, or removed in Sequel 4. For
the reasoning behind these changes, please review the commit logs at
https://github.com/jeremyevans/sequel-4-plans/commits/master

== Deprecation Logging

If you use a deprecated method or feature, Sequel will by default
print a deprecation message and 10 lines of backtrace to stderr to
easily allow you to figure out which code needs to be updated. You
can change where the deprecation messages go and how many lines of
backtrace are given using the following:

  # Log deprecation information to a file
  Sequel::Deprecation.output = File.open('deprecated.txt', 'wb')

  # Turn off all deprecation logging
  Sequel::Deprecation.output = nil

  # Use 5 lines of backtrace when logging deprecation messages
  Sequel::Deprecation.backtrace_filter = 5

  # Use all backtrace lines when logging deprecation messages
  Sequel::Deprecation.backtrace_filter = true

  # Don't include backtraces in the deprecation logging
  Sequel::Deprecation.backtrace_filter = false

  # Select which backtrace lines to output
  Sequel::Deprecation.backtrace_filter = \
    lambda{|line, line_no| line_no < 3 || line =~ /my_app/}

== Major Change

* The core extensions will no longer be loaded by default. You will
  have to use `Sequel.extension :core_extensions` to load the core
  extensions.

* The Symbol#[] and Symbol#{<,>,<=,>=} methods will no longer be
  provided by the core extensions on ruby 1.8.
  You will have to use `Sequel.extension :ruby18_symbol_extensions`
  to use them.

== Core Behavior Changes

* Dataset#filter becomes an alias for #where, and #exclude becomes an
  alias for #exclude_where. You will have to use
  `DB.extension :filter_having` to get the previous behavior.
  Dataset#and and #or will also only affect the WHERE clause.

* Dataset#and, #or, and #invert will not raise errors for no existing
  filter.

* Dataset#select_more becomes an alias for #select_append.

* Dataset#select and #from will no longer consider a hash argument as
  an alias specification. You will have to use
  `DB.extension :hash_aliases` to get the previous behavior (see the
  sketch at the end of this section).

* Database#dataset and Dataset.new will not take an options hash.

* Database#transaction :disconnect=>:retry option will be removed.

* Calling Dataset#add_graph_aliases before #graph or
  #set_graph_aliases will raise an Error.

* Datasets will have a frozen options hash by default.

* Dataset#set_overrides and #set_defaults will move to the
  set_overrides extension.

* Sequel.empty_array_handle_nulls will be removed. To get the
  empty_array_handle_nulls = false behavior, you will have to use
  `DB.extension :empty_array_ignore_nulls`.

* The second argument to Dataset #union, #intersect, and #except must
  be an options hash if it is given.

* The fourth argument to Dataset #join_table must be an options hash
  if it is given.

* Using a mismatched number of placeholders and arguments in a
  placeholder literal string will raise an error.

* Dataset#graph_each will move to the graph_each extension.

* Database#default_schema will be removed.

* Dataset#[]= will be moved to the sequel_3_dataset_methods
  extension.

* Dataset#insert_multiple will be moved to the
  sequel_3_dataset_methods extension.

* Dataset#set will be moved to the sequel_3_dataset_methods
  extension.

* Dataset#to_csv will be moved to the sequel_3_dataset_methods
  extension.

* Dataset#db= and #opts= setters will be moved to the
  sequel_3_dataset_methods extension.

* Dataset#qualify_to and #qualify_to_first_source will be moved to
  the sequel_3_dataset_methods extension.

* Remove default methods that raise Sequel::NotImplemented:
  Database#connect, #execute, #foreign_key_list, #indexes, #tables,
  and #views, and Dataset#fetch_rows.

* Sequel::SQL::Expression#to_s will be removed.

* All Dataset methods in Dataset::PUBLIC_APPEND_METHODS except for
  #literal, #quote_identifier, and #quote_schema_table will be
  removed.

* All Dataset methods in Dataset::PRIVATE_APPEND_METHODS will be
  removed.

* Sequel k_require, ts_require, tsk_require, and
  check_requiring_thread will be removed.

* Dataset.def_append_methods will be removed.

* Dataset#table_ref_append will be removed.

* Sequel.virtual_row_instance_eval accessor will be removed.

* Database#reset_schema_utility_dataset will be removed.

== Adapter Behavior Changes

* The Database#do method will be removed from the ado, db2, dbi,
  informix, odbc, openbase, and oracle adapters.

* The jdbc adapter will raise an error when parsing the schema for a
  table if it detects results for the same table name in multiple
  schemas.

* The Database#query method will be removed from the informix
  adapter.

* Dataset#lock on PostgreSQL will check the given lock mode.

* Sequel will check the client_min_messages setting before use on
  PostgreSQL.

* Prepared statement placeholders on PostgreSQL will no longer
  support implicit casting via :$x__type.
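  As a sketch of the hash_aliases change mentioned in the core
  behavior changes above, loading the extension restores the hash
  alias handling for #from and #select:

    DB.extension :hash_aliases
    DB.from(:albums=>:a).sql
    # => "SELECT * FROM albums AS a"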
== Extension Behavior Changes

* The following extensions will no longer make global changes to the
  Database and Dataset classes: null_dataset, pagination,
  pretty_table, query, schema_caching, schema_dumper, select_remove,
  and to_dot. These will be changed to Database/Dataset specific
  extensions.

* The pg_auto_parameterize and pg_statement_cache extensions will be
  removed.

* Sequel::Dataset.introspect_all_columns will be removed from the
  columns_introspection extension.

* PGRangeOp#starts_before and #ends_after will be removed from the
  pg_range_ops extension.

== Model Behavior Changes

* Model#initialize will accept only one argument.

* The after_initialize hook will be moved to a plugin.

* Move blacklist-based security methods (#set_except, #update_except,
  .set_restricted_columns) to a plugin.

* The :eager_loader and :eager_grapher association option procs will
  always be passed a hash.

* Model string column setters will consider array and hash input to
  be invalid.

* Remove save taking multiple arguments for the columns to save. Add
  Model#save :columns option for saving specific columns.

* Don't automatically choose a reciprocal association with a
  condition or block.

* Don't automatically set up reciprocal associations if multiple ones
  match.

* Model::Errors#[] will no longer modify the receiver. If you want
  autovivification, use the active_model plugin.

* Model.set_primary_key will no longer accept composite keys as
  multiple arguments.

* The correlated_subquery eager limit strategy will be removed.

* The following Model class dataset methods will be removed: print,
  each_page, paginate, set, add_graph_aliases, insert_multiple,
  query, set_overrides, set_defaults, to_csv.

* The Model.{destroy,delete,update} class dataset methods will be
  moved to the scissors plugin.

* Model#pk_or_nil will be removed.

* Model#set_values will no longer be called directly by any Sequel
  code, and overriding it is deprecated. It will be removed in Sequel
  4.1.

* Model.cache_anonymous_models accessor will move to the Sequel
  module.

* Model::InstanceMethods.class_attr_overridable and
  .class_attr_reader will be removed.

* The :one_to_one option check for one_to_many associations will be
  removed.

== Plugin Behavior Changes

* Public dataset methods will no longer have class methods
  automatically added.

* The validates_not_string validation will be removed from the
  validation_class_methods and validation_helpers plugins.

* In the json_serializer plugin, the to_json :root=>true option means
  :root=>:collection instead of :root=>:both.

* In the json_serializer plugin, the to_json :naked option will
  default to true, and there will not be a way to add the
  JSON.create_id automatically.

* In the json_serializer plugin, from_json will no longer
  automatically delete the JSON.create_id key from the input hash.

* The #to_json and #to_xml :all_columns and :all_associations options
  in the json_serializer and xml_serializer plugins will be removed.

* The Model.json_create method will be removed from the
  json_serializer plugin.

* The validates_type validation will raise validation errors for nil
  if :allow_nil=>true is not used.

* auto_validate_presence_columns will be removed from the
  auto_validations plugin.

* The identity_map plugin will be removed.

== Internal Changes

* The sequel_core.rb and sequel_model.rb files will be removed.

* Dataset#{quote_identifiers,identifier_output_method,
  identifier_input_method} will assume Database implements the
  methods.

= Forwards Compatibility

Not all changes planned in Sequel 4 have deprecation warnings.
The following changes will be made in Sequel 4 but do not have
deprecation warnings in 3.48.0:

* The threaded connection pools will default to
  :connection_handling=>:queue. You can manually set
  :connection_handling=>:stack to get the current behavior.

* Dataset#join_table will default to :qualify=>:deep. You can
  manually set :qualify=>:symbol to get the current behavior. This
  can be set at a global level by overriding
  Dataset#default_join_table_qualification.

* Model.raise_on_typecast_failure will default to false. Set this to
  true to get the current behavior of raising typecast errors in the
  setter methods.

* Model#save will no longer call Model#_refresh or Model#set_values
  internally after an insert. Manual refreshes will be treated
  differently than after creation refreshes in Sequel 4.

* On SQLite, integer_booleans will be true by default. Set this to
  false to get the current behavior of 't' for true and 'f' for
  false.

* On SQLite, use_timestamp_timezones will be false by default. Set
  this to true to get the current behavior with timezone information
  in timestamps.

* The default value for most option hash arguments will be an empty
  frozen hash. If you are overriding methods and modifying option
  hashes, fix your code.

* The defaults_setter plugin will work in a lazy manner instead of an
  eager manner. If you must have the values hash contain defaults for
  new objects (instead of just getting defaults from getter methods),
  you'll need to fork the current plugin.

* Model#set_all will allow setting the primary key columns.

* The many_to_one_pk_lookup plugin will be integrated into the
  default associations support.

* The association_autoreloading plugin will be integrated into the
  default associations support.

* Plugins will extend the class with ClassMethods before including
  InstanceMethods in the class.

* Dataset#get, #select_map, and #select_order_map will automatically
  add aliases for unaliased expressions if given a single expression.

* Database#tables and #views on PostgreSQL will check against the
  current schemas in the search path.

* Sequel::SQL::SQLArray alias for ValueList will be removed.

* Sequel::SQL::NoBooleanInputMethods will be removed.

* Sequel::NotImplemented will be removed.

* Sequel::Model::EMPTY_INSTANCE_VARIABLES will be removed.

* Sequel will no longer provide a default database for the adapter or
  integration specs.

= New Features

* You can now choose which Errors class to use on a per model basis
  by overriding Model#errors_class.

* The following Database methods have been added to check for
  support: supports_index_parsing?, supports_foreign_key_parsing?,
  supports_table_listing?, supports_view_listing?.

* The pg_hstore_ops extension now integrates with the pg_array,
  pg_hstore, and pg_array_ops extensions, allowing you to pass in
  arrays and hashes to be treated as PGArrays and HStores, and
  returning ArrayOps for PostgreSQL functions/operators that return
  arrays.

* Sequel.object_to_json and Sequel.json_parser_error_class have been
  added and all internal json usage uses them, so you can now
  override these methods if you want to use an alternative json
  library with Sequel.

* The association_proxies plugin now accepts a block allowing the
  user control over which methods are proxied to the dataset or the
  cached array of instances. You can base the decision on where to
  send the method using a variety of factors including the method
  name, the method arguments, the state of the current instance, or
  the related association.
  Here's an example of a simple case just depending on the name of
  the method:

    Model.plugin :association_proxies do |opts|
      [:find, :where, :create].include?(opts[:method])
    end

  If the block returns true, the method is sent to the dataset;
  otherwise, it is sent to the array of associated objects.

* The auto_validations plugin now accepts a :not_null=>:presence
  option, for doing a presence validation instead of a not_null
  validation. This is useful for databases with NOT NULL constraints
  where you also want to disallow empty strings.

* The auto_validations plugin now validates against explicit nil
  values in NOT NULL columns that have defaults.

* The constraint_validations plugin now reflects validations, using
  Model.constraint_validation_reflections:

    Model.constraint_validation_reflections[:column]
    # => [[:presence, {}],
    #     [:max_length, {:argument=>255, :message=>'just too long'}]]

* The constraint_validations plugin can now be set to pass specific
  validation options to the validation_helpers plugin. This can be
  useful if using the auto_validations plugin with this plugin to
  avoid duplicate error messages for nil values:

    Model.plugin :constraint_validations,
      :validates_options=>{:presence=>{:allow_nil=>true}}

* The named_timezones extension can now be loaded as a database
  extension, which allows for automatic conversions of string
  timezones:

    DB.extension :named_timezones
    DB.timezone = 'America/Los_Angeles'

* Offsets are now emulated on Microsoft Access using a combination of
  reverse orders and total counts. This is slow, especially on large
  datasets, but probably better than having no support at all. It is
  also possible to use the same code to support Microsoft SQL Server
  2000, but as Sequel does not support that (minimum supported
  version is 2005), you have to do it manually:

    Sequel.require 'adapters/utils/emulate_offset_with_reverse_and_count'
    DB.extend_datasets Sequel::EmulateOffsetWithReverseAndCount

= Other Improvements

* Dataset#clone is now faster.

* Database methods that create datasets (fetch, from, select, get)
  are now faster.

* Model.with_pk and .with_pk! are now faster.

* Dataset#or now just clones if given an empty argument, similar to
  Dataset#where.

* Sequel now correctly frees statements after using them in the ibmdb
  adapter. Previously, they weren't freed until GC, which could
  result in errors if all available handles are in use.

* Dataset creation is now faster on Microsoft SQL Server.

* The mediumint and mediumtext types are now recognized on MySQL.

* The ado adapter now handles disconnecting an already disconnected
  connection.

* The auto_validations plugin now works on databases that don't
  support index parsing. However, it will not set up automatic
  uniqueness validations on such databases.

* The validation_helpers plugin is more strict in some cases when
  checking for nil values, using a specific nil check instead of a
  general falsey check.

* The table inheritance plugins now correctly handle usage of
  set_dataset in a subclass.

* The bin/sequel command line tool now has specs.

= Backwards Compatibility

* Sequel now uses aliases for many internal Dataset#get calls, such
  as those used by table_exists? and max.

* Sequel no longer uses class variables internally. Instead, instance
  variables of the Sequel::Database class are used.

* Sequel now sets up the identifier mangling methods on Database
  initialization instead of on first use.

* The private Database#adapter_initialize method has been added for
  per adapter configuration.
  All internal adapters have been switched to use this method instead
  of overriding initialize, and all external adapters should as well.
  This makes sure that Database instances are not added to
  Sequel::DATABASES until they have been completely initialized.

* Virtual row blocks no longer convert their return values to an
  array. Among other things, this means that having a virtual row
  block return a hash works as expected.

* The private Dataset#hash_key_symbol method now only takes a single
  argument.

* Database#constraint_validations in the constraint_validations
  plugin now returns raw hash rows, instead of arrays of validation
  method call arguments.

* Dataset#count now uses a lowercase count function in the SQL.

* Passing a non-String or Hash as the first argument to an adapter
  method (e.g. Sequel.postgres(1)) now raises an error. Before, this
  used to work on some adapters that implicitly converted the
  database name to a string.

* The stats and dcov rake tasks were removed.

sequel-5.63.0/doc/release_notes/3.5.0.txt

New Plugins
-----------

* A class_table_inheritance plugin has been added, supporting model
  inheritance in the database using a table-per-model-class approach.
  Each table stores only attributes unique to that model or subclass
  hierarchy. For example, with this hierarchy:

          Employee
         /        \
      Staff     Manager
                   |
               Executive

  the following database schema may be used (table - columns):

  * employees - id, name, kind
  * staff - id, manager_id
  * managers - id, num_staff
  * executives - id, num_managers

  The class_table_inheritance plugin assumes that the main table
  (e.g. employees) has a primary key field (usually
  autoincrementing), and all other tables have a foreign key of the
  same name that points to the same key in their superclass's table.
  For example:

  * employees.id - primary key, autoincrementing
  * staff.id - foreign key referencing employees(id)
  * managers.id - foreign key referencing employees(id)
  * executives.id - foreign key referencing managers(id)

  When using the class_table_inheritance plugin, subclasses use
  joined datasets:

    Employee.dataset.sql  # SELECT * FROM employees
    Manager.dataset.sql   # SELECT * FROM employees
                          # INNER JOIN managers USING (id)
    Executive.dataset.sql # SELECT * FROM employees
                          # INNER JOIN managers USING (id)
                          # INNER JOIN executives USING (id)

  This allows Executive.all to return instances with all attributes
  loaded. The plugin overrides deleting, inserting, and updating in
  the model to work with multiple tables, by handling each table
  individually.

  This plugin allows and encourages the use of a :key option to mark
  a column holding the class name. This allows methods on the
  superclass to return instances of specific subclasses.

    a = Employee.all # [<#Staff>, <#Manager>, <#Executive>]

  This plugin requires the lazy_attributes plugin and uses it to
  handle subclass specific attributes that would not be loaded when
  calling superclass methods (since those wouldn't join to the
  subclass tables). For example:

    a.first.values     # {:id=>1, :name=>'S', :kind=>'Staff'}
    a.first.manager_id # Loads the manager_id attribute from the
                       # database

  The class_table_inheritance plugin requires JOIN USING and
  therefore is not supported on H2 or Microsoft SQL Server, which do
  not support that SQL-92 feature.

* An association_dependencies plugin was added for deleting,
  destroying, or nullifying associated objects when destroying a
  model object. This just gives an easy way to add the necessary
  before and after destroy hooks.
  The following association types support the following dependency
  actions:

  * :many_to_many - :nullify (removes all related entries in join
    table)
  * :many_to_one - :delete, :destroy
  * :one_to_many - :delete, :destroy, :nullify (sets foreign key to
    NULL for all associated objects)

  This plugin works directly with the association datasets and does
  not use any cached association values. The :delete action will
  delete all associated objects from the database in a single SQL
  call. The :destroy action will load each associated object from the
  database and call the destroy method on it.

  The plugin call takes a hash of association symbol keys and
  dependency action symbol values. Alternatively, you can specify
  additional dependencies later using add_association_dependencies:

    Business.plugin :association_dependencies, :address=>:delete
    # or:
    Artist.plugin :association_dependencies
    Artist.add_association_dependencies :albums=>:destroy,
      :reviews=>:delete, :tags=>:nullify

* A force_encoding plugin was added that forces the encoding of
  strings used in model instances. When model instances are loaded
  from the database, all values in the hash that are strings are
  forced to the given encoding. Whenever you update a model column
  attribute, the resulting value is forced to a given encoding if the
  value is a string. There are two ways to specify the encoding. You
  can either do so in the plugin call itself, or via the
  forced_encoding class accessor:

    class Album < Sequel::Model
      plugin :force_encoding, 'UTF-8'
      # or
      plugin :force_encoding
      self.forced_encoding = 'UTF-8'
    end

  This plugin only works on ruby 1.9, since strings don't have
  encodings in 1.8.

* A typecast_on_load plugin was added, for fixing bad database
  typecasting when loading model objects. Most of Sequel's database
  adapters don't have complete control over typecasting, and may
  return columns that aren't typecast correctly (with correct being
  defined as how the model object would typecast the same column
  values). This plugin modifies Model.load to call the setter methods
  (which typecast by default) for all columns given. You can either
  specify the columns to typecast on load in the plugin call itself,
  or afterwards using add_typecast_on_load_columns:

    Album.plugin :typecast_on_load, :release_date, :record_date
    # or:
    Album.plugin :typecast_on_load
    Album.add_typecast_on_load_columns :release_date, :record_date

  If the database returns release_date and record_date columns as
  strings instead of dates, this will ensure that if you access those
  columns through the model object, you'll get Date objects instead
  of strings.

* A touch plugin was added, which adds Model#touch for updating an
  instance's timestamp, as well as touching associations when an
  instance is updated or destroyed.

  The Model#touch instance method saves the object with a modified
  timestamp. By default, it uses the :updated_at column, but you can
  set which column to use. It also supports touching of associations,
  so that when the current model object is updated or destroyed, the
  associated rows in the database can have their modified timestamp
  updated to the current timestamp. Example:

    class Album < Sequel::Model
      plugin :touch, :column=>:modified_on, :associations=>:artist
    end

* A subclasses plugin was added, for recording all of a model's
  subclasses and descendent classes.
  Direct subclasses are available via the subclasses method, and all
  descendent classes are available via the descendents method:

    c = Class.new(Sequel::Model)
    c.plugin :subclasses
    sc1 = Class.new(c)
    sc2 = Class.new(c)
    ssc1 = Class.new(sc1)
    c.subclasses    # [sc1, sc2]
    sc1.subclasses  # [ssc1]
    sc2.subclasses  # []
    ssc1.subclasses # []
    c.descendents   # [sc1, ssc1, sc2]

  The main use case for this is if you want to modify all models
  after the model subclasses have been created. Since mutable options
  are copied when subclassing, modifying parent classes does not
  affect current subclasses, only future ones. The subclasses plugin
  allows you to get all subclasses so that you can easily modify
  them. The plugin only records subclasses created after the plugin
  call, though.

* An active_model plugin was added, giving Sequel::Model an
  ActiveModel compliant API, in so much as it passes the
  ActiveModel::Lint tests.

New Extensions
--------------

* A named_timezones extension was added, allowing you to use named
  timezones such as "America/Los_Angeles" (the default Sequel
  timezone support only supports UTC or local time). This extension
  requires TZInfo. It also sets the Sequel.datetime_class to
  DateTime, so database timestamps will be returned as DateTime
  instances instead of Time instances. This is because ruby's Time
  class doesn't support timezones other than UTC and local time.

  This plugin allows you to pass either strings or TZInfo::Timezone
  instances to Sequel.database_timezone=, application_timezone=, and
  typecast_timezone=. If a string is passed, it is converted to a
  TZInfo::Timezone using TZInfo::Timezone.get.

  Let's say you have the database server in New York and the
  application server in Los Angeles. For historical reasons, data is
  stored in local New York time, but the application server only
  services clients in Los Angeles, so you want to use New York time
  in the database and Los Angeles time in the application. This is
  easily done via:

    Sequel.database_timezone = 'America/New_York'
    Sequel.application_timezone = 'America/Los_Angeles'

  Then, before timestamps are stored in the database, they are
  converted to New York time. When timestamps are retrieved from the
  database, they are converted to Los Angeles time.

* A thread_local_timezones extension was added. This allows you to
  set a per-thread timezone that will override the default global
  timezone while the thread is executing. The main use case is for
  web applications that execute each request in its own thread, and
  want to set the timezones based on the request. The most common
  example is having the database always store time in UTC, but have
  the application deal with the timezone of the current user. That
  can be done with:

    Sequel.database_timezone = :utc
    # In each thread:
    Sequel.thread_application_timezone = current_user.timezone

  This extension is designed to work with the named_timezones
  extension.

* An sql_expr extension was added that adds .sql_expr methods to all
  objects, giving them easy access to Sequel's DSL:

    1.sql_expr < :a     # 1 < a
    false.sql_expr & :a # FALSE AND a
    true.sql_expr | :a  # TRUE OR a
    ~nil.sql_expr       # NOT NULL
    "a".sql_expr + "b"  # 'a' || 'b'

  Proc#sql_expr uses a virtual row:

    proc{[[a, b], [a, c]]}.sql_expr | :x
    # (((a = b) AND (a = c)) OR x)

* A looser_typecasting extension was added, for using to_f and to_i
  instead of the more strict Kernel.Float and Kernel.Integer when
  typecasting floats and integers.
  To use it, you should extend the database with the
  Sequel::LooserTypecasting module after loading the extension:

    Sequel.extension :looser_typecasting
    DB.extend(Sequel::LooserTypecasting)

  This makes the behavior more like ActiveRecord:

    a = Artist.new(:num_albums=>'a')
    a.num_albums # => 0

Other New Features
------------------

* Associations now support composite keys. All of the :*key options
  now accept arrays of symbols instead of plain symbols. Example:

    Artist.primary_key # [:name, :city]
    Album.many_to_one :artist, :key=>[:artist_name, :artist_city]
    Artist.one_to_many :albums, :key=>[:artist_name, :artist_city]

  All association types are supported, including the built-in
  many_to_many association and the many_through_many plugin. Both
  methods of eager loading work with composite keys for all
  association types. Setter and add/remove/remove_all methods also
  now work with composite keys.

* Associations now respect a :validate option, which can be set to
  false to not validate when implicitly saving associated objects.

  There isn't a lot of implicit saving in Sequel's association
  methods, but this gives the user control over validation when the
  association methods implicitly save an object.

* In addition to the regular association methods, the
  nested_attributes plugin was also updated to respect the
  :validate_association option. It was also modified to not validate
  associated objects twice, once when the parent object was validated
  and again when the associated object was saved. Additionally, if
  you pass :validate=>false to the save method when saving the parent
  object, it will no longer attempt to validate associated objects
  when saving them.

* Dataset#insert and #insert_sql were refactored and now support the
  following API:

  * No arguments - Treat as a single empty hash argument
  * Single argument:
    * Hash - Use keys as columns and values as values
    * Array - Use as values, without specifying columns
    * Dataset - Use a subselect, without specifying columns
    * LiteralString - Use as the values
  * 2 arguments:
    * Array, Array - Use first array as keys, second as values
    * Array, Dataset - Use a subselect, with the array as columns
    * Array, LiteralString - Use LiteralString as the values, with
      the array as the columns
  * Anything else: Treat all given values as an array of values

* Graphing now works with previously joined datasets. The main use
  case of this is when eagerly loading (via eager_graph) model
  associations for models backed by joined datasets, such as those
  created by the class_table_inheritance plugin.

* Sequel.virtual_row was added allowing you to easily use the
  VirtualRow support outside of select, order, and filter calls:

    net_benefit = Sequel.virtual_row{revenue > cost}
    good_employee = Sequel.virtual_row{num_commendations > 0}
    fire = ~net_benefit & ~good_employee
    demote = ~net_benefit & good_employee
    promote = net_benefit & good_employee
    DB[:employees].filter(fire).update(:employed=>false)
    DB[:employees].filter(demote).update(:rank=>:rank-1)
    DB[:employees].filter(promote).update(:rank=>:rank+1)

* When Sequel wraps exceptions in its own classes (to provide
  database independence), it now keeps the wrapped exception
  available in a wrapped_exception accessor. This allows you to more
  easily determine the wrapped exception class, without resorting to
  parsing the exception message.

    begin
      DB.run('...')
    rescue Sequel::DatabaseError => e
      case e.wrapped_exception
      when Mysql::Error
        ...
      when PGError
        ...
      end
    end

* The MySQL adapter now supports a Dataset#split_multiple_result_sets
  method that yields arrays of rows (one per result set), instead of
  rows. This allows you to submit multiple statements at the same
  time (or call a stored procedure that returns multiple result
  sets), and know which rows are related to which result sets.

  This violates a lot of Sequel's internal assumptions and should be
  used with care. Existing row_procs are modified to work correctly,
  but graphing will not work on these datasets.

* The ADO adapter now accepts a :conn_string option and uses that as
  the full ADO connection string. This can be used to connect to any
  datasource ADO supports, such as Microsoft Excel.

* The Microsoft SQL Server shared adapter now supports a
  Database#server_version method.

* The Microsoft SQL Server shared adapter now supports updating and
  deleting from joined datasets.

* The Microsoft SQL Server shared adapter now supports a
  Dataset#output method that uses the OUTPUT clause.

* Model#_save now calls either Model#_insert or Model#_update for
  inserting/updating the row in the database. This allows for easier
  overriding when you want to allow creating and updating model
  objects backed by a joined dataset.

* Dataset#graph now takes a :from_self_alias option specifying the
  alias to use for the subselect created if the receiver is a joined
  but not yet graphed dataset. It defaults to the first source table
  in the receiver.

Other Improvements
------------------

* Typecasting model attributes is now done before checking existing
  values, instead of after. Before, the code for the model attribute
  setters would compare the given value to the existing entry. If it
  didn't match, the value was typecasted and then assigned. That led
  to the following situation:

    a = Album[1]
    a.num_tracks # => 10
    params # => {'num_tracks'=>'10'}
    a.set(params)
    a.changed_columns # => [:num_tracks]

  The new behavior typecasts the value first, and only sets it and
  records the column as changed if it doesn't match the typecasted
  value.

* Model#modified? is now always true if the record is new. modified?
  indicates the instance's status relative to the database, and since
  a new object is not yet in the database, and saving the object
  would add it, the object is considered modified. A consequence of
  this is that Model#save_changes now always saves if the object is
  new.

  If you want to check if there were changes to columns since the
  object was first initialized, you should use
  !changed_columns.empty?, which was the historical way to handle the
  situation.

* The DataObjects (do) adapter now supports DataObjects 0.10.

* Dataset#select_more and Dataset#order_more no longer affect the
  receiver. They are supposed to just return a modified copy of the
  receiver instead of modifying the receiver itself. For a few
  versions they have been broken in that they modified the receiver
  in addition to returning a modified copy (see the sketch below).

* Performance was increased for execution of prepared statements with
  multiple bound variables on MySQL.

* On MySQL, database errors raised when preparing statements or
  setting bound variable values are now caught and raised as
  Sequel::DatabaseErrors.

* On MySQL, more types of disconnection errors are detected.

* When altering columns in MySQL, options such as :unsigned,
  :elements, and :size that are given in the call are now respected.

* MySQL enum defaults are now handled correctly in the schema dumper.
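  As a short sketch of the Dataset#select_more fix noted above, the
  receiver is now left untouched and only the returned copy is
  modified:

    ds = DB[:albums].select(:id)
    ds2 = ds.select_more(:name)
    ds.sql  # => "SELECT id FROM albums"
    ds2.sql # => "SELECT id, name FROM albums"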
* The schema dumper no longer attempts to use unparseable defaults as
  literals on MySQL, since MySQL does not provide defaults as valid
  literals.

* The emulated offset support in the shared Microsoft SQL Server
  adapter now works better with model classes (or any datasets with
  row_procs).

* Microsoft SQL Server now supports using the WITH clause in delete,
  update, and insert calls.

* Parsed indexes when connecting to Microsoft SQL Server via JDBC no
  longer include primary key indexes.

* Dataset#insert_select now returns nil if disable_insert_returning
  is used in the shared PostgreSQL adapter. This makes it work as
  expected with model object creation.

* Calling Model.set_primary_key with an array of symbols to set a
  composite primary key is now supported. You can also provide
  multiple symbol arguments to do the same thing. Before, specifying
  an array of symbols broke the Model.[] optimization (see the sketch
  at the end of these notes).

* Literalization of timezones in timestamps now works correctly on
  Oracle.

* __FILE__ and __LINE__ are now used everywhere that eval is called
  with a string, which makes for better backtraces.

* The native MySQL adapter now correctly handles returning before
  yielding all result sets. Previously, this caused a commands out of
  sync error.

* Table names in common table expressions are now quoted.

* The Oracle adapter's Dataset#except now accepts a hash, giving it
  the same API as the default Dataset#except.

* When connecting to Microsoft SQL Server via ADO, allow
  Dataset#insert to take multiple arguments.

* Fractional timestamps are no longer used on ODBC.

* Schema parsing now works on MSSQL when the database is set to not
  quote identifiers.

* Timezone offsets are no longer used on Microsoft SQL Server, since
  they only work for the datetimeoffset type.

* Only 3 fractional digits in timestamps are used in Microsoft SQL
  Server, since an error is raised if you use the datetime type with
  more than that.

* The integration test suite now has guards for expected failures
  when run on known databases. Expected failures are marked as
  pending.

Backwards Compatibility
-----------------------

* Graphing to a previously joined (but not graphed) dataset now
  causes the receiver to be wrapped in a subselect, so if you graph a
  dataset to a previously joined dataset, and then filter the dataset
  referring to tables that were in the joined dataset (other than the
  first table), the SQL produced will probably no longer be valid.
  You should either filter the dataset before graphing or use the
  name of the first source of the joined dataset (which is what the
  subselect is aliased to) if filtering afterward.

  In certain cases, this change can cause tables to be aliased
  differently, so if you were graphing previously joined datasets and
  then filtering using the automatically generated aliases, you might
  need to modify your code.

* The DataObjects (do) adapter no longer supports DataObjects 0.9.x.

* The Dataset#virtual_row_block_call private instance method has been
  removed.

* Sequel's timezone support was significantly refactored, so if you
  had any custom modifications to the timezone support, they might
  need to be refactored as well.

* The SQL generation code was significantly refactored, so if you had
  any custom modifications in that area, you might need to refactor
  as well.
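  As a sketch of the composite primary key support mentioned in the
  improvements above (the LineItem model is hypothetical):

    class LineItem < Sequel::Model
      # Both forms now work and set a composite primary key:
      set_primary_key [:order_id, :product_id]
      # set_primary_key :order_id, :product_id
    end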
sequel-5.63.0/doc/release_notes/3.6.0.txt

New Features
------------

* Dataset#filter and related methods now accept a string with named
  placeholders, and a hash with placeholder values:

    ds.filter('copies_sold > :sales', :sales=>500000)

  Sequel's general support for this syntax is nicer:

    ds.filter{copies_sold > 500000}

  But named placeholder support can make it easier to port code from
  other database libraries. Also, it works much better than the ?
  placeholder support if you have a long SQL statement:

    DB['SELECT :n FROM t WHERE p > :q AND p < :r', :n=>1,:q=>2,:r=>3]

  Sequel doesn't substitute values that don't appear in the hash:

    ds.where('price < :p AND id in :ids', :p=>100)
    # WHERE (price < 100 AND id in :ids)

  This makes it easier to spot missed placeholders, and avoids issues
  with PostgreSQL's :: casting syntax or : inside string literals.

* The Model add_ association method now accepts a hash and creates a
  new associated model object associated to the receiver:

    Artist[:name=>'YJM'].add_album(:name=>'RF')

* The Model remove_ association method now accepts a primary key and
  removes the associated model object from the association. For
  models using composite primary keys, an array of primary key values
  can be used. Example:

    Artist[:name=>'YJM'].remove_album(1) # regular pk
    Artist[:name=>'YJM'].remove_album([2, 3]) # composite pk

* Dataset#bind was added, allowing you to bind values before calling
  Dataset#call. This is more consistent with Sequel's general
  approach where queries can be built in any order.

* The native postgres adapter now has Dataset#use_cursor, which
  allows you to process huge datasets without keeping all records in
  memory. The default number of rows per cursor fetch is 1000, but
  that can be modified:

    DB[:huge_table].use_cursor.each{|r| p r}
    DB[:huge_table].use_cursor(:rows_per_fetch=>10000).each{|r| p r}

  This probably won't work with prepared statements or bound
  variables.

* The nested_attributes plugin now adds newly created objects to the
  cached association array immediately, even though the changes are
  not persisted to the database until after the object is saved. The
  reasoning for this is that otherwise there is no way to access the
  newly created associated objects before the save, and no way to
  access them at all if validation fails.

  This makes the nested_attributes plugin much easier to use, since
  now you can just iterate over the cached association array when
  building the form. If validation fails, it will have the newly
  created failed objects in the array, so you can easily display the
  form as the user entered it for them to make changes.

  This change doesn't affect many_to_one associations, since those
  don't have a cached association array. This also does not affect
  updating existing records, since those are already in the cached
  array.

* You can now easily override the default options used in the
  validation_helpers plugin (the recommended validation plugin).
  Options can be overridden at a global level:

    Sequel::Plugins::ValidationHelpers::DEFAULT_OPTIONS[:format].
      merge!(:message=>"incorrect format", :allow_missing=>true)

  Options can also be overridden on a per-class level:

    class Album < Sequel::Model
      plugin :validation_helpers
      DEFAULT_VALIDATION_OPTIONS = {
        :format=>{:message=>"incorrect format", :allow_missing=>true}}

      private

      def default_validation_helpers_options(type)
        super.merge(DEFAULT_VALIDATION_OPTIONS[type] || {})
      end
    end

* You can now use a proc instead of a string for the
  validation_helpers :message option. This should allow much easier
  internationalization support. If a proc is given, Sequel calls it
  to get the format string to use. Whether the proc should take an
  argument depends on whether the associated validation method takes
  an argument before the array of columns to validate, and the
  argument provided is what is passed to the proc. The exception to
  this is the validates_not_string method, which doesn't take an
  argument, but does pass one to the proc (a symbol with the schema
  type of the column).

  Combined with the above default option support, full
  internationalization support for the validation_helpers plugin
  should be fairly easy.

* The nested_attributes plugin now accepts a :fields option that
  specifies the fields that are allowed. If specified, the plugin
  will use set_only instead of set when mass assigning attributes.
  Without this, the only way to control which fields are allowed is
  to set allowed/restricted attributes at a class level in the
  associated class.

* Associations now accept a :distinct option which uses the SQL
  DISTINCT clause. This can be used instead of :uniq for many_to_many
  and many_through_many associations to handle the uniqueness in the
  database instead of in ruby. It can also be useful for one_to_many
  associations to models that don't have primary keys.

* The caching plugin now accepts an :ignore_exceptions option that
  allows it to work with memcached (which raises exceptions instead
  of returning nil for missing records).

* Sequel now emulates JOIN USING, albeit poorly, using JOIN ON for
  databases that don't support JOIN USING (MSSQL and H2). This isn't
  guaranteed to work for all queries, since USING and ON have
  different semantics, but should work in most cases.

* The MSSQL shared adapter now supports insert_select, for faster
  model object creation. If for some reason you need to disable it,
  you can use disable_insert_output.

* Model#modified! has been added which explicitly marks the object as
  modified. So even if no column values have been modified, calling
  save_changes/update will still run through the regular save process
  and call all before and after save/update hooks.

* Model#marshallable! has been added which removes unmarshallable
  attributes from the object. Previously, you couldn't marshal a
  saved model object because it contained a dataset with a singleton
  class. Custom _dump and _load methods could be used instead, but
  this approach is easier to implement.

* Dataset#literal_other now calls sql_literal on the object with the
  current dataset instance, if the object responds to it. This makes
  it easier to support the literalization of arbitrary objects. Note
  that if the object is a subclass of a class handled by an existing
  dataset literalization method, you cannot use this method. You have
  to override the specific Dataset#literal_* method in that case.
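  For instance, here is a minimal sketch of the sql_literal hook just
  described (the Point class is hypothetical):

    class Point < Struct.new(:x, :y)
      # Called by Dataset#literal_other when this object is literalized
      def sql_literal(dataset)
        "point(#{dataset.literal(x)}, #{dataset.literal(y)})"
      end
    end

    DB[:shapes].where(:center=>Point.new(1, 2)).sql
    # => "SELECT * FROM shapes WHERE (center = point(1, 2))"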
* Model#save_changes now accepts an option hash that is passed to
  save:

    album.save_changes(:validate=>false)

* A bunch of Dataset#*_join methods have been added, for specific
  join types:

  * cross_join
  * natural_join
  * full_join
  * left_join
  * right_join
  * natural_full_join
  * natural_left_join
  * natural_right_join

  Previously, you had to use join_table(:cross, ...) to use a CROSS
  JOIN.

* You can now create clustered indexes on Microsoft SQL Server using
  the :clustered option.

* AssociationReflection#associated_object_keys has been added,
  specifying the keys in the associated model object that are related
  to this association.

* Sequel::SQL::SQLArray#to_a was added.

Other Improvements
------------------

* Constant lookup in virtual row blocks now works correctly in ruby
  1.9. Virtual row blocks are based on BasicObject on ruby 1.9, which
  doesn't allow referencing objects in the top level scope. So the
  following code would cause an error on 1.9:

    DB[:bonds].filter{maturity_date > Time.now}

  Sequel now uses a Sequel::BasicObject class on 1.9 with a
  const_missing that looks up constants in Object, which allows the
  above code to work.

* Sequel no longer attempts to load associated objects when one of
  the key fields in the current table is NULL. This fixes the
  behavior when the :primary_key option for the association is used
  to point to a non-primary key.

  A consequence of this change is that attempting to load a *_to_many
  association for a new model object now returns an empty array
  instead of raising an exception. This has its own advantage of
  allowing the same association viewing code to work on both new and
  existing objects. Previously, you had to actively avoid calling the
  association method on new objects, or Sequel would raise an
  exception.

* Dataset aggregate methods (sum/avg/min/max/range/interval) now work
  correctly with limited, grouped, or compound datasets. Previously,
  count worked with them, but other aggregate methods did not. These
  methods now use a subquery if called on a limited, grouped or
  compound dataset.

* It is no longer required to have an existing GROUP BY clause to use
  a HAVING clause (except on SQLite, which doesn't permit it). Sequel
  has always had this limitation, but it's not required by the SQL
  standard, and there are valid reasons to use HAVING without GROUP
  BY.

* Sequel will now emulate support for databases that don't support
  multiple column IN/NOT IN syntax, such as MSSQL and SQLite:

    ds.filter([:col1, :col2]=>[[1, 2], [3, 4]].sql_array)
    # default: WHERE (col1, col2) IN ((1, 2), (3, 4))
    # emulated: WHERE (((col1 = 1) AND (col2 = 2)) OR
    #           ((col1 = 3) AND (col2 = 4)))

  This is necessary for eager loading associated objects for models
  with composite primary keys.

* Sequel now emulates :column.ilike('blah%') for case insensitive
  searches on MSSQL and H2. MSSQL is case insensitive by default, so
  it is the same as like. H2 is case sensitive, so Sequel uses a case
  insensitive cast there.

* The nested_attributes plugin no longer allows modification of keys
  related to the association. This fixes a possible security issue
  with the plugin, where a user could associate the nested record to
  a different record.
  For example:

    Artist.one_to_many :albums
    Artist.plugin :nested_attributes
    Artist.nested_attributes :albums
    artist = Artist.create
    artist2 = Artist.create
    album = Album.create
    artist.add_album(album)
    artist.albums_attributes = [{:id=>album.id,
      :artist_id=>artist2.id}]
    artist.save

* The one_to_many remove_* association method now makes sure that the
  object to be removed is currently associated to this object.
  Before, the method could be abused to disassociate the object from
  whatever object it was associated to.

* Model add_ and remove_ association methods now check that the
  passed object is of the correct class.

* Calling the add_* association method no longer adds the record to
  the cached association array if the object is already in the array.
  Previously, Sequel did this for reciprocal associations, but not
  for regular associations.

  This makes the most sense for one_to_many associations, since those
  can only be associated to the object once. For many_to_many
  associations, if you want an option to disable the behavior, please
  bring it up on the Sequel mailing list.

* An array with a string and placeholders that is passed to
  Dataset#filter is no longer modified. Previously:

    options = ["name like ?", "%dog%"]
    DB[:players].where(options)
    options # => ["%dog%"]

* Getting the most recently inserted autoincremented primary key is
  now optimized when connecting to MySQL via JDBC.

* Model.inherited now calls Class.inherited.

* The MSSQL shared adapter once again works on ruby 1.9. It was
  broken in 3.5.0 due to minor syntax issues.

* The force_encoding plugin now handles refreshing an existing
  object, either explicitly or implicitly when new objects are
  created.

  To use the force_encoding plugin with the identity_map plugin, the
  identity_map plugin should be loaded first.

* Using nil as a bound variable now works on PostgreSQL. Before,
  Sequel would incorrectly use "" instead of NULL, since it
  transformed all objects to strings before binding them. Sequel now
  binds the objects directly.

* The Amalgalite adapter is now significantly faster, especially for
  code that modifies the schema or submits arbitrary SQL statements
  using Database <<, run, or execute_ddl.

* Model#save_changes is now used when updating existing associated
  objects in the nested_attributes plugin. This should be
  significantly faster for the common case of submitting a complex
  form with nested objects without making modifications.

* You can now prepare insert statements that take multiple arguments,
  such as insert(1, 2, 3) and insert(columns, values) (see the sketch
  below).

* Dataset#group_and_count now supports aliased columns.

* Adding indexes to tables outside the default schema now works.

* Eager graphing now works better with models that use aliased
  tables.

* Sequel now correctly parses the column schema information for
  tables in a non-default schema on Microsoft SQL Server.

* changed_columns is now cleared when saving new model objects for
  adapters that support insert_select, such as PostgreSQL.

* Dataset#replace on MySQL now works correctly when default values
  are used.

* Dataset#lock on PostgreSQL now works correctly.

* Dataset#explain now works correctly on SQLite, and works using any
  adapter. It also works correctly on Amalgalite.

* The JDBC adapter now handles binding Time arguments correctly when
  using prepared statements.

* Model add_ and remove_ association methods now have more
  descriptive exception messages.

* Dataset#simple_select_all? now ignores options that don't affect
  the SQL, such as :server.
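  As a sketch of the multiple-argument prepared insert support
  mentioned above (the table, statement name, and placeholders here
  are hypothetical):

    ps = DB[:items].prepare(:insert, :insert_item,
      [:name, :price], [:$n, :$p])
    ps.call(:n=>'widget', :p=>10)
    # INSERT INTO items (name, price) VALUES ('widget', 10)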
* Dataset#window in the PostgreSQL adapter now respects existing
  named windows.

* Sequel now better handles a failure to begin a new transaction.

* The dataset code was split into some additional files for
  improved readability.

* Many documentation improvements were made.

Backwards Compatibility
-----------------------

* Model::Errors no longer uses a default proc, but emulates one in
  the [] method.  This is unlikely to have a negative effect
  unless you are calling a method on it that doesn't call []
  (maybe using it in a C extension?).

* Model#table_name now only provides the alias if an aliased table
  is used.

* The Sequel::Dataset::STOCK_COUNT_OPTS constant has been removed.

* Dataset#lock on PostgreSQL now returns nil instead of a dataset.

sequel-5.63.0/doc/release_notes/3.7.0.txt

New Features
------------

* Sequel now has support for deleting and updating joined datasets
  on MySQL and PostgreSQL.  Previously, Sequel only supported this
  to a limited extent on Microsoft SQL Server, and support there
  has been improved as well.

  This allows you to do:

    DB.create_table!(:a){Integer :a; Integer :d}
    DB.create_table!(:b){Integer :b; Integer :e}
    DB.create_table!(:c){Integer :c; Integer :f}

    # Insert some rows

    ds = DB.from(:a, :b).
      join(:c, :c=>:e.identifier).
      where(:d=>:b)
    ds.where(:f=>6).update(:a => 10)
    ds.where(:f=>5).delete

  Which will set the a column to 10 for all rows in table a, where
  an associated row in table c (through table b) has a value of 6
  for column f.  It will delete rows from table a where an
  associated row in table c (through table b) has a value of 5 for
  column f.

  Sequel assumes that the first FROM table is the table being
  updated/deleted.  MySQL and Microsoft SQL Server do not require
  multiple FROM tables, but PostgreSQL does.

* Dataset #select_map, #select_order_map, and #select_hash
  convenience methods were added for quickly creating arrays and
  hashes from a dataset.

  select_map and select_order_map both return arrays of values for
  the column specified.  The column can be specified either via an
  argument or a block, similar to Dataset#get.  Both accept any
  valid objects as arguments.

  select_hash returns a hash.  It requires two symbol arguments,
  but can handle implicit qualifiers or aliases in the symbols.

  Neither of these methods offer any new functionality, they just
  cut down on the number of required key strokes:

    select_map(:column)
    # select(:column).map(:column)

    select_order_map(:column)
    # select(:column).order(:column).
    #   map(:column)

    select_hash(:key_column, :value_column)
    # select(:key_column, :value_column).
    #   to_hash(:key_column, :value_column)

* The NULL, NOTNULL, TRUE, SQLTRUE, FALSE, and SQLFALSE constants
  were added to Sequel::SQL::Constants.  This allows you to do:

    include Sequel::SQL::Constants
    DB[:table].where(:a=>'1', :b=>NOTNULL)

  Previously, the shortest way to do this was:

    DB[:table].where(:a=>'1').exclude(:b=>nil)

  It may make the code more descriptive:

    DB[:table].where(:b=>NULL)
    # compared to
    DB[:table].where(:b=>nil)

  This gives the option to use SQL terminology instead of ruby
  terminology.
  The other advantage of using the constants is that they handle
  operators and methods like other Sequel::SQL objects:

    NULL & SQLFALSE # BooleanExpression => "(NULL AND FALSE)"
    nil & false     # false
    NULL + :a       # NumericExpression => "(NULL + a)"
    nil + :a        # raises NoMethodError
    NULL.sql_string + :a  # StringExpression => "(NULL || a)"
    NULL.as(:b)     # AliasedExpression => "NULL AS b"

  For complex systems that want to represent SQL boolean objects
  in ruby (where you don't know exactly how they'll be used),
  using the constants is recommended.

  In order not to be too verbose, including Sequel::SQL::Constants
  is recommended.  It's not done by default, but you can still
  reference the constants under the main Sequel module by default
  (e.g. Sequel::NULL).

* The validates_unique method in the validation_helpers plugin now
  supports an :only_if_modified option, which should speed up the
  common case where the unique attribute is not modified for an
  existing record.  It's not on by default, since it's possible
  the database could be changed between retrieving the model
  object and updating it.

* The Dataset #union, #intersect, and #except methods now accept
  an :alias option which is used as the alias for the returned
  dataset.

    DB[:table].union(DB[:old_table], :alias=>:table)

* Model#destroy now supports a :transaction option, similar to
  Model#save.

* The shared Oracle adapter now supports Dataset#sequence for
  returning autogenerated primary key values on insert from a
  related sequence.

  This makes Oracle work correctly when using models, with
  something like the following:

    class Album < Sequel::Model
      set_dataset dataset.sequence(:seq_albums_id)
    end

  You currently need to call Dataset#sequence in every model class
  where the underlying table uses a sequence to generate primary
  key values.

Other Improvements
------------------

* In Model #save and #destroy when using transactions and when
  raise_on_save_failure is false, ensure that transactions are
  rolled back if a before hook returns false.

* Dataset#group_and_count now handles arguments other than
  Symbols.  A previous change to the method raised an exception if
  a Symbol was not provided.  It also handles AliasedExpressions
  natively, so the following works correctly:

    DB[:table].group_and_count(:column.as(:alias))

* Sequel no longer uses native autoreconnection in the mysql
  adapter.  Native autoreconnection has problems with prepared
  statements, where a new native connection is used behind
  Sequel's back, so Sequel thinks the prepared statement has
  already been defined on the connection, when in fact it hasn't.
  Any other changes that affect the state of the connection will
  be lost when native autoreconnection is used as well.

  Sequel's connection pool already handles reconnection if it
  detects a disconnection.  This commit also adds an additional
  exception message to recognize as a disconnect.  If there are
  other exception messages related to disconnects, please post
  them on the Sequel mailing list.

* The schema_dumper plugin now specifies the :type option for
  primary key if it isn't Integer.

* On PostgreSQL, the bigserial type is used if :type=>Bignum is
  given as an option to primary key.  This makes it operate more
  similarly to other adapters that support autoincrementing 64-bit
  integer primary keys (see the example below).

* The native mysql adapter will now attempt to load options in the
  [client] section of the my.cnf file.

* The rake spec tasks for the project now work correctly with
  RSpec 1.2.9.
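
  An example of the bigserial change mentioned above (the table
  and column names are hypothetical):

    DB.create_table(:events){primary_key :id, :type=>Bignum}
    # id bigserial PRIMARY KEY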
Backwards Compatibility
-----------------------

* Dataset::GET_ERROR_MSG and Dataset::MAP_ERROR_MSG constants were
  removed.  Both were replaced with Dataset::ARG_BLOCK_ERROR_MSG.

* The behavior of the Model#save_failure private instance method
  was modified.  It now always raises an exception, and validation
  failures no longer call it.

* The internals of how autogenerated primary key metadata is
  stored when creating tables on PostgreSQL have been modified.

* The native MySQL adapter no longer sets the OPT_LOCAL_INFILE
  option to "client" on the native connection.

sequel-5.63.0/doc/release_notes/3.8.0.txt

New Features
------------

* Dataset#each_server was added, allowing you to run the same
  query (most likely insert/update/delete) on all shards.  This is
  useful if you have a sharded database but have lookup tables
  that should be identical on all shards.  It works by yielding
  copies of the current dataset that are tied to each
  server/shard:

    DB[:table].filter(:id=>1).each_server do |ds|
      ds.update(:name=>'foo')
    end

* Database#each_server was added, allowing you to run schema
  modification methods on all shards.  It works by yielding a new
  Sequel::Database object for each shard, that will connect to
  only that shard:

    DB.each_server do |db|
      db.create_table(:t){Integer :num}
    end

* You can now add and remove servers/shards from the connection
  pool while Sequel is running:

    DB.add_servers(:shard1=>{:host=>'s1'}, :shard2=>{:host=>'s2'})
    DB.remove_servers(:shard1, :shard2)

* When you attempt to disconnect from a server that has
  connections currently in use, Sequel will now schedule those
  connections to be disconnected when they are returned to the
  pool.  Previously, Sequel disconnected available connections,
  but ignored connections currently in use, so it wasn't possible
  to guarantee complete disconnection from the server.  Even with
  this new feature, you can only guarantee eventual disconnection,
  since disconnection of connections in use happens
  asynchronously.

* Database#disconnect now accepts a :servers option specifying the
  server(s) from which to disconnect.  This should be a symbol or
  array of symbols representing servers/shards.  Only those
  specified will be disconnected:

    DB.disconnect(:servers=>[:shard1, :shard2])

* A validates_type validation was added to the validation_helpers
  plugin.  It allows you to check that a given column contains the
  correct type.  It can be helpful if you are also using the
  serialization plugin to store serialized ruby objects, by making
  sure that the objects are of the correct type (e.g. Hash):

    def validate
      validates_type(Hash, :options)
    end

* Sequel::SQL::Expression#== is now supported for all expressions:

    :column.qualify(:table).cast(:type) == \
      :column.qualify(:table).cast(:type)
    # => true

    :column.qualify(:table).cast(:type) == \
      :other_column.qualify(:table).cast(:type)
    # => false

* When using the generic File type to create blob columns on
  MySQL, you can specify the specific database type by using the
  :size option (with :tiny, :medium, and :long values recognized):

    DB.create_table(:docs){File :body, :size=>:long} # longblob

* The mysql adapter will now default to using mysqlplus, falling
  back to use mysql.  mysqlplus is significantly better for
  threaded code because queries do not block the entire
  interpreter.

* The JDBC adapter is now able to detect certain types of
  disconnect errors.
* ConnectionPool.servers and Database.servers were added, which
  return an array of symbols specifying the servers/shards in use.

Other Improvements
------------------

* The single-threaded connection pool now raises
  DatabaseConnectionErrors if unable to connect, so it now
  operates more similarly to the default connection pool.

* The single-threaded connection pool now operates more similarly
  to the default connection pool when given a nonexistent server.

* PGErrors are now correctly converted to DatabaseErrors in the
  postgres adapter when preparing statements or executing prepared
  statements.

* DatabaseDisconnectErrors are now raised correctly in the
  postgres adapter if the connection status is not OK after a
  query raises an error.

* In the mysql adapter, multiple statements in a single query
  should now be handled correctly in all cases, not just when
  using Dataset#each.  So you can now submit multiple queries in a
  single string to Database#run.

* Model object creation on Microsoft SQL Server 2000 once again
  works correctly.  Previously, an optimization was used that was
  only supported on 2005+.

* Backslashes are no longer doubled inside string literals when
  connecting to Microsoft SQL Server.

* The ORDER clause now correctly comes after the HAVING clause on
  Microsoft SQL Server.

* Sequel now checks that there is an active transaction before
  rolling back transactions on Microsoft SQL Server, since there
  are cases where Microsoft SQL Server will roll back transactions
  implicitly.

* Blobs are now handled correctly when connecting to H2.

* 64-bit integers are now handled correctly in JDBC prepared
  statements.

* In the boolean_readers plugin, correctly handle columns not in
  the db_schema, and don't raise an error if the model's columns
  can't be determined.

* In the identity_map plugin, remove instances from the cache if
  they are deleted or destroyed.

Backwards Compatibility
-----------------------

* Dataset::FROM_SELF_KEEP_OPTS was merged into
  Dataset::NON_SQL_OPTIONS.  While used in different places, they
  were used for the same purpose, and entries missing from one
  should have been included in the other.

* The connection pool internals changed substantially.  Now,
  ConnectionPool #allocated and #available_connections will return
  nil instead of an array or hash if they are called with a
  nonexistent server.  These are generally only used internally,
  though they are part of the public API.  #created_count and
  #size still return the size of the :default server when called
  with a nonexistent server, though.

* The meta_eval and metaclass private methods were removed from
  Sequel::MetaProgramming (only the meta_def public method
  remains).  If you want these methods, use the metaid gem.

* The irregular ox->oxen pluralization rule was removed from the
  default inflections, as it screws up the more common box->boxes.

sequel-5.63.0/doc/release_notes/3.9.0.txt

New Features
------------

* The ConnectionPool classes were refactored from 2 separate
  classes to a 5 class hierarchy, with one main class and 4
  subclasses, one for each combination of sharding and threading.

  The primary reason for this refactoring is to make it so that
  the user doesn't have to pay a performance penalty for sharding
  if they aren't using it.  A connection pool that supports
  sharding is automatically used if the :servers option is used
  when setting up the database connection.
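
  For example, this would automatically use a sharded connection
  pool (the connection string and shard details are hypothetical):

    DB = Sequel.connect('postgres://localhost/main',
      :servers=>{:shard1=>{:host=>'shard1.example.com'}})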
  In addition, the default connection pool no longer contains the
  code to schedule future disconnections of currently allocated
  connections.  The sharded connection pool must be used if that
  feature is desired.

  The unsharded connection pools are about 25-30% faster than the
  sharded versions.

* An optimistic_locking plugin was added to Sequel::Model.  This
  plugin implements a simple database-independent locking
  mechanism to ensure that concurrent updates do not override
  changes:

    class Person < Sequel::Model
      plugin :optimistic_locking
    end
    p1 = Person[1]
    p2 = Person[1]
    # works
    p1.update(:name=>'Jim')
    # raises Sequel::Plugins::OptimisticLocking::Error
    p2.update(:name=>'Bob')

  In order for this plugin to work, you need to make sure that the
  database table has a lock_version column (or other column you
  name via the lock_column class level accessor) that defaults to
  0.

  The optimistic_locking plugin does not work with the
  class_table_inheritance plugin.

* Dataset#unused_table_alias was added, which takes a symbol and
  returns either that symbol or a new symbol which can be used as
  a table alias when joining a table to the dataset.  The symbol
  returned is guaranteed to not already be used by the dataset:

    DB[:test].unused_table_alias(:blah) # => :blah
    DB[:test].unused_table_alias(:test) # => :test_0

  The use case is when you need to join a table to a dataset,
  where the table may already be used inside the dataset, and you
  want to generate a unique alias:

    ds.join(:table.as(ds.unused_table_alias(:table)), ...)

* The Sequel::ValidationFailed exception now has an errors
  accessor which returns the Sequel::Model::Errors instance with
  the validation errors.  This can be helpful in situations where
  a generalized rescue is done where the model object reference is
  not available.

* bin/sequel now works without an argument, which is useful for
  testing SQL generation (and not much else).

* Support SELECT ... INTO in the MSSQL adapter, using
  Dataset#into, which takes a table argument.

* You can now provide your own connection pool class via the
  :pool_class option when instantiating the database.

Other Improvements
------------------

* IN/NOT IN constructs with an empty array are now handled
  properly.

    DB[:table].filter(:id=>[])  # IN
    DB[:table].exclude(:id=>[]) # NOT IN

  Before, the IN construct would mostly work, other than some
  minor differences in NULL semantics.  However, the NOT IN
  construct would not work.  Sequel now handles the NOT IN case
  using an expression that evaluates to true.

* If using an IN/NOT IN construct with multiple columns and a
  dataset argument, where multiple column IN/NOT IN support is
  emulated, a separate query is done to get the records, which is
  then handled like an array of values.  This means that the
  following type of query now works on all tested databases:

    DB[:table1].filter([:id1, :id2]=>DB[:table2].select(:id1, :id2))

* Schemas and aliases are now handled correctly when eager
  graphing.

* Implicitly qualified symbols are now handled correctly in update
  statements, useful if you are updating a joined dataset and need
  to reference a column that appears in multiple tables.

* The active_model plugin has been brought up to date with
  activemodel 3.0 beta (though it doesn't work on edge).
  Additionally, the active_model plugin now requires active_model
  in order to use ActiveModel::Naming.

* In the schema_dumper extension, always include the varchar
  limit, even if it is 255 characters (the default).
  This makes it so that PostgreSQL will use a varchar(255) column
  instead of a text column when restoring a schema dump of a
  varchar(255) column from another database.

* You can now load adapters from outside the Sequel lib directory;
  they just need to be in a sequel/adapters directory somewhere in
  the LOAD_PATH.

* You can now load extensions from outside the Sequel lib
  directory using Sequel.extension.  External extensions need to
  be in a sequel/extensions directory somewhere in the LOAD_PATH.

* Using bound variables for limit and offset in prepared
  statements now works correctly.

* Performance of prepared statements was improved in the native
  SQLite adapter.

* The schema_dumper extension now passes the options hash from
  dump_*_migration to Database#tables.

* In the single_table_inheritance plugin, qualify the sti_key
  column with the table name, so that subclass datasets can safely
  be joined to other tables having the same column name.

* In the single_table_inheritance plugin, handle the case where
  the sti_key value is nil or '' specially, so that those cases
  always return an instance of the main model class.  This fixes
  issues if constantize(nil) returns Object instead of raising an
  exception.

* No longer use Date#to_s for literalization, always use ISO8601
  format for dates.

* A couple lambdas which were instance_evaled were changed to
  procs for ruby 1.9.2 compatibility.

* MSSQL emulated offset support was simplified to only use one
  subquery, and made to work correctly on ruby 1.9.

* Emulate multiple column IN/NOT IN on H2, since it doesn't handle
  all cases correctly.

* ODBC timestamps are now handled correctly if the
  database_timezone is nil.

* ArgumentErrors raised when running queries in the ODBC adapter
  are now raised as DatabaseErrors.

* Attempting to use DISTINCT ON on SQLite now raises an error
  before sending the query to the database.

* The options hash passed to the database connection method is no
  longer modified.  However, there may be additional options
  present in Database#opts that weren't specified by the options
  hash passed to the database connection method.

* Make Dataset#add_graph_aliases handle the case where the dataset
  has not yet been graphed.

* You can now provide an SQL::Identifier as a 4th argument to
  Dataset#join_table, and unsupported arguments are caught and an
  exception is raised.

* The gem specification has been moved out of the Rakefile, so
  that the gem can now be built without rake, and works well with
  gem build and bundler.

* The Rakefile no longer assumes the current directory is in the
  $LOAD_PATH, so it should work correctly on ruby 1.9.2.

* All internal uses of require are now thread safe.

* Empty query parameter keys in connection strings are now ignored
  instead of raising an exception.

* The specs were changed so that you can run them in parallel.
  Previously there was a race condition in the migration extension
  specs.

Backwards Compatibility
-----------------------

* If you plan on using sharding at any point, you now must pass a
  :servers option when connecting to the database, even if it is
  an empty hash.  You can no longer just call Database#add_servers
  later.

* The connection_proc and disconnection_proc accessors were
  removed from the connection pools, so you can no longer modify
  the procs after the connection pool has been instantiated.  You
  must now provide the connection_proc as the block argument when
  instantiating the pool, and the disconnection_proc via the
  :disconnection_proc option.
* In the hash passed to Dataset#update, symbol keys with a double
  embedded underscore are now considered as implicit qualifiers,
  instead of being used verbatim.  If you have a column that
  includes a double underscore, you now need to wrap it in an
  SQL::Identifier or use a String instead.

* The connection pools no longer convert non-StandardError based
  exceptions to RuntimeErrors.  Previously, all of the common
  adapters turned this feature off, so there is no change for most
  users.

* Sequel::ConnectionPool is now considered an abstract class and
  should not be instantiated directly.  Use
  ConnectionPool.get_pool to return an instance of the appropriate
  subclass.

* The Sequel::SingleThreadedPool constant is no longer defined.

* The private Dataset#eager_unique_table_alias method was removed,
  use the new public Dataset#unused_table_alias method instead,
  which has a slightly different API.

* The private Dataset#eager_graph_qualify_order method was
  removed, use Dataset#qualified_expression instead.

* The private Sequel::Model class methods plugin_gem_location and
  plugin_gem_location_old have been removed.

* Gems built with the rake tasks now show up in the root directory
  instead of the pkg subdirectory, and no tarball package is
  created.

Other News
----------

* Sequel now has an official blog at
  http://sequel.jeremyevans.net/blog.html.

sequel-5.63.0/doc/release_notes/4.0.0.txt

= Backwards Compatibility

* All behavior resulting in deprecation messages in 3.48.0 has
  been removed or modified.  If you plan on upgrading to Sequel
  4.0.0 and have not yet upgraded to 3.48.0, upgrade to 3.48.0
  first, fix code that results in deprecation warnings, and then
  upgrade to 4.0.0.

* The threaded connection pools now default to
  :connection_handling=>:queue.  You can manually set
  :connection_handling=>:stack to get the previous behavior.

* Model.raise_on_typecast_failure now defaults to false.  Set this
  to true to get the previous behavior of raising typecast errors
  in the setter methods.

* Model#save no longer calls Model#_refresh or Model#set_values
  internally after an insert.  Manual refreshes are now treated
  differently than after creation refreshes.

* On SQLite, integer_booleans now defaults to true.  Set this to
  false to get the previous behavior of 't' for true and 'f' for
  false.  Sequel will not automatically upgrade your data, users
  are responsible for doing that if they want to switch the
  integer_booleans setting.  Note that regardless of the setting,
  Sequel will return the correct ruby values when retrieving the
  rows.

  Example Code to Migrate Existing Data:

    DB[:table].where(:column=>'t').update(:column=>1)
    DB[:table].where(:column=>'f').update(:column=>0)

* On SQLite, use_timestamp_timezones is now false by default.  Set
  this to true to get the previous behavior with timezone
  information in timestamps.  Sequel will not automatically
  upgrade your data, users are responsible for doing that if they
  want to switch the use_timestamp_timezones setting.  Note that
  regardless of the setting, Sequel will return the correct ruby
  values when retrieving the rows.

* Using window functions when eagerly loading associations with
  limits or offsets is now done automatically if the database
  supports it.  Previously, this had to be enabled manually.  If
  you would like to disable this optimization and just do the
  slicing in ruby, set default_eager_limit_strategy = nil.

* The default value for most option hash arguments is now a shared
  empty frozen hash.
  If you are overriding methods and modifying option hashes, fix
  your code.

* The defaults_setter plugin now works in a lazy manner instead of
  an eager manner.  So calling the related method returns the
  default value if there is no value stored, but Sequel does not
  add the default values to the internal values hash, and will not
  attempt to insert what it thinks is the default value when
  saving the new object.

* Model#set_all and #update_all now allow setting the primary key
  columns.

* The many_to_one_pk_lookup and association_autoreloading plugins
  are now integrated into the default associations support.

* Plugins now extend the class with ClassMethods before including
  InstanceMethods in the class.

* Dataset#get, #select_map, and #select_order_map now
  automatically add aliases for unaliased expressions if given a
  single expression.

* Database#tables and #views on PostgreSQL now check against the
  current schemas in the search path.

* Calling ungraphed on an eager_graph dataset will restore the
  row_proc for that dataset.  This is not backwards compatible if
  your method chain does:

    dataset.eager_graph.naked.ungraphed

  Switch such code to:

    dataset.eager_graph.ungraphed.naked

* The Model#set_restricted and #update_restricted private methods
  have a slightly different API now.

* The Sequel::SQL::SQLArray alias for ValueList has been removed.

* Sequel::SQL::NoBooleanInputMethods has been removed.

* Sequel::NotImplemented has been removed.  Default
  implementations of methods that used to raise this exception
  have been removed.

* Sequel::Model::EMPTY_INSTANCE_VARIABLES has been removed.

* The Sequel::Postgres::DatabaseMethods::EXCLUDE_SCHEMAS and
  SYSTEM_TABLE_REGEXP constants have been removed.

* Dataset#columns_without_introspection has been removed from the
  columns_introspection extension.

* Sequel no longer provides a default database for the adapter or
  integration specs.  Additionally, if you are using
  spec_config.rb to configure a database to use when
  adapter/integration testing, you may need to modify it, as
  Sequel now uses the DB constant for the database being tested.

* The SEQUEL_MSSQL_SPEC_REQUIRE and SEQUEL_DB2_SPEC_REQUIRE
  environment variables are no longer respected when
  adapter/integration testing those databases.  Use RUBYOPT with
  the -r flag.

* In the 3.48.0 release notes, it was announced that
  Dataset#join_table would default to :qualify=>:deep in 4.0.0.
  This change was made but reverted before the release of 4.0.0 as
  it was determined too likely to break existing code, there was
  no deprecation warning (since it just changed a setting), and
  the benefit was minimal.  You can make deep qualification the
  default by overriding Dataset#default_join_table_qualification.

= New Features

* A pg_array_associations plugin has been added, for creating an
  association based on a PostgreSQL array column containing
  foreign keys.  Example:

    # Database schema:
    #   tags                albums
    #   :id (int4) <--\     :id
    #   :name          \--  :tag_ids (int4[])
    #                       :name

    class Album
      plugin :pg_array_associations
      pg_array_to_many :tags
    end
    class Tag
      plugin :pg_array_associations
      many_to_pg_array :albums
    end

  This operates similarly to a many_to_many association, but does
  not require a join table.  All of the usual Sequel association
  features are supported, such as adding, removing, and clearing
  associations, eager loading via eager and eager_graph, filtering
  by associations, and dataset associations.
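
  For example, using the Album and Tag classes created above (the
  data is hypothetical):

    album = Album.first
    album.tags               # Tags whose ids are in album.tag_ids
    album.add_tag(Tag.first)
    album.remove_all_tags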
  Note that until PostgreSQL gains the ability to enforce foreign
  key constraints in array columns, this plugin is not recommended
  for production use unless you plan on emulating referential
  integrity constraints via triggers.

* Dataset#from now accepts virtual_row blocks, making it easy to
  use with table returning functions:

    DB.from{table_returning_function(arg)}

* Sequel.deep_qualify has been added, for easily doing a deep
  qualification of objects:

    Sequel.deep_qualify(:table, Sequel.+(:column, 1))
    # ("table"."column" + 1)
    Sequel.deep_qualify(:table, Sequel.like(:a, 'b'))
    # ("table"."a" LIKE 'b' ESCAPE '\')

* The prepared_statements_associations plugin now handles
  one_to_one associations.

* SQL::Subscript objects now handle ruby range arguments,
  operating as an SQL array slice:

    Sequel.subscript(:a, 1..2) # a[1:2]

* Database#create_view now accepts a :columns option to provide
  explicit column names for the view.

* Postgres::ArrayOp#[] now returns an ArrayOp if given a range,
  since a PostgreSQL array slice can be treated as an array.

* Postgres::ArrayOp#hstore has been added for creating hstores
  from PostgreSQL arrays.

* When creating full text indexes on PostgreSQL, the
  :index_type=>:gist option can be used to use a gist index
  instead of the default gin index.  This can be useful if
  insert/update speed is more important than lookup speed.

* You can now provide the :owner option to Database#create_schema
  on PostgreSQL to specify the owner of the schema.

* You can now provide the :if_exists option to Database#drop_view
  on PostgreSQL to not raise an error if the view doesn't exist.

* The pg_json extension now handles non-JSON plain strings,
  integers and floats in PostgreSQL JSON columns.

= Support for New Features in PostgreSQL 9.3

* A pg_json_ops extension has been added to support the new json
  operators and functions.

* Postgres::ArrayOp#replace and #remove have been added for using
  the array_replace and array_remove functions.

* You can now provide the :if_not_exists option when using
  Database#create_schema on PostgreSQL to not raise an error if
  the schema already exists.

* Database#create_view now supports a :recursive option on
  PostgreSQL for creating recursive views.

* Database#create_view and #drop_view now support a :materialized
  option on PostgreSQL for creating/dropping materialized views.

* Database#refresh_view has been added on PostgreSQL for
  refreshing materialized views.

= Other Improvements

* Check constraints are now always surrounded by parentheses,
  since that is required by the SQL standard.  This fixes issues
  in the cases where parentheses were not used automatically, such
  as when a function call was used.

* Using an offset without a limit when eager loading now works
  correctly.

* The prepared_statements_associations plugin now works correctly
  when the associated class uses a filtered dataset.

* The prepared_statements_associations plugin can now use a
  prepared statement for cases where the association uses
  :conditions.

* Boolean prepared statement arguments now work correctly in the
  sqlite adapter when the integer_booleans setting is true.

* Dataset#inspect on prepared statements now handles anonymous
  dataset classes correctly.

* When dataset string/blob literalization depends on having a
  database connection and the dataset has an assigned server, a
  connection to the assigned server is used.

* More disconnect errors are now handled when using the postgres
  adapter with the postgres-pr driver, and in the jdbc/oracle
  adapter.
* Composite primary keys are now parsed correctly on SQLite
  3.7.16+.

* Blobs are now hex escaped on MySQL, which can solve some
  encoding issues when blobs are used as literals in the same SQL
  query with UTF-8 strings.

* BigDecimal instances are now formatted more nicely in the
  pretty_table extension.

* Sequel now raises an exception when attempting to literalize
  infinite and NaN floats on MySQL.  In general, this would result
  in MySQL raising an error, but in extreme cases it could have
  failed silently.

* You can now use a NO_SEQUEL_PG environment variable to not
  automatically require sequel_pg in the postgres adapter.

* Dataset#unbind now always uses symbol keys in the bind variable
  hash.

sequel-5.63.0/doc/release_notes/4.1.0.txt

= New Features

* Database#run and #<< now accept SQL::PlaceholderLiteralString
  objects, allowing you to more easily run arbitrary DDL queries
  with placeholders:

    DB.run Sequel.lit("CREATE TABLE ? (? integer)", :table, :column)

* You can now provide options for check constraints by calling the
  constraint/add_constraint methods with a hash as the first
  argument.  On PostgreSQL, you can now use the :not_valid option
  for check constraints, so they are enforced for inserts and
  updates, but not for existing rows.

    DB.create_table(:table) do
      ...
      constraint({:name=>:constraint_name, :not_valid=>true}) do
        column_name > 10
      end
    end

* Dataset#stream has been added to the mysql2 adapter, and will
  have the dataset stream results if used with mysql2 0.3.12+.
  This allows you to process large datasets without keeping the
  entire dataset in memory.

    DB[:large_table].stream.each{|r| ...}

* Database#error_info has been added to the postgres adapter.  It
  is supported on PostgreSQL 9.3+ if pg-0.16.0+ is used as the
  underlying driver, and it gives you a hash of metadata related
  to the exception:

    DB[:table_name].insert(1) rescue DB.error_info($!)
    # => {:schema=>"public", :table=>"table_name", :column=>nil,
          :constraint=>"constraint_name", :type=>nil}

* The :deferrable option is now supported when adding exclusion
  constraints on PostgreSQL, to allow setting up deferred
  exclusion constraints.

* The :inherits option is now supported in Database#create_table
  on PostgreSQL, for table inheritance:

    DB.create_table(:t1, :inherits=>:t0){}
    # CREATE TABLE t1 () INHERITS (t0)

* Dataset#replace and #multi_replace are now supported on SQLite,
  just as they have been previously on MySQL.

* In the jdbc adapter, Java::JavaUtil::HashMap objects are now
  converted to ruby Hash objects.  This is to make it easier to
  handle the PostgreSQL hstore type when using the jdbc/postgres
  adapter.

* The odbc adapter now supports a :drvconnect option that accepts
  an ODBC connection string that is passed to ruby-odbc verbatim.

= Other Improvements

* The prepared_statements plugin no longer breaks the
  instance_filters and update_primary_key plugins.

* Dropping indexes for tables in a specific schema is now
  supported on PostgreSQL.  Sequel now explicitly specifies the
  same schema as the table when dropping such indexes.

* Calling Model#add_association methods with a primary key value
  now raises a Sequel::NoMatchingRow if there is no object in the
  associated table with that primary key.  Previously, this
  situation was not handled and resulted in a NoMethodError being
  raised later (see the example below).

* When an invalid virtual row block function call is detected, an
  error is now properly raised.  Previously, the error was not
  raised until the SQL was produced for the query.
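
  An example of the Model#add_association change described above
  (the model names and key value are hypothetical):

    album.add_tag(42)
    # raises Sequel::NoMatchingRow if there is no Tag with id 42,
    # instead of a less descriptive NoMethodError later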
= Backwards Compatibility

* The :driver option to the odbc adapter is deprecated and will be
  removed in a future version.  It is thought to be broken, and
  users wanting to use DSN-less connections should use the new
  :drvconnect option.

* The Postgres::ArrayOp#text_op private method has been removed.

sequel-5.63.0/doc/release_notes/4.10.0.txt

= Performance Enhancements

* Dataset literalization for simple datasets is now faster by
  creating a per-adapter SQL literalization method instead of
  having all adapters share a generic method with higher overhead.

  Sequel.split_symbol now caches results globally.

  Symbol literalization is now cached per Database.

  Combining these three optimizations, here are the performance
  increases compared to 4.9.0 for a couple example datasets:

    ds1 = DB[:a]
    ds2 = DB[:a].select(:a, :b).where(:c=>1).order(:d, :e)

           .sql    .all (1 row)
    ds1    140%    11%
    ds2    187%    32%

* Regular association loading now uses a placeholder literalizer
  in most cases, for up to an 85% improvement when loading simple
  associations.

* Eager loading associations using Dataset#eager now uses a
  placeholder literalizer in most cases, for up to a 20%
  improvement when eager loading simple associations.

* Eager loading associations with limits using Dataset#eager now
  uses a UNION-based strategy by default.  After extensive
  testing, this was found to be the fastest strategy if the key
  columns are indexed.  Unfortunately, it is a much slower
  strategy if the key columns are not indexed.  You can override
  the default UNION strategy by using the :eager_limit_strategy
  association option.

  On some databases, execution time of UNION queries with n
  subqueries increases faster than O(n).  Also, there are limits
  on the number of subqueries supported in a single UNION query.
  Sequel chooses a default limit of 40 subqueries per UNION query.
  You can increase this via the :subqueries_per_union association
  option.

* Dataset#import and #multi_insert can now insert multiple rows in
  a single query on H2, HSQLDB, Derby, SQLAnywhere, CUBRID,
  SQLite, Oracle, DB2, and Firebird, which should be significantly
  faster than previous versions that issued a separate INSERT
  query per row.

* The many_to_many setter method in the association_pks plugin now
  uses Dataset#import to insert many rows at once, instead of
  using a separate query per insert.

* The jdbc adapter's type conversion has been rewritten to be more
  similar to the other adapters, setting up the type conversion
  procs before iterating over results.  This increases performance
  up to 20%.

* The jdbc/oracle adapter now defaults to a fetch_size of 100,
  similar to the oci8-based oracle adapter, significantly
  improving performance for large datasets.

= New Features

* Database#transaction now supports an :auto_savepoint option.
  This option makes it so that transactions inside the transaction
  block automatically use savepoints unless they use the
  :savepoint=>false option.  This should make testing
  transactional behavior easier.

* Model.prepared_finder has been added.  This has an API similar
  to Model.finder, but it uses a prepared statement instead of a
  placeholder literalizer.  It is less flexible than Model.finder
  as prepared statements have fixed SQL, but it may perform
  better.

* Common table expressions (WITH clauses) are now supported on
  SQLite 3.8.3+.

* :correlated_subquery has been added as an eager_graph and filter
  by association limit strategy for one_to_one and one_to_many
  associations.
  In certain cases it was found that this is faster than the
  :window_function limit strategy.  It is the default filter by
  associations limit strategy on databases that do not support
  window functions.

  Filtering by limited associations using a correlated subquery
  strategy does not work in all cases, but it should handle most
  cases correctly.

* The prepared_statements_associations plugin now handles
  one_through_one and one_through_many associations.

* Sequel now emulates support for offsets without limits on MySQL,
  SQLite, H2, SQLAnywhere, and CUBRID.

* In the jdbc adapter, the Database#fetch_size accessor and
  :fetch_size option can be used to automatically set the JDBC
  fetch size for JDBC Statement objects created by the database.

* Dataset#with_fetch_size has been added to jdbc adapter datasets,
  setting the fetch size to use on ResultSets generated by the
  dataset.  This generally has the effect of overriding the
  Database fetch_size setting.

* On MySQL 5.6.5+, Sequel supports a :fractional_seconds Database
  option, which will use fractional seconds for timestamp values,
  and have the schema modification code create timestamp columns
  that accept fractional timestamps by default.

* Database#call_mssql_sproc on Microsoft SQL Server now handles
  named parameters:

    DB.call_mssql_sproc(:sproc_name, :args => {
      'input_arg1_name' => 'input arg1 value',
      'input_arg2_name' => 'input arg2 value',
      'output_arg_name' => [:output, 'int', 'result key name']
    })

* Database#drop_view now supports an :if_exists option on SQLite,
  MySQL, H2, and HSQLDB.

* Database#drop_table now supports an :if_exists option on HSQLDB.

* A :filter_limit_strategy association option has been added, for
  choosing the strategy that will be used when filtering/excluding
  by associations with limits.  For backwards compatibility,
  Sequel will fallback to looking at the :eager_limit_strategy
  option.

* A :server_version Database option is now supported on Microsoft
  SQL Server, which will use the value given instead of querying
  for it.
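
  For example (the connection string is hypothetical, and the
  version value given would indicate SQL Server 2012):

    DB = Sequel.connect('tinytds://user:pass@host/db',
      :server_version=>11000000)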
= Backwards Compatibility * AssociationReflection#associated_dataset now returns a joined dataset for associations that require joins (e.g. many_to_many). Anyone using this directly for associations that require joins probably needs to update their code. * Model.associate now adds the association instance methods instead of relying on the def_#{association_type} method doing so. Anyone using custom association types probably needs to update their code. * Model.eager_loading_dataset, .apply_association_dataset_opts, and .def_{add_method,association_dataset_methods,remove_methods} are now deprecated. * Key conditions for associations requiring joins have been moved from the JOIN ON clause to the WHERE clause. This should be optimized the same by the database, but it can break tests that expect specific SQL. * Dataset#_insert_sql and #_update_sql are now private instead of protected. * The install/uninstall rake tasks have been removed. * Model association and association reflection internals have changed significantly, if you were relying on them, you'll probably need to update your code. * Database transaction internals have changed significantly, if you were relying on them, you'll probably need to update your code. * Dataset literalization internals have changed significantly, with the Dataset#*_clause_methods private methods being removed. Custom adapters that used these methods should switch to using the new Dataset.def_sql_method method. * Common table expressions are no longer enabled by default in Sequel. External adapters for databases that support common table expressions should define Dataset#supports_cte?(type) to return true. * Support for RETURNING is no longer determined via introspection. External adapters for databases that support RETURNING should define Dataset#supports_returning?(type) to return true. * The new jdbc adapter type conversion code may not be completely compatible with the previous code. The currently known case where it is different is on jdbc/postgresql, when using an array type where no conversion proc exists, the returned object will be a ruby array containing java objects, instead of a ruby array containing ruby objects. It is recommended that jdbc/postgresql users using array types use the pg_array extension to avoid this issue. sequel-5.63.0/doc/release_notes/4.11.0.txt000066400000000000000000000136701434214120600177660ustar00rootroot00000000000000= New SQL Function Features * SQL::Function now supports an options hash for functions. Unfortunately, since SQL::Function#initialize does not support an options hash, you need to use SQL::Function.new! to create a function with an options hash. You can also call methods on the SQL::Function instance, which will return a new SQL::Function with the appropriate option set. * SQL::Function#quoted has been added, which will return a new SQL::Function instance that will quote the function name (if the database supports quoting function names). * SQL::Function#unquoted has been added, which will return a new SQL::Function instance that will not quote the function name. * SQL::Function#lateral has been added, which will return a new SQL::Function instance that will be preceded by LATERAL when literalized, useful for set-returning functions. * SQL::Function#within_group has been added, for creating ordered-set and hypothetical-set functions that use WITHIN GROUP. * SQL::Function#filter has been added, for creating filtered aggregate function calls using FILTER. 
* SQL::Function#with_ordinality has been added, for creating set returning functions that also include a row number for every row in the set, using WITH ORDINALITY. = New PostgreSQL Features * The jsonb type added in 9.4 is now supported in the pg_json extension. To create a jsonb type manually, you need to call Sequel.pg_jsonb. The new json and jsonb functions and operators added in 9.4 are now supported in the pg_json_ops extension. You can use the jsonb functions and operators by creating a Postgres::JSONBOp using Sequel.pg_jsonb_op. * Database#full_text_search now takes a :rank option to order by the ranking. * Database#refresh_view now supports a :concurrently option, to refresh a materialized view concurrently, supported on 9.4+. * Postgres::ArrayOp#cardinality has been added to the pg_array_ops extension, for easy use of the cardinality method added in 9.4. * Postgres::ArrayOp#unnest in the pg_array_ops extension now accepts arguments. PostgreSQL 9.4+ supports this if unnest is used in the FROM clause. = Other New Features * Sequel now supports derived column lists (table aliases that include column aliases) via Sequel.as and SQL::AliasedMethods#as: Sequel.as(:table, :alias, [:c1, :c2]) # table AS alias(c1, c2) Not all databases support this, but it is in SQL92 and Sequel now supports it by default. Derived column lists make it easier to alias columns when using set-returning functions. Dataset#from_self now supports derived column lists via the new :column_aliases option (which requires the :alias option to take effect). * Database#create_view now supports a :check option, to use WITH CHECK OPTION. You can also use :check=>:local for WITH LOCAL CHECK OPTION. These clauses make it so when you are inserting into/updating the view, you can only modify rows in the underlying table if the result would be returned by the view. * The :after_connect Database option proc now can accept two arguments. If the arity of the proc is 2, Sequel will pass both the connection object and the shard symbol. * The class_table_inheritance plugin now supports a :model_map option similar to the single_table_inheritance plugin, allowing use of the plugin without storing ruby class names in the database. Note that if you use this option, you must set the correct value for the kind column manually when creating the row. * Support for CUBRID/SQLAnywhere emulation has been added to the mock adapter. = Other Improvements * Dataset#import now supports a default slice size, which Sequel sets to 500 on SQLite as that is the limit that SQLite supports in a single statement. * The serialization plugin now only modifies changed_columns in the setter method if the deserialized value has changed, similar to how Sequel's standard column setters work. Note that if you are mutating the deserialized value (i.e. not calling the setter method), you still need to use the serialization_modification_detection plugin. * Plugins that set column values for new objects before creation now use before_validation instead of before_create, which works better when the auto_validations plugin is used. * The :read_only transaction option is now applied per-savepoint on PostgreSQL. Note that this allows you to have a READ ONLY savepoint in a READ WRITE transaction, it does not allow you to have a READ WRITE savepoint in a READ ONLY transaction. * In the ibm_db adapter, fix warnings when using certain column names. 
* Support connecting to a DB2 catalog name in the ibm_db adapter, by providing a :database option without a :host or :port option. * The mock adapter now sets an emulated version when using MySQL and SQLite. Additionally, the emulated version for PostgreSQL and Microsoft SQL Server has been updated. = Backwards Compatibility * External adapters that override Dataset#as_sql_append now need to have the method accept two arguments. * Model.eager_loading_dataset, .apply_association_dataset_opts, and .def_{add_method,association_dataset_methods,remove_methods} have been removed (they were deprecated in 4.10.0). * SQL::WindowFunction and SQL::EmulatedFunction classes are now deprecated, as well as Dataset methods that literalize instances of these classes. These classes are replaced by using options on SQL::Function instances. * Passing a table_alias argument when creating an SQL::JoinClause manually is no longer supported. You now need to pass the table as an SQL::AliasedExpression if the table needs to be aliased. * ASTTransformer no longer transforms the table alias for SQL::JoinClause. This is for consistency with SQL::AliasedExpression. * SQL standard casts are now used in Database#full_text_search, which can break tests that expect specific SQL. * The to_dot extension now uses slightly different output for SQL::Function and SQL::JoinClause instances. sequel-5.63.0/doc/release_notes/4.12.0.txt000066400000000000000000000101541434214120600177610ustar00rootroot00000000000000= New Features * Database#schema now includes :max_length entries for string columns, specifying the size of the string field. The auto_validations plugin now uses this information to automatically set up max_length validations on those fields. * The Dataset join methods now support a :reset_implicit_qualifier option. If set to false, this makes the join not reset the implicit qualifier, so that the next join will not consider this table as the last table joined. Example: DB[:a].join(:b, :c=>:d). join(:e, :f=>:g) # SELECT * FROM a # INNER JOIN b ON (b.c = a.d) # INNER JOIN e ON (e.f = b.g) DB[:a].join(:b, {:c=>:d}, :reset_implicit_qualifier=>false). join(:e, :f=>:g) # SELECT * FROM a # INNER JOIN b ON (b.c = a.d) # INNER JOIN e ON (e.f = a.g) * The Dataset cross and natural join methods now accept an options hash. Example: DB[:a].cross_join(:b, :table_alias=>:c) # SELECT * FROM a CROSS JOIN b AS c * Model#set_nested_attributes has been added to the nested_attributes plugin, which allows you to to set the nested_attributes options to use per-call. This is very helpful if you have multiple forms that handle associated objects, but with different input fields used for the associated objects depending on the form. Example: album.set_nested_attributes(:tracks, params[:track_attributes], :fields=>[:a, :b, :c]) * Database#values has been added on PostgreSQL, which creates a dataset that uses VALUES instead of SELECT. Just as PostgreSQL allows, you can also use orders, limits, and offsets with this dataset. * A :notice_receiver option is now supported in the postgres adapter if the pg driver is used. This should be a proc, which will be passed to the pg connection's set_notice_receiver method. * A Database :readonly option is now supported in the sqlite adapter, which opens the database in a read-only mode, causing an error if a query is issued that would modify the database. 
* A :before_thread_exit option has been added to Database#listen_for_static_cache_updates in the pg_static_cache_updater extension, allowing you to run code before the created thread exits. = Other Improvements * Eager loading limited associations using a UNION now works correctly when an association block is used. This fixes a regression that first occurred in 4.10.0, when the union eager loader became the default eager loader. * When creating a new associated object in the nested_attributes plugin, where the reciprocal association is a many_to_one association, set the cached reciprocal object in the new associated object before saving it. This fixes issues when validations in the associated object require access to the current object, which may not yet be saved in the database. * The prepared_statements and prepared_statements_associations plugins now automatically use explicit column references when preparing statements. This fixes issues on PostgreSQL when a column is added to a table while a prepared statement exists that selects * from the table. Previously, all further attempts to use the prepared statement will fail. This allows you to run migrations that add columns to tables while concurrently running an application that uses the prepared statements plugins. Note that many other schema modifications can cause issues when running migrations while concurrently running an application, but most of those are not specific to usage of prepared statements. * Dataset#insert_select on PostgreSQL now respects an existing RETURNING clause, and won't override it to use RETURNING *. A similar fix was applied to the generalized prepared statements support as well. * The interval parser in the pg_interval extension now supports intervals with 2-10 digits for hours. Previously, it only supported using 2 digits for hours. = Backwards Compatibility * The methods and classes deprecated in 4.11.0 have been removed. * The nested_attributes internal API has changed significantly. If you were calling any private nested_attributes methods, you'll probably need to update your code. sequel-5.63.0/doc/release_notes/4.13.0.txt000066400000000000000000000145501434214120600177660ustar00rootroot00000000000000= New Features * A modification_detection plugin has been added, for automatic detection of in-place column value modifications. This makes it so you don't have to call Model#modified! manually when changing a value in place. * A column_select plugin has been added, for automatically selecting explicitly qualified columns in model datasets. Example: Sequel::Model.plugin :column_select class Album < Sequel::Model end Album.dataset.sql # SELECT albums.id, albums.name, albums.artist_id # FROM albums * An insert_returning_select plugin has been added, for automatically setting up RETURNING clauses for models that select explicit columns. This is useful when using the column_select or lazy_attributes plugins. * A pg_enum extension has been added, for easier dealing with PostgreSQL enum types. The possible values for the type are then returned in the schema hashes under the :enum_values key. It also adds create_enum, drop_enum, and add_enum_value Database methods for migration support. * A round_timestamps extension has been added, for automatically rounding timestamps to database supported precision when literalizing. * A dataset_source_alias extension has been added, for automatically aliasing datasets to their first source, instead of using t1, t2. 
Example: DB.from(:a, DB[:b]).sql # SELECT * FROM a, (SELECT * FROM b) AS t1 DB.extension(:dataset_source_alias) DB.from(:a, DB[:b]).sql # SELECT * FROM a, (SELECT * FROM b) AS b * On Microsoft SQL Server, Sequel now emulates RETURNING support using the OUTPUT clause, as long as only simple column references are used. = Other Improvements * A regression has been fixed in the timestamps and table inheritance plugins, where column values would not be saved when skipping validations. This was first broken in 4.11.0. * A regression has been fixed on JRuby and Rubinius when using Sequel::Model(dataset) if the dataset needs to literalize a symbol (and most do). This was first broken in 4.10.0. * Primary keys are now automatically setup for models even if the models select specific columns. * The lazy_attributes plugin now uses qualified columns in its selection, instead of unqualified columns. * When looking up model instances by primary key, Sequel now uses a qualified primary key if the model uses a joined dataset. * For associations that require joins, Sequel will now use the associated model's selection directly (instead of associated_table.*) if the associated model's selection consists solely of qualified columns. Among other things, this means that a many_to_many association to a model that uses lazy attributes will not eagerly load the lazy attributes by default. * Model#save now uses insert_select if there is an existing RETURNING clause used by the underlying dataset, even if the model selects specific columns. * In Dataset#insert, aliased tables are now automatically unaliased. This allows you to use a dataset with an aliased table and have full SELECT/INSERT/UPDATE/DELETE support, assuming the database supports aliased tables in UPDATE and DELETE. * Dataset#graph now qualifies columns correctly if the current dataset is a joined dataset and it moves the current dataset to a subselect. * Dataset#joined_dataset? is now a public method, and can be used to determine whether the dataset uses a join, either explicitly via JOIN or implicitly via multiple FROM tables. * The Dataset#unqualified_column_for helper method has been added, returning the unqualified version of a possibly qualified column. * The composition and serialization plugins now support validations on the underlying columns. Previously, they didn't update the underlying columns until after validations were performed. This works better when using the auto_validations plugin. * The class_table_inheritance plugin now uses JOIN ON instead of JOIN USING, which makes it work on all databases that Sequel supports. Additionally, the plugin now explicitly selects qualified columns from all of the tables. * The list plugin now adds an after_destroy hook that will renumber rows after the current row, similar to how moving existing values in the list works. * The pg_json extension is now faster when json column value is a plain string, number, true, false, or nil, if the underlying json library handles such values natively. * External jdbc, odbc, and do subadapters can now be loaded automatically without requiring them first, assuming proper support in the external subadapter. * When using create_table on MySQL, correctly handle the :key option to when calling foreign_key with a column reference. * On Oracle, use all_tab_cols instead of user_tab_cols for getting default values when parsing the schema. This makes it work if the user does not own the table. * On Oracle, use all_tables and all_views for Database#tables and Database#views. 
  This works better for users with limited rights.

* Additional disconnect errors are now recognized in the postgres and
  jdbc/mysql adapters.

* Sequel::Model now uses copy constructors (e.g. initialize_copy)
  instead of overriding #dup and #clone.

* The rake default task now runs plugin specs in addition to core and
  model specs.

= bin/sequel Improvements

* Add the sequel lib directory to the front of the load path instead
  of the end, fixing cases where you end up requiring an old version
  of the sequel gem (e.g. by using sequel_pg).

* Add the sequel lib directory as an absolute path, fixing cases where
  you later change the current directory.

* Require sequel later in the code, so that bin/sequel -h doesn't need
  to require sequel, and the full backtrace is not printed if
  requiring sequel raises an error (unless -t is used).

* If an exception is raised, put a newline between the exception
  message and backtrace.

* Don't allow usage of -C with any of -cdDmS.

* If sequel -v is given along with a database or code string to
  execute, print the Sequel version but also continue, similar to how
  ruby -v works.

= Backwards Compatibility

* The switch from JOIN USING to JOIN ON in the class_table_inheritance
  plugin can break certain usage, such as querying using an
  unqualified primary key. Users should switch to using a qualified
  primary key instead.

* Calling Dataset#returning when the underlying database does not
  support it now raises an Error.

sequel-5.63.0/doc/release_notes/4.14.0.txt

= New Features

* Delayed evaluation blocks can now accept the dataset literalizing
  the delayed evaluation as an argument. This makes it so the delayed
  evaluation result can depend on the dataset doing the
  literalization:

    ds = DB[:a].where(Sequel.delay do |ds|
      {Sequel.qualify(ds.first_source, :col)=>1}
    end)
    ds.sql
    # SELECT * FROM a WHERE (a.col = 1)
    ds.from(:b).sql
    # SELECT * FROM b WHERE (b.col = 1)

* Database#create_trigger on PostgreSQL now supports a :when option to
  create a filter for the trigger, so that it is only triggered when
  the filter condition is true.

* You can now override the cache key prefix in the caching plugin by
  overriding the cache_key_prefix class method. This can be useful
  when using a table inheritance plugin.

= Other Improvements

* You can now pass arbitrary types to Dataset#where and related
  methods. Previously, if a type was not explicitly handled, an
  exception would be raised. Now you can pass any object that can be
  literalized. The only exception is that you can't pass Numeric
  objects, since #where and similar methods should only deal with
  boolean expressions.

* association_join and related methods now work correctly if the
  dataset already has an explicit selection.

* A regression has been fixed in the class_table_inheritance plugin
  when using a hierarchy of more than 2 levels, when using the
  superclass to load a subclass instance more than 2 levels below, and
  later attempting to load a column contained in one of the middle
  tables.

* When using _delete or _remove keys in the nested_attributes plugin
  to remove existing associated objects, the associated objects are
  now deleted from the cached association array at time of call (see
  the sketch below). This is for consistency when adding new
  associated objects, where the new associated objects are added to
  the cached association array at time of call.

* The nested_attributes plugin now handles composite primary keys
  correctly when working around validation issues for one_to_one and
  one_to_many associations.
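  As a minimal sketch of the _delete change above (the model names and
  the :destroy option setup here are illustrative, not from the
  original notes):

    Artist.plugin :nested_attributes
    Artist.one_to_many :albums
    Artist.nested_attributes :albums, :destroy=>true

    artist = Artist[1]
    artist.albums.map{|a| a.id}  # => [1, 2]
    artist.set(:albums_attributes=>[{:id=>1, :_delete=>'t'}])
    artist.albums.map{|a| a.id}  # => [2], removed from the cached
                                 # association array at time of call
    artist.save                  # deletes the row for album 1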
* If exception A is raised during a transaction, and exception B is
  raised while attempting to roll back the transaction, the
  transaction code will now raise exception A instead of exception B.

* An additional serialization failure is now detected on PostgreSQL.

* An additional disconnect error is now recognized in the jdbc/jtds
  adapter.

* The code examples in the RDoc are now syntax highlighted, and many
  minor fixes to the code examples in the RDoc were made.
  Additionally, many other improvements were made to the RDoc.

= Backwards Compatibility

* Dataset#delayed_evaluation_sql_append now accepts the delayed
  evaluation as an argument, instead of the callable contained by the
  delayed evaluation.

sequel-5.63.0/doc/release_notes/4.15.0.txt

= New Features

* fdbsql and jdbc/fdbsql adapters have been added, for connecting to
  FoundationDB SQL Layer.

* A split_values plugin has been added, for moving non-column entries
  from the values hash into a separate hash. This allows you to
  select additional columns (e.g. computed columns) when retrieving
  model instances, and be able to save those instances without
  removing the additional columns.

* A Sequel::Model.cache_associations accessor has been added, which
  can be set to false to not cache any association metadata. This can
  fix issues in a development environment that uses code reloading.

* The active_model plugin now supports activemodel 4.2.0beta1.

* More PostgreSQL array types are handled automatically by the
  pg_array extension, such as xml[] and uuid[].

* Creating foreign tables is now supported on PostgreSQL via the
  :foreign and :options create_table options.

* The :nolog Database option is now supported in the informix adapter,
  where it disables the use of transactions.

* PlaceholderLiteralizer#with_dataset has been added, allowing you to
  create another PlaceholderLiteralizer with a modified dataset,
  useful if you want to change the row_proc or any non-SQL dataset
  options.

= Other Improvements

* The tactical_eager_loading plugin once again works correctly with
  limited associations.

* A bug in older versions of MySQL is now worked around when schema
  dumping a table with multiple timestamp columns.

* On PostgreSQL, create_view(:view_name, dataset, :materialized=>true)
  is now reversible.

* Postgres::{JSON,JSONB}Op#to_record and #to_recordset no longer take
  an optional argument. This was supported in PostgreSQL 9.4beta1,
  but removed before PostgreSQL 9.4beta2.

* Dataset#insert now returns the last inserted id in the informix
  adapter.

* Sequel no longer raises an exception in
  AssociationReflection#reciprocal if the associated class has an
  association that does not have a valid associated class.

* Sequel now raises an exception if a primary key is necessary to use
  an association, but the model does not have a primary key.

sequel-5.63.0/doc/release_notes/4.16.0.txt

= New Features

* Model#qualified_pk_hash has been added, which is similar to
  Model#pk_hash, but uses qualified keys (see the sketch below).

* Dataset#distinct now accepts a virtual row block.

* Database#drop_table with :foreign=>true option now drops foreign
  tables on PostgreSQL. Database#create_table with :foreign option is
  now reversible on PostgreSQL.

= Other Improvements

* Sequel::Model.cache_associations = false now skips the database's
  schema cache when loading the schema for a model. This fixes some
  issues in environments that use code reloading.
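  A short sketch of the Model#qualified_pk_hash addition mentioned
  above (assuming an Album model whose table is albums):

    album = Album[1]
    album.pk_hash            # => {:id=>1}
    album.qualified_pk_hash  # => {Sequel.qualify(:albums, :id)=>1}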
* Database#create_table? and #create_join_table? no longer use IF NOT
  EXISTS if indexes are being created.

* Model.primary_key_hash and .qualified_primary_key_hash have been
  optimized.

* validates_unique in the validation_helpers plugin now uses a
  qualified primary key if the model's dataset is joined. This fixes
  a case when the auto_validations and class_table_inheritance plugins
  are used together.

* Disconnect errors are now recognized in the postgres adapter when
  SSL is used for connecting.

* Empty string default values are no longer converted to nil default
  values on MySQL.

* Database#foreign_key_list now works correctly on Microsoft SQL
  Server 2005.

sequel-5.63.0/doc/release_notes/4.17.0.txt

= New Features

* A :preconnect Database option has been added, for automatically
  creating the maximum number of connections to the database on
  instantiation. This is useful when there is high latency for
  initial connection setup, where Sequel's usual approach of
  connecting as needed can cause pauses at runtime.

* Database#sharded? has been added for checking whether the Database
  object uses multiple servers.

* Dataset#server? has been added, for returning a cloned dataset
  associated with the given server/shard if the dataset does not
  already have a server set. This returns the receiver if the server
  has already been set or the Database is not sharded.

= Other Improvements

* Sequel now uses the correct shard when deleting model instances.
  Previously, the correct shard was only used in the unoptimized case,
  not in the optimized case.

* Sequel now uses the correct shard when using Dataset#insert_select
  on PostgreSQL. This was first broken in the 4.13.0 release.

* Sequel now correctly handles Sequel::SQL::Blob instances used in
  bound variables in the postgres adapter. Previously this resulted
  in duplicate apostrophes being used.

* When using the jdbc/sqlite3 adapter with jdbc-sqlite3 3.8.7, Sequel
  now handles date objects and empty blobs correctly, working around
  bugs in the driver.

= Backwards Compatibility

* In the update_or_create plugin, Model.update_or_create now always
  returns the object. Previously it would not return the object if
  the object already existed but no updates were necessary.

sequel-5.63.0/doc/release_notes/4.18.0.txt

= New Features

* An :auto_increment key has been added to the schema information for
  primary key columns on JDBC, PostgreSQL, MySQL, MSSQL, DB2, and
  SQLite. This fixes issues in the schema_dumper extension, so that
  non-auto-incrementing integer primary keys are no longer dumped as
  auto-incrementing.

  For adapters that don't have specific support for detecting auto
  incrementing primary keys, Sequel now assumes a primary key is auto
  incrementing only if it is not a composite primary key and the type
  contains int (e.g. int, integer, bigint).

= Other Improvements

* Dataset#empty? now ignores any order on the dataset. Previously,
  calling empty? on a dataset ordered by an alias in the SELECT list
  could raise an exception.

* Schema qualified tables are now handled correctly in
  many_through_many associations.

* Using a hash as the value for the :eager association option now
  works correctly.

* All PG::ConnectionBad exceptions are now treated as disconnect
  errors in the postgres adapter. This should be more robust than the
  previous method of attempting to parse the exception message.
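  A sketch of what this means for application code (the rescue here is
  purely illustrative; normally the connection pool handles this for
  you by discarding the dead connection):

    begin
      DB[:albums].count
    rescue Sequel::DatabaseDisconnectError
      # any PG::ConnectionBad is now wrapped in a disconnect error,
      # so the pool knows to remove the underlying connection
    end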
* Sequel now skips a hash allocation when issuing queries through
  datasets if sharding is not used.

* Sequel no longer uses the JDBC schema parsing in the jdbc/sqlserver
  adapter. Instead, it uses the MSSQL schema parsing, which should be
  more accurate than the generic JDBC schema parsing.

sequel-5.63.0/doc/release_notes/4.19.0.txt

= New Features

* Model#get_column_value and #set_column_value have been added for
  getting/setting column values. Historically, to get column values,
  you would just send the column name, and to set column values you
  would send the column name suffixed by =. However, this doesn't
  work when such methods are already defined by ruby or Sequel itself
  (e.g. class, model, object_id).

  Both #get_column_value and #set_column_value are just aliases to
  #send, but you can safely override the methods to handle column
  names that conflict with existing method names. Both the core model
  code and all of the plugins that ship with Sequel have been updated
  to use these new methods. External plugins are strongly encouraged
  to switch to these new methods.

* A column_conflicts plugin has been added to automatically handle
  columns that conflict with existing method names. So if you have a
  column named "model" in your table, you can just load the
  column_conflicts plugin and Sequel will handle things correctly.

* An accessed_columns plugin has been added, which records which
  columns have been accessed for a model instance. This is useful in
  development when you are planning on restricting the columns
  selected by the dataset that retrieved the instance. SELECTing only
  the columns you need can result in significant performance
  increases, and the accessed_columns plugin makes that easier.

* Model#cancel_action has been added for canceling actions in before
  hooks, instead of having the before hook methods return false (which
  is still supported). In addition to being easier to use, this also
  makes it possible to use custom exception messages for hook
  failures, if you are using the default behavior of raising
  exceptions on save failures.

= Other Improvements

* Dataset#union, #intersect, and #except now automatically handle
  datasets with raw SQL, by wrapping such datasets in subqueries.

* The integer migrator now stores the correct migration number when
  migrating with allow_missing_migration_files set.

* A :timeout=>nil Database option on MySQL no longer sets a
  wait_timeout.

sequel-5.63.0/doc/release_notes/4.2.0.txt

= New Features

* LATERAL subqueries are now supported on PostgreSQL 9.3+, HSQLDB, and
  DB2 via Dataset#lateral:

    DB.from(:a, DB[:b].where(:c=>:a__d).lateral)
    # SELECT * FROM a,
    # LATERAL (SELECT * FROM b WHERE (c = a.d)) AS t1

  You can use a similar syntax when joining tables:

    DB[:a].cross_join(DB[:b].where(:c=>:a__d).lateral)
    # SELECT * FROM a
    # CROSS JOIN LATERAL (SELECT * FROM b WHERE (c = a.d)) AS t1

  If you are using Microsoft SQL Server, you can use the new
  mssql_emulate_lateral_with_apply extension to emulate LATERAL
  subqueries via CROSS/OUTER APPLY.

* The static_cache plugin now supports a :frozen=>false option. When
  this option is used, instead of returning the frozen cached values,
  the model now returns new, unfrozen objects that can be modified.
  Note that if you make any database modifications, you are
  responsible for updating the cache manually.

* A pg_static_cache_updater extension has been added.
  This extension can automatically update the caches used by the
  static_cache plugin, whenever the underlying database table is
  updated, using PostgreSQL's notification channels.

  This works by defining triggers on the underlying model tables that
  use NOTIFY, and spinning up a thread in your application processes
  that uses LISTEN, and refreshes the cache for the related model
  whenever it receives a notification that the underlying table has
  been modified.

  This extension should make it possible to use the static_cache
  plugin with the :frozen=>false option for any table that is small
  and not frequently updated.

* A from_block extension has been added that makes Database#from
  operate like Dataset#from in regards to a passed block, allowing you
  to write code like:

    DB.from{table_returning_function(arg1, arg2)}

* Database#supports_partial_indexes? has been added for checking for
  partial index support. Partial indexes are now supported on SQLite
  3.8.0+.

* A pg_loose_count extension has been added for fast approximate
  counts of PostgreSQL tables. This uses the system tables and should
  be fairly accurate if the table statistics are up to date:

    DB.loose_count(:table)

* The Dataset#use_cursor method in the postgres adapter now supports a
  :cursor_name option. You can set this option if you want to use
  nested cursors.

* The mysql2 adapter now supports a :flags Database option allowing
  you to set custom mysql2 flags
  (e.g. ::Mysql2::Client::MULTI_STATEMENTS).

= Other Improvements

* Dataset#freeze has been implemented. Previously, it was not
  implemented, so Object#freeze was used, which resulted in a dataset
  that wasn't cloneable. Dataset#freeze now works as expected,
  resulting in a cloneable dataset, but it doesn't allow methods to be
  called that mutate the receiver.

* Dataset#dup has been implemented. Previously, it was not
  implemented, so Object#dup was used, which resulted in a dataset
  that shared an options hash with the receiver, so modifying the
  dup's opts could also change the original dataset. Now dup works
  similarly to clone, except that the returned object will not be
  frozen.

* Model#dup has been implemented. Previously, it was not implemented,
  so Object#dup was used, which resulted in a model instance that
  shared the values hash with the receiver, so modifying the dup's
  values also changed the original's values. Now, dup does a shallow
  copy of some of the internal data structures as well, so the copy is
  more independent. Note that you still need to be careful if you
  mutate objects:

    m = Model.new(:a=>'a')
    m2 = m.dup
    m.a.gsub!('a', 'b') # also changes m2

* Model#clone has been implemented. Previously, it had the same
  issues as dup. Now, it calls the new Model#dup, but also freezes
  the returned object if the receiver is frozen.

* Placeholder literal strings with an empty parameter hash are now
  handled correctly.

= Backwards Compatibility

* The static_cache plugin now disallows saving/destroying instances
  unless the :frozen=>false option is used. As the cached objects
  returned by the model were frozen anyway, this affects creating new
  instances or saving/destroying instances returned from the
  underlying dataset.

* Model#set_values has been removed (it was deprecated starting in
  Sequel 4.0).

* The following Model class methods are no longer defined:
  insert_multiple, set, to_csv, paginate, query, set_overrides,
  set_defaults. By default, these methods used to call the dataset
  method of the same name, but as those methods are no longer defined
  on datasets by default, they also resulted in a NoMethodError.
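  For example, code that relied on the removed pagination-related
  methods would now need to enable the relevant dataset extension and
  call the dataset method directly (a hedged sketch; the Album model
  is illustrative):

    # Sequel <4.2:
    #   Album.paginate(1, 25)

    # Sequel 4.2+:
    DB.extension :pagination
    Album.dataset.paginate(1, 25)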
* Dataset#query!, #set_defaults!, and #set_overrides! are no longer
  defined on all datasets. They are now only defined on datasets that
  use the query or set_overrides extensions.

* Partial indexes are no longer returned by Database#indexes on MSSQL,
  for consistency with PostgreSQL. Note that the same change was
  desired for SQLite, but SQLite currently does not offer reflection
  support for determining which indexes are partial.

* Database#foreign_key_list on MSSQL will now return a
  SQL::QualifiedIdentifier instead of a symbol for the :table entry if
  the schema of the referenced table does not match the schema of the
  referencing table.

sequel-5.63.0/doc/release_notes/4.20.0.txt

= New Features

* A :before_retry option has been added to Database#transaction, which
  specifies a proc to call when retrying if the :retry_on option is
  used. This can be used to implement additional logging, sleeping
  between retries, or other things.

* The to_json method :root option in the json_serializer plugin can
  now be a string value to specify the name for the object key,
  instead of using the underscored model name.

* Dataset#paged_each now returns an enumerator if not passed a block.

* You can now set the :instance_specific association option to false.
  Previously, it was automatically set to true in some cases. If you
  know the association does not depend on anything instance-specific
  other than the foreign/primary key, setting this option can allow
  Sequel to perform some additional optimizations.

= Other Improvements

* Eager loading queries are now skipped if there are no matching keys.
  There was code to check this previously, but it was accidentally
  removed in an earlier refactoring.

* Eager loading an association with a limit and an eager block and
  cascaded associations now works correctly when the window_function
  limit strategy is used (the default on databases that support window
  functions).

* Eager loading an association with a limit with an eager block now
  works correctly on databases that do not support window functions
  but do support correlated subqueries.

* The parent association is now set on associated objects when loading
  descendants in the rcte_tree plugin. This allows the parent method
  on any of the descendants to work without issuing a database query.

* The prepared_statements_associations plugin now only uses prepared
  statements if association metadata is being cached. Previously, it
  would use prepared statements even if association metadata was not
  cached, which could leak the prepared statements.

* Model#dup now duplicates the associations hash for the object.

* Model#freeze no longer validates an object if the errors for the
  object are already frozen. The static_cache plugin now freezes the
  errors before freezing the object, so that it doesn't validate the
  object. This can skip many database queries when the
  auto_validations plugin is used and there is a unique constraint or
  index on the related table.

* AUTOINCREMENT is now used again on SQLite by default for primary
  keys. It was removed when :auto_increment was added to the schema
  hashes, but the removal changed SQLite's behavior. This restores
  the previous behavior.

* Microsoft SQL Server's bit type is now recognized as a boolean type
  by the schema dumper.

* The pg_enum extension's create_enum method can now be used in
  reversible migrations.

* set_column_type with the :auto_increment=>true option once again
  works on MySQL.
  It had been broken since Sequel started adding :auto_increment to
  the schema hashes.

* The mysql2 adapter now recognizes the :charset option as a synonym
  for :encoding.

* The swift adapter now respects database and application timezone
  settings.

= Backwards Compatibility

* AssociationReflection#apply_ruby_eager_limit_strategy no longer
  checks that the strategy is :ruby; callers are now expected to check
  the value themselves. This should only matter if you are using
  custom association types.

sequel-5.63.0/doc/release_notes/4.21.0.txt

= New Features

* SQL::GenericExpression#=~ has been added as an alternative method of
  specifying equality/inclusion/identity. Previously, you had to use
  a hash. This led to some slightly weird looking syntax when used
  inside virtual rows:

    DB[:items].where{{function(:column)=>0}}
    # SELECT * FROM items WHERE function(column) = 0

  You can now use =~ as an equivalent:

    DB[:items].where{function(:column) =~ 0}
    # SELECT * FROM items WHERE function(column) = 0

  Like when using a hash, this works also for inclusion:

    DB[:items].where{function(:column) =~ [1,2,3]}
    # SELECT * FROM items WHERE function(column) IN (1, 2, 3)

  for identity:

    DB[:items].where{function(:column) =~ nil}
    # SELECT * FROM items WHERE function(column) IS NULL

  and for matching (on MySQL/PostgreSQL):

    DB[:items].where{function(:column) =~ /foo/i}
    # SELECT * FROM items WHERE function(column) ~* 'foo'

  This new syntax makes more complex conditions simpler to express:

    DB[:items].where{(function(:column) =~ 0) | (column =~ 1)}
    # SELECT * FROM items WHERE function(column) = 0 OR column = 1

  compared to previous versions of Sequel:

    DB[:items].where{Sequel.|({function(:column) => 0}, {:column => 1})}

  On ruby 1.9+, you can also use SQL::GenericExpression#!~ to invert
  the condition:

    DB[:items].where{function(:column) !~ 0}
    # SELECT * FROM items WHERE function(column) != 0

    DB[:items].where{function(:column) !~ [1,2,3]}
    # SELECT * FROM items WHERE function(column) NOT IN (1, 2, 3)

    DB[:items].where{function(:column) !~ nil}
    # SELECT * FROM items WHERE function(column) IS NOT NULL

    DB[:items].where{function(:column) !~ /foo/i}
    # SELECT * FROM items WHERE function(column) !~* 'foo'

  This makes it simpler to write inverted conditions. Ruby 1.8
  doesn't support overriding the !~ method, but you can still use the
  unary ~ method to invert:

    DB[:items].where{~(function(:column) =~ 0)}

* Database#add_named_conversion_proc has been added on PostgreSQL to
  make it easier to add conversion procs by name instead of by OID:

    DB.add_named_conversion_proc(:citext){|s| s}

* Database#full_text_search on PostgreSQL now supports :tsquery and
  :tsvector options for using existing tsquery and/or tsvector
  arguments, instead of assuming the arguments are query terms or the
  text to be searched.

= Other Improvements

* Database#transaction now works inside after_commit and
  after_rollback hooks. Previously, it didn't work correctly as it
  thought it was already inside the previously committed/rolled back
  transaction.

* Sequel.pg_jsonb now returns JSONBOp instances instead of JSONOp
  instances when passed an object other than an Array or Hash.

* The tinytds adapter no longer tries to cancel a query on a closed
  connection, which was causing an exception to be raised.

= Backwards Compatibility

* The default root name used in the JSON serializer is now demodulized
  before being underscored. This changes the behavior when the model
  is namespaced.
  For example, if the model class name is Mod::Model, the previous
  default root name would be "mod/model", while the new default root
  name is "model".

* If you were calling =~ or !~ on SQL::GenericExpression objects and
  expecting the default ruby behavior of returning nil for =~ and true
  for !~, you'll have to update your code.

sequel-5.63.0/doc/release_notes/4.22.0.txt

= New Features

* A csv_serializer plugin has been added, for serializing model
  objects and datasets to CSV, or parsing CSV into a model object or
  array of model objects. Behavior and API are similar to the
  existing xml_serializer and json_serializer plugins.

* Sequel::MassAssignmentRestriction is now raised for mass assignment
  errors in strict mode (the default). Previously the generic
  Sequel::Error was used.

= Other Improvements

* On Ruby 1.9+, Sequel now uses condition variables instead of busy
  waiting for connections in the threaded connection pools. This can
  significantly decrease overhead when waiting for connections, which
  can improve performance on machines that are compute bottlenecked.

  This also makes the connection pool checkouts more fair, reducing
  the chance that a request for a connection will fail with a
  PoolTimeout when under heavy resource contention.

* Sequel now attempts to avoid hash allocations and rehashing in
  performance sensitive code. This can speed up Dataset#clone,
  Model#clone, and Model#dup by about 60%, and speed up method chains
  such as:

    ds.select(:a).where(:a=>1).order(1)

  by almost 20%.

* Symbol#to_proc is used instead of explicit blocks across the
  library, which should improve performance slightly on Ruby 1.9+.

* When Model#cancel_action is used in association before hooks, Sequel
  will now return false if raise_on_save_failure = false, instead of
  raising an exception. This mirrors the behavior when
  Model#cancel_action is used inside model save hooks when
  raise_on_save_failure = false.

* Dataset#to_hash and #to_hash_groups now work correctly on model
  datasets when given a single array argument.

* The auto_validations plugin now works correctly on columns that have
  a default value, but where the default value is not parseable into a
  ruby object by the adapter.

* The tree plugin now correctly sets the reciprocal association in the
  children association it creates.

* In the pg_array extension, if the :default value when creating a
  column is set to a ruby array, Sequel will now convert it to a
  PostgreSQL array.

* Sequel no longer adds a :max_length entry to the schema for
  varchar(max) columns on Microsoft SQL Server.

* Adapters are now specified to set the :default schema entry for
  columns to nil if the adapter can determine the :default is nil.
  Adapters that ship with Sequel already did this, but previously it
  was unspecified behavior.

* Sequel no longer silently ignores the :jdbc_properties Database
  option in the jdbc adapter. Previously, it only used the
  :jdbc_properties option if it was not able to connect without it.

* Bit types are now converted to boolean values in the ODBC adapter.

= Backwards Compatibility

* The db2, dbi, fdbsql, firebird, jdbc/fdbsql, informix, and openbase
  adapters are now deprecated and will be removed in a future version
  of Sequel.

sequel-5.63.0/doc/release_notes/4.23.0.txt

= New Features

* An update_refresh plugin has been added, for refreshing a model
  instance when updating.
  The default behavior is to only refresh when inserting. However, if
  you have triggers on the model's table, it's a good idea to refresh
  when updating to pick up the possibly changed values. On databases
  that support UPDATE RETURNING, such as PostgreSQL, the update and
  refresh are done in a single query.

* A delay_add_association plugin has been added, for delaying add_*
  method calls for associations until after the receiver has been
  saved, if the receiver is a new object. Example:

    artist = Artist.new(:name=>'Foo')
    artist.add_album(Album.new(:name=>'Bar'))
    # No database queries yet

    artist.save # Saves artist, then album

* A validate_associated plugin has been added, for validating
  associated objects when validating the current object. This was
  extracted from the nested_attributes plugin, and is also used by the
  delay_add_association plugin.

  For example, if you have an albums association and you want to
  validate all associated objects before saving the current object,
  you can call validate_associated_object for each object:

    def validate
      super
      reflection = association_reflection(:albums)
      associations[:albums].each do |obj|
        validate_associated_object(reflection, obj)
      end
    end

= Other Improvements

* Database#transaction now returns the block return value if
  :rollback=>:always is used. Previously, it would return nil in that
  case.

* Postgres::JSONBOp#[] and #get_text now return JSONBOp instances
  instead of JSONOp instances.

* Model#move_to, #move_up, and #move_down in the list plugin now
  automatically handle out-of-range targets by defaulting to the first
  or last position in the list. Previously, using an out of range
  target would raise an exception.

* Database#add_named_conversion_proc on PostgreSQL now works for enum
  types.

* dataset.call_sproc(:insert, ...) now works correctly on JDBC.

* postgresql:// connection strings are now supported, since that is
  the protocol name supported by libpq.

* Sequel has switched from rspec to minitest/spec for testing, and now
  uses random test order when testing. During the conversion process,
  many test order dependency bugs were fixed.

= Backwards Compatibility

* The deprecated fdbsql, jdbc/fdbsql, and openbase adapters have been
  removed.

sequel-5.63.0/doc/release_notes/4.24.0.txt

= New Features

* A pg_inet_ops extension has been added, for DSL support for calling
  PostgreSQL inet functions and operators. Example:

    r = Sequel.pg_inet_op(:inet)

    ~r                                 # ~inet
    r & :other                         # inet & other
    r | :other                         # inet | other
    r << :other                        # inet << other
    r >> :other                        # inet >> other

    r.contained_by(:other)             # inet << other
    r.contained_by_or_equals(:other)   # inet <<= other
    r.contains(:other)                 # inet >> other
    r.contains_or_equals(:other)       # inet >>= other
    r.contains_or_contained_by(:other) # inet && other

    r.abbrev           # abbrev(inet)
    r.broadcast        # broadcast(inet)
    r.family           # family(inet)
    r.host             # host(inet)
    r.hostmask         # hostmask(inet)
    r.masklen          # masklen(inet)
    r.netmask          # netmask(inet)
    r.network          # network(inet)
    r.set_masklen(16)  # set_masklen(inet, 16)
    r.text             # text(inet)

* The association_pks plugin now supports a :delay_pks association
  option. When set to true, this makes the methods created by the
  plugin usable on new objects, by delaying the saving of the
  associated pks until after the new object has been saved.

  When set to :always, this also changes the behavior of the methods
  for existing objects, so that nothing is persisted until the object
  has been saved.
  Example:

    Album.plugin :association_pks
    Album.many_to_many :tags, :delay_pks=>true

    album = Album.new(:tag_pks=>[1,2,3]) # No database query
    album.save # Queries to insert album, and then update albums_tags

* The class_table_inheritance plugin now supports subclasses that
  don't require additional columns, and therefore do not need to join
  to additional tables. It now loads the single_table_inheritance
  plugin and supports options that were previously only supported by
  single_table_inheritance, such as the :key_map and :key_chooser
  options.

* The validation_helpers plugin now supports a :from=>:values option
  in the validation methods, which will take the value directly from
  the values hash instead of calling the related method. This allows
  validation_helpers to differentiate between validations on the
  underlying database column and validations on the model.

  The auto_validations plugin has been modified to use this feature,
  since all validations it generates are for validations on the
  underlying database columns.

* The auto_validations plugin now supports options to pass to each of
  the underlying validation methods:

    Sequel::Model.plugin :auto_validations,
      :unique_opts=>{:only_if_modified=>true}

  In addition to :unique_opts, there is support for :not_null_opts
  (for NOT NULL columns without a default), :explicit_not_null_opts
  (for NOT NULL columns with a default), :max_length_opts, and
  :schema_types_opts.

* The update_refresh plugin now accepts a :columns option, which
  specifies the columns to refresh. This option is currently only
  respected if the related dataset supports RETURNING.

* The :timeout option to Database#listen in the postgres adapter can
  now be a callable object; previously it had to be Numeric. This
  allows you to dynamically change the timeout based on current
  application state.

= Other Improvements

* The uniqueness validations added by the auto_validations plugin now
  use a symbol key in the related Errors instance if the underlying
  index was on a single column. Previously, the uniqueness
  validations for a single column would use an array key in the
  related Errors instance.

* The jdbc subadapters now correctly handle 64-bit autoincrementing
  primary keys.

* The jdbc subadapters now work correctly if they issue queries while
  the subadapter is being loaded. This can happen in the
  jdbc/postgresql adapter if the pg_hstore extension is used.

= Backwards Compatibility

* The deprecated db2 and dbi adapters have been removed.

sequel-5.63.0/doc/release_notes/4.25.0.txt

= New Features

* The =~ and !~ methods are now defined on ComplexExpressions in
  addition to GenericExpressions, allowing the following code to work:

    DB[:table].where{(column1 + column2) =~ column3}

* Dataset#group_append has been added for appending to an existing
  GROUP BY clause:

    ds = DB[:table].group(:column1)
    # SELECT * FROM table GROUP BY column1
    ds = ds.group_append(:column2)
    # SELECT * FROM table GROUP BY column1, column2

* An inverted_subsets plugin has been added, for automatic creation of
  methods for the inversion of the subset criteria. For example:

    Album.plugin :inverted_subsets
    Album.subset :published, :published=>true

    Album.published
    # SELECT * FROM albums WHERE published IS TRUE
    Album.not_published
    # SELECT * FROM albums WHERE published IS NOT TRUE

  By default, the subset method name is prefixed with "not_".
  You can pass a block to override the default behavior:

    Album.plugin(:inverted_subsets){|name| "exclude_#{name}"}
    Album.subset :published, :published=>true

    Album.exclude_published
    # SELECT * FROM albums WHERE published IS NOT TRUE

* A singular_table_names plugin has been added, which changes Sequel
  to not pluralize table names by default.

    Sequel::Model.plugin :singular_table_names
    class FooBar < Sequel::Model; end
    FooBar.table_name # => foo_bar

* Dataset#insert_conflict and #insert_ignore have been added on
  PostgreSQL. When using PostgreSQL 9.5+, they allow you to ignore
  unique or exclusion constraint violations on inserting, or to do an
  update instead:

    DB[:table].insert_conflict.insert(:a=>1, :b=>2)
    # INSERT INTO TABLE (a, b) VALUES (1, 2)
    # ON CONFLICT DO NOTHING

  You can pass a specific constraint name using :constraint, to only
  ignore a specific constraint violation:

    DB[:table].insert_conflict(:constraint=>:table_a_uidx).
      insert(:a=>1, :b=>2)
    # INSERT INTO TABLE (a, b) VALUES (1, 2)
    # ON CONFLICT ON CONSTRAINT table_a_uidx DO NOTHING

  If the unique or exclusion constraint covers the whole table (e.g.
  it isn't a partial unique index), then you can just specify the
  column using the :target option:

    DB[:table].insert_conflict(:target=>:a).insert(:a=>1, :b=>2)
    # INSERT INTO TABLE (a, b) VALUES (1, 2)
    # ON CONFLICT (a) DO NOTHING

  If you want to update the existing row instead of ignoring the
  constraint violation, you can pass an :update option with a hash of
  values to update. You must pass either the :target or :constraint
  options when passing the :update option:

    DB[:table].insert_conflict(:target=>:a,
      :update=>{:b=>:excluded__b}).insert(:a=>1, :b=>2)
    # INSERT INTO TABLE (a, b) VALUES (1, 2)
    # ON CONFLICT (a) DO UPDATE SET b = excluded.b

  Additionally, if you only want to do the update in certain cases,
  you can specify an :update_where option, which will be used as a
  filter. If the row doesn't match the conditions, the constraint
  violation will be ignored, but the row will not be updated:

    DB[:table].insert_conflict(:constraint=>:table_a_uidx,
      :update=>{:b=>:excluded__b},
      :update_where=>{:table__status_id=>1}).insert(:a=>1, :b=>2)
    # INSERT INTO TABLE (a, b) VALUES (1, 2)
    # ON CONFLICT ON CONSTRAINT table_a_uidx
    # DO UPDATE SET b = excluded.b WHERE (table.status_id = 1)

* Dataset#group_rollup and #group_cube are now supported when using
  PostgreSQL 9.5+.

* Sequel now supports Dataset#returning when using prepared statements
  and bound variables:

    DB[:table].returning.prepare(:insert, :i, :col=>:$col).
      call(:col=>42)
    # => [{:col=>42}]

= Other Improvements

* The serialization plugin now integrates with the dirty plugin, so
  that column changes are detected correctly. However, column values
  that are changed and then changed back to the original value are
  still detected as changed.

* Dataset#for_update and similar locking methods now cause Sequel not
  to use the :read_only shard if sharding is used.

* The association_pks plugin now clears cached delayed associated pks
  when the object is refreshed.

* The :collate column option when adding columns now literalizes
  non-String values on PostgreSQL. Previously, the :collate option
  value was used verbatim. This is because PostgreSQL's collations
  generally require quoting as they are uppercase or mixed-case.

* Sequel's metadata parsing methods now support Microsoft SQL Server
  2012+ when used in case sensitive mode.

* Sequel now recognizes an additional check constraint violation
  exception on SQLite.
* Sequel now recognizes constraint violations when using the
  swift/sqlite adapter.

* Sequel now automatically REORGs tables when altering them in the
  jdbc/db2 adapter.

= Backwards Compatibility

* Sequel now defaults to ignoring NULL values when using IN/NOT IN
  with an empty array. Previously, code such as:

    DB[:table].where(:column=>[])

  would be literalized as:

    SELECT * FROM table WHERE (column != column)

  This yields a NULL value when column is NULL, similarly to how most
  other SQL operators work. Unfortunately, most databases do not
  optimize this, and such a query can require a sequential scan of the
  table. Sequel previously shipped with an empty_array_ignore_nulls
  extension that literalized the query to:

    SELECT * FROM table WHERE (1 = 0)

  which databases will generally optimize to a constant false value,
  resulting in much faster queries. This behavior is now the default.
  Users that desire the previous behavior can use the new
  empty_array_consider_nulls extension.

* The deprecated firebird and informix adapters have been removed.

* Calling prepare on a prepared statement now raises an exception. It
  was supported accidentally before, as prepared statements are
  dataset instances.

* Model::DatasetModule#subset now calls Model.subset instead of the
  other way around. This makes it possible to modify the behavior of
  subset in a plugin.

* The :collate column option change on PostgreSQL can break code that
  used already quoted values in symbols. For example:

    String :column_name, :collate=>:'"C"'

  would need to change to:

    String :column_name, :collate=>:C
    # or
    String :column_name, :collate=>'"C"'

sequel-5.63.0/doc/release_notes/4.26.0.txt

= New Features

* Add Dataset#grouping_sets to support GROUP BY GROUPING SETS on
  PostgreSQL 9.5+, MSSQL 2008+, Oracle, DB2, and SQLAnywhere:

    DB[:test].group([:type_id, :b], :type_id, []).grouping_sets
    # SELECT * FROM test
    # GROUP BY GROUPING SETS((type_id, b), (type_id), ())

* Sequel::NoMatchingRow exceptions raised by Sequel now give access to
  the dataset that raised the exception via the dataset method. This
  makes it easier to write generic error handling code (a sketch
  appears at the end of these notes).

* Support :if_exists option to drop_column on PostgreSQL:

    DB.drop_column :t, :col, :if_exists=>true
    # ALTER TABLE t DROP COLUMN IF EXISTS col

= Other Improvements

* Make the class_table_inheritance plugin work correctly without an
  sti_key. This was broken in a recent refactoring to make class
  table inheritance support multiple classes for a single table.

* Make Class.new(ModelClass){set_dataset :table} work correctly on
  ruby 1.8. This was broken in a refactoring to allow the
  singular_table_names plugin to work.

* Make offset emulation via ROW_NUMBER better handle ambiguous column
  names for datasets without an ORDER BY clause, but with an explicit
  SELECT clause.

* Make the pg_range extension use PostgreSQL range function
  constructors instead of casting string literals to the appropriate
  range type, if the range type is known. This allows arbitrary
  expressions to be used inside ranges, such as CURRENT_TIMESTAMP in
  timestamp ranges.

* Make Dataset#== not consider frozen status.

* Allow Dataset#prepare on already prepared statements in situations
  where determining the SQL for a prepared statement requires it.

* Detect additional disconnect errors when using the tinytds adapter.
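  To illustrate the Sequel::NoMatchingRow change above, a minimal
  sketch of generic error handling code (the LOGGER constant is
  hypothetical):

    begin
      DB[:albums].where(:id=>1).first!
    rescue Sequel::NoMatchingRow => e
      # e.dataset is the dataset that raised the exception
      LOGGER.warn("no row for: #{e.dataset.sql}")
    end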
sequel-5.63.0/doc/release_notes/4.27.0.txt

= New Features

* A before_after_save plugin has been added, which for newly created
  objects refreshes the object before calling after_create, and resets
  the modified flag before calling after_update. Previously, these
  actions were not taken until after after_save was called. This will
  be the default behavior in Sequel 5.

* In create_table blocks, primary_key now supports a :keep_order
  option, which will not change the order in which the primary key is
  added. Without this option, Sequel's historical behavior of making
  the primary key column the first column is used.

    DB.create_table(:foo) do
      Integer :a
      primary_key :b, :keep_order=>true
    end
    # CREATE TABLE foo
    # (a integer, b integer PRIMARY KEY AUTOINCREMENT)

  The schema dumper now uses this option if necessary, allowing it to
  correctly dump tables where the primary key column is not the first
  column.

* Dataset#single_record! and #single_value! have been added. These
  are faster versions of #single_record and #single_value that don't
  require cloning the dataset. If you are sure the dataset will only
  return a single row or a single value, you can use these methods for
  better performance.

* The new jsonb and json functions added in PostgreSQL 9.5 are now
  supported by the pg_json_ops extension.

    Sequel.pg_jsonb_op(:metadata).set(%w'a b', [1,2,3])
    # jsonb_set("metadata", ARRAY['a','b'], '[1,2,3]'::jsonb, true)

= Other Improvements

* Sequel.synchronize is no longer a stub on MRI. Testing has shown
  that relying on the global interpreter lock to protect
  multi-threaded access to hashes is not safe in all environments, so
  Sequel now uses a mutex on MRI just as it does on other ruby
  interpreters.

* Database#schema now sets the :auto_increment option correctly for
  auto incrementing primary keys if they are not the first column in
  the table.

* Dataset#single_value and #with_sql_single_value are now slightly
  faster by avoiding an array allocation.

* Model datasets can now use #with_sql_single_value and return a
  single value, instead of an array in [:column_name, value] format.

* Model#persisted? in the active_model plugin will now return false if
  the transaction that inserts the row for the object is rolled back.

* bin/sequel now warns if additional arguments are passed that it
  ignores. In Sequel 5, bin/sequel will raise an error in these
  cases.

* Database#foreign_key_list on PostgreSQL now returns referenced
  composite keys in the correct order.

* The postgres adapter now works with postgres-pr 0.7.0. Note that
  postgres adapter users that want a pure-ruby driver are encouraged
  to use jeremyevans-postgres-pr as that has many additional bugfixes
  and is the version tested with Sequel on a regular basis.

* The jdbc/postgresql adapter now recognizes an additional disconnect
  error.

= Backwards Compatibility

* Users who were relying on #with_sql_single_value returning an array
  instead of a single value for model datasets need to update their
  code.

sequel-5.63.0/doc/release_notes/4.28.0.txt

= New Features

* A subset_conditions plugin has been added, which adds a method for
  each subset that returns the filter conditions for the subset.
  This makes it easier to reuse the subset conditions:

    class Foo < Sequel::Model
      plugin :subset_conditions
      subset :active, :active=>true
    end

    Foo.exclude(Foo.active_conditions)
    Foo.where(:a=>1).or(Foo.active_conditions)

* A boolean_subsets plugin has been added, which adds a subset for
  each boolean column:

    # Assume boolean column :active
    Foo.plugin :boolean_subsets

    Foo.active
    # SELECT * FROM foos WHERE (active IS TRUE)

  You can provide a block to the plugin to change the arguments passed
  to subset:

    Foo.plugin :boolean_subsets do |column|
      [:"where_#{column}", column]
    end

    Foo.where_active
    # SELECT * FROM foos WHERE active

  As with similar plugins, you can add the boolean_subsets plugin to
  Sequel::Model itself, and all subclasses created afterward will have
  the boolean subset methods automatically created.

= Other Improvements

* If Model#refresh can't find the related row, Sequel now raises a
  Sequel::NoExistingObject exception instead of a generic
  Sequel::Error exception.

* In the csv_serializer plugin, when calling #to_csv on a model class
  or dataset, instead of using #[] to access data, #send is used to
  call methods. This is more similar to other plugins as well as
  Model#to_csv.

* The list plugin now works better with the auto_validations plugin,
  or any other time there is a validation on the position column.

= Backwards Compatibility

* The change to the csv_serializer plugin can change results if you
  are overriding any of the column accessor methods. It can also
  break existing code if one of the columns being used isn't defined
  as a method or the method requires more than one argument.

sequel-5.63.0/doc/release_notes/4.29.0.txt

= New Features

* A uuid plugin has been added. This plugin will automatically create
  a uuid for newly created model objects.

    Model.plugin :uuid
    Model.create.uuid # => some UUID

* Model#json_serializer_opts has been added to the json_serializer
  plugin, allowing you to override the JSON serialization options on a
  per instance basis without passing the options directly to
  Model#to_json. This is useful if you are including the model
  instance inside another datastructure that will be serialized to
  JSON.

    obj.json_serializer_opts(:root => true)
    [obj].to_json
    # => '[{"obj":{"id":1,"name":"Foo"}}]'

= Other Improvements

* The Database#transaction :retry_on option now works when using
  savepoints.

* Calling Database#table_exists? inside a transaction will now use a
  savepoint if the database supports it, so that if the table doesn't
  exist, it will not affect the state of the transaction.

* Blobs can now be used as bound variables in the oracle adapter.

* The sqlanywhere adapter now works with database sharding.

* The Dataset#full_text_search :rank option has been fixed to order by
  rank descending instead of ascending.

* External adapters that do not support INSERT with DEFAULT VALUES can
  now override Dataset#insert_empty_columns_values to set the columns
  and values to use for an empty INSERT.

* External adapters can now implement Dataset#date_add_sql_append to
  integrate with the date_arithmetic extension.

sequel-5.63.0/doc/release_notes/4.3.0.txt

= New Features

* The tree and rcte_tree plugins now support composite keys.

* An error_sql Database extension has been added. This extension adds
  the DatabaseError#sql method, which should return the database query
  that caused the error.
  This is useful for drivers that don't include the SQL used as part
  of the error message.

= Other Improvements

* Empty blobs are now literalized correctly on MySQL.

* Empty arrays are now literalized correctly on PostgreSQL <8.4.

* In the pagination extension, Dataset#page_count is now 1 even if the
  dataset is empty. This fixes issues with last_page? and page_range
  returning bad values for empty datasets.

* In the pagination extension, calling Dataset#each_page without a
  block now returns an Enumerator.

* Dataset#qualify and Sequel.delay now work together, qualifying the
  object returned by the delayed evaluation.

* Migrator.migrator_class is now a public method.

* The PostgreSQL citext type is now recognized as a string.

* Another disconnect error is now recognized in the jdbc/as400
  adapter.

* Guides about using and creating Sequel extensions and model plugins
  have been added.

= Backwards Compatibility

* If you were expecting Dataset#page_count on an empty paginated
  dataset to return 0, you need to update your code.

sequel-5.63.0/doc/release_notes/4.30.0.txt

= New Features

* Overriding the :limit and :eager_limit_strategy association options
  can now be done on a per-call basis when eager loading, by using an
  eager block callback and setting the :eager_limit or
  :eager_limit_strategy dataset options. Example:

    Album.eager(:tracks=>proc{|ds| ds.clone(:eager_limit=>5)}).all

* Dataset#insert_conflict and #insert_ignore have been added on
  SQLite, adding support for the INSERT OR ... SQL syntax:

    DB[:table].insert_ignore.insert(:a=>1, :b=>2)
    # INSERT OR IGNORE INTO TABLE (a, b) VALUES (1, 2)

    DB[:table].insert_conflict(:replace).insert(:a=>1, :b=>2)
    # INSERT OR REPLACE INTO TABLE (a, b) VALUES (1, 2)

* An identifier_columns plugin has been added, which allows
  Sequel::Model#save to work when column names contain double
  underscores.

= Other Improvements

* IPv6 addresses can now be used in connection URLs when using ruby
  1.9.3+.

* The :db_type entries in column schema hashes now include sizes for
  string and decimal types on DB2 and when using the jdbc adapter's
  generic schema parsing.

* Database#row_type in the pg_row extension now handles different
  formats of specifying schema qualified types. So a row type
  registered via :schema__type can be found using
  Sequel.qualify(:schema, :type).

* Another disconnect error is recognized in the tinytds adapter.

sequel-5.63.0/doc/release_notes/4.31.0.txt

= Improvements

* Sequel now works with ruby 2.3's --enable-frozen-string-literal, and
  all of the library files are set to use frozen string literals by
  default.

  A couple adapters and extensions depend on libraries that have
  issues with frozen string literals. Pull requests have been sent to
  each of those dependencies.

* The migrators will now raise an exception if a migration file
  contains no migrations or more than one migration.

* The jdbc/postgresql adapter now supports using PostgreSQL specific
  types in bound variables. Note that the current version of
  jdbc-postgres (9.4.1204) has regressions that affect this; users who
  need this support should stick with jdbc-postgres 9.4.1200 or below.

* The jdbc/postgresql adapter now works around a regression in Java
  method lookup in JRuby 9.0.5.0.

* The setter methods added by the association_pks plugin now do type
  casting immediately, instead of right before the data will be used.
  This makes them more similar to column setter methods, and ensures
  that future calls to the getters that use cached values will return
  correctly typecast data.

* The PostgreSQL array parser in the pg_array extension now handles
  arrays with explicit bounds. The explicit bounds are ignored, so
  such values do not round trip, and there is currently no support for
  creating arrays with explicit bounds.

* Creating a table with a simple non-incrementing primary key and a
  self-referential foreign key now works correctly on MySQL:

    DB.create_table!(:table) do
      Integer :id, :primary_key=>true
      foreign_key :fk, :table
    end

* Database#disconnect in the oracle adapter now works correctly on
  more recent versions of oci8 where #logoff can raise OCIException
  instead of OCIInvalidHandle.

= Backwards Compatibility

* The pg_array extension no longer defines
  Sequel::Postgres::PGArray::JSONCreator. This should only affect
  backwards compatibility if you were accessing the constant directly.
  The :parser option to Sequel::Postgres::PGArray.register is also no
  longer respected, but that should not affect backwards
  compatibility.

* The Sequel::Model#convert_cpk_array private method that was added by
  the association_pks plugin has been removed.
  Sequel::Model#convert_pk_array handles both simple and composite
  primary keys now.

sequel-5.63.0/doc/release_notes/4.32.0.txt

= New Features

* A no_auto_literal_strings extension has been added, which removes
  the automatic usage of strings in filter arguments as literal SQL
  code. By default, if you do:

    DB[:albums].where("name > 'N'")

  Sequel will treat "name > 'N'" as SQL code. However, this makes it
  much easier to introduce SQL injection:

    # SQL Injection vulnerability in default Sequel
    DB[:albums].where("name > '#{params[:letter]}'")

  Sequel does support using placeholders when using literal strings:

    # Safe in default Sequel
    DB[:albums].where("name > ?", params[:letter])

  However, if you forget to use placeholders, you can end up with SQL
  injection. Accidental usage of filter strings derived from user
  input as literal SQL code is probably the most common SQL injection
  vector in applications using Sequel.

  With the no_auto_literal_strings extension, passing a plain string
  as the first or only argument to a filter method raises an
  exception. If you want to use literal SQL code, you have to do so
  explicitly:

    DB[:albums].where(Sequel.lit("name > 'N'"))

  You can also specify placeholders when using Sequel.lit:

    DB[:albums].where(Sequel.lit("name > ?", params[:letter]))

  Note that in many cases, you can avoid using literal SQL strings
  completely:

    DB[:albums].where{|v| v.name > params[:letter]}

* one_through_one associations now support a setter method:

    Foo.one_through_one :bar

    foo = Foo[1]
    foo.bar = Bar[2]
    foo.bar = nil

  This will check the current entry in the join table, and based on
  the argument and the current entry, run a DELETE, INSERT, or UPDATE
  query, or take no action if the join table is already in the correct
  state.

* Model.default_association_options has been added, which supports
  default options for all future associations. You can use this to
  do:

    Model.default_association_options = {:read_only=>true}

  which makes associations not create modification methods by default.
  You could still create the modification methods by passing
  :read_only=>false when creating the association.
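  A minimal sketch of combining the default with a per-association
  override (the model and association names are illustrative):

    Sequel::Model.default_association_options = {:read_only=>true}

    class Artist < Sequel::Model
      one_to_many :albums                    # read-only: no add_album,
                                             # remove_album, etc.
      one_to_many :tours, :read_only=>false  # opts back in to the
                                             # modification methods
    end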
* The tactical_eager_loading plugin now supports two additional
  options when calling an association method: :eager and
  :eager_reload. Example:

    artist = Artist.all.first

    # Loads all albums for all of the artists,
    # and all tracks for all of those albums
    artist.albums(:eager=>:tracks)

    # Reload the albums association for all artists
    artist.albums(:eager_reload=>true)

  You can also use the :eager option for an eager loading callback:

    # Eagerly load the albums with names starting with A-M
    artist.albums(:eager=>proc{|ds| ds.where{name < 'N'}})

* The association_pks plugin now supports an :association_pks_nil
  association option in the association_pks setter, for determining
  how nil values should be handled.

  In Sequel <4.31.0, if you provided nil, it would either raise an
  exception immediately if :delay_pks was not set, or on saving if
  :delay_pks was set.

  In Sequel 4.31.0, if :delay_pks was not set, it would remove all
  associated rows. If :delay_pks was set, it would do nothing.

  You can now set :association_pks_nil=>:remove to remove all
  associated values on nil, or :association_pks_nil=>:ignore to ignore
  a nil value passed to the method. Without :association_pks_nil set,
  an exception will be raised.

* Dataset#delete_from has been added on MySQL, allowing deletions from
  multiple tables in a single query:

    DB[:a].join(:b, :a_id=>:id).delete_from(:a, :b).delete
    # DELETE a, b FROM a INNER JOIN b ON (b.a_id = a.id)

* The JDBC schema parser now includes a :remarks entry for each
  column, which contains comments on the column.

= Other Improvements

* The setter method added by the association_pks plugin now handles
  the empty array correctly when :delay_pks is set. Previously, if
  the empty array was passed, Sequel made no modifications. Sequel
  now correctly removes all associated values if an empty array is
  passed.

* The eager_each plugin now handles eager loading when using
  Dataset#first and related methods. Previously, the behavior was
  unspecified. In Sequel <4.27.0 Dataset#first did eager loading
  correctly in the eager case, but incorrectly in the eager_graph
  case. In Sequel 4.27.0-4.31.0, it did not do eager loading in
  either case.

* The tactical_eager_loading plugin will not automatically eager load
  if passing a proc or block to an association method, since the proc
  or block could be specific to the receiver.

* Sequel now uses a mutex to synchronize access to the association
  cache on MRI, as it does on other ruby implementations.

= Backwards Compatibility

* See above for changes in eager_each and association_pks plugin
  behavior.

sequel-5.63.0/doc/release_notes/4.33.0.txt

= New Features

* A Sequel::Model.require_valid_table accessor has been added. This
  setting is false for backwards compatibility, but if set to true,
  will raise an error if you try to create a model class where an
  invalid table name is used or the schema or columns cannot be
  determined. This makes it easier to catch bugs, as things will fail
  fast, but it means that you must change code like:

    class Foo < Sequel::Model
      set_dataset :my_foos
    end

  to:

    class Foo < Sequel::Model(:my_foos)
    end

  as otherwise Foo will attempt to use the foos table by default when
  creating the class, which will raise an error as it is not the
  correct table name.

* Sequel::Database#transaction now supports a :savepoint=>:only
  option, which will create a savepoint if already inside a
  transaction, but will yield without creating a transaction if not
  inside a transaction.
The use case for this is when you are running code that may raise an exception, and you don't want to invalidate the current transaction state. = Other Improvements * The graph_each extension now splits results into subhashes when using Sequel::Dataset#first, as it did before Sequel 4.27.0. * On PostgreSQL, Dataset#insert_conflict now accepts an array of columns as the value for the :target option. * You can now pass a Sequel::SQL::Identifier or a Sequel::SQL::QualifiedIdentifier as the table argument when creating a foreign key. Previously, only symbols were supported, and using other values required specifying the :table option. So this will now work to reference a table that includes a double underscore: foreign_key :foo_id, Sequel.identifier(:fo__oo) * Creating model classes inside a transaction on PostgreSQL where the implicit table name isn't correct no longer causes the transaction to fail. Similar issues were also fixed in the boolean_readers, boolean_subsets, and class_table_inheritance plugins. * On PostgreSQL, you can now use the :qualify=>true option in the schema dumper, to dump using schema-qualified table names. * On Microsoft SQL Server, the set_column_allow_null and set_column_not_null alter table methods now work on varchar(max), text, and similar columns. * On Oracle, Sequel::Database#sequence_for_table now returns nil if given a table that doesn't exist or that the user does not have access to. * Passing arbitrary objects to a model association method now indicates that the association should be reloaded, which used to work but was broken in Sequel 4.32.0. * It is now possible to raise Sequel::ValidationFailed and Sequel::HookFailed without an argument. = Backwards Compatibility * Sequel::Model no longer swallows many errors when subclassing or setting datasets. While this should hopefully not affect backwards compatibility, it may break things where the methods were raising exceptions. If this does break backwards compatibility, it is most likely because it is no longer hiding another bug that should be fixed. Specific changes include: * Model.inherited no longer rescues exceptions raised by set_dataset * When subclassing a model that has a dataset, the columns and schema are just copied from the superclass * Only Sequel::Error is rescued in calls to columns and schema; previously, StandardError was rescued. * The Sequel.firebird and Sequel.informix adapter methods have been removed, as they are no longer needed since the firebird and informix adapters were removed a few versions back. sequel-5.63.0/doc/release_notes/4.34.0.txt000066400000000000000000000062741434214120600177730ustar00rootroot00000000000000= New Features * A duplicate_columns_handler extension has been added, for printing a warning or raising an exception if a dataset returns multiple columns with the same name. You can set this globally for the Database: DB.extension :duplicate_columns_handler DB.opts[:on_duplicate_columns] = :warn DB.opts[:on_duplicate_columns] = proc do |columns| columns.include?(:foo) ? :raise : :ignore end or for specific datasets: ds = DB[:table].extension(:duplicate_columns_handler) ds = ds.on_duplicate_columns(:raise) ds = ds.on_duplicate_columns do |columns| columns.include?(:foo) ? :raise : :ignore end This makes it easier to detect when duplicate columns are returned, which in some cases can cause undesired behavior, such as the values for later columns of the same name overwriting values for earlier columns.
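A sketch of the kind of query this catches (table and column names hypothetical), where a join returns two columns named id and the later value would otherwise silently overwrite the earlier one:

  DB.extension :duplicate_columns_handler
  DB.opts[:on_duplicate_columns] = :raise

  # albums.id and artists.id are both in the result set, so this
  # raises instead of silently returning ambiguous values
  DB[:albums].join(:artists, :id=>:artist_id).all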
* The Dataset#to_hash, #to_hash_groups, #select_hash, and #select_hash_groups methods now take an options hash as a third argument. This options hash can now contain a :hash option, which specifies the object in which the resulting values should be placed. You can use this to have the values inserted into a custom hash, or another object responding to #[] and #[]=. * A validates_operator validation has been added to the validation_helpers plugin: class Example < Sequel::Model def validate super validates_operator(:>, 3, :column1) validates_operator(:<=, 4, [:column2, :column3]) end end * The pg_range extension now adds a #register_range_type Database method, supporting per-Database custom range types: DB.register_range_type('timerange') * The dataset_associations plugin now supports a :dataset_associations_join association option on associations that use joined datasets. This option will have the datasets returned by the dataset association methods join to the same tables that would be joined when retrieving the associated objects, allowing selected columns, orders, and filters that reference columns in the joined tables to work correctly. * The Database :preconnect option can now be set to :concurrently, which will create the connections in separate threads. This can significantly speed up preconnection in high-latency environments. * The Database :name option is now supported, holding an arbitrary name for the database. Currently, it is only used in PoolTimeout exception messages, but it may be used in other places in the future. = Other Improvements * The prepared_statements_safe plugin now works correctly when using CURRENT_DATE and CURRENT_TIMESTAMP default values for columns. * Sequel now recognizes an additional unique constraint violation on Microsoft SQL Server. * PoolTimeout exception messages now include the server/shard to which the connection was attempted when using the sharded threaded connection pool. = Backwards Compatibility * Users of sequel_pg should upgrade to 1.6.17, as older versions of sequel_pg may not work with Sequel 4.34.0+. * Any custom extensions that override Dataset#to_hash, #to_hash_groups, #select_hash, and #select_hash_groups need to be modified to add support for accepting the options hash. sequel-5.63.0/doc/release_notes/4.35.0.txt000066400000000000000000000111271434214120600177670ustar00rootroot00000000000000= Forwards Compatibility * Ruby 2.4 will unify the Fixnum and Bignum classes into the Integer class, making both Fixnum and Bignum references to Integer. This will have the effect of changing the behavior of Sequel migrations that use a reference to the Bignum class. For example, code like this will change behavior in ruby 2.4: DB.create_table(:table) do add_column :column, Bignum end # or: DB.get(Sequel.cast('1', Bignum)) as this references the Bignum class. On ruby <2.4, this will create a 64-bit integer column; on ruby 2.4+, it will create a 32-bit integer column. Code like this will be fine and does not need changing: DB.create_table(:table) do Bignum :column end as this calls the Bignum method. Sequel now supports the :Bignum symbol as a generic type, so you can now switch references to the Bignum class to the :Bignum symbol whenever you want a generic 64-bit integer type: DB.create_table(:table) do add_column :column, :Bignum end # or: DB.get(Sequel.cast('1', :Bignum)) Note that you should only do this if you are using Sequel 4.35.0+, as previous versions of Sequel will treat the :Bignum symbol as a database-specific type named Bignum.
= New Features * A Sequel::Database#log_connection_info accessor has been added. If set to true, this includes connection information in Sequel's query log. In threaded connection pools (the default), this makes it simple to see which connection is executing which queries. DB.log_connection_info = true DB.get(1) # Logged: (0.000004s) (conn: 9713390226040) SELECT 1 AS v LIMIT 1 * Sequel::Model#lock! now supports an optional lock style, instead of always using FOR UPDATE (which is still the default): Example.first.lock!('FOR NO KEY UPDATE') #=> SELECT * FROM examples WHERE id = 1 FOR NO KEY UPDATE LIMIT 1 * Sequel::Dataset#skip_locked has been added, which skips locked rows when returning query results. This is useful whenever you are implementing a queue or similar data structure. Currently, this is supported on PostgreSQL 9.5+, Oracle, and Microsoft SQL Server. * An sql_comments extension has been added for setting SQL comments on queries: ds = DB[:table].comment("Some Comment").all # SELECT * FROM table -- Some Comment # All consecutive whitespace in the comment is replaced by a single space, and the comment ends in a newline so that it works correctly in subqueries. This extension is mostly useful if you are doing analysis of your database server query log and want to include higher level information about the query in the comment. * A server_logging extension has been added, which includes server/shard information in the query log, if connection info is being logged. DB.extension :server_logging DB.log_connection_info = true DB.get(1) # Logged: (0.000004s) (conn: 9712828677240, server: read_only) # SELECT 1 AS v LIMIT 1 DB[:a].insert(:b=>1) # Logged: (0.000003s) (conn: 9712534040260, server: default) # INSERT INTO a (b) VALUES (1) * On PostgreSQL, Database#full_text_search now supports a :headline option for adding an extract of the matched text to the SELECT list. * Sequel::Postgres::PGRange#cover? has been added to the pg_range extension, which works with empty, unbounded, and exclusive beginning ranges. Previously, using #cover? with these ranges would raise an exception. Note that cover? is now always defined, where previously it was only defined on ruby 1.9+. = Other Improvements * The jdbc adapters now work correctly on JRuby 9.1. Previously, some parts were broken on JRuby 9.1 due to frozen string literal issues. * Sequel::Dataset#to_hash and #to_hash_groups now work correctly for model datasets doing eager loading. * Using Sequel::Database#transaction with the :rollback=>:always option now automatically uses a savepoint if supported when run inside another transaction. If savepoints are not supported, using :rollback=>:always inside a transaction will now raise an exception. * The delay_add_association plugin now handles hashes and primary keys passed to the add_* association methods. * The json_serializer :include option now works correctly when using *_to_many associations with the association_proxies plugin. * The schema_dumper extension now recognizes bool as a boolean type, for consistency with the Database schema parser. = Backwards Compatibility * Custom adapters should switch from using log_yield to log_connection_yield so that they work correctly when using log_connection_info. sequel-5.63.0/doc/release_notes/4.36.0.txt000066400000000000000000000105711434214120600177720ustar00rootroot00000000000000= New Features * Sequel::Model::Model() has been added, which allows for Sequel::Model() like behavior where the base class used is a subclass of Sequel::Model.
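As a minimal sketch of the idea (class names hypothetical, assuming Model() is called on the subclass that should serve as the base):

  class BaseModel < Sequel::Model; end
  BaseModel.plugin :timestamps

  # Album's superclass is an anonymous subclass of BaseModel backed
  # by the albums table, so Album inherits the timestamps plugin
  class Album < BaseModel::Model(:albums); end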
To make it easier to use, Sequel::Model.def_Model has also been added, which takes a module and adds a Model() method to the module that calls Model() on the receiver. A :class_namespace association option has been added to make it possible to set a default namespace for the :class option if given as a symbol or string. Sequel::Model.cache_anonymous_models has been added and controls whether to cache anonymous model subclasses created by Sequel::Model::Model() on a per-class basis. These changes are designed to make it easier to use namespaced models, for example: module Foo Model = Class.new(Sequel::Model) Model.def_Model(self) DB = Model.db = Sequel.connect(ENV['FOO_DATABASE_URL']) Model.plugin :prepared_statements Model.default_association_options[:class_namespace] = 'Foo' class Bar < Model # Uses Foo::DB[:bars] as dataset # Implicitly uses Foo::Baz as associated class one_to_many :bazes # Uses Foo::Baz due to :class_namespace option one_to_many :oldest_bazes, :class=>:Baz, :order=>:id end class Baz < Model(:my_baz) # Uses Foo::DB[:my_baz] as dataset # Implicitly uses Foo::Bar as associated class one_to_many :bars # Uses Foo::Bar due to :class_namespace option one_to_many :oldest_bars, :class=>:Bar, :order=>:id end end * A string_agg extension has been added for aggregate string concatenation support on PostgreSQL 9+, SQLAnywhere 12+, Oracle 11g+, DB2 9.7+, MySQL, HSQLDB, H2, and CUBRID: DB.extension :string_agg ds = DB[:table] ds.get(Sequel.string_agg(:c)) # ',' default separator ds.get(Sequel.string_agg(:c, ' - ')) # custom separator ds.get(Sequel.string_agg(:c).order(:bar)) # force order ds.get(Sequel.string_agg(:c).distinct) # remove duplicates * A connection_expiration extension has been added, for automatically removing connections from the connection pool after they have been open for a given amount of time (4 hours by default). * Support for <, <=, >, and >= operator validations when using integer and string arguments has been added to the constraint_validations extension and plugin. * Sequel::SQL::Function#order has been added to support ordered aggregate functions: Sequel.function(:foo, :bar).order(:baz) # foo(bar ORDER BY baz) = Other Improvements * The validates_operator validation in validation_helpers now considers nil values as invalid unless :allow_nil or a similar option is used. Previously, using validates_operator with a nil value would probably raise a NoMethodError. This makes validates_operator more similar to other validations. * The threaded connection pools no longer hold the pool mutex when disconnecting connections, which is useful if the driver blocks when disconnecting connections. * The connection_validator extension no longer holds a reference to connections that have been disconnected. * The connection_validator extension no longer overwrites the connection_validation_timeout if loaded a second time. * Sequel now closes cursors as soon as it is done using them in the oracle adapter, instead of waiting for GC to clean them up. * Sequel now handles disconnect errors that occur when literalizing strings in the mysql2 and postgres adapters. = Backwards Compatibility * Using the Bignum class as a generic type is now deprecated. As announced in the 4.35.0 release notes, ruby 2.4 is unifying the Fixnum and Bignum classes into Integer, which results in the behavior of the Bignum class changing. 4.35.0 added support for using the :Bignum symbol as a generic 64-bit integer type, and Sequel users now need to switch to that to avoid the deprecation warning.
Sequel 4.41.0 (to be released in December) will drop support for using the Bignum class as a generic type. This is being done before the release of ruby 2.4 to hopefully make it unlikely that users will be subject to behavior changes when upgrading ruby versions. Related to this change, external adapters need to switch from overriding Database#type_literal_generic_bignum to Database#type_literal_generic_bignum_symbol. sequel-5.63.0/doc/release_notes/4.37.0.txt000066400000000000000000000035171434214120600177730ustar00rootroot00000000000000= New Features * Database#values has been added on SQLite 3.8.3+, operating similarly to the support on PostgreSQL: DB.values([[1, 2], [3, 4]]).select_map([:column1, :column2]) # => [[1, 2], [3, 4]] * Regular expressions in dataset filters are now supported on Oracle 10g+: DB[:t].where(:c=>/re/) # SELECT * FROM "T" WHERE REGEXP_LIKE("C",'re') = Other Improvements * Sequel now supports the use of native prepared statements and bound variables in the mysql2 adapter, when mysql2 0.4+ is used. Previously, the mysql2 adapter supported database prepared statements, but variables were always literalized. That is still supported when mysql2 <0.4 is used. * The connection pool now removes connections if it detects a disconnect error that is not raised as a Sequel::DatabaseDisconnectError. Such exceptions are reraised without converting them to Sequel::DatabaseDisconnectError, but the related connection is now removed from the pool. * The reversible migration support now handles add_constraint with an options hash as the first argument. * ASTTransformer now handles Sequel.extract, allowing Dataset#qualify and other uses of ASTTransformer to work with such values. * The create_view :columns option is now supported on SQLite 3.9.0+. * An additional disconnect error is now recognized in the postgres adapter. * A frozen string literal issue has been fixed when multiple different database connection approaches have failed in the jdbc adapter. = Backwards Compatibility * External database adapters need to make sure that Database#database_error_classes returns a valid result if called during Database#initialize. If you have an external adapter where one of the error classes depends on an argument given when connecting (such as the connection string), you may have to make some changes. sequel-5.63.0/doc/release_notes/4.38.0.txt000066400000000000000000000056001434214120600177710ustar00rootroot00000000000000= New Features * Sequel::SQL::NumericMethods#coerce has been added, which adds support for ruby's coercion protocol when performing numeric operations. Previously, Sequel supported code like: Sequel.expr{a - 1} This is because a in this case returns a Sequel::SQL::Identifier, which defines #- to return a Sequel::SQL::NumericExpression. By supporting #coerce, the following code now also works: Sequel.expr{1 - a} This is because Integer#- calls #coerce on the argument if it is defined (ruby's coercion protocol). Previously, you had to handle this differently, using something like: Sequel.expr(1) - a # or Sequel.-(1, a) * Sequel now supports the ** operator for exponentiation on expressions, similar to the +, -, *, and / operators. Sequel uses the database power function to implement this by default on the databases that support it (most of them). On Access, it uses the ^ operator; on Derby it is emulated using a combination of exp/ln (with some loss of precision).
SQLite doesn't support a power function at all, but Sequel emulates it using multiplication for known integer exponents. * Sequel::SQLTime.date= has been added, which allows you to set the date used for Sequel::SQLTime instances. Sequel::SQLTime is a subclass of Time that is literalized using only the time components, and is the ruby class used to store values of database time columns on most adapters. Sequel::SQLTime defaults to using the current date, but you can now set a specific date, for more consistency with some drivers (Mysql2 uses 2000-01-01, tiny_tds uses 1900-01-01). * The postgres adapter now supports a :driver_options option when using the pg driver, which is passed directly to pg. This can be used to specify a client SSL certificate or to specify the certificate authority root certificate when using :sslmode=>'verify-full'. = Other Improvements * Sequel no longer uses after_commit/rollback database hooks by default if the after_commit/after_rollback model methods are not overridden. This provides a performance speedup, but the main benefit is that it no longer causes memory issues when saving a large number of model instances in a single transaction, and it also works with prepared transactions/2 phase commit. You can still set use_after_commit_rollback= manually to force the after_commit/rollback setting. Note that Sequel 5 will move after_commit/rollback model hooks to a plugin, and the default and recommended approach will be to use the database after_commit/rollback hooks in the after_save or similar model hooks. = Backwards Compatibility * The Sequel::Model use_after_commit_rollback class and instance methods now return nil by default instead of true. nil now indicates the default behavior of checking whether the appropriate model hook has been defined, and only adding a database hook if so. sequel-5.63.0/doc/release_notes/4.39.0.txt000066400000000000000000000111301434214120600177650ustar00rootroot00000000000000= New Features * Sequel.[] has been added as an alias to Sequel.expr. This makes it a little easier to get Sequel-specific objects: Sequel[:table].* # "table".* Sequel[:table__column].as(:alias) # "table"."column" AS "alias" Sequel[:column] + 1 # ("column" + 1) * The timestamps plugin now supports an :allow_manual_update option. If this option is used, the timestamps plugin will not override the update timestamp when saving if the user has modified it since retrieving the object. * The touch plugin now also touches associations on create in addition to update and delete. * The IntegerMigrator now supports a :relative option, which will migrate that many migrations up (for positive numbers) or down (for negative numbers). * Database#rollback_checker has been added, which returns a callable that can be called later to determine whether the transaction ended up committing or rolling back. So if you may need to check transaction status at some future point, and don't need immediate action on rollback/commit, it is better to use a rollback checker than to add an after commit/rollback hook. 
rbc = nil DB.transaction do rbc = DB.rollback_checker rbc.call #=> nil end rbc.call # => false DB.transaction(:rollback=>:always) do rbc = DB.rollback_checker end rbc.call # => true * The add_column schema method now supports an :if_not_exists option on PostgreSQL 9.6+, which will only add the column if it does not already exist: DB.add_column :t, :c, Integer, :if_not_exists=>true # ALTER TABLE "t" ADD COLUMN IF NOT EXISTS "c" integer * The add_column schema method now supports an :after and :first option on MySQL to add the column after an existing column or as the first column: DB.add_column :t, :c, Integer, :first=>true # ALTER TABLE `t` ADD COLUMN `c` integer FIRST DB.add_column :t, :c1, Integer, :after=>:c2 # ALTER TABLE `t` ADD COLUMN `c1` integer AFTER `c2` * JSONBOp#insert has been added to the pg_json_ops extension, which supports the new jsonb_insert function added in PostgreSQL 9.6+: Sequel.pg_jsonb_op(:c).insert(%w'0 a', 'a'=>1) # jsonb_insert("c", ARRAY['0','a'], '{"a":1}'::jsonb, false) * Dataset#full_text_search on PostgreSQL now supports a :to_tsquery=>:phrase option, to enable the native phrase searching added in PostgreSQL 9.6+: DB[:t].full_text_search(:c, 'foo bar', :to_tsquery=>:phrase) # SELECT * FROM "t" # WHERE # (to_tsvector(CAST('simple' AS regconfig), (COALESCE("c", ''))) # @@ phraseto_tsquery(CAST('simple' AS regconfig), 'foo bar')) * Sequel::Database.set_shared_adapter_scheme has been added, allowing external adapters to add support for Sequel's mock adapter. External adapters should have a shared adapter requirable at sequel/adapters/shared/adapter_name, that uses the following format: # in sequel/adapters/shared/mydb module Sequel::MyDB Sequel::Database.set_shared_adapter_scheme :mydb, self def self.mock_adapter_setup(db) # Any mock-adapter specific setup to perform on the # given Database instance end module DatabaseMethods # methods for all Database objects using this adapter end module DatasetMethods # methods for all Dataset objects using this adapter end end = Other Improvements * The hook_class_methods plugin only adds a Database transaction hook if one of the after commit/rollback hook class methods is actually used. This means that loading the plugin no longer keeps all saved/deleted objects in memory until transaction commit. * The active_model plugin now uses a rollback checker instead of an after_rollback hook, so models that use the active_model plugin no longer store all saved model instances in memory until transaction commit. * When using the IntegerMigrator, attempting to migrate to a migration number above the maximum will now migrate to the latest version, and attempting to migrate to a migration number below 0 will now migrate all the way down. * The pg_interval extension now supports ActiveSupport::Duration objects that use week and hour parts (new in ActiveSupport 5). = Backwards Compatibility * The change to the touch plugin to touch associations on create could possibly affect existing behavior, so if you are using this plugin, you should test that this does not cause any problems. * External adapters that tried to add support for the mock adapter now need to update their code to use the new Sequel::Database.set_shared_adapter_scheme method. sequel-5.63.0/doc/release_notes/4.4.0.txt000066400000000000000000000065761434214120600177170ustar00rootroot00000000000000= New Features * Sequel now supports Sybase SQLAnywhere, via the sqlanywhere and jdbc/sqlanywhere adapters.
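A connection sketch (host, database, and credentials are hypothetical; the exact options depend on your SQLAnywhere client setup):

  DB = Sequel.connect(:adapter=>'sqlanywhere', :host=>'dbhost',
    :database=>'app_db', :user=>'dba', :password=>'secret')
  DB[:items].count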
* The filter by associations support now handles cases where the association has :conditions or a block (as long as the block does not rely on instance-specific behavior). This allows you to handle the following: Album.many_to_many :popular_tags, :class=>:Tag do |ds| ds.where{tags__popularity > 9000} end Album.where(:popular_tags=>[Tag[1], Tag[2]]) This will return all albums whose popular_tags would include at least one of those two tags. Previously, the block would be ignored, returning albums containing one of those tags even if the tags weren't popular. * A table_select plugin has been added that changes the default selection for models from * to table.*. This is useful for people who want ActiveRecord-like behavior instead of SQL-like behavior, where joining tables doesn't automatically include columns in the other table. This can fix issues where joining another table that has columns with the same name as columns in the model table without specifying an explicit selection results in model objects being returned where the values in the model object are the values from the joined table instead of the model table. * Dataset#offset has been added, for specifying offset separately from limit. Previously, this was possible via: ds.limit(nil, offset) but this is a friendlier API. * The jdbc adapter now has support for foreign key parsing. This is used if there is no specific support for the underlying database. * Foreign key parsing is now supported on Oracle. = Other Improvements * Association add_*/remove_*/remove_all_* methods for pg_array_to_many associations now work on unsaved model objects. * In the constraint_validations extension, deletes from the metadata table are now processed before inserts, so that dropping an existing constraint and readding a constraint with the same name now works correctly. * Cloning an association now copies the :eager_block option correctly from the source association if it was passed as the block to the source association method. * Cloning a cloned association now copies the block for the association. * The descendants method in the tree plugin no longer modifies an array it is iterating over. * The jdbc/postgresql adapter now supports PostgreSQL-specific types, with pretty much the same support as the postgres adapter. When using the pg_* extensions, the dataset will now handle the PostgreSQL types correctly and return instances of the correct Ruby classes (e.g. hstore is returned as Sequel::Postgres::HStore). You should no longer need to use the typecast_on_load or pg_typecast_on_load plugins when using model objects that use these types when using the jdbc/postgresql adapter. * Offset emulation on Oracle now handles cases where selected columns can't be ordered. * Offset emulation on DB2 no longer automatically orders on all columns if the dataset itself is unordered. * Types containing spaces are now returned correctly when parsing the schema in the oracle adapter. * Database#tables no longer returns tables in the recycle bin on Oracle. * add_foreign_key now works correctly on HSQLDB, by splitting the column addition and constraint addition into two separate statements. * add_primary_key now works correctly on H2. sequel-5.63.0/doc/release_notes/4.40.0.txt000066400000000000000000000150131434214120600177610ustar00rootroot00000000000000= New Features * A Sequel.split_symbols setting has been added. This setting is true by default, so there is no change to backwards compatibility by default.
However, users can now do: Sequel.split_symbols = false to disable the splitting of symbols. This will make Sequel no longer treat symbols with double or triple underscores as qualified or aliased identifiers, instead treating them as plain identifiers. It will also make Sequel no longer treat virtual row methods with double underscores as qualified identifiers. Examples: # Sequel.split_symbols = true :column # "column" :table__column # "table"."column" :column___alias # "column" AS "alias" :table__column___alias # "table"."column" AS "alias" Sequel.expr{table__column} # "table"."column" # Sequel.split_symbols = false :column # "column" :table__column # "table__column" :column___alias # "column___alias" :table__column___alias # "table__column___alias" Sequel.expr{table__column} # "table__column" Disabling symbol splitting can make things much easier if leading, trailing, double, or triple underscores are used in identifiers in your database. Disabling symbol splitting makes Sequel simpler, even if it does make it slightly less easy to create qualified and aliased identifiers. It is possible that the symbol splitting will be disabled by default starting in Sequel 5. Note that due to Database symbol literal caching, you should not change the Sequel.split_symbols setting after creating a Database instance. * SQL::Identifier#[] and SQL::QualifiedIdentifier#[] have been added for creating qualified identifiers. This makes it easier and more natural to create qualified identifiers from existing identifiers. Previously, you could do: Sequel[:column].qualify(:table) You can now use the more natural: Sequel[:table][:column] This can also be used in virtual rows: Sequel.expr{table[:column]} This offers an easy way to create qualified identifiers when symbol splitting has been disabled. * A symbol_aref extension has been added, allowing the use of Symbol#[] to create qualified identifiers if passed a Symbol, SQL::Identifier, or SQL::QualifiedIdentifier. This doesn't break any existing ruby behavior, as ruby currently raises an exception in such cases. Example: :table[:column] # "table"."column" This extension can make it easier to create qualified identifiers if symbol splitting is disabled. A symbol_aref_refinement extension has also been added, which adds a refinement version of the extension that can be enabled via: using Sequel::SymbolAref * A symbol_as extension has been added, which adds the Symbol#as method to create aliased identifiers. This was previously part of the core extensions, but has been separated so it can be included by itself. Example: :column.as(:alias) # "column" AS "alias" This extension can make it easier to create aliased identifiers if symbol splitting is disabled. A symbol_as_refinement extension has also been added, which adds a refinement version of the extension that can be enabled via: using Sequel::SymbolAs * An s extension has been added, which adds the Sequel::S module, containing a private #S method that calls Sequel.expr.
You can include this module in any module or class where you would like the S method to be available: class Album < Sequel::Model extend Sequel::S one_to_many :tracks, :order=>S(:number).desc end You can include this in Object if you want the S method to be available globally: Object.send(:include, Sequel::S) Sequel::S also works if it is used as a refinement, adding the S method to Object while the refinement is active: using Sequel::S This extension can make it easier to create qualified and aliased identifiers if symbol splitting is disabled: S(:table)[:column] S(:column).as(:alias) * Dataset#insert_conflict on PostgreSQL now supports a :conflict_where option, allowing for the handling of insert conflicts when using a partial unique index: DB[:table].insert_conflict(:target=>:a, :conflict_where=>{:c=>true}).insert(:a=>1, :b=>2) # INSERT INTO TABLE (a, b) VALUES (1, 2) # ON CONFLICT (a) WHERE (c IS TRUE) DO NOTHING = Other Improvements * Sequel no longer attempts to combine arguments for non-associative operators, as doing so leads to invalid code in cases such as: Sequel.expr{column1 - (column2 - 1)} * Sequel now automatically adds NOT NULL constraints on columns when adding a primary key constraint on the columns, if the database doesn't handle that situation correctly. * Database#rollback_checker now returns a thread-safe object. * SQL::QualifiedIdentifier#initialize now converts SQL::Identifier arguments to strings, fixing usage of such objects in the schema methods. * The prepared_statements plugin now correctly handles lookup by primary key on models with joined datasets. * The dataset_associations plugin now handles many_through_many and one_through_many associations that use a single join table. Note there is no reason to create such associations, as many_to_many and one_through_one associations will work for such cases. * The insert_returning_select plugin now handles cases where the model doesn't have a valid dataset, fixing usage with the lazy_attributes and dataset_associations plugins, and potentially other plugins. * The column_select plugin no longer raises an exception if the model's table does not exist. * The class_table_inheritance plugin now works when the prepared_statements plugin is also used. * Some adapters now avoid thread-safety issues during loading on ruby implementations without a GVL by avoiding the modification of shared datastructures. * When using Database#tables with the :qualify=>true option on PostgreSQL, table names with double or triple underscores are now handled correctly. = Backwards Compatibility * The following Dataset constants are now frozen: NON_SQL_OPTIONS, ACTION_METHODS, QUERY_METHODS, CONDITIONED_JOIN_TYPES, UNCONDITIONED_JOIN_TYPES, and JOIN_METHODS. Of these, NON_SQL_OPTIONS was previously modified in a non-thread-safe manner by some adapters. External adapters should switch to having the adapter's dataset non_sql_options method return an array of options that do not affect the SELECT SQL for the adapter's datasets, rather than modifying NON_SQL_OPTIONS. sequel-5.63.0/doc/release_notes/4.41.0.txt000066400000000000000000000065171434214120600177730ustar00rootroot00000000000000= New Features * Dataset#with_* methods have been added as equivalents for a few Dataset#*= methods, but instead of modifying the receiver, they return a modified copy, similar to the dataset query methods. 
Specific methods added: with_extend :: Extends clone with given modules with_row_proc :: Modifies row_proc in clone with_quote_identifiers :: Modifies quote_identifiers setting in clone with_identifier_input_method :: Modifies identifier_input_method setting in clone with_identifier_output_method :: Modifies identifier_output_method setting in clone Similarly, on Microsoft SQL Server, a with_mssql_unicode_strings method has been added, which returns a clone with the mssql_unicode_strings setting modified. * On DB2, Sequel now supports an :offset_strategy Database option, which can be set to :limit_offset for "LIMIT X OFFSET Y" or :offset_fetch for "OFFSET Y FETCH FIRST X ROWS ONLY". Depending on what version of DB2 is used and how DB2 is configured, it's possible one of these strategies will work. For backwards compatibility, the current default is still to emulate offsets using the ROW_NUMBER window function. * In the json_serializer plugin, you can now use a Sequel::SQL::AliasedExpression instance as an association name value, which allows you to rename the association in the resulting JSON: album.to_json(:include=>{Sequel.as(:artist, :s)=>{:only=>:name}}) # => '{"id":1,"name":"RF","artist_id":2,"s":{"name":"YJM"}}' = Other Improvements * The association dataset methods now correctly handle cases where one of the keys is nil. Previously, they would incorrectly use an IS NULL predicate in such cases. Now, they use a false predicate. * The hook_class_methods plugin handling of commit hooks has been fixed. The implementation of commit hooks (but not rollback hooks) was broken in hook_class_methods starting in 4.39.0 due to changes to avoid keeping references to all model instances until the transaction was committed or rolled back. * Using the Fixnum schema method no longer raises a warning on ruby 2.4+, as it now uses the Integer class instead of the Fixnum constant. * The ado adapter has been greatly improved. It now avoids memory leaks, has much better type handling, and passes almost all specs. Note that the ado adapter's behavior can change depending on the version of ruby in use; try to use ruby 2.2+ for best compatibility. * Dataset#graph no longer mutates the receiver. Previously, it set an empty hash as the :graph option in the receiver, which was unintentional and not desired. * Pure java exceptions that don't support the message= method are now handled properly when reraising the exception on connection errors in the jdbc adapter. = Backwards Compatibility * Support for using the Bignum constant as a generic type has been removed, as was preannounced in the 4.36.0 release notes. Users should switch to using the :Bignum symbol if they haven't already. * Users of the ado adapter may need to update their code now that the ado adapter correctly handles most types. * The spec_*_w rake tasks in the repository now require ruby 2.4+ and use the warning library for filtering warnings, instead of trying to filter warnings with egrep. sequel-5.63.0/doc/release_notes/4.42.0.txt000066400000000000000000000217461434214120600177730ustar00rootroot00000000000000= New Features * There have been numerous improvements this release related to frozen datasets. Frozen datasets now work in almost all cases, except when calling a dataset mutation method. When using ruby 2.4, Sequel uses the new support for clone(:freeze=>false) to actually freeze datasets while allowing them to copy singleton classes/extended modules from the dataset calling clone.
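A sketch of the resulting behavior (table name hypothetical):

  ds = DB[:albums].freeze
  ds.where(:id=>1).all      # fine, query methods return modified copies
  ds.row_proc = proc{|h| h} # raises, since mutation methods modify the receiver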
On earlier versions of ruby, the dataset opts are now frozen, preventing more types of accidental modification. The dataset internals were refactored to reduce the number of instance variables. Now, datasets store all of their state in opts. Additionally, all datasets now use a thread-safe cache for storing cached state such as the dataset's columns. Previously, accessing/setting the columns was not thread-safe, unless the ruby interpreter used thread-safe methods for instance variable getting/setting. Frozen datasets use this new cache to optimize repeated method calls, resulting in substantial performance speedups. This can include caching returned and/or intermediate datasets, SELECT and DELETE SQL generated, as well as internal objects designed to optimize the building of SQL strings with different arguments. Even for fairly simple datasets, this can result in up to 10x performance improvements for dataset methods that don't require database access, and up to 3x performance improvements for dataset methods that do require database access. * A freeze_datasets Database extension has been added which automatically freezes all datasets for the Database instance. This also enables dataset caching when creating datasets using Database#[] and #from using a single symbol, such as DB[:table_name]. In addition to speeding up the methods themselves, this also allows code such as: DB[:foo].for_update.first To run much faster by avoiding any dataset creation or SQL string building after the first call. The freeze_datasets extension makes #dup an alias of #clone, ensuring that all cloned datasets that were originally created by the Database instance are frozen. It is highly recommended that you start using the freeze_datasets extension in your applications using Sequel, as this extension will become the default and only behavior in Sequel 5. Unfrozen datasets and dataset mutation will not be supported in Sequel 5. * The dataset methods created by Model#subset and Model::DatasetModule#subset now cache the returned dataset if the current dataset is frozen, none of the arguments are Procs, and a block is not provided. This can result in up to a 3x performance improvement for method chains that use subsets, such as: ModelClass.subset1.subset2.subset3.first * Model::DatasetModule has had the following methods added to it: distinct, exclude, exclude_having, grep, group, group_and_count, group_append, having, limit, offset, order, order_append, order_prepend, select, select_all, select_append, select_group, where, and server. These methods create dataset methods that when called call the dataset method with the same name on the receiver. Example: class ModelClass < Sequel::Model dataset_module do select :with_id_and_name, :id, :name where :active, :active order :by_name, :name end end ModelClass.active.by_name.with_id_and_name.all # SELECT id, name FROM model_classes WHERE active ORDER BY name # Equivalent to: ModelClass. where(:active). order(:name). select(:id, :name). all In addition to being easier than defining the methods manually, this also enables caching of the datasets in most cases, so that the above method chain does not create any additional datasets after the first call. 
* Dataset#with_extend now accepts a block and will create a module with that block that will be used to extend the object, after any modules given as arguments have been applied: DB[:table].with_extend{def foo; 1 end}.foo => 1 * The identifier mangling support for datasets (identifier_input_method and identifier_output_method) has been moved to a identifier_mangling database extension, but it is still loaded by default. You can disable the loading of this extension by using the :identifier_mangling=>false Database option. Sequel 5 will stop loading of this extension by default, requiring you to load it manually via Database#extension if you need it. Sequel's default remains the same as before, to convert identifiers to uppercase on input and lowercase on output on databases that fold unquoted identifiers to uppercase (per the SQL standard), and to not mangle identifiers at all on databases that fold unquoted identifiers to lowercase (MySQL, PostgreSQL, SQLite). The identifier_mangling extension just allows you to change the default behavior. * On DB2, Dataset#with_convert_smallint_to_bool has been added, which returns a modified dataset with the convert_smallint_to_bool setting changed. Previously, chaging the smallint_to_bool setting required mutating a dataset. * The mock adapter now supports Dataset#with_{autoid,fetch,numrows}, allowing mocking of results when using frozen datasets. = Other Improvements * Using an eager load callback when eager loading a one_to_one association that uses an order or offset now works correctly on databases that do not support window functions. * Dataset#== and Dataset#hash are now faster as they don't need to generate SQL. As all internal state is now stored in the opts, it just considers the class, db, and opts. * The prepared statement/bound variable internals were heavily refactored to be simpler and more robust, to more easily support native prepared statements, and to work with frozen datasets. * When emulating alter table operations on SQLite, integer primary keys now use AUTOINCREMENT, since that is Sequel's default when creating tables on SQLite. * On SQLite, Database#schema no longer uses :auto_increment entries when the table has a composite primary key. * Most dataset opts values are now frozen to prevent accidental modification and allow for thread-safe access. * SQL::Expression subclass instances are now always frozen. * Dataset::PlaceholderLiteralizer and Dataset::PlaceholderLiteralizer::Argument instances are now always frozen. * Dataset#ungraphed now works on a frozen model dataset. * Model#set_server now works when the model uses a frozen dataset. * The pagination and null_dataset extensions now work on frozen datasets. * Dataset#server now works for frozen model datasets when the model uses the sharding plugin. * Calling eager_graph or association_join on a model dataset is now deprecated if it would ignore the association's :conditions option and the :graph_conditions, :graph_block, or :graph_only_conditions association option is not used. * Using the :eager_limit dataset option in an eager_load callback with a singular association now raises an Error. Previously, the behavior was undefined. * Calling Dataset#prepare without a name argument is now deprecated. Previously, it raised an Error in the mysql, mysql2, and postgres adapters, but was allowed on other adapters. * The looser_typecasting extension now handles the strict BigDecimal parsing introduced in ruby 2.4. 
* When using the duplicate_columns_handler extension with :on_duplicate_columns=>:warn, the warning message is now prepended with the file and line. * Internally, Sequel uses Dataset#where instead of #filter, reverse instead of reverse_order, and select_append instead of select_more to save a method call and array creation. * Dataset#db= and #opts= in the sequel_3_dataset_methods extension now raise a RuntimeError if the dataset is frozen. * Sequel's tests now run without warnings when using Minitest 5.10. * Sequel now issues a deprecation message instead of a warning when used with PostgreSQL <8.2. = Backwards Compatibility * Any external dataset extensions or adapters that modified or directly accessed dataset instance variables other than @db and @opts (such as @columns) need to be updated to work with the new dataset internals. * Any external adapters that implemented native prepared statements/bound variables need to be updated to work with the new internal prepared statement API. * Model.set_dataset and .dataset= now operate on a clone of the dataset given, instead of mutating the dataset that is passed in. This allows them to work with frozen datasets, but can change the behavior if you mutate a dataset after passing it to one of these methods. Anyone doing that needs to change their code to get the current copy of the model's dataset, and mutate that, or better yet, avoid mutating datasets at all. * Dataset#columns now calls #columns! instead of the other way around, which may require external plugins/extensions that override #columns to switch to overriding #columns!. * External adapters that want to disable identifier mangling by default need to be updated. sequel-5.63.0/doc/release_notes/4.43.0.txt000066400000000000000000000065171434214120600177730ustar00rootroot00000000000000= New Features * Database#freeze has now been implemented. It is now recommended to use it in production and during testing, after loading extensions and making other changes to Database state. Once frozen, the Database settings cannot be modified, but the Database can execute queries and return results. By freezing the Database, you gain greater thread-safety assurance and will be alerted via an exception if runtime code attempts to modify Database state. = Other Improvements * Model#refresh now uses the same optimization that Model.with_pk uses, resulting in faster refreshing for most models. * The prepared_statements plugin no longer automatically uses prepared statements in cases where it is likely to be slower. Now, prepared statements are only used by default for INSERT and UPDATE statements (and DELETE on Oracle and DB2). This change was made after benchmarking showed that using prepared statements for primary key lookups, refreshes, and deletes is significantly slower than non-prepared statements across almost all adapters. * Database#extension no longer attempts to load the same extension more than once. * The timestamp migrator now handles key length limitations when using MySQL with InnoDB engine and utf8mb4 charset default. * The jdbc/sqlite adapter will now use SQLite result codes for more accurate exception types, if the jdbc-sqlite driver supports them. * dataset_module is now inherited correctly if using the single_table_inheritance plugin. This was broken in 4.42.0, and fixed in 4.42.1. * The prepared_statements plugin now respects a server specified for the model instance, instead of always using the default server.
* The prepared_statements_associations plugin now respects a server specified for the model instance if using the sharding plugin, instead of always using the default server. * The prepared_statements_associations plugin now works correctly when using some instance-specific associations, such as many_to_one associations using a nil :key option. * The prepared_statements_with_pk plugin now respects a server specified for the dataset, instead of always using the default server. * Model#freeze now freezes the associations hash after validating the model instance, instead of before, fixing cases where validation calls association methods. * Sequel no longer continually tries to determine the server version on HSQLDB, if the first attempt fails. * The mock adapter now uses a thread-safe incrementor for autoid. * Mysql2 native prepared statement support now better handles sharding where connection options differ per shard. * On Oracle, Database#sequence_for_table is now thread-safe. * On PostgreSQL, Database#type_supported? is now thread-safe. * On MySQL, Database#supports_timestamp_usecs? now correctly memoizes false values. * The jdbc/postgresql adapter now works correctly if the pg_hstore extension is loaded first. = Backwards Compatibility * Maintainers of external adapters and Database extensions should update their code to support Database#freeze. * Code that relies on Database extensions being loaded multiple times if called multiple times needs to be updated, because now the extension only gets loaded the first time Database#extension is called. * @enum_labels in the pg_enum extension is now frozen by default. sequel-5.63.0/doc/release_notes/4.44.0.txt000066400000000000000000000105301434214120600177640ustar00rootroot00000000000000= New Features * Model.freeze is now supported and recommended in production and during testing. It freezes all class-level metadata, preventing possible thread-safety issues at runtime. * Model.finalize_associations has been added, speeding up some association reflection methods by about 10x. This method should be called after all associated models have been loaded. This can speed up the retrieval of associated objects for small datasets by 5-10%. One advantage of using this is it will raise an exception if it recognizes that any of your associations are not defined correctly, such as referencing an associated class that doesn't exist. * Model.freeze_descendents has been added to the subclasses plugin. This method finalizes associations for all descendent classes, then freezes the descendent class. It's designed to make it easy to freeze all model classes in use: Sequel::Model.plugin :subclasses Dir['./models/*.rb'].each{|f| require f} Sequel::Model.freeze_descendents * An implicit_subquery dataset extension has been added, which implicitly uses a subquery if you have a dataset with raw SQL and you call a method that would modify the SQL used: DB['SELECT * FROM foo'].where(:bar=>1) # SELECT * FROM foo DB.extension :implicit_subquery DB['SELECT * FROM foo'].where(:bar=>1) # SELECT * FROM (SELECT * FROM foo) AS t1 WHERE (bar = 1) * Model datasets now have where_all, where_each, and where_single_value methods for returning data: class Album < Sequel::Model; end Album.where_all(:id=>[1,2,3]) # => [Album[1], Album[3], Album[2]] Album.where_each(:id=>[1,2,3]) do |album| # ... 
end Album.select(:name).where_single_value(:id=>1) # "Album's Name" These methods are designed for use by other dataset methods you define, and are optimized for frozen datasets if the methods will be called multiple times on the same dataset. where_all and where_each can increase performance by up to 40% for small datasets compared to where.all and where.each. where_single_value can be up to twice as fast as where.single_value. * Model.dataset_module now supports an eager method for eager loading: class Album < Sequel::Model many_to_one :artist dataset_module do eager :with_artist, :artist end end Album.with_artist.all # eagerly loads artist association = Other Improvements * The jdbc adapter now supports Database#freeze. Possible thread-safety issues when initializing multiple jdbc Database instances in separate threads at the same time have been fixed. * The postgres adapter now raises an exception if it recognizes that the loaded version of sequel_pg is incompatible. * Sequel classes that are subclasses of core classes now define custom #inspect methods so instances can easily be differentiated from core class instances. For example: Sequel::SQL::Blob.new('a') # => # Sequel::SQLTime.now # => # Sequel::LiteralString.new("foo") # => # class Album < Sequel::Model; end Album.many_to_one :artist # => # Sequel::SQL::ValueList.new([[1,2]]) # => # * Dataset#from_self now copies the columns from the current dataset if they are present, since wrapping a dataset in a subquery should not change the columns returned. * On PostgreSQL, array type conversion now correctly handles false values. * Another disconnect error is now recognized by the jdbc/as400 adapter. * Modifications to Sequel::Model::Associations::ASSOCIATION_TYPES are now thread safe, fixing issues if separate threads attempt to load separate model plugins that modify this hash. * The force_encoding plugin no longer modifies the encoding of Sequel::SQL::Blob instances. * Many plugins were updated so they no longer add constants to the namespace of the model that loads them. = Backwards Compatibility * Maintainers of external model plugins should update their code to support Model.freeze. = Upcoming Deprecation * Starting in Sequel 4.45.0, Sequel will be adding deprecation warnings for features that will be removed or where behavior will change in Sequel 5. sequel-5.63.0/doc/release_notes/4.45.0.txt000066400000000000000000000317401434214120600177730ustar00rootroot00000000000000= Deprecated Features * Dataset mutation is now deprecated. Users should switch to using the non-mutating methods. # Instead of: dataset.where!(:foo) # Switch to: dataset = dataset.where(:foo) * Support for the Cubrid, Firebird, Informix, and Progress databases has been deprecated. Any users of this support should consider creating an external adapter with the current code and maintaining such support themselves. * The do (DataObjects), swift, and jdbc/as400 adapters have been deprecated. Any users of these adapters should consider creating an external adapter with the current code and maintaining the adapter themselves. * Model transaction hooks (after_commit, after_rollback, after_destroy_commit, after_destroy_rollback) are now deprecated. Users should switch to calling the after_commit and after_rollback database transaction hooks directly. # Instead of: def after_commit super do_something end # Switch to: def after_save super db.after_commit{do_something} end * Passing a block to Database#from is now deprecated. 
For backwards compatibility, this block affected the WHERE clause instead of the FROM clause. In Sequel 5, Database#from blocks will be treated like Dataset#from blocks, and will affect the FROM clause. This behavior has been available for years by using the from_block extension. # Instead of: DB.from(:foo){a > b} # Switch to: DB.from(:foo).where{a > b} * Passing non-hash arguments and multiple arguments to the model association methods is now deprecated. Switch to using a hash as an argument. # Instead of: model.association(true) model.association(proc{|ds| ds.where(:foo)}) # Switch to: model.association(:reload=>true) model.association(:callback=>proc{|ds| ds.where(:foo)}) model.association{|ds| ds.where(:foo)} * Passing procs as filter arguments is now deprecated. These should now be passed as blocks instead of arguments. # Instead of: dataset.where(proc{foo > bar}) # Switch to: dataset.where{foo > bar} * Passing multiple arguments or an array as filter arguments when the array/arguments does not represent a conditions specifier (array of two element arrays, treated like a hash) is now deprecated. Switch to calling the filter method separately with each argument or using Sequel.& to combine the arguments: # Instead of: dataset.where(:foo, :bar) dataset.where([:foo, :bar]) # Switch to: dataset.where(:foo).where(:bar) dataset.where(Sequel.&(:foo, :bar)) * Returning false from model before hooks to cancel an action is now deprecated. Switch to calling cancel_action instead. # Instead of: def before_save return false if something super end # Switch to: def before_save cancel_action('something bad') if something super end * Database#each_server has been deprecated. Switch to using Database#servers and Database#with_server from server_block extension: # Instead of: DB.each_server{|db| db.run("foo")} # Switch to: DB.extension :server_block DB.servers.each{|s| DB.with_server(s){DB.run("foo")}} * Calling Database#add_servers and Database#remove_servers on a database that does not use the :servers option is now deprecated. Currently, the calls to add_servers and remove_servers are ignored for such databases, which can hide errors. * Sequel::Postgres::PG_NAMED_TYPES is now deprecated. Switch to calling Database#add_named_conversion_proc instead. # Instead of: require 'sequel/adapters/utils/pg_types' Sequel::Postgres::PG_NAMED_TYPES[:foo] = lambda{|v| v} DB = Sequel.connect('postgres://...') # Switch to: DB = Sequel.connect('postgres://...') DB.add_named_conversion_proc(:foo){|v| v} * Modifying the identifier mangling settings for a Database or Dataset is now deprecated unless the identifier_mangling extension is explicitly loaded into the Database instance. * The Sequel::Database.single_threaded accessor is now deprecated. Switch to using Sequel.single_threaded= and Sequel.single_threaded?. * Sequel::Database.identifier_input_method, Sequel::Database.identifier_output_method, and Sequel::Database.quote_identifier accessors are now deprecated. Switch to modifying the setting for each Database instance. * Sequel.identifier_input_method=, Sequel.identifier_output_method=, and Sequel.quote_identifer= setter methods are now deprecated. Switch to modifying the setting for each Database instance. * Calling Dataset#delete/update/truncate on datasets with limits or offsets is now deprecated, unless the database will respect the limit or offset. Currently, only MySQL and Microsoft SQL Server have limited support for such deletes and updates. 
  You should either call unlimited or skip_limit_check before calling
  delete/update/truncate.

* Having duplicate column names in subclass tables when using the
  class_table_inheritance plugin is now deprecated. The documentation
  has warned against this for a long time, but the code did not
  enforce it.

* When using the association_pks plugin setter methods without the
  :delay_pks association option set, a warning is now issued. In
  Sequel 5, the default will be to assume that the :delay_pks option
  is :always, and not to make modifications until the object is
  saved.

  If you would like to keep the current behavior, set the
  :delay_pks=>false association option. The current :delay_pks=>true
  behavior will be removed in Sequel 5, with it being treated like
  :delay_pks=>:always. If you are relying on the current behavior of
  :delay_pks=>true (delay for new objects, immediate for existing
  objects), you will need to update your code.

* Database#dup/clone are now deprecated. They have never been handled
  correctly, since the default implementation from Kernel has been
  used.

* Model.dup/clone are now deprecated. They have never been handled
  correctly, as the default implementation from Kernel/Module has
  been used.

* Database#use on MySQL is now deprecated. Switch to creating a new
  Database instance instead of modifying the database for an existing
  instance.

* Database#database_name on MySQL is now deprecated. Switch to asking
  the database server which database you are connected to:

    # Instead of:
    DB.database_name

    # Switch to:
    DB.get{DATABASE{}}

* In the lazy_attributes, nested_attributes, composition, and
  serialization plugins, the *_module accessors are now deprecated.
  These were implementation details that should not have been
  exposed.

* The schema plugin is now deprecated. Switch to defining the schema
  before creating the model class, using the Database schema methods.

* The scissors plugin is deprecated. It existed for compatibility
  with Sequel 3, but it is dangerous, as it makes it easier to modify
  all rows when the intent was to modify a single row.

* The prepared_statements_associations and prepared_statements_with_pk
  plugins are now deprecated. These plugins generally make things
  slower.

* Dataset#unbind, Sequel::Unbinder, and Sequel::UnbindDuplicate are
  now deprecated. This support mostly existed to support the
  prepared_statements_associations and prepared_statements_with_pk
  plugins.

* The Sequel::Error::* exception class aliases are now deprecated.
  Switch to using the exception classes in the Sequel namespace.

* Sequel::BeforeHookFailed is now deprecated. Switch to using
  Sequel::HookFailed.

* Calling Sequel::Qualifier.new with 2 arguments is now deprecated.
  Users should switch to calling it with a single argument (the table
  used for qualifying unqualified identifiers).

* Treating unrecognized prepared statement types as :select is now
  deprecated. Switch to using :select as the prepared statement type.

* The @was_new instance variable available in model after_save hooks
  is now deprecated. There is no deprecation warning associated with
  this change.

    # Instead of:
    def after_save
      super
      if @was_new
        do_something
      else
        do_something_else
      end
    end

    # Switch to:
    def after_create
      super
      do_something
    end

    def after_update
      super
      do_something_else
    end

* The @columns_updated instance variable available in model
  after_save and after_update hooks is deprecated. Switch to using
  the new columns_updated plugin and calling the columns_updated
  method.

* The Sequel.cache_anonymous_models accessor has been deprecated.
  Switch to using Sequel::Model.cache_anonymous_models.

* Sequel::Model::ANONYMOUS_MODEL_CLASSES and
  Sequel::Model::ANONYMOUS_MODEL_CLASSES_MUTEX have been deprecated.

* Sequel::Database::ResetIdentifierMangling has been deprecated.

= New Features

* A validation_contexts plugin has been added, which adds support for
  a :validation_context option to Model#save and Model#valid?. The
  value for this option will be available via the validation_context
  method inside the validation hooks and validate method.

    class Album < Sequel::Model
      plugin :validation_contexts

      def validate
        super
        if validation_context == :approve
          errors.add(:status_id, 'not 42') unless status_id == 42
        end
      end
    end

    album = Album.first
    album.status_id = 41
    album.valid?(:validation_context=>:approve) # => false
    album.status_id = 42
    album.valid?(:validation_context=>:approve) # => true

* A columns_updated plugin has been added, allowing you to get access
  to the hash used for updating a model instance via the
  columns_updated method:

    class Album < Sequel::Model
      plugin :columns_updated

      def after_update
        super
        if columns_updated.has_key?(:foo)
          do_something(columns_updated[:foo])
        end
      end
    end

* Dataset#delete on Microsoft SQL Server now respects limits. Note
  that Microsoft SQL Server does not respect orders for deletes, only
  limits, which makes this support not very useful. Currently, a
  deprecation warning will be issued when using a delete with an
  order and a limit, and in Sequel 5 an exception will be raised.

* An odbc/oracle subadapter has been added.

* A Model.dataset_module_class accessor has been added, allowing
  plugins to add support for custom behavior in dataset_module blocks
  (a sketch appears below).

* Support for deprecating constants on Ruby 2.3+ has been added. Note
  that you will only get warnings for deprecated constant use if you
  are running on Ruby 2.3+. If you are running on a previous version
  of Ruby, you should scan your code manually for deprecated constant
  use.

= Other Improvements

* Using Model#cancel_action inside validation hooks now works
  correctly when Model#valid? is called.

* Model#[] now handles columns with false values correctly when using
  the split_values plugin.

* When calling Dataset#union/intersect/except on a dataset with an
  offset but no limit, the dataset is wrapped in a subquery, just
  like a dataset with a limit.

* The dumping of 64-bit autoincrementing primary key columns by the
  schema_dumper extension is now handled correctly when using the
  :same_db option.

* The schema_dumper extension now supports the :schema option when
  dumping schema.

* On Microsoft SQL Server and SQLAnywhere, ORDER BY clauses now come
  after UNION/INTERSECT/EXCEPT instead of before, fixing issues when
  the :from_self=>false option is used with union/intersect/except
  and an order is applied afterward.

* On Microsoft SQL Server, if calling Dataset#union/intersect/except
  on a dataset with an order and without a limit or offset, the order
  is removed. When using UNION/INTERSECT/EXCEPT, Microsoft SQL Server
  does not guarantee any ordering unless you specify an order for the
  compound dataset. As a general rule, you should always apply orders
  after compounds instead of before.

* On Microsoft SQL Server <2012, when using a dataset with an offset
  without a limit in a UNION/INTERSECT/EXCEPT query, Sequel now uses
  TOP (100) PERCENT to work around the limitation that using orders
  in subqueries is not supported unless there is a limit (offsets are
  emulated by a ROW_NUMBER window function with an order in this
  case).
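  As a hedged sketch of the Model.dataset_module_class accessor
  mentioned under New Features above, a plugin or model could install
  a DatasetModule subclass to make extra macros available inside
  dataset_module blocks. The class and method names here are
  hypothetical, not part of Sequel itself:

    # Hypothetical DatasetModule subclass adding an ordered_subset
    # macro for use in dataset_module blocks.
    class OrderedDatasetModule < Sequel::Model::DatasetModule
      # Define a dataset method that orders by the given columns.
      def ordered_subset(name, *columns)
        define_method(name){order(*columns)}
      end
    end

    class Album < Sequel::Model
      # Have dataset_module use the custom subclass.
      def self.dataset_module_class
        OrderedDatasetModule
      end

      dataset_module do
        ordered_subset :by_name, :name
      end
    end

    Album.by_name # SELECT * FROM albums ORDER BY name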
* Database#indexes on MySQL now handles qualified identifiers.

* Sequel now literalizes Sequel::SQLTime instances with 3 fractional
  digits in the jdbc/postgresql adapter, fixing issues on JRuby
  9.1.8.0+ (the first JRuby version to support greater than
  millisecond precision).

sequel-5.63.0/doc/release_notes/4.46.0.txt

= Deprecated Features

* Symbol splitting is now deprecated by default. Sequel has split
  symbols since the very first version, but it has caused many
  problems over the years, and while terse, it isn't intuitive to new
  Sequel users and causes significant problems when using databases
  that use double/triple underscores in identifiers.

  If you are using symbols with embedded double/triple underscores,
  such as:

    :table__column
    :column___alias
    :table__column___alias

  you either need to turn symbol splitting on by doing:

    Sequel.split_symbols = true

  or you need to convert the symbols to Sequel objects:

    Sequel[:table][:column]
    Sequel[:column].as(:alias)
    Sequel[:table][:column].as(:alias)

  Sequel ships with multiple extensions that make creation of those
  Sequel objects less verbose, so consider using the symbol_aref,
  symbol_aref_refinement, symbol_as, symbol_as_refinement, and/or s
  extensions.

  To automatically convert symbols with double/triple underscores to
  their Sequel object equivalents, you can use the sequel-unsplit
  tool available at https://github.com/jeremyevans/sequel-unsplit.

  This deprecation also affects virtual row block methods that use
  double underscores. For example:

    DB[:table].where{table__column > 3}

  should be changed to:

    DB[:table].where{table[:column] > 3}

* Automatically treating plain strings passed to filtering/update
  methods as literal strings has been deprecated, with support moved
  to the auto_literal_strings extension.

  The automatic conversion of plain strings to literal SQL is the
  most common cause of SQL injections in applications using Sequel,
  since many methods pass their arguments down to the filtering
  methods, without considering whether the argument might be a string
  derived from user input. By requiring explicit marking of literal
  SQL strings, SQL injections are less likely and easier to audit
  for.
  This change means that unless you want to use the
  auto_literal_strings extension, code such as:

    DB[:table].where("a = 1")
    DB[:table].where("a > ?", 1)

  should be converted to:

    DB[:table].where(Sequel.lit("a = 1"))
    DB[:table].where(Sequel.lit("a > ?", 1))

  or, even better, avoid literal SQL completely by converting it to
  use equivalent Sequel expressions:

    DB[:table].where(:a => 1)
    DB[:table].where{a > 1}

  This change also affects passing Dataset#update a string:

    # Before
    DB[:table].update("a = a + 1")

    # Change to
    DB[:table].update(Sequel.lit("a = a + 1"))
    DB[:table].update(:a => Sequel[:a] + 1)

  Note that this deprecation does not affect cases where literal SQL
  is used for the entire query, such as when using any of the
  following:

    DB["SELECT * FROM foo"]
    DB.fetch("SELECT * FROM foo WHERE a = ?", 1)
    DB.dataset.with_sql("SELECT * FROM foo WHERE a = ?", 1)

* Passing blocks to virtual row methods has been deprecated, with
  support moved to the virtual_row_method_block extension.
  Historically, passing blocks to virtual row methods changed how the
  methods were handled, but in recent years alternative methods have
  been added to get the same results. If you don't want to use the
  virtual_row_method_block extension, conversion is fairly simple:

    # WHERE a()
    # Before
    where{a{}}
    # Change to
    where{a.function}

    # SELECT count(*)
    # Before
    select{count(:*){}}
    # Change to
    select{count.function.*}

    # SELECT count(DISTINCT c)
    # Before
    select{count(:distinct, :c){}}
    # Change to
    select{count(:c).distinct}

    # SELECT sum(c) OVER (PARTITION BY a)
    # Before
    select{sum(:over, :args=>c, :partition=>:a){}}
    # Change to
    select{sum(:c).over(:partition=>:a)}

* Model.set_allowed_columns and Model#{set,update}_{all,only} have
  been deprecated, with support moved to the whitelist_security
  plugin. These were the historical mass assignment methods supported
  by Sequel, but set_fields and update_fields have been recommended
  instead for many years.

* Model.finder and .prepared_finder have been deprecated by default,
  with support moved to the finder plugin. Model.finder was
  originally added to make it easy to create optimized finder
  methods, but few Sequel users actually use it, so it makes more
  sense to move it to a plugin.

* Model.def_dataset_method and Model.subset have been deprecated by
  default, with support moved to the def_dataset_method plugin. It's
  been recommended for many years to use Model.dataset_module to
  define dataset methods, instead of calling def_dataset_method and
  subset on the model class.

* Using ` in virtual rows to create literal SQL is now deprecated,
  switch to using Sequel.lit instead:

    # Before
    DB[:table].where{`a = 1`}

    # Change to
    DB[:table].where(Sequel.lit('a = 1'))

* Corner cases in argument handling in the filtering methods are now
  deprecated, including:

  * Ignoring a filtering method called without an argument or block.
    In Sequel 5, this will raise an exception.

  * Ignoring empty string arguments or other objects that respond to
    empty? and return true. In Sequel 5, only an empty array or hash
    will be ignored.

  * Ignoring an explicit nil argument when a block is passed. In
    Sequel 5, this will use a NULL filter.

  * Ignoring an explicit nil argument when there is no existing
    filter on the dataset. In Sequel 5, this will use a NULL filter.

* Using a joined dataset as a Sequel::Model dataset is now
  deprecated. Such datasets should now be wrapped in a subquery. In
  Sequel 5, such datasets will automatically be wrapped in a subquery
  aliased to the first table.
    # Before
    Model.dataset = DB[:a].join(:b, :id=>:b_id)

    # Change to
    Model.dataset = DB[:a].join(:b, :id=>:b_id).from_self(:alias=>:a)

* Model.first_where has been deprecated, Model.first should be used
  instead.

* Database#log_yield is now deprecated. This does not affect any of
  the adapters that ship with Sequel, but external adapters that have
  not yet been updated to support #log_connection_yield will need to
  be updated.

* The set_overrides extension is now deprecated. Anyone using it
  should consider supporting it as an external extension.

* Many internal Database and Dataset regexp and string constants that
  were previously used internally have been deprecated. Additionally,
  some historical aliases for existing constants have also been
  deprecated, such as Sequel::Schema::Generator. Ruby 2.3+ is
  required to receive deprecation warnings related to these
  constants.

* Passing model classes as the first argument to Dataset#join_table
  and Dataset#graph is now deprecated. Pass the model's table name or
  the model's dataset instead.

* Passing model instances to Dataset#insert and #insert_sql is now
  deprecated. Call values on the model instance to get the values
  hash, and pass that as the argument instead.

* Calling Dataset#set_graph_aliases before Dataset#graph is now
  deprecated. Dataset#set_graph_aliases should now be called after
  Dataset#graph, not before.

* The sequel/no_core_ext file is deprecated. Sequel hasn't loaded the
  core extensions by default since Sequel 3. You can use the
  following if you want to support both Sequel 3 and Sequel 5:

    begin
      require 'sequel/no_core_ext'
    rescue LoadError
      require 'sequel'
    end

* Database#pragma_get and #pragma_set on SQLite are now deprecated,
  along with any method that calls them, such as auto_vacuum,
  temp_store, foreign_keys, case_sensitive_like, synchronous, and
  their setter methods. To set these pragmas for all SQLite database
  connections, the appropriate options should be passed when creating
  the Database instance.

* Automatically looking up the dataset class for a Database instance
  by looking for a DatasetClass constant in the Database's class is
  now deprecated. All adapters that ship with Sequel have been
  converted, but external adapters should now define the
  Database#dataset_class_default private method appropriately to
  return the correct dataset class (a sketch appears below).

* Calling Model.db= on a model with a dataset is now deprecated. If a
  model already has a dataset, you must now use set_dataset or
  dataset= to change the dataset, not db=.

* Sequel::SQL::Expression#sql_literal and #lit are now deprecated.
  These aren't used internally and aren't expected to be used
  externally.

* {Integer,Timestamp}Migrator::DEFAULT_SCHEMA_{COLUMN,TABLE} are now
  deprecated. They have been replaced by the default_schema_column
  and default_schema_table instance methods.

* Passing a Schema::CreateTableGenerator instance as the second
  argument to Database#create_table is now deprecated.
  Database#create_table still supports passing the generator via the
  :generator option.

* Passing a second argument to Database#alter_table is now
  deprecated.

* Sequel::BasicObject.remove_methods! is now deprecated. It has
  always been a no-op on ruby 1.9+.

* Referencing the PG_NAMED_TYPES constant in your code is now
  deprecated. Previously, adding entries to PG_NAMED_TYPES was
  deprecated, but no deprecation message would be issued by
  referencing the constant.

* The conversion of - to _ in adapter schemes is now deprecated. This
  does not affect any internal adapters, but it may affect external
  ones.
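  As a hedged sketch of the dataset_class_default approach mentioned
  above, an external adapter might define something like the
  following; the FooDB names are hypothetical:

    # Hypothetical external adapter code.
    module Sequel::FooDB
      class Dataset < Sequel::Dataset
      end

      class Database < Sequel::Database
        private

        # Return the adapter's dataset class directly, instead of
        # relying on the deprecated DatasetClass constant lookup.
        def dataset_class_default
          Dataset
        end
      end
    end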
* The Database#jdbc_* methods in the jdbc/db2 adapter
  (e.g. jdbc_tables) are now deprecated. Call the regular versions
  instead (e.g. tables).

* The Dataset#_filter and #_filter_or_exclude private methods have
  been deprecated. If you have an extension that was calling these
  methods, switch to the new #add_filter private method.

= New Features

* The class_table_inheritance plugin now supports an :alias option.
  If provided, this wraps subclass datasets in subqueries, avoiding
  problems with ambiguous columns and cases where the wrong table
  name is used. Due to the deprecation of joined datasets for models,
  use of the class_table_inheritance plugin without this :alias
  option will result in deprecation warnings. In Sequel 5,
  class_table_inheritance will default to using an :alias option with
  the same name as the parent table.

* The Dataset#sqltime_precision private method has been added.
  Adapters can override this if the precision for time values is
  different from the precision for timestamp values. Sequel uses this
  support on Microsoft SQL Server, so that time values now support
  microsecond precision, instead of millisecond precision (a sketch
  appears below).

= Other Improvements

* Sequel::Model classes that use a SQL::Identifier or
  SQL::QualifiedIdentifier FROM table value will now use optimized
  lookups and deletes, just as is done for those that use a Symbol or
  String.

* Dataset#simple_select_all? now handles aliased subqueries
  correctly, returning false instead of true.

* If Sequel.application_timezone is set to :utc,
  Sequel::SQLTime.create will create instances using utc time instead
  of local time.

* If there is an exception while rolling back a transaction when
  using the :rollback=>:always option, the exception is now raised
  instead of being ignored.

* If a migration file does not contain a migration or contains
  multiple migrations, the exception raised will now include the file
  name in the exception message.

* In the jdbc/sqlserver adapter, time values with fractional seconds
  and datetimeoffset values are now handled better when using some
  versions of the underlying JDBC driver.

* An additional disconnect error is recognized when using the mysql
  and mysql2 adapters.

* Dataset#full_text_search on Microsoft SQL Server now works
  correctly if the no_auto_literal_strings extension is used.

* Calling Database#disconnect when using the single connection pool
  without an active connection works correctly again. It was broken
  starting in 4.43.0 during changes to implement Database#freeze.

* Model class methods are no longer added for private methods defined
  in a dataset_module block. Previously, a public model class method
  was defined, but it would raise an error when called.

* Fixnum is no longer referenced in the sqlanywhere shared adapter,
  fixing deprecation warnings on ruby 2.4.

* Sequel no longer uses constants for building SQL queries, relying
  on frozen string literal support for better performance on ruby
  2.3+. However, this decreases SQL query building performance on
  ruby <2.3. For the fastest SQL query building, update to a recent
  version of ruby.

* Sequel no longer ignores an empty object argument to a filtering
  method if a block is provided. Previously, this could raise an
  exception or produce invalid SQL.

* Many small modifications were made to reduce array allocations,
  providing minor speedups.

* Internal use of Array#at has been replaced with Array#[], providing
  minor speedups on recent ruby versions.

* The jdbc/db2 adapter no longer adds jdbc_* methods to
  JDBC::Database.
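  As a hedged sketch of the Dataset#sqltime_precision hook mentioned
  under New Features above, an adapter's dataset might override it
  like this; the FooDB name and the value 6 are illustrative:

    # Hypothetical adapter dataset code.
    module Sequel::FooDB
      class Dataset < Sequel::Dataset
        private

        # Use microsecond precision when literalizing time values,
        # even if timestamp values use a different precision.
        def sqltime_precision
          6
        end
      end
    end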
* Sequel no longer issues deprecation warnings on ruby 1.8.7. Sequel
  5 will drop support for ruby 1.8.7, and it doesn't make sense to
  issue a deprecation warning if you couldn't upgrade anyway.

= Backwards Compatibility

* When specifying the :fields option to a nested_attributes setter,
  set_fields is now used internally instead of set_only. set_fields
  has been recommended over set_only since its introduction in Sequel
  3.12, but nested_attributes was added in Sequel 3.4, before
  set_fields was available.

  The result of this change is that if additional fields are provided
  that do not match the fields in the :fields option, they will be
  ignored instead of an exception being raised.

* When specifying a function name using a Sequel::SQL::Identifier
  instance, the function name is no longer quoted unless
  Sequel::SQL::Function#quoted is used to create a quoted function.
  The reason for this is to make converting virtual row method block
  code easier.

    # Before
    Sequel.function(Sequel[:a]) # "a"()

    # Now
    Sequel.function(Sequel[:a]) # a()
    Sequel.function(Sequel[:a]).quoted # "a"()

* When passing an SQL::PlaceholderLiteralString instance to a dataset
  filtering method, the placeholder string is now always wrapped in
  parentheses:

    ds.where(Sequel.lit('? OR ?', :a, :b)).where(:c)
    # Before: WHERE a OR b AND c
    # Now: WHERE (a OR b) AND c

  This is more of a bugfix than a backwards compatibility issue, but
  is listed in the backwards compatibility section as there may be
  applications that could break due to this change.

* Model.subset now calls Model.dataset_module.subset, instead of the
  other way around. If your code depends on this, you will need to
  make modifications.

* The private Database#column_definition_order method no longer uses
  const_get(:COLUMN_DEFINITION_ORDER). External adapters that defined
  COLUMN_DEFINITION_ORDER but did not override this method must now
  override this method.

* The private Database#native_function_name method no longer uses
  const_get(:EMULATED_FUNCTION_MAP). External adapters that defined
  EMULATED_FUNCTION_MAP but did not override this method must now
  override this method.

sequel-5.63.0/doc/release_notes/4.47.0.txt

= Deprecated Features

* Setting an invalid dataset for a model is now deprecated.
  Historically, Sequel has swallowed exceptions for this to keep
  backwards compatibility, but it generally just results in code
  breaking later. To allow invalid datasets to be used:

    Sequel::Model.require_valid_table = false

* The association_autoreloading and many_to_one_pk_lookup plugins are
  now deprecated. They were moved from plugins to standard model
  behavior in Sequel 4.0, and have been no-ops since.

* The pg_typecast_on_load plugin is now deprecated. It is only useful
  on the already deprecated do and swift adapters.

= New Features

* Database#with_server in the server_block extension now accepts an
  optional second argument for the read only server to use. This
  allows for overriding the default server while providing a separate
  default for read only queries:

    DB.with_server(:server1, :server1ro) do
      DB[:a].all # Uses server1ro
      DB[:b].insert(1) # Uses server1
    end

* Model.default_association_type_options has been added, allowing the
  ability to set default options per association type.
  This can be used to make some association types read_only by
  default:

    opts = Sequel::Model.default_association_type_options
    opts[:one_to_many] = opts[:many_to_many] = {:read_only=>true}

* Database#views on PostgreSQL now accepts a :materialized option to
  return materialized views instead of regular views.

= Other Improvements

* Setting Sequel::Model.require_valid_table = true no longer raises
  an exception when using a valid dataset that selects from a
  subquery or table returning function or uses a join.

* The defaults_setter plugin now inherits any custom default values
  when subclassing.

* The schema_dumper extension now handles Oracle 11g XE behavior of
  appending not null to the database type.

= Backwards Compatibility

* External callers of Database#check_non_connection_error (private
  method) should update their code to call it with a true or false
  argument, specifying whether to raise an error for exceptions that
  are not connection errors.

sequel-5.63.0/doc/release_notes/4.48.0.txt

= Deprecated Features

* The identifier_columns plugin is now deprecated. There is no reason
  to use it when Sequel.split_symbols = false, which will be the
  default in Sequel 5.

* The filter_having, hash_aliases, and sequel_3_dataset_methods
  extensions are now deprecated. They only existed for backwards
  compatibility with Sequel 3.

* The query_literals extension is now deprecated. It changes behavior
  in a way that makes SQL injections more likely.

* The meta_def extension is now deprecated. It is no longer
  necessary, since on ruby 1.9+ you can use define_singleton_method.

* The empty_array_ignore_nulls extension has been deprecated. It has
  been a no-op since Sequel 4.25.0.

* The cti_base_model, cti_key, and cti_model_map class methods in the
  class_table_inheritance plugin are now deprecated. Use
  cti_models.first instead of cti_base_model, sti_key instead of
  cti_key, and sti_model_map instead of cti_model_map.

* The :strict option in the nested_attributes plugin is now
  deprecated. Switch to using the :unmatched_pk option.

* Database#reset_conversion_procs on PostgreSQL is now deprecated.
  There will be no need to call it in Sequel 5.

* Using global conversion procs added by the pg_* extensions, without
  loading the pg_* extension into the Database instance, is now
  deprecated. Additionally, using PGArray.register or
  PGRange.register to register global types is now also deprecated.
  Use Database#register_array_type or Database#register_range_type
  instead to register the types on a per-Database basis.

* Treating the :natural_inner join type as NATURAL LEFT JOIN on MySQL
  is now deprecated. MySQL doesn't support NATURAL INNER JOIN, but if
  you were going to convert it, NATURAL JOIN would make more sense.

* Unexpected values passed to Dataset#insert_conflict on SQLite are
  now deprecated. Only values that result in one of the following
  strings will be allowed in Sequel 5: ROLLBACK, ABORT, FAIL, IGNORE,
  and REPLACE.

* The Dataset#and, #exclude_where, #interval, and #range methods are
  now deprecated. Undeprecated copies are now available in the new
  sequel_4_dataset_methods extension.

* Model.<< is now deprecated. Intuitively, you would expect this to
  call Model.create and return the Model class, but it calls << on
  the model's dataset and returns the dataset.

* The Sequel::Postgres::PG_TYPES constant is now deprecated. All
  conversion procs should now be added on a per-Database basis using
  add_conversion_proc or add_named_conversion_proc.
  The following private Database methods related to conversion procs
  are now deprecated, though some are still called internally and
  therefore do not have deprecation warnings:

  * add_named_conversion_procs
  * conversion_procs_updated
  * convert_named_procs_to_procs
  * copy_conversion_procs
  * get_conversion_procs

  Related to this, loading the sequel/adapters/utils/pg_types.rb file
  is now deprecated.

* The following adapter or database specific global accessors for
  setting defaults are now deprecated:

  * Sequel::DB2.use_clob_as_blob
  * Sequel::IBMDB.convert_smallint_to_bool
  * Sequel::MySQL.convert_invalid_date_time
  * Sequel::MySQL.convert_tinyint_to_bool
  * Sequel::MySQL.default_charset
  * Sequel::MySQL.default_collate
  * Sequel::MySQL.default_engine
  * Sequel::Postgres.use_iso_date_format
  * Sequel::Postgres.client_min_messages
  * Sequel::Postgres.force_standard_strings
  * Sequel::SqlAnywhere.convert_smallint_to_bool

  Use the Database instance accessors or Database options instead to
  change behavior.

* The following adapter or database specific dataset mutation methods
  are now deprecated:

  * convert_smallint_to_bool= (ibmdb adapter, SQLAnywhere)
  * convert_types= (jdbc adapter)
  * mssql_unicode_strings= (Microsoft SQL Server)

  Use the with_* methods, which return a modified copy of the
  dataset, instead of these mutation methods.

* The Dataset#non_sql_options private method is now deprecated.
  External adapters that overrode this method should switch to
  overriding Dataset#non_sql_option?.

* The Database#timestamp_convertor private method in the jdbc adapter
  is now deprecated. Users should switch to
  method(:timestamp_convert).

* Modification of the Sequel::JDBC::TypeConvertor class is now
  deprecated. External jdbc subadapters that were using this to add
  custom conversion procs should be modified.

* Having the pg_row extension respect conversion procs for subtypes
  added after the registration of the composite type is now
  deprecated. Now, all subtypes should have the appropriate
  conversion proc added before the composite type is registered.

* Array#sql_array in the core_extensions extension is now deprecated.
  Switch to using Array#sql_value_list.

* The SEQUEL_POSTGRES_USES_PG constant added by the postgres adapter
  is now deprecated. Sequel::Postgres::USES_PG should be used
  instead.

* Many more internal Sequel constants have been deprecated.

= New Features

* The Model#to_json and Dataset#to_json methods in the
  json_serializer plugin now support a block. This block is called
  with the hash/array that would have been serialized to JSON, and
  the block should return the object to serialize. This makes it easy
  to customize the JSON output by adding new entries or wrapping the
  object in another object.

  The Dataset#to_json method supports an :instance_block option,
  which should be a proc that will be passed to Model#to_json.

  In order to implement this, Sequel.object_to_json now passes any
  block given to the to_json call on the object. If you are
  overriding Sequel.object_to_json, you are responsible for making
  sure the block is passed appropriately.

* The association_pks plugin now supports a
  :association_pks_use_associated_table association option for
  many_to_many associations. If this option is used, instead of just
  looking at the join table, the association_pks getter will get the
  primary keys from the associated table. This can be useful if the
  association's right_primary_key does not match the associated
  model's primary key, and you are interested in the primary keys of
  the associated objects.
  If this option is used, no association_pks setter method is
  created.

* Dataset#as_hash has been added as a replacement for #to_hash. If
  you want, you can now undef_method :to_hash and use as_hash, and
  things will work. Doing so can work around problems when using
  keyword argument splats in ruby 2.0+. For example:

    def foo(*a, **b)
    end

    foo(City.order(:id))

  results in foo being called with a being [] and b being
  City.order(:id).to_hash, which is unexpected and undesired
  behavior. If you want to use keyword argument splats or other
  places where ruby will call to_hash implicitly if it is defined,
  using undef_method :to_hash is recommended.

* A Database#add_conversion_proc method has been added on PostgreSQL.
  This method takes a type OID and either a block or a callable
  argument to use as the conversion proc for the type OID (a sketch
  appears below).

* The following adapter or database specific Database accessors have
  been added for changing settings on a per-Database basis:

  * convert_smallint_to_bool (ibmdb adapter)
  * default_charset (MySQL)
  * default_collate (MySQL)
  * default_engine (MySQL)
  * use_clob_as_blob (DB2)

* A Dataset#with_convert_types method has been added to the jdbc
  adapter, for returning a modified dataset with the convert_types
  setting changed.

= Other Improvements

* Using the postgres adapter with pg 0.21.0 no longer results in
  deprecation warnings.

* When using the class_table_inheritance plugin and using a direct
  subclass of the parent class that does not use a separate table, as
  well as using the :alias option, the insert SQL used is now
  correct. Previously, it attempted to insert into a subquery, which
  is not valid SQL. Additionally, the dataset for such a model no
  longer uses a subquery, since there is no reason to do so as there
  is no join.

* Model.skip_auto_validations(:not_null) in the auto_validations
  plugin now skips not null checks for columns with default values,
  in addition to skipping not null checks for columns without default
  values.

* The static_cache plugin now supports the options hash argument to
  to_hash and to_hash_groups. Currently, it only supports the :hash
  option, since the :all option doesn't make sense.

* When touching associations in the touch plugin, the cached
  association is now cleared, because otherwise the cached values
  would be stale.

* The validation_class_methods plugin no longer requires the blank
  extension.

* The validation_helpers plugin methods that support the :allow_blank
  option now work correctly if the blank extension is not loaded.

* Loading the column_conflicts plugin into a model a second time no
  longer removes existing column conflict settings.

* On SQLite 3.8.8+, indexes automatically created from unique
  constraints are now included in Database#indexes output.

* On SQLite 3.8.8+, partial indexes are now excluded from
  Database#indexes output.

* Database#indexes on PostgreSQL 9.5+ now uses a simpler query with
  the array_position function.

* Database#foreign_key_list on PostgreSQL now uses a single query
  instead of two queries, and also uses the array_position function
  on PostgreSQL 9.5+ to simplify the queries.

* On PostgreSQL and Derby, when calling Database#create_table with
  the :ignore_index_errors option inside a transaction, a savepoint
  is used around each index creation so that an index error does not
  cause the entire transaction to fail. A savepoint is also used on
  Microsoft SQL Server, but it appears that Microsoft SQL Server
  rolls back the entire transaction if CREATE INDEX returns an error,
  instead of just rolling back to the savepoint.
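  As a hedged sketch of the Database#add_conversion_proc method
  described under New Features above: 829 is PostgreSQL's OID for the
  macaddr type, and the proc here simply returns the string
  unchanged:

    # Assuming DB is a Sequel PostgreSQL Database instance.
    DB.add_conversion_proc(829) do |value|
      value # return the macaddr string as-is
    end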
* Encoding is now preserved when parsing PostgreSQL arrays in the
  pg_array extension.

* Database#copy_table in the postgres adapter now does not hide the
  underlying exception if an exception is raised during processing.

* Database#copy_into in the jdbc/postgresql adapter now does not hide
  the underlying exception if an exception is raised during
  processing.

* Database#copy_into in the jdbc/postgresql adapter now respects the
  :server option for using a specific shard.

* Calling #reset_conversion_procs on a Database instance that uses
  the pg_hstore extension now results in the hstore type still being
  parsed. Previously, the hstore conversion proc would be dropped.

* The postgres adapter no longer monkey-patches postgres-pr if it
  uses that as the driver.

* Multiple thread-safety issues in the mock adapter have been fixed.

* Thread safety issues when simultaneously loading multiple adapters
  that access PostgreSQL have been fixed.

* Hash allocations have been reduced in the csv_serializer,
  json_serializer, and xml_serializer plugins.

* The deprecated Sequel::Model::ANONYMOUS_MODEL_CLASSES constant is
  now correctly populated with classes created by Sequel::Model().
  This was broken starting in Sequel 4.45.0.

= Backwards Compatibility

* The pg_array_associations plugin now loads the pg_array extension
  into the Database instance if it is not already loaded. This can
  break cases where the pg_array_associations plugin is used on a
  non-PostgreSQL database.

* Support for using the old postgres driver has been removed from the
  postgres adapter. The postgres adapter now only supports pg and
  postgres-pr.

* When the postgres-pr driver is being used by the postgres adapter,
  connecting to a database is only allowed if standard strings are
  being forced (the default).

sequel-5.63.0/doc/release_notes/4.49.0.txt

= Forward Compatibility

Sequel 4.49.0 will be the last minor release of Sequel 4. While the
vast majority of backwards incompatible changes in Sequel 5 have
deprecation warnings in 4.49.0, there are a few changes that do not.
Here is a brief list of changes coming in Sequel 5 that do not have
deprecation warnings (note that this list may not be exhaustive):

* The {before,after,around}_validation hooks will always be called
  when saving, even if the validate: false option is used. This will
  allow you to use the before_validation hook to make changes to the
  model instance that are required before validation and before
  saving even if not validating. Currently, you would have to use
  both a before_save and before_validation hook, which would both be
  run on normal instance saving.

* Getting values for newly created model instances after insertion
  now happens before after_create is called, instead of after. This
  behavior is currently available via the before_after_save plugin,
  and will become the default behavior.

* Sequel will now immediately attempt to connect to the database when
  a Database instance is created, in order to fail fast. This
  behavior is currently available via the test: true option, and will
  become the default behavior. You can force not testing the
  connection by using the test: false option.

* The validates_unique method in the validation_helpers plugin will
  by default only check for uniqueness if the record is new or one of
  the related columns has been modified. You can use
  only_if_modified: false to force the uniqueness check (a sketch
  appears below).
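  As a hedged sketch of the only_if_modified option mentioned above;
  the model and column names are hypothetical:

    class Album < Sequel::Model
      plugin :validation_helpers

      def validate
        super
        # Force the uniqueness query even if name has not been
        # modified on this instance.
        validates_unique(:name, :only_if_modified=>false)
      end
    end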
* Database schema methods and schema generator methods will return
  nil instead of some internal value.

* Many cases where Sequel uses send internally will be switched to
  public_send, so they only call public methods, unless it is
  specifically expected that they will call private methods.

* Model association hooks will be nil instead of empty arrays by
  default. They will only be arrays if that hook has been set for the
  association.

* Internal uses of instance_eval with a block will be changed to
  instance_exec. This will allow them to be used with lambdas that
  take no arguments. Unfortunately, it will break the case where a
  lambda is currently used that takes one argument.

* Most internal constants will be frozen, unless there is a
  requirement that they be modified at runtime.

* The @was_new instance variable set during model instance creation
  will be removed.

= Deprecated Features

* Model association before callbacks returning false to cancel the
  action is now deprecated. The callbacks should now call
  Model#cancel_action to cancel the action.

* Loading plugins by requiring them via sequel_#{plugin} is now
  deprecated. Affected plugins should move the plugin file so it can
  be required via sequel/plugins/#{plugin}.

* In the mock adapter, Dataset#autoid=, #_fetch=, and #numrows= are
  now deprecated. They modified the dataset itself, which would not
  work for frozen datasets. Dataset#with_autoid, #with_fetch, and
  #with_numrows should be used instead, which return a modified copy.

* In the null_dataset extension, Dataset#nullify! is now deprecated.
  It modified the dataset itself, which would not work for frozen
  datasets. Dataset#nullify should be used instead, which returns a
  modified copy.

* Modifying the validation_helpers plugin DEFAULT_OPTIONS hash is now
  deprecated. Any change to the default options should be done by
  overriding the Model#default_validation_helpers_options private
  method.

* Modifying ConnectionPool::CONNECTION_POOL_MAP to support an
  external connection pool is now deprecated. To use an external
  connection pool, pass the pool class via the :pool_class Database
  option. Additionally, using a :pool_class option that is not a
  class or a symbol for one of the default connection pools is also
  deprecated.

* ConnectionPool#created_count is now deprecated. This method was
  misnamed, as it was an alias of size, but the name implies it
  returns how many connections have been created, as opposed to how
  many connections are still in the pool.

* Sequel::SQL::Function#f is now deprecated, switch to using #name
  instead.

* Sequel::SQL::AliasedExpression#aliaz is now deprecated, switch to
  using #alias instead.

* The :eager_loading_predicate_key association option and
  eager_loading_predicate_key association method are now deprecated.
  The predicate_key option and method should be used instead.

* The cti_columns class method in the class_table_inheritance plugin
  is now deprecated.

* The serialized_columns class method in the serialization plugin is
  now deprecated.

* Having ds.join_table(:table, :cross, :a=>:b) be treated as an inner
  join on MySQL is now deprecated.

* Sequel::IBMDB::Connection#prepared_statements= in the ibmdb adapter
  is now deprecated.

* Additional internal constants are now deprecated.

= New Features

* Database#extend_datasets and Database#with_extend, if given a
  block, now use a Dataset::DatasetModule instance instead of a plain
  Module instance.
  Dataset::DatasetModule is a subset of Model::DatasetModule, and
  allows for the easy creation of dataset methods that can perform
  caching for frozen datasets.

  Defining dataset methods is done by calling methods with the same
  name as dataset methods inside the extend_datasets or with_extend
  block:

    DB.extend_datasets do
      order :by_id, :id
      select :with_id_and_name, :id, :name
      where :active, :active
    end

  This is equivalent to:

    DB.extend_datasets do
      def by_id
        order(:id)
      end

      def with_id_and_name
        select(:id, :name)
      end

      def active
        where(:active)
      end
    end

  Except that for frozen datasets (the default in Sequel 5), code
  like:

    100.times do
      DB[:table].active.with_id_and_name.by_id
    end

  will only allocate 4 datasets instead of 400, and can be 3-4 times
  faster.

* Dataset#where_{all,each,single_value} are now core dataset methods
  instead of just model dataset methods. These methods allow you to
  replace:

    dataset.where(cond).all
    dataset.where(cond).each{}
    dataset.where(cond).single_value

  with:

    dataset.where_all(cond)
    dataset.where_each(cond){}
    dataset.where_single_value(cond)

  The advantage of #where_{all,each,single_value} is that frozen
  datasets can potentially take advantage of caching and perform
  70%-300% faster.

* Oracle 12 native limit/offset support is now used, which in
  particular makes offset queries much faster, as they don't have to
  be emulated using the row_number window function.

* Dataset#paged_each in the mysql2 adapter now supports a
  :stream=>false option to disable streaming and fall back to the
  default implementation.

* The postgres adapter now supports the :sslrootcert option directly,
  so you no longer need to specify it using the :driver_options hash.

* The single_table_inheritance plugin now supports an
  sti_class_from_sti_key method for getting the appropriate subclass
  for the given key.

= Other Improvements

* Using the dataset_associations plugin with a many_through_many
  association that joins to the same table multiple times is now
  handled correctly by automatically aliasing the table
  appropriately.

* On Ruby 2.1+, Sequel::Error#cause will use wrapped_exception if one
  is set. This doesn't result in different behavior in most cases,
  but it can in cases where nested exception handling is done and
  Sequel tries to raise the most relevant exception.

* Using the composition plugin with the :mapping option now works
  correctly when using the column_conflicts plugin.

* The validation_helpers plugin's validates_max_length method now
  correctly gets the default :nil_message option from the
  default_validation_helpers_options method instead of looking at the
  plugin defaults.

* The duplicate_columns_handler extension no longer makes the
  Dataset#columns= method public.

* On H2 1.4+, alter_table add_primary_key now works correctly.

* The jdbc/sqlserver adapter's datetimeoffset type handling now works
  with more JDBC driver versions.

sequel-5.63.0/doc/release_notes/4.5.0.txt

= New Features

* An mssql_optimistic_locking plugin has been added. This is similar
  to the regular optimistic_locking plugin, but instead of using an
  integer lock column, it uses a timestamp/rowversion lock column.

* Database#create_table with the :temp=>true option on PostgreSQL now
  supports an :on_commit option. This option can be set to :drop or
  :delete_rows to either drop or empty the temporary table on
  transaction commit (a sketch appears below).

= Other Improvements

* Dataset#insert no longer errors on PostgreSQL if the related table
  is a placeholder literal string.
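  As a hedged sketch of the :on_commit option mentioned above; the
  table and column names are hypothetical:

    DB.transaction do
      # Temporary table is dropped automatically when this
      # transaction commits.
      DB.create_table(:staging, :temp=>true, :on_commit=>:drop) do
        Integer :id
      end
      DB[:staging].insert(:id=>1)
    end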
* Unique constraints are now copied when emulating alter_table
  operations on SQLite.

* Clob column values are no longer returned as SQL::Blob instances by
  the db2 and ibmdb adapters unless use_clob_as_blob is true.

* SQL::Blob objects now work correctly as prepared statement
  arguments in the jdbc/db2 adapter if use_clob_as_blob is false.

= Backwards Compatibility

* The Model.primary_key array for models with composite keys is now
  frozen.

* On DB2, use_clob_as_blob now defaults to false instead of true.

* Sequel no longer uses RubyForge. The Sequel website is now located
  at http://sequel.jeremyevans.net.

sequel-5.63.0/doc/release_notes/4.6.0.txt

= New Features

* Database#call_mssql_sproc is now available for calling stored
  procedures on Microsoft SQL Server, including the use of output
  parameters.

* The Database#{commit,rollback}_prepared_transaction methods now
  support a :server option for the server on which to operate.

= Other Improvements

* On Microsoft SQL Server 2012, the native OFFSET/FETCH support is
  now used for offsets, instead of emulating support via the
  ROW_NUMBER window function.

* Eager loading is now skipped when doing eager(...).naked.all on a
  model dataset, instead of raising an error. This can fix issues
  when the eager_each plugin is used.

* A couple additional disconnection errors are now detected in the
  jdbc/postgresql adapter.

* The tinytds adapter now handles returning rows when the fields are
  not immediately available.

* RuntimeErrors raised by oci8 are now handled correctly in the
  oracle adapter.

* Sequel's specs now work with RSpec 3, while still running correctly
  on RSpec 1.3 and 2.

sequel-5.63.0/doc/release_notes/4.7.0.txt

= New Features

* Alternatives for the more complex virtual row method calls have
  been added:

    # Window Functions using SQL::Function#over
    # before: select{sum(:over, :args=>:col1, :partition=>:col2){}}
    select{sum(:col1).over(:partition=>:col2)}

    # count(*) using SQL::Function#*
    # before: select{count(:*){}}
    select{count{}.*}

    # count(distinct col) using SQL::Function#distinct
    # before: select{count(:distinct, :col){}}
    select{count(:col).distinct}

  Additionally, schema qualified functions are now supported via
  SQL::QualifiedIdentifier#function, and quoted functions are now
  supported via SQL::Identifier#function on some databases:

    # "func"("col")
    select{func.function(:col)}

    # "schema"."func"("col1")
    select{schema__func.function(:col1)}

  If the database does not support quoting function names, then
  Sequel will not quote them.

* An update_or_create plugin has been added, for updating a matching
  object if one exists, or creating an object if it does not. For
  example, the following code will update the number of copies sold
  for the album with the name 'Hello', or it will create an album
  with the name 'Hello' and 1000 copies sold:

    Album.plugin :update_or_create
    Album.update_or_create(:name=>'Hello') do |album|
      album.num_copies_sold = 1000
    end

  You can also use a shorter form of this, with two hashes:

    Album.update_or_create({:name=>'Hello'}, {:num_copies_sold=>1000})

  This plugin also adds a method named find_or_new, which does the
  same thing as update_or_create, except it doesn't persist any
  changes.

* A :raise_on_save_failure option has been added for one_to_many,
  pg_array_to_many, and many_to_pg_array associations.
  This mirrors the Model.raise_on_save_failure setting, and if set to
  false, it will make the add/remove methods return nil instead of
  raising an error if there is a validation/hook error when saving
  the associated record.

* The validates_unique validation in validation_helpers now supports
  a :dataset option to provide the base dataset to use to check
  uniqueness. This is useful when the model itself uses a filtered
  dataset, but the unique index in the database is on an unfiltered
  dataset. The auto_validations plugin uses this option to ensure
  that unique validations are set up correctly in subclasses using
  single table inheritance.

= Other Improvements

* Sequel now automatically rolls back transactions in killed threads
  on ruby 2.0+. It is still impossible to do so on ruby 1.9.

* In the instance_hooks plugin, validation instance hooks are now not
  cleared until after a successful save.

* Composite unique key constraint violations are now recognized and
  raised as Sequel::UniqueConstraintViolation on SQLite.

* Primary key unique constraint violations are now recognized and
  raised as Sequel::UniqueConstraintViolation on Microsoft SQL Server
  and SQLAnywhere.

* If an exception occurs when using a cursor in the postgres adapter,
  and an exception also occurs when closing the cursor when cleaning
  up, the initial exception is now raised.

* You can now get tables in a specific schema in the jdbc adapter
  using the :schema option to Database#tables. This was already
  supported in most jdbc subadapters, because they implement #tables
  using database specific code instead of looking at the JDBC
  metadata, but it should now work for all jdbc subadapters.

* Sequel::SQLTime#to_s is now defined and returns a string in
  HH:MM:SS format (leaving off the date).

= Backwards Compatibility

* The odbc adapter's :driver option is no longer deprecated, as
  reports were received that it still works.

* If you were re-adding instance validation hooks using
  instance_hooks after a save failure, and then retrying the save,
  you may now end up with duplicate validations. You no longer need
  to re-add validation hooks unless the object was saved
  successfully.

sequel-5.63.0/doc/release_notes/4.8.0.txt

= New Features

* A one_through_one association type has been added. This is similar
  to the many_to_many association type in that it uses a join table,
  but it returns a single record instead of an array of records. This
  is designed for cases where the foreign key in the join table that
  references the current table has a unique constraint, or where you
  want to use an order to just pick the first matching record.

  Similarly, the many_through_many plugin now also offers a
  one_through_many association.

* An association_join method has been added to model datasets, for
  setting up joins based on associations. This basically does the
  same join that eager_graph would do, but does not make the other
  changes that eager_graph makes.

  Unlike eager_graph (which uses LEFT OUTER JOINs by default),
  association_join uses INNER JOINs, but there are also
  association_*_join methods (e.g. association_left_join) for using
  different join types.

  Similar to eager_graph, you can use cascading of associations or
  multiple associations.

    Album.association_join(:artist, :tracks)
    Artist.association_left_join(:albums=>:tracks)

* Dataset#eager_graph_with_options has been added for model datasets.
  It currently supports a :join_type option, for overriding the type
  of join to use on a per-call basis, as well as a :limit_strategy
  option. The API is similar to eager_graph, except that the
  associations to eagerly load are passed in as a single argument,
  and it takes an options hash.

  The :limit_strategy option works similarly to the
  :eager_limit_strategy option when eagerly loading. If set to true
  and the database supports window functions, it will join the
  current dataset to a subquery that uses a window function to
  correctly restrict the join to only those objects that fall within
  the association's limit/offset.

  The :limit_strategy option is not on by default. It is possible for
  it to perform significantly worse than the default strategy (which
  uses array slicing in ruby). The :limit_strategy significantly
  changes the SQL used, and can change the results of the query if
  any filters/orders related to the association are used.

  It's recommended you only use the :limit_strategy option if you are
  experiencing a bottleneck and you have benchmarked that it is
  faster and still produces the desired results.

    Artist.eager_graph_with_options(:first_10_albums,
                                    :limit_strategy=>true)
    # SELECT artists.id, artists.name,
    #   first_10_albums.id AS first_10_albums_id,
    #   first_10_albums.name AS first_10_albums_name,
    #   first_10_albums.artist_id,
    #   first_10_albums.release_date
    # FROM artists
    # LEFT OUTER JOIN (
    #   SELECT id, name, artist_id, release_date
    #   FROM (
    #     SELECT *, row_number() OVER
    #       (PARTITION BY albums.artist_id ORDER BY release_date)
    #       AS x_sequel_row_number_x
    #     FROM albums
    #   ) AS t1 WHERE (x_sequel_row_number_x <= 10)
    # ) AS first_10_albums ON (first_10_albums.artist_id = artists.id)

* Dataset#full_text_search on PostgreSQL now supports :plain and
  :phrase options. :plain takes the search terms as a single string,
  and searches for rows where all terms are used. :phrase is similar
  to :plain, but also adds a substring search to ensure that the
  string given appears verbatim in the text.

* A :graph_order association option has been added, for using a
  different order when using eager_graph. This is mostly designed for
  cases where :order should be qualified in other cases, but using a
  qualification breaks eager_graph because the correct qualifier is
  not known until runtime.

* SQL::AliasedExpression#alias has been added as an alias for #aliaz.

= Other Improvements

* Sequel will now automatically use an eager limit strategy for *_one
  associations that use an :order option. For associations that are
  truly one-to-one, an :order option is not needed, so it only makes
  sense to have an :order option if the association could
  theoretically return multiple results (in which case an eager limit
  strategy is helpful).

* The queries that Sequel uses to filter by associations when those
  associations have conditions are now simpler and easier for the
  database to execute.

* The queries that Sequel uses for dataset associations now handle
  cases where unqualified identifiers were used in the receiving
  dataset that would be made ambiguous by a join.

* A limit strategy is now used when filtering by associations if the
  association has a limit and the database supports window functions.
  This allows Sequel to set up a correct filter in such cases.
    Artist.where(:first_10_albums=>Album[1]).all
    # SELECT *
    # FROM artists
    # WHERE (artists.id IN (
    #   SELECT albums.artist_id
    #   FROM albums
    #   WHERE ((albums.artist_id IS NOT NULL) AND (albums.id IN (
    #     SELECT id FROM (
    #       SELECT albums.id, row_number() OVER
    #         (PARTITION BY albums.artist_id ORDER BY release_date)
    #         AS x_sequel_row_number_x
    #       FROM albums
    #     ) AS t1
    #     WHERE (x_sequel_row_number_x <= 10)
    #   )) AND (albums.id = 1))))

* A limit strategy is now used in the dataset_associations plugin if
  the association has a limit and the database supports window
  functions. This makes the resulting datasets return correct
  results.

    Artist.first_10_albums
    # SELECT *
    # FROM albums
    # WHERE ((albums.artist_id IN (
    #   SELECT artists.id FROM artists)
    # ) AND (albums.id IN (
    #   SELECT id FROM (
    #     SELECT albums.id, row_number() OVER
    #       (PARTITION BY albums.artist_id ORDER BY release_date)
    #       AS x_sequel_row_number_x
    #     FROM albums
    #   ) AS t1
    #   WHERE (x_sequel_row_number_x <= 10)
    # )))
    # ORDER BY release_date

* You can now pass symbols with embedded qualifiers or aliases, as
  well as SQL::Identifier, SQL::QualifiedIdentifier, and
  SQL::AliasedExpression objects as the first argument to
  Dataset#graph.

* The nested_attributes plugin now automatically handles presence
  validations on foreign keys when creating associated objects. It
  now sets the foreign key value (or a placeholder value) before
  validating such objects.

* Offsets on *_one associations are now respected when using
  eager_graph.

* Eager graphing *_many associations with offsets no longer breaks if
  there are no associated results.

* Database#register_array_type in the pg_array extension now works
  correctly if there is no existing scalar conversion proc for the
  type.

* Unique, foreign key, and not null constraint violations are now
  recognized correctly on SQLite 3.8.2+.

* The odbc adapter now returns fractional seconds in timestamps.

* The odbc/mssql adapter now inputs timestamps with 3 decimal places.

= Backwards Compatibility

* The private Model.apply_window_function_eager_limit_strategy method
  has been removed.

sequel-5.63.0/doc/release_notes/4.9.0.txt

= Performance Enhancements

* Dataset::PlaceholderLiteralizer has been added as an optimization
  framework. This allows you to record changes to a given dataset
  using placeholder arguments, and later quickly execute the query,
  providing values for the placeholders.

  This is similar in idea to prepared statements, except that the SQL
  for each query can change depending on the values for the
  placeholders.

  Using this optimization framework, generating the SQL for a query
  is about 3x faster, and since SQL generation time is a significant
  portion of total time for simple queries, simple queries can
  execute up to 50% faster.

  There are two APIs for this optimization framework. There is a
  lower level dataset API:
      loader(DB[:items]) do |pl, ds|
        ds.where(:id=>pl.arg).exclude(:name=>pl.arg).limit(1)
      end

    loader.first(1, "foo")
    # SELECT * FROM items WHERE ((id = 1) AND (name != 'foo')) LIMIT 1

    loader.first([1, 2], %w"foo bar")
    # SELECT * FROM items WHERE ((id IN (1, 2)) AND
    #   (name NOT IN ('foo', 'bar'))) LIMIT 1

  There is also a higher level model API (Model.finder):

    class Item < Sequel::Model
      # Given class method that returns a dataset
      def self.by_id_and_not_name(id, not_name)
        where(:id=>id).exclude(:name=>not_name)
      end

      # Create optimized method that returns first value
      finder :by_id_and_not_name
    end

    # Call optimized method
    Item.first_by_id_and_not_name(1, 'foo')
    # SELECT * FROM items WHERE ((id = 1) AND (name != 'foo')) LIMIT 1

  Model.finder defaults to creating a method that returns the
  first matching row, but using the :type option you can create
  methods that call each, all, or get.  There is also an option to
  choose the method name (:name), as well as one to specify the
  number of arguments to use if the method doesn't take a fixed
  number (:arity).

  Finally, Model.find, .first, and .first! now automatically use
  an optimized finder if given a single argument.  Model.[] uses
  an optimized finder if given a single hash, and Model.[],
  .with_pk, and .with_pk! use an optimized finder if the model has
  a composite primary key.  In all of these cases, these methods
  are about 50% faster than before.

* The pure-ruby PostgreSQL array parser that ships with Sequel has
  been replaced with a strscan-based parser.  This parser avoids
  O(n^2) performance for arrays with multibyte strings, and in
  general is much faster.  Parsing an array with a single string
  with 100,000 multibyte characters is about 1000x faster, and now
  about half the speed of the C implementation in sequel_pg.

* Dataset#paged_each now has a :strategy=>:filter option that
  dramatically improves performance, especially if the columns
  being ordered by are indexed.

  Unfortunately, there are enough corner cases to this approach
  that it cannot be used by default.  At the least, the dataset
  needs to be selecting the columns it is ordering by, not
  aliasing the columns it is ordering by in the SELECT clause, not
  have NULLs in any of the columns being ordered by, and not
  itself use a limit or offset.

  If you are ordering by expressions that are not simple column
  values, you can provide a :filter_value option proc that takes
  the last retrieved row and array of order by expressions, and
  returns an array of values in the last retrieved row for those
  order by expressions.

* In the postgres adapter, Dataset#paged_each now automatically
  uses a cursor for improved performance.

* In the mysql2 adapter, Dataset#paged_each now automatically uses
  streaming for improved performance, if streaming is supported.

* Dataset#with_sql_{each,all,first,single_value,insert,update}
  have been added.  These methods take specific SQL and execute it
  on the database, returning the appropriate value.  They are
  significantly faster than the previous approach of
  with_sql(SQL).{each,all,first,single_value,insert,update}, as
  they don't require cloning the dataset.

= New Features

* Database#create_join_table! and #create_join_table? have been
  added, for consistency with #create_table! and #create_table?.

* A :hold option has been added to Dataset#use_cursor in the
  postgres adapter, which uses WITH HOLD in the query, allowing
  for usage of the cursor outside the enclosing transaction.  When
  :hold is used, Sequel does not automatically use a transaction
  around the cursor call.
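
  For example, something like the following could be used to
  process a large table in batches outside of a wrapping
  transaction (a sketch; the table name and batch size are
  illustrative):

    DB[:huge_table].use_cursor(:rows_per_fetch=>1000, :hold=>true).each do |row|
      # process row; no transaction is opened around the iteration
    end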

* Dataset#where_current_of has been added to the postgres adapter,
  for updating rows based on a cursor's current position.  This
  can be used to update a large dataset where new values depend on
  some ruby method, without keeping all rows in memory.

    ds = DB[:huge_table]
    ds.use_cursor(:rows_per_fetch=>1).each do |row|
      ds.where_current_of.update(:column=>ruby_method(row))
    end

* A current_datetime_timestamp extension has been added, for
  creating Time/DateTime instances that are literalized as
  CURRENT_TIMESTAMP.  When the dataset uses this extension, models
  that use the touch and timestamps plugins will use
  CURRENT_TIMESTAMP for the timestamps.

* The jdbc adapter now supports a :driver option, useful when
  Sequel doesn't have direct support for the underlying driver,
  and where java.sql.DriverManager.getConnection does not work
  correctly due to Java class loading issues.

= Other Improvements

* Multiple corner cases in Dataset#eager_graph have been fixed.

* Calling Dataset#columns when using the eager_each plugin no
  longer triggers eager loading.

* Database#column_schema_to_ruby_default is now a public method in
  the schema_dumper extension.

* When validating associated objects for one_to_many and
  one_to_one associations in the nested_attributes plugin, don't
  remove column values if the association's foreign key is the
  associated model's primary key.

* On PostgreSQL, Dataset#disable_insert_returning has been added
  back.  This disables the automatic use of RETURNING for INSERTs
  for the dataset.  This is necessary in cases where INSERT
  RETURNING doesn't work, such as PostgreSQL <8.2 (or PostgreSQL
  variants that forked before 8.2), or when using partitioning
  with trigger functions, or conditional rules.

  Note that if you use disable_insert_returning, insert will not
  return the autoincremented primary key.  You need to call
  currval or lastval manually using the same connection to get the
  value, or use nextval to get the value to use before inserting.

* The pg_array extension now uses the correct database type when
  typecasting values for smallint, oid, real, character, and
  varchar arrays.  Previously, Sequel did not use the correct
  database type in some cases (e.g. text[] for a varchar[]), which
  resulted in errors if the value was used in a filter expression.

* Additional unique constraint violations are now recognized on
  SQLite.

* Check constraint violations are now recognized on SQLite
  >=3.8.2.

* Adapters that emulate bitwise operators now do so using an
  append only design, similar to how all other queries are built
  in Sequel.

= Backwards Compatibility

* In some cases Sequel no longer adds superfluous parentheses when
  constructing SQL strings.  If you are testing for specific SQL,
  this can cause test failures.

* The pg_array extension no longer recognizes the :typecast_method
  option when registering an array type.  The option allowed reuse
  of an existing typecast method, but as that results in an
  incorrect type at the database level, the option was
  fundamentally broken.

* The internals of the PostgreSQL array parser have changed.  If
  you were relying on them, you'll need to update your code.

* The Dataset#complex_expression_arg_pairs private method now
  returns nested expression objects instead of an already
  literalized string in some cases.  Custom adapters that call
  this method will probably need to be changed.  It's recommended
  that such adapters switch to using the new
  Dataset#complex_expression_emulate_append method if possible.

sequel-5.63.0/doc/release_notes/5.0.0.txt

= Major Changes

* Datasets are now frozen by default.  Since Sequel's inception,
  datasets have used a method-chaining API that returned modified
  copies, but previously they still supported direct mutation.
  Now, datasets are always frozen and cannot be mutated.  This
  allows many additional default optimizations related to caching,
  and provides greater thread safety.

    ds = DB[:table]

    # Before
    ds.row_proc = lambda{|h| h}

    # Now
    ds = ds.with_row_proc(lambda{|h| h})

* Symbol splitting to create qualified and/or aliased identifiers
  is now disabled by default.  While symbol splitting allowed for
  shorter code, it was not obvious and caused significant issues
  when using column names with embedded double or triple
  underscores.  Sequel now offers many ways to create qualified
  and/or aliased identifiers.

    # Before
    :table__column # "table"."column"

    # Now
    :table__column # "table__column"
    Sequel[:table][:column] # "table"."column"

    # To get back historical behavior
    Sequel.split_symbols = true

* Sequel no longer allows the use of plain ruby strings as SQL
  code fragments in the dataset filtering methods, as that makes
  it easier to introduce SQL injection vulnerabilities.  You can
  use Sequel.lit to create literal strings (SQL code fragments),
  which makes it easier to do security auditing of applications
  using Sequel.

    # Before
    DB[:table].where("column = 1").all

    # Now
    DB[:table].where(Sequel.lit("column = 1")).all
    # or better
    DB[:table].where(column: 1).all

    # To get back historical behavior
    DB.extension :auto_literal_strings

= Backwards Compatibility

* All adapters, extensions, plugins, features, and constants
  deprecated in 4.49.0 have been removed.  Before upgrading to
  Sequel 5.0.0, upgrade to 4.49.0 and fix all deprecation
  warnings.

* Support for ruby 1.8.7 has been dropped, the minimum ruby
  version is now 1.9.2.

* The {before,after,around}_validation hooks are now always called
  when saving, even if the validate: false option is used.  This
  allows you to use the before_validation hook to make changes to
  the model instance that are required before validation and
  before saving even if not validating.

* Getting column values for newly created model instances after
  insertion now happens before after_create is called, instead of
  after.

* Sequel now immediately attempts to connect to the database when
  a Database instance is created, in order to fail fast if the
  connection parameters are invalid.

* The validates_unique method in the validation_helpers plugin now
  only checks for uniqueness by default if the record is new or
  one of the related columns has been modified.

* Database schema modification methods and schema generator
  methods now return nil instead of some internal value.

* Many cases where Sequel used Kernel#send internally have been
  switched to Kernel#public_send so they only call public methods.

* Model association hooks are now nil instead of empty arrays by
  default.

* Internal uses of instance_eval with a block have been changed to
  instance_exec.  This allows them to be used with lambdas that
  take no arguments.

* Most internal constants are now frozen, unless there is a
  requirement that they be modified at runtime.

* The Model @was_new instance variable is now no longer set when
  saving new model instances.

* The private Sequel::Postgres::PGArray::Parser#new_entry_buffer
  method in the pg_array extension has been removed.

* Modifying Model.input_transformer_order in the input_transformer
  plugin no longer has an effect.

= New Features

* The Database#add_index :if_not_exists option is now supported on
  PostgreSQL 9.5+.

* SQL::Subscript#expression has been added to retrieve the
  expression that is subscripted.

= Other Improvements

* Threaded connection pools no longer block while new connections
  are being made.  Previously, attempting to establish a new
  connection blocked all connection pool activity until the new
  connection was made.

* Many minor performance improvements have been made.

* The class_table_inheritance plugin now raises an error during
  Model#update if a query does not modify a single row, just as
  the default Model#update does.

* ConnectionPool#size is now thread-safe in both threaded
  connection pools.  Internal callers that already have the
  connection pool mutex should switch to using #_size (a new
  private method).

* Registration of new serialization formats in the serialization
  plugin is now thread-safe.

* If transactional schema modifications are not supported, a
  savepoint will not automatically be created when adding indexes
  for new tables inside transactions.  This fixes issues when
  making schema changes inside transactions on MySQL.

* Attempting to create a prepared statement using a dataset that
  uses a delayed evaluation now raises an error, because the
  prepared statement would not respect the delayed evaluation.

* The bin/sequel -M option now uses base 10.  Previously, it used
  the Kernel#Integer default, which was base 8 if there was a
  preceding 0.

= Deprecated Features

These deprecated features will be removed in Sequel 5.1.0.

* Model.allowed_columns in the base plugin is now deprecated.  Use
  the whitelist_security plugin if you want to call it.

* Model use_after_commit_rollback class and instance accessors are
  now deprecated.

* Defining the Model#_before_validation method is now deprecated.
  You can change to using before_validation.

* The private Model.plugin_module_defined? method is now
  deprecated.

sequel-5.63.0/doc/release_notes/5.1.0.txt

= Improvements

* Database#copy_into in the jdbc/postgresql adapter now works
  correctly when using multibyte characters in strings.

* The alter_table add_foreign_key method is now reversible when
  the :foreign_key_constraint_name option is used.

* The jdbc/h2 and jdbc/hsqldb adapters now respect the
  :foreign_key_constraint_name option.

* Calling Model.freeze on an already frozen model no longer raises
  an error.

* An unnecessary database query is now avoided when loading the
  pg_inet extension when the pg_array extension is already loaded.

* A better exception message is now used when migrating with an
  empty migration directory.

= Backwards Compatibility

* Model.allowed_columns has been removed.  Use the
  whitelist_security plugin if you want to call it.

* Model use_after_commit_rollback class and instance accessors
  have been removed.

* Support for the Model#_before_validation method has been
  removed.

* The private Model.plugin_module_defined? method has been
  removed.

sequel-5.63.0/doc/release_notes/5.10.0.txt

= New Features

* Ruby 2.6+ endless ranges are now supported as condition
  specifier values, using a >= operator for them:

    DB[:t].where(c: 1...)
    # SELECT * FROM t WHERE (c >= 1)

* Ruby 2.6+ endless ranges are now supported in the pg_range
  extension:

    DB[:t].where(id: 1).update(r: 1...)
    # UPDATE t SET r = '[1,)' WHERE (id = 1)

* The :include option when creating indexes is now supported on
  PostgreSQL 11, specifying additional columns to include in the
  index without indexing them.  This is useful to allow index only
  scans in additional cases.

* The :tablespace option is now supported when creating tables,
  indexes, and materialized views on PostgreSQL.

* The list plugin now supports a :top option, which can be used to
  specify the top of the list.  The default value for the top of
  the list is 1, but using this option you can make the top of the
  list be 0.

= Other Improvements

* In the pg_array_associations plugin, filtering by associations
  for many_to_pg_array associations now works correctly on
  PostgreSQL 11.  Previously it did not work on PostgreSQL 11 due
  to new restrictions on using set returning functions in the
  SELECT list.

* When setting the value of a column to the same value the column
  already has, for a new model object that has not yet been
  persisted, where the column is used as the foreign key for at
  least one many_to_one association, do not clear any related
  associations from the associations cache.

* In the pg_array extension, if there are separate conversion
  procs for timetz and time types, the conversion proc for the
  timetz[] type now correctly uses the conversion proc for the
  timetz type to convert scalar values, instead of the conversion
  proc for the time type.

* Empty arrays and hashes are now correctly handled in
  Dataset#{first,where_all,where_each,where_single_value} when a
  cached placeholder literalizer is used.

* In the tree plugin,
  Model#{ancestors,descendants,self_and_siblings} now work
  correctly when custom parent/children association names are
  used.

* The inner loop of the postgres adapter row fetching code is now
  2-3% faster.

* When using the postgres adapter with pg-0.18+, set a
  type_map_for_queries for the connection to allow it to handle
  input type casts for Integer, Float, TrueClass, and FalseClass
  values without allocating strings.

* SQLTime.parse (and therefore Sequel.string_to_time) now respects
  the SQLTime.date and Sequel.application_timezone settings.

* The jdbc/postgresql adapter now correctly parses timetz types.

* On JRuby 9.2.0.0, when handling BC timestamps without timezones
  in the pg_extended_date_support extension, assume local time and
  not UTC time if the database timezone is not specified and
  Sequel.datetime_class is Time.

* Errors indicating that a MySQL database is in read-only mode are
  now treated as disconnect errors in the mysql and mysql2
  adapters, for better behavior in failover scenarios.

* Sequel::Model datasets now support the use of IN/NOT IN
  operators where the second argument for the operator (the right
  hand side) is a set returning function.  Previously, the
  Sequel::Model code assumed the right hand side of an IN/NOT IN
  operator was a dataset or array, since those are the only values
  where Sequel will automatically create such an operator.

* Sequel no longer loads the strscan library in the pg_array
  extension if it is not necessary because the parser from
  sequel_pg is used.

sequel-5.63.0/doc/release_notes/5.11.0.txt

= New Features

* Sequel now supports more window frame specification types when
  using window functions.  You can now provide the window frame
  specification as a hash, and Sequel will format the correct SQL.
  Specifically, this adds support for RANGE and GROUPS, numeric
  offsets, and EXCLUDE on a database that supports it (e.g.
  PostgreSQL 11+).  Examples:

    DB[:albums].select{function(c1).over(:partition=>c2, :order=>:c3,
      :frame=>{:type=>:range, :start=>1, :end=>1})}
    # SELECT function(c1) OVER (PARTITION BY c2 ORDER BY c3
    #   RANGE BETWEEN 1 PRECEDING AND 1 FOLLOWING) FROM albums

    DB[:albums].select{function(c1).over(:partition=>c2, :order=>:c3,
      :frame=>{:type=>:groups, :start=>[2, :preceding],
               :end=>[1, :preceding]})}
    # SELECT function(c1) OVER (PARTITION BY c2 ORDER BY c3
    #   GROUPS BETWEEN 2 PRECEDING AND 1 PRECEDING) FROM albums

    DB[:albums].select{function(c1).over(:partition=>c2, :order=>:c3,
      :frame=>{:type=>:range, :start=>:preceding, :exclude=>:current})}
    # SELECT function(c1) OVER (PARTITION BY c2 ORDER BY c3
    #   RANGE UNBOUNDED PRECEDING EXCLUDE CURRENT ROW) FROM albums

* The SQLite 3.24+ ON CONFLICT clause to INSERT is now supported.
  This support is very similar to the PostgreSQL support for the
  same feature, also known as UPSERT (UPDATE if the row already
  exists, INSERT if it does not).  This support is different than
  the previous support for INSERT ON CONFLICT REPLACE (also known
  as INSERT OR REPLACE), but it uses the same method name in order
  to be compatible with the PostgreSQL support.  The new syntax
  requires passing a hash to Dataset#insert_conflict.  Examples:

    DB[:table].insert_conflict({}).insert(a: 1, b: 2)
    # INSERT INTO TABLE (a, b) VALUES (1, 2)
    # ON CONFLICT DO NOTHING

    DB[:table].insert_conflict(target: :a).insert(a: 1, b: 2)
    # INSERT INTO TABLE (a, b) VALUES (1, 2)
    # ON CONFLICT (a) DO NOTHING

    DB[:table].insert_conflict(target: :a,
      conflict_where: {c: true}).insert(a: 1, b: 2)
    # INSERT INTO TABLE (a, b) VALUES (1, 2)
    # ON CONFLICT (a) WHERE (c IS TRUE) DO NOTHING

    DB[:table].insert_conflict(target: :a,
      update: {b: Sequel[:excluded][:b]}).insert(a: 1, b: 2)
    # INSERT INTO TABLE (a, b) VALUES (1, 2)
    # ON CONFLICT (a) DO UPDATE SET b = excluded.b

    DB[:table].insert_conflict(target: :a,
      update: {b: Sequel[:excluded][:b]},
      update_where: {Sequel[:table][:status_id] => 1}).insert(a: 1, b: 2)
    # INSERT INTO TABLE (a, b) VALUES (1, 2) ON CONFLICT (a)
    # DO UPDATE SET b = excluded.b WHERE (table.status_id = 1)

* Dataset#window for the WINDOW clause has been moved from the
  PostgreSQL-specific support to core, and has been enabled on
  MySQL 8+ and SQLAnywhere.  This allows you to specify a shared
  window specification in a query, which can be used by multiple
  window functions.

= Other Improvements

* When using the static_cache plugin, Model.first when called
  without a block and without arguments or with a single Integer
  argument now uses the cached values instead of issuing a query.

* Using set_column_default with a nil value now correctly removes
  an existing default value on MySQL when the column is NOT NULL.

* Window function support has been enabled on SQLAnywhere, since
  it works correctly.

* Dumping schema for numeric/decimal columns with default values
  now works correctly.  This was broken starting in Sequel 5.9.0
  due to changes to use BigDecimal() instead of BigDecimal.new().

* The jdbc/sqlserver adapter now works correctly on JRuby 9.2+.

* An additional check constraint violation failure message is now
  recognized on SQLite.

sequel-5.63.0/doc/release_notes/5.12.0.txt

= New Features

* An eager_graph_eager plugin has been added, which allows you to
  chain eager loads using separate queries to an existing dataset
  that uses eager_graph.

  Given the following model associations:

    Band.one_to_many :albums
    Album.one_to_many :tracks

  Let's say you want to return bands ordered by album name, and
  eagerly load those albums.  You can do that using:

    Band.eager_graph(:albums).order{albums[:name]}

  Let's say you also want to eagerly load the tracks for each
  album.  You could just add them to the eager_graph call:

    Band.eager_graph(albums: :tracks).order{albums[:name]}

  However, this bloats the result set, and you aren't ordering by
  the track information, so a join is not required.  The
  eager_graph_eager plugin allows you to specify that the tracks
  be eagerly loaded in a separate query after the eager_graph load
  of albums:

    Band.eager_graph(:albums).
      eager_graph_eager([:albums], :tracks).
      order{albums[:name]}

  eager_graph_eager's first argument is a dependency chain,
  specified as an array of symbols.  This specifies the point at
  which to perform the eager load.  The remaining arguments are
  arguments that could be passed to Dataset#eager to specify what
  dependent associations should be loaded at that point.

* A caller_logging Database extension has been added, which logs
  caller information before queries, filtering out the internal
  Sequel callers.  Example:

    DB.extension :caller_logging
    DB[:table].first
    # Logger:
    # (0.000041s) (source: /path/to/app/foo/t.rb:12 in `get_first`)
    # SELECT * FROM table LIMIT 1

  You can further filter the caller lines by setting
  Database#caller_logging_ignore to a regexp of additional caller
  lines to ignore.  This is useful if you have specific methods or
  internal extensions/plugins that you would also like to ignore,
  as they obscure the code actually making the request.

    DB.caller_logging_ignore = %r{/path/to/app/lib/plugins}

  You can also format the caller before it is placed in the
  logger, using caller_logging_formatter:

    DB.caller_logging_formatter = lambda do |caller|
      "(#{caller.sub(/\A\/path\/to\/app\//, '')})"
    end

    DB[:table].first
    # Logger:
    # (0.000041s) (foo/t.rb:12 in `get_first`) SELECT * FROM table LIMIT 1

* Database#call_procedure has been added to the postgres adapter,
  and is usable on PostgreSQL 11+ for calling procedures created
  with CREATE PROCEDURE.

    DB.call_procedure(:foo, 1, "bar")
    # CALL foo(1, 'bar')

  This method will return a hash of results if the procedure
  returns a result, or nil if it does not return a result.

= Other Improvements

* It is now possible to use Dataset#eager_graph in an eager load
  callback for associations that use join tables.  This allows you
  to eager load some associations using separate queries and other
  associations using joins.  For example:

    Band.eager(:albums=>proc{|ds| ds.eager_graph(:tracks)})

  This will load the bands in one query, and load the albums and
  tracks in a separate query using a join.  Previously, this
  construction worked only for associations that did not use join
  tables.  It now works for associations that use join tables, as
  long as existing selected columns are not removed inside the
  callback.

* The tactical_eager_loading plugin now handles automatic eager
  loading for associated objects that were created during the load
  of a dataset that uses eager_graph.  When using the plugin, the
  following code will now only execute 2 queries, instead of
  issuing a separate query for each album to get the tracks for
  the album.

    artists = Artist.eager_graph(:albums).all
    artists.each do |artist|
      artist.albums.each do |album|
        album.tracks
      end
    end

* Calling Dataset#graph with a dataset with existing selections
  where the column aliases cannot be determined automatically now
  works correctly by using a subselect.  Previously, attempting to
  do this would raise an exception.  This allows the following
  code to work:

    DB[:table].select_all(:table).select_append(expr).graph(...)

* Datasets now cache the EagerGraphLoader object that is generated
  to convert arrays of hashes into an object graph, so that
  subsequent eager loads on the same dataset do not need to
  recompute the same information.  Most EagerGraphLoader internal
  state is now frozen to prevent unintentional modification.

* Sequel.extension now loads files from gems.  Previously, it used
  Kernel.require, which does not load files from gems.

* Adapters that emulate prepared statements using literalization
  now use a placeholder literalizer and should execute
  significantly faster.  More prepared statement internal metadata
  is now frozen to prevent unintentional modification.

* Dataset#intersect, #except, and #nowait are now supported on
  MariaDB 10.3+.

* The constraint_validations extension now respects the
  constraint_validations_table setting when adding metadata for
  the constraint validations.

* In the oracle adapter, the clob prepared statement argument type
  is now mapped to the OCI8::CLOB class, allowing the use of
  Oracle procedures with clob output parameters.

* The Model.load_cache method in the static_cache plugin is now
  public.

= Backwards Compatibility

* The private Dataset#prepared_arg? method has been removed.  It
  is no longer necessary after the refactoring to the prepared
  statement code.  External adapters that currently call the
  method should be updated to no longer call the method.

sequel-5.63.0/doc/release_notes/5.13.0.txt

= New Features

* A constant_sql_override Database extension has been added,
  allowing for overriding the SQL used by constants such as
  Sequel::CURRENT_TIMESTAMP.  This can be used to force
  CURRENT_TIMESTAMP to be literalized at a particular time zone:

    DB.extension :constant_sql_override
    DB.set_constant_sql(Sequel::CURRENT_TIMESTAMP,
      "CURRENT_TIMESTAMP AT TIME ZONE 'UTC'")

* Prepared statements now support the :single_value type, which
  returns the first column value in the dataset.

    prep_stmt = DB[:table].select(:column).prepare(:single_value, :ps)
    prep_stmt.call
    # PREPARE ps AS SELECT column FROM table LIMIT 1;
    # EXECUTE ps;
    # => 42

= Other Improvements

* Dataset#from_self will no longer use a cached dataset if any
  options are given, as that can result in incorrect behavior.

* Model.all in the static_cache plugin now accepts a block,
  mirroring the API when the static_cache plugin is not used.

sequel-5.63.0/doc/release_notes/5.14.0.txt

= New Features

* The :nulls option when creating ordered expressions is now
  supported on all databases that Sequel ships support for.  For
  databases that do not support NULLS FIRST/NULLS LAST, support is
  emulated.

    ds.order(Sequel.asc(:name, :nulls=>:last))
    # When emulated:
    # ORDER BY (CASE WHEN (name IS NULL) THEN 2 ELSE 1 END), name ASC

* Model#pk_equal? has been added as a more descriptive name for
  Model#===.  Model#=== is now an alias of Model#pk_equal?.

* The roots and roots_dataset class methods in the tree plugin are
  now also available as dataset methods.
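
  For example, assuming a hypothetical Node model that uses the
  tree plugin, the methods can now be called on filtered datasets
  (a sketch; the model name and filter are illustrative):

    Node.plugin :tree
    Node.where{id > 100}.roots_dataset
    # dataset of root nodes (those with a NULL parent) among the
    # filtered rows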

= Other Improvements

* Inverting expressions using the ANY/SOME/ALL SQL operators now
  works correctly:

    # Sequel <5.14.0
    Sequel.~(:a=>Sequel.function(:any, :x))
    # "(a != any(x))"

    # Sequel >=5.14.0
    Sequel.~(:a=>Sequel.function(:any, :x))
    # "NOT (a = any(x))"

  Sequel has always tried to push inversion down to create SQL
  that is easier to reason about.  However, inversion cannot be
  pushed down if an ANY/SOME/ALL SQL operator is used, because
  that is a different type of operation that just happens to use
  the same syntax.  Sequel now avoids inversion push down for
  boolean operators where the right hand side is an SQL::Function,
  LiteralString, or SQL::PlaceholderLiteralString.

* When creating a boolean expression from a hash or array of
  pairs, if the right hand side is an unfrozen array or string,
  use a frozen copy in the expression, so that mutating the array
  or string argument later does not affect the expression.

* When using the defaults_setter plugin with the :cache option, do
  not cache values for columns without parseable defaults.  If the
  default value exists but is not parseable, caching such values
  could result in incorrect behavior if the model instance is
  saved later.

* For models with composite primary keys, Model#=== now returns
  false if any primary key value is nil, mirroring the behavior
  for the scalar primary key case.

* Model datasets no longer cache SQL if they include a subquery
  that cannot cache SQL.

* The SQL used for constraints in the constraint_validations
  extension when the :allow_nil option is used is now clearer and
  easier to understand.

* The postgres adapter no longer specifies a default port when
  using the pg driver, in order to work with configurations where
  the :service option is used in the :driver_options hash.  The pg
  driver defaults to port 5432 if no port is given, so this should
  not affect backwards compatibility.

sequel-5.63.0/doc/release_notes/5.15.0.txt

= New Features

* A :qualify_tables option has been added to the
  class_table_inheritance plugin, which will automatically qualify
  subclass tables with the same qualifier as the superclass table
  if the superclass table is qualified.

* Model#skip_validation_on_next_save! has been added, which skips
  all validation on the next save to the object, including the
  running of validation related hooks.  This method is designed
  for use only when Model#valid? is called on the object before
  saving, to avoid running validations on the object twice.  This
  method takes precedence even over an explicit validate: true
  option passed to Model#save, and as such should be used with
  care.

* The postgres adapter now supports a :conn_str Database option to
  use a PostgreSQL connection string (e.g. "host=foo port=2442")
  when connecting.  This option has preference over other
  connection related options if it is present.

= Other Improvements

* If a foreign key for a model object is changed from a nil value
  to a non-nil value, any cached associated objects related to the
  foreign key are no longer removed.  Such associated objects
  could only be set manually, and if they have been set manually,
  it is probably not a good idea to remove them automatically.

* When using the nested_attributes plugin, new *_to_many
  associated objects are no longer validated twice when saving.

* The default table alias when using the class_table_inheritance
  plugin now correctly handles qualified tables.

* A theoretical thread safety issue when assigning connections in
  the threaded connection pools has been fixed.

* Renaming columns is now supported without emulation when using
  SQLite 3.25+.

sequel-5.63.0/doc/release_notes/5.16.0.txt

= New Features

* Database#rollback_on_exit has been added, which allows you to
  rollback transactions instead of committing them when exiting
  the transaction block.  Previously, the only way to rollback a
  transaction from inside a transaction block was to raise an
  exception.  This allows you to tell Sequel to roll the
  transaction back on exit, and then use return or throw to exit
  the transaction block.

  Database#rollback_on_exit supports savepoints, including
  multiple savepoint levels, as well as canceling rollbacks:

    DB.transaction do # BEGIN
      DB.rollback_on_exit
    end # ROLLBACK

    DB.transaction do # BEGIN
      DB.transaction(savepoint: true) do # SAVEPOINT
        DB.rollback_on_exit(savepoint: true)
      end # ROLLBACK TO SAVEPOINT
    end # COMMIT

    DB.transaction do # BEGIN
      DB.transaction(savepoint: true) do # SAVEPOINT
        DB.transaction(savepoint: true) do # SAVEPOINT
          DB.rollback_on_exit(savepoint: true)
        end # ROLLBACK TO SAVEPOINT
      end # RELEASE SAVEPOINT
    end # COMMIT

    DB.transaction do # BEGIN
      DB.transaction(savepoint: true) do # SAVEPOINT
        DB.transaction(savepoint: true) do # SAVEPOINT
          DB.rollback_on_exit(savepoint: 2)
        end # ROLLBACK TO SAVEPOINT
      end # ROLLBACK TO SAVEPOINT
    end # COMMIT

    DB.transaction do # BEGIN
      DB.transaction(savepoint: true) do # SAVEPOINT
        DB.transaction(savepoint: true) do # SAVEPOINT
          DB.rollback_on_exit(savepoint: 3)
        end # ROLLBACK TO SAVEPOINT
      end # ROLLBACK TO SAVEPOINT
    end # ROLLBACK

    DB.transaction do # BEGIN
      DB.rollback_on_exit
      DB.rollback_on_exit(cancel: true)
    end # COMMIT

* Sequel now supports window functions on SQLite 3.26.0+.  SQLite
  technically supports window functions on 3.25.0+, but enabling
  window function support in Sequel opens up a code path that
  generates queries that cause older versions of SQLite to produce
  a segmentation fault.  This bug in SQLite has been fixed in
  3.26.0.

= Other Improvements

* Sequel::Model no longer overrides existing methods when defining
  getters and setters.  Historically, it only checked for existing
  method definitions for methods that could be directly expressed
  (e.g. not requiring send).  Sequel 5 broke the check for setter
  methods that could be directly expressed.  This fixes cases
  where model inheritance is used and the setter methods are
  overridden in a parent class.

* Alter table emulation now works correctly on SQLite 3.26.0+.

* The one_to_one association setter does not modify reciprocal
  associations in cases where doing so is not necessary.  This can
  fix some cases where the nested_attributes plugin is used.

* The class_table_inheritance plugin can now take advantage of the
  schema_caching extension to prevent database queries to
  determine column information when the class is created.

* The nested_attributes plugin no longer validates one_to_one
  associations twice when saving.

* The class_table_inheritance plugin :qualify_tables option now
  correctly qualifies subclasses of subclasses.

* SQL expressions that are subscripted are now wrapped in
  parentheses.

  This fixes at least the case of subscripting a function
  expression on PostgreSQL:

    DB[:t].select{array_agg(column).sql_subscript(1)}
    # SELECT (array_agg(column))[1] FROM t

* Sequel::Migrator now uses more descriptive error messages if a
  missing or empty migration directory is given.

* bin/sequel -C when converting from SQLite to another database
  type will now use 64-bit integer columns in the other database
  when the SQLite column type is integer, as SQLite supports
  storing 64-bit values in integer columns, and most other
  databases only support 32-bit values in integer columns.

= Backwards Compatibility

* The mysql adapter no longer attempts to load the mysqlplus
  driver, it now only attempts to load the mysql driver.

sequel-5.63.0/doc/release_notes/5.17.0.txt

= New Features

* An instance-level skip_auto_validations method has been added to
  the auto_validations plugin, allowing you to skip all or
  specific types of auto validations inside the block:

    model_instance.skip_auto_validations(:unique) do
      puts model_instance.valid?
    end

* A Database :preconnect_extensions option has been added.  This
  option is similar to :extensions, but the extensions are loaded
  before the :preconnect option is processed.  This allows you to
  use the server_logging extension with the :preconnect option.

* For specifying custom table aliases when using eager_graph and
  association_join, you can now use:

    Sequel[:association].as(:table_alias)

  in addition to:

    Sequel.as(:association, :table_alias)

= Other Improvements

* The ado/mssql adapter now retrieves the number of deleted or
  updated rows for a query without issuing a separate query.

* Sequel now avoids the use of Proc.new with an implicit block, as
  that feature will be deprecated starting in Ruby 2.7.

sequel-5.63.0/doc/release_notes/5.18.0.txt

= New Features

* A throw_failures plugin has been added for throwing
  ValidationFailed and HookFailed exceptions instead of raising
  them.  This can improve performance by up to 10x on JRuby and
  10-15% on CRuby.  However, you would need to modify your
  exception handling from:

    begin
      model.save
    rescue Sequel::ValidationFailed => e
      # handle failure
    end

  to:

    e = catch(Sequel::ValidationFailed) do
      model.save
    end
    if e.is_a?(Sequel::ValidationFailed)
      # handle failure
    end

  The throw_failures plugin will still work if you are not
  catching the exception, falling back to the default behavior of
  raising the exception.

* SQL::Blob.call has been added, so that SQL::Blob can be used
  directly as a callable to create a new instance, resulting in
  better performance in cases where a callable is needed.

= Other Improvements

* Type conversion in many adapters is now faster by switching from
  Proc/Method instances to using singleton call methods on plain
  objects.  This can improve performance of row fetching by up to
  10% in some cases.

* Row fetching is slightly faster in the jdbc and sqlite adapters,
  by switching from each to while.

* tzinfo 2 is now supported when using the named_timezones
  extension.  tzinfo 1 remains supported.

* The optimized Dataset#paged_each methods in the postgres and
  mysql2 adapters now support being called without a block,
  returning an Enumerator in that case, to mirror the behavior of
  the default Dataset#paged_each method.

* Sequel no longer uses flow-control exceptions in the
  connection_expiration and connection_validator extensions,
  significantly improving performance on JRuby.

* The after_initialize plugin no longer makes the argument to
  Model.call optional.

= Backwards Compatibility

* Some internal but not private constants and methods previously
  used for type conversion in adapters have been removed:

  * JDBC::Oracle.OracleDecimal
  * JDBC::Oracle.OracleClob
  * JDBC::Postgres.RubyPGArray
  * JDBC::Postgres.RubyPGHstore
  * JDBC::SqlAnywhere.SqlAnywhereBoolean
  * JDBC::SQLServer.MSSQLRubyTime
  * MySQL::TYPE_TRANSLATOR
  * Postgres::TYPE_TRANSLATOR

sequel-5.63.0/doc/release_notes/5.19.0.txt

= New Features

* A Database#rename_enum_value method has been added to the
  pg_enum extension.  It is supported on PostgreSQL 10+:

    DB.rename_enum_value(:enum_type, 'old_name', 'new_name')

= Other Improvements

* The performance of row fetching and type conversion in the
  sqlanywhere adapter has been improved.

* The performance of row fetching in the sqlite adapter has been
  improved.

* Calling Database#drop_table now drops any constraint validations
  metadata for the table if using the constraint_validations
  extension.  However, modifying the table using
  Database#alter_table does not affect the constraint validations
  metadata.

* The sqlite adapter when used with ruby-sqlite3 1.4.0+ now uses
  SQLite extended result codes for a more accurate determination
  of specific database error types.

* Performance for typecasting to decimal and floats has been
  improved slightly.

* Performance when merging hashes has been improved slightly.

sequel-5.63.0/doc/release_notes/5.2.0.txt

= New Features

* A pg_extended_date_support extension has been added.  This
  extension adds support for infinite and BC dates/timestamps on
  PostgreSQL.  The postgres adapter already had a
  convert_infinite_timestamps setting, but it wasn't supported in
  the jdbc/postgresql adapter and it didn't handle BC
  dates/timestamps.  Setting a non-default
  convert_infinite_timestamps setting in the postgres adapter will
  now automatically load the extension for backwards
  compatibility.

  The pg_extended_date_support extension by default just fixes the
  handling of BC dates/timestamps.  To get it to handle infinite
  timestamps, you need to choose the appropriate setting for your
  application:

    DB.extension :pg_extended_date_support
    DB.convert_infinite_timestamps = :string # or :float or :nil

  This extension also enables the handling of timezone offsets
  with seconds, which is not natively supported by ruby's Time
  class in ruby <2.5.

= Improvements

* The jdbc/mysql adapter now handles smallint unsigned and integer
  unsigned column types where the value for the column is outside
  of the range of a Java short or integer.

* Sequel::Model.inherited no longer modifies an existing @dataset
  instance variable if one has already been set.  This fixes a
  regression that was introduced in Sequel 5.0.0.

sequel-5.63.0/doc/release_notes/5.20.0.txt

= New Features

* Database#after_commit and #after_rollback transaction hook
  methods now support a :savepoint option.  Using the :savepoint
  option makes the hooks savepoint-aware, so after_commit will
  only be called if all enclosing savepoints and the transaction
  are committed, and after_rollback will be called when any of the
  enclosing savepoints are rolled back (which may be before
  transaction commit/rollback).

  Examples:

    x = nil
    DB.transaction do # BEGIN
      DB.transaction(savepoint: true) do # SAVEPOINT
        DB.after_commit(savepoint: true){x = 1}
        DB.after_rollback(savepoint: true){x = 2}
        x # nil
      end # RELEASE SAVEPOINT
      x # nil
    end # COMMIT
    x # 1

    x = nil
    DB.transaction do # BEGIN
      DB.transaction(savepoint: true) do # SAVEPOINT
        DB.after_commit(savepoint: true){x = 1}
        DB.after_rollback(savepoint: true){x = 2}
        x # nil
        raise Sequel::Rollback
      end # ROLLBACK TO SAVEPOINT
      x # 2
    end # COMMIT
    x # 2

    x = nil
    DB.transaction do # BEGIN
      DB.transaction(savepoint: true) do # SAVEPOINT
        DB.after_commit(savepoint: true){x = 1}
        DB.after_rollback(savepoint: true){x = 2}
      end # RELEASE SAVEPOINT
      x # nil
      raise Sequel::Rollback
    end
    x # 2

* The pg_auto_constraint_validations plugin now supports a
  pg_auto_constraint_validation_override method for overriding the
  columns and message for a specific constraint.  This is useful
  if the database cannot determine the columns (due to the
  constraint containing a database function call), or if you would
  like to customize the message per constraint.

= Other Improvements

* The one_to_one association setter now works with models that use
  joined datasets, such as child models when using the
  class_table_inheritance plugin.

* Database#check_constraints on PostgreSQL now also includes CHECK
  constraints where the related columns are not known.  The
  :columns entry in the hash will be an empty array in such cases.
  The exclusion of such constraints in previous versions was not
  intentional, and the documentation implied that all CHECK
  constraints were returned.

* Many cases where instance_exec was previously used on model
  instances have been changed so that instance methods are defined
  and called instead.  This avoids the creation of singleton
  classes for model instances, and can significantly improve
  performance in some cases.  This affects all associations as
  well as the following plugins:

  * composition
  * hook_class_methods
  * validation_class_methods

  Other cases where instance_exec is now avoided and a different
  approach is used:

  * association_dependencies plugin
  * PlaceholderLiteralString#with_dataset

* The auto_validations plugin now works with child models when
  using the class_table_inheritance plugin.

* Database#server_version now works correctly in the mysql2
  adapter when using the MySQL driver with MariaDB 10+.

* The float unsigned type is now recognized and supported in the
  schema parser and schema_dumper extension.

sequel-5.63.0/doc/release_notes/5.21.0.txt

= New Features

* The pg_json extension now adds a Database#wrap_json_primitives
  accessor.  When set to true, JSON primitive values (string,
  number, true, false, and null) will be wrapped by delegate Ruby
  objects instead of using Ruby primitives.  This allows the
  values to round trip, so the following code will work even for
  primitive values in json_column:

    DB.extension :pg_json
    DB.wrap_json_primitives = true
    value = DB[:table].get(:json_column)
    DB[:other_table].insert(json_column: value)

  This should be enabled with care, especially in cases where
  false and null JSON values are used, as the behavior will change
  if the objects are used in a boolean context in Ruby, as only
  false and nil in Ruby are treated as false:

    # assume JSON false or null value
    value = DB[:table].get(:json_column)

    if value
      # executed if wrap_json_primitives is true
    else
      # executed by default
    end

  When typecasting input in model objects to a JSON type, string
  input will still be parsed as JSON.

  However, you can set the Database#typecast_json_strings accessor
  to true, and then string input will be considered as a JSON
  string instead of parsing the string as JSON.

  To prevent backwards compatibility issues,
  Sequel.pg_json/pg_jsonb behavior has not changed.  To support
  wrapping Ruby primitives in the delegate objects, new
  Sequel.pg_json_wrap/pg_jsonb_wrap methods have been added.
  These methods only handle the Ruby primitives, they cannot be
  used if the existing object is already a delegate object.

  As model objects always consider a nil value as SQL NULL and do
  not typecast it, if you want to explicitly set a JSON null
  value, you need to wrap it explicitly:

    model_object.json_column = Sequel.pg_json_wrap(nil)

= Other Improvements

* Sequel now supports window function options :window, :exclude,
  and :frame :type=>:groups, :start, and :end on SQLite 3.28.0+.

* The server_block extension now respects the :servers_hash
  Database option.  This makes it more similar to Sequel's default
  behavior.  However, that means by default, the server_block
  extension will default to handling unknown shards as the default
  shard, instead of raising an error for them.

* The rcte_tree plugin now disallows eager graphing of the
  ancestors and descendants associations.  Previously, eager
  graphing of these associations generated incorrect results.  It
  is not possible to eager graph these associations, but normal
  eager loading does work.

* The ado adapter's performance has been improved by using faster
  callables for type conversion and a more efficient inner loop.

* The sqlite adapter now converts a :timeout option given as a
  string to an integer.  This allows you to use the option inside
  of a connection string.

* The mysql and mysql2 adapters now recognize an additional
  DatabaseLockTimeout error.

* The jdbc/mysql adapter now works correctly when using JRuby with
  Java 11.

* The ado adapter now handles numeric values when using locales
  that use comma instead of period as the decimal separator.
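
  As an illustration of the sqlite :timeout improvement described
  above, the timeout can now be embedded directly in a connection
  string (a sketch; the database path is illustrative):

    DB = Sequel.connect('sqlite://db.sqlite3?timeout=5000')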

= Backwards Compatibility

* In the pg_json extension, the following singleton methods of
  Sequel::Postgres::JSONDatabaseMethods are now deprecated:

  * parse_json
  * db_parse_json
  * db_parse_jsonb

sequel-5.63.0/doc/release_notes/5.22.0.txt

= New Features

* Sequel now supports Ruby 2.7+ startless ranges in filters:

    DB[:table].where(:column=>(..10))
    # SELECT * FROM table WHERE (column <= 10)

    DB[:table].where(:column=>(...10))
    # SELECT * FROM table WHERE (column < 10)

  It also supports startless, endless ranges in filters, using a
  condition that is always true:

    DB[:table].where(:column=>(nil..nil))
    # SELECT * FROM table WHERE (1 = 1)

* Sequel now supports startless ranges in the pg_range extension:

    DB.extension :pg_range

    DB[:table].insert(:column=>(..10))
    # INSERT INTO "table" ("column") VALUES ('[,10]') RETURNING "id"

    DB[:table].insert(:column=>(...10))
    # INSERT INTO "table" ("column") VALUES ('[,10)') RETURNING "id"

    DB[:table].insert(:column=>(nil..nil))
    # INSERT INTO "table" ("column") VALUES ('[,]') RETURNING "id"

* Sequel now supports a :materialized option in Dataset#with on
  PostgreSQL 12+, to control the inlining of common table
  expressions:

    DB[:t].with(:t, DB[:t2], :materialized=>false)
    # WITH "t" AS NOT MATERIALIZED (SELECT * FROM "t2")
    # SELECT * FROM "t"

    DB[:t].with(:t, DB[:t2], :materialized=>true)
    # WITH "t" AS MATERIALIZED (SELECT * FROM "t2")
    # SELECT * FROM "t"

= Other Improvements

* Database#primary_key_sequence now works for tables without
  serial sequences on PostgreSQL 12+.

* Dataset#multi_insert and #import with the return: :primary_key
  option on Microsoft SQL Server now work correctly if the dataset
  uses a row_proc (e.g. for model datasets).

sequel-5.63.0/doc/release_notes/5.23.0.txt

= New Features

* An insert_conflict plugin has been added for automatically
  handling constraint conflicts when saving new model instances.
  It is supported on PostgreSQL 9.5+ and SQLite 3.24.0+.

    Album.new(name: 'Foo', copies_sold: 1000).
      insert_conflict(
        target: :name,
        update: {copies_sold: Sequel[:excluded][:copies_sold]}
      ).
      save

* On Microsoft SQL Server, the Database :ansi option has been
  added, which sets the following ANSI related options:

  * ANSI_NULLS
  * ANSI_PADDING
  * ANSI_WARNINGS
  * ANSI_NULL_DFLT_ON
  * QUOTED_IDENTIFIER
  * CONCAT_NULL_YIELDS_NULL

= Other Improvements

* Sequel.datetime_class = Time is now supported when using the
  named_timezones extension.  For backwards compatibility, the
  named_timezones extension still sets Sequel.datetime_class =
  DateTime.  When using Ruby 2.6+, the Time instances have the
  timezone set on them using Ruby 2.6+'s timezone support, but
  basic support works correctly in earlier versions of Ruby.

* On Microsoft SQL Server, Sequel now handles parsing schema for
  tables in another database on the same server or in a database
  on a linked server.

* The pg_json extension now correctly handles subclasses of core
  classes when wrapping objects.  This stopped working in Sequel
  5.21.0, when support for wrapping JSON primitives was added.

* Sequel now works around a couple of bugs in jdbc-sqlite
  3.27.2.1, allowing schema parsing and foreign key parsing to
  work.

* Dataset#execute* private methods now respect an explicitly given
  :server option, fixing Dataset#paged_each in the postgres
  adapter when using sharding.

* Timezone offsets are now handled correctly when typecasting an
  array or hash to datetime when Sequel.datetime_class = Time.

* Sequel now avoids errors when parsing schema when using the mock
  SQLite adapter.

* A minor thread-safety issue has been fixed in the
  named_timezones extension.

sequel-5.63.0/doc/release_notes/5.24.0.txt

= New Features

* A :cache_file plugin option has been added to the
  pg_auto_constraint_validations plugin.  This option specifies a
  file to use to cache the metadata the plugin uses, so the plugin
  does not need to run 5 queries per model at startup to load the
  metadata.  This can dramatically improve startup time when using
  the plugin with a large number of models.

  To create the metadata file, load the plugin into Sequel::Model
  (or whatever class you are using as the base class for your
  model classes) with the :cache_file option, and after loading
  all of the subclasses of that class, run:

    Sequel::Model.dump_pg_auto_constraint_validations_cache

  As when using the schema_caching and index_caching extensions,
  it is up to the user to ensure that the cached metadata matches
  the current database schema.  Sequel does no checking of this,
  as checking would take more time, and the point of this plugin
  is to improve startup performance.

* A static_cache_cache plugin has been added.  This plugin allows
  for caching rows for models using the static_cache plugin.  This
  prevents the need to issue a query at model creation time to get
  the rows.

  This plugin should be loaded into Sequel::Model (or whatever
  class you are using as the base class for your model classes)
  before loading the models using the static_cache plugin.  To
  create the metadata file, after all subclasses of that class
  have been loaded, run:

    Sequel::Model.dump_static_cache_cache

* :unique_deferrable and :primary_key_deferrable column options
  are now supported on PostgreSQL 9+ and Oracle.  This allows you
  to create deferrable unique and primary key column constraints.
  You could already create deferrable table constraints using the
  :deferrable option to the primary_key and unique methods.

* A :generated_always_as column option is now supported on
  PostgreSQL 12+, for creating generated columns.

* A Database#skip_logging? private method has been added.  This is
  designed for use in extensions, to force log timing even when no
  loggers are configured.

= Other Improvements

* Sequel no longer sets the :host option to localhost by default
  in the mysql2 adapter.  This prevents Sequel from overriding a
  host specified in the defaults_file.

* All database array types are converted to Ruby arrays in the
  jdbc adapter.  Previously, this was only done in the
  jdbc/postgresql subadapter.

sequel-5.63.0/doc/release_notes/5.25.0.txt

= New Features

* An association_multi_add_remove plugin has been added.  This
  plugin adds a shortcut for adding or removing multiple
  associated objects in a single method call:

    Artist.plugin :association_multi_add_remove
    Artist.one_to_many :albums

    Artist[1].add_albums([Album[2], Album[3]])
    Artist[1].remove_albums([Album[4], Album[5]])

  It also offers a setter method, which will add and remove
  associated objects as necessary:

    Artist[1].albums = [Album[3], Album[4]]

= Other Improvements

* The sharding plugin now integrates with the server_block
  extension.  This makes it so that if you retrieve a model
  instance inside a with_server block, saving the model instance
  will save it back to the shard from which it was retrieved.
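
  For example, something like the following is now possible (a
  sketch; the shard name and model are illustrative):

    DB.extension :server_block
    Album.plugin :sharding

    DB.with_server(:shard1) do
      album = Album.first       # retrieved from :shard1
      album.update(:name=>'RF') # saved back to :shard1
    end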

* Setting a default for a column on Microsoft SQL Server now works
  correctly if the column already has a default.

* Sequel::SQL::NumericMethods#coerce no longer raises
  NoMethodError if the super method is not defined.  This fixes
  some cases when comparing Date/DateTime instances to Sequel
  objects.

* The csv_serializer plugin now avoids keyword argument separation
  issues on Ruby 2.7+.

sequel-5.63.0/doc/release_notes/5.26.0.txt

= New Features

* Support for SQL/JSON path expressions has been added to the
  pg_json_ops extension.  These are supported in PostgreSQL 12+.
  Examples:

    j = Sequel.pg_jsonb_op(:jsonb_column)

    j.path_exists('$.foo')      # (jsonb_column @? '$.foo')
    j.path_match('$.foo')       # (jsonb_column @@ '$.foo')

    j.path_exists!('$.foo')     # jsonb_path_exists(jsonb_column, '$.foo')
    j.path_match!('$.foo')      # jsonb_path_match(jsonb_column, '$.foo')

    j.path_query('$.foo')       # jsonb_path_query(jsonb_column, '$.foo')
    j.path_query_array('$.foo') # jsonb_path_query_array(jsonb_column, '$.foo')
    j.path_query_first('$.foo') # jsonb_path_query_first(jsonb_column, '$.foo')

* The nested_attributes method in the nested_attributes plugin now
  supports a :require_modification option, which can override the
  default require_modification setting for the nested objects.
  This can be useful to avoid errors if multiple requests are
  submitted simultaneously to delete the same nested row.

= Other Improvements

* The dirty plugin now works correctly with the typecast_on_load
  plugin.

* Sequel::Postgres::PGRange#hash has been added to the pg_range
  extension, allowing PGRange instances to be usable as hash keys.

* Table aliases are now supported for single table INSERT
  statements on PostgreSQL 9.5+, which can make some
  insert_conflict usage easier.

* Two more foreign key constraint violation types are now
  recognized on MySQL 8.0.13+.

sequel-5.63.0/doc/release_notes/5.27.0.txt

= New Features

* Sequel::DEFAULT has been added as a constant for the DEFAULT
  expression, useful in inserts and especially updates:

    DB[:a].where(:id=>1).update(:b=>Sequel::DEFAULT)
    # UPDATE "a" SET "b" = DEFAULT WHERE "id" = 1

* SQL::Function#filter for filtered aggregate functions is now
  supported on all databases.  On databases not supporting it
  natively (all except PostgreSQL 9.4+ and SQLite 3.30+), a CASE
  statement is used to emulate the support.

= Other Improvements

* NULLS FIRST/LAST is now used without emulation on SQLite 3.30+.

* The pg_enum extension now works correctly on PostgreSQL 8.3-9.0.

* Postgres::ArrayOp#join in the pg_array_ops extension now works
  correctly on PostgreSQL <9.1.

sequel-5.63.0/doc/release_notes/5.28.0.txt

= New Features

* An any_not_empty extension has been added, for making
  Dataset#any? without a block be the same as !empty?.  This can
  result in a much faster database query.

* An exclude_or_null extension has been added, adding a
  Dataset#exclude_or_null method that returns rows where the given
  expression is false or NULL.  This extension is supported on
  PostgreSQL, SQLite, MySQL, H2, and HSQLDB.

= Other Improvements

* When using the jdbc/postgresql adapter, calling with_fetch_size
  on a dataset will emit a warning.  This is because the driver
  will ignore the setting.
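
  As a brief illustration of the any_not_empty extension described
  above (a sketch; the exact query may differ by database):

    DB[:table].extension(:any_not_empty).any?
    # issues a query similar to: SELECT 1 AS one FROM table LIMIT 1
    # instead of retrieving rows and checking them in ruby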
sequel-5.63.0/doc/release_notes/5.29.0.txt000066400000000000000000000015701434214120600177740ustar00rootroot00000000000000

= New Features

* An empty_failure_backtraces plugin has been added for using empty
  backtraces for ValidationFailed and HookFailed exceptions.  In
  many cases, these exceptions are automatically handled (e.g. web
  form submission handling to display appropriate error pages), and
  using empty backtraces is 10-15x faster on JRuby 9.2.7.0+.

* Dataset#json_serializer_opts has been added to the json_serializer
  plugin.  This allows setting default options on a per-Dataset
  basis for all Dataset#to_json calls.

= Other Improvements

* Another disconnect error is now recognized in the tinytds adapter.

* Using Sequel with the CRuby master branch (what will be Ruby 3)
  now works by supporting a second argument for
  Dataset#initialize_clone.

* Sequel now avoids a warning in verbose mode when using the
  postgres adapter, a recent version of ruby-pg, and bound
  variables.

sequel-5.63.0/doc/release_notes/5.3.0.txt000066400000000000000000000106351434214120600177060ustar00rootroot00000000000000

= New Features

* An :extensions Database option is now supported, which will load
  the named extensions into the Database before any connections are
  initiated:

    DB = Sequel.connect('mock:///', :extensions=>[:error_sql, :synchronize_sql])
    DB = Sequel.connect('mock:///?extensions=error_sql,synchronize_sql')

* A :connect_sqls Database option is now supported, which will issue
  the given queries on all new connections:

    DB = Sequel.connect('postgres:///', :connect_sqls=>[
      'SET random_page_cost = 1.0',
      "SET default_tablespace = 'foo'"
    ])

* DatasetModule#reverse has been added for simpler use of descending
  orders:

    class Foo < Sequel::Model
      dataset_module do
        reverse :newest_first, :created_at
      end
    end

    Foo.newest_first.first(10)

* A synchronize_sql extension has been added.  This extension checks
  out a connection around SQL string creation, and is useful in the
  cases where escaping values in the query requires a connection and
  a large number of values need to be escaped.

* The following features are now supported on MariaDB 10.2+:

  * Common table expressions.

  * Window functions.

  * Dropping CHECK constraints.  Older versions of MariaDB/MySQL
    ignored CHECK constraints that were added, and Sequel did not
    attempt to filter them out, so Sequel did not require changes to
    add CHECK constraints.  MariaDB 10.2 CHECK constraints work
    correctly with Sequel's constraint_validations extension/plugin.

  * Raising CHECK constraint violations as
    Sequel::CheckConstraintViolation instances.

  * Recognizing curdate() as Sequel::CURRENT_DATE when used as the
    default value for a date column.

* Date::Infinity values are now supported in the
  pg_extended_date_support extension:

    DB.convert_infinite_timestamps = :date

  This returns infinite dates/timestamps as Date::Infinity
  instances, and literalizes Date::Infinity instances correctly.

= Improvements

* Database#reset_primary_key_sequence now works correctly on
  PostgreSQL 10.

* If a commit or rollback raises an exception when using the
  postgres adapter, Sequel will check the connection's current
  transaction status and only send another rollback if the
  connection is currently inside a transaction.  This fixes a
  warning that is issued in most cases if a commit or rollback
  fails.

* The jdbc/postgresql adapter now forces JDBC PreparedStatement
  instances created by Dataset#call to never be prepared server
  side, working around a caching issue in the jdbc-postgres driver
  in versions greater than 9.4.1200.
* Database#indexes will no longer return indexes which are in the
  process of being dropped on PostgreSQL 9.3+.  Additionally,
  Database#indexes will now return indexes that have indcheckxmin
  set.  The previous removal of indexes with indcheckxmin set is
  more likely to cause false negatives than correctly remove indexes
  not yet valid.

* Common table expressions are no longer hoisted from subqueries on
  SQLite.  They are still hoisted from queries used in
  UNION/INSERT/EXCEPT, since SQLite does not support common table
  expressions at that level.

* On Microsoft SQL Server, using an INSERT query with a subquery
  that uses a common table expression now hoists the common table
  expression from subquery level to main query level, allowing such
  queries to work.

* An additional disconnect error is now recognized in the oracle
  adapter.

* bin/sequel now adds a Database logger before the initial
  connection is made, allowing you to see any connection setup
  statements issued to the database.

= Backwards Compatibility

* Calling a filtering method with no argument and a virtual row
  block that returns nil on a dataset with no existing filter is
  deprecated in this version and will emit a warning.  The behavior
  in this version remains the same, where the dataset is not
  modified.  The behavior will change in Sequel 5.4.0 so that a
  WHERE NULL filter will be added in that case, instead of the
  filter being ignored, so that the behavior is similar to calling
  the filtering method with a nil argument.

    # Sequel 5.3.0
    DB[:a].where{nil} # SELECT * FROM a

    # Sequel 5.4.0
    DB[:a].where{nil} # SELECT * FROM a WHERE NULL

* Support for PostgreSQL <8.1 has been dropped from
  Database#indexes.  Sequel's PostgreSQL support requires >=8.2 for
  Dataset#insert to work, so it doesn't make sense to support
  earlier versions in other cases.

sequel-5.63.0/doc/release_notes/5.30.0.txt000066400000000000000000000011421434214120600177570ustar00rootroot00000000000000

= New Features

* Sequel now supports generated columns on SQLite 3.31+ using the
  :generated_always_as and :generated_type options.  Example:

    DB.create_table(:table) do
      primary_key :id
      Numeric :amount, null: false
      Numeric :tax, null: false
      Numeric :total, generated_always_as: (Sequel[:amount] + :tax)
    end

= Other Improvements

* The Database#transaction :before_retry option is now called before
  retrying the transaction even when the :num_retries option is set
  to nil.

* The gem no longer ships with specs and older release notes,
  reducing the gem size by over 40%.

sequel-5.63.0/doc/release_notes/5.31.0.txt000066400000000000000000000116661434214120600177720ustar00rootroot00000000000000

= New Features

* A forbid_lazy_load plugin has been added to forbid the lazy
  loading of model associations if the current object was retrieved
  with other objects.  This plugin helps detect N+1 query issues.
  This plugin will raise an error if a lazy load is detected in such
  cases:

    Album.plugin :forbid_lazy_load
    Album.one_to_many :tracks

    Album.each do |album|
      album.tracks
      # Could be N+1, raises Sequel::Plugins::ForbidLazyLoad::Error
    end

    Album.first.tracks
    # Could not be N+1, no error raised

  The forbid_lazy_load plugin is designed to be loaded into the base
  model class (generally Sequel::Model), and can be loaded only in
  test mode, or only in certain test mode configurations, so that it
  does not have any production performance impact.

  Note that an alternative approach that Sequel has supported for
  many years is the tactical_eager_loading plugin, which
  automatically eager loads when an N+1 query issue is detected.
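  For comparison, a minimal sketch of that alternative approach
  (model and association names assumed for illustration):

    Album.plugin :tactical_eager_loading
    Album.one_to_many :tracks

    # One query for the albums, plus a single eager query for all of
    # their tracks, issued automatically when tracks is first called:
    Album.all.each{|album| album.tracks}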
* An association_lazy_eager_option plugin has been added which
  supports the :eager option for the association method.  If the
  association has not been loaded, this eagerly loads the
  associations specified by the :eager option when loading the
  association.  If the association has already been loaded, this
  option is ignored, with the assumption that whatever loaded the
  association already used the correct eager loading.  Example:

    Album.plugin :association_lazy_eager_option
    Album.one_to_many :tracks
    Track.many_to_one :artist

    album = Album.first
    album.tracks(:eager=>:artist)
    # Loads tracks for album, then artist for each track (2 queries)

    album.tracks(:eager=>:artist)
    # No query issued as association is cached

  You could previously have similar behavior for uncached
  associations by passing a block to the association method and
  calling eager on the yielded dataset.  However, that would ignore
  any cached association, causing redundant loading of the
  association in such cases.

* On PostgreSQL 10+, creating partitioned tables and partitions of
  other tables is now supported.

  To create a partitioned table, use the :partition_by option:

    DB.create_table(:table1, partition_by: :date_column,
                             partition_type: :range) do
      Integer :id
      Date :date_column
    end

    DB.create_table(:table2, partition_by: :string_column,
                             partition_type: :list) do
      Integer :id
      String :string_column
    end

    DB.create_table(:table3, partition_by: :int_column,
                             partition_type: :hash) do
      Integer :id
      Integer :int_column
    end

  To add partitions of other tables, use the :partition_of option.
  This option will use a custom DSL specific to partitions of other
  tables.

  For range partitioning, you can use the from and to methods to
  specify the inclusive beginning and exclusive ending of the range
  of the partition.  You can call the minvalue and maxvalue methods
  to get the minimum and maximum values for the column(s) in the
  range, useful as arguments to from and to:

    DB.create_table(:table1a, partition_of: :table1) do
      from minvalue
      to 0
    end
    DB.create_table(:table1b, partition_of: :table1) do
      from 0
      to 100
    end
    DB.create_table(:table1c, partition_of: :table1) do
      from 100
      to maxvalue
    end

  For list partitioning, you use the values_in method.  You can also
  use the default method to mark a partition as the default
  partition:

    DB.create_table(:table2a, partition_of: :table2) do
      values_in 1, 2, 3
    end
    DB.create_table(:table2b, partition_of: :table2) do
      values_in 4, 5, 6
    end
    DB.create_table(:table2c, partition_of: :table2) do
      default
    end

  For hash partitioning, you use the modulus and remainder methods:

    DB.create_table(:table3a, partition_of: :table3) do
      modulus 3
      remainder 0
    end
    DB.create_table(:table3b, partition_of: :table3) do
      modulus 3
      remainder 1
    end
    DB.create_table(:table3c, partition_of: :table3) do
      modulus 3
      remainder 2
    end

* On PostgreSQL 12+ and SQLite 3.31+, column schema hashes now have
  a :generated entry for whether the column is a generated column.

* The schema_dumper extension now dumps generated columns correctly
  when using the :same_db option on PostgreSQL 12+.

* A skip_saving_columns plugin has been added.  This allows skipping
  saving of specific columns for the model.  By default, it skips
  saving of generated columns, but you can customize the columns
  that it skips:

    Album.plugin :skip_saving_columns
    Album.skip_saving_columns = [:some_column]

= Other Improvements

* The alter_table drop_constraint :primary_key option on SQLite now
  works correctly for non-integer primary keys.
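  For example, a minimal sketch (the table and constraint names are
  assumptions for illustration):

    DB.alter_table(:items) do
      drop_constraint(:items_pkey, :type=>:primary_key)
    end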
* When an error is raised due to an irreversible migration, the
  error message now includes the file containing the migration for
  easier debugging.

sequel-5.63.0/doc/release_notes/5.32.0.txt000066400000000000000000000037561434214120600177740ustar00rootroot00000000000000

= New Features

* A fiber_concurrency extension has been added, for using
  Fiber.current instead of Thread.current when checking out a
  connection.  This allows separate fibers of the same thread to use
  separate connections.  In addition to allowing direct use of
  fibers, this also allows concurrent use of multiple enumerators
  that use database connections in the same thread.

  When using this extension, you must be careful and ensure that you
  are not using more concurrent fibers than your connection pool
  size.  Otherwise, all fibers will block while one fiber waits
  until a connection is available.  It is possible this issue will
  be addressed when Ruby implements a fiber scheduler (currently
  being discussed for inclusion in Ruby 3).

* A run_transaction_hooks Database extension has been added,
  allowing for running the transaction hooks before commit/rollback,
  which can be helpful for testing the hooks when using
  transactional testing.

= Other Improvements

* Database#create_table? now works correctly with the :partition_of
  option on PostgreSQL.

* The timestamp(N) with time zone type is now recognized by the
  schema parser.

* Singleton methods of the Sequel module have now been moved into a
  Sequel::SequelMethods module.  This allows you to extend Sequel
  with a module that overrides the methods and call super to get the
  default behavior.

* The pg_inet extension no longer defines inet/cidr conversion procs
  if sequel_pg 1.13.0+ is in use.  This is because sequel_pg 1.13.0+
  will respect the conversion procs and defining them makes things
  slower.  sequel_pg 1.13.0+ handles the same conversion by default
  without needing a conversion proc.

* Method visibility issues in the model, plugin, extension, and
  adapter code have been fixed.  Most cases fixed were private
  methods being accidentally made public when they were overridden.
  During this change, Model#_insert_values was changed from public
  to private, since it was originally intended to be private.

sequel-5.63.0/doc/release_notes/5.33.0.txt000066400000000000000000000015571434214120600177720ustar00rootroot00000000000000

= New Features

* Custom join types are now supported on a per-association basis
  when using eager_graph/association_join.  This builds on the
  previous support for custom aliases, using
  Sequel::SQL::AliasedExpression:

    class Artist < Sequel::Model; end
    class Album < Sequel::Model; end
    class Track < Sequel::Model; end
    Artist.one_to_many :albums
    Album.one_to_many :tracks

    Artist.eager_graph(
      Sequel[:albums].as(:a, join_type: :inner) =>
        Sequel[:tracks].as(:t, join_type: :left)
    )

* A Database#current_timestamp_utc accessor has been added on
  SQLite.  Setting this to true will keep CURRENT_TIMESTAMP,
  CURRENT_TIME, and CURRENT_DATE in UTC instead of converting them
  to localtime.

= Other Improvements

* The smallserial PostgreSQL type is now recognized and Sequel will
  not try to mark smallserial columns as identity columns.

sequel-5.63.0/doc/release_notes/5.34.0.txt000066400000000000000000000030301434214120600177610ustar00rootroot00000000000000

= New Features

* The association_pks plugin now creates *_pks_dataset methods for
  each association.  These are similar to the existing *_pks getter
  methods, but they return a dataset of the keys instead of the keys
  themselves.
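  For example, a minimal sketch (model and association names assumed
  for illustration):

    Album.plugin :association_pks
    Album.many_to_many :tags

    album.tag_pks          # => [1, 2, 3]
    album.tag_pks_dataset  # dataset returning the same keys, usable
                           # for further filtering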
* The association_pks plugin now supports a :cache_pks association
  option, which will cache calls to the *_pks getter method.  The
  default behavior remains that the *_pks getter method only returns
  cached values if the *_pks= setter method has been used to set the
  values.

* The *_pks getter methods supported by the association_pks plugin
  now support a :refresh option to ignore any cached values, similar
  to how the association getter methods work.

= Other Improvements

* If trying to disconnect a server that doesn't exist when using a
  sharded connection pool, a Sequel::Error is now raised.
  Previously, the sharded threaded pool raised a NoMethodError and
  the sharded single connection pool did not raise an error.

* If using the :savepoint option when savepoints are not supported,
  a Sequel::InvalidOperation exception is now raised, instead of a
  NoMethodError.

* Calling Dataset#eager_graph with no arguments now returns the
  dataset.

* If not connected to the database, the single connection pool will
  not yield any connections to Database#pool.all_connections.

* Forcing a :ruby eager limit strategy for an association without a
  limit or offset now works correctly.

* Multiple unnecessary conditionals have been removed.

* Sequel core and model code now have 100% branch coverage.

sequel-5.63.0/doc/release_notes/5.35.0.txt000066400000000000000000000041011434214120600177620ustar00rootroot00000000000000

= New Features

* An instance_specific_default plugin has been added for setting the
  default for the :instance_specific association option, or
  warning/raising in cases where it is not specified.  This allows
  you to easily find associations that would be considered instance
  specific by default, and mark them as not instance specific for
  better performance.

= Other Improvements

* Setting the :instance_specific association option to false now
  works correctly if the association uses a block.  Associations
  that set the :dataset option are now always considered instance
  specific, even if the :instance_specific option is explicitly
  passed.

* The validation_class_methods plugin now considers all :if,
  :allow_missing, :allow_nil, and :allow_blank options.  Previously,
  it only considered the first of those options that was set.

* Model.finalize_associations no longer breaks if you have
  instance-specific associations.

* Model.plugin now warns if you load the plugin with arguments or a
  block if the plugin does not accept arguments or a block.  This is
  because a future change to Sequel could break the call.

* When emulating unsupported alter table operations on SQLite,
  Sequel now copies composite unique constraints unless the alter
  table operation is the dropping of a unique constraint.

* Sequel now recognizes an additional disconnect error in the oracle
  adapter.

* In the run_transaction_hooks extension, calling
  run_after_{commit,rollback}_hooks now raises the correct exception
  class.

* In the pg_range extension, conversion procs for the tsrange[] and
  tstzrange[] types are not added unless the Database uses the
  pg_array extension.

* Multiple unnecessary conditionals in plugins and extensions have
  been removed.

* Sequel plugin and extension code now have 100% branch coverage.

* Sequel now avoids a statement not reached verbose warning in
  Dataset#clone.

= Backwards Compatibility

* The output of Dataset#to_dot in the to_dot extension has changed
  slightly, including hash entries with nil keys.  These entries
  were previously ignored.
sequel-5.63.0/doc/release_notes/5.36.0.txt000066400000000000000000000037641434214120600200010ustar00rootroot00000000000000

= New Features

* Dataset#with_ties has been added on PostgreSQL 13+ and Microsoft
  SQL Server, which will have a limited dataset also return all rows
  with the same order as the final row.

* In the pg_json_ops extension, the following methods have been
  added to Postgres::JSONBOp, all of which require PostgreSQL 13+:

  * #set_lax
  * #path_exists_tz!
  * #path_match_tz!
  * #path_query_tz
  * #path_query_array_tz
  * #path_query_first_tz

* On Oracle, the Database#view_exists? method now accepts a
  :current_schema option to limit the views returned to the current
  schema, instead of all non-system schemas.

= Other Improvements

* Sequel will now pass keyword arguments through in the following
  cases:

  * When loading plugins (Model.plugin)
  * Class methods automatically defined for methods defined in a
    Model.dataset_module block
  * Methods defined by Plugins.def_dataset_method
  * Database methods called inside migrations
  * Methods called via an association proxy when using the
    association_proxies plugin.
  * Dataset methods called inside a Dataset#query block when using
    the query extension.

  Previously, keywords were not handled in these cases, which would
  cause deprecation warnings in Ruby 2.7 and ArgumentErrors in Ruby
  3.0.

  Note that Sequel itself does not use keyword arguments at all, so
  all of these changes only affect cases where external methods are
  defined that accept keywords, and Sequel methods are called with
  keywords that end up being delegated to the external methods.

* The odbc adapter will now stream result sets instead of loading
  the entire result set in memory and then iterating over it.

* Sequel now recognizes another disconnect error in the mysql and
  mysql2 adapters.

= Backwards Compatibility

* Due to the odbc adapter change to use streaming, issuing queries
  inside a Dataset#each block will no longer work unless a different
  shard or thread is used.  The behavior of such code is considered
  undefined on all Sequel adapters.

sequel-5.63.0/doc/release_notes/5.37.0.txt000066400000000000000000000023241434214120600177710ustar00rootroot00000000000000

= New Features

* Model#column_previously_was and #column_previously_changed? have
  been added to the dirty plugin, for getting the previous values of
  the column before saving and for whether there were changes before
  saving.

  Model#column_previously_changed? accepts :from and :to options to
  allow you to more easily determine if the value changed from
  and/or to specific values.

  This information was previously obtainable via
  Model#previous_changes, but these new methods offer a friendlier
  interface.

* Postgres::PGRow::{Array,Hash}Row#op has been added to the
  pg_row_ops extension if the pg_row extension is loaded.  This is
  similar to how the pg_array_ops, pg_hstore_ops, and pg_json_ops
  extensions add an #op method to their objects.  This makes it
  easier to perform row operations on literal rows.

= Other Improvements

* The schema_dumper extension now supports more unsigned numeric
  types, such as "decimal(7,2) unsigned" and "real unsigned".

* IntegerMigrator now raises a Migrator::Error if attempting to
  migrate down when there are migration files missing and needed for
  the down migration.  Previously, IntegerMigrator would not raise
  an exception and would make no database changes in this case.
sequel-5.63.0/doc/release_notes/5.38.0.txt000066400000000000000000000021001434214120600177620ustar00rootroot00000000000000

= New Features

* The jdbc/mysql adapter now supports the newer
  com.mysql.cj.jdbc.Driver driver.  The adapter will still attempt
  to load the older com.mysql.jdbc.Driver if the
  com.mysql.cj.jdbc.Driver is not found.

= Other Improvements

* When testing a connection after creating a new Database instance
  raises an exception, the Database instance is removed from
  Sequel::DATABASES.

* The single_table_inheritance and prepared_statements plugins now
  work correctly if loaded into the same class.

* Database connect and disconnect errors are no longer swallowed
  when calling Database#create_or_replace_view, Database#server_version
  on PostgreSQL, or Database#create_table* on Oracle.

= Backwards Compatibility

* Previously, instantiating a new Database instance directly using
  Sequel::Database.new did not test the connection by default.  That
  was instead handled by Sequel::Database.connect.  The test
  connection now happens inside Database#initialize.  This should
  only affect backwards compatibility for code that is calling
  Sequel::Database.new directly.

sequel-5.63.0/doc/release_notes/5.39.0.txt000066400000000000000000000011761434214120600177770ustar00rootroot00000000000000

= New Features

* On Microsoft SQL Server, the :clustered option is now supported
  for primary key and unique constraints.  You can use a true value
  for CLUSTERED and a false value for NONCLUSTERED.

= Other Improvements

* Partitioned tables are now included in the result of
  Database#tables on PostgreSQL.

* alter_table set_column_allow_null no longer drops the size of
  binary columns on Microsoft SQL Server.

* In the tree plugin, the roots_dataset method now works correctly
  with queries using joins by qualifying the parent column.

* A fork safety guide has been added, discussing fork safety issues
  when using Sequel.

sequel-5.63.0/doc/release_notes/5.4.0.txt000066400000000000000000000063421434214120600177070ustar00rootroot00000000000000

= New Features

* An index_caching extension has been added, which makes
  Database#indexes use a cache similar to Database#schema, and also
  offers methods for saving and loading the cache from a file,
  similar to the schema_caching extension.  This can speed up model
  loading in certain cases when the auto_validations plugin is used.

* A datetime_parse_to_time extension has been added, which parses
  strings without timezone offsets using DateTime.parse instead of
  Time.parse.  This can fix problems when the string being parsed
  represents a time not valid in the local timezone due to daylight
  savings time shifts.  Time.parse silently shifts such times by 1
  hour instead of raising an exception, resulting in incorrect
  behavior in that case.

  It only makes sense to use this extension when the times in the
  database are stored in UTC but not returned with timezone
  information, the timezone for the Database instance (or
  Sequel.database_timezone) is set to :utc (not the default), and
  Time is used as the datetime_class (the default).

* A pg_timestamptz extension has been added for switching the
  default generic timestamp type from timestamp to timestamptz.

* Sequel.date_{add,sub} in the date_arithmetic extension now
  supports a :cast option for setting the cast type.  This value
  defaults to Time for backwards compatibility, which uses the
  default generic timestamp type for the database.
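  For example, a minimal sketch:

    Sequel.extension :date_arithmetic
    Sequel.date_add(:created_at, {:years=>1}, :cast=>:timestamptz)
    # casts the result to timestamptz instead of the default
    # generic timestamp type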
* The class_table_inheritance plugin now supports an
  :ignore_subclass_columns option which takes an array of column
  symbols to ignore in subclasses.  This allows you to use the
  plugin when your table inheritance hierarchy includes non-primary
  key columns with the same name in different tables.

= Improvements

* Dataset#insert_select now returns false instead of nil if it runs
  an INSERT statement but does not return a value on Microsoft SQL
  Server or PostgreSQL.  This can happen on both databases if
  triggers are used.

  Model#save now checks for a false value returned by
  Dataset#insert_select, and does not issue another INSERT statement
  in that case.

* Database#indexes now correctly handles SQL::Identifier arguments
  on SQLite, Microsoft SQL Server, SQLAnywhere, and DB2.

* Dataset#to_json in the json_serializer plugin and Dataset#to_xml
  in the xml_serializer plugin now both handle datasets that use
  eager_graph.

* Dataset#nullify now caches the dataset it returns, for better
  performance if it is called more than once on the same dataset.

* Database#synchronize is now optimized on ruby 2.5+ and is about
  10% faster by relying on the new lazy proc allocation feature.

= Backwards Compatibility

* Fractional second timestamps are now enabled on DB2.  If you are
  connecting to a DB2 database that does not support fractional
  seconds, you should add the following code (where DB is your
  Sequel::Database instance):

    DB.extend_datasets do
      def supports_timestamp_usecs?
        false
      end
    end

* Calling a filtering method with no argument and a virtual row
  block that returns nil on a dataset with no existing filter now
  adds a WHERE NULL filter, to match the behavior if given a nil
  argument.  Previously, a deprecation warning was issued and a
  dataset with no filter was returned.

sequel-5.63.0/doc/release_notes/5.40.0.txt000066400000000000000000000027401434214120600177650ustar00rootroot00000000000000

= New Features

* On SQLite 3.33.0+, the UPDATE FROM syntax is now supported.  This
  allows you to update one table based on a join to another table.
  The SQLite syntax is based on the PostgreSQL syntax, and the
  Sequel API is the same for both.  You need to pass multiple tables
  to Dataset#from.  The first table is the table to update, and the
  remaining tables are used to construct the UPDATE FROM clause:

    DB[:a, :b].where{{a[:c]=>b[:d]}}.update(:e=>'f')
    # UPDATE a SET e = 'f' FROM b WHERE (a.c = b.d)

  Unlike PostgreSQL, SQLite does not support the deletion of joined
  datasets.

  Related to this, the following methods for testing database
  support for modifying joined datasets have been added:

  * supports_updating_joins?
  * supports_deleting_joins?

= Other Improvements

* The pg_interval and date_arithmetic extensions now support
  ActiveSupport 6.1.

* Sequel no longer issues method redefinition warnings in verbose
  mode.  As Ruby 3 has dropped uninitialized instance variable
  warnings, Sequel is now verbose warning free on Ruby 3.

= Backwards Compatibility

* Trying to truncate or insert into a joined dataset now correctly
  raises an exception even if the joined dataset supports updates.

* The private Dataset#check_modification_allowed! method is now
  deprecated, and users (custom adapters) should now switch to one
  of the more specific methods introduced in this version:

  * check_insert_allowed!
  * check_update_allowed!
  * check_delete_allowed!
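  For a custom adapter, usage might look like this minimal sketch
  (the method body is an assumption for illustration):

    # In the adapter's Dataset class:
    def update_sql(*)
      check_update_allowed!
      super
    end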
sequel-5.63.0/doc/release_notes/5.41.0.txt000066400000000000000000000020041434214120600177630ustar00rootroot00000000000000

= New Features

* The validation methods added by the validation_helpers plugin now
  support the :skip_invalid option, which will not add a validation
  error on a column if it already has a validation error.  This can
  be useful if you want to avoid having duplicate errors.

* The auto_validations plugin now supports a :skip_invalid plugin
  option, which will pass the :skip_invalid option when calling
  validation methods.

= Other Improvements

* The :adder, :remover, and :clearer association options now support
  keyword arguments in Ruby 2.7+.

* In the pg_interval extension, Sequel now uses the same number of
  seconds per month and seconds per year as active_support.  It
  originally used the same number, but active_support changed the
  values in active_support 5.1.  Sequel now uses the active_support
  values if they are available.

* When adding a String column on PostgreSQL, an explicit text: true
  option now takes precedence over an explicit :size option, as it
  does in Sequel's default behavior.

sequel-5.63.0/doc/release_notes/5.42.0.txt000066400000000000000000000131161434214120600177660ustar00rootroot00000000000000

= New Features

* An async_thread_pool Database extension has been added, which
  executes queries and processes results using a separate thread
  pool.  This allows you to do things like:

    foos = DB[:foos].async.all
    bars = DB[:bars].async.select_map(:name)
    foo_bars = DB[:foo_bars].async.each{|x| p x}

  and have the three method calls (all, select_map, and each)
  execute concurrently.  On Ruby implementations without a global VM
  lock, such as JRuby, it will allow for parallel execution of the
  method calls.  On CRuby, the main benefit will be for cases where
  query execution takes a long time or there is significant latency
  between the application and the database.

  When you call a method on foos, bars, or foo_bars, if the thread
  pool hasn't finished processing the method, the calling code will
  block until the method call has finished.

  By default, for consistency, calling code will not preempt the
  async thread pool.  For example, if you do:

    DB[:foos].async.all.size

  The calling code will always wait for the async thread pool to run
  the all method, and then the calling code will call size on the
  result.  This ensures that async queries will not use the same
  connection as the calling thread, even if the calling thread has a
  connection checked out.

  In some cases, such as when the async thread pool is very busy,
  preemption is desired for performance reasons.  If you set the
  :preempt_async_thread Database option before loading the
  async_thread_pool extension, preemption will be allowed.  With
  preemption allowed, if the async thread pool has not started the
  processing of the method at the time the calling code needs the
  results of the method, the calling code will preempt the async
  thread pool, and run the method on the current thread.

  By default, the async thread pool uses the same number of threads
  as the Database object's :max_connections attribute (the default
  for that is 4).  You can modify the number of async threads by
  setting the :num_async_threads Database option before loading the
  async_thread_pool extension.

  Most Dataset methods that execute queries on the database and
  return results will operate asynchronously if the dataset is set
  to be asynchronous via the Dataset#async method.
  This includes most methods available due to the inclusion in
  Enumerable, even if not defined by Dataset itself.

  There are multiple caveats when using the async_thread_pool
  extension:

  * Asynchronous behavior is harder to understand and harder to
    debug.  It would be wise to only use this support in cases where
    it provides a significant performance benefit.

  * Dataset methods executed asynchronously will use a separate
    database connection from the calling thread, so they will not
    respect transactions in the calling thread, or other cases where
    the calling thread checks out a connection directly using
    Database#synchronize.  They will also not respect the use of
    Database#with_server (from the server_block extension) in the
    calling thread.

  * Dataset methods executed asynchronously should never ignore
    their return value.  Code such as:

      DB[:table].async.insert(1)

    is problematic because without storing the return value, you
    have no way to block until the insert has been completed.

  * The returned object for Dataset methods executed asynchronously
    is a proxy object (promise).  So you should never do:

      row = DB[:table].async.first
      # ...
      if row
      end

      # or:

      bool = DB[:table].async.get(:boolean_column)
      # ...
      if bool
      end

    because the if branches will always be taken as row and bool
    will never be nil or false.  If you want to get the underlying
    value, call itself on the proxy object (or __value if using
    Ruby <2.2).  For the same reason, you should not use the proxy
    objects directly in case expressions or as arguments to
    Class#===.  Use itself or __value in those cases.

  * Dataset methods executed asynchronously that include blocks have
    the block executed asynchronously as well, assuming that the
    method calls the block.  Because these blocks are executed in a
    separate thread, you cannot use control flow modifiers such as
    break or return in them.

* An async_thread_pool model plugin has been added.  This requires
  that the async_thread_pool extension has been loaded into the
  model's Database object, and allows you to call Model.async
  instead of Model.dataset.async.  It also adds async support to the
  destroy, with_pk, and with_pk! model dataset methods.

* Model#to_json_data has been added to the json_serializer plugin,
  for returning a hash of data that can be converted to JSON,
  instead of a JSON string.

* A :reject_nil option has been added to the nested_attributes
  method in the nested_attributes plugin.  This will ignore calls to
  the nested attributes setter method where nil is passed as the
  setter method argument.

= Other Improvements

* Model#freeze now works in cases where model validation modifies
  the object beyond adding errors.

* Model#freeze in the composition, serialization, and
  serialization_modification_detection plugins now works in cases
  where validation would end up loading the composed or serialized
  values.

* Database#extension now avoids a possible thread safety issue that
  could result in the extension being loaded into the Database
  twice.

* The ado adapter now supports overriding the timestamp conversion
  proc.  Previously, unlike other conversion procs, the timestamp
  conversion proc was hard coded and could not be overridden.

sequel-5.63.0/doc/release_notes/5.43.0.txt000066400000000000000000000073571434214120600177750ustar00rootroot00000000000000

= New Features

* A column_encryption plugin has been added to support encrypting
  the content of individual columns in a table.

  Column values are encrypted with AES-256-GCM using a per-value
  cipher key derived from a key provided in the configuration using
  HMAC-SHA256.
  If you would like to support encryption of columns in more than
  one model, you should probably load the plugin into the parent
  class of your models and specify the keys:

    Sequel::Model.plugin :column_encryption do |enc|
      enc.key 0, ENV["SEQUEL_COLUMN_ENCRYPTION_KEY"]
    end

  This specifies a single master encryption key.  Unless you are
  actively rotating keys, it is best to use a single master key.

  In the above call, 0 is the id of the key, and
  ENV["SEQUEL_COLUMN_ENCRYPTION_KEY"] is the content of the key,
  which must be a string with exactly 32 bytes.  As indicated, this
  key should not be hardcoded or otherwise committed to the source
  control repository.

  For models that need encrypted columns, you load the plugin again,
  but specify the columns to encrypt:

    ConfidentialModel.plugin :column_encryption do |enc|
      enc.column :encrypted_column_name
      enc.column :searchable_column_name, searchable: true
      enc.column :ci_searchable_column_name, searchable: :case_insensitive
    end

  With this, all three specified columns (encrypted_column_name,
  searchable_column_name, and ci_searchable_column_name) will be
  marked as encrypted columns.  When you run the following code:

    ConfidentialModel.create(
      encrypted_column_name: 'These',
      searchable_column_name: 'will be',
      ci_searchable_column_name: 'Encrypted'
    )

  It will save encrypted versions to the database.
  encrypted_column_name will not be searchable,
  searchable_column_name will be searchable with an exact match, and
  ci_searchable_column_name will be searchable with a case
  insensitive match.

  To search searchable encrypted columns, use with_encrypted_value.
  This example code will return the model instance created in the
  code example in the previous section:

    ConfidentialModel.
      with_encrypted_value(:searchable_column_name, "will be").
      with_encrypted_value(:ci_searchable_column_name, "encrypted").
      first

  To rotate encryption keys, add a new key above the existing key,
  with a new key ID:

    Sequel::Model.plugin :column_encryption do |enc|
      enc.key 1, ENV["SEQUEL_COLUMN_ENCRYPTION_KEY"]
      enc.key 0, ENV["SEQUEL_OLD_COLUMN_ENCRYPTION_KEY"]
    end

  Newly encrypted data will then use the new key.  Records encrypted
  with the older key will still be decrypted correctly.

  To force reencryption for existing records that are using the
  older key, you can use the needing_reencryption dataset method and
  the reencrypt instance method.  For a small number of records, you
  can probably do:

    ConfidentialModel.needing_reencryption.all(&:reencrypt)

  With more than a small number of records, you'll want to do this
  in batches.  It's possible you could use an approach such as:

    ds = ConfidentialModel.needing_reencryption.limit(100)
    true until ds.all(&:reencrypt).empty?

  After all values have been reencrypted for all models, and no
  models use the older encryption key, you can remove it from the
  configuration:

    Sequel::Model.plugin :column_encryption do |enc|
      enc.key 1, ENV["SEQUEL_COLUMN_ENCRYPTION_KEY"]
    end

  The column_encryption plugin supports encrypting serialized data,
  as well as enforcing uniqueness of searchable encrypted columns
  (in the absence of key rotation).  By design, it does not support
  compression, mixing encrypted and unencrypted data in the same
  column, or arbitrary encryption ciphers.  See the plugin
  documentation for more details.

sequel-5.63.0/doc/release_notes/5.44.0.txt000066400000000000000000000026431434214120600177730ustar00rootroot00000000000000

= New Features

* A concurrent_eager_loading plugin has been added.
  This plugin builds on top of the async_thread_pool Database
  extension and allows eager loading multiple associations
  concurrently in separate threads.  With this plugin, you can mark
  datasets for concurrent eager loading using
  eager_load_concurrently:

    Album.eager_load_concurrently.eager(:artist, :genre, :tracks).all

  Datasets that are marked for concurrent eager loading will use
  concurrent eager loading if they are eager loading more than one
  association.  If you would like to make concurrent eager loading
  the default, you can load the plugin with the :always option.

  All of the association types that ship with Sequel now support
  concurrent eager loading when using this plugin.  For custom eager
  loaders using the :eager_loader association option, please see the
  documentation for the plugin for how to enable custom eager
  loading for them.

= Other Improvements

* The date_arithmetic extension now handles
  ActiveSupport::Duration values with weeks, as well as :weeks as a
  key in a hash value.  Weeks are converted into 7 days internally.

* The shared SQLite adapter now emulates the dropping of
  non-composite unique constraints.  Non-composite unique
  constraints are now treated similarly to composite unique
  constraints, in that dropping any unique constraints on a table
  will drop all unique constraints on that table.

sequel-5.63.0/doc/release_notes/5.45.0.txt000066400000000000000000000023351434214120600177720ustar00rootroot00000000000000

= New Features

* An auto_validations_constraint_validations_presence_message plugin
  has been added that provides integration for the auto_validations
  and constraint_validations plugins in the following conditions:

  * The column has a NOT NULL constraint
  * The column has a presence constraint validation with both the
    :message and :allow_nil options used.

  In this case, when saving a nil value in the column, the plugin
  will use the more specific message from the presence constraint
  validation, instead of the generic message from auto_validations.

= Other Improvements

* On SQLite 3.35.0+, Sequel now uses ALTER TABLE DROP COLUMN for
  dropping columns, instead of emulating the dropped column by
  recreating the table.

* The Dataset#with :materialized option is now supported on SQLite
  3.35.0+ for specifying whether common table expressions should be
  materialized.

* The odbc adapter now correctly handles boolean columns with NULL
  values.  Previously, such values were returned as false instead of
  nil.

= Backwards Compatibility

* The change to use ALTER TABLE DROP COLUMN on SQLite 3.35.0+ can
  cause backwards compatibility issues if SQLite 3.35.0+ does not
  allow dropping the column.

sequel-5.63.0/doc/release_notes/5.46.0.txt000066400000000000000000000104141434214120600177700ustar00rootroot00000000000000

= New Features

* An unused_associations plugin has been added, which allows you to
  determine which associations and association methods are not
  used.  You can use this to avoid defining the unused associations
  and association methods, which can save memory.

  This plugin is supported on Ruby 2.5+, and uses method coverage to
  determine if the association methods are called.  Because
  Sequel::Model adds association methods to an anonymous module
  included in the class, directly using the method coverage data to
  determine which associations are used is challenging.

  This plugin is mostly designed for reporting.
  You can have a test suite that runs with method coverage enabled,
  and use the coverage information to get data on unused
  associations:

    # Calls Coverage.result
    cov_data = Sequel::Model.update_associations_coverage
    unused_associations_data = Sequel::Model.update_unused_associations_data(coverage_data: cov_data)
    Sequel::Model.unused_associations(unused_associations_data: unused_associations_data)
    # => [["Class1", "assoc1"], ...]

  unused_associations returns an array of two element arrays, where
  the first element is the class name and the second element is the
  association name.  The returned values will be associations where
  all of the association methods are not used.

  In addition to determining which associations are not used, you
  can also use this to determine if you are defining association
  methods that are not used:

    Sequel::Model.unused_association_options(unused_associations_data: unused_associations_data)
    # => [["Class2", "assoc2", {:read_only=>true}], ...]

  unused_association_options is similar to unused_associations, but
  returns an array of three element arrays, where the third element
  is a hash of association options that should be used to avoid
  defining the unused association methods.  It's common in Sequel to
  define associations and only use them for reading data and not for
  modifications, and you can use this to easily see which
  associations are only used for reading data.

  As the determination of whether associations are used is based on
  method coverage, this will report as unused any associations that
  are used but where the association methods are not called.  These
  cases are rare, but can happen if you have libraries that use the
  association reflection metadata without calling the association
  methods, or use the association only in combination with another
  plugin such as dataset_associations.  You can set the :is_used
  association option to explicitly mark an association as used, and
  have this plugin avoid reporting it as unused.

  In addition to just reporting on unused associations, you can also
  directly use the unused associations metadata to automatically
  avoid defining unused associations or unused association methods.
  You can set a :file option when loading the plugin:

    Sequel::Model.plugin :unused_associations, file: 'unused_associations.json'

  Then run the method coverage testing.  This will save the unused
  associations metadata to the file.  Then you can use this metadata
  automatically by also setting the :modify_associations option:

    Sequel::Model.plugin :unused_associations, file: 'unused_associations.json',
      modify_associations: true

  With the :modify_associations option, unused associations are
  skipped instead of being defined, and the options returned by
  unused_association_options are automatically used.  Note that
  using the :modify_associations option is risky unless you have
  complete coverage and do not have cases where the associations are
  used without calling methods.

  It is common to have multiple test suites where you need to
  combine coverage.  The plugin supports this by using a
  :coverage_file option:

    Sequel::Model.plugin :unused_associations, coverage_file: 'unused_associations_coverage.json'

  In this case, you would run update_associations_coverage after
  each test suite, and update_unused_associations_data only after
  all test suites have been run.

* Passing nil as the value of the :setter, :adder, :remover, or
  :clearer association options will cause the related method to not
  be defined, instead of using the default value.
  This allows you to only define the methods you will actually be
  using.

sequel-5.63.0/doc/release_notes/5.47.0.txt000066400000000000000000000047361434214120600200010ustar00rootroot00000000000000

= New Features

* Sequel now supports using separate queries for each table for both
  lazy and eager loading of the following associations:

  * many_to_many
  * one_through_one
  * many_through_many # many_through_many plugin
  * one_through_many  # many_through_many plugin

  For many_to_many/one_through_one, you specify the :join_table_db
  association option, which should be a Sequel::Database instance
  containing the join table.  It is possible for the current table,
  join table, and associated table all to be in separate databases:

    JOIN_TABLE_DB = Sequel.connect('...')
    Album.many_to_many :artists, join_table_db: JOIN_TABLE_DB

  For many_through_many/one_through_many, you can use the :db option
  in each join table specification.  All join tables can be in
  separate databases:

    JTDB1 = Sequel.connect('...')
    JTDB2 = Sequel.connect('...')
    # Tracks on all albums this artist appears on
    Artist.many_through_many :album_tracks, [
      {table: :albums_artists, left: :artist_id, right: :album_id, db: JTDB1},
      {table: :artists, left: :id, right: :id, db: JTDB2}
    ], class: :Track, right_primary_key: :album_id

* The :allow_eager_graph association option has been added.  Setting
  this option to false will disallow eager loading via #eager_graph.
  This is useful if you can eager load the association via #eager,
  but not with #eager_graph.

* The :allow_filtering_by association option has been added.
  Setting this option to false will disallow the use of filtering by
  associations for the association.

* Dataset#returning is now supported on SQLite 3.35.0+.  To work
  around bugs in the SQLite implementation, identifiers used in the
  RETURNING clause are automatically aliased.  Additionally,
  prepared statements that use the RETURNING clause on SQLite seem
  to have issues, so the prepared_statements plugin does not
  automatically use prepared statements on SQLite for queries that
  use the RETURNING clause.

* Database#rename_tables has been added on MySQL to support renaming
  multiple tables in the same query.

= Other Improvements

* The unused_associations plugin now tracks access to the
  association reflection for associations, so it will no longer show
  an association as completely unused if something is accessing the
  association reflection for it.  This eliminates most of the false
  positives, where the plugin would show an association as unused
  even though something was using it without calling the association
  methods.

sequel-5.63.0/doc/release_notes/5.48.0.txt000066400000000000000000000010201434214120600177650ustar00rootroot00000000000000

= New Features

* A Sequel::Database#like_without_collate accessor has been added on
  Microsoft SQL Server, which avoids using the COLLATE clause for
  LIKE expressions.  This can speed up query performance
  significantly.

* A private Sequel::Model::Errors#full_message method has been added
  to make it easier to support internationalization for
  Sequel::Model error messages.

= Other Improvements

* The association reflection tracking in the unused_associations
  plugin now works correctly when combining coverage runs.

sequel-5.63.0/doc/release_notes/5.49.0.txt000066400000000000000000000047731434214120600200040ustar00rootroot00000000000000

= New Features

* Model#validates_no_null_byte has been added to the
  validation_helpers plugin.  It checks that the value being
  validated does not contain an ASCII NUL ('\0') byte.
  Some databases will return an error if a string contains a NUL
  byte.

  The auto_validations plugin will now automatically add
  no_null_byte validations for all string columns in the model's
  table.  This will change exceptions raised by NUL bytes from
  database errors to validation failures.

  If you are using auto_validations and would like to have a table
  accept NUL bytes in string columns, use the following code inside
  the model:

    skip_auto_validations(:no_null_byte)

* JSONB subscripts are now supported on PostgreSQL 14+ when using
  the pg_json_ops extension.  You can use JSONB subscripts to more
  easily update part of a JSONB column:

    DB[:table].update(Sequel.pg_jsonb_op(:column)['key'] => 'value')
    # UPDATE "table" SET "column"['key'] = 'value'

* hstore subscripts are now supported on PostgreSQL 14+ when using
  the pg_hstore_ops extension.  You can use hstore subscripts to
  more easily update part of an hstore column:

    DB[:table].update(Sequel.hstore_op(:column)['key'] => 'value')
    # UPDATE "table" SET "column"['key'] = 'value'

* Sequel now supports table aliases for JOIN USING columns on
  PostgreSQL 14+.  These allow you to reference the USING columns in
  the query using a qualified identifier.  To use this support, pass
  an SQL::AliasedExpression as the expression to join on:

    DB[:t1].join(:t2, Sequel.as([:c1, :c2], :alias))
    # SELECT * FROM "t1" INNER JOIN "t2" USING ("c1", "c2") AS "alias"

* Database#create_trigger on PostgreSQL now supports a :replace
  option for CREATE OR REPLACE TRIGGER (supported in PostgreSQL
  14+).

* SQL::Expression#sequel_ast_transform has been added to support AST
  transforms of custom expression classes.

= Other Improvements

* Sequel now supports calling PostgreSQL procedures without
  arguments when using Database#call_procedure.  Previously,
  attempts to call procedures without arguments would call the
  procedure with a single NULL argument.

* Sequel now uses defined?(yield) instead of block_given?
  internally for better performance on CRuby.  defined?(yield) is
  faster as it is built into the VM, while block_given? is a regular
  method and has the overhead of calling a regular method.  Note
  that defined?(yield) is not implemented correctly on JRuby before
  9.0.0.0, so this release of Sequel drops support for JRuby
  versions before 9.0.0.0.

sequel-5.63.0/doc/release_notes/5.5.0.txt000066400000000000000000000044371434214120600177110ustar00rootroot00000000000000

= New Features

* The defaults_setter plugin now supports a :cache option, which
  will cache default values in the model object's values hash:

    Model.plugin :defaults_setter
    o = Model.new
    o.column # => 1 # default value
    o.values # => {}

    Model.plugin :defaults_setter, cache: true
    o = Model.new
    o.column # => 1 # default value
    o.values # => {:column => 1}

* The pg_array extension now sets a :callable_default schema entry
  for recognized empty array defaults.

* The pg_hstore extension now sets a :callable_default schema entry
  for recognized empty hstore defaults.

* The pg_json extension now sets a :callable_default schema entry
  for recognized empty json/jsonb array/hash defaults.

* The pg_inet extension now sets a :ruby_default schema entry for
  recognized inet/cidr defaults.

* The pg_range extension now sets a :ruby_default schema entry for
  recognized range defaults.

* The defaults_setter plugin will now give preference to a
  :callable_default schema entry over a :ruby_default schema entry.
  Combined with the other changes listed above, this makes default
  values recognized by the pg_array, pg_hstore, and pg_json
  extensions work well if the defaults_setter :cache option is also
  used.

= Other Improvements

* The modification_detection plugin no longer breaks column change
  detection for new objects.

* Database#copy_table in the postgres adapter now handles errors
  that occur when processing rows.  Previously, an exception could
  be raised on the next query in that case.

* The results of the changed_columns method are now cached in many
  places internally where they are called in a loop.  This results
  in better performance, especially if the modification_detection or
  serialization_modification_detection plugins are used.

* Database#synchronize is now optimized on ruby 2.5+ and is about
  10% faster by relying on the new lazy proc allocation feature.

= Backwards Compatibility

* The pg_interval extension now sets a :ruby_default schema entry
  for recognized interval defaults to the same value Sequel would
  return if the default value was returned.  Previously, Sequel
  would use a string in the :ruby_default schema value.

* String values in hashes returned by Database#schema are now frozen
  to prevent possible thread-safety issues and issues with
  unintentional modification of a shared string.  The hashes
  themselves are not frozen and can still be modified.

sequel-5.63.0/doc/release_notes/5.50.0.txt000066400000000000000000000065251434214120600177710ustar00rootroot00000000000000

= New Features

* A pg_multirange extension has been added with support for
  PostgreSQL 14+ multirange types.  Multirange types are similar to
  an array of ranges, where a value is in the multirange if it is in
  any of the ranges contained in the multirange.  Multiranges are
  useful when you need to check against multiple ranges that do not
  overlap.

  You can create multiranges using Sequel.pg_multirange, passing an
  array of ranges and a multirange type:

    DB.extension :pg_multirange
    multirange = Sequel.pg_multirange(array_of_date_ranges, :datemultirange)

  Sequel.pg_multirange returns a PGMultiRange, which operates as a
  delegate to an array of PGRange objects.  Behavior of the object
  is similar to an array, except that cover? is supported, which
  will test if any of the included ranges covers the argument:

    multirange.cover?(Date.today)

  Like the pg_range extension, this also supports registering custom
  multirange types, and using multirange types as bound variables.

  The pg_range_ops extension now supports both ranges and
  multiranges, with a few new methods added to Postgres::RangeOp for
  converting between them:

  * range_merge
  * multirange
  * unnest

* An sql_log_normalizer extension has been added for normalizing
  logged SQL, replacing numbers and strings inside the SQL string
  with question marks.  This is useful for query log analytics and
  for keeping sensitive data out of the logs.

    DB[:table].first(a: 1, b: 'something')
    # Without sql_log_normalizer extension, logged SQL is:
    # SELECT * FROM "table" WHERE (("a" = 1) AND ("b" = 'something')) LIMIT 1

    DB.extension :sql_log_normalizer
    DB[:table].first(a: 1, b: 'something')
    # With sql_log_normalizer extension, logged SQL is:
    # SELECT * FROM "table" WHERE (("a" = ?) AND ("b" = ?)) LIMIT ?

  This extension scans the logged SQL for numbers and strings,
  attempting to support the database's rules for string quoting.
  This means it should work with SQL that Sequel didn't itself
  create.
  However, there are corner cases that the extension doesn't handle,
  such as the use of apostrophes inside quoted identifiers, and
  potentially other cases of database specific SQL where the normal
  string quoting rules are changed, such as the use of escape
  strings on PostgreSQL (E'escape string').

* A :before_preconnect Database option has been added.  This is
  useful for configuring extensions added via :preconnect_extensions
  before the connection takes place.

= Other Improvements

* Dataset#columns! now uses a LIMIT 0 query instead of a LIMIT 1
  query by default.  This can improve performance in cases where the
  row returned would be large.  Some databases do not support a
  LIMIT 0 query, and some adapters that ship with Sequel have been
  updated to continue using LIMIT 1.  Custom adapters should be
  updated to use LIMIT 1 if the database does not support LIMIT 0.

* The lazy_attributes plugin no longer modifies the database schema.
  Previously, it could modify the database schema indirectly,
  resulting in the loss of typecasting for models that were not
  based on a single table or view, such as usage with the
  class_table_inheritance plugin.

* Model#freeze in the composition, serialization, and
  serialization_modification_detection plugins now returns self.  In
  addition to being more correct, this fixes usage of these plugins
  with the static_cache plugin.

sequel-5.63.0/doc/release_notes/5.51.0.txt000066400000000000000000000032741434214120600177720ustar00rootroot00000000000000

= New Features

* On PostgreSQL 14+, Dataset#with_recursive now supports :search and
  :cycle options for result ordering and cycle detection.  These use
  the SEARCH and CYCLE clauses added in PostgreSQL 14:

    DB[:t].with_recursive(:t,
      DB[:i1].where(parent_id: nil),
      DB[:i1].join(:t, id: :parent_id).select_all(:i1),
      search: {by: :id, type: :breadth},
      cycle: {columns: :id, cycle_value: 1, noncycle_value: 2})

    # WITH RECURSIVE t AS (
    #   SELECT * FROM i1 WHERE (parent_id IS NULL)
    #   UNION ALL
    #   (SELECT i1.* FROM i1 INNER JOIN t ON (t.id = i1.parent_id))
    # )
    # SEARCH BREADTH FIRST BY id SET ordercol
    # CYCLE id SET is_cycle TO 1 DEFAULT 2 USING path

* On MySQL, column schema hashes now contain an :extra entry, which
  contains the Extra string returned in MySQL's DESCRIBE results for
  the column.

= Other Improvements

* When eager loading via the tactical_eager_loading plugin, objects
  that already have an association loaded will not have it reloaded
  unless the :eager_reload option is given.

* When cloning an association and using a different :class option
  than the cloned association, the :class option given when cloning
  will now take precedence over the :class option for the cloned
  association.

* When using the mock postgres adapter, the adapter defaults to
  supporting PostgreSQL 14 (previously, it defaulted to supporting
  PostgreSQL 9.5).

* Sequel now avoids a method redefined warning in the lazy
  attributes plugin in verbose warnings mode.

= Other

* Sequel's primary discussion forum is now GitHub Discussions.  The
  sequel-talk Google Group is still available for users who would
  prefer to use that instead.

sequel-5.63.0/doc/release_notes/5.52.0.txt000066400000000000000000000057471434214120600200000ustar00rootroot00000000000000

= New Features

* When the sql_comments Database extension is used,
  Database#with_comments is now added, which can be used for
  including comments for all queries executed inside a given block.
  This can be useful if you want to analyze database query logs, and
  want to group all related queries:

    DB.with_comments(model: Album, action: :all) do
      DB[:albums].all
      # SELECT * FROM albums -- model:Album,action:all
    end

* An sql_comments plugin has been added, which will automatically
  add SQL comments for all queries generated by model class,
  instance and dataset methods:

    Album.plugin :sql_comments

    album = Album[1]
    # SELECT * FROM albums WHERE (id = 1) LIMIT 1
    # -- model:Album,method_type:class,method:[]

    album.update(:name=>'A')
    # UPDATE albums SET name = 'A' WHERE (id = 1)
    # -- model:Album,method_type:instance,method:update

    Album.where(id: 1).delete
    # DELETE FROM albums WHERE (id = 1)
    # -- model:Album,method_type:dataset,method:delete

  This plugin requires you have loaded the sql_comments Database
  extension into the related Database before use.

* A date_parse_input_handler extension has been added to support
  custom handling of input to date parsing methods.  Among other
  things, you can use this to limit the length of strings that will
  be parsed, which can prevent ArgumentErrors in newer Ruby
  versions:

    Sequel.extension :date_parse_input_handler
    Sequel.date_parse_input_handler do |string|
      string.b[0, 128]
    end

= Other Improvements

* On Ruby 3.1, the core_refinements extension now avoids the
  deprecated Refinement#include, switching to
  Refinement#import_methods.

* On Ruby 3.1, the subclasses plugin will use Ruby's native support
  for Class#subclasses.

* The subclasses plugin has renamed descendents to descendants and
  freeze_descendents to freeze_descendants.  The previous method
  names are still available as aliases.

* The :ruby_default schema entry for datetime/timestamp columns now
  respects Sequel.datetime_class.  Previously, the value for the
  :ruby_default schema entry would always be a DateTime value for
  such columns.

* The pg_interval extension now works with ActiveSupport 7.0.

* The shared postgres adapter now respects
  Database#default_string_column_size for setting the size of string
  columns that don't use text as the database type.

* Database#supports_check_constraints? now returns true on
  MySQL 8.0.19+.  This fixes drop_constraint in certain cases when
  combining the constraint dropping with other changes in the same
  alter_table block.

* The mysql adapter now supports the ruby-mysql 3 API (ruby-mysql is
  a pure-ruby MySQL driver).

* The mysql adapter no longer uses the connection's server_version
  method if it is defined, as the method does not return the correct
  value when using the ruby-mysql driver with MariaDB.

* Comments added by the sql_comments extension no longer modify
  cached SQL for a dataset.

= Other

* This is Sequel's 250th release!

sequel-5.63.0/doc/release_notes/5.53.0.txt

= Improvements

* The jdbc/h2 subadapter now supports H2 version 2.0.  It continues
  to support H2 versions 1.3 and 1.4.

* The mysql2 adapter's prepared statement support now reuses
  existing native prepared statements, instead of only binding
  variables on newly prepared statements.  This was the intended
  behavior previously, and should result in increased performance in
  cases where preparing a query takes significant time.

* The subclasses plugin now ignores an existing Class#subclasses
  method if it is defined in Ruby.  This fixes cases where usage of
  ActiveSupport would break the subclasses plugin.

* Database#call_sproc in the jdbc adapter will now always close the
  prepared call it creates.
Before, if there was an exception raised when setting the arguments for the prepared call, the prepared call would not be closed. * A more appropriate error is now issued if you try to use the column_encryption plugin to encrypt a column without setting up an encryption key. sequel-5.63.0/doc/release_notes/5.54.0.txt000066400000000000000000000022471434214120600177740ustar00rootroot00000000000000= New Feature * An enum plugin has been added. This plugin allows you to create model-level enums, giving names to underlying values of a column. For example: Album.plugin :enum Album.enum :status_id, good: 1, bad: 2 Adds Album#good! and Album#bad! for changing the status_id to 1 or 2 respectively. It adds Album#good? and Album#bad? for checking whether the status_id is 1 or 2 respectively. It overrides Album#status_id to return :good or :bad instead of 1 or 2, respectively, and overrides Album#status_id= to accept :good or :bad instead of 1 or 2 respectively. Additionally, it adds good and bad dataset methods for filtering the model's dataset to records where status_id is 1 or 2 respectively. It also adds not_good and not_bad dataset methods for filtering the model's dataset to records where status_id is not 1 or not 2 respectively. You can use :prefix and :suffix options when calling enum to add a prefix or suffix to the method names created. You can set the :override_accessors option to false to not override the accessor methods for the column, and set the :dataset_methods option to false to not add dataset methods. sequel-5.63.0/doc/release_notes/5.55.0.txt000066400000000000000000000015441434214120600177740ustar00rootroot00000000000000= New Features * An auto_restrict_eager_graph plugin has been added for automatically disallowing the use of eager_graph with associations using blocks but lacking graph_* options. This can prevent potentionally invalid usage, as the restrictions added by the block are not used by eager_graph. * The sqlite adapter now supports the :setup_regexp_function Database option. This option will define a REGEXP function in the database that will allow regexp support in queries, such as: DB[:table].where(column: /(some|pattern)/) Note that this creates a Ruby Regexp object per column value tested, so it isn't the most optimal approach. = Other Improvements * Calling dataset aggregate methods such as #max on a model dataset now works correctly. Previously, it could fail if called enough times to optimize using a placeholder literalizer. sequel-5.63.0/doc/release_notes/5.56.0.txt000066400000000000000000000037401434214120600177750ustar00rootroot00000000000000= New Features * On SQLite, Database#create_table now supports a :strict option to use the STRICT keyword when creating the table. When this option is used, SQLite will enforce the types for each column. When using this option, you are limited to using the following column types: int, integer, real, text, blob, and any (any allows for dynamic types). * An sqlite_json_ops extension has been added, providing DSL support for JSON functions and operators supported in SQLite 3.38.0. Usage is similar to the pg_json_ops extension. 
  First, you create an appropriate object:

    j = Sequel.sqlite_json_op(:json_column)
    # or:
    j = Sequel[:json_column].sqlite_json_op

  Then, you call methods on that object to create expressions for
  the JSON functions and operators:

    j[1]                     # (json_column ->> 1)
    j.get_text(1)            # (json_column -> 1)
    j.extract('$.a')         # json_extract(json_column, '$.a')

    j.array_length           # json_array_length(json_column)
    j.type                   # json_type(json_column)
    j.valid                  # json_valid(json_column)
    j.json                   # json(json_column)

    j.insert('$.a', 1)       # json_insert(json_column, '$.a', 1)
    j.set('$.a', 1)          # json_set(json_column, '$.a', 1)
    j.replace('$.a', 1)      # json_replace(json_column, '$.a', 1)
    j.remove('$.a')          # json_remove(json_column, '$.a')
    j.patch('{"a":2}')       # json_patch(json_column, '{"a":2}')

    j.each                   # json_each(json_column)
    j.tree                   # json_tree(json_column)

= Other Improvements

* The alter_table add_column and add_foreign_key methods now support
  the :index option to create an index on the added column, for
  compatibility with the :index option on the create_table column
  and foreign_key methods.

* The schema_dumper extension now treats the "INTEGER" type the same
  as the "integer" type.  This fixes some behavior when using
  SQLite 3.37.0+.

* Sequel's website has a much improved visual design.

sequel-5.63.0/doc/release_notes/5.57.0.txt

= New Features

* An is_distinct_from extension has been added with support for the
  SQL IS DISTINCT FROM operator.  This operator is similar to the
  not equals operator, except in terms of NULL handling.  It returns
  true if only one side is NULL, and false if both sides are NULL.

  You can call is_distinct_from on Sequel itself or on Sequel
  objects:

    Sequel.is_distinct_from(:column_a, :column_b)
    Sequel[:column_a].is_distinct_from(:column_b)
    # (column_a IS DISTINCT FROM column_b)

  On databases not supporting IS DISTINCT FROM, support is emulated
  using a CASE statement.

* Column definitions on MySQL can use the
  :on_update_current_timestamp option for ON UPDATE
  CURRENT_TIMESTAMP, which creates a column that will automatically
  have its value set to CURRENT_TIMESTAMP on every update.

* Database#create_function on PostgreSQL now supports a :parallel
  option to set the parallel safety of the function.  The value
  should be :safe, :unsafe, or :restricted.

sequel-5.63.0/doc/release_notes/5.58.0.txt

= New Features

* Dataset#merge and related #merge_* methods have been added for the
  MERGE statement.  MERGE is supported on PostgreSQL 15+, Oracle,
  Microsoft SQL Server, DB2, H2, HSQLDB, and Derby.

  You can use MERGE to insert, update, and/or delete in a single
  query.  You call the #merge_* methods to setup the MERGE
  statement, and #merge to execute it on the database:

    ds = DB[:m1].
      merge_using(:m2, i1: :i2).
      merge_insert(i1: :i2, a: Sequel[:b]+11).
      merge_delete{a > 30}.
      merge_update(i1: Sequel[:i1]+:i2+10, a: Sequel[:a]+:b+20)

    ds.merge
    # MERGE INTO m1 USING m2 ON (i1 = i2)
    # WHEN NOT MATCHED THEN INSERT (i1, a) VALUES (i2, (b + 11))
    # WHEN MATCHED AND (a > 30) THEN DELETE
    # WHEN MATCHED THEN UPDATE SET i1 = (i1 + i2 + 10), a = (a + b + 20)

  On PostgreSQL, the following additional MERGE related methods are
  available:

  * #merge_do_nothing_when_matched
  * #merge_do_nothing_when_not_matched

* A :disable_split_materialized Database option is now supported on
  MySQL.
  This disables split_materialized support in the optimizer, working
  around a bug in MariaDB 10.5+ that causes failures in Sequel's
  association tests.

sequel-5.63.0/doc/release_notes/5.59.0.txt

= New Features

* A require_valid_schema plugin has been added, for checking that
  model classes have schema parsed as expected.

  By default, model classes are not required to have valid schema,
  because it is allowed to have model classes based on arbitrary
  datasets (such as those using joins or set-returning functions),
  and it is not possible to determine the schema for arbitrary
  datasets.

  Sequel swallows non-connection errors when trying to parse schema
  for a model's dataset, but if schema parsing fails when you would
  expect it to succeed, it results in a model where typecasting does
  not work as expected.

  The require_valid_schema plugin will raise an error when setting
  the dataset for a model if schema parsing fails and the dataset
  uses a simple table where you would expect schema parsing to
  succeed.  You can also provide an argument of :warn when loading
  the plugin, to warn instead of raising an error.

  This plugin may not work correctly in all cases for all adapters,
  especially external adapters.  Adapters are not required to
  support schema parsing.  Even if supported, adapters may not
  support parsing schema for qualified tables, or parsing schema for
  views.

  You should consider this plugin as a possible safety net.  Users
  are encouraged to try using it and report any unexpected breakage,
  as that may help improve schema parsing in the adapters that ship
  with Sequel.

* is_json and is_not_json methods have been added to the pg_json_ops
  extension, for the IS [NOT] JSON operator supported in
  PostgreSQL 15+.

* Index creation methods on PostgreSQL 15+ now support a
  :nulls_distinct option, for NULLS [NOT] DISTINCT.  This allows you
  to create unique indexes where NULL values are not considered
  distinct.

* View creation methods on PostgreSQL 15+ now support a
  :security_invoker option to create a view where access is
  determined by the permissions of the role that is accessing the
  view, instead of the role that created the view.

= Other Improvements

* The :allow_eager association option is now set to false by default
  for associations explicitly marked as :instance_specific, if the
  :eager_loader association option is not given.

* The postgres adapter now supports the sequel-postgres-pr driver.
  The sequel-postgres-pr driver is a slimmed down fork of the
  postgres-pr driver designed specifically for use by Sequel.

* Model code that explicitly does not swallow connection errors will
  also now not swallow disconnect errors.  This can fix issues where
  model classes are being loaded at runtime, and the query to get
  the columns/schema for the model uses a connection that has been
  disconnected.

* Model classes created from aliased expressions and literal strings
  no longer use the simple_table optimization, as there are cases
  where doing so is not safe.

= Backwards Compatibility

* The change to not swallow disconnect errors when not swallowing
  connection errors can result in exceptions being raised which
  weren't raised previously.  In most cases, this will alert you to
  issues in your application that should be fixed, but potentially
  it can result in regressions if you were OK with the errors being
  swallowed.  If this does result in regressions in your
  application, please file an issue and we can probably add a
  setting controlling this feature.
sequel-5.63.0/doc/release_notes/5.6.0.txt000066400000000000000000000023701434214120600177060ustar00rootroot00000000000000= Improvements * Running migrations using one of the included migrators on separate Database objects in separate threads simultaneously is now supported. Previously, the migrators were not thread-safe. * On Ruby 2.5+, :db_type entries in the schema hashes are now deduped for a slight memory savings when using many columns with the same database type. * The schema_caching extension now freezes string values in the resulting hashes, just as the default schema parsing code started doing in 5.5.0. * The schema_caching extension now supports the :callable_default schema values used by the pg_json, pg_array, and pg_hstore extensions, by removing the entry before caching and resetting it after restoring the cache. * Identifier mangling rules are now respected when renaming columns on Microsoft SQL Server. = Backwards Compatibility * The migrator internals were modified in order to support thread-safety. The private Migrator#remove_migration_classes method has been removed, and #load_migration_file now returns the migration object/class instead of populating Migration.descendants. Migration.descendants is now only used for temporary storage, and will no longer contain all migration objects/classes used by the migrator. sequel-5.63.0/doc/release_notes/5.60.0.txt000066400000000000000000000015311434214120600177640ustar00rootroot00000000000000= New Features * The date_arithmetic extension now supports arbitrary expressions as interval values on PostgreSQL 9.4+. Previously, only integers were supported for the interval values. = Other Improvements * Most Kernel#respond_to? calls have been converted to equivalent defined? calls for better performance. defined? is a keyword and is about 50% faster for the same behavior. * The is_distinct_from extension now supports the IS DISTINCT FROM syntax natively on SQLite 3.39+, instead of emulating it. * HAVING without GROUP BY is now supported on SQLite 3.39+. * Coverage testing has been significantly expanded. Previously, the core, model, plugin, and extension code had 100% line/branch coverage. 100% line/branch coverage has been added for the core extensions, bin/sequel, and the postgres adapter with the pg driver. sequel-5.63.0/doc/release_notes/5.61.0.txt000066400000000000000000000033111434214120600177630ustar00rootroot00000000000000= Improvements * When typecasting strings to other types, Sequel::Database will now by default not typecast strings that are much longer than expected for the underlying type. Depending on the underlying type, there is a limit of either 100 or 1000 bytes on the input string. This avoids potential performance issues when trying to convert arbitrary sized user input to specific types. * The respond_to? to defined? change made in 5.60.0 was reverted in 5.60.1 as it broke cases on Ruby < 3 where the object had an unused refinement that added the method. * When typecasting strings to integer, strings such as -0xa are now treated as negative hexidecimal strings, similar to how 0xa is treated as a positive hexidecimal string. * Database#foreign_key_list now returns results for partitioned tables on PostgreSQL 11+. * Timestamps before the date of calendar reform are now handled correctly by the pg_extended_date_support extension when using Ruby 3.2 preview 2+. 
= Backwards Compatibility * The change to not typecast strings that are too long can break backwards compatibility for applications that expect typecasting for input beyond Sequel's limits. You can disable the string bytesize checking by setting: DB.check_string_typecast_bytesize = false or by passing the check_string_typecast_bytesize: false option when creating the Database instance. * Code to workaround a bug in JRuby 9.2.0.0 has been removed from the pg_extended_date_support extension. Users of the extension should upgrade to a newer JRuby version. * The is_json and is_not_json methods have been removed from the pg_json_ops extension, as the underlying support was removed in PostgreSQL 15 beta 4. sequel-5.63.0/doc/release_notes/5.62.0.txt000066400000000000000000000126131434214120600177710ustar00rootroot00000000000000= New Features * The pg_auto_parameterize extension for automatically using bound variables when using postgres adapter with the pg driver has been added back to Sequel. This extension was originally added in Sequel 3.34.0, but was removed in 4.0.0 due to the many corner cases it had. Almost all of the corner cases have now been fixed, and the extension is now recommended for production use. Compared to the original version in Sequel 3, the reintroduced version of the extension includes the following changes: * Handles integers used in LIMIT/ORDER * Respects explicit CASTs * Tries to convert column IN (int, ...) into column = ANY($) with an array parameter * Uses the same parameter for the same object used more than once in a query * Uses parameters when inserting multiple rows via Dataset#import * Supports automatically parameterizing all of the PostgreSQL-specific types that Sequel ships support for in pg_* extensions (though some values of those types may not support automatic parameterization). * Supports skipping auto parameterization for specific values. Automatic parameterization is generally slower than Sequel's default behavior, since some optimizations Sequel uses by default do not currently support automatic parameterization. Applications may need changes to work correctly with the pg_auto_parameterize extension, such as the addition of explicit casts. Please read the extension documentation for more details. * Integer column schema entries now include :min_value and :max_value entries on most databases, indicating the minimum and maximum values supported for the column. The validation_helpers plugin now has validates_max_value and validates_min_value for testing the column value is not greater than the given maximum value and not less than the given minimum value, respectively. The auto_validations plugin now automatically uses the :min_value and :max_value column schema entries with the new validation_helpers methods to validate that the column values for integer columns are in the allowed range. * A primary_key_lookup_check_values plugin has been added for automatically typecasting and checking the primary key values are in the allowed range (given by :min_value and :max_value column schema entries) during lookup. If typecasting fails or the value is outside the allowed range, the primary key lookup will return nil without issuing a query (or will raise a NoMatchingRow error if using with_pk!). Note that this can change behavior in some cases if you are passing filter conditions during lookup instead of passing primary key values. The plugin tries to support most common filter conditions, but there are still cases that will break. 
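  As a minimal sketch of the lookup behavior described above (Album
  here is a hypothetical model with an integer primary key):

    Album.plugin :primary_key_lookup_check_values

    Album["a"]           # => nil, no query issued (typecast fails)
    Album[2**70]         # => nil, no query issued (outside allowed range)
    Album.with_pk!("a")  # raises Sequel::NoMatchingRow, no query issued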
* Sequel now supports shard-specific :after_connect and :connect_sqls Database options, allowing you to customize behavior for specific shards: DB = Sequel.connect('url', servers: { :shard1 => {host: '...', after_connect: proc{|conn|}}, :shard2 => {host: '...', connect_sqls: ['...']}, }) Note that these shard-specific options will not be respected if you are calling after_connect= or connect_sqls= on the Database's connection pool. = Other Improvements * A Sequel::Postgres::IntegerOutsideBigintRange exception will now be raised if trying to literalize an integer outside PostgreSQL bigint range, to avoid PostgreSQL treating the integer as a numeric type and not respecting indexes on the related column. A pg_extended_integer_support extension has been added for customizing the behavior when literalizing an integer outside PostgreSQL bigint range, either quoting it or getting the historical behavior of using it directly in the query. * Dataset#import and #multi_insert no longer use transactions when they only run a single query. * Fractional seconds in timestamps are now respected in the named_timezones extension. * Using hstore[] types as bound variables now works on PostgreSQL. * Using BC dates and timestamps in bound variables now works on PostgreSQL. * A corner case has been fixed in eager loading where the window function eager limit strategy would be used without removing the row_number entries from the result. * The shared postgres adapter now caches reflection datasets, speeding up Database#indexes and similar methods. * The mock postgres adapter now assumes PostgreSQL 15 instead of PostgreSQL 14 by default. = Backwards Compatibility * If you are using Ruby integers outside PostgreSQL bigint range when dealing with PostgreSQL numeric column values, this version may not be compatible. It is recommended you explicitly convert the Ruby integers to BigDecimal objects if you are using them for numeric column values. You can also use the pg_extended_integer_support extension introduced in this version. = Workaround for Older Versions * If you cannot upgrade to Sequel 5.62.0, but still want to avoid the problems that come from using literal large integers on PostgreSQL, you can use the following code, where DB is your Sequel::Database object: DB.extend_datasets do def literal_integer(v) if v > 9223372036854775807 || v < -9223372036854775808 raise Sequel::InvalidValue, "PostgreSQL int too large: #{v}" end super end end This workaround should work all the way back to Sequel 3.29.0, released in November 2011. sequel-5.63.0/doc/release_notes/5.63.0.txt000066400000000000000000000030321434214120600177650ustar00rootroot00000000000000= New Features * On Ruby 3.2, the pool_class: :timed_queue Database option can now be used to use an alternative connection pool that stores connections in a queue, and uses the new Queue#pop :timeout option in Ruby 3.2 to implement the pool timeout. This new connection pool is simpler than the default connection pool. It is not yet the default connection pool on Ruby 3.2, but it may become the default in a later version. Users of Ruby 3.2 are encouraged to try out the pool_class: :timed_queue Database option and provide feedback on how it works in their application. = Other Improvements * The tactical_eager_loading plugin now works in combination with the single_table_inheritance and class_table_inheritance plugins, when loading an association only defined in a specific subclass. Previously, eager loading would be skipped in such a case. 
Now, an eager load will be attempted for all instances supporting the association. * The validate_associated plugin now avoids database type errors for non-integer association keys. In cases where the associated object doesn't have a value for the associated key, and the current object does not have a key value that can be set in the associated object, validation errors in the associated object related to the associated key will be ignored. * Thread-keyed connection pool hashes now use compare_by_identity for better performance. * The JRuby workaround in the named_timezones extension is no longer used on JRuby 9.3.9.0+, as JRuby fixed the related bug. sequel-5.63.0/doc/release_notes/5.7.0.txt000066400000000000000000000112251434214120600177060ustar00rootroot00000000000000= New Features * An integer64 extension has been added, which treats the Integer class as a generic 64-bit integer type. Sequel's default behavior for Integer is to use the integer type, which on most databases is a 32-bit type. This affects all internal use of the Integer class as a generic database type, so that methods like primary_key and foreign_key also default to using a 64-bit integer type when using this extension. * When using PostgreSQL 10+, you can use the :identity option when creating columns to create identity columns: DB.create_table(:table){Integer :id, identity: true} # CREATE TABLE "table" ("id" integer GENERATED BY DEFAULT AS IDENTITY) If you want to disallow using a user provided value when inserting, or updating you can use a value of :always: DB.create_table(:table){Integer :id, identity: :always} # CREATE TABLE "table" ("id" integer GENERATED ALWAYS AS IDENTITY) * Database#convert_serial_to_identity has been added on PostgreSQL 10.2+. This method can convert existing serial columns to identity columns in most cases, but it currently requires superuser permissions as it modifies the system tables directly. * Dataset#overriding_system_value and #overriding_user_value are now supported on PostgreSQL to work with identity columns. You can use #overriding_system_value to force the use of a user provided value for identity columns that are GENERATED ALWAYS, and you can use #overriding_user_value to ignore any user value for identity columns and always use the next entry in the sequence. = Other Improvements * On PostgreSQL 10.2+, identity columns are now used instead of serial columns as the default for auto incrementing primary keys: DB.create_table(:table){primary_key :id} # Sequel 5.7.0+ and PostgreSQL 10.2+ # CREATE TABLE "table" ("id" integer # GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY) # Older Sequel version or older PostgreSQL version # CREATE TABLE "table" ("id" serial PRIMARY KEY) Identity columns fix many issues that serial columns have, in addition to being the SQL standard way to support auto incrementing columns. * PostgreSQL identity columns are now correctly recognized and the :auto_increment schema entry is now populated for them. * Dataset#with_sql_{all,each,first,single_value} now use a cached dataset to avoid clobbering the current dataset's columns. Previously, the clobbering of the current dataset's columns was documented and the method warned against using SQL with different columns. These methods are now safe to use in such cases, but will not have the same performance advantages if the current dataset is not cached. * On ruby 2.1+, Sequel now uses Process::CLOCK_MONOTONIC when performing elapsed time calculations so that it is not affected by modifications to the system's time. 
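  As an illustration of why this matters (a minimal sketch, not
  Sequel's actual internals), elapsed time is measured along these
  lines:

    t0 = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    DB[:table].all # run the query being timed
    elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - t0
    # elapsed remains accurate even if the system clock is adjusted
    # while the query runs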
* In the postgres adapter, prepared statement errors related to changing types are now treated as disconnect errors. While they are not technically disconnect errors, treating them as such will in general reduce the total number of exceptions generated from 1 per affected statement per connection to 1 per connection. * In the pg_array_associations plugin, the array_type for pg_array_to_many and many_to_pg_array association reflections is now always the scalar type for the array (e.g. integer). Previously, the array type (e.g. integer[]) was used in some cases. This didn't previously result in issues as PostgreSQL considers integer[][] the same type as integer[]. * In the pg_array_associations plugin, the many_to_pg_array association remove_all_* method now uses the appropriate cast to work for non-integer array types such as bigint[]. * Database#server_version on PostgreSQL 10.1+ now works correctly when the connection does not support the server_version method. Now the server_version_num database setting is always used to ensure consistent behavior across adapters. * In the jdbc/oracle adapter, temporary clobs are now manually freed to prevent a memory leak, in line with the Oracle JDBC driver recommendations. * The Sequel <4 release notes and changelog are no longer shipped with the gem, decreasing the size of the gem by 20%. = Backwards Compatibility * The switch to using identity columns instead of serial columns by default on PostgreSQL 10.2+ may break backwards compatibilty in some situations, such as code that relies on what are generally considered bugs in serial columns, such as CREATE TABLE LIKE using the same sequence for the column in both the existing table and the new table, or that dropping the default value for the column does not drop the related sequence. sequel-5.63.0/doc/release_notes/5.8.0.txt000066400000000000000000000147411434214120600177150ustar00rootroot00000000000000= New Features * A pg_auto_constraint_validations plugin has been added, which automatically converts many constraint violations raised as exceptions to ValidationFailed exceptions when saving a model instance. The following constraint violation types are recognized and supported: * NOT NULL * CHECK * UNIQUE (except expression/functional indexes) * FOREIGN KEY (both referencing and referenced by) In the cases where the plugin cannot determine an appropriate validation failure for the constraint violation, it just reraises the original exception. This plugin is not intended as a replacement for other validations, it is intended as a last resort. The purpose of validations is to provide nice error messages for the user, and the error messages generated by this plugin are fairly generic. The error messages can be customized using the :messages plugin option, but there is only a single message used per constraint type. * Database#check_constraints has been added on PostgreSQL. 
This returns metadata related to each check constraint on a table: DB.create_table(:foo) do Integer :i Integer :j constraint(:ic, Sequel[:i] > 2) constraint(:jc, Sequel[:j] > 2) constraint(:ijc, Sequel[:i] - Sequel[:j] > 2) end DB.check_constraints(:foo) # => { # :ic=>{:definition=>"CHECK ((i > 2))", :columns=>[:i]}, # :jc=>{:definition=>"CHECK ((j > 2))", :columns=>[:j]}, # :ijc=>{:definition=>"CHECK (((i - j) > 2))", :columns=>[:i, :j]} # } * Database#foreign_key_list now supports a :reverse option on PostgreSQL, which returns foreign keys referencing the given table, instead of of foreign keys in the given table referencing other tables: DB.create_table!(:a) do primary_key :id Integer :i Integer :j foreign_key :a_id, :a, :foreign_key_constraint_name=>:a_a unique [:i, :j] end DB.create_table!(:b) do foreign_key :a_id, :a, :foreign_key_constraint_name=>:a_a Integer :c Integer :d foreign_key [:c, :d], :a, :key=>[:j, :i], :name=>:a_c_d end DB.foreign_key_list(:a, :reverse=>true) # => [ # {:name=>:a_a, :columns=>[:a_id], :key=>[:id], :on_update=>:no_action, # :on_delete=>:no_action, :deferrable=>false, :table=>:a, :schema=>:public}, # {:name=>:a_a, :columns=>[:a_id], :key=>[:id], :on_update=>:no_action, # :on_delete=>:no_action, :deferrable=>false, :table=>:b, :schema=>:public}, # {:name=>:a_c_d, :columns=>[:c, :d], :key=>[:j, :i], :on_update=>:no_action, # :on_delete=>:no_action, :deferrable=>false, :table=>:b, :schema=>:public} # ] * Dataset#nowait has been added, which will make the query fail with a Sequel::DatabaseLockTimeout exception if it encounters a locked row, overriding the default database behavior that would wait until the lock was released. This method is supported on PostgreSQL, Microsoft SQL Server, Oracle, and MySQL 8+. * Database#indexes now supports an :include_partial option on PostgreSQL, which will include partial indexes in the output (Sequel by default excludes partial indexes). * Common table expressions and window functions are now supported when using MySQL 8+. * Dataset#skip_locked is now supported on MySQL 8+. * The connection_expiration extension now supports a Database#connection_expiration_random_delay attribute, which is used to randomize the expiration times, avoiding the thundering herd problem. * The pg_enum extension now supports a rename_enum method for renaming existing enum types. * Database#error_info on PostgreSQL now returns much more metadata regarding the error. = Other Improvements * The dataset returned by the following dataset methods is cached, which can improve performance significantly in certain cases: * #distinct (without arguments or block) * #from_self (without options) * #lateral * #qualify (without argument) * #returning (without arguments) * #select_all (without arguments) * If the primary_key serial: true, type: :serial, or type: :bigserial options are given on PostgreSQL 10.2+, use a serial primary key instead of an identity primary key. This change was included in Sequel 5.7.1. * The :search_path Database option is now supported as a shard option on PostgreSQL, so different shards can use different search paths. * The correct column order in Database#foreign_key_list on MySQL is now forced, fixing issues on MySQL 8+. * When using case sensitive regexp matches on MySQL 8+, Sequel now uses the REGEXP_LIKE function instead of the REGEXP BINARY operator, to work around what appears to be a bug in MySQL 8+ related to the change in MySQL's regexp engine. 
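  For example, given a case-sensitive regexp filter (a sketch; the
  exact SQL emitted may vary by adapter and version):

    DB[:albums].where(name: /^A/)
    # MySQL <8: uses the REGEXP BINARY operator for this
    #           case-sensitive match
    # MySQL 8+: uses the REGEXP_LIKE function instead, per the
    #           change above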
* On MySQL 5.7+, the :extended option to Dataset#explain is now
  ignored, since the :extended option's behavior in previous MySQL
  versions is now the default behavior.

* The MySQL HY000 generic SQL state error code is now ignored in the
  mysql2 adapter, so it falls back to using the more accurate backup
  error mapping in that case.

* The pg_enum extension's schema modification methods now work
  correctly if the Database instance is frozen.

* The tactical_eager_loading plugin now respects the :allow_eager
  association option, and will not attempt to eagerly load
  associations when :allow_eager is false.

* Using multiple add_constraint calls and a set_column_null call in
  the same alter_table block on SQLite now works correctly.  Note
  that if you are planning on ever modifying existing tables beyond
  adding columns, you should probably choose a database that
  natively supports such modification (SQLite does not).

* Hashes returned by Database#foreign_key_list on PostgreSQL now
  include a :schema entry, unless the support has been enabled to
  make the :table entry be a qualified identifier.

* Dataset#supports_cte?(:insert) no longer returns true on
  SQLAnywhere.  SQLAnywhere only supports common table expressions
  for INSERT ... SELECT, not for all INSERT statements.
  INSERT ... WITH ... SELECT is already supported in Sequel using:

    DB[:t1].insert(DB[:t2].with(DB[:t3]))

* Model#_valid? is no longer made a public method in the
  error_splitter plugin.

= Backwards Compatibility

* Calling the filter method on a proxy object returned by the
  association_proxies plugin now warns on ruby <2.6.  This is
  because starting in ruby 2.6, the behavior will change and the
  method will be called on the array of associated objects instead
  of on the dataset, as Enumerable#filter is being added in
  ruby 2.6.

sequel-5.63.0/doc/release_notes/5.9.0.txt

= New Features

* An escaped_like extension has been added, for the creation of
  LIKE/ILIKE expressions with placeholders in patterns without
  access to a dataset.  This adds escaped_like and escaped_ilike
  methods to the same Sequel expression objects that support like
  and ilike.  These methods take two arguments, the first being the
  pattern, with ? placeholders, and the second being the placeholder
  value (which can be an array for multiple placeholders):

    Sequel.extension :escaped_like
    DB[:table].where{string_column.escaped_like('?%', user_input)}
    # user_input is 'foo':
    #   SELECT * FROM table WHERE string_column LIKE 'foo%'
    # user_input is '%foo':
    #   SELECT * FROM table WHERE string_column LIKE '\%foo%'

* Generated columns on MySQL 5.7+ and MariaDB 5.2+ are now supported
  using the :generated_always_as option when creating the column.
  The :generated_type option can also be used to specify the type of
  generated column (virtual or stored).  Examples:

    DB.add_column :t, :c, String, generated_always_as: Sequel[:a]+'b'
    # ALTER TABLE `t` ADD COLUMN `c` varchar(255)
    # GENERATED ALWAYS AS (CONCAT(`a`, 'b'))

    DB.add_column :t, :c, String, generated_always_as: Sequel[:a]+'b',
                  generated_type: :virtual
    # ALTER TABLE `t` ADD COLUMN `c` varchar(255)
    # GENERATED ALWAYS AS (CONCAT(`a`, 'b')) VIRTUAL

    DB.add_column :t, :c, String, generated_always_as: Sequel[:a]+'b',
                  generated_type: :stored
    # ALTER TABLE `t` ADD COLUMN `c` varchar(255)
    # GENERATED ALWAYS AS (CONCAT(`a`, 'b')) STORED

* Sequel::Model.has_dataset? has been added for checking whether the
  model class has an associated dataset.
This will generally be true for most model classes, but will be false for abstract model classes (such as Sequel::Model itself). * Sequel::VERSION_NUMBER has been added for easier future version comparisons. The version number for 5.9.0 is 50090. = Other Improvements * When disconnecting connections in the threaded connection pools, the disconnection is performed without holding the connection pool mutex, since disconnection may block. * The sharded threaded connection pool no longer deadlocks when disconnecting connections if the connection_validator or connection_expiration extension is used. * If a thread dies and does not check a connection back into the connection pool, Sequel now disconnects the connection when it detects the dead thread, instead of assuming the connection is safe to be reused. * When using eager_graph with cascaded associations, a unique object is now used instead of a shared object in cases where using a shared object may cause further cascaded associated objects to be duplicated. * On PostgreSQL, the ESCAPE modifier to the LIKE/ILIKE operators is no longer used, since the default ESCAPE value is the one Sequel uses. This change was made in order to allow the LIKE/ILIKE operators to work with the ANY function, as PostgreSQL does not support the use of the ESCAPE modifier in such cases. * A hash argument passed to Model.nested_attributes in the nested_attributes plugin is now no longer modified. * Internal data structures for eager and eager_graph datasets are now frozen to avoid unintentional modification. * Nondeterministic behavior in Database#foreign_key_list with the :reverse option on PostgreSQL is now avoided by using an unambiguous order. * Performance has been improved slightly by avoiding unnecessary hash allocations. * Performance has been improved slightly by using while instead of Kernel#loop. * BigDecimal() is now used instead of BigDecimal.new(), as the latter has been deprecated. * The jdbc adapter now avoids referencing ::NativeException on JRuby 9.2+, since JRuby has deprecated it. It is still used on older versions of JRuby, since some JRuby 1.7 code may still require it. * Sequel now works around multiple Date/Time conversion bugs in JRuby 9.2.0.0 for BC dates in the pg_extended_date_support extension. These bugs have already been fixed in JRuby, and the workarounds will be removed after the release of JRuby 9.2.1.0. sequel-5.63.0/doc/schema_modification.rdoc000066400000000000000000000541101434214120600205040ustar00rootroot00000000000000= Schema modification methods Here's a brief description of the most common schema modification methods: == +create_table+ +create_table+ is the most common schema modification method, and it's used for adding new tables to the database. You provide it with the name of the table as a symbol, as well a block: create_table(:artists) do primary_key :id String :name end Note that if you want a primary key for the table, you need to specify it, Sequel does not create one by default. === Column types Most method calls inside the create_table block will create columns, since +method_missing+ calls +column+. Columns are generally created by specifying the column type as the method name, followed by the column name symbol to use, and after that any options that should be used. If the method is a ruby class name that Sequel recognizes, Sequel will transform it into the appropriate type for the given database. So while you specified +String+, Sequel will actually use +varchar+ or +text+ depending on the underlying database. 
Here's a list of all ruby classes that Sequel will convert to database types: create_table(:columns_types) do # common database type used Integer :a0 # integer String :a1 # varchar(255) String :a2, size: 50 # varchar(50) String :a3, fixed: true # char(255) String :a4, fixed: true, size: 50 # char(50) String :a5, text: true # text File :b # blob Fixnum :c # integer Bignum :d # bigint Float :e # double precision BigDecimal :f # numeric BigDecimal :f2, size: 10 # numeric(10) BigDecimal :f3, size: [10, 2] # numeric(10, 2) Date :g # date DateTime :h # timestamp Time :i # timestamp Time :i2, only_time: true # time Numeric :j # numeric TrueClass :k # boolean FalseClass :l # boolean end Note that in addition to the ruby class name, Sequel also pays attention to the column options when determining which database type to use. Also note that for boolean columns, you can use either TrueClass or FalseClass, they are treated the same way (ruby doesn't have a Boolean class). Also note that this conversion is only done if you use a supported ruby class name. In all other cases, Sequel uses the type specified verbatim: create_table(:columns_types) do # database type used string :a1 # string datetime :a2 # datetime blob :a3 # blob inet :a4 # inet end In addition to specifying the types as methods, you can use the +column+ method and specify the types as the second argument, either as ruby classes, symbols, or strings: create_table(:columns_types) do # database type used column :a1, :string # string column :a2, String # varchar(255) column :a3, 'string' # string column :a4, :datetime # datetime column :a5, DateTime # timestamp column :a6, 'timestamp(6)' # timestamp(6) end If you use a ruby class as the type, Sequel will try to guess the appropriate type name for the database you are using. If a symbol or string is used as the type, it is used verbatim as the type name in SQL, with the exception of :Bignum. Using the symbol :Bignum as a type will use the appropriate 64-bit integer type for the database you are using. === Column options When using the type name as method, the third argument is an options hash, and when using the +column+ method, the fourth argument is the options hash. The following options are supported: :default :: The default value for the column. :index :: Create an index on this column. If given a hash, use the hash as the options for the index. :null :: Mark the column as allowing NULL values (if true), or not allowing NULL values (if false). If unspecified, will default to whatever the database default is (usually true). :primary_key :: Mark this column as the primary key. This is used instead of the primary key method if you want a non-autoincrementing primary key. :primary_key_constraint_name :: The name to give the primary key constraint. :type :: Overrides the type given as the method name or a separate argument. Not usually used by +column+ itself, but often by other methods such as +primary_key+ or +foreign_key+. :unique :: Mark the column as unique, generally has the same effect as creating a unique index on the column. :unique_constraint_name :: The name to give the unique constraint. === Other methods In addition to the +column+ method and other methods that create columns, there are other methods that can be used: ==== +primary_key+ You've seen this one used already. It's used to create an autoincrementing integer primary key column. 
create_table(:a0){primary_key :id} If you want an autoincrementing 64-bit integer: create_table(:a0){primary_key :id, type: :Bignum} If you want to create a primary key column that doesn't use an autoincrementing integer, you should not use this method. Instead, you should use the :primary_key option to the +column+ method or type method: create_table(:a1){Integer :id, primary_key: true} # Non autoincrementing integer primary key create_table(:a2){String :name, primary_key: true} # varchar(255) primary key If you want to create a composite primary key, you should call the +primary_key+ method with an array of column symbols. You can provide a specific name to use for the primary key constraint via the :name option: create_table(:items) do Integer :group_id Integer :position primary_key [:group_id, :position], name: :items_pk end If provided with an array, +primary_key+ does not create a column, it just sets up the primary key constraint. ==== +foreign_key+ +foreign_key+ is used to create a foreign key column that references a column in another table (or the same table). It takes the column name as the first argument, the table it references as the second argument, and an options hash as its third argument. A simple example is: create_table(:albums) do primary_key :id foreign_key :artist_id, :artists String :name end +foreign_key+ accepts the same options as +column+. For example, to have a unique foreign key with varchar(16) type: foreign_key :column_name, :table, unique: true, type: 'varchar(16)' +foreign_key+ also accepts some specific options: :deferrable :: Makes the foreign key constraint checks deferrable, so they aren't checked until the end of the transaction. :foreign_key_constraint_name :: The name to give the foreign key constraint. :key :: The column in the associated table that this column references. Unnecessary if this column references the primary key of the associated table, at least on most databases. :on_delete :: Specify the behavior of this foreign key column when the row with the primary key it references is deleted, can be :restrict, :cascade, :set_null, or :set_default. You can also use a string, which is used literally. :on_update :: Specify the behavior of this foreign key column when the row with the primary key it references modifies the value of the primary key. Takes the same options as :on_delete. Like +primary_key+, if you provide +foreign_key+ with an array of symbols, it will not create a column, but create a foreign key constraint: create_table(:artists) do String :name String :location primary_key [:name, :location] end create_table(:albums) do String :artist_name String :artist_location String :name foreign_key [:artist_name, :artist_location], :artists end When using an array of symbols, you can also provide a :name option to name the constraint: create_table(:albums) do String :artist_name String :artist_location String :name foreign_key [:artist_name, :artist_location], :artists, name: 'albums_artist_name_location_fkey' end If you want to add a foreign key for a single column with a named constraint, you must use the array form with a single symbol: create_table(:albums) do primary_key :id Integer :artist_id String :name foreign_key [:artist_id], :artists, name: 'albums_artist_id_fkey' end ==== +index+ +index+ creates indexes on the table. 
For single columns, calling index is the same as using the :index option when creating the column:

  create_table(:a){Integer :id, index: true}
  # Same as:
  create_table(:a) do
    Integer :id
    index :id
  end

  create_table(:a){Integer :id, index: {unique: true}}
  # Same as:
  create_table(:a) do
    Integer :id
    index :id, unique: true
  end

Similar to the +primary_key+ and +foreign_key+ methods, calling +index+ with an array of symbols will create a multiple column index:

  create_table(:albums) do
    primary_key :id
    foreign_key :artist_id, :artists
    Integer :position
    index [:artist_id, :position]
  end

The +index+ method also accepts some options:

:name :: The name of the index (generated based on the table and column names if not provided).
:type :: The type of index to use (only supported by some databases)
:unique :: Make the index unique, so duplicate values are not allowed.
:where :: Create a partial index (only supported by some databases)

==== +unique+

The +unique+ method creates a unique constraint on the table.  A unique constraint generally operates identically to a unique index, so the following three +create_table+ blocks are pretty much identical:

  create_table(:a){Integer :a, unique: true}

  create_table(:a) do
    Integer :a
    index :a, unique: true
  end

  create_table(:a) do
    Integer :a
    unique :a
  end

Just like +index+, +unique+ can set up a multiple column unique constraint, where the combination of the columns must be unique:

  create_table(:a) do
    Integer :a
    Integer :b
    unique [:a, :b]
  end

==== +full_text_index+ and +spatial_index+

Both of these create specialized index types supported by some databases.  They both take the same options as +index+.

==== +constraint+

+constraint+ creates a named table constraint:

  create_table(:artists) do
    primary_key :id
    String :name
    constraint(:name_min_length){char_length(name) > 2}
  end

Instead of using a block, you can use arguments that will be handled similarly to Dataset#where:

  create_table(:artists) do
    primary_key :id
    String :name
    constraint(:name_length_range, Sequel.function(:char_length, :name)=>3..50)
  end

==== +check+

+check+ operates just like +constraint+, except that it doesn't take a name and it creates an unnamed constraint:

  create_table(:artists) do
    primary_key :id
    String :name
    check{char_length(name) > 2}
  end

It's recommended that you use the +constraint+ method and provide a name for the constraint, as that makes it easier to drop the constraint later if necessary.

== +create_join_table+

+create_join_table+ is a shortcut that you can use to create simple many-to-many join tables:

  create_join_table(artist_id: :artists, album_id: :albums)

which expands to:

  create_table(:albums_artists) do
    foreign_key :album_id, :albums
    foreign_key :artist_id, :artists
    primary_key [:album_id, :artist_id]
    index [:artist_id, :album_id]
  end

== create_table :as

To create a table from the result of a SELECT query, instead of passing a block to +create_table+, provide a dataset to the :as option:

  create_table(:older_items, as: DB[:items].where{updated_at < Date.today << 6})

== +alter_table+

+alter_table+ is used to alter existing tables, changing their columns, indexes, or constraints.  It is used just like +create_table+, accepting a block which is instance_evaled, and providing its own methods:

=== +add_column+

One of the most common methods, +add_column+ is used to add a column to the table.
Its API is similar to that of +create_table+'s +column+ method, where the first argument is the column name, the second is the type, and the third is an options hash: alter_table(:albums) do add_column :copies_sold, Integer, default: 0 end === +drop_column+ As you may expect, +drop_column+ takes a column name and drops the column. It's often used in the +down+ block of a migration to drop a column added in an +up+ block: alter_table(:albums) do drop_column :copies_sold end === +rename_column+ +rename_column+ is used to rename a column. It takes the old column name as the first argument, and the new column name as the second argument: alter_table(:albums) do rename_column :copies_sold, :total_sales end === +add_primary_key+ If you forgot to include a primary key on the table, and want to add one later, you can use +add_primary_key+. A common use of this is to make many_to_many association join tables into real models: alter_table(:albums_artists) do add_primary_key :id end Just like +create_table+'s +primary_key+ method, if you provide an array of symbols, Sequel will not add a column, but will add a composite primary key constraint: alter_table(:albums_artists) do add_primary_key [:album_id, :artist_id] end It is possible to specify a name for the primary key constraint: via the :name option: alter_table(:albums_artists) do add_primary_key [:album_id, :artist_id], name: :albums_artists_pkey end If you just want to take an existing single column and make it a primary key, call +add_primary_key+ with an array with a single symbol: alter_table(:artists) do add_primary_key [:id] end === +add_foreign_key+ +add_foreign_key+ can be used to add a new foreign key column or constraint to a table. Like +add_primary_key+, if you provide it with a symbol as the first argument, it creates a new column: alter_table(:albums) do add_foreign_key :artist_id, :artists end If you want to add a new foreign key constraint to an existing column, you provide an array with a single element: alter_table(:albums) do add_foreign_key [:artist_id], :artists end It's encouraged to provide a name when adding the constraint, via the :foreign_key_constraint_name option if adding the column and the constraint: alter_table(:albums) do add_foreign_key :artist_id, :artists, foreign_key_constraint_name: :albums_artist_id_fkey end or via the :name option if just adding the constraint: alter_table(:albums) do add_foreign_key [:artist_id], :artists, name: :albums_artist_id_fkey end To set up a multiple column foreign key constraint, use an array with multiple column symbols: alter_table(:albums) do add_foreign_key [:artist_name, :artist_location], :artists, name: :albums_artist_name_location_fkey end === +drop_foreign_key+ +drop_foreign_key+ is used to drop foreign keys from tables. If you provide a symbol as the first argument, it drops both the foreign key constraint and the column: alter_table(:albums) do drop_foreign_key :artist_id end If you want to just drop the foreign key constraint without dropping the column, use an array. 
It's encouraged to use the :name option to provide the constraint name to drop, though on some databases Sequel may be able to find the name through introspection: alter_table(:albums) do drop_foreign_key [:artist_id], name: :albums_artist_id_fkey end An array is also used to drop a composite foreign key constraint: alter_table(:albums) do drop_foreign_key [:artist_name, :artist_location], name: :albums_artist_name_location_fkey end If you do not provide a :name option and Sequel is not able to determine the name to use, it will probably raise a Sequel::Error exception. === +add_index+ +add_index+ works just like +create_table+'s +index+ method, creating a new index on the table: alter_table(:albums) do add_index :artist_id end It accepts the same options as +create_table+'s +index+ method, and you can set up a multiple column index using an array: alter_table(:albums_artists) do add_index [:album_id, :artist_id], unique: true end === +drop_index+ As you may expect, +drop_index+ drops an existing index: alter_table(:albums) do drop_index :artist_id end Just like +drop_column+, it is often used in the +down+ block of a migration. To drop an index with a specific name, use the :name option: alter_table(:albums) do drop_index :artist_id, name: :artists_id_index end === +add_full_text_index+, +add_spatial_index+ Corresponding to +create_table+'s +full_text_index+ and +spatial_index+ methods, these two methods create new indexes on the table. === +add_constraint+ This adds a named constraint to the table, similar to +create_table+'s +constraint+ method: alter_table(:albums) do add_constraint(:name_min_length){char_length(name) > 2} end There is no method to add an unnamed constraint, but you can pass +nil+ as the first argument of +add_constraint+ to do so. However, it's not recommended to do that as it is more difficult to drop such a constraint. === +add_unique_constraint+ This adds a unique constraint to the table, similar to +create_table+'s +unique+ method. This usually has the same effect as adding a unique index. alter_table(:albums) do add_unique_constraint [:artist_id, :name] end You can also specify a name via the :name option when adding the constraint: alter_table(:albums) do add_unique_constraint [:artist_id, :name], name: :albums_artist_id_name_ukey end === +drop_constraint+ This method drops an existing named constraint: alter_table(:albums) do drop_constraint(:name_min_length) end There is no database independent method to drop an unnamed constraint. Generally, the database will give it a name automatically, and you will have to figure out what it is. For that reason, you should not add unnamed constraints that you ever might need to remove. On some databases, you must specify the type of constraint via a :type option: alter_table(:albums) do drop_constraint(:albums_pk, type: :primary_key) drop_constraint(:albums_fk, type: :foreign_key) drop_constraint(:albums_uk, type: :unique) end === +set_column_default+ This modifies the default value of a column: alter_table(:albums) do set_column_default :copies_sold, 0 end To remove a default value for a column, use +nil+ as the value: alter_table(:albums) do set_column_default :copies_sold, nil end === +set_column_type+ This modifies a column's type. 
Most databases will attempt to convert existing values in the column to the new type:

  alter_table(:albums) do
    set_column_type :copies_sold, :Bignum
  end

You can specify the type as a string or symbol, in which case it is used verbatim, or as a supported ruby class or the :Bignum symbol, in which case it gets converted to an appropriate database type.

=== +set_column_allow_null+

This allows you to set the column as allowing NULL values:

  alter_table(:albums) do
    set_column_allow_null :artist_id
  end

=== +set_column_not_null+

This allows you to set the column as not allowing NULL values:

  alter_table(:albums) do
    set_column_not_null :artist_id
  end

== Other +Database+ schema modification methods

Sequel::Database has many schema modification instance methods, most of which are shortcuts to the same methods in +alter_table+. The following +Database+ instance methods just call +alter_table+ with a block that calls the method with the same name inside the +alter_table+ block with all arguments after the first argument (which is used as the table name):

* +add_column+
* +drop_column+
* +rename_column+
* +add_index+
* +drop_index+
* +set_column_default+
* +set_column_type+

For example, the following two method calls do the same thing:

  alter_table(:artists){add_column :copies_sold, Integer}
  add_column :artists, :copies_sold, Integer

There are some other schema modification methods that have no +alter_table+ counterpart:

=== +drop_table+

+drop_table+ takes multiple arguments and treats all arguments as a table name to drop:

  drop_table(:albums_artists, :albums, :artists)

Note that when dropping tables, you may need to drop them in a specific order if you are using foreign keys and the database is enforcing referential integrity. In general, you need to drop the tables containing the foreign keys before the tables containing the primary keys they reference.

=== drop_table?

drop_table? is similar to drop_table, except that it only drops the table if the table already exists. On some databases, it uses IF EXISTS; on others, it does a separate query to check for existence.

=== +rename_table+

You can rename an existing table using +rename_table+. Like +rename_column+, the first argument is the current name, and the second is the new name:

  rename_table(:artist, :artists)

=== create_table!

create_table! drops the table if it exists before attempting to create it, so:

  create_table!(:artists) do
    primary_key :id
  end

is the same as:

  drop_table?(:artists)
  create_table(:artists) do
    primary_key :id
  end

=== create_table?

create_table? only creates the table if it does not already exist, so:

  create_table?(:artists) do
    primary_key :id
  end

is the same as:

  unless table_exists?(:artists)
    create_table(:artists) do
      primary_key :id
    end
  end

=== +create_view+ and +create_or_replace_view+

These can be used to create views. The difference between them is that +create_or_replace_view+ will unconditionally replace an existing view of the same name, while +create_view+ will probably raise an error if a view with that name already exists. Both methods take the name as the first argument, and either a string or a dataset as the second argument:

  create_view(:gold_albums, DB[:albums].where{copies_sold > 500000})
  create_or_replace_view(:gold_albums, "SELECT * FROM albums WHERE copies_sold > 500000")

=== +drop_view+

+drop_view+ drops existing views.
Just like +drop_table+, it can accept multiple arguments: drop_view(:gold_albums, :platinum_albums) sequel-5.63.0/doc/security.rdoc000066400000000000000000000406231434214120600163720ustar00rootroot00000000000000= Security Considerations with Sequel When using Sequel, there are some security areas you should be aware of: * Code Execution * SQL Injection * Denial of Service * Mass Assignment * General Parameter Handling == Code Execution The most serious security vulnerability you can have in any library is a code execution vulnerability. Sequel should not be vulnerable to this, as it never calls eval on a string that is derived from user input. However, some Sequel methods used for creating methods via metaprogramming could conceivably be abused to do so: * Sequel::Dataset.def_sql_method * Sequel::JDBC.load_driver * Sequel::Plugins.def_dataset_methods * Sequel::Dataset.prepared_statements_module (private) * Sequel::SQL::Expression.to_s_method (private) As long as you don't call those with user input, you should not be vulnerable to code execution. == SQL Injection The primary security concern in SQL database libraries is SQL injection. Because Sequel promotes using ruby objects for SQL concepts instead of raw SQL, it is less likely to be vulnerable to SQL injection. However, because Sequel still makes it easy to use raw SQL, misuse of the library can result in SQL injection in your application. There are basically two kinds of possible SQL injections in Sequel: * SQL code injections * SQL identifier injections === SQL Code Injections ==== Full SQL Strings Some Sequel methods are designed to execute raw SQL strings, including: * Sequel::Database#execute * Sequel::Database#execute_ddl * Sequel::Database#execute_dui * Sequel::Database#execute_insert * Sequel::Database#run * Sequel::Database#<< * Sequel::Dataset#fetch_rows * Sequel::Dataset#with_sql_all * Sequel::Dataset#with_sql_delete * Sequel::Dataset#with_sql_each * Sequel::Dataset#with_sql_first * Sequel::Dataset#with_sql_insert * Sequel::Dataset#with_sql_single_value * Sequel::Dataset#with_sql_update Here are some examples of use: DB.execute 'SQL' DB.execute_ddl 'SQL' DB.execute_dui 'SQL' DB.execute_insert 'SQL' DB.run 'SQL' DB << 'SQL' DB.fetch_rows('SQL'){|row| } DB.dataset.with_sql_all('SQL') DB.dataset.with_sql_delete('SQL') DB.dataset.with_sql_each('SQL'){|row| } DB.dataset.with_sql_first('SQL') DB.dataset.with_sql_insert('SQL') DB.dataset.with_sql_single_value('SQL') DB.dataset.with_sql_update('SQL') If you pass a string to these methods that is derived from user input, you open yourself up to SQL injection. These methods are not designed to work at all with user input. If you must call them with user input, you should escape the user input manually via Sequel::Database#literal. Example: DB.run "SOME SQL #{DB.literal(params[:user].to_s)}" ==== Full SQL Strings, With Possible Placeholders Other Sequel methods are designed to support execution of raw SQL strings that may contain placeholders: * Sequel::Database#[] * Sequel::Database#fetch * Sequel::Dataset#with_sql Here are some examples of use: DB['SQL'].all DB.fetch('SQL').all DB.dataset.with_sql('SQL').all With these methods you should use placeholders, in which case Sequel automatically escapes the input: DB['SELECT * FROM foo WHERE bar = ?', params[:user].to_s] ==== Manually Created Literal Strings Sequel generally treats ruby strings as SQL strings (escaping them correctly), and not as raw SQL. 
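For example, even a malicious-looking string is escaped when it is used as a value (a sketch; output shown with PostgreSQL-style quoting):

  DB.literal("d'oh") # "'d''oh'"
  DB[:albums].where(name: "A'; DROP TABLE albums;--")
  # SELECT * FROM "albums" WHERE ("name" = 'A''; DROP TABLE albums;--')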
However, you can convert a ruby string to a literal string, and Sequel will then treat it as raw SQL. This is typically done through Sequel.lit[rdoc-ref:Sequel::SQL::Builders#lit]:

  Sequel.lit('a')

Using Sequel.lit[rdoc-ref:Sequel::SQL::Builders#lit] to turn a ruby string into a literal string results in SQL injection if the string is derived from user input. Literal strings created this way can contain placeholders, which you can use to safely include user input inside a literal string:

  Sequel.lit('a = ?', params[:user_id].to_s)

Even though they have similar names, note that Sequel::Database#literal operates very differently from String#lit or Sequel.lit[rdoc-ref:Sequel::SQL::Builders#lit]. Sequel::Database#literal is for taking any supported object, and getting an SQL representation of that object, while String#lit or Sequel.lit[rdoc-ref:Sequel::SQL::Builders#lit] are for treating a ruby string as raw SQL. For example:

  DB.literal(Date.today)         # "'2013-03-22'"
  DB.literal('a')                # "'a'"
  DB.literal(Sequel.lit('a'))    # "a"
  DB.literal(a: 'a')             # "(\"a\" = 'a')"
  DB.literal(a: Sequel.lit('a')) # "(\"a\" = a)"

==== SQL Filter Fragments

Starting in Sequel 5, Sequel does not automatically convert plain strings to literal strings in typical code. Instead, you can use Sequel.lit to create literal strings:

  Sequel.lit("name > 'A'")

To safely include user input as part of an SQL filter fragment, use Sequel.lit with placeholders:

  DB[:table].where(Sequel.lit("name > ?", params[:id].to_s)) # Safe

Be careful to never call Sequel.lit where the first argument is derived from user input. There are a few uncommon cases where Sequel will still convert plain strings to literal strings.

==== SQL Fragment passed to Dataset#lock_style and Model#lock!

The Sequel::Dataset#lock_style and Sequel::Model#lock! methods also treat an input string as SQL code. These methods should not be called with user input.

  DB[:table].lock_style(params[:id]) # SQL injection!
  Album.first.lock!(params[:id])     # SQL injection!

==== SQL Type Names

In general, in most places where Sequel needs to use an SQL type that should be specified by the user, it allows you to use a ruby string, and that string is used verbatim as the SQL type. You should not use user input for type strings.

  DB[:table].select(Sequel.cast(:a, params[:id])) # SQL injection!

==== SQL Function Names

In most cases, Sequel does not quote SQL function names. You should not use user input for function names.

  DB[:table].select(Sequel.function(params[:id])) # SQL injection!

==== SQL Window Frames

For backwards compatibility, Sequel supports regular strings in the window function :frame option, which will be treated as a literal string:

  DB[:table].select{fun(arg).over(frame: 'SQL Here')}

You should make sure the frame argument is not derived from user input, or switch to using a hash as the :frame option value.

==== auto_literal_strings extension

If the auto_literal_strings extension is used for backwards compatibility, then Sequel will treat plain strings as literal strings if they are used as the first argument to a filtering method.
This can lead to SQL injection:

  DB[:table].where("name > #{params[:id].to_s}")
  # SQL injection when using auto_literal_strings extension

If you are using the auto_literal_strings extension, you need to be very careful, as the following methods will treat a plain string given as the first argument as a literal string:

* Sequel::Dataset#where
* Sequel::Dataset#having
* Sequel::Dataset#filter
* Sequel::Dataset#exclude
* Sequel::Dataset#exclude_having
* Sequel::Dataset#or
* Sequel::Dataset#first
* Sequel::Dataset#last
* Sequel::Dataset#[]

Even stuff that looks like it may be safe isn't:

  DB[:table].first(params[:num_rows])
  # SQL injection when using auto_literal_strings extension

The Model.find[rdoc-ref:Sequel::Model::ClassMethods#find] and Model.find_or_create[rdoc-ref:Sequel::Model::ClassMethods#find_or_create] class methods will also treat string arguments as literal strings if the auto_literal_strings extension is used:

  Album.find(params[:id])
  # SQL injection when using auto_literal_strings extension

Similar to the filter methods, the auto_literal_strings extension also makes Sequel::Dataset#update treat a string argument as raw SQL:

  DB[:table].update("column = 1")

So you should not do:

  DB[:table].update(params[:changes])
  # SQL injection when using auto_literal_strings extension

or:

  DB[:table].update("column = #{params[:value].to_s}")
  # SQL injection when using auto_literal_strings extension

Instead, you should do:

  DB[:table].update(column: params[:value].to_s) # Safe

Because using the auto_literal_strings extension makes SQL injection so much easier, it is recommended to not use it, and instead use Sequel.lit with placeholders.

=== SQL Identifier Injections

Usually, Sequel treats ruby symbols as SQL identifiers, and ruby strings as SQL strings. However, there are some parts of Sequel that treat ruby strings as SQL identifiers if an SQL string would not make sense in the same context. For example, Sequel::Database#from and Sequel::Dataset#from will treat a string as a table name:

  DB.from('t') # SELECT * FROM "t"

Other places where Sequel treats ruby strings as identifiers are the Sequel::Dataset#insert and Sequel::Dataset#update methods:

  DB[:t].update('b'=>1) # UPDATE "t" SET "b" = 1
  DB[:t].insert('b'=>1) # INSERT INTO "t" ("b") VALUES (1)

Note how the identifier is still quoted in these cases. Sequel quotes identifiers by default on most databases. However, it does not quote identifiers by default on DB2. On such databases, using an identifier derived from user input can lead to SQL injection. Similarly, if you turn off identifier quoting manually on other databases, you open yourself up to SQL injection if you use identifiers derived from user input.

When Sequel quotes identifiers, using an identifier derived from user input does not lead to SQL injection, since the identifiers are also escaped when quoting. Exceptions to this are Oracle (can't escape ") and Microsoft Access (can't escape ]).

In general, even if it doesn't lead to SQL injection, you should avoid using identifiers derived from user input unless absolutely necessary.

Sequel also allows you to create identifiers using Sequel.identifier[rdoc-ref:Sequel::SQL::Builders#identifier] for plain identifiers, Sequel.qualify[rdoc-ref:Sequel::SQL::Builders#qualify] and Sequel::SQL::Identifier#[][rdoc-ref:Sequel::SQL::QualifyingMethods#[]] for qualified identifiers, and Sequel.as[rdoc-ref:Sequel::SQL::Builders#as] for aliased expressions. So if you pass any of those values derived from user input, you are dealing with the same scenario.
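For example, a sketch of why this matters if identifier quoting has been turned off (quote_identifiers= is the Database accessor controlling this):

  DB.quote_identifiers = false
  DB[:table].select(Sequel.identifier(params[:column])) # SQL injection!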
Note that the issues with SQL identifiers do not just apply to places where strings are used as identifiers, they also apply to all places where Sequel uses symbols as identifiers. However, if you are creating symbols from user input, you at least have a denial of service vulnerability in ruby <2.2, and possibly a more serious vulnerability. Note that many Database schema modification methods (e.g. create_table, add_column) also allow for SQL identifier injections, and possibly also SQL code injections. These methods should never be called with user input. == Denial of Service Sequel converts some strings to symbols. Because symbols in ruby <2.2 are not garbage collected, if the strings that are converted to symbols are derived from user input, you have a denial of service vulnerability due to memory exhaustion. The strings that Sequel converts to symbols are generally not derived from user input, so Sequel in general is not vulnerable to this. However, users should be aware of the cases in which Sequel creates symbols, so they do not introduce a vulnerability into their application. === Column Names/Aliases Sequel returns SQL result sets as an array of hashes with symbol keys. The keys are derived from the name that the database server gives the column. These names are generally static. For example: SELECT column FROM table The database will generally use "column" as the name in the result set. If you use an alias: SELECT column AS alias FROM table The database will generally use "alias" as the name in the result set. So if you allow the user to control the alias name: DB[:table].select(:column.as(params[:alias])) Then you can have a denial of service vulnerability. In general, such a vulnerability is unlikely, because you are probably indexing into the returned hash(es) by name, and if an alias was used and you didn't expect it, your application wouldn't work. === Database Connection Options All database connection options are converted to symbols. For a connection URL, the keys are generally fixed, but the scheme is turned into a symbol and the query option keys are used as connection option keys, so they are converted to symbols as well. For example: postgres://host/database?option1=foo&option2=bar Will result in :postgres, :option1, and :option2 symbols being created. Certain option values are also converted to symbols. In the general case, the sql_log_level option value is, but some adapters treat additional options similarly. This is not generally a risk unless you are allowing the user to control the connection URLs or are connecting to arbitrary databases at runtime. == Mass Assignment Mass assignment is the practice of passing a hash of columns and values to a single method, and having multiple column values for a given object set based on the content of the hash. The security issue here is that mass assignment may allow the user to set columns that you didn't intend to allow. The Model#set[rdoc-ref:Sequel::Model::InstanceMethods#set] and Model#update[rdoc-ref:Sequel::Model::InstanceMethods#update] methods do mass assignment. The default configuration of Sequel::Model allows all model columns except for the primary key column(s) to be set via mass assignment. 
Example: album = Album.new album.set(params[:album]) # Mass Assignment Both Model.new[rdoc-ref:Sequel::Model::InstanceMethods::new] and Model.create[rdoc-ref:Sequel::Model::ClassMethods#create] call Model#set[rdoc-ref:Sequel::Model::InstanceMethods#set] internally, so they also allow mass assignment: Album.new(params[:album]) # Mass Assignment Album.create(params[:album]) # Mass Assignment When the argument is derived from user input, instead of these methods, it is encouraged to either use Model#set_fields[rdoc-ref:Sequel::Model::InstanceMethods#set_fields] or Model#update_fields[rdoc-ref:Sequel::Model::InstanceMethods#update_fields], which allow you to specify which fields to allow on a per-call basis. This pretty much eliminates the chance that the user will be able to set a column you did not intend to allow: album.set_fields(params[:album], [:name, :copies_sold]) album.update_fields(params[:album], [:name, :copies_sold]) These two methods iterate over the second argument (+:name+ and +:copies_sold+ in this example) instead of iterating over the entries in the first argument (params[:album] in this example). If you want to override the columns that Model#set[rdoc-ref:Sequel::Model::InstanceMethods#set] allows by default during mass assignment, you can use the whitelist_security plugin, then call the set_allowed_columns class method. Album.plugin :whitelist_security Album.set_allowed_columns(:name, :copies_sold) Album.create(params[:album]) # Only name and copies_sold set Being explicit on a per-call basis using the set_fields and update_fields methods is recommended instead of using the whitelist_security plugin and setting a global whitelist. For more details on the mass assignment methods, see the {Mass Assignment Guide}[rdoc-ref:doc/mass_assignment.rdoc]. == General Parameter Handling This issue isn't necessarily specific to Sequel, but it is a good general practice. If you are using values derived from user input, it is best to be explicit about their type. For example: Album.where(id: params[:id]) is probably a bad idea. Assuming you are using a web framework, params[:id] could be a string, an array, a hash, nil, or potentially something else. Assuming that +id+ is an integer field, you probably want to do: Album.where(id: params[:id].to_i) If you are looking something up by name, you should try to enforce the value to be a string: Album.where(name: params[:name].to_s) If you are trying to use an IN clause with a list of id values based on input provided on a web form: Album.where(id: params[:ids].to_a.map(&:to_i)) Basically, be as explicit as possible. While there aren't any known security issues in Sequel when you do: Album.where(id: params[:id]) It allows the attacker to choose to do any of the following queries: id IS NULL # nil id = '1' # '1' id IN ('1', '2', '3') # ['1', '2', '3'] id = ('a' = 'b') # {'a'=>'b'} id = ('a' IN ('a', 'b') AND 'c' = '') # {'a'=>['a', 'b'], 'c'=>''} While none of those allow for SQL injection, it's possible that they might have an issue in your application. For example, a long array or deeply nested hash might cause the database to have to do a lot of work that could be avoided. In general, it's best to let the attacker control as little as possible, and explicitly specifying types helps a great deal there. 
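If you want to be stricter than to_i (which silently converts invalid input to 0), one option beyond this guide's examples is Ruby's Kernel#Integer, which raises an exception for values that are not valid integer strings:

  # Raises ArgumentError or TypeError for input like 'abc', [], or nil,
  # instead of quietly treating it as 0:
  Album.where(id: Integer(params[:id], 10))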
sequel-5.63.0/doc/sharding.rdoc000066400000000000000000000267631434214120600163300ustar00rootroot00000000000000
= Primary/Replica Configurations and Database Sharding

Sequel has support for primary/replica configurations (a writable primary database with read-only replica databases), as well as database sharding (where you can pick a server to use for a given dataset). Support for both features is database independent, and should work for all database adapters that ship with Sequel.

== The :servers Database option

Sharding and read_only support are both enabled via the :servers database option. Using the :servers database option makes Sequel use a connection pool class that supports sharding, and the minimum required to enable sharding support is to use the empty hash:

  DB=Sequel.connect('postgres://primary_server/database', servers: {})

In most cases, you are probably not going to want to use an empty hash. Keys in the server hash are not restricted to a particular type, but the general recommendation is to use a symbol unless you have special requirements. Values in the server hash should be either hashes or procs that return hashes. These hashes are merged into the Database object's default options hash to get the connection options for the shard, so you don't need to override all options, just the ones that need to be modified. For example, if you are using the same user, password, and database name and just the host is changing, you only need a :host entry in each shard's hash.

Note that all servers should have the same schema for all tables you are accessing, unless you really know what you are doing.

== Primary and Replica Database Configurations

=== Single Primary, Single Replica

To use a single, read-only replica that handles SELECT queries, the following is the simplest configuration:

  DB=Sequel.connect('postgres://primary_server/database', servers: {read_only: {host: 'replica_server'}})

This will use the replica_server for SELECT queries and primary_server for other queries.

If you want to ensure your queries are going to a specific database, you can force this for a given query by using the .server method and passing the symbol name defined in the connect options. For example:

  # Force the SELECT to run on the primary server
  DB[:users].server(:default).all

  # Force the DELETE to run on the read-only replica
  DB[:users].server(:read_only).delete

=== Single Primary, Multiple Replicas

Let's say you have 4 replica servers with names replica_server0, replica_server1, replica_server2, and replica_server3.

  num_read_only = 4
  read_only_host = rand(num_read_only)
  read_only_proc = proc do |db|
    {host: "replica_server#{(read_only_host+=1) % num_read_only}"}
  end
  DB=Sequel.connect('postgres://primary_server/database', servers: {read_only: read_only_proc})

This will use one of the replica servers for SELECT queries and use the primary server for other queries. It's also possible to pick a random host instead of using the round robin approach presented above, but that can result in less optimal resource usage.

=== Multiple Primary, Multiple Replicas

This involves the same basic idea as the single primary, multiple replicas setup, but it shows that the primary database is named :default.
So for 4 primary servers and 4 replica servers: num_read_only = 4 read_only_host = rand(num_read_only) read_only_proc = proc do |db| {host: "replica_server#{(read_only_host+=1) % num_read_only}"} end num_default = 4 default_host = rand(num_default) default_proc = proc do |db| {host: "primary_server#{(default_host+=1) % num_default}"} end DB=Sequel.connect('postgres://primary_server/database', servers: {default: default_proc, read_only: read_only_proc}) == Sharding There is specific support in Sequel for handling primary/replica database combinations, with the only necessary setup being the database configuration. However, since sharding is always going to be implementation dependent, Sequel supplies the basic infrastructure, but you have to tell it which server to use for each dataset. Let's assume a simple scenario, a distributed rainbow table for SHA-1 hashes, sharding based on the first hex character (for a total of 16 shards). First, you need to configure the database: servers = {} (('0'..'9').to_a + ('a'..'f').to_a).each do |hex| servers[hex.to_sym] = {host: "hash_host_#{hex}"} end DB=Sequel.connect('postgres://hash_host/hashes', servers: servers) This configures 17 servers, the 16 shard servers (/hash_host_[0-9a-f]/), and 1 default server which will be used if no shard is specified ("hash_host"). If you want the default server to be one of the shard servers (e.g. hash_host_a), it's easiest to do: DB=Sequel.connect('postgres://hash_host_a/hashes', servers: servers) That will still set up a second pool of connections for the default server, since it considers the default server and shard servers independent. Note that if you always set the shard on a dataset before using it in queries, it will not attempt to connect to the default server. Sequel may use the default server in queries it generates itself, such as to get column names or table schemas, so you should always have a default server that works. To set the shard for a given query, you use the Dataset#server method: DB[:hashes].server(:a).where(hash: /31337/) That will return all matching rows on the hash_host_a shard that have a hash column that contains 31337. Rainbow tables are generally used to find specific hashes, so to save some work, you might want to add a method to the dataset that automatically sets the shard to use. This is fairly easy using a Sequel::Model: class Rainbow < Sequel::Model(:hashes) dataset_module do def plaintext_for_hash(hash) raise(ArgumentError, 'Invalid SHA-1 Hash') unless /\A[0-9a-f]{40}\z/.match(hash) server(hash[0...1].to_sym).where(hash: hash).get(:plaintext) end end end Rainbow.plaintext_for_hash("e580726d31f6e1ad216ffd87279e536d1f74e606") === :servers_hash Option The connection pool can be further controlled to change how it handles attempts to access shards that haven't been configured. The default is to assume the :default shard. However, you can specify a different shard using the :servers_hash option when connecting to the database: DB = Sequel.connect('postgres://...', servers_hash: Hash.new(:some_shard)) You can also use this feature to raise an exception if an unconfigured shard is used: DB = Sequel.connect('postgres://...', servers_hash: Hash.new{raise 'foo'}) If you specify a :servers_hash option to raise an exception for non configured shards you should also explicitly specify a :read_only entry in your :servers option for the case where a shard is not specified. 
In most cases it is sufficient to make the :read_only entry the same as the :default shard:

  servers = {read_only: {}}
  (('0'..'9').to_a + ('a'..'f').to_a).each do |hex|
    servers[hex.to_sym] = {host: "hash_host_#{hex}"}
  end
  DB=Sequel.connect('postgres://hash_host/hashes', servers: servers, servers_hash: Hash.new{raise "Invalid Server"})

=== Sharding Plugin

Sequel comes with a sharding plugin that makes it easy to use sharding with model objects. It makes sure that objects retrieved from a specific shard are always saved back to that shard, allows you to create objects on specific shards, and even makes sure associations work well with shards. You just need to remember to set the model to use the plugin:

  class Rainbow < Sequel::Model(:hashes)
    plugin :sharding
  end

  Rainbow.server(:a).first(id: 1).update(plaintext: 'VGM')

If all of your models are sharded, you can set all models to use the plugin via:

  Sequel::Model.plugin :sharding

=== server_block Extension

By default, you must specify the server/shard you want to use for every dataset/action, or Sequel will use the default shard. If you have a group of queries that should use the same shard, it can get a bit redundant to specify the same shard for all of them.

The server_block extension adds a Database#with_server method that scopes all database access inside the block to the given shard by default:

  DB.extension :server_block
  DB.with_server(:a) do
    # this SELECT query uses the "a" shard
    if r = Rainbow.first(hash: /31337/)
      r.count += 1
      # this UPDATE query also uses the "a" shard
      r.save
    end
  end

The server_block extension doesn't currently integrate with the sharding plugin, as it ties into the Dataset#server method. This shouldn't present a problem in practice as long as you just access the models inside the with_server block, since they will use the shard set by with_server by default. However, you will probably have issues if you retrieve the models inside the block and save them outside of the block. If you need to do that, call the server method explicitly on the dataset used to retrieve the model objects.

The with_server method also supports a second argument for the default read_only server to use, which can be useful if you are mixing sharding and primary/replica servers:

  DB.extension :server_block
  DB.with_server(:a, :a_read_only) do
    # this SELECT query uses the "a_read_only" shard
    if r = Rainbow.first(hash: /31337/)
      r.count += 1
      # this UPDATE query also uses the "a" shard
      r.save
    end
  end

=== arbitrary_servers Extension

By default, Sequel's sharding support is designed to work with predefined shards. It ships with Database#add_servers and Database#remove_servers methods to modify these predefined shards on the fly, but it is a bit cumbersome to work with truly arbitrary servers (requiring you to call add_servers before use, then remove_servers after use).
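For example, a brief sketch of that add_servers/remove_servers workflow (the :h17 shard name and host are invented for illustration):

  DB.add_servers(h17: {host: 'hash_host_17'})
  DB[:hashes].server(:h17).all
  DB.remove_servers(:h17)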
The arbitrary_servers extension allows you to pass a server/shard options hash as the server to use, and those options will be merged directly into the database's default options: DB.extension :arbitrary_servers DB[:rainbows].server(host: 'hash_host_a').all # or DB[:rainbows].server(host: 'hash_host_b', database: 'backup').all arbitrary_servers is designed to work well in conjunction with the server_block extension: DB.with_server(host: 'hash_host_b', database: 'backup') do DB.synchronize do # All queries here default to the backup database on hash_host_b end end If you are using arbitrary_servers with server_block, you may want to define the following method (or something similar) so that you don't need to call synchronize separately: def DB.with_server(*) super{synchronize{yield}} end The reason for the synchronize method is that it checks out a connection and makes the same connection available for the duration of the block. If you don't do that, Sequel will probably disconnect from the database and reconnect to the database on each request, since connections to arbitrary servers are not cached. Note that this extension only works with the sharded threaded connection pool. If you are using the sharded single connection pool, you need to switch to the sharded threaded connection pool before using this extension. If you are passing the :single_threaded option to the Database, just remove that option. If you are setting: Sequel.single_threaded = true just remove or comment out that code. == JDBC If you are using the jdbc adapter, note that it does not handle separate options such as +:host+, +:user+, and +:port+. If you would like to use the +:servers+ option when connecting to a JDBC database, each hash value in the +servers+ option should contain a +:uri+ key with a JDBC connection string for that shard as the value. Example: DB=Sequel.connect('jdbc:postgresql://primary_server/database', servers: {read_only: {uri: 'jdbc:postgresql://replica_server/database'}}) sequel-5.63.0/doc/sql.rdoc000066400000000000000000000665421434214120600153320ustar00rootroot00000000000000= Sequel for SQL Users One of the main benefits of Sequel is that it doesn't require the user to know SQL in order to use it, though SQL knowledge is certainly helpful. Unlike most other Sequel documentation, this guide assumes you know SQL, and provides an easy way to discover how to do something in Sequel given the knowledge of how to do so in SQL. == You Can Just Use SQL With Sequel, it's very easy to just use SQL for your queries. If learning Sequel's DSL seems like a waste of time, you are certainly free to write all your queries in SQL. Sequel uses a few different methods depending on the type of query you are doing. === SELECT For SELECT queries, you should probably use Database#fetch with a string and a block: DB.fetch("SELECT * FROM albums") do |row| puts row[:name] end Database#fetch will take the query you give it, execute it on the database, and yield a hash with column symbol keys for each row returned. If you want to use some placeholder variables, you can set the placeholders with ? 
and add the corresponding arguments to +fetch+:

  DB.fetch("SELECT * FROM albums WHERE name LIKE ?", 'A%') do |row|
    puts row[:name]
  end

You can also use named placeholders by starting the placeholder with a colon, and using a hash for the argument:

  DB.fetch("SELECT * FROM albums WHERE name LIKE :pattern", pattern: 'A%') do |row|
    puts row[:name]
  end

This can be helpful for long queries where it is difficult to match the question marks in the query with the arguments.

What Sequel actually does internally is two separate things. It first creates a dataset representing the query, and then it executes the dataset's SQL code to retrieve the objects. Often, you want to define a dataset at some point, but not execute it until later. You can do this by leaving off the block, and storing the dataset in a variable:

  ds = DB.fetch("SELECT * FROM albums")

Then, when you want to retrieve the rows later, you can call +each+ on the dataset:

  ds.each{|r| puts r[:name]}

You should note that Database#[] calls Database#fetch if a string is provided, so you can also do:

  ds = DB["SELECT * FROM albums"]
  ds.each{|r| puts r[:name]}

However, note that Database#[] cannot take a block directly; you have to call +each+ on the returned dataset. There are plenty of other methods besides +each+. For example, the +all+ method returns all records in the dataset as an array:

  DB["SELECT * FROM albums"].all # [{:id=>1, :name=>'RF', ...}, ...]

=== INSERT, UPDATE, DELETE

INSERT, UPDATE, and DELETE all work the same way. You first create the dataset with the SQL you want to execute using Database#[]:

  insert_ds = DB["INSERT INTO albums (name) VALUES (?)", 'RF']
  update_ds = DB["UPDATE albums SET name = ? WHERE name = ?", 'MO', 'RF']
  delete_ds = DB["DELETE FROM albums WHERE name = ?", 'MO']

Then, you call the +insert+, +update+, or +delete+ method on the returned dataset:

  insert_ds.insert
  update_ds.update
  delete_ds.delete

+update+ and +delete+ should return the number of rows affected, and +insert+ should return the autogenerated primary key integer for the row inserted (if any).

=== Other Queries

All other queries such as TRUNCATE, CREATE TABLE, and ALTER TABLE should be executed using Database#run:

  DB.run "CREATE TABLE albums (id integer primary key, name varchar(255))"

You can also use Database#<<:

  DB << "ALTER TABLE albums ADD COLUMN copies_sold INTEGER"

=== Other Places

Almost everywhere in Sequel, you can drop down to literal SQL by providing a literal string, which you can create with Sequel.lit:

  DB[:albums].select('name')             # SELECT 'name' FROM albums
  DB[:albums].select(Sequel.lit('name')) # SELECT name FROM albums

For a simpler way of creating literal strings, you can also use the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc], which adds the String#lit method, and other methods that integrate Sequel's DSL with the Ruby language:

  DB[:albums].select('name'.lit)

So you can use Sequel's DSL everywhere you find it helpful, and fall back to literal SQL if the DSL can't do what you want or you just find literal SQL easier.

== Translating SQL Expressions into Sequel

The rest of this guide assumes you want to use Sequel's DSL to represent your query, that you know how to write the query in SQL, but you aren't sure how to write it in Sequel's DSL.

This section will describe how specific SQL expressions are handled in Sequel. The next section will discuss how to create queries by using method chaining on datasets.
=== Database#literal

It's important to get familiar with the Database#literal method, which will return the SQL that will be used for a given expression:

  DB.literal(1)        # => "1"
  DB.literal(:column)  # => "\"column\""
  DB.literal('string') # => "'string'"

Try playing around to see how different objects get literalized into SQL.

=== Database Loggers

Some Sequel methods handle literalization slightly differently than Database#literal. If you want to see all SQL queries that Sequel is sending to the database, you should add a database logger:

  DB.loggers << Logger.new($stdout)

Now that you know how to see what SQL is being used, let's jump in and see how to map SQL syntax to Sequel syntax:

=== Identifiers

In Sequel, SQL identifiers are usually specified as Ruby symbols:

  :column # "column"

As you can see, Sequel quotes identifiers by default. Depending on your database, it may uppercase them by default as well:

  :column # "COLUMN" on some databases

A plain symbol is usually treated as an unqualified identifier. However, if you are using multiple tables in a query, and you want to reference a column in one of the tables that has the same name as a column in another one of the tables, you need to qualify that reference. Note that you can't use a period to separate them:

  :table.column # calls the column method on the symbol

Also note that specifying the period inside the symbol doesn't work if you are quoting identifiers:

  :"table.column" # "table.column" instead of "table"."column"

There are a few different Sequel methods for creating qualified identifier objects. The recommended way is to explicitly create a qualified identifier by using Sequel.[] to create an identifier and call [] or +qualify+ on that, or by using the Sequel.qualify method with the table and column symbols:

  Sequel[:table][:column]         # "table"."column"
  Sequel[:column].qualify(:table) # "table"."column"
  Sequel.qualify(:table, :column) # "table"."column"

Another way to generate identifiers is to use Sequel's {virtual row support}[rdoc-ref:doc/virtual_rows.rdoc]:

  DB[:albums].select{name}          # SELECT "name" FROM "albums"
  DB[:albums].select{albums[:name]} # SELECT "albums"."name" FROM "albums"

You can also use the symbol_aref extension for creating qualified identifiers:

  Sequel.extension :symbol_aref
  :table[:column] # "table"."column"

=== Numbers

In general, Ruby numbers map directly to SQL numbers:

  # Integers
  1  # 1
  -1 # -1

  # Floats
  1.5 # 1.5

  # BigDecimals
  BigDecimal('1000000.123091029') # 1000000.123091029

=== Strings

In general, Ruby strings map directly to SQL strings:

  'name' # 'name'
  "name" # 'name'

=== Aliasing

You can use the Sequel.as method to create an alias, and the +as+ method on most Sequel-specific expression objects:

  Sequel.as(:column, :alias)         # "column" AS "alias"
  Sequel[:column].as(:alias)         # "column" AS "alias"
  Sequel[:table][:column].as(:alias) # "table"."column" AS "alias"
  (Sequel[:column] + 1).as(:alias)   # ("column" + 1) AS "alias"

You can also use the symbol_as extension for creating aliased identifiers:

  Sequel.extension :symbol_as
  :column.as(:alias) # "column" AS "alias"

If you want to use a derived column list, you can provide an array of column aliases:

  Sequel.as(:table, :alias, [:c1, :c2]) # "table" AS "alias"("c1", "c2")

=== Functions

The easiest way to use SQL functions is via a virtual row:

  DB[:albums].select{func.function}    # SELECT func() FROM "albums"
  DB[:albums].select{func(col1, col2)} # SELECT func("col1", "col2") FROM "albums"

You can also use the Sequel.function method on the symbol that contains the
function name: Sequel.function(:func) # func() Sequel.function(:func, :col1, :col2) # func("col1", "col2") === Aggregate Functions Aggregate functions work the same way as normal functions, since they share the same syntax: Sequel.function(:sum, :column) # sum(column) To use the DISTINCT modifier to an aggregate function, call the +distinct+ method on the function expression, which returns a new function expression: DB[:albums].select{sum(:column).distinct} # SELECT sum(DISTINCT column) FROM albums If you want to use the wildcard as the sole argument of the aggregate function, use the * method on the function expression: Sequel.function(:count).* # count(*) DB[:albums].select{count.function.*} # SELECT count(*) FROM albums Note that Sequel provides helper methods for aggregate functions such as +count+, +sum+, +min+, +max+, +avg+, and +group_and_count+, which handle common uses of aggregate functions. === Window Functions If the database supports window functions, Sequel can handle them by calling the +over+ method on a function expression: DB[:albums].select{row_number.function.over} # SELECT row_number() OVER () FROM albums DB[:albums].select{count.function.*.over} # SELECT count(*) OVER () FROM albums DB[:albums].select{function(:col1).over(partition: col2, order: col3)} # SELECT function(col1) OVER (PARTITION BY col2 ORDER BY col3) FROM albums DB[:albums].select{function(c1, c2).over(partition: [c3, c4], order: [c5, c6.desc])} # SELECT function(c1, c2) OVER (PARTITION BY c3, c4 ORDER BY c5, c6 DESC) FROM albums DB[:albums].select{function(c1).over(partition: c2, order: :c3, frame: :rows)} # SELECT function(c1) OVER (PARTITION BY c2 ORDER BY c3 ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) FROM albums DB[:albums].select{function(c1).over(partition: c2, order: :c3, frame: {type: :range, start: 1, end: 1})} # SELECT function(c1) OVER (PARTITION BY c2 ORDER BY c3 RANGE BETWEEN 1 PRECEDING AND 1 FOLLOWING) FROM albums DB[:albums].select{function(c1).over(partition: c2, order: :c3, frame: {type: :groups, start: [2, :preceding], end: [1, :preceding]})} # SELECT function(c1) OVER (PARTITION BY c2 ORDER BY c3 GROUPS BETWEEN 2 PRECEDING AND 1 PRECEDING) FROM albums DB[:albums].select{function(c1).over(partition: c2, order: :c3, frame: {type: :range, start: :preceding, exclude: :current})} # SELECT function(c1) OVER (PARTITION BY c2 ORDER BY c3 RANGE UNBOUNDED PRECEDING EXCLUDE CURRENT ROW) FROM albums === Schema Qualified Functions If the database supports schema qualified functions, Sequel can handle them by calling the +function+ method on a qualified identifier: DB[:albums].select{schema[:function].function} # SELECT schema.function() FROM albums DB[:albums].select{schema[:function].function(:col, 2, "a")} # SELECT schema.function(col, 2, 'a') FROM albums === Portable/Emulated Functions Sequel offers some support for portable SQL functions, allowing you to call standard SQL functions, where Sequel will emulate support on databases that lack native support. 
Some examples are:

  Sequel.char_length(:column)    # char_length(column)
  Sequel.extract(:year, :column) # extract(year FROM column)
  Sequel.trim(:column)           # trim(column)

=== Equality Operator (=)

Sequel uses hashes to specify equality:

  {column: 1} # ("column" = 1)

You can also specify this as an array of two element arrays:

  [[:column, 1]] # ("column" = 1)

For expression objects, you can also use the =~ method:

  where{column =~ 1} # ("column" = 1)

=== Not Equal Operator (!=)

You can specify a not equals condition by inverting the hash or array of two element arrays using Sequel.negate or Sequel.~:

  Sequel.negate(column: 1)      # ("column" != 1)
  Sequel.negate([[:column, 1]]) # ("column" != 1)
  Sequel.~(column: 1)           # ("column" != 1)
  Sequel.~([[:column, 1]])      # ("column" != 1)

The difference between the two is that +negate+ only works on hashes and arrays of two element arrays, and it negates all entries in the hash or array, while ~ does a general inversion. This is best shown by an example with multiple entries:

  Sequel.negate(column: 1, foo: 2) # (("column" != 1) AND (foo != 2))
  Sequel.~(column: 1, foo: 2)      # (("column" != 1) OR (foo != 2))

You can also use the ~ method on an equality expression:

  where{~(column =~ 1)} # ("column" != 1)

Or you can use the !~ method:

  where{column !~ 1} # ("column" != 1)

The most common need for not equals is in filters, in which case you can use the +exclude+ method:

  DB[:albums].exclude(column: 1) # SELECT * FROM "albums" WHERE ("column" != 1)

Note that +exclude+ does a generalized inversion, similar to Sequel.~.

=== Inclusion and Exclusion Operators (IN, NOT IN)

Sequel also uses hashes to specify inclusion, and inversions of those hashes to specify exclusion:

  {column: [1, 2, 3]}         # ("column" IN (1, 2, 3))
  Sequel.~(column: [1, 2, 3]) # ("column" NOT IN (1, 2, 3))

As you may have guessed, Sequel switches from an = to an IN when the hash value is an array. It also does this for datasets, which easily allows you to test for inclusion and exclusion in a subselect:

  {column: DB[:albums].select(:id)}         # ("column" IN (SELECT "id" FROM "albums"))
  Sequel.~(column: DB[:albums].select(:id)) # ("column" NOT IN (SELECT "id" FROM "albums"))

Similar to =, you can also use =~ with expressions for inclusion:

  where{column =~ [1, 2, 3]} # ("column" IN (1, 2, 3))

and !~ for exclusion:

  where{column !~ [1, 2, 3]} # ("column" NOT IN (1, 2, 3))

Sequel also supports the SQL EXISTS operator using Dataset#exists:

  DB[:albums].exists # EXISTS (SELECT * FROM albums)

=== Identity Operators (IS, IS NOT)

Hashes in Sequel use IS if the value is +true+, +false+, or +nil+:

  {column: nil}   # ("column" IS NULL)
  {column: true}  # ("column" IS TRUE)
  {column: false} # ("column" IS FALSE)

Negation works the same way as it does for equality and inclusion:

  Sequel.~(column: nil)   # ("column" IS NOT NULL)
  Sequel.~(column: true)  # ("column" IS NOT TRUE)
  Sequel.~(column: false) # ("column" IS NOT FALSE)

Likewise, =~ works for identity and !~ for negative identity on expressions:

  where{column =~ nil} # ("column" IS NULL)
  where{column !~ nil} # ("column" IS NOT NULL)

=== Inversion Operator (NOT)

Sequel's general inversion operator is ~, which works on symbols and most Sequel-specific expression objects:

  Sequel.~(:column) # NOT "column"

Note that ~ will actually apply the inversion operation to the underlying object, which is why Sequel.~(column: 1) produces (column != 1) instead of NOT (column = 1).
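The same applies to inequality expressions, which are inverted by flipping the operator rather than wrapping them in NOT. A small sketch (output shown with PostgreSQL-style double quoting):

  DB[:albums].exclude{copies_sold > 10}
  # SELECT * FROM "albums" WHERE ("copies_sold" <= 10)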
=== Inequality Operators (< > <= >=)

Sequel defines the inequality operators directly on most Sequel-specific expression objects:

  Sequel[:table][:column] > 1          # ("table"."column" > 1)
  Sequel[:table][:column] < 1          # ("table"."column" < 1)
  Sequel.function(:func) >= 1          # (func() >= 1)
  Sequel.function(:func, :column) <= 1 # (func("column") <= 1)

If you want to use them on a symbol, you should call Sequel.[] with the symbol to get an expression object:

  Sequel[:column] > 1 # ("column" > 1)

A common use of virtual rows is to handle inequality operators:

  DB[:albums].where{col1 > col2} # SELECT * FROM "albums" WHERE ("col1" > "col2")

=== Standard Mathematical Operators (+ - * /)

The standard mathematical operators are defined on most Sequel-specific expression objects:

  Sequel[:column] + 1         # "column" + 1
  Sequel[:table][:column] - 1 # "table"."column" - 1
  Sequel[:table][:column] * 1 # "table"."column" * 1
  Sequel[:column] / 1         # "column" / 1
  Sequel[:column] ** 1        # power("column", 1)

You can also call the operator methods directly on the Sequel module:

  Sequel.+(:column, 1)                 # "column" + 1
  Sequel.-(Sequel[:table][:column], 1) # "table"."column" - 1
  Sequel.*(Sequel[:table][:column], 1) # "table"."column" * 1
  Sequel./(:column, 1)                 # "column" / 1
  Sequel.**(:column, 1)                # power("column", 1)

Note that since Sequel implements support for Ruby's coercion protocol, the following also works:

  1 + Sequel[:column]
  1 - Sequel[:table][:column]

=== Boolean Operators (AND OR)

Sequel defines the & and | methods on most Sequel-specific expression objects to handle AND and OR:

  Sequel[:column1] & :column2             # ("column1" AND "column2")
  Sequel[{column1: 1}] | {column2: 2}     # (("column1" = 1) OR ("column2" = 2))
  (Sequel.function(:func) > 1) & :column3 # ((func() > 1) AND "column3")

Note the use of parentheses in the last statement. If you omit them, you won't get what you expect. Because & has higher precedence than >, the expression:

  Sequel.function(:func) > 1 & :column3

is parsed as:

  Sequel.function(:func) > (1 & :column3)

You can also use the Sequel.& and Sequel.| methods:

  Sequel.&(:column1, :column2)         # ("column1" AND "column2")
  Sequel.|({column1: 1}, {column2: 2}) # (("column1" = 1) OR ("column2" = 2))

You can use hashes and arrays of two element arrays to specify AND and OR with equality conditions:

  {column1: 1, column2: 2}       # (("column1" = 1) AND ("column2" = 2))
  [[:column1, 1], [:column2, 2]] # (("column1" = 1) AND ("column2" = 2))

As you can see, these literalize with ANDs by default. You can use the Sequel.or method to use OR instead:

  Sequel.or(column1: 1, column2: 2) # (("column1" = 1) OR ("column2" = 2))

As you can see in the above examples, Sequel.| and Sequel.or work differently. Sequel.| is for combining an arbitrary number of expressions using OR. If you pass a single argument, Sequel.| will just convert it to a Sequel expression, similar to Sequel.expr.
Sequel.or is for taking a single hash or array of two element arrays and combining the elements of that single argument using OR instead of AND:

  Sequel.|(column1: 1, column2: 2)     # (("column1" = 1) AND ("column2" = 2))
  Sequel.or(column1: 1, column2: 2)    # (("column1" = 1) OR ("column2" = 2))
  Sequel.|({column1: 1}, {column2: 2}) # (("column1" = 1) OR ("column2" = 2))
  Sequel.or({column1: 1}, {column2: 2}) # ArgumentError

You've already seen the Sequel.negate method, which will use ANDs if multiple entries are used:

  Sequel.negate(column1: 1, column2: 2) # (("column1" != 1) AND ("column2" != 2))

To negate while using ORs, the Sequel.~ operator can be used:

  Sequel.~(column1: 1, column2: 2) # (("column1" != 1) OR ("column2" != 2))

Note again that Dataset#exclude uses ~, not +negate+:

  DB[:albums].exclude(column1: 1, column2: 2) # SELECT * FROM "albums" WHERE (("column" != 1) OR ("column2" != 2))

=== Casts

Casting in Sequel is done with the +cast+ method, which is available on most of the Sequel-specific expression objects:

  Sequel[:name].cast(:text)           # CAST("name" AS text)
  Sequel['1'].cast(:integer)          # CAST('1' AS integer)
  Sequel[:table][:column].cast(:date) # CAST("table"."column" AS date)

You can also use the Sequel.cast method:

  Sequel.cast(:name, :text) # CAST("name" AS text)

=== Bitwise Mathematical Operators (& | ^ << >> ~)

Sequel allows the use of bitwise mathematical operators on Sequel::SQL::NumericExpression objects:

  Sequel[:number] + 1       # => #<Sequel::SQL::NumericExpression ...>
  (Sequel[:number] + 1) & 5 # (("number" + 1) & 5)

As you can see, when you use the + operator on a symbol, you get a NumericExpression. You can turn an expression into a NumericExpression using +sql_number+:

  Sequel[:number].sql_number | 5               # ("number" | 5)
  Sequel.function(:func).sql_number << 7       # (func() << 7)
  Sequel.cast(:name, :integer).sql_number >> 8 # (CAST("name" AS integer) >> 8)

Sequel allows you to do the cast and conversion at the same time via +cast_numeric+:

  Sequel[:name].cast_numeric ^ 9 # (CAST("name" AS integer) ^ 9)

Note that &, |, and ~ are already defined to do AND, OR, and NOT on most expressions, so if you want to use the bitwise operators, you need to make sure that they are converted first:

  ~Sequel[:name]            # NOT "name"
  ~Sequel[:name].sql_number # ~"name"

=== String Operators (||, LIKE, Regexp)

Sequel allows the use of the string concatenation operator on Sequel::SQL::StringExpression objects, which can be created using the +sql_string+ method on an expression:

  Sequel[:name].sql_string + ' - Name' # ("name" || ' - Name')

Just like for the bitwise operators, Sequel allows you to do the cast and conversion at the same time via +cast_string+:

  Sequel[:number].cast_string + ' - Number' # (CAST(number AS varchar(255)) || ' - Number')

Note that similar to the mathematical operators, you cannot switch the order of the expression and have it work:

  'Name - ' + Sequel[:name].sql_string # raises TypeError

Just like for the mathematical operators, you can use Sequel.[] to wrap the object:

  Sequel['Name - '] + :name # ('Name - ' || "name")

The Sequel.join method concatenates all of the elements in the array:

  Sequel.join(['Name', :name]) # ('Name' || "name")

Just like Ruby's Array#join, you can provide an argument for a string used to join each element:

  Sequel.join(['Name', :name], ' - ') # ('Name' || ' - ' || "name")

For the LIKE operator, Sequel defines the +like+ and +ilike+ methods on most Sequel-specific expression objects:

  Sequel[:name].like('A%')  # ("name" LIKE 'A%' ESCAPE '\')
  Sequel[:name].ilike('A%') # ("name" ILIKE 'A%' ESCAPE '\')

You can also
use the Sequel.like and Sequel.ilike methods:

  Sequel.like(:name, 'A%')  # ("name" LIKE 'A%' ESCAPE '\')
  Sequel.ilike(:name, 'A%') # ("name" ILIKE 'A%' ESCAPE '\')

Note that the above syntax for +ilike+, while Sequel's default, is specific to PostgreSQL. However, most other adapters override the behavior. For example, on MySQL, Sequel uses LIKE BINARY for +like+, and LIKE for +ilike+. If the database supports both case sensitive and case insensitive LIKE, then +like+ will use a case sensitive LIKE, and +ilike+ will use a case insensitive LIKE.

Inverting the LIKE operator works like other inversions:

  ~Sequel.like(:name, 'A%') # ("name" NOT LIKE 'A%' ESCAPE '\')

Sequel also supports SQL regular expressions on MySQL and PostgreSQL (and SQLite when using the sqlite adapter with the :setup_regexp_function Database option). You can use these by passing a Ruby regular expression to +like+ or +ilike+, or by making the regular expression a hash value:

  Sequel.like(:name, /^A/)   # ("name" ~ '^A')
  ~Sequel.ilike(:name, /^A/) # ("name" !~* '^A')
  {name: /^A/i}              # ("name" ~* '^A')
  Sequel.~(name: /^A/)       # ("name" !~ '^A')

Note that using +ilike+ with a regular expression will always make the regexp case insensitive. If you use +like+ or the hash with regexp value, it will only be case insensitive if the Regexp itself is case insensitive.

=== Order Specifications (ASC, DESC)

Sequel supports specifying ascending or descending order using the +asc+ and +desc+ method on most Sequel-specific expression objects:

  Sequel[:column].asc          # "column" ASC
  Sequel[:table][:column].desc # "table"."column" DESC

You can also use the Sequel.asc and Sequel.desc methods:

  Sequel.asc(:column)                  # "column" ASC
  Sequel.desc(Sequel[:table][:column]) # "table"."column" DESC

On some databases, you can specify null ordering:

  Sequel.asc(:column, nulls: :first)                # "column" ASC NULLS FIRST
  Sequel.desc(Sequel[:table][:column], nulls: :last) # "table"."column" DESC NULLS LAST

=== All Columns (.*)

To select all columns in a table, Sequel supports the * method on identifiers and qualified identifiers without an argument:

  Sequel[:table].*          # "table".*
  Sequel[:schema][:table].* # "schema"."table".*

=== CASE statements

Sequel supports SQL CASE statements using the Sequel.case method. The first argument is a hash or array of two element arrays representing the conditions, the second argument is the default value (ELSE). The keys of the hash (or first element in each array) are the WHEN conditions, and the values of the hash (or second element in each array) are the THEN results.
Here are some examples:

  Sequel.case({column: 1}, 0)        # (CASE WHEN "column" THEN 1 ELSE 0 END)
  Sequel.case([[:column, 1]], 0)     # (CASE WHEN "column" THEN 1 ELSE 0 END)
  Sequel.case({{column: nil}=>1}, 0) # (CASE WHEN (column IS NULL) THEN 1 ELSE 0 END)

If the hash or array has multiple arguments, multiple WHEN clauses are used:

  Sequel.case({c: 1, d: 2}, 0)         # (CASE WHEN "c" THEN 1 WHEN "d" THEN 2 ELSE 0 END)
  Sequel.case([[:c, 1], [:d, 2]], 0)   # (CASE WHEN "c" THEN 1 WHEN "d" THEN 2 ELSE 0 END)

If you provide a 3rd argument to Sequel.case, it goes between CASE and WHEN:

  Sequel.case({2=>1, 3=>5}, 0, :column) # (CASE column WHEN 2 THEN 1 WHEN 3 THEN 5 ELSE 0 END)

=== Subscripts/Array Access ([])

Sequel supports SQL subscripts using the +sql_subscript+ method on most Sequel-specific expression objects:

  Sequel[:column].sql_subscript(3)         # column[3]
  Sequel[:table][:column].sql_subscript(3) # table.column[3]

You can also use the Sequel.subscript method:

  Sequel.subscript(:column, 3) # column[3]

Just like in SQL, you can use any expression as a subscript:

  Sequel.subscript(:column, Sequel.function(:func)) # column[func()]

== Building Queries in Sequel

In Sequel, SQL queries are built with method chaining.

=== Creating Datasets

You generally start creating a dataset by calling Database#[] with a symbol specifying the table name:

  DB[:albums] # SELECT * FROM albums

If you want to select from multiple FROM tables, use multiple arguments:

  DB[:albums, :artists] # SELECT * FROM albums, artists

If you don't want to select from any FROM tables, just call +dataset+:

  DB.dataset # SELECT *

=== Chaining Methods

Once you have your dataset object, you build queries by chaining methods, usually with one method per clause in the query:

  DB[:albums].select(:id, :name).where(Sequel.like(:name, 'A%')).order(:name)
  # SELECT id, name FROM albums WHERE (name LIKE 'A%' ESCAPE '\') ORDER BY name

Note that the order of your method chain is not usually important unless you have multiple methods that affect the same clause:

  DB[:albums].order(:name).where(Sequel.like(:name, 'A%')).select(:id, :name)
  # SELECT id, name FROM albums WHERE (name LIKE 'A%' ESCAPE '\') ORDER BY name

=== Using the Same Dataset for SELECT, INSERT, UPDATE, and DELETE

Also note that while the SELECT clause is displayed when you look at a dataset, a Sequel dataset can be used for INSERT, UPDATE, and DELETE as well. Here's an example:

  ds = DB[:albums]
  ds.all                # SELECT * FROM albums
  ds.insert(name: 'RF') # INSERT INTO albums (name) VALUES ('RF')
  ds.update(name: 'RF') # UPDATE albums SET name = 'RF'
  ds.delete             # DELETE FROM albums

In general, the +insert+, +update+, and +delete+ methods use the appropriate clauses you defined on the dataset:

  ds = DB[:albums].where(id: 1)
  ds.all                # SELECT * FROM albums WHERE (id = 1)
  ds.insert(name: 'RF') # INSERT INTO albums (name) VALUES ('RF')
  ds.update(name: 'RF') # UPDATE albums SET name = 'RF' WHERE (id = 1)
  ds.delete             # DELETE FROM albums WHERE (id = 1)

Note how +update+ and +delete+ used the +where+ conditions, but +insert+ did not, because INSERT doesn't use a WHERE clause.

=== Methods Used for Each SQL Clause

To see which methods exist that affect each SQL clause, see the {"Dataset Basics" guide}[rdoc-ref:doc/dataset_basics.rdoc].
sequel-5.63.0/doc/testing.rdoc000066400000000000000000000252321434214120600161770ustar00rootroot00000000000000
= Testing with Sequel

Whether or not you use Sequel in your application, you are usually going to want to have tests that ensure that your code works.
When you are using Sequel, it's helpful to integrate it into your testing framework, and it's generally best to run each test in its own transaction if possible. That keeps all tests isolated from each other, and it's simple, as it handles all of the cleanup for you. Sequel doesn't ship with helpers for common libraries, as the exact code you need is often application-specific, but this page offers some examples that you can either use directly or build on.

== Transactional tests

These run each test in its own transaction, the recommended way to test.

=== minitest/spec

==== with minitest-hooks

  require 'minitest/hooks/default'

  DB = Sequel.postgres # change if using sqlite etc

  class Minitest::HooksSpec
    def around
      DB.transaction(rollback: :always, auto_savepoint: true){super}
    end
  end

==== without minitest-hooks

  DB = Sequel.postgres # change if using sqlite etc

  class Minitest::Spec
    def run(*args, &block)
      DB.transaction(rollback: :always, auto_savepoint: true){super}
    end
  end

=== minitest/test

  DB = Sequel.postgres # change if using sqlite etc

  # Use this class as the base class for your tests
  class SequelTestCase < Minitest::Test
    def run(*args, &block)
      DB.transaction(rollback: :always, auto_savepoint: true){super}
    end
  end

=== rspec >= 2.8

  DB = Sequel.postgres # change the database if you are using sqlite etc.

  RSpec.configure do |c|
    c.around(:each) do |example|
      DB.transaction(rollback: :always, auto_savepoint: true){example.run}
    end
  end

== Transactional testing with multiple databases

You can use the Sequel.transaction method to run a transaction on multiple databases, rolling all of them back. Instead of:

  DB.transaction(rollback: :always)

Use Sequel.transaction with an array of databases:

  Sequel.transaction([DB1, DB2, DB3], rollback: :always)

== Transactional testing with savepoints

Using minitest/spec and minitest-hooks, and assuming your database supports it, you can use transactions around entire test suites, using savepoints around each test. This can significantly speed up any test suite where there is a lot of shared setup in a before all hook. By using savepoints per test, each test is isolated from the others, rolling back changes after it completes, and by using transactions per test suite, you only pay the cost to load the data once for the test suite, and it is automatically rolled back after the test suite completes. Example:

  require 'minitest/hooks/default'

  class Minitest::HooksSpec
    def around
      DB.transaction(rollback: :always, savepoint: true, auto_savepoint: true){super}
    end

    def around_all
      DB.transaction(rollback: :always){super}
    end
  end

  describe "some large test suite" do
    before(:all) do
      DB[:table].import # Large number of rows
    end
  end

== Nontransactional tests

In some cases, it is not possible to use transactions. For example, if you are testing a web application that is running in a separate process, you don't have access to that process's database connections, so you can't run your examples in transactions. In that case, the best way to handle things is to cleanup after each test by deleting or truncating the database tables used in the test.

The order in which you delete/truncate the tables is important if you are using referential integrity in your database (which you should be doing). If you are using referential integrity, you need to make sure to delete from tables referencing other tables before the tables that are being referenced.
For example, if you have an +albums+ table with an +artist_id+ field referencing the +artists+ table, you want to delete/truncate the +albums+ table before the +artists+ table. Note that if you have cyclic references in your database, you will probably need to write your own custom cleaning code.

=== minitest/spec or rspec

  describe "some test suite" do
    after do
      [:table1, :table2].each{|x| DB.from(x).truncate}
      # or
      [:table1, :table2].each{|x| DB.from(x).delete}
    end
  end

=== minitest/test

  class SomeTestClass < Minitest::Test
    def teardown
      [:table1, :table2].each{|x| DB.from(x).truncate}
      # or
      [:table1, :table2].each{|x| DB.from(x).delete}
    end
  end

= Testing Sequel Itself

Sequel has multiple separate test suites. All test suites use minitest/spec, with the minitest-hooks and minitest-global_expectations extensions. To install the dependencies necessary to test Sequel, run gem install --development sequel.

== rake

The default rake task runs Sequel's core, model, plugin, and extension specs, the same as rake spec or rake spec_core spec_model spec_plugin.

== rake spec_core

The +spec_core+ rake task runs Sequel's core specs. These specs use a mocked database connection, and test for specific SQL used and for generally correct behavior.

== rake spec_model

The +spec_model+ rake task runs Sequel's model specs. These specs also use a mocked database connection, and operate similarly to the core tests.

== rake spec_plugin

The +spec_plugin+ rake task runs the specs for the plugins and extensions that ship with Sequel. These also use a mocked database connection, and operate very similarly to the general Sequel core and model specs.

== rake spec_core_ext

The +spec_core_ext+ rake task runs the specs for the core_extensions extension. These are run separately from the other extension tests to make sure none of the other extensions require the core_extensions.

== rake spec_bin

The +spec_bin+ rake task runs the specs for bin/sequel. These use an SQLite3 database, and require either the sqlite3 (non-JRuby) or jdbc-sqlite3 (JRuby) gem.

== rake spec_adapter (e.g. rake spec_postgres)

The spec_adapter specs run against a real database connection with nothing mocked, and test for correct results. They are slower than the standard specs, but they will catch errors that are mocked out by the default specs, as well as show issues that only occur on a certain database, adapter, or a combination of the two.

These specs are broken down into two parts. For each database, there are specific specs that only apply to that database, and these are called the adapter specs. There are also shared specs that apply to all (or almost all) databases, these are called the integration specs. For database types that don't have specific adapter tests, you can use rake spec_integration to just run the shared integration tests.

Each adapter needs a specific gem installed in order to run. Please see the {connecting to a database guide}[rdoc-ref:doc/opening_databases.rdoc] for which gem you need to install for the adapter you are testing.

== Environment variables

Sequel uses environment variables when testing to specify either the database to be tested or specify how testing should be done. You can also specify the databases to test by copying spec/spec_config.rb.example to spec/spec_config.rb and modifying it. See that file for details. It may be necessary to use +spec_config.rb+ as opposed to an environment variable if your database connection cannot be specified by a connection string.

Sequel does not create test databases automatically, except for file-based databases such as SQLite/H2/HSQLDB/Derby. It's up to the user to create the test databases manually and give Sequel a valid connection string in an environment variable (or setup the connection object in +spec_config.rb+).

=== Connection Strings

The SEQUEL_INTEGRATION_URL environment variable specifies the Database connection URL to use for the adapter and integration specs. Additionally, when running the adapter specs, you can also use the SEQUEL_ADAPTER_URL environment variable (e.g. SEQUEL_POSTGRES_URL for spec_postgres).

=== Other

SEQUEL_ASYNC_THREAD_POOL :: Use the async_thread_pool extension when running the specs
SEQUEL_ASYNC_THREAD_POOL_PREEMPT :: Use the async_thread_pool extension when running the specs, with the :preempt_async_thread option
SEQUEL_CHECK_PENDING :: Try running all specs (note, can cause lockups for some adapters), and raise errors for skipped specs that don't fail
SEQUEL_COLUMNS_INTROSPECTION :: Use the columns_introspection extension when running the specs
SEQUEL_CONCURRENT_EAGER_LOADING :: Use the async_thread_pool extension and concurrent_eager_loading plugin when running the specs
SEQUEL_CONNECTION_VALIDATOR :: Use the connection validator extension when running the specs
SEQUEL_DUPLICATE_COLUMNS_HANDLER :: Use the duplicate columns handler extension with value given when running the specs
SEQUEL_ERROR_SQL :: Use the error_sql extension when running the specs
SEQUEL_FIBER_CONCURRENCY :: Use the fiber_concurrency extension when running the adapter and integration specs
SEQUEL_FREEZE_DATABASE :: Freeze the database before running the integration specs
SEQUEL_IDENTIFIER_MANGLING :: Use the identifier_mangling extension when running the specs
SEQUEL_INDEX_CACHING :: Use the index_caching extension when running the specs
SEQUEL_INTEGER64 :: Use the integer64 extension when running the adapter or integration specs
SEQUEL_MODEL_PREPARED_STATEMENTS :: Use the prepared_statements plugin when running the specs
SEQUEL_MODEL_THROW_FAILURES :: Use the throw_failures plugin when running the specs
SEQUEL_NO_CACHE_ASSOCIATIONS :: Don't cache association metadata when running the specs
SEQUEL_NO_PENDING :: Don't skip any specs, try running all specs (note, can cause lockups for some adapters)
SEQUEL_PG_AUTO_PARAMETERIZE :: Use the pg_auto_parameterize extension when running the postgres specs
SEQUEL_PG_TIMESTAMPTZ :: Use the pg_timestamptz extension when running the postgres specs
SEQUEL_PRIMARY_KEY_LOOKUP_CHECK_VALUES :: Use the primary_key_lookup_check_values extension when running the adapter or integration specs
SEQUEL_QUERY_PER_ASSOCIATION_DB_0_URL :: Run query-per-association integration tests with multiple databases (all 4 must be set to run)
SEQUEL_QUERY_PER_ASSOCIATION_DB_1_URL :: Run query-per-association integration tests with multiple databases (all 4 must be set to run)
SEQUEL_QUERY_PER_ASSOCIATION_DB_2_URL :: Run query-per-association integration tests with multiple databases (all 4 must be set to run)
SEQUEL_QUERY_PER_ASSOCIATION_DB_3_URL :: Run query-per-association integration tests with multiple databases (all 4 must be set to run)
SEQUEL_SPLIT_SYMBOLS :: Turn on symbol splitting when running the adapter and integration specs
SEQUEL_SYNCHRONIZE_SQL :: Use the synchronize_sql extension when running the specs
SEQUEL_TZINFO_VERSION :: Force the given tzinfo version when running the specs (e.g. '>=2')
sequel-5.63.0/doc/thread_safety.rdoc000066400000000000000000000032561434214120600173460ustar00rootroot00000000000000= Thread Safety

Most Sequel usage (and all common Sequel usage) is thread safe by default. Specifically, multiple threads can operate on Database instances, Dataset instances, and Model classes concurrently without problems. In general, Database instances and Model classes are not modified after application startup, and Dataset instances are always frozen.

== Connection Pool

In order to allow multiple threads to operate on the same database at the same time, Sequel uses a connection pool. The connection pool is designed so that a thread uses a connection for the minimum amount of time, returning the connection to the pool as soon as it is done using the connection. If a thread requests a connection and the pool does not have an available connection, a new connection will be created. If the maximum number of connections in the pool has already been reached, the thread will block until a connection is available or the connection pool timeout has elapsed (in which case a Sequel::PoolTimeout error will be raised).

== Exceptions

This is a small list of things that are specifically non thread-safe. This is not an exhaustive list; there may be cases not mentioned here.

1) Model instances: Model instances are not thread-safe unless they are frozen first. Multiple threads should not operate on an unfrozen model instance concurrently.

2) Model class modifications: Model class modifications, such as adding associations and loading plugins, are not designed to be thread safe. You should not modify a class in one thread if any other thread can concurrently access it. Model subclassing is designed to be thread-safe, so you can create a model subclass in one thread and modify it safely.
sequel-5.63.0/doc/transactions.rdoc000066400000000000000000000233121434214120600172270ustar00rootroot00000000000000= Database Transactions

Sequel uses autocommit mode by default for all of its database adapters, so in general, if you want to use database transactions, you need to be explicit about it. There are a few cases where transactions are used implicitly by default:

* Dataset#import to insert many records at once
* Dataset#paged_each to iterate over large datasets in batches
* Model#save
* Model#destroy
* Migrations if the database supports transactional schema
* Database#use_cursor in the postgres adapter
* Dataset#lock on PostgreSQL if given a block
* setter methods created by the association_pks plugin
* move* methods in the list plugin

Everywhere else, it is up to you to use a database transaction if you want to.

== Basic Transaction Usage

In Sequel, the Database#transaction method should be called if you want to use a database transaction. This method must be called with a block.
If the block does not raise an exception, the transaction is committed:

  DB.transaction do # BEGIN
    DB[:foo].insert(1) # INSERT
  end # COMMIT

If the block raises a Sequel::Rollback exception, the transaction is rolled back, but no exception is raised outside the block:

  DB.transaction do # BEGIN
    raise Sequel::Rollback
  end # ROLLBACK
  # no exception raised

If any other exception is raised, the transaction is rolled back, and the exception is raised outside the block:

  DB.transaction do # BEGIN
    raise ArgumentError
  end # ROLLBACK
  # ArgumentError raised

If you want the current transaction to be rolled back when the transaction block exits instead of being committed (even if an exception is not raised), use Database#rollback_on_exit:

  DB.transaction do # BEGIN
    DB.rollback_on_exit
  end # ROLLBACK

If you want Sequel::Rollback exceptions to be reraised, use the rollback: :reraise option:

  DB.transaction(rollback: :reraise) do # BEGIN
    raise Sequel::Rollback
  end # ROLLBACK
  # Sequel::Rollback raised

If you always want to rollback (useful for testing), use the rollback: :always option:

  DB.transaction(rollback: :always) do # BEGIN
    DB[:foo].insert(1) # INSERT
  end # ROLLBACK
  # no exception raised

If you want to check whether you are currently in a transaction, use the Database#in_transaction? method:

  DB.in_transaction? # false
  DB.transaction do
    DB.in_transaction? # true
  end

== Transaction Hooks

You can add hooks to an in progress transaction that are called after the transaction commits or rolls back:

  x = nil
  DB.transaction do
    DB.after_commit{x = 1}
    DB.after_rollback{x = 2}
    x # nil
  end
  x # 1

  x = nil
  DB.transaction do
    DB.after_commit{x = 1}
    DB.after_rollback{x = 2}
    raise Sequel::Rollback
  end
  x # 2

== Nested Transaction Calls / Savepoints

You can nest calls to transaction, which by default just reuses the existing transaction:

  DB.transaction do # BEGIN
    DB.transaction do
      DB[:foo].insert(1) # INSERT
    end
  end # COMMIT

You can use the savepoint: true option in the inner transaction to explicitly use a savepoint (if the database supports it):

  DB.transaction do # BEGIN
    DB.transaction(savepoint: true) do # SAVEPOINT
      DB[:foo].insert(1) # INSERT
    end # RELEASE SAVEPOINT
  end # COMMIT

You can use the auto_savepoint: true option in the outer transaction to explicitly use a savepoint in the inner transaction (if the database supports it):

  DB.transaction(auto_savepoint: true) do # BEGIN
    DB.transaction do # SAVEPOINT
      DB[:foo].insert(1) # INSERT
    end # RELEASE SAVEPOINT
  end # COMMIT

If a Sequel::Rollback exception is raised inside the savepoint block, it will only rollback to the savepoint:

  DB.transaction do # BEGIN
    DB.transaction(savepoint: true) do # SAVEPOINT
      raise Sequel::Rollback
    end # ROLLBACK TO SAVEPOINT
    # no exception raised
  end # COMMIT

Other exceptions, unless rescued inside the outer transaction block, will rollback the savepoint and the outer transactions, since they are reraised by the transaction code:

  DB.transaction do # BEGIN
    DB.transaction(savepoint: true) do # SAVEPOINT
      raise ArgumentError
    end # ROLLBACK TO SAVEPOINT
  end # ROLLBACK
  # ArgumentError raised

If you want the current savepoint to be rolled back when the savepoint block exits instead of being committed (even if an exception is not raised), use Database#rollback_on_exit(savepoint: true):

  DB.transaction do # BEGIN
    DB.transaction(savepoint: true) do # SAVEPOINT
      DB.rollback_on_exit(savepoint: true)
    end # ROLLBACK TO SAVEPOINT
  end # COMMIT

  DB.transaction do # BEGIN
    DB.transaction(savepoint: true) do # SAVEPOINT
      DB.transaction(savepoint: true) do # SAVEPOINT
        DB.rollback_on_exit(savepoint: true)
      end # ROLLBACK TO SAVEPOINT
    end # RELEASE SAVEPOINT
  end # COMMIT

If you want the current savepoint and potentially enclosing savepoints to be rolled back when the savepoint blocks exit (even if an exception is not raised), use Database#rollback_on_exit(savepoint: integer):

  DB.transaction do # BEGIN
    DB.transaction(savepoint: true) do # SAVEPOINT
      DB.transaction(savepoint: true) do # SAVEPOINT
        DB.rollback_on_exit(savepoint: 2)
      end # ROLLBACK TO SAVEPOINT
    end # ROLLBACK TO SAVEPOINT
  end # COMMIT

  DB.transaction do # BEGIN
    DB.transaction(savepoint: true) do # SAVEPOINT
      DB.transaction(savepoint: true) do # SAVEPOINT
        DB.rollback_on_exit(savepoint: 3)
      end # ROLLBACK TO SAVEPOINT
    end # ROLLBACK TO SAVEPOINT
  end # ROLLBACK

=== Savepoint Hooks

When using savepoints, you can use the +:savepoint+ option to +after_commit+ or +after_rollback+ to use a savepoint hook. For +after_commit+, this will only run the hook after transaction commit if all enclosing savepoints are released (not rolled back). For +after_rollback+, this will run the hook after any enclosing savepoint is rolled back (before transaction commit), or after the transaction is rolled back if all enclosing savepoints are released:

  x = nil
  DB.transaction do # BEGIN
    DB.transaction(savepoint: true) do # SAVEPOINT
      DB.after_commit(savepoint: true){x = 1}
      DB.after_rollback(savepoint: true){x = 2}
      x # nil
    end # RELEASE SAVEPOINT
    x # nil
  end # COMMIT
  x # 1

  x = nil
  DB.transaction do # BEGIN
    DB.transaction(savepoint: true) do # SAVEPOINT
      DB.after_commit(savepoint: true){x = 1}
      DB.after_rollback(savepoint: true){x = 2}
      x # nil
      raise Sequel::Rollback
    end # ROLLBACK TO SAVEPOINT
    x # 2
  end # COMMIT
  x # 2

  x = nil
  DB.transaction do # BEGIN
    DB.transaction(savepoint: true) do # SAVEPOINT
      DB.after_commit(savepoint: true){x = 1}
      DB.after_rollback(savepoint: true){x = 2}
    end # RELEASE SAVEPOINT
    x # nil
    raise Sequel::Rollback
  end
  x # 2

== Prepared Transactions / Two-Phase Commit

Sequel supports database prepared transactions on PostgreSQL, MySQL, and H2. With prepared transactions, at the end of the transaction, the transaction is not immediately committed (it acts like a rollback). Later, you can call +commit_prepared_transaction+ to commit the transaction or +rollback_prepared_transaction+ to roll the transaction back. Prepared transactions are usually used with distributed databases to make sure all databases commit the same transaction or none of them do.

To use prepared transactions in Sequel, you provide a string as the value of the :prepare option:

  DB.transaction(prepare: 'foo') do # BEGIN
    DB[:foo].insert(1) # INSERT
  end # PREPARE TRANSACTION 'foo'

Later, you can commit the prepared transaction:

  DB.commit_prepared_transaction('foo')

or roll the prepared transaction back:

  DB.rollback_prepared_transaction('foo')

== Transaction Isolation Levels

The SQL standard supports 4 isolation levels: READ UNCOMMITTED, READ COMMITTED, REPEATABLE READ, and SERIALIZABLE. Not all databases implement the levels as specified in the standard (or implement the levels at all), but on most databases, you can specify which transaction isolation level you want to use via the :isolation option to Database#transaction. The isolation level is specified as one of the following symbols: :uncommitted, :committed, :repeatable, and :serializable.
Using this option makes Sequel use the correct transaction isolation syntax for your database:

  DB.transaction(isolation: :serializable) do # BEGIN
    # SET TRANSACTION ISOLATION LEVEL SERIALIZABLE
    DB[:foo].insert(1) # INSERT
  end # COMMIT

== Automatically Restarting Transactions

Sequel offers the ability to automatically restart transactions if specific types of errors are detected. For example, if you want to automatically restart a transaction if a serialization failure is detected:

  DB.transaction(isolation: :serializable, retry_on: [Sequel::SerializationFailure]) do
    ModelClass.find_or_create(name: 'Foo')
  end

At the serializable transaction isolation level, find_or_create may raise a Sequel::SerializationFailure exception if multiple threads simultaneously run that code. With the :retry_on option set, the transaction will be automatically retried until it succeeds.

Note that automatic retrying should not be used unless the entire transaction block is idempotent, as otherwise it can cause non-idempotent behavior to execute multiple times. For example, with the following code:

  DB.transaction(isolation: :serializable, retry_on: [Sequel::SerializationFailure]) do
    logger.info 'Ensuring existence of ModelClass with name Foo'
    ModelClass.find_or_create(name: 'Foo')
  end

The logger.info method will be called multiple times if there is a serialization failure. The :num_retries option can be used to set the maximum number of times to retry. It is set to 5 times by default.
sequel-5.63.0/doc/validations.rdoc000066400000000000000000000654761434214120600170550ustar00rootroot00000000000000= Model Validations

This guide is based on http://guides.rubyonrails.org/active_record_validations.html

== Overview

This guide is designed to teach you how to use Sequel::Model's validation support. It attempts to explain how Sequel's validation support works, what validations are useful for, and how to use the +validation_helpers+ plugin to add specific types of validations to your models.

== Why Validations?

Validations are primarily useful for associating error messages to display to the user with specific attributes on the model. It is also possible to use them to enforce data integrity for model instances, but that's not recommended unless the only way to modify the database is through model instances, or you have complex data integrity requirements that aren't possible to specify via database-level constraints.

== Data Integrity

Data integrity is best handled by the database itself. For example, if you have a date column that should never contain a NULL value, the column should be specified in the database as NOT NULL. If you have an integer column that should only have values from 1 to 10, there should be a CHECK constraint that ensures that the value of that column is between 1 and 10. And if you have a varchar column where the length of the entries should be between 2 and 255, you should be setting the size of the varchar column to 255, and using a CHECK constraint to ensure that all values have at least two characters.

Unfortunately, sometimes there are situations where that is not possible. For example, if you don't have control over the schema and cannot add constraints, or you are using MySQL (which doesn't support CHECK constraints), it may be necessary to use a model validation to enforce the database integrity. In some cases you may have data integrity requirements that are difficult to enforce via database constraints, especially if you are targeting multiple database types.
Validations are generally easier to write than database constraints, so if data integrity isn't of great importance, using validations to provide minimal data integrity may be acceptable.

== Usage

Regardless of whether you are using validations for data integrity or just for error messages, the usage is the same. Whenever you attempt to save a model instance, before sending the INSERT or UPDATE query to the database, Sequel::Model will attempt to validate the instance by calling +validate+. If +validate+ does not add any errors to the object, the object is considered valid, and valid? will return true. If +validate+ adds any errors to the object, valid? will return false, and the save will either raise a Sequel::ValidationFailed exception (the default), or return nil (if +raise_on_save_failure+ is false).

By validating the object before sending the database query, Sequel attempts to ensure that invalid objects are not saved in the database. However, if you are not enforcing the same validations in the database via constraints, it's possible that invalid data can get added to the database via some other method. This leads to odd cases such as retrieving a model object from the database, not making any changes to it, attempting to save it, and having the save raise an error.

== Skipping Validations

Sequel::Model uses the +save+ method to save model objects, and all saving of model objects passes through the +save+ method. This means that all saving of model objects goes through the validation process.

The only way to skip validations when saving a model object is to pass the validate: false option to +save+. If you use that option, +save+ will not attempt to validate the object before saving it.

Note that it's always possible to update the instance's database row without using +save+, by using a Sequel dataset to update it, or updating it via another program. Validations will only be run if you call +save+ on the model object, or another model method that calls +save+. For example, the +create+ class method instantiates a new instance of the model, and then calls +save+, so it validates the object. However, the +insert+ class method is a dataset method that just inserts the raw hash into the database, so it doesn't validate the object.

== valid? and +validate+

Sequel::Model uses the valid? method to check whether or not a model instance is valid. This method should not be overridden. Instead, the +validate+ method should be overridden to add validations to the model:

  class Album < Sequel::Model
    def validate
      super
      errors.add(:name, 'cannot be empty') if !name || name.empty?
    end
  end

  Album.new.valid? # false
  Album.new(name: '').valid? # false
  Album.new(name: 'RF').valid? # true

If the valid? method returns false, you can call the +errors+ method to get an instance of Sequel::Model::Errors describing the errors on the model:

  a = Album.new
  # => #<Album @values={}>
  a.valid?
  # => false
  a.errors
  # => {:name=>["cannot be empty"]}

You may notice that the +errors+ method appears to return a hash. That's because Sequel::Model::Errors is a subclass of Hash.

Note that calling the +errors+ method before the valid? method will result in +errors+ being empty:

  Album.new.errors
  # => {}

So just remember that you shouldn't check +errors+ until after you call valid?.

Sequel::Model::Errors has some helper methods that make it easy to get an array of all of the instance's errors, or for checking for errors on a specific attribute. These will be covered later in this guide.
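
Tying the usage notes above together, here is a minimal sketch of the typical flow, using the Album class defined above (+raise_on_save_failure+ defaults to true):

  album = Album.new(name: '')
  album.valid?           # => false
  album.save             # raises Sequel::ValidationFailed

  Album.raise_on_save_failure = false
  album.save             # => nil (save failed, no exception raised)
  album.errors.on(:name) # => ["cannot be empty"]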

== +validation_helpers+

While Sequel::Model does provide a validations framework, it does not define any built-in validation helper methods that you can call. However, Sequel ships with a plugin called +validation_helpers+ that handles most basic validation needs. So instead of specifying validations like this:

  class Album < Sequel::Model
    def validate
      super
      errors.add(:name, 'cannot be empty') if !name || name.empty?
      errors.add(:name, 'is already taken') if name && new? && Album[{name: name}]
      errors.add(:website, 'cannot be empty') if !website || website.empty?
      errors.add(:website, 'is not a valid URL') unless website =~ /\Ahttps?:\/\//
    end
  end

You can call simple methods such as:

  class Album < Sequel::Model
    plugin :validation_helpers

    def validate
      super
      validates_presence [:name, :website]
      validates_unique :name
      validates_format /\Ahttps?:\/\//, :website, message: 'is not a valid URL'
    end
  end

Other than +validates_unique+, which has its own API, the methods defined by +validation_helpers+ have one of the following two APIs:

(atts, opts={}):: For methods such as +validates_presence+, which do not take an additional argument.
(arg, atts, opts={}):: For methods such as +validates_format+, which take an additional argument.

For both of these APIs, +atts+ is either a column symbol or array of column symbols, and +opts+ is an optional options hash.

The following methods are provided by +validation_helpers+:

=== +validates_presence+

This method checks that the specified attributes are not blank. In general, if an object responds to blank?, it calls the method to determine if the object is blank. Otherwise, nil is considered blank, empty strings or strings that just contain whitespace are blank, and objects that respond to empty? and return true are considered blank. All other objects are considered non-blank for the purposes of +validates_presence+. This means that +validates_presence+ is safe to use on boolean columns where you want to ensure that either true or false is used, but not NULL.

  class Album < Sequel::Model
    def validate
      super
      validates_presence [:name, :website, :debut_album]
    end
  end

=== +validates_not_null+

This is similar to +validates_presence+, but only checks for NULL/nil values, allowing other blank objects such as empty strings or strings with just whitespace.

=== +validates_format+

+validates_format+ is used to ensure that the string value of the specified attributes matches the specified regular expression. It's useful for checking that fields such as email addresses, URLs, UPC codes, ISBN codes, and the like, are in a specific format. It can also be used to validate that only certain characters are used in the string.

  class Album < Sequel::Model
    def validate
      super
      validates_format /\A\d\d\d-\d-\d{7}-\d-\d\z/, :isbn
      validates_format /\A[0-9a-zA-Z:' ]+\z/, :name
    end
  end

=== +validates_exact_length+, +validates_min_length+, +validates_max_length+, +validates_length_range+

These methods all deal with ensuring that the length of the specified attribute matches the criteria specified by the first argument to the method.
+validates_exact_length+ is for checking that the length of the attribute is equal to that value, +validates_min_length+ is for checking that the length of the attribute is greater than or equal to that value, +validates_max_length+ is for checking that the length of the attribute is less than or equal to that value, and +validates_length_range+ is for checking that the length of the attribute falls in the value, which should be a range or an object that responds to include?.

  class Album < Sequel::Model
    def validate
      super
      validates_exact_length 17, :isbn
      validates_min_length 3, :name
      validates_max_length 100, :name
      validates_length_range 3..100, :name
    end
  end

=== +validates_integer+, +validates_numeric+

These methods check that the specified attributes can be valid integers or valid floats. +validates_integer+ tests the attribute value using Kernel.Integer and +validates_numeric+ tests the attribute using Kernel.Float. If the Kernel methods raise an exception, the validation fails, otherwise it succeeds.

  class Album < Sequel::Model
    def validate
      super
      validates_integer :copies_sold
      validates_numeric :replaygain
    end
  end

=== +validates_includes+

+validates_includes+ checks that the specified attributes are included in the first argument to the method, which is usually an array, but can be any object that responds to include?.

  class Album < Sequel::Model
    def validate
      super
      validates_includes [1, 2, 3, 4, 5], :rating
    end
  end

=== +validates_operator+

+validates_operator+ checks that a given +operator+ method returns a truthy value when called on the attribute with a specified value for comparison. Generally, this is used for inequality checks (>, >=, etc.) but any method that can be called on the attribute that accepts an argument and returns a truthy value may be used.

  class Album < Sequel::Model
    def validate
      super
      validates_operator(:>, 3, :tracks)
    end
  end

=== +validates_type+

+validates_type+ checks that the specified attributes are instances of the class specified in the first argument. The class can be specified as the class itself, or as a string or symbol with the class name, or as an array of classes.

  class Album < Sequel::Model
    def validate
      super
      validates_type String, [:name, :website]
      validates_type :Artist, :artist
      validates_type [String, Integer], :foo
    end
  end

=== +validates_schema_types+

+validates_schema_types+ uses the database metadata for the model's table to determine which ruby type(s) should be used for the given database type, and calls +validates_type+ with that ruby type. It's designed to be used with the default raise_on_typecast_failure = false setting, where Sequel will attempt to typecast values, but silently ignore any errors raised:

  album = Album.new
  album.copies_sold = '1'
  album.copies_sold # => 1
  album.copies_sold = 'banana'
  album.copies_sold # => 'banana'

In general, you can call +validates_schema_types+ with all columns. If any of those columns has a value that doesn't match the type that Sequel expects, it's probably because the column was set and Sequel was not able to typecast it correctly, which means it probably isn't valid. For example, let's say that you want to check that a couple of columns contain valid dates:

  class Album < Sequel::Model
    def validate
      super
      validates_schema_types [:release_date, :record_date]
    end
  end

  album = Album.new
  album.release_date = 'banana'
  album.release_date # => 'banana'
  album.record_date = '2010-05-17'
  album.record_date # => #<Date: 2010-05-17 ...>

  album.valid?
  # => false
  album.errors
  # => {:release_date=>["is not a valid date"]}

For web applications, you usually want the default setting, so that you can accept all of the input without raising an error, and then present the user with all error messages. If raise_on_typecast_failure = true is set and the user submits any invalid data, Sequel will immediately raise an error. +validates_schema_types+ is helpful because it allows you to check for typecasting errors on columns, and provides a good default error message stating that the attribute is not of the expected type.

=== +validates_unique+

+validates_unique+ has a similar but different API than the other +validation_helpers+ methods. It takes an arbitrary number of arguments, which should be column symbols or arrays of column symbols. If any argument is a symbol, Sequel sets up a unique validation for just that column. If any argument is an array of symbols, Sequel sets up a unique validation for the combination of the columns. This means that you get different behavior depending on whether you call the object with an array or with separate arguments. For example:

  validates_unique(:name, :artist_id)

Will set up 2 separate uniqueness validations. It will make it so that no two albums can have the same name, and that each artist can only be associated with one album. In general, that's probably not what you want. You probably want it so that two albums can have the same name, unless they are by the same artist. To do that, you need to use an array:

  validates_unique([:name, :artist_id])

That sets up a single uniqueness validation for the combination of the fields. You can mix and match the two approaches. For example, if all albums should have a unique UPC, and no artist can have duplicate album names:

  validates_unique(:upc, [:name, :artist_id])

+validates_unique+ also accepts a block to scope the uniqueness constraint. For example, if you want to ensure that all active albums have a unique name, but inactive albums can duplicate the name:

  validates_unique(:name){|ds| ds.where(:active)}

If you provide a block, it is called with the dataset to use for the uniqueness check, which you can then filter to scope the uniqueness validation to a subset of the model's dataset.

You can also include an options hash as the last argument. Unlike the other validations, the options hash for +validates_unique+ only recognizes these options:

:dataset :: The base dataset to use for the unique query, defaults to the model's dataset
:message :: The message to use
:only_if_modified :: Only check the uniqueness if the object is new or one of the columns has been modified (true by default).
:where :: A callable object where call takes three arguments, a dataset, the current object, and an array of columns, and should return a modified dataset that is filtered to include only rows with the same values as the current object for each column in the array. This is useful any time the unique constraints are derived from the columns and not the columns themselves (such as unique constraints on lower(column)).

+validates_unique+ is the only method in +validation_helpers+ that checks with the database. Attempting to validate uniqueness outside of the database suffers from a race condition, so any time you want to add a uniqueness validation, you should make sure to add a uniqueness constraint or unique index on the underlying database table. See the {"Migrations and Schema Modification" guide}[rdoc-ref:doc/migration.rdoc] for details on how to do that.
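
For example, to back the validates_unique([:name, :artist_id]) validation above with a matching unique index, a minimal migration sketch (assuming the albums table from the earlier examples) could look like:

  Sequel.migration do
    change do
      alter_table(:albums) do
        # Enforce the same uniqueness at the database level
        add_index [:name, :artist_id], unique: true
      end
    end
  end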

== +validation_helpers+ Options

All other +validation_helpers+ methods accept the following options:

=== :message

The :message option overrides the default validation error message. Can be either a string or a proc. If a string, it is used directly. If a proc, the proc is called and should return a string. If the validation method takes an argument before the array of attributes, that argument is passed as an argument to the proc.

  class Album < Sequel::Model
    def validate
      super
      validates_presence :copies_sold, message: 'was not given'
      validates_min_length 3, :name, message: lambda{|s| "should be more than #{s} characters"}
    end
  end

=== :allow_nil

The :allow_nil option skips the validation if the attribute value is nil or if the attribute is not present. It's commonly used when you have a +validates_presence+ method already on the attribute, and don't want multiple validation errors for the same attribute:

  class Album < Sequel::Model
    def validate
      super
      validates_presence :copies_sold
      validates_integer :copies_sold, allow_nil: true
    end
  end

Without the :allow_nil option to +validates_integer+, if the copies_sold attribute was nil, you would get two separate validation errors, instead of a single validation error.

=== :allow_blank

The :allow_blank option is similar to the :allow_nil option, but instead of just skipping the attribute for nil values, it skips the attribute for all blank values. For example, let's say that artists can have a website. If they have one, it should be formatted like a URL, but it can be nil or an empty string if they don't have one.

  class Album < Sequel::Model
    def validate
      super
      validates_format /\Ahttps?:\/\//, :website, allow_blank: true
    end
  end

  a = Album.new
  a.website = ''
  a.valid? # true

=== :allow_missing

The :allow_missing option is different from the :allow_nil option, in that instead of checking if the attribute value is nil, it checks if the attribute is present in the model instance's values hash. :allow_nil will skip the validation when the attribute is in the values hash and has a nil value and when the attribute is not in the values hash. :allow_missing will only skip the validation when the attribute is not in the values hash. If the attribute is in the values hash but has a nil value, :allow_missing will not skip it.

The purpose of this option is to work correctly with missing columns when inserting or updating records. Sequel only sends the attributes in the values hash when doing an insert or update. If the attribute is not present in the values hash, Sequel doesn't specify it, so the database will use the table's default value when inserting the record, or not modify the value when saving it. This is different from having an attribute in the values hash with a value of nil, which Sequel will send as NULL.

If your database table has a non NULL default, this may be a good option to use. You don't want to use allow_nil, because if the attribute is in values but has a nil value, Sequel will attempt to insert a NULL value into the database, instead of using the database's default.

== Conditional Validation

Because Sequel uses the +validate+ instance method to handle validation, making validations conditional is easy, as it works exactly the same as ruby's standard conditionals. For example, if you only want to validate an attribute when creating an object:

  validates_presence :name if new?

If you only want to validate the attribute when updating an existing object:

  validates_integer :copies_sold unless new?

Let's say you only want to make a validation conditional on the status of the object:

  validates_presence :name if status_id > 1
  validates_integer :copies_sold if status_id > 3

You can use all the standard ruby conditional expressions, such as +case+:

  case status_id
  when 1
    validates_presence :name
  when 2
    validates_presence [:name, :artist_id]
  when 3
    validates_presence [:name, :artist_id, :copies_sold]
  end

You can make the input to some validations dependent on the values of another attribute:

  validates_min_length(status_id > 2 ? 5 : 10, [:name])
  validates_presence(status_id < 2 ? :name : [:name, :artist_id])

Basically, there's no special syntax you have to use for conditional validations. Just handle conditionals the way you would in other ruby code.

== Default Error Messages

These are the default error messages for all of the helper methods in +validation_helpers+:

:exact_length :: is not #{arg} characters
:format :: is invalid
:includes :: is not in range or set: #{arg.inspect}
:integer :: is not a number
:length_range :: is too short or too long
:max_length :: is longer than #{arg} characters
:min_length :: is shorter than #{arg} characters
:not_null :: is not present
:numeric :: is not a number
:schema_types :: is not a valid #{schema_type}
:type :: is not a #{arg}
:presence :: is not present
:unique :: is already taken

== Modifying the Default Options

You can override the Sequel::Model#default_validation_helpers_options private method to override the default settings on a per validation type basis:

  class Sequel::Model
    private

    def default_validation_helpers_options(type)
      case type
      when :presence
        {message: 'cannot be empty'}
      when :includes
        {message: 'invalid option', allow_nil: true}
      when :max_length
        {message: lambda{|i| "cannot be more than #{i} characters"}, allow_nil: true}
      when :format
        {message: 'contains invalid characters', allow_nil: true}
      else
        super
      end
    end
  end

== Custom Validations

Just as the first validation example showed, you aren't limited to the validation methods defined by +validation_helpers+. Inside the +validate+ method, you can add your own validations by adding to the instance's errors using errors.add whenever an attribute is not valid:

  class Album < Sequel::Model
    def validate
      super
      errors.add(:release_date, 'cannot be before record date') if release_date < record_date
    end
  end

Just like conditional validations, with custom validations you are just using the standard ruby conditionals, and calling errors.add with the column symbol and the error message if you detect invalid data.

It's fairly easy to create your own custom validations that can be reused in all your models. For example, if there is a common need to validate that one column in the model comes before another column:

  class Sequel::Model
    def validates_after(col1, col2)
      errors.add(col1, "cannot be before #{col2}") if send(col1) < send(col2)
    end
  end

  class Album < Sequel::Model
    def validate
      super
      validates_after(:release_date, :record_date)
    end
  end

== Setting Validations for All Models

Let's say you want to add some default validations that apply to all of your model classes. It's fairly easy to do by overriding the +validate+ method in Sequel::Model, adding some validations to it, and if you override +validate+ in your model classes, just make sure to call +super+.

  class Sequel::Model
    def self.string_columns
      @string_columns ||= columns.reject{|c| db_schema[c][:type] != :string}
    end

    def validate
      super
      validates_format(/\A[^\x00-\x08\x0e-\x1f\x7f\x81\x8d\x8f\x90\x9d]*\z/n, model.string_columns, message: "contains invalid characters")
    end
  end

This will make sure that all string columns in the model are validated to make sure they don't contain any invalid characters. Just remember that if you override the +validate+ method in your model classes, you need to call +super+:

  class Album < Sequel::Model
    def validate
      super # Important!
      validates_presence :name
    end
  end

If you forget to call +super+, the validations that you defined in Sequel::Model will not be enforced. It's a good idea to call super whenever you override one of Sequel::Model's methods, unless you specifically do not want the default behavior.

== Sequel::Model::Errors

As mentioned earlier, Sequel::Model::Errors is a subclass of Hash with a few special methods, the most common of which are described here:

=== +add+

+add+ is the method used to add error messages for a given column. It takes the column symbol as the first argument and the error message as the second argument:

  errors.add(:name, 'is not valid')

=== +on+

+on+ is a method usually used after validation has been completed, to determine if there were any errors on a given attribute. It takes the column symbol, and returns an array of error messages if there were any, or nil if not:

  errors.on(:name)

If you want to make some validations dependent upon the results of other validations, you may want to use +on+ inside your +validate+ method:

  validates_integer(:release_date) unless errors.on(:record_date)

Here, you don't care about validating the release date if there were validation errors for the record date.

=== +full_messages+

+full_messages+ returns an array of error messages for the object. It's commonly called after validation to get a list of error messages to display to the user:

  album.errors
  # => {:name=>["cannot be empty"]}
  album.errors.full_messages
  # => ["name cannot be empty"]

Note that the column names used in the errors are used verbatim in the error messages. If you want full control over the error messages, you can use +add+ with a literal string:

  errors.add(:name, Sequel.lit("Album name is not valid"))
  errors.full_messages
  # => ["Album name is not valid"]

Alternatively, feel free to override Sequel::Model::Errors#full_messages. As long as it returns an array of strings, overriding it is completely safe.

=== +count+

+count+ returns the total number of error messages in the errors.

  album.errors.count # => 1

== Other Validation Plugins

=== +constraint_validations+

Sequel ships with a +constraint_validations+ plugin and extension, that allows you to setup constraints when creating your database tables, and have Model validations automatically created that mirror those constraints.

=== +auto_validations+

auto_validations uses the not null and type information obtained from parsing the database schema, and the unique index information from parsing the database's index information, and automatically sets up not_null, string length, schema type, and unique validations. If you don't require customizing validation messages on a per-column basis, it can DRY up a lot of validation code.

=== +validation_class_methods+

Sequel ships with the +validation_class_methods+ plugin, which uses class methods instead of instance methods to define validations. It exists mostly for legacy compatibility, but it is still supported.
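
For example, the presence validation from earlier could be written with class methods (a brief sketch; see the plugin's documentation for the full API):

  class Album < Sequel::Model
    plugin :validation_class_methods
    validates_presence_of :name
  end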
sequel-5.63.0/doc/virtual_rows.rdoc000066400000000000000000000211721434214120600172610ustar00rootroot00000000000000= Virtual Row Blocks

Dataset methods where, order, and select all take blocks that are referred to as virtual row blocks. Many other dataset methods pass the blocks they are given into one of those three methods, so there are actually many Sequel::Dataset methods that take virtual row blocks.

== Why Virtual Rows

Virtual rows offer a less verbose way to express many queries. For example, by default if you want to express an inequality filter in Sequel, you can do:

  dataset.where(Sequel[:a] > Sequel.function(:b, :c))
  # WHERE (a > b(c))

With virtual rows, you can use the less verbose:

  dataset.where{a > b(c)}
  # WHERE (a > b(c))

== Regular Procs vs Instance Evaled Procs

Virtual row blocks behave differently depending on whether the block accepts an argument. If the block accepts an argument, it is called with an instance of Sequel::SQL::VirtualRow. If it does not accept an argument, it is evaluated in the context of an instance of Sequel::SQL::VirtualRow.

  ds = DB[:items]

  # Regular block
  ds.where{|o| o.column > 1}
  # WHERE (column > 1)

  # Instance-evaled block
  ds.where{column > 1}
  # WHERE (column > 1)

If you aren't familiar with the difference between regular blocks and instance evaled blocks: inside regular blocks, methods called without an explicit receiver call the method on the receiver in the surrounding scope, while instance evaled blocks call the method on the receiver of the instance_eval call (the Sequel::SQL::VirtualRow instance in this case). In both cases, local variables available in the surrounding scope will be available inside the block. However, instance variables in the surrounding scope will not be available inside the block if using an instance evaled block, and methods called without an explicit receiver inside an instance evaled block will not call methods in the surrounding scope. For example:

  def self.a
    42
  end
  b = 32
  @d = 100

  # Regular block
  ds.where{|o| o.c > a - b + @d}
  # WHERE (c > 110)

  # Instance-evaled block
  ds.where{c > a - b + @d}
  # WHERE (c > ((a - 32) + NULL))

There are three related differences here:

* Regular blocks use +o.c+ instead of just +c+
* +a+ results in 42 in the regular block, but creates an expression object in the instance evaled block
* @d results in 100 in the regular block, but nil in the instance evaled block

In the regular block, you need to call +c+ with an explicit receiver (the virtual row block argument), while in the instance evaled block +c+ can be called directly, as the default receiver has changed inside the block.

For +a+, note how ruby calls the method on the receiver of the surrounding scope in the regular block, which returns an integer, and does the subtraction before Sequel gets access to it. In the instance evaled block, calling +a+ without a receiver calls the a method on the VirtualRow instance. For @d, note that in a regular block, the value hasn't changed, but in the instance evaled block, instance variable access returns nil. For +b+, note that it operates the same in both cases, as it is a local variable.

The choice for whether to use a regular block or an instance evaled block is up to you. The same things can be accomplished with both. Instance evaled blocks tend to produce shorter code, but by modifying the scope they can be more difficult to understand.

If you are not sure which to use, use instance evaled blocks unless you need to call methods or access instance variables of the surrounding scope inside the block.
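
If you do prefer instance evaled blocks but need data from the surrounding scope, one common workaround (building on the fact that local variables remain visible inside the block) is to copy the data into a local variable first; @min_price here is a hypothetical instance variable:

  @min_price = 100
  min_price = @min_price # locals remain visible inside the block
  ds.where{price > min_price}
  # WHERE (price > 100)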

== Local Variables vs Method Calls

If you have a method that accepts 0 arguments and has the same name as a local variable, you can call it with () to differentiate the method call from the local variable access. This is mostly useful in instance evaled blocks:

  b = 32
  ds.where{b() > b}
  # WHERE b > 32

It's also possible to use an explicit self receiver in instance evaled blocks:

  b = 32
  ds.where{self.b > b}
  # WHERE b > 32

== VirtualRow Methods

VirtualRow is a class that returns SQL::Identifiers or SQL::Functions depending on how it is called.

== SQL::Identifiers - Regular columns

SQL::Identifiers can be thought of as regular column references in SQL, not qualified by any table. You get an SQL::Identifier if the method is called without arguments:

  ds.where{|o| o.column > 1}
  ds.where{column > 1}
  # WHERE (column > 1)

== SQL::QualifiedIdentifiers - Qualified columns

You can create qualified identifiers by calling #[] on an identifier:

  ds.where{|o| o.table[:column] > 1}
  ds.where{table[:column] > 1}
  # WHERE table.column > 1

== SQL::Functions - SQL function calls

SQL::Functions can be thought of as function calls in SQL. You get a simple function call if you call a method with arguments:

  ds.where{|o| o.function(1) > 1}
  ds.where{function(1) > 1}
  # WHERE function(1) > 1

To call a SQL function with multiple arguments, just use those arguments in your function call:

  ds.where{|o| o.function(1, o.a) > 1}
  ds.where{function(1, a) > 1}
  # WHERE function(1, a) > 1

If the SQL function does not accept any arguments, create an identifier, then call the function method on it to produce a function:

  ds.select{|o| o.version.function}
  ds.select{version.function}
  # SELECT version()

To use the SQL wildcard (*) as the sole argument in a function call, create a function without arguments, then call the * method on the function:

  ds.select{|o| o.count.function.*}
  ds.select{count.function.*}
  # SELECT count(*)

To append the DISTINCT keyword before the method arguments, just call the distinct method on the returned Function:

  ds.select{|o| o.count(o.col1).distinct}
  ds.select{count(col1).distinct}
  # SELECT count(DISTINCT col1)

  ds.select{|o| o.count(o.col1, o.col2).distinct}
  ds.select{count(col1, col2).distinct}
  # SELECT count(DISTINCT col1, col2)

== SQL::Functions with windows - SQL window function calls

To create a window function call, just call the over method on the Function object returned, with the options for the window:

  ds.select{|o| o.rank.function.over}
  ds.select{rank.function.over}
  # SELECT rank() OVER ()

  ds.select{|o| o.count.function.*.over}
  ds.select{count.function.*.over}
  # SELECT count(*) OVER ()

  ds.select{|o| o.sum(o.col1).over(partition: o.col2, order: o.col3)}
  ds.select{sum(col1).over(partition: col2, order: col3)}
  # SELECT sum(col1) OVER (PARTITION BY col2 ORDER BY col3)

== Operators

VirtualRows use method_missing to handle almost all method calls. Since the objects given by method_missing are SQL::Identifiers or SQL::Functions, you can use all operators that they provide (see DatasetFiltering[http://sequel.jeremyevans.net/rdoc/files/doc/dataset_filtering_rdoc.html#label-Filtering+using+expressions]):

  ds.select{|o| o.price - 100}
  ds.select{price - 100}
  # SELECT (price - 100)

  ds.where{|o| (o.price < 200) & (o.tax * 100 >= 0.23)}
  ds.where{(price < 200) & (tax * 100 >= 0.23)}
  # WHERE ((price < 200) AND ((tax * 100) >= 0.23))

However, VirtualRows have special handling of some operator methods to make certain things easier. The operators all use a prefix form.

=== Math Operators

The standard +, -, *, and / mathematical operators are defined:

  ds.select{|o| o.-(1, o.a).as(o.b)}
  ds.select{self.-(1, a).as(b)}
  # SELECT (1 - a) AS b

=== Boolean Operators

The & and | methods are defined to use AND and OR:

  ds.where{|o| o.&({a: :b}, :c)}
  ds.where{self.&({a: :b}, :c)}
  # WHERE ((a = b) AND c)

The ~ method is defined to do inversion:

  ds.where{|o| o.~({a: 1, b: 2})}
  ds.where{self.~({a: 1, b: 2})}
  # WHERE ((a != 1) OR (b != 2))

=== Inequality Operators

The standard >, <, >=, and <= inequality operators are defined:

  ds.where{|o| o.>(1, :c)}
  ds.where{self.>(1, :c)}
  # WHERE (1 > c)

== Returning multiple values

It's common when using select and order virtual row blocks to want to return multiple values. If you want to do that, you just need to return an array:

  ds.select{|o| [o.column1, o.sum(o.column2).as(o.sum)]}
  ds.select{[column1, sum(column2).as(sum)]}
  # SELECT column1, sum(column2) AS sum

Note that if you forget the array brackets, you'll end up with a syntax error:

  # Invalid ruby syntax
  ds.select{|o| o.column1, o.sum(o.column2).as(o.sum)}
  ds.select{column1, sum(column2).as(sum)}

== Split symbols

Note that if you turn on symbol splitting for backwards compatibility, Sequel will split virtual row methods with double underscores and return them as qualified identifiers:

  Sequel.split_symbols = true
  ds.where{|o| o.table__column}
  ds.where{table__column}
  # WHERE table.column

It's not recommended that you rely on this; it's better to convert the calls to the recommended form:

  ds.where{|o| o.table[:column]}
  ds.where{table[:column]}
sequel-5.63.0/lib/000077500000000000000000000000001434214120600136465ustar00rootroot00000000000000sequel-5.63.0/lib/sequel.rb000066400000000000000000000000771434214120600154750ustar00rootroot00000000000000# frozen-string-literal: true

require_relative 'sequel/model'
sequel-5.63.0/lib/sequel/000077500000000000000000000000001434214120600151445ustar00rootroot00000000000000sequel-5.63.0/lib/sequel/adapters/000077500000000000000000000000001434214120600167475ustar00rootroot00000000000000sequel-5.63.0/lib/sequel/adapters/ado.rb000066400000000000000000000216161434214120600200450ustar00rootroot00000000000000# frozen-string-literal: true

require 'win32ole'

module Sequel
  # The ADO adapter provides connectivity to ADO databases in Windows.
module ADO # ADO constants (DataTypeEnum) # Source: https://msdn.microsoft.com/en-us/library/ms675318(v=vs.85).aspx AdBigInt = 20 AdBinary = 128 #AdBoolean = 11 #AdBSTR = 8 #AdChapter = 136 #AdChar = 129 #AdCurrency = 6 #AdDate = 7 AdDBDate = 133 #AdDBTime = 134 AdDBTimeStamp = 135 #AdDecimal = 14 #AdDouble = 5 #AdEmpty = 0 #AdError = 10 #AdFileTime = 64 #AdGUID = 72 #AdIDispatch = 9 #AdInteger = 3 #AdIUnknown = 13 AdLongVarBinary = 205 #AdLongVarChar = 201 #AdLongVarWChar = 203 AdNumeric = 131 #AdPropVariant = 138 #AdSingle = 4 #AdSmallInt = 2 #AdTinyInt = 16 #AdUnsignedBigInt = 21 #AdUnsignedInt = 19 #AdUnsignedSmallInt = 18 #AdUnsignedTinyInt = 17 #AdUserDefined = 132 AdVarBinary = 204 #AdVarChar = 200 #AdVariant = 12 AdVarNumeric = 139 #AdVarWChar = 202 #AdWChar = 130 bigint = Object.new def bigint.call(v) v.to_i end numeric = Object.new def numeric.call(v) if v.include?(',') BigDecimal(v.tr(',', '.')) else BigDecimal(v) end end binary = Object.new def binary.call(v) Sequel.blob(v.pack('c*')) end date = Object.new def date.call(v) Date.new(v.year, v.month, v.day) end CONVERSION_PROCS = {} [ [bigint, AdBigInt], [numeric, AdNumeric, AdVarNumeric], [date, AdDBDate], [binary, AdBinary, AdVarBinary, AdLongVarBinary] ].each do |callable, *types| callable.freeze types.each do |i| CONVERSION_PROCS[i] = callable end end CONVERSION_PROCS.freeze class Database < Sequel::Database set_adapter_scheme :ado attr_reader :conversion_procs # In addition to the usual database options, # the following options have an effect: # # :command_timeout :: Sets the time in seconds to wait while attempting # to execute a command before cancelling the attempt and generating # an error. Specifically, it sets the ADO CommandTimeout property. # :driver :: The driver to use in the ADO connection string. If not provided, a default # of "SQL Server" is used. # :conn_string :: The full ADO connection string. If this is provided, # the usual options are ignored. # :provider :: Sets the Provider of this ADO connection (for example, "SQLOLEDB"). # If you don't specify a provider, the default one used by WIN32OLE # has major problems, such as creating a new native database connection # for every query, which breaks things such as temporary tables. # # Pay special attention to the :provider option, as without specifying a provider, # many things will be broken. The SQLNCLI10 provider appears to work well if you # are connecting to Microsoft SQL Server, but it is not the default as that is not # always available and would break backwards compatability. def connect(server) opts = server_opts(server) s = opts[:conn_string] || "driver=#{opts[:driver]};server=#{opts[:host]};database=#{opts[:database]}#{";uid=#{opts[:user]};pwd=#{opts[:password]}" if opts[:user]}" handle = WIN32OLE.new('ADODB.Connection') handle.CommandTimeout = opts[:command_timeout] if opts[:command_timeout] handle.Provider = opts[:provider] if opts[:provider] handle.Open(s) handle end def disconnect_connection(conn) conn.Close rescue WIN32OLERuntimeError nil end def freeze @conversion_procs.freeze super end # Just execute so it doesn't attempt to return the number of rows modified. def execute_ddl(sql, opts=OPTS) execute(sql, opts) end # Just execute so it doesn't attempt to return the number of rows modified. 
def execute_insert(sql, opts=OPTS) execute(sql, opts) end # Use pass by reference in WIN32OLE to get the number of affected rows, # unless a provider is in use (since some providers don't seem to # return the number of affected rows, but the default provider appears # to). def execute_dui(sql, opts=OPTS) return super if opts[:provider] synchronize(opts[:server]) do |conn| begin log_connection_yield(sql, conn){conn.Execute(sql, 1)} WIN32OLE::ARGV[1] rescue ::WIN32OLERuntimeError => e raise_error(e) end end end def execute(sql, opts=OPTS) synchronize(opts[:server]) do |conn| begin r = log_connection_yield(sql, conn){conn.Execute(sql)} begin yield r if defined?(yield) ensure begin r.close rescue ::WIN32OLERuntimeError end end rescue ::WIN32OLERuntimeError => e raise_error(e) end end nil end private def adapter_initialize case @opts[:conn_string] when /Microsoft\.(Jet|ACE)\.OLEDB/io require_relative 'ado/access' extend Sequel::ADO::Access::DatabaseMethods self.dataset_class = ADO::Access::Dataset else @opts[:driver] ||= 'SQL Server' case @opts[:driver] when 'SQL Server' require_relative 'ado/mssql' extend Sequel::ADO::MSSQL::DatabaseMethods self.dataset_class = ADO::MSSQL::Dataset set_mssql_unicode_strings end end @conversion_procs = CONVERSION_PROCS.dup @conversion_procs[AdDBTimeStamp] = method(:adb_timestamp_to_application_timestamp) super end def adb_timestamp_to_application_timestamp(v) # This hard codes a timestamp_precision of 6 when converting. # That is the default timestamp_precision, but the ado/mssql adapter uses a timestamp_precision # of 3. However, timestamps returned by ado/mssql have nsec values that end up rounding to # the same value as if a timestamp_precision of 3 was hard coded (either xxx999yzz, where y is # 5-9 or xxx000yzz where y is 0-4). # # ADO subadapters should override this if they would like a different timestamp precision and # this code does not work for them (for example, if they provide full nsec precision). # # Note that fractional second handling for WIN32OLE objects is not correct on ruby <2.2 to_application_timestamp([v.year, v.month, v.day, v.hour, v.min, v.sec, (v.nsec/1000.0).round * 1000]) end def dataset_class_default Dataset end # The ADO adapter's default provider doesn't support transactions, since it # creates a new native connection for each query. So Sequel only attempts # to use transactions if an explicit :provider is given. def begin_transaction(conn, opts=OPTS) super if @opts[:provider] end def commit_transaction(conn, opts=OPTS) super if @opts[:provider] end def database_error_classes [::WIN32OLERuntimeError] end def disconnect_error?(e, opts) super || (e.is_a?(::WIN32OLERuntimeError) && e.message =~ /Communication link failure/) end def rollback_transaction(conn, opts=OPTS) super if @opts[:provider] end end class Dataset < Sequel::Dataset def fetch_rows(sql) execute(sql) do |recordset| cols = [] conversion_procs = db.conversion_procs recordset.Fields.each do |field| cols << [output_identifier(field.Name), conversion_procs[field.Type]] end self.columns = cols.map(&:first) return if recordset.EOF max = cols.length recordset.GetRows.transpose.each do |field_values| h = {} i = -1 while (i += 1) < max name, cp = cols[i] h[name] = if (v = field_values[i]) && cp cp.call(v) else v end end yield h end end end # ADO can return inaccurate row counts for delete and update statements, depending on the provider. def provides_accurate_rows_matched?
false end end end end sequel-5.63.0/lib/sequel/adapters/ado/000077500000000000000000000000001434214120600175125ustar00rootroot00000000000000sequel-5.63.0/lib/sequel/adapters/ado/access.rb000066400000000000000000000251711434214120600213060ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../shared/access' require_relative '../utils/split_alter_table' module Sequel module ADO # Database and Dataset instance methods for Access specific # support via ADO. module Access class AdoSchema QUERY_TYPE = { :columns => 4, :indexes => 12, :tables => 20, :views => 23, :foreign_keys => 27 }.freeze attr_reader :type, :criteria def initialize(type, crit) @type = QUERY_TYPE[type] @criteria = Array(crit) end class Column DATA_TYPE = { 2 => "SMALLINT", 3 => "INTEGER", 4 => "REAL", 5 => "DOUBLE", 6 => "MONEY", 7 => "DATETIME", 11 => "BIT", 14 => "DECIMAL", 16 => "TINYINT", 17 => "BYTE", 72 => "GUID", 128 => "BINARY", 130 => "TEXT", 131 => "DECIMAL", 201 => "TEXT", 205 => "IMAGE" }.freeze DATA_TYPE.each_value(&:freeze) def initialize(row) @row = row end def [](col) @row[col] end def allow_null self["IS_NULLABLE"] end def default self["COLUMN_DEFAULT"] end def db_type t = DATA_TYPE[self["DATA_TYPE"]] if t == "DECIMAL" && precision t + "(#{precision.to_i},#{(scale || 0).to_i})" elsif t == "TEXT" && maximum_length && maximum_length > 0 t + "(#{maximum_length.to_i})" else t end end def precision self["NUMERIC_PRECISION"] end def scale self["NUMERIC_SCALE"] end def maximum_length self["CHARACTER_MAXIMUM_LENGTH"] end end end module DatabaseMethods include Sequel::Access::DatabaseMethods include Sequel::Database::SplitAlterTable # Remove cached schema after altering a table, since otherwise it can be cached # incorrectly in the rename column case. def alter_table(name, *) super remove_cached_schema(name) nil end # Access doesn't let you disconnect if inside a transaction, so # try rolling back an existing transaction first. 
def disconnect_connection(conn) conn.RollbackTrans rescue nil super end def execute_insert(sql, opts=OPTS) synchronize(opts[:server]) do |conn| begin log_connection_yield(sql, conn){conn.Execute(sql)} last_insert_sql = "SELECT @@IDENTITY" res = log_connection_yield(last_insert_sql, conn){conn.Execute(last_insert_sql)} res.GetRows.transpose.each{|r| return r.shift} rescue ::WIN32OLERuntimeError => e raise_error(e) end end nil end def tables(opts=OPTS) m = output_identifier_meth ado_schema_tables.map {|tbl| m.call(tbl['TABLE_NAME'])} end def views(opts=OPTS) m = output_identifier_meth ado_schema_views.map {|tbl| m.call(tbl['TABLE_NAME'])} end # OpenSchema returns compound indexes as multiple rows def indexes(table_name,opts=OPTS) m = output_identifier_meth idxs = ado_schema_indexes(table_name).inject({}) do |memo, idx| unless idx["PRIMARY_KEY"] index = memo[m.call(idx["INDEX_NAME"])] ||= { :columns=>[], :unique=>idx["UNIQUE"] } index[:columns] << m.call(idx["COLUMN_NAME"]) end memo end idxs end # OpenSchema returns compound foreign key relationships as multiple rows def foreign_key_list(table, opts=OPTS) m = output_identifier_meth fks = ado_schema_foreign_keys(table).inject({}) do |memo, fk| name = m.call(fk['FK_NAME']) specs = memo[name] ||= { :columns => [], :table => m.call(fk['PK_TABLE_NAME']), :key => [], :deferrable => fk['DEFERRABILITY'], :name => name, :on_delete => fk['DELETE_RULE'], :on_update => fk['UPDATE_RULE'] } specs[:columns] << m.call(fk['FK_COLUMN_NAME']) specs[:key] << m.call(fk['PK_COLUMN_NAME']) memo end fks.values end private # Emulate rename_column by adding the column, copying data from the old # column, and dropping the old column. def alter_table_sql(table, op) case op[:op] when :rename_column unless sch = op[:schema] raise(Error, "can't find existing schema entry for #{op[:name]}") unless sch = op[:schema] || schema(table).find{|c| c.first == op[:name]} sch = sch.last end [ alter_table_sql(table, :op=>:add_column, :name=>op[:new_name], :default=>sch[:ruby_default], :type=>sch[:db_type], :null=>sch[:allow_null]), from(table).update_sql(op[:new_name]=>op[:name]), alter_table_sql(table, :op=>:drop_column, :name=>op[:name]) ] when :set_column_null, :set_column_default raise(Error, "can't find existing schema entry for #{op[:name]}") unless sch = op[:schema] || schema(table).find{|c| c.first == op[:name]} sch = sch.last sch = if op[:op] == :set_column_null sch.merge(:allow_null=>op[:null]) else sch.merge(:ruby_default=>op[:default]) end [ alter_table_sql(table, :op=>:rename_column, :name=>op[:name], :new_name=>:sequel_access_backup_column, :schema=>sch), alter_table_sql(table, :op=>:rename_column, :new_name=>op[:name], :name=>:sequel_access_backup_column, :schema=>sch) ] else super end end def begin_transaction(conn, opts=OPTS) log_connection_yield('Transaction.begin', conn){conn.BeginTrans} end def commit_transaction(conn, opts=OPTS) log_connection_yield('Transaction.commit', conn){conn.CommitTrans} end def rollback_transaction(conn, opts=OPTS) log_connection_yield('Transaction.rollback', conn){conn.RollbackTrans} end def schema_column_type(db_type) case db_type.downcase when 'bit' :boolean when 'byte', 'guid' :integer when 'image' :blob else super end end def schema_parse_table(table_name, opts) m = output_identifier_meth(opts[:dataset]) m2 = input_identifier_meth(opts[:dataset]) tn = m2.call(table_name.to_s) idxs = ado_schema_indexes(tn) ado_schema_columns(tn).map {|row| specs = { :allow_null => row.allow_null, :db_type => row.db_type, :default => row.default, 
:primary_key => !!idxs.find {|idx| idx["COLUMN_NAME"] == row["COLUMN_NAME"] && idx["PRIMARY_KEY"] }, :type => if row.db_type =~ /decimal/i && row.scale == 0 :integer else schema_column_type(row.db_type) end, :ado_type => row["DATA_TYPE"] } specs[:default] = nil if blank_object?(specs[:default]) specs[:allow_null] = specs[:allow_null] && !specs[:primary_key] [ m.call(row["COLUMN_NAME"]), specs ] } end def ado_schema_tables rows=[] fetch_ado_schema(:tables, [nil,nil,nil,'TABLE']) do |row| rows << row end rows end def ado_schema_views rows=[] fetch_ado_schema(:views, [nil,nil,nil]) do |row| rows << row end rows end def ado_schema_indexes(table_name) rows=[] fetch_ado_schema(:indexes, [nil,nil,nil,nil,table_name.to_s]) do |row| rows << row end rows end def ado_schema_columns(table_name) rows=[] fetch_ado_schema(:columns, [nil,nil,table_name.to_s,nil]) do |row| rows << AdoSchema::Column.new(row) end rows.sort!{|a,b| a["ORDINAL_POSITION"] <=> b["ORDINAL_POSITION"]} end def ado_schema_foreign_keys(table_name) rows=[] fetch_ado_schema(:foreign_keys, [nil,nil,nil,nil,nil,table_name.to_s]) do |row| rows << row end rows.sort!{|a,b| a["ORDINAL"] <=> b["ORDINAL"]} end def fetch_ado_schema(type, criteria=[]) execute_open_ado_schema(type, criteria) do |s| cols = [] s.Fields.each{|f| cols << f.Name} s.GetRows.transpose.each do |r| row = {} cols.each{|c| row[c] = r.shift} yield row end unless s.eof end end # This is like execute() in that it yields an ADO RecordSet, except # instead of an SQL interface there's this OpenSchema call # cf. http://msdn.microsoft.com/en-us/library/ee275721(v=bts.10) def execute_open_ado_schema(type, criteria=[]) ado_schema = AdoSchema.new(type, criteria) synchronize(opts[:server]) do |conn| begin r = log_connection_yield("OpenSchema #{type.inspect}, #{criteria.inspect}", conn) { if ado_schema.criteria.empty? conn.OpenSchema(ado_schema.type) else conn.OpenSchema(ado_schema.type, ado_schema.criteria) end } yield(r) if defined?(yield) rescue ::WIN32OLERuntimeError => e raise_error(e) end end nil end end class Dataset < ADO::Dataset include Sequel::Access::DatasetMethods end end end end sequel-5.63.0/lib/sequel/adapters/ado/mssql.rb000066400000000000000000000037361434214120600212070ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../shared/mssql' module Sequel module ADO module MSSQL module DatabaseMethods include Sequel::MSSQL::DatabaseMethods def execute_dui(sql, opts=OPTS) return super unless @opts[:provider] synchronize(opts[:server]) do |conn| begin sql = "SET NOCOUNT ON; #{sql}; SELECT @@ROWCOUNT" rst = log_connection_yield(sql, conn){conn.Execute(sql)} rst.GetRows[0][0] rescue ::WIN32OLERuntimeError => e raise_error(e) end end end private # The ADO adapter's default provider doesn't support transactions, since it # creates a new native connection for each query. So Sequel only attempts # to use transactions if an explicit :provider is given. def begin_transaction(conn, opts=OPTS) super if @opts[:provider] end def commit_transaction(conn, opts=OPTS) super if @opts[:provider] end def rollback_transaction(conn, opts=OPTS) super if @opts[:provider] end end class Dataset < ADO::Dataset include Sequel::MSSQL::DatasetMethods # Use a nasty hack of multiple SQL statements in the same call and # having the last one return the most recently inserted id. This # is necessary as ADO's default :provider uses a separate native # connection for each query. 
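      # A sketch of the SQL this roughly generates for a simple insert (the
      # table and column names are hypothetical):
      #
      #   DB[:albums].insert(name: 'RF')
      #   # SET NOCOUNT ON; INSERT INTO albums (name) VALUES ('RF');
      #   # SELECT CAST(SCOPE_IDENTITY() AS INTEGER)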
def insert(*values) return super if (@opts[:sql] && !@opts[:prepared_sql]) || @opts[:returning] with_sql("SET NOCOUNT ON; #{insert_sql(*values)}; SELECT CAST(SCOPE_IDENTITY() AS INTEGER)").single_value end # If you use a better :provider option for the database, you can get an # accurate number of rows matched. def provides_accurate_rows_matched? !!db.opts[:provider] end end end end end sequel-5.63.0/lib/sequel/adapters/amalgalite.rb000066400000000000000000000130501434214120600213730ustar00rootroot00000000000000# frozen-string-literal: true require 'amalgalite' require_relative 'shared/sqlite' module Sequel module Amalgalite # Type conversion map class for Sequel's use of Amalgalite class SequelTypeMap < ::Amalgalite::TypeMaps::DefaultMap methods_handling_sql_types.delete('string') methods_handling_sql_types.merge!( 'datetime' => %w'datetime timestamp', 'time' => %w'time', 'float' => ['float', 'double', 'real', 'double precision'], 'decimal' => %w'numeric decimal money' ) # Store the related database object, in order to be able to correctly # handle the database timezone. def initialize(db) @db = db end # Return blobs as instances of Sequel::SQL::Blob instead of # Amalgalite::Blob def blob(s) SQL::Blob.new(s) end # Return numeric/decimal types as instances of BigDecimal # instead of Float def decimal(s) BigDecimal(s) end # Return datetime types as instances of Sequel.datetime_class def datetime(s) @db.to_application_timestamp(s) end def time(s) Sequel.string_to_time(s) end # Don't raise an error if the value is a string and the declared # type doesn't match a known type, just return the value. def result_value_of(declared_type, value) if value.is_a?(::Amalgalite::Blob) SQL::Blob.new(value.to_s) elsif value.is_a?(String) && declared_type (meth = self.class.sql_to_method(declared_type.downcase)) ? public_send(meth, value) : value else super end end end class Database < Sequel::Database include ::Sequel::SQLite::DatabaseMethods set_adapter_scheme :amalgalite # Mimic the file:// uri, by having 2 preceding slashes specify a relative # path, and 3 preceding slashes specify an absolute path. def self.uri_to_options(uri) # :nodoc: { :database => (uri.host.nil? && uri.path == '/') ? nil : "#{uri.host}#{uri.path}" } end private_class_method :uri_to_options # Connect to the database. Since SQLite is a file-based database, # the only options available are :database (to specify the database # name), and :timeout, to specify how long to wait for the database to # be available if it is locked, given in milliseconds (default is 5000).
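    # A usage sketch (the database path is hypothetical):
    #
    #   DB = Sequel.connect(adapter: 'amalgalite', database: '/tmp/test.db',
    #                       timeout: 10000)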
def connect(server) opts = server_opts(server) opts[:database] = ':memory:' if blank_object?(opts[:database]) db = ::Amalgalite::Database.new(opts[:database]) db.busy_handler(::Amalgalite::BusyTimeout.new(opts.fetch(:timeout, 5000)/50, 50)) db.type_map = SequelTypeMap.new(self) connection_pragmas.each{|s| log_connection_yield(s, db){db.execute_batch(s)}} db end def database_type :sqlite end def execute_ddl(sql, opts=OPTS) _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.execute_batch(sql)}} nil end def execute_dui(sql, opts=OPTS) _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.execute_batch(sql)}; conn.row_changes} end def execute_insert(sql, opts=OPTS) _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.execute_batch(sql)}; conn.last_insert_rowid} end def execute(sql, opts=OPTS) _execute(sql, opts) do |conn| begin yield(stmt = log_connection_yield(sql, conn){conn.prepare(sql)}) ensure stmt.close if stmt end end end # Run the given SQL with the given arguments and return the first value of the first row. def single_value(sql, opts=OPTS) _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.first_value_from(sql)}} end private # Yield an available connection. Rescue # any Amalgalite::Errors and turn them into DatabaseErrors. def _execute(sql, opts) synchronize(opts[:server]){|conn| yield conn} rescue ::Amalgalite::Error, ::Amalgalite::SQLite3::Error => e raise_error(e) end # The Amagalite adapter does not need the pool to convert exceptions. # Also, force the max connections to 1 if a memory database is being # used, as otherwise each connection gets a separate database. def connection_pool_default_options o = super.dup # Default to only a single connection if a memory database is used, # because otherwise each connection will get a separate database o[:max_connections] = 1 if @opts[:database] == ':memory:' || blank_object?(@opts[:database]) o end def dataset_class_default Dataset end def database_error_classes [::Amalgalite::Error, ::Amalgalite::SQLite3::Error] end end class Dataset < Sequel::Dataset include ::Sequel::SQLite::DatasetMethods def fetch_rows(sql) execute(sql) do |stmt| self.columns = cols = stmt.result_fields.map{|c| output_identifier(c)} col_count = cols.size stmt.each do |result| row = {} col_count.times{|i| row[cols[i]] = result[i]} yield row end end end private # Quote the string using the connection instance method. def literal_string_append(sql, v) db.synchronize(@opts[:server]){|c| sql << c.quote(v)} end end end end sequel-5.63.0/lib/sequel/adapters/ibmdb.rb000066400000000000000000000313151434214120600203540ustar00rootroot00000000000000# frozen-string-literal: true require 'ibm_db' require_relative 'shared/db2' module Sequel module IBMDB tt = Class.new do def boolean(s) !s.to_i.zero? end def int(s) s.to_i end end.new # Hash holding type translation methods, used by Dataset#fetch_rows. DB2_TYPES = { :boolean => tt.method(:boolean), :int => tt.method(:int), :blob => ::Sequel::SQL::Blob.method(:new), :time => ::Sequel.method(:string_to_time), :date => ::Sequel.method(:string_to_date) }.freeze # Wraps an underlying connection to DB2 using IBM_DB, to provide a more # rubyish API. class Connection # A hash with prepared statement name symbol keys, where each value is # a two element array with an sql string and cached Statement value. attr_reader :prepared_statements # Error class for exceptions raised by the connection. 
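      # A rescue sketch (the table and SQL are hypothetical):
      #
      #   begin
      #     connection.execute("INSERT INTO t VALUES (1)")
      #   rescue Sequel::IBMDB::Connection::Error => e
      #     e.sqlstate # the SQLSTATE code reported by IBM_DB
      #   end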
class Error < StandardError attr_reader :sqlstate def initialize(message, sqlstate) @sqlstate = sqlstate super(message) end end # Create the underlying IBM_DB connection. def initialize(connection_param) @conn = if connection_param.class == String IBM_DB.connect(connection_param, '', '') else # connect using catalog IBM_DB.connect(*connection_param) end self.autocommit = true @prepared_statements = {} end # Check whether the connection is in autocommit state or not. def autocommit IBM_DB.autocommit(@conn) == 1 end # Turn autocommit on or off for the connection. def autocommit=(value) IBM_DB.autocommit(@conn, value ? IBM_DB::SQL_AUTOCOMMIT_ON : IBM_DB::SQL_AUTOCOMMIT_OFF) end # Close the connection, disconnecting from DB2. def close IBM_DB.close(@conn) end # Commit the currently outstanding transaction on this connection. def commit IBM_DB.commit(@conn) end # Return the related error message for the connection. def error_msg IBM_DB.getErrormsg(@conn, IBM_DB::DB_CONN) end # Return the related error message for the connection. def error_sqlstate IBM_DB.getErrorstate(@conn, IBM_DB::DB_CONN) end # Execute the given SQL on the database, and return a Statement instance # holding the results. def execute(sql) stmt = IBM_DB.exec(@conn, sql) raise Error.new(error_msg, error_sqlstate) unless stmt Statement.new(stmt) end # Execute the related prepared statement on the database with the given # arguments. def execute_prepared(ps_name, *values) stmt = @prepared_statements[ps_name].last res = stmt.execute(*values) unless res raise Error.new("Error executing statement #{ps_name}: #{error_msg}", error_sqlstate) end stmt end # Prepare a statement with the given +sql+ on the database, and # cache the prepared statement value by name. def prepare(sql, ps_name) if stmt = IBM_DB.prepare(@conn, sql) ps_name = ps_name.to_sym stmt = Statement.new(stmt) @prepared_statements[ps_name] = [sql, stmt] else err = error_msg err = "Error preparing #{ps_name} with SQL: #{sql}" if error_msg.nil? || error_msg.empty? raise Error.new(err, error_sqlstate) end end # Rollback the currently outstanding transaction on this connection. def rollback IBM_DB.rollback(@conn) end end # Wraps results returned by queries on IBM_DB. class Statement # Hold the given statement. def initialize(stmt) @stmt = stmt end # Return the number of rows affected. def affected IBM_DB.num_rows(@stmt) end # If this statement is a prepared statement, execute it on the database # with the given values. def execute(*values) IBM_DB.execute(@stmt, values) end # Return the results of a query as an array of values. def fetch_array IBM_DB.fetch_array(@stmt) if @stmt end # Return the field name at the given column in the result set. def field_name(ind) IBM_DB.field_name(@stmt, ind) end # Return the field type for the given field name in the result set. def field_type(key) IBM_DB.field_type(@stmt, key) end # Return the field precision for the given field name in the result set. def field_precision(key) IBM_DB.field_precision(@stmt, key) end # Free the memory related to this statement. def free IBM_DB.free_stmt(@stmt) end # Free the memory related to this result set, only useful for prepared # statements which have a different result set on every call. def free_result IBM_DB.free_result(@stmt) end # Return the number of fields in the result set. 
def num_fields IBM_DB.num_fields(@stmt) end end class Database < Sequel::Database include Sequel::DB2::DatabaseMethods set_adapter_scheme :ibmdb # Hash of connection procs for converting attr_reader :conversion_procs # Whether to convert smallint values to bool for this Database instance attr_accessor :convert_smallint_to_bool # Create a new connection object for the given server. def connect(server) opts = server_opts(server) connection_params = if opts[:host].nil? && opts[:port].nil? && opts[:database] # use a cataloged connection opts.values_at(:database, :user, :password) else # use uncataloged connection so that host and port can be supported 'Driver={IBM DB2 ODBC DRIVER};' \ "Database=#{opts[:database]};" \ "Hostname=#{opts[:host]};" \ "Port=#{opts[:port] || 50000};" \ 'Protocol=TCPIP;' \ "Uid=#{opts[:user]};" \ "Pwd=#{opts[:password]};" \ end Connection.new(connection_params) end def execute(sql, opts=OPTS, &block) if sql.is_a?(Symbol) execute_prepared_statement(sql, opts, &block) else synchronize(opts[:server]){|c| _execute(c, sql, opts, &block)} end rescue Connection::Error => e raise_error(e) end def execute_insert(sql, opts=OPTS) synchronize(opts[:server]) do |c| if sql.is_a?(Symbol) execute_prepared_statement(sql, opts) else _execute(c, sql, opts) end _execute(c, "SELECT IDENTITY_VAL_LOCAL() FROM SYSIBM.SYSDUMMY1", opts){|stmt| i = stmt.fetch_array.first.to_i; i} end rescue Connection::Error => e raise_error(e) end # Execute a prepared statement named by name on the database. def execute_prepared_statement(ps_name, opts) args = opts[:arguments] ps = prepared_statement(ps_name) sql = ps.prepared_sql synchronize(opts[:server]) do |conn| unless conn.prepared_statements.fetch(ps_name, []).first == sql log_connection_yield("PREPARE #{ps_name}: #{sql}", conn){conn.prepare(sql, ps_name)} end args = args.map{|v| v.nil? ? nil : prepared_statement_arg(v)} log_sql = "EXECUTE #{ps_name}" if ps.log_sql log_sql += " (" log_sql << sql log_sql << ")" end begin stmt = log_connection_yield(log_sql, conn, args){conn.execute_prepared(ps_name, *args)} if defined?(yield) yield(stmt) else stmt.affected end ensure stmt.free_result if stmt end end end def freeze @conversion_procs.freeze super end private # Execute the given SQL on the database, yielding the related statement if a block # is given or returning the number of affected rows if not, and ensuring the statement is freed. def _execute(conn, sql, opts) stmt = log_connection_yield(sql, conn){conn.execute(sql)} if defined?(yield) yield(stmt) else stmt.affected end ensure stmt.free if stmt end def adapter_initialize @convert_smallint_to_bool = typecast_value_boolean(opts.fetch(:convert_smallint_to_bool, true)) @conversion_procs = DB2_TYPES.dup @conversion_procs[:timestamp] = method(:to_application_timestamp) end # IBM_DB uses an autocommit setting instead of sending SQL queries. # So starting a transaction just turns autocommit off. def begin_transaction(conn, opts=OPTS) log_connection_yield('Transaction.begin', conn){conn.autocommit = false} set_transaction_isolation(conn, opts) end # This commits transaction in progress on the # connection and sets autocommit back on. 
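      # Roughly, a Sequel transaction on this adapter behaves like the
      # following sketch (:t is a hypothetical table):
      #
      #   DB.transaction do   # begin_transaction: autocommit turned off
      #     DB[:t].insert(1)  # runs inside the transaction
      #   end                 # commit here, then remove_transaction turns autocommit back on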
def commit_transaction(conn, opts=OPTS) log_connection_yield('Transaction.commit', conn){conn.commit} end def database_error_classes [Connection::Error] end def database_exception_sqlstate(exception, opts) exception.sqlstate end def dataset_class_default Dataset end # Don't convert smallint to boolean for the metadata # dataset, since the DB2 metadata does not use # boolean columns, and some smallint columns are # accidently treated as booleans. def _metadata_dataset super.with_convert_smallint_to_bool(false) end # Format Numeric, Date, and Time types specially for use # as IBM_DB prepared statements argument vlaues. def prepared_statement_arg(v) case v when Numeric v.to_s when Date, Time literal(v).gsub("'", '') else v end end # Set autocommit back on def remove_transaction(conn, committed) conn.autocommit = true ensure super end # This rolls back the transaction in progress on the # connection and sets autocommit back on. def rollback_transaction(conn, opts=OPTS) log_connection_yield('Transaction.rollback', conn){conn.rollback} end # Convert smallint type to boolean if convert_smallint_to_bool is true def schema_column_type(db_type) if convert_smallint_to_bool && db_type =~ /smallint/i :boolean else super end end end class Dataset < Sequel::Dataset include Sequel::DB2::DatasetMethods module CallableStatementMethods # Extend given dataset with this module so subselects inside subselects in # prepared statements work. def subselect_sql_append(sql, ds) ps = ds.to_prepared_statement(:select). clone(:append_sql=>sql, :prepared_args=>prepared_args). with_extend(CallableStatementMethods) ps = ps.bind(@opts[:bind_vars]) if @opts[:bind_vars] ps.prepared_sql end end PreparedStatementMethods = prepared_statements_module(:prepare_bind, Sequel::Dataset::UnnumberedArgumentMapper) # Whether to convert smallint to boolean arguments for this dataset. # Defaults to the Database setting. def convert_smallint_to_bool opts.has_key?(:convert_smallint_to_bool) ? opts[:convert_smallint_to_bool] : db.convert_smallint_to_bool end # Return a cloned dataset with the convert_smallint_to_bool option set. def with_convert_smallint_to_bool(v) clone(:convert_smallint_to_bool=>v) end def fetch_rows(sql) execute(sql) do |stmt| columns = [] convert = convert_smallint_to_bool cps = db.conversion_procs stmt.num_fields.times do |i| k = stmt.field_name i key = output_identifier(k) type = stmt.field_type(i).downcase.to_sym # decide if it is a smallint from precision type = :boolean if type == :int && convert && stmt.field_precision(i) < 8 type = :blob if type == :clob && db.use_clob_as_blob columns << [key, cps[type]] end cols = columns.map{|c| c[0]} self.columns = cols while res = stmt.fetch_array row = {} res.zip(columns).each do |v, (k, pr)| row[k] = ((pr ? pr.call(v) : v) if v) end yield row end end self end private def bound_variable_modules [CallableStatementMethods] end def prepared_statement_modules [PreparedStatementMethods] end end end end sequel-5.63.0/lib/sequel/adapters/jdbc.rb000066400000000000000000000712371434214120600202100ustar00rootroot00000000000000# frozen-string-literal: true require 'java' require_relative 'utils/stored_procedures' module Sequel module JDBC # Make it accesing the java.sql hierarchy more ruby friendly. module JavaSQL include_package 'java.sql' end # Used to identify a jndi connection and to extract the jndi # resource name. JNDI_URI_REGEXP = /\Ajdbc:jndi:(.+)/ # Contains procs keyed on subadapter type that extend the # given database object so it supports the correct database type. 
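    # A registration sketch, following the pattern the bundled subadapters
    # use (:mydb, the module, and the driver class are hypothetical):
    #
    #   Sequel.synchronize do
    #     DATABASE_SETUP[:mydb] = proc do |db|
    #       db.extend(Sequel::JDBC::MyDB::DatabaseMethods)
    #       com.example.jdbc.Driver # the proc returns the JDBC driver class
    #     end
    #   end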
DATABASE_SETUP = {} # Create custom NativeException alias for nicer access, and also so that # JRuby 9.2+ so it doesn't use the deprecated ::NativeException NativeException = java.lang.Exception # Default database error classes DATABASE_ERROR_CLASSES = [NativeException] if JRUBY_VERSION < '9.2' # On JRuby <9.2, still include ::NativeException, as it is still needed in some cases DATABASE_ERROR_CLASSES << ::NativeException end DATABASE_ERROR_CLASSES.freeze # Allow loading the necessary JDBC support via a gem. def self.load_gem(name) require "jdbc/#{name.to_s.downcase}" rescue LoadError # jdbc gem not used, hopefully the user has the .jar in their CLASSPATH else if defined?(::Jdbc) && ( ::Jdbc.const_defined?(name) rescue nil ) jdbc_module = ::Jdbc.const_get(name) # e.g. Jdbc::SQLite3 jdbc_module.load_driver if jdbc_module.respond_to?(:load_driver) end end # Attempt to load the JDBC driver class, which should be specified as a string # containing the driver class name (which JRuby should autoload). # Note that the string is evaled, so this method is not safe to call with # untrusted input. # Raise a Sequel::AdapterNotFound if evaluating the class name raises a NameError. def self.load_driver(drv, gem=nil) load_gem(gem) if gem if drv.is_a?(String) eval drv else *try, last = drv try.each do |try_drv| begin return eval(try_drv) rescue NameError end end eval last end rescue NameError raise Sequel::AdapterNotFound, "#{drv} not loaded#{", try installing jdbc-#{gem.to_s.downcase} gem" if gem}" end class TypeConvertor CONVERTORS = convertors = {} %w'Boolean Float Double Int Long Short'.each do |meth| x = x = convertors[meth.to_sym] = Object.new class_eval("def x.call(r, i) v = r.get#{meth}(i); v unless r.wasNull end", __FILE__, __LINE__) end %w'Object Array String Time Date Timestamp BigDecimal Blob Bytes Clob'.each do |meth| x = x = convertors[meth.to_sym] = Object.new class_eval("def x.call(r, i) r.get#{meth}(i) end", __FILE__, __LINE__) end x = convertors[:RubyTime] = Object.new def x.call(r, i) if v = r.getTime(i) Sequel.string_to_time("#{v.to_string}.#{sprintf('%03i', v.getTime.divmod(1000).last)}") end end x = convertors[:RubyDate] = Object.new def x.call(r, i) if v = r.getDate(i) Date.civil(v.getYear + 1900, v.getMonth + 1, v.getDate) end end x = convertors[:RubyTimestamp] = Object.new def x.call(r, i) if v = r.getTimestamp(i) Sequel.database_to_application_timestamp([v.getYear + 1900, v.getMonth + 1, v.getDate, v.getHours, v.getMinutes, v.getSeconds, v.getNanos]) end end x = convertors[:RubyBigDecimal] = Object.new def x.call(r, i) if v = r.getBigDecimal(i) ::Kernel::BigDecimal(v.to_string) end end x = convertors[:RubyBlob] = Object.new def x.call(r, i) if v = r.getBytes(i) Sequel::SQL::Blob.new(String.from_java_bytes(v)) end end x = convertors[:RubyClob] = Object.new def x.call(r, i) if v = r.getClob(i) v.getSubString(1, v.length) end end x = convertors[:RubyArray] = Object.new def x.call(r, i) if v = r.getArray(i) v.array.to_ary end end MAP = Hash.new(convertors[:Object]) types = Java::JavaSQL::Types { :BOOLEAN => :Boolean, :CHAR => :String, :DOUBLE => :Double, :FLOAT => :Double, :INTEGER => :Int, :LONGNVARCHAR => :String, :LONGVARCHAR => :String, :NCHAR => :String, :REAL => :Float, :SMALLINT => :Short, :TINYINT => :Short, :VARCHAR => :String, }.each do |type, meth| MAP[types.const_get(type)] = convertors[meth] end BASIC_MAP = MAP.dup { :ARRAY => :Array, :BINARY => :Blob, :BLOB => :Blob, :CLOB => :Clob, :DATE => :Date, :DECIMAL => :BigDecimal, :LONGVARBINARY => :Blob, :NCLOB => :Clob, 
:NUMERIC => :BigDecimal, :TIME => :Time, :TIMESTAMP => :Timestamp, :VARBINARY => :Blob, }.each do |type, meth| BASIC_MAP[types.const_get(type)] = convertors[meth] MAP[types.const_get(type)] = convertors[:"Ruby#{meth}"] end MAP.freeze BASIC_MAP.freeze end class Database < Sequel::Database set_adapter_scheme :jdbc # The Java database driver we are using (should be a Java class) attr_reader :driver # Whether to convert some Java types to ruby types when retrieving rows. # True by default, can be set to false to roughly double performance when # fetching rows. attr_accessor :convert_types # The fetch size to use for JDBC Statement objects created by this database. # By default, this is nil so a fetch size is not set explicitly. attr_accessor :fetch_size # Map of JDBC type ids to callable objects that return appropriate ruby values. attr_reader :type_convertor_map # Map of JDBC type ids to callable objects that return appropriate ruby or java values. attr_reader :basic_type_convertor_map # Execute the given stored procedure with the give name. If a block is # given, the stored procedure should return rows. def call_sproc(name, opts = OPTS) args = opts[:args] || [] sql = "{call #{name}(#{args.map{'?'}.join(',')})}" synchronize(opts[:server]) do |conn| begin cps = conn.prepareCall(sql) i = 0 args.each{|arg| set_ps_arg(cps, arg, i+=1)} if defined?(yield) yield log_connection_yield(sql, conn){cps.executeQuery} else log_connection_yield(sql, conn){cps.executeUpdate} if opts[:type] == :insert last_insert_id(conn, opts) end end rescue *DATABASE_ERROR_CLASSES => e raise_error(e) ensure cps.close if cps end end end # Connect to the database using JavaSQL::DriverManager.getConnection, and falling back # to driver.new.connect if the driver is known. def connect(server) opts = server_opts(server) conn = if jndi? get_connection_from_jndi else args = [uri(opts)] args.concat([opts[:user], opts[:password]]) if opts[:user] && opts[:password] begin JavaSQL::DriverManager.setLoginTimeout(opts[:login_timeout]) if opts[:login_timeout] raise StandardError, "skipping regular connection" if opts[:jdbc_properties] JavaSQL::DriverManager.getConnection(*args) rescue StandardError, *DATABASE_ERROR_CLASSES => e raise e unless driver # If the DriverManager can't get the connection - use the connect # method of the driver. (This happens under Tomcat for instance) props = java.util.Properties.new if opts && opts[:user] && opts[:password] props.setProperty("user", opts[:user]) props.setProperty("password", opts[:password]) end opts[:jdbc_properties].each{|k,v| props.setProperty(k.to_s, v)} if opts[:jdbc_properties] begin c = driver.new.connect(args[0], props) raise(Sequel::DatabaseError, 'driver.new.connect returned nil: probably bad JDBC connection string') unless c c rescue StandardError, *DATABASE_ERROR_CLASSES => e2 if e2.respond_to?(:message=) && e2.message != e.message e2.message = "#{e2.message}\n#{e.class.name}: #{e.message}" end raise e2 end end end setup_connection_with_opts(conn, opts) end # Close given adapter connections, and delete any related prepared statements. 
def disconnect_connection(c) @connection_prepared_statements_mutex.synchronize{@connection_prepared_statements.delete(c)} c.close end def execute(sql, opts=OPTS, &block) return call_sproc(sql, opts, &block) if opts[:sproc] return execute_prepared_statement(sql, opts, &block) if [Symbol, Dataset].any?{|c| sql.is_a?(c)} synchronize(opts[:server]) do |conn| statement(conn) do |stmt| if block if size = fetch_size stmt.setFetchSize(size) end yield log_connection_yield(sql, conn){stmt.executeQuery(sql)} else case opts[:type] when :ddl log_connection_yield(sql, conn){stmt.execute(sql)} when :insert log_connection_yield(sql, conn){execute_statement_insert(stmt, sql)} opts = Hash[opts] opts[:stmt] = stmt last_insert_id(conn, opts) else log_connection_yield(sql, conn){stmt.executeUpdate(sql)} end end end end end alias execute_dui execute def execute_ddl(sql, opts=OPTS) opts = Hash[opts] opts[:type] = :ddl execute(sql, opts) end def execute_insert(sql, opts=OPTS) opts = Hash[opts] opts[:type] = :insert execute(sql, opts) end def freeze @type_convertor_map.freeze @basic_type_convertor_map.freeze super end # Use the JDBC metadata to get a list of foreign keys for the table. def foreign_key_list(table, opts=OPTS) m = output_identifier_meth schema, table = metadata_schema_and_table(table, opts) foreign_keys = {} metadata(:getImportedKeys, nil, schema, table) do |r| if fk = foreign_keys[r[:fk_name]] fk[:columns] << [r[:key_seq], m.call(r[:fkcolumn_name])] fk[:key] << [r[:key_seq], m.call(r[:pkcolumn_name])] elsif r[:fk_name] foreign_keys[r[:fk_name]] = {:name=>m.call(r[:fk_name]), :columns=>[[r[:key_seq], m.call(r[:fkcolumn_name])]], :table=>m.call(r[:pktable_name]), :key=>[[r[:key_seq], m.call(r[:pkcolumn_name])]]} end end foreign_keys.values.each do |fk| [:columns, :key].each do |k| fk[k] = fk[k].sort.map{|_, v| v} end end end # Use the JDBC metadata to get the index information for the table. def indexes(table, opts=OPTS) m = output_identifier_meth schema, table = metadata_schema_and_table(table, opts) indexes = {} metadata(:getIndexInfo, nil, schema, table, false, true) do |r| next unless name = r[:column_name] next if respond_to?(:primary_key_index_re, true) and r[:index_name] =~ primary_key_index_re i = indexes[m.call(r[:index_name])] ||= {:columns=>[], :unique=>[false, 0].include?(r[:non_unique])} i[:columns] << m.call(name) end indexes end # Whether or not JNDI is being used for this connection. def jndi? !!(uri =~ JNDI_URI_REGEXP) end # All tables in this database def tables(opts=OPTS) get_tables('TABLE', opts) end # The uri for this connection. You can specify the uri # using the :uri, :url, or :database options. You don't # need to worry about this if you use Sequel.connect # with the JDBC connectrion strings. def uri(opts=OPTS) opts = @opts.merge(opts) ur = opts[:uri] || opts[:url] || opts[:database] ur =~ /^\Ajdbc:/ ? ur : "jdbc:#{ur}" end # All views in this database def views(opts=OPTS) get_tables('VIEW', opts) end private # Call the DATABASE_SETUP proc directly after initialization, # so the object always uses sub adapter specific code. Also, # raise an error immediately if the connection doesn't have a # uri, since JDBC requires one. def adapter_initialize @connection_prepared_statements = {} @connection_prepared_statements_mutex = Mutex.new @fetch_size = @opts[:fetch_size] ? 
typecast_value_integer(@opts[:fetch_size]) : default_fetch_size @convert_types = typecast_value_boolean(@opts.fetch(:convert_types, true)) raise(Error, "No connection string specified") unless uri resolved_uri = jndi? ? get_uri_from_jndi : uri setup_type_convertor_map_early @driver = if (match = /\Ajdbc:([^:]+)/.match(resolved_uri)) && (prok = Sequel::Database.load_adapter(match[1].to_sym, :map=>DATABASE_SETUP, :subdir=>'jdbc')) prok.call(self) else @opts[:driver] end setup_type_convertor_map end # Yield the native prepared statements hash for the given connection # to the block in a thread-safe manner. def cps_sync(conn, &block) @connection_prepared_statements_mutex.synchronize{yield(@connection_prepared_statements[conn] ||= {})} end def database_error_classes DATABASE_ERROR_CLASSES end def database_exception_sqlstate(exception, opts) if database_exception_use_sqlstates? while exception.respond_to?(:cause) exception = exception.cause return exception.getSQLState if exception.respond_to?(:getSQLState) end end nil end # Whether the JDBC subadapter should use SQL states for exception handling, true by default. def database_exception_use_sqlstates? true end def dataset_class_default Dataset end # Raise a disconnect error if the SQL state of the cause of the exception indicates so. def disconnect_error?(exception, opts) cause = exception.respond_to?(:cause) ? exception.cause : exception super || (cause.respond_to?(:getSQLState) && cause.getSQLState =~ /^08/) end # Execute the prepared statement. If the provided name is a # dataset, use that as the prepared statement, otherwise use # it as a key to look it up in the prepared_statements hash. # If the connection we are using has already prepared an identical # statement, use that statement instead of creating another. # Otherwise, prepare a new statement for the connection, bind the # variables, and execute it. def execute_prepared_statement(name, opts=OPTS) args = opts[:arguments] if name.is_a?(Dataset) ps = name name = ps.prepared_statement_name else ps = prepared_statement(name) end sql = ps.prepared_sql synchronize(opts[:server]) do |conn| if name and cps = cps_sync(conn){|cpsh| cpsh[name]} and cps[0] == sql cps = cps[1] else log_connection_yield("CLOSE #{name}", conn){cps[1].close} if cps if name opts = Hash[opts] opts[:name] = name end cps = log_connection_yield("PREPARE#{" #{name}:" if name} #{sql}", conn){prepare_jdbc_statement(conn, sql, opts)} if size = fetch_size cps.setFetchSize(size) end cps_sync(conn){|cpsh| cpsh[name] = [sql, cps]} if name end i = 0 args.each{|arg| set_ps_arg(cps, arg, i+=1)} msg = "EXECUTE#{" #{name}" if name}" if ps.log_sql msg += " (" msg << sql msg << ")" end begin if defined?(yield) yield log_connection_yield(msg, conn, args){cps.executeQuery} else case opts[:type] when :ddl log_connection_yield(msg, conn, args){cps.execute} when :insert log_connection_yield(msg, conn, args){execute_prepared_statement_insert(cps)} opts = Hash[opts] opts[:prepared] = true opts[:stmt] = cps last_insert_id(conn, opts) else log_connection_yield(msg, conn, args){cps.executeUpdate} end end rescue *DATABASE_ERROR_CLASSES => e raise_error(e) ensure cps.close unless name end end end # Execute the prepared insert statement def execute_prepared_statement_insert(stmt) stmt.executeUpdate end # Execute the insert SQL using the statement def execute_statement_insert(stmt, sql) stmt.executeUpdate(sql) end # The default fetch size to use for statements. Nil by default, so that the # default for the JDBC driver is used. 
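      # Subadapters can override this default; users can also set it per
      # Database via the :fetch_size option (the connection URL here is
      # hypothetical):
      #
      #   DB = Sequel.connect('jdbc:postgresql://host/db', fetch_size: 100)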
def default_fetch_size nil end # Gets the connection from JNDI. def get_connection_from_jndi jndi_name = JNDI_URI_REGEXP.match(uri)[1] javax.naming.InitialContext.new.lookup(jndi_name).connection end # Gets the JDBC connection uri from the JNDI resource. def get_uri_from_jndi conn = get_connection_from_jndi conn.meta_data.url ensure conn.close if conn end # Backbone of the tables and views support. def get_tables(type, opts) ts = [] m = output_identifier_meth if schema = opts[:schema] schema = schema.to_s end metadata(:getTables, nil, schema, nil, [type].to_java(:string)){|h| ts << m.call(h[:table_name])} ts end # Support Date objects used in bound variables def java_sql_date(date) java.sql.Date.new(Time.local(date.year, date.month, date.day).to_i * 1000) end # Support DateTime objects used in bound variables def java_sql_datetime(datetime) ts = java.sql.Timestamp.new(Time.local(datetime.year, datetime.month, datetime.day, datetime.hour, datetime.min, datetime.sec).to_i * 1000) ts.setNanos((datetime.sec_fraction * 1000000000).to_i) ts end # Support fractional seconds for Time objects used in bound variables def java_sql_timestamp(time) ts = java.sql.Timestamp.new(time.to_i * 1000) ts.setNanos(time.nsec) ts end def log_connection_execute(conn, sql) statement(conn){|s| log_connection_yield(sql, conn){s.execute(sql)}} end # By default, there is no support for determining the last inserted # id, so return nil. This method should be overridden in # subadapters. def last_insert_id(conn, opts) nil end # Yield the metadata for this database def metadata(*args, &block) synchronize do |c| result = c.getMetaData.public_send(*args) begin metadata_dataset.send(:process_result_set, result, &block) ensure result.close end end end # Return the schema and table suitable for use with metadata queries. def metadata_schema_and_table(table, opts) im = input_identifier_meth(opts[:dataset]) schema, table = schema_and_table(table) schema ||= opts[:schema] schema = im.call(schema) if schema table = im.call(table) [schema, table] end # Created a JDBC prepared statement on the connection with the given SQL. def prepare_jdbc_statement(conn, sql, opts) conn.prepareStatement(sql) end # Java being java, you need to specify the type of each argument # for the prepared statement, and bind it individually. This # guesses which JDBC method to use, and hopefully JRuby will convert # things properly for us. def set_ps_arg(cps, arg, i) case arg when Integer cps.setLong(i, arg) when Sequel::SQL::Blob cps.setBytes(i, arg.to_java_bytes) when String cps.setString(i, arg) when Float cps.setDouble(i, arg) when TrueClass, FalseClass cps.setBoolean(i, arg) when NilClass set_ps_arg_nil(cps, i) when DateTime cps.setTimestamp(i, java_sql_datetime(arg)) when Date cps.setDate(i, java_sql_date(arg)) when Time cps.setTimestamp(i, java_sql_timestamp(arg)) when Java::JavaSql::Timestamp cps.setTimestamp(i, arg) when Java::JavaSql::Date cps.setDate(i, arg) else cps.setObject(i, arg) end end # Use setString with a nil value by default, but this doesn't work on all subadapters. def set_ps_arg_nil(cps, i) cps.setString(i, nil) end # Return the connection. Can be overridden in subadapters for database specific setup. def setup_connection(conn) conn end # Setup the connection using the given connection options. Return the connection. Can be overridden in subadapters for database specific setup. 
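      # For example, a subadapter could apply an option at connect time with
      # a sketch like this (the option name and SQL are hypothetical):
      #
      #   def setup_connection_with_opts(conn, opts)
      #     statement(conn){|s| s.execute("SET some_setting = on")} if opts[:some_setting]
      #     super
      #   end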
def setup_connection_with_opts(conn, opts) setup_connection(conn) end def schema_column_set_db_type(schema) case schema[:type] when :string if schema[:db_type] =~ /\A(character( varying)?|n?(var)?char2?)\z/io && schema[:column_size] > 0 schema[:db_type] += "(#{schema[:column_size]})" end when :decimal if schema[:db_type] =~ /\A(decimal|numeric)\z/io && schema[:column_size] > 0 && schema[:scale] >= 0 schema[:db_type] += "(#{schema[:column_size]}, #{schema[:scale]})" end end end def schema_parse_table(table, opts=OPTS) m = output_identifier_meth(opts[:dataset]) schema, table = metadata_schema_and_table(table, opts) pks, ts = [], [] metadata(:getPrimaryKeys, nil, schema, table) do |h| next if schema_parse_table_skip?(h, schema) pks << h[:column_name] end schemas = [] metadata(:getColumns, nil, schema, table, nil) do |h| next if schema_parse_table_skip?(h, schema) s = { :type=>schema_column_type(h[:type_name]), :db_type=>h[:type_name], :default=>(h[:column_def] == '' ? nil : h[:column_def]), :allow_null=>(h[:nullable] != 0), :primary_key=>pks.include?(h[:column_name]), :column_size=>h[:column_size], :scale=>h[:decimal_digits], :remarks=>h[:remarks] } if s[:primary_key] s[:auto_increment] = h[:is_autoincrement] == "YES" end s[:max_length] = s[:column_size] if s[:type] == :string if s[:db_type] =~ /number|numeric|decimal/i && s[:scale] == 0 s[:type] = :integer end schema_column_set_db_type(s) schemas << h[:table_schem] unless schemas.include?(h[:table_schem]) ts << [m.call(h[:column_name]), s] end if schemas.length > 1 raise Error, 'Schema parsing in the jdbc adapter resulted in columns being returned for a table with the same name in multiple schemas. Please explicitly qualify your table with a schema.' end ts end # Skip tables in the INFORMATION_SCHEMA when parsing columns. def schema_parse_table_skip?(h, schema) h[:table_schem] == 'INFORMATION_SCHEMA' end # Called after loading subadapter-specific code, overridable by subadapters. def setup_type_convertor_map end # Called before loading subadapter-specific code, necessary so that subadapter initialization code # that runs queries works correctly. This cannot be overridden in subadapters. def setup_type_convertor_map_early @type_convertor_map = TypeConvertor::MAP.merge(Java::JavaSQL::Types::TIMESTAMP=>method(:timestamp_convert)) @basic_type_convertor_map = TypeConvertor::BASIC_MAP.dup end # Yield a new statement object, and ensure that it is closed before returning. def statement(conn) stmt = conn.createStatement yield stmt rescue *DATABASE_ERROR_CLASSES => e raise_error(e) ensure stmt.close if stmt end # A conversion method for timestamp columns. This is used to make sure timestamps are converted using the # correct timezone. 
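      # For example, assuming the standard Sequel timezone settings:
      #
      #   Sequel.database_timezone = :utc      # timestamps stored in UTC
      #   Sequel.application_timezone = :local # returned in local time
      #
      # the date/time parts pulled from the JDBC Timestamp below are run
      # through to_application_timestamp to apply those timezones.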
def timestamp_convert(r, i) if v = r.getTimestamp(i) to_application_timestamp([v.getYear + 1900, v.getMonth + 1, v.getDate, v.getHours, v.getMinutes, v.getSeconds, v.getNanos]) end end end class Dataset < Sequel::Dataset include StoredProcedures PreparedStatementMethods = prepared_statements_module( "sql = self; opts = Hash[opts]; opts[:arguments] = bind_arguments", Sequel::Dataset::UnnumberedArgumentMapper, %w"execute execute_dui") do private def execute_insert(sql, opts=OPTS) sql = self opts = Hash[opts] opts[:arguments] = bind_arguments opts[:type] = :insert super end end StoredProcedureMethods = prepared_statements_module( "sql = @opts[:sproc_name]; opts = Hash[opts]; opts[:args] = @opts[:sproc_args]; opts[:sproc] = true", Sequel::Dataset::StoredProcedureMethods, %w"execute execute_dui") do private def execute_insert(sql, opts=OPTS) sql = @opts[:sproc_name] opts = Hash[opts] opts[:args] = @opts[:sproc_args] opts[:sproc] = true opts[:type] = :insert super end end def fetch_rows(sql, &block) execute(sql){|result| process_result_set(result, &block)} self end # Set the fetch size on JDBC ResultSets created from the returned dataset. def with_fetch_size(size) clone(:fetch_size=>size) end # Set whether to convert Java types to ruby types in the returned dataset. def with_convert_types(v) clone(:convert_types=>v) end private # Whether we should convert Java types to ruby types for this dataset. def convert_types? ct = @opts[:convert_types] ct.nil? ? db.convert_types : ct end # Extend the dataset with the JDBC stored procedure methods. def prepare_extend_sproc(ds) ds.with_extend(StoredProcedureMethods) end # The type conversion proc to use for the given column number i, # given the type conversion map and the ResultSetMetaData. def type_convertor(map, meta, type, i) map[type] end # The basic type conversion proc to use for the given column number i, # given the type conversion map and the ResultSetMetaData. # # This is implemented as a separate method so that subclasses can # override the methods separately. def basic_type_convertor(map, meta, type, i) map[type] end def prepared_statement_modules [PreparedStatementMethods] end # Split out from fetch rows to allow processing of JDBC result sets # that don't come from issuing an SQL string. def process_result_set(result) meta = result.getMetaData if fetch_size = opts[:fetch_size] result.setFetchSize(fetch_size) end cols = [] i = 0 convert = convert_types? map = convert ? db.type_convertor_map : db.basic_type_convertor_map meta.getColumnCount.times do i += 1 cols << [output_identifier(meta.getColumnLabel(i)), i, convert ? 
type_convertor(map, meta, meta.getColumnType(i), i) : basic_type_convertor(map, meta, meta.getColumnType(i), i)] end max = i self.columns = cols.map{|c| c[0]} while result.next row = {} i = -1 while (i += 1) < max n, j, pr = cols[i] row[n] = pr.call(result, j) end yield row end ensure result.close end end end end sequel-5.63.0/lib/sequel/adapters/jdbc/000077500000000000000000000000001434214120600176515ustar00rootroot00000000000000sequel-5.63.0/lib/sequel/adapters/jdbc/db2.rb000066400000000000000000000042561434214120600206540ustar00rootroot00000000000000# frozen-string-literal: true Sequel::JDBC.load_driver('com.ibm.db2.jcc.DB2Driver') require_relative '../shared/db2' require_relative 'transactions' module Sequel module JDBC Sequel.synchronize do DATABASE_SETUP[:db2] = proc do |db| db.singleton_class.class_eval do alias jdbc_schema_parse_table schema_parse_table alias jdbc_tables tables alias jdbc_views views alias jdbc_indexes indexes include Sequel::JDBC::DB2::DatabaseMethods alias schema_parse_table jdbc_schema_parse_table alias tables jdbc_tables alias views jdbc_views alias indexes jdbc_indexes %w'schema_parse_table tables views indexes'.each do |s| remove_method(:"jdbc_#{s}") end end db.extend_datasets Sequel::DB2::DatasetMethods com.ibm.db2.jcc.DB2Driver end end module DB2 module DatabaseMethods include Sequel::DB2::DatabaseMethods include Sequel::JDBC::Transactions private def set_ps_arg(cps, arg, i) case arg when Sequel::SQL::Blob if use_clob_as_blob cps.setString(i, arg) else super end else super end end def last_insert_id(conn, opts=OPTS) statement(conn) do |stmt| sql = "SELECT IDENTITY_VAL_LOCAL() FROM SYSIBM.SYSDUMMY1" rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)} rs.next rs.getLong(1) end end # Primary key indexes appear to be named sqlNNNN on DB2 def primary_key_index_re /\Asql\d+\z/i end def setup_type_convertor_map super map = @type_convertor_map types = Java::JavaSQL::Types map[types::NCLOB] = map[types::CLOB] = method(:convert_clob) end def convert_clob(r, i) if v = r.getClob(i) v = v.getSubString(1, v.length) v = Sequel::SQL::Blob.new(v) if use_clob_as_blob v end end end end end end sequel-5.63.0/lib/sequel/adapters/jdbc/derby.rb000066400000000000000000000243001434214120600213020ustar00rootroot00000000000000# frozen-string-literal: true Sequel::JDBC.load_driver('org.apache.derby.jdbc.EmbeddedDriver', :Derby) require_relative 'transactions' require_relative '../utils/columns_limit_1' module Sequel module JDBC Sequel.synchronize do DATABASE_SETUP[:derby] = proc do |db| db.extend(Sequel::JDBC::Derby::DatabaseMethods) db.dataset_class = Sequel::JDBC::Derby::Dataset org.apache.derby.jdbc.EmbeddedDriver end end module Derby module DatabaseMethods include ::Sequel::JDBC::Transactions # Derby doesn't support casting integer to varchar, only integer to char, # and char(254) appears to have the widest support (with char(255) failing). # This does add a bunch of extra spaces at the end, but those will be trimmed # elsewhere. def cast_type_literal(type) (type == String) ? 'CHAR(254)' : super end def database_type :derby end def freeze svn_version super end # Derby uses an IDENTITY sequence for autoincrementing columns. def serial_primary_key_options {:primary_key => true, :type => Integer, :identity=>true, :start_with=>1} end # The SVN version of the database. def svn_version @svn_version ||= begin v = synchronize{|c| c.get_meta_data.get_database_product_version} v =~ /\((\d+)\)\z/ $1.to_i end end # Derby supports transactional DDL statements. 
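      # For example, DDL run inside a transaction is rolled back with it
      # (the table name is hypothetical):
      #
      #   DB.transaction(rollback: :always){DB.create_table(:t){Integer :a}}
      #   DB.table_exists?(:t) # => false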
def supports_transactional_ddl?
  true
end

private

# Derby optimizes away Sequel's default check of SELECT NULL FROM table,
# so use a SELECT * FROM table there.
def _table_exists?(ds)
  ds.first
end

def alter_table_sql(table, op)
  case op[:op]
  when :rename_column
    "RENAME COLUMN #{quote_schema_table(table)}.#{quote_identifier(op[:name])} TO #{quote_identifier(op[:new_name])}"
  when :set_column_type
    # Derby is very limited in changing a column's type, so adding a new column and then dropping the existing column is
    # the best approach, as mentioned in the Derby documentation.
    temp_name = :x_sequel_temp_column_x
    [alter_table_sql(table, op.merge(:op=>:add_column, :name=>temp_name)),
     from(table).update_sql(temp_name=>::Sequel::SQL::Cast.new(op[:name], op[:type])),
     alter_table_sql(table, op.merge(:op=>:drop_column)),
     alter_table_sql(table, op.merge(:op=>:rename_column, :name=>temp_name, :new_name=>op[:name]))]
  when :set_column_null
    "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} #{op[:null] ? 'NULL' : 'NOT NULL'}"
  else
    super
  end
end

# Derby does not allow adding primary key constraints to NULLable columns.
def can_add_primary_key_constraint_on_nullable_columns?
  false
end

# Derby doesn't allow specifying NULL for columns, only NOT NULL.
def column_definition_null_sql(sql, column)
  null = column.fetch(:null, column[:allow_null])
  sql << " NOT NULL" if null == false || (null.nil? && column[:primary_key])
end

# Add NOT LOGGED for temporary tables to improve performance.
def create_table_sql(name, generator, options)
  s = super
  s += ' NOT LOGGED' if options[:temp]
  s
end

# Insert data from the current table into the new table after
# creating the table, since it is not possible to do it in one step.
def create_table_as(name, sql, options)
  super
  from(name).insert(sql.is_a?(Dataset) ? sql : dataset.with_sql(sql))
end

# Derby currently only requires WITH NO DATA, with a separate insert
# to import data.
def create_table_as_sql(name, sql, options)
  "#{create_table_prefix_sql(name, options)} AS #{sql} WITH NO DATA"
end

# Temporary table creation on Derby uses DECLARE instead of CREATE.
def create_table_prefix_sql(name, options)
  if options[:temp]
    "DECLARE GLOBAL TEMPORARY TABLE #{quote_identifier(name)}"
  else
    super
  end
end

DATABASE_ERROR_REGEXPS = {
  /The statement was aborted because it would have caused a duplicate key value in a unique or primary key constraint or unique index/ => UniqueConstraintViolation,
  /violation of foreign key constraint/ => ForeignKeyConstraintViolation,
  /The check constraint .+ was violated/ => CheckConstraintViolation,
  /cannot accept a NULL value/ => NotNullConstraintViolation,
  /A lock could not be obtained due to a deadlock/ => SerializationFailure,
}.freeze

def database_error_regexps
  DATABASE_ERROR_REGEXPS
end

# Use IDENTITY_VAL_LOCAL() to get the last inserted id.
def last_insert_id(conn, opts=OPTS)
  statement(conn) do |stmt|
    sql = 'SELECT IDENTITY_VAL_LOCAL() FROM sysibm.sysdummy1'
    rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)}
    rs.next
    rs.getLong(1)
  end
end

# Handle nil values by using setNull with the correct parameter type.
def set_ps_arg_nil(cps, i)
  cps.setNull(i, cps.getParameterMetaData.getParameterType(i))
end

# Derby uses RENAME TABLE syntax to rename tables.
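# A minimal sketch of the SQL this generates (table names hypothetical,
# identifier quoting may vary):
#
#   DB.rename_table(:items, :old_items)
#   # RENAME TABLE "items" TO "old_items"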
def rename_table_sql(name, new_name)
  "RENAME TABLE #{quote_schema_table(name)} TO #{quote_schema_table(new_name)}"
end

# Primary key indexes appear to be named sqlNNNN on Derby
def primary_key_index_re
  /\Asql\d+\z/i
end

# If an :identity option is present in the column, add the necessary IDENTITY SQL.
def type_literal(column)
  if column[:identity]
    sql = "#{super} GENERATED BY DEFAULT AS IDENTITY"
    if sw = column[:start_with]
      sql << " (START WITH #{sw.to_i}"
      sql << " INCREMENT BY #{column[:increment_by].to_i}" if column[:increment_by]
      sql << ")"
    end
    sql
  else
    super
  end
end

# Derby uses clob for text types.
def uses_clob_for_text?
  true
end

def valid_connection_sql
  @valid_connection_sql ||= select(1).sql
end
end

class Dataset < JDBC::Dataset
include ::Sequel::Dataset::ColumnsLimit1

# Derby doesn't support an expression between CASE and WHEN,
# so remove conditions.
def case_expression_sql_append(sql, ce)
  super(sql, ce.with_merged_expression)
end

# If the type is String, trim the extra spaces since CHAR is used instead
# of varchar. This can cause problems if you are casting a char/varchar to
# a string and the ending whitespace is important.
def cast_sql_append(sql, expr, type)
  if type == String
    sql << "RTRIM("
    super
    sql << ')'
  else
    super
  end
end

def complex_expression_sql_append(sql, op, args)
  case op
  when :%, :'B~'
    complex_expression_emulate_append(sql, op, args)
  when :&, :|, :^, :<<, :>>
    raise Error, "Derby doesn't support the #{op} operator"
  when :**
    sql << 'exp('
    literal_append(sql, args[1])
    sql << ' * ln('
    literal_append(sql, args[0])
    sql << "))"
  when :extract
    sql << args[0].to_s << '('
    literal_append(sql, args[1])
    sql << ')'
  else
    super
  end
end

# Derby supports GROUP BY ROLLUP (but not CUBE)
def supports_group_rollup?
  true
end

# Derby does not support IS TRUE.
def supports_is_true?
  false
end

# Derby 10.11+ supports MERGE.
def supports_merge?
  db.svn_version >= 1616546
end

# Derby does not support IN/NOT IN with multiple columns
def supports_multiple_column_in?
  false
end

private

def empty_from_sql
  " FROM sysibm.sysdummy1"
end

# Derby needs a hex string casted to BLOB for blobs.
def literal_blob_append(sql, v)
  sql << "CAST(X'" << v.unpack("H*").first << "' AS BLOB)"
end

# Derby needs the standard workaround to insert all default values into
# a table with more than one column.
def insert_supports_empty_values?
  false
end

# Newer Derby versions can use the FALSE literal, but older versions need an always false expression.
def literal_false
  if db.svn_version >= 1040133
    'FALSE'
  else
    '(1 = 0)'
  end
end

# Derby handles fractional seconds in timestamps, but not in times
def literal_sqltime(v)
  v.strftime("'%H:%M:%S'")
end

# Newer Derby versions can use the TRUE literal, but older versions need an always true expression.
def literal_true
  if db.svn_version >= 1040133
    'TRUE'
  else
    '(1 = 1)'
  end
end

# Derby supports multiple rows for VALUES in INSERT.
def multi_insert_sql_strategy :values end # Emulate the char_length function with length def native_function_name(emulated_function) if emulated_function == :char_length 'length' else super end end # Offset comes before limit in Derby def select_limit_sql(sql) if o = @opts[:offset] sql << " OFFSET " literal_append(sql, o) sql << " ROWS" end if l = @opts[:limit] sql << " FETCH FIRST " literal_append(sql, l) sql << " ROWS ONLY" end end end end end end sequel-5.63.0/lib/sequel/adapters/jdbc/h2.rb000066400000000000000000000211601434214120600205070ustar00rootroot00000000000000# frozen-string-literal: true Sequel::JDBC.load_driver('org.h2.Driver', :H2) module Sequel module JDBC Sequel.synchronize do DATABASE_SETUP[:h2] = proc do |db| db.extend(Sequel::JDBC::H2::DatabaseMethods) db.dataset_class = Sequel::JDBC::H2::Dataset org.h2.Driver end end module H2 module DatabaseMethods def commit_prepared_transaction(transaction_id, opts=OPTS) run("COMMIT TRANSACTION #{transaction_id}", opts) end def database_type :h2 end def freeze h2_version version2? super end def h2_version @h2_version ||= get(Sequel.function(:H2VERSION)) end def rollback_prepared_transaction(transaction_id, opts=OPTS) run("ROLLBACK TRANSACTION #{transaction_id}", opts) end # H2 uses an IDENTITY type for primary keys def serial_primary_key_options {:primary_key => true, :type => :identity, :identity=>true} end # H2 supports CREATE TABLE IF NOT EXISTS syntax def supports_create_table_if_not_exists? true end # H2 supports prepared transactions def supports_prepared_transactions? true end # H2 supports savepoints def supports_savepoints? true end private # H2 does not allow adding primary key constraints to NULLable columns. def can_add_primary_key_constraint_on_nullable_columns? false end # If the :prepare option is given and we aren't in a savepoint, # prepare the transaction for a two-phase commit. 
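# A hedged usage sketch of Sequel's generic two-phase commit API on H2
# (the transaction id string is arbitrary and chosen here for illustration):
#
#   DB.transaction(prepare: 'tx-1'){DB[:items].insert(a: 1)}
#   DB.commit_prepared_transaction('tx-1') # or rollback_prepared_transaction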
def commit_transaction(conn, opts=OPTS) if (s = opts[:prepare]) && savepoint_level(conn) <= 1 log_connection_execute(conn, "PREPARE COMMIT #{s}") else super end end def alter_table_sql(table, op) case op[:op] when :add_column if (pk = op.delete(:primary_key)) || (ref = op.delete(:table)) if pk op[:null] = false end sqls = [super(table, op)] if pk && (h2_version >= '1.4' || op[:type] != :identity) # H2 needs to add a primary key column as a constraint in this case sqls << "ALTER TABLE #{quote_schema_table(table)} ADD PRIMARY KEY (#{quote_identifier(op[:name])})" end if ref op[:table] = ref constraint_name = op[:foreign_key_constraint_name] sqls << "ALTER TABLE #{quote_schema_table(table)} ADD#{" CONSTRAINT #{quote_identifier(constraint_name)}" if constraint_name} FOREIGN KEY (#{quote_identifier(op[:name])}) #{column_references_sql(op)}" end sqls else super(table, op) end when :rename_column "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} RENAME TO #{quote_identifier(op[:new_name])}" when :set_column_null "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} SET#{' NOT' unless op[:null]} NULL" when :set_column_type if sch = schema(table) if cs = sch.each{|k, v| break v if k == op[:name]; nil} cs = cs.dup cs[:default] = cs[:ruby_default] op = cs.merge!(op) end end sql = "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} #{type_literal(op)}".dup column_definition_order.each{|m| send(:"column_definition_#{m}_sql", sql, op)} sql when :drop_constraint if op[:type] == :primary_key "ALTER TABLE #{quote_schema_table(table)} DROP PRIMARY KEY" else super(table, op) end else super(table, op) end end # Default to a single connection for a memory database. def connection_pool_default_options o = super uri == 'jdbc:h2:mem:' ? o.merge(:max_connections=>1) : o end DATABASE_ERROR_REGEXPS = { /Unique index or primary key violation/ => UniqueConstraintViolation, /Referential integrity constraint violation/ => ForeignKeyConstraintViolation, /Check constraint violation/ => CheckConstraintViolation, /NULL not allowed for column/ => NotNullConstraintViolation, /Deadlock detected\. The current transaction was rolled back\./ => SerializationFailure, }.freeze def database_error_regexps DATABASE_ERROR_REGEXPS end def execute_statement_insert(stmt, sql) stmt.executeUpdate(sql, JavaSQL::Statement::RETURN_GENERATED_KEYS) end def prepare_jdbc_statement(conn, sql, opts) opts[:type] == :insert ? conn.prepareStatement(sql, JavaSQL::Statement::RETURN_GENERATED_KEYS) : super end # Get the last inserted id using getGeneratedKeys, scope_identity, or identity. def last_insert_id(conn, opts=OPTS) if stmt = opts[:stmt] rs = stmt.getGeneratedKeys begin if rs.next begin rs.getLong(1) rescue rs.getObject(1) rescue nil end end ensure rs.close end elsif !version2? statement(conn) do |stmt| sql = 'SELECT IDENTITY()' rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)} rs.next rs.getLong(1) end end end def primary_key_index_re /\Aprimary_key/i end # H2 does not support named column constraints. def supports_named_column_constraints? false end # Use BIGINT IDENTITY for identity columns that use :Bignum type def type_literal_generic_bignum_symbol(column) column[:identity] ? 'BIGINT AUTO_INCREMENT' : super end def version2? 
return @version2 if defined?(@version2) @version2 = h2_version.to_i >= 2 end end class Dataset < JDBC::Dataset ILIKE_PLACEHOLDER = ["CAST(".freeze, " AS VARCHAR_IGNORECASE)".freeze].freeze # Emulate the case insensitive LIKE operator and the bitwise operators. def complex_expression_sql_append(sql, op, args) case op when :ILIKE, :"NOT ILIKE" super(sql, (op == :ILIKE ? :LIKE : :"NOT LIKE"), [SQL::PlaceholderLiteralString.new(ILIKE_PLACEHOLDER, [args[0]]), args[1]]) when :&, :|, :^, :<<, :>>, :'B~' complex_expression_emulate_append(sql, op, args) else super end end # H2 does not support derived column lists def supports_derived_column_lists? false end # H2 requires SQL standard datetimes def requires_sql_standard_datetimes? true end # H2 doesn't support IS TRUE def supports_is_true? false end # H2 doesn't support JOIN USING def supports_join_using? false end # H2 supports MERGE def supports_merge? true end # H2 doesn't support multiple columns in IN/NOT IN def supports_multiple_column_in? false end private # H2 expects hexadecimal strings for blob values def literal_blob_append(sql, v) if db.send(:version2?) super else sql << "'" << v.unpack("H*").first << "'" end end def literal_false 'FALSE' end def literal_true 'TRUE' end # H2 handles fractional seconds in timestamps, but not in times def literal_sqltime(v) v.strftime("'%H:%M:%S'") end # H2 supports multiple rows in INSERT. def multi_insert_sql_strategy :values end def select_only_offset_sql(sql) if db.send(:version2?) super else sql << " LIMIT -1 OFFSET " literal_append(sql, @opts[:offset]) end end # H2 supports quoted function names. def supports_quoted_function_names? true end end end end end sequel-5.63.0/lib/sequel/adapters/jdbc/hsqldb.rb000066400000000000000000000167401434214120600214630ustar00rootroot00000000000000# frozen-string-literal: true Sequel::JDBC.load_driver('org.hsqldb.jdbcDriver', :HSQLDB) require_relative 'transactions' module Sequel module JDBC Sequel.synchronize do DATABASE_SETUP[:hsqldb] = proc do |db| db.extend(Sequel::JDBC::HSQLDB::DatabaseMethods) db.dataset_class = Sequel::JDBC::HSQLDB::Dataset org.hsqldb.jdbcDriver end end module HSQLDB module DatabaseMethods include ::Sequel::JDBC::Transactions def database_type :hsqldb end def freeze db_version super end # HSQLDB uses an IDENTITY sequence as the default value for primary # key columns. def serial_primary_key_options {:primary_key => true, :type => :integer, :identity=>true, :start_with=>1} end # The version of the database, as an integer (e.g 2.2.5 -> 20205) def db_version return @db_version if defined?(@db_version) v = get(Sequel.function(:DATABASE_VERSION)) @db_version = if v =~ /(\d+)\.(\d+)\.(\d+)/ $1.to_i * 10000 + $2.to_i * 100 + $3.to_i end end # HSQLDB supports DROP TABLE IF EXISTS def supports_drop_table_if_exists? true end private def alter_table_sql(table, op) case op[:op] when :add_column if op[:table] [super(table, op.merge(:table=>nil)), alter_table_sql(table, op.merge(:op=>:add_constraint, :type=>:foreign_key, :name=>op[:foreign_key_constraint_name], :columns=>[op[:name]], :table=>op[:table]))] else super end when :rename_column "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} RENAME TO #{quote_identifier(op[:new_name])}" when :set_column_type "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} SET DATA TYPE #{type_literal(op)}" when :set_column_null "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} SET #{op[:null] ? 
'NULL' : 'NOT NULL'}" else super end end # HSQLDB requires parens around the SELECT, and the WITH DATA syntax. def create_table_as_sql(name, sql, options) "#{create_table_prefix_sql(name, options)} AS (#{sql}) WITH DATA" end DATABASE_ERROR_REGEXPS = { /integrity constraint violation: unique constraint or index violation/ => UniqueConstraintViolation, /integrity constraint violation: foreign key/ => ForeignKeyConstraintViolation, /integrity constraint violation: check constraint/ => CheckConstraintViolation, /integrity constraint violation: NOT NULL check constraint/ => NotNullConstraintViolation, /serialization failure/ => SerializationFailure, }.freeze def database_error_regexps DATABASE_ERROR_REGEXPS end # IF EXISTS comes after table name on HSQLDB def drop_table_sql(name, options) "DROP TABLE #{quote_schema_table(name)}#{' IF EXISTS' if options[:if_exists]}#{' CASCADE' if options[:cascade]}" end # IF EXISTS comes after view name on HSQLDB def drop_view_sql(name, options) "DROP VIEW #{quote_schema_table(name)}#{' IF EXISTS' if options[:if_exists]}#{' CASCADE' if options[:cascade]}" end # Use IDENTITY() to get the last inserted id. def last_insert_id(conn, opts=OPTS) statement(conn) do |stmt| sql = 'CALL IDENTITY()' rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)} rs.next rs.getLong(1) end end # Primary key indexes appear to start with sys_idx_sys_pk_ on HSQLDB def primary_key_index_re /\Asys_idx_sys_pk_/i end # If an :identity option is present in the column, add the necessary IDENTITY SQL. # It's possible to use an IDENTITY type, but that defaults the sequence to start # at 0 instead of 1, and we don't want that. def type_literal(column) if column[:identity] sql = "#{super} GENERATED BY DEFAULT AS IDENTITY" if sw = column[:start_with] sql += " (START WITH #{sw.to_i}" sql << " INCREMENT BY #{column[:increment_by].to_i}" if column[:increment_by] sql << ")" end sql else super end end # HSQLDB uses clob for text types. def uses_clob_for_text? true end # HSQLDB supports views with check option. def view_with_check_option_support :local end end class Dataset < JDBC::Dataset # Handle HSQLDB specific case insensitive LIKE and bitwise operator support. def complex_expression_sql_append(sql, op, args) case op when :ILIKE, :"NOT ILIKE" super(sql, (op == :ILIKE ? :LIKE : :"NOT LIKE"), args.map{|v| SQL::Function.new(:ucase, v)}) when :&, :|, :^, :%, :<<, :>>, :'B~' complex_expression_emulate_append(sql, op, args) else super end end # HSQLDB requires recursive CTEs to have column aliases. def recursive_cte_requires_column_aliases? true end # HSQLDB requires SQL standard datetimes in some places. def requires_sql_standard_datetimes? true end # HSQLDB does support common table expressions, but the support is broken. # CTEs operate more like temprorary tables or views, lasting longer than the duration of the expression. # CTEs in earlier queries might take precedence over CTEs with the same name in later queries. # Also, if any CTE is recursive, all CTEs must be recursive. # If you want to use CTEs with HSQLDB, you'll have to manually modify the dataset to allow it. def supports_cte?(type=:select) false end # HSQLDB does not support IS TRUE. def supports_is_true? false end # HSQLDB supports lateral subqueries. def supports_lateral_subqueries? true end # HSQLDB 2.3.4+ supports MERGE. Older versions also support MERGE, but not all # features that are in Sequel's tests. def supports_merge? 
db.db_version >= 20304 end private def empty_from_sql " FROM (VALUES (0))" end # Use string in hex format for blob data. def literal_blob_append(sql, v) sql << "X'" << v.unpack("H*").first << "'" end # HSQLDB uses FALSE for false values. def literal_false 'FALSE' end # HSQLDB handles fractional seconds in timestamps, but not in times def literal_sqltime(v) v.strftime("'%H:%M:%S'") end # HSQLDB uses TRUE for true values. def literal_true 'TRUE' end # HSQLDB supports multiple rows in INSERT. def multi_insert_sql_strategy :values end # Use WITH RECURSIVE instead of WITH if any of the CTEs is recursive def select_with_sql_base opts[:with].any?{|w| w[:recursive]} ? "WITH RECURSIVE " : super end end end end end sequel-5.63.0/lib/sequel/adapters/jdbc/jtds.rb000066400000000000000000000021201434214120600211350ustar00rootroot00000000000000# frozen-string-literal: true Sequel::JDBC.load_driver('Java::net.sourceforge.jtds.jdbc.Driver', :JTDS) require_relative 'mssql' module Sequel module JDBC Sequel.synchronize do DATABASE_SETUP[:jtds] = proc do |db| db.extend(Sequel::JDBC::JTDS::DatabaseMethods) db.extend_datasets Sequel::MSSQL::DatasetMethods db.send(:set_mssql_unicode_strings) Java::net.sourceforge.jtds.jdbc.Driver end end module JTDS module DatabaseMethods include Sequel::JDBC::MSSQL::DatabaseMethods private # JTDS exception handling with SQLState is less accurate than with regexps. def database_exception_use_sqlstates? false end def disconnect_error?(exception, opts) super || exception.message =~ /\AInvalid state, the Connection object is closed\.\z/ end # Handle nil values by using setNull with the correct parameter type. def set_ps_arg_nil(cps, i) cps.setNull(i, cps.getParameterMetaData.getParameterType(i)) end end end end end sequel-5.63.0/lib/sequel/adapters/jdbc/mssql.rb000066400000000000000000000013651434214120600213420ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../shared/mssql' module Sequel module JDBC module MSSQL module DatabaseMethods include Sequel::MSSQL::DatabaseMethods private # Get the last inserted id using SCOPE_IDENTITY(). def last_insert_id(conn, opts=OPTS) statement(conn) do |stmt| sql = opts[:prepared] ? 'SELECT @@IDENTITY' : 'SELECT SCOPE_IDENTITY()' rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)} rs.next rs.getLong(1) end end # Primary key indexes appear to start with pk__ on MSSQL def primary_key_index_re /\Apk__/i end end end end end sequel-5.63.0/lib/sequel/adapters/jdbc/mysql.rb000066400000000000000000000060351434214120600213470ustar00rootroot00000000000000# frozen-string-literal: true module Sequel module JDBC driver = Sequel::JDBC.load_driver(%w'com.mysql.cj.jdbc.Driver com.mysql.jdbc.Driver', :MySQL) require_relative '../shared/mysql' Sequel.synchronize do DATABASE_SETUP[:mysql] = proc do |db| db.extend(Sequel::JDBC::MySQL::DatabaseMethods) db.extend_datasets Sequel::MySQL::DatasetMethods driver end end module MySQL module DatabaseMethods include Sequel::MySQL::DatabaseMethods private # MySQL exception handling with SQLState is less accurate than with regexps. def database_exception_use_sqlstates? false end # Raise a disconnect error if the SQL state of the cause of the exception indicates so. def disconnect_error?(exception, opts) exception.message =~ /\ACommunications link failure/ || super end # Get the last inserted id using LAST_INSERT_ID(). 
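# so Dataset#insert can return the generated primary key. For example
# (table and column hypothetical):
#
#   id = DB[:items].insert(name: 'a') # => the autoincremented id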
def last_insert_id(conn, opts=OPTS)
  if stmt = opts[:stmt]
    rs = stmt.getGeneratedKeys
    begin
      if rs.next
        rs.getLong(1)
      else
        0
      end
    ensure
      rs.close
    end
  else
    statement(conn) do |st|
      rs = st.executeQuery('SELECT LAST_INSERT_ID()')
      rs.next
      rs.getLong(1)
    end
  end
end

# MySQL 5.1.12 JDBC adapter requires generated keys
# and previous versions don't mind.
def execute_statement_insert(stmt, sql)
  stmt.executeUpdate(sql, JavaSQL::Statement::RETURN_GENERATED_KEYS)
end

# Return generated keys for insert statements.
def prepare_jdbc_statement(conn, sql, opts)
  opts[:type] == :insert ? conn.prepareStatement(sql, JavaSQL::Statement::RETURN_GENERATED_KEYS) : super
end

# Convert tinyint(1) type to boolean
def schema_column_type(db_type)
  db_type =~ /\Atinyint\(1\)/ ? :boolean : super
end

# Run the default connection setting SQL statements for every new connection.
def setup_connection(conn)
  mysql_connection_setting_sqls.each{|sql| statement(conn){|s| log_connection_yield(sql, conn){s.execute(sql)}}}
  super
end

# Handle unsigned integer values
def setup_type_convertor_map
  super
  @type_convertor_map[Java::JavaSQL::Types::SMALLINT] = @type_convertor_map[Java::JavaSQL::Types::INTEGER]
  @type_convertor_map[Java::JavaSQL::Types::INTEGER] = @type_convertor_map[Java::JavaSQL::Types::BIGINT]
  @basic_type_convertor_map[Java::JavaSQL::Types::SMALLINT] = @basic_type_convertor_map[Java::JavaSQL::Types::INTEGER]
  @basic_type_convertor_map[Java::JavaSQL::Types::INTEGER] = @basic_type_convertor_map[Java::JavaSQL::Types::BIGINT]
end
end
end
end
end
sequel-5.63.0/lib/sequel/adapters/jdbc/oracle.rb000066400000000000000000000101121434214120600214400ustar00rootroot00000000000000# frozen-string-literal: true

Sequel::JDBC.load_driver('Java::oracle.jdbc.driver.OracleDriver')
require_relative '../shared/oracle'
require_relative 'transactions'

module Sequel
  module JDBC
    Sequel.synchronize do
      DATABASE_SETUP[:oracle] = proc do |db|
        db.extend(Sequel::JDBC::Oracle::DatabaseMethods)
        db.dataset_class = Sequel::JDBC::Oracle::Dataset
        Java::oracle.jdbc.driver.OracleDriver
      end
    end

    module Oracle
      JAVA_BIG_DECIMAL_CONSTRUCTOR = java.math.BigDecimal.java_class.constructor(Java::long).method(:new_instance)

      ORACLE_DECIMAL = Object.new
      def ORACLE_DECIMAL.call(r, i)
        if v = r.getBigDecimal(i)
          i = v.long_value
          if v == JAVA_BIG_DECIMAL_CONSTRUCTOR.call(i)
            i
          else
            ::Kernel::BigDecimal(v.to_string)
          end
        end
      end

      ORACLE_CLOB = Object.new
      def ORACLE_CLOB.call(r, i)
        return unless clob = r.getClob(i)
        str = clob.getSubString(1, clob.length)
        clob.freeTemporary if clob.isTemporary
        str
      end

      module DatabaseMethods
        include Sequel::Oracle::DatabaseMethods
        include Sequel::JDBC::Transactions

        def self.extended(db)
          db.instance_exec do
            @autosequence = opts[:autosequence]
            @primary_key_sequences = {}
          end
        end

        private

        # Oracle exception handling with SQLState is less accurate than with regexps.
        def database_exception_use_sqlstates?
          false
        end

        def disconnect_error?(exception, opts)
          super || exception.message =~ /\AClosed Connection/
        end

        # Default the fetch size for statements to 100, similar to the oci8-based oracle adapter.
def default_fetch_size 100 end def last_insert_id(conn, opts) unless sequence = opts[:sequence] if t = opts[:table] sequence = sequence_for_table(t) end end if sequence sql = "SELECT #{literal(sequence)}.currval FROM dual" statement(conn) do |stmt| begin rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)} rs.next rs.getLong(1) rescue java.sql.SQLException nil end end end end # Primary key indexes appear to start with sys_ on Oracle def primary_key_index_re /\Asys_/i end def schema_parse_table(*) sch = super sch.each do |c, s| if s[:type] == :decimal && s[:scale] == -127 s[:type] = :integer elsif s[:db_type] == 'DATE' s[:type] = :datetime end end sch end def schema_parse_table_skip?(h, schema) super || (h[:table_schem] != current_user unless schema) end # As of Oracle 9.2, releasing savepoints is no longer supported. def supports_releasing_savepoints? false end def setup_type_convertor_map super @type_convertor_map[:OracleDecimal] = ORACLE_DECIMAL @type_convertor_map[:OracleClob] = ORACLE_CLOB end end class Dataset < JDBC::Dataset include Sequel::Oracle::DatasetMethods NUMERIC_TYPE = Java::JavaSQL::Types::NUMERIC TIMESTAMP_TYPE = Java::JavaSQL::Types::TIMESTAMP CLOB_TYPE = Java::JavaSQL::Types::CLOB TIMESTAMPTZ_TYPES = [Java::oracle.jdbc.OracleTypes::TIMESTAMPTZ, Java::oracle.jdbc.OracleTypes::TIMESTAMPLTZ].freeze def type_convertor(map, meta, type, i) case type when NUMERIC_TYPE if meta.getScale(i) == 0 map[:OracleDecimal] else super end when *TIMESTAMPTZ_TYPES map[TIMESTAMP_TYPE] when CLOB_TYPE map[:OracleClob] else super end end end end end end sequel-5.63.0/lib/sequel/adapters/jdbc/postgresql.rb000066400000000000000000000200661434214120600224050ustar00rootroot00000000000000# frozen-string-literal: true Sequel::JDBC.load_driver('org.postgresql.Driver', :Postgres) require_relative '../shared/postgres' module Sequel module JDBC Sequel.synchronize do DATABASE_SETUP[:postgresql] = proc do |db| db.dataset_class = Sequel::JDBC::Postgres::Dataset db.extend(Sequel::JDBC::Postgres::DatabaseMethods) org.postgresql.Driver end end module Postgres module DatabaseMethods include Sequel::Postgres::DatabaseMethods # Add the primary_keys and primary_key_sequences instance variables, # so we can get the correct return values for inserted rows. def self.extended(db) super db.send(:initialize_postgres_adapter) end # Remove any current entry for the oid in the oid_convertor_map. 
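# Callers typically use Database#add_conversion_proc to register a custom
# proc for a type's oid; a hedged sketch (oid 1700 is PostgreSQL's numeric
# type, and the proc body is illustrative only):
#
#   DB.add_conversion_proc(1700){|s| BigDecimal(s)}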
def add_conversion_proc(oid, *) super Sequel.synchronize{@oid_convertor_map.delete(oid)} end # See Sequel::Postgres::Adapter#copy_into def copy_into(table, opts=OPTS) data = opts[:data] data = Array(data) if data.is_a?(String) if defined?(yield) && data raise Error, "Cannot provide both a :data option and a block to copy_into" elsif !defined?(yield) && !data raise Error, "Must provide either a :data option or a block to copy_into" end synchronize(opts[:server]) do |conn| begin copy_manager = org.postgresql.copy.CopyManager.new(conn) copier = copy_manager.copy_in(copy_into_sql(table, opts)) if defined?(yield) while buf = yield java_bytes = buf.to_java_bytes copier.writeToCopy(java_bytes, 0, java_bytes.length) end else data.each do |d| java_bytes = d.to_java_bytes copier.writeToCopy(java_bytes, 0, java_bytes.length) end end rescue Exception => e copier.cancelCopy if copier raise ensure unless e begin copier.endCopy rescue NativeException => e2 raise_error(e2) end end end end end # See Sequel::Postgres::Adapter#copy_table def copy_table(table, opts=OPTS) synchronize(opts[:server]) do |conn| copy_manager = org.postgresql.copy.CopyManager.new(conn) copier = copy_manager.copy_out(copy_table_sql(table, opts)) begin if defined?(yield) while buf = copier.readFromCopy yield(String.from_java_bytes(buf)) end nil else b = String.new while buf = copier.readFromCopy b << String.from_java_bytes(buf) end b end rescue => e raise_error(e, :disconnect=>true) ensure if buf && !e raise DatabaseDisconnectError, "disconnecting as a partial COPY may leave the connection in an unusable state" end end end end def oid_convertor_proc(oid) if (conv = Sequel.synchronize{@oid_convertor_map[oid]}).nil? conv = if pr = conversion_procs[oid] lambda do |r, i| if v = r.getString(i) pr.call(v) end end else false end Sequel.synchronize{@oid_convertor_map[oid] = conv} end conv end private def disconnect_error?(exception, opts) super || exception.message =~ /\A(This connection has been closed\.|FATAL: terminating connection due to administrator command|An I\/O error occurred while sending to the backend\.)\z/ end # For PostgreSQL-specific types, return the string that should be used # as the PGObject value. Returns nil by default, loading pg_* extensions # will override this to add support for specific types. def bound_variable_arg(arg, conn) nil end # Work around issue when using Sequel's bound variable support where the # same SQL is used in different bound variable calls, but the schema has # changed between the calls. This is necessary as jdbc-postgres versions # after 9.4.1200 violate the JDBC API. These versions cache separate # PreparedStatement instances, which are eventually prepared server side after the # prepareThreshold is met. The JDBC API violation is that PreparedStatement#close # does not release the server side prepared statement. def prepare_jdbc_statement(conn, sql, opts) ps = super unless opts[:name] ps.prepare_threshold = 0 end ps end # If the given argument is a recognized PostgreSQL-specific type, create # a PGObject instance with unknown type and the bound argument string value, # and set that as the prepared statement argument. def set_ps_arg(cps, arg, i) if v = bound_variable_arg(arg, nil) obj = org.postgresql.util.PGobject.new obj.setType("unknown") obj.setValue(v) cps.setObject(i, obj) else super end end # Use setNull for nil arguments as the default behavior of setString # with nil doesn't appear to work correctly on PostgreSQL. 
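# e.g. so that a prepared statement call with a nil argument works as
# expected (dataset, statement, and variable names are hypothetical):
#
#   ps = DB[:items].where(id: :$i).prepare(:first, :item_by_id)
#   ps.call(i: nil)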
def set_ps_arg_nil(cps, i) cps.setNull(i, JavaSQL::Types::NULL) end # Execute the connection configuration SQL queries on the connection. def setup_connection_with_opts(conn, opts) conn = super statement(conn) do |stmt| connection_configuration_sqls(opts).each{|sql| log_connection_yield(sql, conn){stmt.execute(sql)}} end conn end def setup_type_convertor_map super @oid_convertor_map = {} end end class Dataset < JDBC::Dataset include Sequel::Postgres::DatasetMethods # Warn when calling as the fetch size is ignored by the JDBC adapter currently. def with_fetch_size(size) warn("Sequel::JDBC::Postgres::Dataset#with_fetch_size does not currently have an effect.", :uplevel=>1) super end private # Literalize strings similar to the native postgres adapter def literal_string_append(sql, v) sql << "'" << db.synchronize(@opts[:server]){|c| c.escape_string(v)} << "'" end # SQL fragment for Sequel::SQLTime, containing just the time part def literal_sqltime(v) v.strftime("'%H:%M:%S#{sprintf(".%03d", (v.usec/1000.0).round)}'") end STRING_TYPE = Java::JavaSQL::Types::VARCHAR ARRAY_TYPE = Java::JavaSQL::Types::ARRAY PG_SPECIFIC_TYPES = [Java::JavaSQL::Types::ARRAY, Java::JavaSQL::Types::OTHER, Java::JavaSQL::Types::STRUCT, Java::JavaSQL::Types::TIME_WITH_TIMEZONE, Java::JavaSQL::Types::TIME].freeze # Return PostgreSQL hstore types as ruby Hashes instead of # Java HashMaps. Only used if the database does not have a # conversion proc for the type. HSTORE_METHOD = Object.new def HSTORE_METHOD.call(r, i) if v = r.getObject(i) v.to_hash end end def type_convertor(map, meta, type, i) case type when *PG_SPECIFIC_TYPES oid = meta.getField(i).getOID if pr = db.oid_convertor_proc(oid) pr elsif oid == 2950 # UUID map[STRING_TYPE] elsif meta.getPGType(i) == 'hstore' HSTORE_METHOD else super end else super end end end end end end sequel-5.63.0/lib/sequel/adapters/jdbc/sqlanywhere.rb000066400000000000000000000036651434214120600225520ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../shared/sqlanywhere' require_relative 'transactions' module Sequel module JDBC drv = [ lambda{Java::sybase.jdbc4.sqlanywhere.IDriver}, lambda{Java::ianywhere.ml.jdbcodbc.jdbc4.IDriver}, lambda{Java::sybase.jdbc.sqlanywhere.IDriver}, lambda{Java::ianywhere.ml.jdbcodbc.jdbc.IDriver}, lambda{Java::com.sybase.jdbc4.jdbc.Sybdriver}, lambda{Java::com.sybase.jdbc3.jdbc.Sybdriver} ].each do |class_proc| begin break class_proc.call rescue NameError end end raise(Sequel::AdapterNotFound, "no suitable SQLAnywhere JDBC driver found") unless drv Sequel.synchronize do DATABASE_SETUP[:sqlanywhere] = proc do |db| db.extend(Sequel::JDBC::SqlAnywhere::DatabaseMethods) db.convert_smallint_to_bool = true db.dataset_class = Sequel::JDBC::SqlAnywhere::Dataset drv end end module SqlAnywhere module DatabaseMethods include Sequel::SqlAnywhere::DatabaseMethods include Sequel::JDBC::Transactions private # Use @@IDENTITY to get the last inserted id def last_insert_id(conn, opts=OPTS) statement(conn) do |stmt| sql = 'SELECT @@IDENTITY' rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)} rs.next rs.getLong(1) end end end class Dataset < JDBC::Dataset include Sequel::SqlAnywhere::DatasetMethods private SMALLINT_TYPE = Java::JavaSQL::Types::SMALLINT BOOLEAN_METHOD = Object.new def BOOLEAN_METHOD.call(r, i) v = r.getShort(i) v != 0 unless r.wasNull end def type_convertor(map, meta, type, i) if convert_smallint_to_bool && type == SMALLINT_TYPE BOOLEAN_METHOD else super end end end end end end 
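# A hedged note on the BOOLEAN_METHOD conversion above: SQL Anywhere exposes
# booleans as smallint, so with the default convert_smallint_to_bool = true a
# smallint 0/1 round-trips as false/true; set it to false to get raw integers
# back (table and column hypothetical):
#
#   DB.convert_smallint_to_bool = false
#   DB[:items].get(:active) # => 1 instead of true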
sequel-5.63.0/lib/sequel/adapters/jdbc/sqlite.rb000066400000000000000000000111251434214120600214770ustar00rootroot00000000000000# frozen-string-literal: true Sequel::JDBC.load_driver('org.sqlite.JDBC', :SQLite3) require_relative '../shared/sqlite' module Sequel module JDBC Sequel.synchronize do DATABASE_SETUP[:sqlite] = proc do |db| db.extend(Sequel::JDBC::SQLite::DatabaseMethods) db.extend_datasets Sequel::SQLite::DatasetMethods db.set_integer_booleans org.sqlite.JDBC end end module SQLite module ForeignKeyListPragmaConvertorFix # For the use of the convertor for String, working around a bug # in jdbc-sqlite3 that reports fields are of type # java.sql.types.NUMERIC even though they contain non-numeric data. def type_convertor(_, _, _, i) i > 2 ? TypeConvertor::CONVERTORS[:String] : super end end module TableInfoPragmaConvertorFix # For the use of the convertor for String, working around a bug # in jdbc-sqlite3 that reports dflt_value field is of type # java.sql.types.NUMERIC even though they contain string data. def type_convertor(_, _, _, i) i == 5 ? TypeConvertor::CONVERTORS[:String] : super end end module DatabaseMethods include Sequel::SQLite::DatabaseMethods # Swallow pointless exceptions when the foreign key list pragma # doesn't return any rows. def foreign_key_list(table, opts=OPTS) super rescue Sequel::DatabaseError => e raise unless foreign_key_error?(e) [] end # Swallow pointless exceptions when the index list pragma # doesn't return any rows. def indexes(table, opts=OPTS) super rescue Sequel::DatabaseError => e raise unless foreign_key_error?(e) {} end private # Add workaround for bug when running foreign_key_list pragma def _foreign_key_list_ds(_) super.with_extend(ForeignKeyListPragmaConvertorFix) end # Add workaround for bug when running table_info pragma def _parse_pragma_ds(_, _) super.with_extend(TableInfoPragmaConvertorFix) end DATABASE_ERROR_REGEXPS = Sequel::SQLite::DatabaseMethods::DATABASE_ERROR_REGEXPS.merge(/Abort due to constraint violation/ => ConstraintViolation).freeze def database_error_regexps DATABASE_ERROR_REGEXPS end # Use last_insert_rowid() to get the last inserted id. def last_insert_id(conn, opts=OPTS) statement(conn) do |stmt| rs = stmt.executeQuery('SELECT last_insert_rowid()') rs.next rs.getLong(1) end end # Default to a single connection for a memory database. def connection_pool_default_options o = super uri == 'jdbc:sqlite::memory:' ? o.merge(:max_connections=>1) : o end # Execute the connection pragmas on the connection. def setup_connection(conn) conn = super(conn) statement(conn) do |stmt| connection_pragmas.each{|s| log_connection_yield(s, conn){stmt.execute(s)}} end conn end # Whether the given exception is due to a foreign key error. def foreign_key_error?(exception) exception.message =~ /query does not return ResultSet/ end # Use getLong instead of getInt for converting integers on SQLite, since SQLite does not enforce a limit of 2**32. # Work around regressions in jdbc-sqlite 3.8.7 for date and blob types. 
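      # With these convertors in place, date and blob columns round-trip as
      # expected (a hedged sketch, table and columns hypothetical):
      #
      #   DB[:items].insert(d: Date.today, data: Sequel.blob("\x00\xff"))
      #   DB[:items].get(:d)    # => Date instance
      #   DB[:items].get(:data) # => Sequel::SQL::Blob instance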
def setup_type_convertor_map super @type_convertor_map[Java::JavaSQL::Types::INTEGER] = @type_convertor_map[Java::JavaSQL::Types::BIGINT] @basic_type_convertor_map[Java::JavaSQL::Types::INTEGER] = @basic_type_convertor_map[Java::JavaSQL::Types::BIGINT] x = @type_convertor_map[Java::JavaSQL::Types::DATE] = Object.new def x.call(r, i) if v = r.getString(i) Sequel.string_to_date(v) end end x = @type_convertor_map[Java::JavaSQL::Types::BLOB] = Object.new def x.call(r, i) if v = r.getBytes(i) Sequel::SQL::Blob.new(String.from_java_bytes(v)) elsif !r.wasNull Sequel::SQL::Blob.new('') end end end # The result code for the exception, if the jdbc driver supports result codes for exceptions. def sqlite_error_code(exception) exception.resultCode.code if exception.respond_to?(:resultCode) end end end end end sequel-5.63.0/lib/sequel/adapters/jdbc/sqlserver.rb000066400000000000000000000056541434214120600222360ustar00rootroot00000000000000# frozen-string-literal: true Sequel::JDBC.load_driver('com.microsoft.sqlserver.jdbc.SQLServerDriver') require_relative 'mssql' module Sequel module JDBC Sequel.synchronize do DATABASE_SETUP[:sqlserver] = proc do |db| db.extend(Sequel::JDBC::SQLServer::DatabaseMethods) db.extend_datasets Sequel::MSSQL::DatasetMethods db.send(:set_mssql_unicode_strings) com.microsoft.sqlserver.jdbc.SQLServerDriver end end module SQLServer MSSQL_RUBY_TIME = Object.new def MSSQL_RUBY_TIME.call(r, i) # MSSQL-Server TIME should be fetched as string to keep the precision intact, see: # https://docs.microsoft.com/en-us/sql/t-sql/data-types/time-transact-sql#a-namebackwardcompatibilityfordownlevelclientsa-backward-compatibility-for-down-level-clients if v = r.getString(i) Sequel.string_to_time("#{v}") end end module DatabaseMethods include Sequel::JDBC::MSSQL::DatabaseMethods def setup_type_convertor_map super map = @type_convertor_map map[Java::JavaSQL::Types::TIME] = MSSQL_RUBY_TIME # Work around constant lazy loading in some drivers begin dto = Java::MicrosoftSql::Types::DATETIMEOFFSET rescue NameError end if dto map[dto] = lambda do |r, i| if v = r.getDateTimeOffset(i) to_application_timestamp(v.to_s) end end end end # Work around a bug in SQL Server JDBC Driver 3.0, where the metadata # for the getColumns result set specifies an incorrect type for the # IS_AUTOINCREMENT column. The column is a string, but the type is # specified as a short. This causes getObject() to throw a # com.microsoft.sqlserver.jdbc.SQLServerException: "The conversion # from char to SMALLINT is unsupported." Using getString() rather # than getObject() for this column avoids the problem. # Reference: http://social.msdn.microsoft.com/Forums/en/sqldataaccess/thread/20df12f3-d1bf-4526-9daa-239a83a8e435 module MetadataDatasetMethods def type_convertor(map, meta, type, i) if output_identifier(meta.getColumnLabel(i)) == :is_autoincrement map[Java::JavaSQL::Types::VARCHAR] else super end end def basic_type_convertor(map, meta, type, i) if output_identifier(meta.getColumnLabel(i)) == :is_autoincrement map[Java::JavaSQL::Types::VARCHAR] else super end end end private def _metadata_dataset super.with_extend(MetadataDatasetMethods) end def disconnect_error?(exception, opts) super || (exception.message =~ /connection is closed/) end end end end end sequel-5.63.0/lib/sequel/adapters/jdbc/transactions.rb000066400000000000000000000071431434214120600227130ustar00rootroot00000000000000# frozen-string-literal: true module Sequel module JDBC module Transactions def freeze supports_savepoints? 
super end # Check the JDBC DatabaseMetaData for savepoint support def supports_savepoints? return @supports_savepoints if defined?(@supports_savepoints) @supports_savepoints = synchronize{|c| c.getMetaData.supports_savepoints} end # Check the JDBC DatabaseMetaData for support for serializable isolation, # since that's the value most people will use. def supports_transaction_isolation_levels? synchronize{|conn| conn.getMetaData.supportsTransactionIsolationLevel(JavaSQL::Connection::TRANSACTION_SERIALIZABLE)} end private JDBC_TRANSACTION_ISOLATION_LEVELS = {:uncommitted=>JavaSQL::Connection::TRANSACTION_READ_UNCOMMITTED, :committed=>JavaSQL::Connection::TRANSACTION_READ_COMMITTED, :repeatable=>JavaSQL::Connection::TRANSACTION_REPEATABLE_READ, :serializable=>JavaSQL::Connection::TRANSACTION_SERIALIZABLE}.freeze # Set the transaction isolation level on the given connection using # the JDBC API. def set_transaction_isolation(conn, opts) level = opts.fetch(:isolation, transaction_isolation_level) if (jdbc_level = JDBC_TRANSACTION_ISOLATION_LEVELS[level]) && conn.getMetaData.supportsTransactionIsolationLevel(jdbc_level) _trans(conn)[:original_jdbc_isolation_level] = conn.getTransactionIsolation log_connection_yield("Transaction.isolation_level = #{level}", conn){conn.setTransactionIsolation(jdbc_level)} end end # Most JDBC drivers that support savepoints support releasing them. def supports_releasing_savepoints? true end # JDBC savepoint object for the current savepoint for the connection. def savepoint_obj(conn) _trans(conn)[:savepoints][-1][:obj] end # Use JDBC connection's setAutoCommit to false to start transactions def begin_transaction(conn, opts=OPTS) if in_savepoint?(conn) _trans(conn)[:savepoints][-1][:obj] = log_connection_yield('Transaction.savepoint', conn){conn.set_savepoint} else log_connection_yield('Transaction.begin', conn){conn.setAutoCommit(false)} set_transaction_isolation(conn, opts) end end # Use JDBC connection's commit method to commit transactions def commit_transaction(conn, opts=OPTS) if in_savepoint?(conn) if supports_releasing_savepoints? log_connection_yield('Transaction.release_savepoint', conn){conn.release_savepoint(savepoint_obj(conn))} end else log_connection_yield('Transaction.commit', conn){conn.commit} end end # Use JDBC connection's setAutoCommit to true to enable non-transactional behavior def remove_transaction(conn, committed) if jdbc_level = _trans(conn)[:original_jdbc_isolation_level] log_connection_yield("Transaction.restore_isolation_level", conn){conn.setTransactionIsolation(jdbc_level)} end unless in_savepoint?(conn) conn.setAutoCommit(true) end ensure super end # Use JDBC connection's rollback method to rollback transactions def rollback_transaction(conn, opts=OPTS) if in_savepoint?(conn) log_connection_yield('Transaction.rollback_savepoint', conn){conn.rollback(savepoint_obj(conn))} else log_connection_yield('Transaction.rollback', conn){conn.rollback} end end end end end sequel-5.63.0/lib/sequel/adapters/mock.rb000066400000000000000000000261271434214120600202350ustar00rootroot00000000000000# frozen-string-literal: true require_relative 'utils/unmodified_identifiers' module Sequel module Mock class Connection # Sequel::Mock::Database object that created this connection attr_reader :db # Shard this connection operates on, when using Sequel's # sharding support (always :default for databases not using # sharding). attr_reader :server # The specific database options for this connection. attr_reader :opts # Store the db, server, and opts. 
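# Connections like this are normally created indirectly via Sequel.mock
# or a mock:// URL; a brief sketch:
#
#   DB = Sequel.mock(fetch: {id: 1})
#   DB[:t].all # => [{:id=>1}]
#   DB.sqls    # => ["SELECT * FROM t"]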
def initialize(db, server, opts)
  @db = db
  @server = server
  @opts = opts
end

# Delegate to the db's #_execute method.
def execute(sql)
  @db.send(:_execute, self, sql, :log=>false)
end
end

class Database < Sequel::Database
set_adapter_scheme :mock

# Set the autogenerated primary key integer
# to be returned when running an insert query.
# Argument types supported:
#
# nil :: Return nil for all inserts
# Integer :: Starting integer for next insert, with
#            further inserts getting an incremented
#            value
# Array :: First insert gets the first value in the
#          array, second gets the second value, etc.
# Proc :: Called with the insert SQL query, uses
#         the value returned
# Class :: Should be an Exception subclass, will create a new
#          instance and raise it wrapped in a DatabaseError.
def autoid=(v)
  @autoid = case v
  when Integer
    i = v - 1
    proc{@mutex.synchronize{i+=1}}
  else
    v
  end
end

# Set the columns to set in the dataset when the dataset fetches
# rows. Argument types supported:
# nil :: Set no columns
# Array of Symbols :: Used for all datasets
# Array (otherwise) :: First retrieval gets the first value in the
#                      array, second gets the second value, etc.
# Proc :: Called with the select SQL query, uses the value
#         returned, which should be an array of symbols
attr_writer :columns

# Set the hashes to yield by execute when retrieving rows.
# Argument types supported:
#
# nil :: Yield no rows
# Hash :: Always yield a single row with this hash
# Array of Hashes :: Yield separately for each hash in this array
# Array (otherwise) :: First retrieval gets the first value
#                      in the array, second gets the second value, etc.
# Proc :: Called with the select SQL query, uses
#         the value returned, which should be a hash or
#         array of hashes.
# Class :: Should be an Exception subclass, will create a new
#          instance and raise it wrapped in a DatabaseError.
attr_writer :fetch

# Set the number of rows to return from update or delete.
# Argument types supported:
#
# nil :: Return 0 for all updates and deletes
# Integer :: Used for all updates and deletes
# Array :: First update/delete gets the first value in the
#          array, second gets the second value, etc.
# Proc :: Called with the update/delete SQL query, uses
#         the value returned.
# Class :: Should be an Exception subclass, will create a new
#          instance and raise it wrapped in a DatabaseError.
attr_writer :numrows

# Mock the server version, useful when using the shared adapters
attr_accessor :server_version

# Return a related Connection object connecting to the given shard.
def connect(server)
  Connection.new(self, server, server_opts(server))
end

def disconnect_connection(c)
end

# Store the sql used for later retrieval with #sqls, and return
# the appropriate value using either the #autoid, #fetch, or
# #numrows methods.
def execute(sql, opts=OPTS, &block)
  synchronize(opts[:server]){|c| _execute(c, sql, opts, &block)}
end
alias execute_ddl execute

# Store the sql used, and return the value of the #numrows method.
def execute_dui(sql, opts=OPTS)
  execute(sql, opts.merge(:meth=>:numrows))
end

# Store the sql used, and return the value of the #autoid method.
def execute_insert(sql, opts=OPTS)
  execute(sql, opts.merge(:meth=>:autoid))
end

# Return all stored SQL queries, and clear the cache
# of SQL queries.
def sqls
  @mutex.synchronize do
    s = @sqls.dup
    @sqls.clear
    s
  end
end

# Enable use of savepoints.
def supports_savepoints?
  shared_adapter? ? super : true
end

private

def _execute(c, sql, opts=OPTS, &block)
  sql += " -- args: #{opts[:arguments].inspect}" if opts[:arguments]
  sql += " -- #{@opts[:append]}" if @opts[:append]
  sql += " -- #{c.server.is_a?(Symbol) ? c.server : c.server.inspect}" if c.server != :default
  log_connection_yield(sql, c){} unless opts[:log] == false
  @mutex.synchronize{@sqls << sql}

  ds = opts[:dataset]
  begin
    if block
      columns(ds, sql) if ds
      _fetch(sql, (ds._fetch if ds) || @fetch, &block)
    elsif meth = opts[:meth]
      if meth == :numrows
        _numrows(sql, (ds.numrows if ds) || @numrows)
      else
        if ds
          @mutex.synchronize do
            v = ds.autoid
            if v.is_a?(Integer)
              ds.send(:cache_set, :_autoid, v + 1)
            end
            v
          end
        end || _nextres(@autoid, sql, nil)
      end
    end
  rescue => e
    raise_error(e)
  end
end

def _fetch(sql, f, &block)
  case f
  when Hash
    yield f.dup
  when Array
    if f.all?{|h| h.is_a?(Hash)}
      f.each{|h| yield h.dup}
    else
      _fetch(sql, @mutex.synchronize{f.shift}, &block)
    end
  when Proc
    h = f.call(sql)
    if h.is_a?(Hash)
      yield h.dup
    elsif h
      h.each{|h1| yield h1.dup}
    end
  when Class
    if f < Exception
      raise f
    else
      raise Error, "Invalid @fetch attribute: #{f.inspect}"
    end
  when nil
    # nothing
  else
    raise Error, "Invalid @fetch attribute: #{f.inspect}"
  end
end

def _nextres(v, sql, default)
  case v
  when Integer
    v
  when Array
    v.empty? ? default : _nextres(@mutex.synchronize{v.shift}, sql, default)
  when Proc
    v.call(sql)
  when Class
    if v < Exception
      raise v
    else
      raise Error, "Invalid @autoid/@numrows attribute: #{v.inspect}"
    end
  when nil
    default
  else
    raise Error, "Invalid @autoid/@numrows attribute: #{v.inspect}"
  end
end

def _numrows(sql, v)
  _nextres(v, sql, 0)
end

# Additional options supported:
#
# :autoid :: Call #autoid= with the value
# :columns :: Call #columns= with the value
# :fetch :: Call #fetch= with the value
# :numrows :: Call #numrows= with the value
# :extend :: A module the object is extended with.
# :sqls :: The array to store the SQL queries in.
def adapter_initialize
  opts = @opts
  @mutex = Mutex.new
  @sqls = opts[:sqls] || []

  @shared_adapter = false

  case db_type = opts[:host]
  when String, Symbol
    db_type = db_type.to_sym
    unless mod = Sequel.synchronize{SHARED_ADAPTER_MAP[db_type]}
      begin
        require "sequel/adapters/shared/#{db_type}"
      rescue LoadError
      else
        mod = Sequel.synchronize{SHARED_ADAPTER_MAP[db_type]}
      end
    end

    if mod
      @shared_adapter = true
      extend(mod::DatabaseMethods)
      extend_datasets(mod::DatasetMethods)
      if mod.respond_to?(:mock_adapter_setup)
        mod.mock_adapter_setup(self)
      end
    end
  end

  unless @shared_adapter
    extend UnmodifiedIdentifiers::DatabaseMethods
    extend_datasets UnmodifiedIdentifiers::DatasetMethods
  end

  self.autoid = opts[:autoid]
  self.columns = opts[:columns]
  self.fetch = opts[:fetch]
  self.numrows = opts[:numrows]
  extend(opts[:extend]) if opts[:extend]
  sqls
end

def columns(ds, sql, cs=@columns)
  case cs
  when Array
    unless cs.empty?
      if cs.all?{|c| c.is_a?(Symbol)}
        ds.columns(*cs)
      else
        columns(ds, sql, @mutex.synchronize{cs.shift})
      end
    end
  when Proc
    ds.columns(*cs.call(sql))
  when nil
    # nothing
  else
    raise Error, "Invalid @columns attribute: #{cs.inspect}"
  end
end

def dataset_class_default
  Dataset
end

def quote_identifiers_default
  shared_adapter? ? super : false
end

def shared_adapter?
@shared_adapter end end class Dataset < Sequel::Dataset # The autoid setting for this dataset, if it has been overridden def autoid cache_get(:_autoid) || @opts[:autoid] end # The fetch setting for this dataset, if it has been overridden def _fetch cache_get(:_fetch) || @opts[:fetch] end # The numrows setting for this dataset, if it has been overridden def numrows cache_get(:_numrows) || @opts[:numrows] end # If arguments are provided, use them to set the columns # for this dataset and return self. Otherwise, use the # default Sequel behavior and return the columns. def columns(*cs) if cs.empty? super else self.columns = cs self end end def fetch_rows(sql, &block) execute(sql, &block) end def quote_identifiers? @opts.fetch(:quote_identifiers, db.send(:quote_identifiers_default)) end # Return cloned dataset with the autoid setting modified def with_autoid(autoid) clone(:autoid=>autoid) end # Return cloned dataset with the fetch setting modified def with_fetch(fetch) clone(:fetch=>fetch) end # Return cloned dataset with the numrows setting modified def with_numrows(numrows) clone(:numrows=>numrows) end private def execute(sql, opts=OPTS, &block) super(sql, opts.merge(:dataset=>self), &block) end def execute_dui(sql, opts=OPTS, &block) super(sql, opts.merge(:dataset=>self), &block) end def execute_insert(sql, opts=OPTS, &block) super(sql, opts.merge(:dataset=>self), &block) end def non_sql_option?(key) super || key == :fetch || key == :numrows || key == :autoid end end end end sequel-5.63.0/lib/sequel/adapters/mysql.rb000066400000000000000000000324111434214120600204420ustar00rootroot00000000000000# frozen-string-literal: true require 'mysql' raise(LoadError, "require 'mysql' did not define Mysql::CLIENT_MULTI_RESULTS!, so it not supported. Please install the mysql or ruby-mysql gem.\n") unless defined?(Mysql::CLIENT_MULTI_RESULTS) require_relative 'utils/mysql_mysql2' require_relative 'utils/mysql_prepared_statements' module Sequel module MySQL boolean = Object.new def boolean.call(s) s.to_i != 0 end TYPE_TRANSLATOR_BOOLEAN = boolean.freeze integer = Object.new def integer.call(s) s.to_i end TYPE_TRANSLATOR_INTEGER = integer.freeze float = Object.new def float.call(s) s.to_f end # Hash with integer keys and callable values for converting MySQL types. MYSQL_TYPES = {} { [0, 246] => ::Kernel.method(:BigDecimal), [2, 3, 8, 9, 13, 247, 248] => integer, [4, 5] => float, [249, 250, 251, 252] => ::Sequel::SQL::Blob }.each do |k,v| k.each{|n| MYSQL_TYPES[n] = v} end MYSQL_TYPES.freeze class Database < Sequel::Database include Sequel::MySQL::DatabaseMethods include Sequel::MySQL::MysqlMysql2::DatabaseMethods include Sequel::MySQL::PreparedStatements::DatabaseMethods set_adapter_scheme :mysql # Hash of conversion procs for the current database attr_reader :conversion_procs # Whether to convert tinyint columns to bool for the current database attr_reader :convert_tinyint_to_bool # By default, Sequel raises an exception if in invalid date or time is used. # However, if this is set to nil or :nil, the adapter treats dates # like 0000-00-00 and times like 838:00:00 as nil values. If set to :string, # it returns the strings as is. attr_reader :convert_invalid_date_time # Connect to the database. In addition to the usual database options, # the following options have effect: # # :auto_is_null :: Set to true to use MySQL default behavior of having # a filter for an autoincrement column equals NULL to return the last # inserted row. 
# :charset :: Same as :encoding (:encoding takes precedence)
# :compress :: Set to false to not compress results from the server
# :config_default_group :: The default group to read from in
#                          the MySQL config file.
# :config_local_infile :: If provided, sets the Mysql::OPT_LOCAL_INFILE
#                         option on the connection with the given value.
# :connect_timeout :: Set the timeout in seconds before a connection
#                     attempt is abandoned.
# :encoding :: Set all the related character sets for this
#              connection (connection, client, database, server, and results).
# :read_timeout :: Set the timeout in seconds for reading back results
#                  to a query.
# :socket :: Use a unix socket file instead of connecting via TCP/IP.
# :timeout :: Set the timeout in seconds before the server will
#             disconnect this connection (a.k.a @@wait_timeout).
def connect(server)
  opts = server_opts(server)

  if Mysql.respond_to?(:init)
    conn = Mysql.init
    conn.options(Mysql::READ_DEFAULT_GROUP, opts[:config_default_group] || "client")
    conn.options(Mysql::OPT_LOCAL_INFILE, opts[:config_local_infile]) if opts.has_key?(:config_local_infile)
    if encoding = opts[:encoding] || opts[:charset]
      # Set encoding before connecting so that the mysql driver knows what
      # encoding we want to use, but this can be overridden by READ_DEFAULT_GROUP.
      conn.options(Mysql::SET_CHARSET_NAME, encoding)
    end
    if read_timeout = opts[:read_timeout] and defined? Mysql::OPT_READ_TIMEOUT
      conn.options(Mysql::OPT_READ_TIMEOUT, read_timeout)
    end
    if connect_timeout = opts[:connect_timeout] and defined? Mysql::OPT_CONNECT_TIMEOUT
      conn.options(Mysql::OPT_CONNECT_TIMEOUT, connect_timeout)
    end
  else
    # ruby-mysql 3 API
    conn = Mysql.new
    # no support for default group
    conn.local_infile = opts[:config_local_infile] if opts.has_key?(:config_local_infile)
    if encoding = opts[:encoding] || opts[:charset]
      conn.charset = encoding
    end
    if read_timeout = opts[:read_timeout]
      conn.read_timeout = read_timeout
    end
    if connect_timeout = opts[:connect_timeout]
      conn.connect_timeout = connect_timeout
    end
    conn.singleton_class.class_eval do
      alias real_connect connect
      alias use_result store_result
    end
  end

  conn.ssl_set(opts[:sslkey], opts[:sslcert], opts[:sslca], opts[:sslcapath], opts[:sslcipher]) if opts[:sslca] || opts[:sslkey]
  conn.real_connect(
    opts[:host] || 'localhost',
    opts[:user],
    opts[:password],
    opts[:database],
    (opts[:port].to_i if opts[:port]),
    opts[:socket],
    Mysql::CLIENT_MULTI_RESULTS +
    Mysql::CLIENT_MULTI_STATEMENTS +
    (opts[:compress] == false ? 0 : Mysql::CLIENT_COMPRESS)
  )
  sqls = mysql_connection_setting_sqls

  # Set encoding a slightly different way after connecting,
  # in case the READ_DEFAULT_GROUP overrode the provided encoding.
  # Doesn't work across implicit reconnects, but Sequel doesn't turn on
  # that feature.
  sqls.unshift("SET NAMES #{literal(encoding.to_s)}") if encoding

  sqls.each{|sql| log_connection_yield(sql, conn){conn.query(sql)}}

  add_prepared_statements_cache(conn)
  conn
end

def disconnect_connection(c)
  c.close
rescue Mysql::Error
  nil
end

# Modify the type translators for the date, time, and timestamp types
# depending on the value given.
def convert_invalid_date_time=(v)
  m0 = ::Sequel.method(:string_to_time)
  @conversion_procs[11] = (v != false) ? lambda{|val| convert_date_time(val, &m0)} : m0
  m1 = ::Sequel.method(:string_to_date)
  m = (v != false) ? lambda{|val| convert_date_time(val, &m1)} : m1
  [10, 14].each{|i| @conversion_procs[i] = m}
  m2 = method(:to_application_timestamp)
  m = (v != false) ?
lambda{|val| convert_date_time(val, &m2)} : m2 [7, 12].each{|i| @conversion_procs[i] = m} @convert_invalid_date_time = v end # Modify the type translator used for the tinyint type based # on the value given. def convert_tinyint_to_bool=(v) @conversion_procs[1] = v ? TYPE_TRANSLATOR_BOOLEAN : TYPE_TRANSLATOR_INTEGER @convert_tinyint_to_bool = v end def execute_dui(sql, opts=OPTS) execute(sql, opts){|c| return affected_rows(c)} end def execute_insert(sql, opts=OPTS) execute(sql, opts){|c| return c.insert_id} end def freeze server_version @conversion_procs.freeze super end private # Execute the given SQL on the given connection. If the :type # option is :select, yield the result of the query, otherwise # yield the connection if a block is given. def _execute(conn, sql, opts) r = log_connection_yield((log_sql = opts[:log_sql]) ? sql + log_sql : sql, conn){conn.query(sql)} if opts[:type] == :select yield r if r elsif defined?(yield) yield conn end if conn.respond_to?(:more_results?) while conn.more_results? do if r r.free r = nil end begin conn.next_result r = conn.use_result rescue Mysql::Error => e raise_error(e, :disconnect=>true) if MYSQL_DATABASE_DISCONNECT_ERRORS.match(e.message) break end yield r if opts[:type] == :select end end rescue Mysql::Error => e raise_error(e) ensure r.free if r # Use up all results to avoid a commands out of sync message. if conn.respond_to?(:more_results?) while conn.more_results? do begin conn.next_result r = conn.use_result rescue Mysql::Error => e raise_error(e, :disconnect=>true) if MYSQL_DATABASE_DISCONNECT_ERRORS.match(e.message) break end r.free if r end end end def adapter_initialize @conversion_procs = MYSQL_TYPES.dup self.convert_tinyint_to_bool = true self.convert_invalid_date_time = false end # Try to get an accurate number of rows matched using the query # info. Fall back to affected_rows if there was no match, but # that may be inaccurate. def affected_rows(conn) s = conn.info if s && s =~ /Rows matched:\s+(\d+)\s+Changed:\s+\d+\s+Warnings:\s+\d+/ $1.to_i else conn.affected_rows end end # MySQL connections use the query method to execute SQL without a result def connection_execute_method :query end # If convert_invalid_date_time is nil, :nil, or :string and # the conversion raises an InvalidValue exception, return v # if :string and nil otherwise. def convert_date_time(v) yield v rescue InvalidValue case @convert_invalid_date_time when nil, :nil nil when :string v else raise end end def database_error_classes [Mysql::Error] end def database_exception_sqlstate(exception, opts) exception.sqlstate end def dataset_class_default Dataset end def disconnect_error?(e, opts) super || (e.is_a?(::Mysql::Error) && MYSQL_DATABASE_DISCONNECT_ERRORS.match(e.message)) end # Convert tinyint(1) type to boolean if convert_tinyint_to_bool is true def schema_column_type(db_type) convert_tinyint_to_bool && db_type =~ /\Atinyint\(1\)/ ? :boolean : super end end class Dataset < Sequel::Dataset include Sequel::MySQL::DatasetMethods include Sequel::MySQL::MysqlMysql2::DatasetMethods include Sequel::MySQL::PreparedStatements::DatasetMethods # Yield all rows matching this dataset. If the dataset is set to # split multiple statements, yield arrays of hashes one per statement # instead of yielding results for all statements as hashes. def fetch_rows(sql) execute(sql) do |r| i = -1 cps = db.conversion_procs cols = r.fetch_fields.map do |f| # Pretend tinyint is another integer type if its length is not 1, to # avoid casting to boolean if convert_tinyint_to_bool is set. 
          type_proc = f.type == 1 && cast_tinyint_integer?(f) ? cps[2] : cps[f.type]
          [output_identifier(f.name), type_proc, i+=1]
        end
        self.columns = cols.map(&:first)
        if opts[:split_multiple_result_sets]
          s = []
          yield_rows(r, cols){|h| s << h}
          yield s
        else
          yield_rows(r, cols){|h| yield h}
        end
      end
      self
    end

      # Don't allow graphing a dataset that splits multiple statements
      def graph(*)
        raise(Error, "Can't graph a dataset that splits multiple result sets") if opts[:split_multiple_result_sets]
        super
      end

      # Makes each yield arrays of rows, with each array containing the rows
      # for a given result set, so you can submit SQL with multiple statements
      # and easily determine which statement returned which results. Does not
      # work with graphing.
      #
      # Modifies the row_proc of the returned dataset so that it still works
      # as expected (running on the hashes instead of on the arrays of hashes).
      # If you modify the row_proc afterward, note that it will receive an array
      # of hashes instead of a hash.
      def split_multiple_result_sets
        raise(Error, "Can't split multiple statements on a graphed dataset") if opts[:graph]
        ds = clone(:split_multiple_result_sets=>true)
        ds = ds.with_row_proc(proc{|x| x.map{|h| row_proc.call(h)}}) if row_proc
        ds
      end

      private

      # Whether a tinyint field should be cast as an integer. By default,
      # casts to integer if the field length is not 1. Can be overwritten
      # to make tinyint casting dataset dependent.
      def cast_tinyint_integer?(field)
        field.length != 1
      end

      def execute(sql, opts=OPTS)
        opts = Hash[opts]
        opts[:type] = :select
        super
      end

      # Handle correct quoting of strings using ::Mysql.quote.
      def literal_string_append(sql, v)
        sql << "'" << ::Mysql.quote(v) << "'"
      end

      # Yield each row of the given result set r with columns cols
      # as a hash with symbol keys
      def yield_rows(r, cols)
        while row = r.fetch_row
          h = {}
          cols.each{|n, p, i| v = row[i]; h[n] = (v && p) ? p.call(v) : v}
          yield h
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/adapters/mysql2.rb000066400000000000000000000230071434214120600205250ustar00rootroot00000000000000
# frozen-string-literal: true

require 'mysql2'
require_relative 'utils/mysql_mysql2'

module Sequel
  module Mysql2
    NativePreparedStatements = if ::Mysql2::VERSION >= '0.4'
      true
    else
      require_relative 'utils/mysql_prepared_statements'
      false
    end

    class Database < Sequel::Database
      include Sequel::MySQL::DatabaseMethods
      include Sequel::MySQL::MysqlMysql2::DatabaseMethods
      include Sequel::MySQL::PreparedStatements::DatabaseMethods unless NativePreparedStatements

      set_adapter_scheme :mysql2

      # Whether to convert tinyint columns to bool for this database
      attr_accessor :convert_tinyint_to_bool

      # Connect to the database. In addition to the usual database options,
      # the following options have effect:
      #
      # :auto_is_null :: Set to true to use MySQL's default behavior, where
      #   filtering an autoincrement column for NULL returns the last
      #   inserted row.
      # :charset :: Same as :encoding (:encoding takes precedence)
      # :encoding :: Set all the related character sets for this
      #   connection (connection, client, database, server, and results).
      #
      # The options hash is also passed to mysql2, and can include mysql2
      # options such as :local_infile.
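      #
      # A minimal connection sketch (the host, credentials, and database
      # name here are hypothetical placeholders, not values from this code):
      #
      #   DB = Sequel.connect(adapter: 'mysql2', host: 'localhost',
      #     user: 'testuser', password: 'secret', database: 'sequel_test',
      #     encoding: 'utf8mb4')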
def connect(server) opts = server_opts(server) opts[:username] ||= opts.delete(:user) opts[:flags] ||= 0 opts[:flags] |= ::Mysql2::Client::FOUND_ROWS if ::Mysql2::Client.const_defined?(:FOUND_ROWS) opts[:encoding] ||= opts[:charset] conn = ::Mysql2::Client.new(opts) conn.query_options.merge!(:symbolize_keys=>true, :cache_rows=>false) if NativePreparedStatements conn.instance_variable_set(:@sequel_default_query_options, conn.query_options.dup) end sqls = mysql_connection_setting_sqls # Set encoding a slightly different way after connecting, # in case the READ_DEFAULT_GROUP overrode the provided encoding. # Doesn't work across implicit reconnects, but Sequel doesn't turn on # that feature. if encoding = opts[:encoding] sqls.unshift("SET NAMES #{conn.escape(encoding.to_s)}") end sqls.each{|sql| log_connection_yield(sql, conn){conn.query(sql)}} add_prepared_statements_cache(conn) conn end def execute_dui(sql, opts=OPTS) execute(sql, opts){|c| return c.affected_rows} end def execute_insert(sql, opts=OPTS) execute(sql, opts){|c| return c.last_id} end def freeze server_version super end # Return the version of the MySQL server to which we are connecting. def server_version(_server=nil) @server_version ||= super() end private if NativePreparedStatements # Use a native mysql2 prepared statement to implement prepared statements. def execute_prepared_statement(ps_name, opts, &block) if ps_name.is_a?(Sequel::Dataset::ArgumentMapper) ps = ps_name ps_name = ps.prepared_statement_name else ps = prepared_statement(ps_name) end sql = ps.prepared_sql synchronize(opts[:server]) do |conn| stmt, ps_sql = conn.prepared_statements[ps_name] unless ps_sql == sql stmt.close if stmt stmt = log_connection_yield("Preparing #{ps_name}: #{sql}", conn){conn.prepare(sql)} conn.prepared_statements[ps_name] = [stmt, sql] end opts = Hash[opts] opts[:sql] = "Executing #{ps_name || sql}" if ps_name && ps.log_sql opts[:log_sql] = " (#{sql})" end _execute(conn, stmt, opts, &block) end end end # Execute the given SQL on the given connection. If the :type # option is :select, yield the result of the query, otherwise # yield the connection if a block is given. def _execute(conn, sql, opts) stream = opts[:stream] if NativePreparedStatements if args = opts[:arguments] args = args.map{|arg| bound_variable_value(arg)} end case sql when ::Mysql2::Statement stmt = sql sql = opts[:sql] || '' when Dataset sql = sql.sql close_stmt = true stmt = conn.prepare(sql) end end r = log_connection_yield((log_sql = opts[:log_sql]) ? sql + log_sql : sql, conn, args) do if stmt conn.query_options.merge!(:cache_rows=>true, :database_timezone => timezone, :application_timezone => Sequel.application_timezone, :stream=>stream, :cast_booleans=>convert_tinyint_to_bool) stmt.execute(*args) else conn.query(sql, :database_timezone => timezone, :application_timezone => Sequel.application_timezone, :stream=>stream) end end if opts[:type] == :select if r if stream begin r2 = yield r ensure # If r2 is nil, it means the block did not exit normally, # so the rest of the results must be drained to prevent # "commands out of sync" errors. r.each{} unless r2 end else yield r end end elsif defined?(yield) yield conn end rescue ::Mysql2::Error => e raise_error(e) ensure if stmt conn.query_options.replace(conn.instance_variable_get(:@sequel_default_query_options)) stmt.close if close_stmt end end # Set the convert_tinyint_to_bool setting based on the default value. 
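      # As a sketch of the public accessor this default feeds: an
      # application that wants tinyint(1) columns returned as integers
      # rather than booleans could do (DB assumed to be a Database object
      # using this adapter):
      #
      #   DB.convert_tinyint_to_bool = false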
def adapter_initialize self.convert_tinyint_to_bool = true end if NativePreparedStatements # Handle bound variable arguments that Mysql2 does not handle natively. def bound_variable_value(arg) case arg when true 1 when false 0 when DateTime, Time literal(arg)[1...-1] else arg end end end def connection_execute_method :query end def database_error_classes [::Mysql2::Error] end def database_exception_sqlstate(exception, opts) state = exception.sql_state state unless state == 'HY000' end def dataset_class_default Dataset end # If a connection object is available, try pinging it. Otherwise, if the # error is a Mysql2::Error, check the SQL state and exception message for # disconnects. def disconnect_error?(e, opts) super || ((conn = opts[:conn]) && !conn.ping) || (e.is_a?(::Mysql2::Error) && (e.sql_state =~ /\A08/ || MYSQL_DATABASE_DISCONNECT_ERRORS.match(e.message))) end # Convert tinyint(1) type to boolean if convert_tinyint_to_bool is true def schema_column_type(db_type) convert_tinyint_to_bool && db_type =~ /\Atinyint\(1\)/ ? :boolean : super end end class Dataset < Sequel::Dataset include Sequel::MySQL::DatasetMethods include Sequel::MySQL::MysqlMysql2::DatasetMethods include Sequel::MySQL::PreparedStatements::DatasetMethods unless NativePreparedStatements STREAMING_SUPPORTED = ::Mysql2::VERSION >= '0.3.12' if NativePreparedStatements PreparedStatementMethods = prepared_statements_module( "sql = self; opts = Hash[opts]; opts[:arguments] = bind_arguments", Sequel::Dataset::UnnumberedArgumentMapper, %w"execute execute_dui execute_insert") end def fetch_rows(sql) execute(sql) do |r| self.columns = r.fields.map!{|c| output_identifier(c.to_s)} r.each(:cast_booleans=>convert_tinyint_to_bool?){|h| yield h} end self end # Use streaming to implement paging if Mysql2 supports it and # it hasn't been disabled. def paged_each(opts=OPTS, &block) if STREAMING_SUPPORTED && opts[:stream] != false unless defined?(yield) return enum_for(:paged_each, opts) end stream.each(&block) else super end end # Return a clone of the dataset that will stream rows when iterating # over the result set, so it can handle large datasets that # won't fit in memory (Requires mysql 0.3.12+ to have an effect). def stream clone(:stream=>true) end private # Whether to cast tinyint(1) columns to integer instead of boolean. # By default, uses the database's convert_tinyint_to_bool # setting. Exists for compatibility with the mysql adapter. def convert_tinyint_to_bool? @db.convert_tinyint_to_bool end def execute(sql, opts=OPTS) opts = Hash[opts] opts[:type] = :select opts[:stream] = @opts[:stream] super end if NativePreparedStatements def bound_variable_modules [PreparedStatementMethods] end def prepared_statement_modules [PreparedStatementMethods] end end # Handle correct quoting of strings using ::Mysql2::Client#escape. def literal_string_append(sql, v) sql << "'" << db.synchronize(@opts[:server]){|c| c.escape(v)} << "'" end end end end sequel-5.63.0/lib/sequel/adapters/odbc.rb000066400000000000000000000076471434214120600202210ustar00rootroot00000000000000# frozen-string-literal: true require 'odbc' module Sequel module ODBC # Contains procs keyed on subadapter type that extend the # given database object so it supports the correct database type. 
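    # For example, the odbc/db2.rb file later in this archive registers
    # its subadapter like:
    #
    #   Sequel.synchronize do
    #     Sequel::ODBC::DATABASE_SETUP[:db2] = proc do |db|
    #       db.extend ::Sequel::DB2::DatabaseMethods
    #       db.extend_datasets ::Sequel::DB2::DatasetMethods
    #     end
    #   end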
DATABASE_SETUP = {} class Database < Sequel::Database set_adapter_scheme :odbc def connect(server) opts = server_opts(server) conn = if opts.include?(:drvconnect) ::ODBC::Database.new.drvconnect(opts[:drvconnect]) elsif opts.include?(:driver) drv = ::ODBC::Driver.new drv.name = 'Sequel ODBC Driver130' opts.each do |param, value| if :driver == param && value !~ /\A\{.+\}\z/ value = "{#{value}}" end drv.attrs[param.to_s.upcase] = value.to_s end ::ODBC::Database.new.drvconnect(drv) else ::ODBC::connect(opts[:database], opts[:user], opts[:password]) end conn.autocommit = true conn end def disconnect_connection(c) c.disconnect end def execute(sql, opts=OPTS) synchronize(opts[:server]) do |conn| begin r = log_connection_yield(sql, conn){conn.run(sql)} yield(r) if defined?(yield) rescue ::ODBC::Error, ArgumentError => e raise_error(e) ensure r.drop if r end nil end end def execute_dui(sql, opts=OPTS) synchronize(opts[:server]) do |conn| begin log_connection_yield(sql, conn){conn.do(sql)} rescue ::ODBC::Error, ArgumentError => e raise_error(e) end end end private def adapter_initialize if (db_type = @opts[:db_type]) && (prok = Sequel::Database.load_adapter(db_type.to_sym, :map=>DATABASE_SETUP, :subdir=>'odbc')) prok.call(self) end end def connection_execute_method :do end def database_error_classes [::ODBC::Error] end def dataset_class_default Dataset end def disconnect_error?(e, opts) super || (e.is_a?(::ODBC::Error) && /\A08S01/.match(e.message)) end end class Dataset < Sequel::Dataset def fetch_rows(sql) execute(sql) do |s| i = -1 cols = s.columns(true).map{|c| [output_identifier(c.name), c.type, i+=1]} columns = cols.map{|c| c[0]} self.columns = columns s.each do |row| hash = {} cols.each do |n,t,j| v = row[j] # We can assume v is not false, so this shouldn't convert false to nil. hash[n] = (convert_odbc_value(v, t) if v) end yield hash end end self end private def convert_odbc_value(v, t) # When fetching a result set, the Ruby ODBC driver converts all ODBC # SQL types to an equivalent Ruby type; with the exception of # SQL_TYPE_DATE, SQL_TYPE_TIME and SQL_TYPE_TIMESTAMP. # # The conversions below are consistent with the mappings in # ODBCColumn#mapSqlTypeToGenericType and Column#klass. 
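        # For instance, an ::ODBC::Date for 2022-12-01 (values illustrative)
        # is converted by the branch below to Date.new(2022, 12, 1).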
case v when ::ODBC::TimeStamp db.to_application_timestamp([v.year, v.month, v.day, v.hour, v.minute, v.second, v.fraction]) when ::ODBC::Time Sequel::SQLTime.create(v.hour, v.minute, v.second) when ::ODBC::Date Date.new(v.year, v.month, v.day) else if t == ::ODBC::SQL_BIT v == 1 else v end end end def default_timestamp_format "{ts '%Y-%m-%d %H:%M:%S'}" end def literal_date(v) v.strftime("{d '%Y-%m-%d'}") end def literal_false '0' end def literal_true '1' end end end end sequel-5.63.0/lib/sequel/adapters/odbc/000077500000000000000000000000001434214120600176565ustar00rootroot00000000000000sequel-5.63.0/lib/sequel/adapters/odbc/db2.rb000066400000000000000000000003701434214120600206520ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../shared/db2' Sequel.synchronize do Sequel::ODBC::DATABASE_SETUP[:db2] = proc do |db| db.extend ::Sequel::DB2::DatabaseMethods db.extend_datasets ::Sequel::DB2::DatasetMethods end end sequel-5.63.0/lib/sequel/adapters/odbc/mssql.rb000066400000000000000000000031401434214120600213400ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../shared/mssql' module Sequel module ODBC Sequel.synchronize do DATABASE_SETUP[:mssql] = proc do |db| db.extend Sequel::ODBC::MSSQL::DatabaseMethods db.dataset_class = Sequel::ODBC::MSSQL::Dataset db.send(:set_mssql_unicode_strings) end end module MSSQL module DatabaseMethods include Sequel::MSSQL::DatabaseMethods def execute_insert(sql, opts=OPTS) synchronize(opts[:server]) do |conn| begin log_connection_yield(sql, conn){conn.do(sql)} begin last_insert_id_sql = 'SELECT SCOPE_IDENTITY()' s = log_connection_yield(last_insert_id_sql, conn){conn.run(last_insert_id_sql)} if (rows = s.fetch_all) and (row = rows.first) and (v = row.first) Integer(v) end ensure s.drop if s end rescue ::ODBC::Error => e raise_error(e) end end end end class Dataset < ODBC::Dataset include Sequel::MSSQL::DatasetMethods private # Use ODBC format, not Microsoft format, as the ODBC layer does # some translation, but allow for millisecond precision. def default_timestamp_format "{ts '%Y-%m-%d %H:%M:%S%N'}" end # Use ODBC format, not Microsoft format, as the ODBC layer does # some translation. def literal_date(v) v.strftime("{d '%Y-%m-%d'}") end end end end end sequel-5.63.0/lib/sequel/adapters/odbc/oracle.rb000066400000000000000000000004041434214120600214460ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../shared/oracle' Sequel.synchronize do Sequel::ODBC::DATABASE_SETUP[:oracle] = proc do |db| db.extend ::Sequel::Oracle::DatabaseMethods db.extend_datasets ::Sequel::Oracle::DatasetMethods end end sequel-5.63.0/lib/sequel/adapters/oracle.rb000066400000000000000000000302051434214120600205410ustar00rootroot00000000000000# frozen-string-literal: true require 'oci8' require_relative 'shared/oracle' module Sequel module Oracle class Database < Sequel::Database include DatabaseMethods set_adapter_scheme :oracle # ORA-00028: your session has been killed # ORA-01012: not logged on # ORA-02396: exceeded maximum idle time, please connect again # ORA-03113: end-of-file on communication channel # ORA-03114: not connected to ORACLE # ORA-03135: connection lost contact CONNECTION_ERROR_CODES = [ 28, 1012, 2396, 3113, 3114, 3135 ].freeze ORACLE_TYPES = { :blob=>lambda{|b| Sequel::SQL::Blob.new(b.read)}, :clob=>:read.to_proc }.freeze # Hash of conversion procs for this database. attr_reader :conversion_procs def connect(server) opts = server_opts(server) if opts[:database] dbname = opts[:host] ? 
\ "//#{opts[:host]}#{":#{opts[:port]}" if opts[:port]}/#{opts[:database]}" : opts[:database] else dbname = opts[:host] end conn = OCI8.new(opts[:user], opts[:password], dbname, opts[:privilege]) if prefetch_rows = opts.fetch(:prefetch_rows, 100) conn.prefetch_rows = typecast_value_integer(prefetch_rows) end conn.autocommit = true conn.non_blocking = true # The ruby-oci8 gem which retrieves oracle columns with a type of # DATE, TIMESTAMP, TIMESTAMP WITH TIME ZONE is complex based on the # ruby version and Oracle version (9 or later) # In the now standard case of Oracle 9 or later, the timezone # is determined by the Oracle session timezone. Thus if the user # requests Sequel provide UTC timezone to the application, # we need to alter the session timezone to be UTC if Sequel.application_timezone == :utc conn.exec("ALTER SESSION SET TIME_ZONE='-00:00'") end class << conn attr_reader :prepared_statements end conn.instance_variable_set(:@prepared_statements, {}) conn end def disconnect_connection(c) c.logoff rescue OCIException nil end def execute(sql, opts=OPTS, &block) _execute(nil, sql, opts, &block) end def execute_insert(sql, opts=OPTS) _execute(:insert, sql, opts) end def freeze @conversion_procs.freeze super end private def _execute(type, sql, opts=OPTS, &block) synchronize(opts[:server]) do |conn| begin return execute_prepared_statement(conn, type, sql, opts, &block) if sql.is_a?(Symbol) if args = opts[:arguments] r = conn.parse(sql) args = cursor_bind_params(conn, r, args) nr = log_connection_yield(sql, conn, args){r.exec} r = nr unless defined?(yield) else r = log_connection_yield(sql, conn){conn.exec(sql)} end if defined?(yield) yield(r) elsif type == :insert last_insert_id(conn, opts) else r end rescue OCIException, RuntimeError => e # ruby-oci8 is naughty and raises strings in some places raise_error(e) ensure r.close if r.is_a?(::OCI8::Cursor) end end end def adapter_initialize @autosequence = @opts[:autosequence] @primary_key_sequences = {} @conversion_procs = ORACLE_TYPES.dup end PS_TYPES = {'string'=>String, 'integer'=>Integer, 'float'=>Float, 'decimal'=>Float, 'date'=>Time, 'datetime'=>Time, 'time'=>Time, 'boolean'=>String, 'blob'=>OCI8::BLOB, 'clob'=>OCI8::CLOB}.freeze def cursor_bind_params(conn, cursor, args) i = 0 args.map do |arg, type| i += 1 case arg when true arg = 'Y' when false arg = 'N' when BigDecimal arg = arg.to_f when ::Sequel::SQL::Blob arg = ::OCI8::BLOB.new(conn, arg) when String if type == 'clob' arg = ::OCI8::CLOB.new(conn, arg) end end cursor.bind_param(i, arg, PS_TYPES[type] || arg.class) arg end end def connection_execute_method :exec end def database_error_classes [OCIException, RuntimeError] end def database_specific_error_class(exception, opts) return super unless exception.respond_to?(:code) case exception.code when 1400, 1407 NotNullConstraintViolation when 1 UniqueConstraintViolation when 2291, 2292 ForeignKeyConstraintViolation when 2290 CheckConstraintViolation when 8177 SerializationFailure else super end end def dataset_class_default Dataset end def execute_prepared_statement(conn, type, name, opts) ps = prepared_statement(name) sql = ps.prepared_sql if cursora = conn.prepared_statements[name] cursor, cursor_sql = cursora if cursor_sql != sql cursor.close cursor = nil end end unless cursor cursor = log_connection_yield("PREPARE #{name}: #{sql}", conn){conn.parse(sql)} conn.prepared_statements[name] = [cursor, sql] end args = cursor_bind_params(conn, cursor, opts[:arguments]) log_sql = "EXECUTE #{name}" if ps.log_sql log_sql += " (" log_sql << 
sql log_sql << ")" end r = log_connection_yield(log_sql, conn, args){cursor.exec} if defined?(yield) yield(cursor) elsif type == :insert last_insert_id(conn, opts) else r end end def last_insert_id(conn, opts) unless sequence = opts[:sequence] if t = opts[:table] sequence = sequence_for_table(t) end end if sequence sql = "SELECT #{literal(sequence)}.currval FROM dual" begin cursor = log_connection_yield(sql, conn){conn.exec(sql)} row = cursor.fetch row.each{|v| return (v.to_i if v)} rescue OCIError nil ensure cursor.close if cursor end end end def begin_transaction(conn, opts=OPTS) log_connection_yield('Transaction.begin', conn){conn.autocommit = false} set_transaction_isolation(conn, opts) end def commit_transaction(conn, opts=OPTS) log_connection_yield('Transaction.commit', conn){conn.commit} end def disconnect_error?(e, opts) super || (e.is_a?(::OCIError) && CONNECTION_ERROR_CODES.include?(e.code)) end def oracle_column_type(h) case h[:oci8_type] when :number case h[:scale] when 0 :integer when -127 :float else :decimal end when :date :datetime else schema_column_type(h[:db_type]) end end def remove_transaction(conn, committed) conn.autocommit = true ensure super end def rollback_transaction(conn, opts=OPTS) log_connection_yield('Transaction.rollback', conn){conn.rollback} end def schema_parse_table(table, opts=OPTS) schema, table = schema_and_table(table) schema ||= opts[:schema] schema_and_table = if ds = opts[:dataset] ds.literal(schema ? SQL::QualifiedIdentifier.new(schema, table) : SQL::Identifier.new(table)) else "#{"#{quote_identifier(schema)}." if schema}#{quote_identifier(table)}" end table_schema = [] m = output_identifier_meth(ds) im = input_identifier_meth(ds) # Primary Keys ds = metadata_dataset. from{[all_constraints.as(:cons), all_cons_columns.as(:cols)]}. where{{ cols[:table_name]=>im.call(table), cons[:constraint_type]=>'P', cons[:constraint_name]=>cols[:constraint_name], cons[:owner]=>cols[:owner]}} ds = ds.where{{cons[:owner]=>im.call(schema)}} if schema pks = ds.select_map{cols[:column_name]} # Default values defaults = begin metadata_dataset.from(:all_tab_cols). where(:table_name=>im.call(table)). as_hash(:column_name, :data_default) rescue DatabaseError {} end metadata = synchronize(opts[:server]) do |conn| begin log_connection_yield("Connection.describe_table", conn){conn.describe_table(schema_and_table)} rescue OCIError => e raise_error(e) end end metadata.columns.each do |column| h = { :primary_key => pks.include?(column.name), :default => defaults[column.name], :oci8_type => column.data_type, :db_type => column.type_string, :type_string => column.type_string, :charset_form => column.charset_form, :char_used => column.char_used?, :char_size => column.char_size, :data_size => column.data_size, :precision => column.precision, :scale => column.scale, :fsprecision => column.fsprecision, :lfprecision => column.lfprecision, :allow_null => column.nullable? } h[:type] = oracle_column_type(h) h[:auto_increment] = h[:type] == :integer if h[:primary_key] h[:max_length] = h[:char_size] if h[:type] == :string table_schema << [m.call(column.name), h] end table_schema end end class Dataset < Sequel::Dataset include DatasetMethods # Oracle already supports named bind arguments, so use directly. module ArgumentMapper include Sequel::Dataset::ArgumentMapper protected # Return a hash with the same values as the given hash, # but with the keys converted to strings. 
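          # A usage sketch of the surrounding prepared statement support
          # (dataset, statement name, and argument are hypothetical). The
          # :$i__integer form is the typed placeholder whose name and type
          # are split on "__" by prepared_arg below:
          #
          #   ps = DB[:items].where(id: :$i__integer).prepare(:select, :select_item)
          #   ps.call(i: 1)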
def map_to_prepared_args(bind_vars) prepared_args.map{|v, t| [bind_vars[v], t]} end private # Oracle uses a : before the name of the argument for named # arguments. def prepared_arg(k) y, type = k.to_s.split("__", 2) prepared_args << [y.to_sym, type] i = prepared_args.length LiteralString.new(":#{i}") end end BindArgumentMethods = prepared_statements_module(:bind, ArgumentMapper) PreparedStatementMethods = prepared_statements_module(:prepare, BindArgumentMethods) def fetch_rows(sql) execute(sql) do |cursor| cps = db.conversion_procs cols = columns = cursor.get_col_names.map{|c| output_identifier(c)} metadata = cursor.column_metadata cm = cols.zip(metadata).map{|c, m| [c, cps[m.data_type]]} self.columns = columns while r = cursor.fetch row = {} r.zip(cm).each{|v, (c, cp)| row[c] = ((v && cp) ? cp.call(v) : v)} yield row end end self end # Oracle requires type specifiers for placeholders, at least # if you ever want to use a nil/NULL value as the value for # the placeholder. def requires_placeholder_type_specifiers? true end private def literal_other_append(sql, v) case v when OraDate literal_append(sql, db.to_application_timestamp(v)) when OCI8::CLOB v.rewind literal_append(sql, v.read) else super end end def prepared_arg_placeholder ':' end def bound_variable_modules [BindArgumentMethods] end def prepared_statement_modules [PreparedStatementMethods] end end end end sequel-5.63.0/lib/sequel/adapters/postgres.rb000066400000000000000000001013611434214120600211440ustar00rootroot00000000000000# frozen-string-literal: true require_relative 'shared/postgres' begin require 'pg' # :nocov: Sequel::Postgres::PGError = PG::Error if defined?(PG::Error) Sequel::Postgres::PGconn = PG::Connection if defined?(PG::Connection) Sequel::Postgres::PGresult = PG::Result if defined?(PG::Result) # Work around postgres-pr 0.7.0+ which ships with a pg.rb file unless defined?(PG::Connection) raise LoadError unless defined?(PGconn::CONNECTION_OK) end if defined?(PG::TypeMapByClass) # :nocov: type_map = Sequel::Postgres::PG_QUERY_TYPE_MAP = PG::TypeMapByClass.new type_map[Integer] = PG::TextEncoder::Integer.new type_map[FalseClass] = type_map[TrueClass] = PG::TextEncoder::Boolean.new type_map[Float] = PG::TextEncoder::Float.new end Sequel::Postgres::USES_PG = true rescue LoadError => e # :nocov: begin require 'sequel/postgres-pr' rescue LoadError begin require 'postgres-pr/postgres-compat' rescue LoadError raise e end end Sequel::Postgres::USES_PG = false # :nocov: end module Sequel module Postgres # :nocov: if USES_PG # Whether the given sequel_pg version integer is supported. def self.sequel_pg_version_supported?(version) version >= 10617 end end # :nocov: # PGconn subclass for connection specific methods used with the # pg or postgres-pr driver. class Adapter < PGconn # The underlying exception classes to reraise as disconnect errors # instead of regular database errors. DISCONNECT_ERROR_CLASSES = [IOError, Errno::EPIPE, Errno::ECONNRESET] # :nocov: if defined?(::PG::ConnectionBad) # :nocov: DISCONNECT_ERROR_CLASSES << ::PG::ConnectionBad end DISCONNECT_ERROR_CLASSES.freeze disconnect_errors = [ 'ERROR: cached plan must not change result type', 'could not receive data from server', 'no connection to the server', 'connection not open', 'connection is closed', 'terminating connection due to administrator command', 'PQconsumeInput() ' ] # Since exception class based disconnect checking may not work, # also trying parsing the exception message to look for disconnect # errors. 
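      # For example, a PGError whose message begins with
      # "no connection to the server" (one of the strings listed above) is
      # reraised as a Sequel::DatabaseDisconnectError by
      # check_disconnect_errors below.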
DISCONNECT_ERROR_RE = /\A#{Regexp.union(disconnect_errors)}/ if USES_PG # Hash of prepared statements for this connection. Keys are # string names of the server side prepared statement, and values # are SQL strings. attr_reader :prepared_statements # :nocov: unless public_method_defined?(:async_exec_params) alias async_exec_params async_exec end elsif !const_defined?(:CONNECTION_OK) # Handle old postgres-pr # sequel-postgres-pr already implements this API CONNECTION_OK = -1 # Escape bytea values. Uses historical format instead of hex # format for maximum compatibility. def escape_bytea(str) str.gsub(/[\000-\037\047\134\177-\377]/n){|b| "\\#{sprintf('%o', b.each_byte{|x| break x}).rjust(3, '0')}"} end # Escape strings by doubling apostrophes. This only works if standard # conforming strings are used. def escape_string(str) str.gsub("'", "''") end alias finish close def async_exec(sql) PGresult.new(@conn.query(sql)) end def block(timeout=nil) end def status CONNECTION_OK end class PGresult < ::PGresult alias nfields num_fields alias ntuples num_tuples alias ftype type alias fname fieldname alias cmd_tuples cmdtuples end end # :nocov: # Raise a Sequel::DatabaseDisconnectError if a one of the disconnect # error classes is raised, or a PGError is raised and the connection # status cannot be determined or it is not OK. def check_disconnect_errors yield rescue *DISCONNECT_ERROR_CLASSES => e disconnect = true raise(Sequel.convert_exception_class(e, Sequel::DatabaseDisconnectError)) rescue PGError => e disconnect = false begin s = status rescue PGError disconnect = true end status_ok = (s == Adapter::CONNECTION_OK) disconnect ||= !status_ok disconnect ||= e.message =~ DISCONNECT_ERROR_RE disconnect ? raise(Sequel.convert_exception_class(e, Sequel::DatabaseDisconnectError)) : raise ensure block if status_ok && !disconnect end # Execute the given SQL with this connection. If a block is given, # yield the results, otherwise, return the number of changed rows. def execute(sql, args=nil) args = args.map{|v| @db.bound_variable_arg(v, self)} if args q = check_disconnect_errors{execute_query(sql, args)} begin defined?(yield) ? yield(q) : q.cmd_tuples ensure q.clear if q && q.respond_to?(:clear) end end private # Return the PGResult containing the query results. def execute_query(sql, args) @db.log_connection_yield(sql, self, args){args ? async_exec_params(sql, args) : async_exec(sql)} end end class Database < Sequel::Database include Sequel::Postgres::DatabaseMethods set_adapter_scheme :postgresql set_adapter_scheme :postgres # Convert given argument so that it can be used directly by pg. Currently, pg doesn't # handle fractional seconds in Time/DateTime or blobs with "\0". Only public for use by # the adapter, shouldn't be used by external code. def bound_variable_arg(arg, conn) case arg when Sequel::SQL::Blob {:value=>arg, :type=>17, :format=>1} # :nocov: # Not covered by tests as tests use pg_extended_date_support # extension, which has basically the same code. when DateTime, Time literal(arg) # :nocov: else arg end end # Call a procedure with the given name and arguments. Returns a hash if the procedure # returns a value, and nil otherwise. Example: # # DB.call_procedure(:foo, 1, 2) # # CALL foo(1, 2) def call_procedure(name, *args) dataset.send(:call_procedure, name, args) end # Connects to the database. 
In addition to the standard database # options, using the :encoding or :charset option changes the # client encoding for the connection, :connect_timeout is a # connection timeout in seconds, :sslmode sets whether postgres's # sslmode, and :notice_receiver handles server notices in a proc. # :connect_timeout, :driver_options, :sslmode, and :notice_receiver # are only supported if the pg driver is used. def connect(server) opts = server_opts(server) if USES_PG connection_params = { :host => opts[:host], :port => opts[:port], :dbname => opts[:database], :user => opts[:user], :password => opts[:password], :connect_timeout => opts[:connect_timeout] || 20, :sslmode => opts[:sslmode], :sslrootcert => opts[:sslrootcert] }.delete_if { |key, value| blank_object?(value) } # :nocov: connection_params.merge!(opts[:driver_options]) if opts[:driver_options] # :nocov: conn = Adapter.connect(opts[:conn_str] || connection_params) conn.instance_variable_set(:@prepared_statements, {}) if receiver = opts[:notice_receiver] conn.set_notice_receiver(&receiver) end # :nocov: if conn.respond_to?(:type_map_for_queries=) && defined?(PG_QUERY_TYPE_MAP) # :nocov: conn.type_map_for_queries = PG_QUERY_TYPE_MAP end # :nocov: else unless typecast_value_boolean(@opts.fetch(:force_standard_strings, true)) raise Error, "Cannot create connection using postgres-pr unless force_standard_strings is set" end conn = Adapter.connect( (opts[:host] unless blank_object?(opts[:host])), opts[:port] || 5432, nil, '', opts[:database], opts[:user], opts[:password] ) end # :nocov: conn.instance_variable_set(:@db, self) # :nocov: if encoding = opts[:encoding] || opts[:charset] if conn.respond_to?(:set_client_encoding) conn.set_client_encoding(encoding) else conn.async_exec("set client_encoding to '#{encoding}'") end end # :nocov: connection_configuration_sqls(opts).each{|sql| conn.execute(sql)} conn end # Always false, support was moved to pg_extended_date_support extension. # Needs to stay defined here so that sequel_pg works. def convert_infinite_timestamps false end # Enable pg_extended_date_support extension if symbol or string is given. def convert_infinite_timestamps=(v) case v when Symbol, String, true extension(:pg_extended_date_support) self.convert_infinite_timestamps = v end end def disconnect_connection(conn) conn.finish rescue PGError, IOError nil end # :nocov: if USES_PG && Object.const_defined?(:PG) && ::PG.const_defined?(:Constants) && ::PG::Constants.const_defined?(:PG_DIAG_SCHEMA_NAME) # :nocov: # Return a hash of information about the related PGError (or Sequel::DatabaseError that # wraps a PGError), with the following entries (any of which may be +nil+): # # :schema :: The schema name related to the error # :table :: The table name related to the error # :column :: the column name related to the error # :constraint :: The constraint name related to the error # :type :: The datatype name related to the error # :severity :: The severity of the error (e.g. 
"ERROR") # :sql_state :: The SQL state code related to the error # :message_primary :: A single line message related to the error # :message_detail :: Any detail supplementing the primary message # :message_hint :: Possible suggestion about how to fix the problem # :statement_position :: Character offset in statement submitted by client where error occurred (starting at 1) # :internal_position :: Character offset in internal statement where error occurred (starting at 1) # :internal_query :: Text of internally-generated statement where error occurred # :source_file :: PostgreSQL source file where the error occurred # :source_line :: Line number of PostgreSQL source file where the error occurred # :source_function :: Function in PostgreSQL source file where the error occurred # # This requires a PostgreSQL 9.3+ server and 9.3+ client library, # and ruby-pg 0.16.0+ to be supported. def error_info(e) e = e.wrapped_exception if e.is_a?(DatabaseError) r = e.result { :schema => r.error_field(::PG::PG_DIAG_SCHEMA_NAME), :table => r.error_field(::PG::PG_DIAG_TABLE_NAME), :column => r.error_field(::PG::PG_DIAG_COLUMN_NAME), :constraint => r.error_field(::PG::PG_DIAG_CONSTRAINT_NAME), :type => r.error_field(::PG::PG_DIAG_DATATYPE_NAME), :severity => r.error_field(::PG::PG_DIAG_SEVERITY), :sql_state => r.error_field(::PG::PG_DIAG_SQLSTATE), :message_primary => r.error_field(::PG::PG_DIAG_MESSAGE_PRIMARY), :message_detail => r.error_field(::PG::PG_DIAG_MESSAGE_DETAIL), :message_hint => r.error_field(::PG::PG_DIAG_MESSAGE_HINT), :statement_position => r.error_field(::PG::PG_DIAG_STATEMENT_POSITION), :internal_position => r.error_field(::PG::PG_DIAG_INTERNAL_POSITION), :internal_query => r.error_field(::PG::PG_DIAG_INTERNAL_QUERY), :source_file => r.error_field(::PG::PG_DIAG_SOURCE_FILE), :source_line => r.error_field(::PG::PG_DIAG_SOURCE_LINE), :source_function => r.error_field(::PG::PG_DIAG_SOURCE_FUNCTION) } end end def execute(sql, opts=OPTS, &block) synchronize(opts[:server]){|conn| check_database_errors{_execute(conn, sql, opts, &block)}} end # :nocov: if USES_PG # :nocov: # +copy_table+ uses PostgreSQL's +COPY TO STDOUT+ SQL statement to return formatted # results directly to the caller. This method is only supported if pg is the # underlying ruby driver. This method should only be called if you want # results returned to the client. If you are using +COPY TO+ # with a filename, you should just use +run+ instead of this method. # # The table argument supports the following types: # # String :: Uses the first argument directly as literal SQL. If you are using # a version of PostgreSQL before 9.0, you will probably want to # use a string if you are using any options at all, as the syntax # Sequel uses for options is only compatible with PostgreSQL 9.0+. # This should be the full COPY statement passed to PostgreSQL, not # just the SELECT query. If a string is given, the :format and # :options options are ignored. # Dataset :: Uses a query instead of a table name when copying. # other :: Uses a table name (usually a symbol) when copying. # # The following options are respected: # # :format :: The format to use. text is the default, so this should be :csv or :binary. # :options :: An options SQL string to use, which should contain comma separated options. # :server :: The server on which to run the query. # # If a block is provided, the method continually yields to the block, one yield # per row. If a block is not provided, a single string is returned with all # of the data. 
def copy_table(table, opts=OPTS) synchronize(opts[:server]) do |conn| conn.execute(copy_table_sql(table, opts)) begin if defined?(yield) while buf = conn.get_copy_data yield buf end b = nil else b = String.new b << buf while buf = conn.get_copy_data end res = conn.get_last_result if !res || res.result_status != 1 raise PG::NotAllCopyDataRetrieved, "Not all COPY data retrieved" end b rescue => e raise_error(e, :disconnect=>true) ensure if buf && !e raise DatabaseDisconnectError, "disconnecting as a partial COPY may leave the connection in an unusable state" end end end end # +copy_into+ uses PostgreSQL's +COPY FROM STDIN+ SQL statement to do very fast inserts # into a table using input preformatting in either CSV or PostgreSQL text format. # This method is only supported if pg 0.14.0+ is the underlying ruby driver. # This method should only be called if you want # results returned to the client. If you are using +COPY FROM+ # with a filename, you should just use +run+ instead of this method. # # The following options are respected: # # :columns :: The columns to insert into, with the same order as the columns in the # input data. If this isn't given, uses all columns in the table. # :data :: The data to copy to PostgreSQL, which should already be in CSV or PostgreSQL # text format. This can be either a string, or any object that responds to # each and yields string. # :format :: The format to use. text is the default, so this should be :csv or :binary. # :options :: An options SQL string to use, which should contain comma separated options. # :server :: The server on which to run the query. # # If a block is provided and :data option is not, this will yield to the block repeatedly. # The block should return a string, or nil to signal that it is finished. def copy_into(table, opts=OPTS) data = opts[:data] data = Array(data) if data.is_a?(String) if defined?(yield) && data raise Error, "Cannot provide both a :data option and a block to copy_into" elsif !defined?(yield) && !data raise Error, "Must provide either a :data option or a block to copy_into" end synchronize(opts[:server]) do |conn| conn.execute(copy_into_sql(table, opts)) begin if defined?(yield) while buf = yield conn.put_copy_data(buf) end else data.each{|buff| conn.put_copy_data(buff)} end rescue Exception => e conn.put_copy_end("ruby exception occurred while copying data into PostgreSQL") ensure conn.put_copy_end unless e while res = conn.get_result raise e if e check_database_errors{res.check} end end end end # Listens on the given channel (or multiple channels if channel is an array), waiting for notifications. # After a notification is received, or the timeout has passed, stops listening to the channel. Options: # # :after_listen :: An object that responds to +call+ that is called with the underlying connection after the LISTEN # statement is sent, but before the connection starts waiting for notifications. # :loop :: Whether to continually wait for notifications, instead of just waiting for a single # notification. If this option is given, a block must be provided. If this object responds to +call+, it is # called with the underlying connection after each notification is received (after the block is called). # If a :timeout option is used, and a callable object is given, the object will also be called if the # timeout expires. If :loop is used and you want to stop listening, you can either break from inside the # block given to #listen, or you can throw :stop from inside the :loop object's call method or the block. 
# :server :: The server on which to listen, if the sharding support is being used. # :timeout :: How long to wait for a notification, in seconds (can provide a float value for fractional seconds). # If this object responds to +call+, it will be called and should return the number of seconds to wait. # If the loop option is also specified, the object will be called on each iteration to obtain a new # timeout value. If not given or nil, waits indefinitely. # # This method is only supported if pg is used as the underlying ruby driver. It returns the # channel the notification was sent to (as a string), unless :loop was used, in which case it returns nil. # If a block is given, it is yielded 3 arguments: # * the channel the notification was sent to (as a string) # * the backend pid of the notifier (as an integer), # * and the payload of the notification (as a string or nil). def listen(channels, opts=OPTS, &block) check_database_errors do synchronize(opts[:server]) do |conn| begin channels = Array(channels) channels.each do |channel| sql = "LISTEN ".dup dataset.send(:identifier_append, sql, channel) conn.execute(sql) end opts[:after_listen].call(conn) if opts[:after_listen] timeout = opts[:timeout] if timeout timeout_block = timeout.respond_to?(:call) ? timeout : proc{timeout} end if l = opts[:loop] raise Error, 'calling #listen with :loop requires a block' unless block loop_call = l.respond_to?(:call) catch(:stop) do while true t = timeout_block ? [timeout_block.call] : [] conn.wait_for_notify(*t, &block) l.call(conn) if loop_call end end nil else t = timeout_block ? [timeout_block.call] : [] conn.wait_for_notify(*t, &block) end ensure conn.execute("UNLISTEN *") end end end end end private # Execute the given SQL string or prepared statement on the connection object. def _execute(conn, sql, opts, &block) if sql.is_a?(Symbol) execute_prepared_statement(conn, sql, opts, &block) else conn.execute(sql, opts[:arguments], &block) end end # Execute the prepared statement name with the given arguments on the connection. def _execute_prepared_statement(conn, ps_name, args, opts) conn.exec_prepared(ps_name, args) end # Add the primary_keys and primary_key_sequences instance variables, # so we can get the correct return values for inserted rows. def adapter_initialize @use_iso_date_format = typecast_value_boolean(@opts.fetch(:use_iso_date_format, true)) initialize_postgres_adapter # :nocov: add_conversion_proc(17, method(:unescape_bytea)) if USES_PG add_conversion_proc(1082, TYPE_TRANSLATOR_DATE) if @use_iso_date_format # :nocov: self.convert_infinite_timestamps = @opts[:convert_infinite_timestamps] end # Convert exceptions raised from the block into DatabaseErrors. def check_database_errors yield rescue => e raise_error(e, :classes=>database_error_classes) end # Set the DateStyle to ISO if configured, for faster date parsing. 
def connection_configuration_sqls(opts=@opts) sqls = super # :nocov: sqls << "SET DateStyle = 'ISO'" if @use_iso_date_format # :nocov: sqls end # :nocov: if USES_PG def unescape_bytea(s) ::Sequel::SQL::Blob.new(Adapter.unescape_bytea(s)) end end # :nocov: DATABASE_ERROR_CLASSES = [PGError].freeze def database_error_classes DATABASE_ERROR_CLASSES end def disconnect_error?(exception, opts) super || Adapter::DISCONNECT_ERROR_CLASSES.any?{|klass| exception.is_a?(klass)} || exception.message =~ Adapter::DISCONNECT_ERROR_RE end def database_exception_sqlstate(exception, opts) # :nocov: if exception.respond_to?(:result) && (result = exception.result) # :nocov: result.error_field(PGresult::PG_DIAG_SQLSTATE) end end def dataset_class_default Dataset end # Execute the prepared statement with the given name on an available # connection, using the given args. If the connection has not prepared # a statement with the given name yet, prepare it. If the connection # has prepared a statement with the same name and different SQL, # deallocate that statement first and then prepare this statement. # If a block is given, yield the result, otherwise, return the number # of rows changed. def execute_prepared_statement(conn, name, opts=OPTS, &block) ps = prepared_statement(name) sql = ps.prepared_sql ps_name = name.to_s if args = opts[:arguments] args = args.map{|arg| bound_variable_arg(arg, conn)} end unless conn.prepared_statements[ps_name] == sql conn.execute("DEALLOCATE #{ps_name}") if conn.prepared_statements.include?(ps_name) conn.check_disconnect_errors{log_connection_yield("PREPARE #{ps_name} AS #{sql}", conn){conn.prepare(ps_name, sql)}} conn.prepared_statements[ps_name] = sql end log_sql = "EXECUTE #{ps_name}" if ps.log_sql log_sql += " (" log_sql << sql log_sql << ")" end q = conn.check_disconnect_errors{log_connection_yield(log_sql, conn, args){_execute_prepared_statement(conn, ps_name, args, opts)}} begin defined?(yield) ? yield(q) : q.cmd_tuples ensure q.clear if q && q.respond_to?(:clear) end end # Don't log, since logging is done by the underlying connection. def log_connection_execute(conn, sql) conn.execute(sql) end def rollback_transaction(conn, opts=OPTS) super unless conn.transaction_status == 0 end end class Dataset < Sequel::Dataset include Sequel::Postgres::DatasetMethods def fetch_rows(sql) return cursor_fetch_rows(sql){|h| yield h} if @opts[:cursor] execute(sql){|res| yield_hash_rows(res, fetch_rows_set_cols(res)){|h| yield h}} end # Use a cursor for paging. def paged_each(opts=OPTS, &block) unless defined?(yield) return enum_for(:paged_each, opts) end use_cursor(opts).each(&block) end # Uses a cursor for fetching records, instead of fetching the entire result # set at once. Note this uses a transaction around the cursor usage by # default and can be changed using `hold: true` as described below. # Cursors can be used to process large datasets without holding all rows # in memory (which is what the underlying drivers may do by default). # Options: # # :cursor_name :: The name assigned to the cursor (default 'sequel_cursor'). # Nested cursors require different names. # :hold :: Declare the cursor WITH HOLD and don't use transaction around the # cursor usage. # :rows_per_fetch :: The number of rows per fetch (default 1000). Higher # numbers result in fewer queries but greater memory use. 
# # Usage: # # DB[:huge_table].use_cursor.each{|row| p row} # DB[:huge_table].use_cursor(rows_per_fetch: 10000).each{|row| p row} # DB[:huge_table].use_cursor(cursor_name: 'my_cursor').each{|row| p row} # # This is untested with the prepared statement/bound variable support, # and unlikely to work with either. def use_cursor(opts=OPTS) clone(:cursor=>{:rows_per_fetch=>1000}.merge!(opts)) end # Replace the WHERE clause with one that uses CURRENT OF with the given # cursor name (or the default cursor name). This allows you to update a # large dataset by updating individual rows while processing the dataset # via a cursor: # # DB[:huge_table].use_cursor(rows_per_fetch: 1).each do |row| # DB[:huge_table].where_current_of.update(column: ruby_method(row)) # end def where_current_of(cursor_name='sequel_cursor') clone(:where=>Sequel.lit(['CURRENT OF '], Sequel.identifier(cursor_name))) end # :nocov: if USES_PG # :nocov: PREPARED_ARG_PLACEHOLDER = LiteralString.new('$').freeze # PostgreSQL specific argument mapper used for mapping the named # argument hash to a array with numbered arguments. Only used with # the pg driver. module ArgumentMapper include Sequel::Dataset::ArgumentMapper protected # An array of bound variable values for this query, in the correct order. def map_to_prepared_args(hash) prepared_args.map{|k| hash[k.to_sym]} end private def prepared_arg(k) y = k if i = prepared_args.index(y) i += 1 else prepared_args << y i = prepared_args.length end LiteralString.new("#{prepared_arg_placeholder}#{i}") end end BindArgumentMethods = prepared_statements_module(:bind, [ArgumentMapper], %w'execute execute_dui') PreparedStatementMethods = prepared_statements_module(:prepare, BindArgumentMethods, %w'execute execute_dui') private def bound_variable_modules [BindArgumentMethods] end def prepared_statement_modules [PreparedStatementMethods] end # PostgreSQL uses $N for placeholders instead of ?, so use a $ # as the placeholder. def prepared_arg_placeholder PREPARED_ARG_PLACEHOLDER end end private # Generate and execute a procedure call. def call_procedure(name, args) sql = String.new sql << "CALL " identifier_append(sql, name) sql << "(" expression_list_append(sql, args) sql << ")" with_sql_first(sql) end # Use a cursor to fetch groups of records at a time, yielding them to the block. def cursor_fetch_rows(sql) server_opts = {:server=>@opts[:server] || :read_only} cursor = @opts[:cursor] hold = cursor[:hold] cursor_name = quote_identifier(cursor[:cursor_name] || 'sequel_cursor') rows_per_fetch = cursor[:rows_per_fetch].to_i db.public_send(*(hold ? [:synchronize, server_opts[:server]] : [:transaction, server_opts])) do begin execute_ddl("DECLARE #{cursor_name} NO SCROLL CURSOR WITH#{'OUT' unless hold} HOLD FOR #{sql}", server_opts) rows_per_fetch = 1000 if rows_per_fetch <= 0 fetch_sql = "FETCH FORWARD #{rows_per_fetch} FROM #{cursor_name}" cols = nil # Load columns only in the first fetch, so subsequent fetches are faster execute(fetch_sql) do |res| cols = fetch_rows_set_cols(res) yield_hash_rows(res, cols){|h| yield h} return if res.ntuples < rows_per_fetch end while true execute(fetch_sql) do |res| yield_hash_rows(res, cols){|h| yield h} return if res.ntuples < rows_per_fetch end end rescue Exception => e raise ensure begin execute_ddl("CLOSE #{cursor_name}", server_opts) rescue raise e if e raise end end end end # Set the columns based on the result set, and return the array of # field numers, type conversion procs, and name symbol arrays. 
def fetch_rows_set_cols(res) cols = [] procs = db.conversion_procs res.nfields.times do |fieldnum| cols << [procs[res.ftype(fieldnum)], output_identifier(res.fname(fieldnum))] end self.columns = cols.map{|c| c[1]} cols end # Use the driver's escape_bytea def literal_blob_append(sql, v) sql << "'" << db.synchronize(@opts[:server]){|c| c.escape_bytea(v)} << "'" end # Use the driver's escape_string def literal_string_append(sql, v) sql << "'" << db.synchronize(@opts[:server]){|c| c.escape_string(v)} << "'" end # For each row in the result set, yield a hash with column name symbol # keys and typecasted values. def yield_hash_rows(res, cols) ntuples = res.ntuples recnum = 0 while recnum < ntuples fieldnum = 0 nfields = cols.length converted_rec = {} while fieldnum < nfields type_proc, fieldsym = cols[fieldnum] value = res.getvalue(recnum, fieldnum) converted_rec[fieldsym] = (value && type_proc) ? type_proc.call(value) : value fieldnum += 1 end yield converted_rec recnum += 1 end end end end end # :nocov: if Sequel::Postgres::USES_PG && !ENV['NO_SEQUEL_PG'] begin require 'sequel_pg' if defined?(Gem) && (sequel_pg_spec = Gem.loaded_specs['sequel_pg'] rescue nil) && (sequel_pg_spec.version < Gem::Version.new('1.6.17')) raise Sequel::Error, "the installed sequel_pg is too old, please update to at least sequel_pg-1.6.17" end rescue LoadError end end # :nocov: sequel-5.63.0/lib/sequel/adapters/postgresql.rb000066400000000000000000000000731434214120600214770ustar00rootroot00000000000000# frozen-string-literal: true require_relative 'postgres' sequel-5.63.0/lib/sequel/adapters/shared/000077500000000000000000000000001434214120600202155ustar00rootroot00000000000000sequel-5.63.0/lib/sequel/adapters/shared/access.rb000066400000000000000000000221211434214120600220010ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../utils/emulate_offset_with_reverse_and_count' require_relative '../utils/unmodified_identifiers' require_relative '../utils/columns_limit_1' module Sequel module Access Sequel::Database.set_shared_adapter_scheme(:access, self) module DatabaseMethods include UnmodifiedIdentifiers::DatabaseMethods def database_type :access end # Doesn't work, due to security restrictions on MSysObjects #def tables # from(:MSysObjects).where(Type: 1, Flags: 0).select_map(:Name).map(&:to_sym) #end # Access doesn't support renaming tables from an SQL query, # so create a copy of the table and then drop the from table. def rename_table(from_table, to_table) create_table(to_table, :as=>from(from_table)) drop_table(from_table) end # Access uses type Counter for an autoincrementing keys def serial_primary_key_options {:primary_key => true, :type=>:Counter} end private def alter_table_set_column_type_sql(table, op) "ALTER COLUMN #{quote_identifier(op[:name])} #{type_literal(op)}" end # Access doesn't support CREATE TABLE AS, it only supports SELECT INTO. # Emulating CREATE TABLE AS using SELECT INTO is only possible if a dataset # is given as the argument, it can't work with a string, so raise an # Error if a string is given. 
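      # A sketch of the supported form (table names hypothetical), which
      # roughly generates the SQL shown:
      #
      #   DB.create_table(:new_table, as: DB[:old_table])
      #   # SELECT * INTO [new_table] FROM [old_table]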
def create_table_as(name, ds, options) raise(Error, "must provide dataset instance as value of create_table :as option on Access") unless ds.is_a?(Sequel::Dataset) run(ds.into(name).sql) end DATABASE_ERROR_REGEXPS = { /The changes you requested to the table were not successful because they would create duplicate values in the index, primary key, or relationship/ => UniqueConstraintViolation, /You cannot add or change a record because a related record is required|The record cannot be deleted or changed because table/ => ForeignKeyConstraintViolation, /One or more values are prohibited by the validation rule/ => CheckConstraintViolation, /You must enter a value in the .+ field|cannot contain a Null value because the Required property for this field is set to True/ => NotNullConstraintViolation, }.freeze def database_error_regexps DATABASE_ERROR_REGEXPS end # Access's Byte type will accept much larger values, # even though it only stores 0-255. Do not set min/max # values for the Byte type. def column_schema_integer_min_max_values(db_type) return if /byte/i =~ db_type super end def drop_index_sql(table, op) "DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))} ON #{quote_schema_table(table)}" end # Access doesn't have a 64-bit integer type, so use integer and hope # the user isn't using more than 32 bits. def type_literal_generic_bignum_symbol(column) :integer end # Access doesn't have a true boolean class, so it uses bit def type_literal_generic_trueclass(column) :bit end # Access uses image type for blobs def type_literal_generic_file(column) :image end end module DatasetMethods include(Module.new do Dataset.def_sql_method(self, :select, %w'select distinct limit columns into from join where group order having compounds') end) include EmulateOffsetWithReverseAndCount include UnmodifiedIdentifiers::DatasetMethods include ::Sequel::Dataset::ColumnsLimit1 EXTRACT_MAP = {:year=>"'yyyy'", :month=>"'m'", :day=>"'d'", :hour=>"'h'", :minute=>"'n'", :second=>"'s'"}.freeze EXTRACT_MAP.each_value(&:freeze) OPS = {:'%'=>' Mod '.freeze, :'||'=>' & '.freeze}.freeze CAST_TYPES = {String=>:CStr, Integer=>:CLng, Date=>:CDate, Time=>:CDate, DateTime=>:CDate, Numeric=>:CDec, BigDecimal=>:CDec, File=>:CStr, Float=>:CDbl, TrueClass=>:CBool, FalseClass=>:CBool}.freeze # Access doesn't support CASE, so emulate it with nested IIF function calls. 
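      # For example (a sketch with hypothetical conditions), a case
      # expression with conditions [[a, 1], [b, 2]] and default 0 is built
      # up by the inject below as IIF(a, 1, IIF(b, 2, 0)).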
def case_expression_sql_append(sql, ce) literal_append(sql, ce.with_merged_expression.conditions.reverse.inject(ce.default){|exp,(cond,val)| Sequel::SQL::Function.new(:IIF, cond, val, exp)}) end # Access doesn't support CAST, it uses separate functions for # type conversion def cast_sql_append(sql, expr, type) sql << CAST_TYPES.fetch(type, type).to_s sql << '(' literal_append(sql, expr) sql << ')' end def complex_expression_sql_append(sql, op, args) case op when :ILIKE complex_expression_sql_append(sql, :LIKE, args) when :'NOT ILIKE' complex_expression_sql_append(sql, :'NOT LIKE', args) when :'!=' sql << '(' literal_append(sql, args[0]) sql << ' <> ' literal_append(sql, args[1]) sql << ')' when :'%', :'||' sql << '(' c = false op_str = OPS[op] args.each do |a| sql << op_str if c literal_append(sql, a) c ||= true end sql << ')' when :** sql << '(' literal_append(sql, args[0]) sql << ' ^ ' literal_append(sql, args[1]) sql << ')' when :extract part = args[0] raise(Sequel::Error, "unsupported extract argument: #{part.inspect}") unless format = EXTRACT_MAP[part] sql << "datepart(" << format.to_s << ', ' literal_append(sql, args[1]) sql << ')' else super end end # Use Date(), Now(), and Time() for CURRENT_DATE, CURRENT_TIMESTAMP, and CURRENT_TIME def constant_sql_append(sql, constant) case constant when :CURRENT_DATE sql << 'Date()' when :CURRENT_TIMESTAMP sql << 'Now()' when :CURRENT_TIME sql << 'Time()' else super end end # Emulate cross join by using multiple tables in the FROM clause. def cross_join(table) clone(:from=>@opts[:from] + [table]) end # Access uses [] to escape metacharacters, instead of backslashes. def escape_like(string) string.gsub(/[\\*#?\[]/){|m| "[#{m}]"} end # Specify a table for a SELECT ... INTO query. def into(table) clone(:into => table) end # Access uses [] for quoting identifiers, and can't handle # ] inside identifiers. def quoted_identifier_append(sql, v) sql << '[' << v.to_s << ']' end # Access does not support derived column lists. def supports_derived_column_lists? false end # Access doesn't support INTERSECT or EXCEPT def supports_intersect_except? false end # Access does not support IS TRUE def supports_is_true? false end # Access doesn't support JOIN USING def supports_join_using? false end # Access does not support multiple columns for the IN/NOT IN operators def supports_multiple_column_in? false end # Access doesn't support truncate, so do a delete instead. def truncate delete nil end private # Access uses # to quote dates def literal_date(d) d.strftime('#%Y-%m-%d#') end # Access uses # to quote datetimes def literal_datetime(t) t.strftime('#%Y-%m-%d %H:%M:%S#') end alias literal_time literal_datetime # Use 0 for false on Access def literal_false '0' end # Use -1 for true on Access def literal_true '-1' end # Emulate the char_length function with len def native_function_name(emulated_function) if emulated_function == :char_length 'len' else super end end # Access does not natively support NULLS FIRST/LAST. def requires_emulating_nulls_first? true end # Access doesn't support ESCAPE for LIKE. def requires_like_escape? false end # Access requires parentheses when joining more than one table def select_from_sql(sql) if f = @opts[:from] sql << ' FROM ' if (j = @opts[:join]) && !j.empty?
sql << ('(' * j.length) end source_list_append(sql, f) end end def select_into_sql(sql) if i = @opts[:into] sql << " INTO " identifier_append(sql, i) end end # Access requires parentheses when joining more than one table def select_join_sql(sql) if js = @opts[:join] js.each do |j| literal_append(sql, j) sql << ')' end end end # Access uses TOP for limits def select_limit_sql(sql) if l = @opts[:limit] sql << " TOP " literal_append(sql, l) end end end end end sequel-5.63.0/lib/sequel/adapters/shared/db2.rb000066400000000000000000000404621434214120600212170ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../utils/emulate_offset_with_row_number' require_relative '../utils/columns_limit_1' module Sequel module DB2 Sequel::Database.set_shared_adapter_scheme(:db2, self) module DatabaseMethods # Whether to use clob as the generic File type, false by default. attr_accessor :use_clob_as_blob def database_type :db2 end # Return the database version as a string. Don't rely on this, # it may return an integer in the future. def db2_version return @db2_version if defined?(@db2_version) @db2_version = metadata_dataset.with_sql("select service_level from sysibmadm.env_inst_info").first[:service_level] end alias_method :server_version, :db2_version def freeze db2_version offset_strategy super end # Use SYSIBM.SYSCOLUMNS to get the information on the tables. def schema_parse_table(table, opts = OPTS) m = output_identifier_meth(opts[:dataset]) im = input_identifier_meth(opts[:dataset]) metadata_dataset.with_sql("SELECT * FROM SYSIBM.SYSCOLUMNS WHERE TBNAME = #{literal(im.call(table))} ORDER BY COLNO"). collect do |column| column[:db_type] = column.delete(:typename) if column[:db_type] =~ /\A(VAR)?CHAR\z/ column[:db_type] << "(#{column[:length]})" end if column[:db_type] == "DECIMAL" column[:db_type] << "(#{column[:longlength]},#{column[:scale]})" end column[:allow_null] = column.delete(:nulls) == 'Y' identity = column.delete(:identity) == 'Y' if column[:primary_key] = identity || !column[:keyseq].nil? column[:auto_increment] = identity end column[:type] = schema_column_type(column[:db_type]) column[:max_length] = column[:longlength] if column[:type] == :string [ m.call(column.delete(:name)), column] end end # Use SYSCAT.TABLES to get the tables for the database def tables metadata_dataset. with_sql("SELECT TABNAME FROM SYSCAT.TABLES WHERE TYPE='T' AND OWNER = #{literal(input_identifier_meth.call(opts[:user]))}"). all.map{|h| output_identifier_meth.call(h[:tabname]) } end # Use SYSCAT.TABLES to get the views for the database def views metadata_dataset. with_sql("SELECT TABNAME FROM SYSCAT.TABLES WHERE TYPE='V' AND OWNER = #{literal(input_identifier_meth.call(opts[:user]))}"). all.map{|h| output_identifier_meth.call(h[:tabname]) } end # Use SYSCAT.INDEXES to get the indexes for the table def indexes(table, opts = OPTS) m = output_identifier_meth table = table.value if table.is_a?(Sequel::SQL::Identifier) indexes = {} metadata_dataset. from(Sequel[:syscat][:indexes]). select(:indname, :uniquerule, :colnames). where(:tabname=>input_identifier_meth.call(table), :system_required=>0). 
each do |r| indexes[m.call(r[:indname])] = {:unique=>(r[:uniquerule]=='U'), :columns=>r[:colnames][1..-1].split('+').map{|v| m.call(v)}} end indexes end def offset_strategy return @offset_strategy if defined?(@offset_strategy) @offset_strategy = case strategy = opts[:offset_strategy].to_s when "limit_offset", "offset_fetch" opts[:offset_strategy] = strategy.to_sym else opts[:offset_strategy] = :emulate end end # DB2 supports transaction isolation levels. def supports_transaction_isolation_levels? true end # On DB2, a table might need to be REORGed if you are testing existence # of it. This REORGs automatically if the database raises a specific # error that indicates it should be REORGed. def table_exists?(name) v ||= false # only retry once sch, table_name = schema_and_table(name) name = SQL::QualifiedIdentifier.new(sch, table_name) if sch from(name).first true rescue DatabaseError => e if e.to_s =~ /Operation not allowed for reason code "7" on table/ && v == false # table probably needs reorg reorg(name) v = true retry end false end private def alter_table_sql(table, op) case op[:op] when :add_column if op[:primary_key] && op[:auto_increment] && op[:type] == Integer [ "ALTER TABLE #{quote_schema_table(table)} ADD #{column_definition_sql(op.merge(:auto_increment=>false, :primary_key=>false, :default=>0, :null=>false))}", "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{literal(op[:name])} DROP DEFAULT", "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{literal(op[:name])} SET #{auto_increment_sql}" ] else "ALTER TABLE #{quote_schema_table(table)} ADD #{column_definition_sql(op)}" end when :drop_column "ALTER TABLE #{quote_schema_table(table)} DROP #{column_definition_sql(op)}" when :rename_column # renaming is only possible after DB2 v9.7 "ALTER TABLE #{quote_schema_table(table)} RENAME COLUMN #{quote_identifier(op[:name])} TO #{quote_identifier(op[:new_name])}" when :set_column_type "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} SET DATA TYPE #{type_literal(op)}" when :set_column_default "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} SET DEFAULT #{literal(op[:default])}" when :add_constraint if op[:type] == :unique sqls = op[:columns].map{|c| ["ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(c)} SET NOT NULL", reorg_sql(table)]} sqls << super sqls.flatten else super end else super end end # REORG the related table whenever it is altered. This is not always # required, but it is necessary for compatibility with other Sequel # code in many cases. def apply_alter_table(name, ops) alter_table_sql_list(name, ops).each do |sql| execute_ddl(sql) reorg(name) end end # DB2 uses an identity column for autoincrement. def auto_increment_sql 'GENERATED ALWAYS AS IDENTITY' end # DB2 does not allow adding primary key constraints to NULLable columns. def can_add_primary_key_constraint_on_nullable_columns? false end # Supply columns with NOT NULL if they are part of a composite # primary key or unique constraint def column_list_sql(g) ks = [] g.constraints.each{|c| ks = c[:columns] if [:primary_key, :unique].include?(c[:type])} g.columns.each{|c| c[:null] = false if ks.include?(c[:name]) } super end # Insert data from the current table into the new table after # creating the table, since it is not possible to do it in one step. def create_table_as(name, sql, options) super from(name).insert(sql.is_a?(Dataset) ?
sql : dataset.with_sql(sql)) end # DB2 requires parens around the SELECT, and DEFINITION ONLY at the end. def create_table_as_sql(name, sql, options) "#{create_table_prefix_sql(name, options)} AS (#{sql}) DEFINITION ONLY" end # Here we use DGTT, which has the most backward compatibility and uses # DECLARE instead of CREATE. CGTT can only be used after version 9.7. # http://www.ibm.com/developerworks/data/library/techarticle/dm-0912globaltemptable/ def create_table_prefix_sql(name, options) if options[:temp] "DECLARE GLOBAL TEMPORARY TABLE #{quote_identifier(name)}" else super end end DATABASE_ERROR_REGEXPS = { /DB2 SQL Error: SQLCODE=-803, SQLSTATE=23505|One or more values in the INSERT statement, UPDATE statement, or foreign key update caused by a DELETE statement are not valid because the primary key, unique constraint or unique index/ => UniqueConstraintViolation, /DB2 SQL Error: (SQLCODE=-530, SQLSTATE=23503|SQLCODE=-532, SQLSTATE=23504)|The insert or update value of the FOREIGN KEY .+ is not equal to any value of the parent key of the parent table|A parent row cannot be deleted because the relationship .+ restricts the deletion/ => ForeignKeyConstraintViolation, /DB2 SQL Error: SQLCODE=-545, SQLSTATE=23513|The requested operation is not allowed because a row does not satisfy the check constraint/ => CheckConstraintViolation, /DB2 SQL Error: SQLCODE=-407, SQLSTATE=23502|Assignment of a NULL value to a NOT NULL column/ => NotNullConstraintViolation, /DB2 SQL Error: SQLCODE=-911, SQLSTATE=40001|The current transaction has been rolled back because of a deadlock or timeout/ => SerializationFailure, }.freeze def database_error_regexps DATABASE_ERROR_REGEXPS end # DB2 has issues with quoted identifiers, so # turn off database quoting by default. def quote_identifiers_default false end # DB2 uses RENAME TABLE to rename tables. def rename_table_sql(name, new_name) "RENAME TABLE #{quote_schema_table(name)} TO #{quote_schema_table(new_name)}" end # Run the REORG TABLE command for the table, necessary when # the table has been altered. def reorg(table) execute_ddl(reorg_sql(table)) end # The SQL to use for REORGing a table. def reorg_sql(table) "CALL SYSPROC.ADMIN_CMD(#{literal("REORG TABLE #{quote_schema_table(table)}")})" end # Treat clob as blob if use_clob_as_blob is true def schema_column_type(db_type) (use_clob_as_blob && db_type.downcase == 'clob') ? :blob : super end # SQL to set the transaction isolation level def set_transaction_isolation_sql(level) "SET CURRENT ISOLATION #{Database::TRANSACTION_ISOLATION_LEVELS[level]}" end # We use the clob type by default for Files. # Note: if the user selects to use blob, then the insert statement should # use this for blob values: # cast(X'fffefdfcfbfa' as blob(2G)) def type_literal_generic_file(column) use_clob_as_blob ? :clob : :blob end # DB2 uses smallint to store booleans. def type_literal_generic_trueclass(column) :smallint end alias type_literal_generic_falseclass type_literal_generic_trueclass # DB2 uses clob for text types. def uses_clob_for_text? true end # DB2 supports views with check option. def view_with_check_option_support :local end end module DatasetMethods include EmulateOffsetWithRowNumber include ::Sequel::Dataset::ColumnsLimit1 BITWISE_METHOD_MAP = {:& =>:BITAND, :| => :BITOR, :^ => :BITXOR, :'B~'=>:BITNOT}.freeze # DB2 casts strings using RTRIM and CHAR instead of VARCHAR.
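# For example (illustrative SQL, exact formatting may differ):
#
#   DB[:t].select(Sequel.cast(:c, String))
#   # SELECT RTRIM(CHAR(c)) FROM t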
def cast_sql_append(sql, expr, type) if(type == String) sql << "RTRIM(CHAR(" literal_append(sql, expr) sql << "))" else super end end def complex_expression_sql_append(sql, op, args) case op when :&, :|, :^, :%, :<<, :>> complex_expression_emulate_append(sql, op, args) when :'B~' literal_append(sql, SQL::Function.new(:BITNOT, *args)) when :extract sql << args[0].to_s sql << '(' literal_append(sql, args[1]) sql << ')' else super end end def quote_identifiers? @opts.fetch(:quote_identifiers, false) end def supports_cte?(type=:select) type == :select end # DB2 supports GROUP BY CUBE def supports_group_cube? true end # DB2 supports GROUP BY ROLLUP def supports_group_rollup? true end # DB2 supports GROUPING SETS def supports_grouping_sets? true end # DB2 does not support IS TRUE. def supports_is_true? false end # DB2 supports lateral subqueries def supports_lateral_subqueries? true end # DB2 supports MERGE def supports_merge? true end # DB2 does not support multiple columns in IN. def supports_multiple_column_in? false end # DB2 only allows * in SELECT if it is the only thing being selected. def supports_select_all_and_column? false end # DB2 supports window functions def supports_window_functions? true end # DB2 does not support WHERE 1. def supports_where_true? false end private # Normalize conditions for MERGE WHEN. def _merge_when_conditions_sql(sql, data) if data.has_key?(:conditions) sql << " AND " literal_append(sql, _normalize_merge_when_conditions(data[:conditions])) end end # Handle nil, false, and true MERGE WHEN conditions to avoid non-boolean # type error. def _normalize_merge_when_conditions(conditions) case conditions when nil, false {1=>0} when true {1=>1} when Sequel::SQL::DelayedEvaluation Sequel.delay{_normalize_merge_when_conditions(conditions.call(self))} else conditions end end def empty_from_sql ' FROM "SYSIBM"."SYSDUMMY1"' end # Emulate offset with row number by default, and also when the limit_offset # strategy is used without a limit, as DB2 doesn't support that syntax with # no limit. def emulate_offset_with_row_number? super && (db.offset_strategy == :emulate || (db.offset_strategy == :limit_offset && !@opts[:limit])) end # DB2 needs the standard workaround to insert all default values into # a table with more than one column. def insert_supports_empty_values? false end # Use 0 for false on DB2 def literal_false '0' end # DB2 doesn't support fractional seconds in times, only fractional seconds in timestamps. def literal_sqltime(v) v.strftime("'%H:%M:%S'") end # Use 1 for true on DB2 def literal_true '1' end # DB2 uses a literal hexadecimal number for blob strings def literal_blob_append(sql, v) if db.use_clob_as_blob super else sql << "BLOB(X'" << v.unpack("H*").first << "')" end end # DB2 can insert multiple rows using a UNION def multi_insert_sql_strategy :union end # Emulate the char_length function with length def native_function_name(emulated_function) if emulated_function == :char_length 'length' else super end end # DB2 does not require that ROW_NUMBER be ordered. def require_offset_order? false end # At least some versions of DB2 do not support NULLS FIRST/LAST. def requires_emulating_nulls_first? true end # Modify the sql to limit the number of rows returned. # Uses :offset_strategy Database option to determine how to format the # limit and offset.
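# For example (illustrative), a Database created with the
# offset_strategy: :offset_fetch option would format
#
#   DB[:t].limit(5, 10)
#   # SELECT * FROM t OFFSET 10 ROWS FETCH FIRST 5 ROWS ONLY
#
# instead of using the default ROW_NUMBER emulation.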
def select_limit_sql(sql) strategy = db.offset_strategy return super if strategy == :limit_offset if strategy == :offset_fetch && (o = @opts[:offset]) sql << " OFFSET " literal_append(sql, o) sql << " ROWS" end if l = @opts[:limit] if l == 1 sql << " FETCH FIRST ROW ONLY" else sql << " FETCH FIRST " literal_append(sql, l) sql << " ROWS ONLY" end end end # DB2 supports quoted function names. def supports_quoted_function_names? true end def _truncate_sql(table) # "TRUNCATE #{table} IMMEDIATE" is only for newer versions of DB2, so we # use the following one "ALTER TABLE #{quote_schema_table(table)} ACTIVATE NOT LOGGED INITIALLY WITH EMPTY TABLE" end end end end sequel-5.63.0/lib/sequel/adapters/shared/mssql.rb000066400000000000000000001232651434214120600217100ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../utils/emulate_offset_with_row_number' require_relative '../utils/split_alter_table' module Sequel module MSSQL Sequel::Database.set_shared_adapter_scheme(:mssql, self) def self.mock_adapter_setup(db) db.instance_exec do @server_version = 11000000 end end module DatabaseMethods FOREIGN_KEY_ACTION_MAP = {0 => :no_action, 1 => :cascade, 2 => :set_null, 3 => :set_default}.freeze include Sequel::Database::SplitAlterTable # Whether to use N'' to quote strings, which allows unicode characters inside the # strings. True by default for compatibility, can be set to false for a possible # performance increase. This sets the default for all datasets created from this # Database object. attr_accessor :mssql_unicode_strings # Whether to use LIKE without COLLATE Latin1_General_CS_AS. Skipping the COLLATE # can significantly increase performance in some cases. attr_accessor :like_without_collate # Execute the given stored procedure with the given name. # # Options: # :args :: Arguments to stored procedure. For named arguments, this should be a # hash keyed by argument name. For unnamed arguments, this should be an # array. Output parameters to the function are specified using :output. # You can also name output parameters and provide a type by using an # array containing :output, the type name, and the parameter name. # :server :: The server/shard on which to execute the procedure. # # This method returns a single hash with the following keys: # # :result :: The result code of the stored procedure # :numrows :: The number of rows affected by the stored procedure # output params :: Values for any output parameters, using the name given for the output parameter # # Because Sequel datasets only support a single result set per query, and retrieving # the result code and number of rows requires a query, this does not support # stored procedures which also return result sets. To handle such stored procedures, # you should drop down to the connection/driver level by using Sequel::Database#synchronize # to get access to the underlying connection object.
# # Examples: # # DB.call_mssql_sproc(:SequelTest, {args: ['input arg', :output]}) # DB.call_mssql_sproc(:SequelTest, {args: ['input arg', [:output, 'int', 'varname']]}) # # named params: # DB.call_mssql_sproc(:SequelTest, args: { # 'input_arg1_name' => 'input arg1 value', # 'input_arg2_name' => 'input arg2 value', # 'output_arg_name' => [:output, 'int', 'varname'] # }) def call_mssql_sproc(name, opts=OPTS) args = opts[:args] || [] names = ['@RC AS RESULT', '@@ROWCOUNT AS NUMROWS'] declarations = ['@RC int'] values = [] if args.is_a?(Hash) named_args = true args = args.to_a method = :each else method = :each_with_index end args.public_send(method) do |v, i| if named_args k = v v, type, select = i raise Error, "must provide output parameter name when using output parameters with named arguments" if v == :output && !select else v, type, select = v end if v == :output type ||= "nvarchar(max)" if named_args varname = select else varname = "var#{i}" select ||= varname end names << "@#{varname} AS #{quote_identifier(select)}" declarations << "@#{varname} #{type}" value = "@#{varname} OUTPUT" else value = literal(v) end if named_args value = "@#{k}=#{value}" end values << value end sql = "DECLARE #{declarations.join(', ')}; EXECUTE @RC = #{name} #{values.join(', ')}; SELECT #{names.join(', ')}" ds = dataset.with_sql(sql) ds = ds.server(opts[:server]) if opts[:server] ds.first end def database_type :mssql end # Microsoft SQL Server namespaces indexes per table. def global_index_namespace? false end # Return foreign key information using the system views, including # :name, :on_delete, and :on_update entries in the hashes. def foreign_key_list(table, opts=OPTS) m = output_identifier_meth im = input_identifier_meth schema, table = schema_and_table(table) current_schema = m.call(get(Sequel.function('schema_name'))) fk_action_map = FOREIGN_KEY_ACTION_MAP fk = Sequel[:fk] fkc = Sequel[:fkc] ds = metadata_dataset.from(Sequel.lit('[sys].[foreign_keys]').as(:fk)). join(Sequel.lit('[sys].[foreign_key_columns]').as(:fkc), :constraint_object_id => :object_id). join(Sequel.lit('[sys].[all_columns]').as(:pc), :object_id => fkc[:parent_object_id], :column_id => fkc[:parent_column_id]). join(Sequel.lit('[sys].[all_columns]').as(:rc), :object_id => fkc[:referenced_object_id], :column_id => fkc[:referenced_column_id]). where{{object_schema_name(fk[:parent_object_id]) => im.call(schema || current_schema)}}. where{{object_name(fk[:parent_object_id]) => im.call(table)}}. select{[fk[:name], fk[:delete_referential_action], fk[:update_referential_action], pc[:name].as(:column), rc[:name].as(:referenced_column), object_schema_name(fk[:referenced_object_id]).as(:schema), object_name(fk[:referenced_object_id]).as(:table)]}. order(fk[:name], fkc[:constraint_column_id]) h = {} ds.each do |row| if r = h[row[:name]] r[:columns] << m.call(row[:column]) r[:key] << m.call(row[:referenced_column]) else referenced_schema = m.call(row[:schema]) referenced_table = m.call(row[:table]) h[row[:name]] = { :name => m.call(row[:name]), :table => (referenced_schema == current_schema) ? 
referenced_table : Sequel.qualify(referenced_schema, referenced_table), :columns => [m.call(row[:column])], :key => [m.call(row[:referenced_column])], :on_update => fk_action_map[row[:update_referential_action]], :on_delete => fk_action_map[row[:delete_referential_action]] } end end h.values end def freeze server_version super end # Use the system tables to get index information def indexes(table, opts=OPTS) m = output_identifier_meth im = input_identifier_meth indexes = {} table = table.value if table.is_a?(Sequel::SQL::Identifier) i = Sequel[:i] ds = metadata_dataset.from(Sequel.lit('[sys].[tables]').as(:t)). join(Sequel.lit('[sys].[indexes]').as(:i), :object_id=>:object_id). join(Sequel.lit('[sys].[index_columns]').as(:ic), :object_id=>:object_id, :index_id=>:index_id). join(Sequel.lit('[sys].[columns]').as(:c), :object_id=>:object_id, :column_id=>:column_id). select(i[:name], i[:is_unique], Sequel[:c][:name].as(:column)). where{{t[:name]=>im.call(table)}}. where(i[:is_primary_key]=>0, i[:is_disabled]=>0). order(i[:name], Sequel[:ic][:index_column_id]) if supports_partial_indexes? ds = ds.where(i[:has_filter]=>0) end ds.each do |r| index = indexes[m.call(r[:name])] ||= {:columns=>[], :unique=>(r[:is_unique] && r[:is_unique]!=0)} index[:columns] << m.call(r[:column]) end indexes end # The version of the MSSQL server, as an integer (e.g. 10001600 for # SQL Server 2008 Express). def server_version(server=nil) return @server_version if @server_version if @opts[:server_version] return @server_version = Integer(@opts[:server_version]) end @server_version = synchronize(server) do |conn| (conn.server_version rescue nil) if conn.respond_to?(:server_version) end unless @server_version m = /^(\d+)\.(\d+)\.(\d+)/.match(fetch("SELECT CAST(SERVERPROPERTY('ProductVersion') AS varchar)").single_value.to_s) @server_version = (m[1].to_i * 1000000) + (m[2].to_i * 10000) + m[3].to_i end @server_version end # MSSQL 2008+ supports partial indexes. def supports_partial_indexes? dataset.send(:is_2008_or_later?) end # MSSQL supports savepoints, though it doesn't support releasing them def supports_savepoints? true end # MSSQL supports transaction isolation levels def supports_transaction_isolation_levels? true end # MSSQL supports transaction DDL statements. def supports_transactional_ddl? true end # Microsoft SQL Server supports using the INFORMATION_SCHEMA to get # information on tables. def tables(opts=OPTS) information_schema_tables('BASE TABLE', opts) end # Microsoft SQL Server supports using the INFORMATION_SCHEMA to get # information on views. def views(opts=OPTS) information_schema_tables('VIEW', opts) end private # Add CLUSTERED or NONCLUSTERED as needed def add_clustered_sql_fragment(sql, opts) clustered = opts[:clustered] unless clustered.nil? sql += " #{'NON' unless clustered}CLUSTERED" end sql end # Add dropping of the default constraint to the list of SQL queries. # This is necessary before dropping the column or changing its type. def add_drop_default_constraint_sql(sqls, table, column) if constraint = default_constraint_name(table, column) sqls << "ALTER TABLE #{quote_schema_table(table)} DROP CONSTRAINT #{constraint}" end end # MSSQL uses the IDENTITY(1,1) column for autoincrementing columns. 
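# For example (illustrative DDL; the exact ordering of the column
# options may differ):
#
#   DB.create_table(:items){primary_key :id}
#   # CREATE TABLE [items] ([id] integer IDENTITY(1,1) PRIMARY KEY)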
def auto_increment_sql 'IDENTITY(1,1)' end def alter_table_sql(table, op) case op[:op] when :add_column "ALTER TABLE #{quote_schema_table(table)} ADD #{column_definition_sql(op)}" when :drop_column sqls = [] add_drop_default_constraint_sql(sqls, table, op[:name]) sqls << super when :rename_column "sp_rename #{literal("#{quote_schema_table(table)}.#{quote_identifier(op[:name])}")}, #{literal(metadata_dataset.with_quote_identifiers(false).quote_identifier(op[:new_name]))}, 'COLUMN'" when :set_column_type sqls = [] if sch = schema(table) if cs = sch.each{|k, v| break v if k == op[:name]; nil} cs = cs.dup add_drop_default_constraint_sql(sqls, table, op[:name]) cs[:default] = cs[:ruby_default] op = cs.merge!(op) default = op.delete(:default) end end sqls << "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{column_definition_sql(op)}" sqls << alter_table_sql(table, op.merge(:op=>:set_column_default, :default=>default, :skip_drop_default=>true)) if default sqls when :set_column_null sch = schema(table).find{|k,v| k.to_s == op[:name].to_s}.last type = sch[:db_type] if [:string, :decimal, :blob].include?(sch[:type]) && !["text", "ntext"].include?(type) && (size = (sch[:max_chars] || sch[:column_size])) size = "MAX" if size == -1 type += "(#{size}#{", #{sch[:scale]}" if sch[:scale] && sch[:scale].to_i > 0})" end "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} #{type_literal(:type=>type)} #{'NOT ' unless op[:null]}NULL" when :set_column_default sqls = [] add_drop_default_constraint_sql(sqls, table, op[:name]) unless op[:skip_drop_default] sqls << "ALTER TABLE #{quote_schema_table(table)} ADD CONSTRAINT #{quote_identifier("sequel_#{table}_#{op[:name]}_def")} DEFAULT #{literal(op[:default])} FOR #{quote_identifier(op[:name])}" else super(table, op) end end def begin_savepoint_sql(depth) "SAVE TRANSACTION autopoint_#{depth}" end def begin_transaction_sql "BEGIN TRANSACTION" end # MSSQL does not allow adding primary key constraints to NULLable columns. def can_add_primary_key_constraint_on_nullable_columns? false end # MSSQL tinyint types are unsigned. def column_schema_tinyint_type_is_unsigned? true end # Handle MSSQL specific default format. def column_schema_normalize_default(default, type) if m = /\A(?:\(N?('.*')\)|\(\((-?\d+(?:\.\d+)?)\)\))\z/.match(default) default = m[1] || m[2] end super(default, type) end # Commit the active transaction on the connection, does not release savepoints. def commit_transaction(conn, opts=OPTS) log_connection_execute(conn, commit_transaction_sql) unless savepoint_level(conn) > 1 end def commit_transaction_sql "COMMIT TRANSACTION" end # MSSQL uses the name of the table to decide the difference between # a regular and temporary table, with temporary table names starting with # a #. def create_table_prefix_sql(name, options) "CREATE TABLE #{quote_schema_table(options[:temp] ? "##{name}" : name)}" end # MSSQL doesn't support CREATE TABLE AS, it only supports SELECT INTO. # Emulating CREATE TABLE AS using SELECT INTO is only possible if a dataset # is given as the argument, it can't work with a string, so raise an # Error if a string is given. 
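# A minimal illustrative example (assuming DB is a Database object
# using this MSSQL adapter):
#
#   DB.create_table(:items_backup, as: DB[:items])
#   # SELECT * INTO [items_backup] FROM [items]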
def create_table_as(name, ds, options) raise(Error, "must provide dataset instance as value of create_table :as option on MSSQL") unless ds.is_a?(Sequel::Dataset) run(ds.into(name).sql) end DATABASE_ERROR_REGEXPS = { /Violation of UNIQUE KEY constraint|(Violation of PRIMARY KEY constraint.+)?Cannot insert duplicate key/ => UniqueConstraintViolation, /conflicted with the (FOREIGN KEY.*|REFERENCE) constraint/ => ForeignKeyConstraintViolation, /conflicted with the CHECK constraint/ => CheckConstraintViolation, /column does not allow nulls/ => NotNullConstraintViolation, /was deadlocked on lock resources with another process and has been chosen as the deadlock victim/ => SerializationFailure, /Lock request time out period exceeded\./ => DatabaseLockTimeout, }.freeze def database_error_regexps DATABASE_ERROR_REGEXPS end # The name of the constraint for setting the default value on the table and column. # The SQL used to select default constraints utilizes MSSQL catalog views which were introduced in 2005. # This method intentionally does not support MSSQL 2000. def default_constraint_name(table, column_name) if server_version >= 9000000 table_name = schema_and_table(table).compact.join('.') self[Sequel[:sys][:default_constraints]]. where{{:parent_object_id => Sequel::SQL::Function.new(:object_id, table_name), col_name(:parent_object_id, :parent_column_id) => column_name.to_s}}. get(:name) end end def drop_index_sql(table, op) "DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))} ON #{quote_schema_table(table)}" end def index_definition_sql(table_name, index) index_name = index[:name] || default_index_name(table_name, index[:columns]) raise Error, "Partial indexes are not supported for this database" if index[:where] && !supports_partial_indexes? if index[:type] == :full_text "CREATE FULLTEXT INDEX ON #{quote_schema_table(table_name)} #{literal(index[:columns])} KEY INDEX #{literal(index[:key_index])}" else "CREATE #{'UNIQUE ' if index[:unique]}#{'CLUSTERED ' if index[:type] == :clustered}INDEX #{quote_identifier(index_name)} ON #{quote_schema_table(table_name)} #{literal(index[:columns])}#{" INCLUDE #{literal(index[:include])}" if index[:include]}#{" WHERE #{filter_expr(index[:where])}" if index[:where]}" end end # Backbone of the tables and views support. def information_schema_tables(type, opts) m = output_identifier_meth metadata_dataset.from(Sequel[:information_schema][:tables].as(:t)). select(:table_name). where(:table_type=>type, :table_schema=>(opts[:schema]||'dbo').to_s). map{|x| m.call(x[:table_name])} end # Always quote identifiers in the metadata_dataset, so schema parsing works. 
def _metadata_dataset super.with_quote_identifiers(true) end # Handle clustered and nonclustered primary keys def primary_key_constraint_sql_fragment(opts) add_clustered_sql_fragment(super, opts) end # Use sp_rename to rename the table def rename_table_sql(name, new_name) "sp_rename #{literal(quote_schema_table(name))}, #{quote_identifier(schema_and_table(new_name).pop)}" end def rollback_savepoint_sql(depth) "IF @@TRANCOUNT > 0 ROLLBACK TRANSACTION autopoint_#{depth}" end def rollback_transaction_sql "IF @@TRANCOUNT > 0 ROLLBACK TRANSACTION" end def schema_column_type(db_type) case db_type when /\A(?:bit)\z/io :boolean when /\A(?:(?:small)?money)\z/io :decimal when /\A(timestamp|rowversion)\z/io :blob else super end end # MSSQL uses the INFORMATION_SCHEMA to hold column information, and # parses primary key information from the sysindexes, sysindexkeys, # and syscolumns system tables. def schema_parse_table(table_name, opts) m = output_identifier_meth(opts[:dataset]) m2 = input_identifier_meth(opts[:dataset]) tn = m2.call(table_name.to_s) info_sch_sch = opts[:information_schema_schema] inf_sch_qual = lambda{|s| info_sch_sch ? Sequel.qualify(info_sch_sch, s) : Sequel[s]} table_id = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:objects])).where(:name => tn).select_map(:object_id).first identity_cols = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:columns])). where(:object_id=>table_id, :is_identity=>true). select_map(:name) pk_index_id = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:sysindexes])). where(:id=>table_id, :indid=>1..254){{(status & 2048)=>2048}}. get(:indid) pk_cols = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:sysindexkeys]).as(:sik)). join(inf_sch_qual.call(Sequel[:sys][:syscolumns]).as(:sc), :id=>:id, :colid=>:colid). where{{sik[:id]=>table_id, sik[:indid]=>pk_index_id}}. select_order_map{sc[:name]} ds = metadata_dataset.from(inf_sch_qual.call(Sequel[:information_schema][:tables]).as(:t)). join(inf_sch_qual.call(Sequel[:information_schema][:columns]).as(:c), :table_catalog=>:table_catalog, :table_schema => :table_schema, :table_name => :table_name). select{[column_name.as(:column), data_type.as(:db_type), character_maximum_length.as(:max_chars), column_default.as(:default), is_nullable.as(:allow_null), numeric_precision.as(:column_size), numeric_scale.as(:scale)]}. where{{c[:table_name]=>tn}} if schema = opts[:schema] ds = ds.where{{c[:table_schema]=>schema}} end ds.map do |row| if row[:primary_key] = pk_cols.include?(row[:column]) row[:auto_increment] = identity_cols.include?(row[:column]) end row[:allow_null] = row[:allow_null] == 'YES' ? true : false row[:default] = nil if blank_object?(row[:default]) row[:type] = if row[:db_type] =~ /number|numeric|decimal/i && row[:scale] == 0 :integer else schema_column_type(row[:db_type]) end row[:max_length] = row[:max_chars] if row[:type] == :string && row[:max_chars] >= 0 [m.call(row.delete(:column)), row] end end # Set the mssql_unicode_strings settings from the given options. 
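# For example (illustrative), with the default setting of true:
#
#   DB.literal('a') # => "N'a'"
#
# and with mssql_unicode_strings set to false:
#
#   DB.literal('a') # => "'a'"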
def set_mssql_unicode_strings @mssql_unicode_strings = typecast_value_boolean(@opts.fetch(:mssql_unicode_strings, true)) end # MSSQL has both datetime and timestamp classes, most people are going # to want datetime def type_literal_generic_datetime(column) :datetime end # MSSQL doesn't have a true boolean class, so it uses bit def type_literal_generic_trueclass(column) :bit end # MSSQL uses varbinary(max) type for blobs def type_literal_generic_file(column) :'varbinary(max)' end # Handle clustered and nonclustered unique constraints def unique_constraint_sql_fragment(opts) add_clustered_sql_fragment(super, opts) end # MSSQL supports views with check option, but not local. def view_with_check_option_support true end end module DatasetMethods include(Module.new do Dataset.def_sql_method(self, :select, %w'with select distinct limit columns into from lock join where group having compounds order') end) include EmulateOffsetWithRowNumber CONSTANT_MAP = {:CURRENT_DATE=>'CAST(CURRENT_TIMESTAMP AS DATE)'.freeze, :CURRENT_TIME=>'CAST(CURRENT_TIMESTAMP AS TIME)'.freeze}.freeze EXTRACT_MAP = {:year=>"yy", :month=>"m", :day=>"d", :hour=>"hh", :minute=>"n", :second=>"s"}.freeze EXTRACT_MAP.each_value(&:freeze) LIMIT_ALL = Object.new.freeze Dataset.def_sql_method(self, :delete, %w'with delete limit from output from2 where') Dataset.def_sql_method(self, :insert, %w'with insert into columns output values') Dataset.def_sql_method(self, :update, [['if is_2005_or_later?', %w'with update limit table set output from where'], ['else', %w'update table set output from where']]) # Use the database's mssql_unicode_strings setting if the dataset hasn't overridden it. def mssql_unicode_strings opts.has_key?(:mssql_unicode_strings) ? opts[:mssql_unicode_strings] : db.mssql_unicode_strings end # Return a cloned dataset with the mssql_unicode_strings option set. def with_mssql_unicode_strings(v) clone(:mssql_unicode_strings=>v) end def complex_expression_sql_append(sql, op, args) case op when :'||' super(sql, :+, args) when :LIKE, :"NOT LIKE" super(sql, op, complex_expression_sql_like_args(args, " COLLATE Latin1_General_CS_AS)")) when :ILIKE, :"NOT ILIKE" super(sql, (op == :ILIKE ? :LIKE : :"NOT LIKE"), complex_expression_sql_like_args(args, " COLLATE Latin1_General_CI_AS)")) when :<<, :>> complex_expression_emulate_append(sql, op, args) when :extract part = args[0] raise(Sequel::Error, "unsupported extract argument: #{part.inspect}") unless format = EXTRACT_MAP[part] if part == :second expr = args[1] sql << "CAST((datepart(" << format.to_s << ', ' literal_append(sql, expr) sql << ') + datepart(ns, ' literal_append(sql, expr) sql << ")/1000000000.0) AS double precision)" else sql << "datepart(" << format.to_s << ', ' literal_append(sql, args[1]) sql << ')' end else super end end # MSSQL doesn't support the SQL standard CURRENT_DATE or CURRENT_TIME def constant_sql_append(sql, constant) if c = CONSTANT_MAP[constant] sql << c else super end end # Uses CROSS APPLY to join the given table into the current dataset. def cross_apply(table) join_table(:cross_apply, table) end # Disable the use of INSERT OUTPUT def disable_insert_output clone(:disable_insert_output=>true) end # MSSQL treats [] as a metacharacter in LIKE expressions.
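# For example (illustrative), to match strings starting with a
# literal percent sign:
#
#   ds.where(Sequel.like(:name, ds.escape_like('50%') + '%'))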
def escape_like(string) string.gsub(/[\\%_\[\]]/){|m| "\\#{m}"} end # MSSQL uses the CONTAINS keyword for full text search def full_text_search(cols, terms, opts = OPTS) terms = "\"#{terms.join('" OR "')}\"" if terms.is_a?(Array) where(Sequel.lit("CONTAINS (?, ?)", cols, terms)) end # Insert a record, returning the record inserted, using OUTPUT. Always returns nil without # running an INSERT statement if disable_insert_output is used. If the query runs # but returns no values, returns false. def insert_select(*values) return unless supports_insert_select? with_sql_first(insert_select_sql(*values)) || false end # Add OUTPUT clause unless there is already an existing output clause, then return # the SQL to insert. def insert_select_sql(*values) ds = (opts[:output] || opts[:returning]) ? self : output(nil, [SQL::ColumnAll.new(:inserted)]) ds.insert_sql(*values) end # Specify a table for a SELECT ... INTO query. def into(table) clone(:into => table) end # Allows you to do a dirty read of uncommitted data using WITH (NOLOCK). def nolock lock_style(:dirty) end # Uses OUTER APPLY to join the given table into the current dataset. def outer_apply(table) join_table(:outer_apply, table) end # Include an OUTPUT clause in the eventual INSERT, UPDATE, or DELETE query. # # The first argument is the table to output into, and the second argument # is either an Array of column values to select, or a Hash which maps output # column names to selected values, in the style of #insert or #update. # # Output into a returned result set is not currently supported. # # Examples: # # dataset.output(:output_table, [Sequel[:deleted][:id], Sequel[:deleted][:name]]) # dataset.output(:output_table, id: Sequel[:inserted][:id], name: Sequel[:inserted][:name]) def output(into, values) raise(Error, "SQL Server versions 2000 and earlier do not support the OUTPUT clause") unless supports_output_clause? output = {} case values when Hash output[:column_list], output[:select_list] = values.keys, values.values when Array output[:select_list] = values end output[:into] = into clone(:output => output) end # MSSQL uses [] to quote identifiers. def quoted_identifier_append(sql, name) sql << '[' << name.to_s.gsub(/\]/, ']]') << ']' end # Emulate RETURNING using the output clause. This only handles values that are simple column references. def returning(*values) values = values.map do |v| unless r = unqualified_column_for(v) raise(Error, "cannot emulate RETURNING via OUTPUT for value: #{v.inspect}") end r end clone(:returning=>values) end # On MSSQL 2012+ add a default order to the current dataset if an offset is used. # The default offset emulation using a subquery would be used in the unordered # case by default, and that also adds a default order, so it's better to just # avoid the subquery. def select_sql if @opts[:offset] raise(Error, "Using with_ties is not supported with an offset on Microsoft SQL Server") if @opts[:limit_with_ties] return order(1).select_sql if is_2012_or_later? && !@opts[:order] end super end # The version of the database server. def server_version db.server_version(@opts[:server]) end def supports_cte?(type=:select) is_2005_or_later? end # MSSQL 2005+ supports GROUP BY CUBE. def supports_group_cube? is_2005_or_later? end # MSSQL 2005+ supports GROUP BY ROLLUP def supports_group_rollup? is_2005_or_later? end # MSSQL 2008+ supports GROUPING SETS def supports_grouping_sets? is_2008_or_later? end # MSSQL supports insert_select via the OUTPUT clause. def supports_insert_select? supports_output_clause? 
&& !opts[:disable_insert_output] end # MSSQL 2005+ supports INTERSECT and EXCEPT def supports_intersect_except? is_2005_or_later? end # MSSQL does not support IS TRUE def supports_is_true? false end # MSSQL doesn't support JOIN USING def supports_join_using? false end # MSSQL 2008+ supports MERGE def supports_merge? is_2008_or_later? end # MSSQL 2005+ supports modifying joined datasets def supports_modifying_joins? is_2005_or_later? end # MSSQL does not support multiple columns for the IN/NOT IN operators def supports_multiple_column_in? false end # MSSQL supports NOWAIT. def supports_nowait? true end # MSSQL 2012+ supports offsets in correlated subqueries. def supports_offsets_in_correlated_subqueries? is_2012_or_later? end # MSSQL 2005+ supports the OUTPUT clause. def supports_output_clause? is_2005_or_later? end # MSSQL 2005+ can emulate RETURNING via the OUTPUT clause. def supports_returning?(type) supports_insert_select? end # MSSQL uses READPAST to skip locked rows. def supports_skip_locked? true end # MSSQL 2005+ supports window functions def supports_window_functions? true end # MSSQL cannot use WHERE 1. def supports_where_true? false end # Use WITH TIES when limiting the result set to also include additional # rows matching the last row. def with_ties clone(:limit_with_ties=>true) end protected # If returned primary keys are requested, use OUTPUT unless already set on the # dataset. If OUTPUT is already set, use existing returning values. If OUTPUT # is only set to return a single column, return an array of just that column. # Otherwise, return an array of hashes. def _import(columns, values, opts=OPTS) if opts[:return] == :primary_key && !@opts[:output] output(nil, [SQL::QualifiedIdentifier.new(:inserted, first_primary_key)])._import(columns, values, opts) elsif @opts[:output] # no transaction: our multi_insert_sql_strategy should guarantee # that there's only ever a single statement. sql = multi_insert_sql(columns, values)[0] naked.with_sql(sql).map{|v| v.length == 1 ? v.values.first : v} else super end end # If the dataset is using an order without a limit, offset, or custom SQL, # remove the order. Compounds on Microsoft SQL Server have undefined # order unless the result is specifically ordered. Applying the current # order before the compound doesn't work in all cases, such as when # qualified identifiers are used. If you want to ensure an order # for a compound dataset, apply the order after all compounds have been # added. def compound_from_self if @opts[:offset] && !@opts[:limit] && !is_2012_or_later? clone(:limit=>LIMIT_ALL).from_self elsif @opts[:order] && !(@opts[:sql] || @opts[:limit] || @opts[:offset]) unordered else super end end private # Normalize conditions for MERGE WHEN. def _merge_when_conditions_sql(sql, data) if data.has_key?(:conditions) sql << " AND " literal_append(sql, _normalize_merge_when_conditions(data[:conditions])) end end # Handle nil, false, and true MERGE WHEN conditions to avoid non-boolean # type error. def _normalize_merge_when_conditions(conditions) case conditions when nil, false {1=>0} when true {1=>1} when Sequel::SQL::DelayedEvaluation Sequel.delay{_normalize_merge_when_conditions(conditions.call(self))} else conditions end end # MSSQL requires a semicolon at the end of MERGE. def _merge_when_sql(sql) super sql << ';' end # MSSQL does not allow ordering in sub-clauses unless TOP (limit) is specified def aggregate_dataset (options_overlap(Sequel::Dataset::COUNT_FROM_SELF_OPTS) && !options_overlap([:limit])) ?
unordered.from_self : super end # Allow update and delete for unordered, limited datasets only. def check_not_limited!(type) return if @opts[:skip_limit_check] && type != :truncate raise Sequel::InvalidOperation, "Dataset##{type} not supported on ordered, limited datasets" if opts[:order] && opts[:limit] super if type == :truncate || @opts[:offset] end # Whether we are using SQL Server 2005 or later. def is_2005_or_later? server_version >= 9000000 end # Whether we are using SQL Server 2008 or later. def is_2008_or_later? server_version >= 10000000 end # Whether we are using SQL Server 2012 or later. def is_2012_or_later? server_version >= 11000000 end # Determine whether to add the COLLATE for LIKE arguments, based on the Database setting. def complex_expression_sql_like_args(args, collation) if db.like_without_collate args else args.map{|a| Sequel.lit(["(", collation], a)} end end # Use strict ISO-8601 format with T between date and time, # since that is the format that is multilanguage and not # DATEFORMAT dependent. def default_timestamp_format "'%Y-%m-%dT%H:%M:%S%N%z'" end # Only include the primary table in the main delete clause def delete_from_sql(sql) sql << ' FROM ' source_list_append(sql, @opts[:from][0..0]) end # MSSQL supports FROM clauses in DELETE and UPDATE statements. def delete_from2_sql(sql) if joined_dataset? select_from_sql(sql) select_join_sql(sql) end end alias update_from_sql delete_from2_sql def delete_output_sql(sql) output_sql(sql, :DELETED) end # There is no function on Microsoft SQL Server that does character length # and respects trailing spaces (datalength respects trailing spaces, but # counts bytes instead of characters). Use a hack to work around the # trailing spaces issue. def emulate_function?(name) name == :char_length || name == :trim end def emulate_function_sql_append(sql, f) case f.name when :char_length literal_append(sql, SQL::Function.new(:len, Sequel.join([f.args.first, 'x'])) - 1) when :trim literal_append(sql, SQL::Function.new(:ltrim, SQL::Function.new(:rtrim, f.args.first))) end end # Microsoft SQL Server 2012+ has native support for offsets, but only for ordered datasets. def emulate_offset_with_row_number? super && !(is_2012_or_later? && @opts[:order]) end # Return the first primary key for the current table. If this table has # multiple primary keys, this will only return one of them. Used by #_import. def first_primary_key @db.schema(self).map{|k, v| k if v[:primary_key] == true}.compact.first end def insert_output_sql(sql) output_sql(sql, :INSERTED) end alias update_output_sql insert_output_sql # Handle CROSS APPLY and OUTER APPLY JOIN types def join_type_sql(join_type) case join_type when :cross_apply 'CROSS APPLY' when :outer_apply 'OUTER APPLY' else super end end # MSSQL uses a literal hexadecimal number for blob strings def literal_blob_append(sql, v) sql << '0x' << v.unpack("H*").first end # Use YYYYmmdd format, since that's the only format that is # multilanguage and not DATEFORMAT dependent. def literal_date(v) v.strftime("'%Y%m%d'") end # Use 0 for false on MSSQL def literal_false '0' end # Optionally use unicode string syntax for all strings. Don't double # backslashes. def literal_string_append(sql, v) sql << (mssql_unicode_strings ? "N'" : "'") sql << v.gsub("'", "''").gsub(/\\((?:\r\n)|\n)/, '\\\\\\\\\\1\\1') << "'" end # Use 1 for true on MSSQL def literal_true '1' end # MSSQL 2008+ supports multiple rows in the VALUES clause, older versions # can use UNION. def multi_insert_sql_strategy is_2008_or_later? ?
:values : :union end def non_sql_option?(key) super || key == :disable_insert_output || key == :mssql_unicode_strings end def select_into_sql(sql) if i = @opts[:into] sql << " INTO " identifier_append(sql, i) end end # MSSQL 2000 uses TOP N for limit. For MSSQL 2005+ TOP (N) is used # to allow the limit to be a bound variable. def select_limit_sql(sql) if l = @opts[:limit] return if is_2012_or_later? && @opts[:order] && @opts[:offset] shared_limit_sql(sql, l) end end def shared_limit_sql(sql, l) if is_2005_or_later? if l == LIMIT_ALL sql << " TOP (100) PERCENT" else sql << " TOP (" literal_append(sql, l) sql << ')' end else sql << " TOP " literal_append(sql, l) end if @opts[:limit_with_ties] sql << " WITH TIES" end end def update_limit_sql(sql) if l = @opts[:limit] shared_limit_sql(sql, l) end end alias delete_limit_sql update_limit_sql # Handle dirty, skip locked, and for update locking def select_lock_sql(sql) lock = @opts[:lock] skip_locked = @opts[:skip_locked] nowait = @opts[:nowait] for_update = lock == :update dirty = lock == :dirty lock_hint = for_update || dirty if lock_hint || skip_locked sql << " WITH (" if lock_hint sql << (for_update ? 'UPDLOCK' : 'NOLOCK') end if skip_locked || nowait sql << ', ' if lock_hint sql << (skip_locked ? "READPAST" : "NOWAIT") end sql << ')' else super end end # On 2012+ when there is an order with an offset, append the offset (and possible # limit) at the end of the order clause. def select_order_sql(sql) super if is_2012_or_later? && @opts[:order] if o = @opts[:offset] sql << " OFFSET " literal_append(sql, o) sql << " ROWS" if l = @opts[:limit] sql << " FETCH NEXT " literal_append(sql, l) sql << " ROWS ONLY" end end end end def output_sql(sql, type) return unless supports_output_clause? if output = @opts[:output] output_list_sql(sql, output) elsif values = @opts[:returning] output_returning_sql(sql, type, values) end end def output_list_sql(sql, output) sql << " OUTPUT " column_list_append(sql, output[:select_list]) if into = output[:into] sql << " INTO " identifier_append(sql, into) if column_list = output[:column_list] sql << ' (' source_list_append(sql, column_list) sql << ')' end end end def output_returning_sql(sql, type, values) sql << " OUTPUT " if values.empty? literal_append(sql, SQL::ColumnAll.new(type)) else values = values.map do |v| case v when SQL::AliasedExpression Sequel.qualify(type, v.expression).as(v.alias) else Sequel.qualify(type, v) end end column_list_append(sql, values) end end # MSSQL does not natively support NULLS FIRST/LAST. def requires_emulating_nulls_first? true end # MSSQL supports 100-nsec precision for time columns, but ruby by # default only supports usec precision. def sqltime_precision 6 end # MSSQL supports millisecond timestamp precision for datetime columns. # 100-nsec precision is supported for datetime2 columns, but Sequel does # not know what the column type is when formatting values. def timestamp_precision 3 end # Only include the primary table in the main update clause def update_table_sql(sql) sql << ' ' source_list_append(sql, @opts[:from][0..0]) end def uses_with_rollup? !is_2008_or_later? 
end end end end sequel-5.63.0/lib/sequel/adapters/shared/mysql.rb000066400000000000000000001126721434214120600217200ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../utils/replace' require_relative '../utils/split_alter_table' require_relative '../utils/unmodified_identifiers' module Sequel module MySQL Sequel::Database.set_shared_adapter_scheme(:mysql, self) def self.mock_adapter_setup(db) db.instance_exec do @server_version = 50617 end end module DatabaseMethods include UnmodifiedIdentifiers::DatabaseMethods include Sequel::Database::SplitAlterTable CAST_TYPES = {String=>:CHAR, Integer=>:SIGNED, Time=>:DATETIME, DateTime=>:DATETIME, Numeric=>:DECIMAL, BigDecimal=>:DECIMAL, File=>:BINARY}.freeze COLUMN_DEFINITION_ORDER = [:generated, :collate, :null, :default, :unique, :primary_key, :auto_increment, :references].freeze # Set the default charset used for CREATE TABLE. You can pass the # :charset option to create_table to override this setting. attr_accessor :default_charset # Set the default collation used for CREATE TABLE. You can pass the # :collate option to create_table to override this setting. attr_accessor :default_collate # Set the default engine used for CREATE TABLE. You can pass the # :engine option to create_table to override this setting. attr_accessor :default_engine # MySQL's cast rules are restrictive in that you can't just cast to any possible # database type. def cast_type_literal(type) CAST_TYPES[type] || super end def commit_prepared_transaction(transaction_id, opts=OPTS) run("XA COMMIT #{literal(transaction_id)}", opts) end def database_type :mysql end # Use the Information Schema's KEY_COLUMN_USAGE table to get # basic information on foreign key columns, but include the # constraint name. def foreign_key_list(table, opts=OPTS) m = output_identifier_meth im = input_identifier_meth ds = metadata_dataset. from(Sequel[:INFORMATION_SCHEMA][:KEY_COLUMN_USAGE]). where(:TABLE_NAME=>im.call(table), :TABLE_SCHEMA=>Sequel.function(:DATABASE)). exclude(:CONSTRAINT_NAME=>'PRIMARY'). exclude(:REFERENCED_TABLE_NAME=>nil). order(:CONSTRAINT_NAME, :POSITION_IN_UNIQUE_CONSTRAINT). select(Sequel[:CONSTRAINT_NAME].as(:name), Sequel[:COLUMN_NAME].as(:column), Sequel[:REFERENCED_TABLE_NAME].as(:table), Sequel[:REFERENCED_COLUMN_NAME].as(:key)) h = {} ds.each do |row| if r = h[row[:name]] r[:columns] << m.call(row[:column]) r[:key] << m.call(row[:key]) else h[row[:name]] = {:name=>m.call(row[:name]), :columns=>[m.call(row[:column])], :table=>m.call(row[:table]), :key=>[m.call(row[:key])]} end end h.values end def freeze server_version mariadb? supports_timestamp_usecs? super end # MySQL namespaces indexes per table. def global_index_namespace? false end # Use SHOW INDEX FROM to get the index information for the # table. # # By default partial indexes are not included, you can use the # option :partial to override this. def indexes(table, opts=OPTS) indexes = {} remove_indexes = [] m = output_identifier_meth schema, table = schema_and_table(table) table = Sequel::SQL::Identifier.new(table) sql = "SHOW INDEX FROM #{literal(table)}" if schema schema = Sequel::SQL::Identifier.new(schema) sql += " FROM #{literal(schema)}" end metadata_dataset.with_sql(sql).each do |r| name = r[:Key_name] next if name == 'PRIMARY' name = m.call(name) remove_indexes << name if r[:Sub_part] && ! 
opts[:partial] i = indexes[name] ||= {:columns=>[], :unique=>r[:Non_unique] != 1} i[:columns] << m.call(r[:Column_name]) end indexes.reject{|k,v| remove_indexes.include?(k)} end def rollback_prepared_transaction(transaction_id, opts=OPTS) run("XA ROLLBACK #{literal(transaction_id)}", opts) end # Whether the database is MariaDB and not MySQL def mariadb? return @is_mariadb if defined?(@is_mariadb) @is_mariadb = !(fetch('SELECT version()').single_value! !~ /mariadb/i) end # Get version of MySQL server, used for determining capabilities. def server_version @server_version ||= begin m = /(\d+)\.(\d+)\.(\d+)/.match(fetch('SELECT version()').single_value!) (m[1].to_i * 10000) + (m[2].to_i * 100) + m[3].to_i end end # MySQL supports CREATE TABLE IF NOT EXISTS syntax. def supports_create_table_if_not_exists? true end # Generated columns are supported in MariaDB 5.2.0+ and MySQL 5.7.6+. def supports_generated_columns? server_version >= (mariadb? ? 50200 : 50706) end # MySQL 5+ supports prepared transactions (two-phase commit) using XA def supports_prepared_transactions? server_version >= 50000 end # MySQL 5+ supports savepoints def supports_savepoints? server_version >= 50000 end # MySQL doesn't support savepoints inside prepared transactions from # 5.5.12 to 5.5.23, see http://bugs.mysql.com/bug.php?id=64374 def supports_savepoints_in_prepared_transactions? super && (server_version <= 50512 || server_version >= 50523) end # Support fractional timestamps on MySQL 5.6.5+ if the :fractional_seconds # Database option is used. Technically, MySQL 5.6.4+ supports them, but # automatic initialization of datetime values wasn't supported until 5.6.5, # and this is related to that. def supports_timestamp_usecs? return @supports_timestamp_usecs if defined?(@supports_timestamp_usecs) @supports_timestamp_usecs = server_version >= 50605 && typecast_value_boolean(opts[:fractional_seconds]) end # MySQL supports transaction isolation levels def supports_transaction_isolation_levels? true end # Return an array of symbols specifying table names in the current database. # # Options: # :server :: Set the server to use def tables(opts=OPTS) full_tables('BASE TABLE', opts) end # Return an array of symbols specifying view names in the current database. # # Options: # :server :: Set the server to use def views(opts=OPTS) full_tables('VIEW', opts) end # Renames multiple tables in a single call. # # DB.rename_tables [:items, :old_items], [:other_items, :old_other_items] # # RENAME TABLE items TO old_items, other_items TO old_other_items def rename_tables(*renames) execute_ddl(rename_tables_sql(renames)) renames.each{|from,| remove_cached_schema(from)} end private def alter_table_add_column_sql(table, op) pos = if after_col = op[:after] " AFTER #{quote_identifier(after_col)}" elsif op[:first] " FIRST" end sql = if related = op.delete(:table) sql = super + "#{pos}, ADD " op[:table] = related op[:key] ||= primary_key_from_schema(related) if constraint_name = op.delete(:foreign_key_constraint_name) sql << "CONSTRAINT #{quote_identifier(constraint_name)} " end sql << "FOREIGN KEY (#{quote_identifier(op[:name])})#{column_references_sql(op)}" else "#{super}#{pos}" end end def alter_table_change_column_sql(table, op) o = op[:op] opts = schema(table).find{|x| x.first == op[:name]} opts = opts ? opts.last.dup : {} opts[:name] = o == :rename_column ? op[:new_name] : op[:name] opts[:type] = o == :set_column_type ? op[:type] : opts[:db_type] opts[:null] = o == :set_column_null ?
op[:null] : opts[:allow_null] opts[:default] = o == :set_column_default ? op[:default] : opts[:ruby_default] opts.delete(:default) if opts[:default] == nil opts.delete(:primary_key) unless op[:type] || opts[:type] raise Error, "cannot determine database type to use for CHANGE COLUMN operation" end opts = op.merge(opts) if op.has_key?(:auto_increment) opts[:auto_increment] = op[:auto_increment] end "CHANGE COLUMN #{quote_identifier(op[:name])} #{column_definition_sql(opts)}" end alias alter_table_rename_column_sql alter_table_change_column_sql alias alter_table_set_column_type_sql alter_table_change_column_sql alias alter_table_set_column_null_sql alter_table_change_column_sql def alter_table_set_column_default_sql(table, op) return super unless op[:default].nil? opts = schema(table).find{|x| x[0] == op[:name]} if opts && opts[1][:allow_null] == false "ALTER COLUMN #{quote_identifier(op[:name])} DROP DEFAULT" else super end end def alter_table_add_constraint_sql(table, op) if op[:type] == :foreign_key op[:key] ||= primary_key_from_schema(op[:table]) end super end def alter_table_drop_constraint_sql(table, op) case op[:type] when :primary_key "DROP PRIMARY KEY" when :foreign_key name = op[:name] || foreign_key_name(table, op[:columns]) "DROP FOREIGN KEY #{quote_identifier(name)}" when :unique "DROP INDEX #{quote_identifier(op[:name])}" when :check, nil if supports_check_constraints? "DROP CONSTRAINT #{quote_identifier(op[:name])}" end end end def alter_table_sql(table, op) case op[:op] when :drop_index "#{drop_index_sql(table, op)} ON #{quote_schema_table(table)}" when :drop_constraint if op[:type] == :primary_key if (pk = primary_key_from_schema(table)).length == 1 return [alter_table_sql(table, {:op=>:rename_column, :name=>pk.first, :new_name=>pk.first, :auto_increment=>false}), super] end end super else super end end # Handle MySQL specific default format. def column_schema_normalize_default(default, type) if column_schema_default_string_type?(type) return if [:date, :datetime, :time].include?(type) && /\ACURRENT_(?:DATE|TIMESTAMP)?\z/.match(default) default = "'#{default.gsub("'", "''").gsub('\\', '\\\\')}'" end super(default, type) end def column_schema_to_ruby_default(default, type) return Sequel::CURRENT_DATE if mariadb? && server_version >= 100200 && default == 'curdate()' super end # Don't allow combining adding foreign key operations with other # operations, since in some cases adding a foreign key constraint in # the same query as other operations results in MySQL error 150. def combinable_alter_table_op?(op) super && !(op[:op] == :add_constraint && op[:type] == :foreign_key) && !(op[:op] == :drop_constraint && op[:type] == :primary_key) end # The SQL queries to execute on initial connection def mysql_connection_setting_sqls sqls = [] if wait_timeout = opts.fetch(:timeout, 2147483) # Increase timeout so mysql server doesn't disconnect us # Value used by default is maximum allowed value on Windows. sqls << "SET @@wait_timeout = #{wait_timeout}" end # By default, MySQL 'where id is null' selects the last inserted id sqls << "SET SQL_AUTO_IS_NULL=0" unless opts[:auto_is_null] # If the user has specified one or more sql modes, enable them if sql_mode = opts[:sql_mode] sql_mode = Array(sql_mode).join(',').upcase sqls << "SET sql_mode = '#{sql_mode}'" end # Disable the use of split_materialized in the optimizer. This is # needed to pass association tests on MariaDB 10.5+. 
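# A connection-options sketch showing how the settings above translate to
# session SQL (the URL and option values here are illustrative assumptions,
# not part of this file):
#
#   DB = Sequel.connect('mysql2://user:pass@localhost/blog',
#     sql_mode: [:traditional, :no_auto_value_on_zero],
#     # SET sql_mode = 'TRADITIONAL,NO_AUTO_VALUE_ON_ZERO'
#     disable_split_materialized: true)
#     # SET SESSION optimizer_switch='split_materialized=off'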
if opts[:disable_split_materialized] && typecast_value_boolean(opts[:disable_split_materialized]) sqls << "SET SESSION optimizer_switch='split_materialized=off'" end sqls end def auto_increment_sql 'AUTO_INCREMENT' end # MySQL needs to set transaction isolation before beginning a transaction def begin_new_transaction(conn, opts) set_transaction_isolation(conn, opts) log_connection_execute(conn, begin_transaction_sql) end # Use XA START to start a new prepared transaction if the :prepare # option is given. def begin_transaction(conn, opts=OPTS) if (s = opts[:prepare]) && savepoint_level(conn) == 1 log_connection_execute(conn, "XA START #{literal(s)}") else super end end # Support :on_update_current_timestamp option. def column_definition_default_sql(sql, column) super sql << " ON UPDATE CURRENT_TIMESTAMP" if column[:on_update_current_timestamp] end # Add generation clause SQL fragment to column creation SQL. def column_definition_generated_sql(sql, column) if (generated_expression = column[:generated_always_as]) sql << " GENERATED ALWAYS AS (#{literal(generated_expression)})" case (type = column[:generated_type]) when nil # none, database default when :virtual sql << " VIRTUAL" when :stored sql << (mariadb? ? " PERSISTENT" : " STORED") else raise Error, "unsupported :generated_type option: #{type.inspect}" end end end def column_definition_order COLUMN_DEFINITION_ORDER end # MySQL doesn't allow default values on text columns, so ignore it if the # generic text type is used def column_definition_sql(column) column.delete(:default) if column[:type] == File || (column[:type] == String && column[:text] == true) super end # Prepare the XA transaction for a two-phase commit if the # :prepare option is given. def commit_transaction(conn, opts=OPTS) if (s = opts[:prepare]) && savepoint_level(conn) <= 1 log_connection_execute(conn, "XA END #{literal(s)}") log_connection_execute(conn, "XA PREPARE #{literal(s)}") else super end end # Use MySQL specific syntax for engine type and character encoding def create_table_sql(name, generator, options = OPTS) engine = options.fetch(:engine, default_engine) charset = options.fetch(:charset, default_charset) collate = options.fetch(:collate, default_collate) generator.constraints.sort_by{|c| (c[:type] == :primary_key) ? -1 : 1} # Proc for figuring out the primary key for a given table. key_proc = lambda do |t| if t == name if pk = generator.primary_key_name [pk] elsif !(pkc = generator.constraints.select{|con| con[:type] == :primary_key}).empty? pkc.first[:columns] elsif !(pkc = generator.columns.select{|con| con[:primary_key] == true}).empty? pkc.map{|c| c[:name]} end else primary_key_from_schema(t) end end # Manually set the keys, since MySQL requires one; it doesn't use the primary # key if none are specified. generator.constraints.each do |c| if c[:type] == :foreign_key c[:key] ||= key_proc.call(c[:table]) end end # Split column constraints into table constraints in some cases: # foreign key - Always # unique, primary_key - Only if constraint has a name generator.columns.each do |c| if t = c.delete(:table) same_table = t == name key = c[:key] || key_proc.call(t) if same_table && !key.nil?
generator.constraints.unshift(:type=>:unique, :columns=>Array(key)) end generator.foreign_key([c[:name]], t, c.merge(:name=>c[:foreign_key_constraint_name], :type=>:foreign_key, :key=>key)) end end "#{super}#{" ENGINE=#{engine}" if engine}#{" DEFAULT CHARSET=#{charset}" if charset}#{" DEFAULT COLLATE=#{collate}" if collate}" end DATABASE_ERROR_REGEXPS = { /Duplicate entry .+ for key/ => UniqueConstraintViolation, /foreign key constraint fails/ => ForeignKeyConstraintViolation, /cannot be null/ => NotNullConstraintViolation, /Deadlock found when trying to get lock; try restarting transaction/ => SerializationFailure, /CONSTRAINT .+ failed for/ => CheckConstraintViolation, /\A(Statement aborted because lock\(s\) could not be acquired immediately and NOWAIT is set\.|Lock wait timeout exceeded; try restarting transaction)/ => DatabaseLockTimeout, }.freeze def database_error_regexps DATABASE_ERROR_REGEXPS end # Backbone of the tables and views support using SHOW FULL TABLES. def full_tables(type, opts) m = output_identifier_meth metadata_dataset.with_sql('SHOW FULL TABLES').server(opts[:server]).map{|r| m.call(r.values.first) if r.delete(:Table_type) == type}.compact end def index_definition_sql(table_name, index) index_name = quote_identifier(index[:name] || default_index_name(table_name, index[:columns])) raise Error, "Partial indexes are not supported for this database" if index[:where] && !supports_partial_indexes? index_type = case index[:type] when :full_text "FULLTEXT " when :spatial "SPATIAL " else using = " USING #{index[:type]}" unless index[:type] == nil "UNIQUE " if index[:unique] end "CREATE #{index_type}INDEX #{index_name}#{using} ON #{quote_schema_table(table_name)} #{literal(index[:columns])}" end # Parse the schema for the given table to get an array of primary key columns def primary_key_from_schema(table) schema(table).select{|a| a[1][:primary_key]}.map{|a| a[0]} end # SQL statement for renaming multiple tables. def rename_tables_sql(renames) rename_tos = renames.map do |from, to| "#{quote_schema_table(from)} TO #{quote_schema_table(to)}" end.join(', ') "RENAME TABLE #{rename_tos}" end # Rollback the currently open XA transaction def rollback_transaction(conn, opts=OPTS) if (s = opts[:prepare]) && savepoint_level(conn) <= 1 log_connection_execute(conn, "XA END #{literal(s)}") log_connection_execute(conn, "XA PREPARE #{literal(s)}") log_connection_execute(conn, "XA ROLLBACK #{literal(s)}") else super end end def schema_column_type(db_type) case db_type when /\Aset/io :set when /\Amediumint/io :integer when /\Amediumtext/io :string else super end end # Use the MySQL specific DESCRIBE syntax to get a table description. def schema_parse_table(table_name, opts) m = output_identifier_meth(opts[:dataset]) im = input_identifier_meth(opts[:dataset]) table = SQL::Identifier.new(im.call(table_name)) table = SQL::QualifiedIdentifier.new(im.call(opts[:schema]), table) if opts[:schema] metadata_dataset.with_sql("DESCRIBE ?", table).map do |row| extra = row.delete(:Extra) if row[:primary_key] = row.delete(:Key) == 'PRI' row[:auto_increment] = !!(extra.to_s =~ /auto_increment/i) end if supports_generated_columns? 
# Extra field contains VIRTUAL or PERSISTENT for generated columns row[:generated] = !!(extra.to_s =~ /VIRTUAL|STORED|PERSISTENT/i) end row[:allow_null] = row.delete(:Null) == 'YES' row[:default] = row.delete(:Default) row[:db_type] = row.delete(:Type) row[:type] = schema_column_type(row[:db_type]) row[:extra] = extra [m.call(row.delete(:Field)), row] end end # Return nil if CHECK constraints are not supported, because # versions that don't support check constraints don't raise # errors for values outside of range. def column_schema_integer_min_max_values(db_type) super if supports_check_constraints? end # Split DROP INDEX ops on MySQL 5.6+, as dropping them in the same # statement as dropping a related foreign key causes an error. def split_alter_table_op?(op) server_version >= 50600 && (op[:op] == :drop_index || (op[:op] == :drop_constraint && op[:type] == :unique)) end # CHECK constraints only supported on MariaDB 10.2+ and MySQL 8.0.19+ # (at least MySQL documents DROP CONSTRAINT was supported in 8.0.19+). def supports_check_constraints? server_version >= (mariadb? ? 100200 : 80019) end # MySQL can combine multiple alter table ops into a single query. def supports_combining_alter_table_ops? true end # MySQL supports CREATE OR REPLACE VIEW. def supports_create_or_replace_view? true end # MySQL does not support named column constraints. def supports_named_column_constraints? false end # Respect the :size option if given to produce # tinyblob, mediumblob, and longblob if :tiny, # :medium, or :long is given. def type_literal_generic_file(column) case column[:size] when :tiny # < 2^8 bytes :tinyblob when :medium # < 2^24 bytes :mediumblob when :long # < 2^32 bytes :longblob else # 2^16 bytes :blob end end # MySQL has both datetime and timestamp classes, most people are going # to want datetime def type_literal_generic_datetime(column) if supports_timestamp_usecs? :'datetime(6)' elsif column[:default] == Sequel::CURRENT_TIMESTAMP :timestamp else :datetime end end # MySQL has both datetime and timestamp classes, most people are going # to want datetime. def type_literal_generic_only_time(column) if supports_timestamp_usecs? :'time(6)' else :time end end # MySQL doesn't have a true boolean class, so it uses tinyint(1) def type_literal_generic_trueclass(column) :'tinyint(1)' end # MySQL 5.0.2+ supports views with check option. def view_with_check_option_support :local if server_version >= 50002 end end # Dataset methods shared by datasets that use MySQL databases. module DatasetMethods MATCH_AGAINST = ["MATCH ".freeze, " AGAINST (".freeze, ")".freeze].freeze MATCH_AGAINST_BOOLEAN = ["MATCH ".freeze, " AGAINST (".freeze, " IN BOOLEAN MODE)".freeze].freeze Dataset.def_sql_method(self, :delete, %w'with delete from where order limit') Dataset.def_sql_method(self, :insert, %w'insert ignore into columns values on_duplicate_key_update') Dataset.def_sql_method(self, :select, %w'with select distinct calc_found_rows columns from join where group having window compounds order limit lock') Dataset.def_sql_method(self, :update, %w'with update ignore table set where order limit') include Sequel::Dataset::Replace include UnmodifiedIdentifiers::DatasetMethods def complex_expression_sql_append(sql, op, args) case op when :IN, :"NOT IN" ds = args[1] if ds.is_a?(Sequel::Dataset) && ds.opts[:limit] super(sql, op, [args[0], ds.from_self]) else super end when :~, :'!~', :'~*', :'!~*', :LIKE, :'NOT LIKE', :ILIKE, :'NOT ILIKE' if !db.mariadb? 
&& db.server_version >= 80000 && [:~, :'!~'].include?(op) func = Sequel.function(:REGEXP_LIKE, args[0], args[1], 'c') func = ~func if op == :'!~' return literal_append(sql, func) end sql << '(' literal_append(sql, args[0]) sql << ' ' sql << 'NOT ' if [:'NOT LIKE', :'NOT ILIKE', :'!~', :'!~*'].include?(op) sql << ([:~, :'!~', :'~*', :'!~*'].include?(op) ? 'REGEXP' : 'LIKE') sql << ' ' sql << 'BINARY ' if [:~, :'!~', :LIKE, :'NOT LIKE'].include?(op) literal_append(sql, args[1]) if [:LIKE, :'NOT LIKE', :ILIKE, :'NOT ILIKE'].include?(op) sql << " ESCAPE " literal_append(sql, "\\") end sql << ')' when :'||' if args.length > 1 sql << "CONCAT" array_sql_append(sql, args) else literal_append(sql, args[0]) end when :'B~' sql << "CAST(~" literal_append(sql, args[0]) sql << " AS SIGNED INTEGER)" else super end end # MySQL's CURRENT_TIMESTAMP does not use fractional seconds, # even if the database itself supports fractional seconds. If # MySQL 5.6.4+ is being used, use a value that will return # fractional seconds. def constant_sql_append(sql, constant) if constant == :CURRENT_TIMESTAMP && supports_timestamp_usecs? sql << 'CURRENT_TIMESTAMP(6)' else super end end # Use GROUP BY instead of DISTINCT ON if arguments are provided. def distinct(*args) args.empty? ? super : group(*args) end # Sets up the select methods to use the SQL_CALC_FOUND_ROWS option. # # dataset.calc_found_rows.limit(10) # # SELECT SQL_CALC_FOUND_ROWS * FROM table LIMIT 10 def calc_found_rows clone(:calc_found_rows => true) end # Sets up the select methods to delete from if deleting from a # joined dataset: # # DB[:a].join(:b, a_id: :id).delete # # DELETE a FROM a INNER JOIN b ON (b.a_id = a.id) # # DB[:a].join(:b, a_id: :id).delete_from(:a, :b).delete # # DELETE a, b FROM a INNER JOIN b ON (b.a_id = a.id) def delete_from(*tables) clone(:delete_from=>tables) end # Return the results of an EXPLAIN query as a string. Options: # :extended :: Use EXPLAIN EXTENDED instead of EXPLAIN if true. def explain(opts=OPTS) # Load the PrettyTable class, needed for explain output Sequel.extension(:_pretty_table) unless defined?(Sequel::PrettyTable) ds = db.send(:metadata_dataset).with_sql(((opts[:extended] && (db.mariadb? || db.server_version < 50700)) ? 'EXPLAIN EXTENDED ' : 'EXPLAIN ') + select_sql).naked rows = ds.all Sequel::PrettyTable.string(rows, ds.columns) end # Return a cloned dataset which will use LOCK IN SHARE MODE to lock returned rows. def for_share lock_style(:share) end # Adds a full text search filter def full_text_search(cols, terms, opts = OPTS) where(full_text_sql(cols, terms, opts)) end # MySQL specific full text search syntax. def full_text_sql(cols, terms, opts = OPTS) terms = terms.join(' ') if terms.is_a?(Array) SQL::PlaceholderLiteralString.new((opts[:boolean] ? MATCH_AGAINST_BOOLEAN : MATCH_AGAINST), [Array(cols), terms]) end # Sets up the insert methods to use INSERT IGNORE. # Useful if you have a unique key and want to just skip # inserting rows that violate the unique key restriction. # # dataset.insert_ignore.multi_insert( # [{name: 'a', value: 1}, {name: 'b', value: 2}] # ) # # INSERT IGNORE INTO tablename (name, value) VALUES (a, 1), (b, 2) def insert_ignore clone(:insert_ignore=>true) end # Sets up the insert methods to use ON DUPLICATE KEY UPDATE. # If you pass no arguments, ALL fields will be # updated with the new values. If you pass the fields you # want then ONLY those fields will be updated. If you pass a # hash you can customize the values (for example, to increment # a numeric field).
# # Useful if you have a unique key and want to update # rather than insert rows that violate the unique key restriction. # # dataset.on_duplicate_key_update.multi_insert( # [{name: 'a', value: 1}, {name: 'b', value: 2}] # ) # # INSERT INTO tablename (name, value) VALUES (a, 1), (b, 2) # # ON DUPLICATE KEY UPDATE name=VALUES(name), value=VALUES(value) # # dataset.on_duplicate_key_update(:value).multi_insert( # [{name: 'a', value: 1}, {name: 'b', value: 2}] # ) # # INSERT INTO tablename (name, value) VALUES (a, 1), (b, 2) # # ON DUPLICATE KEY UPDATE value=VALUES(value) # # dataset.on_duplicate_key_update( # value: Sequel.lit('value + VALUES(value)') # ).multi_insert( # [{name: 'a', value: 1}, {name: 'b', value: 2}] # ) # # INSERT INTO tablename (name, value) VALUES (a, 1), (b, 2) # # ON DUPLICATE KEY UPDATE value=value + VALUES(value) def on_duplicate_key_update(*args) clone(:on_duplicate_key_update => args) end # MySQL uses the nonstandard ` (backtick) for quoting identifiers. def quoted_identifier_append(sql, c) sql << '`' << c.to_s.gsub('`', '``') << '`' end # MariaDB 10.2+ and MySQL 8+ support CTEs def supports_cte?(type=:select) if db.mariadb? type == :select && db.server_version >= 100200 else case type when :select, :update, :delete db.server_version >= 80000 end end end # MySQL does not support derived column lists def supports_derived_column_lists? false end # MySQL can emulate DISTINCT ON with its non-standard GROUP BY implementation, # though the rows returned cannot be made deterministic through ordering. def supports_distinct_on? true end # MySQL supports GROUP BY WITH ROLLUP (but not CUBE) def supports_group_rollup? true end # MariaDB 10.3+ supports INTERSECT or EXCEPT def supports_intersect_except? db.mariadb? && db.server_version >= 100300 end # MySQL does not support limits in correlated subqueries (or any subqueries that use IN). def supports_limits_in_correlated_subqueries? false end # MySQL supports modifying joined datasets def supports_modifying_joins? true end # MySQL 8+ and MariaDB 10.3+ support NOWAIT. def supports_nowait? db.server_version >= (db.mariadb? ? 100300 : 80000) end # MySQL's DISTINCT ON emulation using GROUP BY does not respect the # query's ORDER BY clause. def supports_ordered_distinct_on? false end # MySQL supports pattern matching via regular expressions def supports_regexp? true end # MySQL 8+ supports SKIP LOCKED. def supports_skip_locked? !db.mariadb? && db.server_version >= 80000 end # Check the database setting for whether fractional timestamps # are supported. def supports_timestamp_usecs? db.supports_timestamp_usecs? end # MySQL 8+ supports WINDOW clause. def supports_window_clause? !db.mariadb? && db.server_version >= 80000 end # MariaDB 10.2+ and MySQL 8+ support window functions def supports_window_functions? db.server_version >= (db.mariadb? ? 100200 : 80000) end # Sets up the update methods to use UPDATE IGNORE. # Useful if you have a unique key and want to just skip # updating rows that violate the unique key restriction. # # dataset.update_ignore.update(name: 'a', value: 1) # # UPDATE IGNORE tablename SET name = 'a', value = 1 def update_ignore clone(:update_ignore=>true) end private # Allow update and delete for limited datasets, unless there is an offset. def check_not_limited!(type) super if type == :truncate || @opts[:offset] end # Consider the first table in the joined dataset to be the table to delete # from, but include the others for the purposes of selecting rows. def delete_from_sql(sql) if joined_dataset?
sql << ' ' tables = @opts[:delete_from] || @opts[:from][0..0] source_list_append(sql, tables) sql << ' FROM ' source_list_append(sql, @opts[:from]) select_join_sql(sql) else super end end # MySQL doesn't use the SQL standard DEFAULT VALUES. def insert_columns_sql(sql) values = opts[:values] if values.is_a?(Array) && values.empty? sql << " ()" else super end end # MySQL supports INSERT IGNORE INTO def insert_ignore_sql(sql) sql << " IGNORE" if opts[:insert_ignore] end # MySQL supports UPDATE IGNORE def update_ignore_sql(sql) sql << " IGNORE" if opts[:update_ignore] end # MySQL supports INSERT ... ON DUPLICATE KEY UPDATE def insert_on_duplicate_key_update_sql(sql) if update_cols = opts[:on_duplicate_key_update] update_vals = nil if update_cols.empty? update_cols = columns elsif update_cols.last.is_a?(Hash) update_vals = update_cols.last update_cols = update_cols[0..-2] end sql << " ON DUPLICATE KEY UPDATE " c = false co = ', ' values = '=VALUES(' endp = ')' update_cols.each do |col| sql << co if c quote_identifier_append(sql, col) sql << values quote_identifier_append(sql, col) sql << endp c ||= true end if update_vals eq = '=' update_vals.map do |col,v| sql << co if c quote_identifier_append(sql, col) sql << eq literal_append(sql, v) c ||= true end end end end # MySQL doesn't use the standard DEFAULT VALUES for empty values. def insert_values_sql(sql) values = opts[:values] if values.is_a?(Array) && values.empty? sql << " VALUES ()" else super end end # Transforms :straight to STRAIGHT_JOIN. def join_type_sql(join_type) if join_type == :straight 'STRAIGHT_JOIN' else super end end # MySQL allows a LIMIT in DELETE and UPDATE statements. def limit_sql(sql) if l = @opts[:limit] sql << " LIMIT " literal_append(sql, l) end end alias delete_limit_sql limit_sql alias update_limit_sql limit_sql # MySQL uses a preceding X for hex escaping strings def literal_blob_append(sql, v) if v.empty? sql << "''" else sql << "0x" << v.unpack("H*").first end end # Use 0 for false on MySQL def literal_false '0' end # Raise error for infinitate and NaN values def literal_float(v) if v.infinite? || v.nan? raise InvalidValue, "Infinite floats and NaN values are not valid on MySQL" else super end end # SQL fragment for String. Doubles \ and ' by default. def literal_string_append(sql, v) sql << "'" << v.gsub("\\", "\\\\\\\\").gsub("'", "''") << "'" end # Use 1 for true on MySQL def literal_true '1' end # MySQL supports multiple rows in VALUES in INSERT. def multi_insert_sql_strategy :values end def non_sql_option?(key) super || key == :insert_ignore || key == :update_ignore || key == :on_duplicate_key_update end # MySQL does not natively support NULLS FIRST/LAST. def requires_emulating_nulls_first? true end def select_only_offset_sql(sql) sql << " LIMIT " literal_append(sql, @opts[:offset]) sql << ",18446744073709551615" end # Support FOR SHARE locking when using the :share lock style. # Use SKIP LOCKED if skipping locked rows. def select_lock_sql(sql) lock = @opts[:lock] if lock == :share if !db.mariadb? && db.server_version >= 80000 sql << ' FOR SHARE' else sql << ' LOCK IN SHARE MODE' end else super end if lock if @opts[:skip_locked] sql << " SKIP LOCKED" elsif @opts[:nowait] sql << " NOWAIT" end end end # MySQL specific SQL_CALC_FOUND_ROWS option def select_calc_found_rows_sql(sql) sql << ' SQL_CALC_FOUND_ROWS' if opts[:calc_found_rows] end # Use WITH RECURSIVE instead of WITH if any of the CTEs is recursive def select_with_sql_base opts[:with].any?{|w| w[:recursive]} ? 
"WITH RECURSIVE " : super end # MySQL uses WITH ROLLUP syntax. def uses_with_rollup? true end end end end sequel-5.63.0/lib/sequel/adapters/shared/oracle.rb000066400000000000000000000567401434214120600220230ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../utils/emulate_offset_with_row_number' module Sequel module Oracle Sequel::Database.set_shared_adapter_scheme(:oracle, self) def self.mock_adapter_setup(db) db.instance_exec do @server_version = 11000000 @primary_key_sequences = {} end end module DatabaseMethods attr_accessor :autosequence def create_sequence(name, opts=OPTS) self << create_sequence_sql(name, opts) end def create_trigger(*args) self << create_trigger_sql(*args) end def current_user @current_user ||= metadata_dataset.get{sys_context('USERENV', 'CURRENT_USER')} end def drop_sequence(name) self << drop_sequence_sql(name) end def database_type :oracle end def foreign_key_list(table, opts=OPTS) m = output_identifier_meth im = input_identifier_meth schema, table = schema_and_table(table) ds = metadata_dataset. from{[all_cons_columns.as(:pc), all_constraints.as(:p), all_cons_columns.as(:fc), all_constraints.as(:f)]}. where{{ f[:table_name]=>im.call(table), f[:constraint_type]=>'R', p[:owner]=>f[:r_owner], p[:constraint_name]=>f[:r_constraint_name], pc[:owner]=>p[:owner], pc[:constraint_name]=>p[:constraint_name], pc[:table_name]=>p[:table_name], fc[:owner]=>f[:owner], fc[:constraint_name]=>f[:constraint_name], fc[:table_name]=>f[:table_name], fc[:position]=>pc[:position]}}. select{[p[:table_name].as(:table), pc[:column_name].as(:key), fc[:column_name].as(:column), f[:constraint_name].as(:name)]}. order{[:table, fc[:position]]} ds = ds.where{{f[:schema_name]=>im.call(schema)}} if schema fks = {} ds.each do |r| if fk = fks[r[:name]] fk[:columns] << m.call(r[:column]) fk[:key] << m.call(r[:key]) else fks[r[:name]] = {:name=>m.call(r[:name]), :columns=>[m.call(r[:column])], :table=>m.call(r[:table]), :key=>[m.call(r[:key])]} end end fks.values end def freeze current_user server_version @conversion_procs.freeze super end # Oracle namespaces indexes per table. def global_index_namespace? false end IGNORE_OWNERS = %w'APEX_040000 CTXSYS EXFSYS MDSYS OLAPSYS ORDDATA ORDSYS SYS SYSTEM XDB XDBMETADATA XDBPM XFILES WMSYS'.freeze def tables(opts=OPTS) m = output_identifier_meth metadata_dataset.from(:all_tables). server(opts[:server]). where(:dropped=>'NO'). exclude(:owner=>IGNORE_OWNERS). select(:table_name). map{|r| m.call(r[:table_name])} end def views(opts=OPTS) m = output_identifier_meth metadata_dataset.from(:all_views). server(opts[:server]). exclude(:owner=>IGNORE_OWNERS). select(:view_name). map{|r| m.call(r[:view_name])} end # Whether a view with a given name exists. By default, looks in all schemas other than system # schemas. If the :current_schema option is given, looks in the schema for the current user. def view_exists?(name, opts=OPTS) ds = metadata_dataset.from(:all_views).where(:view_name=>input_identifier_meth.call(name)) if opts[:current_schema] ds = ds.where(:owner=>Sequel.function(:SYS_CONTEXT, 'userenv', 'current_schema')) else ds = ds.exclude(:owner=>IGNORE_OWNERS) end ds.count > 0 end # The version of the Oracle server, used for determining capability. 
def server_version(server=nil) return @server_version if @server_version @server_version = synchronize(server) do |conn| (conn.server_version rescue nil) if conn.respond_to?(:server_version) end unless @server_version @server_version = if m = /(\d+)\.(\d+)\.?(\d+)?\.?(\d+)?/.match(fetch("select version from PRODUCT_COMPONENT_VERSION where lower(product) like 'oracle%'").single_value) (m[1].to_i*1000000) + (m[2].to_i*10000) + (m[3].to_i*100) + m[4].to_i else 0 end end @server_version end # Oracle supports deferrable constraints. def supports_deferrable_constraints? true end # Oracle supports transaction isolation levels. def supports_transaction_isolation_levels? true end private def alter_table_sql(table, op) case op[:op] when :add_column if op[:primary_key] sqls = [] sqls << alter_table_sql(table, op.merge(:primary_key=>nil)) if op[:auto_increment] seq_name = default_sequence_name(table, op[:name]) sqls << drop_sequence_sql(seq_name) sqls << create_sequence_sql(seq_name, op) sqls << "UPDATE #{quote_schema_table(table)} SET #{quote_identifier(op[:name])} = #{seq_name}.nextval" end sqls << "ALTER TABLE #{quote_schema_table(table)} ADD PRIMARY KEY (#{quote_identifier(op[:name])})" sqls else "ALTER TABLE #{quote_schema_table(table)} ADD #{column_definition_sql(op)}" end when :set_column_null "ALTER TABLE #{quote_schema_table(table)} MODIFY #{quote_identifier(op[:name])} #{op[:null] ? 'NULL' : 'NOT NULL'}" when :set_column_type "ALTER TABLE #{quote_schema_table(table)} MODIFY #{quote_identifier(op[:name])} #{type_literal(op)}" when :set_column_default "ALTER TABLE #{quote_schema_table(table)} MODIFY #{quote_identifier(op[:name])} DEFAULT #{literal(op[:default])}" else super(table, op) end end def auto_increment_sql '' end # Do not support min/max integer values on Oracle, since # Oracle uses a number type, and integer just adds a # constraint on the number type. def column_schema_integer_min_max_values(db_type) nil end def create_sequence_sql(name, opts=OPTS) "CREATE SEQUENCE #{quote_identifier(name)} start with #{opts[:start_with]||1} increment by #{opts[:increment_by]||1} nomaxvalue" end def create_table_from_generator(name, generator, options) drop_statement, create_statements = create_table_sql_list(name, generator, options) swallow_database_error{execute_ddl(drop_statement)} if drop_statement create_statements.each{|sql| execute_ddl(sql)} end def create_table_sql_list(name, generator, options=OPTS) statements = [create_table_sql(name, generator, options)] drop_seq_statement = nil generator.columns.each do |c| if c[:auto_increment] c[:sequence_name] ||= default_sequence_name(name, c[:name]) unless c[:create_sequence] == false drop_seq_statement = drop_sequence_sql(c[:sequence_name]) statements << create_sequence_sql(c[:sequence_name], c) end unless c[:create_trigger] == false c[:trigger_name] ||= "BI_#{name}_#{c[:name]}" trigger_definition = <<-end_sql BEGIN IF :NEW.#{quote_identifier(c[:name])} IS NULL THEN SELECT #{c[:sequence_name]}.nextval INTO :NEW.#{quote_identifier(c[:name])} FROM dual; END IF; END; end_sql statements << create_trigger_sql(name, c[:trigger_name], trigger_definition, {:events => [:insert]}) end end end [drop_seq_statement, statements] end def create_trigger_sql(table, name, definition, opts=OPTS) events = opts[:events] ? Array(opts[:events]) : [:insert, :update, :delete] sql = <<-end_sql CREATE#{' OR REPLACE' if opts[:replace]} TRIGGER #{quote_identifier(name)} #{opts[:after] ?
'AFTER' : 'BEFORE'} #{events.map{|e| e.to_s.upcase}.join(' OR ')} ON #{quote_schema_table(table)} REFERENCING NEW AS NEW FOR EACH ROW #{definition} end_sql sql end DATABASE_ERROR_REGEXPS = { /unique constraint .+ violated/ => UniqueConstraintViolation, /integrity constraint .+ violated/ => ForeignKeyConstraintViolation, /check constraint .+ violated/ => CheckConstraintViolation, /cannot insert NULL into|cannot update .+ to NULL/ => NotNullConstraintViolation, /can't serialize access for this transaction/ => SerializationFailure, /resource busy and acquire with NOWAIT specified or timeout/ => DatabaseLockTimeout, }.freeze def database_error_regexps DATABASE_ERROR_REGEXPS end def default_sequence_name(table, column) "seq_#{table}_#{column}" end def drop_sequence_sql(name) "DROP SEQUENCE #{quote_identifier(name)}" end def remove_cached_schema(table) Sequel.synchronize{@primary_key_sequences.delete(table)} super end TRANSACTION_ISOLATION_LEVELS = {:uncommitted=>'READ COMMITTED'.freeze, :committed=>'READ COMMITTED'.freeze, :repeatable=>'SERIALIZABLE'.freeze, :serializable=>'SERIALIZABLE'.freeze}.freeze # Oracle doesn't support READ UNCOMMITTED or REPEATABLE READ transaction # isolation levels, so upgrade to the next highest level in those cases. def set_transaction_isolation_sql(level) "SET TRANSACTION ISOLATION LEVEL #{TRANSACTION_ISOLATION_LEVELS[level]}" end def sequence_for_table(table) return nil unless autosequence Sequel.synchronize{return @primary_key_sequences[table] if @primary_key_sequences.has_key?(table)} begin sch = schema(table) rescue Sequel::Error return nil end pk = sch.select{|k, v| v[:primary_key]} pks = if pk.length == 1 seq = "seq_#{table}_#{pk.first.first}" seq.to_sym unless from(:user_sequences).where(:sequence_name=>input_identifier_meth.call(seq)).empty? end Sequel.synchronize{@primary_key_sequences[table] = pks} end # Oracle supports CREATE OR REPLACE VIEW. def supports_create_or_replace_view? true end # Oracle's integer/:number type handles larger values than # most other databases' bigint types, so it should be # safe to use for Bignum. def type_literal_generic_bignum_symbol(column) :integer end # Oracle doesn't have a time type, so use timestamp for all # time columns. def type_literal_generic_only_time(column) :timestamp end # Oracle doesn't have a boolean type or even a reasonable # facsimile. Using a char(1) seems to be the recommended way. def type_literal_generic_trueclass(column) :'char(1)' end # SQL fragment for showing a table is temporary def temporary_table_sql 'GLOBAL TEMPORARY ' end # Oracle uses clob for text types. def uses_clob_for_text? true end # Oracle supports views with check option, but not local.
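# A usage sketch (the view, table, and column names are assumptions):
#
#   DB.create_view(:cheap_items, DB[:items].where{price < 100}, check: true)
#   # CREATE VIEW cheap_items AS SELECT * FROM items WHERE (price < 100) WITH CHECK OPTION
#
# Sequel consults the return value of the method below when deciding whether
# the :check option to create_view can be used; returning true rather than
# :local signals that the LOCAL variant is not available.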
def view_with_check_option_support true end end module DatasetMethods ROW_NUMBER_EXPRESSION = LiteralString.new('ROWNUM').freeze BITAND_PROC = lambda{|a, b| Sequel.lit(["CAST(BITAND(", ", ", ") AS INTEGER)"], a, b)} include(Module.new do Dataset.def_sql_method(self, :select, %w'with select distinct columns from join where group having compounds order limit lock') end) def complex_expression_sql_append(sql, op, args) case op when :& complex_expression_arg_pairs_append(sql, args, &BITAND_PROC) when :| complex_expression_arg_pairs_append(sql, args){|a, b| Sequel.lit(["(", " - ", " + ", ")"], a, complex_expression_arg_pairs([a, b], &BITAND_PROC), b)} when :^ complex_expression_arg_pairs_append(sql, args) do |*x| s1 = complex_expression_arg_pairs(x){|a, b| Sequel.lit(["(", " - ", " + ", ")"], a, complex_expression_arg_pairs([a, b], &BITAND_PROC), b)} s2 = complex_expression_arg_pairs(x, &BITAND_PROC) Sequel.lit(["(", " - ", ")"], s1, s2) end when :~, :'!~', :'~*', :'!~*' raise InvalidOperation, "Pattern matching via regular expressions is not supported in this Oracle version" unless supports_regexp? if op == :'!~' || op == :'!~*' sql << 'NOT ' end sql << 'REGEXP_LIKE(' literal_append(sql, args[0]) sql << ',' literal_append(sql, args[1]) if op == :'~*' || op == :'!~*' sql << ", 'i'" end sql << ')' when :%, :<<, :>>, :'B~' complex_expression_emulate_append(sql, op, args) else super end end # Oracle doesn't support CURRENT_TIME, as it doesn't have # a type for storing just time values without a date, so # use CURRENT_TIMESTAMP in its place. def constant_sql_append(sql, c) if c == :CURRENT_TIME super(sql, :CURRENT_TIMESTAMP) else super end end # Oracle uses MINUS instead of EXCEPT, and doesn't support EXCEPT ALL def except(dataset, opts=OPTS) raise(Sequel::Error, "EXCEPT ALL not supported") if opts[:all] compound_clone(:minus, dataset, opts) end # Use a custom expression with EXISTS to determine whether a dataset # is empty. def empty? db[:dual].where(@opts[:offset] ? exists : unordered.exists).get(1) == nil end # Oracle requires SQL standard datetimes def requires_sql_standard_datetimes? true end # Create a copy of this dataset associated to the given sequence name, # which will be used when calling insert to find the most recently # inserted value for the sequence. def sequence(s) clone(:sequence=>s) end # Handle LIMIT by using a unlimited subselect filtered with ROWNUM, # unless Oracle 12 is used. def select_sql return super if @opts[:sql] return super if supports_fetch_next_rows? o = @opts[:offset] if o && o != 0 columns = clone(:append_sql=>String.new, :placeholder_literal_null=>true).columns dsa1 = dataset_alias(1) rn = row_number_column limit = @opts[:limit] ds = unlimited. from_self(:alias=>dsa1). select_append(ROW_NUMBER_EXPRESSION.as(rn)). from_self(:alias=>dsa1). select(*columns). where(SQL::Identifier.new(rn) > o) ds = ds.where(SQL::Identifier.new(rn) <= Sequel.+(o, limit)) if limit sql = @opts[:append_sql] || String.new subselect_sql_append(sql, ds) sql elsif limit = @opts[:limit] ds = unlimited # Lock doesn't work in subselects, so don't use a subselect when locking. # Don't use a subselect if custom SQL is used, as it breaks somethings. ds = ds.from_self unless @opts[:lock] sql = @opts[:append_sql] || String.new subselect_sql_append(sql, ds.where(SQL::ComplexExpression.new(:<=, ROW_NUMBER_EXPRESSION, limit))) sql else super end end # Oracle requires recursive CTEs to have column aliases. def recursive_cte_requires_column_aliases? 
true end def supports_cte?(type=:select) type == :select end # Oracle does not support derived column lists def supports_derived_column_lists? false end # Oracle supports FETCH NEXT ROWS since 12c, but it doesn't work when # locking or when skipping locked rows. def supports_fetch_next_rows? server_version >= 12000000 && !(@opts[:lock] || @opts[:skip_locked]) end # Oracle supports GROUP BY CUBE def supports_group_cube? true end # Oracle supports GROUP BY ROLLUP def supports_group_rollup? true end # Oracle supports GROUPING SETS def supports_grouping_sets? true end # Oracle does not support INTERSECT ALL or EXCEPT ALL def supports_intersect_except_all? false end # Oracle does not support IS TRUE. def supports_is_true? false end # Oracle does not support limits in correlated subqueries. def supports_limits_in_correlated_subqueries? false end # Oracle supports MERGE def supports_merge? true end # Oracle supports NOWAIT. def supports_nowait? true end # Oracle does not support offsets in correlated subqueries. def supports_offsets_in_correlated_subqueries? false end # Oracle does not support SELECT *, column def supports_select_all_and_column? false end # Oracle supports SKIP LOCKED. def supports_skip_locked? true end # Oracle supports timezones in literal timestamps. def supports_timestamp_timezones? true end # Oracle does not support WHERE 'Y' for WHERE TRUE. def supports_where_true? false end # Oracle supports window functions def supports_window_functions? true end # The version of the database server def server_version db.server_version(@opts[:server]) end # Oracle 10+ supports pattern matching via regular expressions def supports_regexp? server_version >= 10010002 end private # Handle nil, false, and true MERGE WHEN conditions to avoid non-boolean # type error. def _normalize_merge_when_conditions(conditions) case conditions when nil, false {1=>0} when true {1=>1} when Sequel::SQL::DelayedEvaluation Sequel.delay{_normalize_merge_when_conditions(conditions.call(self))} else conditions end end # Handle Oracle's non standard MERGE syntax def _merge_when_sql(sql) raise Error, "no WHEN [NOT] MATCHED clauses provided for MERGE" unless merge_when = @opts[:merge_when] insert = update = delete = nil types = merge_when.map{|d| d[:type]} raise Error, "Oracle does not support multiple INSERT, UPDATE, or DELETE clauses in MERGE" if types != types.uniq merge_when.each do |data| case data[:type] when :insert insert = data when :update update = data else # when :delete delete = data end end if delete raise Error, "Oracle does not support DELETE without UPDATE clause in MERGE" unless update raise Error, "Oracle does not support DELETE without conditions clause in MERGE" unless delete.has_key?(:conditions) end if update sql << " WHEN MATCHED" _merge_update_sql(sql, update) _merge_when_conditions_sql(sql, update) if delete sql << " DELETE" _merge_when_conditions_sql(sql, delete) end end if insert sql << " WHEN NOT MATCHED" _merge_insert_sql(sql, insert) _merge_when_conditions_sql(sql, insert) end end # Handle Oracle's non-standard MERGE WHEN condition syntax. def _merge_when_conditions_sql(sql, data) if data.has_key?(:conditions) sql << " WHERE " literal_append(sql, _normalize_merge_when_conditions(data[:conditions])) end end # Allow preparing prepared statements, since determining the prepared sql to use for # a prepared statement requires calling prepare on that statement. def allow_preparing_prepared_statements? true end # Oracle doesn't support the use of AS when aliasing a dataset. 
It doesn't require # the use of AS anywhere, so this disables it in all cases. Oracle also does not support # derived column lists in aliases. def as_sql_append(sql, aliaz, column_aliases=nil) raise Error, "oracle does not support derived column lists" if column_aliases sql << ' ' quote_identifier_append(sql, aliaz) end # The strftime format to use when literalizing the time. def default_timestamp_format "TIMESTAMP '%Y-%m-%d %H:%M:%S%N %z'" end def empty_from_sql ' FROM DUAL' end # There is no function on Oracle that does character length # and respects trailing spaces (datalength respects trailing spaces, but # counts bytes instead of characters). Use a hack to work around the # trailing spaces issue. def emulate_function?(name) name == :char_length end # Oracle treats empty strings like NULL values, and doesn't support # char_length, so make char_length use length with a nonempty string. # Unfortunately, as Oracle treats the empty string as NULL, there is # no way to get trim to return an empty string instead of nil if # the string only contains spaces. def emulate_function_sql_append(sql, f) if f.name == :char_length literal_append(sql, Sequel::SQL::Function.new(:length, Sequel.join([f.args.first, 'x'])) - 1) end end # If this dataset is associated with a sequence, return the most recently # inserted sequence value. def execute_insert(sql, opts=OPTS) opts = Hash[opts] if f = @opts[:from] opts[:table] = f.first end opts[:sequence] = @opts[:sequence] super end # Use a colon for the timestamp offset, since Oracle appears to require it. def format_timestamp_offset(hour, minute) sprintf("%+03i:%02i", hour, minute) end # Oracle doesn't support empty values when inserting. def insert_supports_empty_values? false end # Use string in hex format for blob data. def literal_blob_append(sql, v) sql << "'" << v.unpack("H*").first << "'" end # Oracle uses 'N' for false values. def literal_false "'N'" end # Oracle uses the SQL standard of only doubling ' inside strings. def literal_string_append(sql, v) sql << "'" << v.gsub("'", "''") << "'" end # Oracle uses 'Y' for true values. def literal_true "'Y'" end # Oracle can insert multiple rows using a UNION def multi_insert_sql_strategy :union end def select_limit_sql(sql) return unless supports_fetch_next_rows? if offset = @opts[:offset] sql << " OFFSET " literal_append(sql, offset) sql << " ROWS" end if limit = @opts[:limit] sql << " FETCH NEXT " literal_append(sql, limit) sql << " ROWS ONLY" end end # Use SKIP LOCKED if skipping locked rows. def select_lock_sql(sql) super if @opts[:lock] if @opts[:skip_locked] sql << " SKIP LOCKED" elsif @opts[:nowait] sql << " NOWAIT" end end end # Oracle supports quoted function names. def supports_quoted_function_names? true end end end end sequel-5.63.0/lib/sequel/adapters/shared/postgres.rb000066400000000000000000002722061434214120600224170ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../utils/unmodified_identifiers' module Sequel # Top level module for holding all PostgreSQL-related modules and classes # for Sequel. All adapters that connect to PostgreSQL support the following options: # # :client_min_messages :: Change the minimum level of messages that PostgreSQL will send to # the client. The PostgreSQL default is NOTICE, the Sequel default is # WARNING. Set to nil to not change the server default. Overridable on # a per instance basis via the :client_min_messages option. # :force_standard_strings :: Set to false to not force the use of standard strings.
Overridable # on a per instance basis via the :force_standard_strings option. # :search_path :: Set the schema search_path for this Database's connections. # Allows you to set which schemas do not need explicit # qualification, and in which order to check the schemas when # an unqualified object is referenced. module Postgres Sequel::Database.set_shared_adapter_scheme(:postgres, self) # Exception class raised when literalizing integers outside the bigint/int8 range. class IntegerOutsideBigintRange < InvalidValue; end NAN = 0.0/0.0 PLUS_INFINITY = 1.0/0.0 MINUS_INFINITY = -1.0/0.0 boolean = Object.new def boolean.call(s) s == 't' end integer = Object.new def integer.call(s) s.to_i end float = Object.new def float.call(s) case s when 'NaN' NAN when 'Infinity' PLUS_INFINITY when '-Infinity' MINUS_INFINITY else s.to_f end end date = Object.new def date.call(s) ::Date.new(*s.split('-').map(&:to_i)) end TYPE_TRANSLATOR_DATE = date.freeze bytea = Object.new def bytea.call(str) str = if str =~ /\A\\x/ # PostgreSQL 9.0+ bytea hex format str[2..-1].gsub(/(..)/){|s| s.to_i(16).chr} else # Historical PostgreSQL bytea escape format str.gsub(/\\(\\|'|[0-3][0-7][0-7])/) {|s| if s.size == 2 then s[1,1] else s[1,3].oct.chr end } end ::Sequel::SQL::Blob.new(str) end CONVERSION_PROCS = {} { [16] => boolean, [17] => bytea, [20, 21, 23, 26] => integer, [700, 701] => float, [1700] => ::Kernel.method(:BigDecimal), [1083, 1266] => ::Sequel.method(:string_to_time), [1082] => ::Sequel.method(:string_to_date), [1184, 1114] => ::Sequel.method(:database_to_application_timestamp), }.each do |k,v| k.each do |n| CONVERSION_PROCS[n] = v end end CONVERSION_PROCS.freeze module MockAdapterDatabaseMethods def bound_variable_arg(arg, conn) arg end def primary_key(table) :id end private # Handle NoMethodErrors when parsing schema due to output_identifier # being called with nil when the Database fetch results are not set # to what schema parsing expects. def schema_parse_table(table, opts=OPTS) super rescue NoMethodError [] end end def self.mock_adapter_setup(db) db.instance_exec do @server_version = 150000 initialize_postgres_adapter extend(MockAdapterDatabaseMethods) end end class CreateTableGenerator < Sequel::Schema::CreateTableGenerator # Add an exclusion constraint when creating the table. Elements should be # an array of 2 element arrays, with the first element being the column or # expression the exclusion constraint is applied to, and the second element # being the operator to use for the column/expression to check for exclusion: # # exclude([[:col1, '&&'], [:col2, '=']]) # # EXCLUDE USING gist (col1 WITH &&, col2 WITH =) # # To use a custom operator class, you need to use Sequel.lit with the expression # and operator class: # # exclude([[Sequel.lit('col1 inet_ops'), '&&'], [:col2, '=']]) # # EXCLUDE USING gist (col1 inet_ops WITH &&, col2 WITH =) # # Options supported: # # :name :: Name the constraint with the given name (useful if you may # need to drop the constraint later) # :using :: Override the index_method for the exclusion constraint (defaults to gist). # :where :: Create a partial exclusion constraint, which only affects # a subset of table rows, value should be a filter expression. def exclude(elements, opts=OPTS) constraints << {:type => :exclude, :elements => elements}.merge!(opts) end end class AlterTableGenerator < Sequel::Schema::AlterTableGenerator # Adds an exclusion constraint to an existing table, see # CreateTableGenerator#exclude.
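# A usage sketch (the table, column, and constraint names are assumptions):
#
#   DB.alter_table(:reservations) do
#     add_exclusion_constraint([[:during, '&&']], name: :no_overlapping_reservations)
#   end
#   # ALTER TABLE reservations ADD CONSTRAINT no_overlapping_reservations
#   #   EXCLUDE USING gist (during WITH &&)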
def add_exclusion_constraint(elements, opts=OPTS) @operations << {:op => :add_constraint, :type => :exclude, :elements => elements}.merge!(opts) end # Validate the constraint with the given name, which should have # been added previously with NOT VALID. def validate_constraint(name) @operations << {:op => :validate_constraint, :name => name} end end # Generator used for creating tables that are partitions of other tables. class CreatePartitionOfTableGenerator MINVALUE = Sequel.lit('MINVALUE').freeze MAXVALUE = Sequel.lit('MAXVALUE').freeze def initialize(&block) instance_exec(&block) end # The minimum value of the data type used in range partitions, useful # as an argument to #from. def minvalue MINVALUE end # The maximum value of the data type used in range partitions, useful # as an argument to #to. def maxvalue MAXVALUE end # Assumes range partitioning, sets the inclusive minimum value of the range for # this partition. def from(*v) @from = v end # Assumes range partitioning, sets the exclusive maximum value of the range for # this partition. def to(*v) @to = v end # Assumes list partitioning, sets the values to be included in this partition. def values_in(*v) @in = v end # Assumes hash partitioning, sets the modulus for this partition. def modulus(v) @modulus = v end # Assumes hash partitioning, sets the remainder for this partition. def remainder(v) @remainder = v end # Sets that this is a default partition, where values not in other partitions # are stored. def default @default = true end # The from and to values of this partition for a range partition. def range [@from, @to] end # The values to include in this partition for a list partition. def list @in end # The modulus and remainder to use for this partition for a hash partition. def hash_values [@modulus, @remainder] end # Determine the appropriate partition type for this partition by which methods # were called on it. def partition_type raise Error, "Unable to determine partition type, multiple different partitioning methods called" if [@from || @to, @list, @modulus || @remainder, @default].compact.length > 1 if @from || @to raise Error, "must call both from and to when creating a partition of a table if calling either" unless @from && @to :range elsif @in :list elsif @modulus || @remainder raise Error, "must call both modulus and remainder when creating a partition of a table if calling either" unless @modulus && @remainder :hash elsif @default :default else raise Error, "unable to determine partition type, no partitioning methods called" end end end # Error raised when Sequel determines a PostgreSQL exclusion constraint has been violated. class ExclusionConstraintViolation < Sequel::ConstraintViolation; end module DatabaseMethods include UnmodifiedIdentifiers::DatabaseMethods FOREIGN_KEY_LIST_ON_DELETE_MAP = {'a'=>:no_action, 'r'=>:restrict, 'c'=>:cascade, 'n'=>:set_null, 'd'=>:set_default}.freeze ON_COMMIT = {:drop => 'DROP', :delete_rows => 'DELETE ROWS', :preserve_rows => 'PRESERVE ROWS'}.freeze ON_COMMIT.each_value(&:freeze) # SQL fragment for custom sequences (ones not created by serial primary key), # returning the schema and literal form of the sequence name, by parsing # the column defaults table. SELECT_CUSTOM_SEQUENCE_SQL = (<<-end_sql SELECT name.nspname AS "schema", CASE WHEN split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2) ~ '.'
THEN substr(split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2), strpos(split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2), '.')+1) ELSE split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2) END AS "sequence" FROM pg_class t JOIN pg_namespace name ON (t.relnamespace = name.oid) JOIN pg_attribute attr ON (t.oid = attrelid) JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum) JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1]) WHERE cons.contype = 'p' AND pg_get_expr(def.adbin, attr.attrelid) ~* 'nextval' end_sql ).strip.gsub(/\s+/, ' ').freeze # SEQUEL6: Remove # SQL fragment for determining primary key column for the given table. Only # returns the first primary key if the table has a composite primary key. SELECT_PK_SQL = (<<-end_sql SELECT pg_attribute.attname AS pk FROM pg_class, pg_attribute, pg_index, pg_namespace WHERE pg_class.oid = pg_attribute.attrelid AND pg_class.relnamespace = pg_namespace.oid AND pg_class.oid = pg_index.indrelid AND pg_index.indkey[0] = pg_attribute.attnum AND pg_index.indisprimary = 't' end_sql ).strip.gsub(/\s+/, ' ').freeze # SEQUEL6: Remove # SQL fragment for getting sequence associated with table's # primary key, assuming it was a serial primary key column. SELECT_SERIAL_SEQUENCE_SQL = (<<-end_sql SELECT name.nspname AS "schema", seq.relname AS "sequence" FROM pg_class seq, pg_attribute attr, pg_depend dep, pg_namespace name, pg_constraint cons, pg_class t WHERE seq.oid = dep.objid AND seq.relnamespace = name.oid AND seq.relkind = 'S' AND attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid AND attr.attrelid = cons.conrelid AND attr.attnum = cons.conkey[1] AND attr.attrelid = t.oid AND cons.contype = 'p' end_sql ).strip.gsub(/\s+/, ' ').freeze # SEQUEL6: Remove # A hash of conversion procs, keyed by type integer (oid) and # having callable values for the conversion proc for that type. attr_reader :conversion_procs # Set a conversion proc for the given oid. The callable can # be passed either as a argument or a block. def add_conversion_proc(oid, callable=nil, &block) conversion_procs[oid] = callable || block end # Add a conversion proc for a named type, using the given block. # This should be used for types without fixed OIDs, which includes all types that # are not included in a default PostgreSQL installation. def add_named_conversion_proc(name, &block) unless oid = from(:pg_type).where(:typtype=>['b', 'e'], :typname=>name.to_s).get(:oid) raise Error, "No matching type in pg_type for #{name.inspect}" end add_conversion_proc(oid, block) end def commit_prepared_transaction(transaction_id, opts=OPTS) run("COMMIT PREPARED #{literal(transaction_id)}", opts) end # A hash of metadata for CHECK constraints on the table. # Keys are CHECK constraint name symbols. Values are hashes with the following keys: # :definition :: An SQL fragment for the definition of the constraint # :columns :: An array of column symbols for the columns referenced in the constraint, # can be an empty array if the database cannot deteremine the column symbols. def check_constraints(table) m = output_identifier_meth hash = {} _check_constraints_ds.where_each(:conrelid=>regclass_oid(table)) do |row| constraint = m.call(row[:constraint]) entry = hash[constraint] ||= {:definition=>row[:definition], :columns=>[]} entry[:columns] << m.call(row[:column]) if row[:column] end hash end # Convert the first primary key column in the +table+ from being a serial column to being an identity column. 
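# (Usage sketch, with an assumed table name:
#   DB.convert_serial_to_identity(:items)
#   DB.convert_serial_to_identity(:items, column: :item_id)
# )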
# If the column is already an identity column, assume it was already converted and make no changes. # # Only supported on PostgreSQL 10.2+, since on those versions Sequel will use identity columns # instead of serial columns for auto incrementing primary keys. Only supported when running as # a superuser, since regular users cannot modify system tables, and there is no way to keep an # existing sequence when changing an existing column to be an identity column. # # This method can raise an exception in at least the following cases where it may otherwise succeed # (there may be additional cases not listed here): # # * The serial column was added after table creation using PostgreSQL <7.3 # * A regular index also exists on the column (such an index can probably be dropped as the # primary key index should suffice) # # Options: # :column :: Specify the column to convert instead of using the first primary key column # :server :: Run the SQL on the given server def convert_serial_to_identity(table, opts=OPTS) raise Error, "convert_serial_to_identity is only supported on PostgreSQL 10.2+" unless server_version >= 100002 server = opts[:server] server_hash = server ? {:server=>server} : OPTS ds = dataset ds = ds.server(server) if server raise Error, "convert_serial_to_identity requires superuser permissions" unless ds.get{current_setting('is_superuser')} == 'on' table_oid = regclass_oid(table) im = input_identifier_meth unless column = (opts[:column] || ((sch = schema(table).find{|_, sc| sc[:primary_key] && sc[:auto_increment]}) && sch[0])) raise Error, "could not determine column to convert from serial to identity automatically" end column = im.call(column) column_num = ds.from(:pg_attribute). where(:attrelid=>table_oid, :attname=>column). get(:attnum) pg_class = Sequel.cast('pg_class', :regclass) res = ds.from(:pg_depend). where(:refclassid=>pg_class, :refobjid=>table_oid, :refobjsubid=>column_num, :classid=>pg_class, :objsubid=>0, :deptype=>%w'a i'). select_map([:objid, Sequel.as({:deptype=>'i'}, :v)]) case res.length when 0 raise Error, "unable to find related sequence when converting serial to identity" when 1 seq_oid, already_identity = res.first else raise Error, "more than one linked sequence found when converting serial to identity" end return if already_identity transaction(server_hash) do run("ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(column)} DROP DEFAULT", server_hash) ds.from(:pg_depend). where(:classid=>pg_class, :objid=>seq_oid, :objsubid=>0, :deptype=>'a'). update(:deptype=>'i') ds.from(:pg_attribute). where(:attrelid=>table_oid, :attname=>column). update(:attidentity=>'d') end remove_cached_schema(table) nil end # Creates the function in the database. Arguments: # name :: name of the function to create # definition :: string definition of the function, or object file for a dynamically loaded C function. # opts :: options hash: # :args :: function arguments, can be either a symbol or string specifying a type or an array of 1-3 elements: # 1 :: argument data type # 2 :: argument name # 3 :: argument mode (e.g. in, out, inout) # :behavior :: Should be IMMUTABLE, STABLE, or VOLATILE. PostgreSQL assumes VOLATILE by default. # :parallel :: The thread safety attribute of the function. Should be SAFE, UNSAFE, RESTRICTED. PostgreSQL assumes UNSAFE by default. # :cost :: The estimated cost of the function, used by the query planner. # :language :: The language the function uses. SQL is the default. 
# :link_symbol :: For a dynamically loaded C function, the function's link symbol if different from the definition argument. # :returns :: The data type returned by the function. If you are using OUT or INOUT argument modes, this is ignored. # Otherwise, if this is not specified, void is used by default to specify the function is not supposed to return a value. # :rows :: The estimated number of rows the function will return. Only use if the function returns SETOF something. # :security_definer :: Makes the privileges of the function the same as the privileges of the user who defined the function instead of # the privileges of the user who runs the function. There are security implications when doing this, see the PostgreSQL documentation. # :set :: Configuration variables to set while the function is being run, can be a hash or an array of two pairs. search_path is # often used here if :security_definer is used. # :strict :: Makes the function return NULL when any argument is NULL. def create_function(name, definition, opts=OPTS) self << create_function_sql(name, definition, opts) end # Create the procedural language in the database. Arguments: # name :: Name of the procedural language (e.g. plpgsql) # opts :: options hash: # :handler :: The name of a previously registered function used as a call handler for this language. # :replace :: Replace the installed language if it already exists (on PostgreSQL 9.0+). # :trusted :: Marks the language being created as trusted, allowing unprivileged users to create functions using this language. # :validator :: The name of a previously registered function used as a validator of functions defined in this language. def create_language(name, opts=OPTS) self << create_language_sql(name, opts) end # Create a schema in the database. Arguments: # name :: Name of the schema (e.g. admin) # opts :: options hash: # :if_not_exists :: Don't raise an error if the schema already exists (PostgreSQL 9.3+) # :owner :: The owner to set for the schema (defaults to current user if not specified) def create_schema(name, opts=OPTS) self << create_schema_sql(name, opts) end # Support partitions of tables using the :partition_of option. def create_table(name, options=OPTS, &block) if options[:partition_of] create_partition_of_table_from_generator(name, CreatePartitionOfTableGenerator.new(&block), options) return end super end # Support partitions of tables using the :partition_of option. def create_table?(name, options=OPTS, &block) if options[:partition_of] create_table(name, options.merge!(:if_not_exists=>true), &block) return end super end # Create a trigger in the database. Arguments: # table :: the table on which this trigger operates # name :: the name of this trigger # function :: the function to call for this trigger, which should return type trigger. # opts :: options hash: # :after :: Calls the trigger after execution instead of before. # :args :: An argument or array of arguments to pass to the function. # :each_row :: Calls the trigger for each row instead of for each statement. # :events :: Can be :insert, :update, :delete, or an array of any of those. Calls the trigger whenever that type of statement is used. By default, # the trigger is called for insert, update, or delete. # :replace :: Replace the trigger with the same name if it already exists (PostgreSQL 14+).
# :when :: A filter to use for the trigger def create_trigger(table, name, function, opts=OPTS) self << create_trigger_sql(table, name, function, opts) end def database_type :postgres end # Use PostgreSQL's DO syntax to execute an anonymous code block. The code should # be the literal code string to use in the underlying procedural language. Options: # # :language :: The procedural language the code is written in. The PostgreSQL # default is plpgsql. Can be specified as a string or a symbol. def do(code, opts=OPTS) language = opts[:language] run "DO #{"LANGUAGE #{literal(language.to_s)} " if language}#{literal(code)}" end # Drops the function from the database. Arguments: # name :: name of the function to drop # opts :: options hash: # :args :: The arguments for the function. See create_function_sql. # :cascade :: Drop other objects depending on this function. # :if_exists :: Don't raise an error if the function doesn't exist. def drop_function(name, opts=OPTS) self << drop_function_sql(name, opts) end # Drops a procedural language from the database. Arguments: # name :: name of the procedural language to drop # opts :: options hash: # :cascade :: Drop other objects depending on this language. # :if_exists :: Don't raise an error if the language doesn't exist. def drop_language(name, opts=OPTS) self << drop_language_sql(name, opts) end # Drops a schema from the database. Arguments: # name :: name of the schema to drop # opts :: options hash: # :cascade :: Drop all objects in this schema. # :if_exists :: Don't raise an error if the schema doesn't exist. def drop_schema(name, opts=OPTS) self << drop_schema_sql(name, opts) end # Drops a trigger from the database. Arguments: # table :: table from which to drop the trigger # name :: name of the trigger to drop # opts :: options hash: # :cascade :: Drop other objects depending on this trigger. # :if_exists :: Don't raise an error if the trigger doesn't exist. def drop_trigger(table, name, opts=OPTS) self << drop_trigger_sql(table, name, opts) end # Return full foreign key information using the pg system tables, including # :name, :on_delete, :on_update, and :deferrable entries in the hashes. # # Supports additional options: # :reverse :: Instead of returning foreign keys in the current table, return # foreign keys in other tables that reference the current table. # :schema :: Set to true to have the :table value in the hashes be a qualified # identifier. Set to false to use a separate :schema value with # the related schema. Defaults to whether the given table argument # is a qualified identifier. def foreign_key_list(table, opts=OPTS) m = output_identifier_meth schema, _ = opts.fetch(:schema, schema_and_table(table)) h = {} fklod_map = FOREIGN_KEY_LIST_ON_DELETE_MAP reverse = opts[:reverse] (reverse ? _reverse_foreign_key_list_ds : _foreign_key_list_ds).where_each(Sequel[:cl][:oid]=>regclass_oid(table)) do |row| if reverse key = [row[:schema], row[:table], row[:name]] else key = row[:name] end if r = h[key] r[:columns] << m.call(row[:column]) r[:key] << m.call(row[:refcolumn]) else entry = h[key] = { :name=>m.call(row[:name]), :columns=>[m.call(row[:column])], :key=>[m.call(row[:refcolumn])], :on_update=>fklod_map[row[:on_update]], :on_delete=>fklod_map[row[:on_delete]], :deferrable=>row[:deferrable], :table=>schema ? SQL::QualifiedIdentifier.new(m.call(row[:schema]), m.call(row[:table])) : m.call(row[:table]), } unless schema # If not combining schema information into the :table entry # include it as a separate entry.
entry[:schema] = m.call(row[:schema]) end end end h.values end def freeze server_version supports_prepared_transactions? _schema_ds _select_serial_sequence_ds _select_custom_sequence_ds _select_pk_ds _indexes_ds _check_constraints_ds _foreign_key_list_ds _reverse_foreign_key_list_ds @conversion_procs.freeze super end # Use the pg_* system tables to determine indexes on a table def indexes(table, opts=OPTS) m = output_identifier_meth cond = {Sequel[:tab][:oid]=>regclass_oid(table, opts)} cond[:indpred] = nil unless opts[:include_partial] indexes = {} _indexes_ds.where_each(cond) do |r| i = indexes[m.call(r[:name])] ||= {:columns=>[], :unique=>r[:unique], :deferrable=>r[:deferrable]} i[:columns] << m.call(r[:column]) end indexes end # Dataset containing all current database locks def locks dataset.from(:pg_class).join(:pg_locks, :relation=>:relfilenode).select{[pg_class[:relname], Sequel::SQL::ColumnAll.new(:pg_locks)]} end # Notifies the given channel. See the PostgreSQL NOTIFY documentation. Options: # # :payload :: The payload string to use for the NOTIFY statement. Only supported # in PostgreSQL 9.0+. # :server :: The server to which to send the NOTIFY statement, if the sharding support # is being used. def notify(channel, opts=OPTS) sql = String.new sql << "NOTIFY " dataset.send(:identifier_append, sql, channel) if payload = opts[:payload] sql << ", " dataset.literal_append(sql, payload.to_s) end execute_ddl(sql, opts) end # Return primary key for the given table. def primary_key(table, opts=OPTS) quoted_table = quote_schema_table(table) Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)} value = _select_pk_ds.where_single_value(Sequel[:pg_class][:oid] => regclass_oid(table, opts)) Sequel.synchronize{@primary_keys[quoted_table] = value} end # Return the sequence providing the default for the primary key for the given table. def primary_key_sequence(table, opts=OPTS) quoted_table = quote_schema_table(table) Sequel.synchronize{return @primary_key_sequences[quoted_table] if @primary_key_sequences.has_key?(quoted_table)} cond = {Sequel[:t][:oid] => regclass_oid(table, opts)} value = if pks = _select_serial_sequence_ds.first(cond) literal(SQL::QualifiedIdentifier.new(pks[:schema], pks[:sequence])) elsif pks = _select_custom_sequence_ds.first(cond) literal(SQL::QualifiedIdentifier.new(pks[:schema], LiteralString.new(pks[:sequence]))) end Sequel.synchronize{@primary_key_sequences[quoted_table] = value} if value end # Refresh the materialized view with the given name. # # DB.refresh_view(:items_view) # # REFRESH MATERIALIZED VIEW items_view # DB.refresh_view(:items_view, concurrently: true) # # REFRESH MATERIALIZED VIEW CONCURRENTLY items_view def refresh_view(name, opts=OPTS) run "REFRESH MATERIALIZED VIEW#{' CONCURRENTLY' if opts[:concurrently]} #{quote_schema_table(name)}" end # Reset the primary key sequence for the given table, basing it on the # maximum current value of the table's primary key. 
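# # For example, a minimal usage sketch (:items is a hypothetical table whose serial/identity primary key sequence has fallen behind the data, e.g. after bulk loading rows with explicit ids): # # DB.reset_primary_key_sequence(:items) # # calls setval(...) so the next sequence value is based on MAX(items.id)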
def reset_primary_key_sequence(table) return unless seq = primary_key_sequence(table) pk = SQL::Identifier.new(primary_key(table)) db = self s, t = schema_and_table(table) table = Sequel.qualify(s, t) if s if server_version >= 100000 seq_ds = metadata_dataset.from(:pg_sequence).where(:seqrelid=>regclass_oid(LiteralString.new(seq))) increment_by = :seqincrement min_value = :seqmin # :nocov: else seq_ds = metadata_dataset.from(LiteralString.new(seq)) increment_by = :increment_by min_value = :min_value # :nocov: end get{setval(seq, db[table].select(coalesce(max(pk)+seq_ds.select(increment_by), seq_ds.select(min_value))), false)} end def rollback_prepared_transaction(transaction_id, opts=OPTS) run("ROLLBACK PREPARED #{literal(transaction_id)}", opts) end # PostgreSQL uses SERIAL pseudo-type instead of AUTOINCREMENT for # managing incrementing primary keys. def serial_primary_key_options # :nocov: auto_increment_key = server_version >= 100002 ? :identity : :serial # :nocov: {:primary_key => true, auto_increment_key => true, :type=>Integer} end # The version of the PostgreSQL server, used for determining capability. def server_version(server=nil) return @server_version if @server_version ds = dataset ds = ds.server(server) if server @server_version = swallow_database_error{ds.with_sql("SELECT CAST(current_setting('server_version_num') AS integer) AS v").single_value} || 0 end # PostgreSQL supports CREATE TABLE IF NOT EXISTS on 9.1+ def supports_create_table_if_not_exists? server_version >= 90100 end # PostgreSQL 9.0+ supports some types of deferrable constraints beyond foreign key constraints. def supports_deferrable_constraints? server_version >= 90000 end # PostgreSQL supports deferrable foreign key constraints. def supports_deferrable_foreign_key_constraints? true end # PostgreSQL supports DROP TABLE IF EXISTS def supports_drop_table_if_exists? true end # PostgreSQL supports partial indexes. def supports_partial_indexes? true end # PostgreSQL 9.0+ supports trigger conditions. def supports_trigger_conditions? server_version >= 90000 end # PostgreSQL supports prepared transactions (two-phase commit) if # max_prepared_transactions is greater than 0. def supports_prepared_transactions? return @supports_prepared_transactions if defined?(@supports_prepared_transactions) @supports_prepared_transactions = self['SHOW max_prepared_transactions'].get.to_i > 0 end # PostgreSQL supports savepoints def supports_savepoints? true end # PostgreSQL supports transaction isolation levels def supports_transaction_isolation_levels? true end # PostgreSQL supports transaction DDL statements. def supports_transactional_ddl? true end # Array of symbols specifying table names in the current database. # The dataset used is yielded to the block if one is provided, # otherwise, an array of symbols of table names is returned. # # Options: # :qualify :: Return the tables as Sequel::SQL::QualifiedIdentifier instances, # using the schema the table is located in as the qualifier. # :schema :: The schema to search # :server :: The server to use def tables(opts=OPTS, &block) pg_class_relname(['r', 'p'], opts, &block) end # Check whether the given type name string/symbol (e.g. :hstore) is supported by # the database.
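# # For example (a sketch; whether this returns true depends on which extensions are installed in the connected database): # # DB.type_supported?(:hstore) # => true if a base hstore type exists in pg_type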
def type_supported?(type) Sequel.synchronize{return @supported_types[type] if @supported_types.has_key?(type)} supported = from(:pg_type).where(:typtype=>'b', :typname=>type.to_s).count > 0 Sequel.synchronize{return @supported_types[type] = supported} end # Creates a dataset that uses the VALUES clause: # # DB.values([[1, 2], [3, 4]]) # # VALUES ((1, 2), (3, 4)) # # DB.values([[1, 2], [3, 4]]).order(:column2).limit(1, 1) # # VALUES ((1, 2), (3, 4)) ORDER BY column2 LIMIT 1 OFFSET 1 def values(v) @default_dataset.clone(:values=>v) end # Array of symbols specifying view names in the current database. # # Options: # :materialized :: Return materialized views # :qualify :: Return the views as Sequel::SQL::QualifiedIdentifier instances, # using the schema the view is located in as the qualifier. # :schema :: The schema to search # :server :: The server to use def views(opts=OPTS) relkind = opts[:materialized] ? 'm' : 'v' pg_class_relname(relkind, opts) end private # Dataset used to retrieve CHECK constraint information def _check_constraints_ds @_check_constraints_ds ||= metadata_dataset. from{pg_constraint.as(:co)}. left_join(Sequel[:pg_attribute].as(:att), :attrelid=>:conrelid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])). where(:contype=>'c'). select{[co[:conname].as(:constraint), att[:attname].as(:column), pg_get_constraintdef(co[:oid]).as(:definition)]} end # Dataset used to retrieve foreign keys referenced by a table def _foreign_key_list_ds @_foreign_key_list_ds ||= __foreign_key_list_ds(false) end # Dataset used to retrieve foreign keys referencing a table def _reverse_foreign_key_list_ds @_reverse_foreign_key_list_ds ||= __foreign_key_list_ds(true) end # Build dataset used for foreign key list methods. def __foreign_key_list_ds(reverse) if reverse ctable = Sequel[:att2] cclass = Sequel[:cl2] rtable = Sequel[:att] rclass = Sequel[:cl] else ctable = Sequel[:att] cclass = Sequel[:cl] rtable = Sequel[:att2] rclass = Sequel[:cl2] end if server_version >= 90500 cpos = Sequel.expr{array_position(co[:conkey], ctable[:attnum])} rpos = Sequel.expr{array_position(co[:confkey], rtable[:attnum])} # :nocov: else range = 0...32 cpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:conkey], [x]), x]}, 32, ctable[:attnum])} rpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:confkey], [x]), x]}, 32, rtable[:attnum])} # :nocov: end ds = metadata_dataset. from{pg_constraint.as(:co)}. join(Sequel[:pg_class].as(cclass), :oid=>:conrelid). join(Sequel[:pg_attribute].as(ctable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])). join(Sequel[:pg_class].as(rclass), :oid=>Sequel[:co][:confrelid]). join(Sequel[:pg_attribute].as(rtable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:confkey])). join(Sequel[:pg_namespace].as(:nsp), :oid=>Sequel[:cl2][:relnamespace]). order{[co[:conname], cpos]}. where{{ cl[:relkind]=>%w'r p', co[:contype]=>'f', cpos=>rpos }}. 
select{[ co[:conname].as(:name), ctable[:attname].as(:column), co[:confupdtype].as(:on_update), co[:confdeltype].as(:on_delete), cl2[:relname].as(:table), rtable[:attname].as(:refcolumn), SQL::BooleanExpression.new(:AND, co[:condeferrable], co[:condeferred]).as(:deferrable), nsp[:nspname].as(:schema) ]} if reverse ds = ds.order_append(Sequel[:nsp][:nspname], Sequel[:cl2][:relname]) end ds end # Dataset used to retrieve index information def _indexes_ds @_indexes_ds ||= begin if server_version >= 90500 order = [Sequel[:indc][:relname], Sequel.function(:array_position, Sequel[:ind][:indkey], Sequel[:att][:attnum])] # :nocov: else range = 0...32 order = [Sequel[:indc][:relname], SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(Sequel[:ind][:indkey], [x]), x]}, 32, Sequel[:att][:attnum])] # :nocov: end attnums = SQL::Function.new(:ANY, Sequel[:ind][:indkey]) ds = metadata_dataset. from{pg_class.as(:tab)}. join(Sequel[:pg_index].as(:ind), :indrelid=>:oid). join(Sequel[:pg_class].as(:indc), :oid=>:indexrelid). join(Sequel[:pg_attribute].as(:att), :attrelid=>Sequel[:tab][:oid], :attnum=>attnums). left_join(Sequel[:pg_constraint].as(:con), :conname=>Sequel[:indc][:relname]). where{{ indc[:relkind]=>'i', ind[:indisprimary]=>false, :indexprs=>nil, :indisvalid=>true}}. order(*order). select{[indc[:relname].as(:name), ind[:indisunique].as(:unique), att[:attname].as(:column), con[:condeferrable].as(:deferrable)]} # :nocov: ds = ds.where(:indisready=>true) if server_version >= 80300 ds = ds.where(:indislive=>true) if server_version >= 90300 # :nocov: ds end end # Dataset used to determine custom serial sequences for tables def _select_custom_sequence_ds @_select_custom_sequence_ds ||= metadata_dataset. from{pg_class.as(:t)}. join(:pg_namespace, {:oid => :relnamespace}, :table_alias=>:name). join(:pg_attribute, {:attrelid => Sequel[:t][:oid]}, :table_alias=>:attr). join(:pg_attrdef, {:adrelid => :attrelid, :adnum => :attnum}, :table_alias=>:def). join(:pg_constraint, {:conrelid => :adrelid, Sequel[:cons][:conkey].sql_subscript(1) => :adnum}, :table_alias=>:cons). where{{cons[:contype] => 'p', pg_get_expr(self.def[:adbin], attr[:attrelid]) => /nextval/i}}. select{ expr = split_part(pg_get_expr(self.def[:adbin], attr[:attrelid]), "'", 2) [ name[:nspname].as(:schema), Sequel.case({{expr => /./} => substr(expr, strpos(expr, '.')+1)}, expr).as(:sequence) ] } end # Dataset used to determine normal serial sequences for tables def _select_serial_sequence_ds @_select_serial_sequence_ds ||= metadata_dataset. from{[ pg_class.as(:seq), pg_attribute.as(:attr), pg_depend.as(:dep), pg_namespace.as(:name), pg_constraint.as(:cons), pg_class.as(:t) ]}. where{[ [seq[:oid], dep[:objid]], [seq[:relnamespace], name[:oid]], [seq[:relkind], 'S'], [attr[:attrelid], dep[:refobjid]], [attr[:attnum], dep[:refobjsubid]], [attr[:attrelid], cons[:conrelid]], [attr[:attnum], cons[:conkey].sql_subscript(1)], [attr[:attrelid], t[:oid]], [cons[:contype], 'p'] ]}. select{[ name[:nspname].as(:schema), seq[:relname].as(:sequence) ]} end # Dataset used to determine primary keys for tables def _select_pk_ds @_select_pk_ds ||= metadata_dataset. from(:pg_class, :pg_attribute, :pg_index, :pg_namespace). where{[ [pg_class[:oid], pg_attribute[:attrelid]], [pg_class[:relnamespace], pg_namespace[:oid]], [pg_class[:oid], pg_index[:indrelid]], [pg_index[:indkey].sql_subscript(0), pg_attribute[:attnum]], [pg_index[:indisprimary], 't'] ]}.
select{pg_attribute[:attname].as(:pk)} end # Dataset used to get schema for tables def _schema_ds @_schema_ds ||= begin ds = metadata_dataset.select{[ pg_attribute[:attname].as(:name), SQL::Cast.new(pg_attribute[:atttypid], :integer).as(:oid), SQL::Cast.new(basetype[:oid], :integer).as(:base_oid), SQL::Function.new(:format_type, basetype[:oid], pg_type[:typtypmod]).as(:db_base_type), SQL::Function.new(:format_type, pg_type[:oid], pg_attribute[:atttypmod]).as(:db_type), SQL::Function.new(:pg_get_expr, pg_attrdef[:adbin], pg_class[:oid]).as(:default), SQL::BooleanExpression.new(:NOT, pg_attribute[:attnotnull]).as(:allow_null), SQL::Function.new(:COALESCE, SQL::BooleanExpression.from_value_pairs(pg_attribute[:attnum] => SQL::Function.new(:ANY, pg_index[:indkey])), false).as(:primary_key)]}. from(:pg_class). join(:pg_attribute, :attrelid=>:oid). join(:pg_type, :oid=>:atttypid). left_outer_join(Sequel[:pg_type].as(:basetype), :oid=>:typbasetype). left_outer_join(:pg_attrdef, :adrelid=>Sequel[:pg_class][:oid], :adnum=>Sequel[:pg_attribute][:attnum]). left_outer_join(:pg_index, :indrelid=>Sequel[:pg_class][:oid], :indisprimary=>true). where{{pg_attribute[:attisdropped]=>false}}. where{pg_attribute[:attnum] > 0}. order{pg_attribute[:attnum]} # :nocov: if server_version > 100000 # :nocov: ds = ds.select_append{pg_attribute[:attidentity]} # :nocov: if server_version > 120000 # :nocov: ds = ds.select_append{Sequel.~(pg_attribute[:attgenerated]=>'').as(:generated)} end end ds end end def alter_table_add_column_sql(table, op) "ADD COLUMN#{' IF NOT EXISTS' if op[:if_not_exists]} #{column_definition_sql(op)}" end def alter_table_generator_class Postgres::AlterTableGenerator end def alter_table_set_column_type_sql(table, op) s = super if using = op[:using] using = Sequel::LiteralString.new(using) if using.is_a?(String) s += ' USING ' s << literal(using) end s end def alter_table_drop_column_sql(table, op) "DROP COLUMN #{'IF EXISTS ' if op[:if_exists]}#{quote_identifier(op[:name])}#{' CASCADE' if op[:cascade]}" end def alter_table_validate_constraint_sql(table, op) "VALIDATE CONSTRAINT #{quote_identifier(op[:name])}" end # If the :synchronous option is given and non-nil, set synchronous_commit # appropriately. Valid values for the :synchronous option are true, # :on, false, :off, :local, and :remote_write. def begin_new_transaction(conn, opts) super if opts.has_key?(:synchronous) case sync = opts[:synchronous] when true sync = :on when false sync = :off when nil return end log_connection_execute(conn, "SET LOCAL synchronous_commit = #{sync}") end end # Set the READ ONLY transaction setting per savepoint, as PostgreSQL supports that. def begin_savepoint(conn, opts) super unless (read_only = opts[:read_only]).nil? log_connection_execute(conn, "SET TRANSACTION READ #{read_only ? 'ONLY' : 'WRITE'}") end end # Literalize non-String collate options. This is because unquoted collations # are folded to lowercase, and PostgreSQL uses mixed case or capitalized collations. def column_definition_collate_sql(sql, column) if collate = column[:collate] collate = literal(collate) unless collate.is_a?(String) sql << " COLLATE #{collate}" end end # Support identity columns, but only use the identity SQL syntax if no # default value is given. def column_definition_default_sql(sql, column) super if !column[:serial] && !['smallserial', 'serial', 'bigserial'].include?(column[:type].to_s) && !column[:default] if (identity = column[:identity]) sql << " GENERATED " sql << (identity == :always ?
"ALWAYS" : "BY DEFAULT") sql << " AS IDENTITY" elsif (generated = column[:generated_always_as]) sql << " GENERATED ALWAYS AS (#{literal(generated)}) STORED" end end end # Handle PostgreSQL specific default format. def column_schema_normalize_default(default, type) if m = /\A(?:B?('.*')::[^']+|\((-?\d+(?:\.\d+)?)\))\z/.match(default) default = m[1] || m[2] end super(default, type) end # If the :prepare option is given and we aren't in a savepoint, # prepare the transaction for a two-phase commit. def commit_transaction(conn, opts=OPTS) if (s = opts[:prepare]) && savepoint_level(conn) <= 1 log_connection_execute(conn, "PREPARE TRANSACTION #{literal(s)}") else super end end # PostgreSQL can't combine rename_column operations, and it can combine # the custom validate_constraint operation. def combinable_alter_table_op?(op) (super || op[:op] == :validate_constraint) && op[:op] != :rename_column end VALID_CLIENT_MIN_MESSAGES = %w'DEBUG5 DEBUG4 DEBUG3 DEBUG2 DEBUG1 LOG NOTICE WARNING ERROR FATAL PANIC'.freeze.each(&:freeze) # The SQL queries to execute when starting a new connection. def connection_configuration_sqls(opts=@opts) sqls = [] sqls << "SET standard_conforming_strings = ON" if typecast_value_boolean(opts.fetch(:force_standard_strings, true)) cmm = opts.fetch(:client_min_messages, :warning) if cmm && !cmm.to_s.empty? cmm = cmm.to_s.upcase.strip unless VALID_CLIENT_MIN_MESSAGES.include?(cmm) raise Error, "Unsupported client_min_messages setting: #{cmm}" end sqls << "SET client_min_messages = '#{cmm.to_s.upcase}'" end if search_path = opts[:search_path] case search_path when String search_path = search_path.split(",").map(&:strip) when Array # nil else raise Error, "unrecognized value for :search_path option: #{search_path.inspect}" end sqls << "SET search_path = #{search_path.map{|s| "\"#{s.gsub('"', '""')}\""}.join(',')}" end sqls end # Handle exclusion constraints. def constraint_definition_sql(constraint) case constraint[:type] when :exclude elements = constraint[:elements].map{|c, op| "#{literal(c)} WITH #{op}"}.join(', ') sql = String.new sql << "#{"CONSTRAINT #{quote_identifier(constraint[:name])} " if constraint[:name]}EXCLUDE USING #{constraint[:using]||'gist'} (#{elements})#{" WHERE #{filter_expr(constraint[:where])}" if constraint[:where]}" constraint_deferrable_sql_append(sql, constraint[:deferrable]) sql when :foreign_key, :check sql = super if constraint[:not_valid] sql << " NOT VALID" end sql else super end end def database_specific_error_class_from_sqlstate(sqlstate) if sqlstate == '23P01' ExclusionConstraintViolation elsif sqlstate == '40P01' SerializationFailure elsif sqlstate == '55P03' DatabaseLockTimeout else super end end DATABASE_ERROR_REGEXPS = [ # Add this check first, since otherwise it's possible for users to control # which exception class is generated. [/invalid input syntax/, DatabaseError], [/duplicate key value violates unique constraint/, UniqueConstraintViolation], [/violates foreign key constraint/, ForeignKeyConstraintViolation], [/violates check constraint/, CheckConstraintViolation], [/violates not-null constraint/, NotNullConstraintViolation], [/conflicting key value violates exclusion constraint/, ExclusionConstraintViolation], [/could not serialize access/, SerializationFailure], [/could not obtain lock on row in relation/, DatabaseLockTimeout], ].freeze def database_error_regexps DATABASE_ERROR_REGEXPS end # SQL for doing fast table insert from stdin. 
def copy_into_sql(table, opts) sql = String.new sql << "COPY #{literal(table)}" if cols = opts[:columns] sql << literal(Array(cols)) end sql << " FROM STDIN" if opts[:options] || opts[:format] sql << " (" sql << "FORMAT #{opts[:format]}" if opts[:format] sql << "#{', ' if opts[:format]}#{opts[:options]}" if opts[:options] sql << ')' end sql end # SQL for doing fast table output to stdout. def copy_table_sql(table, opts) if table.is_a?(String) table else if opts[:options] || opts[:format] options = String.new options << " (" options << "FORMAT #{opts[:format]}" if opts[:format] options << "#{', ' if opts[:format]}#{opts[:options]}" if opts[:options] options << ')' end table = if table.is_a?(::Sequel::Dataset) "(#{table.sql})" else literal(table) end "COPY #{table} TO STDOUT#{options}" end end # SQL statement to create database function. def create_function_sql(name, definition, opts=OPTS) args = opts[:args] if !opts[:args].is_a?(Array) || !opts[:args].any?{|a| Array(a).length == 3 and %w'OUT INOUT'.include?(a[2].to_s)} returns = opts[:returns] || 'void' end language = opts[:language] || 'SQL' <<-END CREATE#{' OR REPLACE' if opts[:replace]} FUNCTION #{name}#{sql_function_args(args)} #{"RETURNS #{returns}" if returns} LANGUAGE #{language} #{opts[:behavior].to_s.upcase if opts[:behavior]} #{'STRICT' if opts[:strict]} #{'SECURITY DEFINER' if opts[:security_definer]} #{"PARALLEL #{opts[:parallel].to_s.upcase}" if opts[:parallel]} #{"COST #{opts[:cost]}" if opts[:cost]} #{"ROWS #{opts[:rows]}" if opts[:rows]} #{opts[:set].map{|k,v| " SET #{k} = #{v}"}.join("\n") if opts[:set]} AS #{literal(definition.to_s)}#{", #{literal(opts[:link_symbol].to_s)}" if opts[:link_symbol]} END end # SQL for creating a procedural language. def create_language_sql(name, opts=OPTS) "CREATE#{' OR REPLACE' if opts[:replace] && server_version >= 90000}#{' TRUSTED' if opts[:trusted]} LANGUAGE #{name}#{" HANDLER #{opts[:handler]}" if opts[:handler]}#{" VALIDATOR #{opts[:validator]}" if opts[:validator]}" end # Create a partition of another table, used when the create_table with # the :partition_of option is given. def create_partition_of_table_from_generator(name, generator, options) execute_ddl(create_partition_of_table_sql(name, generator, options)) end # SQL for creating a partition of another table. def create_partition_of_table_sql(name, generator, options) sql = create_table_prefix_sql(name, options).dup sql << " PARTITION OF #{quote_schema_table(options[:partition_of])}" case generator.partition_type when :range from, to = generator.range sql << " FOR VALUES FROM #{literal(from)} TO #{literal(to)}" when :list sql << " FOR VALUES IN #{literal(generator.list)}" when :hash mod, remainder = generator.hash_values sql << " FOR VALUES WITH (MODULUS #{literal(mod)}, REMAINDER #{literal(remainder)})" else # when :default sql << " DEFAULT" end sql << create_table_suffix_sql(name, options) sql end # SQL for creating a schema. 
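# # For example (schema name is illustrative): # # create_schema_sql(:admin, :if_not_exists=>true) # # CREATE SCHEMA IF NOT EXISTS "admin"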
def create_schema_sql(name, opts=OPTS) "CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{" AUTHORIZATION #{literal(opts[:owner])}" if opts[:owner]}" end # DDL statement for creating a table with the given name, columns, and options def create_table_prefix_sql(name, options) prefix_sql = if options[:temp] raise(Error, "can't provide both :temp and :unlogged to create_table") if options[:unlogged] raise(Error, "can't provide both :temp and :foreign to create_table") if options[:foreign] temporary_table_sql elsif options[:foreign] raise(Error, "can't provide both :foreign and :unlogged to create_table") if options[:unlogged] 'FOREIGN ' elsif options[:unlogged] 'UNLOGGED ' end "CREATE #{prefix_sql}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}" end # SQL for creating a table with PostgreSQL specific options def create_table_sql(name, generator, options) "#{super}#{create_table_suffix_sql(name, options)}" end # Handle various PostgreSQL specific table extensions such as inheritance, # partitioning, tablespaces, and foreign tables. def create_table_suffix_sql(name, options) sql = String.new if inherits = options[:inherits] sql << " INHERITS (#{Array(inherits).map{|t| quote_schema_table(t)}.join(', ')})" end if partition_by = options[:partition_by] sql << " PARTITION BY #{options[:partition_type]||'RANGE'} #{literal(Array(partition_by))}" end if on_commit = options[:on_commit] raise(Error, "can't provide :on_commit without :temp to create_table") unless options[:temp] raise(Error, "unsupported on_commit option: #{on_commit.inspect}") unless ON_COMMIT.has_key?(on_commit) sql << " ON COMMIT #{ON_COMMIT[on_commit]}" end if tablespace = options[:tablespace] sql << " TABLESPACE #{quote_identifier(tablespace)}" end if server = options[:foreign] sql << " SERVER #{quote_identifier(server)}" if foreign_opts = options[:options] sql << " OPTIONS (#{foreign_opts.map{|k, v| "#{k} #{literal(v.to_s)}"}.join(', ')})" end end sql end def create_table_as_sql(name, sql, options) result = create_table_prefix_sql name, options if on_commit = options[:on_commit] result += " ON COMMIT #{ON_COMMIT[on_commit]}" end result += " AS #{sql}" end def create_table_generator_class Postgres::CreateTableGenerator end # SQL for creating a database trigger. def create_trigger_sql(table, name, function, opts=OPTS) events = opts[:events] ? Array(opts[:events]) : [:insert, :update, :delete] whence = opts[:after] ? 'AFTER' : 'BEFORE' if filter = opts[:when] raise Error, "Trigger conditions are not supported for this database" unless supports_trigger_conditions?
filter = " WHEN #{filter_expr(filter)}" end "CREATE #{'OR REPLACE ' if opts[:replace]}TRIGGER #{name} #{whence} #{events.map{|e| e.to_s.upcase}.join(' OR ')} ON #{quote_schema_table(table)}#{' FOR EACH ROW' if opts[:each_row]}#{filter} EXECUTE PROCEDURE #{function}(#{Array(opts[:args]).map{|a| literal(a)}.join(', ')})" end # DDL fragment for initial part of CREATE VIEW statement def create_view_prefix_sql(name, options) sql = create_view_sql_append_columns("CREATE #{'OR REPLACE 'if options[:replace]}#{'TEMPORARY 'if options[:temp]}#{'RECURSIVE ' if options[:recursive]}#{'MATERIALIZED ' if options[:materialized]}VIEW #{quote_schema_table(name)}", options[:columns] || options[:recursive]) if options[:security_invoker] sql += " WITH (security_invoker)" end if tablespace = options[:tablespace] sql += " TABLESPACE #{quote_identifier(tablespace)}" end sql end # SQL for dropping a function from the database. def drop_function_sql(name, opts=OPTS) "DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}" end # Support :if_exists, :cascade, and :concurrently options. def drop_index_sql(table, op) sch, _ = schema_and_table(table) "DROP INDEX#{' CONCURRENTLY' if op[:concurrently]}#{' IF EXISTS' if op[:if_exists]} #{"#{quote_identifier(sch)}." if sch}#{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}#{' CASCADE' if op[:cascade]}" end # SQL for dropping a procedural language from the database. def drop_language_sql(name, opts=OPTS) "DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}" end # SQL for dropping a schema from the database. def drop_schema_sql(name, opts=OPTS) "DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}" end # SQL for dropping a trigger from the database. def drop_trigger_sql(table, name, opts=OPTS) "DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}" end # Support :foreign tables def drop_table_sql(name, options) "DROP#{' FOREIGN' if options[:foreign]} TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}" end # SQL for dropping a view from the database. def drop_view_sql(name, opts=OPTS) "DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}" end # If opts includes a :schema option, use it, otherwise restrict the filter to only the # currently visible schemas. def filter_schema(ds, opts) expr = if schema = opts[:schema] schema.to_s else Sequel.function(:any, Sequel.function(:current_schemas, false)) end ds.where{{pg_namespace[:nspname]=>expr}} end def index_definition_sql(table_name, index) cols = index[:columns] index_name = index[:name] || default_index_name(table_name, cols) expr = if o = index[:opclass] "(#{Array(cols).map{|c| "#{literal(c)} #{o}"}.join(', ')})" else literal(Array(cols)) end if_not_exists = " IF NOT EXISTS" if index[:if_not_exists] unique = "UNIQUE " if index[:unique] index_type = index[:type] filter = index[:where] || index[:filter] filter = " WHERE #{filter_expr(filter)}" if filter nulls_distinct = " NULLS#{' NOT' if index[:nulls_distinct] == false} DISTINCT" unless index[:nulls_distinct].nil? 
case index_type when :full_text expr = "(to_tsvector(#{literal(index[:language] || 'simple')}::regconfig, #{literal(dataset.send(:full_text_string_join, cols))}))" index_type = index[:index_type] || :gin when :spatial index_type = :gist end "CREATE #{unique}INDEX#{' CONCURRENTLY' if index[:concurrently]}#{if_not_exists} #{quote_identifier(index_name)} ON #{quote_schema_table(table_name)} #{"USING #{index_type} " if index_type}#{expr}#{" INCLUDE #{literal(Array(index[:include]))}" if index[:include]}#{nulls_distinct}#{" TABLESPACE #{quote_identifier(index[:tablespace])}" if index[:tablespace]}#{filter}" end # Set up data structures shared by all postgres adapters. def initialize_postgres_adapter @primary_keys = {} @primary_key_sequences = {} @supported_types = {} procs = @conversion_procs = CONVERSION_PROCS.dup procs[1184] = procs[1114] = method(:to_application_timestamp) end # Backbone of the tables and views support. def pg_class_relname(type, opts) ds = metadata_dataset.from(:pg_class).where(:relkind=>type).select(:relname).server(opts[:server]).join(:pg_namespace, :oid=>:relnamespace) ds = filter_schema(ds, opts) m = output_identifier_meth if defined?(yield) yield(ds) elsif opts[:qualify] ds.select_append{pg_namespace[:nspname]}.map{|r| Sequel.qualify(m.call(r[:nspname]).to_s, m.call(r[:relname]).to_s)} else ds.map{|r| m.call(r[:relname])} end end # Return an expression for the oid of the table expr. Used by the metadata parsing # code to disambiguate unqualified tables. def regclass_oid(expr, opts=OPTS) if expr.is_a?(String) && !expr.is_a?(LiteralString) expr = Sequel.identifier(expr) end sch, table = schema_and_table(expr) sch ||= opts[:schema] if sch expr = Sequel.qualify(sch, table) end expr = if ds = opts[:dataset] ds.literal(expr) else literal(expr) end Sequel.cast(expr.to_s,:regclass).cast(:oid) end # Remove the cached entries for primary keys and sequences when a table is changed. def remove_cached_schema(table) tab = quote_schema_table(table) Sequel.synchronize do @primary_keys.delete(tab) @primary_key_sequences.delete(tab) end super end # SQL DDL statement for renaming a table. PostgreSQL doesn't allow you to change a table's schema in # a rename table operation, so specifying a new schema in new_name will not have an effect. def rename_table_sql(name, new_name) "ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_identifier(schema_and_table(new_name).last)}" end def schema_column_type(db_type) case db_type when /\Ainterval\z/io :interval when /\Acitext\z/io :string else super end end # The dataset used for parsing table schemas, using the pg_* system catalogs.
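# # Returns an array of [column_symbol, info_hash] pairs, for example (values are illustrative): # # [[:id, {:oid=>23, :db_type=>"integer", :primary_key=>true, :auto_increment=>true, ...}], ...]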
def schema_parse_table(table_name, opts) m = output_identifier_meth(opts[:dataset]) _schema_ds.where_all(Sequel[:pg_class][:oid]=>regclass_oid(table_name, opts)).map do |row| row[:default] = nil if blank_object?(row[:default]) if row[:base_oid] row[:domain_oid] = row[:oid] row[:oid] = row.delete(:base_oid) row[:db_domain_type] = row[:db_type] row[:db_type] = row.delete(:db_base_type) else row.delete(:base_oid) row.delete(:db_base_type) end row[:type] = schema_column_type(row[:db_type]) identity = row.delete(:attidentity) if row[:primary_key] row[:auto_increment] = !!(row[:default] =~ /\A(?:nextval)/i) || identity == 'a' || identity == 'd' end [m.call(row.delete(:name)), row] end end # Set the transaction isolation level on the given connection def set_transaction_isolation(conn, opts) level = opts.fetch(:isolation, transaction_isolation_level) read_only = opts[:read_only] deferrable = opts[:deferrable] if level || !read_only.nil? || !deferrable.nil? sql = String.new sql << "SET TRANSACTION" sql << " ISOLATION LEVEL #{Sequel::Database::TRANSACTION_ISOLATION_LEVELS[level]}" if level sql << " READ #{read_only ? 'ONLY' : 'WRITE'}" unless read_only.nil? sql << " #{'NOT ' unless deferrable}DEFERRABLE" unless deferrable.nil? log_connection_execute(conn, sql) end end # Turns an array of argument specifiers into an SQL fragment used for function arguments. See create_function_sql. def sql_function_args(args) "(#{Array(args).map{|a| Array(a).reverse.join(' ')}.join(', ')})" end # PostgreSQL can combine multiple alter table ops into a single query. def supports_combining_alter_table_ops? true end # PostgreSQL supports CREATE OR REPLACE VIEW. def supports_create_or_replace_view? true end # Handle bigserial type if :serial option is present def type_literal_generic_bignum_symbol(column) column[:serial] ? :bigserial : super end # PostgreSQL uses the bytea data type for blobs def type_literal_generic_file(column) :bytea end # Handle serial type if :serial option is present def type_literal_generic_integer(column) column[:serial] ? :serial : super end # PostgreSQL prefers the text datatype. If a fixed size is requested, # the char type is used. If the text type is specifically # disallowed or there is a size specified, use the varchar type. # Otherwise use the text type. def type_literal_generic_string(column) if column[:text] :text elsif column[:fixed] "char(#{column[:size]||default_string_column_size})" elsif column[:text] == false || column[:size] "varchar(#{column[:size]||default_string_column_size})" else :text end end # PostgreSQL 9.4+ supports views with check option. 
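# # For example, a sketch of the effect (view and dataset names are illustrative): # # DB.create_view(:recent_items, DB[:items].where{id > 100}, :check=>true) # # CREATE VIEW "recent_items" AS SELECT * FROM "items" WHERE ("id" > 100) WITH CHECK OPTION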
def view_with_check_option_support # :nocov: :local if server_version >= 90400 # :nocov: end end module DatasetMethods include UnmodifiedIdentifiers::DatasetMethods NULL = LiteralString.new('NULL').freeze LOCK_MODES = ['ACCESS SHARE', 'ROW SHARE', 'ROW EXCLUSIVE', 'SHARE UPDATE EXCLUSIVE', 'SHARE', 'SHARE ROW EXCLUSIVE', 'EXCLUSIVE', 'ACCESS EXCLUSIVE'].each(&:freeze).freeze Dataset.def_sql_method(self, :delete, [['if server_version >= 90100', %w'with delete from using where returning'], ['else', %w'delete from using where returning']]) Dataset.def_sql_method(self, :insert, [['if server_version >= 90500', %w'with insert into columns override values conflict returning'], ['elsif server_version >= 90100', %w'with insert into columns values returning'], ['else', %w'insert into columns values returning']]) Dataset.def_sql_method(self, :select, [['if opts[:values]', %w'values order limit'], ['elsif server_version >= 80400', %w'with select distinct columns from join where group having window compounds order limit lock'], ['else', %w'select distinct columns from join where group having compounds order limit lock']]) Dataset.def_sql_method(self, :update, [['if server_version >= 90100', %w'with update table set from where returning'], ['else', %w'update table set from where returning']]) # Return the results of an EXPLAIN ANALYZE query as a string def analyze explain(:analyze=>true) end # Handle converting the ruby xor operator (^) into the # PostgreSQL xor operator (#), and use the ILIKE and NOT ILIKE # operators. def complex_expression_sql_append(sql, op, args) case op when :^ j = ' # ' c = false args.each do |a| sql << j if c literal_append(sql, a) c ||= true end when :ILIKE, :'NOT ILIKE' sql << '(' literal_append(sql, args[0]) sql << ' ' << op.to_s << ' ' literal_append(sql, args[1]) sql << " ESCAPE " literal_append(sql, "\\") sql << ')' else super end end # Disables automatic use of INSERT ... RETURNING. You can still use # returning manually to force the use of RETURNING when inserting. # # This is designed for cases where INSERT RETURNING cannot be used, # such as when you are using partitioning with trigger functions # or conditional rules, or when you are using a PostgreSQL version # less than 8.2, or a PostgreSQL derivative that does not support # returning. # # Note that when this method is used, insert will not return the # primary key of the inserted row, you will have to get the primary # key of the inserted row before inserting via nextval, or after # inserting via currval or lastval (making sure to use the same # database connection for currval or lastval). def disable_insert_returning clone(:disable_insert_returning=>true) end # Return the results of an EXPLAIN query as a string def explain(opts=OPTS) with_sql((opts[:analyze] ? 'EXPLAIN ANALYZE ' : 'EXPLAIN ') + select_sql).map(:'QUERY PLAN').join("\r\n") end # Return a cloned dataset which will use FOR SHARE to lock returned rows. def for_share lock_style(:share) end # Run a full text search on PostgreSQL. By default, it searches for the inclusion # of any of the terms in any of the cols. # # Options: # :headline :: Append an expression to the selected columns aliased to headline that # contains an extract of the matched text. # :language :: The language to use for the search (default: 'simple') # :plain :: Whether a plain search should be used (default: false). In this case, # terms should be a single string, and it will do a search where cols # contains all of the words in terms. This ignores search operators in terms.
# :phrase :: Similar to :plain, but also adding an ILIKE filter to ensure that # returned rows also include the exact phrase used. # :rank :: Set to true to order by the rank, so that closer matches are returned first. # :to_tsquery :: Can be set to :plain or :phrase to specify the function to use to # convert the terms to a ts_query. # :tsquery :: Specifies the terms argument is already a valid SQL expression returning a # tsquery, and can be used directly in the query. # :tsvector :: Specifies the cols argument is already a valid SQL expression returning a # tsvector, and can be used directly in the query. def full_text_search(cols, terms, opts = OPTS) lang = Sequel.cast(opts[:language] || 'simple', :regconfig) unless opts[:tsvector] phrase_cols = full_text_string_join(cols) cols = Sequel.function(:to_tsvector, lang, phrase_cols) end unless opts[:tsquery] phrase_terms = terms.is_a?(Array) ? terms.join(' | ') : terms query_func = case to_tsquery = opts[:to_tsquery] when :phrase, :plain :"#{to_tsquery}to_tsquery" else (opts[:phrase] || opts[:plain]) ? :plainto_tsquery : :to_tsquery end terms = Sequel.function(query_func, lang, phrase_terms) end ds = where(Sequel.lit(["", " @@ ", ""], cols, terms)) if opts[:phrase] raise Error, "can't use :phrase with either :tsvector or :tsquery arguments to full_text_search together" if opts[:tsvector] || opts[:tsquery] ds = ds.grep(phrase_cols, "%#{escape_like(phrase_terms)}%", :case_insensitive=>true) end if opts[:rank] ds = ds.reverse{ts_rank_cd(cols, terms)} end if opts[:headline] ds = ds.select_append{ts_headline(lang, phrase_cols, terms).as(:headline)} end ds end # Insert given values into the database. def insert(*values) if @opts[:returning] # Already know which columns to return, let the standard code handle it super elsif @opts[:sql] || @opts[:disable_insert_returning] # Raw SQL used or RETURNING disabled, just use the default behavior # and return nil since sequence is not known. super nil else # Force the use of RETURNING with the primary key value, # unless it has been disabled. returning(insert_pk).insert(*values){|r| return r.values.first} end end # Handle uniqueness violations when inserting, by updating the conflicting row, using # ON CONFLICT. With no options, uses ON CONFLICT DO NOTHING. Options: # :conflict_where :: The index filter, when using a partial index to determine uniqueness. # :constraint :: An explicit constraint name, has precedence over :target. # :target :: The column name or expression to handle uniqueness violations on. # :update :: A hash of columns and values to set. Uses ON CONFLICT DO UPDATE. # :update_where :: A WHERE condition to use for the update.
# # Examples: # # DB[:table].insert_conflict.insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT DO NOTHING # # DB[:table].insert_conflict(constraint: :table_a_uidx).insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT ON CONSTRAINT table_a_uidx DO NOTHING # # DB[:table].insert_conflict(target: :a).insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT (a) DO NOTHING # # DB[:table].insert_conflict(target: :a, conflict_where: {c: true}).insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT (a) WHERE (c IS TRUE) DO NOTHING # # DB[:table].insert_conflict(target: :a, update: {b: Sequel[:excluded][:b]}).insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT (a) DO UPDATE SET b = excluded.b # # DB[:table].insert_conflict(constraint: :table_a_uidx, # update: {b: Sequel[:excluded][:b]}, update_where: {Sequel[:table][:status_id] => 1}).insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT ON CONSTRAINT table_a_uidx # # DO UPDATE SET b = excluded.b WHERE (table.status_id = 1) def insert_conflict(opts=OPTS) clone(:insert_conflict => opts) end # Ignore uniqueness/exclusion violations when inserting, using ON CONFLICT DO NOTHING. # Exists mostly for compatibility to MySQL's insert_ignore. Example: # # DB[:table].insert_ignore.insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT DO NOTHING def insert_ignore insert_conflict end # Insert a record, returning the record inserted, using RETURNING. Always returns nil without # running an INSERT statement if disable_insert_returning is used. If the query runs # but returns no values, returns false. def insert_select(*values) return unless supports_insert_select? # Handle case where query does not return a row server?(:default).with_sql_first(insert_select_sql(*values)) || false end # The SQL to use for an insert_select, adds a RETURNING clause to the insert # unless the RETURNING clause is already present. def insert_select_sql(*values) ds = opts[:returning] ? self : returning ds.insert_sql(*values) end # Support SQL::AliasedExpression as expr to setup a USING join with a table alias for the # USING columns. def join_table(type, table, expr=nil, options=OPTS, &block) if expr.is_a?(SQL::AliasedExpression) && expr.expression.is_a?(Array) && !expr.expression.empty? && expr.expression.all? options = options.merge(:join_using=>true) end super end # Locks all tables in the dataset's FROM clause (but not in JOINs) with # the specified mode (e.g. 'EXCLUSIVE'). If a block is given, starts # a new transaction, locks the table, and yields. If a block is not given, # just locks the tables. Note that PostgreSQL will probably raise an error # if you lock the table outside of an existing transaction. Returns nil. def lock(mode, opts=OPTS) if defined?(yield) # perform locking inside a transaction and yield to block @db.transaction(opts){lock(mode, opts); yield} else sql = 'LOCK TABLE '.dup source_list_append(sql, @opts[:from]) mode = mode.to_s.upcase.strip unless LOCK_MODES.include?(mode) raise Error, "Unsupported lock mode: #{mode}" end sql << " IN #{mode} MODE" @db.execute(sql, opts) end nil end # Return a dataset with a WHEN MATCHED THEN DO NOTHING clause added to the # MERGE statement. If a block is passed, treat it as a virtual row and # use it as additional conditions for the match. 
# # merge_do_nothing_when_matched # # WHEN MATCHED THEN DO NOTHING # # merge_do_nothing_when_matched{a > 30} # # WHEN MATCHED AND (a > 30) THEN DO NOTHING def merge_do_nothing_when_matched(&block) _merge_when(:type=>:matched, &block) end # Return a dataset with a WHEN NOT MATCHED THEN DO NOTHING clause added to the # MERGE statement. If a block is passed, treat it as a virtual row and # use it as additional conditions for the match. # # merge_do_nothing_when_not_matched # # WHEN NOT MATCHED THEN DO NOTHING # # merge_do_nothing_when_not_matched{a > 30} # # WHEN NOT MATCHED AND (a > 30) THEN DO NOTHING def merge_do_nothing_when_not_matched(&block) _merge_when(:type=>:not_matched, &block) end # Support OVERRIDING USER|SYSTEM VALUE for MERGE INSERT. def merge_insert(*values, &block) h = {:type=>:insert, :values=>values} if override = @opts[:override] h[:override] = insert_override_sql(String.new) end _merge_when(h, &block) end # Use OVERRIDING SYSTEM VALUE for INSERT statements, so that identity columns # always use the user supplied value, and an error is not raised for identity # columns that are GENERATED ALWAYS. def overriding_system_value clone(:override=>:system) end # Use OVERRIDING USER VALUE for INSERT statements, so that identity columns # always use the sequence value instead of the user supplied value. def overriding_user_value clone(:override=>:user) end def supports_cte?(type=:select) if type == :select server_version >= 80400 else server_version >= 90100 end end # PostgreSQL supports using the WITH clause in subqueries if it # supports using WITH at all (i.e. on PostgreSQL 8.4+). def supports_cte_in_subqueries? supports_cte? end # DISTINCT ON is a PostgreSQL extension def supports_distinct_on? true end # PostgreSQL 9.5+ supports GROUP CUBE def supports_group_cube? server_version >= 90500 end # PostgreSQL 9.5+ supports GROUP ROLLUP def supports_group_rollup? server_version >= 90500 end # PostgreSQL 9.5+ supports GROUPING SETS def supports_grouping_sets? server_version >= 90500 end # True unless insert returning has been disabled for this dataset. def supports_insert_select? !@opts[:disable_insert_returning] end # PostgreSQL 9.5+ supports the ON CONFLICT clause to INSERT. def supports_insert_conflict? server_version >= 90500 end # PostgreSQL 9.3+ supports lateral subqueries def supports_lateral_subqueries? server_version >= 90300 end # PostgreSQL supports modifying joined datasets def supports_modifying_joins? true end # PostgreSQL 15+ supports MERGE. def supports_merge? server_version >= 150000 end # PostgreSQL supports NOWAIT. def supports_nowait? true end # Returning is always supported. def supports_returning?(type) true end # PostgreSQL supports pattern matching via regular expressions def supports_regexp? true end # PostgreSQL 9.5+ supports SKIP LOCKED. def supports_skip_locked? server_version >= 90500 end # PostgreSQL supports timezones in literal timestamps def supports_timestamp_timezones? true end # PostgreSQL 8.4+ supports WINDOW clause. def supports_window_clause? server_version >= 80400 end # PostgreSQL 8.4+ supports window functions def supports_window_functions? server_version >= 80400 end # Base support added in 8.4, offset support added in 9.0, # GROUPS and EXCLUDE support added in 11.0. def supports_window_function_frame_option?(option) case option when :rows, :range true when :offset server_version >= 90000 when :groups, :exclude server_version >= 110000 else false end end # Truncates the dataset. Returns nil.
# # Options: # :cascade :: whether to use the CASCADE option, useful when truncating # tables with foreign keys. # :only :: truncate using ONLY, so child tables are unaffected # :restart :: use RESTART IDENTITY to restart any related sequences # # :only and :restart only work correctly on PostgreSQL 8.4+. # # Usage: # DB[:table].truncate # # TRUNCATE TABLE "table" # # DB[:table].truncate(cascade: true, only: true, restart: true) # # TRUNCATE TABLE ONLY "table" RESTART IDENTITY CASCADE def truncate(opts = OPTS) if opts.empty? super() else clone(:truncate_opts=>opts).truncate end end # Use WITH TIES when limiting the result set to also include additional # rows that have the same values for the order columns as the final row. # Requires PostgreSQL 13. def with_ties clone(:limit_with_ties=>true) end protected # If returned primary keys are requested, use RETURNING unless already set on the # dataset. If RETURNING is already set, use existing returning values. If RETURNING # is only set to return a single column, return an array of just that column. # Otherwise, return an array of hashes. def _import(columns, values, opts=OPTS) if @opts[:returning] # no transaction: our multi_insert_sql_strategy should guarantee # that there's only ever a single statement. sql = multi_insert_sql(columns, values)[0] returning_fetch_rows(sql).map{|v| v.length == 1 ? v.values.first : v} elsif opts[:return] == :primary_key returning(insert_pk)._import(columns, values, opts) else super end end def to_prepared_statement(type, *a) if type == :insert && !@opts.has_key?(:returning) returning(insert_pk).send(:to_prepared_statement, :insert_pk, *a) else super end end private # Append the INSERT sql used in a MERGE def _merge_insert_sql(sql, data) sql << " THEN INSERT " columns, values = _parse_insert_sql_args(data[:values]) _insert_columns_sql(sql, columns) if override = data[:override] sql << override end _insert_values_sql(sql, values) end def _merge_matched_sql(sql, data) sql << " THEN DO NOTHING" end alias _merge_not_matched_sql _merge_matched_sql # Format TRUNCATE statement with PostgreSQL specific options. def _truncate_sql(table) to = @opts[:truncate_opts] || OPTS "TRUNCATE TABLE#{' ONLY' if to[:only]} #{table}#{' RESTART IDENTITY' if to[:restart]}#{' CASCADE' if to[:cascade]}" end # Allow truncation of multiple source tables. def check_truncation_allowed! raise(InvalidOperation, "Grouped datasets cannot be truncated") if opts[:group] raise(InvalidOperation, "Joined datasets cannot be truncated") if opts[:join] end # Only include the primary table in the main delete clause def delete_from_sql(sql) sql << ' FROM ' source_list_append(sql, @opts[:from][0..0]) end # Use USING to specify additional tables in a delete query def delete_using_sql(sql) join_from_sql(:USING, sql) end # Add ON CONFLICT clause if it should be used def insert_conflict_sql(sql) if opts = @opts[:insert_conflict] sql << " ON CONFLICT" if target = opts[:constraint] sql << " ON CONSTRAINT " identifier_append(sql, target) elsif target = opts[:target] sql << ' ' identifier_append(sql, Array(target)) if conflict_where = opts[:conflict_where] sql << " WHERE " literal_append(sql, conflict_where) end end if values = opts[:update] sql << " DO UPDATE SET " update_sql_values_hash(sql, values) if update_where = opts[:update_where] sql << " WHERE " literal_append(sql, update_where) end else sql << " DO NOTHING" end end end # Include aliases when inserting into a single table on PostgreSQL 9.5+.
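# # For example, a hypothetical aliased insert (the alias can then be referenced in an ON CONFLICT clause): # # DB[Sequel[:table].as(:t)].insert_sql(a: 1) # # INSERT INTO "table" AS "t" ("a") VALUES (1) -- on PostgreSQL 9.5+; older versions drop the alias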
def insert_into_sql(sql) sql << " INTO " if (f = @opts[:from]) && f.length == 1 identifier_append(sql, server_version >= 90500 ? f.first : unaliased_identifier(f.first)) else source_list_append(sql, f) end end # Return the primary key to use for RETURNING in an INSERT statement def insert_pk (f = opts[:from]) && !f.empty? && (t = f.first) case t when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier if pk = db.primary_key(t) Sequel::SQL::Identifier.new(pk) end end end # Support OVERRIDING SYSTEM|USER VALUE in insert statements def insert_override_sql(sql) case opts[:override] when :system sql << " OVERRIDING SYSTEM VALUE" when :user sql << " OVERRIDING USER VALUE" end end # For multiple table support, PostgreSQL requires at least # two from tables, with joins allowed. def join_from_sql(type, sql) if(from = @opts[:from][1..-1]).empty? raise(Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') if @opts[:join] else sql << ' ' << type.to_s << ' ' source_list_append(sql, from) select_join_sql(sql) end end # Support table aliases for USING columns def join_using_clause_using_sql_append(sql, using_columns) if using_columns.is_a?(SQL::AliasedExpression) super(sql, using_columns.expression) sql << ' AS ' identifier_append(sql, using_columns.alias) else super end end # Use a generic blob quoting method, hopefully overridden in one of the subadapter methods def literal_blob_append(sql, v) sql << "'" << v.gsub(/[\000-\037\047\134\177-\377]/n){|b| "\\#{("%o" % b[0..1].unpack("C")[0]).rjust(3, '0')}"} << "'" end # PostgreSQL uses FALSE for false values def literal_false 'false' end # PostgreSQL quotes NaN and Infinity. def literal_float(value) if value.finite? super elsif value.nan? "'NaN'" elsif value.infinite? == 1 "'Infinity'" else "'-Infinity'" end end # Handle Ruby integers outside PostgreSQL bigint range specially. def literal_integer(v) if v > 9223372036854775807 || v < -9223372036854775808 literal_integer_outside_bigint_range(v) else v.to_s end end # Raise IntegerOutsideBigintRange when attempting to literalize Ruby integer # outside PostgreSQL bigint range, so PostgreSQL doesn't treat # the value as numeric. def literal_integer_outside_bigint_range(v) raise IntegerOutsideBigintRange, "attempt to literalize Ruby integer outside PostgreSQL bigint range: #{v}" end # Assume that SQL standard quoting is on, per Sequel's defaults def literal_string_append(sql, v) sql << "'" << v.gsub("'", "''") << "'" end # PostgreSQL uses true for true values def literal_true 'true' end # PostgreSQL supports multiple rows in INSERT. def multi_insert_sql_strategy :values end # Dataset options that do not affect the generated SQL. def non_sql_option?(key) super || key == :cursor || key == :insert_conflict end # PostgreSQL requires parentheses around compound datasets if they use # CTEs, and using them in other places doesn't hurt. def compound_dataset_sql_append(sql, ds) sql << '(' super sql << ')' end # Backslash is supported by default as the escape character on PostgreSQL, # and using ESCAPE can break LIKE ANY() usage. def requires_like_escape? false end # Support FETCH FIRST WITH TIES on PostgreSQL 13+. 
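      # For example (an illustrative sketch; the table and column names are
      # hypothetical and the SQL shown is approximate):
      #
      #   DB[:table].order(:a).limit(2).with_ties.sql
      #   # SELECT * FROM "table" ORDER BY "a" FETCH FIRST 2 ROWS WITH TIES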
def select_limit_sql(sql) l = @opts[:limit] o = @opts[:offset] return unless l || o if @opts[:limit_with_ties] if o sql << " OFFSET " literal_append(sql, o) end if l sql << " FETCH FIRST " literal_append(sql, l) sql << " ROWS WITH TIES" end else if l sql << " LIMIT " literal_append(sql, l) end if o sql << " OFFSET " literal_append(sql, o) end end end # Support FOR SHARE locking when using the :share lock style. # Use SKIP LOCKED if skipping locked rows. def select_lock_sql(sql) lock = @opts[:lock] if lock == :share sql << ' FOR SHARE' else super end if lock if @opts[:skip_locked] sql << " SKIP LOCKED" elsif @opts[:nowait] sql << " NOWAIT" end end end # Support VALUES clause instead of the SELECT clause to return rows. def select_values_sql(sql) sql << "VALUES " expression_list_append(sql, opts[:values]) end # Use WITH RECURSIVE instead of WITH if any of the CTEs is recursive def select_with_sql_base opts[:with].any?{|w| w[:recursive]} ? "WITH RECURSIVE " : super end # Support PostgreSQL 14+ CTE SEARCH/CYCLE clauses def select_with_sql_cte(sql, cte) super select_with_sql_cte_search_cycle(sql, cte) end def select_with_sql_cte_search_cycle(sql, cte) if search_opts = cte[:search] sql << if search_opts[:type] == :breadth " SEARCH BREADTH FIRST BY " else " SEARCH DEPTH FIRST BY " end identifier_list_append(sql, Array(search_opts[:by])) sql << " SET " identifier_append(sql, search_opts[:set] || :ordercol) end if cycle_opts = cte[:cycle] sql << " CYCLE " identifier_list_append(sql, Array(cycle_opts[:columns])) sql << " SET " identifier_append(sql, cycle_opts[:cycle_column] || :is_cycle) if cycle_opts.has_key?(:cycle_value) sql << " TO " literal_append(sql, cycle_opts[:cycle_value]) sql << " DEFAULT " literal_append(sql, cycle_opts.fetch(:noncycle_value, false)) end sql << " USING " identifier_append(sql, cycle_opts[:path_column] || :path) end end # The version of the database server def server_version db.server_version(@opts[:server]) end # PostgreSQL 9.4+ supports the FILTER clause for aggregate functions. def supports_filtered_aggregates? server_version >= 90400 end # PostgreSQL supports quoted function names. def supports_quoted_function_names? true end # Concatenate the expressions with a space in between def full_text_string_join(cols) cols = Array(cols).map{|x| SQL::Function.new(:COALESCE, x, '')} cols = cols.zip([' '] * cols.length).flatten cols.pop SQL::StringExpression.new(:'||', *cols) end # Use FROM to specify additional tables in an update query def update_from_sql(sql) join_from_sql(:FROM, sql) end # Only include the primary table in the main update clause def update_table_sql(sql) sql << ' ' source_list_append(sql, @opts[:from][0..0]) end end end end sequel-5.63.0/lib/sequel/adapters/shared/sqlanywhere.rb000066400000000000000000000342351434214120600231130ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../utils/columns_limit_1' module Sequel module SqlAnywhere Sequel::Database.set_shared_adapter_scheme(:sqlanywhere, self) module DatabaseMethods attr_reader :conversion_procs # Set whether to convert smallint type to boolean for this Database instance attr_accessor :convert_smallint_to_bool def database_type :sqlanywhere end def freeze @conversion_procs.freeze super end def to_application_timestamp_sa(v) to_application_timestamp(v.to_s) if v end def schema_parse_table(table, opts) m = output_identifier_meth(opts[:dataset]) im = input_identifier_meth(opts[:dataset]) metadata_dataset. from{sa_describe_query("select * from #{im.call(table)}").as(:a)}. 
join(Sequel[:syscolumn].as(:b), :table_id=>:base_table_id, :column_id=>:base_column_id). order{a[:column_number]}. map do |row| auto_increment = row.delete(:is_autoincrement) row[:auto_increment] = auto_increment == 1 || auto_increment == true row[:primary_key] = row.delete(:pkey) == 'Y' row[:allow_null] = row[:nulls_allowed].is_a?(Integer) ? row.delete(:nulls_allowed) == 1 : row.delete(:nulls_allowed) row[:db_type] = row.delete(:domain_name) row[:type] = if row[:db_type] =~ /numeric/i and (row[:scale].is_a?(Integer) ? row[:scale] == 0 : !row[:scale]) :integer else schema_column_type(row[:db_type]) end row[:max_length] = row[:width] if row[:type] == :string [m.call(row.delete(:name)), row] end end def indexes(table, opts = OPTS) m = output_identifier_meth im = input_identifier_meth table = table.value if table.is_a?(Sequel::SQL::Identifier) indexes = {} metadata_dataset. from(Sequel[:dbo][:sysobjects].as(:z)). select{[ z[:name].as(:table_name), i[:name].as(:index_name), si[:indextype].as(:type), si[:colnames].as(:columns)]}. join(Sequel[:dbo][:sysindexes].as(:i), :id=>:id). join(Sequel[:sys][:sysindexes].as(:si), :iname=> :name). where{{z[:type] => 'U', :table_name=>im.call(table)}}. each do |r| indexes[m.call(r[:index_name])] = {:unique=>(r[:type].downcase=='unique'), :columns=>r[:columns].split(',').map{|v| m.call(v.split(' ').first)}} unless r[:type].downcase == 'primary key' end indexes end def foreign_key_list(table, opts=OPTS) m = output_identifier_meth im = input_identifier_meth fk_indexes = {} metadata_dataset. from{sys[:sysforeignkey].as(:fk)}. select{[ fk[:role].as(:name), fks[:columns].as(:column_map), si[:indextype].as(:type), si[:colnames].as(:columns), fks[:primary_tname].as(:table_name)]}. join(Sequel[:sys][:sysforeignkeys].as(:fks), :role => :role). join(Sequel[:sys][:sysindexes].as(:si), {:iname => Sequel[:fk][:role]}, {:implicit_qualifier => :fk}). where{{fks[:foreign_tname]=>im.call(table)}}. each do |r| unless r[:type].downcase == 'primary key' fk_indexes[r[:name]] = {:name=>m.call(r[:name]), :columns=>r[:columns].split(',').map{|v| m.call(v.split(' ').first)}, :table=>m.call(r[:table_name]), :key=>r[:column_map].split(',').map{|v| m.call(v.split(' IS ').last)}} end end fk_indexes.values end def tables(opts=OPTS) tables_and_views('U', opts) end def views(opts=OPTS) tables_and_views('V', opts) end private DATABASE_ERROR_REGEXPS = { /would not be unique|Primary key for table.+is not unique/ => Sequel::UniqueConstraintViolation, /Column .* in table .* cannot be NULL/ => Sequel::NotNullConstraintViolation, /Constraint .* violated: Invalid value in table .*/ => Sequel::CheckConstraintViolation, /No primary key value for foreign key .* in table .*/ => Sequel::ForeignKeyConstraintViolation, /Primary key for row in table .* is referenced by foreign key .* in table .*/ => Sequel::ForeignKeyConstraintViolation }.freeze def database_error_regexps DATABASE_ERROR_REGEXPS end # Sybase uses the IDENTITY column for autoincrementing columns. def auto_increment_sql 'IDENTITY' end # Sybase does not allow adding primary key constraints to NULLable columns. def can_add_primary_key_constraint_on_nullable_columns? 
        false
      end

      def temporary_table_sql
        "GLOBAL TEMPORARY "
      end

      def begin_transaction_sql
        "BEGIN TRANSACTION"
      end

      def rollback_transaction_sql
        "IF @@TRANCOUNT > 0 ROLLBACK TRANSACTION"
      end

      def commit_transaction_sql
        "COMMIT TRANSACTION"
      end

      # Sybase has both datetime and timestamp classes, most people are going
      # to want datetime
      def type_literal_generic_datetime(column)
        :datetime
      end

      # Sybase doesn't have a true boolean class, so it uses integer
      def type_literal_generic_trueclass(column)
        :smallint
      end

      # SQLAnywhere uses image type for blobs
      def type_literal_generic_file(column)
        :image
      end

      def alter_table_sql(table, op)
        case op[:op]
        when :add_column
          "ALTER TABLE #{quote_schema_table(table)} ADD #{column_definition_sql(op)}"
        when :drop_column
          "ALTER TABLE #{quote_schema_table(table)} DROP #{column_definition_sql(op)}"
        when :drop_constraint
          case op[:type]
          when :primary_key
            "ALTER TABLE #{quote_schema_table(table)} DROP PRIMARY KEY"
          when :foreign_key
            if op[:name] || op[:columns]
              name = op[:name] || foreign_key_name(table, op[:columns])
              if name
                "ALTER TABLE #{quote_schema_table(table)} DROP FOREIGN KEY #{quote_identifier(name)}"
              end
            end
          else
            super
          end
        when :rename_column
          "ALTER TABLE #{quote_schema_table(table)} RENAME #{quote_identifier(op[:name])} TO #{quote_identifier(op[:new_name].to_s)}"
        when :set_column_type
          "ALTER TABLE #{quote_schema_table(table)} ALTER #{quote_identifier(op[:name])} #{type_literal(op)}"
        when :set_column_null
          "ALTER TABLE #{quote_schema_table(table)} ALTER #{quote_identifier(op[:name])} #{'NOT ' unless op[:null]}NULL"
        when :set_column_default
          "ALTER TABLE #{quote_schema_table(table)} ALTER #{quote_identifier(op[:name])} DEFAULT #{literal(op[:default])}"
        else
          super(table, op)
        end
      end

      # SQLAnywhere tinyint types are unsigned.
      def column_schema_tinyint_type_is_unsigned?
        true
      end

      # SqlAnywhere doesn't support CREATE TABLE AS, it only supports SELECT INTO.
      # Emulating CREATE TABLE AS using SELECT INTO is only possible if a dataset
      # is given as the argument, it can't work with a string, so raise an
      # Error if a string is given.
      def create_table_as(name, ds, options)
        raise(Error, "must provide dataset instance as value of create_table :as option on SqlAnywhere") unless ds.is_a?(Sequel::Dataset)
        run(ds.into(name).sql)
      end

      # Use ALTER TABLE ... RENAME to rename the table
      def rename_table_sql(name, new_name)
        "ALTER TABLE #{quote_schema_table(name)} RENAME #{quote_schema_table(new_name)}"
      end

      # Convert smallint type to boolean if convert_smallint_to_bool is true
      def schema_column_type(db_type)
        if convert_smallint_to_bool && db_type =~ /smallint/i
          :boolean
        elsif db_type =~ /unsigned (big)?int/i
          :integer
        else
          super
        end
      end

      def tables_and_views(type, opts=OPTS)
        m = output_identifier_meth
        metadata_dataset.
          from{sysobjects.as(:a)}.
          where{{a[:type]=>type}}.
          select_map{a[:name]}.
          map{|n| m.call(n)}
      end

      # SQLAnywhere supports views with check option, but not local.
      def view_with_check_option_support
        true
      end
    end

    module DatasetMethods
      Dataset.def_sql_method(self, :insert, %w'insert into columns values')
      Dataset.def_sql_method(self, :select, %w'with select distinct limit columns into from join where group having window compounds order lock')

      include ::Sequel::Dataset::ColumnsLimit1

      # Whether to convert smallint to boolean arguments for this dataset.
      # Defaults to the Database setting.
      def convert_smallint_to_bool
        opts.has_key?(:convert_smallint_to_bool) ? opts[:convert_smallint_to_bool] : db.convert_smallint_to_bool
      end

      # Return a cloned dataset with the convert_smallint_to_bool option set.
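      # For example (an illustrative sketch; the table name is hypothetical):
      #
      #   ds = DB[:table].with_convert_smallint_to_bool(false)
      #   # smallint columns in ds's results are returned as integers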
def with_convert_smallint_to_bool(v) clone(:convert_smallint_to_bool=>v) end def supports_cte?(type=:select) type == :select end # SQLAnywhere supports GROUPING SETS def supports_grouping_sets? true end def supports_multiple_column_in? false end def supports_where_true? false end def supports_is_true? false end def supports_join_using? false end def supports_timestamp_usecs? false end def supports_window_clause? true end def supports_window_functions? true end # Uses CROSS APPLY to join the given table into the current dataset. def cross_apply(table) join_table(:cross_apply, table) end # SqlAnywhere requires recursive CTEs to have column aliases. def recursive_cte_requires_column_aliases? true end def complex_expression_sql_append(sql, op, args) case op when :'||' super(sql, :+, args) when :<<, :>> complex_expression_emulate_append(sql, op, args) when :LIKE, :"NOT LIKE" sql << '(' literal_append(sql, args[0]) sql << (op == :LIKE ? ' REGEXP ' : ' NOT REGEXP ') pattern = String.new last_c = '' args[1].each_char do |c| if c == '_' and not pattern.end_with?('\\') and last_c != '\\' pattern << '.' elsif c == '%' and not pattern.end_with?('\\') and last_c != '\\' pattern << '.*' elsif c == '[' and not pattern.end_with?('\\') and last_c != '\\' pattern << '\[' elsif c == ']' and not pattern.end_with?('\\') and last_c != '\\' pattern << '\]' elsif c == '*' and not pattern.end_with?('\\') and last_c != '\\' pattern << '\*' elsif c == '?' and not pattern.end_with?('\\') and last_c != '\\' pattern << '\?' else pattern << c end if c == '\\' and last_c == '\\' last_c = '' else last_c = c end end literal_append(sql, pattern) sql << " ESCAPE " literal_append(sql, "\\") sql << ')' when :ILIKE, :"NOT ILIKE" super(sql, (op == :ILIKE ? :LIKE : :"NOT LIKE"), args) when :extract sql << 'datepart(' literal_append(sql, args[0]) sql << ',' literal_append(sql, args[1]) sql << ')' else super end end # SqlAnywhere uses \\ to escape metacharacters, but a ']' should not be escaped def escape_like(string) string.gsub(/[\\%_\[]/){|m| "\\#{m}"} end # Use today() for CURRENT_DATE and now() for CURRENT_TIMESTAMP and CURRENT_TIME def constant_sql_append(sql, constant) case constant when :CURRENT_DATE sql << 'today()' when :CURRENT_TIMESTAMP, :CURRENT_TIME sql << 'now()' else super end end # Specify a table for a SELECT ... INTO query. def into(table) clone(:into => table) end private # Use 1 for true on Sybase def literal_true '1' end # Use 0 for false on Sybase def literal_false '0' end # SQL fragment for String. Doubles \ and ' by default. def literal_string_append(sql, v) sql << "'" << v.gsub("\\", "\\\\\\\\").gsub("'", "''") << "'" end # SqlAnywhere uses a preceding X for hex escaping strings def literal_blob_append(sql, v) if v.empty? literal_append(sql, "") else sql << "0x" << v.unpack("H*").first end end # Sybase supports multiple rows in INSERT. def multi_insert_sql_strategy :values end # SQLAnywhere does not natively support NULLS FIRST/LAST. def requires_emulating_nulls_first? true end def select_into_sql(sql) if i = @opts[:into] sql << " INTO " identifier_append(sql, i) end end # Sybase uses TOP N for limit. def select_limit_sql(sql) l = @opts[:limit] o = @opts[:offset] if l || o if l sql << " TOP " literal_append(sql, l) else sql << " TOP 2147483647" end if o sql << " START AT (" literal_append(sql, o) sql << " + 1)" end end end # Use WITH RECURSIVE instead of WITH if any of the CTEs is recursive def select_with_sql_base opts[:with].any?{|w| w[:recursive]} ? 
"WITH RECURSIVE " : super end def join_type_sql(join_type) case join_type when :cross_apply 'CROSS APPLY' when :outer_apply 'OUTER APPLY' else super end end # SQLAnywhere supports millisecond timestamp precision. def timestamp_precision 3 end end end end sequel-5.63.0/lib/sequel/adapters/shared/sqlite.rb000066400000000000000000001162141434214120600220500ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../utils/replace' require_relative '../utils/unmodified_identifiers' module Sequel module SQLite Sequel::Database.set_shared_adapter_scheme(:sqlite, self) def self.mock_adapter_setup(db) db.instance_exec do @sqlite_version = 30903 def schema_parse_table(*) [] end singleton_class.send(:private, :schema_parse_table) end end # No matter how you connect to SQLite, the following Database options # can be used to set PRAGMAs on connections in a thread-safe manner: # :auto_vacuum, :foreign_keys, :synchronous, and :temp_store. module DatabaseMethods include UnmodifiedIdentifiers::DatabaseMethods AUTO_VACUUM = [:none, :full, :incremental].freeze SYNCHRONOUS = [:off, :normal, :full].freeze TEMP_STORE = [:default, :file, :memory].freeze TRANSACTION_MODE = { :deferred => "BEGIN DEFERRED TRANSACTION".freeze, :immediate => "BEGIN IMMEDIATE TRANSACTION".freeze, :exclusive => "BEGIN EXCLUSIVE TRANSACTION".freeze, nil => "BEGIN".freeze }.freeze # Whether to use integers for booleans in the database. SQLite recommends # booleans be stored as integers, but historically Sequel has used 't'/'f'. attr_accessor :integer_booleans # Whether to keep CURRENT_TIMESTAMP and similar expressions in UTC. By # default, the expressions are converted to localtime. attr_accessor :current_timestamp_utc # A symbol signifying the value of the default transaction mode attr_reader :transaction_mode # Set the default transaction mode. def transaction_mode=(value) if TRANSACTION_MODE.include?(value) @transaction_mode = value else raise Error, "Invalid value for transaction_mode. Please specify one of :deferred, :immediate, :exclusive, nil" end end # SQLite uses the :sqlite database type. def database_type :sqlite end # Set the integer_booleans option using the passed in :integer_boolean option. def set_integer_booleans @integer_booleans = @opts.has_key?(:integer_booleans) ? typecast_value_boolean(@opts[:integer_booleans]) : true end # Return the array of foreign key info hashes using the foreign_key_list PRAGMA, # including information for the :on_update and :on_delete entries. def foreign_key_list(table, opts=OPTS) m = output_identifier_meth h = {} _foreign_key_list_ds(table).each do |row| if r = h[row[:id]] r[:columns] << m.call(row[:from]) r[:key] << m.call(row[:to]) if r[:key] else h[row[:id]] = {:columns=>[m.call(row[:from])], :table=>m.call(row[:table]), :key=>([m.call(row[:to])] if row[:to]), :on_update=>on_delete_sql_to_sym(row[:on_update]), :on_delete=>on_delete_sql_to_sym(row[:on_delete])} end end h.values end def freeze sqlite_version use_timestamp_timezones? super end # Use the index_list and index_info PRAGMAs to determine the indexes on the table. 
      def indexes(table, opts=OPTS)
        m = output_identifier_meth
        im = input_identifier_meth
        indexes = {}
        table = table.value if table.is_a?(Sequel::SQL::Identifier)
        metadata_dataset.with_sql("PRAGMA index_list(?)", im.call(table)).each do |r|
          if opts[:only_autocreated]
            # If specifically asked for only autocreated indexes, then return those and only those
            next unless r[:name] =~ /\Asqlite_autoindex_/
          elsif r.has_key?(:origin)
            # If origin is set, then only exclude primary key indexes and partial indexes
            next if r[:origin] == 'pk'
            next if r[:partial].to_i == 1
          else
            # When :origin key not present, assume any autoindex could be a primary key one and exclude it
            next if r[:name] =~ /\Asqlite_autoindex_/
          end

          indexes[m.call(r[:name])] = {:unique=>r[:unique].to_i==1}
        end
        indexes.each do |k, v|
          v[:columns] = metadata_dataset.with_sql("PRAGMA index_info(?)", im.call(k)).map(:name).map{|x| m.call(x)}
        end
        indexes
      end

      # The version of the server as an integer, where 3.6.19 = 30619.
      # If the server version can't be determined, 0 is used.
      def sqlite_version
        return @sqlite_version if defined?(@sqlite_version)
        @sqlite_version = begin
          v = fetch('SELECT sqlite_version()').single_value
          [10000, 100, 1].zip(v.split('.')).inject(0){|a, m| a + m[0] * Integer(m[1])}
        rescue
          0
        end
      end

      # SQLite supports CREATE TABLE IF NOT EXISTS syntax since 3.3.0.
      def supports_create_table_if_not_exists?
        sqlite_version >= 30300
      end

      # SQLite 3.6.19+ supports deferrable foreign key constraints.
      def supports_deferrable_foreign_key_constraints?
        sqlite_version >= 30619
      end

      # SQLite 3.8.0+ supports partial indexes.
      def supports_partial_indexes?
        sqlite_version >= 30800
      end

      # SQLite 3.6.8+ supports savepoints.
      def supports_savepoints?
        sqlite_version >= 30608
      end

      # Override the default setting for whether to use timezones in timestamps.
      # It is set to +false+ by default, as SQLite's date/time methods do not
      # support timezones in timestamps.
      attr_writer :use_timestamp_timezones

      # SQLite supports timezones in timestamps, since it just stores them as strings,
      # but it breaks the usage of SQLite's datetime functions.
      def use_timestamp_timezones?
        defined?(@use_timestamp_timezones) ? @use_timestamp_timezones : (@use_timestamp_timezones = false)
      end

      # Array of symbols specifying the table names in the current database.
      #
      # Options:
      # :server :: Set the server to use.
      def tables(opts=OPTS)
        tables_and_views(Sequel.~(:name=>'sqlite_sequence') & {:type => 'table'}, opts)
      end

      # Creates a dataset that uses the VALUES clause:
      #
      #   DB.values([[1, 2], [3, 4]])
      #   # VALUES ((1, 2), (3, 4))
      def values(v)
        @default_dataset.clone(:values=>v)
      end

      # Array of symbols specifying the view names in the current database.
      #
      # Options:
      # :server :: Set the server to use.
      def views(opts=OPTS)
        tables_and_views({:type => 'view'}, opts)
      end

      private

      # Dataset used for parsing foreign key lists
      def _foreign_key_list_ds(table)
        metadata_dataset.with_sql("PRAGMA foreign_key_list(?)", input_identifier_meth.call(table))
      end

      # Dataset used for parsing schema
      def _parse_pragma_ds(table_name, opts)
        metadata_dataset.with_sql("PRAGMA table_#{'x' if sqlite_version > 33100}info(?)", input_identifier_meth(opts[:dataset]).call(table_name))
      end

      # Run all alter_table commands in a transaction.  This is technically only
      # needed for drop column.
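      # For example, on older SQLite versions a migration such as the following
      # (with a hypothetical table and column name) is emulated by copying the
      # table, with all of the involved statements running in one transaction:
      #
      #   DB.alter_table(:table){drop_column :a}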
      def apply_alter_table(table, ops)
        fks = fetch("PRAGMA foreign_keys")
        if fks
          run "PRAGMA foreign_keys = 0"
          run "PRAGMA legacy_alter_table = 1" if sqlite_version >= 32600
        end
        transaction do
          if ops.length > 1 && ops.all?{|op| op[:op] == :add_constraint || op[:op] == :set_column_null}
            null_ops, ops = ops.partition{|op| op[:op] == :set_column_null}

            # Apply NULL/NOT NULL ops first, since those should be purely independent of the constraints.
            null_ops.each{|op| alter_table_sql_list(table, [op]).flatten.each{|sql| execute_ddl(sql)}}

            # If you are just doing constraints, apply all of them at the same time,
            # as otherwise all but the last one get lost.
            alter_table_sql_list(table, [{:op=>:add_constraints, :ops=>ops}]).flatten.each{|sql| execute_ddl(sql)}
          else
            # Run each operation separately, as later operations may depend on the
            # results of earlier operations.
            ops.each{|op| alter_table_sql_list(table, [op]).flatten.each{|sql| execute_ddl(sql)}}
          end
        end
        remove_cached_schema(table)
      ensure
        if fks
          run "PRAGMA foreign_keys = 1"
          run "PRAGMA legacy_alter_table = 0" if sqlite_version >= 32600
        end
      end

      # SQLite supports limited table modification.  You can add a column
      # or an index.  Dropping columns is supported by copying the table into
      # a temporary table, dropping the table, and creating a new table without
      # the column inside of a transaction.
      def alter_table_sql(table, op)
        case op[:op]
        when :add_index, :drop_index
          super
        when :add_column
          if op[:unique] || op[:primary_key]
            duplicate_table(table){|columns| columns.push(op)}
          else
            super
          end
        when :drop_column
          if sqlite_version >= 33500
            super
          else
            ocp = lambda{|oc| oc.delete_if{|c| c.to_s == op[:name].to_s}}
            duplicate_table(table, :old_columns_proc=>ocp){|columns| columns.delete_if{|s| s[:name].to_s == op[:name].to_s}}
          end
        when :rename_column
          if sqlite_version >= 32500
            super
          else
            ncp = lambda{|nc| nc.map!{|c| c.to_s == op[:name].to_s ?
op[:new_name] : c}}
            duplicate_table(table, :new_columns_proc=>ncp){|columns| columns.each{|s| s[:name] = op[:new_name] if s[:name].to_s == op[:name].to_s}}
          end
        when :set_column_default
          duplicate_table(table){|columns| columns.each{|s| s[:default] = op[:default] if s[:name].to_s == op[:name].to_s}}
        when :set_column_null
          duplicate_table(table){|columns| columns.each{|s| s[:null] = op[:null] if s[:name].to_s == op[:name].to_s}}
        when :set_column_type
          duplicate_table(table){|columns| columns.each{|s| s.merge!(op) if s[:name].to_s == op[:name].to_s}}
        when :drop_constraint
          case op[:type]
          when :primary_key
            duplicate_table(table) do |columns|
              columns.each do |s|
                s[:unique] = false if s[:primary_key]
                s[:primary_key] = s[:auto_increment] = nil
              end
            end
          when :foreign_key
            if op[:columns]
              duplicate_table(table, :skip_foreign_key_columns=>op[:columns])
            else
              duplicate_table(table, :no_foreign_keys=>true)
            end
          when :unique
            duplicate_table(table, :no_unique=>true)
          else
            duplicate_table(table)
          end
        when :add_constraint
          duplicate_table(table, :constraints=>[op])
        when :add_constraints
          duplicate_table(table, :constraints=>op[:ops])
        else
          raise Error, "Unsupported ALTER TABLE operation: #{op[:op].inspect}"
        end
      end

      def begin_new_transaction(conn, opts)
        mode = opts[:mode] || @transaction_mode
        sql = TRANSACTION_MODE[mode] or raise Error, "transaction :mode must be one of: :deferred, :immediate, :exclusive, nil"
        log_connection_execute(conn, sql)
        set_transaction_isolation(conn, opts)
      end

      # A name to use for the backup table
      def backup_table_name(table, opts=OPTS)
        table = table.gsub('`', '')
        (opts[:times]||1000).times do |i|
          table_name = "#{table}_backup#{i}"
          return table_name unless table_exists?(table_name)
        end
      end

      # SQLite allows adding primary key constraints on NULLABLE columns, but then
      # does not enforce NOT NULL for such columns, so force setting the columns NOT NULL.
      def can_add_primary_key_constraint_on_nullable_columns?
        false
      end

      # Surround default with parens to appease SQLite.  Add support for GENERATED ALWAYS AS.
      def column_definition_default_sql(sql, column)
        sql << " DEFAULT (#{literal(column[:default])})" if column.include?(:default)
        if (generated = column[:generated_always_as])
          if (generated_type = column[:generated_type]) && (generated_type == :stored || generated_type == :virtual)
            generated_type = generated_type.to_s.upcase
          end

          sql << " GENERATED ALWAYS AS (#{literal(generated)}) #{generated_type}"
        end
      end

      # SQLite does not restrict the integer type to a specific range.
      def column_schema_integer_min_max_values(db_type)
        nil
      end

      # Array of PRAGMA SQL statements based on the Database options that should be applied to
      # new connections.
      def connection_pragmas
        ps = []
        v = typecast_value_boolean(opts.fetch(:foreign_keys, 1))
        ps << "PRAGMA foreign_keys = #{v ? 1 : 0}"
        v = typecast_value_boolean(opts.fetch(:case_sensitive_like, 1))
        ps << "PRAGMA case_sensitive_like = #{v ? 1 : 0}"
        [[:auto_vacuum, AUTO_VACUUM], [:synchronous, SYNCHRONOUS], [:temp_store, TEMP_STORE]].each do |prag, con|
          if v = opts[prag]
            raise(Error, "Value for PRAGMA #{prag} not supported, should be one of #{con.join(', ')}") unless v = con.index(v.to_sym)
            ps << "PRAGMA #{prag} = #{v}"
          end
        end
        ps
      end

      # Support creating STRICT tables via :strict option
      def create_table_sql(name, generator, options)
        "#{super}#{' STRICT' if options[:strict]}"
      end

      # SQLite supports creating temporary views.
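      # For example (an illustrative sketch; the view and table names are
      # hypothetical and the SQL shown is approximate):
      #
      #   DB.create_view(:v, DB[:table], temp: true)
      #   # CREATE TEMPORARY VIEW `v` AS SELECT * FROM `table`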
def create_view_prefix_sql(name, options) create_view_sql_append_columns("CREATE #{'TEMPORARY 'if options[:temp]}VIEW #{quote_schema_table(name)}", options[:columns]) end DATABASE_ERROR_REGEXPS = { /(is|are) not unique\z|PRIMARY KEY must be unique\z|UNIQUE constraint failed: .+\z/ => UniqueConstraintViolation, /foreign key constraint failed\z/i => ForeignKeyConstraintViolation, /\A(SQLITE ERROR 275 \(CONSTRAINT_CHECK\) : )?CHECK constraint failed/ => CheckConstraintViolation, /\A(SQLITE ERROR 19 \(CONSTRAINT\) : )?constraint failed\z/ => ConstraintViolation, /\Acannot store [A-Z]+ value in [A-Z]+ column / => ConstraintViolation, /may not be NULL\z|NOT NULL constraint failed: .+\z/ => NotNullConstraintViolation, /\ASQLITE ERROR \d+ \(\) : CHECK constraint failed: / => CheckConstraintViolation }.freeze def database_error_regexps DATABASE_ERROR_REGEXPS end # Recognize SQLite error codes if the exception provides access to them. def database_specific_error_class(exception, opts) case sqlite_error_code(exception) when 1299 NotNullConstraintViolation when 1555, 2067, 2579 UniqueConstraintViolation when 787 ForeignKeyConstraintViolation when 275 CheckConstraintViolation when 19 ConstraintViolation when 517 SerializationFailure else super end end # The array of column schema hashes for the current columns in the table def defined_columns_for(table) cols = parse_pragma(table, OPTS) cols.each do |c| c[:default] = LiteralString.new(c[:default]) if c[:default] c[:type] = c[:db_type] end cols end # Duplicate an existing table by creating a new table, copying all records # from the existing table into the new table, deleting the existing table # and renaming the new table to the existing table's name. def duplicate_table(table, opts=OPTS) remove_cached_schema(table) def_columns = defined_columns_for(table) old_columns = def_columns.map{|c| c[:name]} opts[:old_columns_proc].call(old_columns) if opts[:old_columns_proc] yield def_columns if defined?(yield) constraints = (opts[:constraints] || []).dup pks = [] def_columns.each{|c| pks << c[:name] if c[:primary_key]} if pks.length > 1 constraints << {:type=>:primary_key, :columns=>pks} def_columns.each{|c| c[:primary_key] = false if c[:primary_key]} end # If dropping a foreign key constraint, drop all foreign key constraints, # as there is no way to determine which one to drop. unless opts[:no_foreign_keys] fks = foreign_key_list(table) # If dropping a column, if there is a foreign key with that # column, don't include it when building a copy of the table. if ocp = opts[:old_columns_proc] fks.delete_if{|c| ocp.call(c[:columns].dup) != c[:columns]} end # Skip any foreign key columns where a constraint for those # foreign keys is being dropped. if sfkc = opts[:skip_foreign_key_columns] fks.delete_if{|c| c[:columns] == sfkc} end constraints.concat(fks.each{|h| h[:type] = :foreign_key}) end # Determine unique constraints and make sure the new columns have them unique_columns = [] skip_indexes = [] indexes(table, :only_autocreated=>true).each do |name, h| skip_indexes << name if h[:unique] && !opts[:no_unique] if h[:columns].length == 1 unique_columns.concat(h[:columns]) elsif h[:columns].map(&:to_s) != pks constraints << {:type=>:unique, :columns=>h[:columns]} end end end unique_columns -= pks unless unique_columns.empty? 
unique_columns.map!{|c| quote_identifier(c)} def_columns.each do |c| c[:unique] = true if unique_columns.include?(quote_identifier(c[:name])) && c[:unique] != false end end def_columns_str = (def_columns.map{|c| column_definition_sql(c)} + constraints.map{|c| constraint_definition_sql(c)}).join(', ') new_columns = old_columns.dup opts[:new_columns_proc].call(new_columns) if opts[:new_columns_proc] qt = quote_schema_table(table) bt = quote_identifier(backup_table_name(qt)) a = [ "ALTER TABLE #{qt} RENAME TO #{bt}", "CREATE TABLE #{qt}(#{def_columns_str})", "INSERT INTO #{qt}(#{dataset.send(:identifier_list, new_columns)}) SELECT #{dataset.send(:identifier_list, old_columns)} FROM #{bt}", "DROP TABLE #{bt}" ] indexes(table).each do |name, h| next if skip_indexes.include?(name) if (h[:columns].map(&:to_s) - new_columns).empty? a << alter_table_sql(table, h.merge(:op=>:add_index, :name=>name)) end end a end # Does the reverse of on_delete_clause, eg. converts strings like +'SET NULL'+ # to symbols +:set_null+. def on_delete_sql_to_sym(str) case str when 'RESTRICT' :restrict when 'CASCADE' :cascade when 'SET NULL' :set_null when 'SET DEFAULT' :set_default when 'NO ACTION' :no_action end end # Parse the output of the table_info pragma def parse_pragma(table_name, opts) pks = 0 sch = _parse_pragma_ds(table_name, opts).map do |row| if sqlite_version > 33100 # table_xinfo PRAGMA used, remove hidden columns # that are not generated columns if row[:generated] = (row.delete(:hidden) != 0) next unless row[:type].end_with?(' GENERATED ALWAYS') row[:type] = row[:type].sub(' GENERATED ALWAYS', '') end end row.delete(:cid) row[:allow_null] = row.delete(:notnull).to_i == 0 row[:default] = row.delete(:dflt_value) row[:default] = nil if blank_object?(row[:default]) || row[:default] == 'NULL' row[:db_type] = row.delete(:type) if row[:primary_key] = row.delete(:pk).to_i > 0 pks += 1 # Guess that an integer primary key uses auto increment, # since that is Sequel's default and SQLite does not provide # a way to introspect whether it is actually autoincrementing. row[:auto_increment] = row[:db_type].downcase == 'integer' end row[:type] = schema_column_type(row[:db_type]) row end sch.compact! if pks > 1 # SQLite does not allow use of auto increment for tables # with composite primary keys, so remove auto_increment # if composite primary keys are detected. sch.each{|r| r.delete(:auto_increment)} end sch end # SQLite supports schema parsing using the table_info PRAGMA, so # parse the output of that into the format Sequel expects. def schema_parse_table(table_name, opts) m = output_identifier_meth(opts[:dataset]) parse_pragma(table_name, opts).map do |row| [m.call(row.delete(:name)), row] end end # Don't support SQLite error codes for exceptions by default. def sqlite_error_code(exception) nil end # Backbone of the tables and views support. def tables_and_views(filter, opts) m = output_identifier_meth metadata_dataset.from(:sqlite_master).server(opts[:server]).where(filter).map{|r| m.call(r[:name])} end # SQLite only supports AUTOINCREMENT on integer columns, not # bigint columns, so use integer instead of bigint for those # columns. def type_literal_generic_bignum_symbol(column) column[:auto_increment] ? 
:integer : super end end module DatasetMethods include Dataset::Replace include UnmodifiedIdentifiers::DatasetMethods # The allowed values for insert_conflict INSERT_CONFLICT_RESOLUTIONS = %w'ROLLBACK ABORT FAIL IGNORE REPLACE'.each(&:freeze).freeze CONSTANT_MAP = {:CURRENT_DATE=>"date(CURRENT_TIMESTAMP, 'localtime')".freeze, :CURRENT_TIMESTAMP=>"datetime(CURRENT_TIMESTAMP, 'localtime')".freeze, :CURRENT_TIME=>"time(CURRENT_TIMESTAMP, 'localtime')".freeze}.freeze EXTRACT_MAP = {:year=>"'%Y'", :month=>"'%m'", :day=>"'%d'", :hour=>"'%H'", :minute=>"'%M'", :second=>"'%f'"}.freeze EXTRACT_MAP.each_value(&:freeze) Dataset.def_sql_method(self, :delete, [['if db.sqlite_version >= 33500', %w'with delete from where returning'], ['elsif db.sqlite_version >= 30803', %w'with delete from where'], ["else", %w'delete from where']]) Dataset.def_sql_method(self, :insert, [['if db.sqlite_version >= 33500', %w'with insert conflict into columns values on_conflict returning'], ['elsif db.sqlite_version >= 30803', %w'with insert conflict into columns values on_conflict'], ["else", %w'insert conflict into columns values']]) Dataset.def_sql_method(self, :select, [['if opts[:values]', %w'with values compounds'], ['else', %w'with select distinct columns from join where group having window compounds order limit lock']]) Dataset.def_sql_method(self, :update, [['if db.sqlite_version >= 33500', %w'with update table set from where returning'], ['elsif db.sqlite_version >= 33300', %w'with update table set from where'], ['elsif db.sqlite_version >= 30803', %w'with update table set where'], ["else", %w'update table set where']]) def cast_sql_append(sql, expr, type) if type == Time or type == DateTime sql << "datetime(" literal_append(sql, expr) sql << ')' elsif type == Date sql << "date(" literal_append(sql, expr) sql << ')' else super end end # SQLite doesn't support a NOT LIKE b, you need to use NOT (a LIKE b). # It doesn't support xor, power, or the extract function natively, so those have to be emulated. def complex_expression_sql_append(sql, op, args) case op when :"NOT LIKE", :"NOT ILIKE" sql << 'NOT ' complex_expression_sql_append(sql, (op == :"NOT ILIKE" ? :ILIKE : :LIKE), args) when :^ complex_expression_arg_pairs_append(sql, args){|a, b| Sequel.lit(["((~(", " & ", ")) & (", " | ", "))"], a, b, a, b)} when :** unless (exp = args[1]).is_a?(Integer) raise(Sequel::Error, "can only emulate exponentiation on SQLite if exponent is an integer, given #{exp.inspect}") end case exp when 0 sql << '1' else sql << '(' arg = args[0] if exp < 0 invert = true exp = exp.abs sql << '(1.0 / (' end (exp - 1).times do literal_append(sql, arg) sql << " * " end literal_append(sql, arg) sql << ')' if invert sql << "))" end end when :extract part = args[0] raise(Sequel::Error, "unsupported extract argument: #{part.inspect}") unless format = EXTRACT_MAP[part] sql << "CAST(strftime(" << format << ', ' literal_append(sql, args[1]) sql << ') AS ' << (part == :second ? 'NUMERIC' : 'INTEGER') << ')' else super end end # SQLite has CURRENT_TIMESTAMP and related constants in UTC instead # of in localtime, so convert those constants to local time. def constant_sql_append(sql, constant) if (c = CONSTANT_MAP[constant]) && !db.current_timestamp_utc sql << c else super end end # SQLite performs a TRUNCATE style DELETE if no filter is specified. # Since we want to always return the count of records, add a condition # that is always true and then delete. def delete(&block) @opts[:where] ? 
super : where(1=>1).delete(&block)
      end

      # Return an array of strings specifying a query explanation for a SELECT of the
      # current dataset. Currently, the options are ignored, but it accepts options
      # to be compatible with other adapters.
      def explain(opts=nil)
        # Load the PrettyTable class, needed for explain output
        Sequel.extension(:_pretty_table) unless defined?(Sequel::PrettyTable)

        ds = db.send(:metadata_dataset).clone(:sql=>"EXPLAIN #{select_sql}")
        rows = ds.all
        Sequel::PrettyTable.string(rows, ds.columns)
      end

      # HAVING requires GROUP BY on SQLite
      def having(*cond)
        raise(InvalidOperation, "Can only specify a HAVING clause on a grouped dataset") if !@opts[:group] && db.sqlite_version < 33900
        super
      end

      # Support insert select for associations, so that the model code can use
      # returning instead of a separate query.
      def insert_select(*values)
        return unless supports_insert_select?
        # Handle case where query does not return a row
        server?(:default).with_sql_first(insert_select_sql(*values)) || false
      end

      # The SQL to use for an insert_select, adds a RETURNING clause to the insert
      # unless the RETURNING clause is already present.
      def insert_select_sql(*values)
        ds = opts[:returning] ? self : returning
        ds.insert_sql(*values)
      end

      # SQLite uses the nonstandard ` (backtick) for quoting identifiers.
      def quoted_identifier_append(sql, c)
        sql << '`' << c.to_s.gsub('`', '``') << '`'
      end

      # When a qualified column is selected on SQLite and the qualifier
      # is a subselect, the column name used is the full qualified name
      # (including the qualifier) instead of just the column name.  To
      # get correct column names, you must use an alias.
      def select(*cols)
        if ((f = @opts[:from]) && f.any?{|t| t.is_a?(Dataset) || (t.is_a?(SQL::AliasedExpression) && t.expression.is_a?(Dataset))}) || ((j = @opts[:join]) && j.any?{|t| t.table.is_a?(Dataset)})
          super(*cols.map{|c| alias_qualified_column(c)})
        else
          super
        end
      end

      # Handle uniqueness violations when inserting, by using a specified
      # resolution algorithm.  With no options, uses INSERT OR IGNORE.  SQLite
      # supports the following conflict resolution algorithms: ROLLBACK, ABORT,
      # FAIL, IGNORE and REPLACE.
      #
      # On SQLite 3.24.0+, you can pass a hash to use an ON CONFLICT clause.
      # Without an :update option, uses ON CONFLICT DO NOTHING.  Options:
      #
      # :conflict_where :: The index filter, when using a partial index to determine uniqueness.
      # :target :: The column name or expression to handle uniqueness violations on.
      # :update :: A hash of columns and values to set.  Uses ON CONFLICT DO UPDATE.
      # :update_where :: A WHERE condition to use for the update.
# # Examples: # # DB[:table].insert_conflict.insert(a: 1, b: 2) # # INSERT OR IGNORE INTO TABLE (a, b) VALUES (1, 2) # # DB[:table].insert_conflict(:replace).insert(a: 1, b: 2) # # INSERT OR REPLACE INTO TABLE (a, b) VALUES (1, 2) # # DB[:table].insert_conflict({}).insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT DO NOTHING # # DB[:table].insert_conflict(target: :a).insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT (a) DO NOTHING # # DB[:table].insert_conflict(target: :a, conflict_where: {c: true}).insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT (a) WHERE (c IS TRUE) DO NOTHING # # DB[:table].insert_conflict(target: :a, update: {b: Sequel[:excluded][:b]}).insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT (a) DO UPDATE SET b = excluded.b # # DB[:table].insert_conflict(target: :a, # update: {b: Sequel[:excluded][:b]}, update_where: {Sequel[:table][:status_id] => 1}).insert(a: 1, b: 2) # # INSERT INTO TABLE (a, b) VALUES (1, 2) # # ON CONFLICT (a) DO UPDATE SET b = excluded.b WHERE (table.status_id = 1) def insert_conflict(opts = :ignore) case opts when Symbol, String unless INSERT_CONFLICT_RESOLUTIONS.include?(opts.to_s.upcase) raise Error, "Invalid symbol or string passed to Dataset#insert_conflict: #{opts.inspect}. The allowed values are: :rollback, :abort, :fail, :ignore, or :replace" end clone(:insert_conflict => opts) when Hash clone(:insert_on_conflict => opts) else raise Error, "Invalid value passed to Dataset#insert_conflict: #{opts.inspect}, should use a symbol or a hash" end end # Ignore uniqueness/exclusion violations when inserting, using INSERT OR IGNORE. # Exists mostly for compatibility to MySQL's insert_ignore. Example: # # DB[:table].insert_ignore.insert(a: 1, b: 2) # # INSERT OR IGNORE INTO TABLE (a, b) VALUES (1, 2) def insert_ignore insert_conflict(:ignore) end # Automatically add aliases to RETURNING values to work around SQLite bug. def returning(*values) return super if values.empty? raise Error, "RETURNING is not supported on #{db.database_type}" unless supports_returning?(:insert) clone(:returning=>_returning_values(values).freeze) end # SQLite 3.8.3+ supports common table expressions. def supports_cte?(type=:select) db.sqlite_version >= 30803 end # SQLite supports CTEs in subqueries if it supports CTEs. def supports_cte_in_subqueries? supports_cte? end # SQLite does not support table aliases with column aliases def supports_derived_column_lists? false end # SQLite does not support deleting from a joined dataset def supports_deleting_joins? false end # SQLite does not support INTERSECT ALL or EXCEPT ALL def supports_intersect_except_all? false end # SQLite does not support IS TRUE def supports_is_true? false end # SQLite 3.33.0 supports modifying joined datasets def supports_modifying_joins? db.sqlite_version >= 33300 end # SQLite does not support multiple columns for the IN/NOT IN operators def supports_multiple_column_in? false end # SQLite 3.35.0 supports RETURNING on INSERT/UPDATE/DELETE. def supports_returning?(_) db.sqlite_version >= 33500 end # SQLite supports timezones in literal timestamps, since it stores them # as text. But using timezones in timestamps breaks SQLite datetime # functions, so we allow the user to override the default per database. def supports_timestamp_timezones? db.use_timestamp_timezones? end # SQLite cannot use WHERE 't'. def supports_where_true? false end # SQLite 3.28+ supports the WINDOW clause. 
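      # For example, a named window can be defined with Dataset#window and then
      # referenced from a window function (an illustrative sketch; the table,
      # column, and window names are hypothetical):
      #
      #   DB[:table].select{sum(:v).over(window: :w)}.window(:w, partition: :g)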
def supports_window_clause? db.sqlite_version >= 32800 end # SQLite 3.25+ supports window functions. However, support is only enabled # on SQLite 3.26.0+ because internal Sequel usage of window functions # to implement eager loading of limited associations triggers # an SQLite crash bug in versions 3.25.0-3.25.3. def supports_window_functions? db.sqlite_version >= 32600 end # SQLite 3.28.0+ supports all window frame options that Sequel supports def supports_window_function_frame_option?(option) db.sqlite_version >= 32800 ? true : super end private # Add aliases to symbols and identifiers to work around SQLite bug. def _returning_values(values) values.map do |v| case v when Symbol _, c, a = split_symbol(v) a ? v : Sequel.as(v, c) when SQL::Identifier, SQL::QualifiedIdentifier Sequel.as(v, unqualified_column_for(v)) else v end end end # SQLite uses string literals instead of identifiers in AS clauses. def as_sql_append(sql, aliaz, column_aliases=nil) raise Error, "sqlite does not support derived column lists" if column_aliases aliaz = aliaz.value if aliaz.is_a?(SQL::Identifier) sql << ' AS ' literal_append(sql, aliaz.to_s) end # If col is a qualified column, alias it to the same as the column name def alias_qualified_column(col) case col when Symbol t, c, a = split_symbol(col) if t && !a alias_qualified_column(SQL::QualifiedIdentifier.new(t, c)) else col end when SQL::QualifiedIdentifier SQL::AliasedExpression.new(col, col.column) else col end end # Raise an InvalidOperation exception if insert is not allowed for this dataset. def check_insert_allowed! raise(InvalidOperation, "Grouped datasets cannot be modified") if opts[:group] raise(InvalidOperation, "Joined datasets cannot be modified") if joined_dataset? end alias check_delete_allowed! check_insert_allowed! # SQLite supports a maximum of 500 rows in a VALUES clause. def default_import_slice 500 end # SQL fragment specifying a list of identifiers def identifier_list(columns) columns.map{|i| quote_identifier(i)}.join(', ') end # Add OR clauses to SQLite INSERT statements def insert_conflict_sql(sql) if resolution = @opts[:insert_conflict] sql << " OR " << resolution.to_s.upcase end end # Add ON CONFLICT clause if it should be used def insert_on_conflict_sql(sql) if opts = @opts[:insert_on_conflict] sql << " ON CONFLICT" if target = opts[:constraint] sql << " ON CONSTRAINT " identifier_append(sql, target) elsif target = opts[:target] sql << ' ' identifier_append(sql, Array(target)) if conflict_where = opts[:conflict_where] sql << " WHERE " literal_append(sql, conflict_where) end end if values = opts[:update] sql << " DO UPDATE SET " update_sql_values_hash(sql, values) if update_where = opts[:update_where] sql << " WHERE " literal_append(sql, update_where) end else sql << " DO NOTHING" end end end # SQLite uses a preceding X for hex escaping strings def literal_blob_append(sql, v) sql << "X'" << v.unpack("H*").first << "'" end # Respect the database integer_booleans setting, using 0 or 'f'. def literal_false @db.integer_booleans ? '0' : "'f'" end # Respect the database integer_booleans setting, using 1 or 't'. def literal_true @db.integer_booleans ? '1' : "'t'" end # SQLite only supporting multiple rows in the VALUES clause # starting in 3.7.11. On older versions, fallback to using a UNION. def multi_insert_sql_strategy db.sqlite_version >= 30711 ? 
:values : :union end # Emulate the char_length function with length def native_function_name(emulated_function) if emulated_function == :char_length 'length' else super end end # SQLite supports NULLS FIRST/LAST natively in 3.30+. def requires_emulating_nulls_first? db.sqlite_version < 33000 end # SQLite does not support FOR UPDATE, but silently ignore it # instead of raising an error for compatibility with other # databases. def select_lock_sql(sql) super unless @opts[:lock] == :update end def select_only_offset_sql(sql) sql << " LIMIT -1 OFFSET " literal_append(sql, @opts[:offset]) end # Support VALUES clause instead of the SELECT clause to return rows. def select_values_sql(sql) sql << "VALUES " expression_list_append(sql, opts[:values]) end # SQLite does not support CTEs directly inside UNION/INTERSECT/EXCEPT. def supports_cte_in_compounds? false end # SQLite 3.30 supports the FILTER clause for aggregate functions. def supports_filtered_aggregates? db.sqlite_version >= 33000 end # SQLite supports quoted function names. def supports_quoted_function_names? true end # SQLite treats a DELETE with no WHERE clause as a TRUNCATE def _truncate_sql(table) "DELETE FROM #{table}" end # Use FROM to specify additional tables in an update query def update_from_sql(sql) if(from = @opts[:from][1..-1]).empty? raise(Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') if @opts[:join] else sql << ' FROM ' source_list_append(sql, from) select_join_sql(sql) end end # Only include the primary table in the main update clause def update_table_sql(sql) sql << ' ' source_list_append(sql, @opts[:from][0..0]) end end end end sequel-5.63.0/lib/sequel/adapters/sqlanywhere.rb000066400000000000000000000124251434214120600216420ustar00rootroot00000000000000# frozen-string-literal: true require 'sqlanywhere' require_relative 'shared/sqlanywhere' module Sequel module SqlAnywhere class SQLAnywhereException < StandardError attr_reader :errno attr_reader :sql def initialize(message, errno, sql) super(message) @errno = errno @sql = sql end end boolean = Object.new def boolean.call(s) s.to_i != 0 end date = Object.new def date.call(s) ::Date.strptime(s) end decimal = Object.new class << decimal alias call BigDecimal public :call end time = Object.new def time.call(s) ::Sequel.string_to_time(s) end SQLANYWHERE_TYPES = {} { [0, 484] => decimal, [384] => date, [388] => time, [500] => boolean, [524, 528] => ::Sequel::SQL::Blob }.each do |k,v| k.each{|n| SQLANYWHERE_TYPES[n] = v} end SQLANYWHERE_TYPES.freeze class Database < Sequel::Database include Sequel::SqlAnywhere::DatabaseMethods attr_accessor :api set_adapter_scheme :sqlanywhere def connect(server) opts = server_opts(server) unless conn_string = opts[:conn_string] conn_string = [] conn_string << "Host=#{opts[:host]}#{":#{opts[:port]}" if opts[:port]}" if opts[:host] conn_string << "DBN=#{opts[:database]}" if opts[:database] conn_string << "UID=#{opts[:user]}" if opts[:user] conn_string << "Password=#{opts[:password]}" if opts[:password] conn_string << "CommLinks=#{opts[:commlinks]}" if opts[:commlinks] conn_string << "ConnectionName=#{opts[:connection_name]}" if opts[:connection_name] conn_string << "CharSet=#{opts[:encoding]}" if opts[:encoding] conn_string << "Idle=0" # Prevent the server from disconnecting us if we're idle for >240mins (by default) conn_string << nil conn_string = conn_string.join(';') end conn = @api.sqlany_new_connection raise LoadError, "Could not connect" unless conn && @api.sqlany_connect(conn, conn_string) == 1 if 
Sequel.application_timezone == :utc @api.sqlany_execute_immediate(conn, "SET TEMPORARY OPTION time_zone_adjustment=0") end conn end def disconnect_connection(c) @api.sqlany_disconnect(c) end def execute_dui(sql, opts=OPTS) synchronize(opts[:server]) do |conn| _execute(conn, :rows, sql, opts) end end def execute(sql, opts=OPTS, &block) synchronize(opts[:server]) do |conn| _execute(conn, :select, sql, opts, &block) end end def execute_insert(sql, opts=OPTS) synchronize(opts[:server]) do |conn| _execute(conn, :insert, sql, opts) end end def freeze @conversion_procs.freeze super end private def _execute(conn, type, sql, opts) unless rs = log_connection_yield(sql, conn){@api.sqlany_execute_direct(conn, sql)} result, errstr = @api.sqlany_error(conn) raise_error(SQLAnywhereException.new(errstr, result, sql)) end case type when :select yield rs if defined?(yield) when :rows return @api.sqlany_affected_rows(rs) when :insert _execute(conn, :select, 'SELECT @@IDENTITY', opts){|r| return @api.sqlany_get_column(r, 0)[1] if r && @api.sqlany_fetch_next(r) == 1} end ensure @api.sqlany_commit(conn) unless in_transaction? @api.sqlany_free_stmt(rs) if rs end def adapter_initialize @convert_smallint_to_bool = true @conversion_procs = SQLANYWHERE_TYPES.dup @conversion_procs[392] = method(:to_application_timestamp_sa) @api = SQLAnywhere::SQLAnywhereInterface.new raise LoadError, "Could not load SQLAnywhere DBCAPI library" if SQLAnywhere::API.sqlany_initialize_interface(@api) == 0 raise LoadError, "Could not initialize SQLAnywhere DBCAPI library" if @api.sqlany_init == 0 end def dataset_class_default Dataset end def log_connection_execute(conn, sql) _execute(conn, nil, sql, OPTS) end end class Dataset < Sequel::Dataset include Sequel::SqlAnywhere::DatasetMethods def fetch_rows(sql) db = @db cps = db.conversion_procs api = db.api execute(sql) do |rs| convert = convert_smallint_to_bool col_infos = [] api.sqlany_num_cols(rs).times do |i| _, _, name, _, type = api.sqlany_get_column_info(rs, i) cp = if type == 500 cps[500] if convert else cps[type] end col_infos << [output_identifier(name), cp] end self.columns = col_infos.map(&:first) max = col_infos.length if rs while api.sqlany_fetch_next(rs) == 1 i = -1 h = {} while (i+=1) < max name, cp = col_infos[i] v = api.sqlany_get_column(rs, i)[1] h[name] = cp && v ? 
cp.call(v) : v end yield h end end end self end end end end sequel-5.63.0/lib/sequel/adapters/sqlite.rb000066400000000000000000000315111434214120600205760ustar00rootroot00000000000000# frozen-string-literal: true require 'sqlite3' require_relative 'shared/sqlite' module Sequel module SQLite FALSE_VALUES = (%w'0 false f no n'.each(&:freeze) + [0]).freeze blob = Object.new def blob.call(s) Sequel::SQL::Blob.new(s.to_s) end boolean = Object.new def boolean.call(s) s = s.downcase if s.is_a?(String) !FALSE_VALUES.include?(s) end date = Object.new def date.call(s) case s when String Sequel.string_to_date(s) when Integer Date.jd(s) when Float Date.jd(s.to_i) else raise Sequel::Error, "unhandled type when converting to date: #{s.inspect} (#{s.class.inspect})" end end integer = Object.new def integer.call(s) s.to_i end float = Object.new def float.call(s) s.to_f end numeric = Object.new def numeric.call(s) s = s.to_s unless s.is_a?(String) BigDecimal(s) rescue s end time = Object.new def time.call(s) case s when String Sequel.string_to_time(s) when Integer Sequel::SQLTime.create(s/3600, (s % 3600)/60, s % 60) when Float s, f = s.divmod(1) Sequel::SQLTime.create(s/3600, (s % 3600)/60, s % 60, (f*1000000).round) else raise Sequel::Error, "unhandled type when converting to date: #{s.inspect} (#{s.class.inspect})" end end # Hash with string keys and callable values for converting SQLite types. SQLITE_TYPES = {} { %w'date' => date, %w'time' => time, %w'bit bool boolean' => boolean, %w'integer smallint mediumint int bigint' => integer, %w'numeric decimal money' => numeric, %w'float double real dec fixed' + ['double precision'] => float, %w'blob' => blob }.each do |k,v| k.each{|n| SQLITE_TYPES[n] = v} end SQLITE_TYPES.freeze sqlite_version = SQLite3::VERSION.split('.').map(&:to_i)[0..1] sqlite_version = sqlite_version[0] * 100 + sqlite_version[1] USE_EXTENDED_RESULT_CODES = sqlite_version >= 104 class Database < Sequel::Database include ::Sequel::SQLite::DatabaseMethods set_adapter_scheme :sqlite # Mimic the file:// uri, by having 2 preceding slashes specify a relative # path, and 3 preceding slashes specify an absolute path. def self.uri_to_options(uri) # :nodoc: { :database => (uri.host.nil? && uri.path == '/') ? nil : "#{uri.host}#{uri.path}" } end private_class_method :uri_to_options # The conversion procs to use for this database attr_reader :conversion_procs def initialize(opts = OPTS) super @allow_regexp = typecast_value_boolean(opts[:setup_regexp_function]) end # Connect to the database. 
Since SQLite is a file based database, # available options are limited: # # :database :: database name (filename or ':memory:' or file: URI) # :readonly :: open database in read-only mode; useful for reading # static data that you do not want to modify # :timeout :: how long to wait for the database to be available if it # is locked, given in milliseconds (default is 5000) def connect(server) opts = server_opts(server) opts[:database] = ':memory:' if blank_object?(opts[:database]) sqlite3_opts = {} sqlite3_opts[:readonly] = typecast_value_boolean(opts[:readonly]) if opts.has_key?(:readonly) db = ::SQLite3::Database.new(opts[:database].to_s, sqlite3_opts) db.busy_timeout(typecast_value_integer(opts.fetch(:timeout, 5000))) if USE_EXTENDED_RESULT_CODES db.extended_result_codes = true end connection_pragmas.each{|s| log_connection_yield(s, db){db.execute_batch(s)}} if typecast_value_boolean(opts[:setup_regexp_function]) db.create_function("regexp", 2) do |func, regexp_str, string| func.result = Regexp.new(regexp_str).match(string) ? 1 : 0 end end class << db attr_reader :prepared_statements end db.instance_variable_set(:@prepared_statements, {}) db end # Whether this Database instance is setup to allow regexp matching. # True if the :setup_regexp_function option was passed when creating the Database. def allow_regexp? @allow_regexp end # Disconnect given connections from the database. def disconnect_connection(c) c.prepared_statements.each_value{|v| v.first.close} c.close end # Run the given SQL with the given arguments and yield each row. def execute(sql, opts=OPTS, &block) _execute(:select, sql, opts, &block) end # Run the given SQL with the given arguments and return the number of changed rows. def execute_dui(sql, opts=OPTS) _execute(:update, sql, opts) end # Drop any prepared statements on the connection when executing DDL. This is because # prepared statements lock the table in such a way that you can't drop or alter the # table while a prepared statement that references it still exists. def execute_ddl(sql, opts=OPTS) synchronize(opts[:server]) do |conn| conn.prepared_statements.values.each{|cps, s| cps.close} conn.prepared_statements.clear super end end def execute_insert(sql, opts=OPTS) _execute(:insert, sql, opts) end def freeze @conversion_procs.freeze super end # Handle Integer and Float arguments, since SQLite can store timestamps as integers and floats. def to_application_timestamp(s) case s when String super when Integer super(Time.at(s).to_s) when Float super(DateTime.jd(s).to_s) else raise Sequel::Error, "unhandled type when converting to : #{s.inspect} (#{s.class.inspect})" end end private def adapter_initialize @conversion_procs = SQLITE_TYPES.dup @conversion_procs['datetime'] = @conversion_procs['timestamp'] = method(:to_application_timestamp) set_integer_booleans end # Yield an available connection. Rescue # any SQLite3::Exceptions and turn them into DatabaseErrors. 
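      # The type argument selects the sqlite3 driver API used and the return
      # value: :select yields each result row, :insert returns the id of the
      # last inserted row, and :update returns the number of changed rows.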
def _execute(type, sql, opts, &block) synchronize(opts[:server]) do |conn| return execute_prepared_statement(conn, type, sql, opts, &block) if sql.is_a?(Symbol) log_args = opts[:arguments] args = {} opts.fetch(:arguments, OPTS).each{|k, v| args[k] = prepared_statement_argument(v)} case type when :select log_connection_yield(sql, conn, log_args){conn.query(sql, args, &block)} when :insert log_connection_yield(sql, conn, log_args){conn.execute(sql, args)} conn.last_insert_row_id when :update log_connection_yield(sql, conn, log_args){conn.execute_batch(sql, args)} conn.changes end end rescue SQLite3::Exception => e raise_error(e) end # The SQLite adapter does not need the pool to convert exceptions. # Also, force the max connections to 1 if a memory database is being # used, as otherwise each connection gets a separate database. def connection_pool_default_options o = super.dup # Default to only a single connection if a memory database is used, # because otherwise each connection will get a separate database o[:max_connections] = 1 if @opts[:database] == ':memory:' || blank_object?(@opts[:database]) o end def prepared_statement_argument(arg) case arg when Date, DateTime, Time literal(arg)[1...-1] when SQL::Blob arg.to_blob when true, false if integer_booleans arg ? 1 : 0 else literal(arg)[1...-1] end else arg end end # Execute a prepared statement on the database using the given name. def execute_prepared_statement(conn, type, name, opts, &block) ps = prepared_statement(name) sql = ps.prepared_sql args = opts[:arguments] ps_args = {} args.each{|k, v| ps_args[k] = prepared_statement_argument(v)} if cpsa = conn.prepared_statements[name] cps, cps_sql = cpsa if cps_sql != sql cps.close cps = nil end end unless cps cps = log_connection_yield("PREPARE #{name}: #{sql}", conn){conn.prepare(sql)} conn.prepared_statements[name] = [cps, sql] end log_sql = String.new log_sql << "EXECUTE #{name}" if ps.log_sql log_sql << " (" log_sql << sql log_sql << ")" end if block log_connection_yield(log_sql, conn, args){cps.execute(ps_args, &block)} else log_connection_yield(log_sql, conn, args){cps.execute!(ps_args){|r|}} case type when :insert conn.last_insert_row_id when :update conn.changes end end end # SQLite3 raises ArgumentError in addition to SQLite3::Exception in # some cases, such as operations on a closed database. def database_error_classes [SQLite3::Exception, ArgumentError] end def dataset_class_default Dataset end if USE_EXTENDED_RESULT_CODES # Support SQLite exception codes if ruby-sqlite3 supports them. def sqlite_error_code(exception) exception.code if exception.respond_to?(:code) end end end class Dataset < Sequel::Dataset include ::Sequel::SQLite::DatasetMethods module ArgumentMapper include Sequel::Dataset::ArgumentMapper protected # Return a hash with the same values as the given hash, # but with the keys converted to strings. def map_to_prepared_args(hash) args = {} hash.each{|k,v| args[k.to_s.gsub('.', '__')] = v} args end private # SQLite uses a : before the name of the argument for named # arguments. def prepared_arg(k) LiteralString.new("#{prepared_arg_placeholder}#{k.to_s.gsub('.', '__')}") end end BindArgumentMethods = prepared_statements_module(:bind, ArgumentMapper) PreparedStatementMethods = prepared_statements_module(:prepare, BindArgumentMethods) # Support regexp functions if using :setup_regexp_function Database option. def complex_expression_sql_append(sql, op, args) case op when :~, :'!~', :'~*', :'!~*' return super unless supports_regexp? 
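# At this point the Database was created with :setup_regexp_function,
# so the REGEXP operator is backed by the Ruby function registered in
# Database#connect. The case-insensitive variants (~* and !~*) are
# emulated by wrapping both operands in LOWER, and the negated variants
# are emulated by prefixing NOT, since SQLite has no native support.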
case_insensitive = [:'~*', :'!~*'].include?(op) sql << 'NOT ' if [:'!~', :'!~*'].include?(op) sql << '(' sql << 'LOWER(' if case_insensitive literal_append(sql, args[0]) sql << ')' if case_insensitive sql << ' REGEXP ' sql << 'LOWER(' if case_insensitive literal_append(sql, args[1]) sql << ')' if case_insensitive sql << ')' else super end end def fetch_rows(sql) execute(sql) do |result| cps = db.conversion_procs type_procs = result.types.map{|t| cps[base_type_name(t)]} j = -1 cols = result.columns.map{|c| [output_identifier(c), type_procs[(j+=1)]]} self.columns = cols.map(&:first) max = cols.length result.each do |values| row = {} i = -1 while (i += 1) < max name, type_proc = cols[i] v = values[i] if type_proc && v v = type_proc.call(v) end row[name] = v end yield row end end end # Support regexp if using :setup_regexp_function Database option. def supports_regexp? db.allow_regexp? end private # The base type name for a given type, without any parenthetical part. def base_type_name(t) (t =~ /^(.*?)\(/ ? $1 : t).downcase if t end # Quote the string using the adapter class method. def literal_string_append(sql, v) sql << "'" << ::SQLite3::Database.quote(v) << "'" end def bound_variable_modules [BindArgumentMethods] end def prepared_statement_modules [PreparedStatementMethods] end # SQLite uses a : before the name of the argument as a placeholder. def prepared_arg_placeholder ':' end end end end sequel-5.63.0/lib/sequel/adapters/tinytds.rb000066400000000000000000000200221434214120600207660ustar00rootroot00000000000000# frozen-string-literal: true require 'tiny_tds' require_relative 'shared/mssql' module Sequel module TinyTDS class Database < Sequel::Database include Sequel::MSSQL::DatabaseMethods set_adapter_scheme :tinytds # Transfer the :user option to the :username option. def connect(server) opts = server_opts(server) opts[:username] = opts[:user] c = TinyTds::Client.new(opts) c.query_options.merge!(:cache_rows=>false) # SEQUEL6: Default to ansi: true if opts[:ansi] sql = %w( ANSI_NULLS ANSI_PADDING ANSI_WARNINGS ANSI_NULL_DFLT_ON QUOTED_IDENTIFIER CONCAT_NULL_YIELDS_NULL ).map{|v| "SET #{v} ON"}.join(";") log_connection_yield(sql, c){c.execute(sql)} end if (ts = opts[:textsize]) sql = "SET TEXTSIZE #{typecast_value_integer(ts)}" log_connection_yield(sql, c){c.execute(sql)} end c end # Execute the given +sql+ on the server. If the :return option # is present, its value should be a method symbol that is called # on the TinyTds::Result object returned from executing the # +sql+. The value of such a method is returned to the caller. # Otherwise, if a block is given, it is yielded the result object. # If no block is given and a :return is not present, +nil+ is returned. def execute(sql, opts=OPTS) synchronize(opts[:server]) do |c| begin m = opts[:return] r = nil if (args = opts[:arguments]) && !args.empty? 
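# Arguments are present, so emulate a parameterized call by building
# an sp_executesql invocation: one "@name type" declaration and one
# "@name = value" assignment per argument, with the literal value and
# type chosen by ps_arg_type below.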
types = [] values = [] args.each_with_index do |(k, v), i| v, type = ps_arg_type(v) types << "@#{k} #{type}" values << "@#{k} = #{v}" end case m when :do sql = "#{sql}; SELECT @@ROWCOUNT AS AffectedRows" single_value = true when :insert sql = "#{sql}; SELECT CAST(SCOPE_IDENTITY() AS bigint) AS Ident" single_value = true end sql = "EXEC sp_executesql N'#{c.escape(sql)}', N'#{c.escape(types.join(', '))}', #{values.join(', ')}" log_connection_yield(sql, c) do r = c.execute(sql) r.each{|row| return row.values.first} if single_value end else log_connection_yield(sql, c) do r = c.execute(sql) return r.public_send(m) if m end end yield(r) if defined?(yield) rescue TinyTds::Error => e raise_error(e, :disconnect=>!c.active?) ensure r.cancel if r && c.sqlsent? && c.active? end end end def execute_dui(sql, opts=OPTS) opts = Hash[opts] opts[:return] = :do execute(sql, opts) end def execute_insert(sql, opts=OPTS) opts = Hash[opts] opts[:return] = :insert execute(sql, opts) end def execute_ddl(sql, opts=OPTS) opts = Hash[opts] opts[:return] = :each execute(sql, opts) nil end private # Choose whether to use unicode strings on initialization def adapter_initialize set_mssql_unicode_strings end # For some reason, unless you specify a column can be # NULL, it assumes NOT NULL, so turn NULL on by default unless # the column is a primary key column. def column_list_sql(g) pks = [] g.constraints.each{|c| pks = c[:columns] if c[:type] == :primary_key} g.columns.each{|c| c[:null] = true if !pks.include?(c[:name]) && !c[:primary_key] && !c.has_key?(:null) && !c.has_key?(:allow_null)} super end # tiny_tds uses TinyTds::Error as the base error class. def database_error_classes [TinyTds::Error] end # Stupid MSSQL maps foreign key and check constraint violations # to the same error code, and doesn't expose the sqlstate. Use # database error numbers if present and unambiguous, otherwise # fallback to the regexp mapping. def database_specific_error_class(exception, opts) case exception.db_error_number when 515 NotNullConstraintViolation when 2627 UniqueConstraintViolation else super end end def dataset_class_default Dataset end # Return true if the :conn argument is present and not active. def disconnect_error?(e, opts) super || (opts[:conn] && !opts[:conn].active?) || ((e.is_a?(::TinyTds::Error) && /\A(Attempt to initiate a new Adaptive Server operation with results pending|The request failed to run because the batch is aborted, this can be caused by abort signal sent from client|Adaptive Server connection timed out|DBPROCESS is dead or not enabled)/.match(e.message))) end # Dispose of any possible results of execution. def log_connection_execute(conn, sql) log_connection_yield(sql, conn){conn.execute(sql).each} end # Return a 2 element array with the literal value and type to use # in the prepared statement call for the given value and connection. 
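# For example (a hedged sketch of the mapping; the exact literal used
# for strings depends on whether unicode strings are enabled):
#
#   ps_arg_type(1)    # => [1, 'bigint']
#   ps_arg_type(nil)  # => ['NULL', 'nvarchar(max)']
#   ps_arg_type(true) # => ['1', 'int']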
def ps_arg_type(v) case v when Integer [v, 'bigint'] when Float [v, 'double precision'] when Numeric [v, 'numeric'] when Time if v.is_a?(SQLTime) [literal(v), 'time'] else [literal(v), 'datetime'] end when DateTime [literal(v), 'datetime'] when Date [literal(v), 'date'] when nil ['NULL', 'nvarchar(max)'] when true ['1', 'int'] when false ['0', 'int'] when SQL::Blob [literal(v), 'varbinary(max)'] else [literal(v), 'nvarchar(max)'] end end end class Dataset < Sequel::Dataset include Sequel::MSSQL::DatasetMethods module ArgumentMapper include Sequel::Dataset::ArgumentMapper protected def map_to_prepared_args(hash) args = {} hash.each{|k,v| args[k.to_s.gsub('.', '__')] = v} args end private def prepared_arg(k) LiteralString.new("@#{k.to_s.gsub('.', '__')}") end end PreparedStatementMethods = prepared_statements_module("sql = prepared_sql; opts = Hash[opts]; opts[:arguments] = bind_arguments", ArgumentMapper) def fetch_rows(sql) execute(sql) do |result| # Mutating an array in the result is questionable, but supported # by tiny_tds developers (tiny_tds issue #57) columns = result.fields.map!{|c| output_identifier(c)} if columns.empty? args = [] args << {:timezone=>:utc} if db.timezone == :utc cols = nil result.each(*args) do |r| unless cols cols = result.fields.map{|c| [c, output_identifier(c)]} self.columns = columns = cols.map(&:last) end h = {} cols.each do |s, sym| h[sym] = r[s] end yield h end else self.columns = columns if db.timezone == :utc result.each(:timezone=>:utc){|r| yield r} else result.each{|r| yield r} end end end self end private # Properly escape the given string def literal_string_append(sql, v) sql << (mssql_unicode_strings ? "N'" : "'") sql << db.synchronize(@opts[:server]){|c| c.escape(v)}.gsub(/\\((?:\r\n)|\n)/, '\\\\\\\\\\1\\1') << "'" end def prepared_statement_modules [PreparedStatementMethods] end end end end sequel-5.63.0/lib/sequel/adapters/utils/000077500000000000000000000000001434214120600201075ustar00rootroot00000000000000sequel-5.63.0/lib/sequel/adapters/utils/columns_limit_1.rb000066400000000000000000000010601434214120600235270ustar00rootroot00000000000000# frozen-string-literal: true module Sequel class Dataset module ColumnsLimit1 COLUMNS_CLONE_OPTIONS = {:distinct => nil, :limit => 1, :offset=>nil, :where=>nil, :having=>nil, :order=>nil, :row_proc=>nil, :graph=>nil, :eager_graph=>nil}.freeze # Use a limit of 1 instead of a limit of 0 when # getting the columns. def columns! ds = clone(COLUMNS_CLONE_OPTIONS) ds.each{break} if cols = ds.cache[:_columns] self.columns = cols else [] end end end end end sequel-5.63.0/lib/sequel/adapters/utils/emulate_offset_with_reverse_and_count.rb000066400000000000000000000050101434214120600302520ustar00rootroot00000000000000# frozen-string-literal: true module Sequel module EmulateOffsetWithReverseAndCount # Make empty? work with an offset with an order. # By default it would break since the order would be based on # a column that empty does not select. def empty? if o = @opts[:offset] unlimited.count <= o else super end end # Emulate OFFSET support using reverse order in a subselect, requiring # a count of the number of rows. # # If offset is used, an order must be provided, since it needs to be # reversed in the subselect. Note that the order needs to be unambiguous # to work correctly, and you must select all columns that you are ordering on. def select_sql return super if @opts[:sql] return super unless o = @opts[:offset] order = @opts[:order] || default_offset_order if order.nil? || order.empty? 
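# Without an order, the reversed subselect built below would be
# ambiguous, so refuse to emulate the offset. As a sketch of the
# technique: with 10 total rows, OFFSET 3 keeps rows 4..10 by reversing
# the order and limiting to 10-3=7, then restoring the order and
# applying the real limit in the outer query.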
raise(Error, "#{db.database_type} requires an order be provided if using an offset") end ds = unlimited row_count = @opts[:offset_total_count] || ds.clone(:append_sql=>String.new, :placeholder_literal_null=>true).count dsa1 = dataset_alias(1) if o.is_a?(Symbol) && @opts[:bind_vars] && /\A\$(.*)\z/ =~ o # Handle use of bound variable offsets. Unfortunately, prepared statement # bound variable offsets cannot be handled, since the bound variable value # isn't available until later. o = prepared_arg($1.to_sym) end reverse_offset = row_count - o ds = if reverse_offset > 0 ds.limit(reverse_offset). reverse(*order). from_self(:alias=>dsa1). limit(@opts[:limit]). order(*order) else # Sequel doesn't allow a nonpositive limit. If the offset # is greater than the number of rows, the empty result set # should be returned, so use a condition that is always false. ds.where(1=>0) end sql = @opts[:append_sql] || String.new subselect_sql_append(sql, ds) sql end # This does not support offsets in correlated subqueries, as it requires a query to get # a count that will be invalid if a correlated subquery is used. def supports_offsets_in_correlated_subqueries? false end private # The default order to use for datasets with offsets, if no order is defined. # By default, orders by all of the columns in the dataset. def default_offset_order clone(:append_sql=>String.new, :offset=>nil).columns end end end sequel-5.63.0/lib/sequel/adapters/utils/emulate_offset_with_row_number.rb000066400000000000000000000060731434214120600267360ustar00rootroot00000000000000# frozen-string-literal: true module Sequel module EmulateOffsetWithRowNumber # If the offset must be emulated with ROW_NUMBER, don't remove any ordering, # because it can cause invalid queries to be issued if an offset is required # when ordering. def empty? return super unless emulate_offset_with_row_number? select(Dataset::EMPTY_SELECT).limit(1).single_value!.nil? end # Emulate OFFSET support with the ROW_NUMBER window function # # The implementation is ugly, cloning the current dataset and modifying # the clone to add a ROW_NUMBER window function (and some other things), # then using the modified clone in a subselect which is selected from. # # If offset is used, an order must be provided, because the use of ROW_NUMBER # requires an order. def select_sql return super unless emulate_offset_with_row_number? offset = @opts[:offset] order = @opts[:order] if require_offset_order? order ||= default_offset_order if order.nil? || order.empty? raise(Error, "#{db.database_type} requires an order be provided if using an offset") end end columns = clone(:append_sql=>String.new, :placeholder_literal_null=>true).columns dsa1 = dataset_alias(1) rn = row_number_column sql = @opts[:append_sql] || String.new subselect_sql_append(sql, unlimited. unordered. select_append(Sequel.function(:ROW_NUMBER).over(:order=>order).as(rn)). from_self(:alias=>dsa1). select(*columns). limit(@opts[:limit]). where(SQL::Identifier.new(rn) > offset). order(rn)) sql end # This does not support offsets in correlated subqueries, as it requires a query to get # the columns that will be invalid if a correlated subquery is used. def supports_offsets_in_correlated_subqueries? false end private # Allow preparing prepared statements, since determining the prepared sql to use for # a prepared statement requires calling prepare on that statement. def allow_preparing_prepared_statements? true end # The default order to use for datasets with offsets, if no order is defined.
# By default, orders by all of the columns in the dataset. def default_offset_order if (cols = opts[:select]) cols.each do |c| case c when Symbol return [split_alias(c).first] when SQL::Identifier, SQL::QualifiedIdentifier return [c] when SQL::AliasedExpression case c.expression when Symbol, SQL::Identifier, SQL::QualifiedIdentifier return [c.expression] end end end end clone(:append_sql=>String.new).columns end # Whether an order is required when using offset emulation via ROW_NUMBER, true by default. def require_offset_order? true end # Whether to use ROW_NUMBER to emulate offsets def emulate_offset_with_row_number? @opts[:offset] && !@opts[:sql] end end end sequel-5.63.0/lib/sequel/adapters/utils/mysql_mysql2.rb000066400000000000000000000060041434214120600231100ustar00rootroot00000000000000# frozen-string-literal: true require_relative '../shared/mysql' require_relative 'stored_procedures' module Sequel module MySQL # This module is used by the mysql and mysql2 adapters to support # prepared statements and stored procedures. module MysqlMysql2 module DatabaseMethods disconnect_errors = <<-END.split("\n").map(&:strip) Commands out of sync; you can't run this command now Can't connect to local MySQL server through socket MySQL server has gone away Lost connection to MySQL server during query MySQL client is not connected This connection is still waiting for a result, try again once you have the result closed MySQL connection The MySQL server is running with the --read-only option so it cannot execute this statement Connection was killed END # Error messages for mysql and mysql2 that indicate the current connection should be disconnected MYSQL_DATABASE_DISCONNECT_ERRORS = /\A#{Regexp.union(disconnect_errors)}/ # Support stored procedures on MySQL def call_sproc(name, opts=OPTS, &block) args = opts[:args] || [] execute("CALL #{name}#{args.empty? ? '()' : literal(args)}", opts.merge(:sproc=>false), &block) end # Executes the given SQL using an available connection, yielding the # connection if the block is given. def execute(sql, opts=OPTS, &block) if opts[:sproc] call_sproc(sql, opts, &block) elsif sql.is_a?(Symbol) || sql.is_a?(Sequel::Dataset::ArgumentMapper) execute_prepared_statement(sql, opts, &block) else synchronize(opts[:server]){|conn| _execute(conn, sql, opts, &block)} end end private def add_prepared_statements_cache(conn) class << conn attr_accessor :prepared_statements end conn.prepared_statements = {} end def database_specific_error_class(exception, opts) case exception.errno when 1048 NotNullConstraintViolation when 1062 UniqueConstraintViolation when 1451, 1452, 1216, 1217 ForeignKeyConstraintViolation when 4025 CheckConstraintViolation when 1205 DatabaseLockTimeout else super end end end module DatasetMethods include Sequel::Dataset::StoredProcedures StoredProcedureMethods = Sequel::Dataset.send(:prepared_statements_module, "sql = @opts[:sproc_name]; opts = Hash[opts]; opts[:args] = @opts[:sproc_args]; opts[:sproc] = true", Sequel::Dataset::StoredProcedureMethods, %w'execute execute_dui') private # Extend the dataset with the MySQL stored procedure methods. def prepare_extend_sproc(ds) ds.with_extend(StoredProcedureMethods) end end end end end sequel-5.63.0/lib/sequel/adapters/utils/mysql_prepared_statements.rb000066400000000000000000000041711434214120600257350ustar00rootroot00000000000000# frozen-string-literal: true module Sequel module MySQL module PreparedStatements module DatabaseMethods private # Executes a prepared statement on an available connection. 
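# (As a hedged sketch, for a one-argument statement named :ps the
# connection roughly sees the following SQL, where the table name is
# hypothetical:
#
#   PREPARE ps FROM 'SELECT * FROM t WHERE id = ?'
#   SET @sequel_arg_1 = 1
#   EXECUTE ps USING @sequel_arg_1
#
# )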
If the # prepared statement already exists for the connection and has the same # SQL, reuse it, otherwise, prepare the new statement. Issue a SET # query with literalized values for each argument, then an EXECUTE to # execute the query with the arguments. def execute_prepared_statement(ps_name, opts, &block) args = opts[:arguments] ps = prepared_statement(ps_name) sql = ps.prepared_sql synchronize(opts[:server]) do |conn| unless conn.prepared_statements[ps_name] == sql _execute(conn, "PREPARE #{ps_name} FROM #{literal(sql)}", opts) conn.prepared_statements[ps_name] = sql end i = 0 _execute(conn, "SET " + args.map {|arg| "@sequel_arg_#{i+=1} = #{literal(arg)}"}.join(", "), opts) unless args.empty? opts = opts.merge(:log_sql=>" (#{sql})") if ps.log_sql _execute(conn, "EXECUTE #{ps_name}#{" USING #{(1..i).map{|j| "@sequel_arg_#{j}"}.join(', ')}" unless i == 0}", opts, &block) end end end module DatasetMethods module CallableStatementMethods # Extend given dataset with this module so subselects inside subselects in # prepared statements work. def subselect_sql_append(sql, ds) ds.clone(:append_sql=>sql, :prepared_args=>prepared_args, :bind_vars=>@opts[:bind_vars]). send(:to_prepared_statement, :select, nil, :extend=>bound_variable_modules). prepared_sql end end PreparedStatementMethods = Sequel::Dataset.send(:prepared_statements_module, :prepare_bind, Sequel::Dataset::UnnumberedArgumentMapper) private def bound_variable_modules [CallableStatementMethods] end def prepared_statement_modules [PreparedStatementMethods] end end end end end sequel-5.63.0/lib/sequel/adapters/utils/replace.rb000066400000000000000000000015221434214120600220470ustar00rootroot00000000000000# frozen-string-literal: true module Sequel class Dataset module Replace # Execute a REPLACE statement on the database (deletes any duplicate # rows before inserting). def replace(*values) execute_insert(replace_sql(*values)) end # SQL statement for REPLACE def replace_sql(*values) clone(:replace=>true).insert_sql(*values) end # Replace multiple rows in a single query. def multi_replace(*values) clone(:replace=>true).multi_insert(*values) end # Databases using this module support REPLACE. def supports_replace? true end private # If this is an replace instead of an insert, use replace instead def insert_insert_sql(sql) sql << (@opts[:replace] ? 'REPLACE' : 'INSERT') end end end end sequel-5.63.0/lib/sequel/adapters/utils/split_alter_table.rb000066400000000000000000000027351434214120600241340ustar00rootroot00000000000000# frozen-string-literal: true module Sequel::Database::SplitAlterTable private # Preprocess the array of operations. If it looks like some operations depend # on results of earlier operations and may require reloading the schema to # work correctly, split those operations into separate lists, and between each # list, remove the cached schema so that the later operations deal with the # then current table schema. 
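# For example (hypothetical table and column), the following two
# operations touch the same column, so they are run as two separate
# ALTER TABLE statements with a schema reload in between:
#
#   alter_table(:items) do
#     add_column :price, Integer
#     set_column_default :price, 0
#   end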
def apply_alter_table(name, ops) modified_columns = [] op_groups = [[]] ops.each do |op| case op[:op] when :add_column, :set_column_type, :set_column_null, :set_column_default if modified_columns.include?(op[:name]) op_groups << [] else modified_columns << op[:name] end when :rename_column if modified_columns.include?(op[:name]) || modified_columns.include?(op[:new_name]) op_groups << [] end modified_columns << op[:name] unless modified_columns.include?(op[:name]) modified_columns << op[:new_name] unless modified_columns.include?(op[:new_name]) end if split_alter_table_op?(op) op_groups << [] end op_groups.last << op end op_groups.each do |opgs| next if opgs.empty? alter_table_sql_list(name, opgs).each{|sql| execute_ddl(sql)} remove_cached_schema(name) end end # Whether the given alter table op should start a new group. def split_alter_table_op?(op) false end end sequel-5.63.0/lib/sequel/ast_transformer.rb000066400000000000000000000102541434214120600207040ustar00rootroot00000000000000# frozen-string-literal: true module Sequel # The +ASTTransformer+ class is designed to handle the abstract syntax trees # that Sequel uses internally and produce modified copies of them. By itself # it only produces a straight copy. It's designed to be subclassed and have # subclasses return modified copies of the specific nodes that need to # be modified. class ASTTransformer # Return +obj+ or a potentially transformed version of it.
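# For example, using the Qualifier subclass defined below (a hedged
# sketch; the table name is hypothetical):
#
#   Sequel::Qualifier.new(:items).transform(Sequel[:id] > 1)
#   # => expression equivalent to Sequel[:items][:id] > 1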
def transform(obj) v(obj) end private # Recursive version that handles all of Sequel's internal object types # and produces copies of them. def v(o) case o when Symbol, Numeric, String, Class, TrueClass, FalseClass, NilClass o when Array o.map{|x| v(x)} when Hash h = {} o.each{|k, val| h[v(k)] = v(val)} h when SQL::NumericExpression if o.op == :extract o.class.new(o.op, o.args[0], v(o.args[1])) else o.class.new(o.op, *v(o.args)) end when SQL::ComplexExpression o.class.new(o.op, *v(o.args)) when SQL::Identifier SQL::Identifier.new(v(o.value)) when SQL::QualifiedIdentifier SQL::QualifiedIdentifier.new(v(o.table), v(o.column)) when SQL::OrderedExpression SQL::OrderedExpression.new(v(o.expression), o.descending, :nulls=>o.nulls) when SQL::AliasedExpression SQL::AliasedExpression.new(v(o.expression), o.alias, o.columns) when SQL::CaseExpression args = [v(o.conditions), v(o.default)] args << v(o.expression) if o.expression? SQL::CaseExpression.new(*args) when SQL::Cast SQL::Cast.new(v(o.expr), o.type) when SQL::Function h = {} o.opts.each do |k, val| h[k] = v(val) end SQL::Function.new!(o.name, v(o.args), h) when SQL::Subscript SQL::Subscript.new(v(o.expression), v(o.sub)) when SQL::Window opts = o.opts.dup opts[:partition] = v(opts[:partition]) if opts[:partition] opts[:order] = v(opts[:order]) if opts[:order] SQL::Window.new(opts) when SQL::PlaceholderLiteralString args = if o.args.is_a?(Hash) h = {} o.args.each{|k,val| h[k] = v(val)} h else v(o.args) end SQL::PlaceholderLiteralString.new(o.str, args, o.parens) when SQL::JoinOnClause SQL::JoinOnClause.new(v(o.on), o.join_type, v(o.table_expr)) when SQL::JoinUsingClause SQL::JoinUsingClause.new(v(o.using), o.join_type, v(o.table_expr)) when SQL::JoinClause SQL::JoinClause.new(o.join_type, v(o.table_expr)) when SQL::DelayedEvaluation SQL::DelayedEvaluation.new(lambda{|ds| v(o.call(ds))}) when SQL::Wrapper SQL::Wrapper.new(v(o.value)) when SQL::Expression if o.respond_to?(:sequel_ast_transform) o.sequel_ast_transform(method(:v)) else o end else o end end end # Handles qualifying existing datasets, so that unqualified columns # in the dataset are qualified with a given table name. class Qualifier < ASTTransformer # Set the table used to qualify unqualified columns def initialize(table) @table = table end private # Turn SQL::Identifiers and symbols that aren't implicitly # qualified into SQL::QualifiedIdentifiers. For symbols that # are not implicitly qualified but are implicitly aliased, return an # SQL::AliasedExpression with a qualified version of the symbol. def v(o) case o when Symbol t, column, aliaz = Sequel.split_symbol(o) if t o elsif aliaz SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(@table, SQL::Identifier.new(column)), aliaz) else SQL::QualifiedIdentifier.new(@table, o) end when SQL::Identifier SQL::QualifiedIdentifier.new(@table, o) when SQL::QualifiedIdentifier, SQL::JoinClause # Return these directly, so we don't accidentally qualify symbols in them. o else super end end end end sequel-5.63.0/lib/sequel/connection_pool.rb000066400000000000000000000136301434214120600206640ustar00rootroot00000000000000# frozen-string-literal: true # The base connection pool class, which all other connection pools are based # on. This class is not instantiated directly, but subclasses should at # the very least implement the following API: # # initialize(Database, Hash) :: Initialize using the passed Sequel::Database # object and options hash.
# hold(Symbol, &block) :: Yield a connection object (obtained from calling # the block passed to +initialize+) to the current block. For sharded # connection pools, the Symbol passed is the shard/server to use. # disconnect(Symbol) :: Disconnect the connection object. For sharded # connection pools, the Symbol passed is the shard/server to use. # servers :: An array of shard/server symbols for all shards/servers that this # connection pool recognizes. # size :: an integer representing the total number of connections in the pool, # or for the given shard/server if sharding is supported. # max_size :: an integer representing the maximum size of the connection pool, # or the maximum size per shard/server if sharding is supported. # # For sharded connection pools, the sharded API adds the following methods: # # add_servers(Array of Symbols) :: start recognizing all shards/servers specified # by the array of symbols. # remove_servers(Array of Symbols) :: no longer recognize all shards/servers # specified by the array of symbols. class Sequel::ConnectionPool OPTS = Sequel::OPTS POOL_CLASS_MAP = { :threaded => :ThreadedConnectionPool, :single => :SingleConnectionPool, :sharded_threaded => :ShardedThreadedConnectionPool, :sharded_single => :ShardedSingleConnectionPool, :timed_queue => :TimedQueueConnectionPool, } POOL_CLASS_MAP.to_a.each{|k, v| POOL_CLASS_MAP[k.to_s] = v} POOL_CLASS_MAP.freeze # Class methods used to return an appropriate pool subclass, separated # into a module for easier overriding by extensions. module ClassMethods # Return a pool subclass instance based on the given options. If a :pool_class # option is provided, use that pool class, otherwise # use a new instance of an appropriate pool subclass based on the # :single_threaded and :servers options. def get_pool(db, opts = OPTS) connection_pool_class(opts).new(db, opts) end private # Return a connection pool class based on the given options. def connection_pool_class(opts) if pc = opts[:pool_class] unless pc.is_a?(Class) unless name = POOL_CLASS_MAP[pc] raise Sequel::Error, "unsupported connection pool type, please pass appropriate class as the :pool_class option" end require_relative "connection_pool/#{pc}" pc = Sequel.const_get(name) end pc else pc = if opts[:single_threaded] opts[:servers] ? :sharded_single : :single else opts[:servers] ? :sharded_threaded : :threaded end connection_pool_class(:pool_class=>pc) end end end extend ClassMethods # The after_connect proc used for this pool. This is called with each new # connection made, and is usually used to set custom per-connection settings. # Deprecated. attr_reader :after_connect # SEQUEL6: Remove # Override the after_connect proc for the connection pool. Deprecated. # Disables support for shard-specific :after_connect and :connect_sqls if used. def after_connect=(v) # SEQUEL6: Remove @use_old_connect_api = true @after_connect = v end # An array of sql strings to execute on each new connection. Deprecated. attr_reader :connect_sqls # SEQUEL6: Remove # Override the connect_sqls for the connection pool. Deprecated. # Disables support for shard-specific :after_connect and :connect_sqls if used. def connect_sqls=(v) # SEQUEL6: Remove @use_old_connect_api = true @connect_sqls = v end # The Sequel::Database object tied to this connection pool. attr_accessor :db # Instantiates a connection pool with the given Database and options.
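# The pool class is normally chosen via Database options rather than
# instantiated directly, for example (a hedged sketch using an SQLite URL):
#
#   Sequel.connect('sqlite://blog.db', pool_class: :timed_queue) # Ruby 3.2+
#   Sequel.connect('sqlite://blog.db', single_threaded: true)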
def initialize(db, opts=OPTS) # SEQUEL6: Remove second argument, always use db.opts @db = db @use_old_connect_api = false # SEQUEL6: Remove @after_connect = opts[:after_connect] # SEQUEL6: Remove @connect_sqls = opts[:connect_sqls] # SEQUEL6: Remove @error_classes = db.send(:database_error_classes).dup.freeze end # An array of symbols for all shards/servers, which is a single :default by default. def servers [:default] end private # Remove the connection from the pool. For threaded connections, this should be # called without the mutex, because the disconnection may block. def disconnect_connection(conn) db.disconnect_connection(conn) end # Whether the given exception is a disconnect exception. def disconnect_error?(exception) exception.is_a?(Sequel::DatabaseDisconnectError) || db.send(:disconnect_error?, exception, OPTS) end # Return a new connection by calling the connection proc with the given server name, # and checking for connection errors. def make_new(server) begin if @use_old_connect_api # SEQUEL6: Remove block conn = @db.connect(server) if ac = @after_connect if ac.arity == 2 ac.call(conn, server) else ac.call(conn) end end if cs = @connect_sqls cs.each do |sql| db.send(:log_connection_execute, conn, sql) end end conn else @db.new_connection(server) end rescue Exception=>exception raise Sequel.convert_exception_class(exception, Sequel::DatabaseConnectionError) end || raise(Sequel::DatabaseConnectionError, "Connection parameters not valid") end end sequel-5.63.0/lib/sequel/connection_pool/000077500000000000000000000000001434214120600203345ustar00rootroot00000000000000sequel-5.63.0/lib/sequel/connection_pool/sharded_single.rb000066400000000000000000000070431434214120600236400ustar00rootroot00000000000000# frozen-string-literal: true # A ShardedSingleConnectionPool is a single threaded connection pool that # works with multiple shards/servers. class Sequel::ShardedSingleConnectionPool < Sequel::ConnectionPool # The single threaded pool takes the following options: # # :servers :: A hash of servers to use. Keys should be symbols. If not # present, will use a single :default server. # :servers_hash :: The base hash to use for the servers. By default, # Sequel uses Hash.new(:default). You can use a hash with a default proc # that raises an error if you want to catch all cases where a nonexistent # server is used. def initialize(db, opts=OPTS) super @conns = {} @servers = opts.fetch(:servers_hash, Hash.new(:default)) add_servers([:default]) add_servers(opts[:servers].keys) if opts[:servers] end # Adds new servers to the connection pool. Primarily used in conjunction with primary/replica # or sharded configurations. Allows for dynamic expansion of the potential replicas/shards # at runtime. +servers+ argument should be an array of symbols. def add_servers(servers) servers.each{|s| @servers[s] = s} end # Yield all of the currently established connections def all_connections @conns.values.each{|c| yield c} end # The connection for the given server. def conn(server=:default) @conns[@servers[server]] end # Disconnects from the database. Once a connection is requested using # #hold, the connection is reestablished. Options: # :server :: Should be a symbol specifying the server to disconnect from, # or an array of symbols to specify multiple servers. def disconnect(opts=OPTS) (opts[:server] ?
Array(opts[:server]) : servers).each do |s| raise Sequel::Error, "invalid server: #{s}" unless @servers.has_key?(s) disconnect_server(s) end end def freeze @servers.freeze super end # Yields the connection to the supplied block for the given server. # This method simulates the ConnectionPool#hold API. def hold(server=:default) server = pick_server(server) yield(@conns[server] ||= make_new(server)) rescue Sequel::DatabaseDisconnectError, *@error_classes => e disconnect_server(server) if disconnect_error?(e) raise end # The ShardedSingleConnectionPool always has a maximum size of 1. def max_size 1 end # Remove servers from the connection pool. Similar to disconnecting from all given servers, # except that after it is used, future requests for the server will use the # :default server instead. def remove_servers(servers) raise(Sequel::Error, "cannot remove default server") if servers.include?(:default) servers.each do |server| disconnect_server(server) @servers.delete(server) end end # Return an array of symbols for servers in the connection pool. def servers @servers.keys end # The number of different shards/servers this pool is connected to. def size @conns.length end def pool_type :sharded_single end private # Disconnect from the given server, if connected. def disconnect_server(server) if conn = @conns.delete(server) disconnect_connection(conn) end end # If the server given is in the hash, return it, otherwise, return the default server. def pick_server(server) @servers[server] end # Make sure there is a valid connection for each server. def preconnect(concurrent = nil) servers.each{|s| hold(s){}} end end sequel-5.63.0/lib/sequel/connection_pool/sharded_threaded.rb000066400000000000000000000313151434214120600241360ustar00rootroot00000000000000# frozen-string-literal: true require_relative 'threaded' # The slowest and most advanced connection pool, dealing with both multi-threaded # access and configurations with multiple shards/servers. # # In addition, this pool subclass also handles scheduling in-use connections # to be removed from the pool when they are returned to it. class Sequel::ShardedThreadedConnectionPool < Sequel::ThreadedConnectionPool # The following additional options are respected: # :servers :: A hash of servers to use. Keys should be symbols. If not # present, will use a single :default server. # :servers_hash :: The base hash to use for the servers. By default, # Sequel uses Hash.new(:default). You can use a hash with a default proc # that raises an error if you want to catch all cases where a nonexistent # server is used. def initialize(db, opts = OPTS) super @available_connections = {} @connections_to_remove = [] @connections_to_disconnect = [] @servers = opts.fetch(:servers_hash, Hash.new(:default)) remove_instance_variable(:@waiter) remove_instance_variable(:@allocated) @allocated = {} @waiters = {} add_servers([:default]) add_servers(opts[:servers].keys) if opts[:servers] end # Adds new servers to the connection pool. Allows for dynamic expansion of the potential replicas/shards # at runtime. +servers+ argument should be an array of symbols. def add_servers(servers) sync do servers.each do |server| unless @servers.has_key?(server) @servers[server] = server @available_connections[server] = [] allocated = {} allocated.compare_by_identity @allocated[server] = allocated @waiters[server] = ConditionVariable.new end end end end # A hash of connections currently being used for the given server, key is the # Thread, value is the connection. Nonexistent servers will return nil.
Treat # this as read only, do not modify the resulting object. # The calling code should already have the mutex before calling this. def allocated(server=:default) @allocated[server] end # Yield all of the available connections, and the ones currently allocated to # this thread. This will not yield connections currently allocated to other # threads, as it is not safe to operate on them. This holds the mutex while # it is yielding all of the connections, which means that until # the method's block returns, the pool is locked. def all_connections t = Sequel.current sync do @allocated.values.each do |threads| threads.each do |thread, conn| yield conn if t == thread end end @available_connections.values.each{|v| v.each{|c| yield c}} end end # An array of connections opened but not currently used, for the given # server. Nonexistent servers will return nil. Treat this as read only, do # not modify the resulting object. # The calling code should already have the mutex before calling this. def available_connections(server=:default) @available_connections[server] end # The total number of connections opened for the given server. # Nonexistent servers will return the created count of the default server. # The calling code should NOT have the mutex before calling this. def size(server=:default) @mutex.synchronize{_size(server)} end # Removes all connections currently available on all servers, optionally # yielding each connection to the given block. This method has the effect of # disconnecting from the database, assuming that no connections are currently # being used. If connections are being used, they are scheduled to be # disconnected as soon as they are returned to the pool. # # Once a connection is requested using #hold, the connection pool # creates new connections to the database. Options: # :server :: Should be a symbol specifying the server to disconnect from, # or an array of symbols to specify multiple servers. def disconnect(opts=OPTS) (opts[:server] ? Array(opts[:server]) : sync{@servers.keys}).each do |s| disconnect_connections(sync{disconnect_server_connections(s)}) end end def freeze @servers.freeze super end # Chooses the first available connection to the given server, or if none are # available, creates a new connection. Passes the connection to the supplied # block: # # pool.hold {|conn| conn.execute('DROP TABLE posts')} # # Pool#hold is re-entrant, meaning it can be called recursively in # the same thread without blocking. # # If no connection is immediately available and the pool is already using the maximum # number of connections, Pool#hold will block until a connection # is available or the timeout expires. If the timeout expires before a # connection can be acquired, a Sequel::PoolTimeout is raised. def hold(server=:default) server = pick_server(server) t = Sequel.current if conn = owned_connection(t, server) return yield(conn) end begin conn = acquire(t, server) yield conn rescue Sequel::DatabaseDisconnectError, *@error_classes => e sync{@connections_to_remove << conn} if conn && disconnect_error?(e) raise ensure sync{release(t, conn, server)} if conn while dconn = sync{@connections_to_disconnect.shift} disconnect_connection(dconn) end end end # Remove servers from the connection pool. Similar to disconnecting from all given servers, # except that after it is used, future requests for the server will use the # :default server instead.
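# For example (hypothetical shard names):
#
#   DB.pool.remove_servers([:shard1, :shard2])
#   DB.pool.hold(:shard1){|conn| } # now uses the :default server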
def remove_servers(servers) conns = nil sync do raise(Sequel::Error, "cannot remove default server") if servers.include?(:default) servers.each do |server| if @servers.include?(server) conns = disconnect_server_connections(server) @waiters.delete(server) @available_connections.delete(server) @allocated.delete(server) @servers.delete(server) end end end if conns disconnect_connections(conns) end end # Return an array of symbols for servers in the connection pool. def servers sync{@servers.keys} end def pool_type :sharded_threaded end private # The total number of connections opened for the given server. # The calling code should already have the mutex before calling this. def _size(server) server = @servers[server] @allocated[server].length + @available_connections[server].length end # Assigns a connection to the supplied thread, if one # is available. The calling code should NOT already have the mutex when # calling this. # # This should return a connection if one is available within the timeout, # or nil if a connection could not be acquired within the timeout. def acquire(thread, server) if conn = assign_connection(thread, server) return conn end timeout = @timeout timer = Sequel.start_timer sync do @waiters[server].wait(@mutex, timeout) if conn = next_available(server) return(allocated(server)[thread] = conn) end end until conn = assign_connection(thread, server) elapsed = Sequel.elapsed_seconds_since(timer) # :nocov: raise_pool_timeout(elapsed, server) if elapsed > timeout # It's difficult to get to this point, it can only happen if there is a race condition # where a connection cannot be acquired even after the thread is signalled by the condition variable sync do @waiters[server].wait(@mutex, timeout - elapsed) if conn = next_available(server) return(allocated(server)[thread] = conn) end end # :nocov: end conn end # Assign a connection to the thread, or return nil if one cannot be assigned. # The caller should NOT have the mutex before calling this. def assign_connection(thread, server) alloc = nil do_make_new = false sync do alloc = allocated(server) if conn = next_available(server) alloc[thread] = conn return conn end if (n = _size(server)) >= (max = @max_size) alloc.to_a.each do |t,c| unless t.alive? remove(t, c, server) end end n = nil end if (n || _size(server)) < max do_make_new = alloc[thread] = true end end # Connect to the database outside of the connection pool mutex, # as that can take a long time and the connection pool mutex # shouldn't be locked while the connection takes place. if do_make_new begin conn = make_new(server) sync{alloc[thread] = conn} ensure unless conn sync{alloc.delete(thread)} end end end conn end # Return a connection to the pool of available connections for the server, # returns the connection. The calling code should already have the mutex # before calling this. def checkin_connection(server, conn) available_connections(server) << conn @waiters[server].signal conn end # Clear the array of available connections for the server, returning an array # of previous available connections that should be disconnected (or nil if none should be). # Mark any allocated connections to be removed when they are checked back in. The calling # code should already have the mutex before calling this.
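# The returned array is meant to be passed to disconnect_connections
# by the caller after the mutex has been released, since disconnecting
# may block.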
def disconnect_server_connections(server) remove_conns = allocated(server) dis_conns = available_connections(server) raise Sequel::Error, "invalid server: #{server}" unless remove_conns && dis_conns @connections_to_remove.concat(remove_conns.values) conns = dis_conns.dup dis_conns.clear @waiters[server].signal conns end # Disconnect all available connections immediately, and schedule currently allocated connections for disconnection # as soon as they are returned to the pool. The calling code should NOT # have the mutex before calling this. def disconnect_connections(conns) conns.each{|conn| disconnect_connection(conn)} end # Return the next available connection in the pool for the given server, or nil # if there is not currently an available connection for the server. # The calling code should already have the mutex before calling this. def next_available(server) case @connection_handling when :stack available_connections(server).pop else available_connections(server).shift end end # Returns the connection owned by the supplied thread for the given server, # if any. The calling code should NOT already have the mutex before calling this. def owned_connection(thread, server) sync{@allocated[server][thread]} end # If the server given is in the hash, return it, otherwise, return the default server. def pick_server(server) sync{@servers[server]} end # Create the maximum number of connections immediately. The calling code should # NOT have the mutex before calling this. def preconnect(concurrent = false) conn_servers = @servers.keys.map!{|s| Array.new(max_size - _size(s), s)}.flatten! if concurrent conn_servers.map!{|s| Thread.new{[s, make_new(s)]}}.map!(&:value) else conn_servers.map!{|s| [s, make_new(s)]} end sync{conn_servers.each{|s, conn| checkin_connection(s, conn)}} end # Raise a PoolTimeout error showing the current timeout, the elapsed time, the server # the connection attempt was made to, and the database's name (if any). def raise_pool_timeout(elapsed, server) name = db.opts[:name] raise ::Sequel::PoolTimeout, "timeout: #{@timeout}, elapsed: #{elapsed}, server: #{server}#{", database name: #{name}" if name}" end # Releases the connection assigned to the supplied thread and server. If the # server or connection given is scheduled for disconnection, remove the # connection instead of releasing it back to the pool. # The calling code should already have the mutex before calling this. def release(thread, conn, server) if @connections_to_remove.include?(conn) remove(thread, conn, server) else conn = allocated(server).delete(thread) if @connection_handling == :disconnect @connections_to_disconnect << conn else checkin_connection(server, conn) end end if waiter = @waiters[server] waiter.signal end end # Removes the currently allocated connection from the connection pool. The # calling code should already have the mutex before calling this. def remove(thread, conn, server) @connections_to_remove.delete(conn) allocated(server).delete(thread) if @servers.include?(server) @connections_to_disconnect << conn end end sequel-5.63.0/lib/sequel/connection_pool/single.rb000066400000000000000000000023421434214120600221430ustar00rootroot00000000000000# frozen-string-literal: true # This is the fastest connection pool, since it isn't a connection pool at all. # It is just a wrapper around a single connection that uses the connection pool # API. class Sequel::SingleConnectionPool < Sequel::ConnectionPool def initialize(db, opts=OPTS) super @conn = [] end # Yield the connection if one has been made. 
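# For example:
#
#   DB.pool.all_connections{|conn| puts conn.class}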
def all_connections yield @conn.first unless @conn.empty? end # Disconnect the connection from the database. def disconnect(opts=nil) return unless c = @conn.first disconnect_connection(c) @conn.clear nil end # Yield the connection to the block. def hold(server=nil) unless c = @conn.first @conn.replace([c = make_new(:default)]) end yield c rescue Sequel::DatabaseDisconnectError, *@error_classes => e disconnect if disconnect_error?(e) raise end # The SingleConnectionPool always has a maximum size of 1. def max_size 1 end def pool_type :single end # The SingleConnectionPool always has a size of 1 if connected # and 0 if not. def size @conn.empty? ? 0 : 1 end private # Make sure there is a valid connection. def preconnect(concurrent = nil) hold{} end end sequel-5.63.0/lib/sequel/connection_pool/threaded.rb000066400000000000000000000216011434214120600224410ustar00rootroot00000000000000# frozen-string-literal: true # A connection pool allowing multi-threaded access to a pool of connections. # This is the default connection pool used by Sequel. class Sequel::ThreadedConnectionPool < Sequel::ConnectionPool USE_WAITER = true # SEQUEL6: Remove Sequel::Deprecation.deprecate_constant(self, :USE_WAITER) # The maximum number of connections this pool will create (per shard/server # if sharding). attr_reader :max_size # An array of connections that are available for use by the pool. # The calling code should already have the mutex before calling this. attr_reader :available_connections # SEQUEL6: Remove # A hash with thread/fiber keys and connection values for currently allocated connections. # The calling code should already have the mutex before calling this. attr_reader :allocated # SEQUEL6: Remove # The following additional options are respected: # :max_connections :: The maximum number of connections the connection pool # will open (default 4) # :pool_timeout :: The amount of seconds to wait to acquire a connection # before raising a PoolTimeout error (default 5) def initialize(db, opts = OPTS) super @max_size = Integer(opts[:max_connections] || 4) raise(Sequel::Error, ':max_connections must be positive') if @max_size < 1 @mutex = Mutex.new @connection_handling = opts[:connection_handling] @available_connections = [] @allocated = {} @allocated.compare_by_identity @timeout = Float(opts[:pool_timeout] || 5) @waiter = ConditionVariable.new end # Yield all of the available connections, and the one currently allocated to # this thread. This will not yield connections currently allocated to other # threads, as it is not safe to operate on them. This holds the mutex while # it is yielding all of the available connections, which means that until # the method's block returns, the pool is locked. def all_connections hold do |c| sync do yield c @available_connections.each{|conn| yield conn} end end end # Removes all connections currently available. This method has the effect of # disconnecting from the database, assuming that no connections are currently # being used. If you want to be able to disconnect connections that are # currently in use, use the ShardedThreadedConnectionPool, which can do that. # This connection pool does not, for performance reasons. To use the sharded pool, # pass the servers: {} option when connecting to the database. # # Once a connection is requested using #hold, the connection pool # creates new connections to the database. 
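# For example (via Database#disconnect, which delegates to the pool):
#
#   DB.disconnect # closes all available connections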
def disconnect(opts=OPTS) conns = nil sync do conns = @available_connections.dup @available_connections.clear @waiter.signal end conns.each{|conn| disconnect_connection(conn)} end # Chooses the first available connection, or if none are # available, creates a new connection. Passes the connection to the supplied # block: # # pool.hold {|conn| conn.execute('DROP TABLE posts')} # # Pool#hold is re-entrant, meaning it can be called recursively in # the same thread without blocking. # # If no connection is immediately available and the pool is already using the maximum # number of connections, Pool#hold will block until a connection # is available or the timeout expires. If the timeout expires before a # connection can be acquired, a Sequel::PoolTimeout is raised. def hold(server=nil) t = Sequel.current if conn = owned_connection(t) return yield(conn) end begin conn = acquire(t) yield conn rescue Sequel::DatabaseDisconnectError, *@error_classes => e if disconnect_error?(e) oconn = conn conn = nil disconnect_connection(oconn) if oconn sync do @allocated.delete(t) @waiter.signal end end raise ensure if conn sync{release(t)} if @connection_handling == :disconnect disconnect_connection(conn) end end end end def pool_type :threaded end # The total number of connections opened, either available or allocated. # The calling code should not have the mutex before calling this. def size @mutex.synchronize{_size} end private # The total number of connections opened, either available or allocated. # The calling code should already have the mutex before calling this. def _size @allocated.length + @available_connections.length end # Assigns a connection to the supplied thread, if one # is available. The calling code should NOT already have the mutex when # calling this. # # This should return a connection if one is available within the timeout, # or raise PoolTimeout if a connection could not be acquired within the timeout. def acquire(thread) if conn = assign_connection(thread) return conn end timeout = @timeout timer = Sequel.start_timer sync do @waiter.wait(@mutex, timeout) if conn = next_available return(@allocated[thread] = conn) end end until conn = assign_connection(thread) elapsed = Sequel.elapsed_seconds_since(timer) # :nocov: raise_pool_timeout(elapsed) if elapsed > timeout # It's difficult to get to this point, it can only happen if there is a race condition # where a connection cannot be acquired even after the thread is signalled by the condition variable sync do @waiter.wait(@mutex, timeout - elapsed) if conn = next_available return(@allocated[thread] = conn) end end # :nocov: end conn end # Assign a connection to the thread, or return nil if one cannot be assigned. # The caller should NOT have the mutex before calling this. def assign_connection(thread) # Thread safe as instance variable is only assigned to local variable # and not operated on outside mutex. allocated = @allocated do_make_new = false to_disconnect = nil sync do if conn = next_available return(allocated[thread] = conn) end if (n = _size) >= (max = @max_size) allocated.keys.each do |t| unless t.alive? (to_disconnect ||= []) << allocated.delete(t) end end n = nil end if (n || _size) < max do_make_new = allocated[thread] = true end end if to_disconnect to_disconnect.each{|dconn| disconnect_connection(dconn)} end # Connect to the database outside of the connection pool mutex, # as that can take a long time and the connection pool mutex # shouldn't be locked while the connection takes place.
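# The pattern: a true placeholder was stored under the mutex above to
# reserve the slot; it is replaced with the real connection on success,
# and deleted in the ensure block if the connection attempt raises.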
if do_make_new begin conn = make_new(:default) sync{allocated[thread] = conn} ensure unless conn sync{allocated.delete(thread)} end end end conn end # Return a connection to the pool of available connections, returns the connection. # The calling code should already have the mutex before calling this. def checkin_connection(conn) @available_connections << conn conn end # Return the next available connection in the pool, or nil if there # is not currently an available connection. The calling code should already # have the mutex before calling this. def next_available case @connection_handling when :stack @available_connections.pop else @available_connections.shift end end # Returns the connection owned by the supplied thread, # if any. The calling code should NOT already have the mutex before calling this. def owned_connection(thread) sync{@allocated[thread]} end # Create the maximum number of connections immediately. The calling code should # NOT have the mutex before calling this. def preconnect(concurrent = false) enum = (max_size - _size).times conns = if concurrent enum.map{Thread.new{make_new(:default)}}.map(&:value) else enum.map{make_new(:default)} end sync{conns.each{|conn| checkin_connection(conn)}} end # Raise a PoolTimeout error showing the current timeout, the elapsed time, and the # database's name (if any). def raise_pool_timeout(elapsed) name = db.opts[:name] raise ::Sequel::PoolTimeout, "timeout: #{@timeout}, elapsed: #{elapsed}#{", database name: #{name}" if name}" end # Releases the connection assigned to the supplied thread back to the pool. # The calling code should already have the mutex before calling this. def release(thread) conn = @allocated.delete(thread) unless @connection_handling == :disconnect checkin_connection(conn) end @waiter.signal nil end # Yield to the block while inside the mutex. The calling code should NOT # already have the mutex before calling this. def sync @mutex.synchronize{yield} end end
sequel-5.63.0/lib/sequel/connection_pool/timed_queue.rb
# frozen-string-literal: true # :nocov: raise LoadError, "Sequel::TimedQueueConnectionPool is only available on Ruby 3.2+" unless RUBY_VERSION >= '3.2' # :nocov: # A connection pool allowing multi-threaded access to a pool of connections, # using a timed queue (only available in Ruby 3.2+). class Sequel::TimedQueueConnectionPool < Sequel::ConnectionPool # The maximum number of connections this pool will create. attr_reader :max_size # The following additional options are respected: # :max_connections :: The maximum number of connections the connection pool # will open (default 4) # :pool_timeout :: The number of seconds to wait to acquire a connection # before raising a PoolTimeout (default 5) def initialize(db, opts = OPTS) super @max_size = Integer(opts[:max_connections] || 4) raise(Sequel::Error, ':max_connections must be positive') if @max_size < 1 @mutex = Mutex.new # Size inside array so this still works while the pool is frozen. @size = [0] @allocated = {} @allocated.compare_by_identity @timeout = Float(opts[:pool_timeout] || 5) @queue = Queue.new end # Yield all of the available connections, and the one currently allocated to # this thread. This will not yield connections currently allocated to other # threads, as it is not safe to operate on them. def all_connections hold do |conn| yield conn # Use a hash to record all connections already seen.
As soon as we # come across a connection we've already seen, we stop the loop. conns = {} conns.compare_by_identity while true conn = nil begin break unless (conn = @queue.pop(timeout: 0)) && !conns[conn] conns[conn] = true yield conn ensure @queue.push(conn) if conn end end end end # Removes all connections currently in the pool's queue. This method has the effect of # disconnecting from the database, assuming that no connections are currently # being used. # # Once a connection is requested using #hold, the connection pool # creates new connections to the database. def disconnect(opts=OPTS) while conn = @queue.pop(timeout: 0) disconnect_connection(conn) end fill_queue nil end # Chooses the first available connection, or if none are # available, creates a new connection. Passes the connection to the supplied # block: # # pool.hold {|conn| conn.execute('DROP TABLE posts')} # # Pool#hold is re-entrant, meaning it can be called recursively in # the same thread without blocking. # # If no connection is immediately available and the pool is already using the maximum # number of connections, Pool#hold will block until a connection # is available or the timeout expires. If the timeout expires before a # connection can be acquired, a Sequel::PoolTimeout is raised. def hold(server=nil) t = Sequel.current if conn = sync{@allocated[t]} return yield(conn) end begin conn = acquire(t) yield conn rescue Sequel::DatabaseDisconnectError, *@error_classes => e if disconnect_error?(e) oconn = conn conn = nil disconnect_connection(oconn) if oconn sync{@allocated.delete(t)} fill_queue end raise ensure release(t) if conn end end def pool_type :timed_queue end # The total number of connections in the pool. def size sync{@size[0]} end private # Create a new connection, after the pool's current size has already # been updated to account for the new connection. If there is an exception # when creating the connection, decrement the current size. # # This should only be called after can_make_new?. If there is an exception # between when can_make_new? is called and when preallocated_make_new # is called, it has the effect of reducing the maximum size of the # connection pool by 1, since the current size of the pool will show a # higher number than the number of connections allocated or # in the queue. # # Calling code should not have the mutex when calling this. def preallocated_make_new make_new(:default) rescue Exception sync{@size[0] -= 1} raise end # Decrement the current size of the pool when disconnecting connections. # # Calling code should not have the mutex when calling this. def disconnect_connection(conn) sync{@size[0] -= 1} super end # If there are any threads waiting on the queue, try to create # new connections in a separate thread if the pool is not yet at the # maximum size. # # The reason for this method is to handle cases where acquire # could not retrieve a connection immediately, and the pool # was already at the maximum size. In that case, the acquire will # wait on the queue until the timeout. This method is called # after disconnecting to potentially add new connections to the # pool, so the threads that are currently waiting for connections # do not timeout after the pool is no longer full. def fill_queue if @queue.num_waiting > 0 Thread.new do while @queue.num_waiting > 0 && (conn = try_make_new) @queue.push(conn) end end end end # Whether the given size is less than the maximum size of the pool. # In that case, the pool's current size is incremented. 
If this # method returns true, space in the pool for the connection is # preallocated, and preallocated_make_new should be called to # create the connection. # # Calling code should have the mutex when calling this. def can_make_new?(current_size) if @max_size > current_size @size[0] += 1 end end # Try to make a new connection if there is space in the pool. # If the pool is already full, look for dead threads/fibers and # disconnect the related connections. # # Calling code should not have the mutex when calling this. def try_make_new return preallocated_make_new if sync{can_make_new?(@size[0])} to_disconnect = nil do_make_new = false sync do current_size = @size[0] @allocated.keys.each do |t| unless t.alive? (to_disconnect ||= []) << @allocated.delete(t) current_size -= 1 end end do_make_new = true if can_make_new?(current_size) end begin preallocated_make_new if do_make_new ensure if to_disconnect to_disconnect.each{|conn| disconnect_connection(conn)} fill_queue end end end # Assigns a connection to the supplied thread, if one # is available. # # This should return a connection if one is available within the timeout, # or raise PoolTimeout if a connection could not be acquired within the timeout. # # Calling code should not have the mutex when calling this. def acquire(thread) if conn = @queue.pop(timeout: 0) || try_make_new || @queue.pop(timeout: @timeout) sync{@allocated[thread] = conn} else name = db.opts[:name] raise ::Sequel::PoolTimeout, "timeout: #{@timeout}#{", database name: #{name}" if name}" end end # Create the maximum number of connections immediately. This should not be called # with a true argument unless no code is currently operating on the database. # # Calling code should not have the mutex when calling this. def preconnect(concurrent = false) if concurrent if times = sync{@max_size > (size = @size[0]) ? @max_size - size : false} times.times.map{Thread.new{if conn = try_make_new; @queue.push(conn) end}}.map(&:value) end else while conn = try_make_new @queue.push(conn) end end nil end # Releases the connection assigned to the supplied thread back to the pool. # # Calling code should not have the mutex when calling this. def release(thread) @queue.push(sync{@allocated.delete(thread)}) end # Yield to the block while inside the mutex. # # Calling code should not have the mutex when calling this. def sync @mutex.synchronize{yield} end end
sequel-5.63.0/lib/sequel/core.rb
# frozen-string-literal: true %w'bigdecimal date thread time uri'.each{|f| require f} # Top level module for Sequel # # There are some module methods that are added via metaprogramming, one for # each supported adapter. For example: # # DB = Sequel.sqlite # Memory database # DB = Sequel.sqlite('blog.db') # DB = Sequel.postgres('database_name', # user:'user', # password: 'password', # host: 'host', # port: 5432, # max_connections: 10) # # If a block is given to these methods, it is passed the opened Database # object, which is closed (disconnected) when the block exits, just # like a block passed to Sequel.connect. For example: # # Sequel.sqlite('blog.db'){|db| puts db[:users].count} # # For a more expanded introduction, see the {README}[rdoc-ref:README.rdoc]. # For a quicker introduction, see the {cheat sheet}[rdoc-ref:doc/cheat_sheet.rdoc].
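# A minimal runnable sketch along those lines (assuming the sqlite3 gem is
# installed; the table and column names here are made up for illustration):
#
#   require 'sequel'
#   DB = Sequel.sqlite # memory database
#   DB.create_table(:items){primary_key :id; String :name}
#   DB[:items].insert(name: 'a')
#   puts DB[:items].count # => 1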
module Sequel @convert_two_digit_years = true @datetime_class = Time @split_symbols = false @single_threaded = false # Mutex used to protect mutable data structures @data_mutex = Mutex.new # Frozen hash used as the default options hash for most options. OPTS = {}.freeze SPLIT_SYMBOL_CACHE = {} module SequelMethods # Sequel converts two digit years in Dates and DateTimes by default, # so 01/02/03 is interpreted as January 2nd, 2003, and 12/13/99 is interpreted # as December 13, 1999. You can override this to treat those dates as # January 2nd, 0003 and December 13, 0099, respectively, by: # # Sequel.convert_two_digit_years = false attr_accessor :convert_two_digit_years # Sequel can use either +Time+ or +DateTime+ for times returned from the # database. It defaults to +Time+. To change it to +DateTime+: # # Sequel.datetime_class = DateTime # # Note that +Time+ and +DateTime+ objects have a different API, and in # cases where they implement the same methods, they often implement them # differently (e.g. + using seconds on +Time+ and days on +DateTime+). attr_accessor :datetime_class # Set whether Sequel is being used in single threaded mode. By default, # Sequel uses a thread-safe connection pool, which isn't as fast as the # single threaded connection pool, and also has some additional thread # safety checks. If your program will only have one thread, # and speed is a priority, you should set this to true: # # Sequel.single_threaded = true attr_accessor :single_threaded # Alias of original require method, as Sequel.require does a relative # require for backwards compatibility. alias orig_require require private :orig_require # Returns true if the passed object could be a specifier of conditions, false otherwise. # Currently, Sequel considers hashes and arrays of two element arrays as # condition specifiers. # # Sequel.condition_specifier?({}) # => true # Sequel.condition_specifier?([[1, 2]]) # => true # Sequel.condition_specifier?([]) # => false # Sequel.condition_specifier?([1]) # => false # Sequel.condition_specifier?(1) # => false def condition_specifier?(obj) case obj when Hash true when Array !obj.empty? && !obj.is_a?(SQL::ValueList) && obj.all?{|i| i.is_a?(Array) && (i.length == 2)} else false end end # Creates a new database object based on the supplied connection string # and optional arguments. The specified scheme determines the database # class used, and the rest of the string specifies the connection options. # For example: # # DB = Sequel.connect('sqlite:/') # Memory database # DB = Sequel.connect('sqlite://blog.db') # ./blog.db # DB = Sequel.connect('sqlite:///blog.db') # /blog.db # DB = Sequel.connect('postgres://user:password@host:port/database_name') # DB = Sequel.connect('sqlite:///blog.db', max_connections: 10) # # You can also pass a single options hash: # # DB = Sequel.connect(adapter: 'sqlite', database: './blog.db') # # If a block is given, it is passed the opened +Database+ object, which is # closed when the block exits. For example: # # Sequel.connect('sqlite://blog.db'){|db| puts db[:users].count} # # If a block is not given, a reference to this database will be held in # Sequel::DATABASES until it is removed manually. This is by # design, and used by Sequel::Model to pick the default # database. It is recommended to pass a block if you do not want the # resulting Database object to remain in memory until the process # terminates, or use the keep_reference: false Database option.
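#
# A hedged sketch of avoiding the retained reference (using the option
# documented above and the sqlite adapter for illustration):
#
#   Sequel.connect('sqlite:/'){|db| db.get(1)} # block form; disconnected on exit
#   db = Sequel.connect('sqlite:/', keep_reference: false)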
# # For details, see the {"Connecting to a Database" guide}[rdoc-ref:doc/opening_databases.rdoc]. # To set up a primary/replica or sharded database connection, see the {"Primary/Replica Database Configurations and Sharding" guide}[rdoc-ref:doc/sharding.rdoc]. def connect(*args, &block) Database.connect(*args, &block) end # Assume the core extensions are not loaded by default; if the core_extensions # extension is loaded, this will be overridden. def core_extensions? false end # Convert the +exception+ to the given class. The given class should be # Sequel::Error or a subclass. Returns an instance of +klass+ with # the message and backtrace of +exception+. def convert_exception_class(exception, klass) return exception if exception.is_a?(klass) e = klass.new("#{exception.class}: #{exception.message}") e.wrapped_exception = exception e.set_backtrace(exception.backtrace) e end # The current concurrency primitive, Thread.current by default. def current Thread.current end # Load all Sequel extensions given. Extensions are just files that exist under # sequel/extensions in the load path, and are just required. # In some cases, requiring an extension modifies classes directly, and in others, # it just loads a module that you can extend other classes with. Consult the documentation # for each extension you plan on using for usage. # # Sequel.extension(:blank) # Sequel.extension(:core_extensions, :named_timezones) def extension(*extensions) extensions.each{|e| orig_require("sequel/extensions/#{e}")} end # The exception class raised if there is an error parsing JSON. # This can be overridden to use an alternative json implementation. def json_parser_error_class JSON::ParserError end # Convert given object to json and return the result. # This can be overridden to use an alternative json implementation. def object_to_json(obj, *args, &block) obj.to_json(*args, &block) end # Parse the string as JSON and return the result. # This can be overridden to use an alternative json implementation. def parse_json(json) JSON.parse(json, :create_additions=>false) end # If a mutex is given, synchronize access using it. If nil is given, just # yield to the block. This is designed for cases where a mutex may or may # not be provided. def synchronize_with(mutex) if mutex mutex.synchronize{yield} else yield end end # Convert each item in the array to the correct type, handling multi-dimensional # arrays. For each element in the array or subarrays, call the converter, # unless the value is nil. def recursive_map(array, converter) array.map do |i| if i.is_a?(Array) recursive_map(i, converter) elsif !i.nil? converter.call(i) end end end # For backwards compatibility only. require_relative should be used instead. def require(files, subdir=nil) # Use Kernel.require_relative to work around JRuby 9.0 bug Array(files).each{|f| Kernel.require_relative "#{"#{subdir}/" if subdir}#{f}"} end # Splits the symbol into three parts, if symbol splitting is enabled (not the default). # Each part will either be a string or nil. If symbol splitting # is disabled, returns an array with the first and third parts # being nil, and the second part being a string version of the symbol. # # For columns, these parts are the table, column, and alias. # For tables, these parts are the schema, table, and alias. def split_symbol(sym) unless v = Sequel.synchronize{SPLIT_SYMBOL_CACHE[sym]} if split_symbols?
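# Descriptive note: the case below tries the most specific pattern first
# (table__column___alias), then column___alias, then table__column; the
# negative lookaheads make each split happen at the first __ or ___
# rather than the last.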
v = case s = sym.to_s when /\A((?:(?!__).)+)__((?:(?!___).)+)___(.+)\z/ [$1.freeze, $2.freeze, $3.freeze].freeze when /\A((?:(?!___).)+)___(.+)\z/ [nil, $1.freeze, $2.freeze].freeze when /\A((?:(?!__).)+)__(.+)\z/ [$1.freeze, $2.freeze, nil].freeze else [nil, s.freeze, nil].freeze end else v = [nil,sym.to_s.freeze,nil].freeze end Sequel.synchronize{SPLIT_SYMBOL_CACHE[sym] = v} end v end # Setting this to true enables Sequel's historical behavior of splitting # symbols on double or triple underscores: # # :table__column # table.column # :column___alias # column AS alias # :table__column___alias # table.column AS alias # # It is only recommended to turn this on for backwards compatibility until # such symbols have been converted to use newer Sequel APIs such as: # # Sequel[:table][:column] # table.column # Sequel[:column].as(:alias) # column AS alias # Sequel[:table][:column].as(:alias) # table.column AS alias # # Sequel::Database instances do their own caching of literalized # symbols, and changing this setting does not affect those caches. It is # recommended that if you want to change this setting, you do so directly # after requiring Sequel, before creating any Sequel::Database instances. # # Disabling symbol splitting will also disable the handling # of double underscores in virtual row methods, causing such methods to # yield regular identifiers instead of qualified identifiers: # # # Sequel.split_symbols = true # Sequel.expr{table__column} # table.column # Sequel.expr{table[:column]} # table.column # # # Sequel.split_symbols = false # Sequel.expr{table__column} # table__column # Sequel.expr{table[:column]} # table.column def split_symbols=(v) Sequel.synchronize{SPLIT_SYMBOL_CACHE.clear} @split_symbols = v end # Whether Sequel currently splits symbols into qualified/aliased identifiers. def split_symbols? @split_symbols end # Converts the given +string+ into a +Date+ object. # # Sequel.string_to_date('2010-09-10') # Date.civil(2010, 09, 10) def string_to_date(string) Date.parse(string, Sequel.convert_two_digit_years) rescue => e raise convert_exception_class(e, InvalidValue) end # Converts the given +string+ into a +Time+ or +DateTime+ object, depending on the # value of Sequel.datetime_class. # # Sequel.string_to_datetime('2010-09-10 10:20:30') # Time.local(2010, 09, 10, 10, 20, 30) def string_to_datetime(string) if datetime_class == DateTime DateTime.parse(string, convert_two_digit_years) else datetime_class.parse(string) end rescue => e raise convert_exception_class(e, InvalidValue) end # Converts the given +string+ into a Sequel::SQLTime object. # # v = Sequel.string_to_time('10:20:30') # Sequel::SQLTime.parse('10:20:30') # DB.literal(v) # => '10:20:30' def string_to_time(string) SQLTime.parse(string) rescue => e raise convert_exception_class(e, InvalidValue) end # Unless in single threaded mode, protects access to any mutable # global data structure in Sequel. # Uses a non-reentrant mutex, so calling code should be careful. # In general, this should only be used around the minimal possible code # such as Hash#[], Hash#[]=, Hash#delete, Array#<<, and Array#delete. def synchronize(&block) @single_threaded ? yield : @data_mutex.synchronize(&block) end if RUBY_VERSION >= '2.1' # A timer object that can be passed to Sequel.elapsed_seconds_since # to return the number of seconds elapsed. def start_timer Process.clock_gettime(Process::CLOCK_MONOTONIC) end else # :nocov: def start_timer # :nodoc: Time.now end # :nocov: end # The elapsed seconds since the given timer object was created.
The # timer object should have been created via Sequel.start_timer. def elapsed_seconds_since(timer) start_timer - timer end # Uses a transaction on all given databases with the given options. This: # # Sequel.transaction([DB1, DB2, DB3]){} # # is equivalent to: # # DB1.transaction do # DB2.transaction do # DB3.transaction do # end # end # end # # except that if Sequel::Rollback is raised by the block, the transaction is # rolled back on all databases instead of just the last one. # # Note that this method cannot guarantee that all databases will commit or # rollback. For example, if DB3 commits but attempting to commit on DB2 # fails (maybe because foreign key checks are deferred), there is no way # to uncommit the changes on DB3. For that kind of support, you need to # have two-phase commit/prepared transactions (which Sequel supports on # some databases). def transaction(dbs, opts=OPTS, &block) unless opts[:rollback] rescue_rollback = true opts = Hash[opts].merge!(:rollback=>:reraise) end pr = dbs.reverse.inject(block){|bl, db| proc{db.transaction(opts, &bl)}} if rescue_rollback begin pr.call rescue Sequel::Rollback nil end else pr.call end end # If the supplied block takes a single argument, # yield an SQL::VirtualRow instance to the block # argument. Otherwise, evaluate the block in the context of a # SQL::VirtualRow instance. # # Sequel.virtual_row{a} # Sequel::SQL::Identifier.new(:a) # Sequel.virtual_row{|o| o.a} # Sequel::SQL::Function.new(:a) def virtual_row(&block) vr = VIRTUAL_ROW case block.arity when -1, 0 vr.instance_exec(&block) else block.call(vr) end end private # Return a hash of date information parsed from the given string. def _date_parse(string) Date._parse(string) end # Helper method that the database adapter class methods that are added to Sequel via # metaprogramming use to parse arguments. def adapter_method(adapter, *args, &block) options = args.last.is_a?(Hash) ? args.pop : OPTS opts = {:adapter => adapter.to_sym} opts[:database] = args.shift if args.first.is_a?(String) if args.any? raise ::Sequel::Error, "Wrong format of arguments, either use (), (String), (Hash), or (String, Hash)" end connect(opts.merge(options), &block) end # Method that adds a database adapter class method to Sequel that calls # Sequel.adapter_method. def def_adapter_method(*adapters) # :nodoc: adapters.each do |adapter| define_singleton_method(adapter){|*args, &block| adapter_method(adapter, *args, &block)} end end end extend SequelMethods require_relative "deprecated" require_relative "sql" require_relative "connection_pool" require_relative "exceptions" require_relative "dataset" require_relative "database" require_relative "timezones" require_relative "ast_transformer" require_relative "version" class << self # Allow nicer syntax for creating Sequel expressions: # # Sequel[1] # => Sequel::SQL::NumericExpression: 1 # Sequel["a"] # => Sequel::SQL::StringExpression: 'a' # Sequel[:a] # => Sequel::SQL::Identifier: "a" # Sequel[a: 1] # => Sequel::SQL::BooleanExpression: ("a" = 1) alias_method :[], :expr end # Add the database adapter class methods to Sequel via metaprogramming def_adapter_method(*Database::ADAPTERS) end
sequel-5.63.0/lib/sequel/database.rb
# frozen-string-literal: true module Sequel # Hash of adapters that have been used. The key is the adapter scheme # symbol, and the value is the Database subclass. ADAPTER_MAP = {} # Hash of shared adapters that have been registered.
The key is the # adapter scheme symbol, and the value is the Sequel module containing # the shared adapter. SHARED_ADAPTER_MAP = {} # Array of all databases to which Sequel has connected. If you are # developing an application that can connect to an arbitrary number of # databases, delete the database objects from this (or use the :keep_reference # Database option or a block when connecting) or they will not get # garbage collected. DATABASES = [] # A Database object represents a virtual connection to a database. # The Database class is meant to be subclassed by database adapters in order # to provide the functionality needed for executing queries. class Database OPTS = Sequel::OPTS end require_relative "database/connecting" require_relative "database/dataset" require_relative "database/dataset_defaults" require_relative "database/logging" require_relative "database/features" require_relative "database/misc" require_relative "database/query" require_relative "database/transactions" require_relative "database/schema_generator" require_relative "database/schema_methods" end
sequel-5.63.0/lib/sequel/database/connecting.rb
# frozen-string-literal: true module Sequel class Database # --------------------- # :section: 4 - Methods relating to adapters, connecting, disconnecting, and sharding # These methods involve the Database's connection pool. # --------------------- # Array of supported database adapters ADAPTERS = %w'ado amalgalite ibmdb jdbc mock mysql mysql2 odbc oracle postgres sqlanywhere sqlite tinytds'.map(&:to_sym) # The Database subclass for the given adapter scheme. # Raises Sequel::AdapterNotFound if the adapter # could not be loaded. def self.adapter_class(scheme) scheme.is_a?(Class) ? scheme : load_adapter(scheme.to_sym) end # Returns the scheme symbol for the Database class. def self.adapter_scheme @scheme end # Connects to a database. See Sequel.connect. def self.connect(conn_string, opts = OPTS) case conn_string when String if conn_string.start_with?('jdbc:') c = adapter_class(:jdbc) opts = opts.merge(:orig_opts=>opts.dup) opts = {:uri=>conn_string}.merge!(opts) else uri = URI.parse(conn_string) scheme = uri.scheme c = adapter_class(scheme) uri_options = c.send(:uri_to_options, uri) uri.query.split('&').map{|s| s.split('=')}.each{|k,v| uri_options[k.to_sym] = v if k && !k.empty?} unless uri.query.to_s.strip.empty? uri_options.to_a.each{|k,v| uri_options[k] = URI::DEFAULT_PARSER.unescape(v) if v.is_a?(String)} opts = uri_options.merge(opts).merge!(:orig_opts=>opts.dup, :uri=>conn_string, :adapter=>scheme) end when Hash opts = conn_string.merge(opts) opts = opts.merge(:orig_opts=>opts.dup) c = adapter_class(opts[:adapter_class] || opts[:adapter] || opts['adapter']) else raise Error, "Sequel::Database.connect takes either a Hash or a String, given: #{conn_string.inspect}" end opts = opts.inject({}) do |m, (k,v)| k = :user if k.to_s == 'username' m[k.to_sym] = v m end begin db = c.new(opts) if defined?(yield) return yield(db) end ensure if defined?(yield) db.disconnect if db Sequel.synchronize{::Sequel::DATABASES.delete(db)} end end db end # Load the adapter from the file system. Raises Sequel::AdapterNotFound # if the adapter cannot be loaded, or if the adapter isn't registered # correctly after being loaded.
Options: # :map :: The Hash in which to look for an already loaded adapter (defaults to ADAPTER_MAP). # :subdir :: The subdirectory of sequel/adapters to look in, only to be used for loading # subadapters. def self.load_adapter(scheme, opts=OPTS) map = opts[:map] || ADAPTER_MAP if subdir = opts[:subdir] file = "#{subdir}/#{scheme}" else file = scheme end unless obj = Sequel.synchronize{map[scheme]} # attempt to load the adapter file begin require "sequel/adapters/#{file}" rescue LoadError => e # If subadapter file doesn't exist, just return, # using the main adapter class without database customizations. return if subdir raise Sequel.convert_exception_class(e, AdapterNotFound) end # make sure we actually loaded the adapter unless obj = Sequel.synchronize{map[scheme]} raise AdapterNotFound, "Could not load #{file} adapter: adapter class not registered in ADAPTER_MAP" end end obj end # Sets the adapter scheme for the Database class. Call this method in # descendants of Database to allow connection using a URL. For example the # following: # # class Sequel::MyDB::Database < Sequel::Database # set_adapter_scheme :mydb # ... # end # # would allow connection using: # # Sequel.connect('mydb://user:password@dbserver/mydb') def self.set_adapter_scheme(scheme) # :nodoc: @scheme = scheme Sequel.synchronize{ADAPTER_MAP[scheme] = self} end private_class_method :set_adapter_scheme # Sets the given module as the shared adapter module for the given scheme. # Used to register shared adapters for use by the mock adapter. Example: # # # in file sequel/adapters/shared/mydb.rb # module Sequel::MyDB # Sequel::Database.set_shared_adapter_scheme :mydb, self # # def self.mock_adapter_setup(db) # # ... # end # # module DatabaseMethods # # ... # end # # module DatasetMethods # # ... # end # end # # would allow the mock adapter to return a Database instance that supports # the MyDB syntax via: # # Sequel.connect('mock://mydb') def self.set_shared_adapter_scheme(scheme, mod) Sequel.synchronize{SHARED_ADAPTER_MAP[scheme] = mod} end # The connection pool for this Database instance. All Database instances have # their own connection pools. attr_reader :pool # Returns the scheme symbol for this instance's class, which reflects which # adapter is being used. In some cases, this can be the same as the # +database_type+ (for native adapters), in others (i.e. adapters with # subadapters), it will be different. # # Sequel.connect('jdbc:postgres://...').adapter_scheme # # => :jdbc def adapter_scheme self.class.adapter_scheme end # Dynamically add new servers or modify server options at runtime. Also adds new # servers to the connection pool. Only usable when using a sharded connection pool. # # servers argument should be a hash with server name symbol keys and hash or # proc values. If a servers key is already in use, its value is overridden # with the value provided. # # DB.add_servers(f: {host: "hash_host_f"}) def add_servers(servers) unless sharded? raise Error, "cannot call Database#add_servers on a Database instance that does not use a sharded connection pool" end h = @opts[:servers] Sequel.synchronize{h.merge!(servers)} @pool.add_servers(servers.keys) end # The database type for this database object, the same as the adapter scheme # by default. Should be overridden in adapters (especially shared adapters) # to be the correct type, so that even if two separate Database objects are # using different adapters you can tell that they are using the same database # type.
Even better, you can tell that two Database objects that are using # the same adapter are connecting to different database types. # # Sequel.connect('jdbc:postgres://...').database_type # # => :postgres def database_type adapter_scheme end # Disconnects all available connections from the connection pool. Any # connections currently in use will not be disconnected. Options: # :server :: Should be a symbol specifying the server to disconnect from, # or an array of symbols to specify multiple servers. # # Example: # # DB.disconnect # All servers # DB.disconnect(server: :server1) # Single server # DB.disconnect(server: [:server1, :server2]) # Multiple servers def disconnect(opts = OPTS) pool.disconnect(opts) end # Should only be called by the connection pool code to disconnect a connection. # By default, calls the close method on the connection object, since most # adapters use that, but should be overwritten on other adapters. def disconnect_connection(conn) conn.close end # Dynamically remove existing servers from the connection pool. Only usable # when using a sharded connection pool. # # servers should be symbols or arrays of symbols. If a nonexistent server # is specified, it is ignored. If no servers have been specified for # this database, no changes are made. If you attempt to remove the :default server, # an error will be raised. # # DB.remove_servers(:f1, :f2) def remove_servers(*servers) unless sharded? raise Error, "cannot call Database#remove_servers on a Database instance that does not use a sharded connection pool" end h = @opts[:servers] servers.flatten.each{|s| Sequel.synchronize{h.delete(s)}} @pool.remove_servers(servers) end # An array of servers/shards for this Database object. # # DB.servers # Unsharded: => [:default] # DB.servers # Sharded: => [:default, :server1, :server2] def servers pool.servers end # Connect to the given server/shard. Handles database-generic post-connection # setup not handled by #connect, using the :after_connect and :connect_sqls # options. def new_connection(server) conn = connect(server) opts = server_opts(server) if ac = opts[:after_connect] if ac.arity == 2 ac.call(conn, server) else ac.call(conn) end end if cs = opts[:connect_sqls] cs.each do |sql| log_connection_execute(conn, sql) end end conn end # Returns true if the database is using a single-threaded connection pool. def single_threaded? @single_threaded end if RUBY_ENGINE == 'ruby' && RUBY_VERSION < '2.5' # :nocov: def synchronize(server=nil) @pool.hold(server || :default){|conn| yield conn} end # :nocov: else # Acquires a database connection, yielding it to the passed block. This is # useful if you want to make sure the same connection is used for all # database queries in the block. It is also useful if you want to gain # direct access to the underlying connection object if you need to do # something Sequel does not natively support. # # If a server option is given, acquires a connection for that specific # server, instead of the :default server. # # DB.synchronize do |conn| # # ... # end def synchronize(server=nil, &block) @pool.hold(server || :default, &block) end end # Attempts to acquire a database connection. Returns true if successful. # Will probably raise an Error if unsuccessful. If a server argument # is given, attempts to acquire a database connection to the given # server/shard. def test_connection(server=nil) synchronize(server){|conn|} true end # Check whether the given connection is currently valid, by # running a query against it.
If the query fails, the # connection should probably be removed from the connection # pool. def valid_connection?(conn) sql = valid_connection_sql begin log_connection_execute(conn, sql) rescue Sequel::DatabaseError, *database_error_classes false else true end end private # The default options for the connection pool. def connection_pool_default_options {} end # Return the options for the given server by merging the generic # options for all servers with the specific options for the given # server specified in the :servers option. def server_opts(server) opts = if @opts[:servers] and server_options = @opts[:servers][server] case server_options when Hash @opts.merge(server_options) when Proc @opts.merge(server_options.call(self)) else raise Error, 'Server opts should be a hash or proc' end elsif server.is_a?(Hash) @opts.merge(server) else @opts.dup end opts.delete(:servers) opts end # The SQL query to issue to check if a connection is valid. def valid_connection_sql @valid_connection_sql ||= select(nil).sql end end end
sequel-5.63.0/lib/sequel/database/dataset.rb
# frozen-string-literal: true module Sequel class Database # --------------------- # :section: 3 - Methods that create datasets # These methods all return instances of this database's dataset class. # --------------------- # Returns a dataset for the database. If the first argument is a string, # the method acts as an alias for Database#fetch, returning a dataset for # arbitrary SQL, with or without placeholders: # # DB['SELECT * FROM items'].all # DB['SELECT * FROM items WHERE name = ?', my_name].all # # Otherwise, acts as an alias for Database#from, setting the primary # table for the dataset: # # DB[:items].sql #=> "SELECT * FROM items" def [](*args) args.first.is_a?(String) ? fetch(*args) : from(*args) end # Returns a blank dataset for this database. # # DB.dataset # SELECT * # DB.dataset.from(:items) # SELECT * FROM items def dataset @dataset_class.new(self) end # Fetches records for an arbitrary SQL statement. If a block is given, # it is used to iterate over the records: # # DB.fetch('SELECT * FROM items'){|r| p r} # # The +fetch+ method returns a dataset instance: # # DB.fetch('SELECT * FROM items').all # # +fetch+ can also perform parameterized queries for protection against SQL # injection: # # DB.fetch('SELECT * FROM items WHERE name = ?', my_name).all # # See caveats listed in Dataset#with_sql regarding datasets using custom # SQL and the methods that can be called on them. def fetch(sql, *args, &block) ds = @default_dataset.with_sql(sql, *args) ds.each(&block) if block ds end # Returns a new dataset with the +from+ method invoked. If a block is given, # it acts as a virtual row block: # # DB.from(:items) # SELECT * FROM items # DB.from{schema[:table]} # SELECT * FROM schema.table def from(*args, &block) if block @default_dataset.from(*args, &block) elsif args.length == 1 && (table = args[0]).is_a?(Symbol) @default_dataset.send(:cached_dataset, :"_from_#{table}_ds"){@default_dataset.from(table)} else @default_dataset.from(*args) end end # Returns a new dataset with the select method invoked.
# # DB.select(1) # SELECT 1 # DB.select{server_version.function} # SELECT server_version() # DB.select(:id).from(:items) # SELECT id FROM items def select(*args, &block) @default_dataset.select(*args, &block) end end end
sequel-5.63.0/lib/sequel/database/dataset_defaults.rb
# frozen-string-literal: true module Sequel class Database # --------------------- # :section: 5 - Methods that set defaults for created datasets # These methods change the default behavior of this database's datasets. # --------------------- # The class to use for creating datasets. Should respond to # new with the Database argument as the first argument, and # an optional options hash. attr_reader :dataset_class # If the database has any dataset modules associated with it, # use a subclass of the given class that includes the modules # as the dataset class. def dataset_class=(c) unless @dataset_modules.empty? c = Class.new(c) @dataset_modules.each{|m| c.send(:include, m)} end @dataset_class = c reset_default_dataset end # Equivalent to extending all datasets produced by the database with a # module. What it actually does is use a subclass of the current dataset_class # as the new dataset_class, and include the module in the subclass. # Instead of a module, you can provide a block that is used to create an # anonymous module. # # This allows you to override any of the dataset methods even if they are # defined directly on the dataset class that this Database object uses. # # If a block is given, a Dataset::DatasetModule instance is created, allowing # for the easy creation of named dataset methods that will do caching. # # Examples: # # # Introspect columns for all of DB's datasets # DB.extend_datasets(Sequel::ColumnsIntrospection) # # # Trace all SELECT queries by printing the SQL and the full backtrace # DB.extend_datasets do # def fetch_rows(sql) # puts sql # puts caller # super # end # end # # # Add some named dataset methods # DB.extend_datasets do # order :by_id, :id # select :with_id_and_name, :id, :name # where :active, :active # end # # DB[:table].active.with_id_and_name.by_id # # SELECT id, name FROM table WHERE active ORDER BY id def extend_datasets(mod=nil, &block) raise(Error, "must provide either mod or block, not both") if mod && block mod = Dataset::DatasetModule.new(&block) if block if @dataset_modules.empty? @dataset_modules = [mod] @dataset_class = Class.new(@dataset_class) else @dataset_modules << mod end @dataset_class.send(:include, mod) reset_default_dataset end private # The default dataset class to use for the database def dataset_class_default Sequel::Dataset end # Reset the default dataset used by most Database methods that create datasets. def reset_default_dataset Sequel.synchronize{@symbol_literal_cache.clear} @default_dataset = dataset end # Whether to quote identifiers by default for this database, true by default. def quote_identifiers_default true end end end
sequel-5.63.0/lib/sequel/database/features.rb
# frozen-string-literal: true module Sequel class Database # --------------------- # :section: 9 - Methods that describe what the database supports # These methods all return booleans, with most describing whether or not the # database supports a given feature. # --------------------- # Whether the database uses a global namespace for the index, true by default. If # false, the indexes are going to be namespaced per table.
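# For example, when this is false, two different tables may each have an
# index named :by_name without the names conflicting.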
def global_index_namespace? true end # Whether the database supports CREATE TABLE IF NOT EXISTS syntax, # false by default. def supports_create_table_if_not_exists? false end # Whether the database supports deferrable constraints, false # by default as few databases do. def supports_deferrable_constraints? false end # Whether the database supports deferrable foreign key constraints, # false by default as few databases do. def supports_deferrable_foreign_key_constraints? supports_deferrable_constraints? end # Whether the database supports DROP TABLE IF EXISTS syntax, # false by default. def supports_drop_table_if_exists? supports_create_table_if_not_exists? end # Whether the database supports Database#foreign_key_list for # parsing foreign keys. def supports_foreign_key_parsing? respond_to?(:foreign_key_list) end # Whether the database supports Database#indexes for parsing indexes. def supports_index_parsing? respond_to?(:indexes) end # Whether the database supports partial indexes (indexes on a subset of a table), # false by default. def supports_partial_indexes? false end # Whether the database and adapter support prepared transactions # (two-phase commit), false by default. def supports_prepared_transactions? false end # Whether the database and adapter support savepoints, false by default. def supports_savepoints? false end # Whether the database and adapter support savepoints inside prepared transactions # (two-phase commit), false by default. def supports_savepoints_in_prepared_transactions? supports_prepared_transactions? && supports_savepoints? end # Whether the database supports schema parsing via Database#schema. def supports_schema_parsing? respond_to?(:schema_parse_table, true) end # Whether the database supports Database#tables for getting list of tables. def supports_table_listing? respond_to?(:tables) end # # Whether the database supports Database#views for getting list of views. def supports_view_listing? respond_to?(:views) end # Whether the database and adapter support transaction isolation levels, false by default. def supports_transaction_isolation_levels? false end # Whether DDL statements work correctly in transactions, false by default. def supports_transactional_ddl? false end # Whether CREATE VIEW ... WITH CHECK OPTION is supported, false by default. def supports_views_with_check_option? !!view_with_check_option_support end # Whether CREATE VIEW ... WITH LOCAL CHECK OPTION is supported, false by default. def supports_views_with_local_check_option? view_with_check_option_support == :local end private # Whether the database supports adding primary key constraints on NULLable columns, # automatically making them NOT NULL. If false, the columns must be set NOT NULL # before the primary key constraint is added. def can_add_primary_key_constraint_on_nullable_columns? true end # Whether this dataset considers unquoted identifiers as uppercase. True # by default as that is the SQL standard def folds_unquoted_identifiers_to_uppercase? true end # Whether the database supports combining multiple alter table # operations into a single query, false by default. def supports_combining_alter_table_ops? false end # Whether the database supports CREATE OR REPLACE VIEW. If not, support # will be emulated by dropping the view first. false by default. def supports_create_or_replace_view? false end # Whether the database supports named column constraints. True # by default. 
Those that don't support named column constraints # have to have column constraints converted to table constraints # if the column constraints have names. def supports_named_column_constraints? true end # Don't advertise support for WITH CHECK OPTION by default. def view_with_check_option_support nil end end end
sequel-5.63.0/lib/sequel/database/logging.rb
# frozen-string-literal: true module Sequel class Database # --------------------- # :section: 6 - Methods relating to logging # These methods relate to the logging of executed SQL. # --------------------- # Numeric specifying the duration beyond which queries are logged at warn # level instead of info level. attr_accessor :log_warn_duration # Array of SQL loggers to use for this database. attr_accessor :loggers # Whether to include information about the connection in use when logging queries. attr_accessor :log_connection_info # Log level at which to log SQL queries. This is actually the method # sent to the logger, so it should be the method name symbol. The default # is :info; it can be set to :debug to log at DEBUG level. attr_accessor :sql_log_level # Log a message at error level, with information about the exception. def log_exception(exception, message) log_each(:error, "#{exception.class}: #{exception.message.strip if exception.message}: #{message}") end # Log a message at level info to all loggers. def log_info(message, args=nil) log_each(:info, args ? "#{message}; #{args.inspect}" : message) end # Yield to the block, logging any errors at error level to all loggers, # and all other queries with the duration at warn or info level. def log_connection_yield(sql, conn, args=nil) return yield if skip_logging? sql = "#{connection_info(conn) if conn && log_connection_info}#{sql}#{"; #{args.inspect}" if args}" timer = Sequel.start_timer begin yield rescue => e log_exception(e, sql) raise ensure log_duration(Sequel.elapsed_seconds_since(timer), sql) unless e end end # Remove any existing loggers and just use the given logger: # # DB.logger = Logger.new($stdout) def logger=(logger) @loggers = Array(logger) end private # Determine if logging should be skipped. Defaults to true if no loggers # have been specified. def skip_logging? @loggers.empty? end # String including information about the connection, for use when logging # connection info. def connection_info(conn) "(conn: #{conn.__id__}) " end # Log the given SQL and then execute it on the connection, used by # the transaction code. def log_connection_execute(conn, sql) log_connection_yield(sql, conn){conn.public_send(connection_execute_method, sql)} end # Log message with message prefixed by duration at info level, or # warn level if duration is greater than log_warn_duration. def log_duration(duration, message) log_each((lwd = log_warn_duration and duration >= lwd) ? :warn : sql_log_level, "(#{sprintf('%0.6fs', duration)}) #{message}") end # Log message at level (which should be :error, :warn, or :info) # to all loggers. def log_each(level, message) @loggers.each{|logger| logger.public_send(level, message)} end end end
sequel-5.63.0/lib/sequel/database/misc.rb
# frozen-string-literal: true module Sequel class Database # --------------------- # :section: 7 - Miscellaneous methods # These methods don't fit neatly into another category.
# --------------------- # Hash of extension name symbols to callable objects to load the extension # into the Database object (usually by extending it with a module defined # in the extension). EXTENSIONS = {} # The general default size for string columns for all Sequel::Database # instances. DEFAULT_STRING_COLUMN_SIZE = 255 # Empty exception regexp to class map, used by default if Sequel doesn't # have specific support for the database in use. DEFAULT_DATABASE_ERROR_REGEXPS = {}.freeze # Mapping of schema type symbols to class or arrays of classes for that # symbol. SCHEMA_TYPE_CLASSES = {:string=>String, :integer=>Integer, :date=>Date, :datetime=>[Time, DateTime].freeze, :time=>Sequel::SQLTime, :boolean=>[TrueClass, FalseClass].freeze, :float=>Float, :decimal=>BigDecimal, :blob=>Sequel::SQL::Blob}.freeze # Nested hook Proc; each new hook Proc just wraps the previous one. @initialize_hook = proc{|db| } # Register a hook that will be run when a new Database is instantiated. It is # called with the new database handle. def self.after_initialize(&block) raise Error, "must provide block to after_initialize" unless block Sequel.synchronize do previous = @initialize_hook @initialize_hook = proc do |db| previous.call(db) block.call(db) end end end # Apply an extension to all Database objects created in the future. def self.extension(*extensions) after_initialize{|db| db.extension(*extensions)} end # Register an extension callback for Database objects. ext should be the # extension name symbol, and mod should either be a Module that the # database is extended with, or a callable object called with the database # object. If mod is not provided, a block can be provided and is treated # as the mod object. def self.register_extension(ext, mod=nil, &block) if mod raise(Error, "cannot provide both mod and block to Database.register_extension") if block if mod.is_a?(Module) block = proc{|db| db.extend(mod)} else block = mod end end Sequel.synchronize{EXTENSIONS[ext] = block} end # Run the after_initialize hook for the given +instance+. def self.run_after_initialize(instance) @initialize_hook.call(instance) end # Converts a uri to an options hash. These options are then passed # to a newly created database object. def self.uri_to_options(uri) { :user => uri.user, :password => uri.password, :port => uri.port, :host => uri.hostname, :database => (m = /\/(.*)/.match(uri.path)) && (m[1]) } end private_class_method :uri_to_options # The options hash for this database attr_reader :opts # Set the timezone to use for this database, overriding Sequel.database_timezone. attr_writer :timezone # The specific default size of string columns for this Sequel::Database, usually 255 by default. attr_accessor :default_string_column_size # Whether to check the bytesize of strings before typecasting (to avoid typecasting strings that # would be too long for the given type), true by default. Strings that are too long will raise # a typecasting error. attr_accessor :check_string_typecast_bytesize # Constructs a new instance of a database connection with the specified # options hash. # # Accepts the following options: # :after_connect :: A callable object called after each new connection is made, with the # connection object (and server argument if the callable accepts 2 arguments), # useful for customizations that you want to apply to all connections. # :before_preconnect :: Callable that runs after extensions from :preconnect_extensions are loaded, # but before any connections are created.
# :cache_schema :: Whether schema should be cached for this Database instance # :check_string_typecast_bytesize :: Whether to check the bytesize of strings before typecasting. # :connect_sqls :: An array of SQL strings to execute on each new connection, after :after_connect runs. # :default_string_column_size :: The default size of string columns, 255 by default. # :extensions :: Extensions to load into this Database instance. Can be a symbol, array of symbols, # or string with extensions separated by commas. These extensions are loaded after # connections are made by the :preconnect option. # :keep_reference :: Whether to keep a reference to this instance in Sequel::DATABASES, true by default. # :logger :: A specific logger to use. # :loggers :: An array of loggers to use. # :log_connection_info :: Whether connection information should be logged when logging queries. # :log_warn_duration :: The number of elapsed seconds after which queries should be logged at warn level. # :name :: A name to use for the Database object, displayed in PoolTimeout. # :preconnect :: Automatically create the maximum number of connections, so that they don't # need to be created as needed. This is useful when connecting takes a long time # and you want to avoid possible latency during runtime. # Set to :concurrently to create the connections in separate threads. Otherwise # they'll be created sequentially. # :preconnect_extensions :: Similar to the :extensions option, but loads the extensions before the # connections are made by the :preconnect option. # :quote_identifiers :: Whether to quote identifiers. # :servers :: A hash specifying server/shard specific options, keyed by shard symbol. # :single_threaded :: Whether to use a single-threaded connection pool. # :sql_log_level :: Method to use to log SQL to a logger, :info by default. # # For sharded connection pools, :after_connect and :connect_sqls can be specified per-shard. # # All options given are also passed to the connection pool. Additional options respected by # the connection pool are :max_connections, :pool_timeout, :servers, and :servers_hash. See the # connection pool documentation for details. def initialize(opts = OPTS) @opts ||= opts @opts = connection_pool_default_options.merge(@opts) @loggers = Array(@opts[:logger]) + Array(@opts[:loggers]) @opts[:servers] = {} if @opts[:servers].is_a?(String) @sharded = !!@opts[:servers] @opts[:adapter_class] = self.class @opts[:single_threaded] = @single_threaded = typecast_value_boolean(@opts.fetch(:single_threaded, Sequel.single_threaded)) @default_string_column_size = @opts[:default_string_column_size] || DEFAULT_STRING_COLUMN_SIZE @check_string_typecast_bytesize = typecast_value_boolean(@opts.fetch(:check_string_typecast_bytesize, true)) @schemas = {} @prepared_statements = {} @transactions = {} @symbol_literal_cache = {} @timezone = nil @dataset_class = dataset_class_default @cache_schema = typecast_value_boolean(@opts.fetch(:cache_schema, true)) @dataset_modules = [] @loaded_extensions = [] @schema_type_classes = SCHEMA_TYPE_CLASSES.dup self.sql_log_level = @opts[:sql_log_level] ?
@opts[:sql_log_level].to_sym : :info self.log_warn_duration = @opts[:log_warn_duration] self.log_connection_info = typecast_value_boolean(@opts[:log_connection_info]) @pool = ConnectionPool.get_pool(self, @opts) reset_default_dataset adapter_initialize keep_reference = typecast_value_boolean(@opts[:keep_reference]) != false begin Sequel.synchronize{::Sequel::DATABASES.push(self)} if keep_reference Sequel::Database.run_after_initialize(self) initialize_load_extensions(:preconnect_extensions) if before_preconnect = @opts[:before_preconnect] before_preconnect.call(self) end if typecast_value_boolean(@opts[:preconnect]) && @pool.respond_to?(:preconnect, true) concurrent = typecast_value_string(@opts[:preconnect]) == "concurrently" @pool.send(:preconnect, concurrent) end initialize_load_extensions(:extensions) test_connection if typecast_value_boolean(@opts.fetch(:test, true)) && respond_to?(:connect, true) rescue Sequel.synchronize{::Sequel::DATABASES.delete(self)} if keep_reference raise end end # Freeze internal data structures for the Database instance. def freeze valid_connection_sql metadata_dataset @opts.freeze @loggers.freeze @pool.freeze @dataset_class.freeze @dataset_modules.freeze @schema_type_classes.freeze @loaded_extensions.freeze metadata_dataset super end # Disallow dup/clone for Database instances undef_method :dup, :clone, :initialize_copy # :nocov: if RUBY_VERSION >= '1.9.3' # :nocov: undef_method :initialize_clone, :initialize_dup end # Cast the given type to a literal type # # DB.cast_type_literal(Float) # double precision # DB.cast_type_literal(:foo) # foo def cast_type_literal(type) type_literal(:type=>type) end # Load an extension into the receiver. In addition to requiring the extension file, this # also modifies the database to work with the extension (usually extending it with a # module defined in the extension file). If no related extension file exists or the # extension does not have specific support for Database objects, an Error will be raised. # Returns self. def extension(*exts) Sequel.extension(*exts) exts.each do |ext| if pr = Sequel.synchronize{EXTENSIONS[ext]} if Sequel.synchronize{@loaded_extensions.include?(ext) ? false : (@loaded_extensions << ext)} pr.call(self) end else raise(Error, "Extension #{ext} does not have specific support handling individual databases (try: Sequel.extension #{ext.inspect})") end end self end # Convert the given timestamp from the application's timezone, # to the databases's timezone or the default database timezone if # the database does not have a timezone. def from_application_timestamp(v) Sequel.convert_output_timestamp(v, timezone) end # Returns a string representation of the database object including the # class name and connection URI and options used when connecting (if any). def inspect a = [] a << uri.inspect if uri if (oo = opts[:orig_opts]) && !oo.empty? a << oo.inspect end "#<#{self.class}: #{a.join(' ')}>" end # Proxy the literal call to the dataset. # # DB.literal(1) # 1 # DB.literal(:a) # a # DB.literal('a') # 'a' def literal(v) schema_utility_dataset.literal(v) end # Return the literalized version of the symbol if cached, or # nil if it is not cached. def literal_symbol(sym) Sequel.synchronize{@symbol_literal_cache[sym]} end # Set the cached value of the literal symbol. def literal_symbol_set(sym, lit) Sequel.synchronize{@symbol_literal_cache[sym] = lit} end # Synchronize access to the prepared statements cache. 
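# Returns nil if no prepared statement has been cached under the given name.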
    def prepared_statement(name)
      Sequel.synchronize{prepared_statements[name]}
    end

    # Proxy the quote_identifier method to the dataset,
    # useful for quoting unqualified identifiers for use
    # outside of datasets.
    def quote_identifier(v)
      schema_utility_dataset.quote_identifier(v)
    end

    # Return the ruby class or array of classes for the given type symbol.
    def schema_type_class(type)
      @schema_type_classes[type]
    end

    # Default serial primary key options, used by the table creation code.
    def serial_primary_key_options
      {:primary_key => true, :type => Integer, :auto_increment => true}
    end

    # Cache the prepared statement object at the given name.
    def set_prepared_statement(name, ps)
      Sequel.synchronize{prepared_statements[name] = ps}
    end

    # Whether this database instance uses multiple servers, either for sharding
    # or for primary/replica configurations.
    def sharded?
      @sharded
    end

    # The timezone to use for this database, defaulting to Sequel.database_timezone.
    def timezone
      @timezone || Sequel.database_timezone
    end

    # Convert the given timestamp to the application's timezone,
    # from the database's timezone or the default database timezone if
    # the database does not have a timezone.
    def to_application_timestamp(v)
      Sequel.convert_timestamp(v, timezone)
    end

    # Typecast the value to the given column_type. Calls
    # typecast_value_#{column_type} if the method exists,
    # otherwise returns the value.
    # This method should raise Sequel::InvalidValue if the assigned value
    # is invalid.
    def typecast_value(column_type, value)
      return nil if value.nil?
      meth = "typecast_value_#{column_type}"
      begin
        # Allow calling private methods as per-type typecasting methods are private
        respond_to?(meth, true) ? send(meth, value) : value
      rescue ArgumentError, TypeError => e
        raise Sequel.convert_exception_class(e, InvalidValue)
      end
    end

    # Returns the URI used to connect to the database. If a URI
    # was not used when connecting, returns nil.
    def uri
      opts[:uri]
    end

    # Explicit alias of uri for easier subclassing.
    def url
      uri
    end

    private

    # Per adapter initialization method, empty by default.
    def adapter_initialize
    end

    # Returns true when the object is considered blank.
    # The only objects that are blank are nil, false,
    # strings with all whitespace, and ones that respond
    # true to empty?
    def blank_object?(obj)
      return obj.blank? if obj.respond_to?(:blank?)
      case obj
      when NilClass, FalseClass
        true
      when Numeric, TrueClass
        false
      when String
        obj.strip.empty?
      else
        obj.respond_to?(:empty?) ? obj.empty? : false
      end
    end

    # An enumerable yielding pairs of regexps and exception classes, used
    # to match against underlying driver exception messages in
    # order to raise a more specific Sequel::DatabaseError subclass.
    def database_error_regexps
      DEFAULT_DATABASE_ERROR_REGEXPS
    end

    # Return the Sequel::DatabaseError subclass to wrap the given
    # exception in.
    def database_error_class(exception, opts)
      database_specific_error_class(exception, opts) || DatabaseError
    end

    # Return the SQLState for the given exception, if one can be determined
    def database_exception_sqlstate(exception, opts)
      nil
    end

    # Return a specific Sequel::DatabaseError exception class if
    # one is appropriate for the underlying exception,
    # or nil if there is no specific exception class.
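    #
    # For example (illustrative), an adapter exception whose SQLSTATE is
    # reported as '23505' is wrapped in Sequel::UniqueConstraintViolation
    # by the dispatch below.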
def database_specific_error_class(exception, opts) return DatabaseDisconnectError if disconnect_error?(exception, opts) if sqlstate = database_exception_sqlstate(exception, opts) if klass = database_specific_error_class_from_sqlstate(sqlstate) return klass end else database_error_regexps.each do |regexp, klss| return klss if exception.message =~ regexp end end nil end NOT_NULL_CONSTRAINT_SQLSTATES = %w'23502'.freeze.each(&:freeze) FOREIGN_KEY_CONSTRAINT_SQLSTATES = %w'23503 23506 23504'.freeze.each(&:freeze) UNIQUE_CONSTRAINT_SQLSTATES = %w'23505'.freeze.each(&:freeze) CHECK_CONSTRAINT_SQLSTATES = %w'23513 23514'.freeze.each(&:freeze) SERIALIZATION_CONSTRAINT_SQLSTATES = %w'40001'.freeze.each(&:freeze) # Given the SQLState, return the appropriate DatabaseError subclass. def database_specific_error_class_from_sqlstate(sqlstate) case sqlstate when *NOT_NULL_CONSTRAINT_SQLSTATES NotNullConstraintViolation when *FOREIGN_KEY_CONSTRAINT_SQLSTATES ForeignKeyConstraintViolation when *UNIQUE_CONSTRAINT_SQLSTATES UniqueConstraintViolation when *CHECK_CONSTRAINT_SQLSTATES CheckConstraintViolation when *SERIALIZATION_CONSTRAINT_SQLSTATES SerializationFailure end end # Return true if exception represents a disconnect error, false otherwise. def disconnect_error?(exception, opts) opts[:disconnect] end # Load extensions during initialization from the given key in opts. def initialize_load_extensions(key) case exts = @opts[key] when String extension(*exts.split(',').map(&:to_sym)) when Array extension(*exts) when Symbol extension(exts) when nil # nothing else raise Error, "unsupported Database #{key.inspect} option: #{@opts[key].inspect}" end end # Convert the given exception to an appropriate Sequel::DatabaseError # subclass, keeping message and backtrace. def raise_error(exception, opts=OPTS) if !opts[:classes] || Array(opts[:classes]).any?{|c| exception.is_a?(c)} raise Sequel.convert_exception_class(exception, database_error_class(exception, opts)) else raise exception end end # Swallow database errors, unless they are connect/disconnect errors. def swallow_database_error yield rescue Sequel::DatabaseDisconnectError, DatabaseConnectionError # Always raise disconnect errors raise rescue Sequel::DatabaseError # Don't raise other database errors. nil # else # Don't rescue other exceptions, they will be raised normally. end # Check the bytesize of a string before conversion. There is no point # trying to typecast strings that would be way too long. def typecast_check_string_length(string, max_size) if @check_string_typecast_bytesize && string.bytesize > max_size raise InvalidValue, "string too long to typecast (bytesize: #{string.bytesize}, max: #{max_size})" end string end # Check the bytesize of the string value, if value is a string. def typecast_check_length(value, max_size) typecast_check_string_length(value, max_size) if String === value value end # Typecast the value to an SQL::Blob def typecast_value_blob(value) value.is_a?(Sequel::SQL::Blob) ? value : Sequel::SQL::Blob.new(value) end # Typecast the value to true, false, or nil def typecast_value_boolean(value) case value when false, 0, "0", /\Af(alse)?\z/i, /\Ano?\z/i false else blank_object?(value) ? 
          nil : true
      end
    end

    # Typecast the value to a Date
    def typecast_value_date(value)
      case value
      when DateTime, Time
        Date.new(value.year, value.month, value.day)
      when Date
        value
      when String
        Sequel.string_to_date(typecast_check_string_length(value, 100))
      when Hash
        Date.new(*[:year, :month, :day].map{|x| typecast_check_length(value[x] || value[x.to_s], 100).to_i})
      else
        raise InvalidValue, "invalid value for Date: #{value.inspect}"
      end
    end

    # Typecast the value to a DateTime or Time depending on Sequel.datetime_class
    def typecast_value_datetime(value)
      case value
      when String
        Sequel.typecast_to_application_timestamp(typecast_check_string_length(value, 100))
      when Hash
        [:year, :month, :day, :hour, :minute, :second, :nanos, :offset].each do |x|
          typecast_check_length(value[x] || value[x.to_s], 100)
        end
        Sequel.typecast_to_application_timestamp(value)
      else
        Sequel.typecast_to_application_timestamp(value)
      end
    end

    if RUBY_VERSION >= '2.4'
      # Typecast a string to a BigDecimal
      alias _typecast_value_string_to_decimal BigDecimal
    else
      # :nocov:
      def _typecast_value_string_to_decimal(value)
        d = BigDecimal(value)
        if d.zero?
          # BigDecimal parsing is loose by default, returning a 0 value for
          # invalid input. If a zero value is received, use Float to check
          # for validity.
          begin
            Float(value)
          rescue ArgumentError
            raise InvalidValue, "invalid value for BigDecimal: #{value.inspect}"
          end
        end
        d
      end
      # :nocov:
    end

    # Typecast the value to a BigDecimal
    def typecast_value_decimal(value)
      case value
      when BigDecimal
        value
      when Numeric
        BigDecimal(value.to_s)
      when String
        _typecast_value_string_to_decimal(typecast_check_string_length(value, 1000))
      else
        raise InvalidValue, "invalid value for BigDecimal: #{value.inspect}"
      end
    end

    # Typecast the value to a Float
    def typecast_value_float(value)
      Float(typecast_check_length(value, 1000))
    end

    # Typecast the value to an Integer
    def typecast_value_integer(value)
      case value
      when String
        typecast_check_string_length(value, 100)
        if value =~ /\A-?0+(\d)/
          Integer(value, 10)
        else
          Integer(value)
        end
      else
        Integer(value)
      end
    end

    # Typecast the value to a String
    def typecast_value_string(value)
      case value
      when Hash, Array
        raise Sequel::InvalidValue, "invalid value for String: #{value.inspect}"
      else
        value.to_s
      end
    end

    # Typecast the value to a Time
    def typecast_value_time(value)
      case value
      when Time
        if value.is_a?(SQLTime)
          value
        else
          SQLTime.create(value.hour, value.min, value.sec, value.nsec/1000.0)
        end
      when String
        Sequel.string_to_time(typecast_check_string_length(value, 100))
      when Hash
        SQLTime.create(*[:hour, :minute, :second].map{|x| typecast_check_length(value[x] || value[x.to_s], 100).to_i})
      else
        raise Sequel::InvalidValue, "invalid value for Time: #{value.inspect}"
      end
    end
  end
end
sequel-5.63.0/lib/sequel/database/query.rb000066400000000000000000000340321434214120600204040ustar00rootroot00000000000000# frozen-string-literal: true

module Sequel
  class Database
    # ---------------------
    # :section: 1 - Methods that execute queries and/or return results
    # These methods generally execute SQL code on the database server.
    # ---------------------

    COLUMN_SCHEMA_DATETIME_TYPES = [:date, :datetime].freeze
    COLUMN_SCHEMA_STRING_TYPES = [:string, :blob, :date, :datetime, :time, :enum, :set, :interval].freeze

    # The prepared statement object hash for this database, keyed by name symbol
    attr_reader :prepared_statements

    # Whether the schema should be cached for this database. True by default
    # for performance, can be set to false to always issue a database query to
    # get the schema.
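    #
    # A short usage sketch (illustrative; DB is assumed to be a connected
    # Database object):
    #
    #   DB.cache_schema = false
    #   DB.schema(:items) # issues a schema query on every call instead of using the cache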
    attr_accessor :cache_schema

    # Runs the supplied SQL statement string on the database server.
    # Returns self so it can be safely chained:
    #
    #   DB << "UPDATE albums SET artist_id = NULL" << "DROP TABLE artists"
    def <<(sql)
      run(sql)
      self
    end

    # Call the prepared statement with the given name with the given hash
    # of arguments.
    #
    #   DB[:items].where(id: 1).prepare(:first, :sa)
    #   DB.call(:sa) # SELECT * FROM items WHERE id = 1
    def call(ps_name, hash=OPTS, &block)
      prepared_statement(ps_name).call(hash, &block)
    end

    # Method that should be used when submitting any DDL (Data Definition
    # Language) SQL, such as +create_table+. By default, calls +execute_dui+.
    # This method should not be called directly by user code.
    def execute_ddl(sql, opts=OPTS, &block)
      execute_dui(sql, opts, &block)
    end

    # Method that should be used when issuing a DELETE or UPDATE
    # statement. By default, calls execute.
    # This method should not be called directly by user code.
    def execute_dui(sql, opts=OPTS, &block)
      execute(sql, opts, &block)
    end

    # Method that should be used when issuing an INSERT
    # statement. By default, calls execute_dui.
    # This method should not be called directly by user code.
    def execute_insert(sql, opts=OPTS, &block)
      execute_dui(sql, opts, &block)
    end

    # Returns a single value from the database, see Dataset#get.
    #
    #   DB.get(1) # SELECT 1
    #   # => 1
    #   DB.get{server_version.function} # SELECT server_version()
    def get(*args, &block)
      @default_dataset.get(*args, &block)
    end

    # Runs the supplied SQL statement string on the database server. Returns nil.
    # Options:
    # :server :: The server to run the SQL on.
    #
    #   DB.run("SET some_server_variable = 42")
    def run(sql, opts=OPTS)
      sql = literal(sql) if sql.is_a?(SQL::PlaceholderLiteralString)
      execute_ddl(sql, opts)
      nil
    end

    # Returns the schema for the given table as an array with all members being arrays of length 2,
    # the first member being the column name, and the second member being a hash of column information.
    # The table argument can also be a dataset, as long as it only has one table.
    # Available options are:
    #
    # :reload :: Ignore any cached results, and get fresh information from the database.
    # :schema :: An explicit schema to use. It may also be implicitly provided
    #            via the table name.
    #
    # If schema parsing is supported by the database, the column information hash should contain at least the
    # following entries:
    #
    # :allow_null :: Whether NULL is an allowed value for the column.
    # :db_type :: The database type for the column, as a database specific string.
    # :default :: The database default for the column, as a database specific string, or nil if there is
    #             no default value.
    # :primary_key :: Whether the column is a primary key column. If this column is not present,
    #                 it means that primary key information is unavailable, not that the column
    #                 is not a primary key.
    # :ruby_default :: The database default for the column, as a ruby object. In many cases, complex
    #                  database defaults cannot be parsed into ruby objects, in which case nil will be
    #                  used as the value.
    # :type :: A symbol specifying the type, such as :integer or :string.
    #
    # Example:
    #
    #   DB.schema(:artists)
    #   # [[:id,
    #   #   {:type=>:integer,
    #   #    :primary_key=>true,
    #   #    :default=>"nextval('artist_id_seq'::regclass)",
    #   #    :ruby_default=>nil,
    #   #    :db_type=>"integer",
    #   #    :allow_null=>false}],
    #   #  [:name,
    #   #   {:type=>:string,
    #   #    :primary_key=>false,
    #   #    :default=>nil,
    #   #    :ruby_default=>nil,
    #   #    :db_type=>"text",
    #   #    :allow_null=>false}]]
    def schema(table, opts=OPTS)
      raise(Error, 'schema parsing is not implemented on this database') unless supports_schema_parsing?

      opts = opts.dup
      tab = if table.is_a?(Dataset)
        o = table.opts
        from = o[:from]
        raise(Error, "can only parse the schema for a dataset with a single from table") unless from && from.length == 1 && !o.include?(:join) && !o.include?(:sql)
        table.first_source_table
      else
        table
      end

      qualifiers = split_qualifiers(tab)
      table_name = qualifiers.pop
      sch = qualifiers.pop
      information_schema_schema = case qualifiers.length
      when 1
        Sequel.identifier(*qualifiers)
      when 2
        Sequel.qualify(*qualifiers)
      end

      if table.is_a?(Dataset)
        quoted_name = table.literal(tab)
        opts[:dataset] = table
      else
        quoted_name = schema_utility_dataset.literal(table)
      end

      opts[:schema] = sch if sch && !opts.include?(:schema)
      opts[:information_schema_schema] = information_schema_schema if information_schema_schema && !opts.include?(:information_schema_schema)

      Sequel.synchronize{@schemas.delete(quoted_name)} if opts[:reload]
      if v = Sequel.synchronize{@schemas[quoted_name]}
        return v
      end

      cols = schema_parse_table(table_name, opts)
      raise(Error, "schema parsing returned no columns, table #{table_name.inspect} probably doesn't exist") if cols.nil? || cols.empty?

      primary_keys = 0
      auto_increment_set = false
      cols.each do |_,c|
        auto_increment_set = true if c.has_key?(:auto_increment)
        primary_keys += 1 if c[:primary_key]
      end

      cols.each do |_,c|
        c[:ruby_default] = column_schema_to_ruby_default(c[:default], c[:type]) unless c.has_key?(:ruby_default)
        if c[:primary_key] && !auto_increment_set
          # If adapter didn't set it, assume that integer primary keys are auto incrementing
          c[:auto_increment] = primary_keys == 1 && !!(c[:db_type] =~ /int/io)
        end
        if !c[:max_length] && c[:type] == :string && (max_length = column_schema_max_length(c[:db_type]))
          c[:max_length] = max_length
        end
        if !c[:max_value] && !c[:min_value] && c[:type] == :integer && (min_max = column_schema_integer_min_max_values(c[:db_type]))
          c[:min_value], c[:max_value] = min_max
        end
      end
      schema_post_process(cols)

      Sequel.synchronize{@schemas[quoted_name] = cols} if cache_schema
      cols
    end

    # Returns true if a table with the given name exists. This requires a query
    # to the database.
    #
    #   DB.table_exists?(:foo) # => false
    #   # SELECT NULL FROM foo LIMIT 1
    #
    # Note that since this does a SELECT from the table, it can give false negatives
    # if you don't have permission to SELECT from the table.
    def table_exists?(name)
      sch, table_name = schema_and_table(name)
      name = SQL::QualifiedIdentifier.new(sch, table_name) if sch
      ds = from(name)
      transaction(:savepoint=>:only){_table_exists?(ds)}
      true
    rescue DatabaseError
      false
    end

    private

    # Should raise an error if the table doesn't exist,
    # and not raise an error if the table does exist.
    def _table_exists?(ds)
      ds.get(SQL::AliasedExpression.new(Sequel::NULL, :nil))
    end

    # Whether the type should be treated as a string type when parsing the
    # column schema default value.
    def column_schema_default_string_type?(type)
      COLUMN_SCHEMA_STRING_TYPES.include?(type)
    end

    # Transform the given normalized default string into a ruby object for the
    # given type.
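    #
    # Illustrative examples (hypothetical inputs; the results follow the
    # cases in the method below):
    #
    #   column_schema_default_to_ruby_value("1", :boolean)  # => true
    #   column_schema_default_to_ruby_value("42", :integer) # => 42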
    def column_schema_default_to_ruby_value(default, type)
      case type
      when :boolean
        case default
        when /[f0]/i
          false
        when /[t1]/i
          true
        end
      when :string, :enum, :set, :interval
        default
      when :blob
        Sequel::SQL::Blob.new(default)
      when :integer
        Integer(default)
      when :float
        Float(default)
      when :date
        Sequel.string_to_date(default)
      when :datetime
        Sequel.string_to_datetime(default)
      when :time
        Sequel.string_to_time(default)
      when :decimal
        BigDecimal(default)
      end
    end

    # Normalize the default value string for the given type
    # and return the normalized value.
    def column_schema_normalize_default(default, type)
      if column_schema_default_string_type?(type)
        return unless m = /\A'(.*)'\z/.match(default)
        m[1].gsub("''", "'")
      else
        default
      end
    end

    # Convert the given default, which should be a database specific string, into
    # a ruby object.
    def column_schema_to_ruby_default(default, type)
      return default unless default.is_a?(String)
      if COLUMN_SCHEMA_DATETIME_TYPES.include?(type)
        if /now|today|CURRENT|getdate|\ADate\(\)\z/i.match(default)
          if type == :date
            return Sequel::CURRENT_DATE
          else
            return Sequel::CURRENT_TIMESTAMP
          end
        end
      end
      default = column_schema_normalize_default(default, type)
      column_schema_default_to_ruby_value(default, type) rescue nil
    end

    INTEGER1_MIN_MAX = [-128, 127].freeze
    INTEGER2_MIN_MAX = [-32768, 32767].freeze
    INTEGER3_MIN_MAX = [-8388608, 8388607].freeze
    INTEGER4_MIN_MAX = [-2147483648, 2147483647].freeze
    INTEGER8_MIN_MAX = [-9223372036854775808, 9223372036854775807].freeze
    UNSIGNED_INTEGER1_MIN_MAX = [0, 255].freeze
    UNSIGNED_INTEGER2_MIN_MAX = [0, 65535].freeze
    UNSIGNED_INTEGER3_MIN_MAX = [0, 16777215].freeze
    UNSIGNED_INTEGER4_MIN_MAX = [0, 4294967295].freeze
    UNSIGNED_INTEGER8_MIN_MAX = [0, 18446744073709551615].freeze

    # Look at the db_type and guess the minimum and maximum integer values for
    # the column.
    def column_schema_integer_min_max_values(db_type)
      unsigned = /unsigned/i =~ db_type
      case db_type
      when /big|int8/i
        unsigned ? UNSIGNED_INTEGER8_MIN_MAX : INTEGER8_MIN_MAX
      when /medium/i
        unsigned ? UNSIGNED_INTEGER3_MIN_MAX : INTEGER3_MIN_MAX
      when /small|int2/i
        unsigned ? UNSIGNED_INTEGER2_MIN_MAX : INTEGER2_MIN_MAX
      when /tiny/i
        (unsigned || column_schema_tinyint_type_is_unsigned?) ? UNSIGNED_INTEGER1_MIN_MAX : INTEGER1_MIN_MAX
      else
        unsigned ? UNSIGNED_INTEGER4_MIN_MAX : INTEGER4_MIN_MAX
      end
    end

    # Whether the tinyint type (if supported by the database) is unsigned by default.
    def column_schema_tinyint_type_is_unsigned?
      false
    end

    # Look at the db_type and guess the maximum length of the column.
    # This assumes types such as varchar(255).
    def column_schema_max_length(db_type)
      if db_type =~ /\((\d+)\)/
        $1.to_i
      end
    end

    # Return a Method object for the dataset's input_identifier_method.
    # Used in metadata parsing to make sure the identifiers sent to the
    # database are in the correct format.
    def input_identifier_meth(ds=nil)
      (ds || dataset).method(:input_identifier)
    end

    # Uncached version of metadata_dataset, designed for overriding.
    def _metadata_dataset
      dataset
    end

    # Return a dataset that uses the default identifier input and output methods
    # for this database. Used when parsing metadata so that column symbols are
    # returned as expected.
    def metadata_dataset
      @metadata_dataset ||= _metadata_dataset
    end

    # Return a Method object for the dataset's output_identifier_method.
    # Used in metadata parsing to make sure the returned information is in the
    # correct format.
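    #
    # For example (a sketch; the exact conversion depends on the dataset's
    # identifier settings), on an adapter that folds unquoted identifiers to
    # uppercase, output_identifier_meth.call("SOME_COLUMN") would return
    # :some_column.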
def output_identifier_meth(ds=nil) (ds || dataset).method(:output_identifier) end # Remove the cached schema for the given schema name def remove_cached_schema(table) cache = @default_dataset.send(:cache) Sequel.synchronize{cache.clear} k = quote_schema_table(table) Sequel.synchronize{@schemas.delete(k)} end # Match the database's column type to a ruby type via a # regular expression, and return the ruby type as a symbol # such as :integer or :string. def schema_column_type(db_type) case db_type when /\A(character( varying)?|n?(var)?char|n?text|string|clob)/io :string when /\A(int(eger)?|(big|small|tiny)int)/io :integer when /\Adate\z/io :date when /\A((small)?datetime|timestamp(\(\d\))?( with(out)? time zone)?)\z/io :datetime when /\Atime( with(out)? time zone)?\z/io :time when /\A(bool(ean)?)\z/io :boolean when /\A(real|float( unsigned)?|double( precision)?|double\(\d+,\d+\)( unsigned)?)\z/io :float when /\A(?:(?:(?:num(?:ber|eric)?|decimal)(?:\(\d+,\s*(\d+|false|true)\))?))\z/io $1 && ['0', 'false'].include?($1) ? :integer : :decimal when /bytea|blob|image|(var)?binary/io :blob when /\Aenum/io :enum end end # Post process the schema values. def schema_post_process(cols) # :nocov: if RUBY_VERSION >= '2.5' # :nocov: cols.each do |_, h| db_type = h[:db_type] if db_type.is_a?(String) h[:db_type] = -db_type end end end cols.each do |_,c| c.each_value do |val| val.freeze if val.is_a?(String) end end end end end sequel-5.63.0/lib/sequel/database/schema_generator.rb000066400000000000000000000667651434214120600225670ustar00rootroot00000000000000# frozen-string-literal: true module Sequel # The Schema module holds the schema generators. module Schema # Schema::CreateTableGenerator is an internal class that the user is not expected # to instantiate directly. Instances are created by Database#create_table. # It is used to specify table creation parameters. It takes a Database # object and a block of column/index/constraint specifications, and # gives the Database a table description, which the database uses to # create a table. # # Schema::CreateTableGenerator has some methods but also includes method_missing, # allowing users to specify column type as a method instead of using # the column method, which makes for a nicer DSL. # # For more information on Sequel's support for schema modification, see # the {"Schema Modification" guide}[rdoc-ref:doc/schema_modification.rdoc]. class CreateTableGenerator # Classes specifying generic types that Sequel will convert to database-specific types. GENERIC_TYPES=%w'String Integer Float Numeric BigDecimal Date DateTime Time File TrueClass FalseClass'.freeze # Column hashes created by this generator attr_reader :columns # Constraint hashes created by this generator attr_reader :constraints # Index hashes created by this generator attr_reader :indexes # Set the database in which to create the table, and evaluate the block # in the context of this object. def initialize(db, &block) @db = db @columns = [] @indexes = [] @constraints = [] @primary_key = nil instance_exec(&block) if block end # Use custom Bignum method to use :Bignum instead of Bignum class, to work # correctly in cases where Bignum is the same as Integer. def Bignum(name, opts=OPTS) column(name, :Bignum, opts) end # Use custom Fixnum method to use Integer instead of Fixnum class, to avoid # warnings on ruby 2.4+. def Fixnum(name, opts=OPTS) column(name, Integer, opts) end # Add a method for each of the given types that creates a column # with that type as a constant. 
Types given should either already # be constants/classes or a capitalized string/symbol with the same name # as a constant/class. def self.add_type_method(*types) types.each do |type| case type when Symbol, String method = type type = Object.const_get(type) else method = type.to_s end define_method(method){|name, opts=OPTS| column(name, type, opts)} end nil end # Add an unnamed constraint, specified by the given block # or args: # # check(num: 1..5) # CHECK num >= 1 AND num <= 5 # check{num > 5} # CHECK num > 5 def check(*args, &block) constraint(nil, *args, &block) end # Add a column with the given name, type, and opts: # # column :num, :integer # # num INTEGER # # column :name, String, null: false, default: 'a' # # name varchar(255) NOT NULL DEFAULT 'a' # # inet :ip # # ip inet # # You can also create columns via method missing, so the following are # equivalent: # # column :number, :integer # integer :number # # The following options are supported: # # :collate :: The collation to use for the column. For backwards compatibility, # only symbols and string values are supported, and they are used verbatim. # However, on PostgreSQL, symbols are literalized as regular identifiers, # since unquoted collations are unlikely to be valid. # :default :: The default value for the column. # :deferrable :: For foreign key columns, this ensures referential integrity will work even if # referencing table uses a foreign key value that does not # yet exist on referenced table (but will exist before the transaction commits). # Basically it adds DEFERRABLE INITIALLY DEFERRED on key creation. # If you use :immediate as the value, uses DEFERRABLE INITIALLY IMMEDIATE. # :generated_always_as :: Specify a GENERATED ALWAYS AS column expression, # if generated columns are supported (PostgreSQL 12+, MariaDB 5.2.0+, # and MySQL 5.7.6+). # :index :: Create an index on this column. If given a hash, use the hash as the # options for the index. # :key :: For foreign key columns, the column in the associated table # that this column references. Unnecessary if this column # references the primary key of the associated table, except if you are # using MySQL. # :null :: Mark the column as allowing NULL values (if true), # or not allowing NULL values (if false). The default is to allow NULL values. # :on_delete :: Specify the behavior of this column when being deleted # (:restrict, :cascade, :set_null, :set_default, :no_action). # :on_update :: Specify the behavior of this column when being updated # (:restrict, :cascade, :set_null, :set_default, :no_action). # :primary_key :: Make the column as a single primary key column. This should not # be used if you want a single autoincrementing primary key column # (use the primary_key method in that case). # :primary_key_constraint_name :: The name to give the primary key constraint # :primary_key_deferrable :: Similar to :deferrable, but for the primary key constraint # if :primary_key is used. # :type :: Overrides the type given as the argument. Generally not used by column # itself, but can be passed as an option to other methods that call column. # :unique :: Mark the column as unique, generally has the same effect as # creating a unique index on the column. # :unique_constraint_name :: The name to give the unique key constraint # :unique_deferrable :: Similar to :deferrable, but for the unique constraint if :unique # is used. # # PostgreSQL specific options: # # :identity :: Create an identity column. 
# # MySQL specific options: # # :generated_type :: Set the type of column when using :generated_always_as, # should be :virtual or :stored to force a type. # :on_update_current_timestamp :: Use ON UPDATE CURRENT TIMESTAMP when defining the column, # which will update the column value to CURRENT_TIMESTAMP # on every UPDATE. # # Microsoft SQL Server specific options: # # :clustered :: When using :primary_key or :unique, marks the primary key or unique # constraint as CLUSTERED (if true), or NONCLUSTERED (if false). def column(name, type, opts = OPTS) columns << {:name => name, :type => type}.merge!(opts) if index_opts = opts[:index] index(name, index_opts.is_a?(Hash) ? index_opts : OPTS) end nil end # Adds a named CHECK constraint (or unnamed if name is nil), # with the given block or args. To provide options for the constraint, pass # a hash as the first argument. # # constraint(:blah, num: 1..5) # # CONSTRAINT blah CHECK num >= 1 AND num <= 5 # constraint({name: :blah, deferrable: true}, num: 1..5) # # CONSTRAINT blah CHECK num >= 1 AND num <= 5 DEFERRABLE INITIALLY DEFERRED # # If the first argument is a hash, the following options are supported: # # Options: # :name :: The name of the CHECK constraint # :deferrable :: Whether the CHECK constraint should be marked DEFERRABLE. # # PostgreSQL specific options: # :not_valid :: Whether the CHECK constraint should be marked NOT VALID. def constraint(name, *args, &block) opts = name.is_a?(Hash) ? name : {:name=>name} constraints << opts.merge(:type=>:check, :check=>block || args) nil end # Add a foreign key in the table that references another table. See #column # for available options. # # foreign_key(:artist_id) # artist_id INTEGER # foreign_key(:artist_id, :artists) # artist_id INTEGER REFERENCES artists # foreign_key(:artist_id, :artists, key: :id) # artist_id INTEGER REFERENCES artists(id) # foreign_key(:artist_id, :artists, type: String) # artist_id varchar(255) REFERENCES artists(id) # # Additional Options: # # :foreign_key_constraint_name :: The name to give the foreign key constraint # # If you want a foreign key constraint without adding a column (usually because it is a # composite foreign key), you can provide an array of columns as the first argument, and # you can provide the :name option to name the constraint: # # foreign_key([:artist_name, :artist_location], :artists, name: :artist_fk) # # ADD CONSTRAINT artist_fk FOREIGN KEY (artist_name, artist_location) REFERENCES artists def foreign_key(name, table=nil, opts = OPTS) opts = case table when Hash table.merge(opts) when NilClass opts else opts.merge(:table=>table) end return composite_foreign_key(name, opts) if name.is_a?(Array) column(name, Integer, opts) end # Add a full text index on the given columns. # See #index for additional options. # # PostgreSQL specific options: # :index_type :: Can be set to :gist to use a GIST index instead of the # default GIN index. # :language :: Set a language to use for the index (default: simple). def full_text_index(columns, opts = OPTS) index(columns, opts.merge(:type => :full_text)) end # True if the generator includes the creation of a column with the given name. def has_column?(name) columns.any?{|c| c[:name] == name} end # Add an index on the given column(s) with the given options. 
      # Examples:
      #
      #   index :name
      #   # CREATE INDEX table_name_index ON table (name)
      #
      #   index [:artist_id, :name]
      #   # CREATE INDEX table_artist_id_name_index ON table (artist_id, name)
      #
      #   index [:artist_id, :name], name: :foo
      #   # CREATE INDEX foo ON table (artist_id, name)
      #
      # General options:
      #
      # :include :: Include additional column values in the index, without
      #             actually indexing on those values (only supported by
      #             some databases).
      # :name :: The name to use for the index. If not given, a default name
      #          based on the table and columns is used.
      # :type :: The type of index to use (only supported by some databases,
      #          :full_text and :spatial values are handled specially).
      # :unique :: Make the index unique, so duplicate values are not allowed.
      # :where :: A filter expression, used to create a partial index (only
      #           supported by some databases).
      #
      # PostgreSQL specific options:
      #
      # :concurrently :: Create the index concurrently, so it doesn't block
      #                  operations on the table while the index is being
      #                  built.
      # :if_not_exists :: Only create the index if an index of the same name doesn't already exist.
      # :nulls_distinct :: Set whether separate NULLs should be considered distinct values in unique indexes.
      # :opclass :: Set an opclass to use for all columns (per-column opclasses require
      #             custom SQL).
      # :tablespace :: Specify tablespace for index.
      #
      # Microsoft SQL Server specific options:
      #
      # :key_index :: Sets the KEY INDEX to the given value.
      def index(columns, opts = OPTS)
        indexes << {:columns => Array(columns)}.merge!(opts)
        nil
      end

      # Add a column with the given type, name, and opts. See #column for available
      # options.
      def method_missing(type, name = nil, opts = OPTS)
        name ? column(name, type, opts) : super
      end

      # This object responds to all methods.
      def respond_to_missing?(meth, include_private)
        true
      end

      # Adds an autoincrementing primary key column or a primary key constraint.
      # To just create a constraint, the first argument should be an array of column symbols
      # specifying the primary key columns. To create an autoincrementing primary key
      # column, a single symbol can be used. In both cases, an options hash can be used
      # as the second argument.
      #
      # If you want to create a primary key column that is not autoincrementing, you
      # should not use this method. Instead, you should use the regular +column+ method
      # with a primary_key: true option.
      #
      # If an array of column symbols is used, you can specify the :name option
      # to name the constraint.
      #
      # Options:
      # :keep_order :: For non-composite primary keys, respects the existing order of
      #                columns, overriding the default behavior of making the primary
      #                key the first column.
      #
      # Examples:
      #   primary_key(:id)
      #   primary_key(:id, type: :Bignum, keep_order: true)
      #   primary_key([:street_number, :house_number], name: :some_constraint_name)
      def primary_key(name, *args)
        return composite_primary_key(name, *args) if name.is_a?(Array)
        column = @db.serial_primary_key_options.merge({:name => name})

        if opts = args.pop
          opts = {:type => opts} unless opts.is_a?(Hash)
          if type = args.pop
            opts = opts.merge(:type => type)
          end
          column.merge!(opts)
        end

        @primary_key = column
        if column[:keep_order]
          columns << column
        else
          columns.unshift(column)
        end
        nil
      end

      # The name of the primary key for this generator, if it has a primary key.
      def primary_key_name
        @primary_key[:name] if @primary_key
      end

      # Add a spatial index on the given columns.
      # See #index for additional options.
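      #
      # A hypothetical example (the emitted SQL depends on the database;
      # shown here in MySQL style):
      #
      #   spatial_index :geom
      #   # CREATE SPATIAL INDEX table_geom_index ON table (geom)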
      def spatial_index(columns, opts = OPTS)
        index(columns, opts.merge(:type => :spatial))
      end

      # Add a unique constraint on the given columns.
      #
      #   unique(:name) # UNIQUE (name)
      #
      # Supports the same :deferrable option as #column. The :name option can be used
      # to name the constraint.
      def unique(columns, opts = OPTS)
        constraints << {:type => :unique, :columns => Array(columns)}.merge!(opts)
        nil
      end

      private

      # Add a composite primary key constraint
      def composite_primary_key(columns, *args)
        opts = args.pop || OPTS
        constraints << {:type => :primary_key, :columns => columns}.merge!(opts)
        nil
      end

      # Add a composite foreign key constraint
      def composite_foreign_key(columns, opts)
        constraints << {:type => :foreign_key, :columns => columns}.merge!(opts)
        nil
      end

      add_type_method(*GENERIC_TYPES)
    end

    # Schema::AlterTableGenerator is an internal class that the user is not expected
    # to instantiate directly. Instances are created by Database#alter_table.
    # It is used to specify table alteration parameters. It takes a Database
    # object and a block of operations to perform on the table, and
    # gives the Database an array of table altering operations, which the database uses to
    # alter a table's description.
    #
    # For more information on Sequel's support for schema modification, see
    # the {"Schema Modification" guide}[rdoc-ref:doc/schema_modification.rdoc].
    class AlterTableGenerator
      # An array of operations to perform
      attr_reader :operations

      # Set the Database object to which to apply the changes, and evaluate the
      # block in the context of this object.
      def initialize(db, &block)
        @db = db
        @operations = []
        instance_exec(&block) if block
      end

      # Add a column with the given name, type, and opts.
      # See CreateTableGenerator#column for the available options.
      #
      #   add_column(:name, String) # ADD COLUMN name varchar(255)
      #
      # PostgreSQL specific options:
      #
      # :if_not_exists :: Set to true to not add the column if it already exists (PostgreSQL 9.6+)
      #
      # MySQL specific options:
      #
      # :after :: The name of an existing column that the new column should be positioned after
      # :first :: Create this new column before all other existing columns
      def add_column(name, type, opts = OPTS)
        op = {:op => :add_column, :name => name, :type => type}.merge!(opts)
        index_opts = op.delete(:index)
        @operations << op
        add_index(name, index_opts.is_a?(Hash) ? index_opts : OPTS) if index_opts
        nil
      end

      # Add a constraint with the given name and args.
      # See CreateTableGenerator#constraint.
      #
      #   add_constraint(:valid_name, Sequel.like(:name, 'A%'))
      #   # ADD CONSTRAINT valid_name CHECK (name LIKE 'A%' ESCAPE '\')
      #   add_constraint({name: :valid_name, deferrable: true}, Sequel.like(:name, 'A%'))
      #   # ADD CONSTRAINT valid_name CHECK (name LIKE 'A%' ESCAPE '\') DEFERRABLE INITIALLY DEFERRED
      def add_constraint(name, *args, &block)
        opts = name.is_a?(Hash) ? name : {:name=>name}
        @operations << opts.merge(:op=>:add_constraint, :type=>:check, :check=>block || args)
        nil
      end

      # Add a unique constraint to the given column(s)
      #
      #   add_unique_constraint(:name) # ADD UNIQUE (name)
      #   add_unique_constraint(:name, name: :unique_name) # ADD CONSTRAINT unique_name UNIQUE (name)
      #
      # Supports the same :deferrable option as CreateTableGenerator#column.
      def add_unique_constraint(columns, opts = OPTS)
        @operations << {:op => :add_constraint, :type => :unique, :columns => Array(columns)}.merge!(opts)
        nil
      end

      # Add a foreign key with the given name and referencing the given table.
      # See CreateTableGenerator#column for the available options.
# # You can also pass an array of column names for creating composite foreign # keys. In this case, it will assume the columns exist and will only add # the constraint. You can provide a :name option to name the constraint. # # NOTE: If you need to add a foreign key constraint to a single existing column # use the composite key syntax even if it is only one column. # # add_foreign_key(:artist_id, :table) # ADD COLUMN artist_id integer REFERENCES table # add_foreign_key([:name], :table) # ADD FOREIGN KEY (name) REFERENCES table # # PostgreSQL specific options: # # :not_valid :: Set to true to add the constraint with the NOT VALID syntax. # This makes it so that future inserts must respect referential # integrity, but allows the constraint to be added even if existing # column values reference rows that do not exist. After all the # existing data has been cleaned up, validate_constraint can be used # to mark the constraint as valid. Note that this option only makes # sense when using an array of columns. def add_foreign_key(name, table, opts = OPTS) return add_composite_foreign_key(name, table, opts) if name.is_a?(Array) add_column(name, Integer, {:table=>table}.merge!(opts)) end # Add a full text index on the given columns. # See CreateTableGenerator#full_text_index for available options. def add_full_text_index(columns, opts = OPTS) add_index(columns, {:type=>:full_text}.merge!(opts)) end # Add an index on the given columns. See # CreateTableGenerator#index for available options. # # add_index(:artist_id) # CREATE INDEX table_artist_id_index ON table (artist_id) def add_index(columns, opts = OPTS) @operations << {:op => :add_index, :columns => Array(columns)}.merge!(opts) nil end # Add a primary key. See CreateTableGenerator#column # for the available options. Like +add_foreign_key+, if you specify # the column name as an array, it just creates a constraint: # # add_primary_key(:id) # ADD COLUMN id serial PRIMARY KEY # add_primary_key([:artist_id, :name]) # ADD PRIMARY KEY (artist_id, name) def add_primary_key(name, opts = OPTS) return add_composite_primary_key(name, opts) if name.is_a?(Array) opts = @db.serial_primary_key_options.merge(opts) add_column(name, opts.delete(:type), opts) end # Add a spatial index on the given columns. # See CreateTableGenerator#index for available options. def add_spatial_index(columns, opts = OPTS) add_index(columns, {:type=>:spatial}.merge!(opts)) end # Remove a column from the table. # # drop_column(:artist_id) # DROP COLUMN artist_id # drop_column(:artist_id, cascade: true) # DROP COLUMN artist_id CASCADE # # Options: # # :cascade :: CASCADE the operation, dropping other objects that depend on # the dropped column. # # PostgreSQL specific options: # :if_exists :: Use IF EXISTS, so no error is raised if the column does not # exist. def drop_column(name, opts=OPTS) @operations << {:op => :drop_column, :name => name}.merge!(opts) nil end # Remove a constraint from the table: # # drop_constraint(:unique_name) # DROP CONSTRAINT unique_name # drop_constraint(:unique_name, cascade: true) # DROP CONSTRAINT unique_name CASCADE # # MySQL/SQLite specific options: # # :type :: Set the type of constraint to drop, either :primary_key, :foreign_key, # or :unique. def drop_constraint(name, opts=OPTS) @operations << {:op => :drop_constraint, :name => name}.merge!(opts) nil end # Remove a foreign key and the associated column from the table. General options: # # :name :: The name of the constraint to drop. 
If not given, uses the same name # that would be used by add_foreign_key with the same columns. # # NOTE: If you want to drop only the foreign key constraint but keep the column, # use the composite key syntax even if it is only one column. # # drop_foreign_key(:artist_id) # DROP CONSTRAINT table_artist_id_fkey, DROP COLUMN artist_id # drop_foreign_key([:name]) # DROP CONSTRAINT table_name_fkey def drop_foreign_key(name, opts=OPTS) if !name.is_a?(Array) && opts[:foreign_key_constraint_name] opts = Hash[opts] opts[:name] = opts[:foreign_key_constraint_name] end drop_composite_foreign_key(Array(name), opts) drop_column(name) unless name.is_a?(Array) end # Remove an index from the table. General options: # # :name :: The name of the index to drop. If not given, uses the same name # that would be used by add_index with the same columns. # # PostgreSQL specific options: # # :cascade :: Cascade the index drop to dependent objects. # :concurrently :: Drop the index using CONCURRENTLY, which doesn't block # operations on the table. Supported in PostgreSQL 9.2+. # :if_exists :: Only drop the index if it already exists. # # drop_index(:artist_id) # DROP INDEX table_artist_id_index # drop_index([:a, :b]) # DROP INDEX table_a_b_index # drop_index([:a, :b], name: :foo) # DROP INDEX foo def drop_index(columns, options=OPTS) @operations << {:op => :drop_index, :columns => Array(columns)}.merge!(options) nil end # Rename one of the table's columns. # # rename_column(:name, :artist_name) # RENAME COLUMN name TO artist_name def rename_column(name, new_name, opts = OPTS) @operations << {:op => :rename_column, :name => name, :new_name => new_name}.merge!(opts) nil end # Modify the default value for one of the table's column. # # set_column_default(:artist_name, 'a') # ALTER COLUMN artist_name SET DEFAULT 'a' # # To remove an existing default value, use +nil+ as the value: # # set_column_default(:artist_name, nil) # ALTER COLUMN artist_name SET DEFAULT NULL # # On MySQL, make sure to use a symbol for the name of the column, as otherwise you # can lose the type and NULL/NOT NULL setting for the column. def set_column_default(name, default) @operations << {:op => :set_column_default, :name => name, :default => default} nil end # Modify the type of one of the table's column. # # set_column_type(:artist_name, 'char(10)') # ALTER COLUMN artist_name TYPE char(10) # # PostgreSQL specific options: # # :using :: Add a USING clause that specifies how to convert existing values to new values. # # On MySQL, make sure to use a symbol for the name of the column, as otherwise you # can lose the default and NULL/NOT NULL setting for the column. def set_column_type(name, type, opts=OPTS) @operations << {:op => :set_column_type, :name => name, :type => type}.merge!(opts) nil end # Set a given column as allowing NULL values. # # set_column_allow_null(:artist_name) # ALTER COLUMN artist_name DROP NOT NULL # # On MySQL, make sure to use a symbol for the name of the column, as otherwise you # can lose the default and type for the column. def set_column_allow_null(name, allow_null=true) @operations << {:op => :set_column_null, :name => name, :null => allow_null} nil end # Set a given column as not allowing NULL values. # # set_column_not_null(:artist_name) # ALTER COLUMN artist_name SET NOT NULL # # On MySQL, make sure to use a symbol for the name of the column, as otherwise you # can lose the default and type for the column. 
def set_column_not_null(name) set_column_allow_null(name, false) end private # Add a composite primary key constraint def add_composite_primary_key(columns, opts) @operations << {:op => :add_constraint, :type => :primary_key, :columns => columns}.merge!(opts) nil end # Add a composite foreign key constraint def add_composite_foreign_key(columns, table, opts) @operations << {:op => :add_constraint, :type => :foreign_key, :columns => columns, :table => table}.merge!(opts) nil end # Drop a composite foreign key constraint def drop_composite_foreign_key(columns, opts) @operations << opts.merge(:op => :drop_constraint, :type => :foreign_key, :columns => columns) nil end end end end sequel-5.63.0/lib/sequel/database/schema_methods.rb000066400000000000000000001172231434214120600222260ustar00rootroot00000000000000# frozen-string-literal: true module Sequel class Database # --------------------- # :section: 2 - Methods that modify the database schema # These methods execute code on the database that modifies the database's schema. # --------------------- # The order of column modifiers to use when defining a column. COLUMN_DEFINITION_ORDER = [:collate, :default, :null, :unique, :primary_key, :auto_increment, :references].freeze # The alter table operations that are combinable. COMBINABLE_ALTER_TABLE_OPS = [:add_column, :drop_column, :rename_column, :set_column_type, :set_column_default, :set_column_null, :add_constraint, :drop_constraint].freeze # Adds a column to the specified table. This method expects a column name, # a datatype and optionally a hash with additional constraints and options: # # DB.add_column :items, :name, String, unique: true, null: false # DB.add_column :items, :category, String, default: 'ruby' # # See alter_table. def add_column(table, *args) alter_table(table) {add_column(*args)} end # Adds an index to a table for the given columns: # # DB.add_index :posts, :title # DB.add_index :posts, [:author, :title], unique: true # # Options: # # :ignore_errors :: Ignore any DatabaseErrors that are raised # :name :: Name to use for index instead of default # # See alter_table. def add_index(table, columns, options=OPTS) e = options[:ignore_errors] begin alter_table(table){add_index(columns, options)} rescue DatabaseError raise unless e end nil end # Alters the given table with the specified block. Example: # # DB.alter_table :items do # add_column :category, String, default: 'ruby' # drop_column :category # rename_column :cntr, :counter # set_column_type :value, Float # set_column_default :value, 4.2 # add_index [:group, :category] # drop_index [:group, :category] # end # # Note that +add_column+ accepts all the options available for column # definitions using create_table, and +add_index+ accepts all the options # available for index definition. # # See Schema::AlterTableGenerator and the {Migrations guide}[rdoc-ref:doc/migration.rdoc]. def alter_table(name, &block) generator = alter_table_generator(&block) remove_cached_schema(name) apply_alter_table_generator(name, generator) nil end # Return a new Schema::AlterTableGenerator instance with the receiver as # the database and the given block. def alter_table_generator(&block) alter_table_generator_class.new(self, &block) end # Create a join table using a hash of foreign keys to referenced # table names. 
Example: # # create_join_table(cat_id: :cats, dog_id: :dogs) # # CREATE TABLE cats_dogs ( # # cat_id integer NOT NULL REFERENCES cats, # # dog_id integer NOT NULL REFERENCES dogs, # # PRIMARY KEY (cat_id, dog_id) # # ) # # CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs(dog_id, cat_id) # # The primary key and index are used so that almost all operations # on the table can benefit from one of the two indexes, and the primary # key ensures that entries in the table are unique, which is the typical # desire for a join table. # # The default table name this will create is the sorted version of the two # hash values, joined by an underscore. So the following two method calls # create the same table: # # create_join_table(cat_id: :cats, dog_id: :dogs) # cats_dogs # create_join_table(dog_id: :dogs, cat_id: :cats) # cats_dogs # # You can provide column options by making the values in the hash # be option hashes, so long as the option hashes have a :table # entry giving the table referenced: # # create_join_table(cat_id: {table: :cats, type: :Bignum}, dog_id: :dogs) # # You can provide a second argument which is a table options hash: # # create_join_table({cat_id: :cats, dog_id: :dogs}, temp: true) # # Some table options are handled specially: # # :index_options :: The options to pass to the index # :name :: The name of the table to create # :no_index :: Set to true not to create the second index. # :no_primary_key :: Set to true to not create the primary key. def create_join_table(hash, options=OPTS) keys = hash.keys.sort create_table(join_table_name(hash, options), options) do keys.each do |key| v = hash[key] unless v.is_a?(Hash) v = {:table=>v} end v[:null] = false unless v.has_key?(:null) foreign_key(key, v) end primary_key(keys) unless options[:no_primary_key] index(keys.reverse, options[:index_options] || OPTS) unless options[:no_index] end nil end # Forcibly create a join table, attempting to drop it if it already exists, then creating it. def create_join_table!(hash, options=OPTS) drop_table?(join_table_name(hash, options)) create_join_table(hash, options) end # Creates the join table unless it already exists. def create_join_table?(hash, options=OPTS) if supports_create_table_if_not_exists? && options[:no_index] create_join_table(hash, options.merge(:if_not_exists=>true)) elsif !table_exists?(join_table_name(hash, options)) create_join_table(hash, options) end end # Creates a table with the columns given in the provided block: # # DB.create_table :posts do # primary_key :id # column :title, String # String :content # index :title # end # # General options: # :as :: Create the table using the value, which should be either a # dataset or a literal SQL string. If this option is used, # a block should not be given to the method. # :ignore_index_errors :: Ignore any errors when creating indexes. # :temp :: Create the table as a temporary table. # # MySQL specific options: # :charset :: The character set to use for the table. # :collate :: The collation to use for the table. # :engine :: The table engine to use for the table. # # PostgreSQL specific options: # :on_commit :: Either :preserve_rows (default), :drop or :delete_rows. Should # only be specified when creating a temporary table. # :foreign :: Create a foreign table. The value should be the name of the # foreign server that was specified in CREATE SERVER. # :inherits :: Inherit from a different table. An array can be # specified to inherit from multiple tables. # :unlogged :: Create the table as an unlogged table. 
# :options :: The OPTIONS clause to use for foreign tables. Should be a hash # where keys are option names and values are option values. Note # that option names are unquoted, so you should not use untrusted # keys. # :tablespace :: The tablespace to use for the table. # # SQLite specific options: # :strict :: Create a STRICT table, which checks that the values for the columns # are the correct type (similar to all other SQL databases). Note that # when using this option, all column types used should be one of the # following: +int+, +integer+, +real+, +text+, +blob+, and +any+. # The +any+ type is treated like a SQLite column in a non-strict table, # allowing any type of data to be stored. This option is supported on # SQLite 3.37.0+. # # See Schema::CreateTableGenerator and the {"Schema Modification" guide}[rdoc-ref:doc/schema_modification.rdoc]. def create_table(name, options=OPTS, &block) remove_cached_schema(name) if sql = options[:as] raise(Error, "can't provide both :as option and block to create_table") if block create_table_as(name, sql, options) else generator = options[:generator] || create_table_generator(&block) create_table_from_generator(name, generator, options) create_table_indexes_from_generator(name, generator, options) end nil end # Forcibly create a table, attempting to drop it if it already exists, then creating it. # # DB.create_table!(:a){Integer :a} # # SELECT NULL FROM a LIMIT 1 -- check existence # # DROP TABLE a -- drop table if already exists # # CREATE TABLE a (a integer) def create_table!(name, options=OPTS, &block) drop_table?(name) create_table(name, options, &block) end # Creates the table unless the table already exists. # # DB.create_table?(:a){Integer :a} # # SELECT NULL FROM a LIMIT 1 -- check existence # # CREATE TABLE a (a integer) -- if it doesn't already exist def create_table?(name, options=OPTS, &block) options = options.dup generator = options[:generator] ||= create_table_generator(&block) if generator.indexes.empty? && supports_create_table_if_not_exists? create_table(name, options.merge!(:if_not_exists=>true)) elsif !table_exists?(name) create_table(name, options) end end # Return a new Schema::CreateTableGenerator instance with the receiver as # the database and the given block. def create_table_generator(&block) create_table_generator_class.new(self, &block) end # Creates a view, replacing a view with the same name if one already exists. # # DB.create_or_replace_view(:some_items, "SELECT * FROM items WHERE price < 100") # DB.create_or_replace_view(:some_items, DB[:items].where(category: 'ruby')) # # For databases where replacing a view is not natively supported, support # is emulated by dropping a view with the same name before creating the view. def create_or_replace_view(name, source, options = OPTS) if supports_create_or_replace_view? 
        options = options.merge(:replace=>true)
      else
        swallow_database_error{drop_view(name)}
      end

      create_view(name, source, options)
      nil
    end

    # Creates a view based on a dataset or an SQL string:
    #
    #   DB.create_view(:cheap_items, "SELECT * FROM items WHERE price < 100")
    #   # CREATE VIEW cheap_items AS
    #   # SELECT * FROM items WHERE price < 100
    #
    #   DB.create_view(:ruby_items, DB[:items].where(category: 'ruby'))
    #   # CREATE VIEW ruby_items AS
    #   # SELECT * FROM items WHERE (category = 'ruby')
    #
    #   DB.create_view(:checked_items, DB[:items].where(:foo), check: true)
    #   # CREATE VIEW checked_items AS
    #   # SELECT * FROM items WHERE foo
    #   # WITH CHECK OPTION
    #
    #   DB.create_view(:bar_items, DB[:items].select(:foo), columns: [:bar])
    #   # CREATE VIEW bar_items (bar) AS
    #   # SELECT foo FROM items
    #
    # Options:
    # :columns :: The column names to use for the view. If not given,
    #             automatically determined based on the input dataset.
    # :check :: Adds a WITH CHECK OPTION clause, so that attempting to modify
    #           rows in the underlying table that would not be returned by the
    #           view is not allowed. This can be set to :local to use WITH
    #           LOCAL CHECK OPTION.
    #
    # PostgreSQL/SQLite specific option:
    # :temp :: Create a temporary view, automatically dropped on disconnect.
    #
    # PostgreSQL specific options:
    # :materialized :: Creates a materialized view, similar to a regular view,
    #                  but backed by a physical table.
    # :recursive :: Creates a recursive view. As columns must be specified for
    #               recursive views, you can also set them as the value of this
    #               option. Since a recursive view requires a union that isn't
    #               in a subquery, if you are providing a Dataset as the source
    #               argument, it should probably call the union method with the
    #               all: true and from_self: false options.
    # :security_invoker :: Set the security_invoker property on the view, making
    #                      access to the view use the current user's permissions,
    #                      instead of the view owner's permissions.
    # :tablespace :: The tablespace to use for materialized views.
    def create_view(name, source, options = OPTS)
      execute_ddl(create_view_sql(name, source, options))
      remove_cached_schema(name)
      nil
    end

    # Removes a column from the specified table:
    #
    #   DB.drop_column :items, :category
    #
    # See alter_table.
    def drop_column(table, *args)
      alter_table(table) {drop_column(*args)}
    end

    # Removes an index for the given table and column(s):
    #
    #   DB.drop_index :posts, :title
    #   DB.drop_index :posts, [:author, :title]
    #
    # See alter_table.
    def drop_index(table, columns, options=OPTS)
      alter_table(table){drop_index(columns, options)}
    end

    # Drop the join table that would have been created with the
    # same arguments to create_join_table:
    #
    #   drop_join_table(cat_id: :cats, dog_id: :dogs)
    #   # DROP TABLE cats_dogs
    def drop_join_table(hash, options=OPTS)
      drop_table(join_table_name(hash, options), options)
    end

    # Drops one or more tables corresponding to the given names:
    #
    #   DB.drop_table(:posts) # DROP TABLE posts
    #   DB.drop_table(:posts, :comments)
    #   DB.drop_table(:posts, :comments, cascade: true)
    def drop_table(*names)
      options = names.last.is_a?(Hash) ? names.pop : OPTS
      names.each do |n|
        execute_ddl(drop_table_sql(n, options))
        remove_cached_schema(n)
      end
      nil
    end

    # Drops the table if it already exists. If it doesn't exist,
    # does nothing.
    #
    #   DB.drop_table?(:a)
    #   # SELECT NULL FROM a LIMIT 1 -- check existence
    #   # DROP TABLE a -- if it already exists
    def drop_table?(*names)
      options = names.last.is_a?(Hash) ? names.pop : OPTS
      if supports_drop_table_if_exists?
options = options.merge(:if_exists=>true) names.each do |name| drop_table(name, options) end else names.each do |name| drop_table(name, options) if table_exists?(name) end end nil end # Drops one or more views corresponding to the given names: # # DB.drop_view(:cheap_items) # DB.drop_view(:cheap_items, :pricey_items) # DB.drop_view(:cheap_items, :pricey_items, cascade: true) # DB.drop_view(:cheap_items, :pricey_items, if_exists: true) # # Options: # :cascade :: Also drop objects depending on this view. # :if_exists :: Do not raise an error if the view does not exist. # # PostgreSQL specific options: # :materialized :: Drop a materialized view. def drop_view(*names) options = names.last.is_a?(Hash) ? names.pop : OPTS names.each do |n| execute_ddl(drop_view_sql(n, options)) remove_cached_schema(n) end nil end # Renames a table: # # DB.tables #=> [:items] # DB.rename_table :items, :old_items # DB.tables #=> [:old_items] def rename_table(name, new_name) execute_ddl(rename_table_sql(name, new_name)) remove_cached_schema(name) nil end # Renames a column in the specified table. This method expects the current # column name and the new column name: # # DB.rename_column :items, :cntr, :counter # # See alter_table. def rename_column(table, *args) alter_table(table) {rename_column(*args)} end # Sets the default value for the given column in the given table: # # DB.set_column_default :items, :category, 'perl!' # # See alter_table. def set_column_default(table, *args) alter_table(table) {set_column_default(*args)} end # Set the data type for the given column in the given table: # # DB.set_column_type :items, :price, :float # # See alter_table. def set_column_type(table, *args) alter_table(table) {set_column_type(*args)} end private # Apply the changes in the given alter table ops to the table given by name. def apply_alter_table(name, ops) alter_table_sql_list(name, ops).each{|sql| execute_ddl(sql)} end # Apply the operations in the given generator to the table given by name. def apply_alter_table_generator(name, generator) ops = generator.operations unless can_add_primary_key_constraint_on_nullable_columns? if add_pk = ops.find{|op| op[:op] == :add_constraint && op[:type] == :primary_key} ops = add_pk[:columns].map{|column| {:op => :set_column_null, :name => column, :null => false}} + ops end end apply_alter_table(name, ops) end # The class used for alter_table generators. def alter_table_generator_class Schema::AlterTableGenerator end # SQL fragment for given alter table operation. def alter_table_op_sql(table, op) meth = "alter_table_#{op[:op]}_sql" if respond_to?(meth, true) # Allow calling private methods as alter table op sql methods are private send(meth, table, op) else raise Error, "Unsupported ALTER TABLE operation: #{op[:op]}" end end def alter_table_add_column_sql(table, op) "ADD COLUMN #{column_definition_sql(op)}" end def alter_table_drop_column_sql(table, op) "DROP COLUMN #{quote_identifier(op[:name])}#{' CASCADE' if op[:cascade]}" end def alter_table_rename_column_sql(table, op) "RENAME COLUMN #{quote_identifier(op[:name])} TO #{quote_identifier(op[:new_name])}" end def alter_table_set_column_type_sql(table, op) "ALTER COLUMN #{quote_identifier(op[:name])} TYPE #{type_literal(op)}" end def alter_table_set_column_default_sql(table, op) "ALTER COLUMN #{quote_identifier(op[:name])} SET DEFAULT #{literal(op[:default])}" end def alter_table_set_column_null_sql(table, op) "ALTER COLUMN #{quote_identifier(op[:name])} #{op[:null] ? 
'DROP' : 'SET'} NOT NULL" end def alter_table_add_constraint_sql(table, op) "ADD #{constraint_definition_sql(op)}" end def alter_table_drop_constraint_sql(table, op) quoted_name = quote_identifier(op[:name]) if op[:name] if op[:type] == :foreign_key quoted_name ||= quote_identifier(foreign_key_name(table, op[:columns])) end "DROP CONSTRAINT #{quoted_name}#{' CASCADE' if op[:cascade]}" end # The SQL to execute to modify the table. op # should be one of the operations returned by the AlterTableGenerator. def alter_table_sql(table, op) case op[:op] when :add_index index_definition_sql(table, op) when :drop_index drop_index_sql(table, op) else if sql = alter_table_op_sql(table, op) "ALTER TABLE #{quote_schema_table(table)} #{sql}" end end end # Array of SQL statements used to modify the table, # corresponding to changes specified by the operations. def alter_table_sql_list(table, operations) if supports_combining_alter_table_ops? grouped_ops = [] last_combinable = false operations.each do |op| if combinable_alter_table_op?(op) if sql = alter_table_op_sql(table, op) grouped_ops << [] unless last_combinable grouped_ops.last << sql last_combinable = true end elsif sql = alter_table_sql(table, op) Array(sql).each{|s| grouped_ops << s} last_combinable = false end end grouped_ops.map do |gop| if gop.is_a?(Array) "ALTER TABLE #{quote_schema_table(table)} #{gop.join(', ')}" else gop end end else operations.map{|op| alter_table_sql(table, op)}.flatten.compact end end # The SQL string specifying the autoincrement property, generally used by # primary keys. def auto_increment_sql 'AUTOINCREMENT' end # The order of the column definition, as an array of symbols. def column_definition_order COLUMN_DEFINITION_ORDER end # SQL fragment containing the column creation SQL for the given column. def column_definition_sql(column) sql = String.new sql << "#{quote_identifier(column[:name])} #{type_literal(column)}" column_definition_order.each{|m| send(:"column_definition_#{m}_sql", sql, column)} sql end # Add auto increment SQL fragment to column creation SQL. def column_definition_auto_increment_sql(sql, column) sql << " #{auto_increment_sql}" if column[:auto_increment] end # Add collate SQL fragment to column creation SQL. def column_definition_collate_sql(sql, column) if collate = column[:collate] sql << " COLLATE #{collate}" end end # Add default SQL fragment to column creation SQL. def column_definition_default_sql(sql, column) sql << " DEFAULT #{literal(column[:default])}" if column.include?(:default) end # Add null/not null SQL fragment to column creation SQL. def column_definition_null_sql(sql, column) null = column.fetch(:null, column[:allow_null]) if null.nil? && !can_add_primary_key_constraint_on_nullable_columns? && column[:primary_key] null = false end case null when false sql << ' NOT NULL' when true sql << ' NULL' end end # Add primary key SQL fragment to column creation SQL. def column_definition_primary_key_sql(sql, column) if column[:primary_key] if name = column[:primary_key_constraint_name] sql << " CONSTRAINT #{quote_identifier(name)}" end sql << " " << primary_key_constraint_sql_fragment(column) constraint_deferrable_sql_append(sql, column[:primary_key_deferrable]) end end # Add foreign key reference SQL fragment to column creation SQL.
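# Respects the :foreign_key_constraint_name option for naming the constraint.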
def column_definition_references_sql(sql, column) if column[:table] if name = column[:foreign_key_constraint_name] sql << " CONSTRAINT #{quote_identifier(name)}" end sql << column_references_column_constraint_sql(column) end end # Add unique constraint SQL fragment to column creation SQL. def column_definition_unique_sql(sql, column) if column[:unique] if name = column[:unique_constraint_name] sql << " CONSTRAINT #{quote_identifier(name)}" end sql << ' ' << unique_constraint_sql_fragment(column) constraint_deferrable_sql_append(sql, column[:unique_deferrable]) end end # SQL for all given columns, used inside a CREATE TABLE block. def column_list_sql(generator) (generator.columns.map{|c| column_definition_sql(c)} + generator.constraints.map{|c| constraint_definition_sql(c)}).join(', ') end # SQL fragment for column foreign key references (column constraints) def column_references_column_constraint_sql(column) column_references_sql(column) end # SQL fragment for column foreign key references def column_references_sql(column) sql = String.new sql << " REFERENCES #{quote_schema_table(column[:table])}" sql << "(#{Array(column[:key]).map{|x| quote_identifier(x)}.join(', ')})" if column[:key] sql << " ON DELETE #{on_delete_clause(column[:on_delete])}" if column[:on_delete] sql << " ON UPDATE #{on_update_clause(column[:on_update])}" if column[:on_update] constraint_deferrable_sql_append(sql, column[:deferrable]) sql end # SQL fragment for table foreign key references (table constraints) def column_references_table_constraint_sql(constraint) "FOREIGN KEY #{literal(constraint[:columns])}#{column_references_sql(constraint)}" end # Whether the given alter table operation is combinable. def combinable_alter_table_op?(op) COMBINABLE_ALTER_TABLE_OPS.include?(op[:op]) end # SQL fragment specifying a constraint on a table. def constraint_definition_sql(constraint) sql = String.new sql << "CONSTRAINT #{quote_identifier(constraint[:name])} " if constraint[:name] case constraint[:type] when :check check = constraint[:check] check = check.first if check.is_a?(Array) && check.length == 1 check = filter_expr(check) check = "(#{check})" unless check[0..0] == '(' && check[-1..-1] == ')' sql << "CHECK #{check}" when :primary_key sql << "#{primary_key_constraint_sql_fragment(constraint)} #{literal(constraint[:columns])}" when :foreign_key sql << column_references_table_constraint_sql(constraint.merge(:deferrable=>nil)) when :unique sql << "#{unique_constraint_sql_fragment(constraint)} #{literal(constraint[:columns])}" else raise Error, "Invalid constraint type #{constraint[:type]}, should be :check, :primary_key, :foreign_key, or :unique" end constraint_deferrable_sql_append(sql, constraint[:deferrable]) sql end # SQL fragment specifying the deferrable constraint attributes. def constraint_deferrable_sql_append(sql, defer) case defer when nil when false sql << ' NOT DEFERRABLE' when :immediate sql << ' DEFERRABLE INITIALLY IMMEDIATE' else sql << ' DEFERRABLE INITIALLY DEFERRED' end end # Execute the create table statements using the generator. def create_table_from_generator(name, generator, options) execute_ddl(create_table_sql(name, generator, options)) end # The class used for create_table generators. def create_table_generator_class Schema::CreateTableGenerator end # Execute the create index statements using the generator. 
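# Each index is created in its own savepoint when transactional DDL is
# supported, and failures are ignored if the :ignore_index_errors or
# :if_not_exists option was given.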
def create_table_indexes_from_generator(name, generator, options) e = options[:ignore_index_errors] || options[:if_not_exists] generator.indexes.each do |index| begin pr = proc{index_sql_list(name, [index]).each{|sql| execute_ddl(sql)}} supports_transactional_ddl? ? transaction(:savepoint=>:only, &pr) : pr.call rescue Error raise unless e end end end # SQL statement for creating a table with the given name, columns, and options def create_table_sql(name, generator, options) unless supports_named_column_constraints? # Split column constraints into table constraints if they have a name generator.columns.each do |c| if (constraint_name = c.delete(:foreign_key_constraint_name)) && (table = c.delete(:table)) opts = {} opts[:name] = constraint_name [:key, :on_delete, :on_update, :deferrable].each{|k| opts[k] = c[k]} generator.foreign_key([c[:name]], table, opts) end if (constraint_name = c.delete(:unique_constraint_name)) && c.delete(:unique) generator.unique(c[:name], :name=>constraint_name) end if (constraint_name = c.delete(:primary_key_constraint_name)) && c.delete(:primary_key) generator.primary_key([c[:name]], :name=>constraint_name) end end end unless can_add_primary_key_constraint_on_nullable_columns? if pk = generator.constraints.find{|op| op[:type] == :primary_key} pk[:columns].each do |column| if matched_column = generator.columns.find{|gc| gc[:name] == column} matched_column[:null] = false end end end end "#{create_table_prefix_sql(name, options)} (#{column_list_sql(generator)})" end # Run SQL statement to create the table with the given name from the given # SELECT sql statement. def create_table_as(name, sql, options) sql = sql.sql if sql.is_a?(Sequel::Dataset) run(create_table_as_sql(name, sql, options)) end # SQL statement for creating a table from the result of a SELECT statement. # +sql+ should be a string representing a SELECT query. def create_table_as_sql(name, sql, options) "#{create_table_prefix_sql(name, options)} AS #{sql}" end # SQL fragment for initial part of CREATE TABLE statement def create_table_prefix_sql(name, options) "CREATE #{temporary_table_sql if options[:temp]}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}" end # SQL fragment for initial part of CREATE VIEW statement def create_view_prefix_sql(name, options) create_view_sql_append_columns("CREATE #{'OR REPLACE 'if options[:replace]}VIEW #{quote_schema_table(name)}", options[:columns]) end # SQL statement for creating a view. def create_view_sql(name, source, options) source = source.sql if source.is_a?(Dataset) sql = String.new sql << "#{create_view_prefix_sql(name, options)} AS #{source}" if check = options[:check] sql << " WITH#{' LOCAL' if check == :local} CHECK OPTION" end sql end # Append the column list to the SQL, if a column list is given. def create_view_sql_append_columns(sql, columns) if columns sql += ' (' schema_utility_dataset.send(:identifier_list_append, sql, columns) sql << ')' end sql end # Default index name for the table and columns, may be too long # for certain databases. def default_index_name(table_name, columns) schema, table = schema_and_table(table_name) "#{"#{schema}_" if schema}#{table}_#{columns.map{|c| [String, Symbol].any?{|cl| c.is_a?(cl)} ? c : literal(c).gsub(/\W/, '_')}.join('_')}_index" end # Get foreign key name for given table and columns. def foreign_key_name(table_name, columns) keys = foreign_key_list(table_name).select{|key| key[:columns] == columns} raise(Error, "#{keys.empty? ? 
'Missing' : 'Ambiguous'} foreign key for #{columns.inspect}") unless keys.size == 1 keys.first[:name] end # The SQL to drop an index for the table. def drop_index_sql(table, op) "DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}" end # SQL DDL statement to drop the table with the given name. def drop_table_sql(name, options) "DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}" end # SQL DDL statement to drop a view with the given name. def drop_view_sql(name, options) "DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}" end # Proxy the filter_expr call to the dataset, used for creating constraints. # Support passing Proc arguments as blocks, as well as treating plain strings # as literal strings, so that previous migrations that used this API do not break. def filter_expr(arg=nil, &block) if arg.is_a?(Proc) && !block block = arg arg = nil elsif arg.is_a?(String) arg = Sequel.lit(arg) elsif arg.is_a?(Array) if arg.first.is_a?(String) arg = Sequel.lit(*arg) elsif arg.length > 1 arg = Sequel.&(*arg) end end schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, arg, &block)) end # SQL statement for creating an index for the table with the given name # and index specifications. def index_definition_sql(table_name, index) index_name = index[:name] || default_index_name(table_name, index[:columns]) raise Error, "Index types are not supported for this database" if index[:type] raise Error, "Partial indexes are not supported for this database" if index[:where] && !supports_partial_indexes? "CREATE #{'UNIQUE ' if index[:unique]}INDEX #{quote_identifier(index_name)} ON #{quote_schema_table(table_name)} #{literal(index[:columns])}#{" WHERE #{filter_expr(index[:where])}" if index[:where]}" end # Array of SQL statements, one for each index specification, # for the given table. def index_sql_list(table_name, indexes) indexes.map{|i| index_definition_sql(table_name, i)} end # Extract the join table name from the arguments given to create_join_table. # Also does argument validation for the create_join_table method. def join_table_name(hash, options) entries = hash.values raise Error, "must have 2 entries in hash given to (create|drop)_join_table" unless entries.length == 2 if options[:name] options[:name] else table_names = entries.map{|e| join_table_name_extract(e)} table_names.map(&:to_s).sort.join('_') end end # Extract an individual join table name, which should either be a string # or symbol, or a hash containing one of those as the value for :table. def join_table_name_extract(entry) case entry when Symbol, String entry when Hash join_table_name_extract(entry[:table]) else raise Error, "can't extract table name from #{entry.inspect}" end end # SQL fragment to use for ON DELETE, based on the given action. # The following actions are recognized: # # :cascade :: Delete rows referencing this row. # :no_action :: Raise an error if other rows reference this # row, allow deferring of the integrity check. # This is the default. # :restrict :: Raise an error if other rows reference this row, # but do not allow deferring the integrity check. # :set_default :: Set columns referencing this row to their default value. # :set_null :: Set columns referencing this row to NULL. # # Any other object given is just converted to a string, with "_" converted to " " and upcased. 
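#
# For example, in a hypothetical schema (the table names here are
# illustrative only):
#
#   DB.create_table(:albums) do
#     primary_key :id
#     foreign_key :artist_id, :artists, on_delete: :cascade  # ON DELETE CASCADE
#     foreign_key :label_id, :labels, on_delete: :set_null   # ON DELETE SET NULL
#   end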
def on_delete_clause(action) action.to_s.gsub("_", " ").upcase end # Alias of #on_delete_clause, since the two usually behave the same. def on_update_clause(action) on_delete_clause(action) end # Add fragment for primary key specification, separated for easier overriding. def primary_key_constraint_sql_fragment(_) 'PRIMARY KEY' end # Proxy the quote_schema_table method to the dataset def quote_schema_table(table) schema_utility_dataset.quote_schema_table(table) end # SQL statement for renaming a table. def rename_table_sql(name, new_name) "ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_schema_table(new_name)}" end # Split the schema information from the table def schema_and_table(table_name) schema_utility_dataset.schema_and_table(table_name) end # Return true if the given column schema represents an autoincrementing primary key. def schema_autoincrementing_primary_key?(schema) !!(schema[:primary_key] && schema[:auto_increment]) end # The dataset to use for proxying certain schema methods. def schema_utility_dataset @default_dataset end # Split the schema information from the table def split_qualifiers(table_name) schema_utility_dataset.split_qualifiers(table_name) end # SQL fragment for temporary table def temporary_table_sql 'TEMPORARY ' end # SQL fragment specifying the type of a given column. def type_literal(column) case column[:type] when Class type_literal_generic(column) when :Bignum type_literal_generic_bignum_symbol(column) else type_literal_specific(column) end end # SQL fragment specifying the full type of a column, # considering the type with possible modifiers. def type_literal_generic(column) meth = "type_literal_generic_#{column[:type].name.to_s.downcase}" if respond_to?(meth, true) # Allow calling private methods as the type_literal_generic_* methods are private send(meth, column) else raise Error, "Unsupported ruby class used as database type: #{column[:type]}" end end # Alias for type_literal_generic_numeric, to make overriding in a subclass easier. def type_literal_generic_bigdecimal(column) type_literal_generic_numeric(column) end # Sequel uses the bigint type by default for :Bignum symbol. def type_literal_generic_bignum_symbol(column) :bigint end # Sequel uses the date type by default for Dates. def type_literal_generic_date(column) :date end # Sequel uses the timestamp type by default for DateTimes. def type_literal_generic_datetime(column) :timestamp end # Alias for type_literal_generic_trueclass, to make overriding in a subclass easier. def type_literal_generic_falseclass(column) type_literal_generic_trueclass(column) end # Sequel uses the blob type by default for Files. def type_literal_generic_file(column) :blob end # Alias for type_literal_generic_integer, to make overriding in a subclass easier. def type_literal_generic_fixnum(column) type_literal_generic_integer(column) end # Sequel uses the double precision type by default for Floats. def type_literal_generic_float(column) :"double precision" end # Sequel uses the integer type by default for integers. def type_literal_generic_integer(column) :integer end # Sequel uses the numeric type by default for Numerics and BigDecimals. # If a size is given, it is used, otherwise, it will default to whatever # the database default is for an unsized value. def type_literal_generic_numeric(column) column[:size] ? "numeric(#{Array(column[:size]).join(', ')})" : :numeric end # Sequel uses the varchar type by default for Strings. If a # size isn't present, Sequel assumes a size of 255.
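#
# For example, in a create_table block (a sketch; the sizes shown assume
# default_string_column_size has not been changed):
#
#   String :name            # varchar(255)
#   String :code, size: 10  # varchar(10)
#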
If the # :fixed option is used, Sequel uses the char type. If the # :text option is used, Sequel uses the :text type. def type_literal_generic_string(column) if column[:text] uses_clob_for_text? ? :clob : :text elsif column[:fixed] "char(#{column[:size]||default_string_column_size})" else "varchar(#{column[:size]||default_string_column_size})" end end # Sequel uses the timestamp type by default for Time values. # If the :only_time option is used, the time type is used. def type_literal_generic_time(column) if column[:only_time] type_literal_generic_only_time(column) else type_literal_generic_datetime(column) end end # Use time by default for Time values if :only_time option is used. def type_literal_generic_only_time(column) :time end # Sequel uses the boolean type by default for TrueClass and FalseClass. def type_literal_generic_trueclass(column) :boolean end # SQL fragment for the given type of a column if the column is not one of the # generic types specified with a ruby class. def type_literal_specific(column) type = column[:type] type = "double precision" if type.to_s == 'double' column[:size] ||= default_string_column_size if type.to_s == 'varchar' elements = column[:size] || column[:elements] "#{type}#{literal(Array(elements)) if elements}#{' UNSIGNED' if column[:unsigned]}" end # Add fragment for unique specification, separated for easier overriding. def unique_constraint_sql_fragment(_) 'UNIQUE' end # Whether clob should be used for String text: true columns. def uses_clob_for_text? false end end end sequel-5.63.0/lib/sequel/database/transactions.rb000066400000000000000000000501501434214120600217460ustar00rootroot00000000000000# frozen-string-literal: true module Sequel class Database # --------------------- # :section: 8 - Methods related to database transactions # Database transactions make multiple queries atomic, so # that either all of the queries take effect or none of # them do. # --------------------- TRANSACTION_ISOLATION_LEVELS = {:uncommitted=>'READ UNCOMMITTED'.freeze, :committed=>'READ COMMITTED'.freeze, :repeatable=>'REPEATABLE READ'.freeze, :serializable=>'SERIALIZABLE'.freeze}.freeze # The default transaction isolation level for this database, # used for all future transactions. For MSSQL, this should be set # to something if you ever plan to use the :isolation option to # Database#transaction, as on MSSQL it affects all future transactions # on the same connection. attr_accessor :transaction_isolation_level # If a transaction is not currently in progress, yield to the block immediately. # Otherwise, add the block to the list of blocks to call after the currently # in progress transaction commits (and only if it commits). # Options: # :savepoint :: If currently inside a savepoint, only run this hook on transaction # commit if all enclosing savepoints have been released. # :server :: The server/shard to use. def after_commit(opts=OPTS, &block) raise Error, "must provide block to after_commit" unless block synchronize(opts[:server]) do |conn| if h = _trans(conn) raise Error, "cannot call after_commit in a prepared transaction" if h[:prepare] if opts[:savepoint] && in_savepoint?(conn) add_savepoint_hook(conn, :after_commit, block) else add_transaction_hook(conn, :after_commit, block) end else yield end end end # If a transaction is not currently in progress, ignore the block. # Otherwise, add the block to the list of blocks to call after the currently # in progress transaction rolls back (and only if it rolls back).
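#
# For example (a sketch, with +log+ standing in for a logger of your choice):
#
#   DB.transaction do
#     DB.after_rollback{log.warn('transaction rolled back')}
#     raise Sequel::Rollback # the hook runs after the ROLLBACK is issued
#   end
#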
# Options: # :savepoint :: If currently inside a savepoint, run this hook immediately when # any enclosing savepoint is rolled back, which may be before the transaction # commits or rolls back. # :server :: The server/shard to use. def after_rollback(opts=OPTS, &block) raise Error, "must provide block to after_rollback" unless block synchronize(opts[:server]) do |conn| if h = _trans(conn) raise Error, "cannot call after_rollback in a prepared transaction" if h[:prepare] if opts[:savepoint] && in_savepoint?(conn) add_savepoint_hook(conn, :after_rollback, block) else add_transaction_hook(conn, :after_rollback, block) end end end end # When exiting the transaction block through methods other than an exception # (e.g. normal exit, non-local return, or throw), set the current transaction # to roll back instead of committing. This is designed for use in cases where # you want to perform a non-local return but also want to roll back instead of # committing. # Options: # :cancel :: Cancel the current rollback_on_exit setting, so exiting will commit instead # of rolling back. # :savepoint :: Roll back only the current savepoint if inside a savepoint. # Can also be a positive integer value to roll back that number of enclosing savepoints, # up to and including the transaction itself. # If the database does not support savepoints, this option is ignored and the entire # transaction is affected. # :server :: The server/shard the transaction is being executed on. def rollback_on_exit(opts=OPTS) synchronize(opts[:server]) do |conn| raise Error, "Cannot call Sequel::Database#rollback_on_exit unless inside a transaction" unless h = _trans(conn) rollback = !opts[:cancel] if supports_savepoints? savepoints = h[:savepoints] if level = opts[:savepoint] level = 1 if level == true raise Error, "invalid :savepoint option to Database#rollback_on_exit: #{level.inspect}" unless level.is_a?(Integer) raise Error, "cannot pass nonpositive integer (#{level.inspect}) as :savepoint option to Database#rollback_on_exit" if level < 1 level.times do |i| break unless savepoint = savepoints[-1 - i] savepoint[:rollback_on_exit] = rollback end else savepoints[0][:rollback_on_exit] = rollback end else h[:rollback_on_exit] = rollback end end nil end # Return true if already in a transaction given the options, # false otherwise. Respects the :server option for selecting # a shard. def in_transaction?(opts=OPTS) synchronize(opts[:server]){|conn| !!_trans(conn)} end # Returns a proc that you can call to check if the transaction # has been rolled back. The proc will return nil if the # transaction is still in progress, true if the transaction was # rolled back, and false if it was committed. Raises an # Error if called outside a transaction. Respects the :server # option for selecting a shard. def rollback_checker(opts=OPTS) synchronize(opts[:server]) do |conn| raise Error, "not in a transaction" unless t = _trans(conn) t[:rollback_checker] ||= proc{Sequel.synchronize{t[:rolled_back]}} end end # Starts a database transaction. When a database transaction is used, # either all statements are successful or none of the statements are # successful. Note that MySQL MyISAM tables do not support transactions. # # The following general options are respected: # # :auto_savepoint :: Automatically use a savepoint for Database#transaction calls # inside this transaction block.
# :isolation :: The transaction isolation level to use for this transaction, # should be :uncommitted, :committed, :repeatable, or :serializable, # used if given and the database/adapter supports customizable # transaction isolation levels. # :num_retries :: The number of times to retry if the :retry_on option is used. # The default is 5 times. Can be set to nil to retry indefinitely, # but that is not recommended. # :before_retry :: Proc to execute before retrying if the :retry_on option is used. # Called with two arguments: the number of retry attempts (counting # the current one) and the error the last attempt failed with. # :prepare :: A string to use as the transaction identifier for a # prepared transaction (two-phase commit), if the database/adapter # supports prepared transactions. # :retry_on :: An exception class or array of exception classes for which to # automatically retry the transaction. Can only be set if not inside # an existing transaction. # Note that this should not be used unless the entire transaction # block is idempotent, as otherwise it can cause non-idempotent # behavior to execute multiple times. # :rollback :: Can be set to :reraise to reraise any Sequel::Rollback exceptions # raised, or :always to always roll back even if no exceptions occur # (useful for testing). # :server :: The server to use for the transaction. Set to :default, :read_only, or # whatever symbol you used in the connect string when naming your servers. # :savepoint :: Whether to create a new savepoint for this transaction, # only respected if the database/adapter supports savepoints. By # default Sequel will reuse an existing transaction, so if you want to # use a savepoint you must use this option. If the surrounding transaction # uses :auto_savepoint, you can set this to false to not use a savepoint. # If the value given for this option is :only, it will only create a # savepoint if it is inside a transaction. # # PostgreSQL specific options: # # :deferrable :: (9.1+) If present, set to DEFERRABLE if true or NOT DEFERRABLE if false. # :read_only :: If present, set to READ ONLY if true or READ WRITE if false. # :synchronous :: If non-nil, set synchronous_commit # appropriately. Valid values are true, :on, false, :off, :local (9.1+), # and :remote_write (9.2+). def transaction(opts=OPTS, &block) opts = Hash[opts] if retry_on = opts[:retry_on] tot_retries = opts.fetch(:num_retries, 5) num_retries = 0 begin opts[:retry_on] = nil opts[:retrying] = true transaction(opts, &block) rescue *retry_on => e num_retries += 1 if tot_retries.nil? || num_retries <= tot_retries opts[:before_retry].call(num_retries, e) if opts[:before_retry] retry end raise end else synchronize(opts[:server]) do |conn| if opts[:savepoint] == :only if supports_savepoints? if _trans(conn) opts[:savepoint] = true else return yield(conn) end else opts[:savepoint] = false end end if opts[:savepoint] && !supports_savepoints? raise Sequel::InvalidOperation, "savepoints not supported on #{database_type}" end if already_in_transaction?(conn, opts) if opts[:rollback] == :always && !opts.has_key?(:savepoint) if supports_savepoints?
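# Emulate :rollback=>:always inside an existing transaction by running
# the block in its own savepoint.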
opts[:savepoint] = true else raise Sequel::Error, "cannot set :rollback=>:always transaction option if already inside a transaction" end end if opts[:savepoint] != false && (stack = _trans(conn)[:savepoints]) && stack.last[:auto_savepoint] opts[:savepoint] = true end unless opts[:savepoint] if opts[:retrying] raise Sequel::Error, "cannot set :retry_on options if you are already inside a transaction" end return yield(conn) end end _transaction(conn, opts, &block) end end end private # Internal generic transaction method. Any exception raised by the given # block will cause the transaction to be rolled back. If the exception is # not a Sequel::Rollback, the error will be reraised. If no exception occurs # inside the block, the transaction is committed. def _transaction(conn, opts=OPTS) rollback = opts[:rollback] begin add_transaction(conn, opts) begin_transaction(conn, opts) if rollback == :always begin ret = yield(conn) rescue Exception => e1 raise e1 ensure raise ::Sequel::Rollback unless e1 end else yield(conn) end rescue Exception => e begin rollback_transaction(conn, opts) rescue Exception => e3 end transaction_error(e, :conn=>conn, :rollback=>rollback) raise e3 if e3 ret ensure begin committed = commit_or_rollback_transaction(e, conn, opts) rescue Exception => e2 begin raise_error(e2, :classes=>database_error_classes, :conn=>conn) rescue Sequel::DatabaseError => e4 begin rollback_transaction(conn, opts) ensure raise e4 end end ensure remove_transaction(conn, committed) end end end # Synchronize access to the current transactions, returning the hash # of options for the current transaction (if any) def _trans(conn) Sequel.synchronize{@transactions[conn]} end # Add the current thread to the list of active transactions def add_transaction(conn, opts) hash = transaction_options(conn, opts) if supports_savepoints? if t = _trans(conn) t[:savepoints].push({:auto_savepoint=>opts[:auto_savepoint]}) return else hash[:savepoints] = [{:auto_savepoint=>opts[:auto_savepoint]}] if (prep = opts[:prepare]) && supports_prepared_transactions? hash[:prepare] = prep end end elsif (prep = opts[:prepare]) && supports_prepared_transactions? hash[:prepare] = prep end Sequel.synchronize{@transactions[conn] = hash} end # Set the given callable as a hook to be called. Type should be either # :after_commit or :after_rollback. def add_savepoint_hook(conn, type, block) savepoint = _trans(conn)[:savepoints].last (savepoint[type] ||= []) << block end # Set the given callable as a hook to be called. Type should be either # :after_commit or :after_rollback. def add_transaction_hook(conn, type, block) hooks = _trans(conn)[type] ||= [] hooks << block end # Whether the given connection is already inside a transaction def already_in_transaction?(conn, opts) _trans(conn) && (!supports_savepoints? || !opts[:savepoint]) end # Derive the transaction hash from the options passed to the transaction. # Meant to be overridden. def transaction_options(conn, opts) {} end # Issue query to begin a new savepoint. def begin_savepoint(conn, opts) log_connection_execute(conn, begin_savepoint_sql(savepoint_level(conn)-1)) end # SQL to start a new savepoint def begin_savepoint_sql(depth) "SAVEPOINT autopoint_#{depth}" end # Start a new database transaction on the given connection def begin_new_transaction(conn, opts) log_connection_execute(conn, begin_transaction_sql) set_transaction_isolation(conn, opts) end # Start a new database transaction or a new savepoint on the given connection.
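# Uses a savepoint when the connection is already inside a transaction
# and the database supports savepoints.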
def begin_transaction(conn, opts=OPTS) if in_savepoint?(conn) begin_savepoint(conn, opts) else begin_new_transaction(conn, opts) end end # SQL to BEGIN a transaction. def begin_transaction_sql 'BEGIN' end # Whether to commit the current transaction. Thread.current.status is # checked because Thread#kill skips rescue blocks (so exception would be # nil), but the transaction should still be rolled back. On Ruby 1.9 (but # not 2.0+), the thread status will still be "run", so Thread#kill # will erroneously commit the transaction, and there isn't a workaround. def commit_or_rollback_transaction(exception, conn, opts) if exception false else if rollback_on_transaction_exit?(conn, opts) rollback_transaction(conn, opts) false else commit_transaction(conn, opts) true end end end # SQL to commit a savepoint def commit_savepoint_sql(depth) "RELEASE SAVEPOINT autopoint_#{depth}" end # Commit the active transaction on the connection def commit_transaction(conn, opts=OPTS) if supports_savepoints? depth = savepoint_level(conn) log_connection_execute(conn, depth > 1 ? commit_savepoint_sql(depth-1) : commit_transaction_sql) else log_connection_execute(conn, commit_transaction_sql) end end # SQL to COMMIT a transaction. def commit_transaction_sql 'COMMIT' end # Method called on the connection object to execute SQL on the database, # used by the transaction code. def connection_execute_method :execute end # Which transaction errors to translate, blank by default. def database_error_classes [] end # Whether the connection is currently inside a savepoint. def in_savepoint?(conn) supports_savepoints? && savepoint_level(conn) > 1 end # Retrieve the savepoint hooks that should be run for the given # connection and commit status. This expects that you are # already inside a savepoint when calling. def savepoint_hooks(conn, committed) _trans(conn)[:savepoints].last[committed ? :after_commit : :after_rollback] end # Retrieve the transaction hooks that should be run for the given # connection and commit status. def transaction_hooks(conn, committed) unless in_savepoint?(conn) _trans(conn)[committed ? :after_commit : :after_rollback] end end # Remove the current thread from the list of active transactions def remove_transaction(conn, committed) callbacks = transaction_hooks(conn, committed) if in_savepoint?(conn) savepoint_callbacks = savepoint_hooks(conn, committed) if committed savepoint_rollback_callbacks = savepoint_hooks(conn, false) end end if transaction_finished?(conn) h = _trans(conn) rolled_back = !committed Sequel.synchronize{h[:rolled_back] = rolled_back} Sequel.synchronize{@transactions.delete(conn)} elsif savepoint_callbacks || savepoint_rollback_callbacks if committed meth = in_savepoint?(conn) ? :add_savepoint_hook : :add_transaction_hook if savepoint_callbacks savepoint_callbacks.each do |block| send(meth, conn, :after_commit, block) end end if savepoint_rollback_callbacks savepoint_rollback_callbacks.each do |block| send(meth, conn, :after_rollback, block) end end else savepoint_callbacks.each(&:call) end end callbacks.each(&:call) if callbacks end # SQL to rollback to a savepoint def rollback_savepoint_sql(depth) "ROLLBACK TO SAVEPOINT autopoint_#{depth}" end # Whether to rollback the transaction when exiting the transaction. def rollback_on_transaction_exit?(conn, opts) return true if Thread.current.status == 'aborting' h = _trans(conn) if supports_savepoints?
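# Each savepoint tracks its own rollback_on_exit setting, so consult the
# innermost savepoint's flag.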
h[:savepoints].last[:rollback_on_exit] else h[:rollback_on_exit] end end # Roll back the active transaction on the connection def rollback_transaction(conn, opts=OPTS) if supports_savepoints? depth = savepoint_level(conn) log_connection_execute(conn, depth > 1 ? rollback_savepoint_sql(depth-1) : rollback_transaction_sql) else log_connection_execute(conn, rollback_transaction_sql) end end # SQL to ROLLBACK a transaction. def rollback_transaction_sql 'ROLLBACK' end # Set the transaction isolation level on the given connection def set_transaction_isolation(conn, opts) if supports_transaction_isolation_levels? and level = opts.fetch(:isolation, transaction_isolation_level) log_connection_execute(conn, set_transaction_isolation_sql(level)) end end # SQL to set the transaction isolation level def set_transaction_isolation_sql(level) "SET TRANSACTION ISOLATION LEVEL #{TRANSACTION_ISOLATION_LEVELS[level]}" end # Current savepoint level. def savepoint_level(conn) _trans(conn)[:savepoints].length end # Raise a database error unless the exception is a Rollback. def transaction_error(e, opts=OPTS) if e.is_a?(Rollback) raise e if opts[:rollback] == :reraise else raise_error(e, opts.merge(:classes=>database_error_classes)) end end # Finish a subtransaction. If savepoints are supported, pops the current # transaction off the savepoint stack. def transaction_finished?(conn) if supports_savepoints? stack = _trans(conn)[:savepoints] stack.pop stack.empty? else true end end end end sequel-5.63.0/lib/sequel/dataset.rb000066400000000000000000000041071434214120600171200ustar00rootroot00000000000000# frozen-string-literal: true module Sequel # A dataset represents an SQL query. Datasets # can be used to select, insert, update and delete records. # # Query results are always retrieved on demand, so a dataset can be kept # around and reused indefinitely (datasets never cache results): # # my_posts = DB[:posts].where(author: 'david') # no records are retrieved # my_posts.all # records are retrieved # my_posts.all # records are retrieved again # # Datasets are frozen and use a functional style where modification methods # return modified copies of the dataset. This allows you to reuse # datasets: # # posts = DB[:posts] # davids_posts = posts.where(author: 'david') # old_posts = posts.where{stamp < Date.today - 7} # davids_old_posts = davids_posts.where{stamp < Date.today - 7} # # Datasets are Enumerable objects, so they can be manipulated using many # of the Enumerable methods, such as +map+ and +inject+. Note that there are some methods # that Dataset defines that override methods defined in Enumerable and result in different # behavior, such as +select+ and +group_by+. # # For more information, see the {"Dataset Basics" guide}[rdoc-ref:doc/dataset_basics.rdoc]. class Dataset OPTS = Sequel::OPTS # Whether Dataset#freeze can actually freeze datasets.
True only on ruby 2.4+, # as it requires clone(freeze: false) TRUE_FREEZE = RUBY_VERSION >= '2.4' include Enumerable include SQL::AliasMethods include SQL::BooleanMethods include SQL::CastMethods include SQL::ComplexExpressionMethods include SQL::InequalityMethods include SQL::NumericMethods include SQL::OrderMethods include SQL::StringMethods end require_relative "dataset/query" require_relative "dataset/actions" require_relative "dataset/features" require_relative "dataset/graph" require_relative "dataset/prepared_statements" require_relative "dataset/misc" require_relative "dataset/sql" require_relative "dataset/placeholder_literalizer" require_relative "dataset/dataset_module" end sequel-5.63.0/lib/sequel/dataset/000077500000000000000000000000001434214120600165715ustar00rootroot00000000000000sequel-5.63.0/lib/sequel/dataset/actions.rb000066400000000000000000001424401434214120600205630ustar00rootroot00000000000000# frozen-string-literal: true module Sequel class Dataset # --------------------- # :section: 2 - Methods that execute code on the database # These methods all execute the dataset's SQL on the database. # They don't return modified datasets, so if used in a method chain # they should be the last method called. # --------------------- # Action methods defined by Sequel that execute code on the database. ACTION_METHODS = (<<-METHS).split.map(&:to_sym).freeze << [] all as_hash avg count columns columns! delete each empty? fetch_rows first first! get import insert last map max min multi_insert paged_each select_hash select_hash_groups select_map select_order_map single_record single_record! single_value single_value! sum to_hash to_hash_groups truncate update where_all where_each where_single_value METHS # The clone options to use when retrieving columns for a dataset. COLUMNS_CLONE_OPTIONS = {:distinct => nil, :limit => 0, :offset=>nil, :where=>nil, :having=>nil, :order=>nil, :row_proc=>nil, :graph=>nil, :eager_graph=>nil}.freeze # Inserts the given argument into the database. Returns self so it # can be used safely when chaining: # # DB[:items] << {id: 0, name: 'Zero'} << DB[:old_items].select(:id, :name) def <<(arg) insert(arg) self end # Returns the first record matching the conditions. Examples: # # DB[:table][id: 1] # SELECT * FROM table WHERE (id = 1) LIMIT 1 # # => {:id=>1} def [](*conditions) raise(Error, 'You cannot call Dataset#[] with an integer or with no arguments') if (conditions.length == 1 and conditions.first.is_a?(Integer)) or conditions.length == 0 first(*conditions) end # Returns an array with all records in the dataset. If a block is given, # the array is iterated over after all items have been loaded. # # DB[:table].all # SELECT * FROM table # # => [{:id=>1, ...}, {:id=>2, ...}, ...] # # # Iterate over all rows in the table # DB[:table].all{|row| p row} def all(&block) _all(block){|a| each{|r| a << r}} end # Returns the average value for the given column/expression. # Uses a virtual row block if no argument is given. # # DB[:table].avg(:number) # SELECT avg(number) FROM table LIMIT 1 # # => 3 # DB[:table].avg{function(column)} # SELECT avg(function(column)) FROM table LIMIT 1 # # => 1 def avg(arg=(no_arg = true), &block) arg = Sequel.virtual_row(&block) if no_arg _aggregate(:avg, arg) end # Returns the columns in the result set in order as an array of symbols. # If the columns are currently cached, returns the cached value. Otherwise, # a SELECT query is performed to retrieve a single row in order to get the columns.
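#
# A sketch of the caching behavior:
#
#   ds = DB[:table]
#   ds.columns # runs a query to determine the columns
#   ds.columns # uses the cached value, no query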
# # If you are looking for all columns for a single table and maybe some information about # each column (e.g. database type), see Database#schema. # # DB[:table].columns # # => [:id, :name] def columns _columns || columns! end # Ignore any cached column information and perform a query to retrieve # a row in order to get the columns. # # DB[:table].columns! # # => [:id, :name] def columns! ds = clone(COLUMNS_CLONE_OPTIONS) ds.each{break} if cols = ds.cache[:_columns] self.columns = cols else [] end end COUNT_SELECT = Sequel.function(:count).*.as(:count) # Returns the number of records in the dataset. If an argument is provided, # it is used as the argument to count. If a block is provided, it is # treated as a virtual row, and the result is used as the argument to # count. # # DB[:table].count # SELECT count(*) AS count FROM table LIMIT 1 # # => 3 # DB[:table].count(:column) # SELECT count(column) AS count FROM table LIMIT 1 # # => 2 # DB[:table].count{foo(column)} # SELECT count(foo(column)) AS count FROM table LIMIT 1 # # => 1 def count(arg=(no_arg=true), &block) if no_arg && !block cached_dataset(:_count_ds) do aggregate_dataset.select(COUNT_SELECT).single_value_ds end.single_value!.to_i else if block if no_arg arg = Sequel.virtual_row(&block) else raise Error, 'cannot provide both argument and block to Dataset#count' end end _aggregate(:count, arg) end end # Deletes the records in the dataset, returning the number of records deleted. # # DB[:table].delete # DELETE FROM table # # => 3 # # Some databases support using multiple tables in a DELETE query. This requires # multiple FROM tables (JOINs can also be used). As multiple FROM tables use # an implicit CROSS JOIN, you should make sure your WHERE condition uses the # appropriate filters for the FROM tables: # # DB.from(:a, :b).join(:c, :d=>Sequel[:b][:e]).where{{a[:f]=>b[:g], a[:id]=>c[:h]}}. # delete # # DELETE FROM a # # USING b # # INNER JOIN c ON (c.d = b.e) # # WHERE ((a.f = b.g) AND (a.id = c.h)) def delete(&block) sql = delete_sql if uses_returning?(:delete) returning_fetch_rows(sql, &block) else execute_dui(sql) end end # Iterates over the records in the dataset as they are yielded from the # database adapter, and returns self. # # DB[:table].each{|row| p row} # SELECT * FROM table # # Note that this method is not safe to use on many adapters if you are # running additional queries inside the provided block. If you are # running queries inside the block, you should use +all+ instead of +each+ # for the outer queries, or use a separate thread or shard inside +each+. def each if rp = row_proc fetch_rows(select_sql){|r| yield rp.call(r)} else fetch_rows(select_sql){|r| yield r} end self end EMPTY_SELECT = Sequel::SQL::AliasedExpression.new(1, :one) # Returns true if no records exist in the dataset, false otherwise # # DB[:table].empty? # SELECT 1 AS one FROM table LIMIT 1 # # => false def empty? cached_dataset(:_empty_ds) do single_value_ds.unordered.select(EMPTY_SELECT) end.single_value!.nil? end # Returns the first matching record if no arguments are given. # If an integer argument is given, it is interpreted as a limit, and then returns all # matching records up to that limit. If any other type of # argument(s) is passed, it is treated as a filter and the # first matching record is returned. If a block is given, it is used # to filter the dataset before returning anything. # # If there are no records in the dataset, returns nil (or an empty # array if an integer argument is given).
# # Examples: # # DB[:table].first # SELECT * FROM table LIMIT 1 # # => {:id=>7} # # DB[:table].first(2) # SELECT * FROM table LIMIT 2 # # => [{:id=>6}, {:id=>4}] # # DB[:table].first(id: 2) # SELECT * FROM table WHERE (id = 2) LIMIT 1 # # => {:id=>2} # # DB[:table].first(Sequel.lit("id = 3")) # SELECT * FROM table WHERE (id = 3) LIMIT 1 # # => {:id=>3} # # DB[:table].first(Sequel.lit("id = ?", 4)) # SELECT * FROM table WHERE (id = 4) LIMIT 1 # # => {:id=>4} # # DB[:table].first{id > 2} # SELECT * FROM table WHERE (id > 2) LIMIT 1 # # => {:id=>5} # # DB[:table].first(Sequel.lit("id > ?", 4)){id < 6} # SELECT * FROM table WHERE ((id > 4) AND (id < 6)) LIMIT 1 # # => {:id=>5} # # DB[:table].first(2){id < 2} # SELECT * FROM table WHERE (id < 2) LIMIT 2 # # => [{:id=>1}] def first(*args, &block) case args.length when 0 unless block return single_record end when 1 arg = args[0] if arg.is_a?(Integer) res = if block if loader = cached_placeholder_literalizer(:_first_integer_cond_loader) do |pl| where(pl.arg).limit(pl.arg) end loader.all(filter_expr(&block), arg) else where(&block).limit(arg).all end else if loader = cached_placeholder_literalizer(:_first_integer_loader) do |pl| limit(pl.arg) end loader.all(arg) else limit(arg).all end end return res end where_args = args args = arg end if loader = cached_where_placeholder_literalizer(where_args||args, block, :_first_cond_loader) do |pl| _single_record_ds.where(pl.arg) end loader.first(filter_expr(args, &block)) else _single_record_ds.where(args, &block).single_record! end end # Calls first. If first returns nil (signaling that no # row matches), raise a Sequel::NoMatchingRow exception. def first!(*args, &block) first(*args, &block) || raise(Sequel::NoMatchingRow.new(self)) end # Return the column value for the first matching record in the dataset. # Raises an error if both an argument and block is given. # # DB[:table].get(:id) # SELECT id FROM table LIMIT 1 # # => 3 # # ds.get{sum(id)} # SELECT sum(id) AS v FROM table LIMIT 1 # # => 6 # # You can pass an array of arguments to return multiple arguments, # but you must make sure each element in the array has an alias that # Sequel can determine: # # DB[:table].get([:id, :name]) # SELECT id, name FROM table LIMIT 1 # # => [3, 'foo'] # # DB[:table].get{[sum(id).as(sum), name]} # SELECT sum(id) AS sum, name FROM table LIMIT 1 # # => [6, 'foo'] def get(column=(no_arg=true; nil), &block) ds = naked if block raise(Error, 'Must call Dataset#get with an argument or a block, not both') unless no_arg ds = ds.select(&block) column = ds.opts[:select] column = nil if column.is_a?(Array) && column.length < 2 else case column when Array ds = ds.select(*column) when LiteralString, Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression if loader = cached_placeholder_literalizer(:_get_loader) do |pl| ds.single_value_ds.select(pl.arg) end return loader.get(column) end ds = ds.select(column) else if loader = cached_placeholder_literalizer(:_get_alias_loader) do |pl| ds.single_value_ds.select(Sequel.as(pl.arg, :v)) end return loader.get(column) end ds = ds.select(Sequel.as(column, :v)) end end if column.is_a?(Array) if r = ds.single_record r.values_at(*hash_key_symbols(column)) end else ds.single_value end end # Inserts multiple records into the associated table. This method can be # used to efficiently insert a large number of records into a table in a # single query if the database supports it. Inserts are automatically # wrapped in a transaction if necessary. 
# # This method is called with a columns array and an array of value arrays: # # DB[:table].import([:x, :y], [[1, 2], [3, 4]]) # # INSERT INTO table (x, y) VALUES (1, 2) # # INSERT INTO table (x, y) VALUES (3, 4) # # or, if the database supports it: # # # INSERT INTO table (x, y) VALUES (1, 2), (3, 4) # # This method also accepts a dataset instead of an array of value arrays: # # DB[:table].import([:x, :y], DB[:table2].select(:a, :b)) # # INSERT INTO table (x, y) SELECT a, b FROM table2 # # Options: # :commit_every :: Open a new transaction for every given number of # records. For example, if you provide a value of 50, # will commit after every 50 records. When a # transaction is not required, this option controls # the maximum number of values to insert with a single # statement; it does not force the use of a # transaction. # :return :: When this is set to :primary_key, returns an array of # autoincremented primary key values for the rows inserted. # This does not have an effect if +values+ is a Dataset. # :server :: Set the server/shard to use for the transaction and insert # queries. # :slice :: Same as :commit_every, :commit_every takes precedence. def import(columns, values, opts=OPTS) return @db.transaction{insert(columns, values)} if values.is_a?(Dataset) return if values.empty? raise(Error, 'Using Sequel::Dataset#import with an empty column array is not allowed') if columns.empty? ds = opts[:server] ? server(opts[:server]) : self if slice_size = opts.fetch(:commit_every, opts.fetch(:slice, default_import_slice)) offset = 0 rows = [] while offset < values.length rows << ds._import(columns, values[offset, slice_size], opts) offset += slice_size end rows.flatten else ds._import(columns, values, opts) end end # Inserts values into the associated table. The returned value is generally # the value of the autoincremented primary key for the inserted row, assuming that # a single row is inserted and the table has an autoincrementing primary key. # # +insert+ handles a number of different argument formats: # no arguments or single empty hash :: Uses DEFAULT VALUES # single hash :: Most common format, treats keys as columns and values as values # single array :: Treats entries as values, with no columns # two arrays :: Treats first array as columns, second array as values # single Dataset :: Treats as an insert based on a selection from the dataset given, # with no columns # array and dataset :: Treats as an insert based on a selection from the dataset # given, with the columns given by the array. # # Examples: # # DB[:items].insert # # INSERT INTO items DEFAULT VALUES # # DB[:items].insert({}) # # INSERT INTO items DEFAULT VALUES # # DB[:items].insert([1,2,3]) # # INSERT INTO items VALUES (1, 2, 3) # # DB[:items].insert([:a, :b], [1,2]) # # INSERT INTO items (a, b) VALUES (1, 2) # # DB[:items].insert(a: 1, b: 2) # # INSERT INTO items (a, b) VALUES (1, 2) # # DB[:items].insert(DB[:old_items]) # # INSERT INTO items SELECT * FROM old_items # # DB[:items].insert([:a, :b], DB[:old_items]) # # INSERT INTO items (a, b) SELECT * FROM old_items def insert(*values, &block) sql = insert_sql(*values) if uses_returning?(:insert) returning_fetch_rows(sql, &block) else execute_insert(sql) end end # Reverses the order and then runs #first with the given arguments and block. Note that this # will not necessarily give you the last record in the dataset, # unless you have an unambiguous order. If there is not # currently an order for this dataset, raises an +Error+. 
# # DB[:table].order(:id).last # SELECT * FROM table ORDER BY id DESC LIMIT 1 # # => {:id=>10} # # DB[:table].order(Sequel.desc(:id)).last(2) # SELECT * FROM table ORDER BY id ASC LIMIT 2 # # => [{:id=>1}, {:id=>2}] def last(*args, &block) raise(Error, 'No order specified') unless @opts[:order] reverse.first(*args, &block) end # Maps column values for each record in the dataset (if an argument is given) # or performs the stock mapping functionality of +Enumerable+ otherwise. # Raises an +Error+ if both an argument and block are given. # # DB[:table].map(:id) # SELECT * FROM table # # => [1, 2, 3, ...] # # DB[:table].map{|r| r[:id] * 2} # SELECT * FROM table # # => [2, 4, 6, ...] # # You can also provide an array of column names: # # DB[:table].map([:id, :name]) # SELECT * FROM table # # => [[1, 'A'], [2, 'B'], [3, 'C'], ...] def map(column=nil, &block) if column raise(Error, 'Must call Dataset#map with either an argument or a block, not both') if block return naked.map(column) if row_proc if column.is_a?(Array) super(){|r| r.values_at(*column)} else super(){|r| r[column]} end else super(&block) end end # Returns the maximum value for the given column/expression. # Uses a virtual row block if no argument is given. # # DB[:table].max(:id) # SELECT max(id) FROM table LIMIT 1 # # => 10 # DB[:table].max{function(column)} # SELECT max(function(column)) FROM table LIMIT 1 # # => 7 def max(arg=(no_arg = true), &block) arg = Sequel.virtual_row(&block) if no_arg _aggregate(:max, arg) end # Execute a MERGE statement, which allows for INSERT, UPDATE, and DELETE # behavior in a single query, based on whether rows from a source table # match rows in the current table, based on the join conditions. # # Unless the dataset uses static SQL, to use #merge, you must first have # called #merge_using to specify the merge source and join conditions. # You will then likely call one or more of the following methods # to specify MERGE behavior by adding WHEN [NOT] MATCHED clauses: # # * #merge_insert # * #merge_update # * #merge_delete # # The WHEN [NOT] MATCHED clauses are added to the SQL in the order these # methods were called on the dataset. If none of these methods are # called, an error is raised. # # Example: # # DB[:m1]. # merge_using(:m2, i1: :i2). # merge_insert(i1: :i2, a: Sequel[:b]+11). # merge_delete{a > 30}. # merge_update(i1: Sequel[:i1]+:i2+10, a: Sequel[:a]+:b+20). # merge # # SQL: # # MERGE INTO m1 USING m2 ON (i1 = i2) # WHEN NOT MATCHED THEN INSERT (i1, a) VALUES (i2, (b + 11)) # WHEN MATCHED AND (a > 30) THEN DELETE # WHEN MATCHED THEN UPDATE SET i1 = (i1 + i2 + 10), a = (a + b + 20) # # On PostgreSQL, two additional merge methods are supported, for the # PostgreSQL-specific DO NOTHING syntax. # # * #merge_do_nothing_when_matched # * #merge_do_nothing_when_not_matched # # This method is supported on Oracle, but Oracle's MERGE support is # non-standard, and has the following issues: # # * DELETE clause requires UPDATE clause # * DELETE clause requires a condition # * DELETE clause only affects rows updated by UPDATE clause def merge execute_ddl(merge_sql) end # Returns the minimum value for the given column/expression. # Uses a virtual row block if no argument is given.
    #
    #   DB[:table].min(:id) # SELECT min(id) FROM table LIMIT 1
    #   # => 1
    #   DB[:table].min{function(column)} # SELECT min(function(column)) FROM table LIMIT 1
    #   # => 0
    def min(arg=(no_arg = true), &block)
      arg = Sequel.virtual_row(&block) if no_arg
      _aggregate(:min, arg)
    end

    # This is a front end for import that allows you to submit an array of
    # hashes instead of arrays of columns and values:
    #
    #   DB[:table].multi_insert([{x: 1}, {x: 2}])
    #   # INSERT INTO table (x) VALUES (1)
    #   # INSERT INTO table (x) VALUES (2)
    #
    # Be aware that all hashes should have the same keys if you use this calling method,
    # otherwise some columns could be missed or set to null instead of to default
    # values.
    #
    # This respects the same options as #import.
    def multi_insert(hashes, opts=OPTS)
      return if hashes.empty?
      columns = hashes.first.keys
      import(columns, hashes.map{|h| columns.map{|c| h[c]}}, opts)
    end

    # Yields each row in the dataset, but internally uses multiple queries as needed to
    # process the entire result set without keeping all rows in the dataset in memory,
    # even if the underlying driver buffers all query results in memory.
    #
    # Because this uses multiple queries internally, in order to remain consistent,
    # it also uses a transaction internally. Additionally, to work correctly, the dataset
    # must have an unambiguous order. Using an ambiguous order can result in an infinite loop,
    # as well as subtler bugs such as yielding duplicate rows or rows being skipped.
    #
    # Sequel checks that the datasets using this method have an order, but it cannot
    # ensure that the order is unambiguous.
    #
    # Note that this method is not safe to use on many adapters if you are
    # running additional queries inside the provided block. If you are
    # running queries inside the block, use a separate thread or shard inside +paged_each+.
    #
    # Options:
    # :rows_per_fetch :: The number of rows to fetch per query. Defaults to 1000.
    # :strategy :: The strategy to use for paging of results. By default this is :offset,
    #              for using an approach with a limit and offset for every page. This can
    #              be set to :filter, which uses a limit and a filter that excludes
    #              rows from previous pages. In order for this strategy to work, you must be
    #              selecting the columns you are ordering by, and none of the columns can contain
    #              NULLs. Note that some Sequel adapters have optimized implementations that will
    #              use cursors or streaming regardless of the :strategy option used.
    # :filter_values :: If the strategy: :filter option is used, this option should be a proc
    #                   that accepts the last retrieved row for the previous page and an array of
    #                   ORDER BY expressions, and returns an array of values relating to those
    #                   expressions for the last retrieved row. You will need to use this option
    #                   if your ORDER BY expressions are not simple columns, if they contain
    #                   qualified identifiers that would be ambiguous unqualified, if they contain
    #                   any identifiers that are aliased in SELECT, and potentially other cases.
    #
    # Examples:
    #
    #   DB[:table].order(:id).paged_each{|row| }
    #   # SELECT * FROM table ORDER BY id LIMIT 1000
    #   # SELECT * FROM table ORDER BY id LIMIT 1000 OFFSET 1000
    #   # ...
    #
    #   DB[:table].order(:id).paged_each(rows_per_fetch: 100){|row| }
    #   # SELECT * FROM table ORDER BY id LIMIT 100
    #   # SELECT * FROM table ORDER BY id LIMIT 100 OFFSET 100
    #   # ...
    #
    #   DB[:table].order(:id).paged_each(strategy: :filter){|row| }
    #   # SELECT * FROM table ORDER BY id LIMIT 1000
    #   # SELECT * FROM table WHERE id > 1001 ORDER BY id LIMIT 1000
    #   # ...
    #
    #   DB[:table].order(:id).paged_each(strategy: :filter,
    #     filter_values: lambda{|row, exprs| [row[:id]]}){|row| }
    #   # SELECT * FROM table ORDER BY id LIMIT 1000
    #   # SELECT * FROM table WHERE id > 1001 ORDER BY id LIMIT 1000
    #   # ...
    def paged_each(opts=OPTS)
      unless @opts[:order]
        raise Sequel::Error, "Dataset#paged_each requires the dataset be ordered"
      end
      unless defined?(yield)
        return enum_for(:paged_each, opts)
      end

      total_limit = @opts[:limit]
      offset = @opts[:offset]

      if server = @opts[:server]
        opts = Hash[opts]
        opts[:server] = server
      end

      rows_per_fetch = opts[:rows_per_fetch] || 1000
      strategy = if offset || total_limit
        :offset
      else
        opts[:strategy] || :offset
      end

      db.transaction(opts) do
        case strategy
        when :filter
          filter_values = opts[:filter_values] || proc{|row, exprs| exprs.map{|e| row[hash_key_symbol(e)]}}
          base_ds = ds = limit(rows_per_fetch)
          while ds
            last_row = nil
            ds.each do |row|
              last_row = row
              yield row
            end
            ds = (base_ds.where(ignore_values_preceding(last_row, &filter_values)) if last_row)
          end
        else
          offset ||= 0
          num_rows_yielded = rows_per_fetch
          total_rows = 0

          while num_rows_yielded == rows_per_fetch && (total_limit.nil? || total_rows < total_limit)
            if total_limit && total_rows + rows_per_fetch > total_limit
              rows_per_fetch = total_limit - total_rows
            end

            num_rows_yielded = 0
            limit(rows_per_fetch, offset).each do |row|
              num_rows_yielded += 1
              total_rows += 1 if total_limit
              yield row
            end

            offset += rows_per_fetch
          end
        end
      end

      self
    end

    # Returns a hash with key_column values as keys and value_column values as
    # values. Similar to as_hash, but only selects the columns given. Like
    # as_hash, it accepts an optional :hash parameter, into which entries will
    # be merged.
    #
    #   DB[:table].select_hash(:id, :name)
    #   # SELECT id, name FROM table
    #   # => {1=>'a', 2=>'b', ...}
    #
    # You can also provide an array of column names for either the key_column,
    # the value column, or both:
    #
    #   DB[:table].select_hash([:id, :foo], [:name, :bar])
    #   # SELECT id, foo, name, bar FROM table
    #   # => {[1, 3]=>['a', 'c'], [2, 4]=>['b', 'd'], ...}
    #
    # When using this method, you must be sure that each expression has an alias
    # that Sequel can determine.
    def select_hash(key_column, value_column, opts = OPTS)
      _select_hash(:as_hash, key_column, value_column, opts)
    end

    # Returns a hash with key_column values as keys and an array of value_column values.
    # Similar to to_hash_groups, but only selects the columns given. Like to_hash_groups,
    # it accepts an optional :hash parameter, into which entries will be merged.
    #
    #   DB[:table].select_hash_groups(:name, :id)
    #   # SELECT name, id FROM table
    #   # => {'a'=>[1, 4, ...], 'b'=>[2, ...], ...}
    #
    # You can also provide an array of column names for either the key_column,
    # the value column, or both:
    #
    #   DB[:table].select_hash_groups([:first, :middle], [:last, :id])
    #   # SELECT first, middle, last, id FROM table
    #   # => {['a', 'b']=>[['c', 1], ['d', 2], ...], ...}
    #
    # When using this method, you must be sure that each expression has an alias
    # that Sequel can determine.
    def select_hash_groups(key_column, value_column, opts = OPTS)
      _select_hash(:to_hash_groups, key_column, value_column, opts)
    end

    # Selects the column given (either as an argument or as a block), and
    # returns an array of all values of that column in the dataset. If you
    # give a block argument that returns an array with multiple entries,
    # the contents of the resulting array are undefined. Raises an Error
    # if called with both an argument and a block.
    #
    #   DB[:table].select_map(:id) # SELECT id FROM table
    #   # => [3, 5, 8, 1, ...]
    #
    #   DB[:table].select_map{id * 2} # SELECT (id * 2) FROM table
    #   # => [6, 10, 16, 2, ...]
    #
    # You can also provide an array of column names:
    #
    #   DB[:table].select_map([:id, :name]) # SELECT id, name FROM table
    #   # => [[1, 'A'], [2, 'B'], [3, 'C'], ...]
    #
    # If you provide an array of expressions, you must be sure that each entry
    # in the array has an alias that Sequel can determine.
    def select_map(column=nil, &block)
      _select_map(column, false, &block)
    end

    # The same as select_map, but in addition orders the array by the column.
    #
    #   DB[:table].select_order_map(:id) # SELECT id FROM table ORDER BY id
    #   # => [1, 2, 3, 4, ...]
    #
    #   DB[:table].select_order_map{id * 2} # SELECT (id * 2) FROM table ORDER BY (id * 2)
    #   # => [2, 4, 6, 8, ...]
    #
    # You can also provide an array of column names:
    #
    #   DB[:table].select_order_map([:id, :name]) # SELECT id, name FROM table ORDER BY id, name
    #   # => [[1, 'A'], [2, 'B'], [3, 'C'], ...]
    #
    # If you provide an array of expressions, you must be sure that each entry
    # in the array has an alias that Sequel can determine.
    def select_order_map(column=nil, &block)
      _select_map(column, true, &block)
    end

    # Limits the dataset to one record, and returns the first record in the dataset,
    # or nil if the dataset has no records. Users should probably use +first+ instead of
    # this method. Example:
    #
    #   DB[:test].single_record # SELECT * FROM test LIMIT 1
    #   # => {:column_name=>'value'}
    def single_record
      _single_record_ds.single_record!
    end

    # Returns the first record in the dataset, without limiting the dataset. Returns nil if
    # the dataset has no records. Users should probably use +first+ instead of this method.
    # This should only be used if you know the dataset is already limited to a single record.
    # This method may be desirable to use for performance reasons, as it does not clone the
    # receiver. Example:
    #
    #   DB[:test].single_record! # SELECT * FROM test
    #   # => {:column_name=>'value'}
    def single_record!
      with_sql_first(select_sql)
    end

    # Returns the first value of the first record in the dataset.
    # Returns nil if the dataset is empty. Users should generally use
    # +get+ instead of this method. Example:
    #
    #   DB[:test].single_value # SELECT * FROM test LIMIT 1
    #   # => 'value'
    def single_value
      single_value_ds.each do |r|
        r.each{|_, v| return v}
      end
      nil
    end

    # Returns the first value of the first record in the dataset, without limiting the dataset.
    # Returns nil if the dataset is empty. Users should generally use +get+ instead of this
    # method. Should not be used on graphed datasets or datasets that have row_procs that
    # don't return hashes. This method may be desirable to use for performance reasons, as
    # it does not clone the receiver.
    #
    #   DB[:test].single_value! # SELECT * FROM test
    #   # => 'value'
    def single_value!
      with_sql_single_value(select_sql)
    end

    # Returns the sum for the given column/expression.
    # Uses a virtual row block if no column is given.
    #
    #   DB[:table].sum(:id) # SELECT sum(id) FROM table LIMIT 1
    #   # => 55
    #   DB[:table].sum{function(column)} # SELECT sum(function(column)) FROM table LIMIT 1
    #   # => 10
    def sum(arg=(no_arg = true), &block)
      arg = Sequel.virtual_row(&block) if no_arg
      _aggregate(:sum, arg)
    end

    # Returns a hash with one column used as key and another used as value.
    # If rows have duplicate values for the key column, the latter row(s)
    # will overwrite the value of the previous row(s). If the value_column
    # is not given or nil, uses the entire hash as the value.
# # DB[:table].as_hash(:id, :name) # SELECT * FROM table # # {1=>'Jim', 2=>'Bob', ...} # # DB[:table].as_hash(:id) # SELECT * FROM table # # {1=>{:id=>1, :name=>'Jim'}, 2=>{:id=>2, :name=>'Bob'}, ...} # # You can also provide an array of column names for either the key_column, # the value column, or both: # # DB[:table].as_hash([:id, :foo], [:name, :bar]) # SELECT * FROM table # # {[1, 3]=>['Jim', 'bo'], [2, 4]=>['Bob', 'be'], ...} # # DB[:table].as_hash([:id, :name]) # SELECT * FROM table # # {[1, 'Jim']=>{:id=>1, :name=>'Jim'}, [2, 'Bob']=>{:id=>2, :name=>'Bob'}, ...} # # Options: # :all :: Use all instead of each to retrieve the objects # :hash :: The object into which the values will be placed. If this is not # given, an empty hash is used. This can be used to use a hash with # a default value or default proc. def as_hash(key_column, value_column = nil, opts = OPTS) h = opts[:hash] || {} meth = opts[:all] ? :all : :each if value_column return naked.as_hash(key_column, value_column, opts) if row_proc if value_column.is_a?(Array) if key_column.is_a?(Array) public_send(meth){|r| h[r.values_at(*key_column)] = r.values_at(*value_column)} else public_send(meth){|r| h[r[key_column]] = r.values_at(*value_column)} end else if key_column.is_a?(Array) public_send(meth){|r| h[r.values_at(*key_column)] = r[value_column]} else public_send(meth){|r| h[r[key_column]] = r[value_column]} end end elsif key_column.is_a?(Array) public_send(meth){|r| h[key_column.map{|k| r[k]}] = r} else public_send(meth){|r| h[r[key_column]] = r} end h end # Alias of as_hash for backwards compatibility. def to_hash(*a) as_hash(*a) end # Returns a hash with one column used as key and the values being an # array of column values. If the value_column is not given or nil, uses # the entire hash as the value. # # DB[:table].to_hash_groups(:name, :id) # SELECT * FROM table # # {'Jim'=>[1, 4, 16, ...], 'Bob'=>[2], ...} # # DB[:table].to_hash_groups(:name) # SELECT * FROM table # # {'Jim'=>[{:id=>1, :name=>'Jim'}, {:id=>4, :name=>'Jim'}, ...], 'Bob'=>[{:id=>2, :name=>'Bob'}], ...} # # You can also provide an array of column names for either the key_column, # the value column, or both: # # DB[:table].to_hash_groups([:first, :middle], [:last, :id]) # SELECT * FROM table # # {['Jim', 'Bob']=>[['Smith', 1], ['Jackson', 4], ...], ...} # # DB[:table].to_hash_groups([:first, :middle]) # SELECT * FROM table # # {['Jim', 'Bob']=>[{:id=>1, :first=>'Jim', :middle=>'Bob', :last=>'Smith'}, ...], ...} # # Options: # :all :: Use all instead of each to retrieve the objects # :hash :: The object into which the values will be placed. If this is not # given, an empty hash is used. This can be used to use a hash with # a default value or default proc. def to_hash_groups(key_column, value_column = nil, opts = OPTS) h = opts[:hash] || {} meth = opts[:all] ? 
:all : :each
      if value_column
        return naked.to_hash_groups(key_column, value_column, opts) if row_proc
        if value_column.is_a?(Array)
          if key_column.is_a?(Array)
            public_send(meth){|r| (h[r.values_at(*key_column)] ||= []) << r.values_at(*value_column)}
          else
            public_send(meth){|r| (h[r[key_column]] ||= []) << r.values_at(*value_column)}
          end
        else
          if key_column.is_a?(Array)
            public_send(meth){|r| (h[r.values_at(*key_column)] ||= []) << r[value_column]}
          else
            public_send(meth){|r| (h[r[key_column]] ||= []) << r[value_column]}
          end
        end
      elsif key_column.is_a?(Array)
        public_send(meth){|r| (h[key_column.map{|k| r[k]}] ||= []) << r}
      else
        public_send(meth){|r| (h[r[key_column]] ||= []) << r}
      end
      h
    end

    # Truncates the dataset. Returns nil.
    #
    #   DB[:table].truncate # TRUNCATE table
    #   # => nil
    def truncate
      execute_ddl(truncate_sql)
    end

    # Updates values for the dataset. The returned value is the number of rows updated.
    # +values+ should be a hash where the keys are columns to set and values are the values to
    # which to set the columns.
    #
    #   DB[:table].update(x: nil) # UPDATE table SET x = NULL
    #   # => 10
    #
    #   DB[:table].update(x: Sequel[:x]+1, y: 0) # UPDATE table SET x = (x + 1), y = 0
    #   # => 10
    #
    # Some databases support using multiple tables in an UPDATE query. This requires
    # multiple FROM tables (JOINs can also be used). As multiple FROM tables use
    # an implicit CROSS JOIN, you should make sure your WHERE condition uses the
    # appropriate filters for the FROM tables:
    #
    #   DB.from(:a, :b).join(:c, :d=>Sequel[:b][:e]).where{{a[:f]=>b[:g], a[:id]=>10}}.
    #     update(:f=>Sequel[:c][:h])
    #   # UPDATE a
    #   # SET f = c.h
    #   # FROM b
    #   # INNER JOIN c ON (c.d = b.e)
    #   # WHERE ((a.f = b.g) AND (a.id = 10))
    def update(values=OPTS, &block)
      sql = update_sql(values)
      if uses_returning?(:update)
        returning_fetch_rows(sql, &block)
      else
        execute_dui(sql)
      end
    end

    # Return an array of all rows matching the given filter condition, also
    # yielding each row to the given block. Basically the same as where(cond).all(&block),
    # except it can be optimized to not create an intermediate dataset.
    #
    #   DB[:table].where_all(id: [1,2,3])
    #   # SELECT * FROM table WHERE (id IN (1, 2, 3))
    def where_all(cond, &block)
      if loader = _where_loader([cond], nil)
        loader.all(filter_expr(cond), &block)
      else
        where(cond).all(&block)
      end
    end

    # Iterate over all rows matching the given filter condition,
    # yielding each row to the given block. Basically the same as where(cond).each(&block),
    # except it can be optimized to not create an intermediate dataset.
    #
    #   DB[:table].where_each(id: [1,2,3]){|row| p row}
    #   # SELECT * FROM table WHERE (id IN (1, 2, 3))
    def where_each(cond, &block)
      if loader = _where_loader([cond], nil)
        loader.each(filter_expr(cond), &block)
      else
        where(cond).each(&block)
      end
    end

    # Filter the dataset using the given filter condition, then return a single value.
    # This assumes that the dataset has already been set up to limit the selection to
    # a single column. Basically the same as where(cond).single_value,
    # except it can be optimized to not create an intermediate dataset.
    #
    #   DB[:table].select(:name).where_single_value(id: 1)
    #   # SELECT name FROM table WHERE (id = 1) LIMIT 1
    def where_single_value(cond)
      if loader = cached_where_placeholder_literalizer([cond], nil, :_where_single_value_loader) do |pl|
            single_value_ds.where(pl.arg)
          end
        loader.get(filter_expr(cond))
      else
        where(cond).single_value
      end
    end

    # Run the given SQL and return an array of all rows. If a block is given,
    # each row is yielded to the block after all rows are loaded. See with_sql_each.
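    #
    # Example (illustrative only; assumes a +table+ table with an +id+ column):
    #
    #   DB[:table].with_sql_all("SELECT * FROM table WHERE id < 10")
    #   # => [{:id=>1, ...}, {:id=>2, ...}]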
def with_sql_all(sql, &block) _all(block){|a| with_sql_each(sql){|r| a << r}} end # Execute the given SQL and return the number of rows deleted. This exists # solely as an optimization, replacing with_sql(sql).delete. It's significantly # faster as it does not require cloning the current dataset. def with_sql_delete(sql) execute_dui(sql) end alias with_sql_update with_sql_delete # Run the given SQL and yield each returned row to the block. def with_sql_each(sql) if rp = row_proc _with_sql_dataset.fetch_rows(sql){|r| yield rp.call(r)} else _with_sql_dataset.fetch_rows(sql){|r| yield r} end self end # Run the given SQL and return the first row, or nil if no rows were returned. # See with_sql_each. def with_sql_first(sql) with_sql_each(sql){|r| return r} nil end # Run the given SQL and return the first value in the first row, or nil if no # rows were returned. For this to make sense, the SQL given should select # only a single value. See with_sql_each. def with_sql_single_value(sql) if r = with_sql_first(sql) r.each{|_, v| return v} end end # Execute the given SQL and (on most databases) return the primary key of the # inserted row. def with_sql_insert(sql) execute_insert(sql) end protected # Internals of #import. If primary key values are requested, use # separate insert commands for each row. Otherwise, call #multi_insert_sql # and execute each statement it gives separately. A transaction is only used # if there are multiple statements to execute. def _import(columns, values, opts) trans_opts = Hash[opts] trans_opts[:server] = @opts[:server] if opts[:return] == :primary_key _import_transaction(values, trans_opts){values.map{|v| insert(columns, v)}} else stmts = multi_insert_sql(columns, values) _import_transaction(stmts, trans_opts){stmts.each{|st| execute_dui(st)}} end end # Return an array of arrays of values given by the symbols in ret_cols. def _select_map_multiple(ret_cols) map{|r| r.values_at(*ret_cols)} end # Returns an array of the first value in each row. def _select_map_single k = nil map{|r| r[k||=r.keys.first]} end # A dataset for returning single values from the current dataset. def single_value_ds clone(:limit=>1).ungraphed.naked end private # Internals of all and with_sql_all def _all(block) a = [] yield a post_load(a) a.each(&block) if block a end # Cached placeholder literalizer for methods that return values using aggregate functions. def _aggregate(function, arg) if loader = cached_placeholder_literalizer(:"_#{function}_loader") do |pl| aggregate_dataset.limit(1).select(SQL::Function.new(function, pl.arg).as(function)) end loader.get(arg) else aggregate_dataset.get(SQL::Function.new(function, arg).as(function)) end end # Use a transaction when yielding to the block if multiple values/statements # are provided. When only a single value or statement is provided, then yield # without using a transaction. def _import_transaction(values, trans_opts, &block) if values.length > 1 @db.transaction(trans_opts, &block) else yield end end # Internals of +select_hash+ and +select_hash_groups+ def _select_hash(meth, key_column, value_column, opts=OPTS) select(*(key_column.is_a?(Array) ? key_column : [key_column]) + (value_column.is_a?(Array) ? value_column : [value_column])). public_send(meth, hash_key_symbols(key_column), hash_key_symbols(value_column), opts) end # Internals of +select_map+ and +select_order_map+ def _select_map(column, order, &block) ds = ungraphed.naked columns = Array(column) virtual_row_columns(columns, block) select_cols = order ? 
columns.map{|c| c.is_a?(SQL::OrderedExpression) ? c.expression : c} : columns
      ds = ds.order(*columns.map{|c| unaliased_identifier(c)}) if order
      if column.is_a?(Array) || (columns.length > 1)
        ds.select(*select_cols)._select_map_multiple(hash_key_symbols(select_cols))
      else
        ds.select(auto_alias_expression(select_cols.first))._select_map_single
      end
    end

    # A cached dataset for a single record for this dataset.
    def _single_record_ds
      cached_dataset(:_single_record_ds){clone(:limit=>1)}
    end

    # Loader used for where_all and where_each.
    def _where_loader(where_args, where_block)
      cached_where_placeholder_literalizer(where_args, where_block, :_where_loader) do |pl|
        where(pl.arg)
      end
    end

    # Automatically alias the given expression if it does not have an identifiable alias.
    def auto_alias_expression(v)
      case v
      when LiteralString, Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression
        v
      else
        SQL::AliasedExpression.new(v, :v)
      end
    end

    # The default number of rows that can be inserted in a single INSERT statement via import.
    # The default is for no limit.
    def default_import_slice
      nil
    end

    # Set the server to use to :default unless it is already set in the passed opts
    def default_server_opts(opts)
      if @db.sharded? && !opts.has_key?(:server)
        opts = Hash[opts]
        opts[:server] = @opts[:server] || :default
      end
      opts
    end

    # Execute the given select SQL on the database using execute. Use the
    # :read_only server unless a specific server is set.
    def execute(sql, opts=OPTS, &block)
      db = @db
      if db.sharded? && !opts.has_key?(:server)
        opts = Hash[opts]
        opts[:server] = @opts[:server] || (@opts[:lock] ? :default : :read_only)
      end
      db.execute(sql, opts, &block)
    end

    # Execute the given SQL on the database using execute_ddl.
    def execute_ddl(sql, opts=OPTS, &block)
      @db.execute_ddl(sql, default_server_opts(opts), &block)
      nil
    end

    # Execute the given SQL on the database using execute_dui.
    def execute_dui(sql, opts=OPTS, &block)
      @db.execute_dui(sql, default_server_opts(opts), &block)
    end

    # Execute the given SQL on the database using execute_insert.
    def execute_insert(sql, opts=OPTS, &block)
      @db.execute_insert(sql, default_server_opts(opts), &block)
    end

    # Return a plain symbol given a potentially qualified or aliased symbol,
    # specifying the symbol that is likely to be used as the hash key
    # for the column when records are returned. Return nil if no hash key
    # can be determined
    def _hash_key_symbol(s, recursing=false)
      case s
      when Symbol
        _, c, a = split_symbol(s)
        (a || c).to_sym
      when SQL::Identifier, SQL::Wrapper
        _hash_key_symbol(s.value, true)
      when SQL::QualifiedIdentifier
        _hash_key_symbol(s.column, true)
      when SQL::AliasedExpression
        _hash_key_symbol(s.alias, true)
      when String
        s.to_sym if recursing
      end
    end

    # Return a plain symbol given a potentially qualified or aliased symbol,
    # specifying the symbol that is likely to be used as the hash key
    # for the column when records are returned. Raise Error if the hash key
    # symbol cannot be returned.
    def hash_key_symbol(s)
      if v = _hash_key_symbol(s)
        v
      else
        raise(Error, "#{s.inspect} is not supported, should be a Symbol, SQL::Identifier, SQL::QualifiedIdentifier, or SQL::AliasedExpression")
      end
    end

    # If s is an array, return an array with the given hash key symbols.
    # Otherwise, return a hash key symbol for the given expression
    # If a hash key symbol cannot be determined, raise an error.
    def hash_key_symbols(s)
      s.is_a?(Array) ? s.map{|c| hash_key_symbol(c)} : hash_key_symbol(s)
    end
    # Returns an expression that will ignore values preceding the given row, using the
    # receiver's current order. This yields the row and the array of order expressions
    # to the block, which should return an array of values to use.
    def ignore_values_preceding(row)
      order_exprs = @opts[:order].map do |v|
        if v.is_a?(SQL::OrderedExpression)
          descending = v.descending
          v = v.expression
        else
          descending = false
        end
        [v, descending]
      end

      row_values = yield(row, order_exprs.map(&:first))

      last_expr = []
      cond = order_exprs.zip(row_values).map do |(v, descending), value|
        expr = last_expr + [SQL::BooleanExpression.new(descending ? :< : :>, v, value)]
        last_expr += [SQL::BooleanExpression.new(:'=', v, value)]
        Sequel.&(*expr)
      end
      Sequel.|(*cond)
    end

    # Downcase identifiers by default when outputting them from the database.
    def output_identifier(v)
      v = 'untitled' if v == ''
      v.to_s.downcase.to_sym
    end

    # This is run inside .all, after all of the records have been loaded
    # via .each, but before any block passed to all is called. It is called with
    # a single argument, an array of all returned records. Does nothing by
    # default, added to make the model eager loading code simpler.
    def post_load(all_records)
    end

    # Called by insert/update/delete when returning is used.
    # Yields each row as a plain hash to the block if one is given, or returns
    # an array of plain hashes for all rows if a block is not given
    def returning_fetch_rows(sql, &block)
      if block
        default_server.fetch_rows(sql, &block)
        nil
      else
        rows = []
        default_server.fetch_rows(sql){|r| rows << r}
        rows
      end
    end

    # Return the unaliased part of the identifier. Handles both
    # implicit aliases in symbols, as well as SQL::AliasedExpression
    # objects. Other objects are returned as is.
    def unaliased_identifier(c)
      case c
      when Symbol
        table, column, aliaz = split_symbol(c)
        if aliaz
          table ? SQL::QualifiedIdentifier.new(table, column) : Sequel.identifier(column)
        else
          c
        end
      when SQL::AliasedExpression
        c.expression
      when SQL::OrderedExpression
        case expr = c.expression
        when Symbol, SQL::AliasedExpression
          SQL::OrderedExpression.new(unaliased_identifier(expr), c.descending, :nulls=>c.nulls)
        else
          c
        end
      else
        c
      end
    end

    # Cached dataset to use for with_sql_#{all,each,first,single_value}.
    # This is used so that the columns returned by the given SQL do not
    # affect the receiver of the with_sql_* method.
    def _with_sql_dataset
      if @opts[:_with_sql_ds]
        self
      else
        cached_dataset(:_with_sql_ds) do
          clone(:_with_sql_ds=>true)
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/dataset/dataset_module.rb000066400000000000000000000027151434214120600221150ustar00rootroot00000000000000# frozen-string-literal: true

module Sequel
  class Dataset
    # This Module subclass is used by Database#extend_datasets
    # and Dataset#with_extend to add dataset methods to classes.
    # It adds some helper methods inside the module that can define
    # named methods on the dataset instances which do specific actions.
# For example: # # DB.extend_datasets do # order :by_id, :id # select :with_id_and_name, :id, :name # where :active, :active # end # # DB[:table].active.with_id_and_name.by_id # # SELECT id, name FROM table WHERE active ORDER BY id class DatasetModule < ::Module meths = (<<-METHS).split.map(&:to_sym) where exclude exclude_having having distinct grep group group_and_count group_append limit offset order order_append order_prepend reverse select select_all select_append select_group server METHS # Define a method in the module def self.def_dataset_caching_method(mod, meth) mod.send(:define_method, meth) do |name, *args, &block| if block define_method(name){public_send(meth, *args, &block)} else key = :"_#{meth}_#{name}_ds" define_method(name) do cached_dataset(key){public_send(meth, *args)} end end end end meths.each do |meth| def_dataset_caching_method(self, meth) end end end end sequel-5.63.0/lib/sequel/dataset/features.rb000066400000000000000000000203241434214120600207350ustar00rootroot00000000000000# frozen-string-literal: true module Sequel class Dataset # --------------------- # :section: 4 - Methods that describe what the dataset supports # These methods all return booleans, with most describing whether or not the # dataset supports a feature. # --------------------- # Whether this dataset quotes identifiers. def quote_identifiers? @opts.fetch(:quote_identifiers, true) end # Whether this dataset will provide accurate number of rows matched for # delete and update statements, true by default. Accurate in this case is the number of # rows matched by the dataset's filter. def provides_accurate_rows_matched? true end # Whether you must use a column alias list for recursive CTEs, false by default. def recursive_cte_requires_column_aliases? false end # Whether the dataset requires SQL standard datetimes. False by default, # as most allow strings with ISO 8601 format. def requires_sql_standard_datetimes? false end # Whether type specifiers are required for prepared statement/bound # variable argument placeholders (i.e. :bv__integer), false by default. def requires_placeholder_type_specifiers? false end # Whether the dataset supports common table expressions, false by default. # If given, +type+ can be :select, :insert, :update, or :delete, in which case it # determines whether WITH is supported for the respective statement type. def supports_cte?(type=:select) false end # Whether the dataset supports common table expressions in subqueries, false by default. # If false, applies the WITH clause to the main query, which can cause issues # if multiple WITH clauses use the same name. def supports_cte_in_subqueries? false end # Whether deleting from joined datasets is supported, false by default. def supports_deleting_joins? supports_modifying_joins? end # Whether the database supports derived column lists (e.g. # "table_expr AS table_alias(column_alias1, column_alias2, ...)"), true by # default. def supports_derived_column_lists? true end # Whether the dataset supports or can emulate the DISTINCT ON clause, false by default. def supports_distinct_on? false end # Whether the dataset supports CUBE with GROUP BY, false by default. def supports_group_cube? false end # Whether the dataset supports ROLLUP with GROUP BY, false by default. def supports_group_rollup? false end # Whether the dataset supports GROUPING SETS with GROUP BY, false by default. def supports_grouping_sets? 
false end # Whether this dataset supports the +insert_select+ method for returning all columns values # directly from an insert query, false by default. def supports_insert_select? supports_returning?(:insert) end # Whether the dataset supports the INTERSECT and EXCEPT compound operations, true by default. def supports_intersect_except? true end # Whether the dataset supports the INTERSECT ALL and EXCEPT ALL compound operations, true by default. def supports_intersect_except_all? true end # Whether the dataset supports the IS TRUE syntax, true by default. def supports_is_true? true end # Whether the dataset supports the JOIN table USING (column1, ...) syntax, true by default. # If false, support is emulated using JOIN table ON (table.column1 = other_table.column1). def supports_join_using? true end # Whether the dataset supports LATERAL for subqueries in the FROM or JOIN clauses, false by default. def supports_lateral_subqueries? false end # Whether limits are supported in correlated subqueries, true by default. def supports_limits_in_correlated_subqueries? true end # Whether the dataset supports skipping raising an error instead of waiting for locked rows when returning data, false by default. def supports_nowait? false end # Whether the MERGE statement is supported, false by default. def supports_merge? false end # Whether modifying joined datasets is supported, false by default. def supports_modifying_joins? false end # Whether the IN/NOT IN operators support multiple columns when an # array of values is given, true by default. def supports_multiple_column_in? true end # Whether offsets are supported in correlated subqueries, true by default. def supports_offsets_in_correlated_subqueries? true end # Whether the dataset supports or can fully emulate the DISTINCT ON clause, # including respecting the ORDER BY clause, false by default. def supports_ordered_distinct_on? supports_distinct_on? end # Whether placeholder literalizers are supported, true by default. def supports_placeholder_literalizer? true end # Whether the dataset supports pattern matching by regular expressions, false by default. def supports_regexp? false end # Whether the dataset supports REPLACE syntax, false by default. def supports_replace? false end # Whether the RETURNING clause is supported for the given type of query, false by default. # +type+ can be :insert, :update, or :delete. def supports_returning?(type) false end # Whether the dataset supports skipping locked rows when returning data, false by default. def supports_skip_locked? false end # Whether the database supports SELECT *, column FROM table, true by default. def supports_select_all_and_column? true end # Whether the dataset supports timezones in literal timestamps, false by default. def supports_timestamp_timezones? false end # Whether the dataset supports fractional seconds in literal timestamps, true by default. def supports_timestamp_usecs? true end # Whether updating joined datasets is supported, false by default. def supports_updating_joins? supports_modifying_joins? end # Whether the dataset supports the WINDOW clause to define windows used by multiple # window functions, false by default. def supports_window_clause? false end # Whether the dataset supports window functions, false by default. def supports_window_functions? false end # Whether the dataset supports the given window function option. True by default. # This should only be called if supports_window_functions? is true. Possible options # are :rows, :range, :groups, :offset, :exclude. 
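    #
    # For example, with the default implementation shown below, :rows, :range,
    # and :offset are supported, while :groups and :exclude are not
    # (illustrative only; adapters override this as needed):
    #
    #   DB[:table].supports_window_function_frame_option?(:rows)   # => true
    #   DB[:table].supports_window_function_frame_option?(:groups) # => false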
def supports_window_function_frame_option?(option) case option when :rows, :range, :offset true else false end end # Whether the dataset supports WHERE TRUE (or WHERE 1 for databases that # that use 1 for true), true by default. def supports_where_true? true end private # Whether insert(nil) or insert({}) must be emulated by # using at least one value. def insert_supports_empty_values? true end # Whether the dataset needs ESCAPE for LIKE for correct behavior. def requires_like_escape? true end # Whether ORDER BY col NULLS FIRST/LAST must be emulated. def requires_emulating_nulls_first? false end # Whether common table expressions are supported in UNION/INTERSECT/EXCEPT clauses. def supports_cte_in_compounds? supports_cte_in_subqueries? end # Whether the dataset supports the FILTER clause for aggregate functions. # If not, support is emulated using CASE. def supports_filtered_aggregates? false end # Whether the database supports quoting function names. def supports_quoted_function_names? false end # Whether the RETURNING clause is used for the given dataset. # +type+ can be :insert, :update, or :delete. def uses_returning?(type) opts[:returning] && !@opts[:sql] && supports_returning?(type) end # Whether the dataset uses WITH ROLLUP/CUBE instead of ROLLUP()/CUBE(). def uses_with_rollup? false end end end sequel-5.63.0/lib/sequel/dataset/graph.rb000066400000000000000000000311421434214120600202200ustar00rootroot00000000000000# frozen-string-literal: true module Sequel class Dataset # --------------------- # :section: 5 - Methods related to dataset graphing # Dataset graphing automatically creates unique aliases columns in join # tables that overlap with already selected column aliases. # All of these methods return modified copies of the receiver. # --------------------- # Adds the given graph aliases to the list of graph aliases to use, # unlike +set_graph_aliases+, which replaces the list (the equivalent # of +select_append+ when graphing). See +set_graph_aliases+. # # DB[:table].add_graph_aliases(some_alias: [:table, :column]) # # SELECT ..., table.column AS some_alias def add_graph_aliases(graph_aliases) graph = opts[:graph] unless (graph && (ga = graph[:column_aliases])) raise Error, "cannot call add_graph_aliases on a dataset that has not been called with graph or set_graph_aliases" end columns, graph_aliases = graph_alias_columns(graph_aliases) select_append(*columns).clone(:graph => graph.merge(:column_aliases=>ga.merge(graph_aliases).freeze).freeze) end # Similar to Dataset#join_table, but uses unambiguous aliases for selected # columns and keeps metadata about the aliases for use in other methods. # # Arguments: # dataset :: Can be a symbol (specifying a table), another dataset, # or an SQL::Identifier, SQL::QualifiedIdentifier, or SQL::AliasedExpression. # join_conditions :: Any condition(s) allowed by +join_table+. # block :: A block that is passed to +join_table+. # # Options: # :from_self_alias :: The alias to use when the receiver is not a graphed # dataset but it contains multiple FROM tables or a JOIN. In this case, # the receiver is wrapped in a from_self before graphing, and this option # determines the alias to use. # :implicit_qualifier :: The qualifier of implicit conditions, see #join_table. # :join_only :: Only join the tables, do not change the selected columns. # :join_type :: The type of join to use (passed to +join_table+). Defaults to :left_outer. # :qualify:: The type of qualification to do, see #join_table. # :select :: An array of columns to select. 
    #            When not used, selects
    #            all columns in the given dataset. When set to false, selects no
    #            columns and is like simply joining the tables, though graph keeps
    #            some metadata about the join that makes it important to use +graph+ instead
    #            of +join_table+.
    # :table_alias :: The alias to use for the table. If not specified, doesn't
    #                 alias the table. You will get an error if the alias (or table) name is
    #                 used more than once.
    def graph(dataset, join_conditions = nil, options = OPTS, &block)
      # Allow the use of a dataset or symbol as the first argument
      # Find the table name/dataset based on the argument
      table_alias = options[:table_alias]
      table = dataset
      create_dataset = true

      case dataset
      when Symbol
        # let alias be the same as the table name (sans any optional schema)
        # unless alias explicitly given in the symbol using ___ notation and symbol splitting is enabled
        table_alias ||= split_symbol(table).compact.last
      when Dataset
        if dataset.simple_select_all?
          table = dataset.opts[:from].first
          table_alias ||= table
        else
          table_alias ||= dataset_alias((@opts[:num_dataset_sources] || 0)+1)
        end
        create_dataset = false
      when SQL::Identifier
        table_alias ||= table.value
      when SQL::QualifiedIdentifier
        table_alias ||= split_qualifiers(table).last
      when SQL::AliasedExpression
        return graph(table.expression, join_conditions, {:table_alias=>table.alias}.merge!(options), &block)
      else
        raise Error, "The dataset argument should be a symbol or dataset"
      end
      table_alias = table_alias.to_sym

      if create_dataset
        dataset = db.from(table)
      end

      # Raise Sequel::Error with explanation that the table alias has been used
      raise_alias_error = lambda do
        raise(Error, "this #{options[:table_alias] ? 'alias' : 'table'} has already been used, please specify " \
          "#{options[:table_alias] ? 'a different alias' : 'an alias via the :table_alias option'}")
      end

      # Only allow table aliases that haven't been used
      raise_alias_error.call if @opts[:graph] && @opts[:graph][:table_aliases] && @opts[:graph][:table_aliases].include?(table_alias)

      table_alias_qualifier = qualifier_from_alias_symbol(table_alias, table)
      implicit_qualifier = options[:implicit_qualifier]
      joined_dataset = joined_dataset?
      ds = self
      graph = opts[:graph]

      if !graph && (select = @opts[:select]) && !select.empty?
        select_columns = nil

        unless !joined_dataset && select.length == 1 && (select[0].is_a?(SQL::ColumnAll))
          force_from_self = false
          select_columns = select.map do |sel|
            unless col = _hash_key_symbol(sel)
              force_from_self = true
              break
            end

            [sel, col]
          end

          select_columns = nil if force_from_self
        end
      end

      # Use a from_self if this is already a joined table (or from_self specifically disabled for graphs)
      if (@opts[:graph_from_self] != false && !graph && (joined_dataset || force_from_self))
        from_selfed = true
        implicit_qualifier = options[:from_self_alias] || first_source
        ds = ds.from_self(:alias=>implicit_qualifier)
      end

      # Join the table early in order to avoid cloning the dataset twice
      ds = ds.join_table(options[:join_type] || :left_outer, table, join_conditions, :table_alias=>table_alias_qualifier, :implicit_qualifier=>implicit_qualifier, :qualify=>options[:qualify], &block)
      return ds if options[:join_only]

      opts = ds.opts

      # Whether to include the table in the result set
      add_table = options[:select] == false ? false : true
      if graph
        graph = graph.dup
        select = opts[:select].dup
        [:column_aliases, :table_aliases, :column_alias_num].each{|k| graph[k] = graph[k].dup}
      else
        # Setup the initial graph data structure if it doesn't exist
        qualifier = ds.first_source_alias
        master = alias_symbol(qualifier)
        raise_alias_error.call if master == table_alias

        # Master hash storing all .graph related information
        graph = {}

        # Associates column aliases back to tables and columns
        column_aliases = graph[:column_aliases] = {}

        # Associates table alias (the master is never aliased)
        table_aliases = graph[:table_aliases] = {master=>self}

        # Keep track of the alias numbers used
        ca_num = graph[:column_alias_num] = Hash.new(0)

        select = if select_columns
          select_columns.map do |sel, column|
            column_aliases[column] = [master, column]
            if from_selfed
              # Initial dataset was wrapped in subselect, selected all
              # columns in the subselect, qualified by the subselect alias.
              Sequel.qualify(qualifier, Sequel.identifier(column))
            else
              # Initial dataset not wrapped in subselect, just make
              # sure columns are qualified in some way.
              qualified_expression(sel, qualifier)
            end
          end
        else
          columns.map do |column|
            column_aliases[column] = [master, column]
            SQL::QualifiedIdentifier.new(qualifier, column)
          end
        end
      end

      # Add the table alias to the list of aliases
      # Even if it isn't being used in the result set,
      # we add a key for it with a nil value so we can check if it
      # is used more than once
      table_aliases = graph[:table_aliases]
      table_aliases[table_alias] = add_table ? dataset : nil

      # Add the columns to the selection unless we are ignoring them
      if add_table
        column_aliases = graph[:column_aliases]
        ca_num = graph[:column_alias_num]

        # Which columns to add to the result set
        cols = options[:select] || dataset.columns

        # If the column hasn't been used yet, don't alias it.
        # If it has been used, try table_column.
        # If that has been used, try table_column_N
        # using the next value of N that we know hasn't been
        # used
        cols.each do |column|
          col_alias, identifier = if column_aliases[column]
            column_alias = :"#{table_alias}_#{column}"
            if column_aliases[column_alias]
              column_alias_num = ca_num[column_alias]
              column_alias = :"#{column_alias}_#{column_alias_num}"
              ca_num[column_alias] += 1
            end
            [column_alias, SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(table_alias_qualifier, column), column_alias)]
          else
            ident = SQL::QualifiedIdentifier.new(table_alias_qualifier, column)
            [column, ident]
          end

          column_aliases[col_alias] = [table_alias, column].freeze
          select.push(identifier)
        end
      end
      [:column_aliases, :table_aliases, :column_alias_num].each{|k| graph[k].freeze}
      ds = ds.clone(:graph=>graph.freeze)
      ds.select(*select)
    end

    # This allows you to manually specify the graph aliases to use
    # when using graph. You can use it to only select certain
    # columns, and have those columns mapped to specific aliases
    # in the result set. This is the equivalent of +select+ for a
    # graphed dataset, and must be used instead of +select+ whenever
    # graphing is used.
    #
    # graph_aliases should be a hash with keys being symbols of
    # column aliases, and values being either symbols or arrays with one to three elements.
    # If the value is a symbol, it is assumed to be the same as a one element
    # array containing that symbol.
    # The first element of the array should be the table alias symbol.
    # The second should be the actual column name symbol. If the array only
    # has a single element the column name symbol will be assumed to be the
    # same as the corresponding hash key. If the array
    # has a third element, it is used as the value returned, instead of
    # table_alias.column_name.
    #
    #   DB[:artists].graph(:albums, artist_id: :id).
    #     set_graph_aliases(name: :artists,
    #                       album_name: [:albums, :name],
    #                       forty_two: [:albums, :fourtwo, 42]).first
    #   # SELECT artists.name, albums.name AS album_name, 42 AS forty_two ...
    def set_graph_aliases(graph_aliases)
      columns, graph_aliases = graph_alias_columns(graph_aliases)
      if graph = opts[:graph]
        select(*columns).clone(:graph => graph.merge(:column_aliases=>graph_aliases.freeze).freeze)
      else
        raise Error, "cannot call #set_graph_aliases on an ungraphed dataset"
      end
    end

    # Remove the splitting of results into subhashes, and all metadata
    # related to the current graph (if any).
    def ungraphed
      clone(:graph=>nil)
    end

    private

    # Wrap the alias symbol in an SQL::Identifier if the identifier on which is based
    # is an SQL::Identifier. This works around cases where symbol splitting is enabled and the alias symbol contains
    # double embedded underscores which would be considered an implicit qualified identifier
    # if not wrapped in an SQL::Identifier.
    def qualifier_from_alias_symbol(aliaz, identifier)
      case identifier
      when SQL::QualifiedIdentifier
        if identifier.column.is_a?(String)
          Sequel.identifier(aliaz)
        else
          aliaz
        end
      when SQL::Identifier
        Sequel.identifier(aliaz)
      else
        aliaz
      end
    end

    # Transform the hash of graph aliases and return a two element array
    # where the first element is an array of identifiers suitable to pass to
    # a select method, and the second is a new hash of preprocessed graph aliases.
    def graph_alias_columns(graph_aliases)
      gas = {}
      identifiers = graph_aliases.map do |col_alias, tc|
        table, column, value = Array(tc)
        column ||= col_alias
        gas[col_alias] = [table, column].freeze
        identifier = value || SQL::QualifiedIdentifier.new(table, column)
        identifier = SQL::AliasedExpression.new(identifier, col_alias) if value || column != col_alias
        identifier
      end
      [identifiers, gas]
    end
  end
end
sequel-5.63.0/lib/sequel/dataset/misc.rb000066400000000000000000000266351434214120600200620ustar00rootroot00000000000000# frozen-string-literal: true

module Sequel
  class Dataset
    # ---------------------
    # :section: 6 - Miscellaneous methods
    # These methods don't fit cleanly into another section.
    # ---------------------

    # The database related to this dataset. This is the Database instance that
    # will execute all of this dataset's queries.
    attr_reader :db

    # The hash of options for this dataset, keys are symbols.
    attr_reader :opts

    # Constructs a new Dataset instance with an associated database and
    # options. Datasets are usually constructed by invoking the Database#[] method:
    #
    #   DB[:posts]
    #
    # Sequel::Dataset is an abstract class that is not useful by itself. Each
    # database adapter provides a subclass of Sequel::Dataset, and has
    # the Database#dataset method return an instance of that subclass.
    def initialize(db)
      @db = db
      @opts = OPTS
      @cache = {}
      freeze
    end

    # Define a hash value such that datasets with the same class, DB, and opts
    # will be considered equal.
    def ==(o)
      o.is_a?(self.class) && db == o.db && opts == o.opts
    end

    # An object representing the current date or time, should be an instance
    # of Sequel.datetime_class.
    def current_datetime
      Sequel.datetime_class.now
    end

    # Alias for ==
    def eql?(o)
      self == o
    end

    # Return self, as datasets are always frozen.
    def dup
      self
    end

    # Yield a dataset for each server in the connection pool that is tied to that server.
# Intended for use in sharded environments where all servers need to be modified # with the same data: # # DB[:configs].where(key: 'setting').each_server{|ds| ds.update(value: 'new_value')} def each_server db.servers.each{|s| yield server(s)} end # Returns the string with the LIKE metacharacters (% and _) escaped. # Useful for when the LIKE term is a user-provided string where metacharacters should not # be recognized. Example: # # ds.escape_like("foo\\%_") # 'foo\\\%\_' def escape_like(string) string.gsub(/[\\%_]/){|m| "\\#{m}"} end if TRUE_FREEZE # Freeze the opts when freezing the dataset. def freeze @opts.freeze super end else # :nocov: def freeze # :nodoc: self end def frozen? # :nodoc: true end # :nocov: end # Alias of +first_source_alias+ def first_source first_source_alias end # The first source (primary table) for this dataset. If the dataset doesn't # have a table, raises an +Error+. If the table is aliased, returns the aliased name. # # DB[:table].first_source_alias # # => :table # # DB[Sequel[:table].as(:t)].first_source_alias # # => :t def first_source_alias source = @opts[:from] if source.nil? || source.empty? raise Error, 'No source specified for query' end case s = source.first when SQL::AliasedExpression s.alias when Symbol _, _, aliaz = split_symbol(s) aliaz ? aliaz.to_sym : s else s end end # The first source (primary table) for this dataset. If the dataset doesn't # have a table, raises an error. If the table is aliased, returns the original # table, not the alias # # DB[:table].first_source_table # # => :table # # DB[Sequel[:table].as(:t)].first_source_table # # => :table def first_source_table source = @opts[:from] if source.nil? || source.empty? raise Error, 'No source specified for query' end case s = source.first when SQL::AliasedExpression s.expression when Symbol sch, table, aliaz = split_symbol(s) aliaz ? (sch ? SQL::QualifiedIdentifier.new(sch, table) : table.to_sym) : s else s end end # Define a hash value such that datasets with the same class, DB, and opts, # will have the same hash value. def hash [self.class, db, opts].hash end # Returns a string representation of the dataset including the class name # and the corresponding SQL select statement. def inspect "#<#{visible_class_name}: #{sql.inspect}>" end # Whether this dataset is a joined dataset (multiple FROM tables or any JOINs). def joined_dataset? !!((opts[:from].is_a?(Array) && opts[:from].size > 1) || opts[:join]) end # The alias to use for the row_number column, used when emulating OFFSET # support and for eager limit strategies def row_number_column :x_sequel_row_number_x end # The row_proc for this database, should be any object that responds to +call+ with # a single hash argument and returns the object you want #each to return. def row_proc @opts[:row_proc] end # Splits a possible implicit alias in +c+, handling both SQL::AliasedExpressions # and Symbols. Returns an array of two elements, with the first being the # main expression, and the second being the alias. def split_alias(c) case c when Symbol c_table, column, aliaz = split_symbol(c) [c_table ? SQL::QualifiedIdentifier.new(c_table, column.to_sym) : column.to_sym, aliaz] when SQL::AliasedExpression [c.expression, c.alias] when SQL::JoinClause [c.table, c.table_alias] else [c, nil] end end # This returns an SQL::Identifier or SQL::AliasedExpression containing an # SQL identifier that represents the unqualified column for the given value. 
# The given value should be a Symbol, SQL::Identifier, SQL::QualifiedIdentifier, # or SQL::AliasedExpression containing one of those. In other cases, this # returns nil. def unqualified_column_for(v) unless v.is_a?(String) _unqualified_column_for(v) end end # Creates a unique table alias that hasn't already been used in the dataset. # table_alias can be any type of object accepted by alias_symbol. # The symbol returned will be the implicit alias in the argument, # possibly appended with "_N" if the implicit alias has already been # used, where N is an integer starting at 0 and increasing until an # unused one is found. # # You can provide a second addition array argument containing symbols # that should not be considered valid table aliases. The current aliases # for the FROM and JOIN tables are automatically included in this array. # # DB[:table].unused_table_alias(:t) # # => :t # # DB[:table].unused_table_alias(:table) # # => :table_0 # # DB[:table, :table_0].unused_table_alias(:table) # # => :table_1 # # DB[:table, :table_0].unused_table_alias(:table, [:table_1, :table_2]) # # => :table_3 def unused_table_alias(table_alias, used_aliases = []) table_alias = alias_symbol(table_alias) used_aliases += opts[:from].map{|t| alias_symbol(t)} if opts[:from] used_aliases += opts[:join].map{|j| j.table_alias ? alias_alias_symbol(j.table_alias) : alias_symbol(j.table)} if opts[:join] if used_aliases.include?(table_alias) i = 0 while true ta = :"#{table_alias}_#{i}" return ta unless used_aliases.include?(ta) i += 1 end else table_alias end end # Return a modified dataset with quote_identifiers set. def with_quote_identifiers(v) clone(:quote_identifiers=>v, :skip_symbol_cache=>true) end protected # Access the cache for the current dataset. Should be used with caution, # as access to the cache is not thread safe without a mutex if other # threads can reference the dataset. Symbol keys prefixed with an # underscore are reserved for internal use. attr_reader :cache # Retreive a value from the dataset's cache in a thread safe manner. def cache_get(k) Sequel.synchronize{@cache[k]} end # Set a value in the dataset's cache in a thread safe manner. def cache_set(k, v) Sequel.synchronize{@cache[k] = v} end # Clear the columns hash for the current dataset. This is not a # thread safe operation, so it should only be used if the dataset # could not be used by another thread (such as one that was just # created via clone). def clear_columns_cache @cache.delete(:_columns) end # The cached columns for the current dataset. def _columns cache_get(:_columns) end private # Check the cache for the given key, returning the value. # Otherwise, yield to get the dataset and cache the dataset under the given key. def cached_dataset(key) unless ds = cache_get(key) ds = yield cache_set(key, ds) end ds end # Return a cached placeholder literalizer for the given key if there # is one for this dataset. If there isn't one, increment the counter # for the number of calls for the key, and if the counter is at least # three, then create a placeholder literalizer by yielding to the block, # and cache it. def cached_placeholder_literalizer(key) if loader = cache_get(key) return loader unless loader.is_a?(Integer) loader += 1 if loader >= 3 loader = Sequel::Dataset::PlaceholderLiteralizer.loader(self){|pl, _| yield pl} cache_set(key, loader) else cache_set(key, loader + 1) loader = nil end elsif cache_sql? && supports_placeholder_literalizer? 
cache_set(key, 1) end loader end # Return a cached placeholder literalizer for the key, unless where_block is # nil and where_args is an empty array or hash. This is designed to guard # against placeholder literalizer use when passing arguments to where # in the uncached case and filter_expr if a cached placeholder literalizer # is used. def cached_where_placeholder_literalizer(where_args, where_block, key, &block) where_args = where_args[0] if where_args.length == 1 unless where_block return if where_args == OPTS || where_args == EMPTY_ARRAY end cached_placeholder_literalizer(key, &block) end # Set the columns for the current dataset. def columns=(v) cache_set(:_columns, v) end # Set the db, opts, and cache for the copy of the dataset. def initialize_clone(c, _=nil) @db = c.db @opts = Hash[c.opts] if cols = c.cache_get(:_columns) @cache = {:_columns=>cols} else @cache = {} end end alias initialize_copy initialize_clone # Internal recursive version of unqualified_column_for, handling Strings inside # of other objects. def _unqualified_column_for(v) case v when Symbol _, c, a = Sequel.split_symbol(v) c = Sequel.identifier(c) a ? c.as(a) : c when String Sequel.identifier(v) when SQL::Identifier v when SQL::QualifiedIdentifier _unqualified_column_for(v.column) when SQL::AliasedExpression if expr = unqualified_column_for(v.expression) SQL::AliasedExpression.new(expr, v.alias) end end end # Return the class name for this dataset, but skip anonymous classes def visible_class_name c = self.class c = c.superclass while c.name.nil? || c.name == '' c.name end end end sequel-5.63.0/lib/sequel/dataset/placeholder_literalizer.rb000066400000000000000000000174701434214120600240170ustar00rootroot00000000000000# frozen-string-literal: true module Sequel class Dataset # PlaceholderLiteralizer allows you to record the application of arbitrary changes # to a dataset with placeholder arguments, recording where those placeholder arguments # are used in the query. When running the query, the literalization process is much # faster as Sequel can skip most of the work it normally has to do when literalizing a # dataset. # # Basically, this enables optimizations that allow Sequel to cache the SQL produced # for a given dataset, so that it doesn't need to recompute that information every # time. # # Example: # # loader = Sequel::Dataset::PlaceholderLiteralizer.loader(DB[:items]) do |pl, ds| # ds.where(id: pl.arg).exclude(name: pl.arg).limit(1) # end # loader.first(1, "foo") # # SELECT * FROM items WHERE ((id = 1) AND (name != 'foo')) LIMIT 1 # loader.first(2, "bar") # # SELECT * FROM items WHERE ((id = 2) AND (name != 'bar')) LIMIT 1 # # Caveats: # # Note that this method does not handle all possible cases. For example: # # loader = Sequel::Dataset::PlaceholderLiteralizer.loader(DB[:items]) do |pl, ds| # ds.join(pl.arg, item_id: :id) # end # loader.all(:cart_items) # # Will not qualify the item_id column with cart_items. In this type of situation it's # best to add a table alias when joining: # # loader = Sequel::Dataset::PlaceholderLiteralizer.loader(DB[:items]) do |pl, ds| # ds.join(Sequel.as(pl.arg, :t), item_id: :id) # end # loader.all(:cart_items) # # There are other similar cases that are not handled, mainly when Sequel changes the # SQL produced depending on the types of the arguments. class PlaceholderLiteralizer # A placeholder argument used by the PlaceholderLiteralizer. This records the offset # that the argument should be used in the resulting SQL. 
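      #
      # Arguments are created via Recorder#arg, and #transform can be used to
      # modify a value before it is literalized. A minimal illustrative sketch
      # (the lowercasing transform here is hypothetical):
      #
      #   loader = Sequel::Dataset::PlaceholderLiteralizer.loader(DB[:items]) do |pl, ds|
      #     ds.where(name: pl.arg.transform{|v| v.to_s.downcase})
      #   end
      #   loader.first("ABC") # runs the query with name = 'abc'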
      class Argument
        # Set the recorder, the argument position, and any transforming block to use
        # for this placeholder.
        def initialize(recorder, pos, transformer=nil)
          @recorder = recorder
          @pos = pos
          @transformer = transformer
          freeze
        end

        # Record the SQL query offset, argument position, and transforming block where the
        # argument should be literalized.
        def sql_literal_append(ds, sql)
          if ds.opts[:placeholder_literal_null]
            ds.send(:literal_append, sql, nil)
          else
            @recorder.use(sql, @pos, @transformer)
          end
        end

        # Return a new Argument object for the same recorder and argument position, but with a
        # different transformer block.
        def transform(&block)
          Argument.new(@recorder, @pos, block)
        end
      end

      # Records the offsets at which the placeholder arguments are used in
      # the SQL query.
      class Recorder
        # Yields the receiver and the dataset to the block, which should
        # call #arg on the receiver for each placeholder argument, and
        # return the dataset that you want to load.
        def loader(dataset, &block)
          PlaceholderLiteralizer.new(*process(dataset, &block))
        end

        # Return an Argument with the specified position, or the next position. In
        # general you shouldn't mix calls with an argument and calls without an
        # argument for the same receiver.
        def arg(v=(no_arg_given = true; @argn+=1))
          unless no_arg_given
            @argn = v if @argn < v
          end
          Argument.new(self, v)
        end

        # Record the offset at which the argument is used in the SQL query, and any
        # transforming block.
        def use(sql, arg, transformer)
          @args << [sql, sql.length, arg, transformer]
        end

        private

        # Return an array with two elements, the first being an
        # SQL string with interpolated prepared argument placeholders
        # (suitable for inspect), the second being an array of
        # SQL fragments suitable for use in creating a
        # Sequel::SQL::PlaceholderLiteralString. Designed for use with
        # emulated prepared statements.
        def prepared_sql_and_frags(dataset, prepared_args, &block)
          _, frags, final_sql, _ = process(dataset, &block)

          frags = frags.map(&:first)
          prepared_sql = String.new
          frags.each_with_index do |sql, i|
            prepared_sql << sql
            prepared_sql << "$#{prepared_args[i]}"
          end
          frags << final_sql
          prepared_sql << final_sql

          [prepared_sql, frags]
        end

        # Internals of #loader and #prepared_sql_and_frags.
        def process(dataset)
          @argn = -1
          @args = []
          ds = yield self, dataset
          sql = ds.clone(:placeholder_literalizer=>self).sql

          last_offset = 0
          fragments = @args.map do |used_sql, offset, arg, t|
            raise Error, "placeholder literalizer argument literalized into different string than dataset returned" unless used_sql.equal?(sql)
            a = [sql[last_offset...offset], arg, t]
            last_offset = offset
            a
          end
          final_sql = sql[last_offset..-1]

          arity = @argn+1
          [ds, fragments, final_sql, arity]
        end
      end

      # Create a PlaceholderLiteralizer by yielding a Recorder and dataset to the
      # given block, recording the offsets at which the recorder's arguments
      # are used in the query.
      def self.loader(dataset, &block)
        Recorder.new.loader(dataset, &block)
      end

      # Save the dataset, array of SQL fragments, and ending SQL string.
      def initialize(dataset, fragments, final_sql, arity)
        @dataset = dataset
        @fragments = fragments
        @final_sql = final_sql
        @arity = arity
        freeze
      end

      # Freeze the fragments and final SQL when freezing the literalizer.
      def freeze
        @fragments.freeze
        @final_sql.freeze
        super
      end

      # Return a new PlaceholderLiteralizer with a modified dataset. This yields the
      # receiver's dataset to the block, and the block should return the new dataset
      # to use.
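      #
      # A minimal sketch (assuming +loader+ was built via PlaceholderLiteralizer.loader):
      #
      #   read_loader = loader.with_dataset{|ds| ds.server(:read_only)}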
def with_dataset dataset = yield @dataset other = dup other.instance_variable_set(:@dataset, dataset) other.freeze end # Return an array of all objects by running the SQL query for the given arguments. # If a block is given, yields all objects to the block after loading them. def all(*args, &block) @dataset.with_sql_all(sql(*args), &block) end # Run the SQL query for the given arguments, yielding each returned row to the block. def each(*args, &block) @dataset.with_sql_each(sql(*args), &block) end # Run the SQL query for the given arguments, returning the first row. def first(*args) @dataset.with_sql_first(sql(*args)) end # Run the SQL query for the given arguments, returning the first value. For this to # make sense, the dataset should return a single row with a single value (or no rows). def get(*args) @dataset.with_sql_single_value(sql(*args)) end # Return the SQL query to use for the given arguments. def sql(*args) raise Error, "wrong number of arguments (#{args.length} for #{@arity})" unless args.length == @arity s = String.new ds = @dataset @fragments.each do |sql, i, transformer| s << sql if i.is_a?(Integer) v = args.fetch(i) v = transformer.call(v) if transformer else v = i.call end ds.literal_append(s, v) end s << @final_sql s end end end end sequel-5.63.0/lib/sequel/dataset/prepared_statements.rb000066400000000000000000000346031434214120600231750ustar00rootroot00000000000000# frozen-string-literal: true module Sequel class Dataset # --------------------- # :section: 8 - Methods related to prepared statements or bound variables # On some adapters, these use native prepared statements and bound variables, on others # support is emulated. For details, see the {"Prepared Statements/Bound Variables" guide}[rdoc-ref:doc/prepared_statements.rdoc]. # --------------------- PREPARED_ARG_PLACEHOLDER = LiteralString.new('?').freeze DEFAULT_PREPARED_STATEMENT_MODULE_METHODS = %w'execute execute_dui execute_insert'.freeze.each(&:freeze) PREPARED_STATEMENT_MODULE_CODE = { :bind => "opts = Hash[opts]; opts[:arguments] = bind_arguments".freeze, :prepare => "sql = prepared_statement_name".freeze, :prepare_bind => "sql = prepared_statement_name; opts = Hash[opts]; opts[:arguments] = bind_arguments".freeze }.freeze def self.prepared_statements_module(code, mods, meths=DEFAULT_PREPARED_STATEMENT_MODULE_METHODS, &block) code = PREPARED_STATEMENT_MODULE_CODE[code] || code Module.new do Array(mods).each do |mod| include mod end if block module_eval(&block) end meths.each do |meth| module_eval("def #{meth}(sql, opts=Sequel::OPTS) #{code}; super end", __FILE__, __LINE__) end private(*meths) end end private_class_method :prepared_statements_module # Default implementation of the argument mapper to allow # native database support for bind variables and prepared # statements (as opposed to the emulated ones used by default). module ArgumentMapper # The name of the prepared statement, if any. def prepared_statement_name @opts[:prepared_statement_name] end # The bind arguments to use for running this prepared statement def bind_arguments @opts[:bind_arguments] end # Set the bind arguments based on the hash and call super. def call(bind_vars=OPTS, &block) sql = prepared_sql prepared_args.freeze ps = bind(bind_vars) ps.clone(:bind_arguments=>ps.map_to_prepared_args(ps.opts[:bind_vars]), :sql=>sql, :prepared_sql=>sql).run(&block) end # Override the given *_sql method based on the type, and # cache the result of the sql. 
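      # Subsequent calls return the cached SQL instead of regenerating it.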
def prepared_sql if sql = @opts[:prepared_sql] || cache_get(:_prepared_sql) return sql end cache_set(:_prepared_sql, super) end private # Report that prepared statements are not emulated, since # all adapters that use this use native prepared statements. def emulate_prepared_statements? false end end # Backbone of the prepared statement support. Grafts bind variable # support into datasets by hijacking #literal and using placeholders. # By default, emulates prepared statements and bind variables by # taking the hash of bind variables and directly substituting them # into the query, which works on all databases, as it is no different # from using the dataset without bind variables. module PreparedStatementMethods # Whether to log the full SQL query. By default, just the prepared statement # name is generally logged on adapters that support native prepared statements. def log_sql @opts[:log_sql] end # The type of prepared statement, should be one of :select, :first, # :insert, :update, :delete, or :single_value def prepared_type @opts[:prepared_type] end # The array/hash of bound variable placeholder names. def prepared_args @opts[:prepared_args] end # The dataset that created this prepared statement. def orig_dataset @opts[:orig_dataset] end # The argument to supply to insert and update, which may use # placeholders specified by prepared_args def prepared_modify_values @opts[:prepared_modify_values] end # Sets the prepared_args to the given hash and runs the # prepared statement. def call(bind_vars=OPTS, &block) bind(bind_vars).run(&block) end # Raise an error if attempting to call prepare on an already # prepared statement. def prepare(*) raise Error, "cannot prepare an already prepared statement" unless allow_preparing_prepared_statements? super end # Send the columns to the original dataset, as calling it # on the prepared statement can cause problems. def columns orig_dataset.columns end # Disallow use of delayed evaluations in prepared statements. def delayed_evaluation_sql_append(sql, delay) raise Error, "delayed evaluations cannot be used in prepared statements" if @opts[:no_delayed_evaluations] super end # Returns the SQL for the prepared statement, depending on # the type of the statement and the prepared_modify_values. def prepared_sql case prepared_type when :select, :all, :each # Most common scenario, so listed first. select_sql when :first, :single_value clone(:limit=>1).select_sql when :insert_select insert_select_sql(*prepared_modify_values) when :insert, :insert_pk insert_sql(*prepared_modify_values) when :update update_sql(*prepared_modify_values) when :delete delete_sql else select_sql end end # Changes the values of symbols if they start with $ and # prepared_args is present. If so, they are considered placeholders, # and they are substituted using prepared_arg. def literal_symbol_append(sql, v) if @opts[:bind_vars] && /\A\$(.*)\z/ =~ v literal_append(sql, prepared_arg($1.to_sym)) else super end end # Programmer friendly string showing this is a prepared statement, # with the prepared SQL it represents (which in general won't have # substituted variables). def inspect "<#{visible_class_name}/PreparedStatement #{prepared_sql.inspect}>" end protected # Run the method based on the type of prepared statement. 
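      # (For example, :select runs #all, :first runs #first, and :delete runs #delete.)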
def run(&block) case prepared_type when :select, :all all(&block) when :each each(&block) when :insert_select with_sql(prepared_sql).first when :first first when :insert, :update, :delete if opts[:returning] && supports_returning?(prepared_type) returning_fetch_rows(prepared_sql) elsif prepared_type == :delete delete else public_send(prepared_type, *prepared_modify_values) end when :insert_pk fetch_rows(prepared_sql){|r| return r.values.first} when Array # :nocov: case prepared_type[0] # :nocov: when :map, :as_hash, :to_hash, :to_hash_groups public_send(*prepared_type, &block) end when :single_value single_value else raise Error, "unsupported prepared statement type used: #{prepared_type.inspect}" end end private # Returns the value of the prepared_args hash for the given key. def prepared_arg(k) @opts[:bind_vars][k] end # The symbol cache should always be skipped, since placeholders are symbols. def skip_symbol_cache? true end # Use a clone of the dataset extended with prepared statement # support and using the same argument hash so that you can use # bind variables/prepared arguments in subselects. def subselect_sql_append(sql, ds) subselect_sql_dataset(sql, ds).prepared_sql end def subselect_sql_dataset(sql, ds) super.clone(:prepared_args=>prepared_args, :bind_vars=>@opts[:bind_vars]). send(:to_prepared_statement, :select, nil, :extend=>prepared_statement_modules) end end # Default implementation for an argument mapper that uses # unnumbered SQL placeholder arguments. Keeps track of which # arguments have been used, and allows arguments to # be used more than once. module UnnumberedArgumentMapper include ArgumentMapper protected # Returns a single output array mapping the values of the input hash. # Keys in the input hash that are used more than once in the query # have multiple entries in the output array. def map_to_prepared_args(bind_vars) prepared_args.map{|v| bind_vars[v]} end private # Associates the argument with name k with the next position in # the output array. def prepared_arg(k) prepared_args << k prepared_arg_placeholder end end # Prepared statements emulation support for adapters that don't # support native prepared statements. Uses a placeholder # literalizer to hold the prepared sql with the ability to # interpolate arguments to prepare the final SQL string. module EmulatePreparedStatementMethods include UnnumberedArgumentMapper def run(&block) if @opts[:prepared_sql_frags] sql = literal(Sequel::SQL::PlaceholderLiteralString.new(@opts[:prepared_sql_frags], @opts[:bind_arguments], false)) clone(:prepared_sql_frags=>nil, :sql=>sql, :prepared_sql=>sql).run(&block) else super end end private # Turn emulation of prepared statements back on, since ArgumentMapper # turns it off. def emulate_prepared_statements? true end def emulated_prepared_statement(type, name, values) prepared_sql, frags = Sequel::Dataset::PlaceholderLiteralizer::Recorder.new.send(:prepared_sql_and_frags, self, prepared_args) do |pl, ds| ds = ds.clone(:recorder=>pl) case type when :first, :single_value ds.limit(1) when :update, :insert, :insert_select, :delete ds.with_sql(:"#{type}_sql", *values) when :insert_pk ds.with_sql(:insert_sql, *values) else ds end end prepared_args.freeze clone(:prepared_sql_frags=>frags, :prepared_sql=>prepared_sql, :sql=>prepared_sql) end # Associates the argument with name k with the next position in # the output array. def prepared_arg(k) prepared_args << k @opts[:recorder].arg end def subselect_sql_dataset(sql, ds) super.clone(:recorder=>@opts[:recorder]). 
with_extend(EmulatePreparedStatementMethods) end end # Set the bind variables to use for the call. If bind variables have # already been set for this dataset, they are updated with the contents # of bind_vars. # # DB[:table].where(id: :$id).bind(id: 1).call(:first) # # SELECT * FROM table WHERE id = ? LIMIT 1 -- (1) # # => {:id=>1} def bind(bind_vars=OPTS) bind_vars = if bv = @opts[:bind_vars] bv.merge(bind_vars).freeze else if bind_vars.frozen? bind_vars else Hash[bind_vars] end end clone(:bind_vars=>bind_vars) end # For the given type (:select, :first, :insert, :insert_select, :update, :delete, or :single_value), # run the sql with the bind variables specified in the hash. +values+ is a hash passed to # insert or update (if one of those types is used), which may contain placeholders. # # DB[:table].where(id: :$id).call(:first, id: 1) # # SELECT * FROM table WHERE id = ? LIMIT 1 -- (1) # # => {:id=>1} def call(type, bind_variables=OPTS, *values, &block) to_prepared_statement(type, values, :extend=>bound_variable_modules).call(bind_variables, &block) end # Prepare an SQL statement for later execution. Takes a type similar to #call, # and the +name+ symbol of the prepared statement. # # This returns a clone of the dataset extended with PreparedStatementMethods, # which you can +call+ with the hash of bind variables to use. # The prepared statement is also stored in # the associated Database, where it can be called by name. # The following usage is identical: # # ps = DB[:table].where(name: :$name).prepare(:first, :select_by_name) # # ps.call(name: 'Blah') # # SELECT * FROM table WHERE name = ? -- ('Blah') # # => {:id=>1, :name=>'Blah'} # # DB.call(:select_by_name, name: 'Blah') # Same thing def prepare(type, name, *values) ps = to_prepared_statement(type, values, :name=>name, :extend=>prepared_statement_modules, :no_delayed_evaluations=>true) ps = if ps.send(:emulate_prepared_statements?) ps = ps.with_extend(EmulatePreparedStatementMethods) ps.send(:emulated_prepared_statement, type, name, values) else sql = ps.prepared_sql ps.prepared_args.freeze ps.clone(:prepared_sql=>sql, :sql=>sql) end db.set_prepared_statement(name, ps) ps end protected # Return a cloned copy of the current dataset extended with # PreparedStatementMethods, setting the type and modify values. def to_prepared_statement(type, values=nil, opts=OPTS) mods = opts[:extend] || [] mods += [PreparedStatementMethods] bind. clone(:prepared_statement_name=>opts[:name], :prepared_type=>type, :prepared_modify_values=>values, :orig_dataset=>self, :no_cache_sql=>true, :prepared_args=>@opts[:prepared_args]||[], :no_delayed_evaluations=>opts[:no_delayed_evaluations]). with_extend(*mods) end private # Don't allow preparing prepared statements by default. def allow_preparing_prepared_statements? false end def bound_variable_modules prepared_statement_modules end # Whether prepared statements should be emulated. True by # default so that adapters have to opt in. def emulate_prepared_statements? true end def prepared_statement_modules [] end # The argument placeholder. Most databases used unnumbered # arguments with question marks, so that is the default. def prepared_arg_placeholder PREPARED_ARG_PLACEHOLDER end end end sequel-5.63.0/lib/sequel/dataset/query.rb000066400000000000000000001721071434214120600202730ustar00rootroot00000000000000# frozen-string-literal: true module Sequel class Dataset # --------------------- # :section: 1 - Methods that return modified datasets # These methods all return modified copies of the receiver. 
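    # Datasets are treated as immutable; each of these methods returns a new
    # copy rather than changing the receiver.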
# --------------------- # Hash of extension name symbols to callable objects to load the extension # into the Dataset object (usually by extending it with a module defined # in the extension). EXTENSIONS = {} EMPTY_ARRAY = [].freeze # The dataset options that require the removal of cached columns if changed. COLUMN_CHANGE_OPTS = [:select, :sql, :from, :join].freeze # Which options don't affect the SQL generation. Used by simple_select_all? # to determine if this is a simple SELECT * FROM table. NON_SQL_OPTIONS = [:server, :graph, :row_proc, :quote_identifiers, :skip_symbol_cache].freeze # These symbols have _join methods created (e.g. inner_join) that # call join_table with the symbol, passing along the arguments and # block from the method call. CONDITIONED_JOIN_TYPES = [:inner, :full_outer, :right_outer, :left_outer, :full, :right, :left].freeze # These symbols have _join methods created (e.g. natural_join). # They accept a table argument and options hash which is passed to join_table, # and they raise an error if called with a block. UNCONDITIONED_JOIN_TYPES = [:natural, :natural_left, :natural_right, :natural_full, :cross].freeze # All methods that return modified datasets with a joined table added. JOIN_METHODS = ((CONDITIONED_JOIN_TYPES + UNCONDITIONED_JOIN_TYPES).map{|x| "#{x}_join".to_sym} + [:join, :join_table]).freeze # Methods that return modified datasets QUERY_METHODS = ((<<-METHS).split.map(&:to_sym) + JOIN_METHODS).freeze add_graph_aliases distinct except exclude exclude_having filter for_update from from_self graph grep group group_and_count group_append group_by having intersect invert limit lock_style naked offset or order order_append order_by order_more order_prepend qualify reverse reverse_order select select_all select_append select_group select_more server set_graph_aliases unfiltered ungraphed ungrouped union unlimited unordered where with with_recursive with_sql METHS # Register an extension callback for Dataset objects. ext should be the # extension name symbol, and mod should either be a Module that the # dataset is extended with, or a callable object called with the database # object. If mod is not provided, a block can be provided and is treated # as the mod object. # # If mod is a module, this also registers a Database extension that will # extend all of the database's datasets. def self.register_extension(ext, mod=nil, &block) if mod raise(Error, "cannot provide both mod and block to Dataset.register_extension") if block if mod.is_a?(Module) block = proc{|ds| ds.extend(mod)} Sequel::Database.register_extension(ext){|db| db.extend_datasets(mod)} else block = mod end end Sequel.synchronize{EXTENSIONS[ext] = block} end # On Ruby 2.4+, use clone(freeze: false) to create clones, because # we use true freezing in that case, and we need to modify the opts # in the frozen copy. # # On Ruby <2.4, just use Object#clone directly, since we don't # use true freezing as it isn't possible. if TRUE_FREEZE # Save original clone implementation, as some other methods need # to call it internally. alias _clone clone private :_clone # Returns a new clone of the dataset with the given options merged. # If the options changed include options in COLUMN_CHANGE_OPTS, the cached # columns are deleted. This method should generally not be called # directly by user code. 
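      # (Options in COLUMN_CHANGE_OPTS can change which columns the query returns,
      # so any cached column information would be stale.) A sketch of internal use,
      # which user code should avoid in favor of the public query methods:
      #
      #   ds.clone(:limit=>5) # frozen copy of ds with the :limit option merged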
def clone(opts = nil || (return self)) # return self used above because clone is called by almost all # other query methods, and it is the fastest approach c = super(:freeze=>false) c.opts.merge!(opts) unless opts.each_key{|o| break if COLUMN_CHANGE_OPTS.include?(o)} c.clear_columns_cache end c.freeze end else # :nocov: def clone(opts = OPTS) # :nodoc: c = super() c.opts.merge!(opts) unless opts.each_key{|o| break if COLUMN_CHANGE_OPTS.include?(o)} c.clear_columns_cache end c.opts.freeze c end # :nocov: end # Returns a copy of the dataset with the SQL DISTINCT clause. The DISTINCT # clause is used to remove duplicate rows from the output. If arguments # are provided, uses a DISTINCT ON clause, in which case it will only be # distinct on those columns, instead of all returned columns. If a block # is given, it is treated as a virtual row block, similar to +where+. # Raises an error if arguments are given and DISTINCT ON is not supported. # # DB[:items].distinct # SQL: SELECT DISTINCT * FROM items # DB[:items].order(:id).distinct(:id) # SQL: SELECT DISTINCT ON (id) * FROM items ORDER BY id # DB[:items].order(:id).distinct{func(:id)} # SQL: SELECT DISTINCT ON (func(id)) * FROM items ORDER BY id # # There is support for emulating the DISTINCT ON support in MySQL, but it # does not support the ORDER of the dataset, and also doesn't work in many # cases if the ONLY_FULL_GROUP_BY sql_mode is used, which is the default on # MySQL 5.7.5+. def distinct(*args, &block) virtual_row_columns(args, block) if args.empty? cached_dataset(:_distinct_ds){clone(:distinct => EMPTY_ARRAY)} else raise(InvalidOperation, "DISTINCT ON not supported") unless supports_distinct_on? clone(:distinct => args.freeze) end end # Adds an EXCEPT clause using a second dataset object. # An EXCEPT compound dataset returns all rows in the current dataset # that are not in the given dataset. # Raises an +InvalidOperation+ if the operation is not supported. # Options: # :alias :: Use the given value as the from_self alias # :all :: Set to true to use EXCEPT ALL instead of EXCEPT, so duplicate rows can occur # :from_self :: Set to false to not wrap the returned dataset in a from_self, use with care. # # DB[:items].except(DB[:other_items]) # # SELECT * FROM (SELECT * FROM items EXCEPT SELECT * FROM other_items) AS t1 # # DB[:items].except(DB[:other_items], all: true, from_self: false) # # SELECT * FROM items EXCEPT ALL SELECT * FROM other_items # # DB[:items].except(DB[:other_items], alias: :i) # # SELECT * FROM (SELECT * FROM items EXCEPT SELECT * FROM other_items) AS i def except(dataset, opts=OPTS) raise(InvalidOperation, "EXCEPT not supported") unless supports_intersect_except? raise(InvalidOperation, "EXCEPT ALL not supported") if opts[:all] && !supports_intersect_except_all? compound_clone(:except, dataset, opts) end # Performs the inverse of Dataset#where. Note that if you have multiple filter # conditions, this is not the same as a negation of all conditions. # # DB[:items].exclude(category: 'software') # # SELECT * FROM items WHERE (category != 'software') # # DB[:items].exclude(category: 'software', id: 3) # # SELECT * FROM items WHERE ((category != 'software') OR (id != 3)) # # Also note that SQL uses 3-valued boolean logic (+true+, +false+, +NULL+), so # the inverse of a true condition is a false condition, and will still # not match rows that were NULL originally. 
If you take the earlier # example: # # DB[:items].exclude(category: 'software') # # SELECT * FROM items WHERE (category != 'software') # # Note that this does not match rows where +category+ is +NULL+. This # is because +NULL+ is an unknown value, and you do not know whether # or not the +NULL+ category is +software+. You can explicitly # specify how to handle +NULL+ values if you want: # # DB[:items].exclude(Sequel.~(category: nil) & {category: 'software'}) # # SELECT * FROM items WHERE ((category IS NULL) OR (category != 'software')) def exclude(*cond, &block) add_filter(:where, cond, true, &block) end # Inverts the given conditions and adds them to the HAVING clause. # # DB[:items].select_group(:name).exclude_having{count(name) < 2} # # SELECT name FROM items GROUP BY name HAVING (count(name) >= 2) # # See documentation for exclude for how inversion is handled in regards # to SQL 3-valued boolean logic. def exclude_having(*cond, &block) add_filter(:having, cond, true, &block) end if TRUE_FREEZE # Return a clone of the dataset loaded with the given dataset extensions. # If no related extension file exists or the extension does not have # specific support for Dataset objects, an Error will be raised. def extension(*a) c = _clone(:freeze=>false) c.send(:_extension!, a) c.freeze end else # :nocov: def extension(*exts) # :nodoc: c = clone c.send(:_extension!, exts) c end # :nocov: end # Alias for where. def filter(*cond, &block) where(*cond, &block) end # Returns a cloned dataset with a :update lock style. # # DB[:table].for_update # SELECT * FROM table FOR UPDATE def for_update cached_dataset(:_for_update_ds){lock_style(:update)} end # Returns a copy of the dataset with the source changed. If no # source is given, removes all tables. If multiple sources # are given, it is the same as using a CROSS JOIN (cartesian product) between all tables. # If a block is given, it is treated as a virtual row block, similar to +where+. # # DB[:items].from # SQL: SELECT * # DB[:items].from(:blah) # SQL: SELECT * FROM blah # DB[:items].from(:blah, :foo) # SQL: SELECT * FROM blah, foo # DB[:items].from{fun(arg)} # SQL: SELECT * FROM fun(arg) def from(*source, &block) virtual_row_columns(source, block) table_alias_num = 0 ctes = nil source.map! do |s| case s when Dataset if hoist_cte?(s) ctes ||= [] ctes += s.opts[:with] s = s.clone(:with=>nil) end SQL::AliasedExpression.new(s, dataset_alias(table_alias_num+=1)) when Symbol sch, table, aliaz = split_symbol(s) if aliaz s = sch ? SQL::QualifiedIdentifier.new(sch, table) : SQL::Identifier.new(table) SQL::AliasedExpression.new(s, aliaz.to_sym) else s end else s end end o = {:from=>source.empty? ? nil : source.freeze} o[:with] = ((opts[:with] || EMPTY_ARRAY) + ctes).freeze if ctes o[:num_dataset_sources] = table_alias_num if table_alias_num > 0 clone(o) end # Returns a dataset selecting from the current dataset. # Options: # :alias :: Controls the alias of the table # :column_aliases :: Also aliases columns, using derived column lists. # Only used in conjunction with :alias. 
# # ds = DB[:items].order(:name).select(:id, :name) # # SELECT id,name FROM items ORDER BY name # # ds.from_self # # SELECT * FROM (SELECT id, name FROM items ORDER BY name) AS t1 # # ds.from_self(alias: :foo) # # SELECT * FROM (SELECT id, name FROM items ORDER BY name) AS foo # # ds.from_self(alias: :foo, column_aliases: [:c1, :c2]) # # SELECT * FROM (SELECT id, name FROM items ORDER BY name) AS foo(c1, c2) def from_self(opts=OPTS) fs = {} @opts.keys.each{|k| fs[k] = nil unless non_sql_option?(k)} pr = proc do c = clone(fs).from(opts[:alias] ? as(opts[:alias], opts[:column_aliases]) : self) if cols = _columns c.send(:columns=, cols) end c end opts.empty? ? cached_dataset(:_from_self_ds, &pr) : pr.call end # Match any of the columns to any of the patterns. The terms can be # strings (which use LIKE) or regular expressions if the database supports that. # Note that the total number of pattern matches will be # Array(columns).length * Array(terms).length, # which could cause performance issues. # # Options (all are boolean): # # :all_columns :: All columns must be matched to any of the given patterns. # :all_patterns :: All patterns must match at least one of the columns. # :case_insensitive :: Use a case insensitive pattern match (the default is # case sensitive if the database supports it). # # If both :all_columns and :all_patterns are true, all columns must match all patterns. # # Examples: # # dataset.grep(:a, '%test%') # # SELECT * FROM items WHERE (a LIKE '%test%' ESCAPE '\') # # dataset.grep([:a, :b], %w'%test% foo') # # SELECT * FROM items WHERE ((a LIKE '%test%' ESCAPE '\') OR (a LIKE 'foo' ESCAPE '\') # # OR (b LIKE '%test%' ESCAPE '\') OR (b LIKE 'foo' ESCAPE '\')) # # dataset.grep([:a, :b], %w'%foo% %bar%', all_patterns: true) # # SELECT * FROM a WHERE (((a LIKE '%foo%' ESCAPE '\') OR (b LIKE '%foo%' ESCAPE '\')) # # AND ((a LIKE '%bar%' ESCAPE '\') OR (b LIKE '%bar%' ESCAPE '\'))) # # dataset.grep([:a, :b], %w'%foo% %bar%', all_columns: true) # # SELECT * FROM a WHERE (((a LIKE '%foo%' ESCAPE '\') OR (a LIKE '%bar%' ESCAPE '\')) # # AND ((b LIKE '%foo%' ESCAPE '\') OR (b LIKE '%bar%' ESCAPE '\'))) # # dataset.grep([:a, :b], %w'%foo% %bar%', all_patterns: true, all_columns: true) # # SELECT * FROM a WHERE ((a LIKE '%foo%' ESCAPE '\') AND (b LIKE '%foo%' ESCAPE '\') # # AND (a LIKE '%bar%' ESCAPE '\') AND (b LIKE '%bar%' ESCAPE '\')) def grep(columns, patterns, opts=OPTS) column_op = opts[:all_columns] ? :AND : :OR if opts[:all_patterns] conds = Array(patterns).map do |pat| SQL::BooleanExpression.new(column_op, *Array(columns).map{|c| SQL::StringExpression.like(c, pat, opts)}) end where(SQL::BooleanExpression.new(:AND, *conds)) else conds = Array(columns).map do |c| SQL::BooleanExpression.new(:OR, *Array(patterns).map{|pat| SQL::StringExpression.like(c, pat, opts)}) end where(SQL::BooleanExpression.new(column_op, *conds)) end end # Returns a copy of the dataset with the results grouped by the value of # the given columns. If a block is given, it is treated # as a virtual row block, similar to +where+. # # DB[:items].group(:id) # SELECT * FROM items GROUP BY id # DB[:items].group(:id, :name) # SELECT * FROM items GROUP BY id, name # DB[:items].group{[a, sum(b)]} # SELECT * FROM items GROUP BY a, sum(b) def group(*columns, &block) virtual_row_columns(columns, block) clone(:group => (columns.compact.empty? ? nil : columns.freeze)) end # Alias of group def group_by(*columns, &block) group(*columns, &block) end # Returns a dataset grouped by the given column with count by group. 
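    # The count is selected with an alias of +count+.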
# Column aliases may be supplied, and will be included in the select clause. # If a block is given, it is treated as a virtual row block, similar to +where+. # # Examples: # # DB[:items].group_and_count(:name).all # # SELECT name, count(*) AS count FROM items GROUP BY name # # => [{:name=>'a', :count=>1}, ...] # # DB[:items].group_and_count(:first_name, :last_name).all # # SELECT first_name, last_name, count(*) AS count FROM items GROUP BY first_name, last_name # # => [{:first_name=>'a', :last_name=>'b', :count=>1}, ...] # # DB[:items].group_and_count(Sequel[:first_name].as(:name)).all # # SELECT first_name AS name, count(*) AS count FROM items GROUP BY first_name # # => [{:name=>'a', :count=>1}, ...] # # DB[:items].group_and_count{substr(:first_name, 1, 1).as(:initial)}.all # # SELECT substr(first_name, 1, 1) AS initial, count(*) AS count FROM items GROUP BY substr(first_name, 1, 1) # # => [{:initial=>'a', :count=>1}, ...] def group_and_count(*columns, &block) select_group(*columns, &block).select_append(COUNT_OF_ALL_AS_COUNT) end # Returns a copy of the dataset with the given columns added to the list of # existing columns to group on. If no existing columns are present this # method simply sets the columns as the initial ones to group on. # # DB[:items].group_append(:b) # SELECT * FROM items GROUP BY b # DB[:items].group(:a).group_append(:b) # SELECT * FROM items GROUP BY a, b def group_append(*columns, &block) columns = @opts[:group] + columns if @opts[:group] group(*columns, &block) end # Adds the appropriate CUBE syntax to GROUP BY. def group_cube raise Error, "GROUP BY CUBE not supported on #{db.database_type}" unless supports_group_cube? clone(:group_options=>:cube) end # Adds the appropriate ROLLUP syntax to GROUP BY. def group_rollup raise Error, "GROUP BY ROLLUP not supported on #{db.database_type}" unless supports_group_rollup? clone(:group_options=>:rollup) end # Adds the appropriate GROUPING SETS syntax to GROUP BY. def grouping_sets raise Error, "GROUP BY GROUPING SETS not supported on #{db.database_type}" unless supports_grouping_sets? clone(:group_options=>:"grouping sets") end # Returns a copy of the dataset with the HAVING conditions changed. See #where for argument types. # # DB[:items].group(:sum).having(sum: 10) # # SELECT * FROM items GROUP BY sum HAVING (sum = 10) def having(*cond, &block) add_filter(:having, cond, &block) end # Adds an INTERSECT clause using a second dataset object. # An INTERSECT compound dataset returns all rows in both the current dataset # and the given dataset. # Raises an +InvalidOperation+ if the operation is not supported. # Options: # :alias :: Use the given value as the from_self alias # :all :: Set to true to use INTERSECT ALL instead of INTERSECT, so duplicate rows can occur # :from_self :: Set to false to not wrap the returned dataset in a from_self, use with care. # # DB[:items].intersect(DB[:other_items]) # # SELECT * FROM (SELECT * FROM items INTERSECT SELECT * FROM other_items) AS t1 # # DB[:items].intersect(DB[:other_items], all: true, from_self: false) # # SELECT * FROM items INTERSECT ALL SELECT * FROM other_items # # DB[:items].intersect(DB[:other_items], alias: :i) # # SELECT * FROM (SELECT * FROM items INTERSECT SELECT * FROM other_items) AS i def intersect(dataset, opts=OPTS) raise(InvalidOperation, "INTERSECT not supported") unless supports_intersect_except? raise(InvalidOperation, "INTERSECT ALL not supported") if opts[:all] && !supports_intersect_except_all? 
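      # Delegate to the logic shared by all compound (set operation) datasets.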
compound_clone(:intersect, dataset, opts) end # Inverts the current WHERE and HAVING clauses. If there is neither a # WHERE or HAVING clause, adds a WHERE clause that is always false. # # DB[:items].where(category: 'software').invert # # SELECT * FROM items WHERE (category != 'software') # # DB[:items].where(category: 'software', id: 3).invert # # SELECT * FROM items WHERE ((category != 'software') OR (id != 3)) # # See documentation for exclude for how inversion is handled in regards # to SQL 3-valued boolean logic. def invert cached_dataset(:_invert_ds) do having, where = @opts.values_at(:having, :where) if having.nil? && where.nil? where(false) else o = {} o[:having] = SQL::BooleanExpression.invert(having) if having o[:where] = SQL::BooleanExpression.invert(where) if where clone(o) end end end # Alias of +inner_join+ def join(*args, &block) inner_join(*args, &block) end # Returns a joined dataset. Not usually called directly, users should use the # appropriate join method (e.g. join, left_join, natural_join, cross_join) which fills # in the +type+ argument. # # Takes the following arguments: # # type :: The type of join to do (e.g. :inner) # table :: table to join into the current dataset. Generally one of the following types: # String, Symbol :: identifier used as table or view name # Dataset :: a subselect is performed with an alias of tN for some value of N # SQL::Function :: set returning function # SQL::AliasedExpression :: already aliased expression. Uses given alias unless # overridden by the :table_alias option. # expr :: conditions used when joining, depends on type: # Hash, Array of pairs :: Assumes key (1st arg) is column of joined table (unless already # qualified), and value (2nd arg) is column of the last joined or # primary table (or the :implicit_qualifier option). # To specify multiple conditions on a single joined table column, # you must use an array. Uses a JOIN with an ON clause. # Array :: If all members of the array are symbols, considers them as columns and # uses a JOIN with a USING clause. Most databases will remove duplicate columns from # the result set if this is used. # nil :: If a block is not given, doesn't use ON or USING, so the JOIN should be a NATURAL # or CROSS join. If a block is given, uses an ON clause based on the block, see below. # otherwise :: Treats the argument as a filter expression, so strings are considered literal, symbols # specify boolean columns, and Sequel expressions can be used. Uses a JOIN with an ON clause. # options :: a hash of options, with the following keys supported: # :table_alias :: Override the table alias used when joining. In general you shouldn't use this # option, you should provide the appropriate SQL::AliasedExpression as the table # argument. # :implicit_qualifier :: The name to use for qualifying implicit conditions. By default, # the last joined or primary table is used. # :join_using :: Force the using of JOIN USING, even if +expr+ is not an array of symbols. # :reset_implicit_qualifier :: Can set to false to ignore this join when future joins determine qualifier # for implicit conditions. # :qualify :: Can be set to false to not do any implicit qualification. Can be set # to :deep to use the Qualifier AST Transformer, which will attempt to qualify # subexpressions of the expression tree. Can be set to :symbol to only qualify # symbols. Defaults to the value of default_join_table_qualification. 
# block :: The block argument should only be given if a JOIN with an ON clause is used, # in which case it yields the table alias/name for the table currently being joined, # the table alias/name for the last joined (or first table), and an array of previous # SQL::JoinClause. Unlike +where+, this block is not treated as a virtual row block. # # Examples: # # DB[:a].join_table(:cross, :b) # # SELECT * FROM a CROSS JOIN b # # DB[:a].join_table(:inner, DB[:b], c: d) # # SELECT * FROM a INNER JOIN (SELECT * FROM b) AS t1 ON (t1.c = a.d) # # DB[:a].join_table(:left, Sequel[:b].as(:c), [:d]) # # SELECT * FROM a LEFT JOIN b AS c USING (d) # # DB[:a].natural_join(:b).join_table(:inner, :c) do |ta, jta, js| # (Sequel.qualify(ta, :d) > Sequel.qualify(jta, :e)) & {Sequel.qualify(ta, :f)=>DB.from(js.first.table).select(:g)} # end # # SELECT * FROM a NATURAL JOIN b INNER JOIN c # # ON ((c.d > b.e) AND (c.f IN (SELECT g FROM b))) def join_table(type, table, expr=nil, options=OPTS, &block) if hoist_cte?(table) s, ds = hoist_cte(table) return s.join_table(type, ds, expr, options, &block) end using_join = options[:join_using] || (expr.is_a?(Array) && !expr.empty? && expr.all?{|x| x.is_a?(Symbol)}) if using_join && !supports_join_using? h = {} expr.each{|e| h[e] = e} return join_table(type, table, h, options) end table_alias = options[:table_alias] if table.is_a?(SQL::AliasedExpression) table_expr = if table_alias SQL::AliasedExpression.new(table.expression, table_alias, table.columns) else table end table = table_expr.expression table_name = table_alias = table_expr.alias elsif table.is_a?(Dataset) if table_alias.nil? table_alias_num = (@opts[:num_dataset_sources] || 0) + 1 table_alias = dataset_alias(table_alias_num) end table_name = table_alias table_expr = SQL::AliasedExpression.new(table, table_alias) else table, implicit_table_alias = split_alias(table) table_alias ||= implicit_table_alias table_name = table_alias || table table_expr = table_alias ? SQL::AliasedExpression.new(table, table_alias) : table end join = if expr.nil? and !block SQL::JoinClause.new(type, table_expr) elsif using_join raise(Sequel::Error, "can't use a block if providing an array of symbols as expr") if block SQL::JoinUsingClause.new(expr, type, table_expr) else last_alias = options[:implicit_qualifier] || @opts[:last_joined_table] || first_source_alias qualify_type = options[:qualify] if Sequel.condition_specifier?(expr) expr = expr.map do |k, v| qualify_type = default_join_table_qualification if qualify_type.nil? case qualify_type when false nil # Do no qualification when :deep k = Sequel::Qualifier.new(table_name).transform(k) v = Sequel::Qualifier.new(last_alias).transform(v) else k = qualified_column_name(k, table_name) if k.is_a?(Symbol) v = qualified_column_name(v, last_alias) if v.is_a?(Symbol) end [k,v] end expr = SQL::BooleanExpression.from_value_pairs(expr) end if block expr2 = yield(table_name, last_alias, @opts[:join] || EMPTY_ARRAY) expr = expr ? 
SQL::BooleanExpression.new(:AND, expr, expr2) : expr2 end SQL::JoinOnClause.new(expr, type, table_expr) end opts = {:join => ((@opts[:join] || EMPTY_ARRAY) + [join]).freeze} opts[:last_joined_table] = table_name unless options[:reset_implicit_qualifier] == false opts[:num_dataset_sources] = table_alias_num if table_alias_num clone(opts) end CONDITIONED_JOIN_TYPES.each do |jtype| class_eval("def #{jtype}_join(*args, &block); join_table(:#{jtype}, *args, &block) end", __FILE__, __LINE__) end UNCONDITIONED_JOIN_TYPES.each do |jtype| class_eval(<<-END, __FILE__, __LINE__+1) def #{jtype}_join(table, opts=Sequel::OPTS) raise(Sequel::Error, '#{jtype}_join does not accept join table blocks') if defined?(yield) raise(Sequel::Error, '#{jtype}_join 2nd argument should be an options hash, not conditions') unless opts.is_a?(Hash) join_table(:#{jtype}, table, nil, opts) end END end # Marks this dataset as a lateral dataset. If used in another dataset's FROM # or JOIN clauses, it will surround the subquery with LATERAL to enable it # to deal with previous tables in the query: # # DB.from(:a, DB[:b].where(Sequel[:a][:c]=>Sequel[:b][:d]).lateral) # # SELECT * FROM a, LATERAL (SELECT * FROM b WHERE (a.c = b.d)) def lateral cached_dataset(:_lateral_ds){clone(:lateral=>true)} end # If given an integer, the dataset will contain only the first l results. # If given a range, it will contain only those at offsets within that # range. If a second argument is given, it is used as an offset. To use # an offset without a limit, pass nil as the first argument. # # DB[:items].limit(10) # SELECT * FROM items LIMIT 10 # DB[:items].limit(10, 20) # SELECT * FROM items LIMIT 10 OFFSET 20 # DB[:items].limit(10...20) # SELECT * FROM items LIMIT 10 OFFSET 10 # DB[:items].limit(10..20) # SELECT * FROM items LIMIT 11 OFFSET 10 # DB[:items].limit(nil, 20) # SELECT * FROM items OFFSET 20 def limit(l, o = (no_offset = true; nil)) return from_self.limit(l, o) if @opts[:sql] if l.is_a?(Range) no_offset = false o = l.first l = l.last - l.first + (l.exclude_end? ? 0 : 1) end l = l.to_i if l.is_a?(String) && !l.is_a?(LiteralString) if l.is_a?(Integer) raise(Error, 'Limits must be greater than or equal to 1') unless l >= 1 end ds = clone(:limit=>l) ds = ds.offset(o) unless no_offset ds end # Returns a cloned dataset with the given lock style. If style is a # string, it will be used directly. You should never pass a string # to this method that is derived from user input, as that can lead to # SQL injection. # # A symbol may be used for database independent locking behavior, but # all supported symbols have separate methods (e.g. for_update). # # DB[:items].lock_style('FOR SHARE NOWAIT') # # SELECT * FROM items FOR SHARE NOWAIT # DB[:items].lock_style('FOR UPDATE OF table1 SKIP LOCKED') # # SELECT * FROM items FOR UPDATE OF table1 SKIP LOCKED def lock_style(style) clone(:lock => style) end # Return a dataset with a WHEN MATCHED THEN DELETE clause added to the # MERGE statement. If a block is passed, treat it as a virtual row and # use it as additional conditions for the match. # # merge_delete # # WHEN MATCHED THEN DELETE # # merge_delete{a > 30} # # WHEN MATCHED AND (a > 30) THEN DELETE def merge_delete(&block) _merge_when(:type=>:delete, &block) end # Return a dataset with a WHEN NOT MATCHED THEN INSERT clause added to the # MERGE statement. If a block is passed, treat it as a virtual row and # use it as additional conditions for the match. # # The arguments provided can be any arguments that would be accepted by # #insert. 
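    # Passing positional values omits the explicit column list from the generated
    # SQL, as shown in the second example below.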
    #
    #   merge_insert(i1: :i2, a: Sequel[:b]+11)
    #   # WHEN NOT MATCHED THEN INSERT (i1, a) VALUES (i2, (b + 11))
    #
    #   merge_insert(:i2, Sequel[:b]+11){a > 30}
    #   # WHEN NOT MATCHED AND (a > 30) THEN INSERT VALUES (i2, (b + 11))
    def merge_insert(*values, &block)
      _merge_when(:type=>:insert, :values=>values, &block)
    end

    # Return a dataset with a WHEN MATCHED THEN UPDATE clause added to the
    # MERGE statement. If a block is passed, treat it as a virtual row and
    # use it as additional conditions for the match.
    #
    #   merge_update(i1: Sequel[:i1]+:i2+10, a: Sequel[:a]+:b+20)
    #   # WHEN MATCHED THEN UPDATE SET i1 = (i1 + i2 + 10), a = (a + b + 20)
    #
    #   merge_update(i1: :i2){a > 30}
    #   # WHEN MATCHED AND (a > 30) THEN UPDATE SET i1 = i2
    def merge_update(values, &block)
      _merge_when(:type=>:update, :values=>values, &block)
    end

    # Return a dataset with the source and join condition to use for the MERGE statement.
    #
    #   merge_using(:m2, i1: :i2)
    #   # USING m2 ON (i1 = i2)
    def merge_using(source, join_condition)
      clone(:merge_using => [source, join_condition].freeze)
    end

    # Returns a cloned dataset without a row_proc.
    #
    #   ds = DB[:items].with_row_proc(:invert.to_proc)
    #   ds.all # => [{2=>:id}]
    #   ds.naked.all # => [{:id=>2}]
    def naked
      cached_dataset(:_naked_ds){with_row_proc(nil)}
    end

    # Returns a copy of the dataset that will raise a DatabaseLockTimeout instead
    # of waiting for rows that are locked by another transaction.
    #
    #   DB[:items].for_update.nowait
    #   # SELECT * FROM items FOR UPDATE NOWAIT
    def nowait
      cached_dataset(:_nowait_ds) do
        raise(Error, 'This dataset does not support raising errors instead of waiting for locked rows') unless supports_nowait?
        clone(:nowait=>true)
      end
    end

    # Returns a copy of the dataset with a specified offset. Can be safely combined with limit.
    # If you call limit with an offset, it will override the offset if you've called
    # offset first.
    #
    #   DB[:items].offset(10) # SELECT * FROM items OFFSET 10
    def offset(o)
      o = o.to_i if o.is_a?(String) && !o.is_a?(LiteralString)
      if o.is_a?(Integer)
        raise(Error, 'Offsets must be greater than or equal to 0') unless o >= 0
      end
      clone(:offset => o)
    end

    # Adds an alternate filter to an existing WHERE clause using OR. If there
    # is no WHERE clause, then the default is WHERE true, and OR would be redundant,
    # so return the dataset in that case.
    #
    #   DB[:items].where(:a).or(:b) # SELECT * FROM items WHERE a OR b
    #   DB[:items].or(:b) # SELECT * FROM items
    def or(*cond, &block)
      if @opts[:where].nil?
        self
      else
        add_filter(:where, cond, false, :OR, &block)
      end
    end

    # Returns a copy of the dataset with the order changed. If the dataset has an
    # existing order, it is ignored and overwritten with this order. If a nil is given,
    # the returned dataset has no order. This can accept multiple arguments
    # of varying kinds, such as SQL functions. If a block is given, it is treated
    # as a virtual row block, similar to +where+.
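    # Use +order_append+ or +order_prepend+ to modify an existing order instead
    # of replacing it.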
# # DB[:items].order(:name) # SELECT * FROM items ORDER BY name # DB[:items].order(:a, :b) # SELECT * FROM items ORDER BY a, b # DB[:items].order(Sequel.lit('a + b')) # SELECT * FROM items ORDER BY a + b # DB[:items].order(Sequel[:a] + :b) # SELECT * FROM items ORDER BY (a + b) # DB[:items].order(Sequel.desc(:name)) # SELECT * FROM items ORDER BY name DESC # DB[:items].order(Sequel.asc(:name, nulls: :last)) # SELECT * FROM items ORDER BY name ASC NULLS LAST # DB[:items].order{sum(name).desc} # SELECT * FROM items ORDER BY sum(name) DESC # DB[:items].order(nil) # SELECT * FROM items def order(*columns, &block) virtual_row_columns(columns, block) clone(:order => (columns.compact.empty?) ? nil : columns.freeze) end # Returns a copy of the dataset with the order columns added # to the end of the existing order. # # DB[:items].order(:a).order(:b) # SELECT * FROM items ORDER BY b # DB[:items].order(:a).order_append(:b) # SELECT * FROM items ORDER BY a, b def order_append(*columns, &block) columns = @opts[:order] + columns if @opts[:order] order(*columns, &block) end # Alias of order def order_by(*columns, &block) order(*columns, &block) end # Alias of order_append. def order_more(*columns, &block) order_append(*columns, &block) end # Returns a copy of the dataset with the order columns added # to the beginning of the existing order. # # DB[:items].order(:a).order(:b) # SELECT * FROM items ORDER BY b # DB[:items].order(:a).order_prepend(:b) # SELECT * FROM items ORDER BY b, a def order_prepend(*columns, &block) ds = order(*columns, &block) @opts[:order] ? ds.order_append(*@opts[:order]) : ds end # Qualify to the given table, or first source if no table is given. # # DB[:items].where(id: 1).qualify # # SELECT items.* FROM items WHERE (items.id = 1) # # DB[:items].where(id: 1).qualify(:i) # # SELECT i.* FROM items WHERE (i.id = 1) def qualify(table=(cache=true; first_source)) o = @opts return self if o[:sql] pr = proc do h = {} (o.keys & QUALIFY_KEYS).each do |k| h[k] = qualified_expression(o[k], table) end h[:select] = [SQL::ColumnAll.new(table)].freeze if !o[:select] || o[:select].empty? clone(h) end cache ? cached_dataset(:_qualify_ds, &pr) : pr.call end # Modify the RETURNING clause, only supported on a few databases. If returning # is used, instead of insert returning the autogenerated primary key or # update/delete returning the number of modified rows, results are # returned using +fetch_rows+. # # DB[:items].returning # RETURNING * # DB[:items].returning(nil) # RETURNING NULL # DB[:items].returning(:id, :name) # RETURNING id, name # # DB[:items].returning.insert(a: 1) do |hash| # # hash for each row inserted, with values for all columns # end # DB[:items].returning.update(a: 1) do |hash| # # hash for each row updated, with values for all columns # end # DB[:items].returning.delete(a: 1) do |hash| # # hash for each row deleted, with values for all columns # end def returning(*values) if values.empty? cached_dataset(:_returning_ds) do raise Error, "RETURNING is not supported on #{db.database_type}" unless supports_returning?(:insert) clone(:returning=>EMPTY_ARRAY) end else raise Error, "RETURNING is not supported on #{db.database_type}" unless supports_returning?(:insert) clone(:returning=>values.freeze) end end # Returns a copy of the dataset with the order reversed. If no order is # given, the existing order is inverted. 
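    # If an order is given, it replaces any existing order before being inverted.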
# # DB[:items].reverse(:id) # SELECT * FROM items ORDER BY id DESC # DB[:items].reverse{foo(bar)} # SELECT * FROM items ORDER BY foo(bar) DESC # DB[:items].order(:id).reverse # SELECT * FROM items ORDER BY id DESC # DB[:items].order(:id).reverse(Sequel.desc(:name)) # SELECT * FROM items ORDER BY name ASC def reverse(*order, &block) if order.empty? && !block cached_dataset(:_reverse_ds){order(*invert_order(@opts[:order]))} else virtual_row_columns(order, block) order(*invert_order(order.empty? ? @opts[:order] : order.freeze)) end end # Alias of +reverse+ def reverse_order(*order, &block) reverse(*order, &block) end # Returns a copy of the dataset with the columns selected changed # to the given columns. This also takes a virtual row block, # similar to +where+. # # DB[:items].select(:a) # SELECT a FROM items # DB[:items].select(:a, :b) # SELECT a, b FROM items # DB[:items].select{[a, sum(b)]} # SELECT a, sum(b) FROM items def select(*columns, &block) virtual_row_columns(columns, block) clone(:select => columns.freeze) end # Returns a copy of the dataset selecting the wildcard if no arguments # are given. If arguments are given, treat them as tables and select # all columns (using the wildcard) from each table. # # DB[:items].select(:a).select_all # SELECT * FROM items # DB[:items].select_all(:items) # SELECT items.* FROM items # DB[:items].select_all(:items, :foo) # SELECT items.*, foo.* FROM items def select_all(*tables) if tables.empty? cached_dataset(:_select_all_ds){clone(:select => nil)} else select(*tables.map{|t| i, a = split_alias(t); a || i}.map!{|t| SQL::ColumnAll.new(t)}.freeze) end end # Returns a copy of the dataset with the given columns added # to the existing selected columns. If no columns are currently selected, # it will select the columns given in addition to *. # # DB[:items].select(:a).select(:b) # SELECT b FROM items # DB[:items].select(:a).select_append(:b) # SELECT a, b FROM items # DB[:items].select_append(:b) # SELECT *, b FROM items def select_append(*columns, &block) cur_sel = @opts[:select] if !cur_sel || cur_sel.empty? unless supports_select_all_and_column? return select_all(*(Array(@opts[:from]) + Array(@opts[:join]))).select_append(*columns, &block) end cur_sel = [WILDCARD] end select(*(cur_sel + columns), &block) end # Set both the select and group clauses with the given +columns+. # Column aliases may be supplied, and will be included in the select clause. # This also takes a virtual row block similar to +where+. # # DB[:items].select_group(:a, :b) # # SELECT a, b FROM items GROUP BY a, b # # DB[:items].select_group(Sequel[:c].as(:a)){f(c2)} # # SELECT c AS a, f(c2) FROM items GROUP BY c, f(c2) def select_group(*columns, &block) virtual_row_columns(columns, block) select(*columns).group(*columns.map{|c| unaliased_identifier(c)}) end # Alias for select_append. def select_more(*columns, &block) select_append(*columns, &block) end # Set the server for this dataset to use. Used to pick a specific database # shard to run a query against, or to override the default (where SELECT uses # :read_only database and all other queries use the :default database). This # method is always available but is only useful when database sharding is being # used. 
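    # The given symbol should match a server/shard configured via the Database's
    # :servers option. For example (assuming a :shard_1 server is configured):
    #
    #   DB[:items].server(:shard_1).all # runs against the :shard_1 shard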
# # DB[:items].all # Uses the :read_only or :default server # DB[:items].delete # Uses the :default server # DB[:items].server(:blah).delete # Uses the :blah server def server(servr) clone(:server=>servr) end # If the database uses sharding and the current dataset has not had a # server set, return a cloned dataset that uses the given server. # Otherwise, return the receiver directly instead of returning a clone. def server?(server) if db.sharded? && !opts[:server] server(server) else self end end # Specify that the check for limits/offsets when updating/deleting be skipped for the dataset. def skip_limit_check cached_dataset(:_skip_limit_check_ds) do clone(:skip_limit_check=>true) end end # Skip locked rows when returning results from this dataset. def skip_locked cached_dataset(:_skip_locked_ds) do raise(Error, 'This dataset does not support skipping locked rows') unless supports_skip_locked? clone(:skip_locked=>true) end end # Returns a copy of the dataset with no filters (HAVING or WHERE clause) applied. # # DB[:items].group(:a).having(a: 1).where(:b).unfiltered # # SELECT * FROM items GROUP BY a def unfiltered cached_dataset(:_unfiltered_ds){clone(:where => nil, :having => nil)} end # Returns a copy of the dataset with no grouping (GROUP or HAVING clause) applied. # # DB[:items].group(:a).having(a: 1).where(:b).ungrouped # # SELECT * FROM items WHERE b def ungrouped cached_dataset(:_ungrouped_ds){clone(:group => nil, :having => nil)} end # Adds a UNION clause using a second dataset object. # A UNION compound dataset returns all rows in either the current dataset # or the given dataset. # Options: # :alias :: Use the given value as the from_self alias # :all :: Set to true to use UNION ALL instead of UNION, so duplicate rows can occur # :from_self :: Set to false to not wrap the returned dataset in a from_self, use with care. # # DB[:items].union(DB[:other_items]) # # SELECT * FROM (SELECT * FROM items UNION SELECT * FROM other_items) AS t1 # # DB[:items].union(DB[:other_items], all: true, from_self: false) # # SELECT * FROM items UNION ALL SELECT * FROM other_items # # DB[:items].union(DB[:other_items], alias: :i) # # SELECT * FROM (SELECT * FROM items UNION SELECT * FROM other_items) AS i def union(dataset, opts=OPTS) compound_clone(:union, dataset, opts) end # Returns a copy of the dataset with no limit or offset. # # DB[:items].limit(10, 20).unlimited # SELECT * FROM items def unlimited cached_dataset(:_unlimited_ds){clone(:limit=>nil, :offset=>nil)} end # Returns a copy of the dataset with no order. # # DB[:items].order(:a).unordered # SELECT * FROM items def unordered cached_dataset(:_unordered_ds){clone(:order=>nil)} end # Returns a copy of the dataset with the given WHERE conditions imposed upon it. # # Accepts the following argument types: # # Hash, Array of pairs :: list of equality/inclusion expressions # Symbol :: taken as a boolean column argument (e.g. WHERE active) # Sequel::SQL::BooleanExpression, Sequel::LiteralString :: an existing condition expression, probably created # using the Sequel expression filter DSL. # # where also accepts a block, which should return one of the above argument # types, and is treated the same way. This block yields a virtual row object, # which is easy to use to create identifiers and functions. For more details # on the virtual row support, see the {"Virtual Rows" guide}[rdoc-ref:doc/virtual_rows.rdoc] # # If both a block and regular argument are provided, they get ANDed together. 
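    # Calling +where+ with no argument and no block raises an Error.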
# # Examples: # # DB[:items].where(id: 3) # # SELECT * FROM items WHERE (id = 3) # # DB[:items].where(Sequel.lit('price < ?', 100)) # # SELECT * FROM items WHERE price < 100 # # DB[:items].where([[:id, [1,2,3]], [:id, 0..10]]) # # SELECT * FROM items WHERE ((id IN (1, 2, 3)) AND ((id >= 0) AND (id <= 10))) # # DB[:items].where(Sequel.lit('price < 100')) # # SELECT * FROM items WHERE price < 100 # # DB[:items].where(:active) # # SELECT * FROM items WHERE :active # # DB[:items].where{price < 100} # # SELECT * FROM items WHERE (price < 100) # # Multiple where calls can be chained for scoping: # # software = dataset.where(category: 'software').where{price < 100} # # SELECT * FROM items WHERE ((category = 'software') AND (price < 100)) # # See the {"Dataset Filtering" guide}[rdoc-ref:doc/dataset_filtering.rdoc] for more examples and details. def where(*cond, &block) add_filter(:where, cond, &block) end # Return a clone of the dataset with an addition named window that can be # referenced in window functions. See Sequel::SQL::Window for a list of # options that can be passed in. Example: # # DB[:items].window(:w, partition: :c1, order: :c2) # # SELECT * FROM items WINDOW w AS (PARTITION BY c1 ORDER BY c2) def window(name, opts) clone(:window=>((@opts[:window]||EMPTY_ARRAY) + [[name, SQL::Window.new(opts)].freeze]).freeze) end # Add a common table expression (CTE) with the given name and a dataset that defines the CTE. # A common table expression acts as an inline view for the query. # # Options: # :args :: Specify the arguments/columns for the CTE, should be an array of symbols. # :recursive :: Specify that this is a recursive CTE # :materialized :: Set to false to force inlining of the CTE, or true to force not inlining # the CTE (PostgreSQL 12+/SQLite 3.35+). # # DB[:items].with(:items, DB[:syx].where(Sequel[:name].like('A%'))) # # WITH items AS (SELECT * FROM syx WHERE (name LIKE 'A%' ESCAPE '\')) SELECT * FROM items def with(name, dataset, opts=OPTS) raise(Error, 'This dataset does not support common table expressions') unless supports_cte? if hoist_cte?(dataset) s, ds = hoist_cte(dataset) s.with(name, ds, opts) else clone(:with=>((@opts[:with]||EMPTY_ARRAY) + [Hash[opts].merge!(:name=>name, :dataset=>dataset)]).freeze) end end # Add a recursive common table expression (CTE) with the given name, a dataset that # defines the nonrecursive part of the CTE, and a dataset that defines the recursive part # of the CTE. # # Options: # :args :: Specify the arguments/columns for the CTE, should be an array of symbols. # :union_all :: Set to false to use UNION instead of UNION ALL combining the nonrecursive and recursive parts. # # PostgreSQL 14+ Options: # :cycle :: Stop recursive searching when a cycle is detected. Includes two columns in the # result of the CTE, a cycle column indicating whether a cycle was detected for # the current row, and a path column for the path traversed to get to the current # row. If given, must be a hash with the following keys: # :columns :: (required) The column or array of columns to use to detect a cycle. # If the value of these columns match columns already traversed, then # a cycle is detected, and recursive searching will not traverse beyond # the cycle (the CTE will include the row where the cycle was detected). # :cycle_column :: The name of the cycle column in the output, defaults to :is_cycle. # :cycle_value :: The value of the cycle column in the output if the current row was # detected as a cycle, defaults to true. 
# :noncycle_value :: The value of the cycle column in the output if the current row # was not detected as a cycle, defaults to false. Only respected # if :cycle_value is given. # :path_column :: The name of the path column in the output, defaults to :path. # :search :: Include an order column in the result of the CTE that allows for breadth or # depth first searching. If given, must be a hash with the following keys: # :by :: (required) The column or array of columns to search by. # :order_column :: The name of the order column in the output, defaults to :ordercol. # :type :: Set to :breadth to use breadth-first searching (depth-first searching # is the default). # # DB[:t].with_recursive(:t, # DB[:i1].select(:id, :parent_id).where(parent_id: nil), # DB[:i1].join(:t, id: :parent_id).select(Sequel[:i1][:id], Sequel[:i1][:parent_id]), # args: [:id, :parent_id]) # # # WITH RECURSIVE t(id, parent_id) AS ( # # SELECT id, parent_id FROM i1 WHERE (parent_id IS NULL) # # UNION ALL # # SELECT i1.id, i1.parent_id FROM i1 INNER JOIN t ON (t.id = i1.parent_id) # # ) SELECT * FROM t # # DB[:t].with_recursive(:t, # DB[:i1].where(parent_id: nil), # DB[:i1].join(:t, id: :parent_id).select_all(:i1), # search: {by: :id, type: :breadth}, # cycle: {columns: :id, cycle_value: 1, noncycle_value: 2}) # # # WITH RECURSIVE t AS ( # # SELECT * FROM i1 WHERE (parent_id IS NULL) # # UNION ALL # # (SELECT i1.* FROM i1 INNER JOIN t ON (t.id = i1.parent_id)) # # ) # # SEARCH BREADTH FIRST BY id SET ordercol # # CYCLE id SET is_cycle TO 1 DEFAULT 2 USING path # # SELECT * FROM t def with_recursive(name, nonrecursive, recursive, opts=OPTS) raise(Error, 'This dataset does not support common table expressions') unless supports_cte? if hoist_cte?(nonrecursive) s, ds = hoist_cte(nonrecursive) s.with_recursive(name, ds, recursive, opts) elsif hoist_cte?(recursive) s, ds = hoist_cte(recursive) s.with_recursive(name, nonrecursive, ds, opts) else clone(:with=>((@opts[:with]||EMPTY_ARRAY) + [Hash[opts].merge!(:recursive=>true, :name=>name, :dataset=>nonrecursive.union(recursive, {:all=>opts[:union_all] != false, :from_self=>false}))]).freeze) end end if TRUE_FREEZE # Return a clone of the dataset extended with the given modules. # Note that like Object#extend, when multiple modules are provided # as arguments the cloned dataset is extended with the modules in reverse # order. If a block is provided, a DatasetModule is created using the block and # the clone is extended with that module after any modules given as arguments. def with_extend(*mods, &block) c = _clone(:freeze=>false) c.extend(*mods) unless mods.empty? c.extend(DatasetModule.new(&block)) if block c.freeze end else # :nocov: def with_extend(*mods, &block) # :nodoc: c = clone c.extend(*mods) unless mods.empty? c.extend(DatasetModule.new(&block)) if block c end # :nocov: end # Returns a cloned dataset with the given row_proc. # # ds = DB[:items] # ds.all # => [{:id=>2}] # ds.with_row_proc(:invert.to_proc).all # => [{2=>:id}] def with_row_proc(callable) clone(:row_proc=>callable) end # Returns a copy of the dataset with the static SQL used. This is useful if you want # to keep the same row_proc/graph, but change the SQL used to custom SQL. # # DB[:items].with_sql('SELECT * FROM foo') # SELECT * FROM foo # # You can use placeholders in your SQL and provide arguments for those placeholders: # # DB[:items].with_sql('SELECT ? 
FROM foo', 1) # SELECT 1 FROM foo # # You can also provide a method name and arguments to call to get the SQL: # # DB[:items].with_sql(:insert_sql, b: 1) # INSERT INTO items (b) VALUES (1) # # Note that datasets that specify custom SQL using this method will generally # ignore future dataset methods that modify the SQL used, as specifying custom SQL # overrides Sequel's SQL generator. You should probably limit yourself to the following # dataset methods when using this method, or use the implicit_subquery extension: # # * each # * all # * single_record (if only one record could be returned) # * single_value (if only one record could be returned, and a single column is selected) # * map # * as_hash # * to_hash # * to_hash_groups # * delete (if a DELETE statement) # * update (if an UPDATE statement, with no arguments) # * insert (if an INSERT statement, with no arguments) # * truncate (if a TRUNCATE statement, with no arguments) def with_sql(sql, *args) if sql.is_a?(Symbol) sql = public_send(sql, *args) else sql = SQL::PlaceholderLiteralString.new(sql, args) unless args.empty? end clone(:sql=>sql) end protected # Add the dataset to the list of compounds def compound_clone(type, dataset, opts) if dataset.is_a?(Dataset) && dataset.opts[:with] && !supports_cte_in_compounds? s, ds = hoist_cte(dataset) return s.compound_clone(type, ds, opts) end ds = compound_from_self.clone(:compounds=>(Array(@opts[:compounds]).map(&:dup) + [[type, dataset.compound_from_self, opts[:all]].freeze]).freeze) opts[:from_self] == false ? ds : ds.from_self(opts) end # Return true if the dataset has a non-nil value for any key in opts. def options_overlap(opts) !(@opts.map{|k,v| k unless v.nil?}.compact & opts).empty? end # From types allowed to be considered a simple_select_all SIMPLE_SELECT_ALL_ALLOWED_FROM = [Symbol, SQL::Identifier, SQL::QualifiedIdentifier].freeze # Whether this dataset is a simple select from an underlying table, such as: # # SELECT * FROM table # SELECT table.* FROM table def simple_select_all? return false unless (f = @opts[:from]) && f.length == 1 o = @opts.reject{|k,v| v.nil? || non_sql_option?(k)} from = f.first from = from.expression if from.is_a?(SQL::AliasedExpression) if SIMPLE_SELECT_ALL_ALLOWED_FROM.any?{|x| from.is_a?(x)} case o.length when 1 true when 2 (s = o[:select]) && s.length == 1 && s.first.is_a?(SQL::ColumnAll) else false end else false end end private # Load the extensions into the receiver, without checking if the receiver is frozen. def _extension!(exts) Sequel.extension(*exts) exts.each do |ext| if pr = Sequel.synchronize{EXTENSIONS[ext]} pr.call(self) else raise(Error, "Extension #{ext} does not have specific support handling individual datasets (try: Sequel.extension #{ext.inspect})") end end self end # If invert is true, invert the condition. def _invert_filter(cond, invert) if invert SQL::BooleanExpression.invert(cond) else cond end end # Append to the current MERGE WHEN clauses. # Mutates the hash to add the conditions, if a virtual row block is passed. def _merge_when(hash, &block) hash[:conditions] = Sequel.virtual_row(&block) if block if merge_when = @opts[:merge_when] clone(:merge_when => (merge_when.dup << hash.freeze).freeze) else clone(:merge_when => [hash.freeze].freeze) end end # Add the given filter condition. 
Arguments: # clause :: Symbol or which SQL clause to effect, should be :where or :having # cond :: The filter condition to add # invert :: Whether the condition should be inverted (true or false) # combine :: How to combine the condition with an existing condition, should be :AND or :OR def add_filter(clause, cond, invert=false, combine=:AND, &block) if cond == EMPTY_ARRAY && !block raise Error, "must provide an argument to a filtering method if not passing a block" end cond = cond.first if cond.size == 1 empty = cond == OPTS || cond == EMPTY_ARRAY if empty && !block self else if cond == nil cond = Sequel::NULL end if empty && block cond = nil end cond = _invert_filter(filter_expr(cond, &block), invert) cond = SQL::BooleanExpression.new(combine, @opts[clause], cond) if @opts[clause] if cond.nil? cond = Sequel::NULL end clone(clause => cond) end end # The default :qualify option to use for join tables if one is not specified. def default_join_table_qualification :symbol end # SQL expression object based on the expr type. See +where+. def filter_expr(expr = nil, &block) expr = nil if expr == EMPTY_ARRAY if block cond = filter_expr(Sequel.virtual_row(&block)) cond = SQL::BooleanExpression.new(:AND, filter_expr(expr), cond) if expr return cond end case expr when Hash SQL::BooleanExpression.from_value_pairs(expr) when Array if Sequel.condition_specifier?(expr) SQL::BooleanExpression.from_value_pairs(expr) else raise Error, "Invalid filter expression: #{expr.inspect}" end when LiteralString LiteralString.new("(#{expr})") when Numeric, SQL::NumericExpression, SQL::StringExpression, Proc, String raise Error, "Invalid filter expression: #{expr.inspect}" when TrueClass, FalseClass if supports_where_true? SQL::BooleanExpression.new(:NOOP, expr) elsif expr SQL::Constants::SQLTRUE else SQL::Constants::SQLFALSE end when PlaceholderLiteralizer::Argument expr.transform{|v| filter_expr(v)} when SQL::PlaceholderLiteralString expr.with_parens else expr end end # Return two datasets, the first a clone of the receiver with the WITH # clause from the given dataset added to it, and the second a clone of # the given dataset with the WITH clause removed. def hoist_cte(ds) [clone(:with => ((opts[:with] || EMPTY_ARRAY) + ds.opts[:with]).freeze), ds.clone(:with => nil)] end # Whether CTEs need to be hoisted from the given ds into the current ds. def hoist_cte?(ds) ds.is_a?(Dataset) && ds.opts[:with] && !supports_cte_in_subqueries? end # Inverts the given order by breaking it into a list of column references # and inverting them. # # DB[:items].invert_order([Sequel.desc(:id)]]) #=> [Sequel.asc(:id)] # DB[:items].invert_order([:category, Sequel.desc(:price)]) #=> [Sequel.desc(:category), Sequel.asc(:price)] def invert_order(order) return unless order order.map do |f| case f when SQL::OrderedExpression f.invert else SQL::OrderedExpression.new(f) end end end # Return self if the dataset already has a server, or a cloned dataset with the # default server otherwise. def default_server server?(:default) end # Whether the given option key does not affect the generated SQL. def non_sql_option?(key) NON_SQL_OPTIONS.include?(key) end # Treat the +block+ as a virtual_row block if not +nil+ and # add the resulting columns to the +columns+ array (modifies +columns+). 
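# An illustrative sketch of this private helper's effect (hypothetical
# values, not from the original source):
#
#   columns = [:a]
#   virtual_row_columns(columns, proc{[b, sum(c)]})
#   columns # => [:a, plus the block's results as Sequel expression objects]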
def virtual_row_columns(columns, block) if block v = Sequel.virtual_row(&block) if v.is_a?(Array) columns.concat(v) else columns << v end end end end end sequel-5.63.0/lib/sequel/dataset/sql.rb000066400000000000000000001561411434214120600177250ustar00rootroot00000000000000# frozen-string-literal: true module Sequel class Dataset # --------------------- # :section: 3 - User Methods relating to SQL Creation # These are methods you can call to see what SQL will be generated by the dataset. # --------------------- # Returns an EXISTS clause for the dataset as an SQL::PlaceholderLiteralString. # # DB.select(1).where(DB[:items].exists) # # SELECT 1 WHERE (EXISTS (SELECT * FROM items)) def exists SQL::PlaceholderLiteralString.new(EXISTS, [self], true) end # Returns an INSERT SQL query string. See +insert+. # # DB[:items].insert_sql(a: 1) # # => "INSERT INTO items (a) VALUES (1)" def insert_sql(*values) return static_sql(@opts[:sql]) if @opts[:sql] check_insert_allowed! columns, values = _parse_insert_sql_args(values) if values.is_a?(Array) && values.empty? && !insert_supports_empty_values? columns, values = insert_empty_columns_values elsif values.is_a?(Dataset) && hoist_cte?(values) && supports_cte?(:insert) ds, values = hoist_cte(values) return ds.clone(:columns=>columns, :values=>values).send(:_insert_sql) end clone(:columns=>columns, :values=>values).send(:_insert_sql) end # Append a literal representation of a value to the given SQL string. # # If an unsupported object is given, an +Error+ is raised. def literal_append(sql, v) case v when Symbol if skip_symbol_cache? literal_symbol_append(sql, v) else unless l = db.literal_symbol(v) l = String.new literal_symbol_append(l, v) db.literal_symbol_set(v, l) end sql << l end when String case v when LiteralString sql << v when SQL::Blob literal_blob_append(sql, v) else literal_string_append(sql, v) end when Integer sql << literal_integer(v) when Hash literal_hash_append(sql, v) when SQL::Expression literal_expression_append(sql, v) when Float sql << literal_float(v) when BigDecimal sql << literal_big_decimal(v) when NilClass sql << literal_nil when TrueClass sql << literal_true when FalseClass sql << literal_false when Array literal_array_append(sql, v) when Time v.is_a?(SQLTime) ? literal_sqltime_append(sql, v) : literal_time_append(sql, v) when DateTime literal_datetime_append(sql, v) when Date sql << literal_date(v) when Dataset literal_dataset_append(sql, v) else literal_other_append(sql, v) end end # The SQL to use for the MERGE statement. def merge_sql raise Error, "This database doesn't support MERGE" unless supports_merge? if sql = opts[:sql] return static_sql(sql) end if sql = cache_get(:_merge_sql) return sql end source, join_condition = @opts[:merge_using] raise Error, "No USING clause for MERGE" unless source sql = @opts[:append_sql] || sql_string_origin select_with_sql(sql) sql << "MERGE INTO " source_list_append(sql, @opts[:from]) sql << " USING " identifier_append(sql, source) sql << " ON " literal_append(sql, join_condition) _merge_when_sql(sql) cache_set(:_merge_sql, sql) if cache_sql? sql end # Returns an array of insert statements for inserting multiple records. # This method is used by +multi_insert+ to format insert statements and # expects a keys array and and an array of value arrays. 
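# An illustrative example (not from the original source; the exact SQL
# depends on multi_insert_sql_strategy, the default :separate shown here):
#
#   DB[:items].multi_insert_sql([:x, :y], [[1, 2], [3, 4]])
#   # => ["INSERT INTO items (x, y) VALUES (1, 2)",
#   #     "INSERT INTO items (x, y) VALUES (3, 4)"]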
def multi_insert_sql(columns, values) case multi_insert_sql_strategy when :values sql = LiteralString.new('VALUES ') expression_list_append(sql, values.map{|r| Array(r)}) [insert_sql(columns, sql)] when :union c = false sql = LiteralString.new u = ' UNION ALL SELECT ' f = empty_from_sql values.each do |v| if c sql << u else sql << 'SELECT ' c = true end expression_list_append(sql, v) sql << f if f end [insert_sql(columns, sql)] else values.map{|r| insert_sql(columns, r)} end end # Same as +select_sql+, not aliased directly to make subclassing simpler. def sql select_sql end # Returns a TRUNCATE SQL query string. See +truncate+ # # DB[:items].truncate_sql # => 'TRUNCATE items' def truncate_sql if opts[:sql] static_sql(opts[:sql]) else check_truncation_allowed! check_not_limited!(:truncate) raise(InvalidOperation, "Can't truncate filtered datasets") if opts[:where] || opts[:having] t = String.new source_list_append(t, opts[:from]) _truncate_sql(t) end end # Formats an UPDATE statement using the given values. See +update+. # # DB[:items].update_sql(price: 100, category: 'software') # # => "UPDATE items SET price = 100, category = 'software' # # Raises an +Error+ if the dataset is grouped or includes more # than one table. def update_sql(values = OPTS) return static_sql(opts[:sql]) if opts[:sql] check_update_allowed! check_not_limited!(:update) case values when LiteralString # nothing when String raise Error, "plain string passed to Dataset#update is not supported, use Sequel.lit to use a literal string" end clone(:values=>values).send(:_update_sql) end # --------------------- # :section: 9 - Internal Methods relating to SQL Creation # These methods, while public, are not designed to be used directly by the end user. # --------------------- # Given a type (e.g. select) and an array of clauses, # return an array of methods to call to build the SQL string. def self.clause_methods(type, clauses) clauses.map{|clause| :"#{type}_#{clause}_sql"}.freeze end # Define a dataset literalization method for the given type in the given module, # using the given clauses. # # Arguments: # mod :: Module in which to define method # type :: Type of SQL literalization method to create, either :select, :insert, :update, or :delete # clauses :: array of clauses that make up the SQL query for the type. This can either be a single # array of symbols/strings, or it can be an array of pairs, with the first element in # each pair being an if/elsif/else code fragment, and the second element in each pair # being an array of symbol/strings for the appropriate branch. def self.def_sql_method(mod, type, clauses) priv = type == :update || type == :insert cacheable = type == :select || type == :delete lines = [] lines << 'private' if priv lines << "def #{'_' if priv}#{type}_sql" lines << 'if sql = opts[:sql]; return static_sql(sql) end' unless priv lines << "if sql = cache_get(:_#{type}_sql); return sql end" if cacheable lines << 'check_delete_allowed!' << 'check_not_limited!(:delete)' if type == :delete lines << 'sql = @opts[:append_sql] || sql_string_origin' if clauses.all?{|c| c.is_a?(Array)} clauses.each do |i, cs| lines << i lines.concat(clause_methods(type, cs).map{|x| "#{x}(sql)"}) end lines << 'end' else lines.concat(clause_methods(type, clauses).map{|x| "#{x}(sql)"}) end lines << "cache_set(:_#{type}_sql, sql) if cache_sql?" 
if cacheable lines << 'sql' lines << 'end' mod.class_eval lines.join("\n"), __FILE__, __LINE__ end def_sql_method(self, :delete, %w'delete from where') def_sql_method(self, :insert, %w'insert into columns values') def_sql_method(self, :select, %w'with select distinct columns from join where group having compounds order limit lock') def_sql_method(self, :update, %w'update table set where') WILDCARD = LiteralString.new('*').freeze COUNT_OF_ALL_AS_COUNT = SQL::Function.new(:count, WILDCARD).as(:count) DEFAULT = LiteralString.new('DEFAULT').freeze EXISTS = ['EXISTS '.freeze].freeze BITWISE_METHOD_MAP = {:& =>:BITAND, :| => :BITOR, :^ => :BITXOR}.freeze COUNT_FROM_SELF_OPTS = [:distinct, :group, :sql, :limit, :offset, :compounds].freeze IS_LITERALS = {nil=>'NULL'.freeze, true=>'TRUE'.freeze, false=>'FALSE'.freeze}.freeze QUALIFY_KEYS = [:select, :where, :having, :order, :group].freeze IS_OPERATORS = ::Sequel::SQL::ComplexExpression::IS_OPERATORS LIKE_OPERATORS = ::Sequel::SQL::ComplexExpression::LIKE_OPERATORS N_ARITY_OPERATORS = ::Sequel::SQL::ComplexExpression::N_ARITY_OPERATORS TWO_ARITY_OPERATORS = ::Sequel::SQL::ComplexExpression::TWO_ARITY_OPERATORS REGEXP_OPERATORS = ::Sequel::SQL::ComplexExpression::REGEXP_OPERATORS [:literal, :quote_identifier, :quote_schema_table].each do |meth| class_eval(<<-END, __FILE__, __LINE__ + 1) def #{meth}(*args, &block) s = ''.dup #{meth}_append(s, *args, &block) s end END end # Append literalization of aliased expression to SQL string. def aliased_expression_sql_append(sql, ae) literal_append(sql, ae.expression) as_sql_append(sql, ae.alias, ae.columns) end # Append literalization of array to SQL string. def array_sql_append(sql, a) if a.empty? sql << '(NULL)' else sql << '(' expression_list_append(sql, a) sql << ')' end end # Append literalization of boolean constant to SQL string. def boolean_constant_sql_append(sql, constant) if (constant == true || constant == false) && !supports_where_true? sql << (constant == true ? '(1 = 1)' : '(1 = 0)') else literal_append(sql, constant) end end # Append literalization of case expression to SQL string. def case_expression_sql_append(sql, ce) sql << '(CASE' if ce.expression? sql << ' ' literal_append(sql, ce.expression) end w = " WHEN " t = " THEN " ce.conditions.each do |c,r| sql << w literal_append(sql, c) sql << t literal_append(sql, r) end sql << " ELSE " literal_append(sql, ce.default) sql << " END)" end # Append literalization of cast expression to SQL string. def cast_sql_append(sql, expr, type) sql << 'CAST(' literal_append(sql, expr) sql << ' AS ' << db.cast_type_literal(type).to_s sql << ')' end # Append literalization of column all selection to SQL string. def column_all_sql_append(sql, ca) qualified_identifier_sql_append(sql, ca.table, WILDCARD) end # Append literalization of complex expression to SQL string. def complex_expression_sql_append(sql, op, args) case op when *IS_OPERATORS r = args[1] if r.nil? || supports_is_true? 
raise(InvalidOperation, 'Invalid argument used for IS operator') unless val = IS_LITERALS[r] sql << '(' literal_append(sql, args[0]) sql << ' ' << op.to_s << ' ' sql << val << ')' elsif op == :IS complex_expression_sql_append(sql, :"=", args) else complex_expression_sql_append(sql, :OR, [SQL::BooleanExpression.new(:"!=", *args), SQL::BooleanExpression.new(:IS, args[0], nil)]) end when :IN, :"NOT IN" cols = args[0] vals = args[1] col_array = true if cols.is_a?(Array) if vals.is_a?(Array) val_array = true empty_val_array = vals == [] end if empty_val_array literal_append(sql, empty_array_value(op, cols)) elsif col_array if !supports_multiple_column_in? if val_array expr = SQL::BooleanExpression.new(:OR, *vals.to_a.map{|vs| SQL::BooleanExpression.from_value_pairs(cols.to_a.zip(vs).map{|c, v| [c, v]})}) literal_append(sql, op == :IN ? expr : ~expr) else old_vals = vals vals = vals.naked if vals.is_a?(Sequel::Dataset) vals = vals.to_a val_cols = old_vals.columns complex_expression_sql_append(sql, op, [cols, vals.map!{|x| x.values_at(*val_cols)}]) end else # If the columns and values are both arrays, use array_sql instead of # literal so that if values is an array of two element arrays, it # will be treated as a value list instead of a condition specifier. sql << '(' literal_append(sql, cols) sql << ' ' << op.to_s << ' ' if val_array array_sql_append(sql, vals) else literal_append(sql, vals) end sql << ')' end else sql << '(' literal_append(sql, cols) sql << ' ' << op.to_s << ' ' literal_append(sql, vals) sql << ')' end when :LIKE, :'NOT LIKE' sql << '(' literal_append(sql, args[0]) sql << ' ' << op.to_s << ' ' literal_append(sql, args[1]) if requires_like_escape? sql << " ESCAPE " literal_append(sql, "\\") end sql << ')' when :ILIKE, :'NOT ILIKE' complex_expression_sql_append(sql, (op == :ILIKE ? :LIKE : :"NOT LIKE"), args.map{|v| Sequel.function(:UPPER, v)}) when :** function_sql_append(sql, Sequel.function(:power, *args)) when *TWO_ARITY_OPERATORS if REGEXP_OPERATORS.include?(op) && !supports_regexp? raise InvalidOperation, "Pattern matching via regular expressions is not supported on #{db.database_type}" end sql << '(' literal_append(sql, args[0]) sql << ' ' << op.to_s << ' ' literal_append(sql, args[1]) sql << ')' when *N_ARITY_OPERATORS sql << '(' c = false op_str = " #{op} " args.each do |a| sql << op_str if c literal_append(sql, a) c ||= true end sql << ')' when :NOT sql << 'NOT ' literal_append(sql, args[0]) when :NOOP literal_append(sql, args[0]) when :'B~' sql << '~' literal_append(sql, args[0]) when :extract sql << 'extract(' << args[0].to_s << ' FROM ' literal_append(sql, args[1]) sql << ')' else raise(InvalidOperation, "invalid operator #{op}") end end # Append literalization of constant to SQL string. def constant_sql_append(sql, constant) sql << constant.to_s end # Append literalization of delayed evaluation to SQL string, # causing the delayed evaluation proc to be evaluated. def delayed_evaluation_sql_append(sql, delay) # Delayed evaluations are used specifically so the SQL # can differ in subsequent calls, so we definitely don't # want to cache the sql in this case. disable_sql_caching! if recorder = @opts[:placeholder_literalizer] recorder.use(sql, lambda{delay.call(self)}, nil) else literal_append(sql, delay.call(self)) end end # Append literalization of function call to SQL string. 
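# An illustrative example of the simple case (hypothetical; assumes
# identifier quoting is disabled, and adapters may also quote function
# names or emulate functions):
#
#   sql = String.new
#   function_sql_append(sql, Sequel.function(:sum, :price))
#   sql # => "sum(price)"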
def function_sql_append(sql, f) name = f.name opts = f.opts if opts[:emulate] if emulate_function?(name) emulate_function_sql_append(sql, f) return end name = native_function_name(name) end sql << 'LATERAL ' if opts[:lateral] case name when SQL::Identifier if supports_quoted_function_names? && opts[:quoted] literal_append(sql, name) else sql << name.value.to_s end when SQL::QualifiedIdentifier if supports_quoted_function_names? && opts[:quoted] != false literal_append(sql, name) else sql << split_qualifiers(name).join('.') end else if supports_quoted_function_names? && opts[:quoted] quote_identifier_append(sql, name) else sql << name.to_s end end sql << '(' if filter = opts[:filter] filter = filter_expr(filter, &opts[:filter_block]) end if opts[:*] if filter && !supports_filtered_aggregates? literal_append(sql, Sequel.case({filter=>1}, nil)) filter = nil else sql << '*' end else sql << "DISTINCT " if opts[:distinct] if filter && !supports_filtered_aggregates? expression_list_append(sql, f.args.map{|arg| Sequel.case({filter=>arg}, nil)}) filter = nil else expression_list_append(sql, f.args) end if order = opts[:order] sql << " ORDER BY " expression_list_append(sql, order) end end sql << ')' if group = opts[:within_group] sql << " WITHIN GROUP (ORDER BY " expression_list_append(sql, group) sql << ')' end if filter sql << " FILTER (WHERE " literal_append(sql, filter) sql << ')' end if window = opts[:over] sql << ' OVER ' window_sql_append(sql, window.opts) end if opts[:with_ordinality] sql << " WITH ORDINALITY" end end # Append literalization of JOIN clause without ON or USING to SQL string. def join_clause_sql_append(sql, jc) table = jc.table table_alias = jc.table_alias table_alias = nil if table == table_alias && !jc.column_aliases sql << ' ' << join_type_sql(jc.join_type) << ' ' identifier_append(sql, table) as_sql_append(sql, table_alias, jc.column_aliases) if table_alias end # Append literalization of JOIN ON clause to SQL string. def join_on_clause_sql_append(sql, jc) join_clause_sql_append(sql, jc) sql << ' ON ' literal_append(sql, filter_expr(jc.on)) end # Append literalization of JOIN USING clause to SQL string. def join_using_clause_sql_append(sql, jc) join_clause_sql_append(sql, jc) join_using_clause_using_sql_append(sql, jc.using) end # Append literalization of negative boolean constant to SQL string. def negative_boolean_constant_sql_append(sql, constant) sql << 'NOT ' boolean_constant_sql_append(sql, constant) end # Append literalization of ordered expression to SQL string. def ordered_expression_sql_append(sql, oe) if emulate = requires_emulating_nulls_first? case oe.nulls when :first null_order = 0 when :last null_order = 2 end if null_order literal_append(sql, Sequel.case({{oe.expression=>nil}=>null_order}, 1)) sql << ", " end end literal_append(sql, oe.expression) sql << (oe.descending ? ' DESC' : ' ASC') unless emulate case oe.nulls when :first sql << " NULLS FIRST" when :last sql << " NULLS LAST" end end end # Append literalization of placeholder literal string to SQL string. def placeholder_literal_string_sql_append(sql, pls) args = pls.args str = pls.str sql << '(' if pls.parens if args.is_a?(Hash) if args.empty? sql << str else re = /:(#{args.keys.map{|k| Regexp.escape(k.to_s)}.join('|')})\b/ while true previous, q, str = str.partition(re) sql << previous literal_append(sql, args[($1||q[1..-1].to_s).to_sym]) unless q.empty? break if str.empty? 
end end elsif str.is_a?(Array) len = args.length str.each_with_index do |s, i| sql << s literal_append(sql, args[i]) unless i == len end unless str.length == args.length || str.length == args.length + 1 raise Error, "Mismatched number of placeholders (#{str.length}) and placeholder arguments (#{args.length}) when using placeholder array" end else i = -1 match_len = args.length - 1 while true previous, q, str = str.partition('?') sql << previous literal_append(sql, args.at(i+=1)) unless q.empty? if str.empty? unless i == match_len raise Error, "Mismatched number of placeholders (#{i+1}) and placeholder arguments (#{args.length}) when using placeholder string" end break end end end sql << ')' if pls.parens end # Append literalization of qualified identifier to SQL string. # If 3 arguments are given, the 2nd should be the table/qualifier and the third should be # column/qualified. If 2 arguments are given, the 2nd should be an SQL::QualifiedIdentifier. def qualified_identifier_sql_append(sql, table, column=(c = table.column; table = table.table; c)) identifier_append(sql, table) sql << '.' identifier_append(sql, column) end # Append literalization of unqualified identifier to SQL string. # Adds quoting to identifiers (columns and tables). If identifiers are not # being quoted, returns name as a string. If identifiers are being quoted # quote the name with quoted_identifier. def quote_identifier_append(sql, name) if name.is_a?(LiteralString) sql << name else name = name.value if name.is_a?(SQL::Identifier) name = input_identifier(name) if quote_identifiers? quoted_identifier_append(sql, name) else sql << name end end end # Append literalization of identifier or unqualified identifier to SQL string. def quote_schema_table_append(sql, table) schema, table = schema_and_table(table) if schema quote_identifier_append(sql, schema) sql << '.' end quote_identifier_append(sql, table) end # Append literalization of quoted identifier to SQL string. # This method quotes the given name with the SQL standard double quote. # should be overridden by subclasses to provide quoting not matching the # SQL standard, such as backtick (used by MySQL and SQLite). def quoted_identifier_append(sql, name) sql << '"' << name.to_s.gsub('"', '""') << '"' end # Split the schema information from the table, returning two strings, # one for the schema and one for the table. The returned schema may # be nil, but the table will always have a string value. # # Note that this function does not handle tables with more than one # level of qualification (e.g. database.schema.table on Microsoft # SQL Server). def schema_and_table(table_name, sch=nil) sch = sch.to_s if sch case table_name when Symbol s, t, _ = split_symbol(table_name) [s||sch, t] when SQL::QualifiedIdentifier [table_name.table.to_s, table_name.column.to_s] when SQL::Identifier [sch, table_name.value.to_s] when String [sch, table_name] else raise Error, 'table_name should be a Symbol, SQL::QualifiedIdentifier, SQL::Identifier, or String' end end # Splits table_name into an array of strings. # # ds.split_qualifiers(:s) # ['s'] # ds.split_qualifiers(Sequel[:t][:s]) # ['t', 's'] # ds.split_qualifiers(Sequel[:d][:t][:s]) # ['d', 't', 's'] # ds.split_qualifiers(Sequel.qualify(Sequel[:h][:d], Sequel[:t][:s])) # ['h', 'd', 't', 's'] def split_qualifiers(table_name, *args) case table_name when SQL::QualifiedIdentifier split_qualifiers(table_name.table, nil) + split_qualifiers(table_name.column, nil) else sch, table = schema_and_table(table_name, *args) sch ? 
[sch, table] : [table] end end # Append literalization of subscripts (SQL array accesses) to SQL string. def subscript_sql_append(sql, s) case s.expression when Symbol, SQL::Subscript, SQL::Identifier, SQL::QualifiedIdentifier # nothing else wrap_expression = true sql << '(' end literal_append(sql, s.expression) if wrap_expression sql << ')[' else sql << '[' end sub = s.sub if sub.length == 1 && (range = sub.first).is_a?(Range) literal_append(sql, range.begin) sql << ':' e = range.end e -= 1 if range.exclude_end? && e.is_a?(Integer) literal_append(sql, e) else expression_list_append(sql, s.sub) end sql << ']' end # Append literalization of windows (for window functions) to SQL string. def window_sql_append(sql, opts) raise(Error, 'This dataset does not support window functions') unless supports_window_functions? space = false space_s = ' ' sql << '(' if window = opts[:window] literal_append(sql, window) space = true end if part = opts[:partition] sql << space_s if space sql << "PARTITION BY " expression_list_append(sql, Array(part)) space = true end if order = opts[:order] sql << space_s if space sql << "ORDER BY " expression_list_append(sql, Array(order)) space = true end if frame = opts[:frame] sql << space_s if space if frame.is_a?(String) sql << frame else case frame when :all frame_type = :rows frame_start = :preceding frame_end = :following when :rows, :range, :groups frame_type = frame frame_start = :preceding frame_end = :current when Hash frame_type = frame[:type] unless frame_type == :rows || frame_type == :range || frame_type == :groups raise Error, "invalid window :frame :type option: #{frame_type.inspect}" end unless frame_start = frame[:start] raise Error, "invalid window :frame :start option: #{frame_start.inspect}" end frame_end = frame[:end] frame_exclude = frame[:exclude] else raise Error, "invalid window :frame option: #{frame.inspect}" end sql << frame_type.to_s.upcase << " " sql << 'BETWEEN ' if frame_end window_frame_boundary_sql_append(sql, frame_start, :preceding) if frame_end sql << " AND " window_frame_boundary_sql_append(sql, frame_end, :following) end if frame_exclude sql << " EXCLUDE " case frame_exclude when :current sql << "CURRENT ROW" when :group sql << "GROUP" when :ties sql << "TIES" when :no_others sql << "NO OTHERS" else raise Error, "invalid window :frame :exclude option: #{frame_exclude.inspect}" end end end end sql << ')' end protected # Return a from_self dataset if an order or limit is specified, so it works as expected # with UNION, EXCEPT, and INTERSECT clauses. def compound_from_self (@opts[:sql] || @opts[:limit] || @opts[:order] || @opts[:offset]) ? 
from_self : self end private # Append the INSERT sql used in a MERGE def _merge_insert_sql(sql, data) sql << " THEN INSERT" columns, values = _parse_insert_sql_args(data[:values]) _insert_columns_sql(sql, columns) _insert_values_sql(sql, values) end def _merge_update_sql(sql, data) sql << " THEN UPDATE SET " update_sql_values_hash(sql, data[:values]) end def _merge_delete_sql(sql, data) sql << " THEN DELETE" end # Mapping of merge types to related SQL MERGE_TYPE_SQL = { :insert => ' WHEN NOT MATCHED', :delete => ' WHEN MATCHED', :update => ' WHEN MATCHED', :matched => ' WHEN MATCHED', :not_matched => ' WHEN NOT MATCHED', }.freeze private_constant :MERGE_TYPE_SQL # Add the WHEN clauses to the MERGE SQL def _merge_when_sql(sql) raise Error, "no WHEN [NOT] MATCHED clauses provided for MERGE" unless merge_when = @opts[:merge_when] merge_when.each do |data| type = data[:type] sql << MERGE_TYPE_SQL[type] _merge_when_conditions_sql(sql, data) send(:"_merge_#{type}_sql", sql, data) end end # Append MERGE WHEN conditions, if there are conditions provided. def _merge_when_conditions_sql(sql, data) if data.has_key?(:conditions) sql << " AND " literal_append(sql, data[:conditions]) end end # Parse the values passed to insert_sql, returning columns and values # to use for the INSERT. Returned columns is always an array, but can be empty # for an INSERT without explicit column references. Returned values can be an # array, dataset, or literal string. def _parse_insert_sql_args(values) columns = [] case values.size when 0 values = [] when 1 case vals = values[0] when Hash values = [] vals.each do |k,v| columns << k values << v end when Dataset, Array, LiteralString values = vals end when 2 if (v0 = values[0]).is_a?(Array) && ((v1 = values[1]).is_a?(Array) || v1.is_a?(Dataset) || v1.is_a?(LiteralString)) columns, values = v0, v1 raise(Error, "Different number of values and columns given to insert_sql") if values.is_a?(Array) and columns.length != values.length end end [columns, values] end # Formats the truncate statement. Assumes the table given has already been # literalized. def _truncate_sql(table) "TRUNCATE TABLE #{table}" end # Returns an appropriate symbol for the alias represented by s. def alias_alias_symbol(s) case s when Symbol s when String s.to_sym when SQL::Identifier s.value.to_s.to_sym else raise Error, "Invalid alias for alias_alias_symbol: #{s.inspect}" end end # Returns an appropriate alias symbol for the given object, which can be # a Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier, or # SQL::AliasedExpression. def alias_symbol(sym) case sym when Symbol s, t, a = split_symbol(sym) a || s ? (a || t).to_sym : sym when String sym.to_sym when SQL::Identifier sym.value.to_s.to_sym when SQL::QualifiedIdentifier alias_symbol(sym.column) when SQL::AliasedExpression alias_alias_symbol(sym.alias) else raise Error, "Invalid alias for alias_symbol: #{sym.inspect}" end end # Clone of this dataset usable in aggregate operations. Does # a from_self if dataset contains any parameters that would # affect normal aggregation, or just removes an existing # order if not. Also removes the row_proc, which isn't needed # for aggregate calculations. def aggregate_dataset (options_overlap(COUNT_FROM_SELF_OPTS) ? from_self : unordered).naked end # Append aliasing expression to SQL string. 
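# An illustrative sketch of this private helper (hypothetical; assumes
# identifier quoting with the SQL standard double quote):
#
#   sql = 'c1'.dup
#   as_sql_append(sql, :total)
#   sql # => 'c1 AS "total"'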
def as_sql_append(sql, aliaz, column_aliases=nil) sql << ' AS ' quote_identifier_append(sql, aliaz) if column_aliases raise Error, "#{db.database_type} does not support derived column lists" unless supports_derived_column_lists? sql << '(' identifier_list_append(sql, column_aliases) sql << ')' end end # Don't allow caching SQL if specifically marked not to. def cache_sql? !@opts[:no_cache_sql] && !cache_get(:_no_cache_sql) end # Raise an InvalidOperation exception if modification is not allowed for this dataset. # Check whether it is allowed to insert into this dataset. # Only for backwards compatibility with older external adapters. def check_modification_allowed! # SEQUEL6: Remove Sequel::Deprecation.deprecate("Dataset#check_modification_allowed!", "Use check_{insert,delete,update,truncation}_allowed! instead") _check_modification_allowed!(supports_modifying_joins?) end # Check whether it is allowed to insert into this dataset. def check_insert_allowed! _check_modification_allowed!(false) end alias check_truncation_allowed! check_insert_allowed! # Check whether it is allowed to delete from this dataset. def check_delete_allowed! _check_modification_allowed!(supports_deleting_joins?) end # Check whether it is allowed to update this dataset. def check_update_allowed! _check_modification_allowed!(supports_updating_joins?) end # Internals of the check_*_allowed! methods def _check_modification_allowed!(modifying_joins_supported) raise(InvalidOperation, "Grouped datasets cannot be modified") if opts[:group] raise(InvalidOperation, "Joined datasets cannot be modified") if !modifying_joins_supported && joined_dataset? end # Raise error if the dataset uses limits or offsets. def check_not_limited!(type) return if @opts[:skip_limit_check] && type != :truncate raise InvalidOperation, "Dataset##{type} not supported on datasets with limits or offsets" if opts[:limit] || opts[:offset] end # Append column list to SQL string. # If the column list is empty, a wildcard (*) is appended. def column_list_append(sql, columns) if (columns.nil? || columns.empty?) sql << '*' else expression_list_append(sql, columns) end end # Yield each pair of arguments to the block, which should # return an object representing the SQL expression for those # two arguments. For more than two arguments, the first # argument to the block will be result of the previous block call. def complex_expression_arg_pairs(args) case args.length when 1 args[0] when 2 yield args[0], args[1] else args.inject{|m, a| yield(m, a)} end end # Append the literalization of the args using complex_expression_arg_pairs # to the given SQL string, used when database operator/function is 2-ary # where Sequel expression is N-ary. def complex_expression_arg_pairs_append(sql, args, &block) literal_append(sql, complex_expression_arg_pairs(args, &block)) end # Append literalization of complex expression to SQL string, for # operators unsupported by some databases. Used by adapters for databases # that don't support the operators natively. 
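# For example (illustrative; only reached on adapters that emulate the
# operator), a bitwise AND is rewritten to use the BITAND function:
#
#   Sequel.expr(:a).sql_number & :b
#   # would be literalized as: BITAND(a, b)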
def complex_expression_emulate_append(sql, op, args) # :nocov: case op # :nocov: when :% complex_expression_arg_pairs_append(sql, args){|a, b| Sequel.function(:MOD, a, b)} when :>> complex_expression_arg_pairs_append(sql, args){|a, b| Sequel./(a, Sequel.function(:power, 2, b))} when :<< complex_expression_arg_pairs_append(sql, args){|a, b| Sequel.*(a, Sequel.function(:power, 2, b))} when :&, :|, :^ f = BITWISE_METHOD_MAP[op] complex_expression_arg_pairs_append(sql, args){|a, b| Sequel.function(f, a, b)} when :'B~' sql << "((0 - " literal_append(sql, args[0]) sql << ") - 1)" end end # Append literalization of dataset used in UNION/INTERSECT/EXCEPT clause to SQL string. def compound_dataset_sql_append(sql, ds) subselect_sql_append(sql, ds) end # The alias to use for datasets, takes a number to make sure the name is unique. def dataset_alias(number) :"t#{number}" end # The strftime format to use when literalizing the time. def default_timestamp_format requires_sql_standard_datetimes? ? "TIMESTAMP '%Y-%m-%d %H:%M:%S%N%z'" : "'%Y-%m-%d %H:%M:%S%N%z'" end def delete_delete_sql(sql) sql << 'DELETE' end def delete_from_sql(sql) if f = @opts[:from] sql << ' FROM ' source_list_append(sql, f) end end # Disable caching of SQL for the current dataset def disable_sql_caching! cache_set(:_no_cache_sql, true) end # An SQL FROM clause to use in SELECT statements where the dataset has # no from tables. def empty_from_sql nil end # Whether to emulate the function with the given name. This should only be true # if the emulation goes beyond choosing a function with a different name. def emulate_function?(name) false end # Append literalization of array of expressions to SQL string, separating them # with commas. def expression_list_append(sql, columns) c = false co = ', ' columns.each do |col| sql << co if c literal_append(sql, col) c ||= true end end # Append literalization of array of grouping elements to SQL string, seperating them with commas. def grouping_element_list_append(sql, columns) c = false co = ', ' columns.each do |col| sql << co if c if col.is_a?(Array) && col.empty? sql << '()' else literal_append(sql, Array(col)) end c ||= true end end # An expression for how to handle an empty array lookup. def empty_array_value(op, cols) {1 => ((op == :IN) ? 0 : 1)} end # Format the timestamp based on the default_timestamp_format, with a couple # of modifiers. First, allow %N to be used for fractions seconds (if the # database supports them), and override %z to always use a numeric offset # of hours and minutes. def format_timestamp(v) v2 = db.from_application_timestamp(v) fmt = default_timestamp_format.gsub(/%[Nz]/) do |m| if m == '%N' # Ruby 1.9 supports %N in timestamp formats, but Sequel has supported %N # for longer in a different way, where the . is already appended and only 6 # decimal places are used by default. format_timestamp_usec(v.is_a?(DateTime) ? v.sec_fraction*(1000000) : v.usec) if supports_timestamp_usecs? else if supports_timestamp_timezones? # Would like to just use %z format, but it doesn't appear to work on Windows # Instead, the offset fragment is constructed manually minutes = (v2.is_a?(DateTime) ? v2.offset * 1440 : v2.utc_offset/60).to_i format_timestamp_offset(*minutes.divmod(60)) end end end v2.strftime(fmt) end # Return the SQL timestamp fragment to use for the timezone offset. def format_timestamp_offset(hour, minute) sprintf("%+03i%02i", hour, minute) end # Return the SQL timestamp fragment to use for the fractional time part. # Should start with the decimal point. 
Uses 6 decimal places by default. def format_timestamp_usec(usec, ts=timestamp_precision) unless ts == 6 usec = usec/(10 ** (6 - ts)) end sprintf(".%0#{ts}d", usec) end # Append literalization of identifier to SQL string, considering regular strings # as SQL identifiers instead of SQL strings. def identifier_append(sql, v) if v.is_a?(String) case v when LiteralString sql << v when SQL::Blob literal_append(sql, v) else quote_identifier_append(sql, v) end else literal_append(sql, v) end end # Append literalization of array of identifiers to SQL string. def identifier_list_append(sql, args) c = false comma = ', ' args.each do |a| sql << comma if c identifier_append(sql, a) c ||= true end end # Upcase identifiers by default when inputting them into the database. def input_identifier(v) v.to_s.upcase end def insert_into_sql(sql) sql << " INTO " if (f = @opts[:from]) && f.length == 1 identifier_append(sql, unaliased_identifier(f.first)) else source_list_append(sql, f) end end def insert_columns_sql(sql) _insert_columns_sql(sql, opts[:columns]) end def _insert_columns_sql(sql, columns) if columns && !columns.empty? sql << ' (' identifier_list_append(sql, columns) sql << ')' end end # The columns and values to use for an empty insert if the database doesn't support # INSERT with DEFAULT VALUES. def insert_empty_columns_values [[columns.last], [DEFAULT]] end def insert_insert_sql(sql) sql << "INSERT" end def insert_values_sql(sql) _insert_values_sql(sql, opts[:values]) end def _insert_values_sql(sql, values) case values when Array if values.empty? sql << " DEFAULT VALUES" else sql << " VALUES " literal_append(sql, values) end when Dataset sql << ' ' subselect_sql_append(sql, values) when LiteralString sql << ' ' << values else raise Error, "Unsupported INSERT values type, should be an Array or Dataset: #{values.inspect}" end end def insert_returning_sql(sql) if opts.has_key?(:returning) sql << " RETURNING " column_list_append(sql, Array(opts[:returning])) end end alias delete_returning_sql insert_returning_sql alias update_returning_sql insert_returning_sql # SQL fragment specifying a JOIN type, converts underscores to # spaces and upcases. def join_type_sql(join_type) "#{join_type.to_s.gsub('_', ' ').upcase} JOIN" end # Append USING clause for JOIN USING def join_using_clause_using_sql_append(sql, using_columns) sql << ' USING (' column_list_append(sql, using_columns) sql << ')' end # Append a literalization of the array to SQL string. # Treats as an expression if an array of all two pairs, or as a SQL array otherwise. def literal_array_append(sql, v) if Sequel.condition_specifier?(v) literal_expression_append(sql, SQL::BooleanExpression.from_value_pairs(v)) else array_sql_append(sql, v) end end # SQL fragment for BigDecimal def literal_big_decimal(v) d = v.to_s("F") v.nan? || v.infinite? ? "'#{d}'" : d end # Append literalization of SQL::Blob to SQL string. def literal_blob_append(sql, v) literal_string_append(sql, v) end # Append literalization of dataset to SQL string. Does a subselect inside parantheses. def literal_dataset_append(sql, v) sql << 'LATERAL ' if v.opts[:lateral] sql << '(' subselect_sql_append(sql, v) sql << ')' end # SQL fragment for Date, using the ISO8601 format. def literal_date(v) if requires_sql_standard_datetimes? v.strftime("DATE '%Y-%m-%d'") else v.strftime("'%Y-%m-%d'") end end # SQL fragment for DateTime def literal_datetime(v) format_timestamp(v) end # Append literalization of DateTime to SQL string. 
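# An illustrative sketch (hypothetical; the fractional seconds and offset
# shown assume the database supports timestamp usecs and timezone offsets):
#
#   sql = String.new
#   literal_datetime_append(sql, DateTime.new(2020, 1, 2, 3, 4, 5))
#   sql # => "'2020-01-02 03:04:05.000000+0000'"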
def literal_datetime_append(sql, v) sql << literal_datetime(v) end # Append literalization of SQL::Expression to SQL string. def literal_expression_append(sql, v) v.to_s_append(self, sql) end # SQL fragment for false def literal_false "'f'" end # SQL fragment for Float def literal_float(v) v.to_s end # Append literalization of Hash to SQL string, treating hash as a boolean expression. def literal_hash_append(sql, v) literal_expression_append(sql, SQL::BooleanExpression.from_value_pairs(v)) end # SQL fragment for Integer def literal_integer(v) v.to_s end # SQL fragment for nil def literal_nil "NULL" end # Append a literalization of the object to the given SQL string. # Calls +sql_literal_append+ if object responds to it, otherwise # calls +sql_literal+ if object responds to it, otherwise raises an error. # If a database specific type is allowed, this should be overriden in a subclass. def literal_other_append(sql, v) # We can't be sure if v will always literalize to the same SQL, so # don't cache SQL for a dataset that uses this. disable_sql_caching! if v.respond_to?(:sql_literal_append) v.sql_literal_append(self, sql) elsif v.respond_to?(:sql_literal) sql << v.sql_literal(self) else raise Error, "can't express #{v.inspect} as a SQL literal" end end # SQL fragment for Sequel::SQLTime, containing just the time part def literal_sqltime(v) v.strftime("'%H:%M:%S#{format_timestamp_usec(v.usec, sqltime_precision) if supports_timestamp_usecs?}'") end # Append literalization of Sequel::SQLTime to SQL string. def literal_sqltime_append(sql, v) sql << literal_sqltime(v) end # Append literalization of string to SQL string. def literal_string_append(sql, v) sql << "'" << v.gsub("'", "''") << "'" end # Append literalization of symbol to SQL string. def literal_symbol_append(sql, v) c_table, column, c_alias = split_symbol(v) if c_table quote_identifier_append(sql, c_table) sql << '.' end quote_identifier_append(sql, column) as_sql_append(sql, c_alias) if c_alias end # SQL fragment for Time def literal_time(v) format_timestamp(v) end # Append literalization of Time to SQL string. def literal_time_append(sql, v) sql << literal_time(v) end # SQL fragment for true def literal_true "'t'" end # What strategy to use for import/multi_insert. While SQL-92 defaults # to allowing multiple rows in a VALUES clause, there are enough databases # that don't allow that that it can't be the default. Use separate queries # by default, which works everywhere. def multi_insert_sql_strategy :separate end # Get the native function name given the emulated function name. def native_function_name(emulated_function) emulated_function end # Returns a qualified column name (including a table name) if the column # name isn't already qualified. def qualified_column_name(column, table) if column.is_a?(Symbol) c_table, column, _ = split_symbol(column) unless c_table case table when Symbol schema, table, t_alias = split_symbol(table) t_alias ||= Sequel::SQL::QualifiedIdentifier.new(schema, table) if schema when Sequel::SQL::AliasedExpression t_alias = table.alias end c_table = t_alias || table end ::Sequel::SQL::QualifiedIdentifier.new(c_table, column) else column end end # Qualify the given expression to the given table. def qualified_expression(e, table) Qualifier.new(table).transform(e) end def select_columns_sql(sql) sql << ' ' column_list_append(sql, @opts[:select]) end def select_distinct_sql(sql) if distinct = @opts[:distinct] sql << " DISTINCT" unless distinct.empty? 
sql << " ON (" expression_list_append(sql, distinct) sql << ')' end end end # Modify the sql to add a dataset to the via an EXCEPT, INTERSECT, or UNION clause. # This uses a subselect for the compound datasets used, because using parantheses doesn't # work on all databases. def select_compounds_sql(sql) return unless c = @opts[:compounds] c.each do |type, dataset, all| sql << ' ' << type.to_s.upcase sql << ' ALL' if all sql << ' ' compound_dataset_sql_append(sql, dataset) end end def select_from_sql(sql) if f = @opts[:from] sql << ' FROM ' source_list_append(sql, f) elsif f = empty_from_sql sql << f end end def select_group_sql(sql) if group = @opts[:group] sql << " GROUP BY " if go = @opts[:group_options] if go == :"grouping sets" sql << go.to_s.upcase << '(' grouping_element_list_append(sql, group) sql << ')' elsif uses_with_rollup? expression_list_append(sql, group) sql << " WITH " << go.to_s.upcase else sql << go.to_s.upcase << '(' expression_list_append(sql, group) sql << ')' end else expression_list_append(sql, group) end end end def select_having_sql(sql) if having = @opts[:having] sql << " HAVING " literal_append(sql, having) end end def select_join_sql(sql) if js = @opts[:join] js.each{|j| literal_append(sql, j)} end end def select_limit_sql(sql) if l = @opts[:limit] sql << " LIMIT " literal_append(sql, l) if o = @opts[:offset] sql << " OFFSET " literal_append(sql, o) end elsif @opts[:offset] select_only_offset_sql(sql) end end def select_lock_sql(sql) case l = @opts[:lock] when :update sql << ' FOR UPDATE' when String sql << ' ' << l end end # Used only if there is an offset and no limit, making it easier to override # in the adapter, as many databases do not support just a plain offset with # no limit. def select_only_offset_sql(sql) sql << " OFFSET " literal_append(sql, @opts[:offset]) end def select_order_sql(sql) if o = @opts[:order] sql << " ORDER BY " expression_list_append(sql, o) end end alias delete_order_sql select_order_sql alias update_order_sql select_order_sql def select_select_sql(sql) sql << 'SELECT' end def select_where_sql(sql) if w = @opts[:where] sql << " WHERE " literal_append(sql, w) end end alias delete_where_sql select_where_sql alias update_where_sql select_where_sql def select_window_sql(sql) if ws = @opts[:window] sql << " WINDOW " c = false co = ', ' as = ' AS ' ws.map do |name, window| sql << co if c literal_append(sql, name) sql << as literal_append(sql, window) c ||= true end end end def select_with_sql(sql) return unless supports_cte? ctes = opts[:with] return if !ctes || ctes.empty? sql << select_with_sql_base c = false comma = ', ' ctes.each do |cte| sql << comma if c select_with_sql_cte(sql, cte) c ||= true end sql << ' ' end alias delete_with_sql select_with_sql alias insert_with_sql select_with_sql alias update_with_sql select_with_sql def select_with_sql_base "WITH " end def select_with_sql_cte(sql, cte) select_with_sql_prefix(sql, cte) literal_dataset_append(sql, cte[:dataset]) end def select_with_sql_prefix(sql, w) quote_identifier_append(sql, w[:name]) if args = w[:args] sql << '(' identifier_list_append(sql, args) sql << ')' end sql << ' AS ' case w[:materialized] when true sql << "MATERIALIZED " when false sql << "NOT MATERIALIZED " end end # Whether the symbol cache should be skipped when literalizing the dataset def skip_symbol_cache? @opts[:skip_symbol_cache] end # Append literalization of array of sources/tables to SQL string, raising an Error if there # are no sources. 
def source_list_append(sql, sources) raise(Error, 'No source specified for query') if sources.nil? || sources == [] identifier_list_append(sql, sources) end # Delegate to Sequel.split_symbol. def split_symbol(sym) Sequel.split_symbol(sym) end # The string that is appended to to create the SQL query, the empty # string by default. def sql_string_origin String.new end # The precision to use for SQLTime instances (time column values without dates). # Defaults to timestamp_precision. def sqltime_precision timestamp_precision end # SQL to use if this dataset uses static SQL. Since static SQL # can be a PlaceholderLiteralString in addition to a String, # we literalize nonstrings. If there is an append_sql for this # dataset, append to that SQL instead of returning the value. def static_sql(sql) if append_sql = @opts[:append_sql] if sql.is_a?(String) append_sql << sql else literal_append(append_sql, sql) end else if sql.is_a?(String) sql else literal(sql) end end end # Append literalization of the subselect to SQL string. def subselect_sql_append(sql, ds) sds = subselect_sql_dataset(sql, ds) subselect_sql_append_sql(sql, sds) unless sds.send(:cache_sql?) # If subquery dataset does not allow caching SQL, # then this dataset should not allow caching SQL. disable_sql_caching! end end def subselect_sql_dataset(sql, ds) ds.clone(:append_sql=>sql) end def subselect_sql_append_sql(sql, ds) ds.sql end # The number of decimal digits of precision to use in timestamps. def timestamp_precision supports_timestamp_usecs? ? 6 : 0 end def update_table_sql(sql) sql << ' ' source_list_append(sql, @opts[:from]) select_join_sql(sql) if supports_modifying_joins? end def update_set_sql(sql) sql << ' SET ' values = @opts[:values] if values.is_a?(Hash) update_sql_values_hash(sql, values) else sql << values end end def update_sql_values_hash(sql, values) c = false eq = ' = ' values.each do |k, v| sql << ', ' if c if k.is_a?(String) && !k.is_a?(LiteralString) quote_identifier_append(sql, k) else literal_append(sql, k) end sql << eq literal_append(sql, v) c ||= true end end def update_update_sql(sql) sql << 'UPDATE' end def window_frame_boundary_sql_append(sql, boundary, direction) case boundary when :current sql << "CURRENT ROW" when :preceding sql << "UNBOUNDED PRECEDING" when :following sql << "UNBOUNDED FOLLOWING" else if boundary.is_a?(Array) offset, direction = boundary unless boundary.length == 2 && (direction == :preceding || direction == :following) raise Error, "invalid window :frame boundary (:start or :end) option: #{boundary.inspect}" end else offset = boundary end case offset when Numeric, String, SQL::Cast # nothing else raise Error, "invalid window :frame boundary (:start or :end) option: #{boundary.inspect}" end literal_append(sql, offset) sql << (direction == :preceding ? " PRECEDING" : " FOLLOWING") end end end end sequel-5.63.0/lib/sequel/deprecated.rb000066400000000000000000000057201434214120600175750ustar00rootroot00000000000000# frozen-string-literal: true module Sequel # This module makes it easy to print deprecation warnings with optional backtraces to a given stream. 
# There are a few accessors you can use to change how/where the deprecation messages are printed # and whether/how backtraces should be included: # # Sequel::Deprecation.output = $stderr # print deprecation messages to standard error (default) # Sequel::Deprecation.output = File.open('deprecated_calls.txt', 'wb') # use a file instead # Sequel::Deprecation.output = false # do not output deprecation messages # # Sequel::Deprecation.prefix = "SEQUEL DEPRECATION WARNING: " # prefix deprecation messages with a given string (default) # Sequel::Deprecation.prefix = false # do not prefix deprecation messages # # Sequel::Deprecation.backtrace_filter = false # don't include backtraces # Sequel::Deprecation.backtrace_filter = true # include full backtraces # Sequel::Deprecation.backtrace_filter = 10 # include 10 backtrace lines (default) # Sequel::Deprecation.backtrace_filter = 1 # include 1 backtrace line # Sequel::Deprecation.backtrace_filter = lambda{|line, line_no| line_no < 3 || line =~ /my_app/} # select backtrace lines to output module Deprecation @backtrace_filter = 10 @output = $stderr @prefix = "SEQUEL DEPRECATION WARNING: ".freeze class << self # How to filter backtraces. +false+ does not include backtraces, +true+ includes # full backtraces, an Integer includes that number of backtrace lines, and # a proc is called with the backtrace line and line number to select the backtrace # lines to include. The default is 10 backtrace lines. attr_accessor :backtrace_filter # Where deprecation messages should be output, must respond to puts. $stderr by default. attr_accessor :output # The prefix to use for deprecation messages ("SEQUEL DEPRECATION WARNING: " by default). attr_accessor :prefix end # Print the message and possibly backtrace to the output. def self.deprecate(method, instead=nil) return unless output message = instead ? "#{method} is deprecated and will be removed in Sequel 6. #{instead}." : method message = "#{prefix}#{message}" if prefix output.puts(message) case b = backtrace_filter when Integer caller.each do |c| b -= 1 output.puts(c) break if b <= 0 end when true caller.each{|c| output.puts(c)} when Proc caller.each_with_index{|line, line_no| output.puts(line) if b.call(line, line_no)} end nil end # If using ruby 2.3+, use Module#deprecate_constant to deprecate the constant, # otherwise do nothing as the ruby implementation does not support constant deprecation. def self.deprecate_constant(mod, constant) # :nocov: if RUBY_VERSION > '2.3' # :nocov: mod.deprecate_constant(constant) end end end end sequel-5.63.0/lib/sequel/exceptions.rb000066400000000000000000000075611434214120600176610ustar00rootroot00000000000000# frozen-string-literal: true module Sequel # The default exception class for exceptions raised by Sequel. # All exception classes defined by Sequel are descendants of this class. class Error < ::StandardError # If this exception wraps an underlying exception, the underlying # exception is held here. attr_accessor :wrapped_exception # :nocov: if RUBY_VERSION >= '2.1' # :nocov: # Return the wrapped exception if one exists, otherwise use # ruby's default behavior. def cause wrapped_exception || super end end end ( # Error raised when the adapter requested doesn't exist or can't be loaded. AdapterNotFound = Class.new(Error) ).name ( # Generic error raised by the database adapters, indicating a # problem originating from the database server. Usually raised # because incorrect SQL syntax is used.
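# A hedged usage sketch (the invalid SQL here is purely illustrative):
#
#   begin
#     DB.run("SELECT * FROM")
#   rescue Sequel::DatabaseError => e
#     e.cause # the underlying driver exception, when one was wrapped
#   end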
DatabaseError = Class.new(Error) ).name ( # Error raised when the Sequel is unable to connect to the database with the # connection parameters it was given. DatabaseConnectionError = Class.new(DatabaseError) ).name ( # Error raised by adapters when they determine that the connection # to the database has been lost. Instructs the connection pool code to # remove that connection from the pool so that other connections can be acquired # automatically. DatabaseDisconnectError = Class.new(DatabaseError) ).name ( # Generic error raised when Sequel determines a database constraint has been violated. ConstraintViolation = Class.new(DatabaseError) ).name ( # Error raised when Sequel determines a database check constraint has been violated. CheckConstraintViolation = Class.new(ConstraintViolation) ).name ( # Error raised when Sequel determines a database foreign key constraint has been violated. ForeignKeyConstraintViolation = Class.new(ConstraintViolation) ).name ( # Error raised when Sequel determines a database NOT NULL constraint has been violated. NotNullConstraintViolation = Class.new(ConstraintViolation) ).name ( # Error raised when Sequel determines a database unique constraint has been violated. UniqueConstraintViolation = Class.new(ConstraintViolation) ).name ( # Error raised when Sequel determines a serialization failure/deadlock in the database. SerializationFailure = Class.new(DatabaseError) ).name ( # Error raised when Sequel determines the database could not acquire a necessary lock # before timing out. Use of Dataset#nowait can often cause this exception when # retrieving rows. DatabaseLockTimeout = Class.new(DatabaseError) ).name ( # Error raised on an invalid operation, such as trying to update or delete # a joined or grouped dataset when the database does not support that. InvalidOperation = Class.new(Error) ).name ( # Error raised when attempting an invalid type conversion. InvalidValue = Class.new(Error) ).name # Error raised when the user requests a record via the first! or similar # method, and the dataset does not yield any rows. class NoMatchingRow < Error # The dataset that raised this NoMatchingRow exception. attr_accessor :dataset # If the first argument is a Sequel::Dataset, set the dataset related to # the exception to that argument, instead of assuming it is the exception message. def initialize(msg=nil) if msg.is_a?(Sequel::Dataset) @dataset = msg msg = nil end super end end ( # Error raised when the connection pool cannot acquire a database connection # before the timeout. PoolTimeout = Class.new(Error) ).name ( # Error that you should raise to signal a rollback of the current transaction. # The transaction block will catch this exception, rollback the current transaction, # and won't reraise it (unless a reraise is requested). Rollback = Class.new(Error) ).name end sequel-5.63.0/lib/sequel/extensions/000077500000000000000000000000001434214120600173435ustar00rootroot00000000000000sequel-5.63.0/lib/sequel/extensions/_model_constraint_validations.rb000066400000000000000000000007731434214120600257770ustar00rootroot00000000000000# frozen-string-literal: true module Sequel module Plugins module ConstraintValidations module DatabaseMethods # A hash of validation method call metadata for all tables in the database. # The hash is keyed by table name string and contains arrays of validation # method call arrays. 
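# For illustration only, an entry might look roughly like this (hypothetical
# shape and contents):
#
#   {"items" => [[:validates_presence, :name, {}]]}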
attr_accessor :constraint_validations end end end Database.register_extension(:_model_constraint_validations, Plugins::ConstraintValidations::DatabaseMethods) end sequel-5.63.0/lib/sequel/extensions/_model_pg_row.rb000066400000000000000000000014111434214120600225010ustar00rootroot00000000000000# frozen-string-literal: true module Sequel module Plugins module PgRow module DatabaseMethods # Handle Sequel::Model instances in bound variables. def bound_variable_arg(arg, conn) case arg when Sequel::Model "(#{arg.values.values_at(*arg.columns).map{|v| bound_variable_array(v)}.join(',')})" else super end end # If a Sequel::Model instance is given, return it as-is # instead of attempting to convert it. def row_type(db_type, v) if v.is_a?(Sequel::Model) v else super end end end end end Database.register_extension(:_model_pg_row, Plugins::PgRow::DatabaseMethods) end sequel-5.63.0/lib/sequel/extensions/_pretty_table.rb000066400000000000000000000044461434214120600225350ustar00rootroot00000000000000# frozen-string-literal: true # # This _pretty_table extension is only for internal use. # It adds the Sequel::PrettyTable class without modifying # Sequel::Dataset. # # To load the extension: # # Sequel.extension :_pretty_table # # Related module: Sequel::PrettyTable # module Sequel module PrettyTable # Prints nice-looking plain-text tables via puts # # +--+-------+ # |id|name | # |--+-------| # |1 |fasdfas| # |2 |test | # +--+-------+ def self.print(records, columns=nil) puts string(records, columns) end # Return the string that #print will print via puts. def self.string(records, columns = nil) # records is an array of hashes columns ||= records.first.keys.sort sizes = column_sizes(records, columns) sep_line = separator_line(columns, sizes) array = [sep_line, header_line(columns, sizes), sep_line] records.each {|r| array << data_line(columns, sizes, r)} array << sep_line array.join("\n") end # Hash of the maximum size of the value for each column def self.column_sizes(records, columns) # :nodoc: sizes = Hash.new(0) columns.each do |c| sizes[c] = c.to_s.size end records.each do |r| columns.each do |c| s = r[c].to_s.size sizes[c] = s if s > sizes[c] end end sizes end # String for each data line def self.data_line(columns, sizes, record) # :nodoc: String.new << '|' << columns.map {|c| format_cell(sizes[c], record[c])}.join('|') << '|' end # Format the value so it takes up exactly size characters def self.format_cell(size, v) # :nodoc: case v when Integer "%#{size}d" % v when Float, BigDecimal "%#{size}g" % v else "%-#{size}s" % v.to_s end end # String for header line def self.header_line(columns, sizes) # :nodoc: String.new << '|' << columns.map {|c| "%-#{sizes[c]}s" % c.to_s}.join('|') << '|' end # String for separtor line def self.separator_line(columns, sizes) # :nodoc: String.new << '+' << columns.map {|c| '-' * sizes[c]}.join('+') << '+' end private_class_method :column_sizes, :data_line, :format_cell, :header_line, :separator_line end end sequel-5.63.0/lib/sequel/extensions/any_not_empty.rb000066400000000000000000000023701434214120600225570ustar00rootroot00000000000000# frozen-string-literal: true # # The any_not_empty extension changes the behavior of Dataset#any? # if called without a block. By default, this method uses the # standard Enumerable behavior of enumerating results and seeing # if any result is not false or nil. With this extension, it # just checks whether the dataset is empty. This approach can # be much faster if the dataset is currently large. # # DB[:table].any? 
# # SELECT * FROM table # # DB[:table].extension(:any_not_empty).any? # # SELECT 1 as one FROM table LIMIT 1 # # You can load this extension into specific datasets: # # ds = DB[:table] # ds = ds.extension(:any_not_empty) # # Or you can load it into all of a database's datasets, which # is probably the desired behavior if you are using this extension: # # DB.extension(:any_not_empty) # # Note that this can result in any? returning a different result if # the dataset has a row_proc that can return false or nil. # # Related module: Sequel::AnyNotEmpty # module Sequel module AnyNotEmpty # If a block is not given, return whether the dataset is not empty. def any? if defined?(yield) super else !empty? end end end Dataset.register_extension(:any_not_empty, AnyNotEmpty) end sequel-5.63.0/lib/sequel/extensions/arbitrary_servers.rb000066400000000000000000000063151434214120600234450ustar00rootroot00000000000000# frozen-string-literal: true # # The arbitrary_servers extension allows you to connect to arbitrary # servers/shards that were not defined when you created the database. # To use it, you first load the extension into the Database object: # # DB.extension :arbitrary_servers # # Then you can pass arbitrary connection options for the server/shard # to use as a hash: # # DB[:table].server(host: '...', database: '...').all # # Because Sequel can never be sure that the connection will be reused, # arbitrary connections are disconnected as soon as the outermost block # that uses them exits. So this example uses the same connection: # # DB.transaction(server: {host: '...', database: '...'}) do |c| # DB.transaction(server: {host: '...', database: '...'}) do |c2| # # c == c2 # end # end # # But this example does not: # # DB.transaction(server: {host: '...', database: '...'}) do |c| # end # DB.transaction(server: {host: '...', database: '...'}) do |c2| # # c != c2 # end # # You can use this extension in conjunction with the server_block # extension: # # DB.with_server(host: '...', database: '...') do # DB.synchronize do # # All of these use the host/database given to with_server # DB[:table].insert(c: 1) # DB[:table].update(c: 2) # DB.tables # DB[:table].all # end # end # # Anyone using this extension in conjunction with the server_block # extension may want to do the following to so that you don't need # to call synchronize separately: # # def DB.with_server(*a) # super(*a){synchronize{yield}} # end # # Note that this extension only works with the sharded threaded connection # pool. If you are using the sharded single connection pool, you need # to switch to the sharded threaded connection pool before using this # extension. # # Related module: Sequel::ArbitraryServers # module Sequel module ArbitraryServers private # If server is a hash, create a new connection for # it, and cache it first by thread and then server. def acquire(thread, server) if server.is_a?(Hash) sync{@allocated[thread] ||= {}}[server] = make_new(server) else super end end # If server is a hash, the entry for it probably doesn't # exist in the @allocated hash, so check for existence to # avoid calling nil.[] def owned_connection(thread, server) if server.is_a?(Hash) if a = sync{@allocated[thread]} a[server] end else super end end # If server is a hash, return it directly. def pick_server(server) if server.is_a?(Hash) server else super end end # If server is a hash, delete the thread from the allocated # connections for that server. 
Additionally, if this was the last thread # using that server, delete the server from the @allocated hash. def release(thread, conn, server) if server.is_a?(Hash) a = @allocated[thread] a.delete(server) @allocated.delete(thread) if a.empty? disconnect_connection(conn) else super end end end Database.register_extension(:arbitrary_servers){|db| db.pool.extend(ArbitraryServers)} end sequel-5.63.0/lib/sequel/extensions/async_thread_pool.rb000066400000000000000000000374421434214120600233770ustar00rootroot00000000000000# frozen-string-literal: true # # The async_thread_pool extension adds support for running database # queries in separate threads using a thread pool. With the following # code # # DB.extension :async_thread_pool # foos = DB[:foos].async.where(name: 'A'..'M').all # bar_names = DB[:bar].async.select_order_map(:name) # baz_1 = DB[:bazes].async.first(id: 1) # # All 3 queries will be run in separate threads. +foos+, +bar_names+ # and +baz_1+ will be proxy objects. Calling a method on the proxy # object will wait for the query to be run, and will return the result # of calling that method on the result of the query method. For example, # if you run: # # foos = DB[:foos].async.where(name: 'A'..'M').all # bar_names = DB[:bars].async.select_order_map(:name) # baz_1 = DB[:bazes].async.first(id: 1) # sleep(1) # foos.size # bar_names.first # baz_1.name # # These three queries will generally be run concurrently in separate # threads. If you instead run: # # DB[:foos].async.where(name: 'A'..'M').all.size # DB[:bars].async.select_order_map(:name).first # DB[:bazes].async.first(id: 1).name # # This will run each query sequentially, since you need the result of # one query before running the next query. The queries will still be # run in separate threads (by default). # # What is run in the separate thread is the entire method call that # returns results. So with the original example: # # foos = DB[:foos].async.where(name: 'A'..'M').all # bar_names = DB[:bars].async.select_order_map(:name) # baz_1 = DB[:bazes].async.first(id: 1) # # The +all+, select_order_map(:name), and first(id: 1) # calls are run in separate threads. If a block is passed to a method # such as +all+ or +each+, the block is also run in that thread. If you # have code such as: # # h = {} # DB[:foos].async.each{|row| h[row[:id]] = row} # bar_names = DB[:bars].async.select_order_map(:name) # p h # # You may end up with it printing an empty hash or partial hash, because the # async +each+ call will not have run or finished running. Since the # p h code relies on a side-effect of the +each+ block and not the # return value of the +each+ call, it will not wait for the loading. # # You should avoid using +async+ for any queries where you are ignoring the # return value, as otherwise you have no way to wait for the query to be run. # # Datasets that use async will use async threads to load data for the majority # of methods that can return data. However, dataset methods that return # enumerators will not use an async thread (e.g. calling # Dataset#map # without a block or arguments does not use an async thread or return a # proxy object). # # Because async methods (including their blocks) run in a separate thread, you # should not use control flow modifiers such as +return+ or +break+ in async # queries. Doing so will result in an error.
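#
# For example, a rough sketch (not from the original documentation) of fanning
# out several queries and then waiting on all of them, where +itself+ resolves
# each proxy as discussed below:
#
#   proxies = [DB[:a].async.all, DB[:b].async.all, DB[:c].async.all]
#   results = proxies.map(&:itself) # blocks until every query has finished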
# # Because async results are returned as proxy objects, it's a bad idea # to use them in a boolean setting: # # result = DB[:foo].async.get(:boolean_column) # # or: # result = DB[:foo].async.first # # # ... # if result # # will always execute this branch, since result is a proxy object # end # # In this case, you can call the +__value+ method to return the actual # result: # # if result.__value # # will not execute this branch if the dataset method returned nil or false # end # # Similarly, because a proxy object is used, you should be careful using the # result in a case statement or an argument to Class#===: # # # ... # case result # when Hash, true, false # # will never take this branch, since result is a proxy object # end # # Similar to usage in an +if+ statement, you should use +__value+: # # case result.__value # when Hash, true, false # # can take this branch, since result.__value returns the actual result # end # # On Ruby 2.2+, you can use +itself+ instead of +__value+. It's preferable to # use +itself+ if you can, as that will allow code to work with both proxy # objects and regular objects. # # Because separate threads and connections are used for async queries, # they do not use any state on the current connection/thread. So if # you do: # # DB.transaction{DB[:table].async.all} # # Be aware that the transaction runs on one connection, and the SELECT # query on a different connection. If you are currently using # transactional testing (running each test inside a transaction/savepoint), # and want to start using this extension, you should first switch to # non-transactional testing of the code that will use the async thread # pool before using this extension, as otherwise the use of # Dataset#async will likely break your tests. # # If you are using Database#synchronize to checkout a connection, the # same issue applies, where the async query runs on a different # connection: # # DB.synchronize{DB[:table].async.all} # # Similarly, if you are using the server_block extension, any async # queries inside with_server blocks will not use the server specified: # # DB.with_server(:shard1) do # DB[:a].all # Uses shard1 # DB[:a].async.all # Uses default shard # end # # You need to manually specify the shard for any dataset using an async # query: # # DB.with_server(:shard1) do # DB[:a].all # Uses shard1 # DB[:a].async.server(:shard1).all # Uses shard1 # end # # When using the async_thread_pool extension, the size of the async thread pool # can be set by using the +:num_async_threads+ Database option, which must # be set before loading the async_thread_pool extension. This defaults # to the size of the Database object's connection pool. # # By default, for consistent behavior, the async_thread_pool extension # will always run the query in a separate thread. However, in some cases, # such as when the async thread pool is busy and the results of a query # are needed right away, it can improve performance to allow preemption, # so that the query will run in the current thread instead of waiting # for an async thread to become available. With the following code: # # foos = DB[:foos].async.where(name: 'A'..'M').all # bar_names = DB[:bar].async.select_order_map(:name) # if foos.length > 4 # baz_1 = DB[:bazes].async.first(id: 1) # end # # Whether you need the +baz_1+ variable depends on the value of foos.
# If the async thread pool is busy, and by the time the +foos.length+ # call is made, the async thread pool has not started the processing # to get the +foos+ value, it can improve performance to start that # processing in the current thread, since it is needed immediately to # determine whether to schedule the query to get the +baz_1+ variable. # The default is to not allow preemption, because if the current # thread is used, it may have already checked out a connection that # could be used, and that connection could be inside a transaction or # have some other manner of connection-specific state applied to it. # If you want to allow preemption, you can set the # +:preempt_async_thread+ Database option before loading the # async_thread_pool extension. # # Related module: Sequel::Database::AsyncThreadPool::DatasetMethods # module Sequel module Database::AsyncThreadPool # JobProcessor is a wrapper around a single thread that will # process a queue of jobs until it is shut down. class JobProcessor # :nodoc: def self.create_finalizer(queue, pool) proc{run_finalizer(queue, pool)} end def self.run_finalizer(queue, pool) # Push a nil for each thread using the queue, signalling # that thread to close. pool.each{queue.push(nil)} # Join each of the closed threads. pool.each(&:join) # Clear the thread pool. Probably not necessary, but this allows # for a simple way to check whether this finalizer has been run. pool.clear nil end private_class_method :run_finalizer def initialize(queue) @thread = ::Thread.new do while proxy = queue.pop proxy.__send__(:__run) end end end # Join the thread, should only be called by the related finalizer. def join @thread.join end end # Wrapper for exception instances raised by async jobs. The # wrapped exception will be raised by the code getting the value # of the job. WrappedException = Struct.new(:exception) # Base proxy object class for jobs processed by async threads and # the returned result. class BaseProxy < BasicObject # Store a block that returns the result when called. def initialize(&block) ::Kernel.raise Error, "must provide block for an async job" unless block @block = block end # Pass all method calls to the returned result. def method_missing(*args, &block) __value.public_send(*args, &block) end # :nocov: ruby2_keywords(:method_missing) if respond_to?(:ruby2_keywords, true) # :nocov: # Delegate respond_to? calls to the returned result. def respond_to_missing?(*args) __value.respond_to?(*args) end # Override some methods defined by default so they apply to the # returned result and not the current object. [:!, :==, :!=, :instance_eval, :instance_exec].each do |method| define_method(method) do |*args, &block| __value.public_send(method, *args, &block) end end # Wait for the value to be loaded if it hasn't already been loaded. # If the code to load the return value raised an exception that was # wrapped, reraise the exception. def __value unless defined?(@value) __get_value end if @value.is_a?(WrappedException) ::Kernel.raise @value end @value end private # Run the block and return the block value. If the block call raises # an exception, wrap the exception. def __run_block # This may not catch concurrent calls (unless surrounded by a mutex), but # it's not worth trying to protect against that. It's enough to just check for # multiple non-concurrent calls.
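# The guard on the next line captures @block locally; @block is then set
# to nil, which both marks the job as already run and releases the
# closure for garbage collection.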
::Kernel.raise Error, "Cannot run async block multiple times" unless block = @block @block = nil begin block.call rescue ::Exception => e WrappedException.new(e) end end end # Default object class for async job/proxy result. This uses a queue for # synchronization. The JobProcessor will push a result onto the queue, # and the code to get the value will pop the result from that queue (and # repush the result to handle thread safety). class Proxy < BaseProxy def initialize super @queue = ::Queue.new end private def __run @queue.push(__run_block) end def __get_value @value = @queue.pop # Handle thread-safety by repushing the popped value, so that # concurrent calls will receive the same value @queue.push(@value) end end # Object class for async job/proxy result when the :preempt_async_thread # Database option is used. Uses a mutex for synchronization, and either # the JobProcessor or the calling thread can run code to get the value. class PreemptableProxy < BaseProxy def initialize super @mutex = ::Mutex.new end private def __get_value @mutex.synchronize do unless defined?(@value) @value = __run_block end end end alias __run __get_value end module DatabaseMethods def self.extended(db) db.instance_exec do unless pool.pool_type == :threaded || pool.pool_type == :sharded_threaded raise Error, "can only load async_thread_pool extension if using threaded or sharded_threaded connection pool" end num_async_threads = opts[:num_async_threads] ? typecast_value_integer(opts[:num_async_threads]) : (Integer(opts[:max_connections] || 4)) raise Error, "must have positive number for num_async_threads" if num_async_threads <= 0 proxy_klass = typecast_value_boolean(opts[:preempt_async_thread]) ? PreemptableProxy : Proxy define_singleton_method(:async_job_class){proxy_klass} queue = @async_thread_queue = Queue.new pool = @async_thread_pool = num_async_threads.times.map{JobProcessor.new(queue)} ObjectSpace.define_finalizer(db, JobProcessor.create_finalizer(queue, pool)) extend_datasets(DatasetMethods) end end private # Wrap the block in a job/proxy object and schedule it to run using the async thread pool. def async_run(&block) proxy = async_job_class.new(&block) @async_thread_queue.push(proxy) proxy end end ASYNC_METHODS = ([:all?, :any?, :drop, :entries, :grep_v, :include?, :inject, :member?, :minmax, :none?, :one?, :reduce, :sort, :take, :tally, :to_a, :to_h, :uniq, :zip] & Enumerable.instance_methods) + (Dataset::ACTION_METHODS - [:map, :paged_each]) ASYNC_BLOCK_METHODS = ([:collect, :collect_concat, :detect, :drop_while, :each_cons, :each_entry, :each_slice, :each_with_index, :each_with_object, :filter_map, :find, :find_all, :find_index, :flat_map, :max_by, :min_by, :minmax_by, :partition, :reject, :reverse_each, :sort_by, :take_while] & Enumerable.instance_methods) + [:paged_each] ASYNC_ARGS_OR_BLOCK_METHODS = [:map] module DatasetMethods # Define a method in the given module that will run the given method using an async thread # if the current dataset is async. def self.define_async_method(mod, method) mod.send(:define_method, method) do |*args, &block| if @opts[:async] ds = sync db.send(:async_run){ds.send(method, *args, &block)} else super(*args, &block) end end end # Define a method in the given module that will run the given method using an async thread # if the current dataset is async and a block is provided.
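# Roughly, a method generated by these helpers behaves like this hand-written
# sketch (illustrative only, using +each+ as the example method):
#
#   def each(*args, &block)
#     if block && @opts[:async]
#       ds = sync
#       db.send(:async_run){ds.each(*args, &block)}
#     else
#       super
#     end
#   end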
def self.define_async_block_method(mod, method) mod.send(:define_method, method) do |*args, &block| if block && @opts[:async] ds = sync db.send(:async_run){ds.send(method, *args, &block)} else super(*args, &block) end end end # Define an method in the given module that will run the given method using an async thread # if the current dataset is async and arguments or a block is provided. def self.define_async_args_or_block_method(mod, method) mod.send(:define_method, method) do |*args, &block| if (block || !args.empty?) && @opts[:async] ds = sync db.send(:async_run){ds.send(method, *args, &block)} else super(*args, &block) end end end # Override all of the methods that return results to do the processing in an async thread # if they have been marked to run async and should run async (i.e. they don't return an # Enumerator). ASYNC_METHODS.each{|m| define_async_method(self, m)} ASYNC_BLOCK_METHODS.each{|m| define_async_block_method(self, m)} ASYNC_ARGS_OR_BLOCK_METHODS.each{|m| define_async_args_or_block_method(self, m)} # Return a cloned dataset that will load results using the async thread pool. def async cached_dataset(:_async) do clone(:async=>true) end end # Return a cloned dataset that will not load results using the async thread pool. # Only used if the current dataset has been marked as using the async thread pool. def sync cached_dataset(:_sync) do clone(:async=>false) end end end end Database.register_extension(:async_thread_pool, Database::AsyncThreadPool::DatabaseMethods) end sequel-5.63.0/lib/sequel/extensions/auto_literal_strings.rb000066400000000000000000000042141434214120600241260ustar00rootroot00000000000000# frozen-string-literal: true # # The auto_literal_strings extension treats string values passed as filter # arguments as SQL query fragments. This is the behavior of previous # versions of Sequel. Using this extension makes using raw SQL fragments # easier, since you don't need to wrap them with Sequel.lit, but also makes # it easier to introduce SQL injection vulnerabilities into the application. # It is only recommended to use this extension for # backwards compatibility with previous versions of Sequel. # # With this extension, if a single string is given, it is used as an SQL # query fragment: # # ds = DB[:table].extension(:auto_literal_strings) # ds.where("name > 'A'") # # SELECT * FROM table WHERE (name > 'A') # # If additional arguments are given, they are used as placeholders: # # ds.where("name > ?", "A") # # SELECT * FROM table WHERE (name > 'A') # # Named placeholders can also be used with a hash: # # ds.where("name > :a", a: "A") # # SELECT * FROM table WHERE (name > 'A') # # This extension also allows the use of a plain string passed to Dataset#update: # # ds.update("column = column + 1") # # UPDATE table SET column = column + 1 # # Related module: Sequel::Dataset::AutoLiteralStrings # module Sequel class Dataset module AutoLiteralStrings # Treat plain strings as literal strings, and arrays where the first element # is a string as a literal string with placeholders. def filter_expr(expr = nil) case expr when LiteralString super when String super(LiteralString.new(expr)) when Array if (sexpr = expr.first).is_a?(String) super(SQL::PlaceholderLiteralString.new(sexpr, expr[1..-1], true)) else super end else super end end # Treat plain strings as literal strings. 
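# For example (a sketch mirroring the extension documentation above):
#
#   ds.update_sql("a = a + 1")
#   # UPDATE table SET a = a + 1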
def update_sql(values=OPTS) case values when LiteralString super when String super(LiteralString.new(values)) else super end end end register_extension(:auto_literal_strings, AutoLiteralStrings) end end sequel-5.63.0/lib/sequel/extensions/blank.rb000066400000000000000000000016331434214120600207620ustar00rootroot00000000000000# frozen-string-literal: true # # The blank extension adds the blank? method to all objects (e.g. Object#blank?). # # To load the extension: # # Sequel.extension :blank [FalseClass, Object, NilClass, Numeric, String, TrueClass].each do |klass| # :nocov: if klass.method_defined?(:blank?) klass.send(:alias_method, :blank?, :blank?) end # :nocov: end class FalseClass # false is always blank def blank? true end end class Object # Objects are blank if they respond true to empty? def blank? respond_to?(:empty?) && empty? end end class NilClass # nil is always blank def blank? true end end class Numeric # Numerics are never blank (not even 0) def blank? false end end class String # Strings are blank if they are empty or include only whitespace def blank? strip.empty? end end class TrueClass # true is never blank def blank? false end end sequel-5.63.0/lib/sequel/extensions/caller_logging.rb000066400000000000000000000045121434214120600226420ustar00rootroot00000000000000# frozen-string-literal: true # # The caller_logging extension includes caller information before # query logging, showing which code caused the query. It skips # internal Sequel code, showing the first non-Sequel caller line. # # DB.extension :caller_logging # DB[:table].first # # Logger: # # (0.000041s) (source: /path/to/app/foo/t.rb:12 in `get_first`) SELECT * FROM table LIMIT 1 # # You can further filter the caller lines by setting # Database#caller_logging_ignore to a regexp of additional # caller lines to ignore. This is useful if you have specific # methods or internal extensions/plugins that you would also # like to ignore as they obscure the code actually making the # request. # # DB.caller_logging_ignore = %r{/path/to/app/lib/plugins} # # You can also format the caller before it is placed in the logger, # using +caller_logging_formatter+: # # DB.caller_logging_formatter = lambda do |caller| # "(#{caller.sub(/\A\/path\/to\/app\//, '')})" # end # DB[:table].first # # Logger: # # (0.000041s) (foo/t.rb:12 in `get_first`) SELECT * FROM table LIMIT 1 # # Related module: Sequel::CallerLogging require 'rbconfig' # module Sequel module CallerLogging SEQUEL_LIB_PATH = (File.expand_path('../../..', __FILE__) + '/').freeze # A regexp of caller lines to ignore, in addition to internal Sequel and Ruby code. attr_accessor :caller_logging_ignore # A callable to format the external caller attr_accessor :caller_logging_formatter # Include caller information when logging query. def log_connection_yield(sql, conn, args=nil) if !@loggers.empty? && (external_caller = external_caller_for_log) sql = "#{external_caller} #{sql}" end super end private # The caller to log, ignoring internal Sequel and Ruby code, and user specified # lines to ignore. 
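# When a non-Sequel caller is found, the returned value looks roughly like
# this (hypothetical path):
#
#   "(source: /path/to/app/foo/t.rb:12 in `get_first`)"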
def external_caller_for_log ignore = caller_logging_ignore c = caller.find do |line| !(line.start_with?(SEQUEL_LIB_PATH) || line.start_with?(RbConfig::CONFIG["rubylibdir"]) || (ignore && line =~ ignore)) end if c c = if formatter = caller_logging_formatter formatter.call(c) else "(source: #{c})" end end c end end Database.register_extension(:caller_logging, CallerLogging) end sequel-5.63.0/lib/sequel/extensions/columns_introspection.rb000066400000000000000000000051371434214120600243360ustar00rootroot00000000000000# frozen-string-literal: true # # The columns_introspection extension attempts to introspect the # selected columns for a dataset before issuing a query. If it # thinks it can guess correctly at the columns the query will use, # it will return the columns without issuing a database query. # # This method is not fool-proof, it's possible that some databases # will use column names that Sequel does not expect. Also, it # may not correctly handle all cases. # # To attempt to introspect columns for a single dataset: # # ds = ds.extension(:columns_introspection) # # To attempt to introspect columns for all datasets on a single database: # # DB.extension(:columns_introspection) # # Related module: Sequel::ColumnsIntrospection # module Sequel module ColumnsIntrospection # Attempt to guess the columns that will be returned # if there are columns selected, in order to skip a database # query to retrieve the columns. This should work with # Symbols, SQL::Identifiers, SQL::QualifiedIdentifiers, and # SQL::AliasedExpressions. def columns if cols = _columns return cols end if (pcs = probable_columns) && pcs.all? self.columns = pcs else super end end protected # Return an array of probable column names for the dataset, or # nil if it is not possible to determine that through # introspection. def probable_columns if (cols = opts[:select]) && !cols.empty? cols.map{|c| probable_column_name(c)} elsif !opts[:join] && !opts[:with] && (from = opts[:from]) && from.length == 1 && (from = from.first) if from.is_a?(SQL::AliasedExpression) from = from.expression end case from when Dataset from.probable_columns when Symbol, SQL::Identifier, SQL::QualifiedIdentifier schemas = db.instance_variable_get(:@schemas) if schemas && (table = literal(from)) && (sch = Sequel.synchronize{schemas[table]}) sch.map{|c,_| c} end end end end private # Return the probable name of the column, or nil if one # cannot be determined. def probable_column_name(c) case c when Symbol _, c, a = split_symbol(c) (a || c).to_sym when SQL::Identifier c.value.to_sym when SQL::QualifiedIdentifier c.column.to_sym when SQL::AliasedExpression a = c.alias a.is_a?(SQL::Identifier) ? a.value.to_sym : a.to_sym end end end Dataset.register_extension(:columns_introspection, Sequel::ColumnsIntrospection) end sequel-5.63.0/lib/sequel/extensions/connection_expiration.rb000066400000000000000000000063501434214120600242750ustar00rootroot00000000000000# frozen-string-literal: true # # The connection_expiration extension modifies a database's # connection pool to validate that connections checked out # from the pool are not expired, before yielding them for # use. If it detects an expired connection, it removes it # from the pool and tries the next available connection, # creating a new connection if no available connection is # unexpired. Example of use: # # DB.extension(:connection_expiration) # # The default connection timeout is 14400 seconds (4 hours). 
# To override it: # # DB.pool.connection_expiration_timeout = 3600 # 1 hour # # Note that this extension only affects the default threaded # and the sharded threaded connection pool. The single # threaded and sharded single threaded connection pools are # not affected. As the only reason to use the single threaded # pools is for speed, and this extension makes the connection # pool slower, there's not much point in modifying this # extension to work with the single threaded pools. The # threaded pools work fine even in single threaded code, so if # you are currently using a single threaded pool and want to # use this extension, switch to using a threaded pool. # # Related module: Sequel::ConnectionExpiration # module Sequel module ConnectionExpiration class Retry < Error; end Sequel::Deprecation.deprecate_constant(self, :Retry) # The number of seconds that need to pass since # connection creation before expiring a connection. # Defaults to 14400 seconds (4 hours). attr_accessor :connection_expiration_timeout # The maximum number of seconds that will be added as a random delay to the expiration timeout # Defaults to 0 seconds (no random delay). attr_accessor :connection_expiration_random_delay # Initialize the data structures used by this extension. def self.extended(pool) pool.instance_exec do sync do @connection_expiration_timestamps ||= {} @connection_expiration_timeout ||= 14400 @connection_expiration_random_delay ||= 0 end end end private # Clean up expiration timestamps during disconnect. def disconnect_connection(conn) sync{@connection_expiration_timestamps.delete(conn)} super end # Record the time the connection was created. def make_new(*) conn = super @connection_expiration_timestamps[conn] = [Sequel.start_timer, @connection_expiration_timeout + (rand * @connection_expiration_random_delay)].freeze conn end # When acquiring a connection, check if the connection is expired. # If it is expired, disconnect the connection, and retry with a new # connection. def acquire(*a) conn = nil 1.times do if (conn = super) && (cet = sync{@connection_expiration_timestamps[conn]}) && Sequel.elapsed_seconds_since(cet[0]) > cet[1] if pool_type == :sharded_threaded sync{allocated(a.last).delete(Sequel.current)} else sync{@allocated.delete(Sequel.current)} end disconnect_connection(conn) redo end end conn end end Database.register_extension(:connection_expiration){|db| db.pool.extend(ConnectionExpiration)} end sequel-5.63.0/lib/sequel/extensions/connection_validator.rb000066400000000000000000000104731434214120600241010ustar00rootroot00000000000000# frozen-string-literal: true # # The connection_validator extension modifies a database's # connection pool to validate that connections checked out # from the pool are still valid, before yielding them for # use. If it detects an invalid connection, it removes it # from the pool and tries the next available connection, # creating a new connection if no available connection is # valid. Example of use: # # DB.extension(:connection_validator) # # As checking connections for validity involves issuing a # query, which is potentially an expensive operation, # the validation checks are only run if the connection has # been idle for longer than a certain threshold. 
By default, # that threshold is 3600 seconds (1 hour), but it can be # modified by the user, set to -1 to always validate # connections on checkout: # # DB.pool.connection_validation_timeout = -1 # # Note that if you set the timeout to validate connections # on every checkout, you should probably manually control # connection checkouts on a coarse basis, using # Database#synchronize. In a web application, the optimal # place for that would be a rack middleware. Validating # connections on every checkout without setting up coarse # connection checkouts will hurt performance, in some cases # significantly. Note that setting up coarse connection # checkouts reduces the concurrency level acheivable. For # example, in a web application, using Database#synchronize # in a rack middleware will limit the number of concurrent # web requests to the number to connections in the database # connection pool. # # Note that this extension only affects the default threaded # and the sharded threaded connection pool. The single # threaded and sharded single threaded connection pools are # not affected. As the only reason to use the single threaded # pools is for speed, and this extension makes the connection # pool slower, there's not much point in modifying this # extension to work with the single threaded pools. The # threaded pools work fine even in single threaded code, so if # you are currently using a single threaded pool and want to # use this extension, switch to using a threaded pool. # # Related module: Sequel::ConnectionValidator # module Sequel module ConnectionValidator class Retry < Error; end Sequel::Deprecation.deprecate_constant(self, :Retry) # The number of seconds that need to pass since # connection checkin before attempting to validate # the connection when checking it out from the pool. # Defaults to 3600 seconds (1 hour). attr_accessor :connection_validation_timeout # Initialize the data structures used by this extension. def self.extended(pool) pool.instance_exec do sync do @connection_timestamps ||= {} @connection_validation_timeout ||= 3600 end end # Make sure the valid connection SQL query is precached, # otherwise it's possible it will happen at runtime. While # it should work correctly at runtime, it's better to avoid # the possibility of failure altogether. pool.db.send(:valid_connection_sql) end private # Record the time the connection was checked back into the pool. def checkin_connection(*) conn = super @connection_timestamps[conn] = Sequel.start_timer conn end # Clean up timestamps during disconnect. def disconnect_connection(conn) sync{@connection_timestamps.delete(conn)} super end # When acquiring a connection, if it has been # idle for longer than the connection validation timeout, # test the connection for validity. If it is not valid, # disconnect the connection, and retry with a new connection. 
def acquire(*a) conn = nil 1.times do if (conn = super) && (timer = sync{@connection_timestamps.delete(conn)}) && Sequel.elapsed_seconds_since(timer) > @connection_validation_timeout && !db.valid_connection?(conn) if pool_type == :sharded_threaded sync{allocated(a.last).delete(Sequel.current)} else sync{@allocated.delete(Sequel.current)} end disconnect_connection(conn) redo end end conn end end Database.register_extension(:connection_validator){|db| db.pool.extend(ConnectionValidator)} end sequel-5.63.0/lib/sequel/extensions/constant_sql_override.rb000066400000000000000000000036721434214120600243070ustar00rootroot00000000000000# frozen-string-literal: true # # The constant_sql_override extension allows you to change the SQL # generated for Sequel constants. # # One possible use-case for this is to have Sequel::CURRENT_TIMESTAMP use UTC time when # you have Sequel.database_timezone = :utc, but the database uses localtime when # generating CURRENT_TIMESTAMP. # # You can set SQL overrides with Database#set_constant_sql: # # DB.set_constant_sql(Sequel::CURRENT_TIMESTAMP, "CURRENT_TIMESTAMP AT TIME ZONE 'UTC'") # # Now, using Sequel::CURRENT_TIMESTAMP will use your override instead: # # Album.where(released_at: Sequel::CURRENT_TIMESTAMP).sql # # => SELECT "albums.*" FROM "albums" WHERE ("released_at" = CURRENT_TIMESTAMP AT TIME ZONE 'UTC') # # To use this extension, first load it into your Sequel::Database instance: # # DB.extension :constant_sql_override # # Related module: Sequel::ConstantSqlOverride # module Sequel module ConstantSqlOverride module DatabaseMethods # Create the initial empty hash of constant sql overrides. def self.extended(db) db.instance_exec do @constant_sqls ||= {} extend_datasets(DatasetMethods) end end # Hash mapping constant symbols to SQL. For internal use only. attr_reader :constant_sqls # :nodoc: # Set the SQL to use for the given Sequel::SQL::Constant def set_constant_sql(constant, override) @constant_sqls[constant.constant] = override end # Freeze the constant_sqls hash to prevent adding new overrides. def freeze @constant_sqls.freeze super end end module DatasetMethods # Use overridden constant SQL def constant_sql_append(sql, constant) if constant_sql = db.constant_sqls[constant] sql << constant_sql else super end end end end Database.register_extension(:constant_sql_override, ConstantSqlOverride::DatabaseMethods) end sequel-5.63.0/lib/sequel/extensions/constraint_validations.rb000066400000000000000000000504751434214120600244640ustar00rootroot00000000000000# frozen-string-literal: true # # The constraint_validations extension is designed to easily create database # constraints inside create_table and alter_table blocks. It also adds # relevant metadata about the constraints to a separate table, which the # constraint_validations model plugin uses to setup automatic validations. # # To use this extension, you first need to load it into the database: # # DB.extension(:constraint_validations) # # Note that you should only need to do this when modifying the constraint # validations (i.e. when migrating). You should probably not load this # extension in general application code. # # You also need to make sure to add the metadata table for the automatic # validations. By default, this table is called sequel_constraint_validations. # # DB.create_constraint_validations_table # # This table should only be created once. For new applications, you # generally want to create it first, before creating any other application # tables. 
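# Once the metadata table exists and constraints have been defined, the
# model side is enabled separately via the related plugin (sketch):
#
#   Sequel::Model.plugin :constraint_validations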
# # Because migrations instance_exec the up and down blocks on a database, # using this extension in a migration can be done via: # # Sequel.migration do # up do # extension(:constraint_validations) # # ... # end # down do # extension(:constraint_validations) # # ... # end # end # # However, note that you cannot use change migrations with this extension, # you need to use separate up/down migrations. # # The API for creating the constraints with automatic validations is # similar to the validation_helpers model plugin API. However, # instead of having separate validates_* methods, it just adds a validate # method that accepts a block to the schema generators. Like the # create_table and alter_table blocks, this block is instance_execed and # offers its own DSL. Example: # # DB.create_table(:table) do # Integer :id # String :name # # validate do # presence :id # min_length 5, :name # end # end # # instance_exec is used in this case because create_table and alter_table # already use instance_exec, so losing access to the surrounding receiver # is not an issue. # # Here's a breakdown of the constraints created for each constraint validation # method: # # All constraints except unique unless :allow_nil is true :: CHECK column IS NOT NULL # presence (String column) :: CHECK trim(column) != '' # exact_length 5 :: CHECK char_length(column) = 5 # min_length 5 :: CHECK char_length(column) >= 5 # max_length 5 :: CHECK char_length(column) <= 5 # length_range 3..5 :: CHECK char_length(column) >= 3 AND char_length(column) <= 5 # length_range 3...5 :: CHECK char_length(column) >= 3 AND char_length(column) < 5 # format /foo\\d+/ :: CHECK column ~ 'foo\\d+' # format /foo\\d+/i :: CHECK column ~* 'foo\\d+' # like 'foo%' :: CHECK column LIKE 'foo%' ESCAPE '\' # ilike 'foo%' :: CHECK column ILIKE 'foo%' ESCAPE '\' # includes ['a', 'b'] :: CHECK column IN ('a', 'b') # includes [1, 2] :: CHECK column IN (1, 2) # includes 3..5 :: CHECK column >= 3 AND column <= 5 # includes 3...5 :: CHECK column >= 3 AND column < 5 # operator :>, 1 :: CHECK column > 1 # operator :>=, 2 :: CHECK column >= 2 # operator :<, "M" :: CHECK column < 'M' # operator :<=, 'K' :: CHECK column <= 'K' # unique :: UNIQUE (column) # # There are some additional API differences: # # * Only the :message and :allow_nil options are respected. The :allow_blank # and :allow_missing options are not respected. # * A new option, :name, is respected, for providing the name of the constraint. It is highly # recommended that you provide a name for all constraint validations, as # otherwise, it is difficult to drop the constraints later. # * The includes validation only supports an array of strings, and array of # integers, and a range of integers. # * There are like and ilike validations, which are similar to the format # validation but use a case sensitive or case insensitive LIKE pattern. LIKE # patters are very simple, so many regexp patterns cannot be expressed by # them, but only a couple databases (PostgreSQL and MySQL) support regexp # patterns. # * The operator validation only supports >, >=, <, and <= operators, and the # argument must be a string or an integer. # * When using the unique validation, column names cannot have embedded commas. # For similar reasons, when using an includes validation with an array of # strings, none of the strings in the array can have embedded commas. # * The unique validation does not support an arbitrary number of columns. 
# For a single column, just the symbol should be used, and for an array # of columns, an array of symbols should be used. There is no support # for creating two separate unique validations for separate columns in # a single call. # * A drop method can be called with a constraint name in a alter_table # validate block to drop an existing constraint and the related # validation metadata. # * While it is allowed to create a presence constraint with :allow_nil # set to true, doing so does not create a constraint unless the column # has String type. # # Note that this extension has the following issues on certain databases: # # * MySQL does not support check constraints (they are parsed but ignored), # so using this extension does not actually set up constraints on MySQL, # except for the unique constraint. It can still be used on MySQL to # add the validation metadata so that the plugin can setup automatic # validations. # * On SQLite, adding constraints to a table is not supported, so it must # be emulated by dropping the table and recreating it with the constraints. # If you want to use this plugin on SQLite with an alter_table block, # you should drop all constraint validation metadata using # drop_constraint_validations_for(table: 'table'), and then # readd all constraints you want to use inside the alter table block, # making no other changes inside the alter_table block. # # Dropping a table will automatically delete all constraint validations for # that table. However, altering a table (e.g. to drop a column) will not # currently make any changes to the constraint validations metadata. # # Related module: Sequel::ConstraintValidations # module Sequel module ConstraintValidations # The default table name used for the validation metadata. DEFAULT_CONSTRAINT_VALIDATIONS_TABLE = :sequel_constraint_validations OPERATORS = {:< => :lt, :<= => :lte, :> => :gt, :>= => :gte}.freeze REVERSE_OPERATOR_MAP = {:str_lt => :<, :str_lte => :<=, :str_gt => :>, :str_gte => :>=, :int_lt => :<, :int_lte => :<=, :int_gt => :>, :int_gte => :>=}.freeze # Set the default validation metadata table name if it has not already # been set. def self.extended(db) db.constraint_validations_table ||= DEFAULT_CONSTRAINT_VALIDATIONS_TABLE end # This is the DSL class used for the validate block inside create_table and # alter_table. class Generator # Store the schema generator that encloses this validates block. def initialize(generator) @generator = generator end # Create constraint validation methods that don't take an argument %w'presence unique'.each do |v| class_eval(<<-END, __FILE__, __LINE__+1) def #{v}(columns, opts=OPTS) @generator.validation({:type=>:#{v}, :columns=>Array(columns)}.merge!(opts)) end END end # Create constraint validation methods that take an argument %w'exact_length min_length max_length length_range format like ilike includes'.each do |v| class_eval(<<-END, __FILE__, __LINE__+1) def #{v}(arg, columns, opts=OPTS) @generator.validation({:type=>:#{v}, :columns=>Array(columns), :arg=>arg}.merge!(opts)) end END end # Create operator validation. The op should be either +:>+, +:>=+, +:<+, or +:<=+, and # the arg should be either a string or an integer. 
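# For example, inside a validate block (a sketch assuming a hypothetical
# +age+ column):
#
#   validate do
#     operator :>=, 18, :age, name: :age_min
#   end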
def operator(op, arg, columns, opts=OPTS) raise Error, "invalid operator (#{op}) used when creating operator validation" unless suffix = OPERATORS[op] prefix = case arg when String "str" when Integer "int" else raise Error, "invalid argument (#{arg.inspect}) used when creating operator validation" end @generator.validation({:type=>:"#{prefix}_#{suffix}", :columns=>Array(columns), :arg=>arg}.merge!(opts)) end # Given the name of a constraint, drop that constraint from the database, # and remove the related validation metadata. def drop(constraint) @generator.validation({:type=>:drop, :name=>constraint}) end # Alias of instance_exec for a nicer API. def process(&block) instance_exec(&block) end end # Additional methods for the create_table generator to support constraint validations. module CreateTableGeneratorMethods # An array of stored validation metadata, used later by the database to create # constraints. attr_reader :validations # Add a validation metadata hash to the stored array. def validation(opts) @validations << opts end # Call into the validate DSL for creating constraint validations. def validate(&block) Generator.new(self).process(&block) end end # Additional methods for the alter_table generator to support constraint validations, # used to give it a more similar API to the create_table generator. module AlterTableGeneratorMethods include CreateTableGeneratorMethods # Alias of add_constraint for similarity to create_table generator. def constraint(*args) add_constraint(*args) end # Alias of add_unique_constraint for similarity to create_table generator. def unique(*args) add_unique_constraint(*args) end end # The name of the table storing the validation metadata. If modifying this # from the default, this should be changed directly after loading the # extension into the database attr_accessor :constraint_validations_table # Create the table storing the validation metadata for all of the # constraints created by this extension. def create_constraint_validations_table create_table(constraint_validations_table) do String :table, :null=>false String :constraint_name String :validation_type, :null=>false String :column, :null=>false String :argument String :message TrueClass :allow_nil end end # Modify the default create_table generator to include # the constraint validation methods. def create_table_generator(&block) super do extend CreateTableGeneratorMethods @validations = [] instance_exec(&block) if block end end # Drop all constraint validations for a table if dropping the table. def drop_table(*names) names.each do |name| if !name.is_a?(Hash) && table_exists?(constraint_validations_table) drop_constraint_validations_for(:table=>name) end end super end # Drop the constraint validations table. def drop_constraint_validations_table drop_table(constraint_validations_table) end # Delete validation metadata for specific constraints. At least # one of the following options should be specified: # # :table :: The table containing the constraint # :column :: The column affected by the constraint # :constraint :: The name of the related constraint # # The main reason for this method is when dropping tables # or columns. If you have previously defined a constraint # validation on the table or column, you should delete the # related metadata when dropping the table or column. 
# For a table, this isn't a big issue, as it will just result # in some wasted space, but for columns, if you don't drop # the related metadata, it could make it impossible to save # rows, since a validation for a nonexistent column will be # created. def drop_constraint_validations_for(opts=OPTS) ds = from(constraint_validations_table) if table = opts[:table] ds = ds.where(:table=>constraint_validations_literal_table(table)) end if column = opts[:column] ds = ds.where(:column=>column.to_s) end if constraint = opts[:constraint] ds = ds.where(:constraint_name=>constraint.to_s) end unless table || column || constraint raise Error, "must specify :table, :column, or :constraint when dropping constraint validations" end ds.delete end # Modify the default alter_table generator to include # the constraint validation methods. def alter_table_generator(&block) super do extend AlterTableGeneratorMethods @validations = [] instance_exec(&block) if block end end private # After running all of the table alteration statements, # if there were any constraint validations, run table alteration # statements to create related constraints. This is purposely # run after the other statements, as the presence validation # in alter table requires introspecting the modified model # schema. def apply_alter_table_generator(name, generator) super unless generator.validations.empty? gen = alter_table_generator process_generator_validations(name, gen, generator.validations) apply_alter_table(name, gen.operations) end end # The value of a blank string. An empty string by default, but nil # on Oracle as Oracle treats the empty string as NULL. def blank_string_value if database_type == :oracle nil else '' end end # Return an unquoted literal form of the table name. # This allows the code to handle schema qualified tables, # without quoting all table names. def constraint_validations_literal_table(table) dataset.with_quote_identifiers(false).literal(table) end # Before creating the table, add constraints for all of the # generators validations to the generator. def create_table_from_generator(name, generator, options) unless generator.validations.empty? process_generator_validations(name, generator, generator.validations) end super end def constraint_validation_expression(cols, allow_nil) exprs = cols.map do |c| expr = yield c if allow_nil Sequel.|({c=>nil}, expr) else Sequel.&(Sequel.~(c=>nil), expr) end end Sequel.&(*exprs) end # For the given table, generator, and validations, add constraints # to the generator for each of the validations, as well as adding # validation metadata to the constraint validations table. def process_generator_validations(table, generator, validations) drop_rows = [] rows = validations.map do |val| columns, arg, constraint, validation_type, message, allow_nil = val.values_at(:columns, :arg, :name, :type, :message, :allow_nil) case validation_type when :presence strings, non_strings = columns.partition{|c| generator_string_column?(generator, table, c)} if !non_strings.empty? && !allow_nil non_strings_expr = Sequel.&(*non_strings.map{|c| Sequel.~(c=>nil)}) end unless strings.empty? 
strings_expr = constraint_validation_expression(strings, allow_nil){|c| Sequel.~(Sequel.trim(c) => blank_string_value)} end expr = if non_strings_expr && strings_expr Sequel.&(strings_expr, non_strings_expr) else strings_expr || non_strings_expr end if expr generator.constraint(constraint, expr) end when :exact_length generator.constraint(constraint, constraint_validation_expression(columns, allow_nil){|c| {Sequel.char_length(c) => arg}}) when :min_length generator.constraint(constraint, constraint_validation_expression(columns, allow_nil){|c| Sequel.char_length(c) >= arg}) when :max_length generator.constraint(constraint, constraint_validation_expression(columns, allow_nil){|c| Sequel.char_length(c) <= arg}) when *REVERSE_OPERATOR_MAP.keys generator.constraint(constraint, constraint_validation_expression(columns, allow_nil){|c| Sequel.identifier(c).public_send(REVERSE_OPERATOR_MAP[validation_type], arg)}) when :length_range op = arg.exclude_end? ? :< : :<= generator.constraint(constraint, constraint_validation_expression(columns, allow_nil){|c| (Sequel.char_length(c) >= arg.begin) & Sequel.char_length(c).public_send(op, arg.end)}) arg = "#{arg.begin}..#{'.' if arg.exclude_end?}#{arg.end}" when :format generator.constraint(constraint, constraint_validation_expression(columns, allow_nil){|c| {c => arg}}) if arg.casefold? validation_type = :iformat end arg = arg.source when :includes generator.constraint(constraint, constraint_validation_expression(columns, allow_nil){|c| {c => arg}}) if arg.is_a?(Range) if arg.begin.is_a?(Integer) && arg.end.is_a?(Integer) validation_type = :includes_int_range arg = "#{arg.begin}..#{'.' if arg.exclude_end?}#{arg.end}" else raise Error, "validates includes with a range only supports integers currently, cannot handle: #{arg.inspect}" end elsif arg.is_a?(Array) if arg.all?{|x| x.is_a?(Integer)} validation_type = :includes_int_array elsif arg.all?{|x| x.is_a?(String)} validation_type = :includes_str_array else raise Error, "validates includes with an array only supports strings and integers currently, cannot handle: #{arg.inspect}" end arg = arg.join(',') else raise Error, "validates includes only supports arrays and ranges currently, cannot handle: #{arg.inspect}" end when :like, :ilike generator.constraint(constraint, constraint_validation_expression(columns, allow_nil){|c| Sequel.public_send(validation_type, c, arg)}) when :unique generator.unique(columns, :name=>constraint) columns = [columns.join(',')] when :drop if generator.is_a?(Sequel::Schema::AlterTableGenerator) unless constraint raise Error, 'cannot drop a constraint validation without a constraint name' end generator.drop_constraint(constraint) drop_rows << [constraint_validations_literal_table(table), constraint.to_s] columns = [] else raise Error, 'cannot drop a constraint validation in a create_table generator' end else raise Error, "invalid or missing validation type: #{val.inspect}" end columns.map do |column| {:table=>constraint_validations_literal_table(table), :constraint_name=>(constraint.to_s if constraint), :validation_type=>validation_type.to_s, :column=>column.to_s, :argument=>(arg.to_s if arg), :message=>(message.to_s if message), :allow_nil=>allow_nil} end end ds = from(constraint_validations_table) unless drop_rows.empty? ds.where([:table, :constraint_name]=>drop_rows).delete end ds.multi_insert(rows.flatten) end # Introspect the generator to determine if column # created is a string or not. 
def generator_string_column?(generator, table, c) if generator.is_a?(Sequel::Schema::AlterTableGenerator) # This is the alter table case, which runs after the # table has been altered, so just check the database # schema for the column. schema(table).each do |col, sch| if col == c return sch[:type] == :string end end false else # This is the create table case, check the metadata # for the column to be created to see if it is a string. generator.columns.each do |col| if col[:name] == c return [String, :text, :varchar].include?(col[:type]) end end false end end end Database.register_extension(:constraint_validations, ConstraintValidations) end sequel-5.63.0/lib/sequel/extensions/core_extensions.rb000066400000000000000000000200311434214120600230730ustar00rootroot00000000000000# frozen-string-literal: true # # These are extensions to core classes that Sequel enables by default. # They make using Sequel's DSL easier by adding methods to Array, # Hash, String, and Symbol to add methods that return Sequel # expression objects. To load the extension: # # Sequel.extension :core_extensions # This extension loads the core extensions. def Sequel.core_extensions? true end Sequel.extension :symbol_as # Sequel extends +Array+ to add methods to implement the SQL DSL. # Most of these methods require that the array not be empty and that it # must consist solely of other arrays that have exactly two elements. class Array # Return a Sequel::SQL::BooleanExpression created from this array, not matching all of the # conditions. # # ~[[:a, true]] # SQL: (a IS NOT TRUE) # ~[[:a, 1], [:b, [2, 3]]] # SQL: ((a != 1) OR (b NOT IN (2, 3))) def ~ Sequel.~(self) end # Return a Sequel::SQL::CaseExpression with this array as the conditions and the given # default value and expression. # # [[{a: [2,3]}, 1]].case(0) # SQL: CASE WHEN (a IN (2, 3)) THEN 1 ELSE 0 END # [[:a, 1], [:b, 2]].case(:d, :c) # SQL: CASE c WHEN a THEN 1 WHEN b THEN 2 ELSE d END def case(*args) ::Sequel::SQL::CaseExpression.new(self, *args) end # Return a Sequel::SQL::ValueList created from this array. Used if this array contains # all two element arrays and you want it treated as an SQL value list (IN predicate) # instead of as a conditions specifier (similar to a hash). This is not necessary if you are using # this array as a value in a filter, but may be necessary if you are using it as a # value with placeholder SQL: # # DB[:a].where([:a, :b]=>[[1, 2], [3, 4]]) # SQL: ((a, b) IN ((1, 2), (3, 4))) # DB[:a].where('(a, b) IN ?', [[1, 2], [3, 4]]) # SQL: ((a, b) IN ((1 = 2) AND (3 = 4))) # DB[:a].where('(a, b) IN ?', [[1, 2], [3, 4]].sql_value_list) # SQL: ((a, b) IN ((1, 2), (3, 4))) def sql_value_list ::Sequel::SQL::ValueList.new(self) end # Return a Sequel::SQL::BooleanExpression created from this array, matching all of the # conditions. Rarely do you need to call this explicitly, as Sequel generally # assumes that arrays of two element arrays specify this type of condition. One case where # it can be necessary to use this is if you are using the object as a value in a filter hash # and want to use the = operator instead of the IN operator (which is used by default for # arrays of two element arrays). # # [[:a, true]].sql_expr # SQL: (a IS TRUE) # [[:a, 1], [:b, [2, 3]]].sql_expr # SQL: ((a = 1) AND (b IN (2, 3))) def sql_expr Sequel[self] end # Return a Sequel::SQL::BooleanExpression created from this array, matching none # of the conditions. 
# # [[:a, true]].sql_negate # SQL: (a IS NOT TRUE) # [[:a, 1], [:b, [2, 3]]].sql_negate # SQL: ((a != 1) AND (b NOT IN (2, 3))) def sql_negate Sequel.negate(self) end # Return a Sequel::SQL::BooleanExpression created from this array, matching any of the # conditions. # # [[:a, true]].sql_or # SQL: (a IS TRUE) # [[:a, 1], [:b, [2, 3]]].sql_or # SQL: ((a = 1) OR (b IN (2, 3))) def sql_or Sequel.or(self) end # Return a Sequel::SQL::StringExpression representing an SQL string made up of the # concatenation of this array's elements. If an argument is passed # it is used in between each element of the array in the SQL # concatenation. # # [:a].sql_string_join # SQL: a # [:a, :b].sql_string_join # SQL: (a || b) # [:a, 'b'].sql_string_join # SQL: (a || 'b') # ['a', :b].sql_string_join(' ') # SQL: ('a' || ' ' || b) def sql_string_join(joiner=nil) Sequel.join(self, joiner) end end # Sequel extends +Hash+ to add methods to implement the SQL DSL. class Hash # Return a Sequel::SQL::BooleanExpression created from this hash, matching # all of the conditions in this hash and the condition specified by # the given argument. # # {a: 1} & :b # SQL: ((a = 1) AND b) # {a: true} & ~:b # SQL: ((a IS TRUE) AND NOT b) def &(ce) ::Sequel::SQL::BooleanExpression.new(:AND, self, ce) end # Return a Sequel::SQL::BooleanExpression created from this hash, matching # all of the conditions in this hash or the condition specified by # the given argument. # # {a: 1} | :b # SQL: ((a = 1) OR b) # {a: true} | ~:b # SQL: ((a IS TRUE) OR NOT b) def |(ce) ::Sequel::SQL::BooleanExpression.new(:OR, self, ce) end # Return a Sequel::SQL::BooleanExpression created from this hash, not matching all of the # conditions. # # ~{a: true} # SQL: (a IS NOT TRUE) # ~{a: 1, b: [2, 3]} # SQL: ((a != 1) OR (b NOT IN (2, 3))) def ~ ::Sequel::SQL::BooleanExpression.from_value_pairs(self, :OR, true) end # Return a Sequel::SQL::CaseExpression with this hash as the conditions and the given # default value. # # {{a: [2,3]}=>1}.case(0) # SQL: CASE WHEN (a IN (2, 3)) THEN 1 ELSE 0 END # {a: 1, b: 2}.case(:d, :c) # SQL: CASE c WHEN a THEN 1 WHEN b THEN 2 ELSE d END def case(*args) ::Sequel::SQL::CaseExpression.new(to_a, *args) end # Return a Sequel::SQL::BooleanExpression created from this hash, matching all of the # conditions. Rarely do you need to call this explicitly, as Sequel generally # assumes that hashes specify this type of condition. # # {a: true}.sql_expr # SQL: (a IS TRUE) # {a: 1, b: [2, 3]}.sql_expr # SQL: ((a = 1) AND (b IN (2, 3))) def sql_expr ::Sequel::SQL::BooleanExpression.from_value_pairs(self) end # Return a Sequel::SQL::BooleanExpression created from this hash, matching none # of the conditions. # # {a: true}.sql_negate # SQL: (a IS NOT TRUE) # {a: 1, b: [2, 3]}.sql_negate # SQL: ((a != 1) AND (b NOT IN (2, 3))) def sql_negate ::Sequel::SQL::BooleanExpression.from_value_pairs(self, :AND, true) end # Return a Sequel::SQL::BooleanExpression created from this hash, matching any of the # conditions. # # {a: true}.sql_or # SQL: (a IS TRUE) # {a: 1, b: [2, 3]}.sql_or # SQL: ((a = 1) OR (b IN (2, 3))) def sql_or ::Sequel::SQL::BooleanExpression.from_value_pairs(self, :OR) end end # Sequel extends +String+ to add methods to implement the SQL DSL. 
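# For a quick taste of the methods defined below (illustrative only, assuming a connected DB handle and an items table): # # DB[:items].where('x > ?'.lit(10)).sql # # SELECT * FROM items WHERE (x > 10)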
class String include Sequel::SQL::AliasMethods include Sequel::SQL::CastMethods # Converts a string into a Sequel::LiteralString, in order to override string # literalization, e.g.: # # DB[:items].where(abc: 'def') # # "SELECT * FROM items WHERE (abc = 'def')" # # DB[:items].where(abc: 'def'.lit) # # "SELECT * FROM items WHERE (abc = def)" # # You can also provide arguments, to create a Sequel::SQL::PlaceholderLiteralString: # # DB[:items].select{|o| o.count('DISTINCT ?'.lit(:a))} # # "SELECT count(DISTINCT a) FROM items" def lit(*args) args.empty? ? Sequel::LiteralString.new(self) : Sequel::SQL::PlaceholderLiteralString.new(self, args) end # Returns a Sequel::SQL::Blob that holds the same data as this string. Blobs provide proper # escaping of binary data. def to_sequel_blob ::Sequel::SQL::Blob.new(self) end end # Sequel extends +Symbol+ to add methods to implement the SQL DSL. class Symbol include Sequel::SQL::CastMethods include Sequel::SQL::OrderMethods include Sequel::SQL::BooleanMethods include Sequel::SQL::NumericMethods include Sequel::SQL::QualifyingMethods include Sequel::SQL::StringMethods include Sequel::SQL::SubscriptMethods include Sequel::SQL::ComplexExpressionMethods # Returns receiver wrapped in an Sequel::SQL::Identifier. # # :a.identifier # SQL: "a" def identifier Sequel::SQL::Identifier.new(self) end # Returns a Sequel::SQL::Function with this as the function name, # and the given arguments. # # :now.sql_function # SQL: now() # :sum.sql_function(:a) # SQL: sum(a) # :concat.sql_function(:a, :b) # SQL: concat(a, b) def sql_function(*args) Sequel::SQL::Function.new(self, *args) end end sequel-5.63.0/lib/sequel/extensions/core_refinements.rb000066400000000000000000000213261434214120600232230ustar00rootroot00000000000000# frozen-string-literal: true # # These are refinements to core classes that allow the Sequel # DSL to be used without modifying the core classes directly. # After loading the extension via: # # Sequel.extension :core_refinements # # you can enable the refinements for particular files: # # using Sequel::CoreRefinements # :nocov: raise(Sequel::Error, "Refinements require ruby 2.0.0 or greater") unless RUBY_VERSION >= '2.0.0' # :nocov: module Sequel::CoreRefinements # :nocov: include_meth = RUBY_VERSION >= '3.1' ? :import_methods : :include # :nocov: INCLUDE_METH = include_meth private_constant :INCLUDE_METH refine Array do # Return a Sequel::SQL::BooleanExpression created from this array, not matching all of the # conditions. # # ~[[:a, true]] # SQL: (a IS NOT TRUE) # ~[[:a, 1], [:b, [2, 3]]] # SQL: ((a != 1) OR (b NOT IN (2, 3))) def ~ Sequel.~(self) end # Return a Sequel::SQL::CaseExpression with this array as the conditions and the given # default value and expression. # # [[{a: [2,3]}, 1]].case(0) # SQL: CASE WHEN (a IN (2, 3)) THEN 1 ELSE 0 END # [[:a, 1], [:b, 2]].case(:d, :c) # SQL: CASE c WHEN a THEN 1 WHEN b THEN 2 ELSE d END def case(*args) ::Sequel::SQL::CaseExpression.new(self, *args) end # Return a Sequel::SQL::ValueList created from this array. Used if this array contains # all two element arrays and you want it treated as an SQL value list (IN predicate) # instead of as a conditions specifier (similar to a hash). 
This is not necessary if you are using # this array as a value in a filter, but may be necessary if you are using it as a # value with placeholder SQL: # # DB[:a].where([:a, :b]=>[[1, 2], [3, 4]]) # SQL: ((a, b) IN ((1, 2), (3, 4))) # DB[:a].where('(a, b) IN ?', [[1, 2], [3, 4]]) # SQL: ((a, b) IN ((1 = 2) AND (3 = 4))) # DB[:a].where('(a, b) IN ?', [[1, 2], [3, 4]].sql_value_list) # SQL: ((a, b) IN ((1, 2), (3, 4))) def sql_value_list ::Sequel::SQL::ValueList.new(self) end # Return a Sequel::SQL::BooleanExpression created from this array, matching all of the # conditions. Rarely do you need to call this explicitly, as Sequel generally # assumes that arrays of two element arrays specify this type of condition. One case where # it can be necessary to use this is if you are using the object as a value in a filter hash # and want to use the = operator instead of the IN operator (which is used by default for # arrays of two element arrays). # # [[:a, true]].sql_expr # SQL: (a IS TRUE) # [[:a, 1], [:b, [2, 3]]].sql_expr # SQL: ((a = 1) AND (b IN (2, 3))) def sql_expr Sequel[self] end # Return a Sequel::SQL::BooleanExpression created from this array, matching none # of the conditions. # # [[:a, true]].sql_negate # SQL: (a IS NOT TRUE) # [[:a, 1], [:b, [2, 3]]].sql_negate # SQL: ((a != 1) AND (b NOT IN (2, 3))) def sql_negate Sequel.negate(self) end # Return a Sequel::SQL::BooleanExpression created from this array, matching any of the # conditions. # # [[:a, true]].sql_or # SQL: (a IS TRUE) # [[:a, 1], [:b, [2, 3]]].sql_or # SQL: ((a = 1) OR (b IN (2, 3))) def sql_or Sequel.or(self) end # Return a Sequel::SQL::StringExpression representing an SQL string made up of the # concatenation of this array's elements. If an argument is passed # it is used in between each element of the array in the SQL # concatenation. # # [:a].sql_string_join # SQL: a # [:a, :b].sql_string_join # SQL: (a || b) # [:a, 'b'].sql_string_join # SQL: (a || 'b') # ['a', :b].sql_string_join(' ') # SQL: ('a' || ' ' || b) def sql_string_join(joiner=nil) Sequel.join(self, joiner) end end refine Hash do # Return a Sequel::SQL::BooleanExpression created from this hash, matching # all of the conditions in this hash and the condition specified by # the given argument. # # {a: 1} & :b # SQL: ((a = 1) AND b) # {a: true} & ~:b # SQL: ((a IS TRUE) AND NOT b) def &(ce) ::Sequel::SQL::BooleanExpression.new(:AND, self, ce) end # Return a Sequel::SQL::BooleanExpression created from this hash, matching # all of the conditions in this hash or the condition specified by # the given argument. # # {a: 1} | :b # SQL: ((a = 1) OR b) # {a: true} | ~:b # SQL: ((a IS TRUE) OR NOT b) def |(ce) ::Sequel::SQL::BooleanExpression.new(:OR, self, ce) end # Return a Sequel::SQL::BooleanExpression created from this hash, not matching all of the # conditions. # # ~{a: true} # SQL: (a IS NOT TRUE) # ~{a: 1, b: [2, 3]} # SQL: ((a != 1) OR (b NOT IN (2, 3))) def ~ ::Sequel::SQL::BooleanExpression.from_value_pairs(self, :OR, true) end # Return a Sequel::SQL::CaseExpression with this hash as the conditions and the given # default value. # # {{a: [2,3]}=>1}.case(0) # SQL: CASE WHEN (a IN (2, 3)) THEN 1 ELSE 0 END # {a: 1, b: 2}.case(:d, :c) # SQL: CASE c WHEN a THEN 1 WHEN b THEN 2 ELSE d END def case(*args) ::Sequel::SQL::CaseExpression.new(to_a, *args) end # Return a Sequel::SQL::BooleanExpression created from this hash, matching all of the # conditions. 
Rarely do you need to call this explicitly, as Sequel generally # assumes that hashes specify this type of condition. # # {a: true}.sql_expr # SQL: (a IS TRUE) # {a: 1, b: [2, 3]}.sql_expr # SQL: ((a = 1) AND (b IN (2, 3))) def sql_expr ::Sequel::SQL::BooleanExpression.from_value_pairs(self) end # Return a Sequel::SQL::BooleanExpression created from this hash, matching none # of the conditions. # # {a: true}.sql_negate # SQL: (a IS NOT TRUE) # {a: 1, b: [2, 3]}.sql_negate # SQL: ((a != 1) AND (b NOT IN (2, 3))) def sql_negate ::Sequel::SQL::BooleanExpression.from_value_pairs(self, :AND, true) end # Return a Sequel::SQL::BooleanExpression created from this hash, matching any of the # conditions. # # {a: true}.sql_or # SQL: (a IS TRUE) # {a: 1, b: [2, 3]}.sql_or # SQL: ((a = 1) OR (b IN (2, 3))) def sql_or ::Sequel::SQL::BooleanExpression.from_value_pairs(self, :OR) end end refine String do send include_meth, Sequel::SQL::AliasMethods send include_meth, Sequel::SQL::CastMethods # Converts a string into a Sequel::LiteralString, in order to override string # literalization, e.g.: # # DB[:items].where(abc: 'def') # # "SELECT * FROM items WHERE (abc = 'def')" # # DB[:items].where(abc: 'def'.lit) # # "SELECT * FROM items WHERE (abc = def)" # # You can also provide arguments, to create a Sequel::SQL::PlaceholderLiteralString: # # DB[:items].select{|o| o.count('DISTINCT ?'.lit(:a))} # # "SELECT count(DISTINCT a) FROM items" def lit(*args) args.empty? ? Sequel::LiteralString.new(self) : Sequel::SQL::PlaceholderLiteralString.new(self, args) end # Returns a Sequel::SQL::Blob that holds the same data as this string. Blobs provide proper # escaping of binary data. def to_sequel_blob ::Sequel::SQL::Blob.new(self) end end refine Symbol do send include_meth, Sequel::SQL::AliasMethods send include_meth, Sequel::SQL::CastMethods send include_meth, Sequel::SQL::OrderMethods send include_meth, Sequel::SQL::BooleanMethods send include_meth, Sequel::SQL::NumericMethods # :nocov: remove_method :* if RUBY_VERSION >= '3.1' # :nocov: send include_meth, Sequel::SQL::QualifyingMethods send include_meth, Sequel::SQL::StringMethods send include_meth, Sequel::SQL::SubscriptMethods send include_meth, Sequel::SQL::ComplexExpressionMethods # :nocov: if RUBY_VERSION >= '3.1' remove_method :* def *(ce=(arg=false;nil)) if arg == false Sequel::SQL::ColumnAll.new(self) else Sequel::SQL::NumericExpression.new(:*, self, ce) end end end # :nocov: # Returns receiver wrapped in a Sequel::SQL::Identifier. # # :a.identifier # SQL: "a" def identifier Sequel::SQL::Identifier.new(self) end # Returns a Sequel::SQL::Function with this as the function name, # and the given arguments. # # :now.sql_function # SQL: now() # :sum.sql_function(:a) # SQL: sum(a) # :concat.sql_function(:a, :b) # SQL: concat(a, b) def sql_function(*args) Sequel::SQL::Function.new(self, *args) end end end sequel-5.63.0/lib/sequel/extensions/current_datetime_timestamp.rb000066400000000000000000000037261434214120600253170ustar00rootroot00000000000000# frozen-string-literal: true # # The current_datetime_timestamp extension makes Dataset#current_datetime # return an object that operates like Sequel.datetime_class.now, but will # be literalized as CURRENT_TIMESTAMP. # # This allows you to use the defaults_setter, timestamps, and touch # model plugins and make sure that CURRENT_TIMESTAMP is used instead of # a literalized timestamp value.
# # The reason that CURRENT_TIMESTAMP is better than a literalized version # of the timestamp is that it obeys correct transactional semantics # (all calls to CURRENT_TIMESTAMP in the same transaction return the # same timestamp, at least on some databases). # # To have current_datetime be literalized as CURRENT_TIMESTAMP for # a single dataset: # # ds = ds.extension(:current_datetime_timestamp) # # To have current_datetime be literalized as CURRENT_TIMESTAMP for all # datasets of a given database. # # DB.extension(:current_datetime_timestamp) # # Related module: Sequel::CurrentDateTimeTimestamp # module Sequel module CurrentDateTimeTimestamp module DatasetMethods # Return an instance of Sequel.datetime_class that will be literalized # as CURRENT_TIMESTAMP. def current_datetime (Sequel.datetime_class == ::Time ? Time : DateTime).now end private # Literalize custom DateTime subclass objects as CURRENT_TIMESTAMP. def literal_datetime_append(sql, v) v.is_a?(DateTime) ? literal_append(sql, Sequel::CURRENT_TIMESTAMP) : super end # Literalize custom Time subclass objects as CURRENT_TIMESTAMP. def literal_time_append(sql, v) v.is_a?(Time) ? literal_append(sql, Sequel::CURRENT_TIMESTAMP) : super end end # Time subclass literalized as CURRENT_TIMESTAMP class Time < ::Time; end # DateTime subclass literalized as CURRENT_TIMESTAMP class DateTime < ::DateTime; end end Dataset.register_extension(:current_datetime_timestamp, CurrentDateTimeTimestamp::DatasetMethods) end sequel-5.63.0/lib/sequel/extensions/dataset_source_alias.rb000066400000000000000000000061031434214120600240460ustar00rootroot00000000000000# frozen-string-literal: true # # The dataset_source_alias extension changes Sequel's # default behavior of automatically aliasing datasets # from using t1, t2, etc. to using an alias based on # the source of the dataset. Example: # # DB.from(DB.from(:a)) # # default: SELECT * FROM (SELECT * FROM a) AS t1 # # with extension: SELECT * FROM (SELECT * FROM a) AS a # # This also works when joining: # # DB[:a].join(DB[:b], [:id]) # # SELECT * FROM a INNER JOIN (SELECT * FROM b) AS b USING (id) # # To avoid conflicting aliases, this attempts to alias tables # uniquely if it detects a conflict: # # DB.from(:a, DB.from(:a)) # # SELECT * FROM a, (SELECT * FROM a) AS a_0 # # Note that not all conflicts are correctly detected and handled. # It is encouraged to alias your datasets manually instead of # relying on the auto-aliasing if there would be a conflict. # # In the places where Sequel cannot determine the # appropriate alias to use for the dataset, it will fallback to # the standard t1, t2, etc. aliasing. # # You can load this extension into specific datasets: # # ds = DB[:table] # ds = ds.extension(:dataset_source_alias) # # Or you can load it into all of a database's datasets, which # is probably the desired behavior if you are using this extension: # # DB.extension(:dataset_source_alias) # # Related module: Sequel::Dataset::DatasetSourceAlias # module Sequel class Dataset module DatasetSourceAlias # Preprocess the list of sources and attempt to alias any # datasets in the sources to the first source of the respective # dataset. 
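# For example (illustrative, assuming tables a and b): # # DB.from(DB[:a], DB[:b]) # # SELECT * FROM (SELECT * FROM a) AS a, (SELECT * FROM b) AS b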
def from(*source, &block) virtual_row_columns(source, block) table_aliases = [] source = source.map do |s| case s when Dataset s = dataset_source_alias_expression(s, table_aliases) when Symbol, String, SQL::AliasedExpression, SQL::Identifier, SQL::QualifiedIdentifier table_aliases << alias_symbol(s) end s end super(*source, &nil) end # If a Dataset is given as the table argument, attempt to alias # it to its source. def join_table(type, table, expr=nil, options=OPTS) if table.is_a?(Dataset) && !options[:table_alias] table = dataset_source_alias_expression(table) end super end private # Attempt to automatically alias the given dataset to its source. # If the dataset cannot be automatically aliased to its source, # return it unchanged. The table_aliases argument is a list of # already used alias symbols, which will not be used as the alias. def dataset_source_alias_expression(ds, table_aliases=[]) base = ds.first_source if ds.opts[:from] case base when Symbol, String, SQL::AliasedExpression, SQL::Identifier, SQL::QualifiedIdentifier aliaz = unused_table_alias(base, table_aliases) table_aliases << aliaz ds.as(aliaz) else ds end end end register_extension(:dataset_source_alias, DatasetSourceAlias) end end sequel-5.63.0/lib/sequel/extensions/date_arithmetic.rb000066400000000000000000000233371434214120600230260ustar00rootroot00000000000000# frozen-string-literal: true # # The date_arithmetic extension adds the ability to perform database-independent # addition/subtraction of intervals to/from dates and timestamps. # # First, you need to load the extension into the database: # # DB.extension :date_arithmetic # # Then you can use the Sequel.date_add and Sequel.date_sub methods # to return Sequel expressions (this example shows the only supported # keys for the second argument): # # add = Sequel.date_add(:date_column, years: 1, months: 2, weeks: 2, days: 1) # sub = Sequel.date_sub(:date_column, hours: 1, minutes: 2, seconds: 3) # # In addition to specifying the interval as a hash, there is also # support for specifying the interval as an ActiveSupport::Duration # object: # # require 'active_support/all' # add = Sequel.date_add(:date_column, 1.years + 2.months + 3.days) # sub = Sequel.date_sub(:date_column, 1.hours + 2.minutes + 3.seconds) # # By default, values are cast to the generic timestamp type for the # database. You can override the cast type using the :cast option: # # add = Sequel.date_add(:date_column, {years: 1, months: 2, days: 3}, cast: :timestamptz) # # These expressions can be used in your datasets, or anywhere else that # Sequel expressions are allowed: # # DB[:table].select(add.as(:d)).where(sub > Sequel::CURRENT_TIMESTAMP) # # On most databases, the values you provide for years/months/days/etc. must # be numeric values and not arbitrary SQL expressions. However, on PostgreSQL # 9.4+, use of arbitrary SQL expressions is supported. # # Related module: Sequel::SQL::DateAdd # module Sequel module SQL module Builders # Return a DateAdd expression, adding an interval to the date/timestamp expr. # Options: # :cast :: Cast to the specified type instead of the default if casting def date_add(expr, interval, opts=OPTS) DateAdd.new(expr, interval, opts) end # Return a DateAdd expression, adding the negative of the interval to # the date/timestamp expr.
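# Each part of the interval is negated before being added, so, as an illustrative example, Sequel.date_sub(:date_column, days: 1) is equivalent to Sequel.date_add(:date_column, days: -1).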
# Options: # :cast :: Cast to the specified type instead of the default if casting def date_sub(expr, interval, opts=OPTS) if defined?(ActiveSupport::Duration) && interval.is_a?(ActiveSupport::Duration) interval = interval.parts end parts = {} interval.each do |k,v| case v when nil # ignore when Numeric parts[k] = -v else parts[k] = Sequel::SQL::NumericExpression.new(:*, v, -1) end end DateAdd.new(expr, parts, opts) end end # The DateAdd class represents the addition of an interval to a # date/timestamp expression. class DateAdd < GenericExpression # These methods are added to datasets using the date_arithmetic # extension, for the purposes of correctly literalizing DateAdd # expressions for the appropriate database type. module DatasetMethods DURATION_UNITS = [:years, :months, :days, :hours, :minutes, :seconds].freeze DEF_DURATION_UNITS = DURATION_UNITS.zip(DURATION_UNITS.map{|s| s.to_s.freeze}).freeze POSTGRES_DURATION_UNITS = DURATION_UNITS.zip([:years, :months, :days, :hours, :mins, :secs].map{|s| s.to_s.freeze}).freeze MYSQL_DURATION_UNITS = DURATION_UNITS.zip(DURATION_UNITS.map{|s| Sequel.lit(s.to_s.upcase[0...-1]).freeze}).freeze MSSQL_DURATION_UNITS = DURATION_UNITS.zip(DURATION_UNITS.map{|s| Sequel.lit(s.to_s[0...-1]).freeze}).freeze H2_DURATION_UNITS = DURATION_UNITS.zip(DURATION_UNITS.map{|s| s.to_s[0...-1].freeze}).freeze DERBY_DURATION_UNITS = DURATION_UNITS.zip(DURATION_UNITS.map{|s| Sequel.lit("SQL_TSI_#{s.to_s.upcase[0...-1]}").freeze}).freeze ACCESS_DURATION_UNITS = DURATION_UNITS.zip(%w'yyyy m d h n s'.map(&:freeze)).freeze DB2_DURATION_UNITS = DURATION_UNITS.zip(DURATION_UNITS.map{|s| Sequel.lit(s.to_s).freeze}).freeze # Append the SQL fragment for the DateAdd expression to the SQL query. def date_add_sql_append(sql, da) if defined?(super) return super end h = da.interval expr = da.expr cast_type = da.cast_type || Time cast = case db_type = db.database_type when :postgres casted = Sequel.cast(expr, cast_type) if db.server_version >= 90400 placeholder = [] vals = [] each_valid_interval_unit(h, POSTGRES_DURATION_UNITS) do |value, sql_unit| placeholder << "#{', ' unless placeholder.empty?}#{sql_unit} := " vals << value end interval = Sequel.function(:make_interval, Sequel.lit(placeholder, *vals)) unless vals.empty? else parts = String.new each_valid_interval_unit(h, DEF_DURATION_UNITS) do |value, sql_unit| parts << "#{value} #{sql_unit} " end interval = Sequel.cast(parts, :interval) unless parts.empty? 
end if interval return complex_expression_sql_append(sql, :+, [casted, interval]) else return literal_append(sql, casted) end when :sqlite args = [expr] each_valid_interval_unit(h, DEF_DURATION_UNITS) do |value, sql_unit| args << "#{value} #{sql_unit}" end return function_sql_append(sql, Sequel.function(:datetime, *args)) when :mysql, :hsqldb if db_type == :hsqldb # HSQLDB requires 2.2.9+ for the DATE_ADD function expr = Sequel.cast(expr, cast_type) end each_valid_interval_unit(h, MYSQL_DURATION_UNITS) do |value, sql_unit| expr = Sequel.function(:DATE_ADD, expr, Sequel.lit(["INTERVAL ", " "], value, sql_unit)) end when :mssql, :h2, :access, :sqlanywhere units = case db_type when :h2 H2_DURATION_UNITS when :access ACCESS_DURATION_UNITS else MSSQL_DURATION_UNITS end each_valid_interval_unit(h, units) do |value, sql_unit| expr = Sequel.function(:DATEADD, sql_unit, value, expr) end when :derby if expr.is_a?(Date) && !expr.is_a?(DateTime) # Work around for https://issues.apache.org/jira/browse/DERBY-896 expr = Sequel.cast_string(expr) + ' 00:00:00' end each_valid_interval_unit(h, DERBY_DURATION_UNITS) do |value, sql_unit| expr = Sequel.lit(["{fn timestampadd(#{sql_unit}, ", ", timestamp(", "))}"], value, expr) end when :oracle each_valid_interval_unit(h, MYSQL_DURATION_UNITS) do |value, sql_unit| expr = Sequel.+(expr, Sequel.lit(["INTERVAL ", " "], value.to_s, sql_unit)) end when :db2 expr = Sequel.cast(expr, cast_type) each_valid_interval_unit(h, DB2_DURATION_UNITS) do |value, sql_unit| expr = Sequel.+(expr, Sequel.lit(["", " "], value, sql_unit)) end false else raise Error, "date arithmetic is not implemented on #{db.database_type}" end if cast expr = Sequel.cast(expr, cast_type) end literal_append(sql, expr) end private # Yield the value in the interval for each of the units # present in the interval, along with the SQL fragment # representing the unit name. Returns false if any # values were yielded, true otherwise def each_valid_interval_unit(interval, units) cast = true units.each do |unit, sql_unit| if (value = interval[unit]) && value != 0 cast = false yield value, sql_unit end end cast end end # The expression that the interval is being added to. attr_reader :expr # The interval added to the expression, as a hash with # symbol keys. attr_reader :interval # The type to cast the expression to. nil if not overridden, in which cast # the generic timestamp type for the database will be used. attr_reader :cast_type # Supports two types of intervals: # Hash :: Used directly, but values cannot be plain strings. # ActiveSupport::Duration :: Converted to a hash using the interval's parts. def initialize(expr, interval, opts=OPTS) @expr = expr h = Hash.new(0) interval = interval.parts unless interval.is_a?(Hash) interval.each do |unit, value| # skip nil values next unless value # Convert weeks to days, as ActiveSupport::Duration can use weeks, # but the database-specific literalizers only support days. if unit == :weeks unit = :days value *= 7 end unless DatasetMethods::DURATION_UNITS.include?(unit) raise Sequel::Error, "Invalid key used in DateAdd interval hash: #{unit.inspect}" end # Attempt to prevent SQL injection by users who pass untrusted strings # as interval values. It doesn't make sense to support literal strings, # due to the numeric adding below. 
if value.is_a?(String) raise Sequel::InvalidValue, "cannot provide String value as interval part: #{value.inspect}" end h[unit] += value end @interval = Hash[h].freeze @cast_type = opts[:cast] if opts[:cast] freeze end to_s_method :date_add_sql end end Dataset.register_extension(:date_arithmetic, SQL::DateAdd::DatasetMethods) end sequel-5.63.0/lib/sequel/extensions/date_parse_input_handler.rb000066400000000000000000000041101434214120600247100ustar00rootroot00000000000000# frozen-string-literal: true # # The date_parse_input_handler extension allows for configuring how input # to date parsing methods should be handled. By default, the # extension does not change behavior. However, you can use the # +Sequel.date_parse_input_handler+ method to support custom handling # of input strings to the date parsing methods. For example, if you want # to implement a length check to prevent denial of service vulnerabilities # in older versions of Ruby, you can do: # # Sequel.extension :date_parse_input_handler # Sequel.date_parse_input_handler do |string| # raise Sequel::InvalidValue, "string length (200) exceeds the limit 128" if string.bytesize > 128 # string # end # # You can also use +Sequel.date_parse_input_handler+ to modify the string # that will be passed to the parsing methods. For example, you could # truncate it: # # Sequel.date_parse_input_handler do |string| # string.b[0, 128] # end # # Be aware that modern versions of Ruby will raise an exception if # date parsing input exceeds 128 bytes. module Sequel module DateParseInputHandler def date_parse_input_handler(&block) singleton_class.class_eval do define_method(:handle_date_parse_input, &block) private :handle_date_parse_input alias handle_date_parse_input handle_date_parse_input end end # Call date parse input handler with input string. def string_to_date(string) super(handle_date_parse_input(string)) end # Call date parse input handler with input string. def string_to_datetime(string) super(handle_date_parse_input(string)) end # Call date parse input handler with input string. def string_to_time(string) super(handle_date_parse_input(string)) end private # Call date parse input handler with input string. def _date_parse(string) super(handle_date_parse_input(string)) end # Return string as-is by default, so by default behavior does not change. def handle_date_parse_input(string) string end end extend DateParseInputHandler end sequel-5.63.0/lib/sequel/extensions/datetime_parse_to_time.rb000066400000000000000000000025341434214120600244020ustar00rootroot00000000000000# frozen-string-literal: true # # This switches the default parsing of strings into Time values # from using Time.parse to using DateTime.parse.to_time. This # fixes issues when the times being parsed have no timezone # information, the implicit timezone for the Database instance # is set to +:utc+, and the timestamps being used include values # not valid in the local timezone, such as during a daylight # savings time switch. # # To load the extension: # # Sequel.extension :datetime_parse_to_time # module Sequel::DateTimeParseToTime private # Use DateTime.parse.to_time to do the conversion if the input is a string that is assumed to # be in UTC and there is no offset information in the string. def convert_input_timestamp(v, input_timezone) if v.is_a?(String) && datetime_class == Time && input_timezone == :utc && !_date_parse(v).has_key?(:offset) # :nocov: # Whether this is fully branch covered depends on the order in which the specs are run.
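# Pre-process the string through the date_parse_input_handler extension's hook, if that extension has been loaded: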
v = handle_date_parse_input(v) if respond_to?(:handle_date_parse_input, true) # :nocov: t = DateTime.parse(v).to_time case application_timezone when nil, :local t = t.localtime end t else super end rescue => e raise convert_exception_class(e, Sequel::InvalidValue) end end Sequel.extend(Sequel::DateTimeParseToTime) sequel-5.63.0/lib/sequel/extensions/duplicate_columns_handler.rb000066400000000000000000000064531434214120600251070ustar00rootroot00000000000000# frozen-string-literal: true # # The duplicate_columns_handler extension allows you to customize handling of # duplicate column names in your queries on a per-database or per-dataset level. # # For example, you may want to raise an exception if you join 2 tables together # which contains a column that will override another columns. # # To use the extension, you need to load the extension into the database: # # DB.extension :duplicate_columns_handler # # or into individual datasets: # # ds = DB[:items].extension(:duplicate_columns_handler) # # A database option is introduced: :on_duplicate_columns. It accepts a Symbol # or any object that responds to :call. # # on_duplicate_columns: :raise # on_duplicate_columns: :warn # on_duplicate_columns: :ignore # on_duplicate_columns: lambda{|columns| arbitrary_condition? ? :raise : :warn} # # You may also configure duplicate columns handling for a specific dataset: # # ds.on_duplicate_columns(:warn) # ds.on_duplicate_columns(:raise) # ds.on_duplicate_columns(:ignore) # ds.on_duplicate_columns{|columns| arbitrary_condition? ? :raise : :warn} # ds.on_duplicate_columns(lambda{|columns| arbitrary_condition? ? :raise : :warn}) # # If :raise is specified, a Sequel::DuplicateColumnError is raised. # If :warn is specified, you will receive a warning via +warn+. # If a callable is specified, it will be called. # If no on_duplicate_columns is specified, the default is :warn. # # Related module: Sequel::DuplicateColumnsHandler module Sequel module DuplicateColumnsHandler # :nocov: CALLER_ARGS = (RUBY_VERSION >= '2.0' ? [0,1] : [0]).freeze # :nocov: # Customize handling of duplicate columns for this dataset. def on_duplicate_columns(handler = (raise Error, "Must provide either an argument or a block to on_duplicate_columns" unless defined?(yield); nil), &block) raise Error, "Cannot provide both an argument and a block to on_duplicate_columns" if handler && block clone(:on_duplicate_columns=>handler||block) end private # Call handle_duplicate_columns if there are duplicate columns. def columns=(cols) if cols && cols.uniq.size != cols.size handle_duplicate_columns(cols) end super end # Invoke the appropriate behavior when duplicate columns are present. def handle_duplicate_columns(cols) message = "#{caller(*CALLER_ARGS).first}: One or more duplicate columns present in #{cols.inspect}" case duplicate_columns_handler_type(cols) when :raise raise DuplicateColumnError, message when :warn warn message end end # Try to find dataset option for on_duplicate_columns. If not present on the dataset, # use the on_duplicate_columns option on the database. If not present on the database, # default to :warn. def duplicate_columns_handler_type(cols) handler = opts.fetch(:on_duplicate_columns){db.opts.fetch(:on_duplicate_columns, :warn)} if handler.respond_to?(:call) handler.call(cols) else handler end end end # Error which is raised when duplicate columns are present in a dataset which is configured # to :raise on_duplicate_columns. 
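# # A minimal rescue sketch (assuming a dataset ds configured as above): # # begin # ds.on_duplicate_columns(:raise).all # rescue Sequel::DuplicateColumnError # # duplicate columns were returned by the query # end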
class DuplicateColumnError < Error end Dataset.register_extension(:duplicate_columns_handler, Sequel::DuplicateColumnsHandler) end sequel-5.63.0/lib/sequel/extensions/empty_array_consider_nulls.rb000066400000000000000000000022261434214120600253310ustar00rootroot00000000000000# frozen-string-literal: true # # This changes Sequel's literalization of IN/NOT IN with an empty # array value to consider NULL values if one of the referenced # columns is NULL: # # DB[:test].where(name: []) # # SELECT * FROM test WHERE (name != name) # DB[:test].exclude(name: []) # # SELECT * FROM test WHERE (name = name) # # The default Sequel behavior is to ignore NULLs, as the above # query is not generally optimized well by databases. # # You can load this extension into specific datasets: # # ds = DB[:table] # ds = ds.extension(:empty_array_consider_nulls) # # Or you can load it into all of a database's datasets, which # is probably the desired behavior if you are using this extension: # # DB.extension(:empty_array_consider_nulls) # # Related module: Sequel::EmptyArrayConsiderNulls # module Sequel module EmptyArrayConsiderNulls # Use an expression that returns NULL if the column value is NULL. def empty_array_value(op, cols) c = Array(cols) SQL::BooleanExpression.from_value_pairs(c.zip(c), :AND, op == :IN) end end Dataset.register_extension(:empty_array_consider_nulls, EmptyArrayConsiderNulls) end sequel-5.63.0/lib/sequel/extensions/error_sql.rb000066400000000000000000000047461434214120600217130ustar00rootroot00000000000000# frozen-string-literal: true # # The error_sql extension adds a DatabaseError#sql method # that you can use to get the sql that caused the error # to be raised. # # begin # DB.run "Invalid SQL" # rescue => e # puts e.sql # "Invalid SQL" # end # # On some databases, the error message contains part or all # of the SQL used, but on other databases, none of the SQL # used is displayed in the error message, so it can be # difficult to track down what is causing the error without # using a logger. This extension should hopefully make # debugging easier on databases that have bad error # messages. # # This extension may not work correctly in the following cases: # # * log_connection_yield is not used when executing the query. # * The underlying exception is frozen or reused. # * The underlying exception doesn't correctly record instance # variables set on it (seems to happen on JRuby when underlying # exception objects are Java exceptions). # # To load the extension into the database: # # DB.extension :error_sql # # Related module: Sequel::ErrorSQL # module Sequel class DatabaseError # Get the SQL code that caused this error to be raised. def sql # We store the error SQL in the wrapped exception instead of the # current exception, since when the error SQL is originally associated # with the wrapped exception, the current exception doesn't exist. It's # possible to copy the error SQL into the current exception, but there # doesn't seem to be a reason to do that. wrapped_exception.instance_variable_get(:@sequel_error_sql) if wrapped_exception end end module ErrorSQL # Store the SQL related to the exception with the exception, so it # is available for DatabaseError#sql later. def log_exception(exception, message) exception.instance_variable_set(:@sequel_error_sql, message) super end # If there are no loggers for this database and an exception is raised # store the SQL related to the exception with the exception, so it # is available for DatabaseError#sql later. 
def log_connection_yield(sql, conn, args=nil) if @loggers.empty? begin yield rescue => e sql = "#{connection_info(conn) if conn && log_connection_info}#{sql}#{"; #{args.inspect}" if args}" e.instance_variable_set(:@sequel_error_sql, sql) raise end else super end end end Database.register_extension(:error_sql, ErrorSQL) end sequel-5.63.0/lib/sequel/extensions/escaped_like.rb000066400000000000000000000101201434214120600222720ustar00rootroot00000000000000# frozen-string-literal: true # # The escaped_like extension adds +escaped_like+ and +escaped_ilike+ # methods to Sequel::SQL::StringMethods, which allow them to be easily # used with most of Sequel's expression objects. Example: # # DB[:table].where{string_column.escaped_like('?%', user_input)} # # user_input is 'foo': # # SELECT * FROM table WHERE string_column LIKE 'foo%' ESCAPE '\' # # user_input is '%foo': # # SELECT * FROM table WHERE string_column LIKE '\%foo%' ESCAPE '\' # # To load the extension: # # Sequel.extension :escaped_like # # Related modules: Sequel::SQL::StringMethods, Sequel::SQL::EscapedLikeExpression # module Sequel module SQL # Represents a pattern match SQL expression, where the pattern can depend # upon interpolated values in a database-dependent manner. class EscapedLikeExpression < Expression include AliasMethods include BooleanMethods include CastMethods include OrderMethods # Initialize the expression. Arguments: # expr :: Right hand side of LIKE/ILIKE operator, what you are matching against the pattern # case_sensitive :: Whether the match is case sensitive # placeholder_pattern :: The pattern to match against, with +?+ for the placeholders # placeholder_values :: The string values for each +?+ in the placeholder pattern. Should be an # array of strings, though it can be a single string if there is only # a single placeholder. def initialize(expr, case_sensitive, placeholder_pattern, placeholder_values) @expr = expr @method = case_sensitive ? :like : :ilike @pattern = placeholder_pattern unless placeholder_values.is_a?(Array) placeholder_values = [placeholder_values].freeze end @values = placeholder_values freeze end # Interpolate the pattern values into the placeholder pattern to get the final pattern, # now that we have access to the dataset. Use the expression and final pattern and # add an appropriate LIKE/ILIKE expression to the SQL being built. def to_s_append(ds, sql) i = -1 match_len = @values.length - 1 like_pattern = String.new pattern = @pattern while true previous, q, pattern = pattern.partition('?') like_pattern << previous unless q.empty? if i == match_len raise Error, "Mismatched number of placeholders (#{i+1}) and placeholder arguments (#{@values.length}) for escaped like expression: #{@pattern.inspect}" end like_pattern << ds.escape_like(@values.at(i+=1)) end if pattern.empty? unless i == match_len raise Error, "Mismatched number of placeholders (#{i+1}) and placeholder arguments (#{@values.length}) for escaped like expression: #{@pattern.inspect}" end break end end ds.literal_append(sql, Sequel.send(@method, @expr, like_pattern)) end end module StringMethods # Create a +EscapedLikeExpression+ case insensitive pattern match of the receiver # with the pattern, interpolating escaped values for each +?+ placeholder in the # pattern.
# # Sequel[:a].escaped_ilike('?%', 'A') # "a" ILIKE 'A%' ESCAPE '\' # Sequel[:a].escaped_ilike('?%', '%A') # "a" ILIKE '\%A%' ESCAPE '\' def escaped_ilike(placeholder_pattern, placeholder_values) EscapedLikeExpression.new(self, false, placeholder_pattern, placeholder_values) end # Create a +EscapedLikeExpression+ case sensitive pattern match of the receiver # with the pattern, interpolating escaped values for each +?+ placeholder in the # pattern. # # Sequel[:a].escaped_like('?%', 'A') # "a" LIKE 'A%' ESCAPE '\' # Sequel[:a].escaped_like('?%', '%A') # "a" LIKE '\%A%' ESCAPE '\' def escaped_like(placeholder_pattern, placeholder_values) EscapedLikeExpression.new(self, true, placeholder_pattern, placeholder_values) end end end end sequel-5.63.0/lib/sequel/extensions/eval_inspect.rb000066400000000000000000000126261434214120600223510ustar00rootroot00000000000000# frozen-string-literal: true # # The eval_inspect extension changes #inspect for Sequel::SQL::Expression # subclasses to return a string suitable for ruby's eval, such that # # eval(obj.inspect) == obj # # is true. The above code is true for most of ruby's simple classes such # as String, Integer, Float, and Symbol, but it's not true for classes such # as Time, Date, and BigDecimal. Sequel attempts to handle situations where # instances of these classes are a component of a Sequel expression. # # To load the extension: # # Sequel.extension :eval_inspect # # Related module: Sequel::EvalInspect # module Sequel module EvalInspect # Special case objects where inspect does not generally produce input # suitable for eval. Used by Sequel::SQL::Expression#inspect so that # it can produce a string suitable for eval even if components of the # expression have inspect methods that do not produce strings suitable # for eval. def eval_inspect(obj) case obj when BigDecimal "Kernel::BigDecimal(#{obj.to_s.inspect})" when Sequel::SQL::Blob, Sequel::LiteralString "#{obj.class}.new(#{obj.to_s.inspect})" when Sequel::SQL::ValueList "#{obj.class}.new(#{obj.to_a.inspect})" when Array "[#{obj.map{|o| eval_inspect(o)}.join(', ')}]" when Hash "{#{obj.map{|k, v| "#{eval_inspect(k)} => #{eval_inspect(v)}"}.join(', ')}}" when Time datepart = "%Y-%m-%dT" unless obj.is_a?(Sequel::SQLTime) "#{obj.class}.parse(#{obj.strftime("#{datepart}%T.%N%z").inspect})#{'.utc' if obj.utc?}" when DateTime # Ignore date of calendar reform "DateTime.parse(#{obj.strftime('%FT%T.%N%z').inspect})" when Date # Ignore offset and date of calendar reform "Date.new(#{obj.year}, #{obj.month}, #{obj.day})" else obj.inspect end end end extend EvalInspect module SQL class Expression alias inspect inspect # Attempt to produce a string suitable for eval, such that: # # eval(obj.inspect) == obj def inspect # Assume by default that the object can be recreated by calling # self.class.new with any attr_reader values defined on the class, # in the order they were defined. klass = self.class args = inspect_args.map do |arg| if arg.is_a?(String) && arg =~ /\A\*/ # Special case string arguments starting with *, indicating that # they should return an array to be splatted as the remaining arguments. # Allow calling private methods to get inspect output. send(arg.sub('*', '')).map{|a| Sequel.eval_inspect(a)}.join(', ') else # Allow calling private methods to get inspect output. Sequel.eval_inspect(send(arg)) end end "#{klass}.#{inspect_new_method}(#{args.join(', ')})" end private # Which attribute values to use in the inspect string.
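# Subclasses with different initializer signatures override this, as seen in the subclasses below.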
def inspect_args self.class.comparison_attrs end # Use the new method by default for creating new objects. def inspect_new_method :new end end class ComplexExpression private # ComplexExpression's initializer uses a splat for the operator arguments. def inspect_args [:op, "*args"] end end class Constant # Constants to lookup in the Sequel module. INSPECT_LOOKUPS = [:CURRENT_DATE, :CURRENT_TIMESTAMP, :CURRENT_TIME, :SQLTRUE, :SQLFALSE, :NULL, :NOTNULL] # Reference the constant in the Sequel module if there is # one that matches. def inspect INSPECT_LOOKUPS.each do |c| return "Sequel::#{c}" if Sequel.const_get(c) == self end super end end class CaseExpression private # CaseExpression's initializer checks whether an argument was # provided, to differentiate CASE WHEN from CASE NULL WHEN, so # check if an expression was provided, and only include the # expression in the inspect output if so. def inspect_args if expression? [:conditions, :default, :expression] else [:conditions, :default] end end end class Function private # Function uses a new! method for creating functions with options, # since Function.new does not allow for an options hash. def inspect_new_method :new! end end class JoinOnClause private # JoinOnClause's initializer takes the on argument as the first argument # instead of the last. def inspect_args [:on, :join_type, :table_expr] end end class JoinUsingClause private # JoinUsingClause's initializer takes the using argument as the first argument # instead of the last. def inspect_args [:using, :join_type, :table_expr] end end class OrderedExpression private # OrderedExpression's initializer takes the :nulls information inside a hash, # so if a NULL order was given, include a hash with that information. def inspect_args if nulls [:expression, :descending, :opts_hash] else [:expression, :descending] end end # A hash of null information suitable for passing to the initializer. def opts_hash {:nulls=>nulls} end end end end sequel-5.63.0/lib/sequel/extensions/exclude_or_null.rb000066400000000000000000000044751434214120600230630ustar00rootroot00000000000000# frozen-string-literal: true # # The exclude_or_null extension adds Dataset#exclude_or_null and # Dataset#exclude_or_null_having. These methods are similar to # Dataset#exclude and Dataset#exclude_having, except that they # will also exclude rows where the condition IS NULL. # # DB[:table].exclude_or_null(foo: 1) # # SELECT * FROM table WHERE NOT coalesce((foo = 1), false) # # DB[:table].exclude_or_null{foo(bar) =~ 1} # # SELECT * FROM table WHERE NOT coalesce((foo(bar) = 1), false) # # You can load this extension into specific datasets: # # ds = DB[:table] # ds = ds.extension(:exclude_or_null) # # Or you can load it into all of a database's datasets, which # is probably the desired behavior if you are using this extension: # # DB.extension(:exclude_or_null) # # Note, this extension works correctly on PostgreSQL, SQLite, MySQL, # H2, and HSQLDB. However, it does not work correctly on Microsoft SQL Server, # Oracle, DB2, SQLAnywhere, or Derby. # # Related module: Sequel::ExcludeOrNull # module Sequel module ExcludeOrNull # Performs the inverse of Dataset#where, but also excludes rows where the given # condition IS NULL.
# # DB[:items].exclude_or_null(category: 'software') # SELECT * FROM items WHERE NOT coalesce((category = 'software'), false) # # DB[:items].exclude_or_null(category: 'software', id: 3) # # SELECT * FROM items WHERE NOT coalesce(((category = 'software') AND (id = 3)), false) def exclude_or_null(*cond, &block) add_filter(:where, cond, :or_null, &block) end # The same as exclude_or_null, but affecting the HAVING clause instead of the # WHERE clause. # # DB[:items].select_group(:name).exclude_or_null_having{count(name) < 2} # # SELECT name FROM items GROUP BY name HAVING NOT coalesce((count(name) < 2), false) def exclude_or_null_having(*cond, &block) add_filter(:having, cond, :or_null, &block) end private # Recognize :or_null value for invert, returning an expression for # the invert of the condition or the condition being null. def _invert_filter(cond, invert) if invert == :or_null ~SQL::Function.new(:coalesce, cond, SQL::Constants::SQLFALSE) else super end end end Dataset.register_extension(:exclude_or_null, ExcludeOrNull) end sequel-5.63.0/lib/sequel/extensions/fiber_concurrency.rb000066400000000000000000000012041434214120600233700ustar00rootroot00000000000000# frozen-string-literal: true # # The fiber_concurrency extension changes the default concurrency # primitive in Sequel to be Fiber.current instead of Thread.current. # This is the value used in various hash keys to implement safe # concurrency (thread-safe concurrency by default, fiber-safe # concurrency with this extension). It can be enabled via: # # Sequel.extension :fiber_concurrency # # Related module: Sequel::FiberConcurrency require 'fiber' module Sequel module FiberConcurrency # Make the current concurrency primitive be Fiber.current. def current Fiber.current end end extend FiberConcurrency end sequel-5.63.0/lib/sequel/extensions/freeze_datasets.rb000066400000000000000000000001271434214120600230400ustar00rootroot00000000000000# frozen-string-literal: true Sequel::Database.register_extension(:freeze_datasets){} sequel-5.63.0/lib/sequel/extensions/from_block.rb000066400000000000000000000001221434214120600220010ustar00rootroot00000000000000# frozen-string-literal: true Sequel::Database.register_extension(:from_block){} sequel-5.63.0/lib/sequel/extensions/graph_each.rb000066400000000000000000000053241434214120600217550ustar00rootroot00000000000000# frozen-string-literal: true # # The graph_each extension adds Dataset#graph_each and # makes Dataset#each call #graph_each if the dataset has been graphed. # Dataset#graph_each splits result hashes into subhashes per table: # # DB[:a].graph(:b, id: :b_id).all # # => {:a=>{:id=>1, :b_id=>2}, :b=>{:id=>2}} # # You can load this extension into specific datasets: # # ds = DB[:table] # ds = ds.extension(:graph_each) # # Or you can load it into all of a database's datasets, which # is probably the desired behavior if you are using this extension: # # DB.extension(:graph_each) # # Related module: Sequel::GraphEach # module Sequel module GraphEach # Call graph_each for graphed datasets that are not being eager graphed. def each if @opts[:graph] && !@opts[:eager_graph] graph_each{|r| yield r} else super end end # Call graph_each for graphed datasets that are not being eager graphed. def with_sql_each(sql) if @opts[:graph] && !@opts[:eager_graph] graph_each(sql){|r| yield r} else super end end private # Fetch the rows, split them into component table parts, # transform and run the row_proc on each part (if applicable), # and yield a hash of the parts.
def graph_each(sql=select_sql) # Reject tables with nil datasets, as they are excluded from # the result set datasets = @opts[:graph][:table_aliases].to_a.reject{|ta,ds| ds.nil?} # Get just the list of table aliases into a local variable, for speed table_aliases = datasets.map{|ta,ds| ta} # Get an array of arrays, one for each dataset, with # the necessary information about each dataset, for speed datasets = datasets.map{|ta, ds| [ta, ds, ds.row_proc]} # Use the manually set graph aliases, if any, otherwise # use the ones automatically created by .graph column_aliases = @opts[:graph][:column_aliases] fetch_rows(sql) do |r| graph = {} # Create the sub hashes, one per table table_aliases.each{|ta| graph[ta]={}} # Split the result set based on the column aliases # If there are columns in the result set that are # not in column_aliases, they are ignored column_aliases.each do |col_alias, tc| ta, column = tc graph[ta][column] = r[col_alias] end # For each dataset run the row_proc if applicable datasets.each do |ta,ds,rp| g = graph[ta] graph[ta] = if g.values.any?{|x| !x.nil?} rp ? rp.call(g) : g else nil end end yield graph end self end end Dataset.register_extension(:graph_each, GraphEach) end sequel-5.63.0/lib/sequel/extensions/identifier_mangling.rb000066400000000000000000000142461434214120600236730ustar00rootroot00000000000000# frozen-string-literal: true # # The identifier_mangling extension adds support for changing # the default identifier mangling for datasets, as well as all # datasets for a given database. # # # Use uppercase identifiers in database, and lowercase in ruby. # # Default behavior of Sequel, as the SQL standard behavior # # folds unquoted identifiers to uppercase. # DB.identifier_input_method = :upcase # DB.identifier_output_method = :downcase # # # Don't modify identifiers. # # Default behavior of Sequel on PostgreSQL, MySQL, SQLite, # # as they fold unquoted identifiers to lowercase. # DB.identifier_input_method = nil # DB.identifier_output_method = nil # # You can also choose to turn on or off identifier quoting: # # # Quote identifiers. Sequel's default behavior. # DB.quote_identifiers = true # # # Don't quote identifiers. Sequel's default behavior on DB2. # DB.quote_identifiers = false # # To modify the identifiers on a per-dataset basis: # # ds = DB[:a].with_identifier_input_method(:upcase). # with_identifier_output_method(:downcase).
#        with_quote_identifiers(true)
#
# To load the extension into the database:
#
#   DB.extension :identifier_mangling
#
# Related modules: Sequel::IdentifierMangling::DatabaseMethods,
# Sequel::IdentifierMangling::DatasetMethods

#
module Sequel
  module IdentifierMangling
    module DatabaseMethods
      def self.extended(db)
        db.instance_exec do
          @identifier_input_method = nil
          @identifier_output_method = nil
          @quote_identifiers = nil
          reset_identifier_mangling
          extend_datasets(DatasetMethods)
        end
      end

      # The identifier input method to use by default for this database (default: adapter default)
      attr_reader :identifier_input_method

      # The identifier output method to use by default for this database (default: adapter default)
      attr_reader :identifier_output_method

      # Set the method to call on identifiers going into the database:
      #
      #   DB[:items] # SELECT * FROM items
      #   DB.identifier_input_method = :upcase
      #   DB[:items] # SELECT * FROM ITEMS
      def identifier_input_method=(v)
        reset_default_dataset
        @identifier_input_method = v
      end

      # Set the method to call on identifiers coming from the database:
      #
      #   DB[:items].first # {:id=>1, :name=>'foo'}
      #   DB.identifier_output_method = :upcase
      #   DB[:items].first # {:ID=>1, :NAME=>'foo'}
      def identifier_output_method=(v)
        reset_default_dataset
        @identifier_output_method = v
      end

      # Set whether to quote identifiers (columns and tables) for this database:
      #
      #   DB[:items] # SELECT * FROM items
      #   DB.quote_identifiers = true
      #   DB[:items] # SELECT * FROM "items"
      def quote_identifiers=(v)
        reset_default_dataset
        @quote_identifiers = v
      end

      # Returns true if the database quotes identifiers.
      def quote_identifiers?
        @quote_identifiers
      end

      private

      # Return a dataset that uses the default identifier input and output methods
      # for this database. Used when parsing metadata so that column symbols are
      # returned as expected.
      def _metadata_dataset
        super.
          with_identifier_input_method(identifier_input_method_default).
          with_identifier_output_method(identifier_output_method_default)
      end

      # Upcase identifiers on input if database folds unquoted identifiers to
      # uppercase.
      def identifier_input_method_default
        return super if defined?(super)
        :upcase if folds_unquoted_identifiers_to_uppercase?
      end

      # Downcase identifiers on output if database folds unquoted identifiers to
      # uppercase.
      def identifier_output_method_default
        return super if defined?(super)
        :downcase if folds_unquoted_identifiers_to_uppercase?
      end

      # Reset the identifier mangling options. Overrides any already set on
      # the instance. Only for internal use by shared adapters.
      def reset_identifier_mangling
        @quote_identifiers = @opts.fetch(:quote_identifiers, quote_identifiers_default)
        @identifier_input_method = @opts.fetch(:identifier_input_method, identifier_input_method_default)
        @identifier_output_method = @opts.fetch(:identifier_output_method, identifier_output_method_default)
        reset_default_dataset
      end
    end

    module DatasetMethods
      # The String instance method to call on identifiers before sending them to
      # the database.
      def identifier_input_method
        @opts.fetch(:identifier_input_method, db.identifier_input_method)
      end

      # The String instance method to call on identifiers returned from
      # the database.
      def identifier_output_method
        @opts.fetch(:identifier_output_method, db.identifier_output_method)
      end

      # Check with the database to see if identifier quoting is enabled
      def quote_identifiers?
        @opts.fetch(:quote_identifiers, db.quote_identifiers?)
      end

      # Return a modified dataset with identifier_input_method set.
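      #
      # For example (a sketch):
      #
      #   ds = DB[:items].with_identifier_input_method(:upcase)
      #   # identifiers in SQL generated by ds are sent to the database uppercased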
      def with_identifier_input_method(meth)
        clone(:identifier_input_method=>meth, :skip_symbol_cache=>true)
      end

      # Return a modified dataset with identifier_output_method set.
      def with_identifier_output_method(meth)
        clone(:identifier_output_method=>meth)
      end

      private

      # Convert the identifier to the version used in the database via
      # identifier_input_method.
      def input_identifier(v)
        (i = identifier_input_method) ? v.to_s.public_send(i) : v.to_s
      end

      # Modify the identifier returned from the database based on the
      # identifier_output_method.
      def output_identifier(v)
        v = 'untitled' if v == ''
        (i = identifier_output_method) ? v.to_s.public_send(i).to_sym : v.to_sym
      end

      def non_sql_option?(key)
        super || key == :identifier_input_method || key == :identifier_output_method
      end
    end
  end

  Database.register_extension(:identifier_mangling, IdentifierMangling::DatabaseMethods)
end
sequel-5.63.0/lib/sequel/extensions/index_caching.rb000066400000000000000000000066161434214120600224630ustar00rootroot00000000000000# frozen-string-literal: true
#
# The index_caching extension adds a few methods to Sequel::Database
# that make it easy to dump information about database indexes to a file,
# and load it from that file. Loading index information from a
# dumped file is faster than parsing it from the database, so this
# can save bootup time for applications with large numbers of indexes.
#
# Basic usage in application code:
#
#   DB = Sequel.connect('...')
#   DB.extension :index_caching
#   DB.load_index_cache('/path/to/index_cache.dump')
#
#   # load model files
#
# Then, whenever database indexes are modified, write a new cached
# file. You can do that with bin/sequel's -X option:
#
#   bin/sequel -X /path/to/index_cache.dump postgres://...
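#
# Or dump it from application code (a sketch; +dump_index_cache+ is one of
# the methods this extension adds, shown below):
#
#   DB.dump_index_cache('/path/to/index_cache.dump')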
# # Alternatively, if you don't want to dump the index information for # all tables, and you don't worry about race conditions, you can # choose to use the following in your application code: # # DB = Sequel.connect('...') # DB.extension :index_caching # DB.load_index_cache?('/path/to/index_cache.dump') # # # load model files # # DB.dump_index_cache?('/path/to/index_cache.dump') # # With this method, you just have to delete the index dump file if # the schema is modified, and the application will recreate it for you # using just the tables that your models use. # # Note that it is up to the application to ensure that the dumped # index cache reflects the current state of the database. Sequel # does no checking to ensure this, as checking would take time and the # purpose of this code is to take a shortcut. # # The index cache is dumped in Marshal format, since it is the fastest # and it handles all ruby objects used in the indexes hash. Because of this, # you should not attempt to load from an untrusted file. # # Related module: Sequel::IndexCaching # module Sequel module IndexCaching # Set index cache to the empty hash. def self.extended(db) db.instance_variable_set(:@indexes, {}) end # Dump the index cache to the filename given in Marshal format. def dump_index_cache(file) File.open(file, 'wb'){|f| f.write(Marshal.dump(@indexes))} nil end # Dump the index cache to the filename given unless the file # already exists. def dump_index_cache?(file) dump_index_cache(file) unless File.exist?(file) end # Replace the index cache with the data from the given file, which # should be in Marshal format. def load_index_cache(file) @indexes = Marshal.load(File.read(file)) nil end # Replace the index cache with the data from the given file if the # file exists. def load_index_cache?(file) load_index_cache(file) if File.exist?(file) end # If no options are provided and there is cached index information for # the table, return the cached information instead of querying the # database. def indexes(table, opts=OPTS) return super unless opts.empty? quoted_name = literal(table) if v = Sequel.synchronize{@indexes[quoted_name]} return v end result = super Sequel.synchronize{@indexes[quoted_name] = result} result end private # Remove the index cache for the given schema name def remove_cached_schema(table) k = quote_schema_table(table) Sequel.synchronize{@indexes.delete(k)} super end end Database.register_extension(:index_caching, IndexCaching) end sequel-5.63.0/lib/sequel/extensions/inflector.rb000066400000000000000000000220371434214120600216610ustar00rootroot00000000000000# frozen-string-literal: true # # The inflector extension adds inflection instance methods to String, which allows the easy transformation of # words from singular to plural, class names to table names, modularized class # names to ones without, and class names to foreign keys. It exists for # backwards compatibility to legacy Sequel code. # # To load the extension: # # Sequel.extension :inflector # # Related module: String::Inflections class String # This module acts as a singleton returned/yielded by String.inflections, # which is used to override or specify additional inflection rules. Examples: # # String.inflections do |inflect| # inflect.plural /^(ox)$/i, '\1\2en' # inflect.singular /^(ox)en/i, '\1' # # inflect.irregular 'octopus', 'octopi' # # inflect.uncountable "equipment" # end # # New rules are added at the top. 
So in the example above, the irregular rule for octopus will now be the first of the
  # pluralization and singularization rules that is run. This guarantees that your rules run before any of the rules that may
  # already have been loaded.
  module Inflections
    @plurals, @singulars, @uncountables = [], [], []

    class << self
      # Array of 2 element arrays, first containing a regex, and the second containing a substitution pattern, used for pluralization.
      attr_reader :plurals

      # Array of 2 element arrays, first containing a regex, and the second containing a substitution pattern, used for singularization.
      attr_reader :singulars

      # Array of strings for words where the singular form is the same as the plural form
      attr_reader :uncountables
    end

    # Clears the loaded inflections within a given scope (default is :all). Give the scope as a symbol of the inflection type,
    # the options are: :plurals, :singulars, :uncountables
    #
    # Examples:
    #   clear :all
    #   clear :plurals
    def self.clear(scope = :all)
      case scope
      when :all
        @plurals, @singulars, @uncountables = [], [], []
      else
        instance_variable_set("@#{scope}", [])
      end
    end

    # Specifies a new irregular that applies to both pluralization and singularization at the same time. This can only be used
    # for strings, not regular expressions. You simply pass the irregular in singular and plural form.
    #
    # Examples:
    #   irregular 'octopus', 'octopi'
    #   irregular 'person', 'people'
    def self.irregular(singular, plural)
      plural(Regexp.new("(#{singular[0,1]})#{singular[1..-1]}$", "i"), '\1' + plural[1..-1])
      singular(Regexp.new("(#{plural[0,1]})#{plural[1..-1]}$", "i"), '\1' + singular[1..-1])
    end

    # Specifies a new pluralization rule and its replacement. The rule can either be a string or a regular expression.
    # The replacement should always be a string that may include references to the matched data from the rule.
    #
    # Example:
    #   plural(/(x|ch|ss|sh)$/i, '\1es')
    def self.plural(rule, replacement)
      @plurals.insert(0, [rule, replacement])
    end

    # Specifies a new singularization rule and its replacement. The rule can either be a string or a regular expression.
    # The replacement should always be a string that may include references to the matched data from the rule.
    #
    # Example:
    #   singular(/([^aeiouy]|qu)ies$/i, '\1y')
    def self.singular(rule, replacement)
      @singulars.insert(0, [rule, replacement])
    end

    # Add uncountable words that shouldn't be inflected.
    #
    # Examples:
    #   uncountable "money"
    #   uncountable "money", "information"
    #   uncountable %w( money information rice )
    def self.uncountable(*words)
      (@uncountables << words).flatten!
    end

    require_relative '../model/default_inflections'
    instance_exec(&Sequel::DEFAULT_INFLECTIONS_PROC)
  end

  # Yield the Inflections module if a block is given, and return
  # the Inflections module.
  def self.inflections
    yield Inflections if defined?(yield)
    Inflections
  end

  %w'classify constantize dasherize demodulize foreign_key humanize pluralize singularize tableize underscore'.each do |m|
    # :nocov:
    if method_defined?(m)
      alias_method(m, m)
    end
    # :nocov:
  end

  # By default, camelize converts the string to UpperCamelCase. If the argument to camelize
  # is set to :lower then camelize produces lowerCamelCase.
  #
  # camelize will also convert '/' to '::' which is useful for converting paths to namespaces
  #
  # Examples
  #   "active_record".camelize #=> "ActiveRecord"
  #   "active_record".camelize(:lower) #=> "activeRecord"
  #   "active_record/errors".camelize #=> "ActiveRecord::Errors"
  #   "active_record/errors".camelize(:lower) #=> "activeRecord::Errors"
  def camelize(first_letter_in_uppercase = :upper)
    s = gsub(/\/(.?)/){|x| "::#{x[-1..-1].upcase unless x == '/'}"}.gsub(/(^|_)(.)/){|x| x[-1..-1].upcase}
    s[0...1] = s[0...1].downcase unless first_letter_in_uppercase == :upper
    s
  end
  alias_method :camelcase, :camelize

  # Singularizes and camelizes the string. Also strips out all characters preceding
  # and including a period (".").
  #
  # Examples
  #   "egg_and_hams".classify #=> "EggAndHam"
  #   "post".classify #=> "Post"
  #   "schema.post".classify #=> "Post"
  def classify
    sub(/.*\./, '').singularize.camelize
  end

  # Constantize tries to find a declared constant with the name specified
  # in the string. It raises a NameError when the name is not in CamelCase
  # or is not initialized.
  #
  # Examples
  #   "Module".constantize #=> Module
  #   "Class".constantize #=> Class
  def constantize
    raise(NameError, "#{inspect} is not a valid constant name!") unless m = /\A(?:::)?([A-Z]\w*(?:::[A-Z]\w*)*)\z/.match(self)
    Object.module_eval("::#{m[1]}", __FILE__, __LINE__)
  end

  # Replaces underscores with dashes in the string.
  #
  # Example
  #   "puni_puni".dasherize #=> "puni-puni"
  def dasherize
    gsub('_', '-')
  end

  # Removes the module part from the expression in the string
  #
  # Examples
  #   "ActiveRecord::CoreExtensions::String::Inflections".demodulize #=> "Inflections"
  #   "Inflections".demodulize #=> "Inflections"
  def demodulize
    gsub(/^.*::/, '')
  end

  # Creates a foreign key name from a class name.
  # +use_underscore+ sets whether the method should put '_' between the name and 'id'.
  #
  # Examples
  #   "Message".foreign_key #=> "message_id"
  #   "Message".foreign_key(false) #=> "messageid"
  #   "Admin::Post".foreign_key #=> "post_id"
  def foreign_key(use_underscore = true)
    "#{demodulize.underscore}#{'_' if use_underscore}id"
  end

  # Capitalizes the first word and turns underscores into spaces and strips _id.
  # Like titleize, this is meant for creating pretty output.
  #
  # Examples
  #   "employee_salary" #=> "Employee salary"
  #   "author_id" #=> "Author"
  def humanize
    gsub(/_id$/, "").gsub('_', " ").capitalize
  end

  # Returns the plural form of the word in the string.
  #
  # Examples
  #   "post".pluralize #=> "posts"
  #   "octopus".pluralize #=> "octopi"
  #   "sheep".pluralize #=> "sheep"
  #   "words".pluralize #=> "words"
  #   "the blue mailman".pluralize #=> "the blue mailmen"
  #   "CamelOctopus".pluralize #=> "CamelOctopi"
  def pluralize
    result = dup
    Inflections.plurals.each{|(rule, replacement)| break if result.gsub!(rule, replacement)} unless Inflections.uncountables.include?(downcase)
    result
  end

  # The reverse of pluralize, returns the singular form of a word in a string.
  #
  # Examples
  #   "posts".singularize #=> "post"
  #   "octopi".singularize #=> "octopus"
  #   "sheep".singularize #=> "sheep"
  #   "word".singularize #=> "word"
  #   "the blue mailmen".singularize #=> "the blue mailman"
  #   "CamelOctopi".singularize #=> "CamelOctopus"
  def singularize
    result = dup
    Inflections.singulars.each{|(rule, replacement)| break if result.gsub!(rule, replacement)} unless Inflections.uncountables.include?(downcase)
    result
  end

  # Underscores and pluralizes the string.
  #
  # Examples
  #   "RawScaledScorer".tableize #=> "raw_scaled_scorers"
  #   "egg_and_ham".tableize #=> "egg_and_hams"
  #   "fancyCategory".tableize #=> "fancy_categories"
  def tableize
    underscore.pluralize
  end

  # Capitalizes all the words and replaces some characters in the string to create
  # a nicer looking title. Titleize is meant for creating pretty output.
  #
  # titleize is also aliased as titlecase
  #
  # Examples
  #   "man from the boondocks".titleize #=> "Man From The Boondocks"
  #   "x-men: the last stand".titleize #=> "X Men: The Last Stand"
  def titleize
    underscore.humanize.gsub(/\b([a-z])/){|x| x[-1..-1].upcase}
  end
  alias_method :titlecase, :titleize

  # The reverse of camelize. Makes an underscored form from the expression in the string.
  # Also changes '::' to '/' to convert namespaces to paths.
  #
  # Examples
  #   "ActiveRecord".underscore #=> "active_record"
  #   "ActiveRecord::Errors".underscore #=> "active_record/errors"
  def underscore
    gsub(/::/, '/').gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').
      gsub(/([a-z\d])([A-Z])/,'\1_\2').tr("-", "_").downcase
  end
end
sequel-5.63.0/lib/sequel/extensions/integer64.rb000066400000000000000000000020121434214120600214720ustar00rootroot00000000000000# frozen-string-literal: true
#
# The integer64 extension changes the default type used for Integer
# to be the same type as used for :Bignum. In general, this means that
# instead of Integer resulting in a 32-bit database integer type, it will
# result in a 64-bit database integer type. This affects the default
# type used for primary_key and foreign_key when using the schema
# modification methods.
#
# Note that it doesn't make sense to use this extension on SQLite, since
# the integer type will automatically handle 64-bit integers, and it treats
# the integer type specially when the column is also the primary key.
#
# To load the extension into the database:
#
#   DB.extension :integer64
#
# Related module: Sequel::Integer64
#
module Sequel
  module Integer64
    private

    # Use same type as used for :Bignum by default for generic integer value.
    def type_literal_generic_integer(column)
      type_literal_generic_bignum_symbol(column)
    end
  end

  Database.register_extension(:integer64, Integer64)
end
sequel-5.63.0/lib/sequel/extensions/is_distinct_from.rb000066400000000000000000000102341434214120600232270ustar00rootroot00000000000000# frozen-string-literal: true
#
# The is_distinct_from extension adds the ability to use the
# SQL standard IS DISTINCT FROM operator, which is similar to the
# not equals operator, except that NULL values are considered
# equal. PostgreSQL, SQLite 3.39+, and H2 currently support this operator. On
# other databases, support is emulated.
#
# First, you need to load the extension into the database:
#
#   DB.extension :is_distinct_from
#
# Then you can use Sequel.is_distinct_from to create the expression
# objects:
#
#   expr = Sequel.is_distinct_from(:column_a, :column_b)
#   # (column_a IS DISTINCT FROM column_b)
#
# You can also use the +is_distinct_from+ method on most Sequel expressions:
#
#   expr = Sequel[:column_a].is_distinct_from(:column_b)
#   # (column_a IS DISTINCT FROM column_b)
#
# These expressions can be used in your datasets, or anywhere else that
# Sequel expressions are allowed:
#
#   DB[:table].where(expr)
#
# Related module: Sequel::SQL::IsDistinctFrom
#
module Sequel
  module SQL
    module Builders
      # Return an IsDistinctFrom expression object, using the IS DISTINCT FROM operator
      # with the given left hand side and right hand side.
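      #
      # On databases without native support, the operator is emulated using
      # a CASE expression, roughly (a sketch of the generated SQL):
      #
      #   (CASE WHEN ((a = b) OR ((a IS NULL) AND (b IS NULL))) THEN 0 ELSE 1 END = 1)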
      def is_distinct_from(lhs, rhs)
        BooleanExpression.new(:NOOP, IsDistinctFrom.new(lhs, rhs))
      end
    end

    # Represents an SQL expression using the IS DISTINCT FROM operator.
    class IsDistinctFrom < GenericExpression
      # These methods are added to expressions, allowing them to return IS DISTINCT
      # FROM expressions based on the receiving expression.
      module Methods
        # Return an IsDistinctFrom expression, using the IS DISTINCT FROM operator,
        # with the receiver as the left hand side and the argument as the right hand side.
        def is_distinct_from(rhs)
          BooleanExpression.new(:NOOP, IsDistinctFrom.new(self, rhs))
        end
      end

      # These methods are added to datasets using the is_distinct_from
      # extension, for the purposes of correctly literalizing IsDistinctFrom
      # expressions for the appropriate database type.
      module DatasetMethods
        # Append the SQL fragment for the IS DISTINCT FROM expression to the SQL query.
        def is_distinct_from_sql_append(sql, idf)
          lhs = idf.lhs
          rhs = idf.rhs
          if supports_is_distinct_from?
            sql << "("
            literal_append(sql, lhs)
            sql << " IS DISTINCT FROM "
            literal_append(sql, rhs)
            sql << ")"
          elsif db.database_type == :derby && (lhs == nil || rhs == nil)
            if lhs == nil && rhs == nil
              sql << literal_false
            elsif lhs == nil
              literal_append(sql, ~Sequel.expr(rhs=>nil))
            else
              literal_append(sql, ~Sequel.expr(lhs=>nil))
            end
          else
            literal_append(sql, Sequel.case({(Sequel.expr(lhs=>rhs) | [[lhs, nil], [rhs, nil]]) => 0}, 1) => 1)
          end
        end

        private

        # Whether the database supports IS DISTINCT FROM.
        def supports_is_distinct_from?
          if defined?(super)
            return super
          end

          case db.database_type
          when :postgres, :h2
            true
          when :sqlite
            db.sqlite_version >= 33900
          else
            false
          end
        end
      end

      # The left hand side of the IS DISTINCT FROM expression.
      attr_reader :lhs

      # The right hand side of the IS DISTINCT FROM expression.
      attr_reader :rhs

      def initialize(lhs, rhs)
        @lhs = lhs
        @rhs = rhs
      end

      to_s_method :is_distinct_from_sql
    end
  end

  class SQL::GenericExpression
    include SQL::IsDistinctFrom::Methods
  end

  class LiteralString
    include SQL::IsDistinctFrom::Methods
  end

  Dataset.register_extension(:is_distinct_from, SQL::IsDistinctFrom::DatasetMethods)
end

# :nocov:
if Sequel.core_extensions?
  class Symbol
    include Sequel::SQL::IsDistinctFrom::Methods
  end
end

if defined?(Sequel::CoreRefinements)
  module Sequel::CoreRefinements
    refine Symbol do
      send INCLUDE_METH, Sequel::SQL::IsDistinctFrom::Methods
    end
  end
end
# :nocov:
sequel-5.63.0/lib/sequel/extensions/looser_typecasting.rb000066400000000000000000000025721434214120600236110ustar00rootroot00000000000000# frozen-string-literal: true
#
# The LooserTypecasting extension loosens the default database typecasting
# for the following types:
#
# :float :: use to_f instead of Float()
# :integer :: use to_i instead of Integer()
# :decimal :: use 0.0 for unsupported strings
# :string :: silently allow hash and array conversion to string
#
# This also removes bytesize checks for string inputs for float, integer
# and decimal conversions.
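#
# For example, once the extension is loaded (a sketch;
# Database#typecast_value is the method used for typecasting):
#
#   DB.typecast_value(:integer, 'foo') # => 0 (instead of raising Sequel::InvalidValue)
#   DB.typecast_value(:float, 'foo')   # => 0.0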
#
# To load the extension into the database:
#
#   DB.extension :looser_typecasting
#
# Related module: Sequel::LooserTypecasting

#
module Sequel
  module LooserTypecasting
    private

    # Typecast the value to a Float using to_f instead of Kernel.Float
    def typecast_value_float(value)
      value.to_f
    end

    # Typecast the value to an Integer using to_i instead of Kernel.Integer
    def typecast_value_integer(value)
      value.to_i
    end

    # Typecast the value to a String using to_s instead of Kernel.String
    def typecast_value_string(value)
      value.to_s
    end

    if RUBY_VERSION >= '2.4'
      def _typecast_value_string_to_decimal(value)
        BigDecimal(value)
      rescue
        BigDecimal('0.0')
      end
    else
      # :nocov:
      def _typecast_value_string_to_decimal(value)
        BigDecimal(value)
      end
      # :nocov:
    end
  end

  Database.register_extension(:looser_typecasting, LooserTypecasting)
end
sequel-5.63.0/lib/sequel/extensions/migration.rb000066400000000000000000000645271434214120600216710ustar00rootroot00000000000000# frozen-string-literal: true
#
# Adds the Sequel::Migration and Sequel::Migrator classes, which allow
# the user to easily group schema changes and migrate the database
# to a newer version or revert to a previous version.
#
# To load the extension:
#
#   Sequel.extension :migration
#
# Related modules: Sequel::Migration, Sequel::SimpleMigration,
# Sequel::MigrationDSL, Sequel::MigrationReverser, Sequel::MigrationAlterTableReverser,
# Sequel::Migrator, Sequel::IntegerMigrator, Sequel::TimestampMigrator

#
module Sequel
  # Sequel's older migration class, available for backward compatibility.
  # Uses subclasses with up and down instance methods for each migration:
  #
  #   Class.new(Sequel::Migration) do
  #     def up
  #       create_table(:artists) do
  #         primary_key :id
  #         String :name
  #       end
  #     end
  #
  #     def down
  #       drop_table(:artists)
  #     end
  #   end
  #
  # Part of the +migration+ extension.
  class Migration
    # Set the database associated with this migration.
    def initialize(db)
      @db = db
    end

    # Applies the migration to the supplied database in the specified
    # direction.
    def self.apply(db, direction)
      raise(ArgumentError, "Invalid migration direction specified (#{direction.inspect})") unless [:up, :down].include?(direction)
      new(db).public_send(direction)
    end

    # Returns the list of Migration descendants.
    def self.descendants
      @descendants ||= []
    end

    # Adds the new migration class to the list of Migration descendants.
    def self.inherited(base)
      descendants << base
    end

    # Don't allow transaction overriding in old migrations.
    def self.use_transactions
      nil
    end

    # The default down action does nothing
    def down
    end

    # Intercepts method calls intended for the database and sends them along.
    def method_missing(method_sym, *args, &block)
      # Allow calling private methods for backwards compatibility
      @db.send(method_sym, *args, &block)
    end
    # :nocov:
    ruby2_keywords(:method_missing) if respond_to?(:ruby2_keywords, true)
    # :nocov:

    # This object responds to all methods the database responds to.
    def respond_to_missing?(meth, include_private)
      @db.respond_to?(meth, include_private)
    end

    # The default up action does nothing
    def up
    end
  end

  # Migration class used by the Sequel.migration DSL,
  # using instances for each migration, unlike the
  # +Migration+ class, which uses subclasses for each
  # migration. Part of the +migration+ extension.
  class SimpleMigration
    # Proc used for the down action
    attr_accessor :down

    # Proc used for the up action
    attr_accessor :up

    # Whether to use transactions for this migration, default depends on the
    # database.
    attr_accessor :use_transactions

    # Don't set transaction use by default.
def initialize @use_transactions = nil end # Apply the appropriate block on the +Database+ # instance using instance_exec. def apply(db, direction) raise(ArgumentError, "Invalid migration direction specified (#{direction.inspect})") unless [:up, :down].include?(direction) if prok = public_send(direction) db.instance_exec(&prok) end end end # Internal class used by the Sequel.migration DSL, part of the +migration+ extension. class MigrationDSL < BasicObject # The underlying SimpleMigration instance attr_reader :migration def self.create(&block) new(&block).migration end # Create a new migration class, and instance_exec the block. def initialize(&block) @migration = SimpleMigration.new Migration.descendants << migration instance_exec(&block) end # Defines the migration's down action. def down(&block) migration.down = block end # Disable the use of transactions for the related migration def no_transaction migration.use_transactions = false end # Enable the use of transactions for the related migration def transaction migration.use_transactions = true end # Defines the migration's up action. def up(&block) migration.up = block end # Creates a reversible migration. This is the same as creating # the same block with +up+, but it also calls the block and attempts # to create a +down+ block that will reverse the changes made by # the block. # # There are no guarantees that this will work perfectly # in all cases, but it works for some simple cases. def change(&block) migration.up = block migration.down = MigrationReverser.new.reverse(&block) end end # Handles the reversing of reversible migrations. Basically records # supported methods calls, translates them to reversed calls, and # returns them in reverse order. class MigrationReverser < Sequel::BasicObject def initialize @actions = [] end # Reverse the actions for the given block. Takes the block given # and returns a new block that reverses the actions taken by # the given block. def reverse(&block) begin instance_exec(&block) rescue just_raise = true end if just_raise Proc.new{raise Sequel::Error, "irreversible migration method used in #{block.source_location.first}, you may need to write your own down method"} else actions = @actions.reverse Proc.new do actions.each do |a| pr = a.last.is_a?(Proc) ? a.pop : nil # Allow calling private methods as the reversing methods are private send(*a, &pr) end end end end private def add_column(*args) @actions << [:drop_column, args[0], args[1]] end def add_index(*args) @actions << [:drop_index, *args] end def alter_table(table, &block) @actions << [:alter_table, table, MigrationAlterTableReverser.new.reverse(&block)] end def create_join_table(*args) @actions << [:drop_join_table, *args] end def create_table(name, opts=OPTS) @actions << [:drop_table, name, opts] end def create_view(name, _, opts=OPTS) @actions << [:drop_view, name, opts] end def rename_column(table, name, new_name) @actions << [:rename_column, table, new_name, name] end def rename_table(table, new_name) @actions << [:rename_table, new_name, table] end end # Handles reversing an alter_table block in a reversible migration. class MigrationAlterTableReverser < Sequel::BasicObject def initialize @actions = [] end def reverse(&block) instance_exec(&block) actions = @actions.reverse # Allow calling private methods as the reversing methods are private Proc.new{actions.each{|a| send(*a)}} end private def add_column(*args) @actions << [:drop_column, args.first] end def add_constraint(*args) name = args.first name = name.is_a?(Hash) ? 
name[:name] : name
      @actions << [:drop_constraint, name]
    end

    def add_foreign_key(key, table, *args)
      @actions << [:drop_foreign_key, key, *args]
    end

    def add_primary_key(*args)
      raise if args.first.is_a?(Array)
      @actions << [:drop_column, args.first]
    end

    def add_index(*args)
      @actions << [:drop_index, *args]
    end
    alias add_full_text_index add_index
    alias add_spatial_index add_index

    def rename_column(name, new_name)
      @actions << [:rename_column, new_name, name]
    end
  end

  # The preferred method for writing Sequel migrations, using a DSL:
  #
  #   Sequel.migration do
  #     up do
  #       create_table(:artists) do
  #         primary_key :id
  #         String :name
  #       end
  #     end
  #
  #     down do
  #       drop_table(:artists)
  #     end
  #   end
  #
  # Designed to be used with the +Migrator+ class, part of the +migration+ extension.
  def self.migration(&block)
    MigrationDSL.create(&block)
  end

  # The +Migrator+ class performs migrations based on migration files in a
  # specified directory. The migration files should be named using the
  # following pattern:
  #
  #   <version>_<title>.rb
  #
  # For example, the following files are considered migration files:
  #
  #   001_create_sessions.rb
  #   002_add_data_column.rb
  #
  # You can also use timestamps as version numbers:
  #
  #   1273253850_create_sessions.rb
  #   1273257248_add_data_column.rb
  #
  # If any migration filenames use timestamps as version numbers, Sequel
  # uses the +TimestampMigrator+ to migrate, otherwise it uses the +IntegerMigrator+.
  # The +TimestampMigrator+ can handle migrations that are run out of order
  # as well as migrations with the same timestamp,
  # while the +IntegerMigrator+ is more strict and raises exceptions for missing
  # or duplicate migration files.
  #
  # The migration files should contain either one +Migration+
  # subclass or one <tt>Sequel.migration</tt> call.
  #
  # Migrations are generally run via the sequel command line tool,
  # using the -m and -M switches. The -m switch specifies the migration
  # directory, and the -M switch specifies the version to which to migrate.
  #
  # You can apply migrations using the Migrator API, as well (this is necessary
  # if you want to specify the version from which to migrate in addition to the version
  # to which to migrate).
  # To apply a migrator, the +apply+ method must be invoked with the database
  # instance, the directory of migration files and the target version. If
  # no current version is supplied, it is read from the database. The migrator
  # automatically creates a table (schema_info for integer migrations and
  # schema_migrations for timestamped migrations) in the database to keep track
  # of the current migration version. If no migration version is stored in the
  # database, the version is considered to be 0. If no target version is
  # specified, or the target version specified is greater than the latest
  # version available, the database is migrated to the latest version available in the
  # migration directory.
  #
  # For example, to migrate the database to the latest version:
  #
  #   Sequel::Migrator.run(DB, '.')
  #
  # For example, to migrate the database all the way down:
  #
  #   Sequel::Migrator.run(DB, '.', target: 0)
  #
  # For example, to migrate the database to version 4:
  #
  #   Sequel::Migrator.run(DB, '.', target: 4)
  #
  # To migrate the database from version 1 to version 5:
  #
  #   Sequel::Migrator.run(DB, '.', target: 5, current: 1)
  #
  # Part of the +migration+ extension.
  class Migrator
    MIGRATION_FILE_PATTERN = /\A(\d+)_.+\.rb\z/i.freeze

    # Mutex used around migration file loading
    MUTEX = Mutex.new

    # Exception class raised when there is an error with the migrator's
    # file structure, database, or arguments.
    class Error < Sequel::Error
    end

    # Exception class raised when Migrator.check_current signals that it is
    # not current.
    class NotCurrentError < Error
    end

    # Wrapper for +run+, maintaining backwards API compatibility
    def self.apply(db, directory, target = nil, current = nil)
      run(db, directory, :target => target, :current => current)
    end

    # Raise a NotCurrentError unless the migrator is current, takes the same
    # arguments as #run.
    def self.check_current(*args)
      raise(NotCurrentError, 'current migration version does not match latest available version') unless is_current?(*args)
    end

    # Return whether the migrator is current (i.e. it does not need to make
    # any changes). Takes the same arguments as #run.
    def self.is_current?(db, directory, opts=OPTS)
      migrator_class(directory).new(db, directory, opts).is_current?
    end

    # Migrates the supplied database using the migration files in the specified directory. Options:
    # :allow_missing_migration_files :: Don't raise an error if there are missing migration files.
    #                                   It is very risky to use this option, since it can result in
    #                                   the database schema version number not matching the expected
    #                                   database schema.
    # :column :: The column in the :table argument storing the migration version (default: :version).
    # :current :: The current version of the database. If not given, it is retrieved from the database
    #             using the :table and :column options.
    # :relative :: Run the given number of migrations, with a positive number being migrations to migrate
    #              up, and a negative number being migrations to migrate down (IntegerMigrator only).
    # :table :: The table containing the schema version (default: :schema_info for integer migrations and
    #           :schema_migrations for timestamped migrations).
    # :target :: The target version to which to migrate. If not given, migrates to the maximum version.
    #
    # Examples:
    #   Sequel::Migrator.run(DB, "migrations")
    #   Sequel::Migrator.run(DB, "migrations", target: 15, current: 10)
    #   Sequel::Migrator.run(DB, "app1/migrations", column: :app1_version)
    #   Sequel::Migrator.run(DB, "app2/migrations", column: :app2_version, table: :schema_info2)
    def self.run(db, directory, opts=OPTS)
      migrator_class(directory).new(db, directory, opts).run
    end

    # Choose the Migrator subclass to use. Uses the TimestampMigrator
    # if the version number is greater than 20000101, otherwise uses the IntegerMigrator.
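    #
    # For example (hypothetical filenames):
    #
    #   001_create_sessions.rb        # => IntegerMigrator
    #   1273253850_create_sessions.rb # => TimestampMigrator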
def self.migrator_class(directory) if self.equal?(Migrator) raise(Error, "Must supply a valid migration path") unless File.directory?(directory) Dir.new(directory).each do |file| next unless MIGRATION_FILE_PATTERN.match(file) return TimestampMigrator if file.split('_', 2).first.to_i > 20000101 end IntegerMigrator else self end end # The column to use to hold the migration version number for integer migrations or # filename for timestamp migrations (defaults to :version for integer migrations and # :filename for timestamp migrations) attr_reader :column # The database related to this migrator attr_reader :db # The directory for this migrator's files attr_reader :directory # The dataset for this migrator, representing the +schema_info+ table for integer # migrations and the +schema_migrations+ table for timestamp migrations attr_reader :ds # All migration files in this migrator's directory attr_reader :files # The table to use to hold the applied migration data (defaults to :schema_info for # integer migrations and :schema_migrations for timestamp migrations) attr_reader :table # The target version for this migrator attr_reader :target # Setup the state for the migrator def initialize(db, directory, opts=OPTS) raise(Error, "Must supply a valid migration path") unless File.directory?(directory) @db = db @directory = directory @allow_missing_migration_files = opts[:allow_missing_migration_files] @files = get_migration_files schema, table = @db.send(:schema_and_table, opts[:table] || default_schema_table) @table = schema ? Sequel::SQL::QualifiedIdentifier.new(schema, table) : table @column = opts[:column] || default_schema_column @ds = schema_dataset @use_transactions = opts[:use_transactions] end private # If transactions should be used for the migration, yield to the block # inside a transaction. Otherwise, just yield to the block. def checked_transaction(migration, &block) use_trans = if @use_transactions.nil? if migration.use_transactions.nil? @db.supports_transactional_ddl? else migration.use_transactions end else @use_transactions end if use_trans db.transaction(&block) else yield end end # Load the migration file, raising an exception if the file does not define # a single migration. def load_migration_file(file) MUTEX.synchronize do n = Migration.descendants.length load(file) raise Error, "Migration file #{file.inspect} not containing a single migration detected" unless n + 1 == Migration.descendants.length c = Migration.descendants.pop if c.is_a?(Class) && !c.name.to_s.empty? && Object.const_defined?(c.name) Object.send(:remove_const, c.name) end c end end # Return the integer migration version based on the filename. def migration_version_from_file(filename) filename.split('_', 2).first.to_i end end # The default migrator, recommended in most cases. Uses a simple incrementing # version number starting with 1, where missing or duplicate migration file # versions are not allowed. Part of the +migration+ extension. 
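  #
  # For example, a migration directory handled by this migrator might
  # contain (a sketch):
  #
  #   001_create_artists.rb
  #   002_add_artist_location.rb
  #
  # and could be migrated to version 2 with:
  #
  #   Sequel::Migrator.run(DB, 'migrations', target: 2)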
class IntegerMigrator < Migrator Error = Migrator::Error # The current version for this migrator attr_reader :current # The direction of the migrator, either :up or :down attr_reader :direction # The migrations used by this migrator attr_reader :migrations # Set up all state for the migrator instance def initialize(db, directory, opts=OPTS) super @current = opts[:current] || current_migration_version latest_version = latest_migration_version @target = if opts[:target] opts[:target] elsif opts[:relative] @current + opts[:relative] else latest_version end raise(Error, "No target and/or latest version available, probably because no migration files found or filenames don't follow the migration filename convention") unless target && latest_version if @target > latest_version @target = latest_version elsif @target < 0 @target = 0 end @direction = current < target ? :up : :down if @direction == :down && @current >= @files.length && !@allow_missing_migration_files raise Migrator::Error, "Missing migration version(s) needed to migrate down to target version (current: #{current}, target: #{target})" end @migrations = get_migrations end # The integer migrator is current if the current version is the same as the target version. def is_current? current_migration_version == target end # Apply all migrations on the database def run migrations.zip(version_numbers).each do |m, v| timer = Sequel.start_timer db.log_info("Begin applying migration version #{v}, direction: #{direction}") checked_transaction(m) do m.apply(db, direction) set_migration_version(up? ? v : v-1) end db.log_info("Finished applying migration version #{v}, direction: #{direction}, took #{sprintf('%0.6f', Sequel.elapsed_seconds_since(timer))} seconds") end target end private # Gets the current migration version stored in the database. If no version # number is stored, 0 is returned. def current_migration_version ds.get(column) || 0 end # The default column storing schema version. def default_schema_column :version end # The default table storing schema version. def default_schema_table :schema_info end # Returns any found migration files in the supplied directory. def get_migration_files files = [] Dir.new(directory).each do |file| next unless MIGRATION_FILE_PATTERN.match(file) version = migration_version_from_file(file) if version >= 20000101 raise Migrator::Error, "Migration number too large, must use TimestampMigrator: #{file}" end raise(Error, "Duplicate migration version: #{version}") if files[version] files[version] = File.join(directory, file) end 1.upto(files.length - 1){|i| raise(Error, "Missing migration version: #{i}") unless files[i]} unless @allow_missing_migration_files files end # Returns a list of migration classes filtered for the migration range and # ordered according to the migration direction. def get_migrations version_numbers.map{|n| load_migration_file(files[n])} end # Returns the latest version available in the specified directory. def latest_migration_version l = files.last l ? migration_version_from_file(File.basename(l)) : nil end # Returns the dataset for the schema_info table. If no such table # exists, it is automatically created. def schema_dataset c = column ds = db.from(table) db.create_table?(table){Integer c, :default=>0, :null=>false} unless ds.columns.include?(c) db.alter_table(table){add_column c, Integer, :default=>0, :null=>false} end ds.insert(c=>0) if ds.empty? raise(Error, "More than 1 row in migrator table") if ds.count > 1 ds end # Sets the current migration version stored in the database. 
    def set_migration_version(version)
      ds.update(column=>version)
    end

    # Whether or not this is an up migration
    def up?
      direction == :up
    end

    # An array of numbers corresponding to the migrations,
    # so that each number in the array is the migration version
    # that will be in effect after the migration is run.
    def version_numbers
      @version_numbers ||= begin
        versions = files.
          compact.
          map{|f| migration_version_from_file(File.basename(f))}.
          select{|v| up? ? (v > current && v <= target) : (v <= current && v > target)}.
          sort
        versions.reverse! unless up?
        versions
      end
    end
  end

  # The migrator used if any migration file version is greater than 20000101.
  # Stores filenames of migration files, and can figure out which migrations
  # have not been applied and apply them, even if earlier migrations are added
  # after later migrations. If you plan to do that, the responsibility is on
  # you to make sure the migrations don't conflict. Part of the +migration+ extension.
  class TimestampMigrator < Migrator
    Error = Migrator::Error

    # Array of strings of applied migration filenames
    attr_reader :applied_migrations

    # Get tuples of migrations, filenames, and actions for each migration
    attr_reader :migration_tuples

    # Set up all state for the migrator instance
    def initialize(db, directory, opts=OPTS)
      super
      @target = opts[:target]
      @applied_migrations = get_applied_migrations
      @migration_tuples = get_migration_tuples
    end

    # The timestamp migrator is current if there are no migrations to apply
    # in either direction.
    def is_current?
      migration_tuples.empty?
    end

    # Apply all migration tuples on the database
    def run
      migration_tuples.each do |m, f, direction|
        t = Time.now
        db.log_info("Begin applying migration #{f}, direction: #{direction}")
        checked_transaction(m) do
          m.apply(db, direction)
          fi = f.downcase
          direction == :up ? ds.insert(column=>fi) : ds.where(column=>fi).delete
        end
        db.log_info("Finished applying migration #{f}, direction: #{direction}, took #{sprintf('%0.6f', Time.now - t)} seconds")
      end
      nil
    end

    private

    # Convert the schema_info table to the new schema_migrations table format,
    # using the version of the schema_info table and the current migration files.
    def convert_from_schema_info
      v = db[:schema_info].get(:version)
      ds = db.from(table)
      files.each do |path|
        f = File.basename(path)
        if migration_version_from_file(f) <= v
          ds.insert(column=>f)
        end
      end
    end

    # The default column storing migration filenames.
    def default_schema_column
      :filename
    end

    # The default table storing migration filenames.
    def default_schema_table
      :schema_migrations
    end

    # Returns filenames of all applied migrations
    def get_applied_migrations
      am = ds.select_order_map(column)
      missing_migration_files = am - files.map{|f| File.basename(f).downcase}
      raise(Error, "Applied migration files not in file system: #{missing_migration_files.join(', ')}") if missing_migration_files.length > 0 && !@allow_missing_migration_files
      am
    end

    # Returns any migration files found in the migrator's directory.
    def get_migration_files
      files = []
      Dir.new(directory).each do |file|
        next unless MIGRATION_FILE_PATTERN.match(file)
        files << File.join(directory, file)
      end
      files.sort_by{|f| MIGRATION_FILE_PATTERN.match(File.basename(f))[1].to_i}
    end

    # Returns tuples of migration, filename, and direction
    def get_migration_tuples
      up_mts = []
      down_mts = []
      files.each do |path|
        f = File.basename(path)
        fi = f.downcase
        if target
          if migration_version_from_file(f) > target
            if applied_migrations.include?(fi)
              down_mts << [load_migration_file(path), f, :down]
            end
          elsif !applied_migrations.include?(fi)
            up_mts << [load_migration_file(path), f, :up]
          end
        elsif !applied_migrations.include?(fi)
          up_mts << [load_migration_file(path), f, :up]
        end
      end
      up_mts + down_mts.reverse
    end

    # Returns the dataset for the schema_migrations table. If no such table
    # exists, it is automatically created.
    def schema_dataset
      c = column
      ds = db.from(table)
      if !db.table_exists?(table)
        begin
          db.create_table(table){String c, :primary_key=>true}
        rescue Sequel::DatabaseError => e
          if db.database_type == :mysql && e.message =~ /max key length/
            # Handle case where MySQL is used with utf8mb4 charset default, which
            # only allows a maximum length of about 190 characters for string
            # primary keys due to InnoDB limitations.
            db.create_table(table){String c, :primary_key=>true, :size=>190}
          else
            raise e
          end
        end
        if db.table_exists?(:schema_info) and vha = db[:schema_info].all and vha.length == 1 and vha.first.keys == [:version] and vha.first.values.first.is_a?(Integer)
          convert_from_schema_info
        end
      elsif !ds.columns.include?(c)
        raise(Error, "Migrator table #{table} does not contain column #{c}")
      end
      ds
    end
  end
end
sequel-5.63.0/lib/sequel/extensions/mssql_emulate_lateral_with_apply.rb000066400000000000000000000053671434214120600265120ustar00rootroot00000000000000# frozen-string-literal: true
#
# The mssql_emulate_lateral_with_apply extension converts
# queries that use LATERAL into queries that use CROSS/OUTER
# APPLY, allowing code that works on databases that support
# LATERAL via Dataset#lateral to run on Microsoft SQL Server
# and Sybase SQLAnywhere.
#
# This is available as a separate extension instead of
# integrated into the Microsoft SQL Server and Sybase
# SQLAnywhere support because few people need it and there
# is a performance hit to code that doesn't use it.
#
# It is possible there are cases where this emulation does
# not work. Users should probably verify that correct
# results are returned when using this extension.
#
# You can load this extension into specific datasets:
#
#   ds = DB[:table]
#   ds = ds.extension(:mssql_emulate_lateral_with_apply)
#
# Or you can load it into all of a database's datasets:
#
#   DB.extension(:mssql_emulate_lateral_with_apply)
#
# Related module: Sequel::MSSQL::EmulateLateralWithApply
#
module Sequel
  module MSSQL
    module EmulateLateralWithApply
      # If the table is a dataset that uses LATERAL,
      # convert it to a CROSS APPLY if it is an INNER
      # or CROSS JOIN, and an OUTER APPLY if it is a
      # LEFT JOIN.
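      #
      # For example (a sketch; the exact SQL depends on the datasets involved):
      #
      #   DB[:a].cross_join(DB[:b].where(Sequel[:b][:a_id] => Sequel[:a][:id]).lateral)
      #   # FROM a CROSS JOIN LATERAL (SELECT * FROM b WHERE (b.a_id = a.id))
      #   # is instead emitted as:
      #   # FROM a CROSS APPLY (SELECT * FROM b WHERE (b.a_id = a.id))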
      def join_table(type, table, expr=nil, *)
        if table.is_a?(Dataset) && table.opts[:lateral]
          table = table.clone(:lateral=>nil)
          case type
          when :inner
            type = :cross_apply
            table = table.where(expr)
            expr = nil
          when :cross
            type = :cross_apply
          when :left, :left_outer
            type = :outer_apply
            table = table.where(expr)
            expr = nil
          end
        end
        super
      end

      # When a FROM entry uses a LATERAL subquery,
      # convert that entry into a CROSS APPLY.
      def from(*source, &block)
        virtual_row_columns(source, block)
        lateral, source = source.partition{|t| t.is_a?(Sequel::Dataset) && t.opts[:lateral] || (t.is_a?(Sequel::SQL::AliasedExpression) && t.expression.is_a?(Sequel::Dataset) && t.expression.opts[:lateral])} unless source.empty?
        return super(*source, &nil) if !lateral || lateral.empty?

        ds = from(*source)
        lateral.each do |l|
          l = if l.is_a?(Sequel::SQL::AliasedExpression)
            l.expression.clone(:lateral=>nil).as(l.alias)
          else
            l.clone(:lateral=>nil)
          end
          ds = ds.cross_apply(l)
        end
        ds
      end

      # MSSQL can emulate lateral subqueries via CROSS/OUTER APPLY
      # when using this extension.
      def supports_lateral_subqueries?
        true
      end
    end
  end

  Dataset.register_extension(:mssql_emulate_lateral_with_apply, MSSQL::EmulateLateralWithApply)
end
sequel-5.63.0/lib/sequel/extensions/named_timezones.rb000066400000000000000000000165671434214120600230640ustar00rootroot00000000000000# frozen-string-literal: true
#
# Allows the use of named timezones via TZInfo (requires tzinfo).
# Forces the use of DateTime as Sequel's datetime_class, since
# historically, Ruby's Time class doesn't support timezones other
# than local and UTC. To continue using Ruby's Time class when using
# the named_timezones extension:
#
#   # Load the extension
#   Sequel.extension :named_timezones
#
#   # Set Sequel.datetime_class back to Time
#   Sequel.datetime_class = Time
#
# This allows you to either pass strings or TZInfo::Timezone
# instances to Sequel.database_timezone=, application_timezone=, and
# typecast_timezone=. If a string is passed, it is converted to a
# TZInfo::Timezone using TZInfo::Timezone.get.
#
# Let's say you have the database server in New York and the
# application server in Los Angeles. For historical reasons, data
# is stored in local New York time, but the application server only
# services clients in Los Angeles, so you want to use New York
# time in the database and Los Angeles time in the application. This
# is easily done via:
#
#   Sequel.database_timezone = 'America/New_York'
#   Sequel.application_timezone = 'America/Los_Angeles'
#
# Then, before data is stored in the database, it is converted to New
# York time. When data is retrieved from the database, it is
# converted to Los Angeles time.
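#
# For example (a sketch, assuming the timezone setup above and a
# hypothetical events table with a timestamp column +at+):
#
#   DB[:events].insert(at: DateTime.now) # stored using New York time
#   DB[:events].get(:at)                 # returned in Los Angeles time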
# # If you are using database specific timezones, you may want to load # this extension into the database in order to support similar API: # # DB.extension :named_timezones # DB.timezone = 'America/New_York' # # Note that typecasting from the database timezone to the application # timezone when fetching rows is dependent on the database adapter, # and only works on adapters where Sequel itself does the conversion. # It should work with the mysql, postgres, sqlite, ibmdb, and jdbc # adapters. # # Related module: Sequel::NamedTimezones require 'tzinfo' # module Sequel self.datetime_class = DateTime module NamedTimezones module DatabaseMethods def timezone=(tz) super(Sequel.send(:convert_timezone_setter_arg, tz)) end end # Handles TZInfo::AmbiguousTime exceptions automatically by providing a # proc called with both the datetime value being converted as well as # the array of TZInfo::TimezonePeriod results. Example: # # Sequel.tzinfo_disambiguator = proc{|datetime, periods| periods.first} attr_accessor :tzinfo_disambiguator private if RUBY_VERSION >= '2.6' # Whether Time.at with :nsec and :in is broken. True on JRuby < 9.3.9.0. BROKEN_TIME_AT_WITH_NSEC = defined?(JRUBY_VERSION) && (JRUBY_VERSION < '9.3' || (JRUBY_VERSION < '9.4' && JRUBY_VERSION.split('.')[2].to_i < 9)) private_constant :BROKEN_TIME_AT_WITH_NSEC # Convert the given input Time (which must be in UTC) to the given input timezone, # which should be a TZInfo::Timezone instance. def convert_input_time_other(v, input_timezone) Time.new(v.year, v.mon, v.day, v.hour, v.min, (v.sec + Rational(v.nsec, 1000000000)), input_timezone) rescue TZInfo::AmbiguousTime raise unless disamb = tzinfo_disambiguator_for(v) period = input_timezone.period_for_local(v, &disamb) offset = period.utc_total_offset # :nocov: if BROKEN_TIME_AT_WITH_NSEC Time.at(v.to_i - offset, :in => input_timezone) + v.nsec/1000000000.0 # :nocov: else Time.at(v.to_i - offset, v.nsec, :nsec, :in => input_timezone) end end # Convert the given input Time to the given output timezone, # which should be a TZInfo::Timezone instance. 
def convert_output_time_other(v, output_timezone) # :nocov: if BROKEN_TIME_AT_WITH_NSEC Time.at(v.to_i, :in => output_timezone) + v.nsec/1000000000.0 # :nocov: else Time.at(v.to_i, v.nsec, :nsec, :in => output_timezone) end end # :nodoc: # :nocov: else def convert_input_time_other(v, input_timezone) local_offset = input_timezone.period_for_local(v, &tzinfo_disambiguator_for(v)).utc_total_offset Time.new(1970, 1, 1, 0, 0, 0, local_offset) + v.to_i + v.nsec/1000000000.0 end if defined?(TZInfo::VERSION) && TZInfo::VERSION > '2' def convert_output_time_other(v, output_timezone) v = output_timezone.utc_to_local(v.getutc) local_offset = output_timezone.period_for_local(v, &tzinfo_disambiguator_for(v)).utc_total_offset Time.new(1970, 1, 1, 0, 0, 0, local_offset) + v.to_i + v.nsec/1000000000.0 + local_offset end else def convert_output_time_other(v, output_timezone) v = output_timezone.utc_to_local(v.getutc) local_offset = output_timezone.period_for_local(v, &tzinfo_disambiguator_for(v)).utc_total_offset Time.new(1970, 1, 1, 0, 0, 0, local_offset) + v.to_i + v.nsec/1000000000.0 end end # :nodoc: # :nocov: end # Handle both TZInfo 1 and TZInfo 2 if defined?(TZInfo::VERSION) && TZInfo::VERSION > '2' def convert_input_datetime_other(v, input_timezone) local_offset = Rational(input_timezone.period_for_local(v, &tzinfo_disambiguator_for(v)).utc_total_offset, 86400) (v - local_offset).new_offset(local_offset) end def convert_output_datetime_other(v, output_timezone) v = output_timezone.utc_to_local(v.new_offset(0)) # Force DateTime output instead of TZInfo::DateTimeWithOffset DateTime.jd(v.jd, v.hour, v.minute, v.second + v.sec_fraction, v.offset, v.start) end # :nodoc: # :nocov: else # Assume the given DateTime has a correct time but a wrong timezone. It is # currently in UTC timezone, but it should be converted to the input_timezone. # Keep the time the same but convert the timezone to the input_timezone. # Expects the input_timezone to be a TZInfo::Timezone instance. def convert_input_datetime_other(v, input_timezone) local_offset = input_timezone.period_for_local(v, &tzinfo_disambiguator_for(v)).utc_total_offset_rational (v - local_offset).new_offset(local_offset) end # Convert the given DateTime to use the given output_timezone. # Expects the output_timezone to be a TZInfo::Timezone instance. def convert_output_datetime_other(v, output_timezone) # TZInfo 1 converts times, but expects the given DateTime to have an offset # of 0 and always leaves the timezone offset as 0 v = output_timezone.utc_to_local(v.new_offset(0)) local_offset = output_timezone.period_for_local(v, &tzinfo_disambiguator_for(v)).utc_total_offset_rational # Convert timezone offset from UTC to the offset for the output_timezone (v - local_offset).new_offset(local_offset) end # :nodoc: # :nocov: end # Returns TZInfo::Timezone instance if given a String. def convert_timezone_setter_arg(tz) tz.is_a?(String) ? TZInfo::Timezone.get(tz) : super end # Return a disambiguation proc that provides both the datetime value # and the periods, in order to allow the choice of period to depend # on the datetime value. 
def tzinfo_disambiguator_for(v) if pr = @tzinfo_disambiguator proc{|periods| pr.call(v, periods)} end end end extend NamedTimezones Database.register_extension(:named_timezones, NamedTimezones::DatabaseMethods) end
sequel-5.63.0/lib/sequel/extensions/no_auto_literal_strings.rb
# frozen-string-literal: true Sequel::Database.register_extension(:no_auto_literal_strings){} Sequel::Dataset.register_extension(:no_auto_literal_strings){}
sequel-5.63.0/lib/sequel/extensions/null_dataset.rb
# frozen-string-literal: true # # The null_dataset extension adds the Dataset#nullify method, which # returns a cloned dataset that will never issue a query to the # database. It implements the null object pattern for datasets. # # The most common usage is probably in a method that must return # a dataset, where the method knows the dataset shouldn't return # anything. With standard Sequel, you'd probably just add a # WHERE condition that is always false, but that still results # in a query being sent to the database, and can be overridden # using #unfiltered, the OR operator, or a UNION. # # Usage: # # ds = DB[:items].nullify.where(a: :b).select(:c) # ds.sql # => "SELECT c FROM items WHERE (a = b)" # ds.all # => [] # no query sent to the database # # Note that there is one case where a null dataset will send # a query to the database. If you call #columns on a nulled # dataset and the dataset doesn't have an already cached # version of the columns, it will create a new dataset with # the same options to get the columns. # # This extension uses Object#extend at runtime, which can hurt performance. # # To add the nullify method to a single dataset: # # ds = ds.extension(:null_dataset) # # To add the nullify method to all datasets on a single database: # # DB.extension(:null_dataset) # # Related modules: Sequel::Dataset::Nullifiable, Sequel::Dataset::NullDataset # module Sequel class Dataset module Nullifiable # Return a cloned nullified dataset.
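# For example (illustrative):
#
#   DB[:items].nullify.all # => [] without sending a query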
def nullify cached_dataset(:_nullify_ds) do with_extend(NullDataset) end end end module NullDataset # Create a new dataset from the dataset (which won't # be nulled) to get the columns if they aren't already cached. def columns if cols = _columns return cols end self.columns = db.dataset.clone(@opts).columns end # Return 0 without sending a database query. def delete 0 end # Return self without sending a database query, never yielding. def each self end # Return nil without sending a database query, never yielding. def fetch_rows(sql) nil end # Return nil without sending a database query. def insert(*) nil end # Return nil without sending a database query. def truncate nil end # Return 0 without sending a database query. def update(v=OPTS) 0 end protected # Return nil without sending a database query. def _import(columns, values, opts) nil end private # Just in case these are called directly by some internal code, # make them noops. There's nothing we can do if the db # is accessed directly to make a change, though. (%w'_ddl _dui _insert' << '').each do |m| class_eval("private; def execute#{m}(sql, opts=OPTS) end", __FILE__, __LINE__) end end end Dataset.register_extension(:null_dataset, Dataset::Nullifiable) end
sequel-5.63.0/lib/sequel/extensions/pagination.rb
# frozen-string-literal: true # # The pagination extension adds the Sequel::Dataset#paginate and #each_page methods, # which return paginated (limited and offset) datasets with the following methods # added that make creating a paginated display easier: # # * +page_size+ # * +page_count+ # * +page_range+ # * +current_page+ # * +next_page+ # * +prev_page+ # * +first_page?+ # * +last_page?+ # * +pagination_record_count+ # * +current_page_record_count+ # * +current_page_record_range+ # # This extension uses Object#extend at runtime, which can hurt performance. # # You can load this extension into specific datasets: # # ds = DB[:table] # ds = ds.extension(:pagination) # # Or you can load it into all of a database's datasets, which # is probably the desired behavior if you are using this extension: # # DB.extension(:pagination) # # Related modules: Sequel::DatasetPagination, Sequel::Dataset::Pagination # module Sequel module DatasetPagination # Returns a paginated dataset. The returned dataset is limited to # the page size at the correct offset, and extended with the Pagination # module. If a record count is not provided, does a count of total # number of records for this dataset.
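# For example (illustrative):
#
#   DB[:items].paginate(1, 10) # first page, 10 records per page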
def paginate(page_no, page_size, record_count=nil) raise(Error, "You cannot paginate a dataset that already has a limit") if @opts[:limit] record_count ||= count page_count = (record_count / page_size.to_f).ceil page_count = 1 if page_count == 0 limit(page_size, (page_no - 1) * page_size). with_extend(Dataset::Pagination). clone(:page_size=>page_size, :current_page=>page_no, :pagination_record_count=>record_count, :page_count=>page_count) end # Yields a paginated dataset for each page and returns the receiver. Does # a count to find the total number of records for this dataset. Returns # an enumerator if no block is given. def each_page(page_size) raise(Error, "You cannot paginate a dataset that already has a limit") if @opts[:limit] return to_enum(:each_page, page_size) unless defined?(yield) record_count = count total_pages = (record_count / page_size.to_f).ceil (1..total_pages).each{|page_no| yield paginate(page_no, page_size, record_count)} self end end class Dataset # Holds methods that only relate to paginated datasets. Paginated datasets # have pages starting at 1 (page 1 is offset 0, page 2 is offset 1 * page_size). module Pagination # The number of records per page (the final page may have fewer than # this number of records). def page_size @opts[:page_size] end # The number of pages in the dataset before pagination, of which # this paginated dataset is one. Empty datasets are considered # to have a single page. def page_count @opts[:page_count] end # The current page of the dataset, starting at 1 and not 0. def current_page @opts[:current_page] end # The total number of records in the dataset before pagination. def pagination_record_count @opts[:pagination_record_count] end # Returns the record range for the current page def current_page_record_range return (0..0) if current_page > page_count a = 1 + (current_page - 1) * page_size b = a + page_size - 1 b = pagination_record_count if b > pagination_record_count a..b end # Returns the number of records in the current page def current_page_record_count return 0 if current_page > page_count a = 1 + (current_page - 1) * page_size b = a + page_size - 1 b = pagination_record_count if b > pagination_record_count b - a + 1 end # Returns true if the current page is the first page def first_page? current_page == 1 end # Returns true if the current page is the last page def last_page? current_page == page_count end # Returns the next page number or nil if the current page is the last page def next_page current_page < page_count ? (current_page + 1) : nil end # Returns the page range def page_range 1..page_count end # Returns the previous page number or nil if the current page is the first def prev_page current_page > 1 ? (current_page - 1) : nil end end end Dataset.register_extension(:pagination, DatasetPagination) end
sequel-5.63.0/lib/sequel/extensions/pg_array.rb
# frozen-string-literal: true # # The pg_array extension adds support for Sequel to handle # PostgreSQL's array types.
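#
# For example (an illustrative sketch, assuming a hypothetical posts table
# with a text[] column named tags):
#
#   DB[:posts].insert(tags: Sequel.pg_array(%w'ruby sequel'))
#   DB[:posts].get(:tags).to_a # => ["ruby", "sequel"]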
# # This extension integrates with Sequel's native postgres adapter and # the jdbc/postgresql adapter, so that when array fields are retrieved, # they are parsed and returned as instances of Sequel::Postgres::PGArray. # PGArray is a DelegateClass of Array, so it mostly acts like an array, but not # completely (is_a?(Array) is false). If you want the actual array, # you can call PGArray#to_a. This is done so that Sequel does not # treat a PGArray like an Array by default, which would cause issues. # # In addition to the parsers, this extension comes with literalizers # for PGArray using the standard Sequel literalization callbacks, so # they work on all adapters. # # To turn an existing Array into a PGArray: # # Sequel.pg_array(array) # # If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc], # or you have loaded the core_refinements extension # and have activated refinements for the file, you can also use Array#pg_array: # # array.pg_array # # You can also provide a type, though in many cases it isn't necessary: # # Sequel.pg_array(array, :varchar) # or :integer, :"double precision", etc. # array.pg_array(:varchar) # or :integer, :"double precision", etc. # # So if you want to insert an array into an integer[] database column: # # DB[:table].insert(column: Sequel.pg_array([1, 2, 3])) # # To use this extension, first load it into your Sequel::Database instance: # # DB.extension :pg_array # # See the {schema modification guide}[rdoc-ref:doc/schema_modification.rdoc] # for details on using postgres array columns in CREATE/ALTER TABLE statements. # # This extension by default includes handlers for array types for # all scalar types that the native postgres adapter handles. It # also makes it easy to add support for other array types. In # general, you just need to make sure that the scalar type is # handled and has the appropriate converter installed. For user defined # types, you can do this via: # # DB.add_conversion_proc(scalar_type_oid){|string| } # # Then you can call # Sequel::Postgres::PGArray::DatabaseMethods#register_array_type # to automatically set up a handler for the array type. So if you # want to support the foo[] type (assuming the foo type is already # supported): # # DB.register_array_type('foo') # # While this extension can parse PostgreSQL arrays with explicit bounds, it # currently ignores explicit bounds, so such values do not round # trip. # # If you want an easy way to call PostgreSQL array functions and # operators, look into the pg_array_ops extension. # # This extension requires the delegate library, and the strscan library # if sequel_pg has not been loaded. # # Related module: Sequel::Postgres::PGArray require 'delegate' module Sequel module Postgres # Represents a PostgreSQL array column value. class PGArray < DelegateClass(Array) include Sequel::SQL::AliasMethods module DatabaseMethods BLOB_RANGE = 1...-1 # Create the local hash of database type strings to schema type symbols, # used for array types local to this database.
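# (For instance - illustrative of the mapping set up below - this is what
# lets an integer[] column be reported with the :integer_array schema type
# symbol during schema parsing.)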
def self.extended(db) db.instance_exec do @pg_array_schema_types ||= {} register_array_type('timestamp without time zone', :oid=>1115, :scalar_oid=>1114, :type_symbol=>:datetime) register_array_type('timestamp with time zone', :oid=>1185, :scalar_oid=>1184, :type_symbol=>:datetime_timezone, :scalar_typecast=>:datetime) register_array_type('text', :oid=>1009, :scalar_oid=>25, :type_symbol=>:string) register_array_type('integer', :oid=>1007, :scalar_oid=>23) register_array_type('bigint', :oid=>1016, :scalar_oid=>20, :scalar_typecast=>:integer) register_array_type('numeric', :oid=>1231, :scalar_oid=>1700, :type_symbol=>:decimal) register_array_type('double precision', :oid=>1022, :scalar_oid=>701, :type_symbol=>:float) register_array_type('boolean', :oid=>1000, :scalar_oid=>16) register_array_type('bytea', :oid=>1001, :scalar_oid=>17, :type_symbol=>:blob) register_array_type('date', :oid=>1182, :scalar_oid=>1082) register_array_type('time without time zone', :oid=>1183, :scalar_oid=>1083, :type_symbol=>:time) register_array_type('time with time zone', :oid=>1270, :scalar_oid=>1266, :type_symbol=>:time_timezone, :scalar_typecast=>:time) register_array_type('smallint', :oid=>1005, :scalar_oid=>21, :scalar_typecast=>:integer) register_array_type('oid', :oid=>1028, :scalar_oid=>26, :scalar_typecast=>:integer) register_array_type('real', :oid=>1021, :scalar_oid=>700, :scalar_typecast=>:float) register_array_type('character', :oid=>1014, :converter=>nil, :array_type=>:text, :scalar_typecast=>:string) register_array_type('character varying', :oid=>1015, :converter=>nil, :scalar_typecast=>:string, :type_symbol=>:varchar) register_array_type('xml', :oid=>143, :scalar_oid=>142) register_array_type('money', :oid=>791, :scalar_oid=>790) register_array_type('bit', :oid=>1561, :scalar_oid=>1560) register_array_type('bit varying', :oid=>1563, :scalar_oid=>1562, :type_symbol=>:varbit) register_array_type('uuid', :oid=>2951, :scalar_oid=>2950) register_array_type('xid', :oid=>1011, :scalar_oid=>28) register_array_type('cid', :oid=>1012, :scalar_oid=>29) register_array_type('name', :oid=>1003, :scalar_oid=>19) register_array_type('tid', :oid=>1010, :scalar_oid=>27) register_array_type('int2vector', :oid=>1006, :scalar_oid=>22) register_array_type('oidvector', :oid=>1013, :scalar_oid=>30) [:string_array, :integer_array, :decimal_array, :float_array, :boolean_array, :blob_array, :date_array, :time_array, :datetime_array].each do |v| @schema_type_classes[v] = PGArray end end end def add_named_conversion_proc(name, &block) ret = super name = name.to_s if name.is_a?(Symbol) from(:pg_type).where(:typname=>name).select_map([:oid, :typarray]).each do |scalar_oid, array_oid| register_array_type(name, :oid=>array_oid.to_i, :scalar_oid=>scalar_oid.to_i) end ret end # Handle arrays in bound variables def bound_variable_arg(arg, conn) case arg when PGArray bound_variable_array(arg.to_a) when Array bound_variable_array(arg) else super end end # Freeze the pg array schema types to prevent adding new ones. def freeze @pg_array_schema_types.freeze super end # Register a database specific array type. Options: # # :array_type :: The type to automatically cast the array to when literalizing the array. # Usually the same as db_type. # :converter :: A callable object (e.g. Proc), that is called with each element of the array # (usually a string), and should return the appropriate typecasted object. # :oid :: The PostgreSQL OID for the array type. 
This is used by the Sequel postgres adapter # to set up automatic type conversion on retrieval from the database. # :scalar_oid :: Should be the PostgreSQL OID for the scalar version of this array type. If given, # automatically sets the :converter option by looking up the scalar conversion # proc. # :scalar_typecast :: Should be a symbol indicating the typecast method that should be called on # each element of the array, when a plain array is passed into a database # typecast method. For example, for an array of integers, this could be set to # :integer, so that the typecast_value_integer method is called on all of the # array elements. Defaults to :type_symbol option. # :type_symbol :: The base of the schema type symbol for this type. For example, if you provide # :integer, Sequel will recognize this type as :integer_array during schema parsing. # Defaults to the db_type argument. # # If a block is given, it is treated as the :converter option. def register_array_type(db_type, opts=OPTS, &block) oid = opts[:oid] soid = opts[:scalar_oid] if has_converter = opts.has_key?(:converter) raise Error, "can't provide both a block and :converter option to register_array_type" if block converter = opts[:converter] else has_converter = true if block converter = block end unless (soid || has_converter) && oid array_oid, scalar_oid = from(:pg_type).where(:typname=>db_type.to_s).get([:typarray, :oid]) soid ||= scalar_oid unless has_converter oid ||= array_oid end db_type = db_type.to_s type = (opts[:type_symbol] || db_type).to_sym typecast_method_map = @pg_array_schema_types if soid raise Error, "can't provide both a converter and :scalar_oid option to register" if has_converter converter = conversion_procs[soid] end array_type = (opts[:array_type] || db_type).to_s.dup.freeze creator = Creator.new(array_type, converter) add_conversion_proc(oid, creator) typecast_method_map[db_type] = :"#{type}_array" singleton_class.class_eval do meth = :"typecast_value_#{type}_array" scalar_typecast_method = :"typecast_value_#{opts.fetch(:scalar_typecast, type)}" define_method(meth){|v| typecast_value_pg_array(v, creator, scalar_typecast_method)} private meth alias_method(meth, meth) end @schema_type_classes[:"#{type}_array"] = PGArray nil end private # Format arrays used in bound variables. def bound_variable_array(a) case a when Array "{#{a.map{|i| bound_variable_array(i)}.join(',')}}" when Sequel::SQL::Blob bound_variable_array_string(literal(a)[BLOB_RANGE].gsub("''", "'")) when Sequel::LiteralString a when String bound_variable_array_string(a) else if (s = bound_variable_arg(a, nil)).is_a?(String) bound_variable_array_string(s) else literal(a) end end end # Escape strings used as array members in bound variables. Most complex # types will create a regular string with bound_variable_arg, and then use this # escaping to format it as an array member. def bound_variable_array_string(s) "\"#{s.gsub(/("|\\)/, '\\\\\1')}\"" end # Look into both the current database's array schema types and the global # array schema types to get the type symbol for the given database type # string. def pg_array_schema_type(type) @pg_array_schema_types[type] end # Make the column type detection handle registered array types. def schema_column_type(db_type) if (db_type =~ /\A([^(]+)(?:\([^(]+\))?\[\]\z/io) && (type = pg_array_schema_type($1)) type else super end end # Set the :callable_default value if the default value is recognized as an empty array.
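# For example (illustrative), a column default of '{}'::text[] results in
# a :callable_default proc that returns Sequel.pg_array([], 'text').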
def schema_post_process(_) super.each do |a| h = a[1] if h[:default] =~ /\A(?:'\{\}'|ARRAY\[\])::([\w ]+)\[\]\z/ type = $1.freeze h[:callable_default] = lambda{Sequel.pg_array([], type)} end end end # Convert ruby arrays to PostgreSQL arrays when used as default values. def column_definition_default_sql(sql, column) if (d = column[:default]) && d.is_a?(Array) && !Sequel.condition_specifier?(d) sql << " DEFAULT (#{literal(Sequel.pg_array(d))}::#{type_literal(column)})" else super end end # Given a value to typecast and the type of PGArray subclass: # * If given a PGArray with a matching array_type, use it directly. # * If given a PGArray with a different array_type, return a PGArray # with the creator's type. # * If given an Array, create a new PGArray instance for it. This does not # typecast all members of the array in ruby for performance reasons, but # it will cast the array to the appropriate database type when the array is # literalized. def typecast_value_pg_array(value, creator, scalar_typecast_method=nil) case value when PGArray if value.array_type != creator.type PGArray.new(value.to_a, creator.type) else value end when Array if scalar_typecast_method && respond_to?(scalar_typecast_method, true) value = Sequel.recursive_map(value, method(scalar_typecast_method)) end PGArray.new(value, creator.type) else raise Sequel::InvalidValue, "invalid value for array type: #{value.inspect}" end end end unless Sequel::Postgres.respond_to?(:parse_pg_array) require 'strscan' # PostgreSQL array parser that handles PostgreSQL array output format. # Note that it does not handle all forms of input that PostgreSQL will # accept, and it will not raise an error for all forms of invalid input. class Parser < StringScanner # Set the source for the input, and any converter callable # to call with objects to be created. For nested parsers # the source may contain text after the end of the current parse, # which will be ignored. def initialize(source, converter=nil) super(source) @converter = converter @stack = [[]] @encoding = string.encoding @recorded = String.new.force_encoding(@encoding) end # Take the buffer of recorded characters and add it to the array # of entries, and use a new buffer for recorded characters. def new_entry(include_empty=false) if !@recorded.empty? || include_empty entry = @recorded if entry == 'NULL' && !include_empty entry = nil elsif @converter entry = @converter.call(entry) end @stack.last.push(entry) @recorded = String.new.force_encoding(@encoding) end end # Parse the input character by character, returning an array # of parsed (and potentially converted) objects. def parse raise Sequel::Error, "invalid array, empty string" if eos? raise Sequel::Error, "invalid array, doesn't start with {" unless scan(/((\[\d+:\d+\])+=)?\{/) # :nocov: while !eos? # :nocov: char = scan(/[{}",]|[^{}",]+/) if char == ',' # Comma outside quoted string indicates end of current entry new_entry elsif char == '"' raise Sequel::Error, "invalid array, opening quote with existing recorded data" unless @recorded.empty? # :nocov: while true # :nocov: char = scan(/["\\]|[^"\\]+/) if char == '\\' @recorded << getch elsif char == '"' n = peek(1) raise Sequel::Error, "invalid array, closing quote not followed by comma or closing brace" unless n == ',' || n == '}' break else @recorded << char end end new_entry(true) elsif char == '{' raise Sequel::Error, "invalid array, opening brace with existing recorded data" unless @recorded.empty?
# Start of new array, add it to the stack new = [] @stack.last << new @stack << new elsif char == '}' # End of current array, add current entry to the current array new_entry if @stack.length == 1 raise Sequel::Error, "array parsing finished without parsing entire string" unless eos? # Top level of array, parsing should be over. # Pop current array off stack and return it as result return @stack.pop else # Nested array, pop current array off stack @stack.pop end else # Add the character to the recorded character buffer. @recorded << char end end raise Sequel::Error, "array parsing finished with array unclosed" end end end # Callable object that takes the input string and parses it using Parser. class Creator # The converter callable that is called on each member of the array # to convert it to the correct type. attr_reader :converter # The database type to set on the PGArray instances returned. attr_reader :type # Set the type and optional converter callable that will be used. def initialize(type, converter=nil) @type = type @converter = converter end if Sequel::Postgres.respond_to?(:parse_pg_array) # :nocov: # Use sequel_pg's C-based parser if it has already been defined. def call(string) PGArray.new(Sequel::Postgres.parse_pg_array(string, @converter), @type) end # :nocov: else # Parse the string using Parser with the appropriate # converter, and return a PGArray with the appropriate database # type. def call(string) PGArray.new(Parser.new(string, @converter).parse, @type) end end end # The type of this array. May be nil if no type was given. If a type # is provided, the array is automatically cast to this type when # literalizing. This type is the underlying type, not the array type # itself, so for an int4[] database type, it should be :int4 or 'int4' attr_accessor :array_type # Set the array to delegate to, and a database type. def initialize(array, type=nil) super(array) @array_type = type end # Append the array SQL to the given sql string. # If the receiver has a type, add a cast to the # database array type. def sql_literal_append(ds, sql) at = array_type if empty? && at sql << "'{}'" else sql << "ARRAY" _literal_append(sql, ds, to_a) end if at sql << '::' << at.to_s << '[]' end end # Allow automatic parameterization of the receiver if all elements can be # automatically parameterized. def sequel_auto_param_type(ds) if array_type && all?{|x| nil == x || ds.send(:auto_param_type, x)} "::#{array_type}[]" end end private # Recursive method that handles multi-dimensional # arrays, surrounding each with [] and interspersing # entries with ,. def _literal_append(sql, ds, array) sql << '[' comma = false commas = ',' array.each do |i| sql << commas if comma if i.is_a?(Array) _literal_append(sql, ds, i) else ds.literal_append(sql, i) end comma = true end sql << ']' end end end module SQL::Builders # Return a Postgres::PGArray proxy for the given array and database array type. def pg_array(v, array_type=nil) case v when Postgres::PGArray if array_type.nil? || v.array_type == array_type v else Postgres::PGArray.new(v.to_a, array_type) end when Array Postgres::PGArray.new(v, array_type) else # May not be defined unless the pg_array_ops extension is used pg_array_op(v) end end end Database.register_extension(:pg_array, Postgres::PGArray::DatabaseMethods) end # :nocov: if Sequel.core_extensions? class Array # Return a PGArray proxy to the receiver, using a # specific database type if given.
This is mostly useful # as a short cut for creating PGArray objects that didn't # come from the database. def pg_array(type=nil) Sequel::Postgres::PGArray.new(self, type) end end end if defined?(Sequel::CoreRefinements) module Sequel::CoreRefinements refine Array do def pg_array(type=nil) Sequel::Postgres::PGArray.new(self, type) end end end end # :nocov:
sequel-5.63.0/lib/sequel/extensions/pg_array_ops.rb
# frozen-string-literal: true # # The pg_array_ops extension adds support to Sequel's DSL to make # it easier to call PostgreSQL array functions and operators. # # To load the extension: # # Sequel.extension :pg_array_ops # # The most common usage is passing an expression to Sequel.pg_array_op: # # ia = Sequel.pg_array_op(:int_array_column) # # If you have also loaded the pg_array extension, you can use # Sequel.pg_array as well: # # ia = Sequel.pg_array(:int_array_column) # # Also, on most Sequel expression objects, you can call the pg_array # method: # # ia = Sequel[:int_array_column].pg_array # # If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc], # or you have loaded the core_refinements extension # and have activated refinements for the file, you can also use Symbol#pg_array: # # ia = :int_array_column.pg_array # # This creates a Sequel::Postgres::ArrayOp object that can be used # for easier querying: # # ia[1] # int_array_column[1] # ia[1][2] # int_array_column[1][2] # # ia.contains(:other_int_array_column) # @> # ia.contained_by(:other_int_array_column) # <@ # ia.overlaps(:other_int_array_column) # && # ia.concat(:other_int_array_column) # || # # ia.push(1) # int_array_column || 1 # ia.unshift(1) # 1 || int_array_column # # ia.any # ANY(int_array_column) # ia.all # ALL(int_array_column) # ia.cardinality # cardinality(int_array_column) # ia.dims # array_dims(int_array_column) # ia.hstore # hstore(int_array_column) # ia.hstore(:a) # hstore(int_array_column, a) # ia.length # array_length(int_array_column, 1) # ia.length(2) # array_length(int_array_column, 2) # ia.lower # array_lower(int_array_column, 1) # ia.lower(2) # array_lower(int_array_column, 2) # ia.join # array_to_string(int_array_column, '') # ia.join(':') # array_to_string(int_array_column, ':') # ia.join(':', ' ') # array_to_string(int_array_column, ':', ' ') # ia.unnest # unnest(int_array_column) # ia.unnest(:b) # unnest(int_array_column, b) # # See the PostgreSQL array function and operator documentation for more # details on what these functions and operators do. # # If you are also using the pg_array extension, you should load it before # loading this extension. Doing so will allow you to use PGArray#op to get # an ArrayOp, allowing you to perform array operations on array literals.
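#
# For example (illustrative):
#
#   Sequel.pg_array([1, 2]).op.contains([1])
#   # (ARRAY[1,2] @> ARRAY[1])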
# # In order for #hstore to automatically wrap the returned value correctly in # an HStoreOp, you need to load the pg_hstore_ops extension. # # Related module: Sequel::Postgres::ArrayOp # module Sequel module Postgres # The ArrayOp class is a simple container for a single object that # defines methods that yield Sequel expression objects representing # PostgreSQL array operators and functions. # # In the method documentation examples, assume that: # # array_op = :array.pg_array class ArrayOp < Sequel::SQL::Wrapper CONCAT = ["(".freeze, " || ".freeze, ")".freeze].freeze CONTAINS = ["(".freeze, " @> ".freeze, ")".freeze].freeze CONTAINED_BY = ["(".freeze, " <@ ".freeze, ")".freeze].freeze OVERLAPS = ["(".freeze, " && ".freeze, ")".freeze].freeze # Access a member of the array, returns an SQL::Subscript instance: # # array_op[1] # array[1] def [](key) s = Sequel::SQL::Subscript.new(self, [key]) s = ArrayOp.new(s) if key.is_a?(Range) s end # Call the ALL function: # # array_op.all # ALL(array) # # Usually used like: # # dataset.where(1=>array_op.all) # # WHERE (1 = ALL(array)) def all function(:ALL) end # Call the ANY function: # # array_op.any # ANY(array) # # Usually used like: # # dataset.where(1=>array_op.any) # # WHERE (1 = ANY(array)) def any function(:ANY) end # Call the cardinality method: # # array_op.cardinality # cardinality(array) def cardinality function(:cardinality) end # Use the contains (@>) operator: # # array_op.contains(:a) # (array @> a) def contains(other) bool_op(CONTAINS, wrap_array(other)) end # Use the contained by (<@) operator: # # array_op.contained_by(:a) # (array <@ a) def contained_by(other) bool_op(CONTAINED_BY, wrap_array(other)) end # Call the array_dims method: # # array_op.dims # array_dims(array) def dims function(:array_dims) end # Convert the array into an hstore using the hstore function. # If given an argument, use the two array form: # # array_op.hstore # hstore(array) # array_op.hstore(:array2) # hstore(array, array2) def hstore(arg=(no_arg_given=true; nil)) v = if no_arg_given Sequel.function(:hstore, self) else Sequel.function(:hstore, self, wrap_array(arg)) end # :nocov: if Sequel.respond_to?(:hstore_op) # :nocov: v = Sequel.hstore_op(v) end v end # Call the array_length method: # # array_op.length # array_length(array, 1) # array_op.length(2) # array_length(array, 2) def length(dimension = 1) function(:array_length, dimension) end # Call the array_lower method: # # array_op.lower # array_lower(array, 1) # array_op.lower(2) # array_lower(array, 2) def lower(dimension = 1) function(:array_lower, dimension) end # Use the overlaps (&&) operator: # # array_op.overlaps(:a) # (array && a) def overlaps(other) bool_op(OVERLAPS, wrap_array(other)) end # Use the concatenation (||) operator: # # array_op.push(:a) # (array || a) # array_op.concat(:a) # (array || a) def push(other) array_op(CONCAT, [self, wrap_array(other)]) end alias concat push # Return the receiver.
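# (Illustrative: array_op.pg_array returns array_op itself, since the
# receiver is already wrapped in an ArrayOp.)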
def pg_array self end # Remove the given element from the array: # # array_op.remove(1) # array_remove(array, 1) def remove(element) ArrayOp.new(function(:array_remove, element)) end # Replace the given element in the array with another # element: # # array_op.replace(1, 2) # array_replace(array, 1, 2) def replace(element, replacement) ArrayOp.new(function(:array_replace, element, replacement)) end # Call the array_to_string method: # # array_op.join # array_to_string(array, '') # array_op.to_string # array_to_string(array, '') # array_op.join(":") # array_to_string(array, ':') # array_op.join(":", "*") # array_to_string(array, ':', '*') def to_string(joiner="", null=nil) if null.nil? function(:array_to_string, joiner) else function(:array_to_string, joiner, null) end end alias join to_string # Call the unnest method: # # array_op.unnest # unnest(array) def unnest(*args) function(:unnest, *args.map{|a| wrap_array(a)}) end # Use the concatenation (||) operator, reversing the order: # # array_op.unshift(:a) # (a || array) def unshift(other) array_op(CONCAT, [wrap_array(other), self]) end private # Return a placeholder literal with the given str and args, wrapped # in an ArrayOp, used by operators that return arrays. def array_op(str, args) ArrayOp.new(Sequel::SQL::PlaceholderLiteralString.new(str, args)) end # Return a placeholder literal with the given str and args, wrapped # in a boolean expression, used by operators that return booleans. def bool_op(str, other) Sequel::SQL::BooleanExpression.new(:NOOP, Sequel::SQL::PlaceholderLiteralString.new(str, [value, other])) end # Return a function with the given name, and the receiver as the first # argument, with any additional arguments given. def function(name, *args) SQL::Function.new(name, self, *args) end # Automatically wrap argument in a PGArray if it is a plain Array. # Requires that the pg_array extension has been loaded to work. def wrap_array(arg) if arg.instance_of?(Array) Sequel.pg_array(arg) else arg end end end module ArrayOpMethods # Wrap the receiver in an ArrayOp so you can easily use the PostgreSQL # array functions and operators with it. def pg_array ArrayOp.new(self) end end # :nocov: if defined?(PGArray) # :nocov: class PGArray # Wrap the PGArray instance in an ArrayOp, allowing you to easily use # the PostgreSQL array functions and operators with literal arrays. def op ArrayOp.new(self) end end end end module SQL::Builders # Return the object wrapped in a Postgres::ArrayOp. def pg_array_op(v) case v when Postgres::ArrayOp v else Postgres::ArrayOp.new(v) end end end class SQL::GenericExpression include Sequel::Postgres::ArrayOpMethods end class LiteralString include Sequel::Postgres::ArrayOpMethods end end # :nocov: if Sequel.core_extensions?
class Symbol include Sequel::Postgres::ArrayOpMethods end end if defined?(Sequel::CoreRefinements) module Sequel::CoreRefinements refine Symbol do send INCLUDE_METH, Sequel::Postgres::ArrayOpMethods end end end # :nocov:
sequel-5.63.0/lib/sequel/extensions/pg_auto_parameterize.rb
# frozen-string-literal: true # # This extension changes Sequel's postgres adapter to automatically # parameterize queries by default. Sequel's default behavior has always # been to literalize all arguments unless specifically using # parameters (via :$arg placeholders and the Dataset#prepare/call methods). # This extension makes Sequel use string, numeric, blob, date, and # time types as parameters. Example: # # # Default # DB[:test].where(:a=>1) # # SQL: SELECT * FROM test WHERE a = 1 # # DB.extension :pg_auto_parameterize # DB[:test].where(:a=>1) # # SQL: SELECT * FROM test WHERE a = $1 (args: [1]) # # Other pg_* extensions that ship with Sequel and add support for # PostgreSQL-specific types support automatically parameterizing those # types when used with this extension. # # This extension is not generally faster than the default behavior. # In some cases it is faster, such as when using large strings. # However, the use of parameters avoids potential security issues, # in case Sequel does not correctly literalize one of the arguments # that this extension would automatically parameterize. # # There are some known issues with automatic parameterization: # # 1. In order to avoid most type errors, the extension attempts to guess # the appropriate type and automatically casts most placeholders, # except plain Ruby strings (which PostgreSQL treats as an unknown # type). # # Unfortunately, if the type guess is incorrect, or a plain Ruby # string is used and PostgreSQL cannot determine the data type for it, # the query may result in a DatabaseError. To fix both issues, you can # explicitly cast values using <tt>Sequel.cast(value, type)</tt>, and # Sequel will cast to that type. # # 2. PostgreSQL supports a maximum of 65535 parameters per query. # Attempts to use a query with more than this number of parameters # will result in a Sequel::DatabaseError being raised. Sequel tries # to mitigate this issue by turning <tt>column IN (int, ...)</tt> # queries into <tt>column = ANY(CAST($ AS int8[]))</tt> using an # array parameter, to reduce the number of parameters. It also limits # inserting multiple rows at once to a maximum of 40 rows per query by # default. While these mitigations handle the most common cases # where a large number of parameters would be used, there are other # cases. # # 3. Automatic parameterization will consider the same objects as # equivalent when building SQL. However, for performance, it does # not perform equality checks.
So code such as: # # DB[:t].select{foo('a').as(:f)}.group{foo('a')} # # SELECT foo('a') AS "f" FROM "t" GROUP BY foo('a') # # Will get auto parameterized as: # # # SELECT foo($1) AS "f" FROM "t" GROUP BY foo($2) # # Which will result in a DatabaseError, since that is not valid SQL. # # If you use the same expression, it will use the same parameter: # # foo = Sequel.function(:foo, 'a') # DB[:t].select(foo.as(:f)).group(foo) # # SELECT foo($1) AS "f" FROM "t" GROUP BY foo($1) # # Note that Dataset#select_group and similar methods that take arguments # used in multiple places in the SQL will generally handle this # automatically, since they will use the same objects: # # DB[:t].select_group{foo('a').as(:f)} # # SELECT foo($1) AS "f" FROM "t" GROUP BY foo($1) # # You can work around any issues that come up by disabling automatic # parameterization by calling the +no_auto_parameterize+ method on the # dataset (which returns a clone of the dataset). You can avoid # parameterization for specific values in the query by wrapping them # with +Sequel.skip_pg_auto_param+. # # It is likely there are corner cases not mentioned above # when using this extension. Users are encouraged to provide feedback # when using this extension if they come across such corner cases. # # This extension is only compatible when using the pg driver, not # when using the sequel-postgres-pr, jeremyevans-postgres-pr, or # postgres-pr drivers, as those do not support bound variables. # # Related module: Sequel::Postgres::AutoParameterize module Sequel module Postgres # Enable automatically parameterizing queries. module AutoParameterize # SQL query string that also holds an array of parameters class QueryString < ::String # The array of parameters used by this query. attr_reader :args # Add a new parameter to this query, which adds # the parameter to the array of parameters, and an # SQL placeholder to the query itself. def add_arg(s) unless defined?(@args) @args = [] @arg_map = {} @arg_map.compare_by_identity end unless pos = @arg_map[s] @args << s pos = @arg_map[s] = @args.length.to_s end self << '$' << pos end # Return a new QueryString with the given string appended # to the receiver, and the same arguments. def +(other) v = self.class.new(super) v.instance_variable_set(:@args, @args) if @args v end # Whether this query string currently supports # automatic parameterization. Automatic parameterization # is disabled at certain points during query building where # PostgreSQL does not support it. def auto_param? !@skip_auto_param end # Skip automatic parameterization inside the passed block. # This is used during query generation to disable # automatic parameterization for clauses not supporting it. def skip_auto_param skip_auto_param = @skip_auto_param begin @skip_auto_param = true yield ensure @skip_auto_param = skip_auto_param end end # Freeze the stored arguments when freezing the query string. def freeze @args.freeze if @args super end # Show args when the query string is inspected def inspect @args ? "#{self}; #{@args.inspect}".inspect : super end end # Wrapper class that skips auto parameterization for the wrapped object.
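# For example (illustrative):
#
#   DB[:table].where(a: Sequel.skip_pg_auto_param(1))
#   # SELECT * FROM "table" WHERE ("a" = 1) -- 1 literalized, not $1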
class SkipAutoParam < SQL::Wrapper def to_s_append(ds, sql) if sql.is_a?(QueryString) sql.skip_auto_param{super} else super end end end module DatabaseMethods def self.extended(db) unless (db.adapter_scheme == :postgres && USES_PG) || (db.adapter_scheme == :mock && db.database_type == :postgres) raise Error, "pg_auto_parameterize is only supported when using the postgres adapter with the pg driver" end db.extend_datasets(DatasetMethods) end # If the sql string has an embedded parameter array, # extract the parameter values from that. def execute(sql, opts={}) if sql.is_a?(QueryString) && (args = sql.args) opts = opts.merge(:arguments=>args) end super end private # Disable auto_parameterization during COPY TABLE. def copy_table_sql(table, opts=OPTS) table = _no_auto_parameterize(table) super end # Disable auto_parameterization during CREATE TABLE AS. def create_table_as(name, sql, options) sql = _no_auto_parameterize(sql) super end # Disable auto_parameterization during CREATE VIEW. def create_view_sql(name, source, options) source = _no_auto_parameterize(source) super end # Disable automatic parameterization for the given table if supported. def _no_auto_parameterize(table) if table.is_a?(DatasetMethods) table.no_auto_parameterize else table end end end module DatasetMethods # Return a clone of the dataset that will not do # automatic parameterization. def no_auto_parameterize cached_dataset(:_no_auto_parameterize_ds) do @opts[:no_auto_parameterize] ? self : clone(:no_auto_parameterize=>true) end end # Do not add implicit typecasts for directly typecasted values, # since the user is presumably doing so to set the type, not convert # from the implicitly typecasted type. def cast_sql_append(sql, expr, type) if auto_param?(sql) && auto_param_type(expr) sql << 'CAST(' sql.add_arg(expr) sql << ' AS ' << db.cast_type_literal(type).to_s << ')' else super end end # Transform column IN (int, ...) expressions into column = ANY($) # and column NOT IN (int, ...) expressions into column != ALL($) # using an integer array bound variable for the ANY/ALL argument. # This is the same optimization PostgreSQL performs internally, # but this reduces the number of bound variables. def complex_expression_sql_append(sql, op, args) case op when :IN, :"NOT IN" l, r = args if auto_param?(sql) && !l.is_a?(Array) && _integer_array?(r) && r.size > 1 if op == :IN op = :"=" func = :ANY else op = :!= func = :ALL end args = [l, Sequel.function(func, Sequel.cast(_integer_array_auto_param(r), 'int8[]'))] end end super end # Parameterize insertion of multiple values def multi_insert_sql(columns, values) if @opts[:no_auto_parameterize] super else [clone(:multi_insert_values=>values.map{|r| Array(r)}).insert_sql(columns, LiteralString.new('VALUES '))] end end # For strings, numeric arguments, and date/time arguments, add # them as parameters to the query instead of literalizing them # into the SQL. def literal_append(sql, v) if auto_param?(sql) && (type = auto_param_type(v)) sql.add_arg(v) << type else super end end # Placeholder literalizers are not supported when using automatic parameterization. def supports_placeholder_literalizer? @opts[:no_auto_parameterize] end # Disable automatic parameterization when using a cursor. def use_cursor(*) super.no_auto_parameterize end # Store receiving dataset and args when with_sql is used with a method name symbol, so sql # can be parameterized correctly if used as a subselect.
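# (Illustrative: ds.with_sql(:select_sql) records ds and [:select_sql], so
# a dataset using the result as a subselect can regenerate the SQL with
# the parameters placed correctly.)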
def with_sql(*a) ds = super if Symbol === a[0] ds = ds.clone(:with_sql_dataset=>self, :with_sql_args=>a.freeze) end ds end protected # Disable automatic parameterization for prepared statements, # since they will use manual parameterization. def to_prepared_statement(*a) @opts[:no_auto_parameterize] ? super : no_auto_parameterize.to_prepared_statement(*a) end private # If auto parameterization is supported for the value, return a string # for the implicit typecast to use. Return false/nil if the value should not be # automatically parameterized. def auto_param_type(v) case v when String case v when LiteralString false when Sequel::SQL::Blob "::bytea" else "" end when Integer ((v > 2147483647 || v < -2147483648) ? "::int8" : "::int4") when Float # PostgreSQL treats literal floats as numeric, not double precision # But older versions of PostgreSQL don't handle Infinity/NaN in numeric v.finite? ? "::numeric" : "::double precision" when BigDecimal "::numeric" when Sequel::SQLTime "::time" when Time "::#{@db.cast_type_literal(Time)}" when DateTime "::#{@db.cast_type_literal(DateTime)}" when Date "::date" else v.respond_to?(:sequel_auto_param_type) ? v.sequel_auto_param_type(self) : auto_param_type_fallback(v) end end # Allow other extensions to support auto parameterization in ways that do not # require adding the sequel_auto_param_type method. def auto_param_type_fallback(v) super if defined?(super) end # Whether the given query string currently supports automatic parameterization. def auto_param?(sql) sql.is_a?(QueryString) && sql.auto_param? end # Default the import slice to 40, since PostgreSQL supports a maximum of 1600 # columns per table, and it supports a maximum of 65k parameters. Technically, # there can be more than one parameter per column, so this doesn't prevent going # over the limit, though it does make it less likely. def default_import_slice 40 end # Handle parameterization of multi_insert_sql def _insert_values_sql(sql, values) super if values = @opts[:multi_insert_values] expression_list_append(sql, values.map{|r| Array(r)}) end end # Whether the given argument is an array of integers or NULL values, recursively. def _integer_array?(v) Array === v && v.all?{|x| nil == x || Integer === x} end # Create the bound variable string that will be used for the IN (int, ...) to = ANY($) # optimization for integer arrays. def _integer_array_auto_param(v) buf = String.new buf << '{' comma = false v.each do |x| if comma buf << "," else comma = true end buf << (x ? x.to_s : 'NULL') end buf << '}' end # Skip auto parameterization in LIMIT and OFFSET clauses def select_limit_sql(sql) if auto_param?(sql) && (@opts[:limit] || @opts[:offset]) sql.skip_auto_param{super} else super end end # Skip auto parameterization in ORDER clause if used with # integer values indicating ordering by the nth column. def select_order_sql(sql) if auto_param?(sql) && (order = @opts[:order]) && order.any?{|o| Integer === o || (SQL::OrderedExpression === o && Integer === o.expression)} sql.skip_auto_param{super} else super end end # Skip auto parameterization in CTE CYCLE clause def select_with_sql_cte_search_cycle(sql,cte) if auto_param?(sql) && cte[:cycle] sql.skip_auto_param{super} else super end end # Unless auto parameterization is disabled, use a string that # can store the parameterized arguments. def sql_string_origin @opts[:no_auto_parameterize] ? 
super : QueryString.new end # If subquery uses with_sql with a method name symbol, get the dataset # with_sql was called on, and use that as the subquery, recording the # arguments to with_sql that will be used to calculate the sql. def subselect_sql_dataset(sql, ds) if ws_ds = ds.opts[:with_sql_dataset] super(sql, ws_ds).clone(:subselect_sql_args=>ds.opts[:with_sql_args]) else super end end # If subquery used with_sql with a method name symbol, use the arguments to # with_sql to determine the sql, so that the subselect can be parameterized. def subselect_sql_append_sql(sql, ds) if args = ds.opts[:subselect_sql_args] ds.send(*args) else super end end # Use auto parameterization for datasets with static SQL using placeholders. def static_sql(sql) if @opts[:append_sql] || @opts[:no_auto_parameterize] || String === sql super else query_string = QueryString.new literal_append(query_string, sql) query_string end end end end module SQL::Builders # Skip auto parameterization for the given object when building queries. def skip_pg_auto_param(v) Postgres::AutoParameterize::SkipAutoParam.new(v) end end Database.register_extension(:pg_auto_parameterize, Postgres::AutoParameterize::DatabaseMethods) end
sequel-5.63.0/lib/sequel/extensions/pg_enum.rb
# frozen-string-literal: true # # The pg_enum extension adds support for Sequel to handle PostgreSQL's enum # types. To use this extension, first load it into your Database instance: # # DB.extension :pg_enum # # It allows creation of enum types using create_enum: # # DB.create_enum(:enum_type_name, %w'value1 value2 value3') # # You can also add values to existing enums via add_enum_value: # # DB.add_enum_value(:enum_type_name, 'value4') # # If you want to rename an enum type, you can use rename_enum: # # DB.rename_enum(:enum_type_name, :enum_type_another_name) # # If you want to rename an enum value, you can use rename_enum_value: # # DB.rename_enum_value( # :enum_type_name, :enum_value_name, :enum_value_another_name # ) # # If you want to drop an enum type, you can use drop_enum: # # DB.drop_enum(:enum_type_name) # # Just like any user-created type, after creating the type, you # can create tables that have a column of that type: # # DB.create_table(:table_name) do # enum_type_name :column_name # end # # When parsing the schema, enum types are recognized, and available # values returned in the schema hash: # # DB.schema(:table_name) # [[:column_name, {:type=>:enum, :enum_values=>['value1', 'value2']}]] # # This extension integrates with the pg_array extension.
If you plan # to use arrays of enum types, load the pg_array extension before the # pg_enum extension: # # DB.extension :pg_array, :pg_enum # # DB.create_table(:table_name) do # column :column_name, 'enum_type_name[]' # end # DB[:table_name].get(:column_name) # # ['value1', 'value2'] # # If the migration extension is loaded before this one (the order is important), # you can use create_enum in a reversible migration: # # Sequel.migration do # change do # create_enum(:enum_type_name, %w'value1 value2 value3') # end # end # # Finally, typecasting for enums is set up to cast to strings, which # allows you to use symbols in your model code. Similarly, you can provide # the enum values as symbols when creating enums using create_enum or # add_enum_value. # # Related module: Sequel::Postgres::EnumDatabaseMethods # module Sequel module Postgres # Methods enabling Database object integration with enum types. module EnumDatabaseMethods # Parse the available enum values when loading this extension into # your database. def self.extended(db) db.instance_exec do @enum_labels = {} parse_enum_labels end end # Run the SQL to add the given value to the existing enum type. # Options: # :after :: Add the new value after this existing value. # :before :: Add the new value before this existing value. # :if_not_exists :: Do not raise an error if the value already exists in the enum. def add_enum_value(enum, value, opts=OPTS) sql = String.new sql << "ALTER TYPE #{quote_schema_table(enum)} ADD VALUE#{' IF NOT EXISTS' if opts[:if_not_exists]} #{literal(value.to_s)}" if v = opts[:before] sql << " BEFORE #{literal(v.to_s)}" elsif v = opts[:after] sql << " AFTER #{literal(v.to_s)}" end _process_enum_change_sql(sql) end # Run the SQL to create an enum type with the given name and values. def create_enum(enum, values) _process_enum_change_sql("CREATE TYPE #{quote_schema_table(enum)} AS ENUM (#{values.map{|v| literal(v.to_s)}.join(', ')})") end # Run the SQL to rename the enum type with the given name # to another given name. def rename_enum(enum, new_name) _process_enum_change_sql("ALTER TYPE #{quote_schema_table(enum)} RENAME TO #{quote_schema_table(new_name)}") end # Run the SQL to rename the enum value with the given name # to another given name. def rename_enum_value(enum, old_name, new_name) _process_enum_change_sql("ALTER TYPE #{quote_schema_table(enum)} RENAME VALUE #{literal(old_name.to_s)} TO #{literal(new_name.to_s)}") end # Run the SQL to drop the enum type with the given name. # Options: # :if_exists :: Do not raise an error if the enum type does not exist # :cascade :: Also drop other objects that depend on the enum type def drop_enum(enum, opts=OPTS) _process_enum_change_sql("DROP TYPE#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(enum)}#{' CASCADE' if opts[:cascade]}") end private # Run the SQL on the database, reparsing the enum labels after it is run. def _process_enum_change_sql(sql) run(sql) parse_enum_labels nil end # Parse the pg_enum table to get enum values, and # the pg_type table to get names and array oids for # enums. def parse_enum_labels order = [:enumtypid] order << :enumsortorder if server_version >= 90100 enum_labels = metadata_dataset.from(:pg_enum). order(*order). select_hash_groups(Sequel.cast(:enumtypid, Integer).as(:v), :enumlabel).freeze enum_labels.each_value(&:freeze) if respond_to?(:register_array_type) array_types = metadata_dataset. from(:pg_type). where(:oid=>enum_labels.keys). exclude(:typarray=>0).
select_map([:typname, Sequel.cast(:typarray, Integer).as(:v)]) existing_oids = conversion_procs.keys array_types.each do |name, oid| next if existing_oids.include?(oid) register_array_type(name, :oid=>oid) end end Sequel.synchronize{@enum_labels.replace(enum_labels)} end # For schema entries that are enums, set the type to # :enum and add a :enum_values entry with the enum values. def schema_post_process(_) super.each do |_, s| oid = s[:oid] if values = Sequel.synchronize{@enum_labels[oid]} s[:type] = :enum s[:enum_values] = values end end end # Typecast the given value to a string. def typecast_value_enum(value) value.to_s end end end # support reversible create_enum statements if the migration extension is loaded # :nocov: if defined?(MigrationReverser) # :nocov: class MigrationReverser private def create_enum(name, _) @actions << [:drop_enum, name] end def rename_enum(old_name, new_name) @actions << [:rename_enum, new_name, old_name] end end end Database.register_extension(:pg_enum, Postgres::EnumDatabaseMethods) end
sequel-5.63.0/lib/sequel/extensions/pg_extended_date_support.rb
# frozen-string-literal: true # # The pg_extended_date_support extension allows support # for BC dates/timestamps by default, and infinite # dates/timestamps if configured. Without this extension, # BC and infinite dates/timestamps will be handled incorrectly # or raise an error. This behavior isn't the default because # it can hurt performance, and few users need support for BC # and infinite dates/timestamps. # # To load the extension into the database: # # DB.extension :pg_extended_date_support # # To enable support for infinite dates/timestamps: # # DB.convert_infinite_timestamps = 'string' # or 'nil' or 'float' # # Related module: Sequel::Postgres::ExtendedDateSupport # module Sequel module Postgres module ExtendedDateSupport DATE_YEAR_1 = Date.new(1) DATETIME_YEAR_1 = DateTime.new(1) TIME_YEAR_1 = Time.at(-62135596800).utc INFINITE_TIMESTAMP_STRINGS = ['infinity'.freeze, '-infinity'.freeze].freeze INFINITE_DATETIME_VALUES = ([PLUS_INFINITY, MINUS_INFINITY] + INFINITE_TIMESTAMP_STRINGS).freeze PLUS_DATE_INFINITY = Date::Infinity.new MINUS_DATE_INFINITY = -PLUS_DATE_INFINITY RATIONAL_60 = Rational(60) TIME_CAN_PARSE_BC = RUBY_VERSION >= '2.5' # Add dataset methods and update the conversion procs for dates and timestamps. def self.extended(db) db.extend_datasets(DatasetMethods) procs = db.conversion_procs procs[1082] = ::Sequel.method(:string_to_date) procs[1184] = procs[1114] = db.method(:to_application_timestamp) end # Handle BC dates and times in bound variables.
      # This is necessary for Date values
      # when using both the postgres and jdbc adapters, but also necessary for Time values
      # on jdbc.
      def bound_variable_arg(arg, conn)
        case arg
        when Date, Time
          literal(arg)
        else
          super
        end
      end

      # Whether infinite timestamps/dates should be converted on retrieval. By default, no
      # conversion is done, so an error is raised if you attempt to retrieve an infinite
      # timestamp/date. You can set this to :nil to convert to nil, :string to leave
      # as a string, or :float to convert to an infinite float.
      attr_reader :convert_infinite_timestamps

      # Set whether to allow infinite timestamps/dates. Make sure the
      # conversion proc for date reflects that setting.
      def convert_infinite_timestamps=(v)
        @convert_infinite_timestamps = case v
        when Symbol
          v
        when 'nil'
          :nil
        when 'string'
          :string
        when 'date'
          :date
        when 'float'
          :float
        when String, true
          typecast_value_boolean(v)
        else
          false
        end

        pr = old_pr = Sequel.method(:string_to_date)
        if @convert_infinite_timestamps
          pr = lambda do |val|
            case val
            when *INFINITE_TIMESTAMP_STRINGS
              infinite_timestamp_value(val)
            else
              old_pr.call(val)
            end
          end
        end
        add_conversion_proc(1082, pr)
      end

      # Handle BC dates in timestamps by moving the BC from after the time to
      # after the date, to appease ruby's date parser.
      # If convert_infinite_timestamps is true and the value is infinite, return an appropriate
      # value based on the convert_infinite_timestamps setting.
      def to_application_timestamp(value)
        if value.is_a?(String) && (m = /((?:[-+]\d\d:\d\d)(:\d\d)?)?( BC)?\z/.match(value)) && (m[2] || m[3])
          if m[3]
            value = value.sub(' BC', '').sub(' ', ' BC ')
          end
          if m[2]
            dt = if Sequel.datetime_class == DateTime
              DateTime.parse(value)
            elsif TIME_CAN_PARSE_BC
              Time.parse(value)
            # :nocov:
            else
              DateTime.parse(value).to_time
            # :nocov:
            end
            Sequel.convert_output_timestamp(dt, Sequel.application_timezone)
          else
            super(value)
          end
        elsif convert_infinite_timestamps
          case value
          when *INFINITE_TIMESTAMP_STRINGS
            infinite_timestamp_value(value)
          else
            super
          end
        else
          super
        end
      end

      private

      # Return an appropriate value for the given infinite timestamp string.
      def infinite_timestamp_value(value)
        case convert_infinite_timestamps
        when :nil
          nil
        when :string
          value
        when :date
          value == 'infinity' ? PLUS_DATE_INFINITY : MINUS_DATE_INFINITY
        else
          value == 'infinity' ? PLUS_INFINITY : MINUS_INFINITY
        end
      end

      # If the value is an infinite value (either an infinite float or a string returned
      # by PostgreSQL for an infinite date), return it without converting it if
      # convert_infinite_timestamps is set.
      def typecast_value_date(value)
        if convert_infinite_timestamps
          case value
          when *INFINITE_DATETIME_VALUES
            value
          else
            super
          end
        else
          super
        end
      end

      # If the value is an infinite value (either an infinite float or a string returned
      # by PostgreSQL for an infinite timestamp), return it without converting it if
      # convert_infinite_timestamps is set.
      def typecast_value_datetime(value)
        if convert_infinite_timestamps
          case value
          when *INFINITE_DATETIME_VALUES
            value
          else
            super
          end
        else
          super
        end
      end

      module DatasetMethods
        private

        # Handle BC Date objects.
        def literal_date(date)
          if date < DATE_YEAR_1
            date <<= ((date.year) * 24 - 12)
            date.strftime("'%Y-%m-%d BC'")
          else
            super
          end
        end

        # Handle BC DateTime objects.
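        # For example (an illustrative sketch, not part of the original source;
        # the exact fractional-second and offset formatting below is approximate):
        #
        #   DB.literal(Date.new(-100, 1, 1))     # => "'0101-01-01 BC'"
        #   DB.literal(DateTime.new(-100, 1, 1)) # ~> "'0101-01-01 00:00:00.000000000+0000 BC'"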
        def literal_datetime(date)
          if date < DATETIME_YEAR_1
            date <<= ((date.year) * 24 - 12)
            date = db.from_application_timestamp(date)
            minutes = (date.offset * 1440).to_i
            date.strftime("'%Y-%m-%d %H:%M:%S.%N#{format_timestamp_offset(*minutes.divmod(60))} BC'")
          else
            super
          end
        end

        # Handle Date::Infinity values
        def literal_other_append(sql, v)
          if v.is_a?(Date::Infinity)
            sql << (v > 0 ? "'infinity'" : "'-infinity'")
          else
            super
          end
        end

        if RUBY_ENGINE == 'jruby'
          # :nocov:
          ExtendedDateSupport::CONVERT_TYPES = [Java::JavaSQL::Types::DATE, Java::JavaSQL::Types::TIMESTAMP]

          # Use non-JDBC parsing as JDBC parsing doesn't work for BC dates/timestamps.
          def type_convertor(map, meta, type, i)
            case type
            when *CONVERT_TYPES
              db.oid_convertor_proc(meta.getField(i).getOID)
            else
              super
            end
          end

          # Work around JRuby bug #4822 in Time#to_datetime for times before date of calendar reform
          def literal_time(time)
            if time < TIME_YEAR_1
              literal_datetime(DateTime.parse(super))
            else
              super
            end
          end
          # :nocov:
        else
          # Handle BC Time objects.
          def literal_time(time)
            if time < TIME_YEAR_1
              time = db.from_application_timestamp(time)
              time.strftime("'#{sprintf('%04i', time.year.abs+1)}-%m-%d %H:%M:%S.%N#{format_timestamp_offset(*(time.utc_offset/RATIONAL_60).divmod(60))} BC'")
            else
              super
            end
          end
        end
      end
    end
  end

  Database.register_extension(:pg_extended_date_support, Postgres::ExtendedDateSupport)
end

sequel-5.63.0/lib/sequel/extensions/pg_extended_integer_support.rb

# frozen-string-literal: true
#
# The pg_extended_integer_support extension supports literalizing
# Ruby integers outside of PostgreSQL bigint range on PostgreSQL.
# Sequel by default will raise exceptions when
# literalizing such integers, as PostgreSQL would treat them
# as numeric type values instead of integer/bigint type values
# if unquoted, which can result in unexpected negative performance
# (e.g. forcing sequential scans when index scans would be used for
# an integer/bigint type).
#
# To load the extension into a Dataset (this returns a new Dataset):
#
#   dataset = dataset.extension(:pg_extended_integer_support)
#
# To load the extension into a Database, so it affects all of the
# Database's datasets:
#
#   DB.extension :pg_extended_integer_support
#
# By default, the extension will quote integers outside
# bigint range:
#
#   DB.literal(2**63) # => "'9223372036854775808'"
#
# Quoting the value treats the type as unknown:
#
#   DB.get{pg_typeof(2**63)} # => 'unknown'
#
# PostgreSQL will implicitly cast the unknown type to the appropriate
# database type, raising an error if it cannot be casted. Be aware this
# can result in the integer value being implicitly casted to text or
# any other PostgreSQL type:
#
#   # Returns a string, not an integer:
#   DB.get{2**63}
#   # => "9223372036854775808"
#
# You can use the Dataset#integer_outside_bigint_range_strategy method
# with the value +:raw+ to change the strategy to not quote the variable:
#
#   DB.dataset.
#     integer_outside_bigint_range_strategy(:raw).
#     literal(2**63)
#   # => "9223372036854775808"
#
# Note that not quoting the value will result in PostgreSQL treating
# the type as numeric instead of integer:
#
#   DB.dataset.
#     integer_outside_bigint_range_strategy(:raw).
#     get{pg_typeof(2**63)}
#   # => "numeric"
#
# The +:raw+ behavior was Sequel's historical behavior, but unless
# you fully understand the repercussions of PostgreSQL using a
# numeric type for integer values, you should not use it.
#
# To get the current default behavior of raising an exception for
# integers outside of PostgreSQL bigint range, you can use a strategy
# of +:raise+.
#
# To specify a default strategy for handling integers outside
# bigint range that applies to all of a Database's datasets, you can
# use the +:integer_outside_bigint_range_strategy+ Database option with
# a value of +:raise+ or +:raw+:
#
#   DB.opts[:integer_outside_bigint_range_strategy] = :raw
#
# The Database option will be used as a fallback if you did not call
# the Dataset#integer_outside_bigint_range_strategy method to specify
# a strategy for the dataset.
#
# Related module: Sequel::Postgres::ExtendedIntegerSupport

#
module Sequel
  module Postgres
    module ExtendedIntegerSupport
      # Set the strategy for handling integers outside PostgreSQL
      # bigint range. Supported values:
      #
      # :quote :: Quote the integer value. PostgreSQL will treat
      #           the integer as an unknown type, implicitly casting
      #           to any other type as needed. This is the default
      #           value when using the pg_extended_integer_support
      #           extension.
      # :raise :: Raise error when attempting to literalize the integer
      #           (the default behavior of Sequel on PostgreSQL when
      #           not using the pg_extended_integer_support extension).
      # :raw :: Use raw integer value without quoting. PostgreSQL
      #         will treat the integer as a numeric. This was Sequel's
      #         historical behavior, but it is unlikely to be desired.
      def integer_outside_bigint_range_strategy(strategy)
        clone(:integer_outside_bigint_range_strategy=>strategy)
      end

      private

      # Handle integers outside the bigint range by using
      # the configured strategy.
      def literal_integer_outside_bigint_range(v)
        case @opts[:integer_outside_bigint_range_strategy] || @db.opts[:integer_outside_bigint_range_strategy]
        when :raise
          super
        when :raw
          v.to_s
        else # when :quote
          "'#{v}'"
        end
      end
    end
  end

  Dataset.register_extension(:pg_extended_integer_support, Postgres::ExtendedIntegerSupport)
end

sequel-5.63.0/lib/sequel/extensions/pg_hstore.rb

# frozen-string-literal: true
#
# The pg_hstore extension adds support for the PostgreSQL hstore type
# to Sequel.
# hstore is an extension that ships with PostgreSQL, and
# the hstore type stores an arbitrary key-value table, where the keys
# are strings and the values are strings or NULL.
#
# This extension integrates with Sequel's native postgres and jdbc/postgresql
# adapters, so that when hstore fields are retrieved, they are parsed and returned
# as instances of Sequel::Postgres::HStore. HStore is
# a DelegateClass of Hash, so it mostly acts like a hash, but not
# completely (is_a?(Hash) is false). If you want the actual hash,
# you can call HStore#to_hash. This is done so that Sequel does not
# treat a HStore like a Hash by default, which would cause issues.
#
# In addition to the parsers, this extension comes with literalizers
# for HStore using the standard Sequel literalization callbacks, so
# they work on all adapters.
#
# To turn an existing Hash into an HStore, use Sequel.hstore:
#
#   Sequel.hstore(hash)
#
# If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc],
# or you have loaded the core_refinements extension
# and have activated refinements for the file, you can also use Hash#hstore:
#
#   hash.hstore
#
# Since the hstore type only supports strings, non-string keys and
# values are converted to strings:
#
#   Sequel.hstore(foo: 1).to_hash # {'foo'=>'1'}
#   v = Sequel.hstore({})
#   v[:foo] = 1
#   v # {'foo'=>'1'}
#
# However, to make life easier, lookups by key are converted to
# strings (even when accessing the underlying hash directly):
#
#   Sequel.hstore('foo'=>'bar')[:foo] # 'bar'
#   Sequel.hstore('foo'=>'bar').to_hash[:foo] # 'bar'
#
# HStore instances mostly just delegate to the underlying hash
# instance, so Hash methods that modify the receiver or return
# modified copies of the receiver may not do string conversion.
# The following methods will handle string conversion, and more
# can be added later if desired:
#
# * \[\]
# * \[\]=
# * assoc
# * delete
# * fetch
# * has_key?
# * has_value?
# * include?
# * key
# * key?
# * member?
# * merge
# * merge!
# * rassoc
# * replace
# * store
# * update
# * value?
#
# If you want to insert a hash into an hstore database column:
#
#   DB[:table].insert(column: Sequel.hstore('foo'=>'bar'))
#
# To use this extension, first load it into your Sequel::Database instance:
#
#   DB.extension :pg_hstore
#
# This extension integrates with the pg_array extension. If you plan
# to use arrays of hstore types, load the pg_array extension before the
# pg_hstore extension:
#
#   DB.extension :pg_array, :pg_hstore
#
# See the {schema modification guide}[rdoc-ref:doc/schema_modification.rdoc]
# for details on using hstore columns in CREATE/ALTER TABLE statements.
#
# This extension requires the delegate and strscan libraries.
#
# Related module: Sequel::Postgres::HStore

require 'delegate'
require 'strscan'

module Sequel
  module Postgres
    class HStore < DelegateClass(Hash)
      include Sequel::SQL::AliasMethods

      # Parser for PostgreSQL hstore output format.
      class Parser < StringScanner
        # Parse the output format that PostgreSQL uses for hstore
        # columns. Note that this does not attempt to parse all
        # input formats that PostgreSQL will accept. For instance,
        # it expects all keys and non-NULL values to be quoted.
        #
        # Return the resulting hash of objects. This can be called
        # multiple times, it will cache the parsed hash on the first
        # call and use it for subsequent calls.
        def parse
          return @result if @result
          hash = {}
          while !eos?
skip(/"/) k = parse_quoted skip(/"\s*=>\s*/) if skip(/"/) v = parse_quoted skip(/"/) else scan(/NULL/) v = nil end skip(/,\s*/) hash[k] = v end @result = hash end private # Parse and unescape a quoted key/value. def parse_quoted scan(/(\\"|[^"])*/).gsub(/\\(.)/, '\1') end end module DatabaseMethods def self.extended(db) db.instance_exec do add_named_conversion_proc(:hstore, &HStore.method(:parse)) @schema_type_classes[:hstore] = HStore end end # Handle hstores in bound variables def bound_variable_arg(arg, conn) case arg when HStore arg.unquoted_literal when Hash HStore.new(arg).unquoted_literal else super end end private # Recognize the hstore database type. def schema_column_type(db_type) db_type == 'hstore' ? :hstore : super end # Set the :callable_default value if the default value is recognized as an empty hstore. def schema_post_process(_) super.each do |a| h = a[1] if h[:type] == :hstore && h[:default] =~ /\A''::hstore\z/ h[:callable_default] = lambda{HStore.new({})} end end end # Typecast value correctly to HStore. If already an # HStore instance, return as is. If a hash, return # an HStore version of it. If a string, assume it is # in PostgreSQL output format and parse it using the # parser. def typecast_value_hstore(value) case value when HStore value when Hash HStore.new(value) else raise Sequel::InvalidValue, "invalid value for hstore: #{value.inspect}" end end end # Default proc used for all underlying HStore hashes, so that even # if you grab the underlying hash, it will still convert non-string # keys to strings during lookup. DEFAULT_PROC = lambda{|h, k| h[k.to_s] unless k.is_a?(String)} # Undef marshal_{dump,load} methods in the delegate class, # so that ruby uses the old style _dump/_load methods defined # in the delegate class, instead of the marshal_{dump,load} methods # in the Hash class. undef_method :marshal_load undef_method :marshal_dump # Use custom marshal loading, since underlying hash uses a default proc. def self._load(args) new(Hash[Marshal.load(args)]) end # Parse the given string into an HStore, assuming the str is in PostgreSQL # hstore output format. def self.parse(str) new(Parser.new(str).parse) end # Override methods that accept key argument to convert to string. %w'[] delete has_key? include? key? member? assoc'.each do |m| class_eval("def #{m}(k) super(k.to_s) end", __FILE__, __LINE__) end # Override methods that accept value argument to convert to string unless nil. %w'has_value? value? key rassoc'.each do |m| class_eval("def #{m}(v) super(convert_value(v)) end", __FILE__, __LINE__) end # Override methods that accept key and value arguments to convert to string appropriately. %w'[]= store'.each do |m| class_eval("def #{m}(k, v) super(k.to_s, convert_value(v)) end", __FILE__, __LINE__) end # Override methods that take hashes to convert the hashes to using strings for keys and # values before using them. %w'initialize merge! update replace'.each do |m| class_eval("def #{m}(h, &block) super(convert_hash(h), &block) end", __FILE__, __LINE__) end # Use custom marshal dumping, since underlying hash uses a default proc. def _dump(*) Marshal.dump(to_a) end # Override to force the key argument to a string. def fetch(key, *args, &block) super(key.to_s, *args, &block) end # Convert the input hash to string keys and values before merging, # and return a new HStore instance with the merged hash. def merge(hash, &block) self.class.new(super(convert_hash(hash), &block)) end # Return the underlying hash used by this HStore instance. 
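      # For example (an illustrative sketch, not part of the original source):
      #
      #   h = Sequel.hstore('a'=>'b')
      #   h.to_hash       # => {'a'=>'b'}
      #   h.to_hash[:a]   # => 'b' (via the default proc's string conversion)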
      alias to_hash __getobj__

      # Append a literalized version of the hstore to the sql.
      def sql_literal_append(ds, sql)
        ds.literal_append(sql, unquoted_literal)
        sql << '::hstore'
      end

      # Return a string containing the unquoted, unstring-escaped
      # literal version of the hstore. Separated out for use by
      # the bound argument code.
      def unquoted_literal
        str = String.new
        comma = false
        commas = ","
        quote = '"'
        kv_sep = "=>"
        null = "NULL"
        each do |k, v|
          str << commas if comma
          str << quote << escape_value(k) << quote
          str << kv_sep
          if v.nil?
            str << null
          else
            str << quote << escape_value(v) << quote
          end
          comma = true
        end
        str
      end

      # Allow automatic parameterization.
      def sequel_auto_param_type(ds)
        "::hstore"
      end

      private

      # Return a new hash based on the input hash with string
      # keys and string or nil values.
      def convert_hash(h)
        hash = Hash.new(&DEFAULT_PROC)
        h.each{|k,v| hash[k.to_s] = convert_value(v)}
        hash
      end

      # Return value v as a string unless it is already nil.
      def convert_value(v)
        v.to_s unless v.nil?
      end

      # Escape key/value strings when literalizing to
      # correctly handle backslash and quote characters.
      def escape_value(k)
        k.to_s.gsub(/("|\\)/, '\\\\\1')
      end
    end
  end

  module SQL::Builders
    # Return a Postgres::HStore proxy for the given hash.
    def hstore(v)
      case v
      when Postgres::HStore
        v
      when Hash
        Postgres::HStore.new(v)
      else
        # May not be defined unless the pg_hstore_ops extension is used
        hstore_op(v)
      end
    end
  end

  Database.register_extension(:pg_hstore, Postgres::HStore::DatabaseMethods)
end

# :nocov:
if Sequel.core_extensions?
  class Hash
    # Create a new HStore using the receiver as the input
    # hash. Note that the HStore created will not use the
    # receiver as the backing store, since it has to
    # modify the hash. To get the new backing store, use:
    #
    #   hash.hstore.to_hash
    def hstore
      Sequel::Postgres::HStore.new(self)
    end
  end
end

if defined?(Sequel::CoreRefinements)
  module Sequel::CoreRefinements
    refine Hash do
      def hstore
        Sequel::Postgres::HStore.new(self)
      end
    end
  end
end
# :nocov:

sequel-5.63.0/lib/sequel/extensions/pg_hstore_ops.rb

# frozen-string-literal: true
#
# The pg_hstore_ops extension adds support to Sequel's DSL to make
# it easier to call PostgreSQL hstore functions and operators.
#
# To load the extension:
#
#   Sequel.extension :pg_hstore_ops
#
# The most common usage is taking an object that represents an SQL
# expression (such as a :symbol), and calling Sequel.hstore_op with it:
#
#   h = Sequel.hstore_op(:hstore_column)
#
# If you have also loaded the pg_hstore extension, you can use
# Sequel.hstore as well:
#
#   h = Sequel.hstore(:hstore_column)
#
# Also, on most Sequel expression objects, you can call the hstore
# method:
#
#   h = Sequel[:hstore_column].hstore
#
# If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc],
# or you have loaded the core_refinements extension
# and have activated refinements for the file, you can also use Symbol#hstore:
#
#   h = :hstore_column.hstore
#
# This creates a Sequel::Postgres::HStoreOp object that can be used
# for easier querying:
#
#   h - 'a'    # hstore_column - CAST('a' AS text)
#   h['a']     # hstore_column -> 'a'
#
#   h.concat(:other_hstore_column)       # ||
#   h.has_key?('a')                      # ?
#   h.contain_all(:array_column)         # ?&
#   h.contain_any(:array_column)         # ?|
#   h.contains(:other_hstore_column)     # @>
#   h.contained_by(:other_hstore_column) # <@
#
#   h.defined        # defined(hstore_column)
#   h.delete('a')    # delete(hstore_column, 'a')
#   h.each           # each(hstore_column)
#   h.keys           # akeys(hstore_column)
#   h.populate(:a)   # populate_record(a, hstore_column)
#   h.record_set(:a) # (a #= hstore_column)
#   h.skeys          # skeys(hstore_column)
#   h.slice(:a)      # slice(hstore_column, a)
#   h.svals          # svals(hstore_column)
#   h.to_array       # hstore_to_array(hstore_column)
#   h.to_matrix      # hstore_to_matrix(hstore_column)
#   h.values         # avals(hstore_column)
#
# Here are a couple examples for updating an existing hstore column:
#
#   # Add a key, or update an existing key with a new value
#   DB[:tab].update(h: Sequel.hstore_op(:h).concat('c'=>3))
#
#   # Delete a key
#   DB[:tab].update(h: Sequel.hstore_op(:h).delete('k1'))
#
# On PostgreSQL 14+, the hstore <tt>[]</tt> method will use subscripts instead of being
# the same as +get+, if the value being wrapped is an identifier:
#
#   Sequel.hstore_op(:hstore_column)['a'] # hstore_column['a']
#   Sequel.hstore_op(Sequel[:h][:s])['a'] # h.s['a']
#
# This support allows you to use hstore subscripts in UPDATE statements to update only
# part of a column:
#
#   h = Sequel.hstore_op(:h)
#   DB[:t].update(h['key1'] => 'val1', h['key2'] => 'val2')
#   # UPDATE "t" SET "h"['key1'] = 'val1', "h"['key2'] = 'val2'
#
# See the PostgreSQL hstore function and operator documentation for more
# details on what these functions and operators do.
#
# If you are also using the pg_hstore extension, you should load it before
# loading this extension. Doing so will allow you to use HStore#op to get
# an HStoreOp, allowing you to perform hstore operations on hstore literals.
#
# Some of these methods will accept ruby arrays and convert them automatically to
# PostgreSQL arrays if you have the pg_array extension loaded. Some of these methods
# will accept ruby hashes and convert them automatically to PostgreSQL hstores if the
# pg_hstore extension is loaded. Methods representing expressions that return
# PostgreSQL arrays will have the returned expression automatically wrapped in a
# Postgres::ArrayOp if the pg_array_ops extension is loaded.
#
# Related module: Sequel::Postgres::HStoreOp

#
module Sequel
  module Postgres
    # The HStoreOp class is a simple container for a single object that
    # defines methods that yield Sequel expression objects representing
    # PostgreSQL hstore operators and functions.
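    #
    # For example (an illustrative sketch, not part of the original source;
    # the items table and tags column are hypothetical), several operators
    # can be combined in a single query; on PostgreSQL 14+ the lookup
    # renders as a subscript (tags['color']) instead of ->:
    #
    #   h = Sequel.hstore_op(:tags)
    #   DB[:items].where(h.has_key?('color')).where(h['color'] => 'red')
    #   # SELECT * FROM items WHERE (tags ? 'color') AND ((tags -> 'color') = 'red')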
# # In the method documentation examples, assume that: # # hstore_op = :hstore.hstore class HStoreOp < Sequel::SQL::Wrapper CONCAT = ["(".freeze, " || ".freeze, ")".freeze].freeze CONTAIN_ALL = ["(".freeze, " ?& ".freeze, ")".freeze].freeze CONTAIN_ANY = ["(".freeze, " ?| ".freeze, ")".freeze].freeze CONTAINS = ["(".freeze, " @> ".freeze, ")".freeze].freeze CONTAINED_BY = ["(".freeze, " <@ ".freeze, ")".freeze].freeze HAS_KEY = ["(".freeze, " ? ".freeze, ")".freeze].freeze LOOKUP = ["(".freeze, " -> ".freeze, ")".freeze].freeze RECORD_SET = ["(".freeze, " #= ".freeze, ")".freeze].freeze # Delete entries from an hstore using the subtraction operator: # # hstore_op - 'a' # (hstore - 'a') def -(other) other = if other.is_a?(String) && !other.is_a?(Sequel::LiteralString) Sequel.cast_string(other) else wrap_input_array(wrap_input_hash(other)) end HStoreOp.new(super) end # Lookup the value for the given key in an hstore: # # hstore_op['a'] # (hstore -> 'a') def [](key) if key.is_a?(Array) || (defined?(Sequel::Postgres::PGArray) && key.is_a?(Sequel::Postgres::PGArray)) || (defined?(Sequel::Postgres::ArrayOp) && key.is_a?(Sequel::Postgres::ArrayOp)) wrap_output_array(Sequel::SQL::PlaceholderLiteralString.new(LOOKUP, [value, wrap_input_array(key)])) else v = case @value when Symbol, SQL::Identifier, SQL::QualifiedIdentifier HStoreSubscriptOp.new(self, key) else Sequel::SQL::PlaceholderLiteralString.new(LOOKUP, [value, key]) end Sequel::SQL::StringExpression.new(:NOOP, v) end end # Check if the receiver contains all of the keys in the given array: # # hstore_op.contain_all(:a) # (hstore ?& a) def contain_all(other) bool_op(CONTAIN_ALL, wrap_input_array(other)) end # Check if the receiver contains any of the keys in the given array: # # hstore_op.contain_any(:a) # (hstore ?| a) def contain_any(other) bool_op(CONTAIN_ANY, wrap_input_array(other)) end # Check if the receiver contains all entries in the other hstore: # # hstore_op.contains(:h) # (hstore @> h) def contains(other) bool_op(CONTAINS, wrap_input_hash(other)) end # Check if the other hstore contains all entries in the receiver: # # hstore_op.contained_by(:h) # (hstore <@ h) def contained_by(other) bool_op(CONTAINED_BY, wrap_input_hash(other)) end # Check if the receiver contains a non-NULL value for the given key: # # hstore_op.defined('a') # defined(hstore, 'a') def defined(key) Sequel::SQL::BooleanExpression.new(:NOOP, function(:defined, key)) end # Delete the matching entries from the receiver: # # hstore_op.delete('a') # delete(hstore, 'a') def delete(key) HStoreOp.new(function(:delete, wrap_input_array(wrap_input_hash(key)))) end # Transform the receiver into a set of keys and values: # # hstore_op.each # each(hstore) def each function(:each) end # Check if the receiver contains the given key: # # hstore_op.has_key?('a') # (hstore ? 'a') def has_key?(key) bool_op(HAS_KEY, key) end alias include? has_key? alias key? has_key? alias member? has_key? alias exist? has_key? # Return the receiver. 
def hstore self end # Return the keys as a PostgreSQL array: # # hstore_op.keys # akeys(hstore) def keys wrap_output_array(function(:akeys)) end alias akeys keys # Merge a given hstore into the receiver: # # hstore_op.merge(:a) # (hstore || a) def merge(other) HStoreOp.new(Sequel::SQL::PlaceholderLiteralString.new(CONCAT, [self, wrap_input_hash(other)])) end alias concat merge # Create a new record populated with entries from the receiver: # # hstore_op.populate(:a) # populate_record(a, hstore) def populate(record) SQL::Function.new(:populate_record, record, self) end # Update the values in a record using entries in the receiver: # # hstore_op.record_set(:a) # (a #= hstore) def record_set(record) Sequel::SQL::PlaceholderLiteralString.new(RECORD_SET, [record, value]) end # Return the keys as a PostgreSQL set: # # hstore_op.skeys # skeys(hstore) def skeys function(:skeys) end # Return an hstore with only the keys in the given array: # # hstore_op.slice(:a) # slice(hstore, a) def slice(keys) HStoreOp.new(function(:slice, wrap_input_array(keys))) end # Return the values as a PostgreSQL set: # # hstore_op.svals # svals(hstore) def svals function(:svals) end # Return a flattened array of the receiver with alternating # keys and values: # # hstore_op.to_array # hstore_to_array(hstore) def to_array wrap_output_array(function(:hstore_to_array)) end # Return a nested array of the receiver, with arrays of # 2 element (key/value) arrays: # # hstore_op.to_matrix # hstore_to_matrix(hstore) def to_matrix wrap_output_array(function(:hstore_to_matrix)) end # Return the values as a PostgreSQL array: # # hstore_op.values # avals(hstore) def values wrap_output_array(function(:avals)) end alias avals values private # Return a placeholder literal with the given str and args, wrapped # in a boolean expression, used by operators that return booleans. def bool_op(str, other) Sequel::SQL::BooleanExpression.new(:NOOP, Sequel::SQL::PlaceholderLiteralString.new(str, [value, other])) end # Return a function with the given name, and the receiver as the first # argument, with any additional arguments given. def function(name, *args) SQL::Function.new(name, self, *args) end # Wrap argument in a PGArray if it is an array def wrap_input_array(obj) if obj.is_a?(Array) && Sequel.respond_to?(:pg_array) Sequel.pg_array(obj) else obj end end # Wrap argument in an Hstore if it is a hash def wrap_input_hash(obj) if obj.is_a?(Hash) && Sequel.respond_to?(:hstore) Sequel.hstore(obj) else obj end end # Wrap argument in a PGArrayOp if supported def wrap_output_array(obj) if Sequel.respond_to?(:pg_array_op) Sequel.pg_array_op(obj) else obj end end end # Represents hstore subscripts. This is abstracted because the # subscript support depends on the database version. class HStoreSubscriptOp < SQL::Expression SUBSCRIPT = ["".freeze, "[".freeze, "]".freeze].freeze # The expression being subscripted attr_reader :expression # The subscript to use attr_reader :sub # Set the expression and subscript to the given arguments def initialize(expression, sub) @expression = expression @sub = sub freeze end # Use subscripts instead of -> operator on PostgreSQL 14+ def to_s_append(ds, sql) server_version = ds.db.server_version frag = server_version && server_version >= 140000 ? 
SUBSCRIPT : HStoreOp::LOOKUP
        ds.literal_append(sql, Sequel::SQL::PlaceholderLiteralString.new(frag, [@expression, @sub]))
      end

      # Support transforming of hstore subscripts
      def sequel_ast_transform(transformer)
        self.class.new(transformer.call(@expression), transformer.call(@sub))
      end
    end

    module HStoreOpMethods
      # Wrap the receiver in an HStoreOp so you can easily use the PostgreSQL
      # hstore functions and operators with it.
      def hstore
        HStoreOp.new(self)
      end
    end

    # :nocov:
    if defined?(HStore)
    # :nocov:
      class HStore
        # Wrap the receiver in an HStoreOp so you can easily use the PostgreSQL
        # hstore functions and operators with it.
        def op
          HStoreOp.new(self)
        end
      end
    end
  end

  module SQL::Builders
    # Return the object wrapped in an Postgres::HStoreOp.
    def hstore_op(v)
      case v
      when Postgres::HStoreOp
        v
      else
        Postgres::HStoreOp.new(v)
      end
    end
  end

  class SQL::GenericExpression
    include Sequel::Postgres::HStoreOpMethods
  end

  class LiteralString
    include Sequel::Postgres::HStoreOpMethods
  end
end

# :nocov:
if Sequel.core_extensions?
  class Symbol
    include Sequel::Postgres::HStoreOpMethods
  end
end

if defined?(Sequel::CoreRefinements)
  module Sequel::CoreRefinements
    refine Symbol do
      send INCLUDE_METH, Sequel::Postgres::HStoreOpMethods
    end
  end
end
# :nocov:

sequel-5.63.0/lib/sequel/extensions/pg_inet.rb

# frozen-string-literal: true
#
# The pg_inet extension adds support for Sequel to handle
# PostgreSQL's inet and cidr types using ruby's IPAddr class.
#
# This extension integrates with Sequel's native postgres and jdbc/postgresql
# adapters, so that when inet/cidr fields are retrieved, they are returned as
# IPAddr instances.
#
# To use this extension, load it into your database:
#
#   DB.extension :pg_inet
#
# This extension integrates with the pg_array extension. If you plan
# to use the inet[] or cidr[] types, load the pg_array extension before
# the pg_inet extension:
#
#   DB.extension :pg_array, :pg_inet
#
# This extension does not add special support for the macaddr
# type. Ruby doesn't have a stdlib class that represents mac
# addresses, so these will still be returned as strings. The exception
# to this is that the pg_array extension integration will recognize
# macaddr[] types and return them as arrays of strings.
#
# See the {schema modification guide}[rdoc-ref:doc/schema_modification.rdoc]
# for details on using inet/cidr columns in CREATE/ALTER TABLE statements.
#
# Related module: Sequel::Postgres::InetDatabaseMethods

require 'ipaddr'

module Sequel
  module Postgres
    # Methods enabling Database object integration with the inet/cidr types.
    module InetDatabaseMethods
      # Reset the conversion procs when extending the Database object, so
      # it will pick up the inet/cidr converter. Also, extend the datasets
      # with support for literalizing the IPAddr types.
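      #
      # An illustrative round-trip sketch (not part of the original source;
      # the hosts table and addr column are hypothetical, and this assumes
      # the extension is loaded):
      #
      #   DB[:hosts].insert(addr: IPAddr.new('192.168.1.0/24'))
      #   DB[:hosts].get(:addr) # => #<IPAddr: IPv4:192.168.1.0/255.255.255.0>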
      def self.extended(db)
        db.instance_exec do
          extend_datasets(InetDatasetMethods)

          # :nocov:
          if !defined?(SEQUEL_PG_VERSION_INTEGER) || SEQUEL_PG_VERSION_INTEGER >= 11300
          # :nocov:
            # sequel_pg 1.13.0+ will use inet/cidr conversion procs, but doing so is
            # slower, so don't add the conversion procs if using sequel_pg 1.13.0+.
            meth = IPAddr.method(:new)
            add_conversion_proc(869, meth)
            add_conversion_proc(650, meth)
            if respond_to?(:register_array_type)
              register_array_type('inet', :oid=>1041, :scalar_oid=>869)
              register_array_type('cidr', :oid=>651, :scalar_oid=>650)
            end
          end

          if respond_to?(:register_array_type)
            register_array_type('macaddr', :oid=>1040, :scalar_oid=>829)
          end

          @schema_type_classes[:ipaddr] = IPAddr
        end
      end

      # Convert an IPAddr arg to a string. Probably not necessary, but done
      # for safety.
      def bound_variable_arg(arg, conn)
        case arg
        when IPAddr
          "#{arg.to_s}/#{arg.instance_variable_get(:@mask_addr).to_s(2).count('1')}"
        else
          super
        end
      end

      private

      # Make the column type detection recognize the inet and cidr types.
      def schema_column_type(db_type)
        case db_type
        when 'inet', 'cidr'
          :ipaddr
        else
          super
        end
      end

      # Set the :ruby_default value if the default value is recognized as an ip address.
      def schema_post_process(_)
        super.each do |a|
          h = a[1]
          if h[:type] == :ipaddr && h[:default] =~ /\A'([:a-fA-F0-9\.\/]+)'::(?:inet|cidr)\z/
            h[:ruby_default] = IPAddr.new($1)
          end
        end
      end

      # Typecast the given value to an IPAddr object.
      def typecast_value_ipaddr(value)
        case value
        when IPAddr
          value
        when String
          IPAddr.new(typecast_check_string_length(value, 100))
        else
          raise Sequel::InvalidValue, "invalid value for inet/cidr: #{value.inspect}"
        end
      end
    end

    module InetDatasetMethods
      private

      # Allow auto parameterization of IPAddr instances.
      def auto_param_type_fallback(v)
        if defined?(super) && (type = super)
          type
        elsif IPAddr === v
          "::inet"
        end
      end

      # Convert IPAddr value to a string and append a literal version
      # of the string to the sql.
      def literal_other_append(sql, value)
        if value.is_a?(IPAddr)
          literal_string_append(sql, "#{value.to_s}/#{value.instance_variable_get(:@mask_addr).to_s(2).count('1')}")
        else
          super
        end
      end
    end
  end

  Database.register_extension(:pg_inet, Postgres::InetDatabaseMethods)
end

sequel-5.63.0/lib/sequel/extensions/pg_inet_ops.rb

# frozen-string-literal: true
#
# The pg_inet_ops extension adds support to Sequel's DSL to make
# it easier to call PostgreSQL inet functions and operators.
# # To load the extension: # # Sequel.extension :pg_inet_ops # # The most common usage is passing an expression to Sequel.pg_inet_op: # # r = Sequel.pg_inet_op(:inet) # # Also, on most Sequel expression objects, you can call the pg_inet # method: # # r = Sequel[:ip].pg_inet # # If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc], # or you have loaded the core_refinements extension # and have activated refinements for the file, you can also use Symbol#pg_inet: # # r = :inet.pg_inet # # This creates a Sequel::Postgres::InetOp object that can be used # for easier querying: # # ~r # ~inet # r & other # inet & other # r | other # inet | other # r << :other # inet << other # r >> :other # inet >> other # # r.contained_by(:other) # inet << other # r.contained_by_or_equals(:other) # inet <<= other # r.contains(:other) # inet >> other # r.contains_or_equals(:other) # inet >>= other # r.contains_or_contained_by(:other) # inet && other # # r.abbrev # abbrev(inet) # r.broadcast # broadcast(inet) # r.family # family(inet) # r.host # host(inet) # r.hostmask # hostmask(inet) # r.masklen # masklen(inet) # r.netmask # netmask(inet) # r.network # network(inet) # r.set_masklen(16) # set_masklen(inet, 16) # r.text # text(inet) # # If a String or IPAddr instance is passed to Sequel.pg_inet_op, it will automatically # be cast to +inet+. To treat the object as a +cidr+, you must cast it before passing # it to Sequel.pg_inet_op: # # r = Sequel.pg_inet_op(Sequel.cast('1.2.3.4', :cidr)) # # See the PostgreSQL network function and operator documentation for more # details on what these functions and operators do. # # Related module: Sequel::Postgres::InetOp require 'ipaddr' module Sequel module Postgres # The InetOp class is a simple container for a single object that # defines methods that yield Sequel expression objects representing # PostgreSQL inet operators and functions. # # Most methods in this class are defined via metaprogramming, see # the pg_inet_ops extension documentation for details on the API. class InetOp < Sequel::SQL::Wrapper include Sequel::SQL::BitwiseMethods # For String and IPAddr instances, wrap them in a cast to inet, # to avoid ambiguity issues when calling operator methods. def initialize(v) case v when ::Sequel::LiteralString # nothing when String, IPAddr v = Sequel.cast(v, :inet) end super end OPERATORS = { :contained_by_or_equals => ["(".freeze, " <<= ".freeze, ")".freeze].freeze, :contains_or_equals => ["(".freeze, " >>= ".freeze, ")".freeze].freeze, :contains_or_contained_by => ["(".freeze, " && ".freeze, ")".freeze].freeze, }.freeze OPERATORS.keys.each do |f| class_eval("def #{f}(v) Sequel::SQL::BooleanExpression.new(:NOOP, operator(:#{f}, v)) end", __FILE__, __LINE__) end %w'<< >>'.each do |f| class_eval("def #{f}(v) Sequel::SQL::BooleanExpression.new(:NOOP, super) end", __FILE__, __LINE__) end %w'& | +'.each do |f| class_eval("def #{f}(v) self.class.new(super) end", __FILE__, __LINE__) end %w'abbrev host text'.each do |f| class_eval("def #{f}() Sequel::SQL::StringExpression.new(:NOOP, function(:#{f})) end", __FILE__, __LINE__) end %w'family masklen'.each do |f| class_eval("def #{f}() Sequel::SQL::NumericExpression.new(:NOOP, function(:#{f})) end", __FILE__, __LINE__) end %w'broadcast hostmask netmask network'.each do |f| class_eval("def #{f}() self.class.new(function(:#{f})) end", __FILE__, __LINE__) end # Return the receiver. 
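      #
      # An illustrative example (not part of the original source; assumes a
      # hypothetical hosts table with an inet column named addr, and that the
      # pg_inet extension is also loaded so IPAddr values literalize correctly):
      #
      #   r = Sequel.pg_inet_op(:addr)
      #   DB[:hosts].where(r.contained_by(IPAddr.new('10.0.0.0/8')))
      #   # SELECT * FROM hosts WHERE (addr << '10.0.0.0/8')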
      def pg_inet
        self
      end

      # Return an expression for the bitwise NOT of the receiver
      def ~
        self.class.new(super)
      end

      # Return an expression for the subtraction of the argument from the receiver
      def -(v)
        case v
        when Integer
          self.class.new(super)
        else
          Sequel::SQL::NumericExpression.new(:NOOP, super)
        end
      end

      # Return an expression for the calling of the set_masklen function with the receiver and the given argument
      def set_masklen(v)
        self.class.new(Sequel::SQL::Function.new(:set_masklen, self, v))
      end

      alias contained_by <<
      alias contains >>

      undef_method :*, :/

      private

      # Handle PostgreSQL specific operator types
      def operator(type, other)
        Sequel::SQL::PlaceholderLiteralString.new(OPERATORS[type], [value, other])
      end

      # Return a function called with the receiver.
      def function(name)
        Sequel::SQL::Function.new(name, self)
      end
    end

    module InetOpMethods
      # Wrap the receiver in an InetOp so you can easily use the PostgreSQL
      # inet functions and operators with it.
      def pg_inet
        InetOp.new(self)
      end
    end
  end

  module SQL::Builders
    # Return the expression wrapped in the Postgres::InetOp.
    def pg_inet_op(v)
      case v
      when Postgres::InetOp
        v
      else
        Postgres::InetOp.new(v)
      end
    end
  end

  class SQL::GenericExpression
    include Sequel::Postgres::InetOpMethods
  end

  class LiteralString
    include Sequel::Postgres::InetOpMethods
  end
end

# :nocov:
if Sequel.core_extensions?
  class Symbol
    include Sequel::Postgres::InetOpMethods
  end
end

if defined?(Sequel::CoreRefinements)
  module Sequel::CoreRefinements
    refine Symbol do
      send INCLUDE_METH, Sequel::Postgres::InetOpMethods
    end
  end
end
# :nocov:

sequel-5.63.0/lib/sequel/extensions/pg_interval.rb

# frozen-string-literal: true
#
# The pg_interval extension adds support for PostgreSQL's interval type.
#
# This extension integrates with Sequel's native postgres and jdbc/postgresql
# adapters, so that when interval type values are retrieved, they are parsed and returned
# as instances of ActiveSupport::Duration.
#
# In addition to the parser, this extension adds literalizers for
# ActiveSupport::Duration that use the standard Sequel literalization
# callbacks, so they work on all adapters.
#
# To use this extension, load it into the Database instance:
#
#   DB.extension :pg_interval
#
# This extension integrates with the pg_array extension. If you plan
# to use arrays of interval types, load the pg_array extension before the
# pg_interval extension:
#
#   DB.extension :pg_array, :pg_interval
#
# The parser this extension uses requires that IntervalStyle for PostgreSQL
# is set to postgres (the default setting). If IntervalStyle is changed from
# the default setting, the parser will probably not work. The parser used is
# very simple, and is only designed to parse PostgreSQL's default output
# format, it is not designed to support all input formats that PostgreSQL
# supports.
#
# See the {schema modification guide}[rdoc-ref:doc/schema_modification.rdoc]
# for details on using interval columns in CREATE/ALTER TABLE statements.
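#
# An illustrative round-trip sketch (not part of the original documentation;
# the events table and duration column are hypothetical, and
# ActiveSupport::Duration.build requires Active Support 5.1+):
#
#   DB.extension :pg_interval
#   DB[:events].insert(duration: ActiveSupport::Duration.build(3660))
#   DB[:events].get(:duration) # => 1 hour, 1 minute (ActiveSupport::Duration)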
# # Related module: Sequel::Postgres::IntervalDatabaseMethods require 'active_support' require 'active_support/duration' # :nocov: begin require 'active_support/version' rescue LoadError end # :nocov: module Sequel module Postgres module IntervalDatabaseMethods DURATION_UNITS = [:years, :months, :weeks, :days, :hours, :minutes, :seconds].freeze # Return an unquoted string version of the duration object suitable for # use as a bound variable. def self.literal_duration(duration) h = Hash.new(0) duration.parts.each{|unit, value| h[unit] += value} s = String.new DURATION_UNITS.each do |unit| if (v = h[unit]) != 0 s << "#{v.is_a?(Integer) ? v : sprintf('%0.6f', v)} #{unit} " end end if s.empty? '0' else s end end # Creates callable objects that convert strings into ActiveSupport::Duration instances. class Parser # Whether ActiveSupport::Duration.new takes parts as array instead of hash USE_PARTS_ARRAY = !defined?(ActiveSupport::VERSION::STRING) || ActiveSupport::VERSION::STRING < '5.1' if defined?(ActiveSupport::Duration::SECONDS_PER_MONTH) SECONDS_PER_MONTH = ActiveSupport::Duration::SECONDS_PER_MONTH SECONDS_PER_YEAR = ActiveSupport::Duration::SECONDS_PER_YEAR # :nocov: else SECONDS_PER_MONTH = 2592000 SECONDS_PER_YEAR = 31557600 # :nocov: end # Parse the interval input string into an ActiveSupport::Duration instance. def call(string) raise(InvalidValue, "invalid or unhandled interval format: #{string.inspect}") unless matches = /\A([+-]?\d+ years?\s?)?([+-]?\d+ mons?\s?)?([+-]?\d+ days?\s?)?(?:(?:([+-])?(\d{2,10}):(\d\d):(\d\d(\.\d+)?))|([+-]?\d+ hours?\s?)?([+-]?\d+ mins?\s?)?([+-]?\d+(\.\d+)? secs?\s?)?)?\z/.match(string) value = 0 parts = {} if v = matches[1] v = v.to_i value += SECONDS_PER_YEAR * v parts[:years] = v end if v = matches[2] v = v.to_i value += SECONDS_PER_MONTH * v parts[:months] = v end if v = matches[3] v = v.to_i value += 86400 * v parts[:days] = v end if matches[5] seconds = matches[5].to_i * 3600 + matches[6].to_i * 60 seconds += matches[8] ? matches[7].to_f : matches[7].to_i seconds *= -1 if matches[4] == '-' value += seconds parts[:seconds] = seconds elsif matches[9] || matches[10] || matches[11] seconds = 0 if v = matches[9] seconds += v.to_i * 3600 end if v = matches[10] seconds += v.to_i * 60 end if v = matches[11] seconds += matches[12] ? v.to_f : v.to_i end value += seconds parts[:seconds] = seconds end # :nocov: if USE_PARTS_ARRAY parts = parts.to_a end # :nocov: ActiveSupport::Duration.new(value, parts) end end # Single instance of Parser used for parsing, to save on memory (since the parser has no state). PARSER = Parser.new # Reset the conversion procs if using the native postgres adapter, # and extend the datasets to correctly literalize ActiveSupport::Duration values. def self.extended(db) db.instance_exec do extend_datasets(IntervalDatasetMethods) add_conversion_proc(1186, Postgres::IntervalDatabaseMethods::PARSER) if respond_to?(:register_array_type) register_array_type('interval', :oid=>1187, :scalar_oid=>1186) end @schema_type_classes[:interval] = ActiveSupport::Duration end end # Handle ActiveSupport::Duration values in bound variables. def bound_variable_arg(arg, conn) case arg when ActiveSupport::Duration IntervalDatabaseMethods.literal_duration(arg) else super end end private # Set the :ruby_default value if the default value is recognized as an interval. 
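      # For example (an illustrative sketch), a column created with a default
      # of Sequel.lit("'1 day'::interval") would produce a schema entry whose
      # :ruby_default is a 1-day ActiveSupport::Duration.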
      def schema_post_process(_)
        super.each do |a|
          h = a[1]
          if h[:type] == :interval && h[:default] =~ /\A'([\w ]+)'::interval\z/
            h[:ruby_default] = PARSER.call($1)
          end
        end
      end

      # Typecast value correctly to an ActiveSupport::Duration instance.
      # If already an ActiveSupport::Duration, return it.
      # If a numeric argument is given, assume it represents a number
      # of seconds, and create a new ActiveSupport::Duration instance
      # representing that number of seconds.
      # If a String, assume it is in PostgreSQL interval output format
      # and attempt to parse it.
      def typecast_value_interval(value)
        case value
        when ActiveSupport::Duration
          value
        when Numeric
          ActiveSupport::Duration.new(value, [[:seconds, value]])
        when String
          PARSER.call(typecast_check_string_length(value, 1000))
        else
          raise Sequel::InvalidValue, "invalid value for interval type: #{value.inspect}"
        end
      end
    end

    module IntervalDatasetMethods
      private

      # Allow auto parameterization of ActiveSupport::Duration instances.
      def auto_param_type_fallback(v)
        if defined?(super) && (type = super)
          type
        elsif ActiveSupport::Duration === v
          "::interval"
        end
      end

      # Handle literalization of ActiveSupport::Duration objects, treating them as
      # PostgreSQL intervals.
      def literal_other_append(sql, v)
        case v
        when ActiveSupport::Duration
          literal_append(sql, IntervalDatabaseMethods.literal_duration(v))
          sql << '::interval'
        else
          super
        end
      end
    end
  end

  Database.register_extension(:pg_interval, Postgres::IntervalDatabaseMethods)
end

sequel-5.63.0/lib/sequel/extensions/pg_json.rb

# frozen-string-literal: true
#
# The pg_json extension adds support for Sequel to handle
# PostgreSQL's json and jsonb types. By default, it wraps
# JSON arrays and JSON objects with ruby array-like and
# hash-like objects. If you would like to wrap JSON primitives
# (numbers, strings, +null+, +true+, and +false+), you need to
# use the +wrap_json_primitives+ setter:
#
#   DB.extension :pg_json
#   DB.wrap_json_primitives = true
#
# Note that wrapping JSON primitives changes the behavior for
# JSON false and null values. Because only +false+ and +nil+
# in Ruby are considered falsey, wrapping these objects results
# in unexpected behavior if you use the values directly in
# conditionals:
#
#   if DB[:table].get(:json_column)
#     # called if the value of json_column is null/false
#     # if you are wrapping primitives
#   end
#
# To extract the Ruby primitive object from the wrapper object,
# you can use +__getobj__+ (this comes from Ruby's delegate library).
# # To wrap an existing Ruby array, hash, string, integer, float, # +nil+, +true+, or +false+, use +Sequel.pg_json_wrap+ or +Sequel.pg_jsonb_wrap+: # # Sequel.pg_json_wrap(object) # json type # Sequel.pg_jsonb_wrap(object) # jsonb type # # So if you want to insert an array or hash into an json database column: # # DB[:table].insert(column: Sequel.pg_json_wrap([1, 2, 3])) # DB[:table].insert(column: Sequel.pg_json_wrap({'a'=>1, 'b'=>2})) # # Note that the +pg_json_wrap+ and +pg_jsonb_wrap+ methods only handle Ruby primitives, # they do not handle already wrapped objects. # # If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc], # or you have loaded the core_refinements extension # and have activated refinements for the file, you can also use the # +pg_json+ and +pg_jsonb+ methods directly on Array or Hash: # # array.pg_json # json type # array.pg_jsonb # jsonb type # # hash.pg_json # json type # hash.pg_jsonb # jsonb type # # Model classes that use json or jsonb columns will have typecasting automatically # setup, so you can assign Ruby primitives to model columns and have the wrapped # objects automatically created. However, for backwards compatibility, passing # a string object will parse the string as JSON, not create a JSON string object. # # obj = Model.new # obj.json_column = {'a'=>'b'} # obj.json_column.class # # => Sequel::Postgres::JSONHash # obj.json_column['a'] # # => 'b' # # obj.json_column = '{"a": "b"}' # obj.json_column.class # # => Sequel::Postgres::JSONHash # obj.json_column['a'] # # => 'b' # # You can change the handling of string typecasting by using +typecast_json_strings+: # # DB.typecast_json_strings = true # obj.json_column = '{"a": "b"}' # obj.json_column.class # # => Sequel::Postgres::JSONString # obj.json_column # # => '{"a": "b"}' # # Note that +nil+ values are never automatically wrapped: # # obj.json_column = nil # obj.json_column.class # # => NilClass # obj.json_column # # => nil # # If you want to set a JSON null value when using a model, you must wrap it # explicitly: # # obj.json_column = Sequel.pg_json_wrap(nil) # obj.json_column.class # # => Sequel::Postgres::JSONNull # obj.json_column # # => nil # # To use this extension, load it into the Database instance: # # DB.extension :pg_json # # See the {schema modification guide}[rdoc-ref:doc/schema_modification.rdoc] # for details on using json columns in CREATE/ALTER TABLE statements. # # This extension integrates with the pg_array extension. If you plan # to use the json[] or jsonb[] types, load the pg_array extension before the # pg_json extension: # # DB.extension :pg_array, :pg_json # # Note that when accessing json hashes, you should always use strings for keys. # Attempting to use other values (such as symbols) will not work correctly. # # This extension requires both the json and delegate libraries. However, you # can override +Sequel.parse_json+, +Sequel.object_to_json+, and # +Sequel.json_parser_error_class+ to use an alternative JSON implementation. # # Related modules: Sequel::Postgres::JSONDatabaseMethods require 'delegate' require 'json' module Sequel module Postgres # A module included in all of the JSON wrapper classes. module JSONObject end # A module included in all of the JSONB wrapper classes. 
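    # For example (an illustrative sketch, not part of the original source),
    # either wrapper hierarchy can be detected with is_a?:
    #
    #   Sequel.pg_jsonb_wrap(1).is_a?(Sequel::Postgres::JSONBObject) # => true
    #   Sequel.pg_json_wrap(1).is_a?(Sequel::Postgres::JSONBObject)  # => false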
module JSONBObject end create_delegate_class = lambda do |name, delegate_class| base_class = DelegateClass(delegate_class) base_class.class_eval do include Sequel::SQL::AliasMethods include Sequel::SQL::CastMethods end json_class = Class.new(base_class) do include JSONObject def sql_literal_append(ds, sql) ds.literal_append(sql, Sequel.object_to_json(self)) sql << '::json' end # Allow automatic parameterization. def sequel_auto_param_type(ds) "::json" end end jsonb_class = Class.new(base_class) do include JSONBObject def sql_literal_append(ds, sql) ds.literal_append(sql, Sequel.object_to_json(self)) sql << '::jsonb' end # Allow automatic parameterization. def sequel_auto_param_type(ds) "::jsonb" end end const_set(:"JSON#{name}Base", base_class) const_set(:"JSON#{name}", json_class) const_set(:"JSONB#{name}", jsonb_class) end create_delegate_class.call(:Array, Array) create_delegate_class.call(:Hash, Hash) create_delegate_class.call(:String, String) create_delegate_class.call(:Integer, Integer) create_delegate_class.call(:Float, Float) create_delegate_class.call(:Null, NilClass) create_delegate_class.call(:True, TrueClass) create_delegate_class.call(:False, FalseClass) JSON_WRAPPER_MAPPING = { ::Array => JSONArray, ::Hash => JSONHash, }.freeze JSONB_WRAPPER_MAPPING = { ::Array => JSONBArray, ::Hash => JSONBHash, }.freeze JSON_PRIMITIVE_WRAPPER_MAPPING = { ::String => JSONString, ::Integer => JSONInteger, ::Float => JSONFloat, ::NilClass => JSONNull, ::TrueClass => JSONTrue, ::FalseClass => JSONFalse, } JSONB_PRIMITIVE_WRAPPER_MAPPING = { ::String => JSONBString, ::Integer => JSONBInteger, ::Float => JSONBFloat, ::NilClass => JSONBNull, ::TrueClass => JSONBTrue, ::FalseClass => JSONBFalse, } if RUBY_VERSION < '2.4' # :nocov: JSON_PRIMITIVE_WRAPPER_MAPPING[Fixnum] = JSONInteger JSON_PRIMITIVE_WRAPPER_MAPPING[Bignum] = JSONInteger JSONB_PRIMITIVE_WRAPPER_MAPPING[Fixnum] = JSONBInteger JSONB_PRIMITIVE_WRAPPER_MAPPING[Bignum] = JSONBInteger # :nocov: end JSON_PRIMITIVE_WRAPPER_MAPPING.freeze JSONB_PRIMITIVE_WRAPPER_MAPPING.freeze JSON_COMBINED_WRAPPER_MAPPING =JSON_WRAPPER_MAPPING.merge(JSON_PRIMITIVE_WRAPPER_MAPPING).freeze JSONB_COMBINED_WRAPPER_MAPPING =JSONB_WRAPPER_MAPPING.merge(JSONB_PRIMITIVE_WRAPPER_MAPPING).freeze JSONB_WRAP_CLASSES = JSONB_COMBINED_WRAPPER_MAPPING.keys.freeze Sequel::Deprecation.deprecate_constant(self, :JSON_WRAPPER_MAPPING) Sequel::Deprecation.deprecate_constant(self, :JSONB_WRAPPER_MAPPING) Sequel::Deprecation.deprecate_constant(self, :JSON_PRIMITIVE_WRAPPER_MAPPING) Sequel::Deprecation.deprecate_constant(self, :JSONB_PRIMITIVE_WRAPPER_MAPPING) Sequel::Deprecation.deprecate_constant(self, :JSON_COMBINED_WRAPPER_MAPPING) Sequel::Deprecation.deprecate_constant(self, :JSONB_COMBINED_WRAPPER_MAPPING) Sequel::Deprecation.deprecate_constant(self, :JSONB_WRAP_CLASSES) JSON_WRAP_CLASSES = [Hash, Array, String, Integer, Float, NilClass, TrueClass, FalseClass].freeze # Methods enabling Database object integration with the json type. module JSONDatabaseMethods def self.extended(db) db.instance_exec do add_conversion_proc(114, method(:_db_parse_json)) add_conversion_proc(3802, method(:_db_parse_jsonb)) if respond_to?(:register_array_type) register_array_type('json', :oid=>199, :scalar_oid=>114) register_array_type('jsonb', :oid=>3807, :scalar_oid=>3802) end @schema_type_classes[:json] = [JSONObject] @schema_type_classes[:jsonb] = [JSONBObject] end end # Return the wrapper class for the json type if value is Hash or Array. 
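      # For example (illustrative):
      #
      #   JSONDatabaseMethods.json_wrapper({})   # => JSONHash
      #   JSONDatabaseMethods.json_wrapper([])   # => JSONArray
      #   JSONDatabaseMethods.json_wrapper('s')  # => nil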
def self.json_wrapper(value) case value when ::Hash JSONHash when ::Array JSONArray end end # Return the wrapper class for the jsonb type if value is Hash or Array. def self.jsonb_wrapper(value) case value when ::Hash JSONBHash when ::Array JSONBArray end end # Return the wrapper class for the json type if value is a supported type. def self.json_primitive_wrapper(value) case value when ::Hash JSONHash when ::Array JSONArray when ::String JSONString when ::Integer JSONInteger when ::Float JSONFloat when ::NilClass JSONNull when ::TrueClass JSONTrue when ::FalseClass JSONFalse end end # Return the wrapper class for the jsonb type if value is a supported type. def self.jsonb_primitive_wrapper(value) case value when ::Hash JSONBHash when ::Array JSONBArray when ::String JSONBString when ::Integer JSONBInteger when ::Float JSONBFloat when ::NilClass JSONBNull when ::TrueClass JSONBTrue when ::FalseClass JSONBFalse end end # Deprecated def self.db_parse_json(s) # SEQUEL6: Remove parse_json(s) rescue Sequel::InvalidValue raise unless s.is_a?(String) parse_json("[#{s}]").first end # Deprecated def self.db_parse_jsonb(s) # SEQUEL6: Remove parse_json(s, true) rescue Sequel::InvalidValue raise unless s.is_a?(String) parse_json("[#{s}]").first end # Deprecated def self.parse_json(s, jsonb=false) # SEQUEL6: Remove Sequel::Deprecation.deprecate("Sequel::Postgres::JSONDatabaseMethods.{parse_json,db_parse_json,db_parse_jsonb} are deprecated and will be removed in Sequel 6.") begin value = Sequel.parse_json(s) rescue Sequel.json_parser_error_class => e raise Sequel.convert_exception_class(e, Sequel::InvalidValue) end case value when Array (jsonb ? JSONBArray : JSONArray).new(value) when Hash (jsonb ? JSONBHash : JSONHash).new(value) when String, Numeric, true, false, nil value else raise Sequel::InvalidValue, "unhandled json value: #{value.inspect} (from #{s.inspect})" end end # Whether to wrap JSON primitives instead of using Ruby objects. # Wrapping the primitives allows the primitive values to roundtrip, # but it can cause problems, especially as false/null JSON values # will be treated as truthy in Ruby due to the wrapping. False by # default. attr_accessor :wrap_json_primitives # Whether to typecast strings for json/jsonb types as JSON # strings, instead of trying to parse the string as JSON. # False by default. attr_accessor :typecast_json_strings # Handle json and jsonb types in bound variables def bound_variable_arg(arg, conn) case arg when JSONObject, JSONBObject Sequel.object_to_json(arg) else super end end private # Parse JSON data coming from the database. Since PostgreSQL allows # non JSON data in JSON fields (such as plain numbers and strings), # we don't want to raise an exception for that. def _db_parse_json(s) _wrap_json(_parse_json(s)) rescue Sequel::InvalidValue raise unless s.is_a?(String) _wrap_json(_parse_json("[#{s}]").first) end # Same as _db_parse_json, but consider the input as jsonb. def _db_parse_jsonb(s) _wrap_jsonb(_parse_json(s)) rescue Sequel::InvalidValue raise unless s.is_a?(String) _wrap_jsonb(_parse_json("[#{s}]").first) end # Parse the given string as json, returning either a JSONArray # or JSONHash instance (or JSONBArray or JSONBHash instance if jsonb # argument is true), or a String, Numeric, true, false, or nil # if the json library used supports that. def _parse_json(s) Sequel.parse_json(s) rescue Sequel.json_parser_error_class => e raise Sequel.convert_exception_class(e, Sequel::InvalidValue) end # Wrap the parsed JSON value in the appropriate JSON wrapper class. 
# Only wrap primitive values if wrap_json_primitives is set. def _wrap_json(value) if klass = JSONDatabaseMethods.json_wrapper(value) klass.new(value) elsif klass = JSONDatabaseMethods.json_primitive_wrapper(value) if wrap_json_primitives klass.new(value) else value end else raise Sequel::InvalidValue, "unhandled json value: #{value.inspect}" end end # Wrap the parsed JSON value in the appropriate JSONB wrapper class. # Only wrap primitive values if wrap_json_primitives is set. def _wrap_jsonb(value) if klass = JSONDatabaseMethods.jsonb_wrapper(value) klass.new(value) elsif klass = JSONDatabaseMethods.jsonb_primitive_wrapper(value) if wrap_json_primitives klass.new(value) else value end else raise Sequel::InvalidValue, "unhandled jsonb value: #{value.inspect}" end end # Make the column type detection recognize the json types. def schema_column_type(db_type) case db_type when 'json' :json when 'jsonb' :jsonb else super end end # Set the :callable_default value if the default value is recognized as an empty json/jsonb array/hash. def schema_post_process(_) super.each do |a| h = a[1] if (h[:type] == :json || h[:type] == :jsonb) && h[:default] =~ /\A'(\{\}|\[\])'::jsonb?\z/ is_array = $1 == '[]' klass = if h[:type] == :json if is_array JSONArray else JSONHash end elsif is_array JSONBArray else JSONBHash end h[:callable_default] = lambda{klass.new(is_array ? [] : {})} end end end # Convert the value given to a JSON wrapper object. def typecast_value_json(value) case value when JSONObject value when String if typecast_json_strings JSONString.new(value) else _wrap_json(_parse_json(value)) end when *JSON_WRAP_CLASSES JSONDatabaseMethods.json_primitive_wrapper(value).new(value) when JSONBObject value = value.__getobj__ JSONDatabaseMethods.json_primitive_wrapper(value).new(value) else raise Sequel::InvalidValue, "invalid value for json: #{value.inspect}" end end # Convert the value given to a JSONB wrapper object. def typecast_value_jsonb(value) case value when JSONBObject value when String if typecast_json_strings JSONBString.new(value) else _wrap_jsonb(_parse_json(value)) end when *JSON_WRAP_CLASSES JSONDatabaseMethods.jsonb_primitive_wrapper(value).new(value) when JSONObject value = value.__getobj__ JSONDatabaseMethods.jsonb_primitive_wrapper(value).new(value) else raise Sequel::InvalidValue, "invalid value for jsonb: #{value.inspect}" end end end end module SQL::Builders # Wrap the array or hash in a Postgres::JSONArray or Postgres::JSONHash. # Also handles Postgres::JSONObject and JSONBObjects. # For other objects, calls +Sequel.pg_json_op+ (which is defined # by the pg_json_ops extension). def pg_json(v) case v when Postgres::JSONObject v when Array Postgres::JSONArray.new(v) when Hash Postgres::JSONHash.new(v) when Postgres::JSONBObject v = v.__getobj__ Postgres::JSONDatabaseMethods.json_primitive_wrapper(v).new(v) else Sequel.pg_json_op(v) end end # Wraps Ruby array, hash, string, integer, float, true, false, and nil # values with the appropriate JSON wrapper. Raises an exception for # other types. def pg_json_wrap(v) case v when *Postgres::JSON_WRAP_CLASSES Postgres::JSONDatabaseMethods.json_primitive_wrapper(v).new(v) else raise Error, "invalid value passed to Sequel.pg_json_wrap: #{v.inspect}" end end # Wrap the array or hash in a Postgres::JSONBArray or Postgres::JSONBHash. # Also handles Postgres::JSONObject and JSONBObjects. # For other objects, calls +Sequel.pg_json_op+ (which is defined # by the pg_json_ops extension). 
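    #
    # A minimal sketch of typical use (illustrative only; assumes this
    # extension is loaded, and that +DB+, the :t table, and its jsonb
    # column +c+ are hypothetical):
    #
    #   Sequel.pg_jsonb('a'=>1)    # JSONBHash wrapper
    #   Sequel.pg_jsonb([1, 2, 3]) # JSONBArray wrapper
    #   DB[:t].insert(c: Sequel.pg_jsonb('a'=>1))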
    def pg_jsonb(v)
      case v
      when Postgres::JSONBObject
        v
      when Array
        Postgres::JSONBArray.new(v)
      when Hash
        Postgres::JSONBHash.new(v)
      when Postgres::JSONObject
        v = v.__getobj__
        Postgres::JSONDatabaseMethods.jsonb_primitive_wrapper(v).new(v)
      else
        Sequel.pg_jsonb_op(v)
      end
    end

    # Wraps Ruby array, hash, string, integer, float, true, false, and nil
    # values with the appropriate JSONB wrapper. Raises an exception for
    # other types.
    def pg_jsonb_wrap(v)
      case v
      when *Postgres::JSON_WRAP_CLASSES
        Postgres::JSONDatabaseMethods.jsonb_primitive_wrapper(v).new(v)
      else
        raise Error, "invalid value passed to Sequel.pg_jsonb_wrap: #{v.inspect}"
      end
    end
  end

  Database.register_extension(:pg_json, Postgres::JSONDatabaseMethods)
end

# :nocov:
if Sequel.core_extensions?
  class Array
    # Return a Sequel::Postgres::JSONArray proxy to the receiver.
    # This is mostly useful as a short cut for creating JSONArray
    # objects that didn't come from the database.
    def pg_json
      Sequel::Postgres::JSONArray.new(self)
    end

    # Return a Sequel::Postgres::JSONBArray proxy to the receiver.
    # This is mostly useful as a short cut for creating JSONBArray
    # objects that didn't come from the database.
    def pg_jsonb
      Sequel::Postgres::JSONBArray.new(self)
    end
  end

  class Hash
    # Return a Sequel::Postgres::JSONHash proxy to the receiver.
    # This is mostly useful as a short cut for creating JSONHash
    # objects that didn't come from the database.
    def pg_json
      Sequel::Postgres::JSONHash.new(self)
    end

    # Return a Sequel::Postgres::JSONBHash proxy to the receiver.
    # This is mostly useful as a short cut for creating JSONBHash
    # objects that didn't come from the database.
    def pg_jsonb
      Sequel::Postgres::JSONBHash.new(self)
    end
  end
end

if defined?(Sequel::CoreRefinements)
  module Sequel::CoreRefinements
    refine Array do
      def pg_json
        Sequel::Postgres::JSONArray.new(self)
      end

      def pg_jsonb
        Sequel::Postgres::JSONBArray.new(self)
      end
    end

    refine Hash do
      def pg_json
        Sequel::Postgres::JSONHash.new(self)
      end

      def pg_jsonb
        Sequel::Postgres::JSONBHash.new(self)
      end
    end
  end
end
# :nocov:

sequel-5.63.0/lib/sequel/extensions/pg_json_ops.rb

# frozen-string-literal: true
#
# The pg_json_ops extension adds support to Sequel's DSL to make
# it easier to call PostgreSQL JSON functions and operators (added
# first in PostgreSQL 9.3). It also supports the JSONB functions
# and operators added in PostgreSQL 9.4, as well as additional
# functions and operators added in later versions.
#
# To load the extension:
#
#   Sequel.extension :pg_json_ops
#
# The most common usage is passing an expression to Sequel.pg_json_op
# or Sequel.pg_jsonb_op:
#
#   j = Sequel.pg_json_op(:json_column)
#   jb = Sequel.pg_jsonb_op(:jsonb_column)
#
# If you have also loaded the pg_json extension, you can use
# Sequel.pg_json or Sequel.pg_jsonb as well:
#
#   j = Sequel.pg_json(:json_column)
#   jb = Sequel.pg_jsonb(:jsonb_column)
#
# Also, on most Sequel expression objects, you can call the pg_json
# or pg_jsonb method:
#
#   j = Sequel[:json_column].pg_json
#   jb = Sequel[:jsonb_column].pg_jsonb
#
# If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc],
# or you have loaded the core_refinements extension
# and have activated refinements for the file, you can also use Symbol#pg_json or
# Symbol#pg_jsonb:
#
#   j = :json_column.pg_json
#   jb = :jsonb_column.pg_jsonb
#
# This creates a Sequel::Postgres::JSONOp or Sequel::Postgres::JSONBOp object that can be used
# for easier querying. The following methods are available for both JSONOp and JSONBOp instances:
#
#   j[1]                     # (json_column -> 1)
#   j[%w'a b']               # (json_column #> ARRAY['a','b'])
#   j.get_text(1)            # (json_column ->> 1)
#   j.get_text(%w'a b')      # (json_column #>> ARRAY['a','b'])
#   j.extract('a', 'b')      # json_extract_path(json_column, 'a', 'b')
#   j.extract_text('a', 'b') # json_extract_path_text(json_column, 'a', 'b')
#
#   j.array_length        # json_array_length(json_column)
#   j.array_elements      # json_array_elements(json_column)
#   j.array_elements_text # json_array_elements_text(json_column)
#   j.each                # json_each(json_column)
#   j.each_text           # json_each_text(json_column)
#   j.keys                # json_object_keys(json_column)
#   j.typeof              # json_typeof(json_column)
#   j.strip_nulls         # json_strip_nulls(json_column)
#
#   j.populate(:a)     # json_populate_record(:a, json_column)
#   j.populate_set(:a) # json_populate_recordset(:a, json_column)
#   j.to_record        # json_to_record(json_column)
#   j.to_recordset     # json_to_recordset(json_column)
#
# There are additional methods that are only supported on JSONBOp instances:
#
#   j - 1                     # (jsonb_column - 1)
#   j.concat(:h)              # (jsonb_column || h)
#   j.contain_all(:a)         # (jsonb_column ?& a)
#   j.contain_any(:a)         # (jsonb_column ?| a)
#   j.contains(:h)            # (jsonb_column @> h)
#   j.contained_by(:h)        # (jsonb_column <@ h)
#   j.delete_path(%w'0 a')    # (jsonb_column #- ARRAY['0','a'])
#   j.has_key?('a')           # (jsonb_column ? 'a')
#   j.insert(%w'0 a', 'a'=>1) # jsonb_insert(jsonb_column, ARRAY['0','a'], '{"a":1}'::jsonb, false)
#   j.pretty                  # jsonb_pretty(jsonb_column)
#   j.set(%w'0 a', :h)        # jsonb_set(jsonb_column, ARRAY['0','a'], h, true)
#
#   j.set_lax(%w'0 a', :h, false, 'raise_exception')
#   # jsonb_set_lax(jsonb_column, ARRAY['0','a'], h, false, 'raise_exception')
#
# On PostgreSQL 12+ SQL/JSON path functions and operators are supported:
#
#   j.path_exists('$.foo')      # (jsonb_column @? '$.foo')
#   j.path_match('$.foo')       # (jsonb_column @@ '$.foo')
#
#   j.path_exists!('$.foo')     # jsonb_path_exists(jsonb_column, '$.foo')
#   j.path_match!('$.foo')      # jsonb_path_match(jsonb_column, '$.foo')
#   j.path_query('$.foo')       # jsonb_path_query(jsonb_column, '$.foo')
#   j.path_query_array('$.foo') # jsonb_path_query_array(jsonb_column, '$.foo')
#   j.path_query_first('$.foo') # jsonb_path_query_first(jsonb_column, '$.foo')
#
# On PostgreSQL 13+ timezone-aware SQL/JSON path functions and operators are supported:
#
#   j.path_exists_tz!('$.foo')     # jsonb_path_exists_tz(jsonb_column, '$.foo')
#   j.path_match_tz!('$.foo')      # jsonb_path_match_tz(jsonb_column, '$.foo')
#   j.path_query_tz('$.foo')       # jsonb_path_query_tz(jsonb_column, '$.foo')
#   j.path_query_array_tz('$.foo') # jsonb_path_query_array_tz(jsonb_column, '$.foo')
#   j.path_query_first_tz('$.foo') # jsonb_path_query_first_tz(jsonb_column, '$.foo')
#
# For the PostgreSQL 12+ SQL/JSON path functions, one argument is required (+path+) and
# two more arguments are optional (+vars+ and +silent+). +path+ specifies the JSON path.
# +vars+ specifies a hash or a string in JSON format of named variables to be
# substituted in +path+. +silent+ specifies whether errors are suppressed. By default,
# errors are not suppressed.
#
# On PostgreSQL 14+, the JSONB <tt>[]</tt> method will use subscripts instead of being
# the same as +get+, if the value being wrapped is an identifier:
#
#   Sequel.pg_jsonb_op(:jsonb_column)[1]    # jsonb_column[1]
#   Sequel.pg_jsonb_op(:jsonb_column)[1][2] # jsonb_column[1][2]
#   Sequel.pg_jsonb_op(Sequel[:j][:b])[1]   # j.b[1]
#
# This support allows you to use JSONB subscripts in UPDATE statements to update only
# part of a column:
#
#   c = Sequel.pg_jsonb_op(:c)
#   DB[:t].update(c['key1'] => '1', c['key2'] => '"a"')
#   # UPDATE "t" SET "c"['key1'] = '1', "c"['key2'] = '"a"'
#
# Note that you have to provide the value of a JSONB subscript as a JSONB value, so this
# will update +key1+ to use the number <tt>1</tt>, and +key2+ to use the string <tt>a</tt>.
# For this reason it may be simpler to use +to_json+:
#
#   c = Sequel.pg_jsonb_op(:c)
#   DB[:t].update(c['key1'] => 1.to_json, c['key2'] => "a".to_json)
#
# If you are also using the pg_json extension, you should load it before
# loading this extension. Doing so will allow you to use the #op method on
# JSONHash, JSONArray, JSONBHash, and JSONBArray, allowing you to perform json/jsonb operations
# on json/jsonb literals.
#
# In order to get the automatic conversion from a ruby array to a PostgreSQL array
# (as shown in the #[] and #get_text examples above), you need to load the pg_array
# extension.
#
# Related modules: Sequel::Postgres::JSONBaseOp, Sequel::Postgres::JSONOp,
# Sequel::Postgres::JSONBOp

module Sequel
  module Postgres
    # The JSONBaseOp class is a simple container for a single object that
    # defines methods that yield Sequel expression objects representing
    # PostgreSQL json operators and functions.
    #
    # In the method documentation examples, assume that:
    #
    #   json_op = Sequel.pg_json(:json)
    class JSONBaseOp < Sequel::SQL::Wrapper
      GET = ["(".freeze, " -> ".freeze, ")".freeze].freeze
      GET_TEXT = ["(".freeze, " ->> ".freeze, ")".freeze].freeze
      GET_PATH = ["(".freeze, " #> ".freeze, ")".freeze].freeze
      GET_PATH_TEXT = ["(".freeze, " #>> ".freeze, ")".freeze].freeze

      # Get JSON array element or object field as json. If an array is given,
      # gets the object at the specified path.
# # json_op[1] # (json -> 1) # json_op['a'] # (json -> 'a') # json_op[%w'a b'] # (json #> ARRAY['a', 'b']) def [](key) if is_array?(key) json_op(GET_PATH, wrap_array(key)) else json_op(GET, key) end end alias get [] # Returns a set of json values for the elements in the json array. # # json_op.array_elements # json_array_elements(json) def array_elements function(:array_elements) end # Returns a set of text values for the elements in the json array. # # json_op.array_elements_text # json_array_elements_text(json) def array_elements_text function(:array_elements_text) end # Get the length of the outermost json array. # # json_op.array_length # json_array_length(json) def array_length Sequel::SQL::NumericExpression.new(:NOOP, function(:array_length)) end # Returns a set of key and value pairs, where the keys # are text and the values are JSON. # # json_op.each # json_each(json) def each function(:each) end # Returns a set of key and value pairs, where the keys # and values are both text. # # json_op.each_text # json_each_text(json) def each_text function(:each_text) end # Returns a json value for the object at the given path. # # json_op.extract('a') # json_extract_path(json, 'a') # json_op.extract('a', 'b') # json_extract_path(json, 'a', 'b') def extract(*a) self.class.new(function(:extract_path, *a)) end # Returns a text value for the object at the given path. # # json_op.extract_text('a') # json_extract_path_text(json, 'a') # json_op.extract_text('a', 'b') # json_extract_path_text(json, 'a', 'b') def extract_text(*a) Sequel::SQL::StringExpression.new(:NOOP, function(:extract_path_text, *a)) end # Get JSON array element or object field as text. If an array is given, # gets the object at the specified path. # # json_op.get_text(1) # (json ->> 1) # json_op.get_text('a') # (json ->> 'a') # json_op.get_text(%w'a b') # (json #>> ARRAY['a', 'b']) def get_text(key) if is_array?(key) json_op(GET_PATH_TEXT, wrap_array(key)) else json_op(GET_TEXT, key) end end # Returns a set of keys AS text in the json object. # # json_op.keys # json_object_keys(json) def keys function(:object_keys) end # Expands the given argument using the columns in the json. # # json_op.populate(arg) # json_populate_record(arg, json) def populate(arg) SQL::Function.new(function_name(:populate_record), arg, self) end # Expands the given argument using the columns in the json. # # json_op.populate_set(arg) # json_populate_recordset(arg, json) def populate_set(arg) SQL::Function.new(function_name(:populate_recordset), arg, self) end # Returns a json value stripped of all internal null values. # # json_op.strip_nulls # json_strip_nulls(json) def strip_nulls self.class.new(function(:strip_nulls)) end # Builds arbitrary record from json object. You need to define the # structure of the record using #as on the resulting object: # # json_op.to_record.as(:x, [Sequel.lit('a integer'), Sequel.lit('b text')]) # json_to_record(json) AS x(a integer, b text) def to_record function(:to_record) end # Builds arbitrary set of records from json array of objects. You need to define the # structure of the records using #as on the resulting object: # # json_op.to_recordset.as(:x, [Sequel.lit('a integer'), Sequel.lit('b text')]) # json_to_recordset(json) AS x(a integer, b text) def to_recordset function(:to_recordset) end # Returns the type of the outermost json value as text. 
# # json_op.typeof # json_typeof(json) def typeof function(:typeof) end private # Return a placeholder literal with the given str and args, wrapped # in an JSONOp or JSONBOp, used by operators that return json or jsonb. def json_op(str, args) self.class.new(Sequel::SQL::PlaceholderLiteralString.new(str, [self, args])) end # Return a function with the given name, and the receiver as the first # argument, with any additional arguments given. def function(name, *args) SQL::Function.new(function_name(name), self, *args) end # Whether the given object represents an array in PostgreSQL. def is_array?(a) a.is_a?(Array) || (defined?(PGArray) && a.is_a?(PGArray)) || (defined?(ArrayOp) && a.is_a?(ArrayOp)) end # Automatically wrap argument in a PGArray if it is a plain Array. # Requires that the pg_array extension has been loaded to work. def wrap_array(arg) if arg.instance_of?(Array) && Sequel.respond_to?(:pg_array) Sequel.pg_array(arg) else arg end end end # JSONBaseOp subclass for the json type class JSONOp < JSONBaseOp # Return the receiver, since it is already a JSONOp. def pg_json self end private # The json type functions are prefixed with json_ def function_name(name) "json_#{name}" end end # JSONBaseOp subclass for the jsonb type. # # In the method documentation examples, assume that: # # jsonb_op = Sequel.pg_jsonb(:jsonb) class JSONBOp < JSONBaseOp CONCAT = ["(".freeze, " || ".freeze, ")".freeze].freeze CONTAIN_ALL = ["(".freeze, " ?& ".freeze, ")".freeze].freeze CONTAIN_ANY = ["(".freeze, " ?| ".freeze, ")".freeze].freeze CONTAINS = ["(".freeze, " @> ".freeze, ")".freeze].freeze CONTAINED_BY = ["(".freeze, " <@ ".freeze, ")".freeze].freeze DELETE_PATH = ["(".freeze, " #- ".freeze, ")".freeze].freeze HAS_KEY = ["(".freeze, " ? ".freeze, ")".freeze].freeze PATH_EXISTS = ["(".freeze, " @? ".freeze, ")".freeze].freeze PATH_MATCH = ["(".freeze, " @@ ".freeze, ")".freeze].freeze # Support subscript syntax for JSONB. def [](key) if is_array?(key) super else case @value when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, JSONBSubscriptOp # Only use subscripts for identifiers. In other cases, switching from # the -> operator to [] for subscripts causes SQL syntax issues. You # only need the [] for subscripting when doing assignment, and # assignment is generally done on identifiers. self.class.new(JSONBSubscriptOp.new(self, key)) else super end end end # jsonb expression for deletion of the given argument from the # current jsonb. # # jsonb_op - "a" # (jsonb - 'a') def -(other) self.class.new(super) end # jsonb expression for concatenation of the given jsonb into # the current jsonb. # # jsonb_op.concat(:h) # (jsonb || h) def concat(other) json_op(CONCAT, wrap_input_jsonb(other)) end # Check if the receiver contains all of the keys in the given array: # # jsonb_op.contain_all(:a) # (jsonb ?& a) def contain_all(other) bool_op(CONTAIN_ALL, wrap_input_array(other)) end # Check if the receiver contains any of the keys in the given array: # # jsonb_op.contain_any(:a) # (jsonb ?| a) def contain_any(other) bool_op(CONTAIN_ANY, wrap_input_array(other)) end # Check if the receiver contains all entries in the other jsonb: # # jsonb_op.contains(:h) # (jsonb @> h) def contains(other) bool_op(CONTAINS, wrap_input_jsonb(other)) end # Check if the other jsonb contains all entries in the receiver: # # jsonb_op.contained_by(:h) # (jsonb <@ h) def contained_by(other) bool_op(CONTAINED_BY, wrap_input_jsonb(other)) end # Removes the given path from the receiver. 
# # jsonb_op.delete_path(:h) # (jsonb #- h) def delete_path(other) json_op(DELETE_PATH, wrap_input_array(other)) end # Check if the receiver contains the given key: # # jsonb_op.has_key?('a') # (jsonb ? 'a') def has_key?(key) bool_op(HAS_KEY, key) end alias include? has_key? # Inserts the given jsonb value at the given path in the receiver. # The default is to insert the value before the given path, but # insert_after can be set to true to insert it after the given path. # # jsonb_op.insert(['a', 'b'], h) # jsonb_insert(jsonb, ARRAY['a', 'b'], h, false) # jsonb_op.insert(['a', 'b'], h, true) # jsonb_insert(jsonb, ARRAY['a', 'b'], h, true) def insert(path, other, insert_after=false) self.class.new(function(:insert, wrap_input_array(path), wrap_input_jsonb(other), insert_after)) end # Returns whether the JSON path returns any item for the json object. # # json_op.path_exists("$.foo") # (json @? '$.foo') def path_exists(path) bool_op(PATH_EXISTS, path) end # Returns whether the JSON path returns any item for the json object. # # json_op.path_exists!("$.foo") # # jsonb_path_exists(json, '$.foo') # # json_op.path_exists!("$.foo ? ($ > $x)", x: 2) # # jsonb_path_exists(json, '$.foo ? ($ > $x)', '{"x":2}') # # json_op.path_exists!("$.foo ? ($ > $x)", {x: 2}, true) # # jsonb_path_exists(json, '$.foo ? ($ > $x)', '{"x":2}', true) def path_exists!(path, vars=nil, silent=nil) Sequel::SQL::BooleanExpression.new(:NOOP, _path_function(:jsonb_path_exists, path, vars, silent)) end # The same as #path_exists!, except that timezone-aware conversions are used for date/time values. def path_exists_tz!(path, vars=nil, silent=nil) Sequel::SQL::BooleanExpression.new(:NOOP, _path_function(:jsonb_path_exists_tz, path, vars, silent)) end # Returns the first item of the result of JSON path predicate check for the json object. # Returns nil if the first item is not true or false. # # json_op.path_match("$.foo") # (json @@ '$.foo') def path_match(path) bool_op(PATH_MATCH, path) end # Returns the first item of the result of JSON path predicate check for the json object. # Returns nil if the first item is not true or false and silent is true. # # json_op.path_match!("$.foo") # # jsonb_path_match(json, '$.foo') # # json_op.path_match!("$.foo ? ($ > $x)", x: 2) # # jsonb_path_match(json, '$.foo ? ($ > $x)', '{"x":2}') # # json_op.path_match!("$.foo ? ($ > $x)", {x: 2}, true) # # jsonb_path_match(json, '$.foo ? ($ > $x)', '{"x":2}', true) def path_match!(path, vars=nil, silent=nil) Sequel::SQL::BooleanExpression.new(:NOOP, _path_function(:jsonb_path_match, path, vars, silent)) end # The same as #path_match!, except that timezone-aware conversions are used for date/time values. def path_match_tz!(path, vars=nil, silent=nil) Sequel::SQL::BooleanExpression.new(:NOOP, _path_function(:jsonb_path_match_tz, path, vars, silent)) end # Returns a set of all jsonb values specified by the JSON path # for the json object. # # json_op.path_query("$.foo") # # jsonb_path_query(json, '$.foo') # # json_op.path_query("$.foo ? ($ > $x)", x: 2) # # jsonb_path_query(json, '$.foo ? ($ > $x)', '{"x":2}') # # json_op.path_query("$.foo ? ($ > $x)", {x: 2}, true) # # jsonb_path_query(json, '$.foo ? ($ > $x)', '{"x":2}', true) def path_query(path, vars=nil, silent=nil) _path_function(:jsonb_path_query, path, vars, silent) end # The same as #path_query, except that timezone-aware conversions are used for date/time values. 
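      #
      # Illustrative call, mirroring the #path_query examples above:
      #
      #   json_op.path_query_tz("$.foo") # jsonb_path_query_tz(json, '$.foo')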
def path_query_tz(path, vars=nil, silent=nil) _path_function(:jsonb_path_query_tz, path, vars, silent) end # Returns a jsonb array of all values specified by the JSON path # for the json object. # # json_op.path_query_array("$.foo") # # jsonb_path_query_array(json, '$.foo') # # json_op.path_query_array("$.foo ? ($ > $x)", x: 2) # # jsonb_path_query_array(json, '$.foo ? ($ > $x)', '{"x":2}') # # json_op.path_query_array("$.foo ? ($ > $x)", {x: 2}, true) # # jsonb_path_query_array(json, '$.foo ? ($ > $x)', '{"x":2}', true) def path_query_array(path, vars=nil, silent=nil) JSONBOp.new(_path_function(:jsonb_path_query_array, path, vars, silent)) end # The same as #path_query_array, except that timezone-aware conversions are used for date/time values. def path_query_array_tz(path, vars=nil, silent=nil) JSONBOp.new(_path_function(:jsonb_path_query_array_tz, path, vars, silent)) end # Returns the first item of the result specified by the JSON path # for the json object. # # json_op.path_query_first("$.foo") # # jsonb_path_query_first(json, '$.foo') # # json_op.path_query_first("$.foo ? ($ > $x)", x: 2) # # jsonb_path_query_first(json, '$.foo ? ($ > $x)', '{"x":2}') # # json_op.path_query_first("$.foo ? ($ > $x)", {x: 2}, true) # # jsonb_path_query_first(json, '$.foo ? ($ > $x)', '{"x":2}', true) def path_query_first(path, vars=nil, silent=nil) JSONBOp.new(_path_function(:jsonb_path_query_first, path, vars, silent)) end # The same as #path_query_first, except that timezone-aware conversions are used for date/time values. def path_query_first_tz(path, vars=nil, silent=nil) JSONBOp.new(_path_function(:jsonb_path_query_first_tz, path, vars, silent)) end # Return the receiver, since it is already a JSONBOp. def pg_jsonb self end # Return a pretty printed version of the receiver as a string expression. # # jsonb_op.pretty # jsonb_pretty(jsonb) def pretty Sequel::SQL::StringExpression.new(:NOOP, function(:pretty)) end # Set the given jsonb value at the given path in the receiver. # By default, this will create the value if it does not exist, but # create_missing can be set to false to not create a new value. # # jsonb_op.set(['a', 'b'], h) # jsonb_set(jsonb, ARRAY['a', 'b'], h, true) # jsonb_op.set(['a', 'b'], h, false) # jsonb_set(jsonb, ARRAY['a', 'b'], h, false) def set(path, other, create_missing=true) self.class.new(function(:set, wrap_input_array(path), wrap_input_jsonb(other), create_missing)) end # The same as #set, except if +other+ is +nil+, then behaves according to +null_value_treatment+, # which can be one of 'raise_exception', 'use_json_null' (default), 'delete_key', or 'return_target'. def set_lax(path, other, create_missing=true, null_value_treatment='use_json_null') self.class.new(function(:set_lax, wrap_input_array(path), wrap_input_jsonb(other), create_missing, null_value_treatment)) end private # Internals of the jsonb SQL/JSON path functions. def _path_function(func, path, vars, silent) args = [] if vars if vars.is_a?(Hash) vars = vars.to_json end args << vars unless silent.nil? args << silent end end SQL::Function.new(func, self, path, *args) end # Return a placeholder literal with the given str and args, wrapped # in a boolean expression, used by operators that return booleans. 
def bool_op(str, other) Sequel::SQL::BooleanExpression.new(:NOOP, Sequel::SQL::PlaceholderLiteralString.new(str, [value, other])) end # Wrap argument in a PGArray if it is an array def wrap_input_array(obj) if obj.is_a?(Array) && Sequel.respond_to?(:pg_array) Sequel.pg_array(obj) else obj end end # Wrap argument in a JSONBArray or JSONBHash if it is an array or hash. def wrap_input_jsonb(obj) if Sequel.respond_to?(:pg_jsonb) && (obj.is_a?(Array) || obj.is_a?(Hash)) Sequel.pg_jsonb(obj) else obj end end # The jsonb type functions are prefixed with jsonb_ def function_name(name) "jsonb_#{name}" end end # Represents JSONB subscripts. This is abstracted because the # subscript support depends on the database version. class JSONBSubscriptOp < SQL::Expression SUBSCRIPT = ["".freeze, "[".freeze, "]".freeze].freeze # The expression being subscripted attr_reader :expression # The subscript to use attr_reader :sub # Set the expression and subscript to the given arguments def initialize(expression, sub) @expression = expression @sub = sub freeze end # Use subscripts instead of -> operator on PostgreSQL 14+ def to_s_append(ds, sql) server_version = ds.db.server_version frag = server_version && server_version >= 140000 ? SUBSCRIPT : JSONOp::GET ds.literal_append(sql, Sequel::SQL::PlaceholderLiteralString.new(frag, [@expression, @sub])) end # Support transforming of jsonb subscripts def sequel_ast_transform(transformer) self.class.new(transformer.call(@expression), transformer.call(@sub)) end end module JSONOpMethods # Wrap the receiver in an JSONOp so you can easily use the PostgreSQL # json functions and operators with it. def pg_json JSONOp.new(self) end # # Wrap the receiver in an JSONBOp so you can easily use the PostgreSQL # jsonb functions and operators with it. def pg_jsonb JSONBOp.new(self) end end # :nocov: if defined?(JSONArray) # :nocov: class JSONArray # Wrap the JSONArray instance in an JSONOp, allowing you to easily use # the PostgreSQL json functions and operators with literal jsons. def op JSONOp.new(self) end end class JSONHash # Wrap the JSONHash instance in an JSONOp, allowing you to easily use # the PostgreSQL json functions and operators with literal jsons. def op JSONOp.new(self) end end class JSONBArray # Wrap the JSONBArray instance in an JSONBOp, allowing you to easily use # the PostgreSQL jsonb functions and operators with literal jsonbs. def op JSONBOp.new(self) end end class JSONBHash # Wrap the JSONBHash instance in an JSONBOp, allowing you to easily use # the PostgreSQL jsonb functions and operators with literal jsonbs. def op JSONBOp.new(self) end end end end module SQL::Builders # Return the object wrapped in an Postgres::JSONOp. def pg_json_op(v) case v when Postgres::JSONOp v else Postgres::JSONOp.new(v) end end # Return the object wrapped in an Postgres::JSONBOp. def pg_jsonb_op(v) case v when Postgres::JSONBOp v else Postgres::JSONBOp.new(v) end end end class SQL::GenericExpression include Sequel::Postgres::JSONOpMethods end class LiteralString include Sequel::Postgres::JSONOpMethods end end # :nocov: if Sequel.core_extensions? 
  class Symbol
    include Sequel::Postgres::JSONOpMethods
  end
end

if defined?(Sequel::CoreRefinements)
  module Sequel::CoreRefinements
    refine Symbol do
      send INCLUDE_METH, Sequel::Postgres::JSONOpMethods
    end
  end
end
# :nocov:

sequel-5.63.0/lib/sequel/extensions/pg_loose_count.rb

# frozen-string-literal: true
#
# The pg_loose_count extension looks at the table statistics
# in the PostgreSQL system tables to get a fast approximate
# count of the number of rows in a given table:
#
#   DB.loose_count(:table) # => 123456
#
# It can also support schema qualified tables:
#
#   DB.loose_count(Sequel[:schema][:table]) # => 123456
#
# How accurate this count is depends on the number of rows
# added/deleted from the table since the last time it was
# analyzed. If the table has not been vacuumed or analyzed
# yet, this can return 0 or -1 depending on the PostgreSQL
# version in use.
#
# To load the extension into the database:
#
#   DB.extension :pg_loose_count
#
# Related module: Sequel::Postgres::LooseCount

module Sequel
  module Postgres
    module LooseCount
      # Look at the table statistics for the given table to get
      # an approximate count of the number of rows.
      def loose_count(table)
        from(:pg_class).where(:oid=>regclass_oid(table)).get(Sequel.cast(:reltuples, Integer))
      end
    end
  end

  Database.register_extension(:pg_loose_count, Postgres::LooseCount)
end

sequel-5.63.0/lib/sequel/extensions/pg_multirange.rb

# frozen-string-literal: true
#
# The pg_multirange extension adds support for the PostgreSQL 14+ multirange
# types to Sequel. PostgreSQL multirange types are similar to an array of
# ranges, where a match against the multirange is a match against any of the
# ranges in the multirange.
#
# When PostgreSQL multirange values are retrieved, they are parsed and returned
# as instances of Sequel::Postgres::PGMultiRange. PGMultiRange mostly acts
# like an array of Sequel::Postgres::PGRange (see the pg_range extension).
#
# In addition to the parser, this extension comes with literalizers
# for PGMultiRanges, so they can be used in queries and as bound variables.
#
# To turn an existing array of Ranges into a PGMultiRange, use Sequel.pg_multirange.
#
# You must provide the type of multirange when creating the multirange:
#
#   Sequel.pg_multirange(array_of_date_ranges, :datemultirange)
#
# To use this extension, load it into the Database instance:
#
#   DB.extension :pg_multirange
#
# See the {schema modification guide}[rdoc-ref:doc/schema_modification.rdoc]
# for details on using multirange type columns in CREATE/ALTER TABLE statements.
#
# This extension makes it easy to add support for other multirange types. In
# general, you just need to make sure that the subtype is handled and has the
# appropriate converter installed. For user defined
# types, you can do this via:
#
#   DB.add_conversion_proc(subtype_oid){|string| }
#
# Then you can call
# Sequel::Postgres::PGMultiRange::DatabaseMethods#register_multirange_type
# to automatically set up a handler for the multirange type. So if you
# want to support the timemultirange type (assuming the time type is already
# supported):
#
#   DB.register_multirange_type('timemultirange')
#
# This extension integrates with the pg_array extension. If you plan
# to use arrays of multirange types, load the pg_array extension before the
# pg_multirange extension:
#
#   DB.extension :pg_array, :pg_multirange
#
# The pg_multirange extension will automatically load the pg_range extension.
#
# Related module: Sequel::Postgres::PGMultiRange

require 'delegate'
require 'strscan'

module Sequel
  module Postgres
    class PGMultiRange < DelegateClass(Array)
      include Sequel::SQL::AliasMethods

      # Converts strings into PGMultiRange instances.
      class Parser < StringScanner
        def initialize(source, converter)
          super(source)
          @converter = converter
        end

        # Parse the multirange type input string into a PGMultiRange value.
        def parse
          raise Sequel::Error, "invalid multirange, doesn't start with {" unless get_byte == '{'
          ranges = []

          unless scan(/\}/)
            while true
              raise Sequel::Error, "unfinished multirange" unless range_string = scan_until(/[\]\)]/)
              ranges << @converter.call(range_string)

              case sep = get_byte
              when '}'
                break
              when ','
                # nothing
              else
                raise Sequel::Error, "invalid multirange separator: #{sep.inspect}"
              end
            end
          end

          raise Sequel::Error, "invalid multirange, remaining data after }" unless eos?
          ranges
        end
      end

      # Callable object that takes the input string and parses it using Parser.
      class Creator
        # The database type to set on the PGMultiRange instances returned.
        attr_reader :type

        def initialize(type, converter=nil)
          @type = type
          @converter = converter
        end

        # Parse the string using Parser with the appropriate
        # converter, and return a PGMultiRange with the appropriate database
        # type.
def call(string) PGMultiRange.new(Parser.new(string, @converter).parse, @type) end end module DatabaseMethods # Add the default multirange conversion procs to the database def self.extended(db) db.instance_exec do raise Error, "multiranges not supported on this database" unless server_version >= 140000 extension :pg_range @pg_multirange_schema_types ||= {} register_multirange_type('int4multirange', :range_oid=>3904, :oid=>4451) register_multirange_type('nummultirange', :range_oid=>3906, :oid=>4532) register_multirange_type('tsmultirange', :range_oid=>3908, :oid=>4533) register_multirange_type('tstzmultirange', :range_oid=>3910, :oid=>4534) register_multirange_type('datemultirange', :range_oid=>3912, :oid=>4535) register_multirange_type('int8multirange', :range_oid=>3926, :oid=>4536) if respond_to?(:register_array_type) register_array_type('int4multirange', :oid=>6150, :scalar_oid=>4451, :scalar_typecast=>:int4multirange) register_array_type('nummultirange', :oid=>6151, :scalar_oid=>4532, :scalar_typecast=>:nummultirange) register_array_type('tsmultirange', :oid=>6152, :scalar_oid=>4533, :scalar_typecast=>:tsmultirange) register_array_type('tstzmultirange', :oid=>6153, :scalar_oid=>4534, :scalar_typecast=>:tstzmultirange) register_array_type('datemultirange', :oid=>6155, :scalar_oid=>4535, :scalar_typecast=>:datemultirange) register_array_type('int8multirange', :oid=>6157, :scalar_oid=>4536, :scalar_typecast=>:int8multirange) end [:int4multirange, :nummultirange, :tsmultirange, :tstzmultirange, :datemultirange, :int8multirange].each do |v| @schema_type_classes[v] = PGMultiRange end procs = conversion_procs add_conversion_proc(4533, PGMultiRange::Creator.new("tsmultirange", procs[3908])) add_conversion_proc(4534, PGMultiRange::Creator.new("tstzmultirange", procs[3910])) if respond_to?(:register_array_type) && defined?(PGArray::Creator) add_conversion_proc(6152, PGArray::Creator.new("tsmultirange", procs[4533])) add_conversion_proc(6153, PGArray::Creator.new("tstzmultirange", procs[4534])) end end end # Handle PGMultiRange values in bound variables def bound_variable_arg(arg, conn) case arg when PGMultiRange arg.unquoted_literal(schema_utility_dataset) else super end end # Freeze the pg multirange schema types to prevent adding new ones. def freeze @pg_multirange_schema_types.freeze super end # Register a database specific multirange type. This can be used to support # different multirange types per Database. Options: # # :converter :: A callable object (e.g. Proc), that is called with the PostgreSQL range string, # and should return a PGRange instance. # :oid :: The PostgreSQL OID for the multirange type. This is used by Sequel to set up automatic type # conversion on retrieval from the database. # :range_oid :: Should be the PostgreSQL OID for the multirange subtype (the range type). If given, # automatically sets the :converter option by looking for scalar conversion # proc. # # If a block is given, it is treated as the :converter option. 
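        #
        # A hedged example, mirroring the extension docs (assumes the range
        # subtype's conversion proc is already in place, so the OIDs can be
        # looked up from pg_range/pg_type automatically):
        #
        #   DB.register_multirange_type('timemultirange')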
        def register_multirange_type(db_type, opts=OPTS, &block)
          oid = opts[:oid]
          soid = opts[:range_oid]

          if has_converter = opts.has_key?(:converter)
            raise Error, "can't provide both a block and :converter option to register_multirange_type" if block
            converter = opts[:converter]
          else
            has_converter = true if block
            converter = block
          end

          unless (soid || has_converter) && oid
            range_oid, subtype_oid = from(:pg_range).join(:pg_type, :oid=>:rngmultitypid).where(:typname=>db_type.to_s).get([:rngmultitypid, :rngtypid])
            soid ||= subtype_oid unless has_converter
            oid ||= range_oid
          end

          db_type = db_type.to_s.dup.freeze

          if soid
            raise Error, "can't provide both a converter and :range_oid option to register" if has_converter
            raise Error, "no conversion proc for :range_oid=>#{soid.inspect} in conversion_procs" unless converter = conversion_procs[soid]
          end

          raise Error, "cannot add a multirange type without a converter (use :converter or :range_oid option or pass block)" unless converter
          creator = Creator.new(db_type, converter)
          add_conversion_proc(oid, creator)

          @pg_multirange_schema_types[db_type] = db_type.to_sym

          singleton_class.class_eval do
            meth = :"typecast_value_#{db_type}"
            scalar_typecast_method = :"typecast_value_#{opts.fetch(:scalar_typecast, db_type.sub('multirange', 'range'))}"
            define_method(meth){|v| typecast_value_pg_multirange(v, creator, scalar_typecast_method)}
            private meth
          end

          @schema_type_classes[db_type] = PGMultiRange
          nil
        end

        private

        # Recognize the registered database multirange types.
        def schema_column_type(db_type)
          @pg_multirange_schema_types[db_type] || super
        end

        # Set the :ruby_default value if the default value is recognized as a multirange.
        def schema_post_process(_)
          super.each do |a|
            h = a[1]
            db_type = h[:db_type]
            if @pg_multirange_schema_types[db_type] && h[:default] =~ /\A#{db_type}\(.*\)\z/
              h[:ruby_default] = get(Sequel.lit(h[:default]))
            end
          end
        end

        # Given a value to typecast and the type of PGMultiRange subclass:
        # * If given a PGMultiRange with a matching type, use it directly.
        # * If given a PGMultiRange with a different type, return a PGMultiRange
        #   with the creator's type.
        # * If given an Array, create a new PGMultiRange instance for it, typecasting
        #   each instance using the scalar_typecast_method.
        def typecast_value_pg_multirange(value, creator, scalar_typecast_method=nil)
          case value
          when PGMultiRange
            return value if value.db_type == creator.type
          when Array
            # nothing
          else
            raise Sequel::InvalidValue, "invalid value for multirange type: #{value.inspect}"
          end

          if scalar_typecast_method && respond_to?(scalar_typecast_method, true)
            value = value.map{|v| send(scalar_typecast_method, v)}
          end
          PGMultiRange.new(value, creator.type)
        end
      end

      # The type of this multirange (e.g. 'int4multirange').
      attr_accessor :db_type

      # Set the array of ranges to delegate to, and the database type.
      def initialize(ranges, db_type)
        super(ranges)
        @db_type = db_type.to_s
      end

      # Append the multirange SQL to the given sql string.
      def sql_literal_append(ds, sql)
        sql << db_type << '('
        joiner = nil
        conversion_meth = nil

        each do |range|
          if joiner
            sql << joiner
          else
            joiner = ', '
          end

          unless range.is_a?(PGRange)
            conversion_meth ||= :"typecast_value_#{db_type.sub('multi', '')}"
            range = ds.db.send(conversion_meth, range)
          end

          ds.literal_append(sql, range)
        end

        sql << ')'
      end

      # Return whether the value is inside any of the ranges in the multirange.
      def cover?(value)
        any?{|range| range.cover?(value)}
      end
      alias === cover?

      # Don't consider multiranges with different database types equal.
      def eql?(other)
        if PGMultiRange === other
          return false unless other.db_type == db_type
          other = other.__getobj__
        end
        __getobj__.eql?(other)
      end

      # Don't consider multiranges with different database types equal.
      def ==(other)
        return false if PGMultiRange === other && other.db_type != db_type
        super
      end

      # Return a string containing the unescaped version of the multirange.
      # Separated out for use by the bound argument code.
      def unquoted_literal(ds)
        val = String.new
        val << "{"
        joiner = nil
        conversion_meth = nil

        each do |range|
          if joiner
            val << joiner
          else
            joiner = ', '
          end

          unless range.is_a?(PGRange)
            conversion_meth ||= :"typecast_value_#{db_type.sub('multi', '')}"
            range = ds.db.send(conversion_meth, range)
          end

          val << range.unquoted_literal(ds)
        end

        val << "}"
      end

      # Allow automatic parameterization.
      def sequel_auto_param_type(ds)
        "::#{db_type}"
      end
    end
  end

  module SQL::Builders
    # Convert the object to a Postgres::PGMultiRange.
    def pg_multirange(v, db_type)
      case v
      when Postgres::PGMultiRange
        if v.db_type == db_type
          v
        else
          Postgres::PGMultiRange.new(v, db_type)
        end
      when Array
        Postgres::PGMultiRange.new(v, db_type)
      else
        # May not be defined unless the pg_range_ops extension is used
        pg_range_op(v)
      end
    end
  end

  Database.register_extension(:pg_multirange, Postgres::PGMultiRange::DatabaseMethods)
end

sequel-5.63.0/lib/sequel/extensions/pg_range.rb

# frozen-string-literal: true
#
# The pg_range extension adds support for the PostgreSQL 9.2+ range
# types to Sequel. PostgreSQL range types are similar to ruby's
# Range class, representing a range of values. However, they
# are more flexible than ruby's ranges, allowing exclusive beginnings
# and endings (ruby's range only allows exclusive endings).
#
# When PostgreSQL range values are retrieved, they are parsed and returned
# as instances of Sequel::Postgres::PGRange. PGRange mostly acts
# like a Range, but it's not a Range as not all PostgreSQL range
# type values would be valid ruby ranges. If the range type value
# you are using is a valid ruby range, you can call PGRange#to_range
# to get a Range. However, if you call PGRange#to_range on a range
# type value that uses features that ruby's Range does not support, an
# exception will be raised.
#
# In addition to the parser, this extension comes with literalizers
# for PGRange and Range, so they can be used in queries and as bound variables.
#
# To turn an existing Range into a PGRange, use Sequel.pg_range:
#
#   Sequel.pg_range(range)
#
# If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc],
# or you have loaded the core_refinements extension
# and have activated refinements for the file, you can also use Range#pg_range:
#
#   range.pg_range
#
# You may want to specify a specific range type:
#
#   Sequel.pg_range(range, :daterange)
#   range.pg_range(:daterange)
#
# If you specify the range database type, Sequel will automatically cast
# the value to that type when literalizing.
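#
# For example (illustrative; the :t table and its range-typed column +r+
# are hypothetical, and the SQL shown is abbreviated):
#
#   DB[:t].where(r: Sequel.pg_range(1..10, :int4range))
#   # WHERE (r = int4range(1,10,'[]'))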
# # To use this extension, load it into the Database instance: # # DB.extension :pg_range # # See the {schema modification guide}[rdoc-ref:doc/schema_modification.rdoc] # for details on using range type columns in CREATE/ALTER TABLE statements. # # This extension makes it easy to add support for other range types. In # general, you just need to make sure that the subtype is handled and has the # appropriate converter installed. For user defined # types, you can do this via: # # DB.add_conversion_proc(subtype_oid){|string| } # # Then you can call # Sequel::Postgres::PGRange::DatabaseMethods#register_range_type # to automatically set up a handler for the range type. So if you # want to support the timerange type (assuming the time type is already # supported): # # DB.register_range_type('timerange') # # This extension integrates with the pg_array extension. If you plan # to use arrays of range types, load the pg_array extension before the # pg_range extension: # # DB.extension :pg_array, :pg_range # # Related module: Sequel::Postgres::PGRange module Sequel module Postgres class PGRange include Sequel::SQL::AliasMethods # Creates callable objects that convert strings into PGRange instances. class Parser # The database range type for this parser (e.g. 'int4range'), # automatically setting the db_type for the returned PGRange instances. attr_reader :db_type # A callable object to convert the beginning and ending of the range into # the appropriate ruby type. attr_reader :converter # Set the db_type and converter on initialization. def initialize(db_type, converter=nil) @db_type = db_type.to_s.dup.freeze if db_type @converter = converter end # Parse the range type input string into a PGRange value. def call(string) if string == 'empty' return PGRange.empty(db_type) end raise(InvalidValue, "invalid or unhandled range format: #{string.inspect}") unless matches = /\A(\[|\()("((?:\\"|[^"])*)"|[^"]*),("((?:\\"|[^"])*)"|[^"]*)(\]|\))\z/.match(string) exclude_begin = matches[1] == '(' exclude_end = matches[6] == ')' # If the input is quoted, it needs to be unescaped. Also, quoted input isn't # checked for emptiness, since the empty quoted string is considered an # element that happens to be the empty string, while an unquoted empty string # is considered unbounded. # # While PostgreSQL allows pure escaping for input (without quoting), it appears # to always use the quoted output form when characters need to be escaped, so # there isn't a need to unescape unquoted output. if beg = matches[3] beg.gsub!(/\\(.)/, '\1') else beg = matches[2] unless matches[2].empty? end if en = matches[5] en.gsub!(/\\(.)/, '\1') else en = matches[4] unless matches[4].empty? end if c = converter beg = c.call(beg) if beg en = c.call(en) if en end PGRange.new(beg, en, :exclude_begin=>exclude_begin, :exclude_end=>exclude_end, :db_type=>db_type) end end module DatabaseMethods # Add the conversion procs to the database # and extend the datasets to correctly literalize ruby Range values. 
def self.extended(db) db.instance_exec do @pg_range_schema_types ||= {} extend_datasets(DatasetMethods) register_range_type('int4range', :oid=>3904, :subtype_oid=>23) register_range_type('numrange', :oid=>3906, :subtype_oid=>1700) register_range_type('tsrange', :oid=>3908, :subtype_oid=>1114) register_range_type('tstzrange', :oid=>3910, :subtype_oid=>1184) register_range_type('daterange', :oid=>3912, :subtype_oid=>1082) register_range_type('int8range', :oid=>3926, :subtype_oid=>20) if respond_to?(:register_array_type) register_array_type('int4range', :oid=>3905, :scalar_oid=>3904, :scalar_typecast=>:int4range) register_array_type('numrange', :oid=>3907, :scalar_oid=>3906, :scalar_typecast=>:numrange) register_array_type('tsrange', :oid=>3909, :scalar_oid=>3908, :scalar_typecast=>:tsrange) register_array_type('tstzrange', :oid=>3911, :scalar_oid=>3910, :scalar_typecast=>:tstzrange) register_array_type('daterange', :oid=>3913, :scalar_oid=>3912, :scalar_typecast=>:daterange) register_array_type('int8range', :oid=>3927, :scalar_oid=>3926, :scalar_typecast=>:int8range) end [:int4range, :numrange, :tsrange, :tstzrange, :daterange, :int8range].each do |v| @schema_type_classes[v] = PGRange end procs = conversion_procs add_conversion_proc(3908, Parser.new("tsrange", procs[1114])) add_conversion_proc(3910, Parser.new("tstzrange", procs[1184])) if respond_to?(:register_array_type) && defined?(PGArray::Creator) add_conversion_proc(3909, PGArray::Creator.new("tsrange", procs[3908])) add_conversion_proc(3911, PGArray::Creator.new("tstzrange", procs[3910])) end end end # Handle Range and PGRange values in bound variables def bound_variable_arg(arg, conn) case arg when PGRange arg.unquoted_literal(schema_utility_dataset) when Range PGRange.from_range(arg).unquoted_literal(schema_utility_dataset) else super end end # Freeze the pg range schema types to prevent adding new ones. def freeze @pg_range_schema_types.freeze super end # Register a database specific range type. This can be used to support # different range types per Database. Options: # # :converter :: A callable object (e.g. Proc), that is called with the start or end of the range # (usually a string), and should return the appropriate typecasted object. # :oid :: The PostgreSQL OID for the range type. This is used by the Sequel postgres adapter # to set up automatic type conversion on retrieval from the database. # :subtype_oid :: Should be the PostgreSQL OID for the range's subtype. If given, # automatically sets the :converter option by looking for scalar conversion # proc. # # If a block is given, it is treated as the :converter option. 
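        #
        # Example, mirroring the extension docs (assumes the time type's
        # conversion proc is already in place):
        #
        #   DB.register_range_type('timerange')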
def register_range_type(db_type, opts=OPTS, &block) oid = opts[:oid] soid = opts[:subtype_oid] if has_converter = opts.has_key?(:converter) raise Error, "can't provide both a block and :converter option to register_range_type" if block converter = opts[:converter] else has_converter = true if block converter = block end unless (soid || has_converter) && oid range_oid, subtype_oid = from(:pg_range).join(:pg_type, :oid=>:rngtypid).where(:typname=>db_type.to_s).get([:rngtypid, :rngsubtype]) soid ||= subtype_oid unless has_converter oid ||= range_oid end db_type = db_type.to_s.dup.freeze if soid raise Error, "can't provide both a converter and :subtype_oid option to register" if has_converter raise Error, "no conversion proc for :subtype_oid=>#{soid.inspect} in conversion_procs" unless converter = conversion_procs[soid] end parser = Parser.new(db_type, converter) add_conversion_proc(oid, parser) @pg_range_schema_types[db_type] = db_type.to_sym singleton_class.class_eval do meth = :"typecast_value_#{db_type}" define_method(meth){|v| typecast_value_pg_range(v, parser)} private meth end @schema_type_classes[:"#{opts[:type_symbol] || db_type}"] = PGRange nil end private # Recognize the registered database range types. def schema_column_type(db_type) @pg_range_schema_types[db_type] || super end # Set the :ruby_default value if the default value is recognized as a range. def schema_post_process(_) super.each do |a| h = a[1] db_type = h[:db_type] if @pg_range_schema_types[db_type] && h[:default] =~ /\A'([^']+)'::#{db_type}\z/ default = $1 if convertor = conversion_procs[h[:oid]] h[:ruby_default] = convertor.call(default) end end end end # Typecast value correctly to a PGRange. If already an # PGRange instance with the same db_type, return as is. # If a PGRange with a different subtype, return a new # PGRange with the same values and the expected subtype. # If a Range object, create a PGRange with the given # db_type. If a string, assume it is in PostgreSQL # output format and parse it using the parser. def typecast_value_pg_range(value, parser) case value when PGRange if value.db_type.to_s == parser.db_type value elsif value.empty? PGRange.empty(parser.db_type) else PGRange.new(value.begin, value.end, :exclude_begin=>value.exclude_begin?, :exclude_end=>value.exclude_end?, :db_type=>parser.db_type) end when Range PGRange.from_range(value, parser.db_type) when String parser.call(typecast_check_string_length(value, 100)) else raise Sequel::InvalidValue, "invalid value for range type: #{value.inspect}" end end end module DatasetMethods private # Handle literalization of ruby Range objects, treating them as # PostgreSQL ranges. def literal_other_append(sql, v) case v when Range super(sql, Sequel::Postgres::PGRange.from_range(v)) else super end end end include Enumerable # The beginning of the range. If nil, the range has an unbounded beginning. attr_reader :begin # The end of the range. If nil, the range has an unbounded ending. attr_reader :end # The PostgreSQL database type for the range (e.g. 'int4range'). attr_reader :db_type # Create a new PGRange instance using the beginning and ending of the ruby Range, # with the given db_type. def self.from_range(range, db_type=nil) new(range.begin, range.end, :exclude_end=>range.exclude_end?, :db_type=>db_type) end # Create an empty PGRange with the given database type. def self.empty(db_type=nil) new(nil, nil, :empty=>true, :db_type=>db_type) end # Initialize a new PGRange instance. 
Accepts the following options: # # :db_type :: The PostgreSQL database type for the range. # :empty :: Whether the range is empty (has no points) # :exclude_begin :: Whether the beginning element is excluded from the range. # :exclude_end :: Whether the ending element is excluded from the range. def initialize(beg, en, opts=OPTS) @begin = beg @end = en @empty = !!opts[:empty] @exclude_begin = !!opts[:exclude_begin] @exclude_end = !!opts[:exclude_end] @db_type = opts[:db_type] if @empty raise(Error, 'cannot have an empty range with either a beginning or ending') unless @begin.nil? && @end.nil? && opts[:exclude_begin].nil? && opts[:exclude_end].nil? end end # Delegate to the ruby range object so that the object mostly acts like a range. range_methods = %w'each last first step' range_methods.each do |m| class_eval("def #{m}(*a, &block) to_range.#{m}(*a, &block) end", __FILE__, __LINE__) end # Return whether the value is inside the range. def cover?(value) return false if empty? b = self.begin return false if b && b.public_send(exclude_begin? ? :>= : :>, value) e = self.end return false if e && e.public_send(exclude_end? ? :<= : :<, value) true end # Consider the receiver equal to other PGRange instances with the # same beginning, ending, exclusions, and database type. Also consider # it equal to Range instances if this PGRange can be converted to a # a Range and those ranges are equal. def eql?(other) case other when PGRange if db_type == other.db_type if empty? other.empty? elsif other.empty? false else [:@begin, :@end, :@exclude_begin, :@exclude_end].all?{|v| instance_variable_get(v) == other.instance_variable_get(v)} end else false end when Range if valid_ruby_range? to_range.eql?(other) else false end else false end end alias == eql? # Make sure equal ranges have the same hash. def hash if @empty @db_type.hash else [@begin, @end, @exclude_begin, @exclude_end, @db_type].hash end end # Allow PGRange values in case statements, where they return true if they # are equal to each other using eql?, or if this PGRange can be converted # to a Range, delegating to that range. def ===(other) if eql?(other) true else if valid_ruby_range? to_range === other else false end end end # Whether this range is empty (has no points). Note that for manually created ranges # (ones not retrieved from the database), this will only be true if the range # was created using the :empty option. def empty? @empty end # Whether the beginning element is excluded from the range. def exclude_begin? @exclude_begin end # Whether the ending element is excluded from the range. def exclude_end? @exclude_end end # Append a literalize version of the receiver to the sql. def sql_literal_append(ds, sql) if (s = @db_type) && !empty? sql << s.to_s << "(" ds.literal_append(sql, self.begin) sql << ',' ds.literal_append(sql, self.end) sql << ',' ds.literal_append(sql, "#{exclude_begin? ? "(" : "["}#{exclude_end? ? ")" : "]"}") sql << ")" else ds.literal_append(sql, unquoted_literal(ds)) if s sql << '::' << s.to_s end end end ENDLESS_RANGE_NOT_SUPPORTED = RUBY_VERSION < '2.6' STARTLESS_RANGE_NOT_SUPPORTED = RUBY_VERSION < '2.7' # Return a ruby Range object for this instance, if one can be created. def to_range return @range if @range raise(Error, "cannot create ruby range for an empty PostgreSQL range") if empty? raise(Error, "cannot create ruby range when PostgreSQL range excludes beginning element") if exclude_begin? 
# :nocov: raise(Error, "cannot create ruby range when PostgreSQL range has unbounded beginning") if STARTLESS_RANGE_NOT_SUPPORTED && !self.begin raise(Error, "cannot create ruby range when PostgreSQL range has unbounded ending") if ENDLESS_RANGE_NOT_SUPPORTED && !self.end # :nocov: @range = Range.new(self.begin, self.end, exclude_end?) end # Whether or not this PGRange is a valid ruby range. In order to be a valid ruby range, # it must have a beginning and an ending (no unbounded ranges), and it cannot exclude # the beginning element. def valid_ruby_range? !(empty? || exclude_begin? || (STARTLESS_RANGE_NOT_SUPPORTED && !self.begin) || (ENDLESS_RANGE_NOT_SUPPORTED && !self.end)) end # Whether the beginning of the range is unbounded. def unbounded_begin? self.begin.nil? && !empty? end # Whether the end of the range is unbounded. def unbounded_end? self.end.nil? && !empty? end # Return a string containing the unescaped version of the range. # Separated out for use by the bound argument code. def unquoted_literal(ds) if empty? 'empty' else "#{exclude_begin? ? "(" : "["}#{escape_value(self.begin, ds)},#{escape_value(self.end, ds)}#{exclude_end? ? ")" : "]"}" end end # Allow automatic parameterization for ranges with types. def sequel_auto_param_type(ds) "::#{db_type}" if db_type end private # Escape common range types. Instead of quoting, just backslash escape all # special characters. def escape_value(k, ds) case k when nil '' when Date, Time ds.literal(k)[1...-1] when Integer, Float k.to_s when BigDecimal k.to_s('F') when LiteralString k when String if k.empty? '""' else k.gsub(/("|,|\\|\[|\]|\(|\))/, '\\\\\1') end else ds.literal(k).gsub(/("|,|\\|\[|\]|\(|\))/, '\\\\\1') end end end end module SQL::Builders # Convert the object to a Postgres::PGRange. def pg_range(v, db_type=nil) case v when Postgres::PGRange if db_type.nil? || v.db_type == db_type v else Postgres::PGRange.new(v.begin, v.end, :exclude_begin=>v.exclude_begin?, :exclude_end=>v.exclude_end?, :db_type=>db_type) end when Range Postgres::PGRange.from_range(v, db_type) else # May not be defined unless the pg_range_ops extension is used pg_range_op(v) end end end Database.register_extension(:pg_range, Postgres::PGRange::DatabaseMethods) end # :nocov: if Sequel.core_extensions? class Range # Create a new PGRange using the receiver as the input range, # with the given database type. 
def pg_range(db_type=nil) Sequel::Postgres::PGRange.from_range(self, db_type) end end end if defined?(Sequel::CoreRefinements) module Sequel::CoreRefinements refine Range do def pg_range(db_type=nil) Sequel::Postgres::PGRange.from_range(self, db_type) end end end end # :nocov:

sequel-5.63.0/lib/sequel/extensions/pg_range_ops.rb

# frozen-string-literal: true # # The pg_range_ops extension adds support to Sequel's DSL to make # it easier to call PostgreSQL range and multirange functions and operators. # # To load the extension: # # Sequel.extension :pg_range_ops # # The most common usage is passing an expression to Sequel.pg_range_op: # # r = Sequel.pg_range_op(:range) # # If you have also loaded the pg_range or pg_multirange extensions, you can use # Sequel.pg_range or Sequel.pg_multirange as well: # # r = Sequel.pg_range(:range) # r = Sequel.pg_multirange(:range) # # Also, on most Sequel expression objects, you can call the pg_range # method: # # r = Sequel[:range].pg_range # # If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc], # or you have loaded the core_refinements extension # and have activated refinements for the file, you can also use Symbol#pg_range: # # r = :range.pg_range # # This creates a Sequel::Postgres::RangeOp object that can be used # for easier querying: # # r.contains(:other) # range @> other # r.contained_by(:other) # range <@ other # r.overlaps(:other) # range && other # r.left_of(:other) # range << other # r.right_of(:other) # range >> other # r.starts_after(:other) # range &> other # r.ends_before(:other) # range &< other # r.adjacent_to(:other) # range -|- other # # r.lower # lower(range) # r.upper # upper(range) # r.isempty # isempty(range) # r.lower_inc # lower_inc(range) # r.upper_inc # upper_inc(range) # r.lower_inf # lower_inf(range) # r.upper_inf # upper_inf(range) # # All of the above methods work for both ranges and multiranges, as long # as PostgreSQL supports the operation. The following methods are also # supported: # # r.range_merge # range_merge(range) # r.unnest # unnest(range) # r.multirange # multirange(range) # # +range_merge+ and +unnest+ expect the receiver to represent a multirange # value, while +multirange+ expects the receiver to represent a range value. # # See the PostgreSQL range and multirange function and operator documentation for more # details on what these functions and operators do. # # If you are also using the pg_range or pg_multirange extension, you should # load them before loading this extension. Doing so will allow you to use # PGRange#op and PGMultiRange#op to get a RangeOp, allowing you to perform # range operations on range literals.
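# As a brief illustration (the reservations table and its during range column below are hypothetical, not part of this extension): # # during = Sequel.pg_range_op(:during) # DB[:reservations].where(during.contains(Sequel.cast('2020-01-01', Date))) # # SELECT * FROM reservations WHERE (during @> CAST('2020-01-01' AS date)) #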
# # Related module: Sequel::Postgres::RangeOp # module Sequel module Postgres # The RangeOp class is a simple container for a single object that # defines methods that yield Sequel expression objects representing # PostgreSQL range operators and functions. # # Most methods in this class are defined via metaprogramming, see # the pg_range_ops extension documentation for details on the API. class RangeOp < Sequel::SQL::Wrapper OPERATORS = { :contains => ["(".freeze, " @> ".freeze, ")".freeze].freeze, :contained_by => ["(".freeze, " <@ ".freeze, ")".freeze].freeze, :left_of => ["(".freeze, " << ".freeze, ")".freeze].freeze, :right_of => ["(".freeze, " >> ".freeze, ")".freeze].freeze, :ends_before => ["(".freeze, " &< ".freeze, ")".freeze].freeze, :starts_after => ["(".freeze, " &> ".freeze, ")".freeze].freeze, :adjacent_to => ["(".freeze, " -|- ".freeze, ")".freeze].freeze, :overlaps => ["(".freeze, " && ".freeze, ")".freeze].freeze, }.freeze %w'lower upper isempty lower_inc upper_inc lower_inf upper_inf unnest'.each do |f| class_eval("def #{f}; function(:#{f}) end", __FILE__, __LINE__) end %w'range_merge multirange'.each do |f| class_eval("def #{f}; RangeOp.new(function(:#{f})) end", __FILE__, __LINE__) end OPERATORS.each_key do |f| class_eval("def #{f}(v); operator(:#{f}, v) end", __FILE__, __LINE__) end # These operators are already supported by the wrapper, but for ranges they # return ranges, so wrap the results in another RangeOp. %w'+ * -'.each do |f| class_eval("def #{f}(v); RangeOp.new(super) end", __FILE__, __LINE__) end # Return the receiver. def pg_range self end private # Create a boolen expression for the given type and argument. def operator(type, other) Sequel::SQL::BooleanExpression.new(:NOOP, Sequel::SQL::PlaceholderLiteralString.new(OPERATORS[type], [value, other])) end # Return a function called with the receiver. def function(name) Sequel::SQL::Function.new(name, self) end end module RangeOpMethods # Wrap the receiver in an RangeOp so you can easily use the PostgreSQL # range functions and operators with it. def pg_range RangeOp.new(self) end end # :nocov: if defined?(PGRange) # :nocov: class PGRange # Wrap the PGRange instance in an RangeOp, allowing you to easily use # the PostgreSQL range functions and operators with literal ranges. def op RangeOp.new(self) end end end # :nocov: if defined?(PGMultiRange) # :nocov: class PGMultiRange # Wrap the PGRange instance in an RangeOp, allowing you to easily use # the PostgreSQL range functions and operators with literal ranges. def op RangeOp.new(self) end end end end module SQL::Builders # Return the expression wrapped in the Postgres::RangeOp. def pg_range_op(v) case v when Postgres::RangeOp v else Postgres::RangeOp.new(v) end end end class SQL::GenericExpression include Sequel::Postgres::RangeOpMethods end class LiteralString include Sequel::Postgres::RangeOpMethods end end # :nocov: if Sequel.core_extensions? 
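# Make Symbol#pg_range available when the core_extensions extension has been loaded.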
class Symbol include Sequel::Postgres::RangeOpMethods end end if defined?(Sequel::CoreRefinements) module Sequel::CoreRefinements refine Symbol do send INCLUDE_METH, Sequel::Postgres::RangeOpMethods end end end # :nocov:

sequel-5.63.0/lib/sequel/extensions/pg_row.rb

# frozen-string-literal: true # # The pg_row extension adds support for Sequel to handle # PostgreSQL's row-valued/composite types. # # This extension integrates with Sequel's native postgres and jdbc/postgresql adapters, so # that when composite fields are retrieved, they are parsed and returned # as instances of Sequel::Postgres::PGRow::(HashRow|ArrayRow), or # optionally a custom type. HashRow and ArrayRow are DelegateClasses of # Hash and Array, so they mostly act like a hash or array, but not # completely (is_a?(Hash) and is_a?(Array) are false). If you want the # actual hash for a HashRow, call HashRow#to_hash, and if you want the # actual array for an ArrayRow, call ArrayRow#to_a. This is done so # that Sequel does not treat such values like an Array or Hash by default, # which would cause issues. # # In addition to the parsers, this extension comes with literalizers # for HashRow and ArrayRow using the standard Sequel literalization callbacks, so # they work on all adapters. # # To use this extension, first load it into the Database instance: # # DB.extension :pg_row # # If you plan to use arrays of composite types, make sure you load the # pg_array extension first: # # DB.extension :pg_array, :pg_row # # You can create an anonymous row type by calling Sequel.pg_row with # an array: # # Sequel.pg_row(array) # # If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc], # or you have loaded the core_refinements extension # and have activated refinements for the file, you can also use Array#pg_row: # # array.pg_row # # However, in most cases you are going to want something beyond anonymous # row types. This extension allows you to register row types on a per # database basis, using Database#register_row_type: # # DB.register_row_type(:foo) # # When you register the row type, Sequel will query the PostgreSQL # system tables to find the related metadata, and will set up # a custom HashRow subclass for that type. This includes looking up # conversion procs for each column in the type, so that when the composite # type is returned from the database, the members of the type have # the correct type. Additionally, if the composite type also has an # array form, Sequel registers an array type for the composite type, # so that array columns of the composite type are converted correctly.
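# For example, assuming an address composite type already exists in the database (the DDL here is plain PostgreSQL, shown only for context): # # DB.run "CREATE TYPE address AS (street text, city text, zip text)" # DB.register_row_type(:address) #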
# # You can then create values of that type by using Database#row_type: # # DB.row_type(:address, ['123 Sesame St.', 'Some City', '12345']) # # Let's say table address has columns street, city, and zip. This would return # something similar to: # # {:street=>'123 Sesame St.', :city=>'Some City', :zip=>'12345'} # # You can also use a hash: # # DB.row_type(:address, street: '123 Sesame St.', city: 'Some City', zip: '12345') # # So if you have a person table that has an address column, here's how you # could insert into the column: # # DB[:table].insert(address: DB.row_type(:address, street: '123 Sesame St.', city: 'Some City', zip: '12345')) # # Note that registering row types without providing an explicit :converter option # creates anonymous classes. This results in ruby being unable to Marshal such # objects. You can work around this by assigning the anonymous class to a constant. # To get a list of such anonymous classes, you can use the following code: # # DB.conversion_procs.select{|k,v| v.is_a?(Sequel::Postgres::PGRow::Parser) && \ # v.converter && (v.converter.name.nil? || v.converter.name == '') }.map{|k,v| v} # # See the {schema modification guide}[rdoc-ref:doc/schema_modification.rdoc] # for details on using row type columns in CREATE/ALTER TABLE statements. # # This extension requires both the strscan and delegate libraries. # # Related module: Sequel::Postgres::PGRow require 'delegate' require 'strscan' module Sequel module Postgres module PGRow # Class for row-valued/composite types that are treated as arrays. By default, # this is only used for generic PostgreSQL record types, as registered # types use HashRow by default. class ArrayRow < DelegateClass(Array) include Sequel::SQL::AliasMethods class << self # The database type for this class. May be nil if this class # done not have a specific database type. attr_accessor :db_type # Alias new to call, so that the class itself can be used # directly as a converter. alias call new end # Create a subclass associated with a specific database type. # This is done so that instances of this subclass are # automatically casted to the database type when literalizing. def self.subclass(db_type) Class.new(self) do @db_type = db_type end end # Sets the database type associated with this instance. This is # used to override the class's default database type. attr_writer :db_type # Return the instance's database type, or the class's database # type if the instance has not overridden it. def db_type @db_type || self.class.db_type end # Append SQL fragment related to this object to the sql. def sql_literal_append(ds, sql) sql << 'ROW' ds.literal_append(sql, to_a) if db_type sql << '::' ds.quote_schema_table_append(sql, db_type) end end # Allow automatic parameterization if all values support it. def sequel_auto_param_type(ds) if db_type && all?{|v| nil == v || ds.send(:auto_param_type, v)} s = String.new << "::" ds.quote_schema_table_append(s, db_type) s end end end # Class for row-valued/composite types that are treated as hashes. # Types registered via Database#register_row_type will use this # class by default. class HashRow < DelegateClass(Hash) include Sequel::SQL::AliasMethods class << self # The columns associated with this class. attr_accessor :columns # The database type for this class. May be nil if this class # done not have a specific database type. attr_accessor :db_type # Alias new to call, so that the class itself can be used # directly as a converter. 
alias call new end # Create a new subclass of this class with the given database # type and columns. def self.subclass(db_type, columns) Class.new(self) do @db_type = db_type @columns = columns end end # Return the underlying hash for this delegate object. alias to_hash __getobj__ # Sets the columns associated with this instance. This is # used to override the class's default columns. attr_writer :columns # Sets the database type associated with this instance. This is # used to override the class's default database type. attr_writer :db_type # Return the instance's columns, or the class's columns # if the instance has not overridden it. def columns @columns || self.class.columns end # Return the instance's database type, or the class's columns # if the instance has not overridden it. def db_type @db_type || self.class.db_type end # Check that the HashRow has valid columns. This should be used # before all attempts to literalize the object, since literalization # depends on the columns to get the column order. def check_columns! if columns.nil? || columns.empty? raise Error, 'cannot literalize HashRow without columns' end end # Append SQL fragment related to this object to the sql. def sql_literal_append(ds, sql) check_columns! sql << 'ROW' ds.literal_append(sql, values_at(*columns)) if db_type sql << '::' ds.quote_schema_table_append(sql, db_type) end end # Allow automatic parameterization if all values support it. def sequel_auto_param_type(ds) if db_type && all?{|_,v| nil == v || ds.send(:auto_param_type, v)} s = String.new << "::" ds.quote_schema_table_append(s, db_type) s end end end ROW_TYPE_CLASSES = [HashRow, ArrayRow].freeze # This parser-like class splits the PostgreSQL # row-valued/composite type output string format # into an array of strings. Note this class makes # no attempt to handle all input formats that PostgreSQL # will accept, it only handles the output format that # PostgreSQL uses. class Splitter < StringScanner # Split the stored string into an array of strings, handling # the different types of quoting. def parse values = [] skip(/\(/) if skip(/\)/) values << nil else # :nocov: until eos? # :nocov: if skip(/"/) values << scan(/(\\.|""|[^"])*/).gsub(/\\(.)|"(")/, '\1\2') skip(/"[,)]/) else v = scan(/[^,)]*/) values << (v unless v.empty?) skip(/[,)]/) end end end values end end # The Parser is responsible for taking the input string # from PostgreSQL, and returning an appropriate ruby # object that the type represents, such as an ArrayRow or # HashRow. class Parser # The columns for the parser, if any. If the parser has # no columns, it will treat the input as an array. If # it has columns, it will treat the input as a hash. # If present, should be an array of strings. attr_reader :columns # Converters for each member in the composite type. If # not present, no conversion will be done, so values will # remain strings. If present, should be an array of # callable objects. attr_reader :column_converters # The OIDs for each member in the composite type. Not # currently used, but made available for user code. attr_reader :column_oids # A converter for the object as a whole. Used to wrap # the returned array/hash in another object, such as an # ArrayRow or HashRow. If present, should be callable. attr_reader :converter # The oid for the composite type itself. attr_reader :oid # A callable object used for typecasting the object. This # is similar to the converter, but it is called by the # typecasting code, which has different assumptions than # the converter. 
For instance, the converter should be # called with all of the member values already typecast, # but the typecaster may not be. attr_reader :typecaster # Sets each of the parser's attributes, using options with # the same name (e.g. :columns sets the columns attribute). def initialize(h=OPTS) @columns = h[:columns] @column_converters = h[:column_converters] @column_oids = h[:column_oids] @converter = h[:converter] @typecaster = h[:typecaster] @oid = h[:oid] end # Convert the PostgreSQL composite type input format into # an appropriate ruby object. def call(s) convert(convert_format(convert_columns(Splitter.new(s).parse))) end # Typecast the given object to the appropriate type using the # typecaster. Note that this does not do conversion for the members # of the composite type, since those conversions expect strings and # strings may not be provided. def typecast(obj) case obj when Array _typecast(convert_format(obj)) when Hash unless @columns raise Error, 'PGRow::Parser without columns cannot typecast from a hash' end _typecast(obj) else raise Error, 'PGRow::Parser can only typecast arrays and hashes' end end private # If the parser has a typecaster, call it with # the object, otherwise return the object as is. def _typecast(obj) if t = @typecaster t.call(obj) else obj end end # If the parser has column converters, map the # array of strings input to an array of appropriate # ruby objects, one for each converter. def convert_columns(arr) if ccs = @column_converters arr.zip(ccs).map{|v, pr| (v && pr) ? pr.call(v) : v} else arr end end # If the parser has columns, return a hash assuming # that the array is ordered by the columns. def convert_format(arr) if cs = @columns h = {} arr.zip(cs).each{|v, c| h[c] = v} h else arr end end # If the parser has a converter, call it with the object, # otherwise return the object as is. def convert(obj) if c = @converter c.call(obj) else obj end end end module DatabaseMethods # A hash mapping row type keys (usually symbols) to option # hashes. At the least, the values will contain the :parser # option for the Parser instance that the type will use. attr_reader :row_types # Do some setup for the data structures the module uses. def self.extended(db) db.instance_exec do @row_types = {} @row_schema_types = {} extend(@row_type_method_module = Module.new) add_conversion_proc(2249, PGRow::Parser.new(:converter=>PGRow::ArrayRow)) if respond_to?(:register_array_type) register_array_type('record', :oid=>2287, :scalar_oid=>2249) end end end # Handle ArrayRow and HashRow values in bound variables. def bound_variable_arg(arg, conn) case arg when ArrayRow "(#{arg.map{|v| bound_variable_array(v) if v}.join(',')})" when HashRow arg.check_columns! "(#{arg.values_at(*arg.columns).map{|v| bound_variable_array(v) if v}.join(',')})" else super end end # Freeze the row types and row schema types to prevent adding new ones. def freeze @row_types.freeze @row_schema_types.freeze @row_type_method_module.freeze super end # Register a new row type for the Database instance. db_type should be the type # symbol. This parses the PostgreSQL system tables to get information about the # composite type, and by default has the type return instances of a subclass # of HashRow. # # The following options are supported: # # :converter :: Use a custom converter for the parser. # :typecaster :: Use a custom typecaster for the parser. def register_row_type(db_type, opts=OPTS) procs = @conversion_procs rel_oid = nil array_oid = nil parser_opts = {} # Try to handle schema-qualified types.
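# A qualified name such as Sequel[:some_schema][:address] is split into its schema and bare type name here, and the pg_namespace join below restricts the lookup to that schema.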
type_schema, type_name = schema_and_table(db_type) schema_type_string = type_name.to_s # Get basic oid information for the composite type. ds = from(:pg_type). select{[pg_type[:oid], :typrelid, :typarray]}. where([[:typtype, 'c'], [:typname, type_name.to_s]]) if type_schema ds = ds.join(:pg_namespace, [[:oid, :typnamespace], [:nspname, type_schema.to_s]]) schema_type_symbol = :"pg_row_#{type_schema}__#{type_name}" else schema_type_symbol = :"pg_row_#{type_name}" end unless row = ds.first raise Error, "row type #{db_type.inspect} not found in database" end # Manually cast to integer using to_i, because adapter may not cast oid type # correctly (e.g. swift) parser_opts[:oid], rel_oid, array_oid = row.values_at(:oid, :typrelid, :typarray).map(&:to_i) # Get column names and oids for each of the members of the composite type. res = from(:pg_attribute). join(:pg_type, :oid=>:atttypid). where(:attrelid=>rel_oid). where{attnum > 0}. exclude(:attisdropped). order(:attnum). select_map{[:attname, Sequel.case({0=>:atttypid}, pg_type[:typbasetype], pg_type[:typbasetype]).as(:atttypid)]} if res.empty? raise Error, "no columns for row type #{db_type.inspect} in database" end parser_opts[:columns] = res.map{|r| r[0].to_sym} parser_opts[:column_oids] = res.map{|r| r[1].to_i} # Using the conversion_procs, lookup converters for each member of the composite type parser_opts[:column_converters] = parser_opts[:column_oids].map do |oid| procs[oid] end # Setup the converter and typecaster parser_opts[:converter] = opts.fetch(:converter){HashRow.subclass(db_type, parser_opts[:columns])} parser_opts[:typecaster] = opts.fetch(:typecaster, parser_opts[:converter]) parser = Parser.new(parser_opts) add_conversion_proc(parser.oid, parser) if respond_to?(:register_array_type) && array_oid && array_oid > 0 array_type_name = if type_schema "#{type_schema}.#{type_name}" else type_name end register_array_type(array_type_name, :oid=>array_oid, :converter=>parser, :scalar_typecast=>schema_type_symbol) end @row_types[literal(db_type)] = opts.merge(:parser=>parser, :type=>db_type) @row_schema_types[schema_type_string] = schema_type_symbol @schema_type_classes[schema_type_symbol] = ROW_TYPE_CLASSES @row_type_method_module.class_eval do meth = :"typecast_value_#{schema_type_symbol}" define_method(meth) do |v| row_type(db_type, v) end private meth alias_method(meth, meth) end nil end # Handle typecasting of the given object to the given database type. # In general, the given database type should already be registered, # but if obj is an array, this will handled unregistered types. def row_type(db_type, obj) (type_hash = @row_types[literal(db_type)]) && (parser = type_hash[:parser]) case obj when ArrayRow, HashRow obj when Array if parser parser.typecast(obj) else obj = ArrayRow.new(obj) obj.db_type = db_type obj end when Hash if parser parser.typecast(obj) else raise InvalidValue, "Database#row_type requires the #{db_type.inspect} type have a registered parser and typecaster when called with a hash" end else raise InvalidValue, "cannot convert #{obj.inspect} to row type #{db_type.inspect}" end end private # Make the column type detection handle registered row types. def schema_column_type(db_type) if type = @row_schema_types[db_type] type else super end end end end end module SQL::Builders # Wraps the expr array in an anonymous Postgres::PGRow::ArrayRow instance. 
def pg_row(expr) case expr when Array Postgres::PGRow::ArrayRow.new(expr) else # Will only work if pg_row_ops extension is loaded pg_row_op(expr) end end end Database.register_extension(:pg_row, Postgres::PGRow::DatabaseMethods) end # :nocov: if Sequel.core_extensions? class Array # Wraps the receiver in an anonymous Sequel::Postgres::PGRow::ArrayRow instance. def pg_row Sequel::Postgres::PGRow::ArrayRow.new(self) end end end if defined?(Sequel::CoreRefinements) module Sequel::CoreRefinements refine Array do def pg_row Sequel::Postgres::PGRow::ArrayRow.new(self) end end end end # :nocov:

sequel-5.63.0/lib/sequel/extensions/pg_row_ops.rb

# frozen-string-literal: true # # The pg_row_ops extension adds support to Sequel's DSL to make # it easier to deal with PostgreSQL row-valued/composite types. # # To load the extension: # # Sequel.extension :pg_row_ops # # The most common usage is passing an expression to Sequel.pg_row_op: # # r = Sequel.pg_row_op(:row_column) # # If you have also loaded the pg_row extension, you can use # Sequel.pg_row as well: # # r = Sequel.pg_row(:row_column) # # Also, on most Sequel expression objects, you can call the pg_row # method: # # r = Sequel[:row_column].pg_row # # If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc], # or you have loaded the core_refinements extension # and have activated refinements for the file, you can also use Symbol#pg_row: # # r = :row_column.pg_row # # There's only fairly basic support currently. You can use the [] method to access # a member of the composite type: # # r[:a] # (row_column).a # # This can be chained: # # r[:a][:b] # ((row_column).a).b # # If you've loaded the pg_array_ops extension, there is also support for composite # types that include arrays, or arrays of composite types: # # r[1][:a] # (row_column[1]).a # r[:a][1] # (row_column).a[1] # # The only other support is the splat method: # # r.splat # (row_column.*) # # The splat method is necessary if you are trying to reference a table's type when the # table has the same name as one of its columns. For example: # # DB.create_table(:a){Integer :a; Integer :b} # # Let's say you want to reference the composite type for the table: # # a = Sequel.pg_row_op(:a) # DB[:a].select(a[:b]) # SELECT (a).b FROM a # # Unfortunately, that doesn't work, as it references the integer column, not the table. # The splat method works around this: # # DB[:a].select(a.splat[:b]) # SELECT (a.*).b FROM a # # Splat also takes an argument which is used for casting. This is necessary if you # want to return the composite type itself, instead of the columns in the composite # type.
For example: # # DB[:a].select(a.splat).first # SELECT (a.*) FROM a # # => {:a=>1, :b=>2} # # By casting the expression, you can get a composite type returned: # # DB[:a].select(a.splat(:a)).first # SELECT (a.*)::a FROM a # # => {:a=>"(1,2)"} # or {:a=>{:a=>1, :b=>2}} if the "a" type has been registered # # with the pg_row extension # # This feature is mostly useful for a different way to graph tables: # # DB[:a].join(:b, id: :b_id).select(Sequel.pg_row_op(:a).splat(:a), # Sequel.pg_row_op(:b).splat(:b)) # # SELECT (a.*)::a, (b.*)::b FROM a INNER JOIN b ON (b.id = a.b_id) # # => {:a=>{:id=>1, :b_id=>2}, :b=>{:id=>2}} # # Related module: Sequel::Postgres::PGRowOp # module Sequel module Postgres # This class represents a composite type expression reference. class PGRowOp < SQL::PlaceholderLiteralString ROW = ['(', '.*)'].freeze.each(&:freeze) ROW_CAST = ['(', '.*)::'].freeze.each(&:freeze) QUALIFY = ['(', ').'].freeze.each(&:freeze) WRAP = [""].freeze.each(&:freeze) # Wrap the expression in a PGRowOp, without changing the # SQL it would use. def self.wrap(expr) PGRowOp.new(WRAP, [expr]) end # Access a member of the composite type if given a # symbol or an SQL::Identifier. For all other access, # assume the pg_array_ops extension is loaded and # that it represents an array access. In either # case, return a PGRowOp so that access can be cascaded. def [](member) case member when Symbol, SQL::Identifier PGRowOp.new(QUALIFY, [self, member]) else PGRowOp.wrap(Sequel.pg_array_op(self)[member]) end end # Use the (identifier).* syntax to reference the members # of the composite type as separate columns. Generally # used when you want to expand the columns of a composite # type to be separate columns in the result set. # # Sequel.pg_row_op(:a).* # (a).* # Sequel.pg_row_op(:a)[:b].* # ((a).b).* def *(ce=(arg=false;nil)) if arg == false Sequel::SQL::ColumnAll.new([self]) else super(ce) end end # Use the (identifier.*) syntax to indicate that this # expression represents the composite type of one # of the tables being referenced, if it has the same # name as one of the columns. If the cast_to argument # is given, also cast the expression to that type # (which should be a symbol representing the composite type). # This is used if you want to return the whole table row as a # composite type. # # Sequel.pg_row_op(:a).splat[:b] # (a.*).b # Sequel.pg_row_op(:a).splat(:a) # (a.*)::a def splat(cast_to=nil) if args.length > 1 raise Error, 'cannot splat a PGRowOp with multiple arguments' end if cast_to PGRowOp.new(ROW_CAST, args + [cast_to]) else PGRowOp.new(ROW, args) end end module ExpressionMethods # Return a PGRowOp wrapping the receiver. def pg_row Sequel.pg_row_op(self) end end end # :nocov: if defined?(PGRow::ArrayRow) # :nocov: class PGRow::ArrayRow # Wrap the PGRow::ArrayRow instance in a PGRowOp, allowing you to easily use # the PostgreSQL row functions and operators with literal rows. def op Sequel.pg_row_op(self) end end end # :nocov: if defined?(PGRow::HashRow) # :nocov: class PGRow::HashRow # Wrap the PGRow::HashRow instance in a PGRowOp, allowing you to easily use # the PostgreSQL row functions and operators with literal rows. def op Sequel.pg_row_op(self) end end end end module SQL::Builders # Return a PGRowOp wrapping the given expression.
def pg_row_op(expr) Postgres::PGRowOp.wrap(expr) end end class SQL::GenericExpression include Sequel::Postgres::PGRowOp::ExpressionMethods end class LiteralString include Sequel::Postgres::PGRowOp::ExpressionMethods end end # :nocov: if Sequel.core_extensions? class Symbol include Sequel::Postgres::PGRowOp::ExpressionMethods end end if defined?(Sequel::CoreRefinements) module Sequel::CoreRefinements refine Symbol do send INCLUDE_METH, Sequel::Postgres::PGRowOp::ExpressionMethods end end end # :nocov:

sequel-5.63.0/lib/sequel/extensions/pg_static_cache_updater.rb

# frozen-string-literal: true # # The pg_static_cache_updater extension is designed to # automatically update the caches in the models using the # static_cache plugin when changes to the underlying tables # are detected. # # Before using the extension in production, you have to add # triggers to the tables for the classes where you want the # caches updated automatically. You would generally do this # during a migration: # # Sequel.migration do # up do # extension :pg_static_cache_updater # create_static_cache_update_function # create_static_cache_update_trigger(:table_1) # create_static_cache_update_trigger(:table_2) # end # down do # extension :pg_static_cache_updater # drop_trigger(:table_2, default_static_cache_update_name) # drop_trigger(:table_1, default_static_cache_update_name) # drop_function(default_static_cache_update_name) # end # end # # After the triggers have been added, in your application process, # after setting up your models, you need to listen for changes to # the underlying tables: # # class Model1 < Sequel::Model(:table_1) # plugin :static_cache # end # class Model2 < Sequel::Model(:table_2) # plugin :static_cache # end # # DB.extension :pg_static_cache_updater # DB.listen_for_static_cache_updates([Model1, Model2]) # # When an INSERT/UPDATE/DELETE happens on the underlying table, # the trigger will send a notification with the table's OID. # The application(s) listening on that channel will receive # the notification, check the oid to see if it matches one # for the model tables it is interested in, and tell that model # to reload the cache if there is a match. # # Note that listen_for_static_cache_updates spawns a new thread # which will reserve its own database connection. This thread # runs until the application process is shut down. # # Also note that PostgreSQL does not send notifications to # channels until after the transaction including the changes # is committed. Also, because a separate thread is used to # listen for notifications, there may be a slight delay between # when the transaction is committed and when the cache is # reloaded. # # Requirements: # * PostgreSQL 9.0+ # * Listening Database object must be using the postgres adapter # with the pg driver (the model classes do not have to # use the same Database). # * Must be using a thread-safe connection pool (the default).
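# As a sketch of observing listener shutdown (the LOGGER constant is hypothetical), the :before_thread_exit option documented below can be used: # # DB.listen_for_static_cache_updates([Model1, Model2], # before_thread_exit: proc{LOGGER.info("static cache listener exiting")}) #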
# # Related module: Sequel::Postgres::StaticCacheUpdater # module Sequel module Postgres module StaticCacheUpdater # Add the static cache update function to the PostgreSQL database. # This must be added before any triggers using this function are # added. # # Options: # :channel_name :: Override the channel name to use. # :function_name :: Override the function name to use. def create_static_cache_update_function(opts=OPTS) create_function(opts[:function_name]||default_static_cache_update_name, <<SQL, :returns=>:trigger, :language=>:plpgsql) BEGIN PERFORM pg_notify(#{literal((opts[:channel_name]||default_static_cache_update_name).to_s)}, TG_RELID::text); RETURN NULL; END SQL end # Add a trigger to the given table that calls the function # which will notify about table changes. # # Options: # :function_name :: Override the function name to use. # :trigger_name :: Override the trigger name to use. def create_static_cache_update_trigger(table, opts=OPTS) create_trigger(table, opts[:trigger_name]||default_static_cache_update_name, opts[:function_name]||default_static_cache_update_name, :after=>true) end # The default name for the function, trigger, and notification channel # for this extension. def default_static_cache_update_name :sequel_static_cache_update end # Listen on the notification channel for changes to any of the tables for # the models given, in a new thread. If notified about a change to one of the tables, # reload the cache for the related model. Options given are also # passed to Database#listen. # # Note that this implementation does not currently support multiple # models that use the same underlying table. # # Options: # :channel_name :: Override the channel name to use. # :before_thread_exit :: An object that responds to +call+ that is called before # the created thread exits. def listen_for_static_cache_updates(models, opts=OPTS) raise Error, "this database object does not respond to listen, use the postgres adapter with the pg driver" unless respond_to?(:listen) models = [models] unless models.is_a?(Array) raise Error, "array of models to listen for changes cannot be empty" if models.empty?
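# Build a map from each model's table OID to the model itself. Notifications carry only the table OID, so this map is what routes a notification to the correct model's cache reload.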
oid_map = {} models.each do |model| raise Error, "#{model.inspect} does not use the static_cache plugin" unless model.respond_to?(:load_cache) oid_map[get(regclass_oid(model.dataset.first_source_table))] = model end Thread.new do begin listen(opts[:channel_name]||default_static_cache_update_name, {:loop=>true}.merge!(opts)) do |_, _, oid| if model = oid_map[oid.to_i] model.load_cache end end ensure opts[:before_thread_exit].call if opts[:before_thread_exit] end end end end end Database.register_extension(:pg_static_cache_updater, Postgres::StaticCacheUpdater) end

sequel-5.63.0/lib/sequel/extensions/pg_timestamptz.rb

# frozen-string-literal: true # # The pg_timestamptz extension changes the default timestamp # type for the database to be +timestamptz+ (+timestamp with time zone+) # instead of +timestamp+ (+timestamp without time zone+). This is # recommended if you are dealing with multiple timezones in your application. # # To load the extension into the database: # # DB.extension :pg_timestamptz # # Related module: Sequel::Postgres::Timestamptz # module Sequel module Postgres module Timestamptz private # Use timestamptz by default for generic timestamp value. def type_literal_generic_datetime(column) :timestamptz end end end Database.register_extension(:pg_timestamptz, Postgres::Timestamptz) end

sequel-5.63.0/lib/sequel/extensions/pretty_table.rb

# frozen-string-literal: true # # The pretty_table extension adds Sequel::Dataset#print and the # Sequel::PrettyTable class for creating nice-looking plain-text # tables.
Example: # # +--+-------+ # |id|name | # |--+-------| # |1 |fasdfas| # |2 |test | # +--+-------+ # # You can load this extension into specific datasets: # # ds = DB[:table] # ds = ds.extension(:pretty_table) # # Or you can load it into all of a database's datasets, which # is probably the desired behavior if you are using this extension: # # DB.extension(:pretty_table) # # Related module: Sequel::DatasetPrinter # module Sequel extension :_pretty_table module DatasetPrinter # Pretty prints the records in the dataset as a plain-text table. def print(*cols) ds = naked rows = ds.all Sequel::PrettyTable.print(rows, cols.empty? ? ds.columns : cols) end end Dataset.register_extension(:pretty_table, DatasetPrinter) end

sequel-5.63.0/lib/sequel/extensions/query.rb

# frozen-string-literal: true # # The query extension adds a query method which allows # a different way to construct queries instead of the usual # method chaining: # # dataset = DB[:items].query do # select :x, :y, :z # where{(x > 1) & (y > 2)} # reverse :z # end # # You can load this extension into specific datasets: # # ds = DB[:table] # ds = ds.extension(:query) # # Or you can load it into all of a database's datasets, which # is probably the desired behavior if you are using this extension: # # DB.extension(:query) # # Related modules: Sequel::DatabaseQuery, Sequel::DatasetQuery, # Sequel::Dataset::Query # module Sequel module DatabaseQuery def self.extended(db) db.extend_datasets(DatasetQuery) end # Return a dataset modified by the query block def query(&block) dataset.query(&block) end end module DatasetQuery # Translates a query block into a dataset. Query blocks are an # alternative to Sequel's usual method chaining, by using # instance_exec with a proxy object: # # dataset = DB[:items].query do # select :x, :y, :z # where{(x > 1) & (y > 2)} # reverse :z # end # # Which is the same as: # # dataset = DB[:items].select(:x, :y, :z).where{(x > 1) & (y > 2)}.reverse(:z) def query(&block) query = Dataset::Query.new(self) query.instance_exec(&block) query.dataset end end class Dataset # Proxy object used by Dataset#query. class Query < Sequel::BasicObject # The current dataset in the query. This changes on each method call. attr_reader :dataset def initialize(dataset) @dataset = dataset end # Replace the query's dataset with the dataset returned by the method call.
def method_missing(method, *args, &block) # Allow calling private methods, so things like raise work @dataset = @dataset.send(method, *args, &block) raise(Sequel::Error, "method #{method.inspect} did not return a dataset") unless @dataset.is_a?(Dataset) self end # :nocov: ruby2_keywords(:method_missing) if respond_to?(:ruby2_keywords, true) # :nocov: end end Dataset.register_extension(:query, DatasetQuery) Database.register_extension(:query, DatabaseQuery) end

sequel-5.63.0/lib/sequel/extensions/round_timestamps.rb

# frozen-string-literal: true # # The round_timestamps extension will automatically round timestamp # values to the database's supported level of precision before literalizing # them. # # For example, if the database supports millisecond precision, and you give # it a Time value with microsecond precision, it will round it appropriately: # # Time.at(1405341161.917999982833862) # # default: 2014-07-14 14:32:41.917999 # # with extension: 2014-07-14 14:32:41.918000 # # The round_timestamps extension correctly deals with databases that support # millisecond or second precision. In addition to handling Time values, it # also handles DateTime values and Sequel::SQLTime values (for the TIME type). # # To round timestamps for a single dataset: # # ds = ds.extension(:round_timestamps) # # To round timestamps for all datasets on a single database: # # DB.extension(:round_timestamps) # # Related module: Sequel::Dataset::RoundTimestamps module Sequel class Dataset module RoundTimestamps # Round DateTime values before literalizing def literal_datetime(v) super(v + Rational(5, 10**timestamp_precision)/864000) end # Round Sequel::SQLTime values before literalizing def literal_sqltime(v) super(v.round(timestamp_precision)) end # Round Time values before literalizing def literal_time(v) super(v.round(timestamp_precision)) end end register_extension(:round_timestamps, RoundTimestamps) end end

sequel-5.63.0/lib/sequel/extensions/run_transaction_hooks.rb

# frozen-string-literal: true # # The run_transaction_hooks extension allows for running after_commit or # after_rollback hooks before commit or rollback. It then removes # the hook after running it, so it will not be run twice. # # This extension should only be used in transactional tests where the # transaction always rolls back, to test the behavior of the after_commit # and after_rollback hooks.
Any other usage is probably a bad idea. # # Example: # # DB.extension :run_transaction_hooks # x = 1 # DB.transaction(rollback: :always) do # DB.after_rollback{x = 3} # DB.after_commit{x = 2} # # x # => 1 # DB.run_after_rollback_hooks # x # => 3 # DB.run_after_commit_hooks # x # => 2 # end # x # => 2 # class Sequel::Database module RunTransactionHooks # Run all savepoint and transaction after_commit hooks for the current transaction, # and remove the hooks after running them. # Options: # :server :: The server/shard to use. def run_after_commit_hooks(opts=OPTS) _run_transaction_hooks(:after_commit, opts) end # Run all savepoint and transaction after_rollback hooks for the current transaction, # and remove the hooks after running them. # Options: # :server :: The server/shard to use. def run_after_rollback_hooks(opts=OPTS) _run_transaction_hooks(:after_rollback, opts) end private def _run_transaction_hooks(type, opts) synchronize(opts[:server]) do |conn| unless h = _trans(conn) raise Sequel::Error, "Cannot call run_#{type}_hooks outside of a transaction" end if hooks = h[type] hooks.each(&:call) hooks.clear end if (savepoints = h[:savepoints]) savepoints.each do |savepoint| if hooks = savepoint[type] hooks.each(&:call) hooks.clear end end end end end end register_extension(:run_transaction_hooks, RunTransactionHooks) end

sequel-5.63.0/lib/sequel/extensions/s.rb

# frozen-string-literal: true # # The s extension adds Sequel::S, a module containing a private #S # method that calls Sequel.expr. It's designed as a shortcut so # that instead of: # # Sequel.expr(:column) + 1 # # or # Sequel.expr{column + 1} # # you can just write: # # S(:column) + 1 # # or # S{column + 1} # # To load the extension: # # Sequel.extension :s # # Then you can include the Sequel::S module into whatever classes or # objects you care about: # # Sequel::Model.send(:include, Sequel::S) # available in model instance methods # Sequel::Model.extend(Sequel::S) # available in model class methods # Sequel::Dataset.send(:include, Sequel::S) # available in dataset methods # # or just into Object if you want it available everywhere: # # Object.send(:include, Sequel::S) # # If you are using Ruby 2+, and you would like to use refinements, you # can use Sequel::S as a refinement, in which case the private #S method # will be available on all objects while the refinement is active. # # using Sequel::S # # S(:column) + 1 # # Related module: Sequel::S # module Sequel::S private # Delegate to Sequel.expr def S(*a, &block) Sequel.expr(*a, &block) end # :nocov: if RUBY_VERSION >= '2.0.0' include_meth = RUBY_VERSION >= '3.1' ?
:import_methods : :include # :nocov: refine Object do send include_meth, Sequel::S end end end

sequel-5.63.0/lib/sequel/extensions/schema_caching.rb

# frozen-string-literal: true # # The schema_caching extension adds a few methods to Sequel::Database # that make it easy to dump the parsed schema information to a file, # and load it from that file. Loading the schema information from a # dumped file is faster than parsing it from the database, so this # can save bootup time for applications with large numbers of models. # # Basic usage in application code: # # DB = Sequel.connect('...') # DB.extension :schema_caching # DB.load_schema_cache('/path/to/schema.dump') # # # load model files # # Then, whenever the database schema is modified, write a new cached # file. You can do that with <tt>bin/sequel</tt>'s -S option: # # bin/sequel -S /path/to/schema.dump postgres://... # # Alternatively, if you don't want to dump the schema information for # all tables, and you aren't worried about race conditions, you can # choose to use the following in your application code: # # DB = Sequel.connect('...') # DB.extension :schema_caching # DB.load_schema_cache?('/path/to/schema.dump') # # # load model files # # DB.dump_schema_cache?('/path/to/schema.dump') # # With this method, you just have to delete the schema dump file if # the schema is modified, and the application will recreate it for you # using just the tables that your models use. # # Note that it is up to the application to ensure that the dumped # cached schema reflects the current state of the database. Sequel # does no checking to ensure this, as checking would take time and the # purpose of this code is to take a shortcut. # # The cached schema is dumped in Marshal format, since it is the fastest # and it handles all ruby objects used in the schema hash. Because of this, # you should not attempt to load the schema from an untrusted file. # # Related module: Sequel::SchemaCaching # module Sequel module SchemaCaching # Dump the cached schema to the filename given in Marshal format. def dump_schema_cache(file) sch = {} @schemas.each do |k,v| sch[k] = v.map do |c, h| h = Hash[h] h.delete(:callable_default) [c, h] end end File.open(file, 'wb'){|f| f.write(Marshal.dump(sch))} nil end # Dump the cached schema to the filename given unless the file # already exists. def dump_schema_cache?(file) dump_schema_cache(file) unless File.exist?(file) end # Replace the schema cache with the data from the given file, which # should be in Marshal format. def load_schema_cache(file) @schemas = Marshal.load(File.read(file)) @schemas.each_value{|v| schema_post_process(v)} nil end # Replace the schema cache with the data from the given file if the # file exists.
def load_schema_cache?(file) load_schema_cache(file) if File.exist?(file) end end Database.register_extension(:schema_caching, SchemaCaching) end

sequel-5.63.0/lib/sequel/extensions/schema_dumper.rb

# frozen-string-literal: true # # The schema_dumper extension supports dumping tables and indexes # in a Sequel::Migration format, so they can be restored on another # database (which can be the same type or a different type than # the current database). The main interface is through # Sequel::Database#dump_schema_migration. # # The schema_dumper extension is quite limited in what types of # database objects it supports. In general, it only supports # dumping tables, columns, primary key and foreign key constraints, # and some indexes. It does not support most table options, CHECK # constraints, partial indexes, database functions, triggers, # security grants/revokes, and a wide variety of other useful # database properties. Be aware of the limitations when using the # schema_dumper extension. If you are dumping the schema to restore # to the same database type, it is recommended to use your database's # dump and restore programs instead of the schema_dumper extension. # # To load the extension: # # DB.extension :schema_dumper # # Related module: Sequel::SchemaDumper Sequel.extension :eval_inspect module Sequel module SchemaDumper # Convert the column schema information to a hash of column options, one of which must # be :type. The other options added should modify that type (e.g. :size). If a # database type is not recognized, return it as a String type. def column_schema_to_ruby_type(schema) type = schema[:db_type].downcase if database_type == :oracle type = type.sub(/ not null\z/, '') end case type when /\A(medium|small)?int(?:eger)?(?:\((\d+)\))?( unsigned)?\z/ if !$1 && $2 && $2.to_i >= 10 && $3 # Unsigned integer type with 10 digits can potentially contain values which # don't fit signed integer type, so use bigint type in target database. {:type=>:Bignum} else {:type=>Integer} end when /\Atinyint(?:\((\d+)\))?(?: unsigned)?\z/ {:type =>schema[:type] == :boolean ? TrueClass : Integer} when /\Abigint(?:\((?:\d+)\))?(?: unsigned)?\z/ {:type=>:Bignum} when /\A(?:real|float|double(?: precision)?|double\(\d+,\d+\))(?: unsigned)?\z/ {:type=>Float} when 'boolean', 'bit', 'bool' {:type=>TrueClass} when /\A(?:(?:tiny|medium|long|n)?text|clob)\z/ {:type=>String, :text=>true} when 'date' {:type=>Date} when /\A(?:small)?datetime\z/ {:type=>DateTime} when /\Atimestamp(?:\((\d+)\))?(?: with(?:out)? time zone)?\z/ {:type=>DateTime, :size=>($1.to_i if $1)} when /\Atime(?: with(?:out)?
time zone)?\z/ {:type=>Time, :only_time=>true} when /\An?char(?:acter)?(?:\((\d+)\))?\z/ {:type=>String, :size=>($1.to_i if $1), :fixed=>true} when /\A(?:n?varchar2?|character varying|bpchar|string)(?:\((\d+)\))?\z/ {:type=>String, :size=>($1.to_i if $1)} when /\A(?:small)?money\z/ {:type=>BigDecimal, :size=>[19,2]} when /\A(?:decimal|numeric|number)(?:\((\d+)(?:,\s*(\d+))?\))?(?: unsigned)?\z/ s = [($1.to_i if $1), ($2.to_i if $2)].compact {:type=>BigDecimal, :size=>(s.empty? ? nil : s)} when /\A(?:bytea|(?:tiny|medium|long)?blob|(?:var)?binary)(?:\((\d+)\))?\z/ {:type=>File, :size=>($1.to_i if $1)} when /\A(?:year|(?:int )?identity)\z/ {:type=>Integer} else {:type=>String} end end # Dump foreign key constraints for all tables as a migration. This complements # the foreign_keys: false option to dump_schema_migration. This only dumps # the constraints (not the columns) using alter_table/add_foreign_key with an # array of columns. # # Note that the migration this produces does not have a down # block, so you cannot reverse it. def dump_foreign_key_migration(options=OPTS) ts = tables(options) <<END_MIG Sequel.migration do change do #{ts.sort.map{|t| dump_table_foreign_keys(t)}.reject{|x| x == ''}.join("\n\n").gsub(/^/, ' ')} end end END_MIG end # Dump indexes for all tables as a migration. This complements # the indexes: false option to dump_schema_migration. Options: # :same_db :: Create a dump for the same database type, so # don't ignore errors if the index statements fail. # :index_names :: If set to false, don't record names of indexes. If # set to :namespace, prepend the table name to the index name if the # database does not use a global index namespace. def dump_indexes_migration(options=OPTS) ts = tables(options) <<END_MIG Sequel.migration do change do #{ts.sort.map{|t| dump_table_indexes(t, :add_index, options)}.reject{|x| x == ''}.join("\n\n").gsub(/^/, ' ')} end end END_MIG end # Return a string that contains a Sequel migration that when # run would recreate the database structure. Options: # :same_db :: Don't attempt to translate database types to ruby types. # If this isn't set to true, all database types will be translated to # ruby types, but there is no guarantee that the migration generated # will yield the same type. Without this set, types that aren't # recognized will be translated to a string-like type. # :foreign_keys :: If set to false, don't dump foreign_keys (they can be # added later via #dump_foreign_key_migration) # :indexes :: If set to false, don't dump indexes (they can be added # later via #dump_index_migration). # :index_names :: If set to false, don't record names of indexes. If # set to :namespace, prepend the table name to the index name. def dump_schema_migration(options=OPTS) options = options.dup if options[:indexes] == false && !options.has_key?(:foreign_keys) # Unless foreign_keys option is specifically set, disable if indexes # are disabled, as foreign keys that point to non-primary keys rely # on unique indexes being created first options[:foreign_keys] = false end ts = sort_dumped_tables(tables(options), options) skipped_fks = if sfk = options[:skipped_foreign_keys] # Handle skipped foreign keys by adding them at the end via # alter_table/add_foreign_key. Note that skipped foreign keys # probably result in a broken down migration. sfka = sfk.sort.map{|table, fks| dump_add_fk_constraints(table, fks.values)} sfka.join("\n\n").gsub(/^/, ' ') unless sfka.empty? 
end <<END_MIG Sequel.migration do change do #{ts.map{|t| dump_table_schema(t, options)}.join("\n\n").gsub(/^/, ' ')}#{"\n \n" if skipped_fks}#{skipped_fks} end end END_MIG end # Return a string with a create table block that will recreate the given # table's schema. Takes the same options as dump_schema_migration. def dump_table_schema(table, options=OPTS) gen = dump_table_generator(table, options) commands = [gen.dump_columns, gen.dump_constraints, gen.dump_indexes].reject{|x| x == ''}.join("\n\n") "create_table(#{table.inspect}#{', :ignore_index_errors=>true' if !options[:same_db] && options[:indexes] != false && !gen.indexes.empty?}) do\n#{commands.gsub(/^/, ' ')}\nend" end private # If a database default exists and can't be converted, and we are dumping with :same_db, # return a string with the inspect method modified a literal string is created if the code is evaled. def column_schema_to_ruby_default_fallback(default, options) if default.is_a?(String) && options[:same_db] && use_column_schema_to_ruby_default_fallback? default = default.dup def default.inspect "Sequel::LiteralString.new(#{super})" end default end end # Recreate the column in the passed Schema::CreateTableGenerator from the given name and parsed database schema. def recreate_column(name, schema, gen, options) if options[:single_pk] && schema_autoincrementing_primary_key?(schema) type_hash = options[:same_db] ? {:type=>schema[:db_type]} : column_schema_to_ruby_type(schema) [:table, :key, :on_delete, :on_update, :deferrable].each{|f| type_hash[f] = schema[f] if schema[f]} if type_hash == {:type=>Integer} || type_hash == {:type=>"integer"} || type_hash == {:type=>"INTEGER"} type_hash.delete(:type) elsif options[:same_db] && type_hash == {:type=>type_literal_generic_bignum_symbol(type_hash).to_s} type_hash[:type] = :Bignum end unless gen.columns.empty? type_hash[:keep_order] = true end if type_hash.empty? gen.primary_key(name) else gen.primary_key(name, type_hash) end else col_opts = if options[:same_db] h = {:type=>schema[:db_type]} if database_type == :mysql && h[:type] =~ /\Atimestamp/ h[:null] = true end h else column_schema_to_ruby_type(schema) end type = col_opts.delete(:type) col_opts.delete(:size) if col_opts[:size].nil? if schema[:generated] if options[:same_db] && database_type == :postgres col_opts[:generated_always_as] = column_schema_to_ruby_default_fallback(schema[:default], options) end else col_opts[:default] = if schema[:ruby_default].nil? column_schema_to_ruby_default_fallback(schema[:default], options) else schema[:ruby_default] end col_opts.delete(:default) if col_opts[:default].nil? end col_opts[:null] = false if schema[:allow_null] == false if table = schema[:table] [:key, :on_delete, :on_update, :deferrable].each{|f| col_opts[f] = schema[f] if schema[f]} col_opts[:type] = type unless type == Integer || type == 'integer' || type == 'INTEGER' gen.foreign_key(name, table, col_opts) else gen.column(name, type, col_opts) if [Integer, :Bignum, Float, BigDecimal].include?(type) && schema[:db_type] =~ / unsigned\z/io gen.check(Sequel::SQL::Identifier.new(name) >= 0) end end end end # For the table and foreign key metadata array, return an alter_table # string that would add the foreign keys if run in a migration. 
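    # A sketch of the kind of string this produces (the table and column
    # names here are hypothetical, not taken from the source):
    #
    #   alter_table(:albums) do
    #     add_foreign_key [:artist_id], :artists
    #   end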
def dump_add_fk_constraints(table, fks) sfks = String.new sfks << "alter_table(#{table.inspect}) do\n" sfks << create_table_generator do fks.sort_by{|fk| fk[:columns]}.each do |fk| foreign_key fk[:columns], fk end end.dump_constraints.gsub(/^foreign_key /, ' add_foreign_key ') sfks << "\nend" end # For the table given, get the list of foreign keys and return an alter_table # string that would add the foreign keys if run in a migration. def dump_table_foreign_keys(table, options=OPTS) if supports_foreign_key_parsing? fks = foreign_key_list(table, options).sort_by{|fk| fk[:columns]} end if fks.nil? || fks.empty? '' else dump_add_fk_constraints(table, fks) end end # Return a Schema::CreateTableGenerator object that will recreate the # table's schema. Takes the same options as dump_schema_migration. def dump_table_generator(table, options=OPTS) s = schema(table, options).dup pks = s.find_all{|x| x.last[:primary_key] == true}.map(&:first) options = options.merge(:single_pk=>true) if pks.length == 1 m = method(:recreate_column) im = method(:index_to_generator_opts) if options[:indexes] != false && supports_index_parsing? indexes = indexes(table).sort end if options[:foreign_keys] != false && supports_foreign_key_parsing? fk_list = foreign_key_list(table) if (sfk = options[:skipped_foreign_keys]) && (sfkt = sfk[table]) fk_list.delete_if{|fk| sfkt.has_key?(fk[:columns])} end composite_fks, single_fks = fk_list.partition{|h| h[:columns].length > 1} fk_hash = {} single_fks.each do |fk| column = fk.delete(:columns).first fk.delete(:name) fk_hash[column] = fk end s = s.map do |name, info| if fk_info = fk_hash[name] [name, fk_info.merge(info)] else [name, info] end end end create_table_generator do s.each{|name, info| m.call(name, info, self, options)} primary_key(pks) if !@primary_key && pks.length > 0 indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts, options))} if indexes composite_fks.each{|fk| send(:foreign_key, fk[:columns], fk)} if composite_fks end end # Return a string that containing add_index/drop_index method calls for # creating the index migration. def dump_table_indexes(table, meth, options=OPTS) if supports_index_parsing? indexes = indexes(table).sort else return '' end im = method(:index_to_generator_opts) gen = create_table_generator do indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts, options))} end gen.dump_indexes(meth=>table, :ignore_errors=>!options[:same_db]) end # Convert the parsed index information into options to the CreateTableGenerator's index method. def index_to_generator_opts(table, name, index_opts, options=OPTS) h = {} if options[:index_names] != false && default_index_name(table, index_opts[:columns]) != name.to_s if options[:index_names] == :namespace && !global_index_namespace? h[:name] = "#{table}_#{name}".to_sym else h[:name] = name end end h[:unique] = true if index_opts[:unique] h[:deferrable] = true if index_opts[:deferrable] h end # Sort the tables so that referenced tables are created before tables that # reference them, and then by name. If foreign keys are disabled, just sort by name. def sort_dumped_tables(tables, options=OPTS) if options[:foreign_keys] != false && supports_foreign_key_parsing? table_fks = {} tables.each{|t| table_fks[t] = foreign_key_list(t)} # Remove self referential foreign keys, not important when sorting. 
table_fks.each{|t, fks| fks.delete_if{|fk| fk[:table] == t}} tables, skipped_foreign_keys = sort_dumped_tables_topologically(table_fks, []) options[:skipped_foreign_keys] = skipped_foreign_keys tables else tables.sort end end # Do a topological sort of tables, so that referenced tables # come before referencing tables. Returns an array of sorted # tables and a hash of skipped foreign keys. The hash will be # empty unless there are circular dependencies. def sort_dumped_tables_topologically(table_fks, sorted_tables) skipped_foreign_keys = {} until table_fks.empty? this_loop = [] table_fks.each do |table, fks| fks.delete_if{|fk| !table_fks.has_key?(fk[:table])} this_loop << table if fks.empty? end if this_loop.empty? # No tables were changed this round, there must be a circular dependency. # Break circular dependency by picking the table with the least number of # outstanding foreign keys and skipping those foreign keys. # The skipped foreign keys will be added at the end of the # migration. skip_table, skip_fks = table_fks.sort_by{|table, fks| [fks.length, table]}.first skip_fks_hash = skipped_foreign_keys[skip_table] = {} skip_fks.each{|fk| skip_fks_hash[fk[:columns]] = fk} this_loop << skip_table end # Add sorted tables from this loop to the final list sorted_tables.concat(this_loop.sort) # Remove tables that were handled this loop this_loop.each{|t| table_fks.delete(t)} end [sorted_tables, skipped_foreign_keys] end # Don't use a literal string fallback on MySQL, since the defaults it uses aren't # valid literal SQL values. def use_column_schema_to_ruby_default_fallback? database_type != :mysql end end module Schema class CreateTableGenerator # Dump this generator's columns to a string that could be evaled inside # another instance to represent the same columns def dump_columns strings = [] cols = columns.dup cols.each do |x| x.delete(:on_delete) if x[:on_delete] == :no_action x.delete(:on_update) if x[:on_update] == :no_action end if (pkn = primary_key_name) && !@primary_key[:keep_order] cols.delete_if{|x| x[:name] == pkn} pk = @primary_key.dup pkname = pk.delete(:name) @db.serial_primary_key_options.each{|k,v| pk.delete(k) if v == pk[k]} strings << "primary_key #{pkname.inspect}#{opts_inspect(pk)}" end cols.each do |c| c = c.dup name = c.delete(:name) strings << if table = c.delete(:table) c.delete(:type) if c[:type] == Integer || c[:type] == 'integer' "foreign_key #{name.inspect}, #{table.inspect}#{opts_inspect(c)}" elsif pkn == name @db.serial_primary_key_options.each{|k,v| c.delete(k) if v == c[k]} "primary_key #{name.inspect}#{opts_inspect(c)}" else type = c.delete(:type) opts = opts_inspect(c) case type when Class "#{type.name} #{name.inspect}#{opts}" when :Bignum "Bignum #{name.inspect}#{opts}" else "column #{name.inspect}, #{type.inspect}#{opts}" end end end strings.join("\n") end # Dump this generator's constraints to a string that could be evaled inside # another instance to represent the same constraints def dump_constraints cs = constraints.map do |c| c = c.dup type = c.delete(:type) case type when :check raise(Error, "can't dump check/constraint specified with Proc") if c[:check].is_a?(Proc) name = c.delete(:name) if !name and c[:check].length == 1 and c[:check].first.is_a?(Hash) "check #{c[:check].first.inspect[1...-1]}" else "#{name ? 
"constraint #{name.inspect}," : 'check'} #{c[:check].map(&:inspect).join(', ')}" end when :foreign_key c.delete(:on_delete) if c[:on_delete] == :no_action c.delete(:on_update) if c[:on_update] == :no_action c.delete(:deferrable) unless c[:deferrable] cols = c.delete(:columns) table = c.delete(:table) "#{type} #{cols.inspect}, #{table.inspect}#{opts_inspect(c)}" else cols = c.delete(:columns) "#{type} #{cols.inspect}#{opts_inspect(c)}" end end cs.join("\n") end # Dump this generator's indexes to a string that could be evaled inside # another instance to represent the same indexes. Options: # :add_index :: Use add_index instead of index, so the methods # can be called outside of a generator but inside a migration. # The value of this option should be the table name to use. # :drop_index :: Same as add_index, but create drop_index statements. # :ignore_errors :: Add the ignore_errors option to the outputted indexes def dump_indexes(options=OPTS) is = indexes.map do |c| c = c.dup cols = c.delete(:columns) if table = options[:add_index] || options[:drop_index] "#{options[:drop_index] ? 'drop' : 'add'}_index #{table.inspect}, #{cols.inspect}#{', :ignore_errors=>true' if options[:ignore_errors]}#{opts_inspect(c)}" else "index #{cols.inspect}#{opts_inspect(c)}" end end is = is.reverse if options[:drop_index] is.join("\n") end private # Return a string that converts the given options into one # suitable for literal ruby code, handling default values # that don't default to a literal interpretation. def opts_inspect(opts) if opts[:default] opts = opts.dup de = Sequel.eval_inspect(opts.delete(:default)) ", :default=>#{de}#{", #{opts.inspect[1...-1]}" if opts.length > 0}" else ", #{opts.inspect[1...-1]}" if opts.length > 0 end end end end Database.register_extension(:schema_dumper, SchemaDumper) end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/extensions/select_remove.rb������������������������������������������������0000664�0000000�0000000�00000004337�14342141206�0022533�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true # # The select_remove extension adds select_remove for removing existing selected # columns from a dataset. It's not part of Sequel core as it is rarely needed and has # some corner cases where it can't work correctly. # # You can load this extension into specific datasets: # # ds = DB[:table] # ds = ds.extension(:select_remove) # # Or you can load it into all of a database's datasets, which # is probably the desired behavior if you are using this extension: # # DB.extension(:select_remove) # # Related module: Sequel::SelectRemove # module Sequel module SelectRemove # Remove columns from the list of selected columns. If any of the currently selected # columns use expressions/aliases, this will remove selected columns with the given # aliases. 
It will also remove entries from the selection that match exactly: # # # Assume columns a, b, and c in items table # DB[:items] # SELECT * FROM items # DB[:items].select_remove(:c) # SELECT a, b FROM items # DB[:items].select(:a, Sequel[:b].as(:c), Sequel[:c].as(:b)).select_remove(:c) # SELECT a, c AS b FROM items # DB[:items].select(:a, Sequel[:b][:c], Sequel[:c][:b]).select_remove(Sequel[:c][:b]) # SELECT a, b AS c FROM items # # Note that there are a few cases where this method may not work correctly: # # * This dataset joins multiple tables and does not have an existing explicit selection. # In this case, the code will currently use unqualified column names for all columns # the dataset returns, except for the columns given. # * This dataset has an existing explicit selection containing an item that returns # multiple database columns (e.g. Sequel[:table].*, Sequel.lit('column1, column2')). In this case, # the behavior is undefined and this method should not be used. # # There may be other cases where this method does not work correctly, use it with caution. def select_remove(*cols) if (sel = @opts[:select]) && !sel.empty? select(*(columns.zip(sel).reject{|c, s| cols.include?(c)}.map{|c, s| s} - cols)) else select(*(columns - cols)) end end end Dataset.register_extension(:select_remove, SelectRemove) end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/extensions/sequel_4_dataset_methods.rb�������������������������������������0000664�0000000�0000000�00000005363�14342141206�0024650�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true # # This adds the following dataset methods: # # and :: alias for where # exclude_where :: alias for exclude # interval :: Returns max - min, using a single query # range :: Returns min..max, using a single query # # It is only recommended to use this for backwards compatibility. # # You can load this extension into specific datasets: # # ds = DB[:table] # ds = ds.extension(:sequel_4_dataset_methods) # # Or you can load it into all of a database's datasets, which # is probably the desired behavior if you are using this extension: # # DB.extension(:sequel_4_dataset_methods) # # Related module: Sequel::Sequel4DatasetMethods # module Sequel module Sequel4DatasetMethods # Alias for where. def and(*cond, &block) where(*cond, &block) end # Alias for exclude. def exclude_where(*cond, &block) exclude(*cond, &block) end # Returns the interval between minimum and maximum values for the given # column/expression. Uses a virtual row block if no argument is given. 
# # DB[:table].interval(:id) # SELECT (max(id) - min(id)) FROM table LIMIT 1 # # => 6 # DB[:table].interval{function(column)} # SELECT (max(function(column)) - min(function(column))) FROM table LIMIT 1 # # => 7 def interval(column=(no_arg = true), &block) column = Sequel.virtual_row(&block) if no_arg if loader = cached_placeholder_literalizer(:_interval_loader) do |pl| arg = pl.arg aggregate_dataset.limit(1).select((SQL::Function.new(:max, arg) - SQL::Function.new(:min, arg)).as(:interval)) end loader.get(column) else aggregate_dataset.get{(max(column) - min(column)).as(:interval)} end end # Returns a +Range+ instance made from the minimum and maximum values for the # given column/expression. Uses a virtual row block if no argument is given. # # DB[:table].range(:id) # SELECT max(id) AS v1, min(id) AS v2 FROM table LIMIT 1 # # => 1..10 # DB[:table].interval{function(column)} # SELECT max(function(column)) AS v1, min(function(column)) AS v2 FROM table LIMIT 1 # # => 0..7 def range(column=(no_arg = true), &block) column = Sequel.virtual_row(&block) if no_arg r = if loader = cached_placeholder_literalizer(:_range_loader) do |pl| arg = pl.arg aggregate_dataset.limit(1).select(SQL::Function.new(:min, arg).as(:v1), SQL::Function.new(:max, arg).as(:v2)) end loader.first(column) else aggregate_dataset.select{[min(column).as(v1), max(column).as(v2)]}.first end if r (r[:v1]..r[:v2]) end end end Dataset.register_extension(:sequel_4_dataset_methods, Sequel4DatasetMethods) end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/extensions/server_block.rb�������������������������������������������������0000664�0000000�0000000�00000012771�14342141206�0022360�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true # # The server_block extension adds the Database#with_server method, which takes a shard # argument and a block, and makes it so that access inside the block will use the # specified shard by default. 
# # First, you need to enable it on the database object: # # DB.extension :server_block # # Then you can call with_server: # # DB.with_server(:shard1) do # DB[:a].all # Uses shard1 # DB[:a].server(:shard2).all # Uses shard2 # end # DB[:a].all # Uses default # # You can nest calls to with_server: # # DB.with_server(:shard1) do # DB[:a].all # Uses shard1 # DB.with_server(:shard2) do # DB[:a].all # Uses shard2 # end # DB[:a].all # Uses shard1 # end # DB[:a].all # Uses default # # Note that if you pass the nil, :default, or :read_only server/shard # names to Dataset#server inside a with_server block, they will be # ignored and the server/shard given to with_server will be used: # # DB.with_server(:shard1) do # DB[:a].all # Uses shard1 # DB[:a].server(:shard2).all # Uses shard2 # DB[:a].server(nil).all # Uses shard1 # DB[:a].server(:default).all # Uses shard1 # DB[:a].server(:read_only).all # Uses shard1 # end # # If you pass two separate shards to with_server, the second shard will # be used instead of the :read_only shard, and the first shard will be # used instead of the :default shard: # # DB.with_server(:shard1, :shard2) do # DB[:a].all # Uses shard2 # DB[:a].delete # Uses shard1 # DB[:a].server(:shard3).all # Uses shard3 # DB[:a].server(:shard3).delete # Uses shard3 # DB[:a].server(:default).all # Uses shard1 # DB[:a].server(:read_only).delete # Uses shard2 # end # # If you use an invalid server when calling with_server, it will be # treated the same way as if you called Dataset#server with an invalid # server. By default, the default server will be used in such cases. # If you would like a different server to be used, or an exception to # be raised, then use the :servers_hash Database option. # # Related modules: Sequel::ServerBlock, Sequel::UnthreadedServerBlock, # Sequel::ThreadedServerBlock # module Sequel module ServerBlock # Enable the server block on the connection pool, choosing the correct # extension depending on whether the connection pool is threaded or not. # Also defines the with_server method on the receiver for easy use. def self.extended(db) pool = db.pool if defined?(ShardedThreadedConnectionPool) && pool.is_a?(ShardedThreadedConnectionPool) pool.extend(ThreadedServerBlock) pool.instance_variable_set(:@default_servers, {}) else pool.extend(UnthreadedServerBlock) pool.instance_variable_set(:@default_servers, []) end end # Delegate to the connection pool def with_server(default_server, read_only_server=default_server, &block) pool.with_server(default_server, read_only_server, &block) end end # Adds with_server support for the sharded single connection pool. module UnthreadedServerBlock # Set a default server/shard to use inside the block. def with_server(default_server, read_only_server=default_server) set_default_server(default_server, read_only_server) yield ensure clear_default_server end private # Make the given server the new default server. def set_default_server(default_server, read_only_server=default_server) @default_servers << [default_server, read_only_server] end # Remove the current default server, restoring the # previous default server. def clear_default_server @default_servers.pop end # Use the server given to with_server if appropriate. def pick_server(server) if @default_servers.empty? super else case server when :default, nil @servers[@default_servers[-1][0]] when :read_only @servers[@default_servers[-1][1]] else super end end end end # Adds with_server support for the sharded threaded connection pool. 
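  # Unlike UnthreadedServerBlock, the stack of default servers is kept
  # per-thread (keyed on Sequel.current), so nested with_server blocks
  # running in different threads do not interfere with each other.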
module ThreadedServerBlock # Set a default server/shard to use inside the block for the current # thread. def with_server(default_server, read_only_server=default_server) set_default_server(default_server, read_only_server) yield ensure clear_default_server end private # Make the given server the new default server for the current thread. def set_default_server(default_server, read_only_server=default_server) sync{(@default_servers[Sequel.current] ||= [])} << [default_server, read_only_server] end # Remove the current default server for the current thread, restoring the # previous default server. def clear_default_server t = Sequel.current a = sync{@default_servers[t]} a.pop sync{@default_servers.delete(t)} if a.empty? end # Use the server given to with_server for the given thread, if appropriate. def pick_server(server) a = sync{@default_servers[Sequel.current]} if !a || a.empty? super else # Hash handling required to work when loaded after arbitrary servers plugin. case server when :default, nil v = a[-1][0] v = @servers[v] unless v.is_a?(Hash) v when :read_only v = a[-1][1] v = @servers[v] unless v.is_a?(Hash) v else super end end end end Database.register_extension(:server_block, ServerBlock) end �������sequel-5.63.0/lib/sequel/extensions/server_logging.rb�����������������������������������������������0000664�0000000�0000000�00000003544�14342141206�0022712�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true # # The server_logging extension makes the logger include the server/shard # the query was issued on. This makes it easier to use the logs when # using sharding. # # Example: # # DB.opts[:server] # # {:read_only=>{}, :b=>{}} # DB.extension :server_logging # DB[:a].all # # (0.000005s) (conn: 1014942550, server: read_only) SELECT * FROM a # DB[:a].server(:b).all # # (0.000004s) (conn: 997304100, server: b) SELECT * FROM a # DB[:a].insert # # (0.000004s) (conn: 1014374750, server: default) INSERT INTO a DEFAULT VALUES # # In order for the server/shard to be correct for all connections, you need to # use this before connections to the database are made, or you need to call # <tt>Database#disconnect</tt> after loading this extension. # # Related module: Sequel::ServerLogging # module Sequel module ServerLogging # Initialize the hash mapping connections to shards, and turn on logging # of connection info unless it has specifically been turned off. def self.extended(db) db.instance_exec do @server_connection_map ||= {} self.log_connection_info = true if log_connection_info.nil? end end # When setting up a new connection, associate the connection with the # shard. def connect(server) conn = super Sequel.synchronize{@server_connection_map[conn] = server} conn end # When disconnecting a connection, remove the related connection from the mapping. def disconnect_connection(conn) super ensure Sequel.synchronize{@server_connection_map.delete(conn)} end private # Include the server with the connection's id. 
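    # Produces a prefix like the following, reusing the example connection
    # id from the documentation above (actual ids will differ):
    #
    #   "(conn: 1014942550, server: read_only) "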
    def connection_info(conn)
      "(conn: #{conn.__id__}, server: #{Sequel.synchronize{@server_connection_map[conn]}}) "
    end
  end

  Database.register_extension(:server_logging, ServerLogging)
end

sequel-5.63.0/lib/sequel/extensions/split_array_nil.rb

# frozen-string-literal: true
#
# The split_array_nil extension overrides Sequel's default handling of
# IN/NOT IN with arrays of values to do specific nil checking. For example,
#
#   ds = DB[:table].where(column: [1, nil])
#
# By default, that produces the following SQL:
#
#   SELECT * FROM table WHERE (column IN (1, NULL))
#
# However, because NULL = NULL is not true in SQL (it is NULL), this
# will not return rows in the table where the column is NULL. This
# extension allows for an alternative behavior more similar to ruby,
# which will return rows in the table where the column is NULL, using
# a query like:
#
#   SELECT * FROM table WHERE ((column IN (1)) OR (column IS NULL))
#
# Similarly, for NOT IN queries:
#
#   ds = DB[:table].exclude(column: [1, nil])
#
# Default:
#
#   SELECT * FROM table WHERE (column NOT IN (1, NULL))
#
# with the split_array_nil extension:
#
#   SELECT * FROM table WHERE ((column NOT IN (1)) AND (column IS NOT NULL))
#
# To use this extension with a single dataset:
#
#   ds = ds.extension(:split_array_nil)
#
# To use this extension for all of a database's datasets:
#
#   DB.extension(:split_array_nil)
#
# Related module: Sequel::Dataset::SplitArrayNil

#
module Sequel
  class Dataset
    module SplitArrayNil
      # Override the IN/NOT IN handling with an array of values where one of the
      # values in the array is nil, by removing nils from the array of values,
      # and using a separate OR IS NULL clause for IN or AND IS NOT NULL clause
      # for NOT IN.
      def complex_expression_sql_append(sql, op, args)
        case op
        when :IN, :"NOT IN"
          vals = args[1]
          if vals.is_a?(Array) && vals.any?(&:nil?)
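            # The value list contains a NULL: compact away the nils below and
            # attach an explicit IS NULL (for IN) or IS NOT NULL (for NOT IN)
            # check, so NULL-valued rows are matched the way ruby would match them.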
cols = args[0] vals = vals.compact c = Sequel::SQL::BooleanExpression if op == :IN literal_append(sql, c.new(:OR, c.new(:IN, cols, vals), c.new(:IS, cols, nil))) else literal_append(sql, c.new(:AND, c.new(:"NOT IN", cols, vals), c.new(:"IS NOT", cols, nil))) end else super end else super end end end end Dataset.register_extension(:split_array_nil, Dataset::SplitArrayNil) end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/extensions/sql_comments.rb�������������������������������������������������0000664�0000000�0000000�00000014507�14342141206�0022403�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true # # The sql_comments extension adds Dataset#comment to the datasets, # allowing you to set SQL comments in the resulting query. These # comments are appended to the end of the SQL query: # # ds = DB[:table].comment("Some Comment").all # # SELECT * FROM table -- Some Comment # # # # As you can see, this uses single line SQL comments (--) suffixed # by a newline. This plugin transforms all consecutive whitespace # in the comment to a single string: # # ds = DB[:table].comment("Some\r\nComment Here").all # # SELECT * FROM table -- Some Comment Here # # # # The reason for the prefixing and suffixing by newlines is to # work correctly when used in subqueries: # # ds = DB[:table].comment("Some\r\nComment Here") # ds.where(id: ds).all # # SELECT * FROM table WHERE (id IN (SELECT * FROM table -- Some Comment Here # # )) -- Some Comment Here # # # # In addition to working on SELECT queries, it also works when # inserting, updating, and deleting. # # Due to the use of single line SQL comments and converting all # whitespace to spaces, this should correctly handle even # malicious input. However, it would be unwise to rely on that, # you should ensure that the argument given # to Dataset#comment is not derived from user input. # # You can load this extension into specific datasets: # # ds = DB[:table] # ds = ds.extension(:sql_comments) # # Or you can load it into all of a database's datasets, which # is probably the desired behavior if you are using this extension: # # DB.extension(:sql_comments) # # Loading the sql_comments extension into the database also adds # support for block-level comment support via Database#with_comments. # You call #with_comments with a hash. 
Queries inside the hash will # include a comment based on the hash (assuming they are inside the # same thread): # # DB.with_comments(model: Album, action: :all) do # DB[:albums].all # # SELECT * FROM albums -- model:Album,action:all # end # # You can nest calls to #with_comments, which will combine the # entries from both calls: # # DB.with_comments(application: App, path: :scrubbed_path) do # DB.with_comments(model: Album, action: :all) do # ds = DB[:albums].all # # SELECT * FROM albums # # -- application:App,path:scrubbed_path,model:Album,action:all # end # end # # You can override comment entries specified in earlier blocks, or # remove entries specified earlier using a nil value: # # DB.with_comments(application: App, path: :scrubbed_path) do # DB.with_comments(application: Foo, path: nil) do # ds = DB[:albums].all # # SELECT * FROM albums # -- application:Foo # end # end # # You can combine block-level comments with dataset-specific # comments: # # DB.with_comments(model: Album, action: :all) do # DB[:table].comment("Some Comment").all # # SELECT * FROM albums -- model:Album,action:all -- Some Comment # end # # Note that Microsoft Access does not support inline comments, # and attempting to use comments on it will result in SQL syntax # errors. # # Related modules: Sequel::SQLComments, Sequel::Database::SQLComments # module Sequel module SQLComments # Return a modified copy of the dataset that will use the given comment. # To uncomment a commented dataset, pass nil as the argument. def comment(comment) clone(:comment=>(format_sql_comment(comment) if comment)) end %w'select insert update delete'.each do |type| define_method(:"#{type}_sql") do |*a| sql = super(*a) if comment = _sql_comment # This assumes that the comment stored in the dataset has # already been formatted. If not, this could result in SQL # injection. # # Additionally, due to the use of an SQL comment, if any # SQL is appened to the query after the comment is added, # it will become part of the comment unless it is preceded # by a newline. if sql.frozen? sql += comment sql.freeze elsif @opts[:append_sql] || @opts[:placeholder_literalizer] sql << comment else sql += comment end end sql end end private # The comment to include in the SQL query, if any. def _sql_comment @opts[:comment] end # Format the comment. For maximum compatibility, this uses a # single line SQL comment, and converts all consecutive whitespace # in the comment to a single space. def format_sql_comment(comment) " -- #{comment.to_s.gsub(/\s+/, ' ')}\n" end end module Database::SQLComments def self.extended(db) db.instance_variable_set(:@comment_hashes, {}) db.extend_datasets DatasetSQLComments end # A map of threads to comment hashes, used for correctly setting # comments for all queries inside #with_comments blocks. attr_reader :comment_hashes # Store the comment hash and use it to create comments inside the block def with_comments(comment_hash) hashes = @comment_hashes t = Sequel.current new_hash = if hash = Sequel.synchronize{hashes[t]} hash.merge(comment_hash) else comment_hash.dup end yield Sequel.synchronize{hashes[t] = new_hash} ensure if hash Sequel.synchronize{hashes[t] = hash} else t && Sequel.synchronize{hashes.delete(t)} end end module DatasetSQLComments include Sequel::SQLComments private # Include comments added via Database#with_comments in the output SQL. 
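      # Entries set via Database#with_comments for the current thread are
      # rendered as "key1:value1,key2:value2" (entries with nil values are
      # skipped) and placed before any dataset-specific comment.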
def _sql_comment specific_comment = super return specific_comment if @opts[:append_sql] t = Sequel.current hashes = db.comment_hashes block_comment = if comment_hash = Sequel.synchronize{hashes[t]} comment_array = comment_hash.map{|k,v| "#{k}:#{v}" unless v.nil?} comment_array.compact! comment_array.join(",") end if block_comment if specific_comment format_sql_comment(block_comment + specific_comment) else format_sql_comment(block_comment) end else specific_comment end end end end Dataset.register_extension(:sql_comments, SQLComments) Database.register_extension(:sql_comments, Database::SQLComments) end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/extensions/sql_expr.rb�����������������������������������������������������0000664�0000000�0000000�00000001067�14342141206�0021531�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true # # The sql_expr extension adds the sql_expr method to every object, which # returns an wrapped object that works nicely with Sequel's DSL by calling # Sequel.expr: # # 1.sql_expr < :a # 1 < a # false.sql_expr & :a # FALSE AND a # true.sql_expr | :a # TRUE OR a # ~nil.sql_expr # NOT NULL # "a".sql_expr + "b" # 'a' || 'b' # # To load the extension: # # Sequel.extension :sql_expr # class Object # Return the object wrapper in an appropriate Sequel expression object. def sql_expr Sequel[self] end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/extensions/sql_log_normalizer.rb�������������������������������������������0000664�0000000�0000000�00000006716�14342141206�0023604�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true # # The sql_log_normalizer extension normalizes the SQL that is logged, # removing the literal strings and numbers in the SQL, and removing the # logging of any bound variables: # # ds = DB[:table].first(a: 1, b: 'something') # # Without sql_log_normalizer extension # # SELECT * FROM "table" WHERE (("a" = 1) AND ("b" = 'something')) LIMIT 1 # # # With sql_log_normalizer_extension # # SELECT * FROM "table" WHERE (("a" = ?) AND ("b" = ?)) LIMIT ? # # The normalization is done by scanning the SQL string being executed # for literal strings and numbers, and replacing them with question # marks. 
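# For instance, a query logged like the following (a sketch, not captured
# adapter output):
#
#   SELECT * FROM t WHERE ((name = 'O''Brien') AND (qty > 10))
#
# would be normalized to:
#
#   SELECT * FROM t WHERE ((name = ?) AND (qty > ?))
#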
# While this should work for all or almost all production queries,
# there are pathological queries that will not be handled correctly, such as
# the use of apostrophes in identifiers:
#
#   DB[:"asf'bar"].where(a: 1, b: 'something').first
#   # Logged as:
#   # SELECT * FROM "asf?something')) LIMIT ?
#
# The expected use case for this extension is when you want to normalize
# logs to group similar queries, or when you want to protect sensitive
# data from being stored in the logs.
#
# Related module: Sequel::SQLLogNormalizer

#
module Sequel
  module SQLLogNormalizer
    def self.extended(db)
      type = case db.literal("'")
      when "''''"
        :standard
      when "'\\''"
        :backslash
      when "N''''"
        :n_standard
      else
        raise Error, "SQL log normalization is not supported on this database (' literalized as #{db.literal("'").inspect})"
      end
      db.instance_variable_set(:@sql_string_escape_type, type)
    end

    # Normalize the SQL before calling super.
    def log_connection_yield(sql, conn, args=nil)
      unless skip_logging?
        sql = normalize_logged_sql(sql)
        args = nil
      end
      super
    end

    # Replace literal strings and numbers in SQL with question mark placeholders.
    def normalize_logged_sql(sql)
      sql = sql.dup
      sql.force_encoding('BINARY')
      start_index = 0
      check_n = @sql_string_escape_type == :n_standard
      outside_string = true

      if @sql_string_escape_type == :backslash
        search_char = /[\\']/
        escape_char_offset = 0
        escape_char_value = 92 # backslash
      else
        search_char = "'"
        escape_char_offset = 1
        escape_char_value = 39 # apostrophe
      end

      # The approach used here goes against Sequel's philosophy of never attempting
      # to parse SQL. However, parsing the SQL is basically the only way to implement
      # this support with Sequel's design, and it's better to be pragmatic and accept
      # this than not be able to support this.

      # Replace literal strings
      while outside_string && (index = start_index = sql.index("'", start_index))
        if check_n && index != 0 && sql.getbyte(index-1) == 78 # N' start
          start_index -= 1
        end
        index += 1
        outside_string = false

        while (index = sql.index(search_char, index)) && (sql.getbyte(index + escape_char_offset) == escape_char_value)
          # skip escaped characters inside string literal
          index += 2
        end

        if index
          # Found end of string
          sql[start_index..index] = '?'
          start_index += 1
          outside_string = true
        end
      end

      # Replace integer and decimal floating point numbers
      sql.gsub!(/\b-?\d+(?:\.\d+)?\b/, '?')

      sql
    end
  end

  Database.register_extension(:sql_log_normalizer, SQLLogNormalizer)
end

sequel-5.63.0/lib/sequel/extensions/sqlite_json_ops.rb

# frozen-string-literal: true
#
# The sqlite_json_ops extension adds support to Sequel's DSL to make
# it easier to call SQLite JSON functions and operators (added
# first in SQLite 3.38.0).
#
# To load the extension:
#
#   Sequel.extension :sqlite_json_ops
#
# This extension works by calling methods on Sequel::SQLite::JSONOp objects,
# which you can create via Sequel.sqlite_json_op:
#
#   j = Sequel.sqlite_json_op(:json_column)
#
# Also, on most Sequel expression objects, you can call the sqlite_json_op method
# to create a Sequel::SQLite::JSONOp object:
#
#   j = Sequel[:json_column].sqlite_json_op
#
# If you have loaded the {core_extensions extension}[rdoc-ref:doc/core_extensions.rdoc],
# or you have loaded the core_refinements extension
# and have activated refinements for the file, you can also use Symbol#sqlite_json_op:
#
#   j = :json_column.sqlite_json_op
#
# The following methods are available for Sequel::SQLite::JSONOp instances:
#
#   j[1]                     # (json_column ->> 1)
#   j.get(1)                 # (json_column ->> 1)
#   j.get_json(1)            # (json_column -> 1)
#   j.extract('$.a')         # json_extract(json_column, '$.a')
#
#   j.array_length           # json_array_length(json_column)
#   j.type                   # json_type(json_column)
#   j.valid                  # json_valid(json_column)
#   j.json                   # json(json_column)
#
#   j.insert('$.a', 1)       # json_insert(json_column, '$.a', 1)
#   j.set('$.a', 1)          # json_set(json_column, '$.a', 1)
#   j.replace('$.a', 1)      # json_replace(json_column, '$.a', 1)
#   j.remove('$.a')          # json_remove(json_column, '$.a')
#   j.patch('{"a":2}')       # json_patch(json_column, '{"a":2}')
#
#   j.each                   # json_each(json_column)
#   j.tree                   # json_tree(json_column)
#
# Related modules: Sequel::SQLite::JSONOp

#
module Sequel
  module SQLite
    # The JSONOp class is a simple container for a single object that
    # defines methods that yield Sequel expression objects representing
    # SQLite json operators and functions.
    #
    # In the method documentation examples, assume that:
    #
    #   json_op = Sequel.sqlite_json_op(:json)
    class JSONOp < Sequel::SQL::Wrapper
      GET = ["(".freeze, " ->> ".freeze, ")".freeze].freeze
      private_constant :GET

      GET_JSON = ["(".freeze, " -> ".freeze, ")".freeze].freeze
      private_constant :GET_JSON

      # Returns an expression for getting the JSON array element or object field
      # at the specified path as a SQLite value.
      #
      #   json_op[1]          # (json ->> 1)
      #   json_op['a']        # (json ->> 'a')
      #   json_op['$.a.b']    # (json ->> '$.a.b')
      #   json_op['$[1][2]']  # (json ->> '$[1][2]')
      def [](key)
        json_op(GET, key)
      end
      alias get []

      # Returns an expression for the length of the JSON array, or the JSON array at
      # the given path.
      #
      #   json_op.array_length         # json_array_length(json)
      #   json_op.array_length('$[1]') # json_array_length(json, '$[1]')
      def array_length(*args)
        Sequel::SQL::NumericExpression.new(:NOOP, function(:array_length, *args))
      end

      # Returns an expression for a set of information extracted from the top-level
      # members of the JSON array or object, or the top-level members of the JSON array
      # or object at the given path.
      #
      #   json_op.each        # json_each(json)
      #   json_op.each('$.a') # json_each(json, '$.a')
      def each(*args)
        function(:each, *args)
      end

      # Returns an expression for the JSON array element or object field at the specified
      # path as a SQLite value, but only accept paths as arguments, and allow the use of
      # multiple paths.
      #
      #   json_op.extract('$.a')        # json_extract(json, '$.a')
      #   json_op.extract('$.a', '$.b') # json_extract(json, '$.a', '$.b')
      def extract(*a)
        function(:extract, *a)
      end

      # Returns an expression for getting the JSON array element or object field at the
      # specified path as a JSON value.
# # json_op.get_json(1) # (json -> 1) # json_op.get_json('a') # (json -> 'a') # json_op.get_json('$.a.b') # (json -> '$.a.b') # json_op.get_json('$[1][2]') # (json -> '$[1][2]') def get_json(key) self.class.new(json_op(GET_JSON, key)) end # Returns an expression for creating new entries at the given paths in the JSON array # or object, but not overwriting existing entries. # # json_op.insert('$.a', 1) # json_insert(json, '$.a', 1) # json_op.insert('$.a', 1, '$.b', 2) # json_insert(json, '$.a', 1, '$.b', 2) def insert(path, value, *args) wrapped_function(:insert, path, value, *args) end # Returns an expression for a minified version of the JSON. # # json_op.json # json(json) def json self.class.new(SQL::Function.new(:json, self)) end alias minify json # Returns an expression for updating the JSON object using the RFC 7396 MergePatch algorithm # # json_op.patch('{"a": 1, "b": null}') # json_patch(json, '{"a": 1, "b": null}') def patch(json_patch) wrapped_function(:patch, json_patch) end # Returns an expression for removing entries at the given paths from the JSON array or object. # # json_op.remove('$.a') # json_remove(json, '$.a') # json_op.remove('$.a', '$.b') # json_remove(json, '$.a', '$.b') def remove(path, *paths) wrapped_function(:remove, path, *paths) end # Returns an expression for replacing entries at the given paths in the JSON array or object, # but not creating new entries. # # json_op.replace('$.a', 1) # json_replace(json, '$.a', 1) # json_op.replace('$.a', 1, '$.b', 2) # json_replace(json, '$.a', 1, '$.b', 2) def replace(path, value, *args) wrapped_function(:replace, path, value, *args) end # Returns an expression for creating or replacing entries at the given paths in the # JSON array or object. # # json_op.set('$.a', 1) # json_set(json, '$.a', 1) # json_op.set('$.a', 1, '$.b', 2) # json_set(json, '$.a', 1, '$.b', 2) def set(path, value, *args) wrapped_function(:set, path, value, *args) end # Returns an expression for a set of information extracted from the JSON array or object, or # the JSON array or object at the given path. # # json_op.tree # json_tree(json) # json_op.tree('$.a') # json_tree(json, '$.a') def tree(*args) function(:tree, *args) end # Returns an expression for the type of the JSON value or the JSON value at the given path. # # json_op.type # json_type(json) # json_op.type('$[1]') # json_type(json, '$[1]') def type(*args) Sequel::SQL::StringExpression.new(:NOOP, function(:type, *args)) end alias typeof type # Returns a boolean expression for whether the JSON is valid or not. def valid Sequel::SQL::BooleanExpression.new(:NOOP, function(:valid)) end private # Internals of the [], get, get_json methods, using a placeholder literal string. def json_op(str, args) self.class.new(Sequel::SQL::PlaceholderLiteralString.new(str, [self, args])) end # Internals of the methods that return functions prefixed with +json_+. def function(name, *args) SQL::Function.new("json_#{name}", self, *args) end # Internals of the methods that return functions prefixed with +json_+, that # return JSON values. def wrapped_function(*args) self.class.new(function(*args)) end end module JSONOpMethods # Wrap the receiver in an JSONOp so you can easily use the SQLite # json functions and operators with it. def sqlite_json_op JSONOp.new(self) end end end module SQL::Builders # Return the object wrapped in an SQLite::JSONOp. 
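    #
    #   Sequel.sqlite_json_op(:json_column)  # Sequel::SQLite::JSONOp instance
    #
    # If the argument is already a JSONOp, it is returned unmodified.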
    def sqlite_json_op(v)
      case v
      when SQLite::JSONOp
        v
      else
        SQLite::JSONOp.new(v)
      end
    end
  end

  class SQL::GenericExpression
    include Sequel::SQLite::JSONOpMethods
  end

  class LiteralString
    include Sequel::SQLite::JSONOpMethods
  end
end

# :nocov:
if Sequel.core_extensions?
  class Symbol
    include Sequel::SQLite::JSONOpMethods
  end
end

if defined?(Sequel::CoreRefinements)
  module Sequel::CoreRefinements
    refine Symbol do
      send INCLUDE_METH, Sequel::SQLite::JSONOpMethods
    end
  end
end
# :nocov:

sequel-5.63.0/lib/sequel/extensions/string_agg.rb

# frozen-string-literal: true
#
# The string_agg extension adds the ability to perform database-independent
# aggregate string concatenation. For example, with a table like:
#
#   c1 | c2
#   ---+---
#   a  | 1
#   a  | 2
#   a  | 3
#   b  | 4
#
# You can return a result set like:
#
#   c1 | c2s
#   ---+-----
#   a  | 1,2,3
#   b  | 4
#
# First, you need to load the extension into the database:
#
#   DB.extension :string_agg
#
# Then you can use the Sequel.string_agg method to return a Sequel
# expression:
#
#   sa = Sequel.string_agg(:column_name)
#   # or:
#   sa = Sequel.string_agg(:column_name, '-') # custom separator
#
# You can specify the order in which the concatenation happens by
# calling +order+ on the expression:
#
#   sa = Sequel.string_agg(:column_name).order(:other_column)
#
# Additionally, if you want to have the concatenation only operate
# on distinct values, you can call distinct:
#
#   sa = Sequel.string_agg(:column_name).order(:other_column).distinct
#
# These expressions can be used in your datasets, or anywhere else that
# Sequel expressions are allowed:
#
#   DB[:table].
#     select_group(:c1).
#     select_append(Sequel.string_agg(:c2))
#
# This extension currently supports the following databases:
#
# * PostgreSQL 9+
# * SQLAnywhere 12+
# * Oracle 11g+ (except distinct)
# * DB2 9.7+ (except distinct)
# * MySQL
# * HSQLDB
# * H2
#
# Related module: Sequel::SQL::StringAgg

#
module Sequel
  module SQL
    module Builders
      # Return a StringAgg expression for an aggregate string concatenation.
      def string_agg(*a)
        StringAgg.new(*a)
      end
    end

    # The StringAgg class represents an aggregate string concatenation.
    class StringAgg < GenericExpression
      include StringMethods
      include StringConcatenationMethods
      include InequalityMethods
      include AliasMethods
      include CastMethods
      include OrderMethods
      include PatternMatchMethods
      include SubscriptMethods

      # These methods are added to datasets using the string_agg
      # extension, for the purposes of correctly literalizing StringAgg
      # expressions for the appropriate database type.
      module DatasetMethods
        # Append the SQL fragment for the StringAgg expression to the SQL query.
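        # As a sketch (assuming an expression :c2 ordered by :c1), PostgreSQL
        # gets string_agg(c2, ',' ORDER BY c1), MySQL gets
        # GROUP_CONCAT(c2 ORDER BY c1 SEPARATOR ','), and Oracle/DB2 get
        # listagg(c2, ',') WITHIN GROUP (ORDER BY c1).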
def string_agg_sql_append(sql, sa) if defined?(super) return super end expr = sa.expr separator = sa.separator || "," order = sa.order_expr distinct = sa.is_distinct? case db_type = db.database_type when :postgres, :sqlanywhere f = Function.new(db_type == :postgres ? :string_agg : :list, expr, separator) if order f = f.order(*order) end if distinct f = f.distinct end literal_append(sql, f) when :mysql, :hsqldb, :h2 sql << "GROUP_CONCAT(" if distinct sql << "DISTINCT " end literal_append(sql, expr) if order sql << " ORDER BY " expression_list_append(sql, order) end sql << " SEPARATOR " literal_append(sql, separator) sql << ")" when :oracle, :db2 if distinct raise Error, "string_agg with distinct is not implemented on #{db.database_type}" end literal_append(sql, Function.new(:listagg, expr, separator)) if order sql << " WITHIN GROUP (ORDER BY " expression_list_append(sql, order) sql << ")" else sql << " WITHIN GROUP (ORDER BY 1)" end else raise Error, "string_agg is not implemented on #{db.database_type}" end end end # The string expression for each row that will concatenated to the output. attr_reader :expr # The separator between each string expression. attr_reader :separator # The expression that the aggregation is ordered by. attr_reader :order_expr # Set the expression and separator def initialize(expr, separator=nil) @expr = expr @separator = separator yield self if defined?(yield) freeze end # Whether the current expression uses distinct expressions def is_distinct? @distinct == true end # Return a modified StringAgg that uses distinct expressions def distinct self.class.new(@expr, @separator) do |sa| sa.instance_variable_set(:@order_expr, @order_expr) if @order_expr sa.instance_variable_set(:@distinct, true) end end # Return a modified StringAgg with the given order def order(*o) self.class.new(@expr, @separator) do |sa| sa.instance_variable_set(:@distinct, @distinct) if @distinct sa.instance_variable_set(:@order_expr, o.empty? ? nil : o.freeze) end end to_s_method :string_agg_sql end end Dataset.register_extension(:string_agg, SQL::StringAgg::DatasetMethods) end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/extensions/string_date_time.rb���������������������������������������������0000664�0000000�0000000�00000002655�14342141206�0023221�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true # # The string_date_time extension provides String instance methods # for converting the strings to a date (e.g. String#to_date), allowing # for backwards compatibility with legacy Sequel code. # # These methods calls +parse+ on the related class, and as such, can # result in denial of service in older versions of Ruby for large # untrusted input, and raise exceptions in newer versions of Ruby. 
#
# To load the extension:
#
#   Sequel.extension :string_date_time

class String
  # Converts a string into a Date object.
  def to_date
    Date.parse(self, Sequel.convert_two_digit_years)
  rescue => e
    raise Sequel.convert_exception_class(e, Sequel::InvalidValue)
  end

  # Converts a string into a DateTime object.
  def to_datetime
    DateTime.parse(self, Sequel.convert_two_digit_years)
  rescue => e
    raise Sequel.convert_exception_class(e, Sequel::InvalidValue)
  end

  # Converts a string into a Time or DateTime object, depending on the
  # value of Sequel.datetime_class
  def to_sequel_time
    if Sequel.datetime_class == DateTime
      DateTime.parse(self, Sequel.convert_two_digit_years)
    else
      Sequel.datetime_class.parse(self)
    end
  rescue => e
    raise Sequel.convert_exception_class(e, Sequel::InvalidValue)
  end

  # Converts a string into a Time object.
  def to_time
    Time.parse(self)
  rescue => e
    raise Sequel.convert_exception_class(e, Sequel::InvalidValue)
  end
end

sequel-5.63.0/lib/sequel/extensions/symbol_aref.rb

# frozen-string-literal: true
#
# The symbol_aref extension makes Symbol#[] support Symbol,
# Sequel::SQL::Identifier, and Sequel::SQL::QualifiedIdentifier instances,
# returning appropriate Sequel::SQL::QualifiedIdentifier instances.  It's
# designed as a shortcut so that instead of:
#
#   Sequel[:table][:column] # table.column
#
# you can just write:
#
#   :table[:column] # table.column
#
# To load the extension:
#
#   Sequel.extension :symbol_aref
#
# If you are using Ruby 2+, and you would like to use refinements, there
# is a refinement version of this in the symbol_aref_refinement extension.
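#
# For example, the resulting qualified identifiers can be used anywhere
# Sequel expressions are accepted (a sketch):
#
#   DB[:albums].where(:albums[:artist_id] => 1)
#   # SELECT * FROM albums WHERE (albums.artist_id = 1)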
#
# Related module: Sequel::SymbolAref

if RUBY_VERSION >= '2.0'
  module Sequel::SymbolAref
    def [](v)
      case v
      when Symbol, Sequel::SQL::Identifier, Sequel::SQL::QualifiedIdentifier
        Sequel::SQL::QualifiedIdentifier.new(self, v)
      else
        super
      end
    end
  end

  class Symbol
    prepend Sequel::SymbolAref
  end
# :nocov:
else
  class Symbol
    if method_defined?(:[])
      alias_method :aref_before_sequel, :[]
    end

    def [](v)
      case v
      when Symbol, Sequel::SQL::Identifier, Sequel::SQL::QualifiedIdentifier
        Sequel::SQL::QualifiedIdentifier.new(self, v)
      else
        aref_before_sequel(v)
      end
    end
  end
end
# :nocov:

sequel-5.63.0/lib/sequel/extensions/symbol_aref_refinement.rb

# frozen-string-literal: true
#
# The symbol_aref_refinement extension adds a refinement that makes
# Symbol#[] support Symbol, Sequel::SQL::Identifier, and
# Sequel::SQL::QualifiedIdentifier instances, returning appropriate
# Sequel::SQL::QualifiedIdentifier instances. It's designed as a
# shortcut so that instead of:
#
#   Sequel[:table][:column] # table.column
#
# you can just write:
#
#   :table[:column] # table.column
#
# To load the extension:
#
#   Sequel.extension :symbol_aref_refinement
#
# To enable the refinement for the current file:
#
#   using Sequel::SymbolAref
#
# If you would like this extension to be enabled globally instead
# of as a refinement, use the symbol_aref extension.
#
# Related module: Sequel::SymbolAref

# :nocov:
raise(Sequel::Error, "Refinements require ruby 2.0.0 or greater") unless RUBY_VERSION >= '2.0.0'
# :nocov:

module Sequel::SymbolAref
  refine Symbol do
    def [](v)
      case v
      when Symbol, Sequel::SQL::Identifier, Sequel::SQL::QualifiedIdentifier
        Sequel::SQL::QualifiedIdentifier.new(self, v)
      else
        super
      end
    end
  end
end

sequel-5.63.0/lib/sequel/extensions/symbol_as.rb

# frozen-string-literal: true
#
# The symbol_as extension adds Symbol#as, for creating
# Sequel::SQL::AliasedExpression objects.
# It's designed as a shortcut so that instead of:
#
#   Sequel[:column].as(:alias)
#
# you can just write:
#
#   :column.as(:alias)
#
# To load the extension:
#
#   Sequel.extension :symbol_as
#
# If you are using Ruby 2+, and you would like to use refinements, there
# is a refinement version of this in the symbol_as_refinement extension.
#
class Symbol
  include Sequel::SQL::AliasMethods
end

sequel-5.63.0/lib/sequel/extensions/symbol_as_refinement.rb

# frozen-string-literal: true
#
# The symbol_as_refinement extension adds a refinement that makes
# Symbol#as return Sequel::SQL::AliasedExpression instances. It's
# designed as a shortcut so that instead of:
#
#   Sequel[:column].as(:alias) # column AS alias
#
# you can just write:
#
#   :column.as(:alias) # column AS alias
#
# To load the extension:
#
#   Sequel.extension :symbol_as_refinement
#
# To enable the refinement for the current file:
#
#   using Sequel::SymbolAs
#
# If you would like this extension to be enabled globally instead
# of as a refinement, use the symbol_as extension.
#
# Related module: Sequel::SymbolAs

# :nocov:
raise(Sequel::Error, "Refinements require ruby 2.0.0 or greater") unless RUBY_VERSION >= '2.0.0'
# :nocov:

module Sequel::SymbolAs
  refine Symbol do
    def as(aliaz, columns=nil)
      Sequel::SQL::AliasedExpression.new(self, aliaz, columns)
    end
  end
end

sequel-5.63.0/lib/sequel/extensions/synchronize_sql.rb

# frozen-string-literal: true
#
# The synchronize_sql extension checks out a connection from the pool while
# generating an SQL string.  In cases where a connection is necessary
# in order to properly escape input, and multiple inputs in the query need
# escaping, this can result in fewer connection checkouts and better
# overall performance.  In other cases this results in a performance decrease
# because a connection is checked out and either not used or kept checked out
# longer than necessary.
#
# The adapters where this extension may improve performance include amalgalite,
# mysql2, postgres, jdbc/postgresql, and tinytds.
# In these adapters, escaping strings requires a connection object, as
# proper escaping requires calling an escaping method on the connection
# object.
#
# This extension is most helpful when dealing with queries with lots of
# strings that need escaping (e.g. IN queries with long lists). By default,
# a connection will be checked out and back in for each string to be escaped,
# which under high contention can cause the query to spend longer generating
# the SQL string than the actual pool timeout (since every individual checkout
# will take less than the timeout, but the sum of all of them can be greater).
#
# This extension is unnecessary and will decrease performance if the single
# threaded connection pool is used.
#
module Sequel
  class Dataset
    module SynchronizeSQL
      %w'insert select update delete'.each do |type|
        define_method(:"#{type}_sql") do |*args|
          if @opts[:sql].is_a?(String)
            return super(*args)
          end

          db.synchronize(@opts[:server]) do
            super(*args)
          end
        end
      end
    end

    register_extension(:synchronize_sql, SynchronizeSQL)
  end
end

sequel-5.63.0/lib/sequel/extensions/thread_local_timezones.rb

# frozen-string-literal: true
#
# The thread_local_timezones extension allows you to set a per-thread timezone that
# will override the default global timezone while the thread is executing.  The
# main use case is for web applications that execute each request in its own thread,
# and want to set the timezones based on the request.
#
# To load the extension:
#
#   Sequel.extension :thread_local_timezones
#
# The most common example is having the database always store time in
# UTC, but have the application deal with the timezone of the current
# user.  That can be done with:
#
#   Sequel.database_timezone = :utc
#   # In each thread:
#   Sequel.thread_application_timezone = current_user.timezone
#
# This extension is designed to work with the named_timezones extension.
#
# This extension adds the thread_application_timezone=, thread_database_timezone=,
# and thread_typecast_timezone= methods to the Sequel module.  It overrides
# the application_timezone, database_timezone, and typecast_timezone
# methods to check the related thread local timezone first, and use it if present.
# If the related thread local timezone is not present, it falls back to the
# default global timezone.
#
# There is one special case of note.
# If you have a default global timezone
# and you want to have a nil thread local timezone, you have to set the thread
# local value to :nil instead of nil:
#
#   Sequel.application_timezone = :utc
#   Sequel.thread_application_timezone = nil
#   Sequel.application_timezone # => :utc
#   Sequel.thread_application_timezone = :nil
#   Sequel.application_timezone # => nil
#
# Related module: Sequel::ThreadLocalTimezones
#
module Sequel
  module ThreadLocalTimezones
    %w'application database typecast'.each do |t|
      class_eval("def thread_#{t}_timezone=(tz); Thread.current[:#{t}_timezone] = convert_timezone_setter_arg(tz); end", __FILE__, __LINE__)
      class_eval(<<END, __FILE__, __LINE__ + 1)
        def #{t}_timezone
          if tz = Thread.current[:#{t}_timezone]
            tz unless tz == :nil
          else
            super
          end
        end
END
    end
  end

  extend ThreadLocalTimezones
end

sequel-5.63.0/lib/sequel/extensions/to_dot.rb

# frozen-string-literal: true
#
# This adds a <tt>Sequel::Dataset#to_dot</tt> method.  The +to_dot+ method
# returns a string that can be processed by graphviz's +dot+ program in
# order to get a visualization of the dataset.  Basically, it shows a version
# of the dataset's abstract syntax tree.
#
# To load the extension:
#
#   Sequel.extension :to_dot
#
# Related module: Sequel::ToDot
#
module Sequel
  class ToDot
    module DatasetMethods
      # Return a string that can be processed by the +dot+ program (included
      # with graphviz) in order to see a visualization of the dataset's
      # abstract syntax tree.
      def to_dot
        ToDot.output(self)
      end
    end

    # The option keys that should be included in the dot output.
    TO_DOT_OPTIONS = [:with, :distinct, :select, :from, :join, :where, :group, :having, :compounds, :order, :limit, :offset, :lock].freeze

    # Given a +Dataset+, return a string in +dot+ format that will
    # generate a visualization of the dataset.
    def self.output(ds)
      new(ds).output
    end

    # Given a +Dataset+, parse the internal structure to generate
    # a dataset visualization.
    def initialize(ds)
      @i = 0
      @stack = [@i]
      @dot = ["digraph G {", "0 [label=\"self\"];"]
      v(ds, "")
      @dot << "}"
    end

    # Output the dataset visualization as a string in +dot+ format.
    def output
      @dot.join("\n")
    end

    private

    # Add an entry to the +dot+ output with the given label.  If +j+
    # is given, it is used directly as the node or transition.  Otherwise
    # a node is created for the current object.
    def dot(label, j=nil)
      label = case label
      when nil
        "<nil>"
      else
        label.to_s
      end
      @dot << "#{j||@i} [label=#{label.inspect}];"
    end

    # Recursive method that parses all of Sequel's internal datastructures,
    # adding the appropriate nodes and transitions to the internal +dot+
    # structure.
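    # Each supported node type (Array, Hash, ComplexExpression, Function,
    # JoinClause, Dataset, etc.) gets its own label, and child expressions
    # become child nodes in the resulting graph.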
    def v(e, l)
      @i += 1
      dot(l, "#{@stack.last} -> #{@i}")
      @stack.push(@i)
      case e
      when LiteralString
        dot "Sequel.lit(#{e.to_s.inspect})"
      when Symbol, Numeric, String, Class, TrueClass, FalseClass, NilClass
        dot e.inspect
      when Array
        dot "Array"
        e.each_with_index do |val, j|
          v(val, j)
        end
      when Hash
        dot "Hash"
        e.each do |k, val|
          v(val, k)
        end
      when SQL::ComplexExpression
        dot "ComplexExpression: #{e.op}"
        e.args.each_with_index do |val, j|
          v(val, j)
        end
      when SQL::Identifier
        dot "Identifier"
        v(e.value, :value)
      when SQL::QualifiedIdentifier
        dot "QualifiedIdentifier"
        v(e.table, :table)
        v(e.column, :column)
      when SQL::OrderedExpression
        dot "OrderedExpression: #{e.descending ? :DESC : :ASC}#{" NULLS #{e.nulls.to_s.upcase}" if e.nulls}"
        v(e.expression, :expression)
      when SQL::AliasedExpression
        dot "AliasedExpression"
        v(e.expression, :expression)
        v(e.alias, :alias)
        v(e.columns, :columns) if e.columns
      when SQL::CaseExpression
        dot "CaseExpression"
        v(e.expression, :expression) if e.expression
        v(e.conditions, :conditions)
        v(e.default, :default)
      when SQL::Cast
        dot "Cast"
        v(e.expr, :expr)
        v(e.type, :type)
      when SQL::Function
        dot "Function: #{e.name}"
        e.args.each_with_index do |val, j|
          v(val, j)
        end
        v(e.args, :args)
        v(e.opts, :opts)
      when SQL::Subscript
        dot "Subscript"
        v(e.f, :f)
        v(e.sub, :sub)
      when SQL::Window
        dot "Window"
        v(e.opts, :opts)
      when SQL::PlaceholderLiteralString
        str = e.str
        str = "(#{str})" if e.parens
        dot "PlaceholderLiteralString: #{str.inspect}"
        v(e.args, :args)
      when SQL::JoinClause
        str = "#{e.join_type.to_s.upcase} JOIN".dup
        if e.is_a?(SQL::JoinOnClause)
          str << " ON"
        elsif e.is_a?(SQL::JoinUsingClause)
          str << " USING"
        end
        dot str
        v(e.table_expr, :table)
        if e.is_a?(SQL::JoinOnClause)
          v(e.on, :on)
        elsif e.is_a?(SQL::JoinUsingClause)
          v(e.using, :using)
        end
      when Dataset
        dot "Dataset"
        TO_DOT_OPTIONS.each do |k|
          if val = e.opts[k]
            v(val, k)
          end
        end
      else
        dot "Unhandled: #{e.inspect}"
      end
      @stack.pop
    end
  end

  Dataset.register_extension(:to_dot, ToDot::DatasetMethods)
end

sequel-5.63.0/lib/sequel/extensions/virtual_row_method_block.rb

# frozen-string-literal: true
#
# This extension modifies virtual row blocks so that you can pass a block
# when calling a method to change the behavior.  It only exists
# for backwards compatibility with previous Sequel versions, and
# is not recommended for new applications.
#
# To load the extension:
#
#   Sequel.extension :virtual_row_method_block
#
module Sequel
  module SQL
    class VirtualRow < BasicObject
      include(Module.new do
        # Handle blocks passed to methods and change the behavior.
        def method_missing(m, *args, &block)
          if block
            if args.empty?
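              # A block with no arguments signals a plain function call with
              # no arguments (a sketch: version{} would produce version()).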
              Function.new(m)
            else
              case args.shift
              when :*
                Function.new(m, *args).*
              when :distinct
                Function.new(m, *args).distinct
              when :over
                opts = args.shift || OPTS
                f = Function.new(m, *::Kernel.Array(opts[:args]))
                f = f.* if opts[:*]
                f.over(opts)
              else
                Kernel.raise(Error, 'unsupported VirtualRow method argument used with block')
              end
            end
          else
            super
          end
        end
      end)
    end
  end
end

sequel-5.63.0/lib/sequel/model.rb

# frozen-string-literal: true

require_relative 'core'

module Sequel
  # <tt>Sequel::Model</tt> is an object relational mapper built on top of Sequel core.  Each
  # model class is backed by a dataset instance, and many dataset methods can be
  # called directly on the class.  Model datasets return rows as model instances,
  # which are wrappers around the underlying hash that allow easily updating or
  # deleting the individual row.
  #
  # <tt>Sequel::Model</tt> is built completely out of plugins.  Plugins can override any class,
  # instance, or dataset method defined by a previous plugin and call super to get the default
  # behavior.  By default, <tt>Sequel::Model</tt> loads two plugins, <tt>Sequel::Model</tt>
  # (which is itself a plugin) for the base support, and <tt>Sequel::Model::Associations</tt>
  # for the associations support.
  #
  # You can set the +SEQUEL_NO_ASSOCIATIONS+ constant or environment variable to
  # make Sequel not load the associations plugin by default.
  class Model
    OPTS = Sequel::OPTS

    # Empty instance methods to create that the user can override.
    # Just like any other method defined by Sequel, if you override one of these, you should
    # call +super+ to get the default behavior (while empty by default, they are often overridden
    # by plugins).  See the {"Model Hooks" guide}[rdoc-ref:doc/model_hooks.rdoc] for
    # more detail on hooks.
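    #
    # For example, a minimal sketch of overriding a hook (assumes the
    # model's table has an updated_at column):
    #
    #   class Album < Sequel::Model
    #     def before_save
    #       self.updated_at = Time.now
    #       super
    #     end
    #   end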
    HOOKS = [
      :after_create,
      :after_destroy,
      :after_save,
      :after_update,
      :after_validation,
      :before_create,
      :before_destroy,
      :before_save,
      :before_update,
      :before_validation
    ].freeze

    @cache_anonymous_models = true
    @db = nil
    @db_schema = nil
    @dataset = nil
    @dataset_method_modules = []
    @default_set_fields_options = {}
    @overridable_methods_module = nil
    @fast_pk_lookup_sql = nil
    @fast_instance_delete_sql = nil
    @plugins = []
    @primary_key = :id
    @raise_on_save_failure = true
    @raise_on_typecast_failure = false
    @require_modification = nil
    @require_valid_table = true
    @restrict_primary_key = true
    @setter_methods = nil
    @simple_pk = nil
    @simple_table = nil
    @strict_param_setting = true
    @typecast_empty_string_to_nil = true
    @typecast_on_assignment = true
    @use_transactions = true

    require_relative "model/default_inflections"
    require_relative "model/inflections"
    require_relative "model/plugins"
    require_relative "model/dataset_module"
    require_relative "model/base"
    require_relative "model/exceptions"
    require_relative "model/errors"
    # :nocov:
    if !defined?(::SEQUEL_NO_ASSOCIATIONS) && !ENV.has_key?('SEQUEL_NO_ASSOCIATIONS')
    # :nocov:
      require_relative 'model/associations'
      plugin Model::Associations
    end

    def_Model(::Sequel)

    # The setter methods (methods ending with =) that are never allowed
    # to be called automatically via +set+/+update+/+new+/etc.
    RESTRICTED_SETTER_METHODS = instance_methods.map(&:to_s).select{|l| l.end_with?('=')}.freeze
  end
end

sequel-5.63.0/lib/sequel/model/associations.rb

# frozen-string-literal: true

module Sequel
  class Model
    # Associations are used in order to specify relationships between model classes
    # that reflect relations between tables in the database using foreign keys.
    module Associations
      # Map of association type symbols to association reflection classes.
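      # This hash is populated by each reflection class defined below, e.g.
      # ASSOCIATION_TYPES[:many_to_one] = ManyToOneAssociationReflection.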
ASSOCIATION_TYPES = {} # Set an empty association reflection hash in the model def self.apply(model) model.instance_exec do @association_reflections = {} @autoreloading_associations = {} @cache_associations = true @default_eager_limit_strategy = true @default_association_options = {} @default_association_type_options = {} @dataset_module_class = DatasetModule end end # The dataset module to use for classes using the associations plugin. class DatasetModule < Model::DatasetModule def_dataset_caching_method(self, :eager) end # AssociationReflection is a Hash subclass that keeps information on Sequel::Model associations. It # provides methods to reduce internal code duplication. It should not # be instantiated by the user. class AssociationReflection < Hash include Sequel::Inflections # Name symbol for the _add internal association method def _add_method self[:_add_method] end # Name symbol for the _remove_all internal association method def _remove_all_method self[:_remove_all_method] end # Name symbol for the _remove internal association method def _remove_method self[:_remove_method] end # Name symbol for the _setter association method def _setter_method self[:_setter_method] end # Name symbol for the add association method def add_method self[:add_method] end # Name symbol for association method, the same as the name of the association. def association_method self[:name] end # The class associated to the current model class via this association def associated_class cached_fetch(:class) do begin constantize(self[:class_name]) rescue NameError => e raise NameError, "#{e.message} (this happened when attempting to find the associated class for #{inspect})", e.backtrace end end end # The dataset associated via this association, with the non-instance specific # changes already applied. This will be a joined dataset if the association # requires joining tables. def associated_dataset cached_fetch(:_dataset){apply_dataset_changes(_associated_dataset)} end # Apply all non-instance specific changes to the given dataset and return it. def apply_dataset_changes(ds) ds = ds.with_extend(AssociationDatasetMethods).clone(:association_reflection => self) if exts = self[:reverse_extend] ds = ds.with_extend(*exts) end ds = ds.select(*select) if select if c = self[:conditions] ds = (c.is_a?(Array) && !Sequel.condition_specifier?(c)) ? ds.where(*c) : ds.where(c) end ds = ds.order(*self[:order]) if self[:order] ds = ds.limit(*self[:limit]) if self[:limit] ds = ds.limit(1).skip_limit_check if limit_to_single_row? ds = ds.eager(self[:eager]) if self[:eager] ds = ds.distinct if self[:distinct] ds end # Apply all non-instance specific changes and the eager_block option to the given # dataset and return it. def apply_eager_dataset_changes(ds) ds = apply_dataset_changes(ds) if block = self[:eager_block] ds = block.call(ds) end ds end # Apply the eager graph limit strategy to the dataset to graph into the current dataset, or return # the dataset unmodified if no SQL limit strategy is needed. def apply_eager_graph_limit_strategy(strategy, ds) case strategy when :distinct_on apply_distinct_on_eager_limit_strategy(ds.order_prepend(*self[:order])) when :window_function apply_window_function_eager_limit_strategy(ds.order_prepend(*self[:order])).select(*ds.columns) else ds end end # Apply an eager limit strategy to the dataset, or return the dataset # unmodified if it doesn't need an eager limit strategy. 
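        # The SQL-based strategies handled here are :distinct_on and
        # :window_function; any other strategy value (e.g. :ruby) returns
        # the dataset unmodified.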
        def apply_eager_limit_strategy(ds, strategy=eager_limit_strategy, limit_and_offset=limit_and_offset())
          case strategy
          when :distinct_on
            apply_distinct_on_eager_limit_strategy(ds)
          when :window_function
            apply_window_function_eager_limit_strategy(ds, limit_and_offset)
          else
            ds
          end
        end

        # Use DISTINCT ON and ORDER BY clauses to limit the results to the first record with matching keys.
        def apply_distinct_on_eager_limit_strategy(ds)
          keys = predicate_key
          ds.distinct(*keys).order_prepend(*keys)
        end

        # Use a window function to limit the results of the eager loading dataset.
        def apply_window_function_eager_limit_strategy(ds, limit_and_offset=limit_and_offset())
          rn = ds.row_number_column
          limit, offset = limit_and_offset
          ds = ds.unordered.select_append{|o| o.row_number.function.over(:partition=>predicate_key, :order=>ds.opts[:order]).as(rn)}.from_self
          ds = ds.order(rn) if ds.db.database_type == :mysql
          ds = if !returns_array?
            ds.where(rn => offset ? offset+1 : 1)
          elsif offset
            offset += 1
            if limit
              ds.where(rn => (offset...(offset+limit)))
            else
              ds.where{SQL::Identifier.new(rn) >= offset}
            end
          else
            ds.where{SQL::Identifier.new(rn) <= limit}
          end
        end

        # If the ruby eager limit strategy is being used, slice the array using the slice
        # range to return the object(s) at the correct offset/limit.
        def apply_ruby_eager_limit_strategy(rows, limit_and_offset = limit_and_offset())
          name = self[:name]
          return unless range = slice_range(limit_and_offset)
          if returns_array?
            rows.each{|o| o.associations[name] = o.associations[name][range] || []}
          else
            offset = range.begin
            rows.each{|o| o.associations[name] = o.associations[name][offset]}
          end
        end

        # Whether the associations cache should use an array when storing the
        # associated records during eager loading.
        def assign_singular?
          !returns_array?
        end

        # Whether this association can have associated objects, given the current
        # object.  Should be false if obj cannot have associated objects because
        # the necessary key columns are NULL.
        def can_have_associated_objects?(obj)
          true
        end

        # Whether you are able to clone from the given association type to the current
        # association type, true by default only if the types match.
        def cloneable?(ref)
          ref[:type] == self[:type]
        end

        # Name symbol for the dataset association method
        def dataset_method
          self[:dataset_method]
        end

        # Whether the dataset needs a primary key to function, true by default.
        def dataset_need_primary_key?
          true
        end

        # Return the symbol used for the row number column if the window function
        # eager limit strategy is being used, or nil otherwise.
        def delete_row_number_column(ds=associated_dataset)
          if eager_limit_strategy == :window_function
            ds.row_number_column
          end
        end

        # Return a dataset that will load the appropriate associated objects for
        # the given object using this association.
        def association_dataset_for(object)
          condition = if can_have_associated_objects?(object)
            predicate_keys.zip(predicate_key_values(object))
          else
            false
          end

          associated_dataset.where(condition)
        end

        ASSOCIATION_DATASET_PROC = proc{|r| r.association_dataset_for(self)}

        # Proc used to create the association dataset method.
        def association_dataset_proc
          ASSOCIATION_DATASET_PROC
        end

        # The eager_graph limit strategy to use for this dataset
        def eager_graph_limit_strategy(strategy)
          if self[:limit] || !returns_array?
            strategy = strategy[self[:name]] if strategy.is_a?(Hash)
            case strategy
            when true
              true_eager_graph_limit_strategy
            when Symbol
              strategy
            else
              if returns_array? || offset
                :ruby
              end
            end
          end
        end

        # The eager limit strategy to use for this dataset.
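        # The computed strategy is memoized via cached_fetch, since it only
        # needs to be determined once per association.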
        def eager_limit_strategy
          cached_fetch(:_eager_limit_strategy) do
            if self[:limit] || !returns_array?
              case s = cached_fetch(:eager_limit_strategy){default_eager_limit_strategy}
              when true
                true_eager_limit_strategy
              else
                s
              end
            end
          end
        end

        # Eager load the associated objects using the hash of eager options,
        # yielding each row to the block.
        def eager_load_results(eo, &block)
          rows = eo[:rows]
          unless eo[:initialize_rows] == false
            Sequel.synchronize_with(eo[:mutex]){initialize_association_cache(rows)}
          end
          if eo[:id_map]
            ids = eo[:id_map].keys
            return ids if ids.empty?
          end
          strategy = eager_limit_strategy
          cascade = eo[:associations]
          eager_limit = nil

          if eo[:no_results]
            no_results = true
          elsif eo[:eager_block] || eo[:loader] == false || !use_placeholder_loader?
            ds = eager_loading_dataset(eo)

            strategy = ds.opts[:eager_limit_strategy] || strategy

            eager_limit =
              if el = ds.opts[:eager_limit]
                raise Error, "The :eager_limit dataset option is not supported for associations returning a single record" unless returns_array?
                strategy ||= true_eager_graph_limit_strategy
                if el.is_a?(Array)
                  el
                else
                  [el, nil]
                end
              else
                limit_and_offset
              end

            strategy = true_eager_graph_limit_strategy if strategy == :union
            # Correlated subqueries are not supported for regular eager loading
            strategy = :ruby if strategy == :correlated_subquery

            strategy = nil if strategy == :ruby && assign_singular?

            objects = apply_eager_limit_strategy(ds, strategy, eager_limit).all

            if strategy == :window_function
              delete_rn = ds.row_number_column
              objects.each{|obj| obj.values.delete(delete_rn)}
            end
          elsif strategy == :union
            objects = []
            ds = associated_dataset
            loader = union_eager_loader
            joiner = " UNION ALL "
            ids.each_slice(subqueries_per_union).each do |slice|
              objects.concat(ds.with_sql(slice.map{|k| loader.sql(*k)}.join(joiner)).to_a)
            end
            ds = ds.eager(cascade) if cascade
            ds.send(:post_load, objects)
          else
            loader = placeholder_eager_loader
            loader = loader.with_dataset{|dataset| dataset.eager(cascade)} if cascade
            objects = loader.all(ids)
          end

          Sequel.synchronize_with(eo[:mutex]){objects.each(&block)} unless no_results

          if strategy == :ruby
            apply_ruby_eager_limit_strategy(rows, eager_limit || limit_and_offset)
          end
        end

        # The key to use for the key hash when eager loading
        def eager_loader_key
          self[:eager_loader_key]
        end

        # By default associations do not need to select a key in an associated table
        # to eagerly load.
        def eager_loading_use_associated_key?
          false
        end

        # Whether to eagerly graph a lazy dataset, true by default.  If this
        # is false, the association won't respect the :eager_graph option
        # when loading the association for a single record.
        def eager_graph_lazy_dataset?
          true
        end

        # Whether additional conditions should be added when using the filter
        # by associations support.
        def filter_by_associations_add_conditions?
          self[:conditions] || self[:eager_block] || self[:limit]
        end

        # The expression to use for the additional conditions to be added for
        # the filter by association support, when the association itself is
        # filtered.  Works by using a subquery to test that the objects passed
        # also meet the association filter criteria.
        def filter_by_associations_conditions_expression(obj)
          ds = filter_by_associations_conditions_dataset.where(filter_by_associations_conditions_subquery_conditions(obj))
          {filter_by_associations_conditions_key=>ds}
        end

        # Finalize the association by first attempting to populate the thread-safe cache,
        # and then transferring the thread-safe cache value to the association itself,
        # so that a mutex is not needed to get the value.
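        # Typically invoked via Model.finalize_associations, after all
        # associations have been defined.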
def finalize return unless cache = self[:cache] finalizer = proc do |meth, key| next if has_key?(key) # Allow calling private methods to make sure caching is done appropriately send(meth) self[key] = cache.delete(key) if cache.has_key?(key) end finalize_settings.each(&finalizer) unless self[:instance_specific] finalizer.call(:associated_eager_dataset, :associated_eager_dataset) finalizer.call(:filter_by_associations_conditions_dataset, :filter_by_associations_conditions_dataset) end nil end # Map of methods to cache keys used for finalizing associations. FINALIZE_SETTINGS = { :associated_class=>:class, :associated_dataset=>:_dataset, :eager_limit_strategy=>:_eager_limit_strategy, :placeholder_loader=>:placeholder_loader, :predicate_key=>:predicate_key, :predicate_keys=>:predicate_keys, :reciprocal=>:reciprocal, }.freeze def finalize_settings FINALIZE_SETTINGS end # Whether to handle silent modification failure when adding/removing # associated records, false by default. def handle_silent_modification_failure? false end # Initialize the associations cache for the current association for the given objects. def initialize_association_cache(objects) name = self[:name] if assign_singular? objects.each{|object| object.associations[name] = nil} else objects.each{|object| object.associations[name] = []} end end # Show which type of reflection this is, and a guess at what code was used to create the # association. def inspect o = self[:orig_opts].dup o.delete(:class) o.delete(:class_name) o.delete(:block) unless o[:block] o[:class] = self[:orig_class] if self[:orig_class] "#<#{self.class} #{self[:model]}.#{self[:type]} #{self[:name].inspect}#{", #{o.inspect[1...-1]}" unless o.empty?}>" end # The limit and offset for this association (returned as a two element array). def limit_and_offset if (v = self[:limit]).is_a?(Array) v else [v, nil] end end # Whether the associated object needs a primary key to be added/removed, # false by default. def need_associated_primary_key? false end # A placeholder literalizer that can be used to lazily load the association. If # one can't be used, returns nil. def placeholder_loader if use_placeholder_loader? cached_fetch(:placeholder_loader) do Sequel::Dataset::PlaceholderLiteralizer.loader(associated_dataset) do |pl, ds| ds = ds.where(Sequel.&(*predicate_keys.map{|k| SQL::BooleanExpression.new(:'=', k, pl.arg)})) if self[:block] ds = self[:block].call(ds) end ds end end end end # The keys to use for loading of the regular dataset, as an array. def predicate_keys cached_fetch(:predicate_keys){Array(predicate_key)} end # The values that predicate_keys should match for objects to be associated. def predicate_key_values(object) predicate_key_methods.map{|k| object.get_column_value(k)} end # Qualify +col+ with the given table name. def qualify(table, col) transform(col) do |k| case k when Symbol, SQL::Identifier SQL::QualifiedIdentifier.new(table, k) else Sequel::Qualifier.new(table).transform(k) end end end # Qualify col with the associated model's table name. def qualify_assoc(col) qualify(associated_class.table_name, col) end # Qualify col with the current model's table name. def qualify_cur(col) qualify(self[:model].table_name, col) end # Returns the reciprocal association variable, if one exists. The reciprocal # association is the association in the associated class that is the opposite # of the current association. For example, Album.many_to_one :artist and # Artist.one_to_many :albums are reciprocal associations. 
This information is used to populate reciprocal associations.
        # For example, when you do this_artist.add_album(album) it sets album.artist
        # to this_artist.
        def reciprocal
          cached_fetch(:reciprocal) do
            possible_recips = []

            associated_class.all_association_reflections.each do |assoc_reflect|
              if reciprocal_association?(assoc_reflect)
                possible_recips << assoc_reflect
              end
            end

            if possible_recips.length == 1
              cached_set(:reciprocal_type, possible_recips.first[:type]) if ambiguous_reciprocal_type?
              possible_recips.first[:name]
            end
          end
        end

        # Whether the reciprocal of this association returns an array of objects instead of a single object,
        # true by default.
        def reciprocal_array?
          true
        end

        # Name symbol for the remove_all_ association method
        def remove_all_method
          self[:remove_all_method]
        end

        # Whether associated objects need to be removed from the association before
        # being destroyed in order to preserve referential integrity.
        def remove_before_destroy?
          true
        end

        # Name symbol for the remove_ association method
        def remove_method
          self[:remove_method]
        end

        # Whether to check that an object to be disassociated is already associated to this object, false by default.
        def remove_should_check_existing?
          false
        end

        # Whether this association returns an array of objects instead of a single object,
        # true by default.
        def returns_array?
          true
        end

        # The columns to select when loading the association.
        def select
          self[:select]
        end

        # Whether to set the reciprocal association to self when loading associated
        # records, false by default.
        def set_reciprocal_to_self?
          false
        end

        # Name symbol for the setter association method
        def setter_method
          self[:setter_method]
        end

        # The range used for slicing when using the :ruby eager limit strategy.
        def slice_range(limit_and_offset = limit_and_offset())
          limit, offset = limit_and_offset
          if limit || offset
            (offset||0)..(limit ? (offset||0)+limit-1 : -1)
          end
        end

        private

        # If the key exists in the reflection hash, return it.
        # If the key doesn't exist and association reflections are uncached, then yield to get the value.
        # If the key doesn't exist and association reflections are cached, check the cache and return
        # the value if present, or yield to get the value, cache the value, and return it.
        def cached_fetch(key)
          fetch(key) do
            return yield unless h = self[:cache]
            Sequel.synchronize{return h[key] if h.has_key?(key)}
            value = yield
            Sequel.synchronize{h[key] = value}
          end
        end

        # Cache the value at the given key if caching.
        def cached_set(key, value)
          return unless h = self[:cache]
          Sequel.synchronize{h[key] = value}
        end

        # The base dataset used for the association, before any order/conditions
        # options have been applied.
        def _associated_dataset
          associated_class.dataset
        end

        # Whether the reciprocal type for the given association cannot be
        # known in advance, false by default.
        def ambiguous_reciprocal_type?
          false
        end

        # Apply a limit strategy to the given dataset so that filter by
        # associations works with a limited dataset.
        def apply_filter_by_associations_limit_strategy(ds)
          case filter_by_associations_limit_strategy
          when :distinct_on
            apply_filter_by_associations_distinct_on_limit_strategy(ds)
          when :window_function
            apply_filter_by_associations_window_function_limit_strategy(ds)
          else
            ds
          end
        end

        # Apply a distinct on eager limit strategy using IN with a subquery
        # that uses DISTINCT ON to ensure only the first matching record for
        # each key is included.
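        # Roughly: WHERE key IN (SELECT DISTINCT ON (key) key FROM ...
        # ORDER BY key, order), on databases supporting DISTINCT ON.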
        def apply_filter_by_associations_distinct_on_limit_strategy(ds)
          k = filter_by_associations_limit_key
          ds.where(k=>apply_distinct_on_eager_limit_strategy(associated_eager_dataset.select(*k)))
        end

        # Apply a distinct on eager limit strategy using IN with a subquery
        # that uses a filter on the row_number window function to ensure
        # that only rows inside the limit are returned.
        def apply_filter_by_associations_window_function_limit_strategy(ds)
          ds.where(filter_by_associations_limit_key=>apply_window_function_eager_limit_strategy(associated_eager_dataset.select(*filter_by_associations_limit_alias_key)).select(*filter_by_associations_limit_aliases))
        end

        # The associated_dataset with the eager_block callback already applied.
        def associated_eager_dataset
          cached_fetch(:associated_eager_dataset) do
            ds = associated_dataset.unlimited
            if block = self[:eager_block]
              ds = block.call(ds)
            end
            ds
          end
        end

        # The dataset to use for eager loading associated objects for multiple current objects,
        # given the hash passed to the eager loader.
        def eager_loading_dataset(eo=OPTS)
          ds = eo[:dataset] || associated_eager_dataset
          ds = eager_loading_set_predicate_condition(ds, eo)
          if associations = eo[:associations]
            ds = ds.eager(associations)
          end
          if block = eo[:eager_block]
            orig_ds = ds
            ds = block.call(ds)
          end
          if eager_loading_use_associated_key?
            ds = if ds.opts[:eager_graph] && !orig_ds.opts[:eager_graph]
              block.call(orig_ds.select_append(*associated_key_array))
            else
              ds.select_append(*associated_key_array)
            end
          end
          if self[:eager_graph]
            raise(Error, "cannot eagerly load a #{self[:type]} association that uses :eager_graph") if eager_loading_use_associated_key?
            ds = ds.eager_graph(self[:eager_graph])
          end
          ds
        end

        # The default eager limit strategy to use for this association
        def default_eager_limit_strategy
          self[:model].default_eager_limit_strategy || :ruby
        end

        # Set the predicate condition for the eager loading dataset based on the id map
        # in the eager loading options.
        def eager_loading_set_predicate_condition(ds, eo)
          if id_map = eo[:id_map]
            ds = ds.where(eager_loading_predicate_condition(id_map.keys))
          end

          ds
        end

        # The predicate condition to use for the eager_loader.
        def eager_loading_predicate_condition(keys)
          {predicate_key=>keys}
        end

        # Add conditions to the dataset to not include NULL values for
        # the associated keys, and select those keys.
        def filter_by_associations_add_conditions_dataset_filter(ds)
          k = filter_by_associations_conditions_associated_keys
          ds.select(*k).where(Sequel.negate(k.zip([])))
        end

        # The conditions to add to the filter by associations conditions
        # subquery to restrict it to the object(s) used as the
        # filter value.
        def filter_by_associations_conditions_subquery_conditions(obj)
          key = qualify(associated_class.table_name, associated_class.primary_key)
          case obj
          when Array
            {key=>obj.map(&:pk)}
          when Sequel::Dataset
            {key=>obj.select(*Array(qualify(associated_class.table_name, associated_class.primary_key)))}
          else
            Array(key).zip(Array(obj.pk))
          end
        end

        # The base dataset to use for the filter by associations conditions
        # subquery, regardless of the objects that are passed in as filter
        # values.
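        # The result is cached via cached_fetch, as it does not depend on
        # the filter values passed in.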
def filter_by_associations_conditions_dataset cached_fetch(:filter_by_associations_conditions_dataset) do ds = associated_eager_dataset.unordered ds = filter_by_associations_add_conditions_dataset_filter(ds) ds = apply_filter_by_associations_limit_strategy(ds) ds end end # The strategy to use to filter by a limited association def filter_by_associations_limit_strategy v = fetch(:filter_limit_strategy, self[:eager_limit_strategy]) if v || self[:limit] || !returns_array? case v ||= self[:model].default_eager_limit_strategy when true, :union, :ruby # Can't use a union or ruby-based strategy for filtering by associations, switch to default eager graph limit # strategy. true_eager_graph_limit_strategy when Symbol v end end end # Whether to limit the associated dataset to a single row. def limit_to_single_row? !returns_array? end # Any offset to use for this association (or nil if there is no offset). def offset limit_and_offset.last end # A placeholder literalizer used to speed up eager loading. def placeholder_eager_loader cached_fetch(:placeholder_eager_loader) do Sequel::Dataset::PlaceholderLiteralizer.loader(associated_dataset) do |pl, ds| apply_eager_limit_strategy(eager_loading_dataset.where(predicate_key=>pl.arg), eager_limit_strategy) end end end # The reciprocal type as an array, should be overridden in reflection subclasses that # have ambiguous reciprocal types. def possible_reciprocal_types [reciprocal_type] end # Whether the given association reflection is possible reciprocal # association for the current association reflection. def reciprocal_association?(assoc_reflect) possible_reciprocal_types.include?(assoc_reflect[:type]) && (begin; assoc_reflect.associated_class; rescue NameError; end) == self[:model] && assoc_reflect[:conditions].nil? && assoc_reflect[:block].nil? end # The number of subqueries to use in each union query, used to eagerly load # limited associations. Defaults to 40, the optimal number depends on the # latency between the database and the application. def subqueries_per_union self[:subqueries_per_union] || 40 end # If +s+ is an array, map +s+ over the block. Otherwise, just call the # block with +s+. def transform(s, &block) s.is_a?(Array) ? s.map(&block) : (yield s) end # What eager limit strategy should be used when true is given as the value, # defaults to UNION as that is the fastest strategy if the appropriate keys are indexed. def true_eager_limit_strategy if self[:eager_graph] || (offset && !associated_dataset.supports_offsets_in_correlated_subqueries?) # An SQL-based approach won't work if you are also eager graphing, # so use a ruby based approach in that case. :ruby else :union end end # The eager_graph limit strategy used when true is given as the value, choosing the # best strategy based on what the database supports. def true_eager_graph_limit_strategy if associated_class.dataset.supports_window_functions? :window_function else :ruby end end # A placeholder literalizer used to speed up the creation of union queries when eager # loading a limited association. def union_eager_loader cached_fetch(:union_eager_loader) do Sequel::Dataset::PlaceholderLiteralizer.loader(associated_dataset) do |pl, ds| ds = self[:eager_block].call(ds) if self[:eager_block] keys = predicate_keys ds = ds.where(keys.map{pl.arg}.zip(keys)) if eager_loading_use_associated_key? ds = ds.select_append(*associated_key_array) end ds.from_self end end end # Whether the placeholder loader can be used to load the association. def use_placeholder_loader? 
self[:use_placeholder_loader] && _associated_dataset.supports_placeholder_literalizer? end end class ManyToOneAssociationReflection < AssociationReflection ASSOCIATION_TYPES[:many_to_one] = self # many_to_one associations can only have associated objects if none of # the :keys options have a nil value. def can_have_associated_objects?(obj) !self[:keys].any?{|k| obj.get_column_value(k).nil?} end # Whether the dataset needs a primary key to function, false for many_to_one associations. def dataset_need_primary_key? false end # Default foreign key name symbol for foreign key in current model's table that points to # the given association's table's primary key. def default_key :"#{self[:name]}_id" end # Whether to eagerly graph a lazy dataset, true for many_to_one associations # only if the key is nil. def eager_graph_lazy_dataset? self[:key].nil? end # many_to_one associations don't need an eager_graph limit strategy def eager_graph_limit_strategy(_) nil end # many_to_one associations don't need an eager limit strategy def eager_limit_strategy nil end # many_to_one associations don't need a filter by associations limit strategy def filter_by_associations_limit_strategy nil end FINALIZE_SETTINGS = superclass::FINALIZE_SETTINGS.merge( :primary_key=>:primary_key, :primary_keys=>:primary_keys, :primary_key_method=>:primary_key_method, :primary_key_methods=>:primary_key_methods, :qualified_primary_key=>:qualified_primary_key, :reciprocal_type=>:reciprocal_type ).freeze def finalize_settings FINALIZE_SETTINGS end # The expression to use on the left hand side of the IN lookup when eager loading def predicate_key cached_fetch(:predicate_key){qualified_primary_key} end # The column(s) in the associated table that the key in the current table references (either a symbol or an array). def primary_key cached_fetch(:primary_key){associated_class.primary_key || raise(Error, "no primary key specified for #{associated_class.inspect}")} end # The columns in the associated table that the key in the current table references (always an array). def primary_keys cached_fetch(:primary_keys){Array(primary_key)} end alias associated_object_keys primary_keys # The method symbol or array of method symbols to call on the associated object # to get the value to use for the foreign keys. def primary_key_method cached_fetch(:primary_key_method){primary_key} end # The array of method symbols to call on the associated object # to get the value to use for the foreign keys. def primary_key_methods cached_fetch(:primary_key_methods){Array(primary_key_method)} end # #primary_key qualified by the associated table def qualified_primary_key cached_fetch(:qualified_primary_key){self[:qualify] == false ? primary_key : qualify_assoc(primary_key)} end # True only if the reciprocal is a one_to_many association. def reciprocal_array? !set_reciprocal_to_self? end # Whether this association returns an array of objects instead of a single object, # false for a many_to_one association. def returns_array? false end # True only if the reciprocal is a one_to_one association. def set_reciprocal_to_self? reciprocal reciprocal_type == :one_to_one end private # Reciprocals of many_to_one associations could be either one_to_many or one_to_one, # and which is not known in advance. def ambiguous_reciprocal_type? 
true end def filter_by_associations_conditions_associated_keys qualify(associated_class.table_name, primary_keys) end def filter_by_associations_conditions_key qualify(self[:model].table_name, self[:key_column]) end # many_to_one associations do not need to be limited to a single row if they # explicitly do not have a key. def limit_to_single_row? super && self[:key] end def predicate_key_methods self[:keys] end # The reciprocal type of a many_to_one association is either # a one_to_many or a one_to_one association. def possible_reciprocal_types [:one_to_many, :one_to_one] end # Whether the given association reflection is possible reciprocal def reciprocal_association?(assoc_reflect) super && self[:keys] == assoc_reflect[:keys] && primary_key == assoc_reflect.primary_key end # The reciprocal type of a many_to_one association is either # a one_to_many or a one_to_one association, look in the associated class # to try to figure out which. def reciprocal_type cached_fetch(:reciprocal_type) do possible_recips = [] associated_class.all_association_reflections.each do |assoc_reflect| if reciprocal_association?(assoc_reflect) possible_recips << assoc_reflect end end if possible_recips.length == 1 possible_recips.first[:type] else possible_reciprocal_types end end end end class OneToManyAssociationReflection < AssociationReflection ASSOCIATION_TYPES[:one_to_many] = self # Support a correlated subquery limit strategy when using eager_graph. def apply_eager_graph_limit_strategy(strategy, ds) case strategy when :correlated_subquery apply_correlated_subquery_limit_strategy(ds) else super end end # The keys in the associated model's table related to this association def associated_object_keys self[:keys] end # one_to_many associations can only have associated objects if none of # the :keys options have a nil value. def can_have_associated_objects?(obj) !self[:primary_keys].any?{|k| obj.get_column_value(k).nil?} end # one_to_many and one_to_one associations can be clones def cloneable?(ref) ref[:type] == :one_to_many || ref[:type] == :one_to_one end # Default foreign key name symbol for key in associated table that points to # current table's primary key. def default_key :"#{underscore(demodulize(self[:model].name))}_id" end FINALIZE_SETTINGS = superclass::FINALIZE_SETTINGS.merge( :qualified_primary_key=>:qualified_primary_key ).freeze def finalize_settings FINALIZE_SETTINGS end # Handle silent failure of add/remove methods if raise_on_save_failure is false. def handle_silent_modification_failure? self[:raise_on_save_failure] == false end # The hash key to use for the eager loading predicate (left side of IN (1, 2, 3)) def predicate_key cached_fetch(:predicate_key){qualify_assoc(self[:key])} end alias qualified_key predicate_key # The column in the current table that the key in the associated table references. def primary_key self[:primary_key] end # #primary_key qualified by the current table def qualified_primary_key cached_fetch(:qualified_primary_key){qualify_cur(primary_key)} end # Whether the reciprocal of this association returns an array of objects instead of a single object, # false for a one_to_many association. def reciprocal_array? false end # Destroying one_to_many associated objects automatically deletes the foreign key. def remove_before_destroy? false end # The one_to_many association needs to check that an object to be removed already is associated. def remove_should_check_existing? true end # One to many associations set the reciprocal to self when loading associated records. 
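        # This populates the reciprocal many_to_one association on each
        # loaded record without requiring an additional query.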
        def set_reciprocal_to_self?
          true
        end

        private

        # Use a correlated subquery to limit the dataset.  Note that this will not
        # work correctly if the associated dataset uses qualified identifiers in the WHERE clause,
        # as they would reference the containing query instead of the subquery.
        def apply_correlated_subquery_limit_strategy(ds)
          table = ds.first_source_table
          table_alias = ds.first_source_alias
          primary_key = associated_class.primary_key
          key = self[:key]
          cs_alias = :t1
          cs = associated_dataset.
            from(Sequel.as(table, :t1)).
            select(*qualify(cs_alias, primary_key)).
            where(Array(qualify(cs_alias, key)).zip(Array(qualify(table_alias, key)))).
            limit(*limit_and_offset)
          ds.where(qualify(table_alias, primary_key)=>cs)
        end

        # Support correlated subquery strategy when filtering by limited associations.
        def apply_filter_by_associations_limit_strategy(ds)
          case filter_by_associations_limit_strategy
          when :correlated_subquery
            apply_correlated_subquery_limit_strategy(ds)
          else
            super
          end
        end

        def filter_by_associations_conditions_associated_keys
          qualify(associated_class.table_name, self[:keys])
        end

        def filter_by_associations_conditions_key
          qualify(self[:model].table_name, self[:primary_key_column])
        end

        def filter_by_associations_limit_alias_key
          Array(filter_by_associations_limit_key)
        end

        def filter_by_associations_limit_aliases
          filter_by_associations_limit_alias_key.map(&:column)
        end

        def filter_by_associations_limit_key
          qualify(associated_class.table_name, associated_class.primary_key)
        end

        def predicate_key_methods
          self[:primary_keys]
        end

        def reciprocal_association?(assoc_reflect)
          super && self[:keys] == assoc_reflect[:keys] && primary_key == assoc_reflect.primary_key
        end

        # The reciprocal type of a one_to_many association is a many_to_one association.
        def reciprocal_type
          :many_to_one
        end

        # Support automatic use of correlated subqueries if :ruby option is best available option,
        # the database supports them, and either the associated class has a non-composite primary key
        # or the database supports multiple columns in IN.
        def true_eager_graph_limit_strategy
          r = super
          ds = associated_dataset
          if r == :ruby && ds.supports_limits_in_correlated_subqueries? && (Array(associated_class.primary_key).length == 1 || ds.supports_multiple_column_in?) && (!offset || ds.supports_offsets_in_correlated_subqueries?)
            :correlated_subquery
          else
            r
          end
        end
      end

      # Methods that turn an association that returns multiple objects into an association that
      # returns a single object.
      module SingularAssociationReflection
        # Singular associations do not assign singular if they are using the ruby eager limit strategy
        # and have a slice range, since they need to store the array of associated objects in order to
        # pick the correct one with an offset.
        def assign_singular?
          super && (eager_limit_strategy != :ruby || !slice_range)
        end

        # Add conditions when filtering by singular associations with orders, since the
        # underlying relationship is probably not one-to-one.
        def filter_by_associations_add_conditions?
          super || self[:order] || self[:eager_limit_strategy] || self[:filter_limit_strategy]
        end

        # Make sure singular associations always have 1 as the limit
        def limit_and_offset
          r = super
          if r.first == 1
            r
          else
            [1, r[1]]
          end
        end

        # Singular associations always return a single object, not an array.
        def returns_array?
          false
        end

        private

        # Only use an eager limit strategy by default if there is an offset or an order.
      def default_eager_limit_strategy
        super if self[:order] || offset
      end

      # Use a strategy for filtering by associations if there is an order or an offset,
      # or a specific limiting strategy has been specified.
      def filter_by_associations_limit_strategy
        super if self[:order] || offset || self[:eager_limit_strategy] || self[:filter_limit_strategy]
      end

      # When the eager limit strategy is chosen automatically (the true value),
      # use the DISTINCT ON eager limit strategy if the database supports it
      # and the association does not use an offset.
      def true_eager_graph_limit_strategy
        if associated_class.dataset.supports_ordered_distinct_on? && !offset
          :distinct_on
        else
          super
        end
      end
    end

    class OneToOneAssociationReflection < OneToManyAssociationReflection
      ASSOCIATION_TYPES[:one_to_one] = self
      include SingularAssociationReflection
    end

    class ManyToManyAssociationReflection < AssociationReflection
      ASSOCIATION_TYPES[:many_to_many] = self

      # The alias to use for the associated key when eagerly loading
      def associated_key_alias
        self[:left_key_alias]
      end

      # Array of associated keys used when eagerly loading.
      def associated_key_array
        cached_fetch(:associated_key_array) do
          if self[:uses_left_composite_keys]
            associated_key_alias.zip(predicate_keys).map{|a, k| SQL::AliasedExpression.new(k, a)}
          else
            [SQL::AliasedExpression.new(predicate_key, associated_key_alias)]
          end
        end
      end

      # The column to use for the associated key when eagerly loading
      def associated_key_column
        self[:left_key]
      end

      # Alias of right_primary_keys
      def associated_object_keys
        right_primary_keys
      end

      # many_to_many associations can only have associated objects if none of
      # the :left_primary_keys options have a nil value.
      def can_have_associated_objects?(obj)
        !self[:left_primary_keys].any?{|k| obj.get_column_value(k).nil?}
      end

      # one_through_one and many_to_many associations can be clones
      def cloneable?(ref)
        ref[:type] == :many_to_many || ref[:type] == :one_through_one
      end

      # The default associated key alias(es) to use when eager loading
      # associations via eager.
      def default_associated_key_alias
        self[:uses_left_composite_keys] ? (0...self[:left_keys].length).map{|i| :"x_foreign_key_#{i}_x"} : :x_foreign_key_x
      end

      # The default eager loader used if the user doesn't override it. Extracted
      # to a method so the code can be shared with the many_through_many plugin.
      def default_eager_loader(eo)
        h = eo[:id_map]
        assign_singular = assign_singular?
        delete_rn = delete_row_number_column
        uses_lcks = self[:uses_left_composite_keys]
        left_key_alias = self[:left_key_alias]
        name = self[:name]

        self[:model].eager_load_results(self, eo) do |assoc_record|
          assoc_record.values.delete(delete_rn) if delete_rn
          hash_key = if uses_lcks
            left_key_alias.map{|k| assoc_record.values.delete(k)}
          else
            assoc_record.values.delete(left_key_alias)
          end

          objects = h[hash_key]

          if assign_singular
            objects.each do |object|
              object.associations[name] ||= assoc_record
            end
          else
            objects.each do |object|
              object.associations[name].push(assoc_record)
            end
          end
        end
      end

      # Default name symbol for the join table.
      def default_join_table
        [self[:class_name], self[:model].name].map{|i| underscore(pluralize(demodulize(i)))}.sort.join('_').to_sym
      end

      # Default foreign key name symbol for key in join table that points to
      # current table's primary key (or :left_primary_key column).
      def default_left_key
        :"#{underscore(demodulize(self[:model].name))}_id"
      end

      # Default foreign key name symbol for foreign key in join table that points to
      # the association's table's primary key (or :right_primary_key column).
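      # For example, a many_to_many association named :genres would use
      # :genre_id by default (an illustrative name only).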
      def default_right_key
        :"#{singularize(self[:name])}_id"
      end

      FINALIZE_SETTINGS = superclass::FINALIZE_SETTINGS.merge(
        :associated_key_array=>:associated_key_array,
        :qualified_right_key=>:qualified_right_key,
        :join_table_source=>:join_table_source,
        :join_table_alias=>:join_table_alias,
        :qualified_right_primary_key=>:qualified_right_primary_key,
        :right_primary_key=>:right_primary_key,
        :right_primary_keys=>:right_primary_keys,
        :right_primary_key_method=>:right_primary_key_method,
        :right_primary_key_methods=>:right_primary_key_methods,
        :select=>:select
      ).freeze
      def finalize_settings
        FINALIZE_SETTINGS
      end

      # The hash key to use for the eager loading predicate (left side of IN (1, 2, 3)).
      # The left key qualified by the join table.
      def predicate_key
        cached_fetch(:predicate_key){qualify(join_table_alias, self[:left_key])}
      end
      alias qualified_left_key predicate_key

      # The right key qualified by the join table.
      def qualified_right_key
        cached_fetch(:qualified_right_key){qualify(join_table_alias, self[:right_key])}
      end

      # many_to_many associations need to select a key in an associated table to eagerly load
      def eager_loading_use_associated_key?
        !separate_query_per_table?
      end

      # The source of the join table. This is the join table itself, unless it
      # is aliased, in which case it is the unaliased part.
      def join_table_source
        cached_fetch(:join_table_source){split_join_table_alias[0]}
      end

      # The join table itself, unless it is aliased, in which case this
      # is the alias.
      def join_table_alias
        cached_fetch(:join_table_alias) do
          s, a = split_join_table_alias
          a || s
        end
      end
      alias associated_key_table join_table_alias

      # Whether the associated object needs a primary key to be added/removed,
      # true for many_to_many associations.
      def need_associated_primary_key?
        true
      end

      # #right_primary_key qualified by the associated table
      def qualified_right_primary_key
        cached_fetch(:qualified_right_primary_key){qualify_assoc(right_primary_key)}
      end

      # The primary key column(s) to use in the associated table (can be symbol or array).
      def right_primary_key
        cached_fetch(:right_primary_key){associated_class.primary_key || raise(Error, "no primary key specified for #{associated_class.inspect}")}
      end

      # The primary key columns to use in the associated table (always array).
      def right_primary_keys
        cached_fetch(:right_primary_keys){Array(right_primary_key)}
      end

      # The method symbol or array of method symbols to call on the associated objects
      # to get the foreign key values for the join table.
      def right_primary_key_method
        cached_fetch(:right_primary_key_method){right_primary_key}
      end

      # The array of method symbols to call on the associated objects
      # to get the foreign key values for the join table.
      def right_primary_key_methods
        cached_fetch(:right_primary_key_methods){Array(right_primary_key_method)}
      end

      # The columns to select when loading the association, associated_class.table_name.* by default.
      def select
        cached_fetch(:select){default_select}
      end

      # Whether a separate query should be used for the join table.
      def separate_query_per_table?
        self[:join_table_db]
      end

      private

      # Join to the join table, unless using a separate query per table.
      def _associated_dataset
        if separate_query_per_table?
          super
        else
          super.inner_join(self[:join_table], self[:right_keys].zip(right_primary_keys), :qualify=>:deep)
        end
      end

      # Use the right_keys from the eager loading options if
      # using a separate query per table.
      def eager_loading_set_predicate_condition(ds, eo)
        if separate_query_per_table?
          ds.where(right_primary_key=>eo[:right_keys])
        else
          super
        end
      end

      # The default selection for associations that require joins. These do not use the default
      # model selection unless all entries in the select are explicitly qualified identifiers, as
      # otherwise it can include unqualified columns which would be made ambiguous by joining.
      def default_select
        if (sel = associated_class.dataset.opts[:select]) && sel.all?{|c| selection_is_qualified?(c)}
          sel
        else
          Sequel::SQL::ColumnAll.new(associated_class.table_name)
        end
      end

      def filter_by_associations_conditions_associated_keys
        qualify(join_table_alias, self[:left_keys])
      end

      def filter_by_associations_conditions_key
        qualify(self[:model].table_name, self[:left_primary_key_column])
      end

      def filter_by_associations_limit_alias_key
        aliaz = 'a'
        filter_by_associations_limit_key.map{|c| c.as(Sequel.identifier(aliaz = aliaz.next))}
      end

      def filter_by_associations_limit_aliases
        filter_by_associations_limit_alias_key.map(&:alias)
      end

      def filter_by_associations_limit_key
        qualify(join_table_alias, self[:left_keys]) + Array(qualify(associated_class.table_name, associated_class.primary_key))
      end

      def predicate_key_methods
        self[:left_primary_keys]
      end

      def reciprocal_association?(assoc_reflect)
        super && assoc_reflect[:left_keys] == self[:right_keys] &&
          assoc_reflect[:right_keys] == self[:left_keys] &&
          assoc_reflect[:join_table] == self[:join_table] &&
          right_primary_keys == assoc_reflect[:left_primary_key_columns] &&
          self[:left_primary_key_columns] == assoc_reflect.right_primary_keys
      end

      def reciprocal_type
        :many_to_many
      end

      # Whether the given expression represents a qualified identifier. Used to determine if it is
      # OK to use directly when joining.
      def selection_is_qualified?(c)
        case c
        when Symbol
          Sequel.split_symbol(c)[0]
        when Sequel::SQL::QualifiedIdentifier
          true
        when Sequel::SQL::AliasedExpression
          selection_is_qualified?(c.expression)
        else
          false
        end
      end

      # Split the join table into source and alias parts.
      def split_join_table_alias
        associated_class.dataset.split_alias(self[:join_table])
      end
    end

    class OneThroughOneAssociationReflection < ManyToManyAssociationReflection
      ASSOCIATION_TYPES[:one_through_one] = self
      include SingularAssociationReflection

      # one_through_one associations should not singularize the association name when
      # creating the foreign key.
      def default_right_key
        :"#{self[:name]}_id"
      end

      # one_through_one associations have no reciprocals
      def reciprocal
        nil
      end
    end

    # This module contains methods added to all association datasets
    module AssociationDatasetMethods
      # The model object that created the association dataset
      def model_object
        @opts[:model_object]
      end

      # The association reflection related to the association dataset
      def association_reflection
        @opts[:association_reflection]
      end

      private

      def non_sql_option?(key)
        super || key == :model_object || key == :association_reflection
      end
    end

    # Each kind of association adds a number of instance methods to the model class which
    # are specialized according to the association type and optional parameters
    # given in the definition. Example:
    #
    #   class Project < Sequel::Model
    #     many_to_one :portfolio
    #     # or: one_to_one :portfolio
    #     one_to_many :milestones
    #     # or: many_to_many :milestones
    #   end
    #
    # The project class now has the following instance methods:
    # portfolio :: Returns the associated portfolio.
    # portfolio=(obj) :: Sets the associated portfolio to the object,
    #                    but the change is not persisted until you save the record (for many_to_one associations).
    # portfolio_dataset :: Returns a dataset that would return the associated
    #                      portfolio, only useful in fairly specific circumstances.
    # milestones :: Returns an array of associated milestones
    # add_milestone(obj) :: Associates the passed milestone with this object.
    # remove_milestone(obj) :: Removes the association with the passed milestone.
    # remove_all_milestones :: Removes associations with all associated milestones.
    # milestones_dataset :: Returns a dataset that would return the associated
    #                       milestones, allowing for further filtering/limiting/etc.
    #
    # If you want to override the behavior of the add_/remove_/remove_all_ methods
    # or the association setter method, use the :adder, :remover, :clearer, and/or :setter
    # options. These options override the default behavior.
    #
    # By default the classes for the associations are inferred from the association
    # name, so for example Project#portfolio will return an instance of
    # Portfolio, and Project#milestones will return an array of Milestone
    # instances. You can use the :class option to change which class is used.
    #
    # Association definitions are also reflected by the class, e.g.:
    #
    #   Project.associations
    #   # => [:portfolio, :milestones]
    #   Project.association_reflection(:portfolio)
    #   # => #<Sequel::Model::Associations::ManyToOneAssociationReflection Project.many_to_one :portfolio>
    #
    # Associations should not have the same names as any of the columns in the
    # model's current table they reference. If you are dealing with an existing schema that
    # has a column named status, you can't name the association status, you'd
    # have to name it foo_status or something else. If you give an association the same name
    # as a column, you will probably end up with an association that doesn't work, or a SystemStackError.
    #
    # For a more in depth general overview, as well as a reference guide,
    # see the {Association Basics guide}[rdoc-ref:doc/association_basics.rdoc].
    # For examples of advanced usage, see the {Advanced Associations guide}[rdoc-ref:doc/advanced_associations.rdoc].
    module ClassMethods
      # All association reflections defined for this model (default: {}).
      attr_reader :association_reflections

      # Hash with column symbol keys and arrays of many_to_one
      # association symbols that should be cleared when the column
      # value changes.
      attr_reader :autoreloading_associations

      # Whether association metadata should be cached in the association reflection. If not cached, it will be computed
      # on demand. In general you only want to set this to false when using code reloading. When using code reloading,
      # setting this will make sure that if an associated class is removed or modified, this class will not have a reference to
      # the previous class.
      attr_accessor :cache_associations

      # The default options to use for all associations. This hash is merged into the association reflection hash for
      # all association reflections.
      attr_accessor :default_association_options

      # The default options to use for all associations of a given type. This is a hash keyed by association type
      # symbol. If there is a value for the association type symbol key, the resulting hash will be merged into the
      # association reflection hash for all association reflections of that type.
      attr_accessor :default_association_type_options

      # The default :eager_limit_strategy option to use for limited or offset associations (default: true, causing Sequel
      # to use what it considers the most appropriate strategy).
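      #
      # For example, to make all models prefer the window function strategy
      # where the database supports it (a sketch; any supported strategy
      # symbol could be assigned):
      #
      #   Sequel::Model.default_eager_limit_strategy = :window_function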
      attr_accessor :default_eager_limit_strategy

      # Array of all association reflections for this model class
      def all_association_reflections
        association_reflections.values
      end

      # Associates a related model with the current model. The following types are
      # supported:
      #
      # :many_to_one :: Foreign key in current model's table points to
      #                 associated model's primary key. Each associated model object can
      #                 be associated with more than one current model objects. Each current
      #                 model object can be associated with only one associated model object.
      # :one_to_many :: Foreign key in associated model's table points to this
      #                 model's primary key. Each current model object can be associated with
      #                 more than one associated model objects. Each associated model object
      #                 can be associated with only one current model object.
      # :one_through_one :: Similar to many_to_many in terms of foreign keys, but only one object
      #                     is associated to the current object through the association.
      #                     Provides only getter methods, no setter or modification methods.
      # :one_to_one :: Similar to one_to_many in terms of foreign keys, but
      #                only one object is associated to the current object through the
      #                association. The methods created are similar to many_to_one, except
      #                that the one_to_one setter method saves the passed object.
      # :many_to_many :: A join table is used that has a foreign key that points
      #                  to this model's primary key and a foreign key that points to the
      #                  associated model's primary key. Each current model object can be
      #                  associated with many associated model objects, and each associated
      #                  model object can be associated with many current model objects.
      #
      # The following options can be supplied:
      # === Multiple Types
      # :adder :: Proc used to define the private _add_* method for doing the database work
      #           to associate the given object to the current object (*_to_many associations).
      #           Set to nil to not define an add_* method for the association.
      # :after_add :: Symbol, Proc, or array of both/either specifying a callback to call
      #               after a new item is added to the association.
      # :after_load :: Symbol, Proc, or array of both/either specifying a callback to call
      #                after the associated record(s) have been retrieved from the database.
      # :after_remove :: Symbol, Proc, or array of both/either specifying a callback to call
      #                  after an item is removed from the association.
      # :after_set :: Symbol, Proc, or array of both/either specifying a callback to call
      #               after an item is set using the association setter method.
      # :allow_eager :: If set to false, you cannot load the association eagerly
      #                 via eager or eager_graph.
      # :allow_eager_graph :: If set to false, you cannot load the association eagerly via eager_graph.
      # :allow_filtering_by :: If set to false, you cannot use the association when filtering.
      # :before_add :: Symbol, Proc, or array of both/either specifying a callback to call
      #                before a new item is added to the association.
      # :before_remove :: Symbol, Proc, or array of both/either specifying a callback to call
      #                   before an item is removed from the association.
      # :before_set :: Symbol, Proc, or array of both/either specifying a callback to call
      #                before an item is set using the association setter method.
      # :cartesian_product_number :: the number of joins completed by this association that could cause more
      #                              than one row for each row in the current table (default: 0 for
      #                              many_to_one, one_to_one, and one_through_one associations, 1
      #                              for one_to_many and many_to_many associations).
      # :class :: The associated class or its name as a string or symbol. If not
      #           given, uses the association's name, which is camelized (and
      #           singularized unless the type is :many_to_one, :one_to_one, or :one_through_one). If this is specified
      #           as a string or symbol, you must specify the full class name (e.g. "::SomeModule::MyModel").
      # :class_namespace :: If :class is given as a string or symbol, sets the default namespace in which to look for
      #                     the class. <tt>class: 'Foo', class_namespace: 'Bar'</tt> looks for <tt>::Bar::Foo</tt>.
      # :clearer :: Proc used to define the private _remove_all_* method for doing the database work
      #             to remove all objects associated to the current object (*_to_many associations).
      #             Set to nil to not define a remove_all_* method for the association.
      # :clone :: Merge the current options and block into the options and block used in defining
      #           the given association. Can be used to DRY up a bunch of similar associations that
      #           all share the same options such as :class and :key, while changing the order and block used.
      # :conditions :: The conditions to use to filter the association, can be any argument passed to where.
      #                This option is not respected when using eager_graph or association_join, unless it
      #                is a hash or array of two element arrays. Consider also specifying the :graph_block
      #                option if the value for this option is not a hash or array of two element arrays
      #                and you plan to use this association in eager_graph or association_join.
      # :dataset :: A proc that is used to define the method to get the base dataset to use (before the other
      #             options are applied). If the proc accepts an argument, it is passed the related
      #             association reflection. It is a best practice to always have the dataset accept an argument
      #             and use the argument to return the appropriate dataset.
      # :distinct :: Use the DISTINCT clause when selecting associated objects, both when
      #              lazy loading and eager loading via .eager (but not when using .eager_graph).
      # :eager :: The associations to eagerly load via +eager+ when loading the associated object(s).
      # :eager_block :: If given, use the block instead of the default block when
      #                 eagerly loading. To not use a block when eager loading (when one is used normally),
      #                 set to nil.
      # :eager_graph :: The associations to eagerly load via +eager_graph+ when loading the associated object(s).
      #                 many_to_many associations with this option cannot be eagerly loaded via +eager+.
      # :eager_grapher :: A proc to use to implement eager loading via +eager_graph+, overriding the default.
      #                   Takes an options hash with at least the entries :self (the receiver of the eager_graph call),
      #                   :table_alias (the alias to use for table to graph into the association), and :implicit_qualifier
      #                   (the alias that was used for the current table).
      #                   Should return a copy of the dataset with the association graphed into it.
      # :eager_limit_strategy :: Determines the strategy used for enforcing limits and offsets when eager loading
      #                          associations via the +eager+ method.
      # :eager_loader :: A proc to use to implement eager loading, overriding the default. Takes a single hash argument,
      #                  with at least the keys: :rows, which is an array of current model instances, :associations,
      #                  which is a hash of dependent associations, :self, which is the dataset doing the eager loading,
      #                  :eager_block, which is a dynamic callback that should be called with the dataset, and :id_map,
      #                  which is a mapping of key values to arrays of current model instances.
      #                  In the proc, the associated records should be queried from the database and the
      #                  associations cache for each record should be populated.
      # :eager_loader_key :: A symbol for the key column to use to populate the key_hash
      #                      for the eager loader. Can be set to nil to not populate the key_hash.
      # :extend :: A module or array of modules to extend the dataset with.
      # :filter_limit_strategy :: Determines the strategy used for enforcing limits and offsets when filtering by
      #                           limited associations. Possible options are :window_function, :distinct_on, or
      #                           :correlated_subquery depending on association type and database type.
      # :graph_alias_base :: The base name to use for the table alias when eager graphing. Defaults to the name
      #                      of the association. If the alias name has already been used in the query, Sequel will create
      #                      a unique alias by appending a numeric suffix (e.g. alias_0, alias_1, ...) until the alias is
      #                      unique.
      # :graph_block :: The block to pass to join_table when eagerly loading
      #                 the association via +eager_graph+.
      # :graph_conditions :: The additional conditions to use on the SQL join when eagerly loading
      #                      the association via +eager_graph+. Should be a hash or an array of two element arrays. If not
      #                      specified, the :conditions option is used if it is a hash or array of two element arrays.
      # :graph_join_type :: The type of SQL join to use when eagerly loading the association via
      #                     eager_graph. Defaults to :left_outer.
      # :graph_only_conditions :: The conditions to use on the SQL join when eagerly loading
      #                           the association via +eager_graph+, instead of the default conditions specified by the
      #                           foreign/primary keys. This option causes the :graph_conditions option to be ignored.
      # :graph_order :: the order to use when using eager_graph, instead of the default order. This should be used
      #                 in the case where :order contains an identifier qualified by the table's name, which may not match
      #                 the alias used when eager graphing. By setting this to the unqualified identifier, it will be
      #                 automatically qualified when using eager_graph.
      # :graph_select :: A column or array of columns to select from the associated table
      #                  when eagerly loading the association via +eager_graph+. Defaults to all
      #                  columns in the associated table.
      # :instance_specific :: Marks the association as instance specific. Should be used if the association block
      #                       uses instance specific state, or transient state (accessing current date/time, etc.).
      # :limit :: Limit the number of records to the provided value. Use
      #           an array with two elements for the value to specify a
      #           limit (first element) and an offset (second element).
      # :methods_module :: The module into which the methods created by the association are placed. Defaults
      #                    to the module containing the model's columns.
      # :no_association_method :: Do not add a method for the association. This can save memory if the association
      #                           method is never used.
      # :no_dataset_method :: Do not add a method for the association dataset. This can save memory if the dataset
      #                       method is never used.
      # :order :: the column(s) by which to order the association dataset. Can be a
      #           singular column symbol or an array of column symbols.
      # :order_eager_graph :: Whether to add the association's order to the graphed dataset's order when graphing
      #                       via +eager_graph+. Defaults to true, so set to false to disable.
      # :read_only :: Do not add a setter method (for many_to_one or one_to_one associations),
      #               or add_/remove_/remove_all_ methods (for one_to_many and many_to_many associations).
      # :reciprocal :: the symbol name of the reciprocal association,
      #                if it exists. By default, Sequel will try to determine it by looking at the
      #                associated model's associations for an association that matches
      #                the current association's key(s). Set to nil to not use a reciprocal.
      # :remover :: Proc used to define the private _remove_* method for doing the database work
      #             to remove the association between the given object and the current object (*_to_many associations).
      #             Set to nil to not define a remove_* method for the association.
      # :select :: the columns to select. Defaults to the associated class's table_name.* in an association
      #            that uses joins, which means it doesn't include the attributes from the
      #            join table. If you want to include the join table attributes, you can
      #            use this option, but beware that the join table attributes can clash with
      #            attributes from the model table, so you should alias any attributes that have
      #            the same name in both the join table and the associated table.
      # :setter :: Proc used to define the private _*= method for doing the work to set up the association
      #            between the given object and the current object (*_to_one associations).
      #            Set to nil to not define a setter method for the association.
      # :subqueries_per_union :: The number of subqueries to use in each UNION query, for eager
      #                          loading limited associations using the default :union strategy.
      # :validate :: Set to false to not validate when implicitly saving any associated object.
      # === :many_to_one
      # :key :: foreign key in current model's table that references
      #         associated model's primary key, as a symbol. Defaults to :"#{name}_id". Can use an
      #         array of symbols for a composite key association.
      # :key_column :: Similar to, and usually identical to, :key, but :key refers to the model method
      #                to call, where :key_column refers to the underlying column. Should only be
      #                used if the model method differs from the foreign key column, in conjunction
      #                with defining a model alias method for the key column.
      # :primary_key :: column in the associated table that :key option references, as a symbol.
      #                 Defaults to the primary key of the associated table. Can use an
      #                 array of symbols for a composite key association.
      # :primary_key_method :: the method symbol or array of method symbols to call on the associated
      #                        object to get the foreign key values. Defaults to :primary_key option.
      # :qualify :: Whether to use qualified primary keys when loading the association. The default
      #             is true, so you must set to false to not qualify. Qualification rarely causes
      #             problems, but it's necessary to disable in some cases, such as when you are doing
      #             a JOIN USING operation on the column on Oracle.
      # === :one_to_many and :one_to_one
      # :key :: foreign key in associated model's table that references
      #         current model's primary key, as a symbol. Defaults to
      #         :"#{self.name.underscore}_id". Can use an
      #         array of symbols for a composite key association.
      # :key_method :: the method symbol or array of method symbols to call on the associated
      #                object to get the foreign key values. Defaults to :key option.
      # :primary_key :: column in the current table that :key option references, as a symbol.
      #                 Defaults to primary key of the current table. Can use an
      #                 array of symbols for a composite key association.
      # :primary_key_column :: Similar to, and usually identical to, :primary_key, but :primary_key refers
      #                        to the model method to call, where :primary_key_column refers to the underlying column.
      #                        Should only be used if the model method differs from the primary key column, in
      #                        conjunction with defining a model alias method for the primary key column.
      # :raise_on_save_failure :: Do not raise exceptions for hook or validation failures when saving associated
      #                           objects in the add/remove methods (return nil instead) [one_to_many only].
      # === :many_to_many and :one_through_one
      # :graph_join_table_block :: The block to pass to +join_table+ for
      #                            the join table when eagerly loading the association via +eager_graph+.
      # :graph_join_table_conditions :: The additional conditions to use on the SQL join for
      #                                 the join table when eagerly loading the association via +eager_graph+.
      #                                 Should be a hash or an array of two element arrays.
      # :graph_join_table_join_type :: The type of SQL join to use for the join table when eagerly
      #                                loading the association via +eager_graph+. Defaults to the
      #                                :graph_join_type option or :left_outer.
      # :graph_join_table_only_conditions :: The conditions to use on the SQL join for the join
      #                                      table when eagerly loading the association via +eager_graph+,
      #                                      instead of the default conditions specified by the
      #                                      foreign/primary keys. This option causes the
      #                                      :graph_join_table_conditions option to be ignored.
      # :join_table :: name of table that includes the foreign keys to both
      #                the current model and the associated model, as a symbol. Defaults to the name
      #                of current model and name of associated model, pluralized,
      #                underscored, sorted, and joined with '_'.
      # :join_table_block :: proc that can be used to modify the dataset used in the add/remove/remove_all
      #                      methods. Should accept a dataset argument and return a modified dataset if present.
      # :join_table_db :: When retrieving records when using lazy loading or eager loading via +eager+, instead of
      #                   a join between the join table and the associated table, use a separate query for the
      #                   join table using the given Database object.
      # :left_key :: foreign key in join table that points to current model's
      #              primary key, as a symbol. Defaults to :"#{self.name.underscore}_id".
      #              Can use an array of symbols for a composite key association.
      # :left_primary_key :: column in current table that :left_key points to, as a symbol.
      #                      Defaults to primary key of current table. Can use an
      #                      array of symbols for a composite key association.
      # :left_primary_key_column :: Similar to, and usually identical to, :left_primary_key, but :left_primary_key refers to
      #                             the model method to call, where :left_primary_key_column refers to the underlying column. Should only
      #                             be used if the model method differs from the left primary key column, in conjunction
      #                             with defining a model alias method for the left primary key column.
      # :right_key :: foreign key in join table that points to associated
      #               model's primary key, as a symbol. Defaults to :"#{name.to_s.singularize}_id".
      #               Can use an array of symbols for a composite key association.
      # :right_primary_key :: column in associated table that :right_key points to, as a symbol.
      #                       Defaults to primary key of the associated table. Can use an
      #                       array of symbols for a composite key association.
      # :right_primary_key_method :: the method symbol or array of method symbols to call on the associated
      #                              object to get the foreign key values for the join table.
      #                              Defaults to :right_primary_key option.
      # :uniq :: Adds an after_load callback that makes the array of objects unique.
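      #
      # A brief example combining several of the options above (the model, table,
      # and column names here are hypothetical, used only for illustration):
      #
      #   class Album < Sequel::Model
      #     many_to_one :artist
      #     one_to_many :tracks, order: :number
      #     many_to_many :genres, left_key: :album_id, right_key: :genre_id,
      #       join_table: :albums_genres
      #   end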
def associate(type, name, opts = OPTS, &block) raise(Error, 'invalid association type') unless assoc_class = Sequel.synchronize{ASSOCIATION_TYPES[type]} raise(Error, 'Model.associate name argument must be a symbol') unless name.is_a?(Symbol) # dup early so we don't modify opts orig_opts = opts.dup if opts[:clone] cloned_assoc = association_reflection(opts[:clone]) remove_class_name = orig_opts[:class] && !orig_opts[:class_name] orig_opts = cloned_assoc[:orig_opts].merge(orig_opts) orig_opts.delete(:class_name) if remove_class_name end opts = Hash[default_association_options] if type_options = default_association_type_options[type] opts.merge!(type_options) end opts.merge!(orig_opts) opts.merge!(:type => type, :name => name, :cache=>({} if cache_associations), :model => self) opts[:block] = block if block opts[:instance_specific] = true if orig_opts[:dataset] if !opts.has_key?(:instance_specific) && (block || orig_opts[:block]) # It's possible the association is instance specific, in that it depends on # values other than the foreign key value. This needs to be checked for # in certain places to disable optimizations. opts[:instance_specific] = _association_instance_specific_default(name) end if (orig_opts[:instance_specific] || orig_opts[:dataset]) && !opts.has_key?(:allow_eager) && !opts[:eager_loader] # For associations explicitly marked as instance specific, or that use the # :dataset option, where :allow_eager is not set, and no :eager_loader is # provided, disallow eager loading. In these cases, eager loading is # unlikely to work. This is not done for implicit setting of :instance_specific, # because implicit use is done by default for all associations with blocks, # and the vast majority of associations with blocks use the block for filtering # in a manner compatible with eager loading. opts[:allow_eager] = false end opts = assoc_class.new.merge!(opts) if opts[:clone] && !opts.cloneable?(cloned_assoc) raise(Error, "cannot clone an association to an association of different type (association #{name} with type #{type} cloning #{opts[:clone]} with type #{cloned_assoc[:type]})") end opts[:use_placeholder_loader] = !opts[:instance_specific] && !opts[:eager_graph] opts[:eager_block] = opts[:block] unless opts.include?(:eager_block) opts[:graph_join_type] ||= :left_outer opts[:order_eager_graph] = true unless opts.include?(:order_eager_graph) conds = opts[:conditions] opts[:graph_alias_base] ||= name opts[:graph_conditions] = conds if !opts.include?(:graph_conditions) and Sequel.condition_specifier?(conds) opts[:graph_conditions] = opts.fetch(:graph_conditions, []).to_a opts[:graph_select] = Array(opts[:graph_select]) if opts[:graph_select] [:before_add, :before_remove, :after_add, :after_remove, :after_load, :before_set, :after_set].each do |cb_type| opts[cb_type] = Array(opts[cb_type]) if opts[cb_type] end if opts[:extend] opts[:extend] = Array(opts[:extend]) opts[:reverse_extend] = opts[:extend].reverse end late_binding_class_option(opts, opts.returns_array? ? 
singularize(name) : name) # Remove :class entry if it exists and is nil, to work with cached_fetch opts.delete(:class) unless opts[:class] def_association(opts) orig_opts.delete(:clone) opts[:orig_class] = orig_opts[:class] || orig_opts[:class_name] orig_opts.merge!(:class_name=>opts[:class_name], :class=>opts[:class], :block=>opts[:block]) opts[:orig_opts] = orig_opts # don't add to association_reflections until we are sure there are no errors association_reflections[name] = opts end # The association reflection hash for the association of the given name. def association_reflection(name) association_reflections[name] end # Array of association name symbols def associations association_reflections.keys end # Eager load the association with the given eager loader options. def eager_load_results(opts, eo, &block) opts.eager_load_results(eo, &block) end # Freeze association related metadata when freezing model class. def freeze @association_reflections.freeze.each_value(&:freeze) @autoreloading_associations.freeze.each_value(&:freeze) @default_association_options.freeze @default_association_type_options.freeze @default_association_type_options.each_value(&:freeze) super end # Finalize all associations such that values that are looked up # dynamically in associated classes are set statically. # As this modifies the associations, it must be done before # calling freeze. def finalize_associations @association_reflections.each_value(&:finalize) end # Shortcut for adding a many_to_many association, see #associate def many_to_many(name, opts=OPTS, &block) associate(:many_to_many, name, opts, &block) end # Shortcut for adding a many_to_one association, see #associate def many_to_one(name, opts=OPTS, &block) associate(:many_to_one, name, opts, &block) end # Shortcut for adding a one_through_one association, see #associate def one_through_one(name, opts=OPTS, &block) associate(:one_through_one, name, opts, &block) end # Shortcut for adding a one_to_many association, see #associate def one_to_many(name, opts=OPTS, &block) associate(:one_to_many, name, opts, &block) end # Shortcut for adding a one_to_one association, see #associate def one_to_one(name, opts=OPTS, &block) associate(:one_to_one, name, opts, &block) end Plugins.inherited_instance_variables(self, :@association_reflections=>:dup, :@autoreloading_associations=>:hash_dup, :@default_association_options=>:dup, :@default_association_type_options=>:hash_dup, :@cache_associations=>nil, :@default_eager_limit_strategy=>nil) Plugins.def_dataset_methods(self, [:eager, :eager_graph, :eager_graph_with_options, :association_join, :association_full_join, :association_inner_join, :association_left_join, :association_right_join]) private # The default value for the instance_specific option, if the association # could be instance specific and the :instance_specific option is not specified. def _association_instance_specific_default(_) true end # The module to use for the association's methods. Defaults to # the overridable_methods_module. def association_module(opts=OPTS) opts.fetch(:methods_module, overridable_methods_module) end # Add a method to the module included in the class, so the method # can be easily overridden in the class itself while allowing for # super to be called. 
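      # For example (a sketch with hypothetical models), defining the method in
      # an included module allows user code to override it and call super:
      #
      #   class Album < Sequel::Model
      #     one_to_many :tracks
      #     def tracks
      #       super.sort_by(&:number)
      #     end
      #   end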
def association_module_def(name, opts=OPTS, &block) mod = association_module(opts) mod.send(:define_method, name, &block) mod.send(:alias_method, name, name) end # Add a method to the module included in the class, so the method # can be easily overridden in the class itself while allowing for # super to be called. This method allows passing keywords through # the defined methods. def association_module_delegate_def(name, opts, &block) mod = association_module(opts) mod.send(:define_method, name, &block) # :nocov: mod.send(:ruby2_keywords, name) if mod.respond_to?(:ruby2_keywords, true) # :nocov: mod.send(:alias_method, name, name) end # Add a private method to the module included in the class. def association_module_private_def(name, opts=OPTS, &block) association_module_def(name, opts, &block) association_module(opts).send(:private, name) end # Delegate to the type-specific association method to setup the # association, and define the association instance methods. def def_association(opts) send(:"def_#{opts[:type]}", opts) def_association_instance_methods(opts) end # Adds the association method to the association methods module. def def_association_method(opts) association_module_def(opts.association_method, opts) do |dynamic_opts=OPTS, &block| load_associated_objects(opts, dynamic_opts, &block) end end # Define all of the association instance methods for this association. def def_association_instance_methods(opts) # Always set the method names in the association reflection, even if they # are not used, for backwards compatibility. opts[:dataset_method] = :"#{opts[:name]}_dataset" if opts.returns_array? sname = singularize(opts[:name]) opts[:_add_method] = :"_add_#{sname}" opts[:add_method] = :"add_#{sname}" opts[:_remove_method] = :"_remove_#{sname}" opts[:remove_method] = :"remove_#{sname}" opts[:_remove_all_method] = :"_remove_all_#{opts[:name]}" opts[:remove_all_method] = :"remove_all_#{opts[:name]}" else opts[:_setter_method] = :"_#{opts[:name]}=" opts[:setter_method] = :"#{opts[:name]}=" end association_module_def(opts.dataset_method, opts){_dataset(opts)} unless opts[:no_dataset_method] if opts[:block] opts[:block_method] = Plugins.def_sequel_method(association_module(opts), "#{opts[:name]}_block", 1, &opts[:block]) end opts[:dataset_opt_arity] = opts[:dataset].arity == 0 ? 
0 : 1 opts[:dataset_opt_method] = Plugins.def_sequel_method(association_module(opts), "#{opts[:name]}_dataset_opt", opts[:dataset_opt_arity], &opts[:dataset]) def_association_method(opts) unless opts[:no_association_method] return if opts[:read_only] if opts[:setter] && opts[:_setter] # This is backwards due to backwards compatibility association_module_private_def(opts[:_setter_method], opts, &opts[:setter]) association_module_def(opts[:setter_method], opts, &opts[:_setter]) end if adder = opts[:adder] association_module_private_def(opts[:_add_method], opts, &adder) association_module_delegate_def(opts[:add_method], opts){|o,*args| add_associated_object(opts, o, *args)} end if remover = opts[:remover] association_module_private_def(opts[:_remove_method], opts, &remover) association_module_delegate_def(opts[:remove_method], opts){|o,*args| remove_associated_object(opts, o, *args)} end if clearer = opts[:clearer] association_module_private_def(opts[:_remove_all_method], opts, &clearer) association_module_delegate_def(opts[:remove_all_method], opts){|*args| remove_all_associated_objects(opts, *args)} end end # Configures many_to_many and one_through_one association reflection and adds the related association methods def def_many_to_many(opts) one_through_one = opts[:type] == :one_through_one left = (opts[:left_key] ||= opts.default_left_key) lcks = opts[:left_keys] = Array(left) right = (opts[:right_key] ||= opts.default_right_key) rcks = opts[:right_keys] = Array(right) left_pk = (opts[:left_primary_key] ||= self.primary_key) opts[:eager_loader_key] = left_pk unless opts.has_key?(:eager_loader_key) lcpks = opts[:left_primary_keys] = Array(left_pk) lpkc = opts[:left_primary_key_column] ||= left_pk lpkcs = opts[:left_primary_key_columns] ||= Array(lpkc) raise(Error, "mismatched number of left keys: #{lcks.inspect} vs #{lcpks.inspect}") unless lcks.length == lcpks.length if opts[:right_primary_key] rcpks = Array(opts[:right_primary_key]) raise(Error, "mismatched number of right keys: #{rcks.inspect} vs #{rcpks.inspect}") unless rcks.length == rcpks.length end opts[:uses_left_composite_keys] = lcks.length > 1 uses_rcks = opts[:uses_right_composite_keys] = rcks.length > 1 opts[:cartesian_product_number] ||= one_through_one ? 0 : 1 join_table = (opts[:join_table] ||= opts.default_join_table) opts[:left_key_alias] ||= opts.default_associated_key_alias opts[:graph_join_table_join_type] ||= opts[:graph_join_type] if opts[:uniq] opts[:after_load] ||= [] opts[:after_load].unshift(:array_uniq!) end if join_table_db = opts[:join_table_db] opts[:use_placeholder_loader] = false opts[:allow_eager_graph] = false opts[:allow_filtering_by] = false opts[:eager_limit_strategy] = nil join_table_ds = join_table_db.from(join_table) opts[:dataset] ||= proc do |r| vals = join_table_ds.where(lcks.zip(lcpks.map{|k| get_column_value(k)})).select_map(right) ds = r.associated_dataset.where(opts.right_primary_key => vals) if uses_rcks vals.delete_if{|v| v.any?(&:nil?)} else vals.delete(nil) end ds = ds.clone(:no_results=>true) if vals.empty? ds end opts[:eager_loader] ||= proc do |eo| h = eo[:id_map] assign_singular = opts.assign_singular? rpk = opts.right_primary_key name = opts[:name] join_map = join_table_ds.where(left=>h.keys).select_hash_groups(right, left) if uses_rcks join_map.delete_if{|v,| v.any?(&:nil?)} else join_map.delete(nil) end eo = Hash[eo] if join_map.empty? eo[:no_results] = true else join_map.each_value do |vs| vs.replace(vs.flat_map{|v| h[v]}) vs.uniq! 
end eo[:loader] = false eo[:right_keys] = join_map.keys end opts[:model].eager_load_results(opts, eo) do |assoc_record| rpkv = if uses_rcks assoc_record.values.values_at(*rpk) else assoc_record.values[rpk] end objects = join_map[rpkv] if assign_singular objects.each do |object| object.associations[name] ||= assoc_record end else objects.each do |object| object.associations[name].push(assoc_record) end end end end else opts[:dataset] ||= opts.association_dataset_proc opts[:eager_loader] ||= opts.method(:default_eager_loader) end join_type = opts[:graph_join_type] select = opts[:graph_select] use_only_conditions = opts.include?(:graph_only_conditions) only_conditions = opts[:graph_only_conditions] conditions = opts[:graph_conditions] graph_block = opts[:graph_block] graph_jt_conds = opts[:graph_join_table_conditions] = opts.fetch(:graph_join_table_conditions, []).to_a use_jt_only_conditions = opts.include?(:graph_join_table_only_conditions) jt_only_conditions = opts[:graph_join_table_only_conditions] jt_join_type = opts[:graph_join_table_join_type] jt_graph_block = opts[:graph_join_table_block] opts[:eager_grapher] ||= proc do |eo| ds = eo[:self] egls = eo[:limit_strategy] if egls && egls != :ruby associated_key_array = opts.associated_key_array orig_egds = egds = eager_graph_dataset(opts, eo) egds = egds. inner_join(join_table, rcks.zip(opts.right_primary_keys) + graph_jt_conds, :qualify=>:deep). select_all(egds.first_source). select_append(*associated_key_array) egds = opts.apply_eager_graph_limit_strategy(egls, egds) ds.graph(egds, associated_key_array.map(&:alias).zip(lpkcs) + conditions, :qualify=>:deep, :table_alias=>eo[:table_alias], :implicit_qualifier=>eo[:implicit_qualifier], :join_type=>eo[:join_type]||join_type, :from_self_alias=>eo[:from_self_alias], :join_only=>eo[:join_only], :select=>select||orig_egds.columns, &graph_block) else ds = ds.graph(join_table, use_jt_only_conditions ? jt_only_conditions : lcks.zip(lpkcs) + graph_jt_conds, :select=>false, :table_alias=>ds.unused_table_alias(join_table, [eo[:table_alias]]), :join_type=>eo[:join_type]||jt_join_type, :join_only=>eo[:join_only], :implicit_qualifier=>eo[:implicit_qualifier], :qualify=>:deep, :from_self_alias=>eo[:from_self_alias], &jt_graph_block) ds.graph(eager_graph_dataset(opts, eo), use_only_conditions ? 
only_conditions : opts.right_primary_keys.zip(rcks) + conditions, :select=>select, :table_alias=>eo[:table_alias], :qualify=>:deep, :join_type=>eo[:join_type]||join_type, :join_only=>eo[:join_only], &graph_block) end end return if opts[:read_only] if one_through_one unless opts.has_key?(:setter) opts[:setter] = proc do |o| h = {} lh = lcks.zip(lcpks.map{|k| get_column_value(k)}) jtds = _join_table_dataset(opts).where(lh) checked_transaction do current = jtds.first if o new_values = [] rcks.zip(opts.right_primary_key_methods).each{|k, pk| new_values << (h[k] = o.get_column_value(pk))} end if current current_values = rcks.map{|k| current[k]} jtds = jtds.where(rcks.zip(current_values)) if o if current_values != new_values jtds.update(h) end else jtds.delete end elsif o lh.each{|k,v| h[k] = v} jtds.insert(h) end end end end if opts.fetch(:setter, true) opts[:_setter] = proc{|o| set_one_through_one_associated_object(opts, o)} end else unless opts.has_key?(:adder) opts[:adder] = proc do |o| h = {} lcks.zip(lcpks).each{|k, pk| h[k] = get_column_value(pk)} rcks.zip(opts.right_primary_key_methods).each{|k, pk| h[k] = o.get_column_value(pk)} _join_table_dataset(opts).insert(h) end end unless opts.has_key?(:remover) opts[:remover] = proc do |o| _join_table_dataset(opts).where(lcks.zip(lcpks.map{|k| get_column_value(k)}) + rcks.zip(opts.right_primary_key_methods.map{|k| o.get_column_value(k)})).delete end end unless opts.has_key?(:clearer) opts[:clearer] = proc do _join_table_dataset(opts).where(lcks.zip(lcpks.map{|k| get_column_value(k)})).delete end end end end # Configures many_to_one association reflection and adds the related association methods def def_many_to_one(opts) name = opts[:name] opts[:key] = opts.default_key unless opts.has_key?(:key) key = opts[:key] opts[:eager_loader_key] = key unless opts.has_key?(:eager_loader_key) cks = opts[:graph_keys] = opts[:keys] = Array(key) opts[:key_column] ||= key opts[:graph_keys] = opts[:key_columns] = Array(opts[:key_column]) opts[:qualified_key] = opts.qualify_cur(key) if opts[:primary_key] cpks = Array(opts[:primary_key]) raise(Error, "mismatched number of keys: #{cks.inspect} vs #{cpks.inspect}") unless cks.length == cpks.length end uses_cks = opts[:uses_composite_keys] = cks.length > 1 opts[:cartesian_product_number] ||= 0 if !opts.has_key?(:many_to_one_pk_lookup) && (opts[:dataset] || opts[:conditions] || opts[:block] || opts[:select] || (opts.has_key?(:key) && opts[:key] == nil)) opts[:many_to_one_pk_lookup] = false end auto_assocs = @autoreloading_associations cks.each do |k| (auto_assocs[k] ||= []) << name end opts[:dataset] ||= opts.association_dataset_proc opts[:eager_loader] ||= proc do |eo| h = eo[:id_map] pk_meths = opts.primary_key_methods eager_load_results(opts, eo) do |assoc_record| hash_key = uses_cks ? pk_meths.map{|k| assoc_record.get_column_value(k)} : assoc_record.get_column_value(opts.primary_key_method) h[hash_key].each{|object| object.associations[name] = assoc_record} end end join_type = opts[:graph_join_type] select = opts[:graph_select] use_only_conditions = opts.include?(:graph_only_conditions) only_conditions = opts[:graph_only_conditions] conditions = opts[:graph_conditions] graph_block = opts[:graph_block] graph_cks = opts[:graph_keys] opts[:eager_grapher] ||= proc do |eo| ds = eo[:self] ds.graph(eager_graph_dataset(opts, eo), use_only_conditions ? 
only_conditions : opts.primary_keys.zip(graph_cks) + conditions, eo.merge(:select=>select, :join_type=>eo[:join_type]||join_type, :qualify=>:deep), &graph_block) end return if opts[:read_only] unless opts.has_key?(:setter) opts[:setter] = proc{|o| cks.zip(opts.primary_key_methods).each{|k, pk| set_column_value(:"#{k}=", (o.get_column_value(pk) if o))}} end if opts.fetch(:setter, true) opts[:_setter] = proc{|o| set_associated_object(opts, o)} end end # Configures one_to_many and one_to_one association reflections and adds the related association methods def def_one_to_many(opts) one_to_one = opts[:type] == :one_to_one name = opts[:name] key = (opts[:key] ||= opts.default_key) km = opts[:key_method] ||= opts[:key] cks = opts[:keys] = Array(key) opts[:key_methods] = Array(opts[:key_method]) primary_key = (opts[:primary_key] ||= self.primary_key) opts[:eager_loader_key] = primary_key unless opts.has_key?(:eager_loader_key) cpks = opts[:primary_keys] = Array(primary_key) pkc = opts[:primary_key_column] ||= primary_key pkcs = opts[:primary_key_columns] ||= Array(pkc) raise(Error, "mismatched number of keys: #{cks.inspect} vs #{cpks.inspect}") unless cks.length == cpks.length uses_cks = opts[:uses_composite_keys] = cks.length > 1 opts[:dataset] ||= opts.association_dataset_proc opts[:eager_loader] ||= proc do |eo| h = eo[:id_map] reciprocal = opts.reciprocal assign_singular = opts.assign_singular? delete_rn = opts.delete_row_number_column eager_load_results(opts, eo) do |assoc_record| assoc_record.values.delete(delete_rn) if delete_rn hash_key = uses_cks ? km.map{|k| assoc_record.get_column_value(k)} : assoc_record.get_column_value(km) objects = h[hash_key] if assign_singular objects.each do |object| unless object.associations[name] object.associations[name] = assoc_record assoc_record.associations[reciprocal] = object if reciprocal end end else objects.each do |object| object.associations[name].push(assoc_record) assoc_record.associations[reciprocal] = object if reciprocal end end end end join_type = opts[:graph_join_type] select = opts[:graph_select] use_only_conditions = opts.include?(:graph_only_conditions) only_conditions = opts[:graph_only_conditions] conditions = opts[:graph_conditions] opts[:cartesian_product_number] ||= one_to_one ? 0 : 1 graph_block = opts[:graph_block] opts[:eager_grapher] ||= proc do |eo| ds = eo[:self] ds = ds.graph(opts.apply_eager_graph_limit_strategy(eo[:limit_strategy], eager_graph_dataset(opts, eo)), use_only_conditions ? only_conditions : cks.zip(pkcs) + conditions, eo.merge(:select=>select, :join_type=>eo[:join_type]||join_type, :qualify=>:deep), &graph_block) # We only load reciprocals for one_to_many associations, as other reciprocals don't make sense ds.opts[:eager_graph][:reciprocals][eo[:table_alias]] = opts.reciprocal ds end return if opts[:read_only] save_opts = {:validate=>opts[:validate]} ck_nil_hash ={} cks.each{|k| ck_nil_hash[k] = nil} if one_to_one unless opts.has_key?(:setter) opts[:setter] = proc do |o| up_ds = _apply_association_options(opts, opts.associated_dataset.where(cks.zip(cpks.map{|k| get_column_value(k)}))) if (froms = up_ds.opts[:from]) && (from = froms[0]) && (from.is_a?(Sequel::Dataset) || (from.is_a?(Sequel::SQL::AliasedExpression) && from.expression.is_a?(Sequel::Dataset))) if old = up_ds.first cks.each{|k| old.set_column_value(:"#{k}=", nil)} end save_old = true end if o if !o.new? 
&& !save_old up_ds = up_ds.exclude(o.pk_hash) end cks.zip(cpks).each{|k, pk| o.set_column_value(:"#{k}=", get_column_value(pk))} end checked_transaction do if save_old old.save(save_opts) || raise(Sequel::Error, "invalid previously associated object, cannot save") if old else up_ds.skip_limit_check.update(ck_nil_hash) end o.save(save_opts) || raise(Sequel::Error, "invalid associated object, cannot save") if o end end end if opts.fetch(:setter, true) opts[:_setter] = proc{|o| set_one_to_one_associated_object(opts, o)} end else save_opts[:raise_on_failure] = opts[:raise_on_save_failure] != false unless opts.has_key?(:adder) opts[:adder] = proc do |o| cks.zip(cpks).each{|k, pk| o.set_column_value(:"#{k}=", get_column_value(pk))} o.save(save_opts) end end unless opts.has_key?(:remover) opts[:remover] = proc do |o| cks.each{|k| o.set_column_value(:"#{k}=", nil)} o.save(save_opts) end end unless opts.has_key?(:clearer) opts[:clearer] = proc do _apply_association_options(opts, opts.associated_dataset.where(cks.zip(cpks.map{|k| get_column_value(k)}))).update(ck_nil_hash) end end end end # Alias of def_many_to_many, since they share pretty much the same code. def def_one_through_one(opts) def_many_to_many(opts) end # Alias of def_one_to_many, since they share pretty much the same code. def def_one_to_one(opts) def_one_to_many(opts) end # Return dataset to graph into given the association reflection, applying the :callback option if set. def eager_graph_dataset(opts, eager_options) ds = opts.associated_class.dataset if cb = eager_options[:callback] ds = cb.call(ds) end ds end # If not caching associations, reload the database schema by default, # ignoring any cached values. def reload_db_schema? !@cache_associations end end # Instance methods used to implement the associations support. module InstanceMethods # The currently cached associations. A hash with the keys being the # association name symbols and the values being the associated object # or nil (many_to_one), or the array of associated objects (*_to_many). def associations @associations ||= {} end # Freeze the associations cache when freezing the object. Note that # retrieving associations after freezing will still work in most cases, # but the associations will not be cached in the association cache. def freeze associations super associations.freeze self end private # Apply the association options such as :order and :limit to the given dataset, returning a modified dataset. def _apply_association_options(opts, ds) unless ds.kind_of?(AssociationDatasetMethods) ds = opts.apply_dataset_changes(ds) end ds = ds.clone(:model_object => self) ds = ds.eager_graph(opts[:eager_graph]) if opts[:eager_graph] && opts.eager_graph_lazy_dataset? # block method is private ds = send(opts[:block_method], ds) if opts[:block_method] ds end # Return a dataset for the association after applying any dynamic callback. def _associated_dataset(opts, dynamic_opts) ds = public_send(opts.dataset_method) if callback = dynamic_opts[:callback] ds = callback.call(ds) end ds end # A placeholder literalizer that can be used to load the association, or nil to not use one. def _associated_object_loader(opts, dynamic_opts) if !dynamic_opts[:callback] && (loader = opts.placeholder_loader) loader end end # Return an association dataset for the given association reflection def _dataset(opts) raise(Sequel::Error, "model object #{inspect} does not have a primary key") if opts.dataset_need_primary_key? 
&& !pk ds = if opts[:dataset_opt_arity] == 1 # dataset_opt_method is private send(opts[:dataset_opt_method], opts) else send(opts[:dataset_opt_method]) end _apply_association_options(opts, ds) end # Dataset for the join table of the given many to many association reflection def _join_table_dataset(opts) ds = (opts[:join_table_db] || model.db).from(opts.join_table_source) opts[:join_table_block] ? opts[:join_table_block].call(ds) : ds end # Return the associated single object for the given association reflection and dynamic options # (or nil if no associated object). def _load_associated_object(opts, dynamic_opts) _load_associated_object_array(opts, dynamic_opts).first end # Return the associated single object using a primary key lookup on the associated class. def _load_associated_object_via_primary_key(opts) opts.associated_class.send(:primary_key_lookup, ((fk = opts[:key]).is_a?(Array) ? fk.map{|c| get_column_value(c)} : get_column_value(fk))) end # Load the associated objects for the given association reflection and dynamic options # as an array. def _load_associated_object_array(opts, dynamic_opts) if loader = _associated_object_loader(opts, dynamic_opts) loader.all(*opts.predicate_key_values(self)) else ds = _associated_dataset(opts, dynamic_opts) if ds.opts[:no_results] [] else ds.all end end end # Return the associated objects from the dataset, without association callbacks, reciprocals, and caching. # Still apply the dynamic callback if present. def _load_associated_objects(opts, dynamic_opts=OPTS) if opts.can_have_associated_objects?(self) if opts.returns_array? _load_associated_object_array(opts, dynamic_opts) elsif load_with_primary_key_lookup?(opts, dynamic_opts) _load_associated_object_via_primary_key(opts) else _load_associated_object(opts, dynamic_opts) end elsif opts.returns_array? [] end end # Clear the associations cache when refreshing def _refresh_set_values(hash) @associations.clear if @associations super end # Add the given associated object to the given association def add_associated_object(opts, o, *args) o = make_add_associated_object(opts, o) raise(Sequel::Error, "model object #{inspect} does not have a primary key") if opts.dataset_need_primary_key? && !pk ensure_associated_primary_key(opts, o, *args) return if run_association_callbacks(opts, :before_add, o) == false # Allow calling private _add method return if !send(opts[:_add_method], o, *args) && opts.handle_silent_modification_failure? if array = associations[opts[:name]] and !array.include?(o) array.push(o) end add_reciprocal_object(opts, o) run_association_callbacks(opts, :after_add, o) o end # :nocov: ruby2_keywords(:add_associated_object) if respond_to?(:ruby2_keywords, true) # :nocov: # Add/Set the current object to/as the given object's reciprocal association. def add_reciprocal_object(opts, o) return if o.frozen? return unless reciprocal = opts.reciprocal if opts.reciprocal_array? if array = o.associations[reciprocal] and !array.include?(self) array.push(self) end else o.associations[reciprocal] = self end end # Call uniq! on the given array. This is used by the :uniq option, # and is an actual method for memory reasons. def array_uniq!(a) a.uniq! end # If a foreign key column value changes, clear the related # cached associations. def change_column_value(column, value) if assocs = model.autoreloading_associations[column] vals = @values if new? # Do deeper checking for new objects, so that associations are # not deleted when values do not change. 
This code is run at # a higher level for existing objects. if value == (c = vals[column]) && value.class == c.class # If the value is the same, there is no reason to delete # the related associations, so exit early in that case. return super end only_delete_nil = c.nil? elsif vals[column].nil? only_delete_nil = true end if only_delete_nil # If the current foreign key value is nil, but the association # is already present in the cache, it was probably added to the # cache for a reason, and we do not want to delete it in that case. # However, we still want to delete associations with nil values # to remove the cached false negative. assocs.each{|a| associations.delete(a) if associations[a].nil?} else assocs.each{|a| associations.delete(a)} end end super end # Save the associated object if the associated object needs a primary key # and the associated object is new and does not have one. Raise an error if # the object still does not have a primary key. def ensure_associated_primary_key(opts, o, *args) if opts.need_associated_primary_key? o.save(:validate=>opts[:validate]) if o.new? raise(Sequel::Error, "associated object #{o.inspect} does not have a primary key") unless o.pk end end # Duplicate the associations hash when duplicating the object. def initialize_copy(other) super @associations = Hash[@associations] if @associations self end # If a block is given, assign it as the :callback option in the hash, and return the hash. def load_association_objects_options(dynamic_opts, &block) if block dynamic_opts = Hash[dynamic_opts] dynamic_opts[:callback] = block end dynamic_opts end # Load the associated objects using the dataset, handling callbacks, reciprocals, and caching. def load_associated_objects(opts, dynamic_opts, &block) dynamic_opts = load_association_objects_options(dynamic_opts, &block) name = opts[:name] if associations.include?(name) && !dynamic_opts[:callback] && !dynamic_opts[:reload] associations[name] else objs = _load_associated_objects(opts, dynamic_opts) if opts.set_reciprocal_to_self? if opts.returns_array? objs.each{|o| add_reciprocal_object(opts, o)} elsif objs add_reciprocal_object(opts, objs) end end # If the current object is frozen, you can't update the associations # cache. This can cause issues for after_load procs that expect # the objects to be already cached in the associations, but # unfortunately that case cannot be handled. associations[name] = objs unless frozen? run_association_callbacks(opts, :after_load, objs) frozen? ? objs : associations[name] end end # Whether to use a simple primary key lookup on the associated class when loading. def load_with_primary_key_lookup?(opts, dynamic_opts) opts[:type] == :many_to_one && !dynamic_opts[:callback] && opts.send(:cached_fetch, :many_to_one_pk_lookup){opts.primary_key == opts.associated_class.primary_key} end # Convert the input of the add_* association method into an associated object. For # hashes, this creates a new object using the hash. For integers, strings, and arrays, # it assumes the value specifies a primary key, and looks up an existing object with that primary key. # Otherwise, if the object is not already an instance of the class, raise an exception.
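#
# A hypothetical sketch of the resulting behavior (assuming an Artist model with a
# one_to_many :albums association, as in the documentation examples above):
#
#   artist.add_album(name: 'RF')    # Hash: passed to Album.new
#   artist.add_album(1)             # Integer: looked up via Album.with_pk!(1)
#   artist.add_album(album)         # Album instance: used as-is
#   artist.add_album(Object.new)    # raises Sequel::Error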
def make_add_associated_object(opts, o) klass = opts.associated_class case o when Hash klass.new(o) when Integer, String, Array klass.with_pk!(o) when klass o else raise(Sequel::Error, "associated object #{o.inspect} not of correct type #{klass}") end end # Remove all associated objects from the given association def remove_all_associated_objects(opts, *args) raise(Sequel::Error, "model object #{inspect} does not have a primary key") if opts.dataset_need_primary_key? && !pk # Allow calling private _remove_all method send(opts[:_remove_all_method], *args) ret = associations[opts[:name]].each{|o| remove_reciprocal_object(opts, o)} if associations.include?(opts[:name]) associations[opts[:name]] = [] ret end # :nocov: ruby2_keywords(:remove_all_associated_objects) if respond_to?(:ruby2_keywords, true) # :nocov: # Remove the given associated object from the given association def remove_associated_object(opts, o, *args) klass = opts.associated_class if o.is_a?(Integer) || o.is_a?(String) || o.is_a?(Array) o = remove_check_existing_object_from_pk(opts, o, *args) elsif !o.is_a?(klass) raise(Sequel::Error, "associated object #{o.inspect} not of correct type #{klass}") elsif opts.remove_should_check_existing? && public_send(opts.dataset_method).where(o.pk_hash).empty? raise(Sequel::Error, "associated object #{o.inspect} is not currently associated to #{inspect}") end raise(Sequel::Error, "model object #{inspect} does not have a primary key") if opts.dataset_need_primary_key? && !pk raise(Sequel::Error, "associated object #{o.inspect} does not have a primary key") if opts.need_associated_primary_key? && !o.pk return if run_association_callbacks(opts, :before_remove, o) == false # Allow calling private _remove method return if !send(opts[:_remove_method], o, *args) && opts.handle_silent_modification_failure? associations[opts[:name]].delete_if{|x| o === x} if associations.include?(opts[:name]) remove_reciprocal_object(opts, o) run_association_callbacks(opts, :after_remove, o) o end # :nocov: ruby2_keywords(:remove_associated_object) if respond_to?(:ruby2_keywords, true) # :nocov: # Check that the object from the associated table specified by the primary key # is currently associated to the receiver. If it is associated, return the object, otherwise # raise an error. def remove_check_existing_object_from_pk(opts, o, *args) key = o pkh = opts.associated_class.qualified_primary_key_hash(key) raise(Sequel::Error, "no object with key(s) #{key.inspect} is currently associated to #{inspect}") unless o = public_send(opts.dataset_method).first(pkh) o end # Remove/unset the current object from/as the given object's reciprocal association. def remove_reciprocal_object(opts, o) return unless reciprocal = opts.reciprocal if opts.reciprocal_array? if array = o.associations[reciprocal] array.delete_if{|x| self === x} end else o.associations[reciprocal] = nil end end # Run the callback for the association with the object. 
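#
# Callbacks may be given as method name symbols (called on the receiver with the
# associated object) or as procs (called with the receiver and the associated
# object). A hypothetical sketch, assuming an Artist model where check_album and
# log_album are instance methods defined elsewhere:
#
#   Artist.one_to_many :albums,
#     before_add: :check_album,                          # artist.check_album(album)
#     after_add: proc{|artist, album| artist.log_album(album)}
#
# Raising Sequel::HookFailed in a before_* callback cancels the modification;
# for *_to_many associations with raise_on_save_failure set to false, the
# cancellation is silent instead of reraising.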
def run_association_callbacks(reflection, callback_type, object) return unless cbs = reflection[callback_type] begin cbs.each do |cb| case cb when Symbol # Allow calling private methods in association callbacks send(cb, object) when Proc cb.call(self, object) else raise Error, "callbacks should either be Procs or Symbols" end end rescue HookFailed # The reason we automatically set raise_error for singular associations is that # assignment in ruby always returns the argument instead of the result of the # method, so we can't return nil to signal that the association callback prevented # the modification return false unless raise_on_save_failure || !reflection.returns_array? raise end end # Set the given object as the associated object for the given *_to_one association reflection def _set_associated_object(opts, o) a = associations[opts[:name]] reciprocal = opts.reciprocal if set_associated_object_if_same? if reciprocal remove_reciprocal = a && (a != o || a.associations[reciprocal] != self) add_reciprocal = o && o.associations[reciprocal] != self end else return if a && a == o if reciprocal remove_reciprocal = a add_reciprocal = o end end run_association_callbacks(opts, :before_set, o) remove_reciprocal_object(opts, a) if remove_reciprocal # Allow calling private _setter method send(opts[:_setter_method], o) associations[opts[:name]] = o add_reciprocal_object(opts, o) if add_reciprocal run_association_callbacks(opts, :after_set, o) o end # Whether to run the associated object setter code if passed the same object as the one already # cached in the association. Usually not set (so nil), can be set on a per-object basis # if necessary. def set_associated_object_if_same? @set_associated_object_if_same end # Set the given object as the associated object for the given many_to_one association reflection def set_associated_object(opts, o) raise(Error, "associated object #{o.inspect} does not have a primary key") if o && !o.pk _set_associated_object(opts, o) end # Set the given object as the associated object for the given one_through_one association reflection def set_one_through_one_associated_object(opts, o) raise(Error, "object #{inspect} does not have a primary key") unless pk raise(Error, "associated object #{o.inspect} does not have a primary key") if o && !o.pk _set_associated_object(opts, o) end # Set the given object as the associated object for the given one_to_one association reflection def set_one_to_one_associated_object(opts, o) raise(Error, "object #{inspect} does not have a primary key") unless pk _set_associated_object(opts, o) end end # Eager loading makes it so that you can load all associated records for a # set of objects in a single query, instead of a separate query for each object. # # Two separate implementations are provided. +eager+ should be used most of the # time, as it loads associated records using one query per association. However, # it does not allow you to filter or order based on columns in associated tables. +eager_graph+ loads # all records in a single query using JOINs, allowing you to filter or order based on columns in associated # tables. However, +eager_graph+ is usually slower than +eager+, especially if multiple # one_to_many or many_to_many associations are joined. # # You can cascade the eager loading (loading associations on associated objects) # with no limit to the depth of the cascades.
You do this by passing a hash to +eager+ or +eager_graph+ # with the keys being associations of the current model and values being # associations of the model associated with the current model via the key. # # The arguments can be symbols or hashes with symbol keys (for cascaded # eager loading). Examples: # # Album.eager(:artist).all # Album.eager_graph(:artist).all # Album.eager(:artist, :genre).all # Album.eager_graph(:artist, :genre).all # Album.eager(:artist).eager(:genre).all # Album.eager_graph(:artist).eager_graph(:genre).all # Artist.eager(albums: :tracks).all # Artist.eager_graph(albums: :tracks).all # Artist.eager(albums: {tracks: :genre}).all # Artist.eager_graph(albums: {tracks: :genre}).all # # You can also pass a callback as a hash value in order to customize the dataset being # eager loaded at query time, analogous to the way the :eager_block association option # allows you to customize it at association definition time. For example, # if you wanted artists with their albums since 1990: # # Artist.eager(albums: proc{|ds| ds.where{year > 1990}}) # # Or if you needed albums and their artist's name only, using a single query: # # Album.eager_graph(artist: proc{|ds| ds.select(:name)}) # # To cascade eager loading while using a callback, you substitute the cascaded # associations with a single entry hash that has the proc callback as the key and # the cascaded associations as the value. This will load artists with their albums # since 1990, and also the tracks on those albums and the genre for those tracks: # # Artist.eager(albums: {proc{|ds| ds.where{year > 1990}}=>{tracks: :genre}}) module DatasetMethods %w'inner left right full'.each do |type| class_eval(<<-END, __FILE__, __LINE__+1) def association_#{type}_join(*associations) _association_join(:#{type}, associations) end END end # Adds one or more INNER JOINs to the existing dataset using the keys and conditions # specified by the given association(s). Takes the same arguments as eager_graph, and # operates similarly, but only adds the joins as opposed to making the other changes # (such as adding selected columns and setting up eager loading).
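#
# Because only the joins are added, the return value is a regular dataset that you
# can filter or aggregate yourself. A hypothetical sketch (assuming the Album/artist
# setup used in the examples below):
#
#   Album.association_join(:artist).where(Sequel[:artist][:name] => 'Bob').count
#   # SELECT count(*) AS count
#   # FROM albums
#   # INNER JOIN artists AS artist ON (artist.id = albums.artist_id)
#   # WHERE (artist.name = 'Bob') LIMIT 1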
# # The following methods also exist for specifying a different type of JOIN: # # association_full_join :: FULL JOIN # association_inner_join :: INNER JOIN # association_left_join :: LEFT JOIN # association_right_join :: RIGHT JOIN # # Examples: # # # For each album, association_join load the artist # Album.association_join(:artist).all # # SELECT * # # FROM albums # # INNER JOIN artists AS artist ON (artist.id = albums.artist_id) # # # For each album, association_join load the artist, using a specified alias # Album.association_join(Sequel[:artist].as(:a)).all # # SELECT * # # FROM albums # # INNER JOIN artists AS a ON (a.id = albums.artist_id) # # # For each album, association_join load the artist and genre # Album.association_join(:artist, :genre).all # Album.association_join(:artist).association_join(:genre).all # # SELECT * # # FROM albums # # INNER JOIN artists AS artist ON (artist.id = albums.artist_id) # # INNER JOIN genres AS genre ON (genre.id = albums.genre_id) # # # For each artist, association_join load albums and tracks for each album # Artist.association_join(albums: :tracks).all # # SELECT * # # FROM artists # # INNER JOIN albums ON (albums.artist_id = artists.id) # # INNER JOIN tracks ON (tracks.album_id = albums.id) # # # For each artist, association_join load albums, tracks for each album, and genre for each track # Artist.association_join(albums: {tracks: :genre}).all # # SELECT * # # FROM artists # # INNER JOIN albums ON (albums.artist_id = artists.id) # # INNER JOIN tracks ON (tracks.album_id = albums.id) # # INNER JOIN genres AS genre ON (genre.id = tracks.genre_id) # # # For each artist, association_join load albums with year > 1990 # Artist.association_join(albums: proc{|ds| ds.where{year > 1990}}).all # # SELECT * # # FROM artists # # INNER JOIN ( # # SELECT * FROM albums WHERE (year > 1990) # # ) AS albums ON (albums.artist_id = artists.id) # # # For each artist, association_join load albums and tracks 1-10 for each album # Artist.association_join(albums: {tracks: proc{|ds| ds.where(number: 1..10)}}).all # # SELECT * # # FROM artists # # INNER JOIN albums ON (albums.artist_id = artists.id) # # INNER JOIN ( # # SELECT * FROM tracks WHERE ((number >= 1) AND (number <= 10)) # # ) AS tracks ON (tracks.album_id = albums.id) # # # For each artist, association_join load albums with year > 1990, and tracks for those albums # Artist.association_join(albums: {proc{|ds| ds.where{year > 1990}}=>:tracks}).all # # SELECT * # # FROM artists # # INNER JOIN ( # # SELECT * FROM albums WHERE (year > 1990) # # ) AS albums ON (albums.artist_id = artists.id) # # INNER JOIN tracks ON (tracks.album_id = albums.id) def association_join(*associations) association_inner_join(*associations) end # If the expression is in the form <tt>x = y</tt> where +y+ is a <tt>Sequel::Model</tt> # instance, array of <tt>Sequel::Model</tt> instances, or a <tt>Sequel::Model</tt> dataset, # assume +x+ is an association symbol and look up the association reflection # via the dataset's model. From there, return the appropriate SQL based on the type of # association and the values of the foreign/primary keys of +y+. For most association # types, this is a simple transformation, but for +many_to_many+ associations this # creates a subquery to the join table. def complex_expression_sql_append(sql, op, args) r = args[1] if (((op == :'=' || op == :'!=') && r.is_a?(Sequel::Model)) || (multiple = ((op == :IN || op == :'NOT IN') && ((is_ds = r.is_a?(Sequel::Dataset)) || (r.respond_to?(:all?)
&& r.all?{|x| x.is_a?(Sequel::Model)}))))) l = args[0] if ar = model.association_reflections[l] raise Error, "filtering by associations is not allowed for #{ar.inspect}" if ar[:allow_filtering_by] == false if multiple klass = ar.associated_class if is_ds if r.respond_to?(:model) unless r.model <= klass # A dataset for a different model class, could be a valid regular query return super end else # Not a model dataset, could be a valid regular query return super end else unless r.all?{|x| x.is_a?(klass)} raise Sequel::Error, "invalid association class for one object for association #{l.inspect} used in dataset filter for model #{model.inspect}, expected class #{klass.inspect}" end end elsif !r.is_a?(ar.associated_class) raise Sequel::Error, "invalid association class #{r.class.inspect} for association #{l.inspect} used in dataset filter for model #{model.inspect}, expected class #{ar.associated_class.inspect}" end if exp = association_filter_expression(op, ar, r) literal_append(sql, exp) else raise Sequel::Error, "invalid association type #{ar[:type].inspect} for association #{l.inspect} used in dataset filter for model #{model.inspect}" end elsif multiple && (is_ds || r.empty?) # Not a query designed for this support, could be a valid regular query super else raise Sequel::Error, "invalid association #{l.inspect} used in dataset filter for model #{model.inspect}" end else super end end # The preferred eager loading method. Loads all associated records using one # query for each association. # # The basic idea for how it works is that the dataset is first loaded normally. # Then it goes through all associations that have been specified via +eager+. # It loads each of those associations separately, then associates them back # to the original dataset via primary/foreign keys. Due to the necessity of # all objects being present, you need to use +all+ to use eager loading, as it # can't work with +each+. # # This implementation avoids the complexity of extracting an object graph out # of a single dataset, by building the object graph out of multiple datasets, # one for each association. By using a separate dataset for each association, # it avoids problems such as aliasing conflicts and creating cartesian product # result sets if multiple one_to_many or many_to_many eager associations are requested. # # One limitation of using this method is that you cannot filter the current dataset # based on values of columns in an associated table, since the associations are loaded # in separate queries. To do that you need to load all associations in the # same query, and extract an object graph from the results of that query. If you # need to filter based on columns in associated tables, look at +eager_graph+ # or join the tables you need to filter on manually. # # Each association's order, if defined, is respected. # If the association uses a block or has an :eager_block argument, it is used. # # To modify the associated dataset that will be used for the eager load, you should use a # hash for the association, with the key being the association name symbol, and the value being # a callable object that is called with the associated dataset and should return a modified # dataset. If that association also has dependent associations, instead of a callable object, # use a hash with the callable object being the key, and the dependent association(s) as the value. 
# # Examples: # # # For each album, eager load the artist # Album.eager(:artist).all # # SELECT * FROM albums # # SELECT * FROM artists WHERE (id IN (...)) # # # For each album, eager load the artist and genre # Album.eager(:artist, :genre).all # Album.eager(:artist).eager(:genre).all # # SELECT * FROM albums # # SELECT * FROM artists WHERE (id IN (...)) # # SELECT * FROM genres WHERE (id IN (...)) # # # For each artist, eager load albums and tracks for each album # Artist.eager(albums: :tracks).all # # SELECT * FROM artists # # SELECT * FROM albums WHERE (artist_id IN (...)) # # SELECT * FROM tracks WHERE (album_id IN (...)) # # # For each artist, eager load albums, tracks for each album, and genre for each track # Artist.eager(albums: {tracks: :genre}).all # # SELECT * FROM artists # # SELECT * FROM albums WHERE (artist_id IN (...)) # # SELECT * FROM tracks WHERE (album_id IN (...)) # # SELECT * FROM genres WHERE (id IN (...)) # # # For each artist, eager load albums with year > 1990 # Artist.eager(albums: proc{|ds| ds.where{year > 1990}}).all # # SELECT * FROM artists # # SELECT * FROM albums WHERE ((year > 1990) AND (artist_id IN (...))) # # # For each artist, eager load albums and tracks 1-10 for each album # Artist.eager(albums: {tracks: proc{|ds| ds.where(number: 1..10)}}).all # # SELECT * FROM artists # # SELECT * FROM albums WHERE (artist_id IN (...)) # # SELECT * FROM tracks WHERE ((number >= 1) AND (number <= 10) AND (album_id IN (...))) # # # For each artist, eager load albums with year > 1990, and tracks for those albums # Artist.eager(albums: {proc{|ds| ds.where{year > 1990}}=>:tracks}).all # # SELECT * FROM artists # # SELECT * FROM albums WHERE ((year > 1990) AND (artist_id IN (...))) # # SELECT * FROM tracks WHERE (album_id IN (...)) def eager(*associations) opts = @opts[:eager] association_opts = eager_options_for_associations(associations) opts = opts ? opts.merge(association_opts) : association_opts clone(:eager=>opts.freeze) end # The secondary eager loading method. Loads all associations in a single query. This # method should only be used if you need to filter or order based on columns in associated tables, # or if you have done comparative benchmarking and determined it is faster. # # This method uses <tt>Dataset#graph</tt> to create appropriate aliases for columns in all the # tables. Then it uses the graph's metadata to build the associations from the single hash, and # finally replaces the array of hashes with an array of model objects inside all. # # Be very careful when using this with multiple one_to_many or many_to_many associations, as you can # create large cartesian products. If you must graph multiple one_to_many and many_to_many associations, # make sure your filters are narrow if the datasets are large. # # Each association's order, if defined, is respected. +eager_graph+ probably # won't work correctly on a limited dataset, unless you are # only graphing many_to_one, one_to_one, and one_through_one associations. # # Does not use the block defined for the association, since it does a single query for # all objects. You can use the :graph_* association options to modify the SQL query. # # Like +eager+, you need to call +all+ on the dataset for the eager loading to work. If you just # call +each+, it will yield plain hashes, each containing all columns from all the tables.
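#
# A hypothetical sketch of filtering on an associated table's column, which is the
# main reason to prefer +eager_graph+ over +eager+ (assuming the Album/artist setup
# from the examples below):
#
#   Album.eager_graph(:artist).where{artist[:name] > 'M'}.all
#   # SELECT ... FROM albums
#   # LEFT OUTER JOIN artists AS artist ON (artist.id = albums.artist_id)
#   # WHERE (artist.name > 'M')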
# # To modify the associated dataset that will be joined to the current dataset, you should use a # hash for the association, with the key being the association name symbol, and the value being # a callable object that is called with the associated dataset and should return a modified # dataset. If that association also has dependent associations, instead of a callable object, # use a hash with the callable object being the key, and the dependent association(s) as the value. # # You can specify a custom alias and/or join type on a per-association basis by providing a # Sequel::SQL::AliasedExpression object instead of a Symbol for the association name. # # You cannot mix calls to +eager_graph+ and +graph+ on the same dataset. # # Examples: # # # For each album, eager_graph load the artist # Album.eager_graph(:artist).all # # SELECT ... # # FROM albums # # LEFT OUTER JOIN artists AS artist ON (artist.id = albums.artist_id) # # # For each album, eager_graph load the artist, using a specified alias # Album.eager_graph(Sequel[:artist].as(:a)).all # # SELECT ... # # FROM albums # # LEFT OUTER JOIN artists AS a ON (a.id = albums.artist_id) # # # For each album, eager_graph load the artist, using a specified alias # # and custom join type # Album.eager_graph(Sequel[:artist].as(:a, join_type: :inner)).all # # SELECT ... # # FROM albums # # INNER JOIN artists AS a ON (a.id = albums.artist_id) # # # For each album, eager_graph load the artist and genre # Album.eager_graph(:artist, :genre).all # Album.eager_graph(:artist).eager_graph(:genre).all # # SELECT ... # # FROM albums # # LEFT OUTER JOIN artists AS artist ON (artist.id = albums.artist_id) # # LEFT OUTER JOIN genres AS genre ON (genre.id = albums.genre_id) # # # For each artist, eager_graph load albums and tracks for each album # Artist.eager_graph(albums: :tracks).all # # SELECT ... # # FROM artists # # LEFT OUTER JOIN albums ON (albums.artist_id = artists.id) # # LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) # # # For each artist, eager_graph load albums, tracks for each album, and genre for each track # Artist.eager_graph(albums: {tracks: :genre}).all # # SELECT ... # # FROM artists # # LEFT OUTER JOIN albums ON (albums.artist_id = artists.id) # # LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) # # LEFT OUTER JOIN genres AS genre ON (genre.id = tracks.genre_id) # # # For each artist, eager_graph load albums with year > 1990 # Artist.eager_graph(albums: proc{|ds| ds.where{year > 1990}}).all # # SELECT ... # # FROM artists # # LEFT OUTER JOIN ( # # SELECT * FROM albums WHERE (year > 1990) # # ) AS albums ON (albums.artist_id = artists.id) # # # For each artist, eager_graph load albums and tracks 1-10 for each album # Artist.eager_graph(albums: {tracks: proc{|ds| ds.where(number: 1..10)}}).all # # SELECT ... # # FROM artists # # LEFT OUTER JOIN albums ON (albums.artist_id = artists.id) # # LEFT OUTER JOIN ( # # SELECT * FROM tracks WHERE ((number >= 1) AND (number <= 10)) # # ) AS tracks ON (tracks.album_id = albums.id) # # # For each artist, eager_graph load albums with year > 1990, and tracks for those albums # Artist.eager_graph(albums: {proc{|ds| ds.where{year > 1990}}=>:tracks}).all # # SELECT ...
# # FROM artists # # LEFT OUTER JOIN ( # # SELECT * FROM albums WHERE (year > 1990) # # ) AS albums ON (albums.artist_id = artists.id) # # LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) def eager_graph(*associations) eager_graph_with_options(associations) end # Run eager_graph with some options specific to just this call. Unlike eager_graph, this takes # the associations as a single argument instead of multiple arguments. # # Options: # # :join_type :: Override the join type specified in the association # :limit_strategy :: Use a strategy for handling limits on associations. # Appropriate :limit_strategy values are: # true :: Pick the most appropriate based on what the database supports # :distinct_on :: Force use of DISTINCT ON strategy (*_one associations only) # :correlated_subquery :: Force use of correlated subquery strategy (one_to_* associations only) # :window_function :: Force use of window function strategy # :ruby :: Don't modify the SQL, implement limits/offsets with array slicing # # This can also be a hash with association name symbol keys and one of the above values, # to use different strategies per association. # # The default is the :ruby strategy. Choosing a different strategy can make your code # significantly slower in some cases (perhaps even the majority of cases), so you should # only use this if you have benchmarked that it is faster for your use cases. def eager_graph_with_options(associations, opts=OPTS) return self if associations.empty? opts = opts.dup unless opts.frozen? associations = [associations] unless associations.is_a?(Array) ds = if eg = @opts[:eager_graph] eg = eg.dup [:requirements, :reflections, :reciprocals, :limits].each{|k| eg[k] = eg[k].dup} eg[:local] = opts ds = clone(:eager_graph=>eg) ds.eager_graph_associations(ds, model, ds.opts[:eager_graph][:master], [], *associations) else # Each of the following has a symbol key for the table alias, with the following values: # :reciprocals :: the reciprocal value to use for this association # :reflections :: AssociationReflection instance related to this association # :requirements :: array of requirements for this association # :limits :: Any limit/offset array slicing that needs to be handled in ruby land after loading opts = {:requirements=>{}, :master=>alias_symbol(first_source), :reflections=>{}, :reciprocals=>{}, :limits=>{}, :local=>opts, :cartesian_product_number=>0, :row_proc=>row_proc} ds = clone(:eager_graph=>opts) ds = ds.eager_graph_associations(ds, model, ds.opts[:eager_graph][:master], [], *associations).naked end ds.opts[:eager_graph].freeze ds.opts[:eager_graph].each_value{|v| v.freeze if v.is_a?(Hash)} ds end # If the dataset is being eagerly loaded, default to calling all # instead of each. def as_hash(key_column=nil, value_column=nil, opts=OPTS) if (@opts[:eager_graph] || @opts[:eager]) && !opts.has_key?(:all) opts = Hash[opts] opts[:all] = true end super end # If the dataset is being eagerly loaded, default to calling all # instead of each. def to_hash_groups(key_column, value_column=nil, opts=OPTS) if (@opts[:eager_graph] || @opts[:eager]) && !opts.has_key?(:all) opts = Hash[opts] opts[:all] = true end super end # Do not attempt to split the result set into associations, # just return results as simple objects. This is useful if you # want to use eager_graph as a shortcut to have all of the joins # and aliasing set up, but want to do something else with the dataset.
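#
# A minimal sketch (assuming the Album/artist setup from the examples above):
#
#   Album.eager_graph(:artist).ungraphed.all
#   # => Album instances whose values hashes also contain the aliased
#   #    artist columns; no associations are populated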
def ungraphed ds = super.clone(:eager_graph=>nil) if (eg = @opts[:eager_graph]) && (rp = eg[:row_proc]) ds = ds.with_row_proc(rp) end ds end protected # Call graph on the association with the correct arguments, # update the eager_graph data structure, and recurse into # eager_graph_associations if there are any passed in associations # (which would be dependencies of the current association) # # Arguments: # ds :: Current dataset # model :: Current Model # ta :: table_alias used for the parent association # requirements :: an array, used as a stack for requirements # r :: association reflection for the current association, or an SQL::AliasedExpression # with the reflection as the expression, the alias base as the alias (or nil to # use the default alias), and an optional hash (passed as the expression's columns) # with a :join_type entry to use a custom join type. # *associations :: any associations dependent on this one def eager_graph_association(ds, model, ta, requirements, r, *associations) if r.is_a?(SQL::AliasedExpression) alias_base = r.alias if r.columns.is_a?(Hash) join_type = r.columns[:join_type] end r = r.expression else alias_base = r[:graph_alias_base] end assoc_table_alias = ds.unused_table_alias(alias_base) loader = r[:eager_grapher] if !associations.empty? if associations.first.respond_to?(:call) callback = associations.first associations = {} elsif associations.length == 1 && (assocs = associations.first).is_a?(Hash) && assocs.length == 1 && (pr_assoc = assocs.to_a.first) && pr_assoc.first.respond_to?(:call) callback, assoc = pr_assoc associations = assoc.is_a?(Array) ? assoc : [assoc] end end local_opts = ds.opts[:eager_graph][:local] limit_strategy = r.eager_graph_limit_strategy(local_opts[:limit_strategy]) if r[:conditions] && !Sequel.condition_specifier?(r[:conditions]) && !r[:orig_opts].has_key?(:graph_conditions) && !r[:orig_opts].has_key?(:graph_only_conditions) && !r.has_key?(:graph_block) raise Error, "Cannot eager_graph association when :conditions specified and not a hash or an array of pairs. Specify :graph_conditions, :graph_only_conditions, or :graph_block for the association. Model: #{r[:model]}, association: #{r[:name]}" end ds = loader.call(:self=>ds, :table_alias=>assoc_table_alias, :implicit_qualifier=>(ta == ds.opts[:eager_graph][:master]) ? first_source : qualifier_from_alias_symbol(ta, first_source), :callback=>callback, :join_type=>join_type || local_opts[:join_type], :join_only=>local_opts[:join_only], :limit_strategy=>limit_strategy, :from_self_alias=>ds.opts[:eager_graph][:master]) if r[:order_eager_graph] && (order = r.fetch(:graph_order, r[:order])) ds = ds.order_append(*qualified_expression(order, assoc_table_alias)) end eager_graph = ds.opts[:eager_graph] eager_graph[:requirements][assoc_table_alias] = requirements.dup eager_graph[:reflections][assoc_table_alias] = r if limit_strategy == :ruby eager_graph[:limits][assoc_table_alias] = r.limit_and_offset end eager_graph[:cartesian_product_number] += r[:cartesian_product_number] || 2 ds = ds.eager_graph_associations(ds, r.associated_class, assoc_table_alias, requirements + [assoc_table_alias], *associations) unless associations.empty? ds end # Check that the associations are valid for the given model. # Call eager_graph_association on each association.
# # Arguments: # ds :: Current dataset # model :: Current Model # ta :: table_alias used for the parent association # requirements :: an array, used as a stack for requirements # *associations :: the associations to add to the graph def eager_graph_associations(ds, model, ta, requirements, *associations) associations.flatten.each do |association| ds = case association when Symbol, SQL::AliasedExpression ds.eager_graph_association(ds, model, ta, requirements, eager_graph_check_association(model, association)) when Hash association.each do |assoc, assoc_assocs| ds = ds.eager_graph_association(ds, model, ta, requirements, eager_graph_check_association(model, assoc), assoc_assocs) end ds else raise(Sequel::Error, 'Associations must be in the form of a symbol or hash') end end ds end # Replace the array of plain hashes with an array of model objects with all eager_graphed # associations set in the associations cache for each object. def eager_graph_build_associations(hashes) hashes.replace(_eager_graph_build_associations(hashes, eager_graph_loader)) end private # Return a new dataset with JOINs of the given type added, using the tables and # conditions specified by the associations. def _association_join(type, associations) clone(:join=>clone(:graph_from_self=>false).eager_graph_with_options(associations, :join_type=>type, :join_only=>true).opts[:join]) end # Process the array of hashes using the eager graph loader to return an array # of model objects with the associations set. def _eager_graph_build_associations(hashes, egl) egl.load(hashes) end # If the association has conditions itself, then it requires additional filters be # added to the current dataset to ensure that the passed in object would also be # included by the association's conditions. def add_association_filter_conditions(ref, obj, expr) if expr != SQL::Constants::FALSE && ref.filter_by_associations_add_conditions? Sequel[ref.filter_by_associations_conditions_expression(obj)] else expr end end # Process the array of associations arguments (Symbols, Arrays, and Hashes), # and return a hash of options suitable for cascading. def eager_options_for_associations(associations) opts = {} associations.flatten.each do |association| case association when Symbol check_association(model, association) opts[association] = nil when Hash association.keys.each{|assoc| check_association(model, assoc)} opts.merge!(association) else raise(Sequel::Error, 'Associations must be in the form of a symbol or hash') end end opts end # Return an expression for filtering by the given association reflection and associated object. def association_filter_expression(op, ref, obj) meth = :"#{ref[:type]}_association_filter_expression" # Allow calling private association specific method to get filter expression send(meth, op, ref, obj) if respond_to?(meth, true) end # Handle inversion for association filters by returning an inverted expression, # plus also handling cases where the referenced columns are NULL. def association_filter_handle_inversion(op, exp, cols) if op == :'!=' || op == :'NOT IN' if exp == SQL::Constants::FALSE ~exp else ~exp | Sequel::SQL::BooleanExpression.from_value_pairs(cols.zip([]), :OR) end else exp end end # Return an expression for making sure that the given keys match the value of # the given methods for either the single object given or for any of the objects # given if +obj+ is an array. def association_filter_key_expression(keys, meths, obj) vals = if obj.is_a?(Sequel::Dataset) {(keys.length == 1 ?
keys.first : keys)=>obj.select(*meths).exclude(Sequel::SQL::BooleanExpression.from_value_pairs(meths.zip([]), :OR))} else vals = Array(obj).reject{|o| !meths.all?{|m| o.get_column_value(m)}} return SQL::Constants::FALSE if vals.empty? if obj.is_a?(Array) if keys.length == 1 meth = meths.first {keys.first=>vals.map{|o| o.get_column_value(meth)}} else {keys=>vals.map{|o| meths.map{|m| o.get_column_value(m)}}} end else keys.zip(meths.map{|k| obj.get_column_value(k)}) end end SQL::BooleanExpression.from_value_pairs(vals) end # Make sure the association is valid for this model, and return the related AssociationReflection. def check_association(model, association) raise(Sequel::UndefinedAssociation, "Invalid association #{association} for #{model.name}") unless reflection = model.association_reflection(association) raise(Sequel::Error, "Eager loading is not allowed for #{model.name} association #{association}") if reflection[:allow_eager] == false reflection end # Allow associations that are eagerly graphed to be specified as an SQL::AliasedExpression, for # per-call determining of the alias base. def eager_graph_check_association(model, association) reflection = if association.is_a?(SQL::AliasedExpression) expr = association.expression if expr.is_a?(SQL::Identifier) expr = expr.value if expr.is_a?(String) expr = expr.to_sym end end check_reflection = check_association(model, expr) SQL::AliasedExpression.new(check_reflection, association.alias || expr, association.columns) else check_reflection = check_association(model, association) end if check_reflection && check_reflection[:allow_eager_graph] == false raise Error, "eager_graph not allowed for #{reflection.inspect}" end reflection end # The EagerGraphLoader instance used for converting eager_graph results. def eager_graph_loader unless egl = cache_get(:_model_eager_graph_loader) egl = cache_set(:_model_eager_graph_loader, EagerGraphLoader.new(self)) end egl.dup end # Eagerly load all specified associations. def eager_load(a, eager_assoc=@opts[:eager], m=model) return if a.empty? # Reflections for all associations to eager load reflections = eager_assoc.keys.map{|assoc| m.association_reflection(assoc) || (raise Sequel::UndefinedAssociation, "Model: #{self}, Association: #{assoc}")} perform_eager_loads(prepare_eager_load(a, reflections, eager_assoc)) reflections.each do |r| a.each{|object| object.send(:run_association_callbacks, r, :after_load, object.associations[r[:name]])} if r[:after_load] end nil end # Prepare a hash loaders and eager options which will be used to implement the eager loading. def prepare_eager_load(a, reflections, eager_assoc) eager_load_data = {} # Key is foreign/primary key name symbol. # Value is hash with keys being foreign/primary key values (generally integers) # and values being an array of current model objects with that specific foreign/primary key key_hash = {} # Populate the key_hash entry for each association being eagerly loaded reflections.each do |r| if key = r.eager_loader_key # key_hash for this key has already been populated, # skip populating again so that duplicate values # aren't added. unless id_map = key_hash[key] id_map = key_hash[key] = Hash.new{|h,k| h[k] = []} # Supporting both single (Symbol) and composite (Array) keys. a.each do |rec| case key when Array if (k = key.map{|k2| rec.get_column_value(k2)}) && k.all? 
id_map[k] << rec end when Symbol if k = rec.get_column_value(key) id_map[k] << rec end else raise Error, "unhandled eager_loader_key #{key.inspect} for association #{r[:name]}" end end end else id_map = nil end associations = eager_assoc[r[:name]] if associations.respond_to?(:call) eager_block = associations associations = OPTS elsif associations.is_a?(Hash) && associations.length == 1 && (pr_assoc = associations.to_a.first) && pr_assoc.first.respond_to?(:call) eager_block, associations = pr_assoc end eager_load_data[r[:eager_loader]] = {:key_hash=>key_hash, :rows=>a, :associations=>associations, :self=>self, :eager_block=>eager_block, :id_map=>id_map} end eager_load_data end # Using the hash of loaders and eager options, perform the eager loading. def perform_eager_loads(eager_load_data) eager_load_data.map do |loader, eo| perform_eager_load(loader, eo) end end # Perform eager loading for a single association using the loader and eager options. def perform_eager_load(loader, eo) loader.call(eo) end # Return a subquery expression for filtering by a many_to_many association def many_to_many_association_filter_expression(op, ref, obj) lpks, lks, rks = ref.values_at(:left_primary_key_columns, :left_keys, :right_keys) jt = ref.join_table_alias lpks = lpks.first if lpks.length == 1 lpks = ref.qualify(model.table_name, lpks) meths = if obj.is_a?(Sequel::Dataset) ref.qualify(obj.model.table_name, ref.right_primary_keys) else ref.right_primary_key_methods end expr = association_filter_key_expression(ref.qualify(jt, rks), meths, obj) unless expr == SQL::Constants::FALSE expr = SQL::BooleanExpression.from_value_pairs(lpks=>model.db.from(ref[:join_table]).select(*ref.qualify(jt, lks)).where(expr).exclude(SQL::BooleanExpression.from_value_pairs(ref.qualify(jt, lks).zip([]), :OR))) expr = add_association_filter_conditions(ref, obj, expr) end association_filter_handle_inversion(op, expr, Array(lpks)) end alias one_through_one_association_filter_expression many_to_many_association_filter_expression # Return a simple equality expression for filtering by a many_to_one association def many_to_one_association_filter_expression(op, ref, obj) keys = ref.qualify(model.table_name, ref[:key_columns]) meths = if obj.is_a?(Sequel::Dataset) ref.qualify(obj.model.table_name, ref.primary_keys) else ref.primary_key_methods end expr = association_filter_key_expression(keys, meths, obj) expr = add_association_filter_conditions(ref, obj, expr) association_filter_handle_inversion(op, expr, keys) end # Return a simple equality expression for filtering by a one_to_* association def one_to_many_association_filter_expression(op, ref, obj) keys = ref.qualify(model.table_name, ref[:primary_key_columns]) meths = if obj.is_a?(Sequel::Dataset) ref.qualify(obj.model.table_name, ref[:keys]) else ref[:key_methods] end expr = association_filter_key_expression(keys, meths, obj) expr = add_association_filter_conditions(ref, obj, expr) association_filter_handle_inversion(op, expr, keys) end alias one_to_one_association_filter_expression one_to_many_association_filter_expression def non_sql_option?(key) super || key == :eager || key == :eager_graph end # Build associations from the graph if #eager_graph was used, # and/or load other associations if #eager was used. def post_load(all_records) eager_graph_build_associations(all_records) if @opts[:eager_graph] eager_load(all_records) if @opts[:eager] && (row_proc || @opts[:eager_graph]) super end end # This class is the internal implementation of eager_graph.
It is responsible for taking an array of plain # hashes and returning an array of model objects with all eager_graphed associations already set in the # association cache. class EagerGraphLoader # Hash with table alias symbol keys and after_load hook values attr_reader :after_load_map # Hash with table alias symbol keys and association name values attr_reader :alias_map # Hash with table alias symbol keys and subhash values mapping column_alias symbols to the # symbol of the real name of the column attr_reader :column_maps # Recursive hash with table alias symbol keys mapping to hashes with dependent table alias symbol keys. attr_reader :dependency_map # Hash with table alias symbol keys and [limit, offset] values attr_reader :limit_map # The table alias symbol for the primary model attr_reader :master # Hash with table alias symbol keys and primary key symbol values (or arrays of primary key symbols for # composite key tables) attr_reader :primary_keys # Hash with table alias symbol keys and reciprocal association symbol values, # used for setting reciprocals for one_to_many associations. attr_reader :reciprocal_map # Hash with table alias symbol keys and subhash values mapping primary key symbols (or array of symbols) # to model instances. Used so that only a single model instance is created for each object. attr_reader :records_map # Hash with table alias symbol keys and AssociationReflection values attr_reader :reflection_map # Hash with table alias symbol keys and callable values used to create model instances attr_reader :row_procs # Hash with table alias symbol keys and true/false values, where true means the # association represented by the table alias uses an array of values instead of # a single value (i.e. true => *_many, false => *_to_one). attr_reader :type_map # Initialize all of the data structures used during loading. def initialize(dataset) opts = dataset.opts eager_graph = opts[:eager_graph] @master = eager_graph[:master] requirements = eager_graph[:requirements] reflection_map = @reflection_map = eager_graph[:reflections] reciprocal_map = @reciprocal_map = eager_graph[:reciprocals] limit_map = @limit_map = eager_graph[:limits] @unique = eager_graph[:cartesian_product_number] > 1 alias_map = @alias_map = {} type_map = @type_map = {} after_load_map = @after_load_map = {} reflection_map.each do |k, v| alias_map[k] = v[:name] after_load_map[k] = v[:after_load] if v[:after_load] type_map[k] = if v.returns_array? true elsif (limit_and_offset = limit_map[k]) && !limit_and_offset.last.nil? :offset end end after_load_map.freeze alias_map.freeze type_map.freeze # Make dependency map hash out of requirements array for each association. # This builds a tree of dependencies that will be used for recursion # to ensure that all parts of the object graph are loaded into the # appropriate subordinate association. dependency_map = @dependency_map = {} # Sort the associations by requirements length, so that # requirements are added to the dependency hash before their # dependencies. requirements.sort_by{|a| a[1].length}.each do |ta, deps| if deps.empty? 
dependency_map[ta] = {} else deps = deps.dup hash = dependency_map[deps.shift] deps.each do |dep| hash = hash[dep] end hash[ta] = {} end end freezer = lambda do |h| h.freeze h.each_value(&freezer) end freezer.call(dependency_map) datasets = opts[:graph][:table_aliases].to_a.reject{|ta,ds| ds.nil?} column_aliases = opts[:graph][:column_aliases] primary_keys = {} column_maps = {} models = {} row_procs = {} datasets.each do |ta, ds| models[ta] = ds.model primary_keys[ta] = [] column_maps[ta] = {} row_procs[ta] = ds.row_proc end column_aliases.each do |col_alias, tc| ta, column = tc column_maps[ta][col_alias] = column end column_maps.each do |ta, h| pk = models[ta].primary_key if pk.is_a?(Array) primary_keys[ta] = [] h.select{|ca, c| primary_keys[ta] << ca if pk.include?(c)} else h.select{|ca, c| primary_keys[ta] = ca if pk == c} end end @column_maps = column_maps.freeze @primary_keys = primary_keys.freeze @row_procs = row_procs.freeze # For performance, create two special maps for the master table, # so you can skip a hash lookup. @master_column_map = column_maps[master] @master_primary_keys = primary_keys[master] # Add a special hash mapping table alias symbols to 5 element arrays that just # contain the data in other data structures for that table alias. This is # used for performance, to get all values in one hash lookup instead of # separate hash lookups for each data structure. ta_map = {} alias_map.each_key do |ta| ta_map[ta] = [row_procs[ta], alias_map[ta], type_map[ta], reciprocal_map[ta]].freeze end @ta_map = ta_map.freeze freeze end # Return an array of primary model instances with the associations cache prepopulated # for all model objects (both primary and associated). def load(hashes) # This mapping is used to make sure that duplicate entries in the # result set are mapped to a single record. For example, using a # single one_to_many association with 10 associated records, # the main object column values appear in the object graph 10 times. # We map by primary key, if available, or by the object's entire values, # if not. The mapping must be per table, so create sub maps for each table # alias. @records_map = records_map = {} alias_map.keys.each{|ta| records_map[ta] = {}} master = master() # Assign to local variables for speed increase rp = row_procs[master] rm = records_map[master] = {} dm = dependency_map records_map.freeze # This will hold the final record set that we will be replacing the object graph with. records = [] hashes.each do |h| unless key = master_pk(h) key = hkey(master_hfor(h)) end unless primary_record = rm[key] primary_record = rm[key] = rp.call(master_hfor(h)) # Only add it to the list of records to return if it is a new record records.push(primary_record) end # Build all associations for the current object and it's dependencies _load(dm, primary_record, h) end # Remove duplicate records from all associations if this graph could possibly be a cartesian product # Run after_load procs if there are any post_process(records, dm) if @unique || !after_load_map.empty? || !limit_map.empty? records_map.each_value(&:freeze) freeze records end private # Recursive method that creates associated model objects and associates them to the current model object. def _load(dependency_map, current, h) dependency_map.each do |ta, deps| unless key = pk(ta, h) ta_h = hfor(ta, h) unless ta_h.values.any? assoc_name = alias_map[ta] unless (assoc = current.associations).has_key?(assoc_name) assoc[assoc_name] = type_map[ta] ? 
[] : nil end next end key = hkey(ta_h) end rp, assoc_name, tm, rcm = @ta_map[ta] rm = records_map[ta] # Check type map for all dependencies, and use a unique # object if any are dependencies for multiple objects, # to prevent duplicate objects from showing up in the case # the normal duplicate removal code is not being used. if !@unique && !deps.empty? && deps.any?{|dep_key,_| @ta_map[dep_key][2]} key = [current.object_id, key] end unless rec = rm[key] rec = rm[key] = rp.call(hfor(ta, h)) end if tm unless (assoc = current.associations).has_key?(assoc_name) assoc[assoc_name] = [] end assoc[assoc_name].push(rec) rec.associations[rcm] = current if rcm else current.associations[assoc_name] ||= rec end # Recurse into dependencies of the current object _load(deps, rec, h) unless deps.empty? end end # Return the subhash for the specific table alias +ta+ by parsing the values out of the main hash +h+ def hfor(ta, h) out = {} @column_maps[ta].each{|ca, c| out[c] = h[ca]} out end # Return a suitable hash key for any subhash +h+, which is an array of values by column order. # This is only used if the primary key cannot be used. def hkey(h) h.sort_by{|x| x[0]} end # Return the subhash for the master table by parsing the values out of the main hash +h+ def master_hfor(h) out = {} @master_column_map.each{|ca, c| out[c] = h[ca]} out end # Return a primary key value for the master table by parsing it out of the main hash +h+. def master_pk(h) x = @master_primary_keys if x.is_a?(Array) unless x == [] x = x.map{|ca| h[ca]} x if x.all? end else h[x] end end # Return a primary key value for the given table alias by parsing it out of the main hash +h+. def pk(ta, h) x = primary_keys[ta] if x.is_a?(Array) unless x == [] x = x.map{|ca| h[ca]} x if x.all? end else h[x] end end # If the result set is the result of a cartesian product, then it is possible that # there are multiple records for each association when there should only be one. # In that case, for each object in all associations loaded via +eager_graph+, run # uniq! on the association to make sure no duplicate records show up. # Note that this can cause legitimate duplicate records to be removed. def post_process(records, dependency_map) records.each do |record| dependency_map.each do |ta, deps| assoc_name = alias_map[ta] list = record.public_send(assoc_name) rec_list = if type_map[ta] list.uniq! if lo = limit_map[ta] limit, offset = lo offset ||= 0 if type_map[ta] == :offset [record.associations[assoc_name] = list[offset]] else list.replace(list[(offset)..(limit ? (offset)+limit-1 : -1)] || []) end else list end elsif list [list] else [] end record.send(:run_association_callbacks, reflection_map[ta], :after_load, list) if after_load_map[ta] post_process(rec_list, deps) if !rec_list.empty? && !deps.empty? 
end end end end end end
sequel-5.63.0/lib/sequel/model/base.rb
# frozen-string-literal: true module Sequel class Model extend Enumerable extend Inflections # Class methods for Sequel::Model that implement basic model functionality. # # * All of the following methods have class methods created that send the method # to the model's dataset: all, as_hash, avg, count, cross_join, distinct, each, # each_server, empty?, except, exclude, exclude_having, fetch_rows, # filter, first, first!, for_update, from, from_self, full_join, full_outer_join, # get, graph, grep, group, group_and_count, group_append, group_by, having, import, # inner_join, insert, intersect, invert, join, join_table, last, left_join, # left_outer_join, limit, lock_style, map, max, min, multi_insert, naked, natural_full_join, # natural_join, natural_left_join, natural_right_join, offset, order, order_append, order_by, # order_more, order_prepend, paged_each, qualify, reverse, reverse_order, right_join, # right_outer_join, select, select_all, select_append, select_group, select_hash, # select_hash_groups, select_map, select_more, select_order_map, server, # single_record, single_record!, single_value, single_value!, sum, to_hash, to_hash_groups, # truncate, unfiltered, ungraphed, ungrouped, union, unlimited, unordered, where, where_all, # where_each, where_single_value, with, with_recursive, with_sql module ClassMethods # Whether to cache the anonymous models created by Sequel::Model(), true by default. This is # required for reloading them correctly (avoiding the superclass mismatch). attr_accessor :cache_anonymous_models # Array of modules that extend this model's dataset. Stored # so that if the model's dataset is changed, it will be extended # with all of these modules. attr_reader :dataset_method_modules # The Module subclass to use for dataset_module blocks. attr_reader :dataset_module_class # The default options to use for Model#set_fields. These are merged with # the options given to set_fields. attr_accessor :default_set_fields_options # SQL string fragment used for faster DELETE statement creation when deleting/destroying # model instances, or nil if the optimization should not be used. For internal use only. attr_reader :fast_instance_delete_sql # SQL string fragment used for faster lookups by primary key, or nil if the optimization # should not be used. For internal use only. attr_reader :fast_pk_lookup_sql # The dataset that instance datasets (#this) are based on. Generally a naked version of # the model's dataset limited to one row. For internal use only. attr_reader :instance_dataset # Array of plugin modules loaded by this class # # Sequel::Model.plugins # # => [Sequel::Model, Sequel::Model::Associations] attr_reader :plugins # The primary key for the class. Sequel can determine this automatically for # many databases, but not all, so you may need to set it manually.
If not # determined automatically, the default is :id. attr_reader :primary_key # Whether to raise an error instead of returning nil on a failure # to save/create/save_changes/update/destroy due to a validation failure or # a before_* hook returning false (default: true). attr_accessor :raise_on_save_failure # Whether to raise an error when unable to typecast data for a column # (default: false). This should be set to true if you want to have model # setter methods raise errors if the argument cannot be typecast properly. attr_accessor :raise_on_typecast_failure # Whether to raise an error if an UPDATE or DELETE query related to # a model instance does not modify exactly 1 row. If set to false, # Sequel will not check the number of rows modified (default: true). attr_accessor :require_modification # If true (the default), requires that all models have valid tables, # raising exceptions if creating a model without a valid table backing it. # Setting this to false will allow the creation of model classes where the # underlying table doesn't exist. attr_accessor :require_valid_table # Should be the literal primary key column name if this Model's table has a simple primary key, or # nil if the model has a compound primary key or no primary key. attr_reader :simple_pk # Should be the literal table name if this Model's dataset is a simple table (no select, order, join, etc.), # or nil otherwise. This and simple_pk are used for an optimization in Model.[]. attr_reader :simple_table # Whether mass assigning via .create/.new/#set/#update should raise an error # if an invalid key is used. A key is invalid if no setter method exists # for that key or the access to the setter method is restricted (e.g. due to it # being a primary key field). If set to false, silently skip # any key where the setter method doesn't exist or access to it is restricted. attr_accessor :strict_param_setting # Whether to typecast the empty string ('') to nil for columns that # are not string or blob. In most cases the empty string would be the # way to specify a NULL SQL value in string form (nil.to_s == ''), # and an empty string would not usually be typecast correctly for other # types, so the default is true. attr_accessor :typecast_empty_string_to_nil # Whether to typecast attribute values on assignment (default: true). # If set to false, no typecasting is done, so it will be left up to the # database to typecast the value correctly. attr_accessor :typecast_on_assignment # Whether to use a transaction by default when saving/deleting records (default: true). # If you are sending database queries in before_* or after_* hooks, you shouldn't change # the default setting without a good reason. attr_accessor :use_transactions # Define a Model method on the given module that calls the Model # method on the receiver. This is how the Sequel::Model() method is # defined, and allows you to define Model() methods on other modules, # making it easier to have custom model settings for all models under # a namespace. Example: # # module Foo # Model = Class.new(Sequel::Model) # Model.def_Model(self) # DB = Model.db = Sequel.connect(ENV['FOO_DATABASE_URL']) # Model.plugin :prepared_statements # # class Bar < Model # # Uses Foo::DB[:bars] # end # # class Baz < Model(:my_baz) # # Uses Foo::DB[:my_baz] # end # end def def_Model(mod) model = self mod.define_singleton_method(:Model) do |source| model.Model(source) end end # Lets you create a Model subclass with its dataset already set. 
# +source+ should be an instance of one of the following classes: # # Database :: Sets the database for this model to +source+. # Generally only useful when subclassing directly # from the returned class, where the name of the # subclass sets the table name (which is combined # with the +Database+ in +source+ to create the # dataset to use) # Dataset :: Sets the dataset for this model to +source+. # other :: Sets the table name for this model to +source+. The # class will use the default database for model # classes in order to create the dataset. # # The purpose of this method is to set the dataset/database automatically # for a model class, if the table name doesn't match the default table # name that Sequel would use. # # When creating subclasses of Sequel::Model itself, this method is usually # called on Sequel itself, using <tt>Sequel::Model(:something)</tt>. # # # Using a symbol # class Comment < Sequel::Model(:something) # table_name # => :something # end # # # Using a dataset # class Comment < Sequel::Model(DB1[:something]) # dataset # => DB1[:something] # end # # # Using a database # class Comment < Sequel::Model(DB1) # dataset # => DB1[:comments] # end def Model(source) if cache_anonymous_models cache = Sequel.synchronize{@Model_cache ||= {}} if klass = Sequel.synchronize{cache[source]} return klass end end klass = Class.new(self) if source.is_a?(::Sequel::Database) klass.db = source else klass.set_dataset(source) end if cache_anonymous_models Sequel.synchronize{cache[source] = klass} end klass end # Returns the first record from the database matching the conditions. # If a hash is given, it is used as the conditions. If another # object is given, it finds the first record whose primary key(s) match # the given argument(s). If no object is returned by the dataset, returns nil. # # Artist[1] # SELECT * FROM artists WHERE id = 1 # # => #<Artist {:id=>1, ...}> # # Artist[name: 'Bob'] # SELECT * FROM artists WHERE (name = 'Bob') LIMIT 1 # # => #<Artist {:name=>'Bob', ...}> def [](*args) args = args.first if args.size <= 1 args.is_a?(Hash) ? first(args) : (primary_key_lookup(args) unless args.nil?) end # Initializes a model instance as an existing record. This constructor is # used by Sequel to initialize model instances when fetching records. # Requires that values be a hash where all keys are symbols. It # probably should not be used by external code. def call(values) o = allocate o.instance_variable_set(:@values, values) o end # Clear the setter_methods cache def clear_setter_methods_cache @setter_methods = nil unless frozen? end # Returns the columns in the result set in their original order. # Generally, this will use the columns determined via the database # schema, but in certain cases (e.g. models that are based on a joined # dataset) it will use <tt>Dataset#columns</tt> to find the columns. # # Artist.columns # # => [:id, :name] def columns return @columns if @columns return nil if frozen? set_columns(dataset.naked.columns) end # Creates instance using new with the given values and block, and saves it. # # Artist.create(name: 'Bob') # # INSERT INTO artists (name) VALUES ('Bob') # # Artist.create do |a| # a.name = 'Jim' # end # INSERT INTO artists (name) VALUES ('Jim') def create(values = OPTS, &block) new(values, &block).save end # Returns the dataset associated with the Model class. Raises # an +Error+ if there is no associated dataset for this class. # In most cases, you don't need to call this directly, as Model # proxies many dataset methods to the underlying dataset. 
# # Artist.dataset.all # SELECT * FROM artists def dataset @dataset || raise(Error, "No dataset associated with #{self}") end # Alias of set_dataset def dataset=(ds) set_dataset(ds) end # Extend the dataset with a module, similar to adding # a plugin with the methods defined in DatasetMethods. # This is the recommended way to add methods to model datasets. # # If given an argument, it should be a module, and is used to extend # the underlying dataset. Otherwise an anonymous module is created, and # if a block is given, it is module_evaled, allowing you to define # dataset methods directly using the standard ruby def syntax. # Returns the module given or the anonymous module created. # # # Usage with existing module # Album.dataset_module Sequel::ColumnsIntrospection # # # Usage with anonymous module # Album.dataset_module do # def foo # :bar # end # end # Album.dataset.foo # # => :bar # Album.foo # # => :bar # # Any anonymous modules created are actually instances of Sequel::Model::DatasetModule # (a Module subclass), which allows you to call the subset method on them, which # defines a dataset method that adds a filter. There are also a number of other # methods with the same names as the dataset methods, which you can use to define # named dataset methods: # # Album.dataset_module do # where(:released, Sequel[:release_date] <= Sequel::CURRENT_DATE) # order :by_release_date, :release_date # select :for_select_options, :id, :name, :release_date # end # Album.released.sql # # => "SELECT * FROM albums WHERE (release_date <= CURRENT_DATE)" # Album.by_release_date.sql # # => "SELECT * FROM albums ORDER BY release_date" # Album.for_select_options.sql # # => "SELECT id, name, release_date FROM albums" # Album.released.by_release_date.for_select_options.sql # # => "SELECT id, name, release_date FROM albums WHERE (release_date <= CURRENT_DATE) ORDER BY release_date" # # The following methods are supported: distinct, eager, exclude, exclude_having, grep, group, group_and_count, # group_append, having, limit, offset, order, order_append, order_prepend, select, select_all, # select_append, select_group, where, and server. # # The advantage of using these DatasetModule methods to define your dataset # methods is that they can take advantage of dataset caching to improve # performance. # # Any public methods in the dataset module will have class methods created that # call the method on the dataset, assuming that the class method is not already # defined. def dataset_module(mod = nil, &block) if mod raise Error, "can't provide both argument and block to Model.dataset_module" if block dataset_extend(mod) mod else @dataset_module ||= dataset_module_class.new(self) @dataset_module.module_eval(&block) if block dataset_extend(@dataset_module) @dataset_module end end # Returns the database associated with the Model class. # If this model doesn't have a database associated with it, # assumes the superclass's database, or the first object in # Sequel::DATABASES. If no Sequel::Database object has # been created, raises an error. # # Artist.db.transaction do # BEGIN # Artist.create(name: 'Bob') # # INSERT INTO artists (name) VALUES ('Bob') # end # COMMIT def db return @db if @db @db = self == Model ? Sequel.synchronize{DATABASES.first} : superclass.db raise(Error, "No database associated with #{self}: have you called Sequel.connect or #{self}.db= ?") unless @db @db end # Sets the database associated with the Model class. # Should only be used if the Model class currently does not # have a dataset defined.
# # This can be used directly on Sequel::Model to set the default database to be used # by subclasses, or to override the database used for specific models: # # Sequel::Model.db = DB1 # Artist = Class.new(Sequel::Model) # Artist.db = DB2 # # Note that you should not use this to change the model's database # at runtime. If you have that need, you should look into Sequel's # sharding support, or consider using separate model classes per Database. def db=(db) raise Error, "Cannot use Sequel::Model.db= on model with existing dataset. Use Sequel::Model.dataset= instead." if @dataset @db = db end # Returns the cached schema information if available or gets it # from the database. This is a hash where keys are column symbols # and values are hashes of information related to the column. See # <tt>Database#schema</tt>. # # Artist.db_schema # # {:id=>{:type=>:integer, :primary_key=>true, ...}, # # :name=>{:type=>:string, :primary_key=>false, ...}} def db_schema return @db_schema if @db_schema return nil if frozen? @db_schema = get_db_schema end # Create a column alias, where the column methods have one name, but the underlying storage uses a # different name. def def_column_alias(meth, column) clear_setter_methods_cache overridable_methods_module.module_eval do define_method(meth){self[column]} define_method("#{meth}="){|v| self[column] = v} end end # Finds a single record according to the supplied filter. # You are encouraged to use Model.[] or Model.first instead of this method. # # Artist.find(name: 'Bob') # # SELECT * FROM artists WHERE (name = 'Bob') LIMIT 1 # # Artist.find{name > 'M'} # # SELECT * FROM artists WHERE (name > 'M') LIMIT 1 def find(*args, &block) first(*args, &block) end # Like +find+ but invokes create with given conditions when record does not # exist. Unlike +find+ in that the block used in this method is not passed # to +find+, but instead is passed to +create+ only if +find+ does not # return an object. # # Artist.find_or_create(name: 'Bob') # # SELECT * FROM artists WHERE (name = 'Bob') LIMIT 1 # # INSERT INTO artists (name) VALUES ('Bob') # # Artist.find_or_create(name: 'Jim'){|a| a.hometown = 'Sactown'} # # SELECT * FROM artists WHERE (name = 'Jim') LIMIT 1 # # INSERT INTO artists (name, hometown) VALUES ('Jim', 'Sactown') def find_or_create(cond, &block) find(cond) || create(cond, &block) end # Freeze a model class, disallowing any further changes to it. def freeze return self if frozen? dataset_module.freeze overridable_methods_module.freeze if @dataset db_schema.freeze.each_value(&:freeze) columns.freeze setter_methods.freeze else @setter_methods = [].freeze end @dataset_method_modules.freeze @default_set_fields_options.freeze @plugins.freeze super end # Whether the model has a dataset. True for most model classes, # but can be false if the model class is an abstract model class # designed for subclassing, such as Sequel::Model itself. def has_dataset? !@dataset.nil? end # Clear the setter_methods cache when a module is included, as it # may contain setter methods. def include(*mods) clear_setter_methods_cache super end # Returns the implicit table name for the model class, which is the demodulized, # underscored, pluralized name of the class. # # Artist.implicit_table_name # => :artists # Foo::ArtistAlias.implicit_table_name # => :artist_aliases def implicit_table_name pluralize(underscore(demodulize(name))).to_sym end # Calls #call with the values hash. def load(values) call(values) end # Mark the model as not having a primary key. 
Not having a primary key # can cause issues, among which is that you won't be able to update records. # # Artist.primary_key # => :id # Artist.no_primary_key # Artist.primary_key # => nil def no_primary_key clear_setter_methods_cache self.simple_pk = @primary_key = nil end # Loads a plugin for use with the model class, passing optional arguments # to the plugin. If the plugin is a module, load it directly. Otherwise, # require the plugin from sequel/plugins/#{plugin} and then attempt to load # the module using the camelized plugin name under Sequel::Plugins. def plugin(plugin, *args, &block) m = plugin.is_a?(Module) ? plugin : plugin_module(plugin) if !m.respond_to?(:apply) && !m.respond_to?(:configure) && (!args.empty? || block) Deprecation.deprecate("Plugin #{plugin} accepts no arguments or block, and passing arguments/block to it", "Remove arguments and block when loading the plugin") end unless @plugins.include?(m) @plugins << m m.apply(self, *args, &block) if m.respond_to?(:apply) extend(m::ClassMethods) if m.const_defined?(:ClassMethods, false) include(m::InstanceMethods) if m.const_defined?(:InstanceMethods, false) if m.const_defined?(:DatasetMethods, false) dataset_extend(m::DatasetMethods, :create_class_methods=>false) end end m.configure(self, *args, &block) if m.respond_to?(:configure) end # :nocov: ruby2_keywords(:plugin) if respond_to?(:ruby2_keywords, true) # :nocov: # Returns primary key attribute hash. If using a composite primary key, the # value should be an array with values for each primary key in the correct # order. For a standard primary key, value should be an object with a # compatible type for the key. If the model does not have a primary key, # raises an +Error+. # # Artist.primary_key_hash(1) # => {:id=>1} # Artist.primary_key_hash([1, 2]) # => {:id1=>1, :id2=>2} def primary_key_hash(value) case key = @primary_key when Symbol {key => value} when Array hash = {} key.zip(Array(value)){|k,v| hash[k] = v} hash else raise(Error, "#{self} does not have a primary key") end end # Return a hash where the keys are qualified column references. Uses the given # qualifier if provided, or the table_name otherwise. This is useful if you # plan to join other tables to this table and you want the column references # to be qualified. # # Artist.where(Artist.qualified_primary_key_hash(1)) # # SELECT * FROM artists WHERE (artists.id = 1) def qualified_primary_key_hash(value, qualifier=table_name) case key = @primary_key when Symbol {SQL::QualifiedIdentifier.new(qualifier, key) => value} when Array hash = {} key.zip(Array(value)){|k,v| hash[SQL::QualifiedIdentifier.new(qualifier, k)] = v} hash else raise(Error, "#{self} does not have a primary key") end end # Restrict the setting of the primary key(s) when using mass assignment (e.g. +set+). Because # this is the default, this only makes sense to use in a subclass where the # parent class has used +unrestrict_primary_key+. def restrict_primary_key clear_setter_methods_cache @restrict_primary_key = true end # Whether or not setting the primary key(s) when using mass assignment (e.g. +set+) is # restricted, true by default. def restrict_primary_key? @restrict_primary_key end # Sets the dataset associated with the Model class. +ds+ can be a +Symbol+, # +LiteralString+, <tt>SQL::Identifier</tt>, <tt>SQL::QualifiedIdentifier</tt>, # <tt>SQL::AliasedExpression</tt> # (all specifying a table name in the current database), or a +Dataset+. # If a dataset is used, the model's database is changed to the database of the given # dataset.
If a dataset is not used, a dataset is created from the current # database with the table name given. Other arguments raise an +Error+. # Returns self. # # It also attempts to determine the database schema for the model, # based on the given dataset. # # Note that you should not use this to change the model's dataset # at runtime. If you have that need, you should look into Sequel's # sharding support, or creating a separate Model class per dataset. # # You should avoid calling this method directly if possible. Instead you should # set the table name or dataset when creating the model class: # # # table name # class Artist < Sequel::Model(:tbl_artists) # end # # # dataset # class Artist < Sequel::Model(DB[:tbl_artists]) # end def set_dataset(ds, opts=OPTS) inherited = opts[:inherited] @dataset = convert_input_dataset(ds) @require_modification = @dataset.provides_accurate_rows_matched? if require_modification.nil? if inherited self.simple_table = superclass.simple_table @columns = superclass.instance_variable_get(:@columns) @db_schema = superclass.instance_variable_get(:@db_schema) else @dataset = @dataset.with_extend(*@dataset_method_modules.reverse) @db_schema = get_db_schema end @fast_pk_lookup_sql = @fast_instance_delete_sql = nil unless @dataset.supports_placeholder_literalizer? reset_instance_dataset self end # Sets the primary key for this model. You can use either a regular # or a composite primary key. To not use a primary key, set to nil # or use +no_primary_key+. On most adapters, Sequel can automatically # determine the primary key to use, so this method is not needed often. # # class Person < Sequel::Model # # regular key # set_primary_key :person_id # end # # class Tagging < Sequel::Model # # composite key # set_primary_key [:taggable_id, :tag_id] # end def set_primary_key(key) clear_setter_methods_cache if key.is_a?(Array) if key.length < 2 key = key.first else key = key.dup.freeze end end self.simple_pk = if key && !key.is_a?(Array) (@dataset || db).literal(key).freeze end @primary_key = key end # Cache of setter methods to allow by default, in order to speed up mass assignment. def setter_methods @setter_methods || (@setter_methods = get_setter_methods) end # Returns name of primary table for the dataset. If the table for the dataset # is aliased, returns the aliased name. # # Artist.table_name # => :artists # Sequel::Model(:foo).table_name # => :foo # Sequel::Model(Sequel[:foo].as(:bar)).table_name # => :bar def table_name dataset.first_source_alias end # Allow the setting of the primary key(s) when using the mass assignment methods. # Using this method can open up security issues, so be very careful before using it. # # Artist.set(id: 1) # Error # Artist.unrestrict_primary_key # Artist.set(id: 1) # No Error def unrestrict_primary_key clear_setter_methods_cache @restrict_primary_key = false end # Return the model instance with the primary key, or nil if there is no matching record. def with_pk(pk) primary_key_lookup(pk) end # Return the model instance with the primary key, or raise NoMatchingRow if there is no matching record.
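# For example (illustrative, assuming artist 1 exists and no row has id -1):
#
#   Artist.with_pk!(1)  # => #<Artist {:id=>1, ...}>
#   Artist.with_pk!(-1) # raises Sequel::NoMatchingRow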
def with_pk!(pk) with_pk(pk) || raise(NoMatchingRow.new(dataset)) end # Add model methods that call dataset methods Plugins.def_dataset_methods(self, (Dataset::ACTION_METHODS + Dataset::QUERY_METHODS + [:each_server]) - [:<<, :or, :[], :columns, :columns!, :delete, :update, :set_graph_aliases, :add_graph_aliases]) private # Yield to the passed block and if do_raise is false, swallow Sequel::Errors other than DatabaseConnectionError # and DatabaseDisconnectError. def check_non_connection_error(do_raise=require_valid_table) db.transaction(:savepoint=>:only){yield} rescue Sequel::DatabaseConnectionError, Sequel::DatabaseDisconnectError raise rescue Sequel::Error raise if do_raise end # Convert the given object to a Dataset that should be used as # this model's dataset. def convert_input_dataset(ds) case ds when Symbol, SQL::Identifier, SQL::QualifiedIdentifier self.simple_table = db.literal(ds).freeze ds = db.from(ds) when SQL::AliasedExpression, LiteralString self.simple_table = nil ds = db.from(ds) when Dataset ds = ds.from_self(:alias=>ds.first_source) if ds.joined_dataset? self.simple_table = if ds.send(:simple_select_all?) ds.literal(ds.first_source_table).freeze end @db = ds.db else raise(Error, "Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset") end set_dataset_row_proc(ds.clone(:model=>self)) end # Add the module to the class's dataset_method_modules. Extend the dataset with the # module if the model has a dataset. Add dataset methods to the class for all # public dataset methods. def dataset_extend(mod, opts=OPTS) @dataset = @dataset.with_extend(mod) if @dataset reset_instance_dataset dataset_method_modules << mod unless opts[:create_class_methods] == false mod.public_instance_methods.each{|meth| def_model_dataset_method(meth)} end end # Create a column accessor for a column with a method name that is hard to use in ruby code. def def_bad_column_accessor(column) im = instance_methods overridable_methods_module.module_eval do meth = :"#{column}=" unless im.include?(column) define_method(column){self[column]} alias_method(column, column) end unless im.include?(meth) define_method(meth){|v| self[column] = v} alias_method(meth, meth) end end end # Create the column accessors. For columns that can be used as method names directly in ruby code, # use a string to define the method for speed. For other column names, use a block. def def_column_accessor(*columns) clear_setter_methods_cache columns, bad_columns = columns.partition{|x| /\A[A-Za-z_][A-Za-z0-9_]*\z/.match(x.to_s)} bad_columns.each{|x| def_bad_column_accessor(x)} im = instance_methods columns.each do |column| meth = :"#{column}=" unless im.include?(column) overridable_methods_module.module_eval("def #{column}; self[:#{column}] end", __FILE__, __LINE__) overridable_methods_module.send(:alias_method, column, column) end unless im.include?(meth) overridable_methods_module.module_eval("def #{meth}(v); self[:#{column}] = v end", __FILE__, __LINE__) overridable_methods_module.send(:alias_method, meth, meth) end end end # Define a model method that calls the dataset method with the same name, # only used for methods with names that can't be represented directly in # ruby code.
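# (A name such as +paged_each+ matches the identifier regexp below and gets a
# faster eval-defined method, while a name such as +empty?+ or +first!+ falls
# back to define_singleton_method.)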
def def_model_dataset_method(meth) return if respond_to?(meth, true) if meth.to_s =~ /\A[A-Za-z_][A-Za-z0-9_]*\z/ instance_eval("def #{meth}(*args, &block); dataset.#{meth}(*args, &block) end", __FILE__, __LINE__) else define_singleton_method(meth){|*args, &block| dataset.public_send(meth, *args, &block)} end singleton_class.send(:alias_method, meth, meth) # :nocov: singleton_class.send(:ruby2_keywords, meth) if respond_to?(:ruby2_keywords, true) # :nocov: end # Get the schema from the database, fall back on checking the columns # via the database if that will return inaccurate results or if # it raises an error. def get_db_schema(reload = reload_db_schema?) set_columns(nil) return nil unless @dataset schema_hash = {} ds_opts = dataset.opts get_columns = proc{check_non_connection_error{columns} || []} schema_array = get_db_schema_array(reload) if db.supports_schema_parsing? if schema_array schema_array.each{|k,v| schema_hash[k] = v} # Set the primary key(s) based on the schema information, # if the schema information includes primary key information if schema_array.all?{|k,v| v.has_key?(:primary_key)} pks = schema_array.map{|k,v| k if v[:primary_key]}.compact pks.length > 0 ? set_primary_key(pks) : no_primary_key end if (select = ds_opts[:select]) && !(select.length == 1 && select.first.is_a?(SQL::ColumnAll)) # We don't remove the columns from the schema_hash, # as it's possible they will be used for typecasting # even if they are not selected. cols = get_columns.call cols.each{|c| schema_hash[c] ||= {}} def_column_accessor(*schema_hash.keys) else # Dataset is for a single table with all columns, # so set the columns based on the order they were # returned by the schema. cols = schema_array.map{|k,v| k} set_columns(cols) # Also set the columns for the dataset, so the dataset # doesn't have to do a query to get them. dataset.send(:columns=, cols) end else # If the dataset uses multiple tables or custom sql or getting # the schema raised an error, just get the columns and # create an empty schema hash for it. get_columns.call.each{|c| schema_hash[c] = {}} end schema_hash end # Get the array of schema information for the dataset. Returns nil if # the schema information cannot be determined. def get_db_schema_array(reload) check_non_connection_error(false){db.schema(dataset, :reload=>reload)} end # Uncached version of setter_methods, to be overridden by plugins # that want to modify the methods used. def get_setter_methods meths = instance_methods.map(&:to_s).select{|l| l.end_with?('=')} - RESTRICTED_SETTER_METHODS meths -= Array(primary_key).map{|x| "#{x}="} if primary_key && restrict_primary_key? meths end # If possible, set the dataset for the model subclass as soon as it # is created. Also, make sure the inherited class instance variables # are copied into the subclass. 
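# See +inherited_instance_variables+ below for which instance variables are
# copied, and whether each is assigned directly, duped, or copied via a Proc.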
# # Sequel queries the database to get schema information as soon as # a model class is created: # # class Artist < Sequel::Model # Causes schema query # end def inherited(subclass) super ivs = subclass.instance_variables inherited_instance_variables.each do |iv, dup| if (sup_class_value = instance_variable_get(iv)) && dup sup_class_value = case dup when :dup sup_class_value.dup when :hash_dup h = {} sup_class_value.each{|k,v| h[k] = v.dup} h when Proc dup.call(sup_class_value) else raise Error, "bad inherited instance variable type: #{dup.inspect}" end end subclass.instance_variable_set(iv, sup_class_value) end unless ivs.include?(:@dataset) if @dataset && self != Model subclass.set_dataset(@dataset.clone, :inherited=>true) elsif (n = subclass.name) && !n.to_s.empty? db subclass.set_dataset(subclass.implicit_table_name) end end end # A hash of instance variables to automatically set up in subclasses. # Keys are instance variable symbols, values should be: # nil :: Assign directly from superclass to subclass (frozen objects) # :dup :: Dup object when assigning from superclass to subclass (mutable objects) # :hash_dup :: Assign hash with same keys, but dup all the values # Proc :: Call with subclass to do the assignment def inherited_instance_variables { :@cache_anonymous_models=>nil, :@dataset_method_modules=>:dup, :@dataset_module_class=>nil, :@db=>nil, :@default_set_fields_options=>:dup, :@fast_instance_delete_sql=>nil, :@fast_pk_lookup_sql=>nil, :@plugins=>:dup, :@primary_key=>nil, :@raise_on_save_failure=>nil, :@raise_on_typecast_failure=>nil, :@require_modification=>nil, :@require_valid_table=>nil, :@restrict_primary_key=>nil, :@setter_methods=>nil, :@simple_pk=>nil, :@simple_table=>nil, :@strict_param_setting=>nil, :@typecast_empty_string_to_nil=>nil, :@typecast_on_assignment=>nil, :@use_transactions=>nil } end # For the given opts hash and default name or :class option, add a # :class_name option unless already present which contains the name # of the class to use as a string. The purpose is to allow late # binding to the class later using constantize. def late_binding_class_option(opts, default) case opts[:class] when String, Symbol # Delete :class to allow late binding class_name = opts.delete(:class).to_s if (namespace = opts[:class_namespace]) && !class_name.start_with?('::') class_name = "::#{namespace}::#{class_name}" end opts[:class_name] ||= class_name when Class opts[:class_name] ||= opts[:class].name end opts[:class_name] ||= '::' + ((name || '').split("::")[0..-2] + [camelize(default)]).join('::') end # Clear the setter_methods cache when a setter method is added. def method_added(meth) clear_setter_methods_cache if meth.to_s.end_with?('=') super end # Module that the class includes that holds methods the class adds for column accessors and # associations so that the methods can be overridden with +super+. def overridable_methods_module include(@overridable_methods_module = Module.new) unless @overridable_methods_module @overridable_methods_module end # Returns the module for the specified plugin. If the module is not # defined, the corresponding plugin is required. def plugin_module(plugin) module_name = plugin.to_s.gsub(/(^|_)(.)/){|x| x[-1..-1].upcase} unless Sequel::Plugins.const_defined?(module_name, false) require "sequel/plugins/#{plugin}" end Sequel::Plugins.const_get(module_name) end # Find the row in the dataset that matches the primary key. Uses # a static SQL optimization if the table and primary key are simple.
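# Otherwise it falls back to a regular dataset lookup using primary_key_hash.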
# # This method should not be called with a nil primary key, in case # it is overridden by plugins which assume that the passed argument # is valid. def primary_key_lookup(pk) if sql = @fast_pk_lookup_sql sql = sql.dup ds = dataset ds.literal_append(sql, pk) ds.fetch_rows(sql){|r| return ds.row_proc.call(r)} nil else dataset.first(primary_key_hash(pk)) end end # Whether to reload the database schema by default, ignoring any cached value. def reload_db_schema? false end # Reset the cached fast primary lookup SQL if a simple table and primary key # are used, or set it to nil if not used. def reset_fast_pk_lookup_sql @fast_pk_lookup_sql = if @simple_table && @simple_pk "SELECT * FROM #{@simple_table} WHERE #{@simple_pk} = ".freeze end @fast_instance_delete_sql = if @simple_table && @simple_pk "DELETE FROM #{@simple_table} WHERE #{@simple_pk} = ".freeze end end # Reset the instance dataset to a modified copy of the current dataset, # should be used whenever the model's dataset is modified. def reset_instance_dataset @instance_dataset = @dataset.limit(1).naked.skip_limit_check if @dataset end # Set the columns for this model and create accessor methods for each column. def set_columns(new_columns) @columns = new_columns def_column_accessor(*new_columns) if new_columns @columns end # Set the dataset's row_proc to the current model. def set_dataset_row_proc(ds) ds.with_row_proc(self) end # Reset the fast primary key lookup SQL when the simple_pk value changes. def simple_pk=(pk) @simple_pk = pk reset_fast_pk_lookup_sql end # Reset the fast primary key lookup SQL when the simple_table value changes. def simple_table=(t) @simple_table = t reset_fast_pk_lookup_sql end # Returns a copy of the model's dataset with custom SQL # # Artist.fetch("SELECT * FROM artists WHERE name LIKE 'A%'") # Artist.fetch("SELECT * FROM artists WHERE id = ?", 1) alias fetch with_sql end # Sequel::Model instance methods that implement basic model functionality. # # * All of the model before/after/around hooks are implemented as instance methods that are called # by Sequel when the appropriate action occurs. For example, when destroying # a model object, Sequel will call +around_destroy+, which will call +before_destroy+, do # the destroy, and then call +after_destroy+. # * The following instance_methods all call the class method of the same # name: columns, db, primary_key, db_schema. # * The following accessor methods are defined via metaprogramming: # raise_on_save_failure, raise_on_typecast_failure, require_modification, # strict_param_setting, typecast_empty_string_to_nil, typecast_on_assignment, # and use_transactions. The setter methods will change the setting for the # instance, and the getter methods will check for an instance setting, then # try the class setting if no instance setting has been set. module InstanceMethods HOOKS.each{|h| class_eval("def #{h}; end", __FILE__, __LINE__)} [:around_create, :around_update, :around_save, :around_destroy, :around_validation].each{|h| class_eval("def #{h}; yield end", __FILE__, __LINE__)} # Define instance method(s) that calls class method(s) of the # same name. Replaces the construct: # # define_method(meth){self.class.public_send(meth)} [:columns, :db, :primary_key, :db_schema].each{|meth| class_eval("def #{meth}; self.class.#{meth} end", __FILE__, __LINE__)} # Define instance method(s) that calls class method(s) of the # same name, caching the result in an instance variable. Define # standard attr_writer method for modifying that instance variable. 
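# For example (illustrative), this allows a per-instance override of a
# class-level setting:
#
#   artist = Artist[1]
#   artist.use_transactions = false # affects only this instance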
[:typecast_empty_string_to_nil, :typecast_on_assignment, :strict_param_setting, :raise_on_save_failure, :raise_on_typecast_failure, :require_modification, :use_transactions].each do |meth| class_eval("def #{meth}; !defined?(@#{meth}) ? (frozen? ? self.class.#{meth} : (@#{meth} = self.class.#{meth})) : @#{meth} end", __FILE__, __LINE__) attr_writer(meth) end # The hash of attribute values. Keys are symbols with the names of the # underlying database columns. The returned hash is a reference to the # receiver's values hash, and modifying it will also modify the receiver's # values. # # Artist.new(name: 'Bob').values # => {:name=>'Bob'} # Artist[1].values # => {:id=>1, :name=>'Jim', ...} attr_reader :values alias to_hash values # Get the value of the column. Takes a single symbol or string argument. # By default it calls send with the argument to get the value. This can # be overridden if you have columns that conflict with existing # method names. alias get_column_value send # Set the value of the column. Takes two arguments. The first is a # symbol or string argument for the column name, suffixed with =. The # second is the value to set for the column. By default it calls send # with the argument to set the value. This can be overridden if you have # columns that conflict with existing method names (unlikely for setter # methods, but possible). alias set_column_value send # Creates new instance and passes the given values to set. # If a block is given, yield the instance to the block. # # Arguments: # values :: should be a hash to pass to set. # # Artist.new(name: 'Bob') # # Artist.new do |a| # a.name = 'Bob' # end def initialize(values = OPTS) @values = {} @new = true @modified = true initialize_set(values) _clear_changed_columns(:initialize) yield self if defined?(yield) end # Returns value of the column's attribute. # # Artist[1][:id] #=> 1 def [](column) @values[column] end # Sets the value for the given column. If typecasting is enabled for # this object, typecast the value based on the column's type. # If this is a new record or the typecasted value isn't the same # as the current value for the column, mark the column as changed. # # a = Artist.new # a[:name] = 'Bob' # a.values #=> {:name=>'Bob'} def []=(column, value) # If it is new, it doesn't have a value yet, so we should # definitely set the new value. # If the column isn't in @values, we can't assume it is # NULL in the database, so assume it has changed. v = typecast_value(column, value) vals = @values if new? || !vals.include?(column) || v != (c = vals[column]) || v.class != c.class change_column_value(column, v) end end # Alias of eql? def ==(obj) eql?(obj) end # Case equality. By default, checks equality of the primary key value, see # pk_equal?. # # Artist[1] === Artist[1] # => true # Artist.new === Artist.new # => false # Artist[1].set(name: 'Bob') === Artist[1] # => true def ===(obj) case pkv = pk when nil return false when Array return false if pkv.any?(&:nil?) end (obj.class == model) && (obj.pk == pkv) end # If the receiver has a primary key value, returns true if the objects have # the same class and primary key value. # If the receiver's primary key value is nil or is an array containing # nil, returns false. # # Artist[1].pk_equal?(Artist[1]) # => true # Artist.new.pk_equal?(Artist.new) # => false # Artist[1].set(name: 'Bob').pk_equal?(Artist[1]) # => true alias pk_equal? 
=== # class is defined in Object, but it is also a keyword, # and since a lot of instance methods call class methods, # this alias makes it so you can use model instead of # self.class. # # Artist.new.model # => Artist alias_method :model, :class # The autoincrementing primary key for this model object. Should be # overridden if you have a composite primary key with one part of it # being autoincrementing. def autoincrementing_primary_key primary_key end # Cancel the current action. Should be called in before hooks to halt # the processing of the action. If a +msg+ argument is given and # the model instance is configured to raise exceptions on failure, # sets the message to use for the raised HookFailed exception. def cancel_action(msg=nil) raise_hook_failure(msg) end # The columns that have been updated. This isn't completely accurate, # as it could contain columns whose values have not changed. # # a = Artist[1] # a.changed_columns # => [] # a.name = 'Bob' # a.changed_columns # => [:name] def changed_columns _changed_columns end # Deletes and returns +self+. Does not run destroy hooks. # Look into using +destroy+ instead. # # Artist[1].delete # DELETE FROM artists WHERE (id = 1) # # => #<Artist {:id=>1, ...}> def delete raise Sequel::Error, "can't delete frozen object" if frozen? _delete self end # Like delete but runs hooks before and after delete. # Uses a transaction if use_transactions is true or if the # :transaction option is given and true. # # Artist[1].destroy # BEGIN; DELETE FROM artists WHERE (id = 1); COMMIT; # # => #<Artist {:id=>1, ...}> def destroy(opts = OPTS) raise Sequel::Error, "can't destroy frozen object" if frozen? checked_save_failure(opts){checked_transaction(opts){_destroy(opts)}} end # Iterates through all of the current values using each. # # Album[1].each{|k, v| puts "#{k} => #{v}"} # # id => 1 # # name => 'Bob' def each(&block) @values.each(&block) end # Compares model instances by values. # # Artist[1] == Artist[1] # => true # Artist.new == Artist.new # => true # Artist[1].set(name: 'Bob') == Artist[1] # => false def eql?(obj) (obj.class == model) && (obj.values == @values) end # Returns the validation errors associated with this object. # See +Errors+. def errors @errors ||= errors_class.new end # Returns true when current instance exists, false otherwise. # Generally an object that isn't new will exist unless it has # been deleted. Uses a database query to check for existence, # unless the model object is new, in which case this is always # false. # # Artist[1].exists? # SELECT 1 FROM artists WHERE (id = 1) # # => true # Artist.new.exists? # # => false def exists? new? ? false : !this.get(SQL::AliasedExpression.new(1, :one)).nil? end # Ignore the model's setter method cache when this instance extends a module, as the # module may contain setter methods. def extend(mod) @singleton_setter_added = true super end # Freeze the object in such a way that it is still usable but not modifiable. # Once an object is frozen, you cannot modify its values, changed_columns, # errors, or dataset. def freeze unless errors.frozen? validate errors.freeze end values.freeze _changed_columns.freeze this if !new? && model.primary_key super end # Value that should be unique for objects with the same class and pk (if pk is not nil), or # the same class and values (if pk is nil).
# # Artist[1].hash == Artist[1].hash # true # Artist[1].set(name: 'Bob').hash == Artist[1].hash # true # Artist.new.hash == Artist.new.hash # true # Artist.new(name: 'Bob').hash == Artist.new.hash # false def hash case primary_key when Array [model, !pk.all? ? @values : pk].hash when Symbol [model, pk.nil? ? @values : pk].hash else [model, @values].hash end end # Returns value for the :id attribute, even if the primary key is # not id. To get the primary key value, use +pk+. # # Artist[1].id # => 1 def id @values[:id] end # Returns a string representation of the model instance including # the class name and values. def inspect "#<#{model.name} @values=#{inspect_values}>" end # Returns the keys in +values+. May not include all column names. # # Artist.new.keys # => [] # Artist.new(name: 'Bob').keys # => [:name] # Artist[1].keys # => [:id, :name] def keys @values.keys end # Refresh this record using +for_update+ (by default, or the specified style when given) # unless this is a new record. Returns self. This can be used to make sure no other # process is updating the record at the same time. # # If style is a string, it will be used directly. You should never pass a string # to this method that is derived from user input, as that can lead to # SQL injection. # # A symbol may be used for database independent locking behavior, but # all supported symbols have separate methods (e.g. for_update). # # # a = Artist[1] # Artist.db.transaction do # a.lock! # a.update(name: 'A') # end # # a = Artist[2] # Artist.db.transaction do # a.lock!('FOR NO KEY UPDATE') # a.update(name: 'B') # end def lock!(style=:update) _refresh(this.lock_style(style)) unless new? self end # Remove elements of the model object that make marshalling fail. Returns self. # # a = Artist[1] # a.marshallable! # Marshal.dump(a) def marshallable! @this = nil self end # Explicitly mark the object as modified, so +save_changes+/+update+ will # run callbacks even if no columns have changed. # # a = Artist[1] # a.save_changes # No callbacks run, as no changes # a.modified! # a.save_changes # Callbacks run, even though no changes made # # If a column is given, specifically mark that column as modified, # so that +save_changes+/+update+ will include that column in the # update. This should be used if you plan on mutating the column # value instead of assigning a new column value: # # a.modified!(:name) # a.name.gsub!(/[aeou]/, 'i') def modified!(column=nil) _add_changed_column(column) if column @modified = true end # Whether this object has been modified since last saved, used by # save_changes to determine whether changes should be saved. New # values are always considered modified. # # a = Artist[1] # a.modified? # => false # a.set(name: 'Jim') # a.modified? # => true # # If a column is given, specifically check if the given column has # been modified: # # a.modified?(:num_albums) # => false # a.num_albums = 10 # a.modified?(:num_albums) # => true def modified?(column=nil) if column changed_columns.include?(column) else @modified || !changed_columns.empty? end end # Returns true if the current instance represents a new record. # # Artist.new.new? # => true # Artist[1].new? # => false def new? defined?(@new) ? @new : (@new = false) end # Returns the primary key value identifying the model instance. # Raises an +Error+ if this model does not have a primary key. # If the model has a composite primary key, returns an array of values.
# # Artist[1].pk # => 1 # Artist[[1, 2]].pk # => [1, 2] def pk raise(Error, "No primary key is associated with this model") unless key = primary_key if key.is_a?(Array) vals = @values key.map{|k| vals[k]} else @values[key] end end # Returns a hash mapping the receiver's primary key column(s) to their values. # # Artist[1].pk_hash # => {:id=>1} # Artist[[1, 2]].pk_hash # => {:id1=>1, :id2=>2} def pk_hash model.primary_key_hash(pk) end # Returns a hash mapping the receiver's qualified primary key column(s) to their values. # # Artist[1].qualified_pk_hash # # => {Sequel[:artists][:id]=>1} # Artist[[1, 2]].qualified_pk_hash # # => {Sequel[:artists][:id1]=>1, Sequel[:artists][:id2]=>2} def qualified_pk_hash(qualifier=model.table_name) model.qualified_primary_key_hash(pk, qualifier) end # Reloads attributes from database and returns self. Also clears all # changed_columns information. Raises an +Error+ if the record no longer # exists in the database. # # a = Artist[1] # a.name = 'Jim' # a.refresh # a.name # => 'Bob' def refresh raise Sequel::Error, "can't refresh frozen object" if frozen? _refresh(this) self end # Alias of refresh, but not aliased directly to make overriding in a plugin easier. def reload refresh end # Creates or updates the record, after making sure the record # is valid and before hooks execute successfully. Fails if: # # * the record is not valid, or # * before_save calls cancel_action, or # * the record is new and before_create calls cancel_action, or # * the record is not new and before_update calls cancel_action. # # If +save+ fails and either raise_on_save_failure or the # :raise_on_failure option is true, it raises ValidationFailed # or HookFailed. Otherwise it returns nil. # # If it succeeds, it returns self. # # Takes the following options: # # :changed :: save all changed columns, instead of all columns or the columns given # :columns :: array of specific columns that should be saved. # :raise_on_failure :: set to true or false to override the current # +raise_on_save_failure+ setting # :server :: set the server/shard on the object before saving, and use that # server/shard in any transaction. # :transaction :: set to true or false to override the current # +use_transactions+ setting # :validate :: set to false to skip validation def save(opts=OPTS) raise Sequel::Error, "can't save frozen object" if frozen? set_server(opts[:server]) if opts[:server] unless _save_valid?(opts) raise(validation_failed_error) if raise_on_failure?(opts) return end checked_save_failure(opts){checked_transaction(opts){_save(opts)}} end # Saves only changed columns if the object has been modified. # If the object has not been modified, returns nil. If unable to # save, returns false unless +raise_on_save_failure+ is true. # # a = Artist[1] # a.save_changes # => nil # a.name = 'Jim' # a.save_changes # UPDATE artists SET name = 'Jim' WHERE (id = 1) # # => #<Artist {:id=>1, :name=>'Jim', ...}> def save_changes(opts=OPTS) save(Hash[opts].merge!(:changed=>true)) || false if modified? end # Updates the instance with the supplied values with support for virtual # attributes, raising an exception if a value is used that doesn't have # a setter method (or ignoring it if <tt>strict_param_setting = false</tt>). # Does not save the record. # # artist.set(name: 'Jim') # artist.name # => 'Jim' def set(hash) set_restricted(hash, :default) end # For each of the fields in the given array +fields+, call the setter # method with the value of that +hash+ entry for the field. Returns self.
# # You can provide an options hash, with the following options currently respected: # :missing :: Can be set to :skip to skip missing entries or :raise to raise an # Error for missing entries. The default behavior is not to check for # missing entries, in which case the default value is used. To be # friendly with most web frameworks, the missing check will also check # for the string version of the argument in the hash if given a symbol. # # Examples: # # artist.set_fields({name: 'Jim'}, [:name]) # artist.name # => 'Jim' # # artist.set_fields({hometown: 'LA'}, [:name]) # artist.name # => nil # artist.hometown # => 'Sac' # # artist.name # => 'Jim' # artist.set_fields({}, [:name], missing: :skip) # artist.name # => 'Jim' # # artist.name # => 'Jim' # artist.set_fields({}, [:name], missing: :raise) # # Sequel::Error raised def set_fields(hash, fields, opts=nil) opts = if opts model.default_set_fields_options.merge(opts) else model.default_set_fields_options end case missing = opts[:missing] when :skip, :raise do_raise = true if missing == :raise fields.each do |f| if hash.has_key?(f) set_column_value("#{f}=", hash[f]) elsif f.is_a?(Symbol) && hash.has_key?(sf = f.to_s) set_column_value("#{sf}=", hash[sf]) elsif do_raise raise(Sequel::Error, "missing field in hash: #{f.inspect} not in #{hash.inspect}") end end else fields.each{|f| set_column_value("#{f}=", hash[f])} end self end # Set the shard that this object is tied to. Returns self. def set_server(s) @server = s @this = @this.server(s) if @this self end # Clear the setter_methods cache when a method is added. def singleton_method_added(meth) @singleton_setter_added = true if meth.to_s.end_with?('=') super end # Skip all validation of the object on the next call to #save, # including the running of validation hooks. This is designed for # and should only be used in cases where #valid? is called before # saving and the <tt>validate: false</tt> option cannot be passed to # #save. def skip_validation_on_next_save! @skip_validation_on_next_save = true end # Returns (naked) dataset that should return only this instance. # # Artist[1].this # # SELECT * FROM artists WHERE (id = 1) LIMIT 1 def this return @this if @this raise Error, "No dataset for model #{model}" unless ds = model.instance_dataset @this = use_server(ds.where(pk_hash)) end # Runs #set with the passed hash and then runs save_changes. # # artist.update(name: 'Jim') # UPDATE artists SET name = 'Jim' WHERE (id = 1) def update(hash) update_restricted(hash, :default) end # Update the instance's values by calling set_fields with the arguments, then # calls save_changes. # # artist.update_fields({name: 'Jim'}, [:name]) # # UPDATE artists SET name = 'Jim' WHERE (id = 1) # # artist.update_fields({hometown: 'LA'}, [:name]) # # UPDATE artists SET name = NULL WHERE (id = 1) def update_fields(hash, fields, opts=nil) set_fields(hash, fields, opts) save_changes end # Validates the object. If the object is invalid, errors should be added # to the errors attribute. By default, does nothing, as all models # are valid by default. See the {"Model Validations" guide}[rdoc-ref:doc/validations.rdoc] # for details about validation. Should not be called directly by # user code, call <tt>valid?</tt> instead to check if an object # is valid. def validate end # Validates the object and returns true if no errors are reported. # # artist.set(name: 'Valid').valid? # => true # artist.set(name: 'Invalid').valid?
# => false # artist.errors.full_messages # => ['name cannot be Invalid'] def valid?(opts = OPTS) _valid?(opts) rescue HookFailed false end private # Add a column as a changed column. def _add_changed_column(column) cc = _changed_columns cc << column unless cc.include?(column) end # Internal changed_columns method that just returns stored array. def _changed_columns @changed_columns ||= [] end # Clear the changed columns. Reason is the reason for clearing # the columns, and should be one of: :initialize, :refresh, :create # or :update. def _clear_changed_columns(_reason) _changed_columns.clear end # Do the deletion of the object's dataset, and check that the row # was actually deleted. def _delete n = _delete_without_checking raise(NoExistingObject, "Attempt to delete object did not result in a single row modification (Rows Deleted: #{n}, SQL: #{_delete_dataset.delete_sql})") if require_modification && n != 1 n end # The dataset to use when deleting the object. The same as the object's # dataset by default. def _delete_dataset this end # Actually do the deletion of the object's dataset. Return the # number of rows modified. def _delete_without_checking if sql = (m = model).fast_instance_delete_sql sql = sql.dup ds = use_server(m.dataset) ds.literal_append(sql, pk) ds.with_sql_delete(sql) else _delete_dataset.delete end end # Internal destroy method, separated from destroy to # allow running inside a transaction def _destroy(opts) called = false around_destroy do called = true before_destroy _destroy_delete after_destroy end raise_hook_failure(:around_destroy) unless called self end # Internal delete method to call when destroying an object, # separated from delete to allow you to override destroy's version # without affecting delete. def _destroy_delete delete end # Insert the record into the database, returning the primary key if # the record should be refreshed from the database. def _insert ds = _insert_dataset if _use_insert_select?(ds) && !(h = _insert_select_raw(ds)).nil? _save_set_values(h) if h nil else iid = _insert_raw(ds) # if we have a regular primary key and it's not set in @values, # we assume it's the last inserted id if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk] vals[pk] = iid end pk end end # The dataset to use when inserting a new object. The same as the model's # dataset by default. def _insert_dataset use_server(model.instance_dataset) end # Insert into the given dataset and return the primary key created (if any). def _insert_raw(ds) ds.insert(_insert_values) end # Insert into the given dataset and return the hash of column values. def _insert_select_raw(ds) ds.insert_select(_insert_values) end # The values hash to use when inserting a new record. alias _insert_values values private :_insert_values # Refresh using a particular dataset, used inside save to make sure the same server # is used for reading newly inserted values from the database def _refresh(dataset) _refresh_set_values(_refresh_get(dataset) || raise(NoExistingObject, "Record not found")) _clear_changed_columns(:refresh) end # Get the row of column data from the database. def _refresh_get(dataset) if (sql = model.fast_pk_lookup_sql) && !dataset.opts[:lock] sql = sql.dup ds = use_server(dataset) ds.literal_append(sql, pk) ds.with_sql_first(sql) else dataset.first end end # Set the values to the given hash after refreshing. def _refresh_set_values(h) @values = h end # Internal version of save, split from save to allow running inside # its own transaction.
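# For a new object, the hook order is roughly: around_save -> before_save ->
# around_create -> before_create -> INSERT -> after_create -> after_save.
# For an existing object, the update hooks run in place of the create hooks.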
def _save(opts) pk = nil called_save = false called_cu = false around_save do called_save = true before_save if new? around_create do called_cu = true before_create pk = _insert @this = nil @new = false @modified = false pk ? _save_refresh : _clear_changed_columns(:create) after_create true end raise_hook_failure(:around_create) unless called_cu else around_update do called_cu = true before_update columns = opts[:columns] if columns.nil? columns_updated = if opts[:changed] _save_update_changed_colums_hash else _save_update_all_columns_hash end _clear_changed_columns(:update) else # update only the specified columns columns = Array(columns) columns_updated = @values.reject{|k, v| !columns.include?(k)} _changed_columns.reject!{|c| columns.include?(c)} end _update_columns(columns_updated) @this = nil @modified = false after_update true end raise_hook_failure(:around_update) unless called_cu end after_save true end raise_hook_failure(:around_save) unless called_save self end # Refresh the object after saving it, used to get # default values of all columns. Separated from _save so it # can be overridden to avoid the refresh. def _save_refresh _save_set_values(_refresh_get(this.server?(:default)) || raise(NoExistingObject, "Record not found")) _clear_changed_columns(:create) end # Set values to the provided hash. Called after a create, # to set the full values from the database in the model instance. def _save_set_values(h) @values = h end # Return a hash of values used when saving all columns of an # existing object (i.e. not passing specific columns to save # or using update/save_changes). Defaults to all of the # object's values except unmodified primary key columns, as some # databases don't like you setting primary key values even # to their existing values. def _save_update_all_columns_hash v = Hash[@values] cc = changed_columns Array(primary_key).each{|x| v.delete(x) unless cc.include?(x)} v end # Return a hash of values used when saving changed columns of an # existing object. Defaults to all of the object's current values # that are recorded as modified. def _save_update_changed_colums_hash cc = changed_columns @values.reject{|k,v| !cc.include?(k)} end # Validate the object if validating on save. Skips validation # completely (including validation hooks) if # skip_validation_on_next_save! has been called on the object, # resetting the flag so that future saves will validate. def _save_valid?(opts) if @skip_validation_on_next_save @skip_validation_on_next_save = false return true end checked_save_failure(opts){_valid?(opts)} end # Call _update with the given columns, if any are present. # Plugins can override this method in order to update with # additional columns, even when the column hash is initially empty. def _update_columns(columns) _update(columns) unless columns.empty? end # Update this instance's dataset with the supplied column hash, # checking that only a single row was modified. def _update(columns) n = _update_without_checking(columns) raise(NoExistingObject, "Attempt to update object did not result in a single row modification (SQL: #{_update_dataset.update_sql(columns)})") if require_modification && n != 1 n end # The dataset to use when updating an object. The same as the object's # dataset by default. def _update_dataset this end # Update this instance's dataset with the supplied column hash. def _update_without_checking(columns) _update_dataset.update(columns) end # Whether to use insert_select when inserting a new row.
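# On databases that support it (e.g. PostgreSQL via RETURNING), insert_select
# returns the inserted row directly, avoiding a separate refresh query after
# the INSERT.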
def _use_insert_select?(ds) (!ds.opts[:select] || ds.opts[:returning]) && ds.supports_insert_select? end # Internal validation method, running validation hooks. def _valid?(opts) return errors.empty? if frozen? errors.clear called = false skip_validate = opts[:validate] == false around_validation do called = true before_validation validate unless skip_validate after_validation end return true if skip_validate if called errors.empty? else raise_hook_failure(:around_validation) end end # If not raising on failure, check for HookFailed # being raised by yielding and swallow it. def checked_save_failure(opts) if raise_on_failure?(opts) yield else begin yield rescue HookFailed nil end end end # If transactions should be used, wrap the yield in a transaction block. def checked_transaction(opts=OPTS) use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield end # Change the value of the column to given value, recording the change. def change_column_value(column, value) _add_changed_column(column) @values[column] = value end # Default error class used for errors. def errors_class Errors end # A HookFailed exception for the given message tied to the current instance. def hook_failed_error(msg) HookFailed.new(msg, self) end # Clone constructor -- freeze internal data structures if the original's # are frozen. def initialize_clone(other) super freeze if other.frozen? self end # Copy constructor -- Duplicate internal data structures. def initialize_copy(other) super @values = Hash[@values] @changed_columns = @changed_columns.dup if @changed_columns @errors = @errors.dup if @errors self end # Set the columns with the given hash. By default, the same as +set+, but # exists so it can be overridden. This is called only for new records, before # changed_columns is cleared. def initialize_set(h) set(h) unless h.empty? end # Default inspection output for the values hash, overwrite to change what #inspect displays. def inspect_values @values.inspect end # Whether to raise or return false if this action fails. If the # :raise_on_failure option is present in the hash, use that, otherwise, # fallback to the object's raise_on_save_failure (if set), or # class's default (if not). def raise_on_failure?(opts) opts.fetch(:raise_on_failure, raise_on_save_failure) end # Raise an error appropriate to the hook type. May be swallowed by # checked_save_failure depending on the raise_on_failure? setting. def raise_hook_failure(type=nil) msg = case type when String type when Symbol "the #{type} hook failed" else "a hook failed" end raise hook_failed_error(msg) end # Get the ruby class or classes related to the given column's type. def schema_type_class(column) if (sch = db_schema[column]) && (type = sch[:type]) db.schema_type_class(type) end end # Call setter methods based on keys in hash, with the appropriate values. # Restrict which methods can be called based on the provided type. def set_restricted(hash, type) return self if hash.empty? meths = setter_methods(type) strict = strict_param_setting hash.each do |k,v| m = "#{k}=" if meths.include?(m) set_column_value(m, v) elsif strict # Avoid using respond_to? or creating symbols from user input if public_methods.map(&:to_s).include?(m) if Array(model.primary_key).map(&:to_s).member?(k.to_s) && model.restrict_primary_key? 
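# Use a more specific error message when the restricted column is a primary key.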
raise MassAssignmentRestriction, "#{k} is a restricted primary key" else raise MassAssignmentRestriction, "#{k} is a restricted column" end else raise MassAssignmentRestriction, "method #{m} doesn't exist" end end end self end # Returns all methods that can be used for attribute assignment (those that end with =), # depending on the type: # # :default :: Use the default methods allowed in the model class. # :all :: Allow setting all setters, except those specifically restricted (such as ==). # Array :: Only allow setting of columns in the given array. def setter_methods(type) if type == :default && !@singleton_setter_added return model.setter_methods end meths = methods.map(&:to_s).select{|l| l.end_with?('=')} - RESTRICTED_SETTER_METHODS meths -= Array(primary_key).map{|x| "#{x}="} if primary_key && model.restrict_primary_key? meths end # The server/shard that the model object's dataset uses, or :default if the # model object's dataset does not have an associated shard. def this_server if (s = @server) s elsif (t = @this) t.opts[:server] || :default else model.dataset.opts[:server] || :default end end # Typecast the value to the column's type if typecasting. Calls the database's # typecast_value method, so database adapters can override/augment the handling # for database specific column types. def typecast_value(column, value) return value unless typecast_on_assignment && db_schema && (col_schema = db_schema[column]) value = nil if '' == value and typecast_empty_string_to_nil and col_schema[:type] and ![:string, :blob].include?(col_schema[:type]) raise(InvalidValue, "nil/NULL is not allowed for the #{column} column") if raise_on_typecast_failure && value.nil? && (col_schema[:allow_null] == false) begin model.db.typecast_value(col_schema[:type], value) rescue InvalidValue raise_on_typecast_failure ? raise : value end end # Set the columns, filtered by the only and except arrays. def update_restricted(hash, type) set_restricted(hash, type) save_changes end # Set the given dataset to use the current object's shard. def use_server(ds) @server ? ds.server(@server) : ds end # Whether to use a transaction for this action. If the :transaction # option is present in the hash, use that, otherwise, fallback to the # object's default (if set), or class's default (if not). def use_transaction?(opts = OPTS) opts.fetch(:transaction, use_transactions) end # An ValidationFailed exception instance to raise for this instance. def validation_failed_error ValidationFailed.new(self) end end # DatasetMethods contains methods that all model datasets have. module DatasetMethods # The model class associated with this dataset # # Artist.dataset.model # => Artist def model @opts[:model] end # Assume if a single integer is given that it is a lookup by primary # key, and call with_pk with the argument. # # Artist.dataset[1] # SELECT * FROM artists WHERE (id = 1) LIMIT 1 def [](*args) if args.length == 1 && (i = args[0]) && i.is_a?(Integer) with_pk(i) else super end end # Destroy each row in the dataset by instantiating it and then calling # destroy on the resulting model object. This isn't as fast as deleting # the dataset, which does a single SQL call, but this runs any destroy # hooks on each object in the dataset. # # Artist.dataset.destroy # # DELETE FROM artists WHERE (id = 1) # # DELETE FROM artists WHERE (id = 2) # # ... def destroy pr = proc{all(&:destroy).length} model.use_transactions ? 
@db.transaction(:server=>opts[:server], &pr) : pr.call end # If there is no order already defined on this dataset, order it by # the primary key and call last. # # Album.last # # SELECT * FROM albums ORDER BY id DESC LIMIT 1 def last(*a, &block) if ds = _primary_key_order ds.last(*a, &block) else super end end # If there is no order already defined on this dataset, order it by # the primary key and call paged_each. # # Album.paged_each{|row| } # # SELECT * FROM albums ORDER BY id LIMIT 1000 OFFSET 0 # # SELECT * FROM albums ORDER BY id LIMIT 1000 OFFSET 1000 # # SELECT * FROM albums ORDER BY id LIMIT 1000 OFFSET 2000 # # ... def paged_each(*a, &block) if ds = _primary_key_order ds.paged_each(*a, &block) else super end end # This allows you to call +as_hash+ without any arguments, which will # result in a hash with the primary key value being the key and the # model object being the value. # # Artist.dataset.as_hash # SELECT * FROM artists # # => {1=>#<Artist {:id=>1, ...}>, # # 2=>#<Artist {:id=>2, ...}>, # # ...} def as_hash(key_column=nil, value_column=nil, opts=OPTS) if key_column super else raise(Sequel::Error, "No primary key for model") unless model && (pk = model.primary_key) super(pk, value_column, opts) end end # Alias of as_hash for backwards compatibility. def to_hash(*a) as_hash(*a) end # Given a primary key value, return the first record in the dataset with that primary key # value. If no records matches, returns nil. # # # Single primary key # Artist.dataset.with_pk(1) # # SELECT * FROM artists WHERE (artists.id = 1) LIMIT 1 # # # Composite primary key # Artist.dataset.with_pk([1, 2]) # # SELECT * FROM artists WHERE ((artists.id1 = 1) AND (artists.id2 = 2)) LIMIT 1 def with_pk(pk) if pk && (loader = _with_pk_loader) loader.first(*pk) else first(model.qualified_primary_key_hash(pk)) end end # Same as with_pk, but raises NoMatchingRow instead of returning nil if no # row matches. def with_pk!(pk) with_pk(pk) || raise(NoMatchingRow.new(self)) end private # If the dataset is not already ordered, and the model has a primary key, # return a clone ordered by the primary key. def _primary_key_order if @opts[:order].nil? && model && (pk = model.primary_key) cached_dataset(:_pk_order_ds){order(*pk)} end end # A cached placeholder literalizer, if one exists for the current dataset. 
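    # Caching the literalizer means repeated primary key lookups only pay the
    # SQL-building cost once. A rough sketch of the effect (illustrative):
    #
    #   Album.dataset.with_pk(1) # builds and caches the loader
    #   Album.dataset.with_pk(2) # reuses the cached loader, binding 2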
def _with_pk_loader cached_placeholder_literalizer(:_with_pk_loader) do |pl| table = model.table_name cond = case primary_key = model.primary_key when Array primary_key.map{|key| [SQL::QualifiedIdentifier.new(table, key), pl.arg]} when Symbol {SQL::QualifiedIdentifier.new(table, primary_key)=>pl.arg} else raise(Error, "#{model} does not have a primary key") end where(cond).limit(1) end end def non_sql_option?(key) super || key == :model end end extend ClassMethods plugin self singleton_class.send(:undef_method, :dup, :clone, :initialize_copy) # :nocov: if RUBY_VERSION >= '1.9.3' # :nocov: singleton_class.send(:undef_method, :initialize_clone, :initialize_dup) end end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/model/dataset_module.rb����������������������������������������������������0000664�0000000�0000000�00000001612�14342141206�0021563�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel class Model # This Module subclass is used by Model.dataset_module # to add dataset methods to classes. In addition to the # methods offered by Dataset::DatasetModule, it also # automatically creates class methods for public dataset # methods. class DatasetModule < Dataset::DatasetModule # Store the model related to this dataset module. def initialize(model) @model = model end # Alias for where. def subset(name, *args, &block) where(name, *args, &block) end private # Add a class method to the related model that # calls the dataset method of the same name. def method_added(meth) @model.send(:def_model_dataset_method, meth) if public_method_defined?(meth) super end end @dataset_module_class = DatasetModule end end ����������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/model/default_inflections.rb�����������������������������������������������0000664�0000000�0000000�00000003077�14342141206�0022621�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel # Proc that is instance_execed to create the default inflections for both the # model inflector and the inflector extension. 
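  # Rules added later in this proc are tried first. For example, with the
  # default rules below and the inflector extension loaded (illustrative):
  #
  #   "alias".pluralize  # => "aliases"
  #   "matrix".pluralize # => "matrices"
  #   "sheep".pluralize  # => "sheep" (uncountable)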
DEFAULT_INFLECTIONS_PROC = proc do plural(/$/, 's') plural(/s$/i, 's') plural(/(alias|(?:stat|octop|vir|b)us)$/i, '\1es') plural(/(buffal|tomat)o$/i, '\1oes') plural(/([ti])um$/i, '\1a') plural(/sis$/i, 'ses') plural(/(?:([^f])fe|([lr])f)$/i, '\1\2ves') plural(/(hive)$/i, '\1s') plural(/([^aeiouy]|qu)y$/i, '\1ies') plural(/(x|ch|ss|sh)$/i, '\1es') plural(/(matr|vert|ind)ix|ex$/i, '\1ices') plural(/([m|l])ouse$/i, '\1ice') singular(/s$/i, '') singular(/([ti])a$/i, '\1um') singular(/(analy|ba|cri|diagno|parenthe|progno|synop|the)ses$/i, '\1sis') singular(/([^f])ves$/i, '\1fe') singular(/([h|t]ive)s$/i, '\1') singular(/([lr])ves$/i, '\1f') singular(/([^aeiouy]|qu)ies$/i, '\1y') singular(/(m)ovies$/i, '\1ovie') singular(/(x|ch|ss|sh)es$/i, '\1') singular(/([m|l])ice$/i, '\1ouse') singular(/buses$/i, 'bus') singular(/oes$/i, 'o') singular(/shoes$/i, 'shoe') singular(/(alias|(?:stat|octop|vir|b)us)es$/i, '\1') singular(/(vert|ind)ices$/i, '\1ex') singular(/matrices$/i, 'matrix') irregular('person', 'people') irregular('man', 'men') irregular('child', 'children') irregular('sex', 'sexes') irregular('move', 'moves') irregular('quiz', 'quizzes') irregular('testis', 'testes') uncountable(%w(equipment information rice money species series fish sheep news)) end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/model/errors.rb������������������������������������������������������������0000664�0000000�0000000�00000003674�14342141206�0020117�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel class Model # Errors represents validation errors, a simple hash subclass # with a few convenience methods. class Errors < ::Hash # Adds an error for the given attribute. # # errors.add(:name, 'is not valid') if name == 'invalid' def add(att, msg) fetch(att){self[att] = []} << msg end # Return the total number of error messages. # # errors.count # => 3 def count values.inject(0){|m, v| m + v.length} end # Return true if there are no error messages, false otherwise. def empty? count == 0 end # Returns an array of fully-formatted error messages. # # errors.full_messages # # => ['name is not valid', # # 'hometown is not at least 2 letters'] # # If the message is a Sequel::LiteralString, it will be used literally, without the column name: # # errors.add(:name, Sequel.lit("Album name is not valid")) # errors.full_messages # # => ['Album name is not valid'] def full_messages inject([]) do |m, kv| att, errors = *kv errors.each {|e| m << (e.is_a?(LiteralString) ? e : full_message(att, e))} m end end # Returns the array of errors for the given attribute, or nil # if there are no errors for the attribute. # # errors.on(:name) # => ['name is not valid'] # errors.on(:id) # => nil def on(att) if v = fetch(att, nil) and !v.empty? 
v end end private # Create full error message to use for the given attribute (or array of attributes) # and error message. This can be overridden for easier internalization. def full_message(att, error_msg) att = att.join(' and ') if att.is_a?(Array) "#{att} #{error_msg}" end end end end ��������������������������������������������������������������������sequel-5.63.0/lib/sequel/model/exceptions.rb��������������������������������������������������������0000664�0000000�0000000�00000002752�14342141206�0020760�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel # Exception class raised when +raise_on_save_failure+ is set and an action is canceled in a hook. # or an around hook doesn't yield. class HookFailed < Error # The Sequel::Model instance related to this error. attr_reader :model def initialize(message=nil, model=nil) @model = model super(message) end end ( # Exception class raised when +require_modification+ is set and an UPDATE or DELETE statement to modify the dataset doesn't # modify a single row. NoExistingObject = Class.new(Error) ).name ( # Raised when an undefined association is used when eager loading. UndefinedAssociation = Class.new(Error) ).name ( # Raised when a mass assignment method is called in strict mode with either a restricted column # or a column without a setter method. MassAssignmentRestriction = Class.new(Error) ).name # Exception class raised when +raise_on_save_failure+ is set and validation fails class ValidationFailed < Error # The Sequel::Model object related to this exception. attr_reader :model # The Sequel::Model::Errors object related to this exception. attr_reader :errors def initialize(errors=nil) if errors.is_a?(Sequel::Model) @model = errors errors = @model.errors end if errors.respond_to?(:full_messages) @errors = errors super(errors.full_messages.join(', ')) else super end end end end ����������������������sequel-5.63.0/lib/sequel/model/inflections.rb�������������������������������������������������������0000664�0000000�0000000�00000013143�14342141206�0021110�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel # Yield the Inflections module if a block is given, and return # the Inflections module. def self.inflections yield Inflections if defined?(yield) Inflections end # This module acts as a singleton returned/yielded by Sequel.inflections, # which is used to override or specify additional inflection rules # for Sequel. Examples: # # Sequel.inflections do |inflect| # inflect.plural /^(ox)$/i, '\1\2en' # inflect.singular /^(ox)en/i, '\1' # # inflect.irregular 'octopus', 'octopi' # # inflect.uncountable "equipment" # end # # New rules are added at the top. So in the example above, the irregular rule for octopus will now be the first of the # pluralization and singularization rules that is runs. 
This guarantees that your rules run before any of the rules that may # already have been loaded. module Inflections @plurals, @singulars, @uncountables = [], [], [] class << self # Array of two element arrays, first containing a regex, and the second containing a substitution pattern, used for plurization. attr_reader :plurals # Array of two element arrays, first containing a regex, and the second containing a substitution pattern, used for singularization. attr_reader :singulars # Array of strings for words were the singular form is the same as the plural form attr_reader :uncountables end # Clears the loaded inflections within a given scope (default is :all). Give the scope as a symbol of the inflection type, # the options are: :plurals, :singulars, :uncountables # # Examples: # clear :all # clear :plurals def self.clear(scope = :all) case scope when :all @plurals, @singulars, @uncountables = [], [], [] else instance_variable_set("@#{scope}", []) end end # Specifies a new irregular that applies to both pluralization and singularization at the same time. This can only be used # for strings, not regular expressions. You simply pass the irregular in singular and plural form. # # Examples: # irregular 'octopus', 'octopi' # irregular 'person', 'people' def self.irregular(singular, plural) plural(Regexp.new("(#{singular[0,1]})#{singular[1..-1]}$", "i"), '\1' + plural[1..-1]) singular(Regexp.new("(#{plural[0,1]})#{plural[1..-1]}$", "i"), '\1' + singular[1..-1]) end # Specifies a new pluralization rule and its replacement. The rule can either be a string or a regular expression. # The replacement should always be a string that may include references to the matched data from the rule. # # Example: # plural(/(x|ch|ss|sh)$/i, '\1es') def self.plural(rule, replacement) @plurals.insert(0, [rule, replacement]) end # Specifies a new singularization rule and its replacement. The rule can either be a string or a regular expression. # The replacement should always be a string that may include references to the matched data from the rule. # # Example: # singular(/([^aeiouy]|qu)ies$/i, '\1y') def self.singular(rule, replacement) @singulars.insert(0, [rule, replacement]) end # Add uncountable words that shouldn't be attempted inflected. # # Examples: # uncountable "money" # uncountable "money", "information" # uncountable %w( money information rice ) def self.uncountable(*words) (@uncountables << words).flatten! end instance_exec(&DEFAULT_INFLECTIONS_PROC) private # Convert the given string to CamelCase. Will also convert '/' to '::' which is useful for converting paths to namespaces. def camelize(s) s = s.to_s return s.camelize if s.respond_to?(:camelize) s = s.gsub(/\/(.?)/){|x| "::#{x[-1..-1].upcase unless x == '/'}"}.gsub(/(^|_)(.)/){|x| x[-1..-1].upcase} s end # Tries to find a declared constant with the name specified # in the string. It raises a NameError when the name is not in CamelCase # or is not initialized. def constantize(s) s = s.to_s return s.constantize if s.respond_to?(:constantize) raise(NameError, "#{s.inspect} is not a valid constant name!") unless m = /\A(?:::)?([A-Z]\w*(?:::[A-Z]\w*)*)\z/.match(s) Object.module_eval("::#{m[1]}", __FILE__, __LINE__) end # Removes the module part from the expression in the string def demodulize(s) s = s.to_s return s.demodulize if s.respond_to?(:demodulize) s.gsub(/^.*::/, '') end # Returns the plural form of the word in the string. 
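    # Uncountable words are returned unchanged. For example (illustrative):
    #
    #   pluralize("word") # => "words"
    #   pluralize("fish") # => "fish" (uncountable)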
def pluralize(s) s = s.to_s return s.pluralize if s.respond_to?(:pluralize) result = s.dup Inflections.plurals.each{|(rule, replacement)| break if result.gsub!(rule, replacement)} unless Inflections.uncountables.include?(s.downcase) result end # The reverse of pluralize, returns the singular form of a word in a string. def singularize(s) s = s.to_s return s.singularize if s.respond_to?(:singularize) result = s.dup Inflections.singulars.each{|(rule, replacement)| break if result.gsub!(rule, replacement)} unless Inflections.uncountables.include?(s.downcase) result end # The reverse of camelize. Makes an underscored form from the expression in the string. # Also changes '::' to '/' to convert namespaces to paths. def underscore(s) s = s.to_s return s.underscore if s.respond_to?(:underscore) s.gsub('::', '/').gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2'). gsub(/([a-z\d])([A-Z])/, '\1_\2').tr('-', '_').downcase end end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/model/plugins.rb�����������������������������������������������������������0000664�0000000�0000000�00000015602�14342141206�0020256�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel # Empty namespace that plugins should use to store themselves, # so they can be loaded via Model.plugin. # # Plugins should be modules with one of the following conditions: # * A singleton method named apply, which takes a model, # additional arguments, and an optional block. This is called # the first time the plugin is loaded for this model (unless it was # already loaded by an ancestor class), before including/extending # any modules, with the arguments # and block provided to the call to Model.plugin. # * A module inside the plugin module named ClassMethods, # which will extend the model class. # * A module inside the plugin module named InstanceMethods, # which will be included in the model class. # * A module inside the plugin module named DatasetMethods, # which will extend the model's dataset. # * A singleton method named configure, which takes a model, # additional arguments, and an optional block. This is called # every time the Model.plugin method is called, after including/extending # any modules. module Plugins # In the given module +mod+, define methods that are call the same method # on the dataset. This is designed for plugins to define dataset methods # inside ClassMethods that call the implementations in DatasetMethods. # # This should not be called with untrusted input or method names that # can't be used literally, since it uses class_eval. 
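    # A hypothetical plugin might use it as follows (sketch; MyPlugin and
    # the published method are assumed names, not part of Sequel):
    #
    #   module Sequel::Plugins::MyPlugin
    #     module DatasetMethods
    #       def published
    #         where(published: true)
    #       end
    #     end
    #     module ClassMethods
    #       Sequel::Plugins.def_dataset_methods(self, :published)
    #     end
    #   end
    #
    # after which Model.published delegates to Model.dataset.published.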
def self.def_dataset_methods(mod, meths) Array(meths).each do |meth| mod.class_eval("def #{meth}(*args, &block); dataset.#{meth}(*args, &block) end", __FILE__, __LINE__) # :nocov: mod.send(:ruby2_keywords, meth) if respond_to?(:ruby2_keywords, true) # :nocov: end end # Add method to +mod+ that overrides inherited_instance_variables to include the # values in this hash. def self.inherited_instance_variables(mod, hash) mod.send(:define_method, :inherited_instance_variables) do || super().merge!(hash) end mod.send(:private, :inherited_instance_variables) end # Add method to +mod+ that overrides set_dataset to call the method afterward. def self.after_set_dataset(mod, meth) mod.send(:define_method, :set_dataset) do |*a| r = super(*a) # Allow calling private class methods as methods this specifies are usually private send(meth) r end end method_num = 0 method_num_mutex = Mutex.new # Return a unique method name symbol for the given suffix. SEQUEL_METHOD_NAME = lambda do |suffix| :"_sequel_#{suffix}_#{method_num_mutex.synchronize{method_num += 1}}" end # Define a private instance method using the block with the provided name and # expected arity. If the name is given as a Symbol, it is used directly. # If the name is given as a String, a unique name will be generated using # that string. The expected_arity should be either 0 (no arguments) or # 1 (single argument). # # If a block with an arity that does not match the expected arity is used, # a deprecation warning will be issued. The method defined should still # work, though it will be slower than a method with the expected arity. # # Sequel only checks arity for regular blocks, not lambdas. Lambdas were # already strict in regards to arity, so there is no need to try to fix # arity to keep backwards compatibility for lambdas. # # Blocks with required keyword arguments are not supported by this method. def self.def_sequel_method(model, meth, expected_arity, &block) if meth.is_a?(String) meth = SEQUEL_METHOD_NAME.call(meth) end call_meth = meth unless block.lambda? required_args, optional_args, rest, keyword = _define_sequel_method_arg_numbers(block) if keyword == :required raise Error, "cannot use block with required keyword arguments when calling define_sequel_method with expected arity #{expected_arity}" end case expected_arity when 0 unless required_args == 0 # SEQUEL6: remove Sequel::Deprecation.deprecate("Arity mismatch in block passed to define_sequel_method. Expected Arity 0, but arguments required for #{block.inspect}. Support for this will be removed in Sequel 6.") b = block block = lambda{instance_exec(&b)} # Fallback end when 1 if required_args == 0 && optional_args == 0 && !rest # SEQUEL6: remove Sequel::Deprecation.deprecate("Arity mismatch in block passed to define_sequel_method. Expected Arity 1, but no arguments accepted for #{block.inspect}. Support for this will be removed in Sequel 6.") temp_method = SEQUEL_METHOD_NAME.call("temp") model.class_eval("def #{temp_method}(_) #{meth =~ /\A\w+\z/ ? "#{meth}_arity" : "send(:\"#{meth}_arity\")"} end", __FILE__, __LINE__) model.send(:alias_method, meth, temp_method) model.send(:undef_method, temp_method) model.send(:private, meth) meth = :"#{meth}_arity" elsif required_args > 1 # SEQUEL6: remove Sequel::Deprecation.deprecate("Arity mismatch in block passed to define_sequel_method. Expected Arity 1, but more arguments required for #{block.inspect}. 
Support for this will be removed in Sequel 6.") b = block block = lambda{|r| instance_exec(r, &b)} # Fallback end else raise Error, "unexpected arity passed to define_sequel_method: #{expected_arity.inspect}" end end model.send(:define_method, meth, &block) model.send(:private, meth) model.send(:alias_method, meth, meth) call_meth end # Return the number of required argument, optional arguments, # whether the callable accepts any additional arguments, # and whether the callable accepts keyword arguments (true, false # or :required). def self._define_sequel_method_arg_numbers(callable) optional_args = 0 rest = false keyword = false callable.parameters.map(&:first).each do |arg_type, _| case arg_type when :opt optional_args += 1 when :rest rest = true when :keyreq keyword = :required when :key, :keyrest keyword ||= true else raise Error, "invalid arg_type passed to _define_sequel_method_arg_numbers: #{arg_type}" end end arity = callable.arity if arity < 0 arity = arity.abs - 1 end required_args = arity arity -= 1 if keyword == :required # callable currently is always a non-lambda Proc optional_args -= arity [required_args, optional_args, rest, keyword] end private_class_method :_define_sequel_method_arg_numbers end end ������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/�������������������������������������������������������������������0000775�0000000�0000000�00000000000�14342141206�0016625�5����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/accessed_columns.rb������������������������������������������������0000664�0000000�0000000�00000003474�14342141206�0022474�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The accessed_columns plugin records which columns have been # accessed for a model instance. This is useful if you are # looking to remove other columns from being SELECTed by the # dataset that retrieved the instance, which can significantly # improve performance: # # a = Album[1] # a.accessed_columns # [] # a.name # a.accessed_columns # [:name] # a.artist_id # a.accessed_columns # [:name, :artist_id] # # Note that this plugin should probably not be used in production, # as it causes a performance hit. # # Usage: # # # Make all model subclass instances record accessed columns (called before loading subclasses) # Sequel::Model.plugin :accessed_columns # # # Make the Album instances record accessed columns # Album.plugin :accessed_columns module AccessedColumns module InstanceMethods # Record the column access before retrieving the value. def [](c) (@accessed_columns ||= {})[c] = true unless frozen? super end # Clear the accessed columns when saving. def after_save super @accessed_columns = nil end # The columns that have been accessed. def accessed_columns @accessed_columns ? 
@accessed_columns.keys : [] end # Copy the accessed columns when duping and cloning. def initialize_copy(other) other.instance_variable_set(:@accessed_columns, Hash[@accessed_columns]) if @accessed_columns super end private # Clear the accessed columns when refreshing. def _refresh(_) @accessed_columns = nil super end end end end end ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/active_model.rb����������������������������������������������������0000664�0000000�0000000�00000006570�14342141206�0021615�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true require 'active_model' module Sequel module Plugins # The active_model plugin makes Sequel::Model objects # pass the ActiveModel::Lint tests, which should # hopefully mean full ActiveModel compliance. This should # allow the full support of Sequel::Model objects in Rails 3+. # This plugin requires active_model in order to use # ActiveModel::Naming. # # Usage: # # # Make all subclasses active_model compliant (called before loading subclasses) # Sequel::Model.plugin :active_model # # # Make the Album class active_model compliant # Album.plugin :active_model module ActiveModel # ActiveModel compliant error class class Errors < Sequel::Model::Errors # Add autovivification so that #[] always returns an array. def [](k) fetch(k){self[k] = []} end end module ClassMethods include ::ActiveModel::Naming # Cache model_name and to_partial path value before freezing. def freeze model_name _to_partial_path super end # Class level cache for to_partial_path. def _to_partial_path @_to_partial_path ||= "#{underscore(pluralize(to_s))}/#{underscore(demodulize(to_s))}".freeze end end module InstanceMethods # Record that an object was destroyed, for later use by # destroyed? def after_destroy super @destroyed = true end # Return ::ActiveModel::Name instance for the class. def model_name model.model_name end # False if the object is new? or has been destroyed, true otherwise. def persisted? return false if new? return false if defined?(@destroyed) if defined?(@rollback_checker) if @rollback_checker.call return false end end true end # An array of primary key values, or nil if the object is not persisted. def to_key if primary_key.is_a?(Symbol) [pk] if pk else pk if pk.all? end end # With the active_model plugin, Sequel model objects are already # compliant, so this returns self. def to_model self end # An string representing the object's primary key. For composite # primary keys, joins them with to_param_joiner. def to_param if persisted? and k = to_key k.join(to_param_joiner) end end # Returns a string identifying the path associated with the object. def to_partial_path model._to_partial_path end private # For new objects, add a rollback checker to check if the transaction # in which this instance is created is rolled back. def _save(opts) if new? && db.in_transaction?(opts) @rollback_checker = db.rollback_checker(opts) end super end # Use ActiveModel compliant errors class. 
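        # The ActiveModel-compliant subclass autovivifies, so, as required by
        # ActiveModel::Lint (illustrative):
        #
        #   album.errors[:name] # => [] even when name has no errors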
def errors_class Errors end # The string to use to join composite primary key param strings. def to_param_joiner '-' end end end end end ����������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/after_initialize.rb������������������������������������������������0000664�0000000�0000000�00000002040�14342141206�0022470�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # Adds an after_initialize hook to models, called after initializing # both new objects and ones loaded from the database. # # Usage: # # # Make all model subclasses support the after_initialize hook # Sequel::Model.plugin :after_initialize # # # Make the Album class support the after_initialize hook # Album.plugin :after_initialize module AfterInitialize module ClassMethods # Call after_initialize for model objects loaded from the database. def call(_) v = super v.after_initialize v end end module InstanceMethods # Call after_initialize for new model objects. def initialize(h={}) super after_initialize end # An empty after_initialize hook, so that plugins that use this # can always call super to get the default behavior. def after_initialize end end end end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/association_dependencies.rb����������������������������������������0000664�0000000�0000000�00000012201�14342141206�0024170�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The association_dependencies plugin allows you do easily set up before and/or after destroy hooks # for destroying, deleting, or nullifying associated model objects. The following # association types support the following dependency actions: # # :many_to_many :: :nullify (removes all related entries in join table) # :many_to_one :: :delete, :destroy # :one_to_many, one_to_one :: :delete, :destroy, :nullify (sets foreign key to NULL for all associated objects) # # This plugin works directly with the association datasets and does not use any cached association values. # The :delete action will delete all associated objects from the database in a single SQL call. # The :destroy action will load each associated object from the database and call the destroy method on it. 
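    # For example, for an artist with many albums (illustrative SQL):
    #
    #   albums: :delete  # one query: DELETE FROM albums WHERE (artist_id = 1)
    #   albums: :destroy # loads each album and runs its destroy hooks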
# # To set up an association dependency, you must provide a hash with association name symbols # and dependency action values. You can provide the hash to the plugin call itself or # to the add_association_dependencies method: # # Business.plugin :association_dependencies, address: :delete # # or: # Artist.plugin :association_dependencies # Artist.add_association_dependencies albums: :destroy, reviews: :delete, tags: :nullify module AssociationDependencies # Mapping of association types to when the dependency calls should be made (either # :before for in before_destroy or :after for in after_destroy) ASSOCIATION_MAPPING = {:one_to_many=>:before, :many_to_one=>:after, :many_to_many=>:before, :one_to_one=>:before}.freeze # The valid dependence actions DEPENDENCE_ACTIONS = [:delete, :destroy, :nullify].freeze # Initialize the association_dependencies hash for this model. def self.apply(model, hash=OPTS) model.instance_exec{@association_dependencies = {:before_delete=>[], :before_destroy=>[], :before_nullify=>[], :after_delete=>[], :after_destroy=>[]}} end # Call add_association_dependencies with any dependencies given in the plugin call. def self.configure(model, hash=OPTS) model.add_association_dependencies(hash) unless hash.empty? end module ClassMethods # A hash specifying the association dependencies for each model. The keys # are symbols indicating the type of action and when it should be executed # (e.g. :before_delete). Values are an array of method symbols. # For before_nullify, the symbols are remove_all_association methods. For other # types, the symbols are association_dataset methods, on which delete or # destroy is called. attr_reader :association_dependencies # Add association dependencies to this model. The hash should have association name # symbol keys and dependency action symbol values (e.g. albums: :destroy). def add_association_dependencies(hash) hash.each do |association, action| raise(Error, "Nonexistent association: #{association}") unless r = association_reflection(association) type = r[:type] raise(Error, "Invalid dependence action type: association: #{association}, dependence action: #{action}") unless DEPENDENCE_ACTIONS.include?(action) raise(Error, "Invalid association type: association: #{association}, type: #{type}") unless time = ASSOCIATION_MAPPING[type] association_dependencies[:"#{time}_#{action}"] << if action == :nullify case type when :one_to_many , :many_to_many [r[:remove_all_method]] when :one_to_one [r[:setter_method], nil] else raise(Error, "Can't nullify many_to_one associated objects: association: #{association}") end else raise(Error, "Can only nullify many_to_many associations: association: #{association}") if type == :many_to_many r[:dataset_method] end end end # Freeze association dependencies when freezing model class. def freeze @association_dependencies.freeze.each_value(&:freeze) super end Plugins.inherited_instance_variables(self, :@association_dependencies=>:hash_dup) end module InstanceMethods # Run the delete and destroy association dependency actions for # many_to_one associations. def after_destroy super model.association_dependencies[:after_delete].each{|m| public_send(m).delete} model.association_dependencies[:after_destroy].each{|m| public_send(m).destroy} end # Run the delete, destroy, and nullify association dependency actions for # *_to_many associations. 
        def before_destroy
          model.association_dependencies[:before_delete].each{|m| public_send(m).delete}
          model.association_dependencies[:before_destroy].each{|m| public_send(m).destroy}
          model.association_dependencies[:before_nullify].each{|args| public_send(*args)}
          super
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/association_lazy_eager_option.rb
# frozen-string-literal: true

module Sequel
  module Plugins
    # The association_lazy_eager_option plugin supports passing
    # an +:eager+ option to an association method. If the related
    # association is already cached, the cached version will be
    # returned. If the association is not already cached, it will
    # be loaded, and the value of the +:eager+ option will be used
    # to perform an eager load of the given associations.
    #
    # With Sequel's default behavior, you can already perform an
    # eager load when lazy loading using a block:
    #
    #   obj.association{|ds| ds.eager(:nested_association)}
    #
    # However, this will ignore any cached version. In more
    # complex software, the association may already be cached
    # and have the nested association cached inside of it, and
    # using this callback approach then requires 2 unnecessary
    # queries. This plugin will not perform any queries if the
    # association is already cached, preventing duplicate work.
    # However, you should make sure that an already loaded
    # association has the nested association already eagerly
    # loaded.
    #
    # Usage:
    #
    #   # Make all model subclasses support the :eager association
    #   # method option (called before loading subclasses)
    #   Sequel::Model.plugin :association_lazy_eager_option
    #
    #   # Make the Album class support the :eager association
    #   # method option
    #   Album.plugin :association_lazy_eager_option
    module AssociationLazyEagerOption
      module InstanceMethods
        private

        # Return a dataset for the association after applying any dynamic callback.
        def _associated_dataset(opts, dynamic_opts)
          ds = super
          if eager = dynamic_opts[:eager]
            ds = ds.eager(eager)
          end
          ds
        end

        # A placeholder literalizer that can be used to load the association, or nil to not use one.
        def _associated_object_loader(opts, dynamic_opts)
          return if dynamic_opts[:eager]
          super
        end

        # Whether to use a simple primary key lookup on the associated class when loading.
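        # The optimized lookup must be skipped when :eager is given, since it
        # would bypass the dataset that the eager load is applied to. For
        # example (illustrative), album.artist(eager: :albums) has to go
        # through the full dataset path so that eager(:albums) takes effect.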
def load_with_primary_key_lookup?(opts, dynamic_opts) return false if dynamic_opts[:eager] super end end end end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/association_multi_add_remove.rb������������������������������������0000664�0000000�0000000�00000007176�14342141206�0025100�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The association_multi_add_remove plugin allows adding, removing and setting # multiple associated objects in a single method call. # By default Sequel::Model defines singular <tt>add_*</tt> and <tt>remove_*</tt> # methods that operate on a single associated object, this adds plural forms # that operate on multiple associated objects. Example: # # artist.albums # => [album1] # artist.add_albums([album2, album3]) # artist.albums # => [album1, album2, album3] # artist.remove_albums([album3, album1]) # artist.albums # => [album2] # artist.albums = [album2, album3] # artist.albums # => [album2, album3] # # It can handle all situations that the normal singular methods handle, but there is # no attempt to optimize behavior, so using these methods will not improve performance. # # The add/remove/set methods defined by this plugin use a transaction, # so if one add/remove/set fails and raises an exception, all adds/removes/set # will be rolled back. If you are using database sharding and want to save # to a specific shard, call Model#set_server to set the server for this instance, # as the transaction will be opened on that server. # # You can customize the method names used for adding/removing multiple associated # objects using the :multi_add_method and :multi_remove_method association options. # # Usage: # # # Allow adding/removing/setting multiple associated objects in a single call # # for all model subclass instances (called before loading subclasses): # Sequel::Model.plugin :association_multi_add_remove # # # Allow adding/removing/setting multiple associated objects in a single call # # for Album instances (called before defining associations in the class): # Album.plugin :association_multi_add_remove module AssociationMultiAddRemove module ClassMethods private # Define the methods use to add/remove/set multiple associated objects # in a single method call. 
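          # For an association named :albums this defines, roughly (sketch):
          #
          #   artist.add_albums([a1, a2])    # calls add_album for each object
          #   artist.remove_albums([a1, a2]) # calls remove_album for each object
          #   artist.albums = [a1, a2]       # adds/removes to match the array
          #
          # with each call wrapped in a single transaction.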
          def def_association_instance_methods(opts)
            super

            if opts[:adder]
              add_method = opts[:add_method]
              multi_add_method = opts[:multi_add_method] || :"add_#{opts[:name]}"
              multi_add_method = nil if add_method == multi_add_method
              if multi_add_method
                association_module_def(multi_add_method, opts) do |objs, *args|
                  db.transaction(:server=>@server){objs.map{|obj| send(add_method, obj, *args)}.compact}
                end
              end
            end

            if opts[:remover]
              remove_method = opts[:remove_method]
              multi_remove_method = opts[:multi_remove_method] || :"remove_#{opts[:name]}"
              multi_remove_method = nil if remove_method == multi_remove_method
              if multi_remove_method
                association_module_def(multi_remove_method, opts) do |objs, *args|
                  db.transaction(:server=>@server){objs.map{|obj| send(remove_method, obj, *args)}.compact}
                end
              end
            end

            if multi_add_method && multi_remove_method
              association_module_def(:"#{opts[:name]}=", opts) do |objs, *args|
                db.transaction(:server=>@server) do
                  existing_objs = send(opts.association_method)
                  send(multi_remove_method, (existing_objs - objs), *args)
                  send(multi_add_method, (objs - existing_objs), *args)
                  nil
                end
              end
            end
          end
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/association_pks.rb
# frozen-string-literal: true

module Sequel
  module Plugins
    # The association_pks plugin adds association_pks, association_pks=, and
    # association_pks_dataset instance methods to the model class for each
    # one_to_many and many_to_many association added. These methods allow for
    # easily returning the primary keys of the associated objects, and easily
    # modifying which objects are associated:
    #
    #   Artist.one_to_many :albums
    #   artist = Artist[1]
    #   artist.album_pks_dataset
    #   # SELECT id FROM albums WHERE (albums.artist_id = 1)
    #
    #   artist.album_pks # [1, 2, 3]
    #   artist.album_pks = [2, 4]
    #   artist.album_pks # [2, 4]
    #   artist.save # Persist changes
    #
    # Note that it uses the singular form of the association name. Also note
    # that the setter both associates to new primary keys not in the association
    # and disassociates from primary keys not provided to the method.
    #
    # This plugin makes modifications directly to the underlying tables,
    # it does not create or return any model objects, and therefore does
    # not call any callbacks. If you have any association callbacks,
    # you probably should not use the setter methods this plugin adds.
    #
    # By default, changes to the association will not happen until the object
    # is saved. However, using the delay_pks: false association option, you can have
    # the changes made immediately when the association_pks setter method is called.
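    # For example (illustrative):
    #
    #   Artist.one_to_many :albums, delay_pks: false
    #   artist.album_pks = [1, 2] # updates the albums table immediately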
# # By default, repeated calls to the association_pks getter method will not be # cached, unless the setter method has been used and the delay_pks: false # association option is not used. You can set caching of repeated calls to the # association_pks getter method using the :cache_pks association option. You can # pass the :refresh option when calling the getter method to ignore any existing # cached values, similar to how the :refresh option works with associations. # # By default, if you pass a nil value to the setter, an exception will be raised. # You can change this behavior by using the :association_pks_nil association option. # If set to :ignore, the setter will take no action if nil is given. # If set to :remove, the setter will treat the nil as an empty array, removing # the association all currently associated values. # # For many_to_many associations, association_pks assumes the related pks can be # accessed directly from the join table. This works in most cases, but in cases # where the :right_primary_key association option is used to specify a different # primary key in the associated table, association_pks will return the value of # the association primary keys (foreign key values to associated table in the join # table), not the associated model primary keys. If you would like to use the # associated model primary keys, you need to use the # :association_pks_use_associated_table association option. If the # :association_pks_use_associated_table association option is used, no setter # method will be added. # # Usage: # # # Make all model subclass *_to_many associations have association_pks # # methods (called before loading subclasses) # Sequel::Model.plugin :association_pks # # # Make the Album *_to_many associations have association_pks # # methods (called before the association methods) # Album.plugin :association_pks module AssociationPks module ClassMethods private # Define a association_pks method using the block for the association reflection def def_association_pks_methods(opts) association_module_def(opts[:pks_dataset_method], &opts[:pks_dataset]) opts[:pks_getter_method] = :"#{singularize(opts[:name])}_pks_getter" association_module_def(opts[:pks_getter_method], &opts[:pks_getter]) association_module_def(:"#{singularize(opts[:name])}_pks", opts){|dynamic_opts=OPTS| _association_pks_getter(opts, dynamic_opts)} if opts[:pks_setter] opts[:pks_setter_method] = :"#{singularize(opts[:name])}_pks_setter" association_module_def(opts[:pks_setter_method], &opts[:pks_setter]) association_module_def(:"#{singularize(opts[:name])}_pks=", opts){|pks| _association_pks_setter(opts, pks)} end end # Add a getter that checks the join table for matching records and # a setter that deletes from or inserts into the join table. def def_many_to_many(opts) super return if opts[:type] == :one_through_one # Grab values from the reflection so that the hash lookup only needs to be # done once instead of inside every method call. lk, lpk, rk = opts.values_at(:left_key, :left_primary_key, :right_key) clpk = lpk.is_a?(Array) crk = rk.is_a?(Array) dataset_method = opts[:pks_dataset_method] = :"#{singularize(opts[:name])}_pks_dataset" opts[:pks_dataset] = if join_associated_table = opts[:association_pks_use_associated_table] tname = opts[:join_table] lambda do cond = if clpk lk.zip(lpk).map{|k, pk| [Sequel.qualify(tname, k), get_column_value(pk)]} else {Sequel.qualify(tname, lk) => get_column_value(lpk)} end rpk = opts.associated_class.primary_key opts.associated_dataset. naked.where(cond). 
select(*Sequel.public_send(rpk.is_a?(Array) ? :deep_qualify : :qualify, opts.associated_class.table_name, rpk)) end elsif clpk lambda do cond = lk.zip(lpk).map{|k, pk| [k, get_column_value(pk)]} _join_table_dataset(opts).where(cond).select(*rk) end else lambda do _join_table_dataset(opts).where(lk=>get_column_value(lpk)).select(*rk) end end opts[:pks_getter] = if join_associated_table = opts[:association_pks_use_associated_table] lambda do public_send(dataset_method).map(opts.associated_class.primary_key) end else lambda do public_send(dataset_method).map(rk) end end if !opts[:read_only] && !join_associated_table opts[:pks_setter] = lambda do |pks| if pks.empty? public_send(opts[:remove_all_method]) else checked_transaction do if clpk lpkv = lpk.map{|k| get_column_value(k)} cond = lk.zip(lpkv) else lpkv = get_column_value(lpk) cond = {lk=>lpkv} end ds = _join_table_dataset(opts).where(cond) ds.exclude(rk=>pks).delete pks -= ds.select_map(rk) lpkv = Array(lpkv) key_array = crk ? pks.map{|pk| lpkv + pk} : pks.map{|pk| lpkv + [pk]} key_columns = Array(lk) + Array(rk) ds.import(key_columns, key_array) end end end end def_association_pks_methods(opts) end # Add a getter that checks the association dataset and a setter # that updates the associated table. def def_one_to_many(opts) super return if opts[:type] == :one_to_one key = opts[:key] dataset_method = opts[:pks_dataset_method] = :"#{singularize(opts[:name])}_pks_dataset" opts[:pks_dataset] = lambda do public_send(opts[:dataset_method]).select(*opts.associated_class.primary_key) end opts[:pks_getter] = lambda do public_send(dataset_method).map(opts.associated_class.primary_key) end unless opts[:read_only] opts[:pks_setter] = lambda do |pks| if pks.empty? public_send(opts[:remove_all_method]) else primary_key = opts.associated_class.primary_key pkh = {primary_key=>pks} if key.is_a?(Array) h = {} nh = {} key.zip(pk).each do|k, v| h[k] = v nh[k] = nil end else h = {key=>pk} nh = {key=>nil} end checked_transaction do ds = public_send(opts.dataset_method) ds.unfiltered.where(pkh).update(h) ds.exclude(pkh).update(nh) end end end end def_association_pks_methods(opts) end end module InstanceMethods # After creating an object, if there are any saved association pks, # call the related association pks setters. def after_save if assoc_pks = @_association_pks assoc_pks.each do |name, pks| # pks_setter_method is private send(model.association_reflection(name)[:pks_setter_method], pks) end @_association_pks = nil end super end # Clear the associated pks if explicitly refreshing. def refresh @_association_pks = nil super end private # Return the primary keys of the associated objects. # If the receiver is a new object, return any saved # pks, or an empty array if no pks have been saved. def _association_pks_getter(opts, dynamic_opts=OPTS) do_cache = opts[:cache_pks] delay = opts.fetch(:delay_pks, true) cache_or_delay = do_cache || delay if dynamic_opts[:refresh] && @_association_pks @_association_pks.delete(opts[:name]) end if new? && cache_or_delay (@_association_pks ||= {})[opts[:name]] ||= [] elsif cache_or_delay && @_association_pks && (objs = @_association_pks[opts[:name]]) objs elsif do_cache # pks_getter_method is private (@_association_pks ||= {})[opts[:name]] = send(opts[:pks_getter_method]) else # pks_getter_method is private send(opts[:pks_getter_method]) end end # Update which objects are associated to the receiver. # If the receiver is a new object, save the pks # so the update can happen after the receiver has been saved. 
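      # (Illustrative: a new artist has no primary key yet, so
      # Artist.new(name: 'A').album_pks = [1, 2] only records the pks;
      # the albums table is updated in after_save, once the artist has an id.)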
def _association_pks_setter(opts, pks) if pks.nil? case opts[:association_pks_nil] when :remove pks = [] when :ignore return else raise Error, "nil value given to association_pks setter" end end pks = convert_pk_array(opts, pks) if opts.fetch(:delay_pks, true) modified! (@_association_pks ||= {})[opts[:name]] = pks else # pks_setter_method is private send(opts[:pks_setter_method], pks) end end # If the associated class's primary key column type is integer, # typecast all provided values to integer before using them. def convert_pk_array(opts, pks) klass = opts.associated_class primary_key = klass.primary_key sch = klass.db_schema if primary_key.is_a?(Array) if (cols = sch.values_at(*klass.primary_key)).all? && (convs = cols.map{|c| c[:type] == :integer}).all? db = model.db pks.map do |cpk| cpk.map do |pk| db.typecast_value(:integer, pk) end end else pks end elsif (col = sch[klass.primary_key]) && (col[:type] == :integer) pks.map{|pk| model.db.typecast_value(:integer, pk)} else pks end end end end end end ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/association_proxies.rb���������������������������������������������0000664�0000000�0000000�00000012506�14342141206�0023243�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # Sequel by default does not use proxies for associations. The association # method for *_to_many associations returns an array, and the association_dataset # method returns a dataset. This plugin makes the association method return a proxy # that will load the association and call a method on the association array if sent # an array method, and otherwise send the method to the association's dataset. # # You can override which methods to forward to the dataset by passing a block to the plugin: # # plugin :association_proxies do |opts| # [:find, :where, :create].include?(opts[:method]) # end # # If the block returns false or nil, the method is sent to the array of associated # objects. Otherwise, the method is sent to the association dataset. 
Here are the entries # in the hash passed to the block: # # :method :: The name of the method # :arguments :: The arguments to the method # :block :: The block given to the method # :instance :: The model instance related to the call # :reflection :: The reflection for the association related to the call # :proxy_argument :: The argument given to the association method call # :proxy_block :: The block given to the association method call # # For example, in a call like: # # artist.albums(1){|ds| ds}.foo(2){|x| 3} # # The opts passed to the block would be: # # { # :method => :foo, # :arguments => [2], # :block => {|x| 3}, # :instance => artist, # :reflection => AssociationReflection instance, # :proxy_argument => 1, # :proxy_block => {|ds| ds} # } # # Usage: # # # Use association proxies in all model subclasses (called before loading subclasses) # Sequel::Model.plugin :association_proxies # # # Use association proxies in a specific model subclass # Album.plugin :association_proxies module AssociationProxies def self.configure(model, &block) model.instance_exec do @association_proxy_to_dataset = block if block @association_proxy_to_dataset ||= AssociationProxy::DEFAULT_PROXY_TO_DATASET end end # A proxy for the association. Calling an array method will load the # associated objects and call the method on the associated object array. # Calling any other method will call that method on the association's dataset. class AssociationProxy < BasicObject array = [].freeze if RUBY_VERSION < '2.6' # :nocov: # Default proc used to determine whether to send the method to the dataset. # If the array would respond to it, sends it to the array instead of the dataset. DEFAULT_PROXY_TO_DATASET = proc do |opts| array_method = array.respond_to?(opts[:method]) if !array_method && opts[:method] == :filter Sequel::Deprecation.deprecate "The behavior of the #filter method for association proxies will change in Ruby 2.6. Switch from using #filter to using #where to conserve current behavior." end !array_method end # :nocov: else DEFAULT_PROXY_TO_DATASET = proc{|opts| !array.respond_to?(opts[:method])} end # Set the association reflection to use, and whether the association should be # reloaded if an array method is called. def initialize(instance, reflection, proxy_argument, &proxy_block) @instance = instance @reflection = reflection @proxy_argument = proxy_argument @proxy_block = proxy_block end # Call the method given on the array of associated objects if the method # is an array method, otherwise call the method on the association's dataset. def method_missing(meth, *args, &block) v = if @instance.model.association_proxy_to_dataset.call(:method=>meth, :arguments=>args, :block=>block, :instance=>@instance, :reflection=>@reflection, :proxy_argument=>@proxy_argument, :proxy_block=>@proxy_block) @instance.public_send(@reflection[:dataset_method]) else @instance.send(:load_associated_objects, @reflection, @proxy_argument, &@proxy_block) end v.public_send(meth, *args, &block) end # :nocov: ruby2_keywords(:method_missing) if respond_to?(:ruby2_keywords, true) # :nocov: end module ClassMethods # Proc that accepts a method name, array of arguments, and block and # should return a truthy value to send the method to the dataset instead of the # array of associated objects. attr_reader :association_proxy_to_dataset Plugins.inherited_instance_variables(self, :@association_proxy_to_dataset=>nil) private # Changes the association method to return a proxy instead of the associated objects # directly. 
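        # For example (illustrative only, assuming an Artist model with a
        # one_to_many :albums association), the proxy decides per method call
        # where to dispatch:
        #
        #   artist.albums.where(gold: true) # not an Array method: sent to the dataset
        #   artist.albums.each{|a| p a}     # Array method: loads the associated objects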
        def def_association_method(opts)
          if opts.returns_array?
            association_module_def(opts.association_method, opts) do |dynamic_opts=OPTS, &block|
              AssociationProxy.new(self, opts, dynamic_opts, &block)
            end
          else
            super
          end
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/async_thread_pool.rb
# frozen-string-literal: true

module Sequel
  extension 'async_thread_pool'

  module Plugins
    # The async_thread_pool plugin makes it slightly easier to use the async_thread_pool
    # Database extension with models.  It makes Model.async return an async dataset for the
    # model, and supports async behavior for #destroy, #with_pk, and #with_pk! for model
    # datasets:
    #
    #   # Will load the artist with primary key 1 asynchronously
    #   artist = Artist.async.with_pk(1)
    #
    # You must load the async_thread_pool Database extension into the Database object the
    # model class uses in order for async behavior to work.
    #
    # Usage:
    #
    #   # Make all model subclass datasets support async class methods and additional
    #   # async dataset methods
    #   Sequel::Model.plugin :async_thread_pool
    #
    #   # Make the Album class support async class methods and additional async dataset methods
    #   Album.plugin :async_thread_pool
    module AsyncThreadPool
      module ClassMethods
        Plugins.def_dataset_methods(self, :async)
      end

      module DatasetMethods
        [:destroy, :with_pk, :with_pk!].each do |meth|
          ::Sequel::Database::AsyncThreadPool::DatasetMethods.define_async_method(self, meth)
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/auto_restrict_eager_graph.rb
# frozen-string-literal: true

module Sequel
  module Plugins
    # The auto_restrict_eager_graph plugin will automatically disallow the use
    # of eager_graph for associations that have associated blocks but no :graph_*
    # association options.  The reason for this is that the block will have an effect
    # during regular and eager loading, but not loading via eager_graph, and it
    # is likely that whatever the block is doing should have an equivalent done
    # when eager_graphing.  Most likely, not including a :graph_* option was either
    # an oversight (and one should be added), or use with eager_graph was never
    # intended (and usage should be forbidden).  Disallowing eager_graph in this
    # case prevents likely unexpected behavior during eager_graph.
# # As an example of this, consider the following code: # # Album.one_to_many :popular_tracks, class: :Track do |ds| # ds = ds.where(popular: true) # end # # Album.eager(:popular_tracks).all # # SELECT * FROM albums # # SELECT * FROM tracks WHERE ((popular IS TRUE) AND (album_id IN (...))) # # # Notice that no condition for tracks.popular is added. # Album.eager_graph(:popular_tracks).all # # SELECT ... FROM albums LEFT JOIN tracks ON (tracks.album_id = albums.id) # # With the auto_restrict_eager_graph plugin, the eager_graph call above will # raise an error, alerting you to the fact that you either should not be # using eager_graph with the association, or that you should be adding an # appropriate :graph_* option, such as: # # Album.one_to_many :popular_tracks, class: :Track, graph_conditions: {popular: true} do |ds| # ds = ds.where(popular: true) # end # # Usage: # # # Automatically restrict eager_graph for associations if appropriate for all # # model subclasses (called before loading subclasses) # Sequel::Model.plugin :auto_restrict_eager_graph # # # Automatically restrict eager_graph for associations in Album class # Album.plugin :auto_restrict_eager_graph module AutoRestrictEagerGraph module ClassMethods # When defining an association, if a block is given for the association, but # a :graph_* option is not used, disallow the use of eager_graph. def associate(type, name, opts = OPTS, &block) opts = super if opts[:block] && !opts.has_key?(:allow_eager_graph) && !opts[:orig_opts].any?{|k,| /\Agraph_/ =~ k} opts[:allow_eager_graph] = false end opts end end end end end ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/auto_validations.rb������������������������������������������������0000664�0000000�0000000�00000031711�14342141206�0022522�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The auto_validations plugin automatically sets up the following types of validations # for your model columns: # # 1. type validations for all columns # 2. not_null validations on NOT NULL columns (optionally, presence validations) # 3. unique validations on columns or sets of columns with unique indexes # 4. max length validations on string columns # 5. no null byte validations on string columns # 6. minimum and maximum values on columns # # To determine the columns to use for the type/not_null/max_length/no_null_byte/max_value/min_value validations, # the plugin looks at the database schema for the model's table. To determine # the unique validations, Sequel looks at the indexes on the table. In order # for this plugin to be fully functional, the underlying database adapter needs # to support both schema and index parsing. 
Additionally, unique validations are # only added for models that select from a simple table, they are not added for models # that select from a subquery. # # This plugin uses the validation_helpers plugin underneath to implement the # validations. It does not allow for any per-column validation message # customization, but you can alter the messages for the given type of validation # on a per-model basis (see the validation_helpers documentation). # # You can skip certain types of validations from being automatically added via: # # Model.skip_auto_validations(:not_null) # # If you want to skip all auto validations (only useful if loading the plugin # in a superclass): # # Model.skip_auto_validations(:all) # # It is possible to skip auto validations on a per-model-instance basis via: # # instance.skip_auto_validations(:unique, :not_null) do # puts instance.valid? # end # # By default, the plugin uses a not_null validation for NOT NULL columns, but that # can be changed to a presence validation using an option: # # Model.plugin :auto_validations, not_null: :presence # # This is useful if you want to enforce that NOT NULL string columns do not # allow empty values. # # You can also supply hashes to pass options through to the underlying validators: # # Model.plugin :auto_validations, unique_opts: {only_if_modified: true} # # This works for unique_opts, max_length_opts, schema_types_opts, max_value_opts, min_value_opts, no_null_byte_opts, # explicit_not_null_opts, and not_null_opts. # # If you only want auto_validations to add validations to columns that do not already # have an error associated with them, you can use the skip_invalid option: # # Model.plugin :auto_validations, skip_invalid: true # # Usage: # # # Make all model subclass use auto validations (called before loading subclasses) # Sequel::Model.plugin :auto_validations # # # Make the Album class use auto validations # Album.plugin :auto_validations module AutoValidations NOT_NULL_OPTIONS = {:from=>:values}.freeze EXPLICIT_NOT_NULL_OPTIONS = {:from=>:values, :allow_missing=>true}.freeze MAX_LENGTH_OPTIONS = {:from=>:values, :allow_nil=>true}.freeze SCHEMA_TYPES_OPTIONS = NOT_NULL_OPTIONS UNIQUE_OPTIONS = NOT_NULL_OPTIONS NO_NULL_BYTE_OPTIONS = MAX_LENGTH_OPTIONS MAX_VALUE_OPTIONS = {:from=>:values, :allow_nil=>true, :skip_invalid=>true}.freeze MIN_VALUE_OPTIONS = MAX_VALUE_OPTIONS AUTO_VALIDATE_OPTIONS = { :no_null_byte=>NO_NULL_BYTE_OPTIONS, :not_null=>NOT_NULL_OPTIONS, :explicit_not_null=>EXPLICIT_NOT_NULL_OPTIONS, :max_length=>MAX_LENGTH_OPTIONS, :max_value=>MAX_VALUE_OPTIONS, :min_value=>MIN_VALUE_OPTIONS, :schema_types=>SCHEMA_TYPES_OPTIONS, :unique=>UNIQUE_OPTIONS }.freeze EMPTY_ARRAY = [].freeze def self.apply(model, opts=OPTS) model.instance_exec do plugin :validation_helpers @auto_validate_presence = false @auto_validate_no_null_byte_columns = [] @auto_validate_not_null_columns = [] @auto_validate_explicit_not_null_columns = [] @auto_validate_max_length_columns = [] @auto_validate_max_value_columns = [] @auto_validate_min_value_columns = [] @auto_validate_unique_columns = [] @auto_validate_types = true @auto_validate_options = AUTO_VALIDATE_OPTIONS end end # Setup auto validations for the model if it has a dataset. 
      def self.configure(model, opts=OPTS)
        model.instance_exec do
          setup_auto_validations if @dataset
          if opts[:not_null] == :presence
            @auto_validate_presence = true
          end

          h = @auto_validate_options.dup
          [:not_null, :explicit_not_null, :max_length, :max_value, :min_value, :no_null_byte, :schema_types, :unique].each do |type|
            if type_opts = opts[:"#{type}_opts"]
              h[type] = h[type].merge(type_opts).freeze
            end
          end

          if opts[:skip_invalid]
            [:not_null, :explicit_not_null, :no_null_byte, :max_length, :schema_types].each do |type|
              h[type] = h[type].merge(:skip_invalid=>true).freeze
            end
          end
          @auto_validate_options = h.freeze
        end
      end

      module ClassMethods
        # The columns with automatic no_null_byte validations
        attr_reader :auto_validate_no_null_byte_columns

        # The columns with automatic not_null validations
        attr_reader :auto_validate_not_null_columns

        # The columns with automatic not_null validations for columns present in the values.
        attr_reader :auto_validate_explicit_not_null_columns

        # The columns or sets of columns with automatic max_length validations, as an array of
        # pairs, with the first entry being the column name and second entry being the maximum length.
        attr_reader :auto_validate_max_length_columns

        # The columns with automatic max value validations, as an array of
        # pairs, with the first entry being the column name and second entry being the maximum value.
        attr_reader :auto_validate_max_value_columns

        # The columns with automatic min value validations, as an array of
        # pairs, with the first entry being the column name and second entry being the minimum value.
        attr_reader :auto_validate_min_value_columns

        # The columns or sets of columns with automatic unique validations
        attr_reader :auto_validate_unique_columns

        # Inherited options
        attr_reader :auto_validate_options

        Plugins.inherited_instance_variables(self, :@auto_validate_presence=>nil, :@auto_validate_types=>nil, :@auto_validate_no_null_byte_columns=>:dup, :@auto_validate_not_null_columns=>:dup, :@auto_validate_explicit_not_null_columns=>:dup, :@auto_validate_max_length_columns=>:dup, :@auto_validate_max_value_columns=>:dup, :@auto_validate_min_value_columns=>:dup, :@auto_validate_unique_columns=>:dup, :@auto_validate_options => :dup)
        Plugins.after_set_dataset(self, :setup_auto_validations)

        # Whether to use a presence validation for not null columns
        def auto_validate_presence?
          @auto_validate_presence
        end

        # Whether to automatically validate schema types for all columns
        def auto_validate_types?
          @auto_validate_types
        end

        # Freeze auto_validation settings when freezing model class.
        def freeze
          @auto_validate_no_null_byte_columns.freeze
          @auto_validate_not_null_columns.freeze
          @auto_validate_explicit_not_null_columns.freeze
          @auto_validate_max_length_columns.freeze
          @auto_validate_max_value_columns.freeze
          @auto_validate_min_value_columns.freeze
          @auto_validate_unique_columns.freeze

          super
        end

        # Skip automatic validations for the given validation type
        # (:not_null, :no_null_byte, :types, :unique, :max_length, :max_value, :min_value).
        # If :all is given as the type, skip all auto validations.
        #
        # Skipping types validation automatically skips max_value and min_value validations,
        # since those validations require valid types.
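        #
        # A brief sketch (the Album class here is hypothetical):
        #
        #   Album.skip_auto_validations(:unique) # drop automatic unique validations
        #   Album.skip_auto_validations(:types)  # also skips max_value/min_value
        #   Album.skip_auto_validations(:all)    # no auto validations at all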
def skip_auto_validations(type) case type when :all [:not_null, :no_null_byte, :types, :unique, :max_length, :max_value, :min_value].each{|v| skip_auto_validations(v)} when :not_null auto_validate_not_null_columns.clear auto_validate_explicit_not_null_columns.clear when :types @auto_validate_types = false else public_send("auto_validate_#{type}_columns").clear end end private # Parse the database schema and indexes and record the columns to automatically validate. def setup_auto_validations not_null_cols, explicit_not_null_cols = db_schema.select{|col, sch| sch[:allow_null] == false}.partition{|col, sch| sch[:default].nil?}.map{|cs| cs.map{|col, sch| col}} @auto_validate_not_null_columns = not_null_cols - Array(primary_key) explicit_not_null_cols += Array(primary_key) @auto_validate_explicit_not_null_columns = explicit_not_null_cols.uniq @auto_validate_max_length_columns = db_schema.select{|col, sch| sch[:type] == :string && sch[:max_length].is_a?(Integer)}.map{|col, sch| [col, sch[:max_length]]} @auto_validate_max_value_columns = db_schema.select{|col, sch| sch[:max_value]}.map{|col, sch| [col, sch[:max_value]]} @auto_validate_min_value_columns = db_schema.select{|col, sch| sch[:min_value]}.map{|col, sch| [col, sch[:min_value]]} @auto_validate_no_null_byte_columns = db_schema.select{|_, sch| sch[:type] == :string}.map{|col, _| col} table = dataset.first_source_table @auto_validate_unique_columns = if db.supports_index_parsing? && [Symbol, SQL::QualifiedIdentifier, SQL::Identifier, String].any?{|c| table.is_a?(c)} db.indexes(table).select{|name, idx| idx[:unique] == true}.map{|name, idx| idx[:columns].length == 1 ? idx[:columns].first : idx[:columns]} else [] end end end module InstanceMethods # Skip the given types of auto validations on this instance inside the block. def skip_auto_validations(*types) types << :all if types.empty? @_skip_auto_validations = types yield ensure @_skip_auto_validations = nil end # Validate the model's auto validations columns def validate super skip = @_skip_auto_validations || EMPTY_ARRAY return if skip.include?(:all) opts = model.auto_validate_options unless skip.include?(:no_null_byte) || (no_null_byte_columns = model.auto_validate_no_null_byte_columns).empty? validates_no_null_byte(no_null_byte_columns, opts[:no_null_byte]) end unless skip.include?(:not_null) not_null_method = model.auto_validate_presence? ? :validates_presence : :validates_not_null unless (not_null_columns = model.auto_validate_not_null_columns).empty? public_send(not_null_method, not_null_columns, opts[:not_null]) end unless (not_null_columns = model.auto_validate_explicit_not_null_columns).empty? public_send(not_null_method, not_null_columns, opts[:explicit_not_null]) end end unless skip.include?(:max_length) || (max_length_columns = model.auto_validate_max_length_columns).empty? max_length_columns.each do |col, len| validates_max_length(len, col, opts[:max_length]) end end unless skip.include?(:types) || !model.auto_validate_types? validates_schema_types(keys, opts[:schema_types]) unless skip.include?(:max_value) || ((max_value_columns = model.auto_validate_max_value_columns).empty?) max_value_columns.each do |col, max| validates_max_value(max, col, opts[:max_value]) end end unless skip.include?(:min_value) || ((min_value_columns = model.auto_validate_min_value_columns).empty?) 
min_value_columns.each do |col, min| validates_min_value(min, col, opts[:min_value]) end end end unless skip.include?(:unique) unique_opts = Hash[opts[:unique]] if model.respond_to?(:sti_dataset) unique_opts[:dataset] = model.sti_dataset end model.auto_validate_unique_columns.each{|cols| validates_unique(cols, unique_opts)} end end end end end end �������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/auto_validations_constraint_validations_presence_message.rb��������0000664�0000000�0000000�00000005360�14342141206�0032754�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The auto_validations_constraint_validations_presence_message plugin provides # integration for the auto_validations and constraint_validations plugins in # the following situation: # # * A column has a NOT NULL constraint in the database # * A constraint validation for presence exists on the column, with a :message # option to set a column-specific message, and with the :allow_nil option set # to true because the CHECK constraint doesn't need to check for NULL values # as the column itself is NOT NULL # # In this case, by default the validation error message on the column will # use the more specific constraint validation error message if the column # has a non-NULL empty value, but will use the default auto_validations # message if the column has a NULL value. With this plugin, the column-specific # constraint validation error message will be used in both cases. 
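    # As a sketch of the situation this plugin handles (the table and message
    # are illustrative), using the constraint_validations extension:
    #
    #   DB.alter_table(:albums) do
    #     validate do
    #       presence :name, message: 'must be given', allow_nil: true
    #     end
    #   end
    #
    # With a NOT NULL albums.name column, both empty strings and NULL values
    # will then use the 'must be given' message.
    #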
# # Usage: # # # Make all model subclasses use this auto_validations/constraint_validations # # integration (called before loading subclasses) # Sequel::Model.plugin :auto_validations_constraint_validations_presence_message # # # Make the Album class use this auto_validations/constraint_validations integration # Album.plugin :auto_validations_constraint_validations_presence_message module AutoValidationsConstraintValidationsPresenceMessage def self.apply(model) model.plugin :auto_validations model.plugin :constraint_validations end def self.configure(model, opts=OPTS) model.send(:_adjust_auto_validations_constraint_validations_presence_message) end module ClassMethods Plugins.after_set_dataset(self, :_adjust_auto_validations_constraint_validations_presence_message) private def _adjust_auto_validations_constraint_validations_presence_message if @dataset && !@auto_validate_options[:not_null][:message] && !@auto_validate_options[:explicit_not_null][:message] @constraint_validations.each do |array| meth, column, opts = array if meth == :validates_presence && opts && opts[:message] && opts[:allow_nil] && (@auto_validate_not_null_columns.include?(column) || @auto_validate_explicit_not_null_columns.include?(column)) @auto_validate_not_null_columns.delete(column) @auto_validate_explicit_not_null_columns.delete(column) array[2] = array[2].merge(:allow_nil=>false) end end end end end end end end ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/before_after_save.rb�����������������������������������������������0000664�0000000�0000000�00000000153�14342141206�0022612�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins module BeforeAfterSave end end end ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/blacklist_security.rb����������������������������������������������0000664�0000000�0000000�00000010321�14342141206�0023046�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The blacklist_security plugin contains blacklist-based support for # mass assignment, specifying which columns to not allow mass assignment for, # implicitly allowing mass assignment for columns not listed. This is only # for backwards compatibility, it should not be used by new code. 
# # Usage: # # # Make all model subclasses support the blacklist security features. # Sequel::Model.plugin :blacklist_security # # # Make the Album class support the blacklist security features. # Album.plugin :blacklist_security module BlacklistSecurity # Special array subclass used for marking methods to be removed. class ExceptionList < Array end module ClassMethods # Which columns are specifically restricted in a call to set/update/new/etc. # (default: not set). Some columns are restricted regardless of # this setting, such as the primary key column and columns in Model::RESTRICTED_SETTER_METHODS. attr_reader :restricted_columns Plugins.inherited_instance_variables(self, :@restricted_columns=>:dup) # Freeze restricted columns when freezing model class. def freeze @restricted_columns.freeze super end # Set the columns to restrict when using mass assignment (e.g. +set+). Using this means that # attempts to call setter methods for the columns listed here will cause an # exception or be silently skipped (based on the +strict_param_setting+ setting). # If you have any virtual setter methods (methods that end in =) that you # want not to be used during mass assignment, they need to be listed here as well (without the =). # # It's generally a bad idea to rely on a blacklist approach for security. Using a whitelist # approach such as the whitelist_security plugin or the set_fields methods # is usually a better choice. So use of this method is generally a bad idea. # # Artist.set_restricted_columns(:records_sold) # Artist.set(name: 'Bob', hometown: 'Sactown') # No Error # Artist.set(name: 'Bob', records_sold: 30000) # Error def set_restricted_columns(*cols) clear_setter_methods_cache @restricted_columns = cols end private # If allowed_columns is not set but restricted_columns is, remove the # restricted_columns. def get_setter_methods meths = super if (!defined?(::Sequel::Plugins::WhitelistSecurity::ClassMethods) || !is_a?(::Sequel::Plugins::WhitelistSecurity::ClassMethods) || !allowed_columns) && restricted_columns meths -= restricted_columns.map{|x| "#{x}="} end meths end end module InstanceMethods # Set all values using the entries in the hash, except for the keys # given in except. You should probably use +set_fields+ # instead of this method, as blacklist approaches to security are a bad idea. # # artist.set_except({name: 'Jim'}, :hometown) # artist.name # => 'Jim' def set_except(hash, *except) set_restricted(hash, ExceptionList.new(except.flatten)) end # Update all values using the entries in the hash, except for the keys # given in except. You should probably use +update_fields+ # instead of this method, as blacklist approaches to security are a bad idea. # # artist.update_except({name: 'Jim'}, :hometown) # UPDATE artists SET name = 'Jim' WHERE (id = 1) def update_except(hash, *except) update_restricted(hash, ExceptionList.new(except.flatten)) end private # If set_except or update_except was used, remove the related methods from the list. def setter_methods(type) if type.is_a?(ExceptionList) meths = super(:all) meths -= Array(primary_key).map{|x| "#{x}="} if primary_key && model.restrict_primary_key? 
            meths -= type.map{|x| "#{x}="}
            meths
          else
            super
          end
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/boolean_readers.rb
# frozen-string-literal: true

module Sequel
  module Plugins
    # The boolean_readers plugin allows for the creation of attribute? methods
    # for boolean columns, which provides a nicer API.  By default, the accessors
    # are created for all columns of type :boolean.  However, you can provide a
    # block to the plugin to change the criteria used to determine if a
    # column is boolean.  The block is yielded with the column symbol for each
    # column in the model's dataset.
    #
    # Usage:
    #
    #   # Add boolean attribute? methods for all columns of type :boolean
    #   # in all model subclasses (called before loading subclasses)
    #   Sequel::Model.plugin :boolean_readers
    #
    #   # Add boolean readers for all tinyint columns in the Album class
    #   Album.plugin(:boolean_readers){|c| db_schema[c][:db_type] =~ /\Atinyint/}
    #
    #   # Add boolean readers for specific columns in the Artist class
    #   Artist.plugin(:boolean_readers){|c| [:column1, :column2, :column3].include?(c)}
    module BooleanReaders
      # Default proc for determining if given column is a boolean, which
      # just checks that the :type is boolean.
      DEFAULT_BOOLEAN_ATTRIBUTE_PROC = lambda{|c| s = db_schema[c] and s[:type] == :boolean}

      # Add the boolean_attribute? class method to the model, and create
      # attribute? boolean reader methods for the class's columns if the class has a dataset.
      def self.configure(model, &block)
        model.instance_exec do
          define_singleton_method(:boolean_attribute?, &(block || DEFAULT_BOOLEAN_ATTRIBUTE_PROC))
          send(:create_boolean_readers) if @dataset
        end
      end

      module ClassMethods
        Plugins.after_set_dataset(self, :create_boolean_readers)

        private

        # Add an attribute? method for the column to a module included in the class.
        def create_boolean_reader(column)
          overridable_methods_module.module_eval do
            define_method("#{column}?"){model.db.typecast_value(:boolean, get_column_value(column))}
          end
        end

        # Add attribute? methods for all of the boolean attributes for this model.
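        #
        # As an illustration (the model and column are assumed, not part of
        # this file), the generated reader typecasts the raw column value:
        #
        #   album.values[:active] = 1 # e.g. a raw value in the values hash
        #   album.active?             # => true (typecast to boolean)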
        def create_boolean_readers
          im = instance_methods.map(&:to_s)
          if cs = check_non_connection_error(false){columns}
            cs.each{|c| create_boolean_reader(c) if boolean_attribute?(c) && !im.include?("#{c}?")}
          end
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/boolean_subsets.rb
# frozen-string-literal: true

module Sequel
  module Plugins
    # The boolean_subsets plugin allows for the automatic creation of subsets
    # for boolean columns, which can DRY up model classes that define such subsets
    # manually.  By default, subsets are created for all columns of type :boolean,
    # with the subset name being the same as the column name, and the conditions being
    # <tt>column IS TRUE</tt> (assuming the database supports that syntax).
    #
    # You can provide a block to the plugin, which will be called with the column name
    # symbol, and should return an array of arguments to pass to +dataset_module.where+.
    # Using this, you can change the method name and arguments for each column.
    # This block is executed in the context of the model class.
    #
    # Usage:
    #
    #   # Add boolean subset methods for all columns of type :boolean
    #   # in all model subclasses (called before loading subclasses)
    #   Sequel::Model.plugin :boolean_subsets
    #
    #   # Add subsets for all boolean columns in the Album class
    #   Album.plugin(:boolean_subsets)
    #
    #   # Remove is_ from the front of the column name when creating the subset
    #   # method name, and use (column = 'Y') as the filter conditions
    #   Sequel::Model.plugin :boolean_subsets do |column|
    #     [column.to_s.sub(/\Ais_/, ''), {column=>'Y'}]
    #   end
    module BooleanSubsets
      # Create boolean subset methods for each boolean column.
      def self.configure(model, &block)
        model.instance_exec do
          if block
            define_singleton_method(:boolean_subset_args, &block)
            singleton_class.send(:private, :boolean_subset_args)
          end
          create_boolean_subsets if @dataset
        end
      end

      module ClassMethods
        Plugins.after_set_dataset(self, :create_boolean_subsets)

        private

        # The arguments to use when automatically defining a boolean subset for the given column.
        def boolean_subset_args(c)
          [c, {c=>true}]
        end

        # Add subset methods for all of the boolean columns in this model.
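        #
        # A hypothetical example: with a boolean active column and the default
        # boolean_subset_args, this effectively runs
        # dataset_module{where(:active, active: true)}, so (on databases
        # supporting IS TRUE):
        #
        #   Album.active # dataset filtered to rows where active is true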
        def create_boolean_subsets
          if cs = check_non_connection_error(false){columns}
            cs = cs.select{|c| db_schema[c][:type] == :boolean}.map{|c| boolean_subset_args(c)}
            dataset_module do
              cs.each{|c| where(*c)}
            end
          end
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/caching.rb
# frozen-string-literal: true

module Sequel
  module Plugins
    # Sequel's built-in caching plugin supports caching to any object that
    # implements the Ruby-Memcache API (or memcached API with the :ignore_exceptions
    # option):
    #
    #    cache_store.set(key, obj, time) # Associate the obj with the given key
    #                                    # in the cache for the time (specified
    #                                    # in seconds).
    #    cache_store.get(key) # => obj   # Returns object set with same key.
    #    cache_store.get(key2) # => nil  # nil returned if there isn't an object
    #                                    # currently in the cache with that key.
    #    cache_store.delete(key)         # Remove key from cache
    #
    # If the :ignore_exceptions option is true, exceptions raised by cache_store.get
    # are ignored and nil is returned instead.  The memcached API is to
    # raise an exception for a missing record, so if you use memcached, you will
    # want to use this option.
    #
    # Note that only lookups by primary key are cached using this plugin.  The following
    # methods use a lookup by primary key:
    #
    # * Model.with_pk
    # * Model.with_pk!
    # * Model.[] # when argument is not hash or nil
    # * many_to_one association method # without dynamic callback, when primary key matches
    #
    # You should not use this plugin if you are using sharding and there are different
    # rows for the same primary key on different shards.
    #
    # Usage:
    #
    #   # Make all subclasses use the same cache (called before loading subclasses)
    #   # using the Ruby-Memcache API, with the cache stored in the CACHE constant
    #   Sequel::Model.plugin :caching, CACHE
    #
    #   # Make the Album class use the cache with a 30 minute time-to-live
    #   Album.plugin :caching, CACHE, ttl: 1800
    #
    #   # Make the Artist class use a cache with the memcached protocol
    #   Artist.plugin :caching, MEMCACHED_CACHE, ignore_exceptions: true
    module Caching
      # Set the cache_store and cache_ttl attributes for the given model.
      # If the :ttl option is not given, 3600 seconds is the default.
      def self.configure(model, store, opts=OPTS)
        model.instance_exec do
          @cache_store = store
          @cache_ttl = opts[:ttl] || 3600
          @cache_ignore_exceptions = opts[:ignore_exceptions]
        end
      end

      module ClassMethods
        # If true, ignores exceptions when getting cached records (the memcached API).
        attr_reader :cache_ignore_exceptions

        # The cache store object for the model, which should implement the
        # Ruby-Memcache (or memcached) API
        attr_reader :cache_store

        # The time to live for the cache store, in seconds.
        attr_reader :cache_ttl

        # Delete the cached object with the given primary key.
        def cache_delete_pk(pk)
          cache_delete(cache_key(pk))
        end

        # Return the cached object with the given primary key,
        # or nil if no such object is in the cache.
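        #
        #   # Illustrative only, assuming the plugin was loaded with CACHE:
        #   Album.cache_get_pk(1)    # => cached Album instance, or nil on a miss
        #   Album.cache_delete_pk(1) # evict the row with primary key 1 from the cache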
        def cache_get_pk(pk)
          cache_get(cache_key(pk))
        end

        # Returns the prefix used to namespace this class in the cache.
        def cache_key_prefix
          to_s
        end

        # Return a key string for the given primary key.
        def cache_key(pk)
          raise(Error, 'no primary key for this record') unless pk.is_a?(Array) ? pk.all? : pk
          "#{cache_key_prefix}:#{Array(pk).join(',')}"
        end

        Plugins.inherited_instance_variables(self, :@cache_store=>nil, :@cache_ttl=>nil, :@cache_ignore_exceptions=>nil)

        # Set the time to live for the cache store, in seconds (default is 3600,
        # so 1 hour).
        def set_cache_ttl(ttl)
          @cache_ttl = ttl
        end

        private

        # Access the cache using the given method and key, rescuing exceptions if necessary.
        def cache_op(meth, ck)
          if @cache_ignore_exceptions
            @cache_store.public_send(meth, ck) rescue nil
          else
            @cache_store.public_send(meth, ck)
          end
        end

        # Delete the entry with the matching key from the cache
        def cache_delete(ck)
          cache_op(:delete, ck)
          nil
        end

        # Return the cached object, or nil if the object is not
        # in the cache
        def cache_get(ck)
          cache_op(:get, ck)
        end

        # Set the object in the cache_store with the given key for cache_ttl seconds.
        def cache_set(ck, obj)
          @cache_store.set(ck, obj, @cache_ttl)
        end

        # Check the cache before a database lookup unless a hash is supplied.
        def primary_key_lookup(pk)
          ck = cache_key(pk)
          unless obj = cache_get(ck)
            if obj = super(pk)
              cache_set(ck, obj)
            end
          end
          obj
        end
      end

      module InstanceMethods
        # Remove the object from the cache when updating
        def before_update
          cache_delete
          super
        end

        # Return a key unique to the underlying record for caching, based on the
        # primary key value(s) for the object.  If the model does not have a primary
        # key, raise an Error.
        def cache_key
          model.cache_key(pk)
        end

        # Remove the object from the cache when deleting
        def delete
          cache_delete
          super
        end

        private

        # Delete this object from the cache
        def cache_delete
          model.cache_delete_pk(pk)
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/class_table_inheritance.rb
# frozen-string-literal: true

module Sequel
  module Plugins
    # = Overview
    #
    # The class_table_inheritance plugin uses the single_table_inheritance
    # plugin, so it supports all of the single_table_inheritance features, but it
    # additionally supports subclasses that have additional columns,
    # which are stored in a separate table with a key referencing the primary table.
    # = Detail
    #
    # For example, with this hierarchy:
    #
    #            Employee
    #           /        \
    #        Staff     Manager
    #          |          |
    #        Cook     Executive
    #                     |
    #                    CEO
    #
    # the following database schema may be used (table - columns):
    #
    # employees :: id, name, kind
    # staff :: id, manager_id
    # managers :: id, num_staff
    # executives :: id, num_managers
    #
    # The class_table_inheritance plugin assumes that the root table
    # (e.g. employees) has a primary key column (usually autoincrementing),
    # and all other tables have a foreign key of the same name that points
    # to the same column in their superclass's table, which is also the primary
    # key for that table.  In this example, the employees table has an id column
    # that is a primary key, and the id column in every other table is a foreign key
    # referencing employees.id, which is also the primary key of that table.
    #
    # Additionally, note that other than the primary key column, no subclass
    # table has a column with the same name as any superclass table.  This plugin
    # does not support cases where the column names in a subclass table overlap
    # with any column names in a superclass table.
    #
    # In this example the staff table also stores Cook model objects and the
    # executives table also stores CEO model objects.
    #
    # When using the class_table_inheritance plugin, subclasses that have additional
    # columns use joined datasets in subselects:
    #
    #   Employee.dataset.sql
    #   # SELECT * FROM employees
    #
    #   Manager.dataset.sql
    #   # SELECT * FROM (
    #   #   SELECT employees.id, employees.name, employees.kind,
    #   #          managers.num_staff
    #   #   FROM employees
    #   #   JOIN managers ON (managers.id = employees.id)
    #   # ) AS employees
    #
    #   CEO.dataset.sql
    #   # SELECT * FROM (
    #   #   SELECT employees.id, employees.name, employees.kind,
    #   #          managers.num_staff, executives.num_managers
    #   #   FROM employees
    #   #   JOIN managers ON (managers.id = employees.id)
    #   #   JOIN executives ON (executives.id = managers.id)
    #   #   WHERE (employees.kind IN ('CEO'))
    #   # ) AS employees
    #
    # This allows CEO.all to return instances with all attributes
    # loaded.  The plugin overrides the deleting, inserting, and updating
    # in the model to work with multiple tables, by handling each table
    # individually.
    #
    # = Subclass loading
    #
    # When model objects are retrieved for a superclass the result can contain
    # subclass instances that only have column entries for the columns in the
    # superclass table.  Calling the column method on the subclass instance for
    # a column not in the superclass table will cause a query to the database
    # to get the value for that column.  If the subclass instance was retrieved
    # using Dataset#all, the query to the database will attempt to load the column
    # values for all subclass instances that were retrieved.  For example:
    #
    #   a = Employee.all # [#<Staff>, #<Manager>, #<Executive>]
    #   a.first.values # {:id=>1, :name=>'S', :kind=>'Staff'}
    #   a.first.manager_id # Loads the manager_id attribute from the database
    #
    # If you want to get all columns in a subclass instance after loading
    # via the superclass, call Model#refresh.
# # a = Employee.first # a.values # {:id=>1, name=>'S', :kind=>'CEO'} # a.refresh.values # {:id=>1, name=>'S', :kind=>'CEO', :num_staff=>4, :num_managers=>2} # # You can also load directly from a subclass: # # a = Executive.first # a.values # {:id=>1, name=>'S', :kind=>'Executive', :num_staff=>4, :num_managers=>2} # # Note that when loading from a subclass, because the subclass dataset uses a subquery # that by default uses the same alias at the primary table, any qualified identifiers # should reference the subquery alias (and qualified identifiers should not be needed # unless joining to another table): # # a = Executive.where(id: 1).first # works # a = Executive.where{{employees[:id]=>1}}.first # works # a = Executive.where{{executives[:id]=>1}}.first # doesn't work # # Note that because subclass datasets select from a subquery, you cannot update, # delete, or insert into them directly. To delete related rows, you need to go # through the related tables and remove the related rows. Code that does this would # be similar to: # # pks = Executive.where{num_staff < 10}.select_map(:id) # Executive.cti_tables.reverse_each do |table| # DB.from(table).where(id: pks).delete # end # # = Usage # # # Use the default of storing the class name in the sti_key # # column (:kind in this case) # class Employee < Sequel::Model # plugin :class_table_inheritance, key: :kind # end # # # Have subclasses inherit from the appropriate class # class Staff < Employee; end # uses staff table # class Cook < Staff; end # cooks table doesn't exist so uses staff table # class Manager < Employee; end # uses managers table # class Executive < Manager; end # uses executives table # class CEO < Executive; end # ceos table doesn't exist so uses executives table # # # Some examples of using these options: # # # Specifying the tables with a :table_map hash # Employee.plugin :class_table_inheritance, # table_map: {Employee: :employees, # Staff: :staff, # Cook: :staff, # Manager: :managers, # Executive: :executives, # CEO: :executives } # # # Using integers to store the class type, with a :model_map hash # # and an sti_key of :type # Employee.plugin :class_table_inheritance, key: :type, # model_map: {1=>:Staff, 2=>:Cook, 3=>:Manager, 4=>:Executive, 5=>:CEO} # # # Using non-class name strings # Employee.plugin :class_table_inheritance, key: :type, # model_map: {'staff'=>:Staff, 'cook staff'=>:Cook, 'supervisor'=>:Manager} # # # By default the plugin sets the respective column value # # when a new instance is created. # Cook.create.type == 'cook staff' # Manager.create.type == 'supervisor' # # # You can customize this behavior with the :key_chooser option. # # This is most useful when using a non-bijective mapping. # Employee.plugin :class_table_inheritance, key: :type, # model_map: {'cook staff'=>:Cook, 'supervisor'=>:Manager}, # key_chooser: proc{|instance| instance.model.sti_key_map[instance.model.to_s].first || 'stranger' } # # # Using custom procs, with :model_map taking column values # # and yielding either a class, string, symbol, or nil, # # and :key_map taking a class object and returning the column # # value to use # Employee.plugin :single_table_inheritance, key: :type, # model_map: proc{|v| v.reverse}, # key_map: proc{|klass| klass.name.reverse} # # # You can use the same class for multiple values. # # This is mainly useful when the sti_key column contains multiple values # # which are different but do not require different code. 
# Employee.plugin :single_table_inheritance, key: :type, # model_map: {'staff' => "Staff", # 'manager' => "Manager", # 'overpayed staff' => "Staff", # 'underpayed staff' => "Staff"} # # One minor issue to note is that if you specify the <tt>:key_map</tt> # option as a hash, instead of having it inferred from the <tt>:model_map</tt>, # you should only use class name strings as keys, you should not use symbols # as keys. module ClassTableInheritance # The class_table_inheritance plugin requires the single_table_inheritance # plugin and the lazy_attributes plugin to handle lazily-loaded attributes # for subclass instances returned by superclass methods. def self.apply(model, opts = OPTS) model.plugin :single_table_inheritance, nil model.plugin :lazy_attributes end # Initialize the plugin using the following options: # :alias :: Change the alias used for the subquery in model datasets. # using this as the alias. # :key :: Column symbol that holds the key that identifies the class to use. # Necessary if you want to call model methods on a superclass # that return subclass instances # :model_map :: Hash or proc mapping the key column values to model class names. # :key_map :: Hash or proc mapping model class names to key column values. # Each value or return is an array of possible key column values. # :key_chooser :: proc returning key for the provided model instance # :table_map :: Hash with class name symbols keys mapping to table name symbol values. # Overrides implicit table names. # :ignore_subclass_columns :: Array with column names as symbols that are ignored # on all sub-classes. # :qualify_tables :: Boolean true to qualify automatically determined # subclass tables with the same qualifier as their # superclass. def self.configure(model, opts = OPTS) SingleTableInheritance.configure model, opts[:key], opts model.instance_exec do @cti_models = [self] @cti_tables = [table_name] @cti_instance_dataset = @instance_dataset @cti_table_columns = columns @cti_table_map = opts[:table_map] || {} @cti_alias = opts[:alias] || case source = @dataset.first_source when SQL::QualifiedIdentifier @dataset.unqualified_column_for(source) else source end @cti_ignore_subclass_columns = opts[:ignore_subclass_columns] || [] @cti_qualify_tables = !!opts[:qualify_tables] end end module ClassMethods # An array of each model in the inheritance hierarchy that is # backed by a new table. attr_reader :cti_models # An array of column symbols for the backing database table, # giving the columns to update in each backing database table. attr_reader :cti_table_columns # The dataset that table instance datasets are based on. # Used for database modifications attr_reader :cti_instance_dataset # An array of table symbols that back this model. The first is # table symbol for the base model, and the last is the current model # table symbol. attr_reader :cti_tables # A hash with class name symbol keys and table name symbol values. # Specified with the :table_map option to the plugin, and should be used if # the implicit naming is incorrect. attr_reader :cti_table_map # An array of columns that may be duplicated in sub-classes. The # primary key column is always allowed to be duplicated attr_reader :cti_ignore_subclass_columns # A boolean indicating whether or not to automatically qualify tables # backing subclasses with the same qualifier as their superclass, if # the superclass is qualified. 
Specified with the :qualify_tables # option to the plugin and only applied to automatically determined # table names (not to the :table_map option). attr_reader :cti_qualify_tables # Freeze CTI information when freezing model class. def freeze @cti_models.freeze @cti_tables.freeze @cti_table_columns.freeze @cti_table_map.freeze @cti_ignore_subclass_columns.freeze super end Plugins.inherited_instance_variables(self, :@cti_models=>nil, :@cti_tables=>nil, :@cti_table_columns=>nil, :@cti_instance_dataset=>nil, :@cti_table_map=>nil, :@cti_alias=>nil, :@cti_ignore_subclass_columns=>nil, :@cti_qualify_tables=>nil) # The table name for the current model class's main table. def table_name if cti_tables && cti_tables.length > 1 @cti_alias else super end end # The name of the most recently joined table. def cti_table_name cti_tables.last end # The model class for the given key value. def sti_class_from_key(key) sti_class(sti_model_map[key]) end private def inherited(subclass) ds = sti_dataset # Prevent inherited in model/base.rb from setting the dataset subclass.instance_exec { @dataset = nil } super # Set table if this is a class table inheritance table = nil columns = nil if n = subclass.name if table = cti_table_map[n.to_sym] columns = db.schema(table).map(&:first) else table = if cti_qualify_tables && (schema = dataset.schema_and_table(cti_table_name).first) SQL::QualifiedIdentifier.new(schema, subclass.implicit_table_name) else subclass.implicit_table_name end columns = check_non_connection_error(false){db.schema(table) && db.schema(table).map(&:first)} table = nil if !columns || columns.empty? end end table = nil if table && (table == cti_table_name) return unless table pk = primary_key subclass.instance_exec do if cti_tables.length == 1 ds = ds.select(*self.columns.map{|cc| Sequel.qualify(cti_table_name, Sequel.identifier(cc))}) end ds.send(:columns=, self.columns) cols = (columns - [pk]) - cti_ignore_subclass_columns dup_cols = cols & ds.columns unless dup_cols.empty? raise Error, "class_table_inheritance with duplicate column names (other than the primary key column) is not supported, make sure tables have unique column names (duplicate columns: #{dup_cols}). If this is desired, specify these columns in the :ignore_subclass_columns option when initializing the plugin" end sel_app = cols.map{|cc| Sequel.qualify(table, Sequel.identifier(cc))} @sti_dataset = ds = ds.join(table, pk=>pk).select_append(*sel_app) ds = ds.from_self(:alias=>@cti_alias) ds.send(:columns=, self.columns + cols) set_dataset(ds) set_columns(self.columns) @dataset = @dataset.with_row_proc(lambda{|r| subclass.sti_load(r)}) cols.each{|a| define_lazy_attribute_getter(a, :dataset=>dataset, :table=>@cti_alias)} @cti_models += [self] @cti_tables += [table] @cti_table_columns = columns @cti_instance_dataset = db.from(table) cti_tables.reverse_each do |ct| db.schema(ct).each{|sk,v| db_schema[sk] = v} end setup_auto_validations if respond_to?(:setup_auto_validations, true) end end # If using a subquery for class table inheritance, also use a subquery # when setting subclass dataset. def sti_subclass_dataset(key) ds = super if cti_models[0] != self ds = ds.from_self(:alias=>@cti_alias) end ds end end module InstanceMethods # Delete the row from all backing tables, starting from the # most recent table and going through all superclasses. def delete raise Sequel::Error, "can't delete frozen object" if frozen? model.cti_models.reverse_each do |m| cti_this(m).delete end self end # Set the sti_key column based on the sti_key_map. 
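      # For example (using the illustrative Employee hierarchy from the
      # documentation above): a new Cook with kind set to 'Staff' keeps that
      # value, since Cook and Staff share the staff table, while a new Cook
      # with kind set to 'Manager' has kind reset to 'Cook', because managers
      # is a different backing table.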
def before_validation if new? && (set = self[model.sti_key]) exp = model.sti_key_chooser.call(self) if set != exp set_table = model.sti_class_from_key(set).cti_table_name exp_table = model.sti_class_from_key(exp).cti_table_name set_column_value("#{model.sti_key}=", exp) if set_table != exp_table end end super end private def cti_this(model) use_server(model.cti_instance_dataset.where(model.primary_key_hash(pk))) end # Insert rows into all backing tables, using the columns # in each table. def _insert return super if model.cti_models[0] == model model.cti_models.each do |m| v = {} m.cti_table_columns.each{|c| v[c] = @values[c] if @values.include?(c)} ds = use_server(m.cti_instance_dataset) if ds.supports_insert_select? && (h = ds.insert_select(v)) @values.merge!(h) else nid = ds.insert(v) @values[primary_key] ||= nid end end @values[primary_key] end # Update rows in all backing tables, using the columns in each table. def _update(columns) return super if model.cti_models[0] == model model.cti_models.each do |m| h = {} m.cti_table_columns.each{|c| h[c] = columns[c] if columns.include?(c)} unless h.empty? ds = cti_this(m) n = ds.update(h) raise(NoExistingObject, "Attempt to update object did not result in a single row modification (SQL: #{ds.update_sql(h)})") if require_modification && n != 1 end end end end end end end ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/column_conflicts.rb������������������������������������������������0000664�0000000�0000000�00000010117�14342141206�0022513�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The column_conflicts plugin overrides Model#get_column_value and #set_column_value # to automatically handle column names that conflict with Ruby/Sequel method names. # # By default, Model#get_column_value and #set_column_value just call send, this # plugin overrides the methods and gets/sets the value directly in the values # hash if the column name conflicts with an existing Sequel::Model instance # method name. # # Checking for column conflicts causes a performance hit, which is why Sequel # does not enable such checks by default. # # When using this plugin, you can manually update the columns used. 
This may be useful if # the columns conflict with one of your custom methods, instead of a method defined in # Sequel::Model: # # Album.plugin :column_conflicts # Album.get_column_conflict!(:column) # Album.set_column_conflict!(:other_column) # # Usage: # # # Make all model's handle column conflicts automatically (called before loading subclasses) # Sequel::Model.plugin :column_conflicts # # # Make the Album class handle column conflicts automatically # Album.plugin :column_conflicts module ColumnConflicts def self.apply(model) model.instance_exec do @get_column_conflicts = {} @set_column_conflicts = {} end end # Check for column conflicts on the current model if the model has a dataset. def self.configure(model) model.instance_exec do check_column_conflicts if @dataset end end module ClassMethods Plugins.after_set_dataset(self, :check_column_conflicts) Plugins.inherited_instance_variables(self, :@get_column_conflicts=>:dup, :@set_column_conflicts=>:dup) # Hash for columns where the getter method already exists. keys are column symbols/strings that # conflict with method names and should be looked up directly instead of calling a method, # values are the column symbol to lookup in the values hash. attr_reader :get_column_conflicts # Hash for columns where the setter method already exists. keys are column symbols/strings suffixed # with = that conflict with method names and should be set directly in the values hash, # values are the column symbol to set in the values hash. attr_reader :set_column_conflicts # Compare the column names for the model with the methods defined on Sequel::Model, and automatically # setup the column conflicts. def check_column_conflicts mod = Sequel::Model columns.find_all{|c| mod.method_defined?(c)}.each{|c| get_column_conflict!(c)} columns.find_all{|c| mod.method_defined?("#{c}=")}.each{|c| set_column_conflict!(c)} end # Freeze column conflict information when freezing model class. def freeze @get_column_conflicts.freeze @set_column_conflicts.freeze super end # Set the given column as one with a getter method conflict. def get_column_conflict!(column) @get_column_conflicts[column.to_sym] = @get_column_conflicts[column.to_s] = column.to_sym end # Set the given column as one with a setter method conflict. def set_column_conflict!(column) @set_column_conflicts[:"#{column}="] = @set_column_conflicts["#{column}="] = column.to_sym end end module InstanceMethods # If the given column has a getter method conflict, lookup the value directly in the values hash. def get_column_value(c) if col = model.get_column_conflicts[c] self[col] else super end end # If the given column has a setter method conflict, set the value directly in the values hash. 
def set_column_value(c, v) if col = model.set_column_conflicts[c] self[col] = v else super end end end end end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/column_encryption.rb�����������������������������������������������0000664�0000000�0000000�00000076531�14342141206�0022735�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true # :nocov: raise(Sequel::Error, "Sequel column_encryption plugin requires ruby 2.3 or greater") unless RUBY_VERSION >= '2.3' # :nocov: require 'openssl' begin # Test cipher actually works cipher = OpenSSL::Cipher.new("aes-256-gcm") cipher.encrypt cipher.key = '1'*32 cipher_iv = cipher.random_iv cipher.auth_data = '' cipher_text = cipher.update('2') << cipher.final auth_tag = cipher.auth_tag cipher = OpenSSL::Cipher.new("aes-256-gcm") cipher.decrypt cipher.iv = cipher_iv cipher.key = '1'*32 cipher.auth_data = '' cipher.auth_tag = auth_tag # :nocov: unless (cipher.update(cipher_text) << cipher.final) == '2' raise OpenSSL::Cipher::CipherError end rescue RuntimeError, OpenSSL::Cipher::CipherError raise LoadError, "Sequel column_encryption plugin requires a working aes-256-gcm cipher" # :nocov: end require 'base64' require 'securerandom' module Sequel module Plugins # The column_encryption plugin adds support for encrypting the content of individual # columns in a table. # # Column values are encrypted with AES-256-GCM using a per-value cipher key derived from # a key provided in the configuration using HMAC-SHA256. # # = Usage # # If you would like to support encryption of columns in more than one model, you should # probably load the plugin into the parent class of your models and specify the keys: # # Sequel::Model.plugin :column_encryption do |enc| # enc.key 0, ENV["SEQUEL_COLUMN_ENCRYPTION_KEY"] # end # # This specifies a single master encryption key. Unless you are actively rotating keys, # it is best to use a single master key. Rotation of encryption keys will be discussed # in a later section. # # In the above call, <tt>0</tt> is the id of the key, and the # <tt>ENV["SEQUEL_COLUMN_ENCRYPTION_KEY"]</tt> is the content of the key, which must be # a string with exactly 32 bytes. As indicated, this key should not be hardcoded or # otherwise committed to the source control repository. # # For models that need encrypted columns, you load the plugin again, but specify the # columns to encrypt: # # ConfidentialModel.plugin :column_encryption do |enc| # enc.column :encrypted_column_name # enc.column :searchable_column_name, searchable: true # enc.column :ci_searchable_column_name, searchable: :case_insensitive # end # # With this, all three specified columns (+encrypted_column_name+, +searchable_column_name+, # and +ci_searchable_column_name+) will be marked as encrypted columns. 
When you run the # following code: # # ConfidentialModel.create( # encrypted_column_name: 'These', # searchable_column_name: 'will be', # ci_searchable_column_name: 'Encrypted' # ) # # It will save encrypted versions to the database. +encrypted_column_name+ will not be # searchable, +searchable_column_name+ will be searchable with an exact match, and # +ci_searchable_column_name+ will be searchable with a case insensitive match. See section # below for details on searching. # # It is possible to have model-specific keys by specifying both the +key+ and +column+ methods # in the model: # # ConfidentialModel.plugin :column_encryption do |enc| # enc.key 0, ENV["SEQUEL_MODEL_SPECIFIC_ENCRYPTION_KEY"] # # enc.column :encrypted_column_name # enc.column :searchable_column_name, searchable: true # enc.column :ci_searchable_column_name, searchable: :case_insensitive # end # # When the +key+ method is called inside the plugin block, previous keys are ignored, # and only the new keys specified will be used. This approach would allow the # +ConfidentialModel+ to use the model specific encryption keys, and other models # to use the default keys specified in the parent class. # # The +key+ and +column+ methods inside the plugin block support additional options. # The +key+ method supports the following options: # # :auth_data :: The authentication data to use for the AES-256-GCM cipher. Defaults # to the empty string. # :padding :: The number of padding bytes to use. For security, data is padded so that # a database administrator cannot determine the exact size of the # unencrypted data. By default, this value is 8, which means that # unencrypted data will be padded to a multiple of 8 bytes. Up to twice as # much padding as specified will be used, as the number of padding bytes # is partially randomized. # # The +column+ method supports the following options: # # :searchable :: Whether the column is searchable. This should not be used unless # searchability is needed, as it can allow the database administrator # to determine whether two distinct rows have the same unencrypted # data (but not what that data is). This can be set to +true+ to allow # searching with an exact match, or +:case_insensitive+ for a case # insensitive match. # :search_both :: This should only be used if you have previously switched the # +:searchable+ option from +true+ to +:case_insensitive+ or vice-versa, # and would like the search to return values that have not yet been # reencrypted. Note that switching from +true+ to +:case_insensitive+ # isn't a problem, but switching from +:case_insensitive+ to +true+ and # using this option can cause the search to return values that are # not an exact match. You should manually filter those objects # after decrypting if you want to ensure an exact match. # :format :: The format of the column, if you want to perform serialization before # encryption and deserialization after decryption. Can be either a # symbol registered with the serialization plugin or an array of two # callables, the first for serialization and the second for deserialization. 
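    #
    # As an illustrative sketch (the column name here is hypothetical), the :format
    # option can also be given a pair of custom callables. For example, using zlib
    # so that values are compressed before encryption and decompressed after
    # decryption, which is one way to combine compression with encryption (see the
    # Unsupported Features section below for the tradeoffs of doing so):
    #
    #   require 'zlib'
    #   ConfidentialModel.plugin :column_encryption do |enc|
    #     enc.column :compressed_column_name,
    #       format: [Zlib.method(:deflate), Zlib.method(:inflate)]
    #   end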
    #
    # The +column+ method also supports a block for column-specific keys:
    #
    #   ConfidentialModel.plugin :column_encryption do |enc|
    #     enc.column :encrypted_column_name do |cenc|
    #       cenc.key 0, ENV["SEQUEL_COLUMN_SPECIFIC_ENCRYPTION_KEY"]
    #     end
    #
    #     enc.column :searchable_column_name, searchable: true
    #     enc.column :ci_searchable_column_name, searchable: :case_insensitive
    #   end
    #
    # In this case, the <tt>ENV["SEQUEL_COLUMN_SPECIFIC_ENCRYPTION_KEY"]</tt> key will
    # only be used for the +:encrypted_column_name+ column, and not the other columns.
    #
    # Note that there isn't a security reason to prefer either model-specific or
    # column-specific keys, as the actual cipher key used is unique per column value.
    #
    # Note that changing the key_id, key string, or auth_data for an existing key will
    # break decryption of values encrypted with that key. If you would like to change
    # any aspect of the key, add a new key, rotate to the new encryption key, and then
    # remove the previous key, as described in the section below on key rotation.
    #
    # = Searching Encrypted Values
    #
    # To search searchable encrypted columns, use +with_encrypted_value+. This example
    # code will return the model instance created in the code example in the previous
    # section:
    #
    #   ConfidentialModel.
    #     with_encrypted_value(:searchable_column_name, "will be").
    #     with_encrypted_value(:ci_searchable_column_name, "encrypted").
    #     first
    #
    # = Encryption Key Rotation
    #
    # To rotate encryption keys, add a new key above the existing key, with a new key ID:
    #
    #   Sequel::Model.plugin :column_encryption do |enc|
    #     enc.key 1, ENV["SEQUEL_COLUMN_ENCRYPTION_KEY"]
    #     enc.key 0, ENV["SEQUEL_OLD_COLUMN_ENCRYPTION_KEY"]
    #   end
    #
    # Newly encrypted data will then use the new key. Records encrypted with the older key
    # will still be decrypted correctly.
    #
    # To force reencryption for existing records that are using the older key, you can use
    # the +needing_reencryption+ dataset method and the +reencrypt+ instance method. For a
    # small number of records, you can probably do:
    #
    #   ConfidentialModel.needing_reencryption.all(&:reencrypt)
    #
    # With more than a small number of records, you'll want to do this in batches. It's
    # possible you could use an approach such as:
    #
    #   ds = ConfidentialModel.needing_reencryption.limit(100)
    #   true until ds.all(&:reencrypt).empty?
    #
    # After all values have been reencrypted for all models, and no models use the older
    # encryption key, you can remove it from the configuration:
    #
    #   Sequel::Model.plugin :column_encryption do |enc|
    #     enc.key 1, ENV["SEQUEL_COLUMN_ENCRYPTION_KEY"]
    #   end
    #
    # Once an encryption key has been removed, and no data uses it, it is safe to reuse
    # the same key id for a new key. This approach allows for up to 256 concurrent keys
    # in the same configuration.
    #
    # = Encrypting Additional Formats
    #
    # By default, the column_encryption plugin assumes that the decrypted data should be
    # returned as a string, and a string will be passed to encrypt. However, using the
    # +:format+ option, you can specify an alternate format.
For example, if you want to # encrypt a JSON representation of the object, so that you can deal with an array/hash # and automatically have it serialized with JSON and then encrypted when saving, and # then deserialized with JSON after decryption when it is retrieved: # # require 'json' # ConfidentialModel.plugin :column_encryption do |enc| # enc.key 0, ENV["SEQUEL_MODEL_SPECIFIC_ENCRYPTION_KEY"] # # enc.column :encrypted_column_name # enc.column :searchable_column_name, searchable: true # enc.column :ci_searchable_column_name, searchable: :case_insensitive # enc.column :encrypted_json_column_name, format: :json # end # # The values of the +:format+ are the same values you can pass as the first argument # to +serialize_attributes+ (in the serialization plugin). You can pass an array # with the serializer and deserializer for custom support. # # You can use both +:searchable+ and +:format+ together for searchable encrypted # serialized columns. However, note that this allows only exact searches of the # serialized version of the data. So for JSON, a search for <tt>{'a'=>1, 'b'=>2}</tt> # would not match <tt>{'b'=>2, 'a'=>1}</tt> even though the objects are considered # equal. If this is an issue, make sure you use a serialization format where all # equal objects are serialized to the same string. # # = Enforcing Uniqueness # # You cannot enforce uniqueness of unencrypted data at the database level # if you also want to support key rotation. However, absent key rotation, a # unique index on the first 48 characters of the encrypted column can enforce uniqueness, # as long as the column is searchable. If the encrypted column is case-insensitive # searchable, the uniqueness is case insensitive as well. # # = Column Value Cryptography/Format # # Column values used by this plugin use the following format (+key+ is specified # in the plugin configuration and must be exactly 32 bytes): # # column_value :: urlsafe_base64(flags + NUL + key_id + NUL + search_data + key_data + # cipher_iv + cipher_auth_tag + encrypted_data) # flags :: 1 byte, the type of record (0: not searchable, 1: searchable, 2: lowercase searchable) # NUL :: 1 byte, ASCII NUL # key_id :: 1 byte, the key id, supporting 256 concurrently active keys (0 - 255) # search_data :: 0 bytes if flags is 0, 32 bytes if flags is 1 or 2. # Format is HMAC-SHA256(key, unencrypted_data). # Ignored on decryption, only used for searching. # key_data :: 32 bytes random data used to construct cipher key # cipher_iv :: 12 bytes, AES-256-GCM cipher random initialization vector # cipher_auth_tag :: 16 bytes, AES-256-GCM cipher authentication tag # encrypted_data :: AES-256-GCM(HMAC-SHA256(key, key_data), # padding_size + padding + unencrypted_data) # padding_size :: 1 byte, with the amount of padding (0-255 bytes of padding allowed) # padding :: number of bytes specified by padding size, ignored on decryption # unencrypted_data :: actual column value # # The reason for <tt>flags + NUL + key_id + NUL</tt> (4 bytes) as the header is to allow for # an easy way to search for values needing reencryption using a database index. It takes # the first three bytes and converts them to base64, and looks for values less than that value # or greater than that value with 'B' appended. The NUL byte in the fourth byte of the header # ensures that after base64 encoding, the fifth byte in the column will be 'A'. 
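    #
    # As a minimal illustration of this header scheme (not part of the plugin API),
    # the three header bytes for a searchable value (flags 1) encrypted with key id 0
    # base64 encode as follows:
    #
    #   require 'base64'
    #   Base64.urlsafe_encode64("\x01\x00\x00") # => "AQAA"
    #
    # so all such values start with the same four character prefix, and values
    # needing reencryption can be found with simple string comparisons against it.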
    #
    # The reason for <tt>search_data</tt> (32 bytes) directly after is that for searchable values,
    # after base64 encoding of the header and search data, it is 48 bytes and can be used directly
    # as a prefix search on the column, which can be supported by the same database index. This is
    # more efficient than a full column value search for large values, and allows for case-insensitive
    # searching without a separate column, by having the search_data be based on the lowercase value
    # while the unencrypted data is original case.
    #
    # The reason for the padding is so that a database administrator cannot be sure exactly how
    # many bytes are in the column. It is stored encrypted because otherwise the database
    # administrator could calculate it by decoding the base64 data.
    #
    # = Unsupported Features
    #
    # The following features are deliberately not supported:
    #
    # == Compression
    #
    # Allowing compression with encryption is inviting security issues later.
    # While padding can reduce the risk of compression with encryption, it does not
    # eliminate it entirely. Users that must have compression with encryption can use
    # the +:format+ option with a serializer that compresses and a deserializer that
    # decompresses.
    #
    # == Mixing Encrypted/Unencrypted Data
    #
    # Mixing encrypted and unencrypted data increases the complexity and security risk, since there
    # is a chance unencrypted data could look like encrypted data in the pathological case.
    # If you have existing unencrypted data that you would like to encrypt, create a new column for
    # the encrypted data, and then migrate the data from the unencrypted column to the encrypted
    # column. After all unencrypted values have been migrated, drop the unencrypted column.
    #
    # == Arbitrary Encryption Schemes
    #
    # Supporting arbitrary encryption schemes increases the complexity risk.
    # If in the future AES-256-GCM is not considered a secure enough cipher, it is possible to
    # extend the current format using the reserved values in the first two bytes of the header.
    #
    # = Caveats
    #
    # As column_encryption is a model plugin, it only works when using model instance methods.
    # If you directly modify the database using a dataset or an external program that modifies
    # the contents of the encrypted columns, you will probably corrupt the data. To make data
    # corruption less likely, it is best to have a CHECK constraint on the encrypted column
    # with a basic format and length check:
    #
    #   DB.alter_table(:table_name) do
    #     c = Sequel[:encrypted_column_name]
    #     add_constraint(:encrypted_column_name_format,
    #       c.like('AA__A%') | c.like('Ag__A%') | c.like('AQ__A%'))
    #     add_constraint(:encrypted_column_name_length, Sequel.char_length(c) >= 88)
    #   end
    #
    # If possible, it's also best to check that the column is valid urlsafe base64 data of
    # sufficient length. This can be done on PostgreSQL using a combination of octet_length,
    # decode, and regexp_replace:
    #
    #   DB.alter_table(:ce_test) do
    #     c = Sequel[:encrypted_column_name]
    #     add_constraint(:enc_base64) do
    #       octet_length(decode(regexp_replace(regexp_replace(c, '_', '/', 'g'), '-', '+', 'g'), 'base64')) >= 65
    #     end
    #   end
    #
    # Such constraints will probably be sufficient to protect against most unintentional corruption of
    # encrypted columns.
    #
    # If the database supports transparent data encryption and you trust the database administrator,
    # using the database support is probably a better approach.
    #
    # The column_encryption plugin is only supported on Ruby 2.3+ and when the Ruby openssl standard
    # library supports the AES-256-GCM cipher.
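    #
    # As an example of the unique index mentioned under Enforcing Uniqueness above,
    # on PostgreSQL an expression index on the first 48 characters of a searchable
    # encrypted column could be created with something like the following sketch
    # (table, column, and index names are placeholders):
    #
    #   DB.add_index :table_name, Sequel.function(:substr, :searchable_column_name, 1, 48),
    #     unique: true, name: :table_name_searchable_column_name_enc_idx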
module ColumnEncryption # Cryptor handles the encryption and decryption of rows for a key set. # It also provides methods that return search prefixes, which datasets # use in queries. # # The same cryptor can support non-searchable, searchable, and case-insensitive # searchable columns. class Cryptor # :nodoc: # Flags NOT_SEARCHABLE = 0 SEARCHABLE = 1 LOWERCASE_SEARCHABLE = 2 # This is the default padding, but up to 2x the padding can be used for a record. DEFAULT_PADDING = 8 # Keys should be an array of arrays containing key_id, key string, auth_data, and padding. def initialize(keys) if !keys || keys.empty? raise Error, "Cannot initialize encryptor without encryption key" end # First key is used for encryption @key_id, @key, @auth_data, @padding = keys[0] # All keys are candidates for decryption @key_map = {} keys.each do |key_id, key, auth_data, padding| @key_map[key_id] = [key, auth_data, padding].freeze end freeze end # Decrypt using any supported format and any available key. def decrypt(data) begin data = Base64.urlsafe_decode64(data) rescue ArgumentError raise Error, "Unable to decode encrypted column: invalid base64" end unless data.getbyte(1) == 0 && data.getbyte(3) == 0 raise Error, "Unable to decode encrypted column: invalid format" end flags = data.getbyte(0) key, auth_data = @key_map[data.getbyte(2)] unless key raise Error, "Unable to decode encrypted column: invalid key id" end case flags when NOT_SEARCHABLE if data.bytesize < 65 raise Error, "Decoded encrypted column smaller than minimum size" end data.slice!(0, 4) when SEARCHABLE, LOWERCASE_SEARCHABLE if data.bytesize < 97 raise Error, "Decoded encrypted column smaller than minimum size" end data.slice!(0, 36) else raise Error, "Unable to decode encrypted column: invalid flags" end key_part = data.slice!(0, 32) cipher_iv = data.slice!(0, 12) auth_tag = data.slice!(0, 16) cipher = OpenSSL::Cipher.new("aes-256-gcm") cipher.decrypt cipher.iv = cipher_iv cipher.key = OpenSSL::HMAC.digest(OpenSSL::Digest::SHA256.new, key, key_part) cipher.auth_data = auth_data cipher.auth_tag = auth_tag begin decrypted_data = cipher.update(data) << cipher.final rescue OpenSSL::Cipher::CipherError => e raise Error, "Unable to decrypt encrypted column: #{e.class} (probably due to encryption key or auth data mismatch or corrupt data)" end # Remove padding decrypted_data.slice!(0, decrypted_data.getbyte(0) + 1) decrypted_data end # Encrypt in not searchable format with the first configured encryption key. def encrypt(data) _encrypt(data, "#{NOT_SEARCHABLE.chr}\0#{@key_id.chr}\0") end # Encrypt in searchable format with the first configured encryption key. def searchable_encrypt(data) _encrypt(data, _search_prefix(data, SEARCHABLE, @key_id, @key)) end # Encrypt in case insensitive searchable format with the first configured encryption key. def case_insensitive_searchable_encrypt(data) _encrypt(data, _search_prefix(data.downcase, LOWERCASE_SEARCHABLE, @key_id, @key)) end # The prefix string of columns for the given search type and the first configured encryption key. # Used to find values that do not use this prefix in order to perform reencryption. def current_key_prefix(search_type) Base64.urlsafe_encode64("#{search_type.chr}\0#{@key_id.chr}") end # The prefix values to search for the given data (an array of strings), assuming the column uses # the searchable format. 
        def search_prefixes(data)
          _search_prefixes(data, SEARCHABLE)
        end

        # The prefix values to search for the given data (an array of strings), assuming the column uses
        # the case insensitive searchable format.
        def lowercase_search_prefixes(data)
          _search_prefixes(data.downcase, LOWERCASE_SEARCHABLE)
        end

        # The prefix values to search for the given data (an array of strings), assuming the column uses
        # either the searchable or the case insensitive searchable format. Should be used only when
        # transitioning between formats (used by the :search_both option when encrypting columns).
        def regular_and_lowercase_search_prefixes(data)
          search_prefixes(data) + lowercase_search_prefixes(data)
        end

        private

        # An array of strings, one for each configured encryption key, to find encrypted values matching
        # the given data and search format.
        def _search_prefixes(data, search_type)
          @key_map.map do |key_id, (key, _)|
            Base64.urlsafe_encode64(_search_prefix(data, search_type, key_id, key))
          end
        end

        # The prefix to use for searchable data, including the HMAC-SHA256(key, data).
        def _search_prefix(data, search_type, key_id, key)
          "#{search_type.chr}\0#{key_id.chr}\0#{OpenSSL::HMAC.digest(OpenSSL::Digest::SHA256.new, key, data)}"
        end

        # Encrypt the data using AES-256-GCM, with the given prefix.
        def _encrypt(data, prefix)
          padding = @padding
          random_data = SecureRandom.random_bytes(32)
          cipher = OpenSSL::Cipher.new("aes-256-gcm")
          cipher.encrypt
          cipher.key = OpenSSL::HMAC.digest(OpenSSL::Digest::SHA256.new, @key, random_data)
          cipher_iv = cipher.random_iv
          cipher.auth_data = @auth_data
          cipher_text = String.new
          data_size = data.bytesize
          padding_size = if padding
            (padding * rand(2)) + padding - (data.bytesize % padding)
          else
            0
          end
          cipher_text << cipher.update(padding_size.chr)
          cipher_text << cipher.update(SecureRandom.random_bytes(padding_size)) if padding_size > 0
          cipher_text << cipher.update(data) if data_size > 0
          cipher_text << cipher.final
          Base64.urlsafe_encode64("#{prefix}#{random_data}#{cipher_iv}#{cipher.auth_tag}#{cipher_text}")
        end
      end

      # The object type yielded to blocks passed to the +column+ method inside
      # <tt>plugin :column_encryption</tt> blocks. This is used to configure custom
      # per-column keys.
      class ColumnDSL # :nodoc:
        # An array of arrays for the data for the keys configured inside the block.
        attr_reader :keys

        def initialize
          @keys = []
        end

        # Verify that the key_id, key, and options are valid.
        def key(key_id, key, opts=OPTS)
          unless key_id.is_a?(Integer) && key_id >= 0 && key_id <= 255
            raise Error, "invalid key_id argument, must be integer between 0 and 255"
          end

          unless key.is_a?(String) && key.bytesize == 32
            raise Error, "invalid key argument, must be string with exactly 32 bytes"
          end

          if opts.has_key?(:padding)
            if padding = opts[:padding]
              unless padding.is_a?(Integer) && padding >= 1 && padding <= 120
                raise Error, "invalid :padding option, must be between 1 and 120"
              end
            end
          else
            padding = Cryptor::DEFAULT_PADDING
          end

          @keys << [key_id, key, opts[:auth_data].to_s, padding].freeze
        end
      end

      # The object type yielded to <tt>plugin :column_encryption</tt> blocks,
      # used to configure encryption keys and encrypted columns.
      class DSL < ColumnDSL # :nodoc:
        # An array of arrays of data for the columns configured inside the block.
        attr_reader :columns

        def initialize
          super
          @columns = []
        end

        # Store the column information.
def column(column, opts=OPTS, &block) @columns << [column, opts, block].freeze end end def self.apply(model, opts=OPTS) model.plugin :serialization end def self.configure(model) dsl = DSL.new yield dsl model.instance_exec do unless dsl.keys.empty? @column_encryption_keys = dsl.keys.freeze @column_encryption_cryptor = nil end @column_encryption_metadata = Hash[@column_encryption_metadata || {}] dsl.columns.each do |column, opts, block| _encrypt_column(column, opts, &block) end @column_encryption_metadata.freeze end end # This stores four callables for handling encyption, decryption, data searching, # and key searching. One of these is created for each encrypted column. ColumnEncryptionMetadata = Struct.new(:encryptor, :decryptor, :data_searcher, :key_searcher) # :nodoc: module ClassMethods private # A hash with column symbol keys and ColumnEncryptionMetadata values for each # encrypted column. attr_reader :column_encryption_metadata # The default Cryptor to use for encrypted columns. This is only overridden if # per-column keys are used. def column_encryption_cryptor @column_encryption_cryptor ||= Cryptor.new(@column_encryption_keys) end # Setup encryption for the given column. def _encrypt_column(column, opts) cryptor ||= if defined?(yield) dsl = ColumnDSL.new yield dsl Cryptor.new(dsl.keys) else column_encryption_cryptor end encrypt_method, search_prefixes_method, search_type = case searchable = opts[:searchable] when nil, false [:encrypt, nil, Cryptor::NOT_SEARCHABLE] when true [:searchable_encrypt, :search_prefixes, Cryptor::SEARCHABLE] when :case_insensitive [:case_insensitive_searchable_encrypt, :lowercase_search_prefixes, Cryptor::LOWERCASE_SEARCHABLE] else raise Error, "invalid :searchable option for encrypted column: #{searchable.inspect}" end if searchable && opts[:search_both] search_prefixes_method = :regular_and_lowercase_search_prefixes end # Setup the callables used in the metadata. encryptor = cryptor.method(encrypt_method) decryptor = cryptor.method(:decrypt) data_searcher = cryptor.method(search_prefixes_method) if search_prefixes_method key_searcher = lambda{cryptor.current_key_prefix(search_type)} if format = opts[:format] if format.is_a?(Symbol) unless format = Sequel.synchronize{Serialization::REGISTERED_FORMATS[format]} raise(Error, "Unsupported serialization format: #{format} (valid formats: #{Sequel.synchronize{Serialization::REGISTERED_FORMATS.keys}.inspect})") end end # If a custom serialization format is used, override the # callables to handle serialization and deserialization. serializer, deserializer = format enc, dec, data_s = encryptor, decryptor, data_searcher encryptor = lambda do |data| enc.call(serializer.call(data)) end decryptor = lambda do |data| deserializer.call(dec.call(data)) end data_searcher = lambda do |data| data_s.call(serializer.call(data)) end end # Setup the setter and getter methods to do encryption and decryption using # the serialization plugin. serialize_attributes([encryptor, decryptor], column) column_encryption_metadata[column] = ColumnEncryptionMetadata.new(encryptor, decryptor, data_searcher, key_searcher).freeze nil end end module ClassMethods Plugins.def_dataset_methods(self, [:with_encrypted_value, :needing_reencryption]) Plugins.inherited_instance_variables(self, :@column_encryption_cryptor=>nil, :@column_encryption_keys=>nil, :@column_encryption_metadata=>nil, ) end module InstanceMethods # Reencrypt the model if needed. 
        # Looks at all of the model's encrypted columns
        # and if any were encrypted with older keys or a different format, reencrypt
        # with the current key and format and save the object. Returns the object
        # if reencryption was needed, or nil if reencryption was not needed.
        def reencrypt
          do_save = false

          model.send(:column_encryption_metadata).each do |column, metadata|
            if (value = values[column]) && !value.start_with?(metadata.key_searcher.call)
              do_save = true
              values[column] = metadata.encryptor.call(metadata.decryptor.call(value))
            end
          end

          save if do_save
        end
      end

      module DatasetMethods
        # Filter the dataset to only match rows where the column contains an encrypted version
        # of value. Only works on searchable encrypted columns.
        def with_encrypted_value(column, value)
          metadata = model.send(:column_encryption_metadata)[column]

          unless metadata && metadata.data_searcher
            raise Error, "lookup for encrypted column #{column.inspect} is not supported"
          end

          prefixes = metadata.data_searcher.call(value)
          where(Sequel.|(*prefixes.map{|v| Sequel.like(column, "#{escape_like(v)}%")}))
        end

        # Filter the dataset to exclude rows where all encrypted columns are already encrypted
        # with the current key and format.
        def needing_reencryption
          incorrect_column_prefixes = model.send(:column_encryption_metadata).map do |column, metadata|
            prefix = metadata.key_searcher.call
            (Sequel[column] < prefix) | (Sequel[column] > prefix + 'B')
          end

          where(Sequel.|(*incorrect_column_prefixes))
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/column_select.rb000066400000000000000000000042071434214120600220110ustar00rootroot00000000000000# frozen-string-literal: true

module Sequel
  module Plugins
    # The column_select plugin changes the default selection for a
    # model dataset to explicitly select all columns from the table:
    # <tt>table.column1, table.column2, table.column3, ...</tt>.
    # This makes it simpler to add columns to the model's table
    # in a migration concurrently while running the application,
    # without it affecting the operation of the application.
    #
    # Note that by default on databases that support RETURNING,
    # using explicit column selections will cause instance creations
    # to use two queries (insert and refresh) instead of a single
    # query using RETURNING. You can use the insert_returning_select
    # plugin to automatically use RETURNING for instance creations
    # for models where the column_select plugin automatically sets up
    # an explicit column selection.
    #
    # Usage:
    #
    #   # Make all model subclasses explicitly select qualified columns
    #   Sequel::Model.plugin :column_select
    #
    #   # Make the Album class select qualified columns
    #   Album.plugin :column_select
    module ColumnSelect
      # Modify the current model's dataset selection, if the model
      # has a dataset.
def self.configure(model) model.instance_exec do self.dataset = dataset if @dataset end end module ClassMethods private # If the underlying dataset selects from a single table and # has no explicit selection, explicitly select all columns from that table, # qualifying them with table's name. def convert_input_dataset(ds) ds = super unless ds.opts[:select] if db.supports_schema_parsing? cols = check_non_connection_error(false){db.schema(ds)} if cols cols = cols.map{|c, _| c} end end if cols ||= check_non_connection_error(false){ds.columns} ds = ds.select(*cols.map{|c| Sequel.qualify(ds.first_source, Sequel.identifier(c))}) end end ds end end end end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/columns_updated.rb�������������������������������������������������0000664�0000000�0000000�00000002357�14342141206�0022347�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The columns_updated plugin stores the columns hash used in the # UPDATE query when saving the instance, and makes it available # in the after_update and after_save hooks via the +columns_updated+ # accessor. The data is cleared before returning from +save+. # # Usage: # # # Make all model subclasses store the columns hash used for updating # Sequel::Model.plugin :columns_updated # # # Make the Album class store the columns hash used for updating # Album.plugin :columns_updated module ColumnsUpdated module InstanceMethods private # The hash used for updating records. This should only be called # in the after_update and after_save hooks. attr_reader :columns_updated # Store the hash used for updating the record, so it can be accessed # in the after_hooks. def _update_columns(columns_updated) @columns_updated = columns_updated super end # Unset the updated columns hash before returning from save. 
def _save(opts) super ensure @columns_updated = nil end end end end end ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/composition.rb�����������������������������������������������������0000664�0000000�0000000�00000020271�14342141206�0021517�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The composition plugin allows you to easily define a virtual # attribute where the backing data is composed of other columns. # # There are two ways to use the plugin. One way is with the # :mapping option. A simple example of this is when you have a # database table with separate columns for year, month, and day, # but where you want to deal with Date objects in your ruby code. # This can be handled with: # # Album.plugin :composition # Album.composition :date, mapping: [:year, :month, :day] # # With the :mapping option, you can provide a :class option # that gives the class to use, but if that is not provided, it # is inferred from the name of the composition (e.g. :date -> Date). # When the <tt>date</tt> method is called, it will return a # Date object by calling: # # Date.new(year, month, day) # # When saving the object, if the date composition has been used # (by calling either the getter or setter method), it will # populate the related columns of the object before saving: # # self.year = date.year # self.month = date.month # self.day = date.day # # The :mapping option is just a shortcut that works in particular # cases. To handle any case, you can define a custom :composer # and :decomposer procs. The :composer and :decomposer procs will # be used to define instance methods. The :composer will be called # the first time the getter is called, and the :decomposer # will be called before saving. The above example could # also be implemented as: # # Album.composition :date, # composer: proc{Date.new(year, month, day) if year || month || day}, # decomposer: (proc do # if d = compositions[:date] # self.year = d.year # self.month = d.month # self.day = d.day # else # self.year = nil # self.month = nil # self.day = nil # end # end) # # Note that when using the composition object, you should not # modify the underlying columns if you are also instantiating # the composition, as otherwise the composition object values # will override any underlying columns when the object is saved. module Composition # Define the necessary class instance variables. def self.apply(model) model.instance_exec do @compositions = {} include(@composition_module ||= Module.new) end end module ClassMethods # A hash with composition name keys and composition reflection # hash values. attr_reader :compositions # Define a composition for this model, with name being the name of the composition. # You must provide either a :mapping option or both the :composer and :decomposer options. # # Options: # :class :: if using the :mapping option, the class to use, as a Class, String or Symbol. 
# :composer :: A proc used to define the method that the composition getter method will call # to create the composition. # :decomposer :: A proc used to define the method called before saving the model object, # if the composition object exists, which sets the columns in the model object # based on the value of the composition object. # :mapping :: An array where each element is either a symbol or an array of two symbols. # A symbol is treated like an array of two symbols where both symbols are the same. # The first symbol represents the getter method in the model, and the second symbol # represents the getter method in the composition object. Example: # # Uses columns year, month, and day in the current model # # Uses year, month, and day methods in the composition object # {mapping: [:year, :month, :day]} # # Uses columns year, month, and day in the current model # # Uses y, m, and d methods in the composition object where # # for example y in the composition object represents year # # in the model object. # {mapping: [[:year, :y], [:month, :m], [:day, :d]]} def composition(name, opts=OPTS) opts = opts.dup compositions[name] = opts if mapping = opts[:mapping] keys = mapping.map{|k| k.is_a?(Array) ? k.first : k} if !opts[:composer] late_binding_class_option(opts, name) klass = opts[:class] class_proc = proc{klass || constantize(opts[:class_name])} opts[:composer] = proc do if values = keys.map{|k| get_column_value(k)} and values.any?{|v| !v.nil?} class_proc.call.new(*values) else nil end end end if !opts[:decomposer] setter_meths = keys.map{|k| :"#{k}="} cov_methods = mapping.map{|k| k.is_a?(Array) ? k.last : k} setters = setter_meths.zip(cov_methods) opts[:decomposer] = proc do if (o = compositions[name]).nil? setter_meths.each{|sm| set_column_value(sm, nil)} else setters.each{|sm, cm| set_column_value(sm, o.public_send(cm))} end end end end raise(Error, "Must provide :composer and :decomposer options, or :mapping option") unless opts[:composer] && opts[:decomposer] define_composition_accessor(name, opts) end Plugins.inherited_instance_variables(self, :@compositions=>:dup) # Define getter and setter methods for the composition object. def define_composition_accessor(name, opts=OPTS) composer_meth = opts[:composer_method] = Plugins.def_sequel_method(@composition_module, "#{name}_composer", 0, &opts[:composer]) opts[:decomposer_method] = Plugins.def_sequel_method(@composition_module, "#{name}_decomposer", 0, &opts[:decomposer]) @composition_module.class_eval do define_method(name) do if compositions.has_key?(name) compositions[name] elsif frozen? # composer_meth is private send(composer_meth) else compositions[name] = send(composer_meth) end end alias_method(name, name) meth = :"#{name}=" define_method(meth) do |v| modified! compositions[name] = v end alias_method(meth, meth) end end # Freeze composition information when freezing model class. def freeze compositions.freeze.each_value(&:freeze) @composition_module.freeze super end end module InstanceMethods # Cache of composition objects for this class. def compositions @compositions ||= {} end # Freeze compositions hash when freezing model instance. def freeze compositions super compositions.freeze self end # For each composition, set the columns in the model class based # on the composition object. def before_validation # decomposer_method is private @compositions.keys.each{|n| send(model.compositions[n][:decomposer_method])} if @compositions super end private # Clear the cached compositions when manually refreshing. 
      def _refresh_set_values(hash)
        @compositions.clear if @compositions
        super
      end

      # Duplicate compositions hash when duplicating model instance.
      def initialize_copy(other)
        super
        @compositions = Hash[other.compositions]
        self
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/concurrent_eager_loading.rb000066400000000000000000000170171434214120600242020ustar00rootroot00000000000000# frozen-string-literal: true

module Sequel
  extension 'async_thread_pool'

  module Plugins
    # The concurrent_eager_loading plugin allows for eager loading multiple associations
    # concurrently in separate threads. You must load the async_thread_pool Database
    # extension into the Database object the model class uses in order for this plugin
    # to work.
    #
    # By default in Sequel, eager loading happens in a serial manner. If you have code
    # such as:
    #
    #   Album.eager(:artist, :genre, :tracks)
    #
    # Sequel will load the albums, then the artists for the albums, then
    # the genres for the albums, then the tracks for the albums.
    #
    # With the concurrent_eager_loading plugin, you can use the +eager_load_concurrently+
    # method to allow for concurrent eager loading:
    #
    #   Album.eager_load_concurrently.eager(:artist, :genre, :tracks)
    #
    # This will load the albums first, since it needs to load the albums to know
    # which artists, genres, and tracks to eagerly load. However, it will load the
    # artists, genres, and tracks for the albums concurrently in separate threads.
    # This can significantly improve performance, especially if there is significant
    # latency between the application and the database. Note that separate threads
    # are only used in the case where there are multiple associations to eagerly load.
    # With only a single association to eagerly load, there is no reason to use a
    # separate thread, since it would not improve performance.
    #
    # If you want to make concurrent eager loading the default, you can load the
    # plugin with the +:always+ option. In this case, all eager loads will be
    # concurrent. If you want to force a non-concurrent eager load, you can use
    # +eager_load_serially+:
    #
    #   Album.eager_load_serially.eager(:artist, :genre, :tracks)
    #
    # Note that making concurrent eager loading the default is probably a bad idea
    # if you are eager loading inside transactions and want the eager load to
    # reflect changes made inside the transaction, unless you plan to use
    # +eager_load_serially+ for such cases. See the async_thread_pool
    # Database extension documentation for more general caveats regarding its use.
    #
    # The default eager loaders for all of the association types that ship with Sequel
    # support safe concurrent eager loading. However, if you are specifying a custom
    # +:eager_loader+ for an association, it may not work safely unless it is modified to
    # support concurrent eager loading. Taking this example from the
    # {Advanced Associations guide}[rdoc-ref:doc/advanced_associations.rdoc]
    #
    #   Album.many_to_one :artist, eager_loader: (proc do |eo_opts|
    #     eo_opts[:rows].each{|album| album.associations[:artist] = nil}
    #     id_map = eo_opts[:id_map]
    #     Artist.where(id: id_map.keys).all do |artist|
    #       if albums = id_map[artist.id]
    #         albums.each do |album|
    #           album.associations[:artist] = artist
    #         end
    #       end
    #     end
    #   end)
    #
    # This would not support concurrent eager loading safely. To support safe
    # concurrent eager loading, you need to make sure you are not modifying
    # the associations for objects concurrently by separate threads. This is
    # implemented using a mutex, which you can access via <tt>eo_opts[:mutex]</tt>.
    # To keep things simple, you can use +Sequel.synchronize_with+ to only
    # use this mutex if it is available. You want to use the mutex around the
    # code that initializes the associations (usually to +nil+ or <tt>[]</tt>),
    # and also around the code that sets the associated objects appropriately
    # after they have been retrieved. You do not want to use the mutex around
    # the code that loads the objects, since that will prevent concurrent loading.
    # So after the changes, the custom eager loader would look like this:
    #
    #   Album.many_to_one :artist, eager_loader: (proc do |eo_opts|
    #     Sequel.synchronize_with(eo_opts[:mutex]) do
    #       eo_opts[:rows].each{|album| album.associations[:artist] = nil}
    #     end
    #     id_map = eo_opts[:id_map]
    #     rows = Artist.where(id: id_map.keys).all
    #     Sequel.synchronize_with(eo_opts[:mutex]) do
    #       rows.each do |artist|
    #         if albums = id_map[artist.id]
    #           albums.each do |album|
    #             album.associations[:artist] = artist
    #           end
    #         end
    #       end
    #     end
    #   end)
    #
    # Usage:
    #
    #   # Make all model subclass datasets support concurrent eager loading
    #   Sequel::Model.plugin :concurrent_eager_loading
    #
    #   # Make the Album class datasets support concurrent eager loading
    #   Album.plugin :concurrent_eager_loading
    #
    #   # Make all model subclass datasets concurrently eager load by default
    #   Sequel::Model.plugin :concurrent_eager_loading, always: true
    module ConcurrentEagerLoading
      def self.configure(mod, opts=OPTS)
        if opts.has_key?(:always)
          mod.instance_variable_set(:@always_eager_load_concurrently, opts[:always])
        end
      end

      module ClassMethods
        Plugins.inherited_instance_variables(self, :@always_eager_load_concurrently => nil)
        Plugins.def_dataset_methods(self, [:eager_load_concurrently, :eager_load_serially])

        # Whether datasets for this class should eager load concurrently by default.
        def always_eager_load_concurrently?
          @always_eager_load_concurrently
        end
      end

      module DatasetMethods
        # Return a cloned dataset that will eager load associated results concurrently
        # using the async thread pool.
        def eager_load_concurrently
          cached_dataset(:_eager_load_concurrently) do
            clone(:eager_load_concurrently=>true)
          end
        end

        # Return a cloned dataset that will not eager load associated results concurrently
        # using the async thread pool. Only useful if the current dataset has been marked
        # as loading concurrently, or loading concurrently is the model's default behavior.
        def eager_load_serially
          cached_dataset(:_eager_load_serially) do
            clone(:eager_load_concurrently=>false)
          end
        end

        private

        # Whether this particular dataset will eager load results concurrently.
        def eager_load_concurrently?
          v = @opts[:eager_load_concurrently]
          v.nil? ? model.always_eager_load_concurrently? : v
        end

        # If performing eager loads concurrently, and at least 2 associations are being
        # eagerly loaded, create a single mutex used for all eager loads.
After the # eager loads have been performed, force loading of any async results, so that # all eager loads will have been completed before this method returns. def perform_eager_loads(eager_load_data) return super if !eager_load_concurrently? || eager_load_data.length < 2 mutex = Mutex.new eager_load_data.each_value do |eo| eo[:mutex] = mutex end super.each do |v| if Sequel::Database::AsyncThreadPool::BaseProxy === v v.__value end end end # If performing eager loads concurrently, perform this eager load using the # async thread pool. def perform_eager_load(loader, eo) eo[:mutex] ? db.send(:async_run){super} : super end end end end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/constraint_validations.rb������������������������������������������0000664�0000000�0000000�00000022021�14342141206�0023730�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The constraint_validations plugin is designed to be used with databases # that used the constraint_validations extension when creating their # tables. The extension adds validation metadata for constraints created, # and this plugin reads that metadata and automatically creates validations # for all of the constraints. For example, if you used the extension # and created your albums table like this: # # DB.create_table(:albums) do # primary_key :id # String :name # validate do # min_length 5, :name # end # end # # Then when you went to save an album that uses this plugin: # # Album.create(name: 'abc') # # raises Sequel::ValidationFailed: name is shorter than 5 characters # # Usage: # # # Make all model subclasses use constraint validations (called before loading subclasses) # Sequel::Model.plugin :constraint_validations # # # Make the Album class use constraint validations # Album.plugin :constraint_validations module ConstraintValidations # The default constraint validation metadata table name. DEFAULT_CONSTRAINT_VALIDATIONS_TABLE = :sequel_constraint_validations # Mapping of operator names in table to ruby operators OPERATOR_MAP = {:str_lt => :<, :str_lte => :<=, :str_gt => :>, :str_gte => :>=, :int_lt => :<, :int_lte => :<=, :int_gt => :>, :int_gte => :>=}.freeze # Automatically load the validation_helpers plugin to run the actual validations. def self.apply(model, opts=OPTS) model.instance_exec do plugin :validation_helpers @constraint_validations_table = DEFAULT_CONSTRAINT_VALIDATIONS_TABLE @constraint_validation_options = {} end end # Parse the constraint validations metadata from the database. Options: # :constraint_validations_table :: Override the name of the constraint validations # metadata table. Should only be used if the table # name was overridden when creating the constraint # validations. 
# :validation_options :: Override/augment the options stored in the database with the # given options. Keys should be validation type symbols (e.g. # :presence) and values should be hashes of options specific # to that validation type. def self.configure(model, opts=OPTS) model.instance_exec do if table = opts[:constraint_validations_table] @constraint_validations_table = table end if vos = opts[:validation_options] vos.each do |k, v| if existing_options = @constraint_validation_options[k] v = existing_options.merge(v) end @constraint_validation_options[k] = v end end parse_constraint_validations end end module ClassMethods # An array of validation method call arrays. Each array is an array that # is splatted to send to perform a validation via validation_helpers. attr_reader :constraint_validations # A hash of reflections of constraint validations. Keys are type name # symbols. Each value is an array of pairs, with the first element being # the validation type symbol (e.g. :presence) and the second element being # options for the validation. If the validation takes an argument, it appears # as the :argument entry in the validation option hash. attr_reader :constraint_validation_reflections # The name of the table containing the constraint validations metadata. attr_reader :constraint_validations_table Plugins.inherited_instance_variables(self, :@constraint_validations_table=>nil, :@constraint_validation_options=>:hash_dup) Plugins.after_set_dataset(self, :parse_constraint_validations) # Freeze constraint validations data when freezing model class. def freeze @constraint_validations.freeze.each(&:freeze) @constraint_validation_reflections.freeze.each_value do |v| v.freeze v.each(&:freeze) end @constraint_validation_options.freeze.each_value(&:freeze) super end private # If the database has not already parsed constraint validation # metadata, then run a query to get the metadata data and transform it # into arrays of validation method calls. # # If this model has associated dataset, use the model's table name # to get the validations for just this model. def parse_constraint_validations db.extension(:_model_constraint_validations) unless hash = Sequel.synchronize{db.constraint_validations} hash = {} db.from(constraint_validations_table).each do |r| (hash[r[:table]] ||= []) << r end Sequel.synchronize{db.constraint_validations = hash} end if @dataset ds = @dataset.with_quote_identifiers(false) table_name = ds.literal(ds.first_source_table) reflections = {} @constraint_validations = (Sequel.synchronize{hash[table_name]} || []).map{|r| constraint_validation_array(r, reflections)} @constraint_validation_reflections = reflections end end # Given a specific database constraint validation metadata row hash, transform # it in an validation method call array suitable for splatting to send. 
def constraint_validation_array(r, reflections) opts = {} opts[:message] = r[:message] if r[:message] opts[:allow_nil] = true if db.typecast_value(:boolean, r[:allow_nil]) type = r[:validation_type].to_sym arg = r[:argument] column = r[:column] case type when :like, :ilike arg = constraint_validation_like_to_regexp(arg, type == :ilike) type = :format when :exact_length, :min_length, :max_length arg = arg.to_i when :length_range arg = constraint_validation_int_range(arg) when :format arg = Regexp.new(arg) when :iformat arg = Regexp.new(arg, Regexp::IGNORECASE) type = :format when :includes_str_array arg = arg.split(',') type = :includes when :includes_int_array arg = arg.split(',').map(&:to_i) type = :includes when :includes_int_range arg = constraint_validation_int_range(arg) type = :includes when *OPERATOR_MAP.keys arg = arg.to_i if type.to_s =~ /\Aint_/ operator = OPERATOR_MAP[type] type = :operator end column = if type == :unique column.split(',').map(&:to_sym) else column.to_sym end if type_opts = @constraint_validation_options[type] opts.merge!(type_opts) end reflection_opts = opts.dup a = [:"validates_#{type}"] if operator a << operator reflection_opts[:operator] = operator end if arg a << arg reflection_opts[:argument] = arg end a << column unless opts.empty? a << opts end if column.is_a?(Array) && column.length == 1 column = column.first end (reflections[column] ||= []) << [type, reflection_opts] a end # Return a range of integers assuming the argument is in # 1..2 or 1...2 format. def constraint_validation_int_range(arg) arg =~ /(\d+)\.\.(\.)?(\d+)/ Range.new($1.to_i, $3.to_i, $2 == '.') end # Transform the LIKE pattern string argument into a # Regexp argument suitable for use with validates_format. def constraint_validation_like_to_regexp(arg, case_insensitive) arg = Regexp.escape(arg).gsub(/%%|%|_/) do |s| case s when '%%' '%' when '%' '.*' else #when '_' '.' end end arg = "\\A#{arg}\\z" if case_insensitive Regexp.new(arg, Regexp::IGNORECASE) else Regexp.new(arg) end end end module InstanceMethods # Run all of the constraint validations parsed from the database # when validating the instance. def validate super model.constraint_validations.each do |v| # Allow calling private validation methods send(*v) end end end end end end ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/csv_serializer.rb��������������������������������������������������0000664�0000000�0000000�00000015406�14342141206�0022204�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true require 'csv' module Sequel module Plugins # csv_serializer handles serializing entire Sequel::Model objects to CSV, # as well as support for deserializing CSV directly into Sequel::Model # objects. It requires the csv standard library. 
# # Basic Example: # # album = Album[1] # album.to_csv(write_headers: true) # # => "id,name,artist_id\n1,RF,2\n" # # You can provide options to control the CSV output: # # album.to_csv(only: :name) # album.to_csv(except: [:id, :artist_id]) # # => "RF\n" # # +to_csv+ also exists as a class and dataset method, both of which return # all objects in the dataset: # # Album.to_csv # Album.where(artist_id: 1).to_csv # # If you have an existing array of model instances you want to convert to # CSV, you can call the class to_csv method with the :array option: # # Album.to_csv(array: [Album[1], Album[2]]) # # In addition to creating CSV, this plugin also enables Sequel::Model # classes to create instances directly from CSV using the from_csv class # method: # # csv = album.to_csv # album = Album.from_csv(csv) # # The array_from_csv class method exists to parse arrays of model instances # from CSV: # # csv = Album.where(artist_id: 1).to_csv # albums = Album.array_from_csv(csv) # # These do not necessarily round trip, since doing so would let users # create model objects with arbitrary values. By default, from_csv will # call set with the values in the hash. If you want to specify the allowed # fields, you can use the :headers option. # # Album.from_csv(album.to_csv, headers: %w'id name') # # If you want to update an existing instance, you can use the from_csv # instance method: # # album.from_csv(csv) # # Usage: # # # Add CSV output capability to all model subclass instances (called # # before loading subclasses) # Sequel::Model.plugin :csv_serializer # # # Add CSV output capability to Album class instances # Album.plugin :csv_serializer module CsvSerializer # Set up the column readers to do deserialization and the column writers # to save the value in deserialized_values def self.configure(model, opts = OPTS) model.instance_exec do @csv_serializer_opts = (@csv_serializer_opts || OPTS).merge(opts) end end # Avoid keyword argument separation warnings on Ruby 2.7, while still # being compatible with 1.9. if RUBY_VERSION >= "2.0" instance_eval(<<-END, __FILE__, __LINE__+1) def self.csv_call(*args, opts, &block) CSV.send(*args, **opts, &block) end END else # :nocov: # :nodoc: def self.csv_call(*args, opts, &block) CSV.send(*args, opts, &block) end # :nodoc: # :nocov: end module ClassMethods # The default opts to use when serializing model objects to CSV attr_reader :csv_serializer_opts # Attempt to parse an array of instances from the given CSV string def array_from_csv(csv, opts = OPTS) CsvSerializer.csv_call(:parse, csv, process_csv_serializer_opts(opts)).map do |row| row = row.to_hash row.delete(nil) new(row) end end # Freeze csv serializier opts when freezing model class def freeze @csv_serializer_opts.freeze.each_value do |v| v.freeze if v.is_a?(Array) || v.is_a?(Hash) end super end # Attempt to parse a single instance from the given CSV string def from_csv(csv, opts = OPTS) new.from_csv(csv, opts) end # Convert the options hash to one that can be passed to CSV. def process_csv_serializer_opts(opts) opts = (csv_serializer_opts || OPTS).merge(opts) opts_cols = opts.delete(:columns) opts_include = opts.delete(:include) opts_except = opts.delete(:except) only = opts.delete(:only) opts[:headers] ||= Array(only || opts_cols || columns) + Array(opts_include) - Array(opts_except) opts end Plugins.inherited_instance_variables( self, :@csv_serializer_opts => lambda do |csv_serializer_opts| opts = {} csv_serializer_opts.each do |k, v| opts[k] = (v.is_a?(Array) || v.is_a?(Hash)) ? 
v.dup : v end opts end) Plugins.def_dataset_methods(self, :to_csv) end module InstanceMethods # Update the object using the data provided in the first line in CSV. Options: # # :headers :: The headers to use for the CSV line. Use nil for a header # to specify the column should be ignored. def from_csv(csv, opts = OPTS) row = CsvSerializer.csv_call(:parse_line, csv, model.process_csv_serializer_opts(opts)).to_hash row.delete(nil) set(row) end # Return a string in CSV format. Accepts the same options as CSV.new, # as well as the following options: # # :except :: Symbol or Array of Symbols of columns not to include in # the CSV output. # :only :: Symbol or Array of Symbols of columns to include in the CSV # output, ignoring all other columns # :include :: Symbol or Array of Symbols specifying non-column # attributes to include in the CSV output. def to_csv(opts = OPTS) opts = model.process_csv_serializer_opts(opts) headers = opts[:headers] CsvSerializer.csv_call(:generate, model.process_csv_serializer_opts(opts)) do |csv| csv << headers.map{|k| public_send(k)} end end end module DatasetMethods # Return a CSV string representing an array of all objects in this # dataset. Takes the same options as the instance method, and passes # them to every instance. Accepts the same options as CSV.new, as well # as the following options: # # :array :: An array of instances. If this is not provided, calls #all # on the receiver to get the array. def to_csv(opts = OPTS) opts = model.process_csv_serializer_opts({:columns=>columns}.merge!(opts)) items = opts.delete(:array) || self headers = opts[:headers] CsvSerializer.csv_call(:generate, opts) do |csv| items.each do |object| csv << headers.map{|header| object.public_send(header)} end end end end end end end ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/dataset_associations.rb��������������������������������������������0000664�0000000�0000000�00000015743�14342141206�0023370�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # DatasetAssociations allows you to easily use your model associations # via datasets. For each association you define, it creates a dataset # method for that association that returns a dataset of all objects # that are associated to objects in the current dataset. Here's a simple # example: # # class Artist < Sequel::Model # plugin :dataset_associations # one_to_many :albums # end # Artist.where(id: 1..100).albums # # SELECT * FROM albums # # WHERE (albums.artist_id IN ( # # SELECT id FROM artists # # WHERE ((id >= 1) AND (id <= 100)))) # # This works for all of the association types that ship with Sequel, # including ones implemented in other plugins. Most association options that # are supported when eager loading are supported when using a # dataset association. However, it will only work for limited associations or # *_one associations with orders if the database supports window functions. 
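    #
    # For example, with a limited association (a sketch; the :best_albums
    # association and :plays column are hypothetical), the dataset method
    # uses a window function filter on databases that support them:
    #
    #   Artist.one_to_many :best_albums, class: :Album, order: Sequel.desc(:plays), limit: 5
    #   Artist.where(id: 1..100).best_albums
    #   # Filters the albums using a window function over each artist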
# # As the dataset methods return datasets, you can easily chain the # methods to get associated datasets of associated datasets: # # Artist.where(id: 1..100).albums.where{name < 'M'}.tags # # SELECT tags.* FROM tags # # WHERE (tags.id IN ( # # SELECT albums_tags.tag_id FROM albums # # INNER JOIN albums_tags # # ON (albums_tags.album_id = albums.id) # # WHERE # # ((albums.artist_id IN ( # # SELECT id FROM artists # # WHERE ((id >= 1) AND (id <= 100))) # # AND # # (name < 'M'))))) # # For associations that do JOINs, such as many_to_many, note that the datasets returned # by a dataset association method do not do a JOIN by default (they use a subquery that # JOINs). This can cause problems when you are doing a select, order, or filter on a # column in the joined table. In that case, you should use the +:dataset_associations_join+ # option in the association, which will make sure the datasets returned by the dataset # association methods also use JOINs, allowing such dataset association methods to work # correctly. # # Usage: # # # Make all model subclasses create association methods for datasets # Sequel::Model.plugin :dataset_associations # # # Make the Album class create association methods for datasets # Album.plugin :dataset_associations module DatasetAssociations module ClassMethods # Set up a dataset method for each association to return an associated dataset def associate(type, name, *) ret = super r = association_reflection(name) meth = r.returns_array? ? name : pluralize(name).to_sym dataset_module do define_method(meth){associated(name)} alias_method(meth, meth) end ret end Plugins.def_dataset_methods(self, :associated) end module DatasetMethods # For the association given by +name+, return a dataset of associated objects # such that it would return the union of calling the association method on # all objects returned by the current dataset. # # This supports most options that are supported when eager loading. However, it # will only work for limited associations or *_one associations with orders if the # database supports window functions. def associated(name) raise Error, "unrecognized association name: #{name.inspect}" unless r = model.association_reflection(name) ds = r.associated_class.dataset sds = opts[:limit] ? self : unordered ds = case r[:type] when :many_to_one ds.where(r.qualified_primary_key=>sds.select(*Array(r[:qualified_key]))) when :one_to_one, :one_to_many r.send(:apply_filter_by_associations_limit_strategy, ds.where(r.qualified_key=>sds.select(*Array(r.qualified_primary_key)))) when :many_to_many, :one_through_one mds = r.associated_class.dataset. join(r[:join_table], r[:right_keys].zip(r.right_primary_keys)). select(*Array(r.qualified_right_key)). where(r.qualify(r.join_table_alias, r[:left_keys])=>sds.select(*r.qualify(model.table_name, r[:left_primary_key_columns]))) ds.where(r.qualified_right_primary_key=>r.send(:apply_filter_by_associations_limit_strategy, mds)) when :many_through_many, :one_through_many if r.reverse_edges.empty? 
              mds = r.associated_dataset
              fe = r.edges.first
              selection = Array(r.qualify(fe[:table], r.final_edge[:left]))
              predicate_key = r.qualify(fe[:table], fe[:right])
            else
              mds = model.dataset
              iq = model.table_name
              edges = r.edges.map(&:dup)
              edges << r.final_edge.dup
              edges.each do |e|
                alias_expr = e[:table]
                aliaz = mds.unused_table_alias(e[:table])
                unless aliaz == alias_expr
                  alias_expr = Sequel.as(e[:table], aliaz)
                end
                e[:alias] = aliaz
                mds = mds.join(alias_expr, Array(e[:right]).zip(Array(e[:left])), :implicit_qualifier=>iq)
                iq = nil
              end
              fe, f1e, f2e = edges.values_at(0, -1, -2)
              selection = Array(r.qualify(f2e[:alias], f1e[:left]))
              predicate_key = r.qualify(fe[:alias], fe[:right])
            end
            mds = mds.
              select(*selection).
              where(predicate_key=>sds.select(*r.qualify(model.table_name, r[:left_primary_key_columns])))
            ds.where(r.qualified_right_primary_key=>r.send(:apply_filter_by_associations_limit_strategy, mds))
          when :pg_array_to_many
            ds.where(Sequel[r.primary_key=>sds.select{Sequel.pg_array_op(r.qualify(r[:model].table_name, r[:key])).unnest}])
          when :many_to_pg_array
            ds.where(Sequel.function(:coalesce, Sequel.pg_array_op(r[:key]).overlaps(sds.select{array_agg(r.qualify(r[:model].table_name, r.primary_key))}), false))
          else
            raise Error, "unrecognized association type for association #{name.inspect}: #{r[:type].inspect}"
          end

          ds = r.apply_eager_dataset_changes(ds).unlimited

          if r[:dataset_associations_join]
            case r[:type]
            when :many_to_many, :one_through_one
              ds = ds.join(r[:join_table], r[:right_keys].zip(r.right_primary_keys))
            when :many_through_many, :one_through_many
              (r.reverse_edges + [r.final_reverse_edge]).each{|e| ds = ds.join(e[:table], e.fetch(:only_conditions, (Array(e[:left]).zip(Array(e[:right])) + Array(e[:conditions]))), :table_alias=>ds.unused_table_alias(e[:table]), :qualify=>:deep, &e[:block])}
            end
          end

          ds
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/def_dataset_method.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The def_dataset_method plugin adds Model.def_dataset_method
    # for defining dataset methods:
    #
    #   Album.def_dataset_method(:by_name) do |name|
    #     where(name: name)
    #   end
    #
    # Additionally, this adds support for Model.subset, which can also
    # be used to define dataset methods that add specific filters:
    #
    #   Album.subset(:gold){copies_sold >= 500000}
    #
    # This exists for backwards compatibility with previous Sequel versions.
    #
    # Usage:
    #
    #   # Make all model subclasses support Model.def_dataset_method
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :def_dataset_method
    #
    #   # Make the Album class support Model.def_dataset_method
    #   Album.plugin :def_dataset_method
    module DefDatasetMethod
      module ClassMethods
        # If a block is given, define a method on the dataset (if the model currently has a dataset) with the given argument name using
        # the given block. Also define a class method on the model that calls the
        # dataset method. Stores the method name and block so that it can be reapplied if the model's
        # dataset changes.
        #
        # If a block is not given, just define a class method on the model for each argument
        # that calls the dataset method of the same argument name.
        #
        # Using dataset_module is recommended over using this method. In addition to allowing
        # more natural ruby syntax for defining methods manually, it also offers numerous
        # helper methods that make defining common dataset methods easier, as well as
        # supporting dataset caching (assuming the arguments allow it).
        #
        #   # Add new dataset method and class method that calls it
        #   Artist.def_dataset_method(:by_name){order(:name)}
        #   Artist.where(Sequel[:name].like('A%')).by_name
        #   Artist.by_name.where(Sequel[:name].like('A%'))
        #
        #   # Just add a class method that calls an existing dataset method
        #   Artist.def_dataset_method(:paginate)
        #   Artist.paginate(2, 10)
        def def_dataset_method(*args, &block)
          raise(Error, "No arguments given") if args.empty?

          if block
            raise(Error, "Defining a dataset method using a block requires only one argument") if args.length > 1
            dataset_module{define_method(args.first, &block)}
          else
            args.each{|arg| def_model_dataset_method(arg)}
          end
        end

        # Sets up a dataset method that returns a filtered dataset.
        # Sometimes thought of as a scope, and like most dataset methods,
        # they can be chained.
        # For example:
        #
        #   Topic.subset(:joes, Sequel[:username].like('%joe%'))
        #   Topic.subset(:popular){num_posts > 100}
        #   Topic.subset(:recent){created_on > Date.today - 7}
        #
        # Allows you to do:
        #
        #   Topic.joes.recent.popular
        #
        # to get topics with a username that includes joe that
        # have more than 100 posts and were created less than
        # 7 days ago.
        #
        # Both the args given and the block are passed to <tt>Dataset#where</tt>.
        #
        # This method creates dataset methods that do not accept arguments. To create
        # dataset methods that accept arguments, you should define a
        # method directly inside a #dataset_module block.
        def subset(*args, &block)
          dataset_module{subset(*args, &block)}
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/defaults_setter.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The defaults_setter plugin makes the column getter methods return the default
    # values for new objects, if the values have not already been set.
    # Example:
    #
    #   # column a default NULL
    #   # column b default 2
    #   album = Album.new
    #   album.a # => nil
    #   album.b # => 2
    #   album = Album.new(a: 1, b: 3)
    #   album.a # => 1
    #   album.b # => 3
    #
    # You can manually set default values as well:
    #
    #   Album.default_values[:a] = 4
    #   Album.new.a # => 4
    #
    # You can also provide procs to set default values:
    #
    #   Album.default_values[:a] = lambda{Date.today}
    #   Album.new.a # => Date.today
    #
    # By default, default values returned are not cached:
    #
    #   Album.new.a.equal?(Album.new.a) # => false
    #
    # However, you can turn on caching of default values, so that the getter
    # returns the same object each time it is called on a given instance:
    #
    #   Album.plugin :defaults_setter, cache: true
    #   album = Album.new
    #   album.a.equal?(album.a) # => true
    #
    # Note that if the cache is turned on, the cached values are stored in
    # the values hash:
    #
    #   Album.plugin :defaults_setter, cache: true
    #   album = Album.new
    #   album.values # => {}
    #   album.a
    #   album.values # => {:a => Date.today}
    #
    # Usage:
    #
    #   # Make all model subclass instances set defaults (called before loading subclasses)
    #   Sequel::Model.plugin :defaults_setter
    #
    #   # Make the Album class set defaults
    #   Album.plugin :defaults_setter
    module DefaultsSetter
      # Set the default values based on the model schema. Options:
      # :cache :: Cache default values returned in the model's values hash.
      def self.configure(model, opts=OPTS)
        model.instance_exec do
          set_default_values
          @cache_default_values = opts[:cache] if opts.has_key?(:cache)
        end
      end

      module ClassMethods
        # The default values to use for this model. A hash with column symbol
        # keys and default values. If the default values respond to +call+, it will be called
        # to get the value, otherwise the value will be used directly. You can manually modify
        # this hash to set specific default values; by default they will be parsed from the database.
        attr_reader :default_values

        Plugins.after_set_dataset(self, :set_default_values)
        Plugins.inherited_instance_variables(self, :@default_values=>:dup, :@cache_default_values=>nil)

        # Whether default values should be cached in the values hash after being retrieved.
        def cache_default_values?
          @cache_default_values
        end

        # Freeze default values when freezing model class
        def freeze
          @default_values.freeze
          super
        end

        private

        # Parse the cached database schema for this model and set the default values appropriately.
        def set_default_values
          h = {}
          if @db_schema
            @db_schema.each do |k, v|
              if v[:callable_default]
                h[k] = v[:callable_default]
              elsif !v[:ruby_default].nil?
                h[k] = convert_default_value(v[:ruby_default])
              end
            end
          end
          @default_values = h.merge!(@default_values || OPTS)
        end

        # Handle the CURRENT_DATE and CURRENT_TIMESTAMP values specially by returning an appropriate Date or
        # Time/DateTime value.
        def convert_default_value(v)
          case v
          when Sequel::CURRENT_DATE
            lambda{Date.today}
          when Sequel::CURRENT_TIMESTAMP
            lambda{dataset.current_datetime}
          else
            v
          end
        end
      end

      module InstanceMethods
        # Use default value for a new record if values doesn't already contain an entry for it.
        def [](k)
          if new? && !values.has_key?(k)
            v = model.default_values.fetch(k){return}
            v = v.call if v.respond_to?(:call)
            values[k] = v if model.cache_default_values?
            v
          else
            super
          end
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/delay_add_association.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The delay_add_association plugin delays the adding of
    # associated objects to a new (unsaved) object until after the new
    # object is saved. By default, if you attempt to add
    # associated objects to a new object, Sequel will raise
    # an error, because you need to have a primary key before
    # saving the objects.
    #
    # When delaying the add of an associated object, the object
    # will be immediately added to the cached association array.
    # When saving the current object, it will also attempt to
    # validate any associated objects, and if the associated objects
    # are not valid, the current object will also be considered
    # not valid.
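    #
    # For example (a sketch, assuming an Album model with a one_to_many
    # tracks association):
    #
    #   album = Album.new(name: 'RF')
    #   album.add_track(Track.new(num: 1)) # delayed, no error raised
    #   album.save # saves the album, then adds the track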
    #
    # Usage:
    #
    #   # Make all model subclasses delay add_association for new objects
    #   Sequel::Model.plugin :delay_add_association
    #
    #   # Make the Album class delay add_association for new objects
    #   Album.plugin :delay_add_association
    module DelayAddAssociation
      # Depend on the validate_associated plugin.
      def self.apply(mod)
        mod.plugin :validate_associated
      end

      module InstanceMethods
        private

        # Delay the addition of the associated object till after
        # saving the current object, if the current object is new
        # and the associated dataset requires a primary key on the
        # current object.
        def add_associated_object(opts, o, *args)
          if opts.dataset_need_primary_key? && new?
            o = make_add_associated_object(opts, o)
            delay_validate_associated_object(opts, o)
            public_send(opts[:name]) << o
            after_create_hook{super(opts, o, *args)}
            o
          else
            super
          end
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/dirty.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The dirty plugin makes Sequel save the initial value of
    # a column when setting a new value for the column. This
    # makes it easier to see what changes were made to the object:
    #
    #   artist.name # => 'Foo'
    #   artist.name = 'Bar'
    #   artist.initial_value(:name) # 'Foo'
    #   artist.column_change(:name) # ['Foo', 'Bar']
    #   artist.column_changes # {:name => ['Foo', 'Bar']}
    #   artist.column_changed?(:name) # true
    #   artist.reset_column(:name)
    #   artist.name # => 'Foo'
    #   artist.column_changed?(:name) # false
    #
    # It also makes changed_columns more accurate in that it
    # can detect when a column value is changed and then
    # changed back:
    #
    #   artist.name # => 'Foo'
    #   artist.name = 'Bar'
    #   artist.changed_columns # => [:name]
    #   artist.name = 'Foo'
    #   artist.changed_columns # => []
    #
    # It can handle situations where a column value is
    # modified in place:
    #
    #   artist.will_change_column(:name)
    #   artist.name.gsub!(/o/, 'u')
    #   artist.changed_columns # => [:name]
    #   artist.initial_value(:name) # => 'Foo'
    #   artist.column_change(:name) # => ['Foo', 'Fuu']
    #
    # It also saves the previously changed values after an update:
    #
    #   artist.update(name: 'Bar')
    #   artist.column_changes # => {}
    #   artist.previous_changes # => {:name=>['Foo', 'Bar']}
    #
    #   artist.column_previously_was(:name)
    #   # => 'Foo'
    #   artist.column_previously_changed?(:name)
    #   # => true
    #   artist.column_previously_changed?(:name, from: 'Foo', to: 'Bar')
    #   # => true
    #   artist.column_previously_changed?(:name, from: 'Foo', to: 'Baz')
    #   # => false
    #
    # There is one caveat; when used with a column that also uses the
    # serialization plugin, setting the column back to its original value
    # after changing it is not correctly detected and will leave an entry
    # in changed_columns.
    #
    # Usage:
    #
    #   # Make all model subclass instances record previous values (called before loading subclasses)
    #   Sequel::Model.plugin :dirty
    #
    #   # Make the Album class record previous values
    #   Album.plugin :dirty
    module Dirty
      module InstanceMethods
        # A hash of previous changes before the object was
        # saved, in the same format as #column_changes.
        # Note that this is not necessarily the same as the columns
        # that were used in the update statement.
        attr_reader :previous_changes

        # Reset the initial values after saving.
        def after_save
          super
          reset_initial_values
        end

        # Save the current changes so they are available after updating. This happens
        # before after_save resets them.
        def after_update
          super
          @previous_changes = column_changes
        end

        # An array with the initial value and the current value
        # of the column, if the column has been changed. If the
        # column has not been changed, returns nil.
        #
        #   column_change(:name) # => ['Initial', 'Current']
        def column_change(column)
          [initial_value(column), get_column_value(column)] if column_changed?(column)
        end

        # A hash with column symbol keys and pairs of initial and
        # current values for all changed columns.
        #
        #   column_changes # => {:name => ['Initial', 'Current']}
        def column_changes
          h = {}
          initial_values.each do |column, value|
            h[column] = [value, get_column_value(column)]
          end
          h
        end

        # Either true or false depending on whether the column has
        # changed. Note that this is not exactly the same as checking if
        # the column is in changed_columns, if the column was not set
        # initially.
        #
        #   column_changed?(:name) # => true
        def column_changed?(column)
          initial_values.has_key?(column)
        end

        # Whether the column was previously changed.
        # Options:
        # :from :: If given, the previous initial value of the column must match this
        # :to :: If given, the previous changed value of the column must match this
        #
        #   update(name: 'Current')
        #   previous_changes # => {:name=>['Initial', 'Current']}
        #   column_previously_changed?(:name) # => true
        #   column_previously_changed?(:id) # => false
        #   column_previously_changed?(:name, from: 'Initial', to: 'Current') # => true
        #   column_previously_changed?(:name, from: 'Foo', to: 'Current') # => false
        def column_previously_changed?(column, opts=OPTS)
          return false unless (pc = @previous_changes) && (val = pc[column])

          if opts.has_key?(:from)
            return false unless val[0] == opts[:from]
          end

          if opts.has_key?(:to)
            return false unless val[1] == opts[:to]
          end

          true
        end

        # The previous value of the column, which is the initial value of
        # the column before the object was previously saved.
        #
        #   initial_value(:name) # => 'Initial'
        #   update(name: 'Current')
        #   column_previously_was(:name) # => 'Initial'
        def column_previously_was(column)
          (pc = @previous_changes) && (val = pc[column]) && val[0]
        end

        # Freeze internal data structures
        def freeze
          initial_values.freeze
          missing_initial_values.freeze
          @previous_changes.freeze if @previous_changes
          super
        end

        # The initial value of the given column. If the column value has
        # not changed, this will be the same as the current value of the
        # column.
        #
        #   initial_value(:name) # => 'Initial'
        def initial_value(column)
          initial_values.fetch(column){get_column_value(column)}
        end

        # A hash with column symbol keys and initial values.
        #
        #   initial_values # {:name => 'Initial'}
        def initial_values
          @initial_values ||= {}
        end

        # Reset the column to its initial value. If the column was not set
        # initially, removes it from the values.
        #
        #   reset_column(:name)
        #   name # => 'Initial'
        def reset_column(column)
          if initial_values.has_key?(column)
            set_column_value(:"#{column}=", initial_values[column])
          end

          if missing_initial_values.include?(column)
            values.delete(column)
          end
        end

        # Manually specify that a column will change. This should only be used
        # if you plan to modify a column value in place, which is not recommended.
        #
        #   will_change_column(:name)
        #   name.gsub!(/i/i, 'o')
        #   column_change(:name) # => ['Initial', 'onotoal']
        def will_change_column(column)
          _add_changed_column(column)
          check_missing_initial_value(column)

          value = if initial_values.has_key?(column)
            initial_values[column]
          else
            get_column_value(column)
          end

          initial_values[column] = if value && value != true && value.respond_to?(:clone)
            begin
              value.clone
            rescue TypeError
              value
            end
          else
            value
          end
        end

        private

        # Reset initial values when clearing changed columns
        def _clear_changed_columns(reason)
          reset_initial_values if reason == :initialize || reason == :refresh
          super
        end

        # When changing the column value, save the initial column value. If the column
        # value is changed back to the initial value, update changed columns to remove
        # the column.
        def change_column_value(column, value)
          if (iv = initial_values).has_key?(column)
            initial = iv[column]
            super
            if value == initial
              _changed_columns.delete(column) unless missing_initial_values.include?(column)
              iv.delete(column)
            end
          else
            check_missing_initial_value(column)
            iv[column] = get_column_value(column)
            super
          end
        end

        # If the values hash does not contain the column, make sure missing_initial_values
        # does so that it doesn't get deleted from changed_columns if changed back,
        # and so that resetting the column value can be handled correctly.
        def check_missing_initial_value(column)
          unless values.has_key?(column) || (miv = missing_initial_values).include?(column)
            miv << column
          end
        end

        # Duplicate internal data structures
        def initialize_copy(other)
          super
          @initial_values = Hash[other.initial_values]
          @missing_initial_values = other.send(:missing_initial_values).dup
          @previous_changes = Hash[other.previous_changes] if other.previous_changes
          self
        end

        # Array holding column symbols that were not present initially. This is necessary
        # to differentiate between values that were not present and values that were
        # present but equal to nil.
        def missing_initial_values
          @missing_initial_values ||= []
        end

        # Clear the data structures that store the initial values.
        def reset_initial_values
          @initial_values.clear if @initial_values
          @missing_initial_values.clear if @missing_initial_values
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/eager_each.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The eager_each plugin makes calling each on an eager loaded dataset do eager loading.
    # By default, each does not work on an eager loaded dataset, because each iterates
    # over rows of the dataset as they come in, and to eagerly load you need to have all
    # values up front. With the default associations code, you must call #all on an eagerly
    # loaded dataset, as calling #each on an #eager dataset skips the eager loading, and calling
    # #each on an #eager_graph dataset makes it yield plain hashes with columns from all
    # tables, instead of yielding the instances of the main model.
    #
    # This plugin makes #each call #all for eagerly loaded datasets. As #all usually calls
    # #each, this is a bit of an issue, but this plugin resolves the issue by cloning the dataset
    # and setting a new flag in the cloned dataset, so that each can check with the flag to
    # determine whether it should call all.
    #
    # This plugin also makes #first and related methods that load single records work with
    # eager loading. Note that when using eager_graph, calling #first or a similar method
    # will result in two queries, one to load the main object, and one to eagerly load all associated
    # objects to that main object.
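    #
    # For example (a sketch, assuming an Album model with an artist
    # association):
    #
    #   Album.eager(:artist).each do |album|
    #     album.artist # eagerly loaded, no N+1 queries
    #   end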
    #
    # Usage:
    #
    #   # Make all model subclass instances eagerly load for each (called before loading subclasses)
    #   Sequel::Model.plugin :eager_each
    #
    #   # Make the Album class eagerly load for each
    #   Album.plugin :eager_each
    module EagerEach
      module DatasetMethods
        # Don't call #all when attempting to load the columns.
        def columns!
          if use_eager_all?
            clone(:all_called=>true).columns!
          else
            super
          end
        end

        # Call #all instead of #each if eager loading,
        # unless #each is being called by #all.
        def each(&block)
          if use_eager_all?
            all(&block)
          else
            super
          end
        end

        # If eager loading, clone the dataset and set a flag to let #each know not to call #all,
        # to avoid the infinite loop.
        def all(&block)
          if use_eager_all?
            clone(:all_called=>true).all(&block)
          else
            super
          end
        end

        # Handle eager loading when calling first and related methods. For eager_graph,
        # this does an additional query after retrieving a single record, because otherwise
        # the associated records won't get eager loaded correctly.
        def single_record!
          if use_eager_all?
            obj = clone(:all_called=>true).all.first

            if opts[:eager_graph]
              obj = clone(:all_called=>true).where(obj.qualified_pk_hash).unlimited.all.first
            end

            obj
          else
            super
          end
        end

        private

        # Whether to use all when each is called, true when eager loading
        # unless the flag has already been set.
        def use_eager_all?
          (opts[:eager] || opts[:eager_graph]) && !opts[:all_called]
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/eager_graph_eager.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The eager_graph_eager plugin allows for chaining eager loads after eager_graph
    # loads. Given the following model associations:
    #
    #   Band.one_to_many :albums
    #   Album.one_to_many :tracks
    #
    # Let's say you wanted to return bands ordered by album name, and eagerly load
    # those albums, you can do that using:
    #
    #   Band.eager_graph(:albums).order{albums[:name]}
    #
    # Let's say you also wanted to eagerly load the tracks for each album. You could
    # just add them to the eager_graph call:
    #
    #   Band.eager_graph(albums: :tracks).order{albums[:name]}
    #
    # However, that bloats the result set, and you aren't ordering by the track
    # information, so a join is not required. The eager_graph_eager plugin allows
    # you to specify that the tracks be eagerly loaded in a separate query after
    # the eager_graph load of albums:
    #
    #   Band.eager_graph(:albums).eager_graph_eager([:albums], :tracks).order{albums[:name]}
    #
    # <tt>Dataset#eager_graph_eager</tt>'s first argument is a dependency chain, specified
    # as an array of symbols. This specifies the point at which to perform the eager load.
    # The remaining arguments are arguments that could be passed to Dataset#eager to specify
    # what dependent associations should be loaded at that point.
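    #
    # As with Dataset#eager, a proc callback can be given for an association to
    # customize the eager load (a sketch, assuming a :name column on tracks):
    #
    #   Band.eager_graph(:albums).
    #     eager_graph_eager([:albums], tracks: proc{|ds| ds.order(:name)})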
# # If you also have the following model association: # # Track.one_to_many :lyrics # # Here's some different ways of performing eager loading: # # # 4 Queries: bands, albums, tracks, lyrics # Band.eager(albums: {tracks: :lyrics}) # # # 1 Query: bands+albums+tracks+lyrics # Band.eager_graph(albums: {tracks: :lyrics}) # # # 3 Queries: bands+albums, tracks, lyrics # Band.eager_graph(:albums).eager_graph_eager([:albums], tracks: :lyrics) # # # 2 Queries: bands+albums+tracks, lyrics # Band.eager_graph(albums: :tracks).eager_graph_eager([:albums, :tracks], :lyrics) # # # 2 Queries: bands+albums, tracks+lyrics # Band.eager_graph(:albums).eager_graph_eager([:albums], tracks: proc{|ds| ds.eager_graph(:lyrics)}) # # Usage: # # # Support eager_graph_eager in all subclass datasets (called before loading subclasses) # Sequel::Model.plugin :eager_graph_eager # # # Support eager_graph_eager in Album class datasets # Album.plugin :eager_graph_eager module EagerGraphEager module DatasetMethods # Specify for the given dependency chain, after loading objects for the # current dataset via eager_graph, eagerly load the given associations at that point in the # dependency chain. # # dependency_chain :: Array of association symbols, with the first association symbol # specifying an association in the dataset's model, the next # association specifying an association in the previous association's # associated model, and so on. # assocs :: Symbols or hashes specifying associations to eagerly load at the point # specified by the dependency chain. def eager_graph_eager(dependency_chain, *assocs) unless dependency_chain.is_a?(Array) && dependency_chain.all?{|s| s.is_a?(Symbol)} && !dependency_chain.empty? raise Error, "eager_graph_eager first argument must be array of symbols" end current = model deps = dependency_chain.map do |dep| unless ref = current.association_reflection(dep) raise Error, "invalid association #{dep.inspect} for #{current.inspect}" end current = ref.associated_class [dep, ref.returns_array?] end assocs = current.dataset.send(:eager_options_for_associations, assocs) deps.each(&:freeze) deps.unshift(current) deps.freeze assocs.freeze if h = @opts[:eager_graph_eager] h = Hash[h] h[deps] = assocs else h = {deps => assocs} end clone(:eager_graph_eager=>h.freeze) end protected # After building objects from the rows, if eager_graph_eager has been # called on the datasets, for each dependency chain specified, eagerly # load the appropriate associations. def eager_graph_build_associations(rows) objects = super if eager_data = @opts[:eager_graph_eager] eager_data.each do |deps, assocs| current = objects last_class, *deps = deps deps.each do |dep, is_multiple| current_assocs = current.map(&:associations) if is_multiple current = current_assocs.flat_map{|a| a[dep]} else current = current_assocs.map{|a| a[dep]} current.compact! 
end current.uniq!(&:object_id) end last_class.dataset.send(:eager_load, current, assocs) end end objects end end end end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/empty_failure_backtraces.rb����������������������������������������0000664�0000000�0000000�00000002411�14342141206�0024177�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The empty_failure_backtraces plugin uses empty backtraces when raising HookFailed and ValidationFailed # exceptions. This can be significantly faster, and if you are using these exceptions for # flow control, you do not need the backtraces. This plugin is about 10% faster on CRuby # and 10-15x faster on JRuby 9.2.7.0+. This does not have an effect on JRuby <9.2.7.0. # # Usage: # # # Make all model subclass instances use empty backtraces for HookFailed # # and ValidationFailed exceptions (called before loading subclasses) # Sequel::Model.plugin :empty_failure_backtraces # # # Make the Album class use empty backtraces for HookFailed and ValidationFailed exceptions # Album.plugin :empty_failure_backtraces module EmptyFailureBacktraces module InstanceMethods private # Use empty backtrace for HookFailed exceptions. def hook_failed_error(msg) e = super e.set_backtrace([]) e end # Use empty backtrace for ValidationFailed exceptions. def validation_failed_error e = super e.set_backtrace([]) e end end end end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/enum.rb������������������������������������������������������������0000664�0000000�0000000�00000012323�14342141206�0020117�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The enum plugin allows for easily adding methods to modify the value of # a column. It allows treating the column itself as an enum, returning a # symbol for the related enum value. It also allows for setting up dataset # methods to easily find records having or not having each enum value. # # After loading the plugin, you can call the +enum+ method to define the # methods. The +enum+ method accepts a symbol for the underlying # database column, and a hash with symbol keys for the enum values. # For example, the following call: # # Album.enum :status_id, good: 1, bad: 2 # # Will define the following instance methods: # # Album#good! 
:: Change +status_id+ to +1+ (does not save the receiver)
    # Album#bad! :: Change +status_id+ to +2+ (does not save the receiver)
    # Album#good? :: Return whether +status_id+ is +1+
    # Album#bad? :: Return whether +status_id+ is +2+
    #
    # It will override the following instance methods:
    #
    # Album#status_id :: Return +:good+/+:bad+ instead of +1+/+2+ (other values returned as-is)
    # Album#status_id= :: Allow calling with +:good+/+:bad+ to set +status_id+ to +1+/+2+ (other values,
    #                     such as <tt>'good'</tt>/<tt>'bad'</tt> set as-is)
    #
    # It will define the following dataset methods:
    #
    # Album.dataset.good :: Return a dataset filtered to rows where +status_id+ is +1+
    # Album.dataset.not_good :: Return a dataset filtered to rows where +status_id+ is not +1+
    # Album.dataset.bad :: Return a dataset filtered to rows where +status_id+ is +2+
    # Album.dataset.not_bad :: Return a dataset filtered to rows where +status_id+ is not +2+
    #
    # When calling +enum+, you can also provide the following options:
    #
    # :prefix :: Use a prefix for methods defined for each enum value. If +true+ is provided as the value, use the column name as the prefix.
    #            For example, with <tt>prefix: 'status'</tt>, the instance methods defined above would be +status_good?+, +status_bad?+,
    #            +status_good!+, and +status_bad!+, and the dataset methods defined would be +status_good+, +status_not_good+, +status_bad+,
    #            and +status_not_bad+.
    # :suffix :: Use a suffix for methods defined for each enum value. If +true+ is provided as the value, use the column name as the suffix.
    #            For example, with <tt>suffix: 'status'</tt>, the instance methods defined above would be +good_status?+, +bad_status?+,
    #            +good_status!+, and +bad_status!+, and the dataset methods defined would be +good_status+, +not_good_status+, +bad_status+,
    #            and +not_bad_status+.
    # :override_accessors :: Set to +false+ to not override the column accessor methods.
    # :dataset_methods :: Set to +false+ to not define dataset methods.
    #
    # Note that this does not use a true enum column in the database. If you are
    # looking for enum support in the database, and you are using PostgreSQL,
    # Sequel supports that via the pg_enum Database extension.
    #
    # Usage:
    #
    #   # Make all model subclasses handle enums
    #   Sequel::Model.plugin :enum
    #
    #   # Make the Album class handle enums
    #   Album.plugin :enum
    module Enum
      module ClassMethods
        # Define instance and dataset methods in this class to treat column
        # as an enum. See Enum documentation for usage.
def enum(column, values, opts=OPTS) raise Sequel::Error, "enum column must be a symbol" unless column.is_a?(Symbol) raise Sequel::Error, "enum values must be provided as a hash with symbol keys" unless values.is_a?(Hash) && values.all?{|k,| k.is_a?(Symbol)} if prefix = opts[:prefix] prefix = column if prefix == true prefix = "#{prefix}_" end if suffix = opts[:suffix] suffix = column if suffix == true suffix = "_#{suffix}" end values = Hash[values].freeze inverted = values.invert.freeze unless @enum_methods @enum_methods = Module.new include @enum_methods end @enum_methods.module_eval do unless opts[:override_accessors] == false define_method(column) do v = super() inverted.fetch(v, v) end define_method(:"#{column}=") do |v| super(values.fetch(v, v)) end end values.each do |key, value| define_method(:"#{prefix}#{key}#{suffix}!") do self[column] = value nil end define_method(:"#{prefix}#{key}#{suffix}?") do self[column] == value end end end unless opts[:dataset_methods] == false dataset_module do values.each do |key, value| cond = Sequel[column=>value] where :"#{prefix}#{key}#{suffix}", cond where :"#{prefix}not_#{key}#{suffix}", ~cond end end end end end end end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/error_splitter.rb��������������������������������������������������0000664�0000000�0000000�00000003476�14342141206�0022243�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The error_splitter plugin automatically splits errors entries related to # multiple columns to have separate error entries, one per column. For example, # a multiple column uniqueness entry: # # validates_unique([:artist_id, :name]) # # would by default result in errors entries such as: # # {[:artist_id, :name]=>'is already taken'} # # This plugin transforms those errors into: # # {:artist_id=>'is already taken', :name=>'is already taken'} # # The main reason to split errors is if you have a list of fields that you # are checking for validation errors. If you don't split the errors, then: # # errors.on(:artist_id) # # would not return the uniqueness error. # # Usage: # # # Make all model subclass instances split errors (called before loading subclasses) # Sequel::Model.plugin :error_splitter # # # Make the Album class split errors # Album.plugin :error_splitter module ErrorSplitter module InstanceMethods private # If the model instance is not valid, split the errors before returning. def _valid?(opts) v = super unless v split_validation_errors(errors) end v end # Go through all of the errors entries. For any that apply to multiple columns, # remove them and add separate error entries, one per column. 
        def split_validation_errors(errors)
          errors.keys.select{|k| k.is_a?(Array)}.each do |ks|
            msgs = errors.delete(ks)
            ks.each do |k|
              msgs.each do |msg|
                errors.add(k, msg)
              end
            end
          end
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/finder.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The finder plugin adds Model.finder for defining optimized finder methods.
    # There are two ways to use this. The recommended way is to pass a symbol
    # that represents a model class method that returns a dataset:
    #
    #   def Artist.by_name(name)
    #     where(name: name)
    #   end
    #
    #   Artist.finder :by_name
    #
    # This creates an optimized first_by_name method, which you can call normally:
    #
    #   Artist.first_by_name("Joe")
    #
    # The alternative way to use this is to pass your own block:
    #
    #   Artist.finder(name: :first_by_name){|pl, ds| ds.where(name: pl.arg).limit(1)}
    #
    # Additionally, there is a Model.prepared_finder method. This works similarly
    # to Model.finder, but uses a prepared statement. This limits the types of
    # arguments that will be accepted, but can perform better in the database.
    #
    # Usage:
    #
    #   # Make all model subclasses support Model.finder
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :finder
    #
    #   # Make the Album class support Model.finder
    #   Album.plugin :finder
    module Finder
      FINDER_TYPES = [:first, :all, :each, :get].freeze

      def self.apply(model)
        model.instance_exec do
          @finders ||= {}
          @finder_loaders ||= {}
        end
      end

      module ClassMethods
        # Create an optimized finder method using a dataset placeholder literalizer.
        # This pre-computes the SQL to use for the query, except for given arguments.
        #
        # There are two ways to use this. The recommended way is to pass a symbol
        # that represents a model class method that returns a dataset:
        #
        #   def Artist.by_name(name)
        #     where(name: name)
        #   end
        #
        #   Artist.finder :by_name
        #
        # This creates an optimized first_by_name method, which you can call normally:
        #
        #   Artist.first_by_name("Joe")
        #
        # The alternative way to use this is to pass your own block:
        #
        #   Artist.finder(name: :first_by_name){|pl, ds| ds.where(name: pl.arg).limit(1)}
        #
        # Note that if you pass your own block, you are responsible for manually setting
        # limits if necessary (as shown above).
        #
        # Options:
        # :arity :: When using a symbol method name, this specifies the arity of the method.
        #           This should be used if the method accepts an arbitrary number of arguments,
        #           or the method has default argument values. Note that if the method is defined
        #           as a dataset method, the class method Sequel creates accepts an arbitrary number
        #           of arguments, so you should use this option in that case. If you want to handle
        #           multiple possible arities, you need to call the finder method multiple times with
        #           unique :arity and :name options each time.
        # :name :: The name of the method to create. This must be given if you pass a block.
        #          If you use a symbol, this defaults to the symbol prefixed by the type.
        # :mod :: The module in which to create the finder method. Defaults to the singleton
        #         class of the model.
        # :type :: The type of query to run. Can be :first, :each, :all, or :get, defaults to
        #          :first.
        #
        # Caveats:
        #
        # This doesn't handle all possible cases. For example, if you have a method such as:
        #
        #   def Artist.by_name(name)
        #     name ? where(name: name) : exclude(name: nil)
        #   end
        #
        # Then calling a finder without an argument will not work as you expect.
        #
        #   Artist.finder :by_name
        #   Artist.by_name(nil).first
        #   # WHERE (name IS NOT NULL)
        #   Artist.first_by_name(nil)
        #   # WHERE (name IS NULL)
        #
        # See Dataset::PlaceholderLiteralizer for additional caveats. Note that if the model's
        # dataset does not support placeholder literalizers, you will not be able to use this
        # method.
        def finder(meth=OPTS, opts=OPTS, &block)
          if block
            raise Error, "cannot pass both a method name argument and a block to Model.finder" unless meth.is_a?(Hash)
            raise Error, "cannot pass two option hashes to Model.finder" unless opts.equal?(OPTS)
            opts = meth
            raise Error, "must provide method name via :name option when passing block to Model.finder" unless meth_name = opts[:name]
          end

          type = opts.fetch(:type, :first)
          unless prepare = opts[:prepare]
            raise Error, ":type option to Model.finder must be :first, :all, :each, or :get" unless FINDER_TYPES.include?(type)
          end
          limit1 = type == :first || type == :get
          meth_name ||= opts[:name] || :"#{type}_#{meth}"

          argn = lambda do |model|
            if arity = opts[:arity]
              arity
            else
              method = block || model.method(meth)
              (method.arity < 0 ? method.arity.abs - 1 : method.arity)
            end
          end

          loader_proc = if prepare
            proc do |model|
              args = prepare_method_args('$a', argn.call(model))
              ds = if block
                model.instance_exec(*args, &block)
              else
                model.public_send(meth, *args)
              end
              ds = ds.limit(1) if limit1
              model_name = model.name
              if model_name.to_s.empty?
                model_name = model.object_id
              else
                model_name = model_name.gsub(/\W/, '_')
              end
              ds.prepare(type, :"#{model_name}_#{meth_name}")
            end
          else
            proc do |model|
              n = argn.call(model)
              block ||= lambda do |pl, model2|
                args = (0...n).map{pl.arg}
                ds = model2.public_send(meth, *args)
                ds = ds.limit(1) if limit1
                ds
              end

              Sequel::Dataset::PlaceholderLiteralizer.loader(model, &block)
            end
          end

          @finder_loaders[meth_name] = loader_proc
          mod = opts[:mod] || singleton_class
          if prepare
            def_prepare_method(mod, meth_name)
          else
            def_finder_method(mod, meth_name, type)
          end
        end

        def freeze
          @finder_loaders.freeze
          @finder_loaders.each_key{|k| finder_for(k)} if @dataset
          @finders.freeze
          super
        end

        # Similar to finder, but uses a prepared statement instead of a placeholder
        # literalizer. This makes the SQL used static (cannot vary per call), but
        # allows binding argument values instead of literalizing them into the SQL
        # query string.
        #
        # If a block is used with this method, it is instance_execed by the model,
        # and should accept the desired number of placeholder arguments.
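        #
        # For example, reusing the Artist.by_name method shown above (a sketch):
        #
        #   Artist.prepared_finder :by_name
        #   Artist.first_by_name("Joe") # uses a prepared statement
        #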
        # The options are the same as the options for finder, with the following
        # exception:
        # :type :: Specifies the type of prepared statement to create
        def prepared_finder(meth=OPTS, opts=OPTS, &block)
          if block
            raise Error, "cannot pass both a method name argument and a block to Model.finder" unless meth.is_a?(Hash)
            meth = meth.merge(:prepare=>true)
          else
            opts = opts.merge(:prepare=>true)
          end
          finder(meth, opts, &block)
        end

        Plugins.inherited_instance_variables(self, :@finders=>:dup, :@finder_loaders=>:dup)

        private

        # Define a finder method in the given module with the given method name that
        # loads rows using the finder with the given name.
        def def_finder_method(mod, meth, type)
          mod.send(:define_method, meth){|*args, &block| finder_for(meth).public_send(type, *args, &block)}
        end

        # Define a prepared_finder method in the given module that will call the associated prepared
        # statement.
        def def_prepare_method(mod, meth)
          mod.send(:define_method, meth){|*args, &block| finder_for(meth).call(prepare_method_arg_hash(args), &block)}
        end

        # Find the finder to use for the given method. If a finder has not been loaded
        # for the method, load the finder and set it correctly in the finders hash, then
        # return the finder.
        def finder_for(meth)
          unless finder = (frozen? ? @finders[meth] : Sequel.synchronize{@finders[meth]})
            finder_loader = @finder_loaders.fetch(meth)
            finder = finder_loader.call(self)
            Sequel.synchronize{@finders[meth] = finder}
          end

          finder
        end

        # A hash of prepared argument values for the given arguments, with keys
        # starting at a. Used by the methods created by prepared_finder.
        def prepare_method_arg_hash(args)
          h = {}
          prepare_method_args('a', args.length).zip(args).each{|k, v| h[k] = v}
          h
        end

        # An array of prepared statement argument names, of length n and starting with base.
        def prepare_method_args(base, n)
          (0...n).map do
            s = base.to_sym
            base = base.next
            s
          end
        end

        # Clear any finders when resetting the instance dataset
        def reset_instance_dataset
          Sequel.synchronize{@finders.clear} if @finders && !@finders.frozen?
          super
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/forbid_lazy_load.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The forbid_lazy_load plugin forbids lazy loading of associations
    # for objects in cases where the object wasn't loaded with a
    # method that only returns a single object.
    #
    # The main reason for doing this is it makes it easier to detect
    # N+1 query issues. Note that Sequel also offers a
    # tactical_eager_loading plugin which will automatically eagerly
    # load associations for all objects retrieved in the same query
    # if any object would attempt to lazily load an association.
    # That approach may be simpler if you are trying to prevent N+1 issues,
    # though it does retain more objects in memory.
    #
    # This plugin offers multiple different ways to forbid lazy
    # loading. You can forbid lazy loading associations for individual
    # model instances:
    #
    #   obj = Album[1]
    #   obj.forbid_lazy_load
    #   obj.artist # raises Sequel::Plugins::ForbidLazyLoad::Error
    #
    # +forbid_lazy_load+ is automatically called on instances if the
    # instances are loaded via a method such as Dataset#all,
    # Dataset#each, and other methods that load multiple instances
    # at once. These are the cases where lazily loading associations
    # for such instances can cause N+1 issues.
    #
    #   Album.all.first.artist
    #   # raises Sequel::Plugins::ForbidLazyLoad::Error
    #
    #   Album.each do |obj|
    #     obj.artist # raises Sequel::Plugins::ForbidLazyLoad::Error
    #   end
    #
    #   Album[1].artist
    #   # no error
    #
    #   Album.first.artist
    #   # no error
    #
    # You can allow lazy loading of associations for an instance where it
    # was previously forbidden:
    #
    #   obj = Album.all.first
    #   obj.allow_lazy_load
    #   obj.artist # no error
    #
    # You can forbid lazy loading associations on a per-call basis,
    # even if lazy loading of associations is allowed for the instance:
    #
    #   obj = Album[1]
    #   obj.artist(forbid_lazy_load: true)
    #   # raises Sequel::Plugins::ForbidLazyLoad::Error
    #
    # This also works for allowing lazy loading associations for a
    # specific association load even if it is forbidden for the instance:
    #
    #   obj = Album.all.first
    #   obj.artist(forbid_lazy_load: false)
    #   # nothing raised
    #
    # You can also forbid lazy loading on a per-association basis using the
    # +:forbid_lazy_load+ association option with a +true+ value:
    #
    #   Album.many_to_one :artist, forbid_lazy_load: true
    #   Album[1].artist # raises Sequel::Plugins::ForbidLazyLoad::Error
    #
    # However, you probably don't want to do this as it will forbid any
    # lazy loading of the association, even if the loading could not
    # result in an N+1 issue.
    #
    # On the flip side, you can allow lazy loading using the
    # +:forbid_lazy_load+ association option with a +false+ value:
    #
    #   Album.many_to_one :artist, forbid_lazy_load: false
    #   Album.all.first.artist # no error
    #
    # One reason to do this is when using a plugin like static_cache
    # on the associated model, where a query is not actually issued
    # when doing a lazy association load. To make that particular
    # case easier, this plugin makes Model.finalize_associations
    # automatically set the association option if the associated
    # class uses the static_cache plugin.
    #
    # Note that even with this plugin, there can still be N+1 issues,
    # such as:
    #
    #   Album.each do |obj| # 1 query for all albums
    #     Artist[obj.artist_id] # 1 query per album for each artist
    #   end
    #
    # Usage:
    #
    #   # Make all model subclasses support forbidding lazy load
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :forbid_lazy_load
    #
    #   # Make the Album class support forbidding lazy load
    #   Album.plugin :forbid_lazy_load
    module ForbidLazyLoad
      # Error raised when attempting to lazy load an association when
      # lazy loading has been forbidden.
      class Error < StandardError
      end

      module ClassMethods
        Plugins.def_dataset_methods(self, :forbid_lazy_load)

        # If the static_cache plugin is used by the associated class for
        # an association, allow lazy loading that association, since the
        # lazy association load will use a hash table lookup and not a query.
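        #
        # A sketch of the effect (assuming Artist uses static_cache):
        #
        #   Album.many_to_one :artist
        #   Album.finalize_associations
        #   # now behaves as if the association had been defined with
        #   # forbid_lazy_load: false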
        def allow_lazy_load_for_static_cache_associations
          # :nocov:
          if defined?(::Sequel::Plugins::StaticCache::ClassMethods)
          # :nocov:
            @association_reflections.each_value do |ref|
              if ref.associated_class.is_a?(::Sequel::Plugins::StaticCache::ClassMethods)
                ref[:forbid_lazy_load] = false
              end
            end
          end
        end

        # Allow lazy loading for static cache associations before finalizing.
        def finalize_associations
          allow_lazy_load_for_static_cache_associations
          super
        end
      end

      module InstanceMethods
        # Set this model instance to allow lazy loading of associations.
        def allow_lazy_load
          @forbid_lazy_load = false
          self
        end

        # Set this model instance to not allow lazy loading of associations.
        def forbid_lazy_load
          @forbid_lazy_load = true
          self
        end

        private

        # Allow lazy loading for objects returned by singular associations.
        def _load_associated_object(opts, dynamic_opts)
          # The implementation that loads these associations does
          # .all.first, which would result in the object returned being
          # marked as forbidding lazy load.
          obj = super
          obj.allow_lazy_load if obj.is_a?(InstanceMethods)
          obj
        end

        # Raise an Error if lazy loading has been forbidden for
        # the instance, association, or call.
        def _load_associated_objects(opts, dynamic_opts=OPTS)
          case dynamic_opts[:forbid_lazy_load]
          when false
            # nothing
          when nil
            unless dynamic_opts[:reload]
              case opts[:forbid_lazy_load]
              when nil
                raise Error, "lazy loading forbidden for this object (association: #{opts.inspect}, object: #{inspect})" if @forbid_lazy_load
              when false
                # nothing
              else
                raise Error, "lazy loading forbidden for this association (#{opts.inspect})"
              end
            end
          else
            raise Error, "lazy loading forbidden for this association method call (association: #{opts.inspect})"
          end

          super
        end
      end

      module DatasetMethods
        # Mark model instances retrieved in this call as forbidding lazy loading.
        def each
          if row_proc
            super do |obj|
              obj.forbid_lazy_load if obj.is_a?(InstanceMethods)
              yield obj
            end
          else
            super
          end
        end

        # Mark model instances retrieved in this call as forbidding lazy loading.
        def with_sql_each(sql)
          if row_proc
            super(sql) do |obj|
              obj.forbid_lazy_load if obj.is_a?(InstanceMethods)
              yield obj
            end
          else
            super
          end
        end

        # Mark model instances retrieved in this call as allowing lazy loading.
        def with_sql_first(sql)
          obj = super
          obj.allow_lazy_load if obj.is_a?(InstanceMethods)
          obj
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/force_encoding.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The force_encoding plugin allows you to force specific encodings for all
    # strings that are used by the model.
    # When model instances are loaded
    # from the database, all values in the hash that are strings are
    # forced to the given encoding. Whenever you update a model column
    # attribute, the resulting value is forced to a given encoding if the
    # value is a string. There are two ways to specify the encoding. You
    # can either do so in the plugin call itself, or via the
    # forced_encoding class accessor.
    #
    # Usage:
    #
    #   # Force all strings to be UTF-8 encoded in all model subclasses
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :force_encoding, 'UTF-8'
    #
    #   # Force the encoding for the Album model to UTF-8
    #   Album.plugin :force_encoding
    #   Album.forced_encoding = 'UTF-8'
    module ForceEncoding
      # Set the forced_encoding based on the value given in the plugin call.
      # Note that if the plugin has been previously loaded, any previous
      # forced encoding is overruled, even if no encoding is given when calling
      # the plugin.
      def self.configure(model, encoding=nil)
        model.forced_encoding = encoding
      end

      module ClassMethods
        # The string encoding to force on column string values
        attr_accessor :forced_encoding

        Plugins.inherited_instance_variables(self, :@forced_encoding=>nil)

        def call(values)
          o = super
          o.send(:force_hash_encoding, o.values)
          o
        end
      end

      module InstanceMethods
        private

        # Force the encoding of all string values when setting the instance's values.
        def _refresh_set_values(values)
          super(force_hash_encoding(values))
        end

        # Force the encoding of all string values when setting the instance's values.
        def _save_set_values(values)
          super(force_hash_encoding(values))
        end

        # Force the encoding for all string values in the given row hash.
        def force_hash_encoding(row)
          if fe = model.forced_encoding
            row.each_value{|v| v.force_encoding(fe) if v.is_a?(String) && !v.is_a?(Sequel::SQL::Blob)}
          end
          row
        end

        # Force the encoding of all returned strings to the model's forced_encoding.
        def typecast_value(column, value)
          s = super
          if s.is_a?(String) && !s.is_a?(Sequel::SQL::Blob) && (fe = model.forced_encoding)
            s = s.dup if s.frozen?
            s.force_encoding(fe)
          end
          s
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/hook_class_methods.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # Sequel's built-in hook_class_methods plugin is designed for backwards
    # compatibility. Its use is not encouraged; it is recommended to use
    # instance methods and super instead of this plugin.
    # This plugin allows
    # calling class methods with blocks to define hooks:
    #
    #   # Block only, can cause duplicate hooks if code is reloaded
    #   before_save{self.created_at = Time.now}
    #   # Block with tag, safe for reloading
    #   before_save(:set_created_at){self.created_at = Time.now}
    #   # Tag only, safe for reloading, calls instance method
    #   before_save(:set_created_at)
    #
    # Pretty much anything you can do with a hook class method, you can also
    # do with an instance method instead (making sure to call super), which is
    # the recommended way to add hooks in Sequel:
    #
    #   def before_save
    #     super
    #     self.created_at = Time.now
    #   end
    #
    # Usage:
    #
    #   # Allow use of hook class methods in all model subclasses (called before loading subclasses)
    #   Sequel::Model.plugin :hook_class_methods
    #
    #   # Allow the use of hook class methods in the Album class
    #   Album.plugin :hook_class_methods
    module HookClassMethods
      # Set up the hooks instance variable in the model.
      def self.apply(model)
        hooks = model.instance_variable_set(:@hooks, {})
        Model::HOOKS.each{|h| hooks[h] = []}
      end

      module ClassMethods
        Model::HOOKS.each do |h|
          class_eval(<<-END, __FILE__, __LINE__ + 1)
            def #{h}(method = nil, &block)
              add_hook(:#{h}, method, &block)
            end
          END
        end

        # Freeze hooks when freezing model class.
        def freeze
          @hooks.freeze.each_value(&:freeze)
          super
        end

        # Returns true if there are any hook blocks for the given hook.
        def has_hooks?(hook)
          !@hooks[hook].empty?
        end

        # Yield every block related to the given hook.
        def hook_blocks(hook)
          # SEQUEL6: Remove
          Sequel::Deprecation.deprecate("The hook_blocks class method in the hook_class_methods plugin is deprecated and will be removed in Sequel 6.")
          @hooks[hook].each{|_,v,_| yield v}
        end

        # Yield every method related to the given hook.
        def hook_methods_for(hook)
          @hooks[hook].each{|_,_,m| yield m}
        end

        Plugins.inherited_instance_variables(self, :@hooks=>:hash_dup)

        private

        # Add a hook block to the list of hook methods.
        # If a non-nil tag is given and it already is in the list of hooks,
        # replace it with the new block.
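        #
        # For example, via the generated hook class methods (tag hypothetical):
        #
        #   Album.before_save(:set_ts){self.ts = Time.now}
        #   Album.before_save(:set_ts){self.ts = Time.now.utc} # replaces the previous block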
        def add_hook(hook, tag, &block)
          unless block
            (raise Error, 'No hook method specified') unless tag
            # Allow calling private hook methods
            block = proc {send(tag)}
          end
          h = @hooks[hook]
          if tag && (old = h.find{|x| x[0] == tag})
            old[1] = block
            Plugins.def_sequel_method(self, old[2], 0, &block)
          else
            meth = Plugins.def_sequel_method(self, "hook_class_methods_#{hook}", 0, &block)
            if hook.to_s =~ /^before/
              h.unshift([tag, block, meth])
            else
              h << [tag, block, meth]
            end
          end
        end
      end

      module InstanceMethods
        # hook methods are private
        [:before_create, :before_update, :before_validation, :before_save, :before_destroy].each{|h| class_eval("def #{h}; model.hook_methods_for(:#{h}){|m| send(m)}; super end", __FILE__, __LINE__)}
        [:after_create, :after_update, :after_validation, :after_save, :after_destroy].each{|h| class_eval("def #{h}; super; model.hook_methods_for(:#{h}){|m| send(m)}; end", __FILE__, __LINE__)}
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/input_transformer.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # InputTransformer is a plugin that allows generic transformations
    # of input values in model column setters. Example:
    #
    #   Album.plugin :input_transformer
    #   Album.add_input_transformer(:reverser){|v| v.is_a?(String) ? v.reverse : v}
    #   album = Album.new(name: 'foo')
    #   album.name # => 'oof'
    #
    # You can specifically set some columns to skip some input
    # transformers:
    #
    #   Album.skip_input_transformer(:reverser, :foo)
    #   Album.new(foo: 'bar').foo # => 'bar'
    #
    # Usage:
    #
    #   # Make all model subclass instances support input transformers (called before loading subclasses)
    #   Sequel::Model.plugin :input_transformer
    #
    #   # Make the Album class support input transformers
    #   Album.plugin :input_transformer
    module InputTransformer
      def self.apply(model, *)
        model.instance_exec do
          @input_transformers = {}
          @skip_input_transformer_columns = {}
        end
      end

      # If an input transformer is given in the plugin call,
      # add it as a transformer
      def self.configure(model, transformer_name=nil, &block)
        model.add_input_transformer(transformer_name, &block) if transformer_name || block
      end

      module ClassMethods
        # Hash of input transformer name symbols to transformer callables.
        attr_reader :input_transformers

        # The order in which to call the input transformers. For backwards compatibility only.
        def input_transformer_order
          input_transformers.keys.reverse
        end

        Plugins.inherited_instance_variables(self, :@skip_input_transformer_columns=>:hash_dup, :@input_transformers=>:dup)

        # Add an input transformer to this model.
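        #
        # For example, a transformer that strips leading and trailing
        # whitespace from string input (transformer name hypothetical):
        #
        #   Album.add_input_transformer(:stripper){|v| v.is_a?(String) ? v.strip : v}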
        def add_input_transformer(transformer_name, &block)
          raise(Error, 'must provide both transformer name and block when adding input transformer') unless transformer_name && block
          @input_transformers[transformer_name] = block
          @skip_input_transformer_columns[transformer_name] = []
        end

        # Freeze input transformers when freezing model class
        def freeze
          @input_transformers.freeze
          @skip_input_transformer_columns.freeze.each_value(&:freeze)

          super
        end

        # Set columns that the transformer should skip.
        def skip_input_transformer(transformer_name, *columns)
          @skip_input_transformer_columns[transformer_name].concat(columns).uniq!
        end

        # Return true if the transformer should not be called for the given column.
        def skip_input_transformer?(transformer_name, column)
          @skip_input_transformer_columns[transformer_name].include?(column)
        end
      end

      module InstanceMethods
        # Transform the input using all of the transformers, except those explicitly
        # skipped, before setting the value in the model object.
        def []=(k, v)
          model.input_transformers.reverse_each do |name, transformer|
            v = transformer.call(v) unless model.skip_input_transformer?(name, k)
          end
          super
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/insert_conflict.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The insert_conflict plugin allows handling conflicts due to unique
    # constraints when saving a new model instance, using the INSERT ON CONFLICT
    # support in PostgreSQL 9.5+ and SQLite 3.24.0+. Example:
    #
    #   class Album < Sequel::Model
    #     plugin :insert_conflict
    #   end
    #
    #   Album.new(name: 'Foo', copies_sold: 1000).
    #     insert_conflict(
    #       target: :name,
    #       update: {copies_sold: Sequel[:excluded][:copies_sold]}
    #     ).
    #     save
    #
    # This example will try to insert the album, but if there is an existing
    # album with the name 'Foo', this will update the copies_sold attribute
    # for that album. See the PostgreSQL and SQLite adapter documentation for
    # the options you can pass to the insert_conflict method.
    #
    # You should not attempt to use this plugin to ignore conflicts when
    # inserting; you should only use it to turn insert conflicts into updates.
    # Any usage to ignore conflicts is not recommended or supported.
    #
    # Usage:
    #
    #   # Make all model subclasses support insert_conflict
    #   Sequel::Model.plugin :insert_conflict
    #
    #   # Make the Album class support insert_conflict
    #   Album.plugin :insert_conflict
    module InsertConflict
      def self.configure(model)
        model.instance_exec do
          if @dataset && !@dataset.respond_to?(:insert_conflict)
            raise Error, "#{self}'s dataset does not support insert_conflict"
          end
        end
      end

      module InstanceMethods
        # Set the insert_conflict options to pass to the dataset when inserting.
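        #
        # For example, using a named unique constraint on PostgreSQL
        # (constraint name hypothetical):
        #
        #   Album.new(name: 'Foo').
        #     insert_conflict(constraint: :albums_name_ukey, update: {name: Sequel[:excluded][:name]}).
        #     save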
        def insert_conflict(opts=OPTS)
          raise Error, "Model#insert_conflict is only supported on new model instances" unless new?
          @insert_conflict_opts = opts
          self
        end

        private

        # Set the dataset used for inserting to use INSERT ON CONFLICT
        # if Model#insert_conflict has been called on the instance previously.
        def _insert_dataset
          ds = super

          if @insert_conflict_opts
            ds = ds.insert_conflict(@insert_conflict_opts)
          end

          ds
        end

        # Disable the use of prepared insert statements, as they are not compatible
        # with this plugin.
        def use_prepared_statements_for?(type)
          return false if type == :insert || type == :insert_select
          super if defined?(super)
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/insert_returning_select.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # If the model's dataset selects explicit columns and the
    # database supports it, the insert_returning_select plugin will
    # automatically set the RETURNING clause on the dataset used to
    # insert rows to the columns selected, which allows the default model
    # support to run the insert and refresh of the data in a single
    # query, instead of two separate queries. This is Sequel's default
    # behavior when the model does not select explicit columns.
    #
    # Usage:
    #
    #   # Make all model subclasses automatically setup insert returning clauses
    #   Sequel::Model.plugin :insert_returning_select
    #
    #   # Make the Album class automatically setup insert returning clauses
    #   Album.plugin :insert_returning_select
    module InsertReturningSelect
      # Modify the current model's dataset selection, if the model
      # has a dataset.
      def self.configure(model)
        model.instance_exec do
          self.dataset = dataset if @dataset && @dataset.opts[:select]
        end
      end

      module ClassMethods
        # The dataset to use to insert new rows. For internal use only.
        attr_reader :instance_insert_dataset

        private

        # When resetting the instance dataset, also reset the instance_insert_dataset.
        def reset_instance_dataset
          ret = super
          return unless ds = @instance_dataset

          if columns = insert_returning_columns(ds)
            ds = ds.returning(*columns)
          end

          @instance_insert_dataset = ds

          ret
        end

        # Determine the columns to use for the returning clause, or return nil
        # if they can't be determined and a returning clause should not be
        # added automatically.
        def insert_returning_columns(ds)
          return unless ds.supports_returning?(:insert)
          return unless values = ds.opts[:select]

          values = values.map{|v| ds.unqualified_column_for(v)}
          if values.all?
            values
          end
        end
      end

      module InstanceMethods
        private

        # Use the instance_insert_dataset as the base dataset for the insert.
        def _insert_dataset
          use_server(model.instance_insert_dataset)
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/instance_filters.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # This plugin allows you to add filters on a per object basis that
    # restrict updating or deleting the object. It's designed for cases
    # where you would normally have to drop down to the dataset level
    # to get the necessary control, because you only want to delete or
    # update the rows in certain cases based on the current status of
    # the row in the database. The main purpose of this plugin is to
    # avoid race conditions by relying on the atomic properties of database
    # transactions.
    #
    #   class Item < Sequel::Model
    #     plugin :instance_filters
    #   end
    #
    #   # These are two separate objects that represent the same
    #   # database row.
    #   i1 = Item.first(id: 1, delete_allowed: false)
    #   i2 = Item.first(id: 1, delete_allowed: false)
    #
    #   # Add an instance filter to the object. This filter is in effect
    #   # until the object is successfully updated or deleted.
    #   i1.instance_filter(delete_allowed: true)
    #
    #   # Attempting to delete the object where the filter doesn't
    #   # match any rows raises an error.
    #   i1.delete # raises Sequel::NoExistingObject
    #
    #   # The other object that represents the same row has no
    #   # instance filters, and can be updated normally.
    #   i2.update(delete_allowed: true)
    #
    #   # Even though the filter is still in effect, since the
    #   # database row has been updated to allow deleting,
    #   # delete now works.
    #   i1.delete
    #
    # This plugin sets the require_modification flag on the model,
    # so if the model's dataset doesn't provide an accurate number
    # of matched rows, this could result in invalid exceptions being raised.
    module InstanceFilters
      # Exception class raised when updating or deleting an object does
      # not affect exactly one row.
      Error = Sequel::NoExistingObject

      # Set the require_modification flag to true for the model.
      def self.configure(model)
        model.require_modification = true
      end

      module InstanceMethods
        # Clear the instance filters after successfully destroying the object.
        def after_destroy
          super
          clear_instance_filters
        end

        # Clear the instance filters after successfully updating the object.
        def after_update
          super
          clear_instance_filters
        end

        # Freeze the instance filters when freezing the object
        def freeze
          instance_filters.freeze
          super
        end

        # Add an instance filter to the array of instance filters.
        # Both the arguments given and the block are passed to the
        # dataset's filter method.
        def instance_filter(*args, &block)
          instance_filters << [args, block]
        end

        private

        # If there are any instance filters, make sure not to use the
        # instance delete optimization.
        def _delete_without_checking
          if @instance_filters && !@instance_filters.empty?
            _delete_dataset.delete
          else
            super
          end
        end

        # Duplicate internal structures when duplicating model instance.
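        #
        # A sketch of the behavior (model hypothetical):
        #
        #   i1 = Item[1]
        #   i1.instance_filter(delete_allowed: true)
        #   i2 = i1.dup # i2 gets its own copy of the instance filters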
        def initialize_copy(other)
          super
          @instance_filters = other.send(:instance_filters).dup
          self
        end

        # Lazily initialize the instance filter array.
        def instance_filters
          @instance_filters ||= []
        end

        # Apply the instance filters to the given dataset
        def apply_instance_filters(ds)
          instance_filters.inject(ds){|ds1, i| ds1.where(*i[0], &i[1])}
        end

        # Clear the instance filters.
        def clear_instance_filters
          instance_filters.clear
        end

        # Apply the instance filters to the dataset returned by super.
        def _delete_dataset
          apply_instance_filters(super)
        end

        # Apply the instance filters to the dataset returned by super.
        def _update_dataset
          apply_instance_filters(super)
        end

        # Only use prepared statements for update and delete queries
        # if there are no instance filters.
        def use_prepared_statements_for?(type)
          if (type == :update || type == :delete) && !instance_filters.empty?
            false
          else
            super if defined?(super)
          end
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/instance_hooks.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The instance_hooks plugin allows you to add hooks to specific instances,
    # by passing a block to a _hook method (e.g. before_save_hook{do_something}).
    # The block is executed when the hook is called (e.g. before_save).
    #
    # All of the standard hooks are supported.
    # Instance level before hooks are executed in reverse order of addition before
    # calling super. Instance level after hooks are executed in order of addition
    # after calling super.
    #
    # Instance level hooks for before and after are cleared after all related
    # after level instance hooks have run. This means that if you add before_create
    # and before_update instance hooks to a new object, the before_create hook will
    # be run the first time you save the object (creating it), and the before_update
    # hook will be run the second time you save the object (updating it), and no
    # hooks will be run the third time you save the object.
    #
    # Validation hooks are not cleared until after a successful save.
    #
    # Usage:
    #
    #   # Add the instance hook methods to all model subclass instances (called before loading subclasses)
    #   Sequel::Model.plugin :instance_hooks
    #
    #   # Add the instance hook methods just to Album instances
    #   Album.plugin :instance_hooks
    module InstanceHooks
      module InstanceMethods
        Sequel::Model::HOOKS.each{|h| class_eval(<<-END , __FILE__, __LINE__+1)}
          def #{h}_hook(&block)
            raise Sequel::Error, "can't add hooks to frozen object" if frozen?
            add_instance_hook(:#{h}, &block)
            self
          end
        END

        [:before_create, :before_update, :before_validation].each{|h| class_eval("def #{h}; run_before_instance_hooks(:#{h}) if @instance_hooks; super end", __FILE__, __LINE__)}
        [:after_create, :after_update].each{|h| class_eval(<<-END, __FILE__, __LINE__ + 1)}
          def #{h}
            super
            return unless @instance_hooks
            run_after_instance_hooks(:#{h})
            @instance_hooks.delete(:#{h})
            @instance_hooks.delete(:#{h.to_s.sub('after', 'before')})
          end
        END

        # Run after destroy instance hooks.
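        #
        # For example (logging code hypothetical):
        #
        #   obj.after_destroy_hook{AuditLog.record(:destroy, obj.pk)}
        #   obj.destroy # runs the hook after the destroy, then discards it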
        def after_destroy
          super
          return unless @instance_hooks
          run_after_instance_hooks(:after_destroy)
          @instance_hooks.delete(:after_destroy)
          @instance_hooks.delete(:before_destroy)
        end

        # Run after validation instance hooks.
        def after_validation
          super
          return unless @instance_hooks
          run_after_instance_hooks(:after_validation)
        end

        # Run after save instance hooks.
        def after_save
          super
          return unless @instance_hooks
          run_after_instance_hooks(:after_save)
          @instance_hooks.delete(:after_save)
          @instance_hooks.delete(:before_save)
          @instance_hooks.delete(:after_validation)
          @instance_hooks.delete(:before_validation)
        end

        # Run before_destroy instance hooks.
        def before_destroy
          return super unless @instance_hooks
          run_before_instance_hooks(:before_destroy)
          super
        end

        # Run before_save instance hooks.
        def before_save
          return super unless @instance_hooks
          run_before_instance_hooks(:before_save)
          super
        end

        private

        # Add the block as an instance level hook. For before hooks, add it to
        # the beginning of the instance hook's array. For after hooks, add it
        # to the end.
        def add_instance_hook(hook, &block)
          instance_hooks(hook).public_send(hook.to_s.start_with?('before') ? :unshift : :push, block)
        end

        # An array of instance level hook blocks for the given hook type.
        def instance_hooks(hook)
          @instance_hooks ||= {}
          @instance_hooks[hook] ||= []
        end

        # Run all hook blocks of the given hook type.
        def run_after_instance_hooks(hook)
          instance_hooks(hook).each(&:call)
        end
        alias run_before_instance_hooks run_after_instance_hooks
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/instance_specific_default.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The instance_specific_default plugin exists to make it easier to use a
    # global :instance_specific association option, or to warn or raise when Sequel
    # has to guess which value to use for the :instance_specific option (Sequel defaults to
    # guessing true as that is the conservative setting). It is helpful to
    # use this plugin, particularly with the :warn or :raise settings, to determine
    # which associations should have :instance_specific set. Setting the
    # :instance_specific option to false for associations that are not instance specific
    # can improve performance.
    #
    # Associations are instance-specific if their block calls
    # a model instance method, or where the value of the block varies
    # based on runtime state, and the variance is outside of a delayed evaluation.
    # For example, with the following three associations:
    #
    #   Album.one_to_one :first_track, class: :Track do |ds|
    #     ds.where(number: 1)
    #   end
    #
    #   Album.one_to_one :last_track, class: :Track do |ds|
    #     ds.where(number: num_tracks)
    #   end
    #
    #   Album.one_to_many :recent_tracks, class: :Track do |ds|
    #     ds.where{date_updated > Date.today - 10}
    #   end
    #
    # +first_track+ is not instance specific, but +last_track+ and +recent_tracks+ are.
    # +last_track+ is because
    # the +num_tracks+ call in the block is calling
    # <tt>Album#num_tracks</tt>. +recent_tracks+ is because the value will change over
    # time. This plugin allows you to find these cases, and set the :instance_specific
    # option appropriately for them:
    #
    #   Album.one_to_one :first_track, class: :Track, instance_specific: false do |ds|
    #     ds.where(number: 1)
    #   end
    #
    #   Album.one_to_one :last_track, class: :Track, instance_specific: true do |ds|
    #     ds.where(number: num_tracks)
    #   end
    #
    #   Album.one_to_many :recent_tracks, class: :Track, instance_specific: true do |ds|
    #     ds.where{date_updated > Date.today - 10}
    #   end
    #
    # For the +recent_tracks+ association, instead of marking it instance_specific, you
    # could also use a delayed evaluation, since it doesn't actually contain
    # instance-specific code:
    #
    #   Album.one_to_many :recent_tracks, class: :Track, instance_specific: false do |ds|
    #     ds.where{date_updated > Sequel.delay{Date.today - 10}}
    #   end
    #
    # Possible arguments to provide when loading the plugin:
    #
    # true :: Set the :instance_specific option to true
    # false :: Set the :instance_specific option to false
    # :default :: Call super to set the :instance_specific option
    # :warn :: Emit a warning before calling super to set the :instance_specific option
    # :raise :: Raise a Sequel::Error if an :instance_specific option is not provided and
    #           an association could be instance-specific.
    #
    # Note that this plugin only affects associations which could be instance
    # specific (those with blocks), where the :instance_specific option was not
    # specified when the association was created.
    #
    # Usage:
    #
    #   # Set how to handle associations that could be instance specific
    #   # but did not specify an :instance_specific option, for all subclasses
    #   # (set before creating subclasses).
    #   Sequel::Model.plugin :instance_specific_default, :warn
    #
    #   # Set how to handle associations that could be instance specific
    #   # but did not specify an :instance_specific option, for the Album class
    #   Album.plugin :instance_specific_default, :warn
    module InstanceSpecificDefault
      # Set how to handle associations that could be instance specific but did
      # not specify an :instance_specific value.
      def self.configure(model, default)
        model.instance_variable_set(:@instance_specific_default, default)
      end

      module ClassMethods
        Plugins.inherited_instance_variables(self, :@instance_specific_default=>nil)

        private

        # Return the appropriate :instance_specific value, or warn or raise if
        # configured.
        def _association_instance_specific_default(name)
          case @instance_specific_default
          when true, false
            return @instance_specific_default
          when :default
            # nothing
          when :warn
            warn("possibly instance-specific association without :instance_specific option (class: #{self}, association: #{name})", :uplevel => 3)
          when :raise
            raise Sequel::Error, "possibly instance-specific association without :instance_specific option (class: #{self}, association: #{name})"
          else
            raise Sequel::Error, "invalid value passed to instance_specific_default plugin: #{@instance_specific_default.inspect}"
          end

          super
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/inverted_subsets.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The inverted_subsets plugin adds another method for each defined
    # subset, which inverts the condition supplied. By default, inverted
    # subset method names are prefixed with not_.
    #
    # You can change the prefix, or indeed entirely customise the inverted names,
    # by passing a block to the plugin configuration:
    #
    #   # Use an exclude_ prefix for inverted subsets instead of not_
    #   Album.plugin(:inverted_subsets){|name| "exclude_#{name}"}
    #
    # Usage:
    #
    #   # Add inverted subsets in the Album class
    #   Album.plugin :inverted_subsets
    #
    #   # This will now create two methods, published and not_published
    #   Album.dataset_module do
    #     where :published, published: true
    #   end
    #
    #   Album.published.sql
    #   # SELECT * FROM albums WHERE (published IS TRUE)
    #
    #   Album.not_published.sql
    #   # SELECT * FROM albums WHERE (published IS NOT TRUE)
    module InvertedSubsets
      def self.apply(model, &block)
        model.instance_exec do
          @dataset_module_class = Class.new(@dataset_module_class) do
            include DatasetModuleMethods
            if block
              define_method(:inverted_subset_name, &block)
              private :inverted_subset_name
            end
          end
        end
      end

      module DatasetModuleMethods
        # Define a not_ prefixed subset which inverts the subset condition.
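        #
        # For example (column and subset name hypothetical):
        #
        #   Album.dataset_module{where(:gold, Sequel[:copies_sold] >= 500000)}
        #   Album.not_gold.sql
        #   # SELECT * FROM albums WHERE (copies_sold < 500000)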
        def where(name, *args, &block)
          super
          exclude(inverted_subset_name(name), *args, &block)
        end

        private

        def inverted_subset_name(name)
          "not_#{name}"
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/json_serializer.rb

# frozen-string-literal: true

require 'json'

module Sequel
  module Plugins
    # The json_serializer plugin handles serializing entire Sequel::Model
    # objects to JSON, as well as support for deserializing JSON directly
    # into Sequel::Model objects. It requires the json library, and can
    # work with either the pure ruby version or the C extension.
    #
    # Basic Example:
    #
    #   album = Album[1]
    #   album.to_json
    #   # => '{"id":1,"name":"RF","artist_id":2}'
    #
    # In addition, you can provide options to control the JSON output:
    #
    #   album.to_json(only: :name)
    #   album.to_json(except: [:id, :artist_id])
    #   # => '{"name":"RF"}'
    #
    #   album.to_json(include: :artist)
    #   # => '{"id":1,"name":"RF","artist_id":2,
    #   #      "artist":{"id":2,"name":"YJM"}}'
    #
    # You can use a hash value with <tt>:include</tt> to pass options
    # to associations:
    #
    #   album.to_json(include: {artist: {only: :name}})
    #   # => '{"id":1,"name":"RF","artist_id":2,
    #   #      "artist":{"name":"YJM"}}'
    #
    # You can specify a name for a given association by using an aliased
    # expression as the key in the <tt>:include</tt> hash
    #
    #   album.to_json(include: {Sequel.as(:artist, :singer)=>{only: :name}})
    #   # => '{"id":1,"name":"RF","artist_id":2,
    #   #      "singer":{"name":"YJM"}}'
    #
    # You can specify the <tt>:root</tt> option to nest the JSON under the
    # name of the model:
    #
    #   album.to_json(root: true)
    #   # => '{"album":{"id":1,"name":"RF","artist_id":2}}'
    #
    # You can specify JSON serialization options to use later:
    #
    #   album.json_serializer_opts(root: true)
    #   [album].to_json
    #   # => '[{"album":{"id":1,"name":"RF","artist_id":2}}]'
    #
    # Additionally, +to_json+ also exists as a class and dataset method, both
    # of which return all objects in the dataset:
    #
    #   Album.to_json
    #   Album.where(artist_id: 1).to_json(include: :tags)
    #
    # If you have an existing array of model instances you want to convert to
    # JSON, you can call the class to_json method with the :array option:
    #
    #   Album.to_json(array: [Album[1], Album[2]])
    #
    # All to_json methods take blocks, and if a block is given, it will yield
    # the array or hash before serialization, and will serialize the value
    # the block returns. This allows you to customize the resulting JSON format
    # on a per-call basis.
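    #
    # For example, adding a computed key before serialization (key hypothetical):
    #
    #   album.to_json{|h| h.merge('url' => "/albums/#{h['id']}")}
    #   # => '{"id":1,"name":"RF","artist_id":2,"url":"/albums/1"}'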
    #
    # In addition to creating JSON, this plugin also enables Sequel::Model
    # classes to create instances directly from JSON using the from_json class
    # method:
    #
    #   json = album.to_json
    #   album = Album.from_json(json)
    #
    # The array_from_json class method exists to parse arrays of model instances
    # from json:
    #
    #   json = Album.where(artist_id: 1).to_json
    #   albums = Album.array_from_json(json)
    #
    # These do not necessarily round trip, since doing so would let users
    # create model objects with arbitrary values. By default, from_json will
    # call set with the values in the hash. If you want to specify the allowed
    # fields, you can use the :fields option, which will call set_fields with
    # the given fields:
    #
    #   Album.from_json(album.to_json, fields: %w'id name')
    #
    # If you want to update an existing instance, you can use the from_json
    # instance method:
    #
    #   album.from_json(json)
    #
    # Both of these allow creation of cached associated objects, if you provide
    # the :associations option:
    #
    #   album.from_json(json, associations: :artist)
    #
    # You can even provide options when setting up the associated objects:
    #
    #   album.from_json(json, associations: {artist: {fields: %w'id name', associations: :tags}})
    #
    # Note that active_support/json makes incompatible changes to the to_json API,
    # and breaks some aspects of the json_serializer plugin. You can undo the damage
    # done by active_support/json by doing:
    #
    #   module ActiveSupportBrokenJSONFix
    #     def to_json(options = {})
    #       JSON.generate(self)
    #     end
    #   end
    #   Array.send(:prepend, ActiveSupportBrokenJSONFix)
    #   Hash.send(:prepend, ActiveSupportBrokenJSONFix)
    #
    # Note that this will probably cause active_support/json to no longer work
    # correctly in some cases.
    #
    # Usage:
    #
    #   # Add JSON output capability to all model subclass instances (called before loading subclasses)
    #   Sequel::Model.plugin :json_serializer
    #
    #   # Add JSON output capability to Album class instances
    #   Album.plugin :json_serializer
    module JsonSerializer
      # Set up the column readers to do deserialization and the column writers
      # to save the value in deserialized_values.
      def self.configure(model, opts=OPTS)
        model.instance_exec do
          @json_serializer_opts = (@json_serializer_opts || OPTS).merge(opts)
        end
      end

      # SEQUEL6: Remove
      # :nocov:
      class Literal
        def initialize(json)
          @json = json
        end

        def to_json(*a)
          @json
        end
      end
      # :nocov:
      Sequel::Deprecation.deprecate_constant(self, :Literal)

      # Convert the given object to a JSON data structure using the given arguments.
      def self.object_to_json_data(obj, *args, &block)
        if obj.is_a?(Array)
          obj.map{|x| object_to_json_data(x, *args, &block)}
        else
          if obj.respond_to?(:to_json_data)
            obj.to_json_data(*args, &block)
          else
            begin
              Sequel.parse_json(Sequel.object_to_json(obj, *args, &block))
            # :nocov:
            rescue Sequel.json_parser_error_class
              # Support for old Ruby code that only supports parsing JSON object/array
              Sequel.parse_json(Sequel.object_to_json([obj], *args, &block))[0]
            # :nocov:
            end
          end
        end
      end

      module ClassMethods
        # The default opts to use when serializing model objects to JSON.
        attr_reader :json_serializer_opts

        # Freeze json serializer opts when freezing model class
        def freeze
          @json_serializer_opts.freeze.each_value do |v|
            v.freeze if v.is_a?(Array) || v.is_a?(Hash)
          end

          super
        end

        # Attempt to parse a single instance from the given JSON string,
        # with options passed to InstanceMethods#from_json_node.
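        #
        # For example:
        #
        #   Album.from_json('{"name":"RF"}', fields: %w'name')
        #   # => Album instance with only the name column set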
        def from_json(json, opts=OPTS)
          v = Sequel.parse_json(json)
          case v
          when self
            v
          when Hash
            new.from_json_node(v, opts)
          else
            raise Error, "parsed json doesn't return a hash or instance of #{self}"
          end
        end

        # Attempt to parse an array of instances from the given JSON string,
        # with options passed to InstanceMethods#from_json_node.
        def array_from_json(json, opts=OPTS)
          v = Sequel.parse_json(json)
          if v.is_a?(Array)
            raise(Error, 'parsed json returned an array containing non-hashes') unless v.all?{|ve| ve.is_a?(Hash) || ve.is_a?(self)}
            v.map{|ve| ve.is_a?(self) ? ve : new.from_json_node(ve, opts)}
          else
            raise(Error, 'parsed json did not return an array')
          end
        end

        Plugins.inherited_instance_variables(self, :@json_serializer_opts=>lambda do |json_serializer_opts|
          opts = {}
          json_serializer_opts.each{|k, v| opts[k] = (v.is_a?(Array) || v.is_a?(Hash)) ? v.dup : v}
          opts
        end)

        Plugins.def_dataset_methods(self, :to_json)
      end

      module InstanceMethods
        # Parse the provided JSON, which should return a hash,
        # and process the hash with from_json_node.
        def from_json(json, opts=OPTS)
          from_json_node(Sequel.parse_json(json), opts)
        end

        # Using the provided hash, update the instance with data contained in the hash. By default, just
        # calls set with the hash values.
        #
        # Options:
        # :associations :: Indicates that the associations cache should be updated by creating
        #                  a new associated object using data from the hash. Should be a Symbol
        #                  for a single association, an array of symbols for multiple associations,
        #                  or a hash with symbol keys and dependent association option hash values.
        # :fields :: Changes the behavior to call set_fields using the provided fields, instead of calling set.
        def from_json_node(hash, opts=OPTS)
          unless hash.is_a?(Hash)
            raise Error, "parsed json doesn't return a hash"
          end

          populate_associations = {}

          if assocs = opts[:associations]
            assocs = case assocs
            when Symbol
              {assocs=>OPTS}
            when Array
              assocs_tmp = {}
              assocs.each{|v| assocs_tmp[v] = OPTS}
              assocs_tmp
            when Hash
              assocs
            else
              raise Error, ":associations should be Symbol, Array, or Hash if present"
            end

            assocs.each do |assoc, assoc_opts|
              if assoc_values = hash.delete(assoc.to_s)
                unless r = model.association_reflection(assoc)
                  raise Error, "Association #{assoc} is not defined for #{model}"
                end

                populate_associations[assoc] = if r.returns_array?
                  raise Error, "Attempt to populate array association with a non-array" unless assoc_values.is_a?(Array)
                  assoc_values.map{|v| v.is_a?(r.associated_class) ? v : r.associated_class.new.from_json_node(v, assoc_opts)}
                else
                  raise Error, "Attempt to populate non-array association with an array" if assoc_values.is_a?(Array)
                  assoc_values.is_a?(r.associated_class) ? assoc_values : r.associated_class.new.from_json_node(assoc_values, assoc_opts)
                end
              end
            end
          end

          if fields = opts[:fields]
            set_fields(hash, fields, opts)
          else
            set(hash)
          end

          populate_associations.each do |assoc, values|
            associations[assoc] = values
          end

          self
        end

        # Set the json serialization options that will be used by default
        # in future calls to +to_json+. This is designed for cases where
        # the model object will be used inside another data structure
        # which to_json is called on, and as such will not allow passing
        # of arguments to +to_json+.
        #
        # Example:
        #
        #   obj.json_serializer_opts(only: :name)
        #   [obj].to_json # => '[{"name":"..."}]'
        def json_serializer_opts(opts=OPTS)
          @json_serializer_opts = (@json_serializer_opts||OPTS).merge(opts)
        end

        # Return a string in JSON format.
        # Accepts the following
        # options:
        #
        # :except :: Symbol or Array of Symbols of columns not
        #            to include in the JSON output.
        # :include :: Symbol, Array of Symbols, or a Hash with
        #             Symbol keys and Hash values specifying
        #             associations or other non-column attributes
        #             to include in the JSON output. Using a nested
        #             hash, you can pass options to associations
        #             to affect the JSON used for associated objects.
        # :only :: Symbol or Array of Symbols of columns to only
        #          include in the JSON output, ignoring all other
        #          columns.
        # :root :: Qualify the JSON with the name of the object. If a
        #          string is given, use the string as the key, otherwise
        #          use an underscored version of the model's name.
        def to_json(*a)
          opts = model.json_serializer_opts
          opts = opts.merge(@json_serializer_opts) if @json_serializer_opts
          if (arg_opts = a.first).is_a?(Hash)
            opts = opts.merge(arg_opts)
            a = []
          end

          vals = values
          cols = if only = opts[:only]
            Array(only)
          else
            vals.keys - Array(opts[:except])
          end

          h = {}

          cols.each{|c| h[c.to_s] = get_column_value(c)}

          if inc = opts[:include]
            if inc.is_a?(Hash)
              inc.each do |k, v|
                if k.is_a?(Sequel::SQL::AliasedExpression)
                  key_name = k.alias.to_s
                  k = k.expression
                else
                  key_name = k.to_s
                end

                v = v.empty? ? [] : [v]
                h[key_name] = JsonSerializer.object_to_json_data(public_send(k), *v)
              end
            else
              Array(inc).each do |c|
                if c.is_a?(Sequel::SQL::AliasedExpression)
                  key_name = c.alias.to_s
                  c = c.expression
                else
                  key_name = c.to_s
                end

                h[key_name] = JsonSerializer.object_to_json_data(public_send(c))
              end
            end
          end

          if root = opts[:root]
            unless root.is_a?(String)
              root = model.send(:underscore, model.send(:demodulize, model.to_s))
            end
            h = {root => h}
          end

          h = yield h if defined?(yield)
          Sequel.object_to_json(h, *a)
        end

        # Convert the receiver to a JSON data structure using the given arguments.
        def to_json_data(*args, &block)
          if block
            to_json(*args){|x| return block.call(x)}
          else
            to_json(*args){|x| return x}
          end
        end
      end

      module DatasetMethods
        # Store default options used when calling to_json on this dataset.
        # These options take precedence over the class level options,
        # and can be overridden by passing options directly to to_json.
        def json_serializer_opts(opts=OPTS)
          clone(:json_serializer_opts=>opts)
        end

        # Return a JSON string representing an array of all objects in
        # this dataset. Takes the same options as the instance
        # method, and passes them to every instance. Additionally,
        # respects the following options:
        #
        # :array :: An array of instances. If this is not provided,
        #           calls #all on the receiver to get the array.
        # :instance_block :: A block to pass to #to_json for each
        #                    value in the dataset (or :array option).
        # :root :: If set to :collection, wraps the collection
        #          in a root object using the pluralized, underscored model
        #          name as the key. If set to :instance, only wraps
        #          the instances in a root object. If set to :both,
        #          wraps both the collection and instances in a root
        #          object. If set to a string, wraps the collection in
        #          a root object using the string as the key.
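        #
        # For example:
        #
        #   Album.where(artist_id: 1).to_json(root: :collection)
        #   # => '{"albums":[{"id":1,"name":"RF","artist_id":1}]}'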
        def to_json(*a)
          opts = model.json_serializer_opts
          if ds_opts = @opts[:json_serializer_opts]
            opts = opts.merge(ds_opts)
          end
          if (arg = a.first).is_a?(Hash)
            opts = opts.merge(arg)
            a = []
          end

          case collection_root = opts[:root]
          when nil, false, :instance
            collection_root = false
          else
            opts = opts.dup
            unless collection_root == :both
              opts.delete(:root)
            end
            unless collection_root.is_a?(String)
              collection_root = model.send(:pluralize, model.send(:underscore, model.send(:demodulize, model.to_s)))
            end
          end

          res = if row_proc || @opts[:eager_graph]
            array = if opts[:array]
              opts = opts.dup
              opts.delete(:array)
            else
              all
            end
            JsonSerializer.object_to_json_data(array, opts, &opts[:instance_block])
          else
            all
          end

          res = {collection_root => res} if collection_root
          res = yield res if defined?(yield)

          Sequel.object_to_json(res, *a)
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/lazy_attributes.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The lazy_attributes plugin allows users to easily set that some attributes
    # should not be loaded by default when loading model objects. If the attribute
    # is needed after the instance has been retrieved, a database query is made to
    # retrieve the value of the attribute.
    #
    # This plugin depends on the tactical_eager_loading plugin, and allows you to
    # eagerly load lazy attributes for all objects retrieved with the current object.
    # So the following code should issue one query to get the albums and one query to
    # get the reviews for all of those albums:
    #
    #   Album.plugin :lazy_attributes, :review
    #   Album.where{id < 100}.all do |a|
    #     a.review
    #   end
    #
    #   # You can specify multiple columns to lazily load:
    #   Album.plugin :lazy_attributes, :review, :tracklist
    #
    # Note that by default on databases that support RETURNING,
    # using explicit column selections will cause instance creations
    # to use two queries (insert and refresh) instead of a single
    # query using RETURNING. You can use the insert_returning_select
    # plugin to automatically use RETURNING for instance creations
    # for models using the lazy_attributes plugin.
    module LazyAttributes
      # Lazy attributes requires the tactical_eager_loading plugin
      def self.apply(model, *attrs)
        model.plugin :tactical_eager_loading
      end

      # Set the attributes given as lazy attributes
      def self.configure(model, *attrs)
        model.lazy_attributes(*attrs) unless attrs.empty?
      end

      module ClassMethods
        # Freeze lazy attributes module when freezing model class.
        def freeze
          @lazy_attributes_module.freeze if @lazy_attributes_module

          super
        end

        # Remove the given attributes from the list of columns selected by default.
        # For each attribute given, create an accessor method that allows a lazy
        # lookup of the attribute. Each attribute should be given as a symbol.
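        #
        # For example:
        #
        #   Album.lazy_attributes :review
        #   Album[1].review # issues a query for just the review column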
        def lazy_attributes(*attrs)
          unless select = dataset.opts[:select]
            select = dataset.columns.map{|c| Sequel.qualify(dataset.first_source, c)}
          end
          db_schema = @db_schema
          set_dataset(dataset.select(*select.reject{|c| attrs.include?(dataset.send(:_hash_key_symbol, c))}))
          @db_schema = db_schema
          attrs.each{|a| define_lazy_attribute_getter(a)}
        end

        private

        # Add a lazy attribute getter method to the lazy_attributes_module. Options:
        # :dataset :: The base dataset to use for the lazy attribute lookup
        # :table :: The table name to use to qualify the attribute and primary key columns.
        def define_lazy_attribute_getter(a, opts=OPTS)
          include(@lazy_attributes_module ||= Module.new) unless @lazy_attributes_module
          @lazy_attributes_module.class_eval do
            define_method(a) do
              if !values.has_key?(a) && !new?
                lazy_attribute_lookup(a, opts)
              else
                super()
              end
            end
            alias_method(a, a)
          end
        end
      end

      module InstanceMethods
        private

        # If the model was selected with other model objects, eagerly load the
        # attribute for all of those objects. If not, query the database for
        # the attribute for just the current object. Return the value of
        # the attribute for the current object.
        def lazy_attribute_lookup(a, opts=OPTS)
          table = opts[:table] || model.table_name
          selection = Sequel.qualify(table, a)

          if base_ds = opts[:dataset]
            ds = base_ds.where(qualified_pk_hash(table))
          else
            base_ds = model.dataset
            ds = this
          end

          if frozen?
            return ds.get(selection)
          end

          if retrieved_with
            primary_key = model.primary_key
            composite_pk = true if primary_key.is_a?(Array)
            id_map = {}
            retrieved_with.each{|o| id_map[o.pk] = o unless o.values.has_key?(a) || o.frozen?}
            predicate_key = composite_pk ? primary_key.map{|k| Sequel.qualify(table, k)} : Sequel.qualify(table, primary_key)
            base_ds.
              select(*(Array(primary_key).map{|k| Sequel.qualify(table, k)} + [selection])).
              where(predicate_key=>id_map.keys).
              naked.
              each do |row|
              obj = id_map[composite_pk ? row.values_at(*primary_key) : row[primary_key]]
              if obj && !obj.values.has_key?(a)
                obj.values[a] = row[a]
              end
            end
          end

          values[a] = ds.get(selection) unless values.has_key?(a)
          values[a]
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/list.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The list plugin allows for model instances to be part of an ordered list,
    # based on a position field in the database. It can either consider all
    # rows in the table as being from the same list, or you can specify scopes
    # so that multiple lists can be kept in the same table.
    #
    # Basic Example:
    #
    #   class Item < Sequel::Model(:items)
    #     plugin :list # will use :position field for position
    #     plugin :list, field: :pos # will use :pos field for position
    #   end
    #
    #   item = Item[1]
    #
    #   # Get the next or previous item in the list
    #
    #   item.next
    #   item.prev
    #
    #   # Modify the item's position, which may require modifying other items in
    #   # the same list
    #
    #   item.move_to(3)
    #   item.move_to_top
    #   item.move_to_bottom
    #   item.move_up
    #   item.move_down
    #
    # You can provide a <tt>:scope</tt> option to scope the list. This option
    # can be a symbol or array of symbols specifying column name(s), or a proc
    # that accepts a model instance and returns a dataset representing the list
    # the object is in. You will need to provide a <tt>:scope</tt> option if
    # the model's dataset uses a subquery (such as when using the class_table_inheritance
    # plugin).
    #
    # For example, if each item has a +user_id+ field, and you want every user
    # to have their own list:
    #
    #   Item.plugin :list, scope: :user_id
    #
    # Note that using this plugin modifies the order of the model's dataset to
    # sort by the position and scope fields. Also note that this plugin is subject to
    # race conditions, and is not safe when concurrent modifications are made
    # to the same list.
    #
    # Note that by default, unlike ruby arrays, the list plugin assumes the first
    # entry in the list has position 1, not position 0.
    #
    # You can change this by providing an integer <tt>:top</tt> option:
    #
    #   Item.plugin :list, top: 0
    #
    # Copyright (c) 2007-2010 Sharon Rosner, Wayne E. Seguin, Aman Gupta, Adrian Madrid, Jeremy Evans
    module List
      # Set the +position_field+, +scope_proc+ and +top_of_list+ attributes for the model,
      # using the <tt>:field</tt>, <tt>:scope</tt>, and <tt>:top</tt> options, respectively.
      # The <tt>:scope</tt> option can be a symbol, array of symbols, or a proc that
      # accepts a model instance and returns a dataset representing the list.
      # Also, modify the model dataset's order to order by the position and scope fields.
      def self.configure(model, opts = OPTS)
        model.position_field = opts[:field] || :position
        model.dataset = model.dataset.order_prepend(model.position_field)
        model.instance_exec do
          @top_of_list = opts[:top] || 1
        end

        model.scope_proc = case scope = opts[:scope]
        when Symbol
          model.dataset = model.dataset.order_prepend(scope)
          proc{|obj| obj.model.where(scope=>obj.public_send(scope))}
        when Array
          model.dataset = model.dataset.order_prepend(*scope)
          proc{|obj| obj.model.where(scope.map{|s| [s, obj.get_column_value(s)]})}
        else
          scope
        end
      end

      module ClassMethods
        # The column name holding the position in the list, as a symbol.
        attr_accessor :position_field

        # A proc that scopes the dataset, so that there can be multiple positions
        # in the list, but the positions are unique with the scoped dataset. This
        # proc should accept an instance and return a dataset representing the list.
        attr_accessor :scope_proc

        # An Integer to use as the position of the top of the list. Defaults to 1.
        attr_reader :top_of_list

        Plugins.inherited_instance_variables(self, :@position_field=>nil, :@scope_proc=>nil, :@top_of_list=>nil)
      end

      module InstanceMethods
        # The model object at the given position in the list containing this instance.
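        #
        # For example (model hypothetical):
        #
        #   item.at_position(1) # the Item at the top of item's list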
        def at_position(p)
          list_dataset.first(position_field => p)
        end

        # When destroying an instance, move all entries after the instance down
        # one position, so that there aren't any gaps
        def after_destroy
          super

          f = Sequel[position_field]
          list_dataset.where(f > position_value).update(f => f - 1)
        end

        # Find the last position in the list containing this instance.
        def last_position
          list_dataset.max(position_field).to_i
        end

        # A dataset that represents the list containing this instance.
        def list_dataset
          model.scope_proc ? model.scope_proc.call(self) : model.dataset
        end

        # Move this instance down the given number of places in the list,
        # or 1 place if no argument is specified.
        def move_down(n = 1)
          move_to(position_value + n)
        end

        # Move this instance to the given place in the list.  If the target
        # position is greater than the last list position (or lp, if given),
        # uses the last list position.  If the target position is less than
        # the top list position, uses the top list position.
        def move_to(target, lp = nil)
          current = position_value
          if target != current
            checked_transaction do
              ds = list_dataset
              op, ds = if target < current
                target = model.top_of_list if target < model.top_of_list
                [:+, ds.where(position_field=>target...current)]
              else
                lp ||= last_position
                target = lp if target > lp
                [:-, ds.where(position_field=>(current + 1)..target)]
              end
              ds.update(position_field => Sequel::SQL::NumericExpression.new(op, position_field, 1))
              update(position_field => target)
            end
          end
          self
        end

        # Move this instance to the bottom (last position) of the list.
        def move_to_bottom
          lp = last_position
          move_to(lp, lp)
        end

        # Move this instance to the top (first position, usually position 1) of the list.
        def move_to_top
          move_to(model.top_of_list)
        end

        # Move this instance the given number of places up in the list, or 1 place
        # if no argument is specified.
        def move_up(n = 1)
          move_to(position_value - n)
        end

        # The model instance the given number of places below this model instance
        # in the list, or 1 place below if no argument is given.
        def next(n = 1)
          n == 0 ? self : at_position(position_value + n)
        end

        # The value of the model's position field for this instance.
        def position_value
          get_column_value(position_field)
        end

        # The model instance the given number of places above this model instance
        # in the list, or 1 place above if no argument is given.
        def prev(n = 1)
          self.next(n * -1)
        end

        # Set the value of the position_field to the maximum value plus 1 unless the
        # position field already has a value.
        def before_validation
          unless get_column_value(position_field)
            set_column_value("#{position_field}=", list_dataset.max(position_field).to_i+1)
          end
          super
        end

        private

        # The model's position field, an instance method for ease of use.
        def position_field
          model.position_field
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/many_through_many.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The many_through_many plugin allows you to create an association using multiple join tables.
    # For example, assume the following associations:
    #
    #   Artist.many_to_many :albums
    #   Album.many_to_many :tags
    #
    # The many_through_many plugin would allow this:
    #
    #   Artist.plugin :many_through_many
    #   Artist.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums_tags, :album_id, :tag_id]]
    #
    # Which will give you the tags for all of the artist's albums.
    #
    # Let's break down the 2nd argument of the many_through_many call:
    #
    #   [[:albums_artists, :artist_id, :album_id],
    #    [:albums_tags, :album_id, :tag_id]]
    #
    # This argument is an array of arrays with three elements.  Each entry in the main array represents a JOIN in SQL:
    #
    # first element :: represents the name of the table to join.
    # second element :: represents the column used to join to the previous table.
    # third element :: represents the column used to join to the next table.
    #
    # So the "Artist.many_through_many :tags" is translated into something similar to:
    #
    #   FROM artists
    #   JOIN albums_artists ON (artists.id = albums_artists.artist_id)
    #   JOIN albums_tags ON (albums_artists.album_id = albums_tags.album_id)
    #   JOIN tags ON (albums_tags.tag_id = tags.id)
    #
    # The "artists.id" and "tags.id" criteria come from other association options (defaulting to the primary keys of the current and
    # associated tables), but hopefully you can see how each argument in the array is used in the JOIN clauses.  Note that you do
    # not need to add an entry for the final table (tags in this example), as that comes from the associated class.
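    #
    # With the association defined, usage is the same as for any other
    # association (a sketch, assuming the artists/albums/tags schema above):
    #
    #   artist = Artist[1]
    #   artist.tags              # the tags for all of the artist's albums
    #   Artist.eager(:tags).all  # eagerly load the tags for all artists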
# # Here are some more examples: # # # Same as Artist.many_to_many :albums # Artist.many_through_many :albums, [[:albums_artists, :artist_id, :album_id]] # # # All artists that are associated to any album that this artist is associated to # Artist.many_through_many :artists, [[:albums_artists, :artist_id, :album_id], [:albums_artists, :album_id, :artist_id]] # # # All albums by artists that are associated to any album that this artist is associated to # Artist.many_through_many :artist_albums, [[:albums_artists, :artist_id, :album_id], # [:albums_artists, :album_id, :artist_id], [:albums_artists, :artist_id, :album_id]], # class: :Album # # # All tracks on albums by this artist (also could be a many_to_many) # Artist.many_through_many :tracks, [[:albums_artists, :artist_id, :album_id]], # right_primary_key: :album_id # # Often you don't want the current object to appear in the array of associated objects. This is easiest to handle via an :after_load hook: # # Artist.many_through_many :artists, [[:albums_artists, :artist_id, :album_id], [:albums_artists, :album_id, :artist_id]], # after_load: lambda{|artist, associated_artists| associated_artists.delete(artist)} # # You can also handle it by adding a dataset block that excludes the current record (so it won't be retrieved at all), but # that won't work when eagerly loading, which is why the :after_load proc is recommended instead. # # It's also common to not want duplicate records, in which case the :distinct option can be used: # # Artist.many_through_many :artists, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_artists, :album_id, :artist_id]], # distinct: true # # In addition to many_through_many, this plugin also adds one_through_many, for an association to a single object through multiple join tables. # This is useful if there are unique constraints on the foreign keys in the join tables that reference back to the current table, or if you want # to set an order on the association and just want the first record. # # Usage: # # # Make all model subclasses support many_through_many associations # Sequel::Model.plugin :many_through_many # # # Make the Album class support many_through_many associations # Album.plugin :many_through_many module ManyThroughMany # The AssociationReflection subclass for many_through_many associations. class ManyThroughManyAssociationReflection < Sequel::Model::Associations::ManyToManyAssociationReflection Sequel.synchronize{Sequel::Model::Associations::ASSOCIATION_TYPES[:many_through_many] = self} # many_through_many and one_through_many associations can be clones def cloneable?(ref) ref[:type] == :many_through_many || ref[:type] == :one_through_many end # The default associated key alias(es) to use when eager loading # associations via eager. def default_associated_key_alias self[:uses_left_composite_keys] ? (0...self[:through].first[:left].length).map{|i| :"x_foreign_key_#{i}_x"} : :x_foreign_key_x end %w'associated_key_table predicate_key edges final_edge final_reverse_edge reverse_edges'.each do |meth| class_eval(<<-END, __FILE__, __LINE__+1) def #{meth} cached_fetch(:#{meth}){calculate_edges[:#{meth}]} end END end FINALIZE_SETTINGS = superclass::FINALIZE_SETTINGS.merge( :associated_key_table=>:associated_key_table, :edges=>:edges, :final_edge=>:final_edge, :final_reverse_edge=>:final_reverse_edge, :reverse_edges=>:reverse_edges ).freeze def finalize_settings FINALIZE_SETTINGS end # The alias for the first join table. 
def join_table_alias final_reverse_edge[:alias] end # Many through many associations don't have a reciprocal def reciprocal nil end # Whether a separate query should be used for each join table. def separate_query_per_table? self[:separate_query_per_table] end private def _associated_dataset ds = associated_class if separate_query_per_table? ds = ds.dataset else (reverse_edges + [final_reverse_edge]).each do |t| h = {:qualify=>:deep} if t[:alias] != t[:table] h[:table_alias] = t[:alias] end ds = ds.join(t[:table], Array(t[:left]).zip(Array(t[:right])), h) end end ds end # Make sure to use unique table aliases when lazy loading or eager loading def calculate_reverse_edge_aliases(reverse_edges) aliases = [associated_class.table_name] reverse_edges.each do |e| table_alias = e[:table] if aliases.include?(table_alias) i = 0 table_alias = while true ta = :"#{table_alias}_#{i}" break ta unless aliases.include?(ta) i += 1 end end aliases.push(e[:alias] = table_alias) end end # Transform the :through option into a list of edges and reverse edges to use to join tables when loading the association. def calculate_edges es = [{:left_table=>self[:model].table_name, :left_key=>self[:left_primary_key_column]}] self[:through].each do |t| es.last.merge!(:right_key=>t[:left], :right_table=>t[:table], :join_type=>t[:join_type]||self[:graph_join_type], :conditions=>(t[:conditions]||[]).to_a, :block=>t[:block]) es.last[:only_conditions] = t[:only_conditions] if t.include?(:only_conditions) es << {:left_table=>t[:table], :left_key=>t[:right]} end es.last.merge!(:right_key=>right_primary_key, :right_table=>associated_class.table_name) edges = es.map do |e| h = {:table=>e[:right_table], :left=>e[:left_key], :right=>e[:right_key], :conditions=>e[:conditions], :join_type=>e[:join_type], :block=>e[:block]} h[:only_conditions] = e[:only_conditions] if e.include?(:only_conditions) h end reverse_edges = es.reverse.map{|e| {:table=>e[:left_table], :left=>e[:left_key], :right=>e[:right_key]}} reverse_edges.pop calculate_reverse_edge_aliases(reverse_edges) final_reverse_edge = reverse_edges.pop final_reverse_alias = final_reverse_edge[:alias] h = {:final_edge=>edges.pop, :final_reverse_edge=>final_reverse_edge, :edges=>edges, :reverse_edges=>reverse_edges, :predicate_key=>qualify(final_reverse_alias, edges.first[:right]), :associated_key_table=>final_reverse_edge[:alias], } h.each{|k, v| cached_set(k, v)} h end def filter_by_associations_limit_key fe = edges.first Array(qualify(fe[:table], fe[:right])) + Array(qualify(associated_class.table_name, associated_class.primary_key)) end end class OneThroughManyAssociationReflection < ManyThroughManyAssociationReflection Sequel.synchronize{Sequel::Model::Associations::ASSOCIATION_TYPES[:one_through_many] = self} include Sequel::Model::Associations::SingularAssociationReflection end module ClassMethods # Create a many_through_many association. Arguments: # name :: Same as associate, the name of the association. # through :: The tables and keys to join between the current table and the associated table. # Must be an array, with elements that are either 3 element arrays, or hashes with keys :table, :left, and :right. # The required entries in the array/hash are: # :table (first array element) :: The name of the table to join. # :left (middle array element) :: The key joining the table to the previous table. Can use an # array of symbols for a composite key association. # :right (last array element) :: The key joining the table to the next table. 
Can use an
      #                                            array of symbols for a composite key association.
      #            If a hash is provided, the following keys are respected when using eager_graph:
      #            :db :: The Database containing the table.  This changes lookup to use a separate query for each join table.
      #            :block :: A proc to use as the block argument to join.
      #            :conditions :: Extra conditions to add to the JOIN ON clause.  Must be a hash or array of two pairs.
      #            :join_type :: The join type to use for the join, defaults to :left_outer.
      #            :only_conditions :: Conditions to use for the join instead of the ones specified by the keys.
      # opts :: The options for the association.  Takes the same options as many_to_many.
      def many_through_many(name, through, opts=OPTS, &block)
        associate(:many_through_many, name, opts.merge(through.is_a?(Hash) ? through : {:through=>through}), &block)
      end

      # Creates a one_through_many association.  See many_through_many for arguments.
      def one_through_many(name, through, opts=OPTS, &block)
        associate(:one_through_many, name, opts.merge(through.is_a?(Hash) ? through : {:through=>through}), &block)
      end

      private

      # Create the association methods and :eager_loader and :eager_grapher procs.
      def def_many_through_many(opts)
        one_through_many = opts[:type] == :one_through_many
        opts[:read_only] = true
        if opts[:uniq]
          opts[:after_load] ||= []
          opts[:after_load].unshift(:array_uniq!)
        end
        opts[:cartesian_product_number] ||= one_through_many ? 0 : 2
        separate_query_per_table = false
        through = opts[:through] = opts[:through].map do |e|
          case e
          when Array
            raise(Error, "array elements of the through option/argument for many_through_many associations must have at least three elements") unless e.length == 3
            {:table=>e[0], :left=>e[1], :right=>e[2]}
          when Hash
            raise(Error, "hash elements of the through option/argument for many_through_many associations must contain :table, :left, and :right keys") unless e[:table] && e[:left] && e[:right]
            separate_query_per_table = true if e[:db]
            e
          else
            raise(Error, "the through option/argument for many_through_many associations must be an enumerable of arrays or hashes")
          end
        end
        opts[:separate_query_per_table] = separate_query_per_table

        left_key = opts[:left_key] = opts[:through].first[:left]
        opts[:left_keys] = Array(left_key)
        uses_lcks = opts[:uses_left_composite_keys] = left_key.is_a?(Array)
        left_pk = (opts[:left_primary_key] ||= self.primary_key)
        raise(Error, "no primary key specified for #{inspect}") unless left_pk
        opts[:eager_loader_key] = left_pk unless opts.has_key?(:eager_loader_key)
        opts[:left_primary_keys] = Array(left_pk)
        lpkc = opts[:left_primary_key_column] ||= left_pk
        lpkcs = opts[:left_primary_key_columns] ||= Array(lpkc)
        opts[:left_key_alias] ||= opts.default_associated_key_alias

        if separate_query_per_table
          opts[:use_placeholder_loader] = false
          opts[:allow_eager_graph] = false
          opts[:allow_filtering_by] = false
          opts[:eager_limit_strategy] = nil

          opts[:dataset] ||= proc do |r|
            def_db = r.associated_class.db
            vals = uses_lcks ? [lpkcs.map{|k| get_column_value(k)}] : get_column_value(left_pk)

            has_results = through.each do |edge|
              ds = (edge[:db] || def_db).from(edge[:table]).where(edge[:left]=>vals)
              ds = ds.where(edge[:conditions]) if edge[:conditions]
              right = edge[:right]
              vals = ds.select_map(right)
              if right.is_a?(Array)
                vals.delete_if{|v| v.any?(&:nil?)}
              else
                vals.delete(nil)
              end
              break if vals.empty?
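              # If the loop above exited early via break, through.each returned
              # nil, so has_results is nil and the dataset built below is
              # cloned with :no_results set.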
end ds = r.associated_dataset.where(opts.right_primary_key=>vals) ds = ds.clone(:no_results=>true) unless has_results ds end opts[:eager_loader] ||= proc do |eo| h = eo[:id_map] assign_singular = opts.assign_singular? uses_rcks = opts.right_primary_key.is_a?(Array) rpk = uses_rcks ? opts.right_primary_keys : opts.right_primary_key name = opts[:name] def_db = opts.associated_class.db join_map = h run_query = through.each do |edge| ds = (edge[:db] || def_db).from(edge[:table]) ds = ds.where(edge[:conditions]) if edge[:conditions] left = edge[:left] right = edge[:right] prev_map = join_map join_map = ds.where(left=>join_map.keys).select_hash_groups(right, left) if right.is_a?(Array) join_map.delete_if{|v,| v.any?(&:nil?)} else join_map.delete(nil) end break if join_map.empty? join_map.each_value do |vs| vs.replace(vs.flat_map{|v| prev_map[v]}) vs.uniq! end end eo = Hash[eo] if run_query eo[:loader] = false eo[:right_keys] = join_map.keys else eo[:no_results] = true end opts[:model].eager_load_results(opts, eo) do |assoc_record| rpkv = if uses_rcks assoc_record.values.values_at(*rpk) else assoc_record.values[rpk] end objects = join_map[rpkv] if assign_singular objects.each do |object| object.associations[name] ||= assoc_record end else objects.each do |object| object.associations[name].push(assoc_record) end end end end else opts[:dataset] ||= opts.association_dataset_proc opts[:eager_loader] ||= opts.method(:default_eager_loader) end join_type = opts[:graph_join_type] select = opts[:graph_select] graph_block = opts[:graph_block] only_conditions = opts[:graph_only_conditions] use_only_conditions = opts.include?(:graph_only_conditions) conditions = opts[:graph_conditions] opts[:eager_grapher] ||= proc do |eo| ds = eo[:self] iq = eo[:implicit_qualifier] egls = eo[:limit_strategy] if egls && egls != :ruby associated_key_array = opts.associated_key_array orig_egds = egds = eager_graph_dataset(opts, eo) opts.reverse_edges.each{|t| egds = egds.join(t[:table], Array(t[:left]).zip(Array(t[:right])), :table_alias=>t[:alias], :qualify=>:deep)} ft = opts.final_reverse_edge egds = egds.join(ft[:table], Array(ft[:left]).zip(Array(ft[:right])), :table_alias=>ft[:alias], :qualify=>:deep). select_all(egds.first_source). select_append(*associated_key_array) egds = opts.apply_eager_graph_limit_strategy(egls, egds) ds.graph(egds, associated_key_array.map(&:alias).zip(Array(lpkcs)) + conditions, :qualify=>:deep, :table_alias=>eo[:table_alias], :implicit_qualifier=>iq, :join_type=>eo[:join_type]||join_type, :join_only=>eo[:join_only], :from_self_alias=>eo[:from_self_alias], :select=>select||orig_egds.columns, &graph_block) else opts.edges.each do |t| ds = ds.graph(t[:table], t.fetch(:only_conditions, (Array(t[:right]).zip(Array(t[:left])) + t[:conditions])), :select=>false, :table_alias=>ds.unused_table_alias(t[:table]), :join_type=>eo[:join_type]||t[:join_type], :join_only=>eo[:join_only], :qualify=>:deep, :implicit_qualifier=>iq, :from_self_alias=>eo[:from_self_alias], &t[:block]) iq = nil end fe = opts.final_edge ds.graph(opts.associated_class.dataset, use_only_conditions ? only_conditions : (Array(opts.right_primary_key).zip(Array(fe[:left])) + conditions), :select=>select, :table_alias=>eo[:table_alias], :qualify=>:deep, :join_type=>eo[:join_type]||join_type, :join_only=>eo[:join_only], &graph_block) end end end # Use def_many_through_many, since they share pretty much the same code. 
        def def_one_through_many(opts)
          def_many_through_many(opts)
        end
      end

      module DatasetMethods
        private

        # Use a subquery to filter rows to those related to the given associated object
        def many_through_many_association_filter_expression(op, ref, obj)
          lpks = ref[:left_primary_key_columns]
          lpks = lpks.first if lpks.length == 1
          lpks = ref.qualify(model.table_name, lpks)
          edges = ref.edges
          first, rest = edges.first, edges[1..-1]
          ds = model.db[first[:table]].select(*Array(ref.qualify(first[:table], first[:right])))
          rest.each{|e| ds = ds.join(e[:table], e.fetch(:only_conditions, (Array(e[:right]).zip(Array(e[:left])) + e[:conditions])), :table_alias=>ds.unused_table_alias(e[:table]), :qualify=>:deep, &e[:block])}
          last_alias = if rest.empty?
            first[:table]
          else
            last_join = ds.opts[:join].last
            last_join.table_alias || last_join.table
          end

          meths = if obj.is_a?(Sequel::Dataset)
            ref.qualify(obj.model.table_name, ref.right_primary_keys)
          else
            ref.right_primary_key_methods
          end

          expr = association_filter_key_expression(ref.qualify(last_alias, Array(ref.final_edge[:left])), meths, obj)
          unless expr == SQL::Constants::FALSE
            ds = ds.where(expr).exclude(SQL::BooleanExpression.from_value_pairs(ds.opts[:select].zip([]), :OR))
            expr = SQL::BooleanExpression.from_value_pairs(lpks=>ds)
            expr = add_association_filter_conditions(ref, obj, expr)
          end

          association_filter_handle_inversion(op, expr, Array(lpks))
        end
        alias one_through_many_association_filter_expression many_through_many_association_filter_expression
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/modification_detection.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # This plugin automatically detects in-place modifications to
    # columns as well as direct modifications of the values hash.
    #
    #   class User < Sequel::Model
    #     plugin :modification_detection
    #   end
    #   user = User[1]
    #   user.a # => 'a'
    #   user.a << 'b'
    #   user.save_changes
    #   # UPDATE users SET a = 'ab' WHERE (id = 1)
    #
    # Note that for this plugin to work correctly, the column values must
    # correctly implement the #hash method, returning the same value if
    # the object is equal, and a different value if the object is not equal.
    # As this solely uses hash values to check for modification, there may
    # be cases where a modification is made and the hash value is the same,
    # resulting in a false negative.
    #
    # Note that this plugin causes a performance hit for all retrieved
    # objects, so it shouldn't be used in cases where performance is a
    # primary concern.
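    #
    # For example (a sketch, reusing the User model above), writing to the
    # values hash directly is detected the same way as an in-place mutation:
    #
    #   user.values[:a] = 'b'
    #   user.changed_columns # => [:a]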
    #
    # Usage:
    #
    #   # Make all model subclasses automatically detect column modifications
    #   Sequel::Model.plugin :modification_detection
    #
    #   # Make the Album class automatically detect column modifications
    #   Album.plugin :modification_detection
    module ModificationDetection
      module ClassMethods
        # Calculate the hashes for all of the column values, so that they
        # can be compared later to determine if the column value has changed.
        def call(_)
          v = super
          v.calculate_values_hashes
          v
        end
      end

      module InstanceMethods
        # Recalculate the column value hashes after updating.
        def after_update
          super
          recalculate_values_hashes
        end

        # Calculate the column hash values if they haven't been already calculated.
        def calculate_values_hashes
          @values_hashes || recalculate_values_hashes
        end

        # Detect which columns have been modified by comparing the cached hash
        # value to the hash of the current value.
        def changed_columns
          changed = super

          if vh = @values_hashes
            values = @values
            changed = changed.dup if frozen?
            vh.each do |c, v|
              match = values.has_key?(c) && v == values[c].hash
              if changed.include?(c)
                changed.delete(c) if match
              else
                changed << c unless match
              end
            end
          end

          changed
        end

        private

        # Recalculate the column value hashes after manually refreshing.
        def _refresh(dataset)
          super
          recalculate_values_hashes
        end

        # Recalculate the column value hashes after refreshing after saving a new object.
        def _save_refresh
          super
          recalculate_values_hashes
        end

        # Recalculate the column value hashes, caching them for later use.
        def recalculate_values_hashes
          vh = {}
          @values.each do |k,v|
            vh[k] = v.hash
          end
          @values_hashes = vh.freeze
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/mssql_optimistic_locking.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # This plugin implements an optimistic locking mechanism on Microsoft SQL Server
    # using a timestamp/rowversion column to ensure that concurrent updates are
    # detected and previous changes are not automatically overridden. This is
    # best implemented by a code example:
    #
    #   class Person < Sequel::Model
    #     plugin :mssql_optimistic_locking
    #   end
    #   p1 = Person[1]
    #   p2 = Person[1]
    #   p1.update(name: 'Jim') # works
    #   p2.update(name: 'Bob') # raises Sequel::NoExistingObject
    #
    # In order for this plugin to work, you need to make sure that the database
    # table has a column of type timestamp or rowversion. The plugin uses a default
    # name of timestamp for this column, but you can override that using the
    # :lock_column option:
    #
    #   plugin :mssql_optimistic_locking, lock_column: :column_name
    #
    # This plugin relies on the instance_filters plugin.
    module MssqlOptimisticLocking
      # Load the instance_filters plugin into the model.
      def self.apply(model, opts=OPTS)
        model.plugin :instance_filters
      end

      # Set the lock_column to the :lock_column option (default: :timestamp)
      def self.configure(model, opts=OPTS)
        model.lock_column = opts[:lock_column] || :timestamp
      end

      module ClassMethods
        # The timestamp/rowversion column containing the version for the current row.
        attr_accessor :lock_column

        Plugins.inherited_instance_variables(self, :@lock_column=>nil)
      end

      module InstanceMethods
        # Add the lock column instance filter to the object before destroying it.
        def before_destroy
          lock_column_instance_filter
          super
        end

        # Add the lock column instance filter to the object before updating it.
        def before_update
          lock_column_instance_filter
          super
        end

        private

        # Add the lock column instance filter to the object.
        def lock_column_instance_filter
          lc = model.lock_column
          instance_filter(lc=>Sequel.blob(get_column_value(lc)))
        end

        # Clear the instance filters when refreshing, so that attempting to
        # refresh after a failed save removes the previous lock column filter
        # (the new one will be added before updating).
        def _refresh(ds)
          clear_instance_filters
          super
        end

        # Remove the lock column from the columns to update.
        # SQL Server automatically updates the lock column value, and does not like
        # it to be assigned.
        def _save_update_all_columns_hash
          v = @values.dup
          cc = changed_columns
          Array(primary_key).each{|x| v.delete(x) unless cc.include?(x)}
          v.delete(model.lock_column)
          v
        end

        # Add an OUTPUT clause to fetch the updated timestamp when updating the row.
        def _update_without_checking(columns)
          ds = _update_dataset
          lc = model.lock_column
          rows = ds.clone(ds.send(:default_server_opts, :sql=>ds.output(nil, [Sequel[:inserted][lc]]).update_sql(columns))).all
          values[lc] = rows.first[lc] unless rows.empty?
          rows.length
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/nested_attributes.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The nested_attributes plugin allows you to create, update, and delete
    # associated objects directly by calling a method on the current object.
    # Nested attributes are defined using the nested_attributes class method:
    #
    #   Artist.one_to_many :albums
    #   Artist.plugin :nested_attributes
    #   Artist.nested_attributes :albums
    #
    # The nested_attributes call defines a single method, <tt><i>association</i>_attributes=</tt>,
    # (e.g. <tt>albums_attributes=</tt>).  So if you have an Artist instance:
    #
    #   a = Artist.new(name: 'YJM')
    #
    # You can create new album instances related to this artist:
    #
    #   a.albums_attributes = [{name: 'RF'}, {name: 'MO'}]
    #
    # Note that this doesn't send any queries to the database yet.  That doesn't happen till
    # you save the object:
    #
    #   a.save
    #
    # That will save the artist first, and then save both albums.  If either the artist
    # is invalid or one of the albums is invalid, none of the objects will be saved to the
    # database, and all related validation errors will be available in the artist's validation
    # errors.
    #
    # In addition to creating new associated objects, you can also update existing associated
    # objects.  You just need to make sure that the primary key field is filled in for the
    # associated object:
    #
    #   a.update(albums_attributes: [{id: 1, name: 'T'}])
    #
    # Since the primary key field is filled in, the plugin will update the album with id 1 instead
    # of creating a new album.
    #
    # If you would like to delete the associated object instead of updating it, you add a _delete
    # entry to the hash, and also pass the :destroy option when calling +nested_attributes+:
    #
    #   Artist.nested_attributes :albums, destroy: true
    #   a.update(albums_attributes: [{id: 1, _delete: true}])
    #
    # This will delete the related associated object from the database.  If you want to leave the
    # associated object in the database, but just remove it from the association, add a _remove
    # entry in the hash, and also pass the :remove option when calling +nested_attributes+:
    #
    #   Artist.nested_attributes :albums, remove: true
    #   a.update(albums_attributes: [{id: 1, _remove: true}])
    #
    # The above example was for a one_to_many association, but the plugin also works similarly
    # for other association types.  For one_to_one and many_to_one associations, you need to
    # pass a single hash instead of an array of hashes.
    #
    # This plugin is mainly designed to make it easy to use on html forms, where a single form
    # submission can contain nested attributes (and even nested attributes of those attributes).
    # You just need to name your form inputs correctly:
    #
    #   artist[name]
    #   artist[albums_attributes][0][:name]
    #   artist[albums_attributes][1][:id]
    #   artist[albums_attributes][1][:name]
    #
    # Your web stack will probably parse that into a nested hash similar to:
    #
    #   {'artist'=>{'name'=>'Y', 'albums_attributes'=>{'0'=>{'name'=>'X'}, '1'=>{'id'=>'2', 'name'=>'Z'}}}}
    #
    # Then you can do:
    #
    #   artist.update_fields(params['artist'], %w'name albums_attributes')
    #
    # Note that Rails 5+ does not use a Hash for submitted parameters, and therefore
    # the above will not work.  With Rails 5+, you have to use:
    #
    #   artist.update_fields(params.to_unsafe_h['artist'], %w'name albums_attributes')
    #
    # To save changes to the artist, create the first album and associate it to the artist,
    # and update the other existing associated album.
    #
    # You can pass options for individual nested attributes, which will override the default
    # nested attributes options for that association.  This is useful for per-call filtering
    # of the allowed fields:
    #
    #   a.set_nested_attributes(:albums, params['artist'], fields: %w'name')
    module NestedAttributes
      # Depend on the validate_associated plugin.
      def self.apply(model)
        model.plugin(:validate_associated)
      end

      module ClassMethods
        # Freeze nested_attributes_module when freezing model class.
        def freeze
          @nested_attributes_module.freeze if @nested_attributes_module

          super
        end

        # Allow nested attributes to be set for the given associations.  Options:
        # :destroy :: Allow destruction of nested records.
        # :fields :: If provided, should be an Array or proc. If it is an array,
        #            restricts the fields allowed to be modified through the
        #            association_attributes= method to the specific fields given. If it is
        #            a proc, it will be called with the associated object and should return an
        #            array of the allowable fields.
# :limit :: For *_to_many associations, a limit on the number of records # that will be processed, to prevent denial of service attacks. # :reject_if :: A proc that is called with each attribute hash before it is # passed to its associated object. If the proc returns a truthy # value, the attribute hash is ignored. # :reject_nil :: Ignore nil objects passed to nested attributes setter methods. # :remove :: Allow disassociation of nested records (can remove the associated # object from the parent object, but not destroy the associated object). # :require_modification :: Whether to require modification of nested objects when # updating or deleting them (checking that a single row was # updated). By default, uses the default require_modification # setting for the nested object. # :transform :: A proc to transform attribute hashes before they are # passed to associated object. Takes two arguments, the parent object and # the attribute hash. Uses the return value as the new attribute hash. # :unmatched_pk :: Specify the action to be taken if a primary key is # provided in a record, but it doesn't match an existing associated # object. Set to :create to create a new object with that primary # key, :ignore to ignore the record, or :raise to raise an error. # The default is :raise. # # If a block is provided, it is used to set the :reject_if option. def nested_attributes(*associations, &block) include(@nested_attributes_module ||= Module.new) unless @nested_attributes_module opts = associations.last.is_a?(Hash) ? associations.pop : OPTS reflections = associations.map{|a| association_reflection(a) || raise(Error, "no association named #{a} for #{self}")} reflections.each do |r| r[:nested_attributes] = opts.dup r[:nested_attributes][:unmatched_pk] ||= :raise r[:nested_attributes][:reject_if] ||= block def_nested_attribute_method(r) end end private # Add a nested attribute setter method to a module included in the # class. def def_nested_attribute_method(reflection) @nested_attributes_module.class_eval do meth = :"#{reflection[:name]}_attributes=" assoc = reflection[:name] define_method(meth) do |v| set_nested_attributes(assoc, v) end alias_method meth, meth end end end module InstanceMethods # Set the nested attributes for the given association. obj should be an enumerable of multiple objects # for plural associations. The opts hash can be used to override any of the default options set by # the class-level nested_attributes call. def set_nested_attributes(assoc, obj, opts=OPTS) raise(Error, "no association named #{assoc} for #{model.inspect}") unless ref = model.association_reflection(assoc) raise(Error, "nested attributes are not enabled for association #{assoc} for #{model.inspect}") unless meta = ref[:nested_attributes] return if obj.nil? && meta[:reject_nil] meta = meta.merge(opts) meta[:reflection] = ref if ref.returns_array? nested_attributes_list_setter(meta, obj) else nested_attributes_setter(meta, obj) end end private # Check that the keys related to the association are not modified inside the block. Does # not use an ensure block, so callers should be careful. 
        def nested_attributes_check_key_modifications(meta, obj)
          reflection = meta[:reflection]
          keys = reflection.associated_object_keys.map{|x| obj.get_column_value(x)}
          yield
          unless keys == reflection.associated_object_keys.map{|x| obj.get_column_value(x)}
            raise(Error, "Modifying association dependent key(s) when updating associated objects is not allowed")
          end
        end

        # Create a new associated object with the given attributes, validate
        # it when the parent is validated, and save it when the object is saved.
        # Returns the object created.
        def nested_attributes_create(meta, attributes)
          reflection = meta[:reflection]
          obj = reflection.associated_class.new
          nested_attributes_set_attributes(meta, obj, attributes)
          delay_validate_associated_object(reflection, obj)
          if reflection.returns_array?
            public_send(reflection[:name]) << obj
            obj.skip_validation_on_next_save!
            after_save_hook{public_send(reflection[:add_method], obj)}
          else
            associations[reflection[:name]] = obj

            # Because we are modifying the associations cache manually before the
            # setter is called, we still want to run the setter code even though
            # the cached value will be the same as the given value.
            @set_associated_object_if_same = true

            # Don't need to validate the object twice if :validate association option is not false
            # and don't want to validate it at all if it is false.
            if reflection[:type] == :many_to_one
              before_save_hook{public_send(reflection[:setter_method], obj.save(:validate=>false))}
            else
              after_save_hook do
                obj.skip_validation_on_next_save!
                public_send(reflection[:setter_method], obj)
              end
            end
          end
          add_reciprocal_object(reflection, obj)
          obj
        end

        # Take an array or hash of attribute hashes and set each one individually.
        # If a hash is provided, sort it by key and then use the values.
        # If there is a limit on the nested attributes for this association,
        # make sure the length of the attributes_list is not greater than the limit.
        def nested_attributes_list_setter(meta, attributes_list)
          attributes_list = attributes_list.sort.map{|k,v| v} if attributes_list.is_a?(Hash)
          if (limit = meta[:limit]) && attributes_list.length > limit
            raise(Error, "number of nested attributes (#{attributes_list.length}) exceeds the limit (#{limit})")
          end
          attributes_list.each{|a| nested_attributes_setter(meta, a)}
        end

        # Remove the given associated object from the current object. If the
        # :destroy option is given, destroy the object after disassociating it
        # (unless destroying the object would automatically disassociate it).
        # Returns the object removed.
        def nested_attributes_remove(meta, obj, opts=OPTS)
          reflection = meta[:reflection]
          if !opts[:destroy] || reflection.remove_before_destroy?
            before_save_hook do
              if reflection.returns_array?
                public_send(reflection[:remove_method], obj)
              else
                public_send(reflection[:setter_method], nil)
              end
            end
          end
          after_save_hook{obj.destroy} if opts[:destroy]
          if reflection.returns_array?
            associations[reflection[:name]].delete(obj)
          end
          obj
        end

        # Set the fields in the obj based on the association, only allowing
        # specific :fields if configured.
        def nested_attributes_set_attributes(meta, obj, attributes)
          if fields = meta[:fields]
            fields = fields.call(obj) if fields.respond_to?(:call)
            obj.set_fields(attributes, fields, :missing=>:skip)
          else
            obj.set(attributes)
          end
        end

        # Modify the associated object based on the contents of the attributes hash:
        # * If a :transform block was given to nested_attributes, use it to modify the attribute hash.
# * If a block was given to nested_attributes, call it with the attributes and return immediately if the block returns true. # * If a primary key exists in the attributes hash and it matches an associated object: # ** If _delete is a key in the hash and the :destroy option is used, destroy the matching associated object. # ** If _remove is a key in the hash and the :remove option is used, disassociated the matching associated object. # ** Otherwise, update the matching associated object with the contents of the hash. # * If a primary key exists in the attributes hash but it does not match an associated object, # either raise an error, create a new object or ignore the hash, depending on the :unmatched_pk option. # * If no primary key exists in the attributes hash, create a new object. def nested_attributes_setter(meta, attributes) if a = meta[:transform] attributes = a.call(self, attributes) end return if (b = meta[:reject_if]) && b.call(attributes) modified! reflection = meta[:reflection] klass = reflection.associated_class sym_keys = Array(klass.primary_key) str_keys = sym_keys.map(&:to_s) if (pk = attributes.values_at(*sym_keys)).all? || (pk = attributes.values_at(*str_keys)).all? pk = pk.map(&:to_s) obj = Array(public_send(reflection[:name])).find{|x| Array(x.pk).map(&:to_s) == pk} end if obj unless (require_modification = meta[:require_modification]).nil? obj.require_modification = require_modification end attributes = attributes.dup.delete_if{|k,v| str_keys.include? k.to_s} if meta[:destroy] && klass.db.send(:typecast_value_boolean, attributes.delete(:_delete) || attributes.delete('_delete')) nested_attributes_remove(meta, obj, :destroy=>true) elsif meta[:remove] && klass.db.send(:typecast_value_boolean, attributes.delete(:_remove) || attributes.delete('_remove')) nested_attributes_remove(meta, obj) else nested_attributes_update(meta, obj, attributes) end elsif pk.all? && meta[:unmatched_pk] != :create if meta[:unmatched_pk] == :raise raise(Error, "no matching associated object with given primary key (association: #{reflection[:name]}, pk: #{pk})") end else nested_attributes_create(meta, attributes) end end # Update the given object with the attributes, validating it when the # parent object is validated and saving it when the parent is saved. # Returns the object updated. def nested_attributes_update(meta, obj, attributes) nested_attributes_update_attributes(meta, obj, attributes) delay_validate_associated_object(meta[:reflection], obj) # Don't need to validate the object twice if :validate association option is not false # and don't want to validate it at all if it is false. after_save_hook{obj.save_changes(:validate=>false)} obj end # Update the attributes for the given object related to the current object through the association. 
        def nested_attributes_update_attributes(meta, obj, attributes)
          nested_attributes_check_key_modifications(meta, obj) do
            nested_attributes_set_attributes(meta, obj, attributes)
          end
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/optimistic_locking.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # This plugin implements a simple database-independent locking mechanism
    # to ensure that concurrent updates do not override changes. This is
    # best implemented by a code example:
    #
    #   class Person < Sequel::Model
    #     plugin :optimistic_locking
    #   end
    #   p1 = Person[1]
    #   p2 = Person[1]
    #   p1.update(name: 'Jim') # works
    #   p2.update(name: 'Bob') # raises Sequel::Plugins::OptimisticLocking::Error
    #
    # In order for this plugin to work, you need to make sure that the database
    # table has a +lock_version+ column (or other column you name via the lock_column
    # class level accessor) that defaults to 0.
    #
    # This plugin relies on the instance_filters plugin.
    module OptimisticLocking
      # Exception class raised when trying to update or destroy a stale object.
      Error = Sequel::NoExistingObject

      # Load the instance_filters plugin into the model.
      def self.apply(model, opts=OPTS)
        model.plugin :instance_filters
      end

      # Set the lock_column to the :lock_column option, or :lock_version if
      # that option is not given.
      def self.configure(model, opts=OPTS)
        model.lock_column = opts[:lock_column] || :lock_version
      end

      module ClassMethods
        # The column holding the version of the lock
        attr_accessor :lock_column

        Plugins.inherited_instance_variables(self, :@lock_column=>nil)
      end

      module InstanceMethods
        # Add the lock column instance filter to the object before destroying it.
        def before_destroy
          lock_column_instance_filter
          super
        end

        # Add the lock column instance filter to the object before updating it.
        def before_update
          lock_column_instance_filter
          super
        end

        private

        # Add the lock column instance filter to the object.
        def lock_column_instance_filter
          lc = model.lock_column
          instance_filter(lc=>get_column_value(lc))
        end

        # Clear the instance filters when refreshing, so that attempting to
        # refresh after a failed save removes the previous lock column filter
        # (the new one will be added before updating).
        def _refresh(ds)
          clear_instance_filters
          super
        end

        # Only update the row if it has the same lock version, and increment the
        # lock version.
        def _update_columns(columns)
          lc = model.lock_column
          lcv = get_column_value(lc)
          columns[lc] = lcv + 1
          super
          set_column_value("#{lc}=", lcv + 1)
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/pg_array_associations.rb

# frozen-string-literal: true

module Sequel
  extension :pg_array, :pg_array_ops

  module Plugins
    # This plugin allows you to create associations where the foreign keys
    # are stored in a PostgreSQL array column in one of the tables.  The
    # model with the table containing the array column has a
    # pg_array_to_many association to the associated model, and the
    # model with the table containing the primary key referenced by
    # elements in the array column has a many_to_pg_array association
    # to the associated model.
    #
    #   # Database schema:
    #   #
    #   #  tags                albums
    #   #   :id (int4) <--\     :id
    #   #   :name          \--  :tag_ids (int4[])
    #   #                       :name
    #
    #   class Album
    #     plugin :pg_array_associations
    #     pg_array_to_many :tags
    #   end
    #   class Tag
    #     plugin :pg_array_associations
    #     many_to_pg_array :albums
    #   end
    #
    # These association types work similarly to Sequel's other association
    # types, so you can use them as you would any other association.  Unlike
    # other associations, they do not support composite keys.
    #
    # One thing that is different is that the modification methods for
    # pg_array_to_many associations do not affect the database, since they
    # operate purely on the receiver.  For example:
    #
    #   album = Album[1]
    #   album.add_tag(Tag[2])
    #
    # does not save the album.  This allows you to call add_tag repeatedly
    # and then save afterward to combine all changes into a single query.  Note
    # that the many_to_pg_array association modification methods do save, so:
    #
    #   tag = Tag[2]
    #   tag.add_album(Album[1])
    #
    # will save the changes to the album.
    #
    # They support some additional options specific to this plugin:
    #
    # :array_type :: This overrides the type of the array.  By default, the type
    #                is determined by looking at the db_schema for the model, and if that fails,
    #                it defaults to :integer.
    # :raise_on_save_failure :: Do not raise exceptions for hook or validation failures when saving associated
    #                           objects in the add/remove methods (return nil instead).
    # :save_after_modify :: For pg_array_to_many associations, this makes the
    #                       modification methods save the current object,
    #                       so they operate more similarly to the one_to_many
    #                       and many_to_many association modification methods.
    # :uniq :: Similar to many_to_many associations, this can be used to
    #          make sure the returned associated object array has uniq values.
    #
    # Note that until PostgreSQL gains the ability to enforce foreign key
    # constraints in array columns, this plugin is not recommended for
    # production use unless you plan on emulating referential integrity
    # constraints via triggers.
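    #
    # A sketch of the backing schema from the diagram above (the integer[]
    # column matches the plugin's default :integer array type):
    #
    #   DB.create_table(:tags) do
    #     primary_key :id
    #     String :name
    #   end
    #
    #   DB.create_table(:albums) do
    #     primary_key :id
    #     column :tag_ids, 'integer[]'
    #     String :name
    #   end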
# # This plugin should work on all supported PostgreSQL versions, except # the remove_all modification method for many_to_pg_array associations, which # requires the array_remove method added in PostgreSQL 9.3. # # This plugin requires that the underlying database have the pg_array # extension loaded. module PgArrayAssociations # The AssociationReflection subclass for many_to_pg_array associations. class ManyToPgArrayAssociationReflection < Sequel::Model::Associations::AssociationReflection Sequel.synchronize{Sequel::Model::Associations::ASSOCIATION_TYPES[:many_to_pg_array] = self} def array_type cached_fetch(:array_type) do if (sch = associated_class.db_schema) && (s = sch[self[:key]]) && (t = s[:db_type]) t.sub(/\[\]\z/, '').freeze else :integer end end end # The array column in the associated model containing foreign keys to # the current model. def associated_object_keys [self[:key]] end # many_to_pg_array associations can have associated objects as long as they have # a primary key. def can_have_associated_objects?(obj) obj.get_column_value(self[:primary_key]) end # Assume that the key in the associated table uses a version of the current # model's name suffixed with _ids. def default_key :"#{underscore(demodulize(self[:model].name))}_ids" end # Always use the ruby eager_graph limit strategy if association is limited. def eager_graph_limit_strategy(_) :ruby if self[:limit] end # Always use the ruby eager limit strategy def eager_limit_strategy cached_fetch(:_eager_limit_strategy) do :ruby if self[:limit] end end # Don't use a filter by associations limit strategy def filter_by_associations_limit_strategy nil end FINALIZE_SETTINGS = superclass::FINALIZE_SETTINGS.merge( :array_type=>:array_type ).freeze def finalize_settings FINALIZE_SETTINGS end # Handle silent failure of add/remove methods if raise_on_save_failure is false. def handle_silent_modification_failure? self[:raise_on_save_failure] == false end # The hash key to use for the eager loading predicate (left side of IN (1, 2, 3)) def predicate_key cached_fetch(:predicate_key){qualify_assoc(self[:key_column])} end # The column in the current table that the keys in the array column in the # associated table reference. def primary_key self[:primary_key] end # Destroying the associated object automatically removes the association, # since the association is stored in the associated object. def remove_before_destroy? false end private # The predicate condition to use for the eager_loader. def eager_loading_predicate_condition(keys) Sequel.pg_array_op(predicate_key).overlaps(Sequel.pg_array(keys, array_type)) end def filter_by_associations_add_conditions_dataset_filter(ds) key = qualify(associated_class.table_name, self[:key]) ds.cross_join(Sequel.function(:unnest, key).as(:_smtopgaa_, [:_smtopgaa_key_])).exclude(key=>nil).select(:_smtopgaa_key_) end def filter_by_associations_conditions_key qualify(self[:model].table_name, primary_key) end # Only consider an association as a reciprocal if it has matching keys # and primary keys. def reciprocal_association?(assoc_reflect) super && self[:key] == assoc_reflect[:key] && primary_key == assoc_reflect.primary_key end def reciprocal_type :pg_array_to_many end def use_placeholder_loader? false end end # The AssociationReflection subclass for pg_array_to_many associations. 
class PgArrayToManyAssociationReflection < Sequel::Model::Associations::AssociationReflection Sequel.synchronize{Sequel::Model::Associations::ASSOCIATION_TYPES[:pg_array_to_many] = self} def array_type cached_fetch(:array_type) do if (sch = self[:model].db_schema) && (s = sch[self[:key]]) && (t = s[:db_type]) t.sub(/\[\]\z/, '').freeze else :integer end end end # An array containing the primary key for the associated model. def associated_object_keys Array(primary_key) end # pg_array_to_many associations can only have associated objects if # the array field is not nil or empty. def can_have_associated_objects?(obj) v = obj.get_column_value(self[:key]) v && !v.empty? end # pg_array_to_many associations do not need a primary key. def dataset_need_primary_key? false end # Use a default key name of *_ids, for similarity to other association types # that use *_id for single keys. def default_key :"#{singularize(self[:name])}_ids" end # Always use the ruby eager_graph limit strategy if association is limited. def eager_graph_limit_strategy(_) :ruby if self[:limit] end # Always use the ruby eager limit strategy def eager_limit_strategy cached_fetch(:_eager_limit_strategy) do :ruby if self[:limit] end end # Don't use a filter by associations limit strategy def filter_by_associations_limit_strategy nil end FINALIZE_SETTINGS = superclass::FINALIZE_SETTINGS.merge( :array_type=>:array_type, :primary_key=>:primary_key, :primary_key_method=>:primary_key_method ).freeze def finalize_settings FINALIZE_SETTINGS end # Handle silent failure of add/remove methods if raise_on_save_failure is false # and save_after_modify is true. def handle_silent_modification_failure? self[:raise_on_save_failure] == false && self[:save_after_modify] end # A qualified version of the associated primary key. def predicate_key cached_fetch(:predicate_key){qualify_assoc(primary_key)} end # The primary key of the associated model. def primary_key cached_fetch(:primary_key){associated_class.primary_key || raise(Error, "no primary key specified for #{associated_class.inspect}")} end # The method to call to get value of the primary key of the associated model. def primary_key_method cached_fetch(:primary_key_method){primary_key} end def filter_by_associations_conditions_expression(obj) ds = filter_by_associations_conditions_dataset.where(filter_by_associations_conditions_subquery_conditions(obj)) Sequel.function(:coalesce, Sequel.pg_array(filter_by_associations_conditions_key).overlaps(ds), Sequel::SQL::Constants::FALSE) end private def filter_by_associations_add_conditions_dataset_filter(ds) pk = qualify(associated_class.table_name, primary_key) ds.select{array_agg(pk)}.exclude(pk=>nil) end def filter_by_associations_conditions_key qualify(self[:model].table_name, self[:key]) end # Only consider an association as a reciprocal if it has matching keys # and primary keys. def reciprocal_association?(assoc_reflect) super && self[:key] == assoc_reflect[:key] && primary_key == assoc_reflect.primary_key end def reciprocal_type :many_to_pg_array end def use_placeholder_loader? false end end # Add the pg_array extension to the database def self.apply(model) model.db.extension(:pg_array) end module ClassMethods # Create a many_to_pg_array association, for the case where the associated # table contains the array with foreign keys pointing to the current table. # See associate for options. 
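      #
      # A sketch, matching the schema in the plugin overview above, where
      # albums.tag_ids holds the foreign keys:
      #
      #   Tag.many_to_pg_array :albums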
def many_to_pg_array(name, opts=OPTS, &block) associate(:many_to_pg_array, name, opts, &block) end # Create a pg_array_to_many association, for the case where the current # table contains the array with foreign keys pointing to the associated table. # See associate for options. def pg_array_to_many(name, opts=OPTS, &block) associate(:pg_array_to_many, name, opts, &block) end private # Setup the many_to_pg_array-specific datasets, eager loaders, and modification methods. def def_many_to_pg_array(opts) name = opts[:name] model = self pk = opts[:eager_loader_key] = opts[:primary_key] ||= model.primary_key raise(Error, "no primary key specified for #{inspect}") unless pk opts[:key] = opts.default_key unless opts.has_key?(:key) key = opts[:key] key_column = opts[:key_column] ||= opts[:key] if opts[:uniq] opts[:after_load] ||= [] opts[:after_load].unshift(:array_uniq!) end opts[:dataset] ||= lambda do opts.associated_dataset.where(Sequel.pg_array_op(opts.predicate_key).contains(Sequel.pg_array([get_column_value(pk)], opts.array_type))) end opts[:eager_loader] ||= proc do |eo| id_map = eo[:id_map] eo = Hash[eo] eo[:loader] = false eager_load_results(opts, eo) do |assoc_record| if pks = assoc_record.get_column_value(key) pks.each do |pkv| id_map[pkv].each do |object| object.associations[name].push(assoc_record) end end end end end join_type = opts[:graph_join_type] select = opts[:graph_select] opts[:cartesian_product_number] ||= 1 if opts.include?(:graph_only_conditions) conditions = opts[:graph_only_conditions] graph_block = opts[:graph_block] else conditions = opts[:graph_conditions] conditions = nil if conditions.empty? graph_block = proc do |j, lj, js| Sequel.pg_array_op(Sequel.deep_qualify(j, key_column)).contains([Sequel.deep_qualify(lj, opts.primary_key)]) end if orig_graph_block = opts[:graph_block] pg_array_graph_block = graph_block graph_block = proc do |j, lj, js| Sequel.&(orig_graph_block.call(j,lj,js), pg_array_graph_block.call(j, lj, js)) end end end opts[:eager_grapher] ||= proc do |eo| ds = eo[:self] ds = ds.graph(eager_graph_dataset(opts, eo), conditions, eo.merge(:select=>select, :join_type=>eo[:join_type]||join_type, :qualify=>:deep), &graph_block) ds end return if opts[:read_only] save_opts = {:validate=>opts[:validate]} save_opts[:raise_on_failure] = opts[:raise_on_save_failure] != false unless opts.has_key?(:adder) opts[:adder] = proc do |o| if array = o.get_column_value(key) array << get_column_value(pk) else o.set_column_value("#{key}=", Sequel.pg_array([get_column_value(pk)], opts.array_type)) end o.save(save_opts) end end unless opts.has_key?(:remover) opts[:remover] = proc do |o| if (array = o.get_column_value(key)) && !array.empty? array.delete(get_column_value(pk)) o.save(save_opts) end end end unless opts.has_key?(:clearer) opts[:clearer] = proc do pk_value = get_column_value(pk) db_type = opts.array_type opts.associated_dataset.where(Sequel.pg_array_op(key).contains(Sequel.pg_array([pk_value], db_type))).update(key=>Sequel.function(:array_remove, key, Sequel.cast(pk_value, db_type))) end end end # Setup the pg_array_to_many-specific datasets, eager loaders, and modification methods. def def_pg_array_to_many(opts) name = opts[:name] opts[:key] = opts.default_key unless opts.has_key?(:key) key = opts[:key] key_column = opts[:key_column] ||= key opts[:eager_loader_key] = nil if opts[:uniq] opts[:after_load] ||= [] opts[:after_load].unshift(:array_uniq!) 
end opts[:dataset] ||= lambda do opts.associated_dataset.where(opts.predicate_key=>get_column_value(key).to_a) end opts[:eager_loader] ||= proc do |eo| rows = eo[:rows] id_map = {} pkm = opts.primary_key_method Sequel.synchronize_with(eo[:mutex]) do rows.each do |object| if associated_pks = object.get_column_value(key) associated_pks.each do |apk| (id_map[apk] ||= []) << object end end end end eo = Hash[eo] eo[:id_map] = id_map eager_load_results(opts, eo) do |assoc_record| if objects = id_map[assoc_record.get_column_value(pkm)] objects.each do |object| object.associations[name].push(assoc_record) end end end end join_type = opts[:graph_join_type] select = opts[:graph_select] opts[:cartesian_product_number] ||= 1 if opts.include?(:graph_only_conditions) conditions = opts[:graph_only_conditions] graph_block = opts[:graph_block] else conditions = opts[:graph_conditions] conditions = nil if conditions.empty? graph_block = proc do |j, lj, js| Sequel.pg_array_op(Sequel.deep_qualify(lj, key_column)).contains([Sequel.deep_qualify(j, opts.primary_key)]) end if orig_graph_block = opts[:graph_block] pg_array_graph_block = graph_block graph_block = proc do |j, lj, js| Sequel.&(orig_graph_block.call(j,lj,js), pg_array_graph_block.call(j, lj, js)) end end end opts[:eager_grapher] ||= proc do |eo| ds = eo[:self] ds = ds.graph(eager_graph_dataset(opts, eo), conditions, eo.merge(:select=>select, :join_type=>eo[:join_type]||join_type, :qualify=>:deep), &graph_block) ds end return if opts[:read_only] save_opts = {:validate=>opts[:validate]} save_opts[:raise_on_failure] = opts[:raise_on_save_failure] != false if opts[:save_after_modify] save_after_modify = proc do |obj| obj.save(save_opts) end end unless opts.has_key?(:adder) opts[:adder] = proc do |o| opk = o.get_column_value(opts.primary_key) if array = get_column_value(key) modified!(key) array << opk else set_column_value("#{key}=", Sequel.pg_array([opk], opts.array_type)) end save_after_modify.call(self) if save_after_modify end end unless opts.has_key?(:remover) opts[:remover] = proc do |o| if (array = get_column_value(key)) && !array.empty? modified!(key) array.delete(o.get_column_value(opts.primary_key)) save_after_modify.call(self) if save_after_modify end end end unless opts.has_key?(:clearer) opts[:clearer] = proc do if (array = get_column_value(key)) && !array.empty? modified!(key) array.clear save_after_modify.call(self) if save_after_modify end end end end end module DatasetMethods private # Support filtering by many_to_pg_array associations using a subquery. def many_to_pg_array_association_filter_expression(op, ref, obj) pk = ref.qualify(model.table_name, ref.primary_key) key = ref[:key] # :nocov: expr = case obj # :nocov: when Sequel::Model if (assoc_pks = obj.get_column_value(key)) && !assoc_pks.empty? Sequel[pk=>assoc_pks.to_a] end when Array if (assoc_pks = obj.map{|o| o.get_column_value(key)}.flatten.compact.uniq) && !assoc_pks.empty? Sequel[pk=>assoc_pks] end when Sequel::Dataset obj.select(ref.qualify(obj.model.table_name, ref[:key_column]).as(:key)).from_self.where{{pk=>any(:key)}}.select(1).exists end expr = Sequel::SQL::Constants::FALSE unless expr expr = add_association_filter_conditions(ref, obj, expr) association_filter_handle_inversion(op, expr, [pk]) end # Support filtering by pg_array_to_many associations using a subquery. 
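#
# A rough sketch of the SQL shape this produces (assuming a hypothetical
# Album model with a pg_array_to_many :tags association stored in a
# tag_ids array column):
#
#   Album.where(tags: Tag[1]).sql
#   # roughly: ... WHERE (tag_ids @> ARRAY[1])  -- single instance uses contains
#   Album.where(tags: [Tag[1], Tag[2]]).sql
#   # roughly: ... WHERE (tag_ids && ARRAY[1,2]) -- arrays use overlaps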
def pg_array_to_many_association_filter_expression(op, ref, obj) key = ref.qualify(model.table_name, ref[:key_column]) # :nocov: expr = case obj # :nocov: when Sequel::Model if pkv = obj.get_column_value(ref.primary_key_method) Sequel.pg_array_op(key).contains(Sequel.pg_array([pkv], ref.array_type)) end when Array if (pkvs = obj.map{|o| o.get_column_value(ref.primary_key_method)}.compact) && !pkvs.empty? Sequel.pg_array_op(key).overlaps(Sequel.pg_array(pkvs, ref.array_type)) end when Sequel::Dataset Sequel.function(:coalesce, Sequel.pg_array_op(key).overlaps(obj.select{array_agg(ref.qualify(obj.model.table_name, ref.primary_key))}), Sequel::SQL::Constants::FALSE) end expr = Sequel::SQL::Constants::FALSE unless expr expr = add_association_filter_conditions(ref, obj, expr) association_filter_handle_inversion(op, expr, [key]) end end end end end
sequel-5.63.0/lib/sequel/plugins/pg_auto_constraint_validations.rb
# frozen-string-literal: true module Sequel module Plugins # The pg_auto_constraint_validations plugin automatically converts some constraint # violation exceptions that are raised by INSERT/UPDATE queries into validation # failures. This can allow for using the same error handling code for both # regular validation errors (checked before attempting the INSERT/UPDATE), and # constraint violations (raised during the INSERT/UPDATE). # # This handles the following constraint violations: # # * NOT NULL # * CHECK # * UNIQUE (except expression/functional indexes) # * FOREIGN KEY (both referencing and referenced by) # # If the plugin cannot convert the constraint violation error to a validation # error, it just reraises the initial exception, so this should not cause # problems if the plugin doesn't know how to convert the exception. # # This plugin is not intended as a replacement for other validations, # it is intended as a last resort. The purpose of validations is to provide nice # error messages for the user, and the error messages generated by this plugin are # fairly generic by default. The error messages can be customized per constraint type # using the :messages plugin option, and individually per constraint using # +pg_auto_constraint_validation_override+ (see below). # # This plugin only works on the postgres adapter when using the pg 0.16+ driver, # PostgreSQL 9.3+ server, and PostgreSQL 9.3+ client library (libpq). In other cases # it will be a no-op. # # Example: # # album = Album.new(artist_id: 1) # Assume no such artist exists # begin # album.save # rescue Sequel::ValidationFailed # album.errors.on(:artist_id) # ['is invalid'] # end # # While the database usually provides enough information to correctly associate # constraint violations with model columns, there are cases where it does not.
# In those cases, you can override the handling of specific constraint violations # to be associated to particular column(s), and use a specific error message: # # Album.pg_auto_constraint_validation_override(:constraint_name, [:column1], "validation error message") # # Using the pg_auto_constraint_validations plugin requires 5 queries per # model at load time in order to gather the necessary metadata. For applications # with a large number of models, this can result in a noticeable delay during model # initialization. To mitigate this issue, you can cache the necessary metadata in # a file with the :cache_file option: # # Sequel::Model.plugin :pg_auto_constraint_validations, cache_file: 'db/pgacv.cache' # # The file does not have to exist when loading the plugin. If it exists, the plugin # will load the cache and use the cached results instead of issuing queries if there # is an entry in the cache. If there is no entry in the cache, it will update the # in-memory cache with the metadata results. To save the in-memory cache back to # the cache file, run: # # Sequel::Model.dump_pg_auto_constraint_validations_cache # # Note that when using the :cache_file option, it is up to the application to ensure # that the dumped cached metadata reflects the current state of the database. Sequel # does no checking to ensure this, as checking would take time and the # purpose of this code is to take a shortcut. # # The cached schema is dumped in Marshal format, since it is the fastest # and it handles all ruby objects used in the metadata. Because of this, # you should not attempt to load the metadata from an untrusted file. # # Usage: # # # Make all model subclasses automatically convert constraint violations # # to validation failures (called before loading subclasses) # Sequel::Model.plugin :pg_auto_constraint_validations # # # Make the Album class automatically convert constraint violations # # to validation failures # Album.plugin :pg_auto_constraint_validations module PgAutoConstraintValidations ( # The default error messages for each constraint violation type. DEFAULT_ERROR_MESSAGES = { :not_null=>"is not present", :check=>"is invalid", :unique=>'is already taken', :foreign_key=>'is invalid', :referenced_by=>'cannot be changed currently' }.freeze).each_value(&:freeze) # Setup the constraint violation metadata. Options: # :cache_file :: File storing cached metadata, to avoid queries for each model # :messages :: Override the default error messages for each constraint # violation type (:not_null, :check, :unique, :foreign_key, :referenced_by) def self.configure(model, opts=OPTS) model.instance_exec do if @pg_auto_constraint_validations_cache_file = opts[:cache_file] @pg_auto_constraint_validations_cache = if ::File.file?(@pg_auto_constraint_validations_cache_file) cache = Marshal.load(File.read(@pg_auto_constraint_validations_cache_file)) cache.each_value do |hash| hash.freeze.each_value(&:freeze) end else {} end else @pg_auto_constraint_validations_cache = nil end setup_pg_auto_constraint_validations @pg_auto_constraint_validations_messages = (@pg_auto_constraint_validations_messages || DEFAULT_ERROR_MESSAGES).merge(opts[:messages] || OPTS).freeze end nil end module ClassMethods # Hash of metadata checked when an instance attempts to convert a constraint # violation into a validation failure. attr_reader :pg_auto_constraint_validations # Hash of error messages keyed by constraint type symbol to use in the # generated validation failures.
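#
# For example (a hedged illustration, not from the original docs),
# custom messages provided at plugin load time end up in this hash:
#
#   Album.plugin :pg_auto_constraint_validations,
#     messages: {unique: 'is already in use'}
#   Album.pg_auto_constraint_validations_messages[:unique]
#   # => "is already in use"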
attr_reader :pg_auto_constraint_validations_messages Plugins.inherited_instance_variables(self, :@pg_auto_constraint_validations=>nil, :@pg_auto_constraint_validations_messages=>nil, :@pg_auto_constraint_validations_cache=>nil, :@pg_auto_constraint_validations_cache_file=>nil) Plugins.after_set_dataset(self, :setup_pg_auto_constraint_validations) # Dump the in-memory cached metadata to the cache file. def dump_pg_auto_constraint_validations_cache raise Error, "No pg_auto_constraint_validations setup" unless file = @pg_auto_constraint_validations_cache_file File.open(file, 'wb'){|f| f.write(Marshal.dump(@pg_auto_constraint_validations_cache))} nil end # Override the constraint validation columns and message for a given constraint def pg_auto_constraint_validation_override(constraint, columns, message) pgacv = Hash[@pg_auto_constraint_validations] overrides = pgacv[:overrides] = Hash[pgacv[:overrides]] overrides[constraint] = [Array(columns), message].freeze overrides.freeze @pg_auto_constraint_validations = pgacv.freeze nil end private # Get the list of constraints, unique indexes, foreign keys in the current # table, and keys in the current table referenced by foreign keys in other # tables. Store this information so that if a constraint violation occurs, # all necessary metadata is already available in the model, so a query is # not required at runtime. This is both for performance and because in # general after the constraint violation failure you will be inside a # failed transaction and not able to execute queries. def setup_pg_auto_constraint_validations return unless @dataset case @dataset.first_source_table when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier convert_errors = db.respond_to?(:error_info) end unless convert_errors # Might be a table returning function or subquery, skip handling those. # Might have db not support error_info, skip handling that. @pg_auto_constraint_validations = nil return end cache = @pg_auto_constraint_validations_cache literal_table_name = dataset.literal(table_name) unless cache && (metadata = cache[literal_table_name]) checks = {} indexes = {} foreign_keys = {} referenced_by = {} db.check_constraints(table_name).each do |k, v| checks[k] = v[:columns].dup.freeze unless v[:columns].empty? end db.indexes(table_name, :include_partial=>true).each do |k, v| if v[:unique] indexes[k] = v[:columns].dup.freeze end end db.foreign_key_list(table_name, :schema=>false).each do |fk| foreign_keys[fk[:name]] = fk[:columns].dup.freeze end db.foreign_key_list(table_name, :reverse=>true, :schema=>false).each do |fk| referenced_by[[fk[:schema], fk[:table], fk[:name]].freeze] = fk[:key].dup.freeze end schema, table = db[:pg_class]. join(:pg_namespace, :oid=>:relnamespace, db.send(:regclass_oid, table_name)=>:oid). get([:nspname, :relname]) metadata = { :schema=>schema, :table=>table, :check=>checks, :unique=>indexes, :foreign_key=>foreign_keys, :referenced_by=>referenced_by, :overrides=>OPTS }.freeze metadata.each_value(&:freeze) if cache cache[literal_table_name] = metadata end end @pg_auto_constraint_validations = metadata nil end end module InstanceMethods private # Yield to the given block, and if a Sequel::ConstraintViolation is raised, try # to convert it to a Sequel::ValidationFailed error using the PostgreSQL error # metadata. def check_pg_constraint_error(ds) yield rescue Sequel::ConstraintViolation => e begin unless cv_info = model.pg_auto_constraint_validations # Necessary metadata does not exist, just reraise the exception. 
raise e end info = ds.db.error_info(e) m = ds.method(:output_identifier) schema = info[:schema] table = info[:table] if constraint = info[:constraint] constraint = m.call(constraint) columns, message = cv_info[:overrides][constraint] if columns override = true add_pg_constraint_validation_error(columns, message) end end messages = model.pg_auto_constraint_validations_messages unless override # :nocov: case e # :nocov: when Sequel::NotNullConstraintViolation if column = info[:column] add_pg_constraint_validation_error([m.call(column)], messages[:not_null]) end when Sequel::CheckConstraintViolation if columns = cv_info[:check][constraint] add_pg_constraint_validation_error(columns, messages[:check]) end when Sequel::UniqueConstraintViolation if columns = cv_info[:unique][constraint] add_pg_constraint_validation_error(columns, messages[:unique]) end when Sequel::ForeignKeyConstraintViolation message_primary = info[:message_primary] if message_primary.start_with?('update') # This constraint violation is different from the others, because the constraint # referenced is a constraint for a different table, not for this table. This # happens when another table references the current table, and the referenced # column in the current update is modified such that referential integrity # would be broken. Use the reverse foreign key information to figure out # which column is affected in that case. skip_schema_table_check = true if columns = cv_info[:referenced_by][[m.call(schema), m.call(table), constraint]] add_pg_constraint_validation_error(columns, messages[:referenced_by]) end elsif message_primary.start_with?('insert') if columns = cv_info[:foreign_key][constraint] add_pg_constraint_validation_error(columns, messages[:foreign_key]) end end end end rescue # If there is an error trying to convert the constraint violation # into a validation failure, it's best to just raise the constraint # violation. This can make debugging the above block of code more # difficult. raise e else unless skip_schema_table_check # The constraint violation could be caused by a trigger modifying # a different table. Check that the error schema and table # match the model's schema and table, or clear the validation error # that was set above. if schema != cv_info[:schema] || table != cv_info[:table] errors.clear end end if errors.empty? # If we weren't able to parse the constraint violation metadata and # convert it to an appropriate validation failure, or the schema/table # didn't match, then raise the constraint violation. raise e end # Integrate with error_splitter plugin to split any multi-column errors # and add them as separate single column errors if respond_to?(:split_validation_errors, true) split_validation_errors(errors) end vf = ValidationFailed.new(self) vf.set_backtrace(e.backtrace) vf.wrapped_exception = e raise vf end end # If there is a single column instead of an array of columns, add the error # for the column, otherwise add the error for the array of columns. def add_pg_constraint_validation_error(column, message) column = column.first if column.length == 1 errors.add(column, message) end # Convert PostgreSQL constraint errors when inserting. def _insert_raw(ds) check_pg_constraint_error(ds){super} end # Convert PostgreSQL constraint errors when inserting. def _insert_select_raw(ds) check_pg_constraint_error(ds){super} end # Convert PostgreSQL constraint errors when updating.
def _update_without_checking(_) check_pg_constraint_error(_update_dataset){super} end end end end end
sequel-5.63.0/lib/sequel/plugins/pg_row.rb
# frozen-string-literal: true module Sequel module Plugins # The pg_row plugin allows you to use Sequel::Model classes as composite type # classes, via the pg_row extension. So if you have an address table: # # DB.create_table(:address) do # String :street # String :city # String :zip # end # # and a company table with an address: # # DB.create_table(:company) do # String :name # address :address # end # # You can create a Sequel::Model for the address table, and load the plugin, # which registers the row type: # # class Address < Sequel::Model(:address) # plugin :pg_row # end # # Then when you select from the company table (even using a plain dataset), # it will return address values as instances of Address: # # DB[:company].first # # => {:name=>'MS', :address=> # # Address.load(:street=>'123 Foo St', :city=>'Bar Town', :zip=>'12345')} # # If you want a lot of your models to be used as row types, you can load the # plugin into Sequel::Model itself: # # Sequel::Model.plugin :pg_row # # And then call register_row_type in the class # # Address.register_row_type # # In addition to returning row-valued/composite types as instances of Sequel::Model, # this also lets you use model instances in datasets when inserting, updating, and # filtering: # # DB[:company].insert(name: 'MS', address: # Address.load(street: '123 Foo St', city: 'Bar Town', zip: '12345')) module PgRow # When loading the extension, make sure the database has the pg_row extension # loaded, load the custom database extensions, and automatically register the # row type if the model has a dataset. def self.configure(model) model.db.extension(:pg_row, :_model_pg_row) model.register_row_type if model.instance_variable_get(:@dataset) end module ClassMethods # Register the model's row type with the database. def register_row_type table = dataset.first_source_table db.register_row_type(table, :converter=>self, :typecaster=>method(:new)) db.instance_variable_get(:@schema_type_classes)[:"pg_row_#{table}"] = self end end module InstanceMethods # Literalize the model instance and append it to the sql.
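#
# A rough sketch of the resulting literal, reusing the Address example
# from above:
#
#   DB.literal(Address.load(street: '123 Foo St', city: 'Bar Town', zip: '12345'))
#   # => "ROW('123 Foo St', 'Bar Town', '12345')::address"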
def sql_literal_append(ds, sql) sql << 'ROW' ds.literal_append(sql, values.values_at(*columns)) sql << '::' ds.quote_schema_table_append(sql, model.dataset.first_source_table) end end end end end
sequel-5.63.0/lib/sequel/plugins/prepared_statements.rb
# frozen-string-literal: true module Sequel module Plugins # The prepared_statements plugin modifies the model to use prepared statements for # instance level inserts and updates. # # Note that this plugin is unsafe in some circumstances, as it can allow up to # 2^N prepared statements to be created for each type of insert and update query, where # N is the number of columns in the table. It is recommended that you use the # +prepared_statements_safe+ plugin in addition to this plugin to reduce the number # of prepared statements that can be created, unless you tightly control how your # model instances are saved. # # Usage: # # # Make all model subclasses use prepared statements (called before loading subclasses) # Sequel::Model.plugin :prepared_statements # # # Make the Album class use prepared statements # Album.plugin :prepared_statements module PreparedStatements # Synchronize access to the integer sequence so that no two calls get the same integer. MUTEX = Mutex.new i = 0 # This plugin names prepared statements uniquely using an integer sequence, this # lambda returns the next integer to use. NEXT = lambda{MUTEX.synchronize{i += 1}} # Setup the datastructure used to hold the prepared statements in the model. def self.apply(model) model.instance_variable_set(:@prepared_statements, {:insert=>{}, :insert_select=>{}, :update=>{}}.freeze) end module ClassMethods Plugins.inherited_instance_variables(self, :@prepared_statements=>lambda{|v| {:insert=>{}, :insert_select=>{}, :update=>{}}.freeze}) private # Create a prepared statement, but modify the SQL used so that the model's columns are explicitly # selected instead of using *, assuming that the dataset selects from a single table. def prepare_explicit_statement(ds, type, vals=OPTS) s = ds.opts[:returning] if !s || s.empty? ds = ds.returning(*columns.map{|c| Sequel.identifier(c)}) end prepare_statement(ds, type, vals) end # Create a prepared statement based on the given dataset with a unique name for the given # type of query and values. def prepare_statement(ds, type, vals=OPTS) ds.clone(:log_sql=>true).prepare(type, :"smpsp_#{NEXT.call}", vals) end # Return a sorted array of columns for use as a hash key. def prepared_columns(cols) cols.sort end # Return a prepared statement that can be used to insert a row using the given columns.
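#
# As a rough illustration (the table and column names here are
# assumptions), for cols [:name, :artist_id] this prepares a statement
# equivalent to:
#
#   INSERT INTO albums (name, artist_id) VALUES ($name, $artist_id)
#
# under an auto-generated name like :smpsp_1, with the placeholders
# filled from the instance's values hash when the statement is called.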
def prepared_insert(cols) cached_prepared_statement(:insert, prepared_columns(cols)){prepare_statement(dataset, :insert, prepared_statement_key_hash(cols))} end # Return a prepared statement that can be used to insert a row using the given columns # and return that column values for the row created. def prepared_insert_select(cols) cached_prepared_statement(:insert_select, prepared_columns(cols)){prepare_explicit_statement(naked.clone(:server=>dataset.opts.fetch(:server, :default)), :insert_select, prepared_statement_key_hash(cols))} end # Return an array of two element arrays with the column symbol as the first entry and the # placeholder symbol as the second entry. def prepared_statement_key_array(keys) if dataset.requires_placeholder_type_specifiers? sch = db_schema Array(keys).map do |k| if (s = sch[k]) && (t = s[:type]) [k, :"$#{k}__#{t}"] else [k, :"$#{k}"] end end else Array(keys).map{|k| [k, :"$#{k}"]} end end # Return a hash mapping column symbols to placeholder symbols. def prepared_statement_key_hash(keys) Hash[*(prepared_statement_key_array(keys).flatten)] end # Return a prepared statement that can be used to update row using the given columns. def prepared_update(cols) cached_prepared_statement(:update, prepared_columns(cols)){prepare_statement(where(prepared_statement_key_array(primary_key)), :update, prepared_statement_key_hash(cols))} end # If a prepared statement has already been cached for the given type and subtype, # return it. Otherwise, yield to the block to get the prepared statement, and cache it. def cached_prepared_statement(type, subtype) h = @prepared_statements[type] Sequel.synchronize do if v = h[subtype] return v end end ps = yield Sequel.synchronize{h[subtype] = ps} end # Whether to use prepared statements for lookups by primary key. True if the default # primary key lookup isn't optimized. def use_prepared_statements_for_pk_lookup? !@fast_pk_lookup_sql && !dataset.joined_dataset? end end module InstanceMethods private # Use a prepared statement to insert the values into the model's dataset. def _insert_raw(ds) if use_prepared_statements_for?(:insert) _set_prepared_statement_server(model.send(:prepared_insert, @values.keys)).call(@values) else super end end # Use a prepared statement to insert the values into the model's dataset # and return the new column values. def _insert_select_raw(ds) if use_prepared_statements_for?(:insert_select) _set_prepared_statement_server(model.send(:prepared_insert_select, @values.keys)).call(@values) else super end end # Use a prepared statement to update this model's columns in the database. def _update_without_checking(columns) if use_prepared_statements_for?(:update) _set_prepared_statement_server(model.send(:prepared_update, columns.keys)).call(columns.merge(pk_hash)) else super end end # If a server is set for the instance, return a prepared statement that will use that server. def _set_prepared_statement_server(ps) if @server ps.server(@server) else ps end end # Whether prepared statements should be used for the given type of query # (:insert, :insert_select, :update). True by default, # can be overridden in other plugins to disallow prepared statements for # specific types of queries. def use_prepared_statements_for?(type) if defined?(super) result = super return result unless result.nil? 
end case type when :insert, :update true when :insert_select # SQLite RETURNING support has a bug that doesn't allow for committing transactions # when a prepared statement with RETURNING has been used on the connection: # # SQLite3::BusyException: cannot commit transaction - SQL statements in progress: COMMIT # # Disabling usage of prepared statements for insert_select on SQLite seems to be the # simplest way to work around the problem. db.database_type != :sqlite # :nocov: when :delete, :refresh Sequel::Deprecation.deprecate("The :delete and :refresh prepared statement types", "There should be no need to check if these types are supported") false # :nocov: else raise Error, "unsupported type used: #{type.inspect}" end end end end end end
sequel-5.63.0/lib/sequel/plugins/prepared_statements_safe.rb
# frozen-string-literal: true module Sequel module Plugins # The prepared_statements_safe plugin modifies the model to reduce the number of # prepared statements that can be created, by setting as many columns as possible # before creating, and by changing +save_changes+ to save all columns instead of # just the changed ones. # # This plugin depends on the +prepared_statements+ plugin. # # Usage: # # # Make all model subclasses safer when using prepared statements (called before loading subclasses) # Sequel::Model.plugin :prepared_statements_safe # # # Make the Album class safer when using prepared statements # Album.plugin :prepared_statements_safe module PreparedStatementsSafe # Depend on the prepared_statements plugin def self.apply(model) model.plugin(:prepared_statements) end # Set the column defaults to use when creating on the model. def self.configure(model) model.send(:set_prepared_statements_column_defaults) end module ClassMethods # A hash with column symbol keys and default values. Instance # values are merged into this hash before creating to reduce the # number of free columns (columns that may or may not be present # in the INSERT statement), as the number of prepared statements # that can be created is 2^N (where N is the number of free columns). attr_reader :prepared_statements_column_defaults Plugins.inherited_instance_variables(self, :@prepared_statements_column_defaults=>:dup) Plugins.after_set_dataset(self, :set_prepared_statements_column_defaults) # Freeze the prepared statements column defaults when freezing the model class. def freeze @prepared_statements_column_defaults.freeze if @prepared_statements_column_defaults super end private # Set the column defaults based on the database schema. All columns # are set to a default value unless they are a primary key column or # they don't have a parseable default.
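#
# A hedged sketch: for a hypothetical albums table where copies_sold has
# a parseable default of 0, id is the primary key, and name has no
# database default, this would produce {:copies_sold=>0, :name=>nil},
# so both columns are always present in the prepared INSERT.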
def set_prepared_statements_column_defaults if db_schema h = {} db_schema.each do |k, v| default = v[:ruby_default] h[k] = default if (default || !v[:default]) && !v[:primary_key] && !default.is_a?(Sequel::SQL::Expression) end @prepared_statements_column_defaults = h end end end module InstanceMethods # Merge the current values into the default values to reduce the number # of free columns. def before_create @values = model.prepared_statements_column_defaults.merge(@values) super end # Always do a full save of all columns to reduce the number of prepared # statements that can be used. def save_changes(opts=OPTS) save(opts) || false if modified? end end end end end
sequel-5.63.0/lib/sequel/plugins/primary_key_lookup_check_values.rb
# frozen-string-literal: true module Sequel module Plugins # The primary_key_lookup_check_values plugin typecasts given primary key # values before performing a lookup by primary key. If the given primary # key value cannot be typecasted correctly, the lookup returns nil # without issuing a query. If the schema for the primary key column # includes minimum and maximum values, this also checks the given value # is not outside the range. If the given value is outside the allowed # range, the lookup returns nil without issuing a query. # # This affects the following Model methods: # # * Model.[] (when called with non-Hash) # * Model.with_pk # * Model.with_pk! # # It also affects the following Model dataset methods: # # * Dataset#[] (when called with Integer) # * Dataset#with_pk # * Dataset#with_pk! # # Note that this can break working code. The above methods accept # any filter condition by default, not just primary key values. The # plugin will handle Symbol, Sequel::SQL::Expression, and # Sequel::LiteralString objects, but code such as the following will break: # # # Return first Album where primary key is one of the given values # Album.dataset.with_pk([1, 2, 3]) # # Usage: # # # Make all model subclasses support checking primary key values before # # lookup (called before loading subclasses) # Sequel::Model.plugin :primary_key_lookup_check_values # # # Make the Album class support checking primary key values before lookup # Album.plugin :primary_key_lookup_check_values module PrimaryKeyLookupCheckValues def self.configure(model) model.instance_exec do setup_primary_key_lookup_check_values if @dataset end end module ClassMethods Plugins.after_set_dataset(self, :setup_primary_key_lookup_check_values) Plugins.inherited_instance_variables(self, :@primary_key_type=>nil, :@primary_key_value_range=>nil) private # Check the given primary key value. Typecast it to the appropriate # database type if the database type is known. If it cannot be # typecasted, or the typecasted value is outside the range of column # values, return nil. def _check_pk_lookup_value(pk) return if nil == pk case pk when SQL::Expression, LiteralString, Symbol return pk end return pk unless pk_type = @primary_key_type if pk_type.is_a?(Array) return unless pk.is_a?(Array) return unless pk.size == pk_type.size return if pk.any?(&:nil?)
pk_value_range = @primary_key_value_range i = 0 pk.map do |v| if type = pk_type[i] v = _typecast_pk_lookup_value(v, type) return if nil == v if pk_value_range min, max = pk_value_range[i] return if min && v < min return if max && v > max end end i += 1 v end elsif pk.is_a?(Array) return elsif nil != (pk = _typecast_pk_lookup_value(pk, pk_type)) min, max = @primary_key_value_range return if min && pk < min return if max && pk > max pk end end # Typecast the value to the appropriate type, # returning nil if it cannot be typecasted. def _typecast_pk_lookup_value(value, type) db.typecast_value(type, value) rescue InvalidValue nil end # Skip the primary key lookup if the typecasted and checked # primary key value is nil. def primary_key_lookup(pk) unless nil == (pk = _check_pk_lookup_value(pk)) super end end # Setup the primary key type and value range used for checking # primary key values during lookup. def setup_primary_key_lookup_check_values if primary_key.is_a?(Array) types = [] value_ranges = [] primary_key.each do |pk| type, min, max = _type_min_max_values_for_column(pk) types << type value_ranges << ([min, max].freeze if min || max) end @primary_key_type = (types.freeze if types.any?) @primary_key_value_range = (value_ranges.freeze if @primary_key_type && value_ranges.any?) else @primary_key_type, min, max = _type_min_max_values_for_column(primary_key) @primary_key_value_range = ([min, max].freeze if @primary_key_type && (min || max)) end end # Return the type, min_value, and max_value schema entries # for the column, if they exist. def _type_min_max_values_for_column(column) if schema = db_schema[column] schema.values_at(:type, :min_value, :max_value) end end end module DatasetMethods # Skip the primary key lookup if the typecasted and checked # primary key value is nil. def with_pk(pk) unless nil == (pk = model.send(:_check_pk_lookup_value, pk)) super end end end end end end
sequel-5.63.0/lib/sequel/plugins/rcte_tree.rb
# frozen-string-literal: true module Sequel module Plugins # = Overview # # The rcte_tree plugin deals with tree structured data stored # in the database using the adjacency list model (where child rows # have a foreign key pointing to the parent rows), using recursive # common table expressions to load all ancestors in a single query, # all descendants in a single query, and all descendants to a given # level (where level 1 is children, level 2 is children and grandchildren # etc.) in a single query. # # = Usage # # The rcte_tree plugin adds four associations to the model: parent, children, ancestors, and # descendants. Both the parent and children are fairly standard many_to_one # and one_to_many associations, respectively. However, the ancestors and # descendants associations are special.
Both the ancestors and descendants # associations will automatically set the parent and children associations, # respectively, for current object and all of the ancestor or descendant # objects, whenever they are loaded (either eagerly or lazily). Additionally, # the descendants association can take a level argument when called eagerly, # which limits the returned objects to only that many levels in the tree (see # the Overview). # # Model.plugin :rcte_tree # # # Lazy loading # model = Model.first # model.parent # model.children # model.ancestors # Populates :parent association for all ancestors # model.descendants # Populates :children association for all descendants # # # Eager loading - also populates the :parent and children associations # # for all ancestors and descendants # Model.where(id: [1, 2]).eager(:ancestors, :descendants).all # # # Eager loading children and grandchildren # Model.where(id: [1, 2]).eager(descendants: 2).all # # Eager loading children, grandchildren, and great grandchildren # Model.where(id: [1, 2]).eager(descendants: 3).all # # = Options # # You can override the options for any specific association by making # sure the plugin options contain one of the following keys: # # :parent :: hash of options for the parent association # :children :: hash of options for the children association # :ancestors :: hash of options for the ancestors association # :descendants :: hash of options for the descendants association # # Note that you can change the name of the above associations by specifying # a :name key in the appropriate hash of options above. For example: # # Model.plugin :rcte_tree, parent: {name: :mother}, # children: {name: :daughters}, descendants: {name: :offspring} # # Any other keys in the main options hash are treated as options shared by # all of the associations. Here's a few options that affect the plugin: # # :key :: The foreign key in the table that points to the primary key # of the parent (default: :parent_id) # :primary_key :: The primary key to use (default: the model's primary key) # :key_alias :: The symbol identifier to use for aliasing when eager # loading (default: :x_root_x) # :cte_name :: The symbol identifier to use for the common table expression # (default: :t) # :level_alias :: The symbol identifier to use when eagerly loading descendants # up to a given level (default: :x_level_x) module RcteTree # Create the appropriate parent, children, ancestors, and descendants # associations for the model. def self.apply(model, opts=OPTS) model.plugin :tree, opts opts = opts.dup opts[:class] = model opts[:methods_module] = Module.new model.send(:include, opts[:methods_module]) key = opts[:key] ||= :parent_id prkey = opts[:primary_key] ||= model.primary_key ka = opts[:key_alias] ||= :x_root_x t = opts[:cte_name] ||= :t c_all = if model.dataset.recursive_cte_requires_column_aliases? # Work around Oracle/ruby-oci8 bug that returns integers as BigDecimals in recursive queries. conv_bd = model.db.database_type == :oracle col_aliases = model.dataset.columns model_table = model.table_name col_aliases.map{|c| SQL::QualifiedIdentifier.new(model_table, c)} else [SQL::ColumnAll.new(model.table_name)] end bd_conv = lambda{|v| conv_bd && v.is_a?(BigDecimal) ? 
v.to_i : v} key_array = Array(key) prkey_array = Array(prkey) if key.is_a?(Array) key_conv = lambda{|m| key_array.map{|k| m[k]}} key_present = lambda{|m| key_conv[m].all?} prkey_conv = lambda{|m| prkey_array.map{|k| m[k]}} key_aliases = (0...key_array.length).map{|i| :"#{ka}_#{i}"} ancestor_base_case_columns = prkey_array.zip(key_aliases).map{|k, ka_| SQL::AliasedExpression.new(k, ka_)} + c_all descendant_base_case_columns = key_array.zip(key_aliases).map{|k, ka_| SQL::AliasedExpression.new(k, ka_)} + c_all recursive_case_columns = prkey_array.zip(key_aliases).map{|k, ka_| SQL::QualifiedIdentifier.new(t, ka_)} + c_all extract_key_alias = lambda{|m| key_aliases.map{|ka_| bd_conv[m.values.delete(ka_)]}} else key_present = key_conv = lambda{|m| m[key]} prkey_conv = lambda{|m| m[prkey]} key_aliases = [ka] ancestor_base_case_columns = [SQL::AliasedExpression.new(prkey, ka)] + c_all descendant_base_case_columns = [SQL::AliasedExpression.new(key, ka)] + c_all recursive_case_columns = [SQL::QualifiedIdentifier.new(t, ka)] + c_all extract_key_alias = lambda{|m| bd_conv[m.values.delete(ka)]} end parent = opts.merge(opts.fetch(:parent, OPTS)).fetch(:name, :parent) childrena = opts.merge(opts.fetch(:children, OPTS)).fetch(:name, :children) opts[:reciprocal] = nil a = opts.merge(opts.fetch(:ancestors, OPTS)) ancestors = a.fetch(:name, :ancestors) a[:read_only] = true unless a.has_key?(:read_only) a[:eager_grapher] = proc do |_| raise Sequel::Error, "the #{ancestors} association for #{self} does not support eager graphing" end a[:eager_loader_key] = key a[:dataset] ||= proc do base_ds = model.where(prkey_array.zip(key_array.map{|k| get_column_value(k)})) recursive_ds = model.join(t, key_array.zip(prkey_array)) if c = a[:conditions] (base_ds, recursive_ds) = [base_ds, recursive_ds].map do |ds| (c.is_a?(Array) && !Sequel.condition_specifier?(c)) ? ds.where(*c) : ds.where(c) end end table_alias = model.dataset.schema_and_table(model.table_name)[1].to_sym model.from(SQL::AliasedExpression.new(t, table_alias)). with_recursive(t, col_aliases ? base_ds.select(*col_aliases) : base_ds.select_all, recursive_ds.select(*c_all), :args=>col_aliases) end aal = Array(a[:after_load]) aal << proc do |m, ancs| unless m.associations.has_key?(parent) parent_map = {prkey_conv[m]=>m} child_map = {} child_map[key_conv[m]] = m if key_present[m] m.associations[parent] = nil ancs.each do |obj| obj.associations[parent] = nil parent_map[prkey_conv[obj]] = obj if ok = key_conv[obj] child_map[ok] = obj end end parent_map.each do |parent_id, obj| if child = child_map[parent_id] child.associations[parent] = obj end end end end a[:after_load] ||= aal a[:eager_loader] ||= proc do |eo| id_map = eo[:id_map] parent_map = {} children_map = {} Sequel.synchronize_with(eo[:mutex]) do eo[:rows].each do |obj| parent_map[prkey_conv[obj]] = obj (children_map[key_conv[obj]] ||= []) << obj obj.associations[ancestors] = [] obj.associations[parent] = nil end end r = model.association_reflection(ancestors) base_case = model.where(prkey=>id_map.keys). select(*ancestor_base_case_columns) recursive_case = model.join(t, key_array.zip(prkey_array)). select(*recursive_case_columns) if c = r[:conditions] (base_case, recursive_case) = [base_case, recursive_case].map do |ds| (c.is_a?(Array) && !Sequel.condition_specifier?(c)) ? ds.where(*c) : ds.where(c) end end table_alias = model.dataset.schema_and_table(model.table_name)[1].to_sym ds = model.from(SQL::AliasedExpression.new(t, table_alias)). 
with_recursive(t, base_case, recursive_case, :args=>((key_aliases + col_aliases) if col_aliases)) ds = r.apply_eager_dataset_changes(ds) ds = ds.select_append(ka) unless ds.opts[:select] == nil model.eager_load_results(r, eo.merge(:loader=>false, :initialize_rows=>false, :dataset=>ds, :id_map=>nil)) do |obj| opk = prkey_conv[obj] if idm_obj = parent_map[opk] key_aliases.each{|ka_| idm_obj.values[ka_] = obj.values[ka_]} obj = idm_obj else obj.associations[parent] = nil parent_map[opk] = obj (children_map[key_conv[obj]] ||= []) << obj end id_map[extract_key_alias[obj]].each do |root| root.associations[ancestors] << obj end end Sequel.synchronize_with(eo[:mutex]) do parent_map.each do |parent_id, obj| if children = children_map[parent_id] children.each do |child| child.associations[parent] = obj end end end end end model.one_to_many ancestors, a d = opts.merge(opts.fetch(:descendants, OPTS)) descendants = d.fetch(:name, :descendants) d[:read_only] = true unless d.has_key?(:read_only) d[:eager_grapher] = proc do |_| raise Sequel::Error, "the #{descendants} association for #{self} does not support eager graphing" end la = d[:level_alias] ||= :x_level_x d[:dataset] ||= proc do base_ds = model.where(key_array.zip(prkey_array.map{|k| get_column_value(k)})) recursive_ds = model.join(t, prkey_array.zip(key_array)) if c = d[:conditions] (base_ds, recursive_ds) = [base_ds, recursive_ds].map do |ds| (c.is_a?(Array) && !Sequel.condition_specifier?(c)) ? ds.where(*c) : ds.where(c) end end table_alias = model.dataset.schema_and_table(model.table_name)[1].to_sym model.from(SQL::AliasedExpression.new(t, table_alias)). with_recursive(t, col_aliases ? base_ds.select(*col_aliases) : base_ds.select_all, recursive_ds.select(*c_all), :args=>col_aliases) end dal = Array(d[:after_load]) dal << proc do |m, descs| unless m.associations.has_key?(childrena) parent_map = {prkey_conv[m]=>m} children_map = {} m.associations[childrena] = [] descs.each do |obj| obj.associations[childrena] = [] if opk = prkey_conv[obj] parent_map[opk] = obj end if ok = key_conv[obj] (children_map[ok] ||= []) << obj end end children_map.each do |parent_id, objs| parent_obj = parent_map[parent_id] parent_obj.associations[childrena] = objs objs.each do |obj| obj.associations[parent] = parent_obj end end end end d[:after_load] = dal d[:eager_loader] ||= proc do |eo| id_map = eo[:id_map] associations = eo[:associations] parent_map = {} children_map = {} Sequel.synchronize_with(eo[:mutex]) do eo[:rows].each do |obj| parent_map[prkey_conv[obj]] = obj obj.associations[descendants] = [] obj.associations[childrena] = [] end end r = model.association_reflection(descendants) base_case = model.where(key=>id_map.keys). select(*descendant_base_case_columns) recursive_case = model.join(t, prkey_array.zip(key_array)). select(*recursive_case_columns) if c = r[:conditions] (base_case, recursive_case) = [base_case, recursive_case].map do |ds| (c.is_a?(Array) && !Sequel.condition_specifier?(c)) ? ds.where(*c) : ds.where(c) end end if associations.is_a?(Integer) level = associations no_cache_level = level - 1 associations = {} base_case = base_case.select_append(SQL::AliasedExpression.new(Sequel.cast(0, Integer), la)) recursive_case = recursive_case.select_append(SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(t, la) + 1, la)).where(SQL::QualifiedIdentifier.new(t, la) < level - 1) end table_alias = model.dataset.schema_and_table(model.table_name)[1].to_sym ds = model.from(SQL::AliasedExpression.new(t, table_alias)). 
with_recursive(t, base_case, recursive_case, :args=>((key_aliases + col_aliases + (level ? [la] : [])) if col_aliases)) ds = r.apply_eager_dataset_changes(ds) ds = ds.select_append(ka) unless ds.opts[:select] == nil model.eager_load_results(r, eo.merge(:loader=>false, :initialize_rows=>false, :dataset=>ds, :id_map=>nil, :associations=>OPTS)) do |obj| if level no_cache = no_cache_level == obj.values.delete(la) end opk = prkey_conv[obj] if idm_obj = parent_map[opk] key_aliases.each{|ka_| idm_obj.values[ka_] = obj.values[ka_]} obj = idm_obj else obj.associations[childrena] = [] unless no_cache parent_map[opk] = obj end if root = id_map[extract_key_alias[obj]].first root.associations[descendants] << obj end (children_map[key_conv[obj]] ||= []) << obj end Sequel.synchronize_with(eo[:mutex]) do children_map.each do |parent_id, objs| objs = objs.uniq parent_obj = parent_map[parent_id] parent_obj.associations[childrena] = objs objs.each do |obj| obj.associations[parent] = parent_obj end end end end model.one_to_many descendants, d end end end end
sequel-5.63.0/lib/sequel/plugins/require_valid_schema.rb
# frozen-string-literal: true module Sequel module Plugins # The require_valid_schema plugin makes Sequel raise or warn if attempting # to set the dataset of a model class to a simple table, where the database # supports schema parsing, but schema parsing does not work for the model's # table. # # The plugin's default behavior requires that all models that select from a # single identifier have a valid table schema, if the database supports # schema parsing. If the schema cannot be determined for such # a model, an error is raised: # # Sequel::Model.plugin :require_valid_schema # # If you load the plugin with an argument of :warn, Sequel will warn instead # of raising for such tables: # # Sequel::Model.plugin :require_valid_schema, :warn # # This can catch bugs where you expect models to have valid schema, but # they do not. This setting only affects future attempts to set datasets # in the current class and subclasses created in the future. # # If you load the plugin with an argument of false, it will not require valid schema.
# This can be used in subclasses where you do not want to require valid schema, # but the plugin must be loaded before a dataset with invalid schema is set: # # Sequel::Model.plugin :require_valid_schema # InvalidSchemaAllowed = Class.new(Sequel::Model) # InvalidSchemaAllowed.plugin :require_valid_schema, false # class MyModel < InvalidSchemaAllowed # end module RequireValidSchema # Set whether the model requires valid schema (true, :warn, or false). def self.configure(model, setting=true) model.instance_variable_set(:@require_valid_schema, setting) end module ClassMethods Plugins.inherited_instance_variables(self, :@require_valid_schema=>nil) private # If the schema cannot be determined, the model uses a simple table, # require_valid_schema is set, and the database supports schema parsing, raise or # warn based on the require_valid_schema setting. def get_db_schema_array(reload) schema_array = super if !schema_array && simple_table && @require_valid_schema message = "Not able to parse schema for model: #{inspect}, table: #{simple_table}" if @require_valid_schema == :warn warn message else raise Error, message end end schema_array end end end end end
sequel-5.63.0/lib/sequel/plugins/serialization.rb
# frozen-string-literal: true module Sequel module Plugins # The serialization plugin allows you to keep serialized # ruby objects in the database, while giving you deserialized objects # when you call an accessor. # # This plugin works by keeping the serialized value in the values, and # adding a deserialized_values hash. The reader method for serialized columns # will check the deserialized_values for the value, return it if present, # or deserialize the entry in values and return it. The writer method will # set the deserialized_values entry. This plugin adds a before_validation hook # that serializes all deserialized_values to values. # # You can specify the serialization format as a pair of serializer/deserializer # callable objects. You can also specify the serialization format as a single # symbol, if such a symbol has a registered serializer/deserializer pair in the # plugin. By default, the plugin registers the :marshal, :yaml, and :json # serialization formats. To register your own serialization formats, use # Sequel::Plugins::Serialization.register_format. # If you use yaml or json format, you need to require the libraries, Sequel # does not do the requiring for you. # # You can specify the columns to serialize when loading the plugin, or later # using the serialize_attributes class method.
# # Because of how this plugin works, it must be used inside each model class # that needs serialization, after any set_dataset method calls in that class. # Otherwise, it is possible that the default column accessors will take # precedence. # # == Example # # # Require json if you plan to use it, as the plugin doesn't require it for you. # require 'json' # # # Register custom serializer/deserializer pair, if desired # require 'sequel/plugins/serialization' # Sequel::Plugins::Serialization.register_format(:reverse, :reverse.to_proc, :reverse.to_proc) # # class User < Sequel::Model # # Built-in format support when loading the plugin # plugin :serialization, :json, :permissions # # # Built-in format support after loading the plugin using serialize_attributes # plugin :serialization # serialize_attributes :marshal, :permissions # # # Use custom registered serialization format just like built-in format # serialize_attributes :reverse, :password # # # Use a custom serializer/deserializer pair without registering # serialize_attributes [:reverse.to_proc, :reverse.to_proc], :password # end # user = User.create # user.permissions = {global: 'read-only'} # user.save # # Note that if you mutate serialized column values without reassigning them, # those changes won't be picked up by <tt>Model#save_changes</tt> or # <tt>Model#update</tt>. Example: # # user = User[1] # user.permissions[:global] = 'foo' # user.save_changes # Will not pick up changes to permissions # # You can use the +serialization_modification_detection+ plugin to pick # up such changes. module Serialization # The default serializers supported by the serialization module. # Use register_format to add serializers to this hash. REGISTERED_FORMATS = {} # Set up the column readers to do deserialization and the column writers # to save the value in deserialized_values. def self.apply(model, *args) model.instance_exec do @deserialization_map = {} @serialization_map = {} end end # Automatically call serialize_attributes with the format and columns unless # no columns were provided. def self.configure(model, format=nil, *columns) model.serialize_attributes(format, *columns) unless columns.empty? end # Register a serializer/deserializer pair with a format symbol, to allow # models to pick this format by name. Both serializer and deserializer # should be callable objects. def self.register_format(format, serializer, deserializer) Sequel.synchronize{REGISTERED_FORMATS[format] = [serializer, deserializer].freeze} end register_format(:marshal, lambda{|v| [Marshal.dump(v)].pack('m')}, lambda do |v| # Handle unpacked marshalled data for backwards compat v = v.unpack('m')[0] unless v[0..1] == "\x04\x08" Marshal.load(v) end) register_format(:yaml, :to_yaml.to_proc, lambda{|s| YAML.load(s)}) register_format(:json, Sequel.method(:object_to_json), Sequel.method(:parse_json)) module ClassMethods # A hash with column name symbols and callable values, with the value # called to deserialize the column. attr_reader :deserialization_map # A hash with column name symbols and callable values, with the value # called to serialize the column. attr_reader :serialization_map Plugins.inherited_instance_variables(self, :@deserialization_map=>:dup, :@serialization_map=>:dup) # Freeze serialization metadata when freezing model class. 
        def freeze
          @deserialization_map.freeze
          @serialization_map.freeze
          @serialization_module.freeze if @serialization_module
          super
        end

        # Create instance level reader that deserializes column values on request,
        # and instance level writer that stores new deserialized values.
        def serialize_attributes(format, *columns)
          if format.is_a?(Symbol)
            unless format = Sequel.synchronize{REGISTERED_FORMATS[format]}
              raise(Error, "Unsupported serialization format: #{format} (valid formats: #{Sequel.synchronize{REGISTERED_FORMATS.keys}.inspect})")
            end
          end
          serializer, deserializer = format
          raise(Error, "No columns given. The serialization plugin requires you specify which columns to serialize") if columns.empty?
          define_serialized_attribute_accessor(serializer, deserializer, *columns)
        end

        private

        # Add serialized attribute accessor methods to the serialization_module
        def define_serialized_attribute_accessor(serializer, deserializer, *columns)
          m = self
          include(@serialization_module ||= Module.new) unless @serialization_module
          @serialization_module.class_eval do
            columns.each do |column|
              m.serialization_map[column] = serializer
              m.deserialization_map[column] = deserializer
              define_method(column) do
                if deserialized_values.has_key?(column)
                  deserialized_values[column]
                elsif frozen?
                  deserialize_value(column, super())
                else
                  deserialized_values[column] = deserialize_value(column, super())
                end
              end
              alias_method(column, column)

              setter = :"#{column}="
              define_method(setter) do |v|
                cc = changed_columns
                if !cc.include?(column) && (new? || get_column_value(column) != v)
                  cc << column
                  will_change_column(column) if respond_to?(:will_change_column)
                end
                deserialized_values[column] = v
              end
              alias_method(setter, setter)
            end
          end
        end
      end

      module InstanceMethods
        # Hash of deserialized values, used as a cache.
        def deserialized_values
          @deserialized_values ||= {}
        end

        # Freeze the deserialized values
        def freeze
          deserialized_values
          super
          deserialized_values.freeze
          self
        end

        # Serialize deserialized values before saving
        def before_validation
          serialize_deserialized_values
          super
        end

        private

        # Clear any cached deserialized values when doing a manual refresh.
        def _refresh_set_values(hash)
          @deserialized_values.clear if @deserialized_values
          super
        end

        # Deserialize the column value. Called when the model column accessor is called to
        # return a deserialized value.
        def deserialize_value(column, v)
          unless v.nil?
            raise Sequel::Error, "no entry in deserialization_map for #{column.inspect}" unless callable = model.deserialization_map[column]
            callable.call(v)
          end
        end

        # Dup the deserialized values when duping model instance.
        def initialize_copy(other)
          super
          @deserialized_values = Hash[other.deserialized_values]
          self
        end

        # Serialize all deserialized values
        def serialize_deserialized_values
          deserialized_values.each{|k,v| @values[k] = serialize_value(k, v)}
        end

        # Serialize the column value. Called before saving to ensure the serialized value
        # is saved in the database.
        def serialize_value(column, v)
          unless v.nil?
raise Sequel::Error, "no entry in serialization_map for #{column.inspect}" unless callable = model.serialization_map[column] callable.call(v) end end end end end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/serialization_modification_detection.rb����������������������������0000664�0000000�0000000�00000005746�14342141206�0026626�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # This plugin extends the serialization plugin and enables it to detect # changes in serialized values by checking whether the current # deserialized value is the same as the original deserialized value. # The serialization plugin does not do such checks by default, as they # often aren't needed and can hurt performance. # # Note that for this plugin to work correctly, the values you are # serializing must roundtrip correctly (i.e. deserialize(serialize(value)) # should equal value). This is true in most cases, but not in all. For # example, ruby symbols round trip through yaml, but not json (as they get # turned into strings in json). # # == Example # # require 'sequel' # require 'json' # class User < Sequel::Model # plugin :serialization, :json, :permissions # plugin :serialization_modification_detection # end # user = User.create(permissions: {}) # user.permissions[:global] = 'read-only' # user.save_changes module SerializationModificationDetection # Load the serialization plugin automatically. def self.apply(model) model.plugin :serialization end module InstanceMethods # Clear the cache of original deserialized values after saving so that it doesn't # show the column is modified after saving. def after_save super @original_deserialized_values = @deserialized_values end # Detect which serialized columns have changed. def changed_columns cc = super cc = cc.dup if frozen? deserialized_values.each{|c, v| cc << c if !cc.include?(c) && original_deserialized_value(c) != v} cc end # Freeze the original deserialized values when freezing the instance. def freeze @original_deserialized_values ||= {} super @original_deserialized_values.freeze self end private # Duplicate the original deserialized values when duplicating instance. def initialize_copy(other) super if o = other.instance_variable_get(:@original_deserialized_values) @original_deserialized_values = Hash[o] end self end # For new objects, serialize any existing deserialized values so that changes can # be detected. def initialize_set(values) super serialize_deserialized_values end # Return the original deserialized value of the column, caching it to improve performance. def original_deserialized_value(column) if frozen? 
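            # Use the value cached when the instance was frozen, if any;
            # otherwise deserialize on each call, since the frozen cache
            # hash can no longer be mutated.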
            @original_deserialized_values[column] || deserialize_value(column, self[column])
          else
            (@original_deserialized_values ||= {})[column] ||= deserialize_value(column, self[column])
          end
        end
      end
    end
  end
end
��������������������������sequel-5.63.0/lib/sequel/plugins/sharding.rb��������������������������������������������������������0000664�0000000�0000000�00000007770�14342141206�0020760�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true

module Sequel
  module Plugins
    # The sharding plugin augments Sequel's default model sharding support
    # in the following ways:
    #
    # * It automatically saves model instances back to the
    #   shard they were retrieved from.
    # * It makes model associations use the same shard as the model object.
    # * It adds a slightly nicer API for creating model instances on
    #   specific shards.
    #
    # Usage:
    #
    #   # Add the sharding support to all model subclasses (called before loading subclasses)
    #   Sequel::Model.plugin :sharding
    #
    #   # Add the sharding support to the Album class
    #   Album.plugin :sharding
    module Sharding
      module ClassMethods
        # Create a new object on the given shard s.
        def create_using_server(s, values=OPTS, &block)
          new_using_server(s, values, &block).save
        end

        # Eager load the association with the given eager loader options.
        def eager_load_results(opts, eo, &block)
          if (s = eo[:self]) && (server = s.opts[:server])
            eb = eo[:eager_block]
            set_server = proc do |ds|
              ds = eb.call(ds) if eb
              ds = ds.server?(server)
              ds
            end
            eo = Hash[eo]
            eo[:eager_block] = set_server
            eo
          end

          super
        end

        # Return a newly instantiated object that is tied to the given
        # shard s. When the object is saved, a record will be inserted
        # on shard s.
        def new_using_server(s, values=OPTS, &block)
          new(values, &block).set_server(s)
        end

        private

        # Set the server for each graphed dataset to the current server
        # unless the graphed dataset already has a server set.
        def eager_graph_dataset(opts, eager_options)
          ds = super
          if s = eager_options[:self].opts[:server]
            ds = ds.server?(s)
          end
          ds
        end
      end

      module InstanceMethods
        # Set the server that this object is tied to, unless it has
        # already been set. Returns self.
        def set_server?(s)
          @server ||= s
          self
        end

        private

        # Ensure that association datasets are tied to the correct shard.
        def _apply_association_options(*args)
          use_server(super)
        end

        # Don't use an associated object loader, as it won't respect the shard used.
        def _associated_object_loader(opts, dynamic_opts)
          nil
        end

        # Ensure that the join table for many_to_many associations uses the correct shard.
        def _join_table_dataset(opts)
          use_server(super)
        end

        # If creating the object by doing <tt>add_association</tt> for a
        # +many_to_many+ association, make sure the associated object is created on the
        # current object's shard, unless the passed object already has an assigned shard.
        def ensure_associated_primary_key(opts, o, *args)
          o.set_server?(@server) if o.respond_to?(:set_server?)
          super
        end

        # Don't use primary key lookup to load associated objects, since that will not
        # respect the current object's server.
def load_with_primary_key_lookup?(opts, dynamic_opts) false end end module DatasetMethods # If a row proc exists on the dataset, replace it with one that calls the # previous row_proc, but calls set_server on the output of that row_proc, # ensuring that objects retrieved by a specific shard know which shard they # are tied to. def row_proc rp = super if rp case server = db.pool.send(:pick_server, opts[:server]) when nil, :default, :read_only # nothing else old_rp = rp rp = proc{|r| old_rp.call(r).set_server(server)} end end rp end end end end end ��������sequel-5.63.0/lib/sequel/plugins/single_table_inheritance.rb����������������������������������������0000664�0000000�0000000�00000024341�14342141206�0024157�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The single_table_inheritance plugin allows storing all objects # in the same class hierarchy in the same table. It makes it so # subclasses of this model only load rows related to the subclass, # and when you retrieve rows from the main class, you get instances # of the subclasses (if the rows should use the subclasses's class). # # By default, the plugin assumes that the +sti_key+ column (the first # argument to the plugin) holds the class name as a string. However, # you can override this by using the <tt>:model_map</tt> option and/or # the <tt>:key_map</tt> option. # # You should only load this plugin in the parent class, not in the subclasses. # # You shouldn't call set_dataset in the model after applying this # plugin, otherwise subclasses might use the wrong dataset. You should # make sure this plugin is loaded before the subclasses. Note that since you # need to load the plugin before the subclasses are created, you can't use # direct class references in the plugin class. You should specify subclasses # in the plugin call using class name strings or symbols, see usage below. # # Usage: # # # Use the default of storing the class name in the sti_key # # column (:kind in this case) # class Employee < Sequel::Model # plugin :single_table_inheritance, :kind # end # # # Have subclasses inherit from the appropriate class # class Staff < Employee; end # class Manager < Employee; end # # # You can also use many different options to configure the plugin: # # # Using integers to store the class type, with a :model_map hash # # and an sti_key of :type # Employee.plugin :single_table_inheritance, :type, # model_map: {1=>:Staff, 2=>:Manager} # # # Using non-class name strings # Employee.plugin :single_table_inheritance, :type, # model_map: {'line staff'=>:Staff, 'supervisor'=>:Manager} # # # By default the plugin sets the respective column value # # when a new instance is created. # Staff.create.type == 'line staff' # Manager.create.type == 'supervisor' # # # You can customize this behavior with the :key_chooser option. # # This is most useful when using a non-bijective mapping. 
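    #   # (That is, where multiple column values map to the same class, so the
    #   # class-to-value lookup alone cannot recover the original value.)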
# Employee.plugin :single_table_inheritance, :type, # model_map: {'line staff'=>:Staff, 'supervisor'=>:Manager}, # key_chooser: lambda{|instance| instance.model.sti_key_map[instance.model.to_s].first || 'stranger'} # # # Using custom procs, with :model_map taking column values # # and yielding either a class, string, symbol, or nil, # # and :key_map taking a class object and returning the column # # value to use # Employee.plugin :single_table_inheritance, :type, # model_map: :reverse.to_proc, # key_map: lambda{|klass| klass.name.reverse} # # # You can use the same class for multiple values. # # This is mainly useful when the sti_key column contains multiple values # # which are different but do not require different code. # Employee.plugin :single_table_inheritance, :type, # model_map: {'staff' => "Staff", # 'manager' => "Manager", # 'overpayed staff' => "Staff", # 'underpayed staff' => "Staff"} # # One minor issue to note is that if you specify the <tt>:key_map</tt> # option as a hash, instead of having it inferred from the <tt>:model_map</tt>, # you should only use class name strings as keys, you should not use symbols # as keys. module SingleTableInheritance # Setup the necessary STI variables, see the module RDoc for SingleTableInheritance def self.configure(model, key, opts=OPTS) model.instance_exec do @sti_key_array = nil @sti_key = key @sti_dataset = dataset @sti_model_map = opts[:model_map] || lambda{|v| v if v && v != ''} @sti_key_map = if km = opts[:key_map] if km.is_a?(Hash) h = Hash.new do |h1,k| unless k.is_a?(String) h1[k.to_s] else [] end end km.each do |k,v| h[k.to_s] = [] unless h.key?(k.to_s) h[k.to_s].push( *Array(v) ) end h else km end elsif sti_model_map.is_a?(Hash) h = Hash.new do |h1,k| unless k.is_a?(String) h1[k.to_s] else [] end end sti_model_map.each do |k,v| h[v.to_s] = [] unless h.key?(v.to_s) h[v.to_s] << k end h else lambda{|klass| klass.name.to_s} end @sti_key_chooser = opts[:key_chooser] || lambda{|inst| Array(inst.model.sti_key_map[inst.model]).last } @dataset = @dataset.with_row_proc(model.method(:sti_load)) end end module ClassMethods # The base dataset for STI, to which filters are added to get # only the models for the specific STI subclass. attr_reader :sti_dataset # The column name holding the STI key for this model attr_reader :sti_key # Array holding keys for all subclasses of this class, used for the # dataset filter in subclasses. Nil in the main class. attr_reader :sti_key_array # A hash/proc with class keys and column value values, mapping # the class to a particular value given to the sti_key column. # Used to set the column value when creating objects, and for the # filter when retrieving objects in subclasses. attr_reader :sti_key_map # A hash/proc with column value keys and class values, mapping # the value of the sti_key column to the appropriate class to use. attr_reader :sti_model_map # A proc which returns the value to use for new instances. # This defaults to a lookup in the key map. attr_reader :sti_key_chooser Plugins.inherited_instance_variables(self, :@sti_dataset=>nil, :@sti_key=>nil, :@sti_key_map=>nil, :@sti_model_map=>nil, :@sti_key_chooser=>nil) # Freeze STI information when freezing model class. Note that # because of how STI works, you should not freeze an STI subclass # until after all subclasses of it have been created. 
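        # (Once a class is frozen, its sti_key_array can no longer be updated
        # by sti_subclass_added when later subclasses are created.)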
        def freeze
          @sti_key_array.freeze if @sti_key_array
          @sti_key_map.freeze if @sti_key_map.is_a?(Hash)
          @sti_model_map.freeze if @sti_model_map.is_a?(Hash)

          super
        end

        # Return an instance of the class specified by sti_key,
        # used by the row_proc.
        def sti_load(r)
          sti_class_from_sti_key(r[sti_key]).call(r)
        end

        # Return the sti class based on one of the keys from sti_model_map.
        def sti_class_from_sti_key(key)
          sti_class(sti_model_map[key])
        end

        # Make sure that all subclasses of the parent class correctly include
        # keys for all of their descendant classes.
        def sti_subclass_added(key)
          if sti_key_array
            key_array = Array(key)
            Sequel.synchronize{sti_key_array.push(*key_array)}
            superclass.sti_subclass_added(key)
          end
        end

        private

        # Extend the sti dataset with the module when extending the main
        # dataset.
        def dataset_extend(mod, opts=OPTS)
          @sti_dataset = @sti_dataset.with_extend(mod)
          super
        end

        # Copy the necessary attributes to the subclasses, and filter the
        # subclass's dataset based on the sti_key_map entry for the class.
        def inherited(subclass)
          super
          key = Array(sti_key_map[subclass]).dup
          sti_subclass_added(key)
          rp = dataset.row_proc
          subclass.set_dataset(sti_subclass_dataset(key), :inherited=>true)
          subclass.instance_exec do
            @dataset = @dataset.with_row_proc(rp)
            @sti_key_array = key
            self.simple_table = nil
          end
        end

        # If calling set_dataset manually, make sure to set the dataset
        # row proc to one that handles inheritance correctly.
        def set_dataset_row_proc(ds)
          if @dataset
            ds.with_row_proc(@dataset.row_proc)
          else
            super
          end
        end

        # Return a class object. If a class is given, return it directly.
        # Treat strings and symbols as class names. If nil is given or
        # an invalid class name string or symbol is used, return self.
        # Raise an error for other types.
        def sti_class(v)
          case v
          when String, Symbol
            constantize(v) rescue self
          when nil
            self
          when Class
            v
          else
            raise(Error, "Invalid class type used: #{v.inspect}")
          end
        end

        # Use the given dataset for the subclass, with key being the allowed
        # values for the sti_key field.
        def sti_subclass_dataset(key)
          sti_dataset.where(SQL::QualifiedIdentifier.new(sti_dataset.first_source_alias, sti_key)=>Sequel.delay{Sequel.synchronize{key}})
        end
      end

      module InstanceMethods
        # Set the sti_key column based on the sti_key_map.
        def before_validation
          if new? && model.sti_key && !self[model.sti_key]
            set_column_value("#{model.sti_key}=", model.sti_key_chooser.call(self))
          end
          super
        end

        private

        # Limit tactical eager loading objects to objects that support the same association.
        def _filter_tactical_eager_load_objects(opts)
          objects = defined?(super) ? super : retrieved_with.dup
          name = opts[:name]
          objects.select!{|x| x.model.association_reflections.include?(name)}
          objects
        end

        # Don't allow use of prepared statements.
def use_prepared_statements_for?(type) false end end end end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/singular_table_names.rb��������������������������������������������0000664�0000000�0000000�00000002123�14342141206�0023326�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The singular_table_names plugin changes the default # table names for subclasses to not assume a plural version. # By default, Sequel assumes table names for models use # the plural versions. # # Note that this plugin only affects subclasses of the # class it is loaded into, it does not affect the # current class. So it only makes sense to load this # into Sequel::Model itself, or a subclass of Sequel::Model # that is created via Class.new. # # Usage: # # # Don't assume pluralized table names # Sequel::Model.plugin :singular_table_names module SingularTableNames module ClassMethods # Returns the implicit table name for the model class, which is the demodulized, # underscored, name of the class. # # Artist.implicit_table_name # => :artist # Foo::ArtistAlias.implicit_table_name # => :artist_alias def implicit_table_name underscore(demodulize(name)).to_sym end end end end end ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/skip_create_refresh.rb���������������������������������������������0000664�0000000�0000000�00000002332�14342141206�0023161�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The skip_create_refresh plugin skips the # refresh after saving a new model object. Sequel does the # refresh by default to make sure all columns are populated, which is # necessary so that database defaults work correctly. # # This plugin is mostly for performance reasons where you # want to save the cost of select statement after the insert, # but it could also help cases where records are not # immediately available for selection after insertion. # # Note that Sequel by default does not attempt to refresh records when # updating existing model objects, only when inserting new # model objects. 
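    #
    # A minimal sketch of the effect (the SQL shown is illustrative):
    #
    #   album = Album.new(name: 'RF')
    #   album.save
    #   # INSERT INTO albums (name) VALUES ('RF')
    #   # No SELECT is issued afterward, so columns that rely on
    #   # database defaults remain unpopulated until a manual refresh.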
    #
    # Usage:
    #
    #   # Make all model subclass instances skip refreshes when saving
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :skip_create_refresh
    #
    #   # Make the Album class skip refreshes when saving
    #   Album.plugin :skip_create_refresh
    module SkipCreateRefresh
      module InstanceMethods
        private

        # Do nothing instead of refreshing the record inside of save.
        def _save_refresh
          nil
        end
      end
    end
  end
end
������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/skip_saving_columns.rb���������������������������������������������0000664�0000000�0000000�00000007106�14342141206�0023233�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true

module Sequel
  module Plugins
    # The skip_saving_columns plugin allows skipping specific columns when
    # saving. By default, it skips columns that the database schema
    # indicates are generated columns:
    #
    #   # Assume id column, name column, and id2 generated column
    #   album = Album[1]
    #   album.id # => 1
    #   album.name # => 'X'
    #   album.id2 # => 2
    #   album.save
    #   # UPDATE album SET name = 'X' WHERE (id = 1)
    #
    # You can override which columns will be skipped:
    #
    #   Album.skip_saving_columns = [:name]
    #   album.save
    #   # UPDATE album SET id2 = 2 WHERE (id = 1)
    #
    # The skipping happens for all usage of Model#save and callers of it (e.g.
    # Model.create, Model.update). When using the plugin, the only way to get
    # it to save a column marked for skipping is to explicitly specify it:
    #
    #   album.save(columns: [:name, :id2])
    #   # UPDATE album SET name = 'X', id2 = 2 WHERE (id = 1)
    #
    # Usage:
    #
    #   # Support skipping saving columns in all Sequel::Model subclasses
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :skip_saving_columns
    #
    #   # Support skipping saving columns in the Album class
    #   Album.plugin :skip_saving_columns
    module SkipSavingColumns
      # Setup skipping of the generated columns for a model with an existing dataset.
      def self.configure(mod)
        mod.instance_exec do
          set_skip_saving_generated_columns if @dataset
        end
      end

      module ClassMethods
        # An array of column symbols for columns to skip when saving.
        attr_reader :skip_saving_columns

        # Override the default array of columns to skip. Once overridden, future
        # changes to the class's dataset and future subclasses will automatically
        # use these overridden columns, instead of introspecting the database schema.
        def skip_saving_columns=(v)
          @_skip_saving_columns_no_override = true
          @skip_saving_columns = v.dup.freeze
        end

        Plugins.after_set_dataset(self, :set_skip_saving_generated_columns)
        Plugins.inherited_instance_variables(self, :@skip_saving_columns=>:dup, :@_skip_saving_columns_no_override=>nil)

        private

        # If the skip saving columns has not been overridden, check the database
        # schema and automatically skip any generated columns.
        def set_skip_saving_generated_columns
          return if @_skip_saving_columns_no_override
          s = []
          db_schema.each do |k, v|
            s << k if v[:generated]
          end
          @skip_saving_columns = s.freeze
          nil
        end
      end

      module InstanceMethods
        private

        # Skip the columns the model has marked to skip when inserting.
        def _insert_values
          _save_removed_skipped_columns(Hash[super])
        end

        # Skip the columns the model has marked to skip when updating
        # all columns.
        def _save_update_all_columns_hash
          _save_removed_skipped_columns(super)
        end

        # Skip the columns the model has marked to skip when updating
        # only changed columns.
        def _save_update_changed_colums_hash
          _save_removed_skipped_columns(super)
        end

        # Remove any columns the model has marked to skip when saving.
        def _save_removed_skipped_columns(hash)
          model.skip_saving_columns.each do |column|
            hash.delete(column)
          end
          hash
        end
      end
    end
  end
end
����������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/split_values.rb����������������������������������������������������0000664�0000000�0000000�00000004311�14342141206�0021663�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true

module Sequel
  module Plugins
    # The split_values plugin splits the values hash retrieved from the
    # database, and moves keys from the values hash that are not columns
    # in the model's dataset to a separate hash. This makes it so the
    # values hash only stores columns from the model's dataset.
    #
    # Among other things, this allows you to save model objects even if
    # they were retrieved with additional columns, and have equality
    # comparisons with other instances not care about non-column values.
    #
    # Example:
    #
    #   class Album < Sequel::Model
    #     plugin :split_values
    #   end
    #   a1 = Album[1]
    #   a2 = Album.select_append(Sequel.as(true, :exists))[1]
    #   a1.name # => 'Album Name'
    #   a2.name # => 'Album Name'
    #   a1[:exists] # => nil
    #   a2[:exists] # => true
    #   a1 == a2 # => true
    #   a2.values # => {:id=>1, :name=>'Album Name'}
    #   a2.save # Works
    #
    # Usage:
    #
    #   # Make all model subclass instances split values
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :split_values
    #
    #   # Make the Album class split values
    #   Album.plugin :split_values
    module SplitValues
      module ClassMethods
        # Split the noncolumn values when creating a new object retrieved from
        # the database.
        def call(_)
          super.split_noncolumn_values
        end
      end

      module InstanceMethods
        # If there isn't an entry in the values hash, but there is a noncolumn_values
        # hash, look in that hash for the value.
        def [](k)
          if (res = super).nil?
            @noncolumn_values[k] if !@values.has_key?(k) && @noncolumn_values
          else
            res
          end
        end

        # Check all entries in the values hash. If any of the keys are not columns,
        # move the entry into the noncolumn_values hash.
        def split_noncolumn_values
          cols = (@values.keys - columns)
          return self if cols.empty?
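          # Move each remaining non-column entry out of the values hash and
          # into the noncolumn_values hash.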
          nc = @noncolumn_values ||= {}
          vals = @values
          cols.each{|k| nc[k] = vals.delete(k)}
          self
        end
      end
    end
  end
end
�����������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/sql_comments.rb����������������������������������������������������0000664�0000000�0000000�00000015511�14342141206�0021661�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true

module Sequel
  module Plugins
    # The sql_comments plugin will automatically use SQL comments on
    # queries for the model it is loaded into. These comments will
    # show the related model, what type of method was called, and
    # the method name (or association name for queries to load
    # associations):
    #
    #   album = Album[1]
    #   # SELECT * FROM albums WHERE (id = 1) LIMIT 1
    #   # -- model:Album,method_type:class,method:[]
    #
    #   album.update(name: 'A')
    #   # UPDATE albums SET name = 'A' WHERE (id = 1)
    #   # -- model:Album,method_type:instance,method:update
    #
    #   album.artist
    #   # SELECT * FROM artists WHERE (artists.id = 1)
    #   # -- model:Album,method_type:association_load,association:artist
    #
    #   Album.eager(:artists).all
    #   # SELECT * FROM albums
    #   # SELECT * FROM artists WHERE (artists.id IN (1))
    #   # -- model:Album,method_type:association_eager_load,association:artist
    #
    #   Album.where(id: 1).delete
    #   # DELETE FROM albums WHERE (id = 1)
    #   # -- model:Album,method_type:dataset,method:delete
    #
    # This plugin automatically supports the class, instance, and dataset
    # methods that are supported by default in Sequel::Model. To support
    # custom class, instance, and dataset methods, such as those added by
    # other plugins, you can use the appropriate <tt>sql_comments_*_methods</tt>
    # class method:
    #
    #   Album.sql_comments_class_methods :first_by_name # example from finder plugin, with :mod option
    #   Album.sql_comments_instance_methods :lazy_attribute_lookup # lazy_attributes plugin
    #   Album.sql_comments_dataset_methods :to_csv # csv_serializer plugin
    #
    # In order for the sql_comments plugin to work, the sql_comments
    # Database extension must be loaded into the model's database.
    #
    # Note that in order to make sure SQL comments are included, some
    # optimizations are disabled if this plugin is loaded.
    #
    # Usage:
    #
    #   # Make all model subclasses support automatic SQL comments
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :sql_comments
    #
    #   # Make the Album class support automatic SQL comments
    #   Album.plugin :sql_comments
    module SqlComments
      # Define a method +meth+ on the given module +mod+ that will use automatic
      # SQL comments with the given model, method_type, and method.
def self.def_sql_commend_method(mod, model, method_type, meth) mod.send(:define_method, meth) do |*a, &block| model.db.with_comments(:model=>model, :method_type=>method_type, :method=>meth) do super(*a, &block) end end # :nocov: mod.send(:ruby2_keywords, meth) if mod.respond_to?(:ruby2_keywords, true) # :nocov: end def self.configure(model) model.send(:reset_fast_pk_lookup_sql) end module ClassMethods # Use automatic SQL comments for the given class methods. def sql_comments_class_methods(*meths) _sql_comments_methods(singleton_class, :class, meths) end # Use automatic SQL comments for the given instance methods. def sql_comments_instance_methods(*meths) _sql_comments_methods(self, :instance, meths) end # Use automatic SQL comments for the given dataset methods. def sql_comments_dataset_methods(*meths) unless @_sql_comments_dataset_module dataset_module(@_sql_comments_dataset_module = Module.new) end _sql_comments_methods(@_sql_comments_dataset_module, :dataset, meths) end [:[], :create, :find, :find_or_create, :with_pk, :with_pk!].each do |meth| define_method(meth) do |*a, &block| db.with_comments(:model=>self, :method_type=>:class, :method=>meth) do super(*a, &block) end end # :nocov: ruby2_keywords(meth) if respond_to?(:ruby2_keywords, true) # :nocov: end private # Don't optimize the fast PK lookups, as it uses static SQL that # won't support the SQL comments. def reset_fast_pk_lookup_sql @fast_pk_lookup_sql = @fast_instance_delete_sql = nil end # Define automatic SQL comment methods in +mod+ for each method in +meths+, # with the given +method_type+. def _sql_comments_methods(mod, method_type, meths) meths.each do |meth| SqlComments.def_sql_commend_method(mod, self, method_type, meth) end end end module InstanceMethods [:delete, :destroy, :lock!, :refresh, :save, :save_changes, :update, :update_fields].each do |meth| define_method(meth) do |*a, &block| t = Sequel.current return super(*a, &block) if (hash = Sequel.synchronize{db.comment_hashes[t]}) && hash[:model] db.with_comments(:model=>model, :method_type=>:instance, :method=>meth) do super(*a, &block) end end # :nocov: ruby2_keywords(meth) if respond_to?(:ruby2_keywords, true) # :nocov: end private # Do not use a placeholder loader for associations. def _associated_object_loader(opts, dynamic_opts) nil end # Use SQL comments on normal association load queries, showing they are association loads. def _load_associated_objects(opts, dynamic_opts=OPTS) db.with_comments(:model=>model, :method_type=>:association_load, :association=>opts[:name]) do super end end end module DatasetMethods Dataset::ACTION_METHODS.each do |meth| define_method(meth) do |*a, &block| t = Sequel.current return super(*a, &block) if (hash = Sequel.synchronize{db.comment_hashes[t]}) && hash[:model] db.with_comments(:model=>model, :method_type=>:dataset, :method=>meth) do super(*a, &block) end end # :nocov: ruby2_keywords(meth) if respond_to?(:ruby2_keywords, true) # :nocov: end private # Add the association name as part of the eager load data, so # perform_eager_load has access to it. def prepare_eager_load(a, reflections, eager_assoc) res = super reflections.each do |r| res[r[:eager_loader]][:association] = r[:name] end res end # Use SQL comments on eager load queries, showing they are eager loads. 
def perform_eager_load(loader, eo) db.with_comments(:model=>model, :method_type=>:association_eager_load, :method=>nil, :association=>eo[:association]) do super end end end end end end ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/static_cache.rb����������������������������������������������������0000664�0000000�0000000�00000023500�14342141206�0021564�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The static_cache plugin is designed for models that are not modified at all # in production use cases, or at least where modifications to them would usually # coincide with an application restart. When loaded into a model class, it # retrieves all rows in the database and statically caches a ruby array and hash # keyed on primary key containing all of the model instances. All of these instances # are frozen so they won't be modified unexpectedly, and before hooks disallow # saving or destroying instances. # # You can use the frozen: false option to have this plugin return unfrozen # instances. This is slower as it requires creating new objects, but it allows # you to make changes to the object and save them. If you set the option to false, # you are responsible for updating the cache manually (the pg_static_cache_updater # extension can handle this automatically). Note that it is not safe to use the # frozen: false option if you are mutating column values directly. If you are # mutating column values, you should also override Model.call to dup each mutable # column value to ensure it is not shared by other instances. # # The caches this plugin creates are used for the following things: # # * Primary key lookups (e.g. Model[1]) # * Model.all # * Model.each # * Model.first (without block, only supporting no arguments or single integer argument) # * Model.count (without an argument or block) # * Model.map # * Model.as_hash # * Model.to_hash # * Model.to_hash_groups # # Usage: # # # Cache the AlbumType class statically, disallowing any changes. # AlbumType.plugin :static_cache # # # Cache the AlbumType class statically, but return unfrozen instances # # that can be modified. # AlbumType.plugin :static_cache, frozen: false # # If you would like the speed benefits of keeping frozen: true but still need # to occasionally update objects, you can side-step the before_ hooks by # overriding the class method +static_cache_allow_modifications?+ to return true: # # class Model # plugin :static_cache # # def self.static_cache_allow_modifications? # true # end # end # # Now if you +#dup+ a Model object (the resulting object is not frozen), you # will be able to update and save the duplicate. # Note the caveats around your responsibility to update the cache still applies. # You can update the cache via `.load_cache` method. module StaticCache # Populate the static caches when loading the plugin. Options: # :frozen :: Whether retrieved model objects are frozen. The default is true, # for better performance as the shared frozen objects can be used # directly. 
If set to false, new instances are created. def self.configure(model, opts=OPTS) model.instance_exec do @static_cache_frozen = opts.fetch(:frozen, true) load_cache end end module ClassMethods # A frozen ruby hash holding all of the model's frozen instances, keyed by frozen primary key. attr_reader :cache # An array of all of the model's instances, without issuing a database # query. If a block is given, yields each instance to the block. def all(&block) array = @static_cache_frozen ? @all.dup : to_a array.each(&block) if block array end # If a block is given, multiple arguments are given, or a single # non-Integer argument is given, performs the default behavior of # issuing a database query. Otherwise, uses the cached values # to return either the first cached instance (no arguments) or an # array containing the number of instances specified (single integer # argument). def first(*args) if defined?(yield) || args.length > 1 || (args.length == 1 && !args[0].is_a?(Integer)) super else @all.first(*args) end end # Get the number of records in the cache, without issuing a database query. def count(*a, &block) if a.empty? && !block @all.size else super end end # Return the frozen object with the given pk, or nil if no such object exists # in the cache, without issuing a database query. def cache_get_pk(pk) static_cache_object(cache[pk]) end # Yield each of the model's frozen instances to the block, without issuing a database # query. def each(&block) if @static_cache_frozen @all.each(&block) else @all.each{|o| yield(static_cache_object(o))} end end # Use the cache instead of a query to get the results. def map(column=nil, &block) if column raise(Error, "Cannot provide both column and block to map") if block if column.is_a?(Array) @all.map{|r| r.values.values_at(*column)} else @all.map{|r| r[column]} end elsif @static_cache_frozen @all.map(&block) elsif block @all.map{|o| yield(static_cache_object(o))} else all.map end end Plugins.after_set_dataset(self, :load_cache) Plugins.inherited_instance_variables(self, :@static_cache_frozen=>nil) # Use the cache instead of a query to get the results. def as_hash(key_column = nil, value_column = nil, opts = OPTS) if key_column.nil? && value_column.nil? if @static_cache_frozen && !opts[:hash] return Hash[cache] else key_column = primary_key end end h = opts[:hash] || {} if value_column if value_column.is_a?(Array) if key_column.is_a?(Array) @all.each{|r| h[r.values.values_at(*key_column)] = r.values.values_at(*value_column)} else @all.each{|r| h[r[key_column]] = r.values.values_at(*value_column)} end else if key_column.is_a?(Array) @all.each{|r| h[r.values.values_at(*key_column)] = r[value_column]} else @all.each{|r| h[r[key_column]] = r[value_column]} end end elsif key_column.is_a?(Array) @all.each{|r| h[r.values.values_at(*key_column)] = static_cache_object(r)} else @all.each{|r| h[r[key_column]] = static_cache_object(r)} end h end # Alias of as_hash for backwards compatibility. 
def to_hash(*a) as_hash(*a) end # Use the cache instead of a query to get the results def to_hash_groups(key_column, value_column = nil, opts = OPTS) h = opts[:hash] || {} if value_column if value_column.is_a?(Array) if key_column.is_a?(Array) @all.each{|r| (h[r.values.values_at(*key_column)] ||= []) << r.values.values_at(*value_column)} else @all.each{|r| (h[r[key_column]] ||= []) << r.values.values_at(*value_column)} end else if key_column.is_a?(Array) @all.each{|r| (h[r.values.values_at(*key_column)] ||= []) << r[value_column]} else @all.each{|r| (h[r[key_column]] ||= []) << r[value_column]} end end elsif key_column.is_a?(Array) @all.each{|r| (h[r.values.values_at(*key_column)] ||= []) << static_cache_object(r)} else @all.each{|r| (h[r[key_column]] ||= []) << static_cache_object(r)} end h end # Ask whether modifications to this class are allowed. def static_cache_allow_modifications? !@static_cache_frozen end # Reload the cache for this model by retrieving all of the instances in the dataset # freezing them, and populating the cached array and hash. def load_cache @all = load_static_cache_rows h = {} @all.each do |o| o.errors.freeze h[o.pk.freeze] = o.freeze end @cache = h.freeze end private # Load the static cache rows from the database. def load_static_cache_rows ret = super if defined?(super) ret || dataset.all.freeze end # Return the frozen object with the given pk, or nil if no such object exists # in the cache, without issuing a database query. def primary_key_lookup(pk) static_cache_object(cache[pk]) end # If frozen: false is not used, just return the argument. Otherwise, # create a new instance with the arguments values if the argument is # not nil. def static_cache_object(o) if @static_cache_frozen o elsif o call(Hash[o.values]) end end end module InstanceMethods # Disallowing destroying the object unless the frozen: false option was used. def before_destroy cancel_action("modifying model objects that use the static_cache plugin is not allowed") unless model.static_cache_allow_modifications? super end # Disallowing saving the object unless the frozen: false option was used. def before_save cancel_action("modifying model objects that use the static_cache plugin is not allowed") unless model.static_cache_allow_modifications? super end end end end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/static_cache_cache.rb����������������������������������������������0000664�0000000�0000000�00000003766�14342141206�0022723�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The static_cache_cache plugin allows for caching the row content for subclasses # that use the static cache plugin (or just the current class). Using this plugin # can avoid the need to query the database every time loading the plugin into a # model, which can save time when you have a lot of models using the static_cache # plugin. 
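    #
    # For example (the file name and load order here are illustrative),
    # after all models are loaded you can dump the cache to disk, and
    # later processes will read the rows from the file instead of the
    # database:
    #
    #   Sequel::Model.plugin :static_cache_cache, "static_cache.cache"
    #   # ... load model classes that use the static_cache plugin ...
    #   Sequel::Model.dump_static_cache_cache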
    #
    # Usage:
    #
    #   # Make all model subclasses that use the static_cache plugin use
    #   # the cached values in the given file
    #   Sequel::Model.plugin :static_cache_cache, "static_cache.cache"
    #
    #   # Make the AlbumType model use the cached values in the given file;
    #   # this plugin should be loaded before the static_cache plugin
    #   AlbumType.plugin :static_cache_cache, "static_cache.cache"
    module StaticCacheCache
      def self.configure(model, file)
        model.instance_variable_set(:@static_cache_cache_file, file)
        model.instance_variable_set(:@static_cache_cache, File.exist?(file) ? Marshal.load(File.read(file)) : {})
      end

      module ClassMethods
        # Dump the in-memory cached rows to the cache file.
        def dump_static_cache_cache
          File.open(@static_cache_cache_file, 'wb'){|f| f.write(Marshal.dump(@static_cache_cache))}
          nil
        end

        Plugins.inherited_instance_variables(self, :@static_cache_cache_file=>nil, :@static_cache_cache=>nil)

        private

        # Load the rows for the model from the cache if available.
        # If not available, load the rows from the database, and
        # then update the cache with the raw rows.
        def load_static_cache_rows
          if rows = Sequel.synchronize{@static_cache_cache[name]}
            rows.map{|row| call(row)}.freeze
          else
            rows = dataset.all.freeze
            raw_rows = rows.map(&:values)
            Sequel.synchronize{@static_cache_cache[name] = raw_rows}
            rows
          end
        end
      end
    end
  end
end
����������sequel-5.63.0/lib/sequel/plugins/string_stripper.rb�������������������������������������������������0000664�0000000�0000000�00000003563�14342141206�0022417�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true

module Sequel
  module Plugins
    # StringStripper is a plugin that strips all input strings
    # when assigning to the model's values. Example:
    #
    #   album = Album.new(name: ' A ')
    #   album.name # => 'A'
    #
    # SQL::Blob instances and all non-strings are not modified by
    # this plugin. Additionally, strings passed to a blob column
    # setter are also not modified. You can explicitly set
    # other columns to skip the stripping:
    #
    #   Album.skip_string_stripping :foo
    #   Album.new(foo: ' A ').foo # => ' A '
    #
    # Usage:
    #
    #   # Make all model subclass instances strip strings (called before loading subclasses)
    #   Sequel::Model.plugin :string_stripper
    #
    #   # Make the Album class strip strings
    #   Album.plugin :string_stripper
    module StringStripper
      def self.apply(model)
        model.plugin(:input_transformer, :string_stripper){|v| (v.is_a?(String) && !v.is_a?(SQL::Blob)) ? v.strip : v}
      end
      def self.configure(model)
        model.send(:set_skipped_string_stripping_columns)
      end

      module ClassMethods
        Plugins.after_set_dataset(self, :set_skipped_string_stripping_columns)

        # Skip stripping for the given columns.
        def skip_string_stripping(*columns)
          skip_input_transformer(:string_stripper, *columns)
        end

        # Return true if the column should not have values stripped.
def skip_string_stripping?(column) skip_input_transformer?(:string_stripper, column) end private # Automatically skip stripping of blob columns def set_skipped_string_stripping_columns if @db_schema blob_columns = @db_schema.map{|k,v| k if v[:type] == :blob}.compact skip_string_stripping(*blob_columns) end end end end end end ���������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/subclasses.rb������������������������������������������������������0000664�0000000�0000000�00000006120�14342141206�0021320�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The subclasses plugin keeps track of all subclasses of the # current model class. Direct subclasses are available via the # subclasses method, and all descendent classes are available via the # descendants method: # # c = Class.new(Sequel::Model) # c.plugin :subclasses # sc1 = Class.new(c) # sc2 = Class.new(c) # ssc1 = Class.new(sc1) # c.subclasses # [sc1, sc2] # sc1.subclasses # [ssc1] # sc2.subclasses # [] # ssc1.subclasses # [] # c.descendants # [sc1, ssc1, sc2] # # You can also finalize the associations and then freeze the classes # in all descendent classes. Doing so is a recommended practice after # all models have been defined in production and testing, and this makes # it easier than keeping track of the classes to finalize and freeze # manually: # # c.freeze_descendants # # You can provide a block when loading the plugin, and it will be called # with each subclass created: # # a = [] # Sequel::Model.plugin(:subclasses){|sc| a << sc} # class A < Sequel::Model; end # class B < Sequel::Model; end # a # => [A, B] module Subclasses NEED_SUBCLASSES = !Object.respond_to?(:subclasses) || Object.method(:subclasses).source_location private_constant :NEED_SUBCLASSES # Initialize the subclasses instance variable for the model. def self.apply(model, &block) # :nocov: model.instance_variable_set(:@subclasses, []) if NEED_SUBCLASSES # :nocov: model.instance_variable_set(:@on_subclass, block) end module ClassMethods # Callable object that should be called with every descendent # class created. attr_reader :on_subclass # :nocov: if NEED_SUBCLASSES # All subclasses for the current model. Does not # include the model itself. attr_reader :subclasses end # :nocov: # All descendent classes of this model. def descendants Sequel.synchronize{subclasses.dup}.map{|x| [x] + x.send(:descendants)}.flatten end # SEQUEL6: Remove alias descendents descendants # Freeze all descendent classes. This also finalizes the associations for those # classes before freezing. def freeze_descendants descendants.each(&:finalize_associations).each(&:freeze) end # SEQUEL6: Remove alias freeze_descendents freeze_descendants Plugins.inherited_instance_variables(self, :@subclasses=>lambda{|v| []}, :@on_subclass=>nil) private # Add the subclass to this model's current subclasses, # and initialize a new subclasses instance variable # in the subclass. 
def inherited(subclass) super # :nocov: Sequel.synchronize{subclasses << subclass} if NEED_SUBCLASSES # :nocov: on_subclass.call(subclass) if on_subclass end end end end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/subset_conditions.rb�����������������������������������������������0000664�0000000�0000000�00000003063�14342141206�0022712�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The subset_conditions plugin creates an additional *_conditions method # for every subset created, which returns the filter conditions the subset # uses. This can be useful if you want to use the conditions for a separate # filter or combine them with OR. # # Usage: # # # Add subset_conditions in the Album class # Album.plugin :subset_conditions # # # This will now create a published_conditions method # Album.dataset_module do # subset :published, published: true # end # # Album.where(Album.published_conditions).sql # # SELECT * FROM albums WHERE (published IS TRUE) # # Album.exclude(Album.published_conditions).sql # # SELECT * FROM albums WHERE (published IS NOT TRUE) # # Album.where(Album.published_conditions | {ready: true}).sql # # SELECT * FROM albums WHERE ((published IS TRUE) OR (ready IS TRUE)) module SubsetConditions def self.apply(model, &block) model.instance_exec do @dataset_module_class = Class.new(@dataset_module_class) do include DatasetModuleMethods end end end module DatasetModuleMethods # Also create a method that returns the conditions the filter uses. 
def where(name, *args, &block) super cond = args cond = cond.first if cond.size == 1 define_method(:"#{name}_conditions"){filter_expr(cond, &block)} end end end end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/table_select.rb����������������������������������������������������0000664�0000000�0000000�00000002346�14342141206�0021605�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The table_select plugin changes the default selection for a # model dataset from <tt>*</tt> to <tt>table.*</tt>. # This makes it so that if you join the model's dataset to # other tables, columns in the other tables do not appear # in the result sets (and possibly overwrite columns in the # current model with the same name). # # Usage: # # # Make all model subclasses select table.* # Sequel::Model.plugin :table_select # # # Make the Album class select albums.* # Album.plugin :table_select module TableSelect # Modify the current model's dataset selection, if the model # has a dataset. def self.configure(model) model.instance_exec do self.dataset = dataset if @dataset end end module ClassMethods private # If the underlying dataset selects from a single table and # has no explicit selection, select table.* from that table. def convert_input_dataset(ds) ds = super unless ds.opts[:select] ds = ds.select_all(ds.first_source) end ds end end end end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/tactical_eager_loading.rb������������������������������������������0000664�0000000�0000000�00000020613�14342141206�0023600�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The tactical_eager_loading plugin allows you to eagerly load # an association for all objects retrieved from the same dataset # without calling +eager+ on the dataset. If you attempt to load # associated objects for a record and the association for that # object is currently not cached, it assumes you want to get # the associated objects for all objects retrieved with the dataset that # retrieved the current object. 
    #
    # Tactical eager loading only takes effect if you retrieved the
    # current object with Dataset#all; it doesn't work if you
    # retrieved the current object with Dataset#each.
    #
    # Basically, this allows the following code to issue only two queries:
    #
    #   Album.where{id<100}.all do |a|
    #     a.artists
    #   end
    #   # SELECT * FROM albums WHERE (id < 100)
    #   # SELECT * FROM artists WHERE id IN (...)
    #
    # Note that if you are passing a callback to the association method via
    # a block or :callback option, or using the :reload option to reload
    # the association, eager loading will not be done.
    #
    # You can use the :eager_reload option to reload the association for all
    # objects that the current object was retrieved with:
    #
    #   # SELECT * FROM albums WHERE (id < 100)
    #   albums = Album.where{id<100}.all
    #
    #   # Eagerly load all artists for these albums
    #   # SELECT * FROM artists WHERE id IN (...)
    #   albums.first.artists
    #
    #   # Do something that may affect which artists are associated to the albums
    #
    #   # Eagerly reload all artists for these albums
    #   # SELECT * FROM artists WHERE id IN (...)
    #   albums.first.artists(eager_reload: true)
    #
    # You can also use the :eager option to specify dependent associations
    # to eager load:
    #
    #   albums = Album.where{id<100}.all
    #
    #   # Eager load all artists for these albums, and all albums for those artists
    #   # SELECT * FROM artists WHERE id IN (...)
    #   # SELECT * FROM albums WHERE artist_id IN (...)
    #   albums.first.artists(eager: :albums)
    #
    # You can also use :eager to specify an eager callback. For example:
    #
    #   albums = Album.where{id<100}.all
    #
    #   # Eagerly load all artists whose name starts with A-M for these albums
    #   # SELECT * FROM artists WHERE name > 'N' AND id IN (...)
    #   albums.first.artists(eager: lambda{|ds| ds.where(Sequel[:name] > 'N')})
    #
    # Note that the :eager option only takes effect if the association
    # has not already been loaded for the model.
    #
    # The tactical_eager_loading plugin also allows transparent eager
    # loading when calling association methods on associated objects
    # eagerly loaded via Dataset#eager_graph. This can reduce N queries
    # to a single query when iterating over all associated objects.
    # Consider the following code:
    #
    #   artists = Artist.eager_graph(:albums).all
    #   artists.each do |artist|
    #     artist.albums.each do |album|
    #       album.tracks
    #     end
    #   end
    #
    # By default this will issue a single query to load the artists and
    # albums, and then one query for each album to load the tracks for
    # the album:
    #
    #   # SELECT artists.id, ... albums.id, ...
    #   # FROM artists
    #   # LEFT OUTER JOIN albums ON (albums.artist_id = artists.id);
    #   # SELECT * FROM tracks WHERE album_id = 1;
    #   # SELECT * FROM tracks WHERE album_id = 2;
    #   # SELECT * FROM tracks WHERE album_id = 10;
    #   # ...
    #
    # With the tactical_eager_loading plugin, this uses the same
    # query to load the artists and albums, but then issues a single query
    # to load the tracks for all albums.
    #
    #   # SELECT artists.id, ... albums.id, ...
    #   # FROM artists
    #   # LEFT OUTER JOIN albums ON (albums.artist_id = artists.id);
    #   # SELECT * FROM tracks WHERE (tracks.album_id IN (1, 2, 10, ...));
    #
    # Note that transparent eager loading for associated objects
    # loaded by eager_graph will only take place if the associated classes
    # also use the tactical_eager_loading plugin.
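    #
    # For example (a sketch; both classes must load this plugin for the
    # transparent eager loading of tracks shown above to apply):
    #
    #   Album.plugin :tactical_eager_loading
    #   Track.plugin :tactical_eager_loading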
    #
    # When using this plugin, calling association methods on separate
    # instances of the same result set is not thread-safe, because this
    # plugin attempts to modify all instances of the same result set
    # to eagerly set the associated objects, and having separate threads
    # modify the same model instance is not thread-safe.
    #
    # Because this plugin will automatically use eager loading for
    # performance, it can break code that defines associations that
    # do not support eager loading, without marking that they do not
    # support eager loading via the <tt>allow_eager: false</tt> option.
    # Make sure to set <tt>allow_eager: false</tt> on any association
    # used with this plugin if the association doesn't support eager loading.
    #
    # Usage:
    #
    #   # Make all model subclass instances use tactical eager loading (called before loading subclasses)
    #   Sequel::Model.plugin :tactical_eager_loading
    #
    #   # Make the Album class use tactical eager loading
    #   Album.plugin :tactical_eager_loading
    module TacticalEagerLoading
      module InstanceMethods
        # The dataset that retrieved this object, set if the object was
        # retrieved via Dataset#all.
        attr_accessor :retrieved_by

        # All model objects retrieved with this object, set if the object was
        # retrieved via Dataset#all.
        attr_accessor :retrieved_with

        # Remove retrieved_by and retrieved_with when marshalling. retrieved_by
        # contains unmarshallable objects, and retrieved_with can be very large
        # and is not helpful without retrieved_by.
        def marshallable!
          @retrieved_by = nil
          @retrieved_with = nil
          super
        end

        private

        # If the association is not in the associations cache and the object
        # was retrieved via Dataset#all, eagerly load the association for all model
        # objects retrieved with the current object.
        def load_associated_objects(opts, dynamic_opts=OPTS, &block)
          dynamic_opts = load_association_objects_options(dynamic_opts, &block)
          name = opts[:name]
          eager_reload = dynamic_opts[:eager_reload]
          if (!associations.include?(name) || eager_reload) && opts[:allow_eager] != false && retrieved_by && !frozen? && !dynamic_opts[:callback] && !dynamic_opts[:reload]
            retrieved_by.send(:eager_load, _filter_tactical_eager_load_objects(:eager_reload=>eager_reload, :name=>name), {name=>dynamic_opts[:eager] || OPTS}, model)
          end
          super
        end

        # Filter the objects used when performing tactical eager loading.
        # By default, this removes frozen objects and objects that already have the association loaded.
        def _filter_tactical_eager_load_objects(opts)
          objects = defined?(super) ? super : retrieved_with.dup
          if opts[:eager_reload]
            objects.reject!(&:frozen?)
          else
            name = opts[:name]
            objects.reject!{|x| x.frozen? || x.associations.include?(name)}
          end
          objects
        end
      end

      module DatasetMethods
        private

        # Set the retrieved_with and retrieved_by attributes for each of the associated objects
        # created by the eager graph loader with the appropriate class dataset and array of objects.
        def _eager_graph_build_associations(_, egl)
          objects = super

          master = egl.master
          egl.records_map.each do |k, v|
            next if k == master || v.empty?

            by = opts[:graph][:table_aliases][k]
            values = v.values

            values.each do |o|
              next unless o.is_a?(TacticalEagerLoading::InstanceMethods) && !o.retrieved_by
              o.retrieved_by = by
              o.retrieved_with = values
            end
          end

          objects
        end

        # Set the retrieved_with and retrieved_by attributes for each object
        # with the current dataset and array of all objects.
        def post_load(objects)
          super
          objects.each do |o|
            next unless o.is_a?(Sequel::Model)
            o.retrieved_by = self
            o.retrieved_with = objects
          end
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/throw_failures.rb000066400000000000000000000072761434214120600222130ustar00rootroot00000000000000# frozen-string-literal: true

module Sequel
  module Plugins
    # The throw_failures plugin throws HookFailed and ValidationFailed exceptions instead
    # of raising them. If there is no matching catch block, the UncaughtThrowError will be rescued
    # and the HookFailed or ValidationFailed exception will be raised normally.
    #
    # If you are setting up the catch blocks to handle these failures, in the failure case this
    # plugin is about 10-15% faster on CRuby and 10x faster on JRuby. If you are not
    # setting up the catch blocks, in the failure case this plugin is about 30% slower on CRuby
    # and 2x slower on JRuby. So this plugin should only be used if you are setting up catch
    # blocks manually.
    #
    # This plugin will set up catch blocks automatically for internally rescued HookFailed
    # exceptions when the model is configured to not raise exceptions on failure (by default,
    # the exceptions are internally rescued in that case).
    #
    # To set up the catch blocks, use the class of the exception:
    #
    #   ret = catch(Sequel::ValidationFailed) do
    #     model_instance.save
    #   end
    #   if ret.is_a?(Sequel::ValidationFailed)
    #     # handle failure
    #   else
    #     # handle success
    #   end
    #
    # Usage:
    #
    #   # Make all model subclass instances throw HookFailed and ValidationFailed exceptions
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :throw_failures
    #
    #   # Make the Album class throw HookFailed and ValidationFailed exceptions
    #   Album.plugin :throw_failures
    module ThrowFailures
      module InstanceMethods
        # Catch any thrown HookFailed exceptions.
        def valid?(opts = OPTS)
          catch_hook_failures{super} || false
        end

        private

        # Catch any HookFailed exceptions thrown inside the block, and return
        # nil if there were any.
        def catch_hook_failures
          called = ret = nil
          catch(HookFailed) do
            ret = yield
            called = true
          end
          ret if called
        end

        # Catch any thrown HookFailed exceptions if not raising on failure.
        def checked_save_failure(opts)
          if raise_on_failure?(opts)
            super
          else
            catch_hook_failures{super}
          end
        end

        if RUBY_VERSION >= '2.2' && (!defined?(JRUBY_VERSION) || JRUBY_VERSION > '9.1')
          # Throw HookFailed with the generated error. If the throw is not
          # caught, just return the originally generated error.
          def hook_failed_error(msg)
            e = super
            throw HookFailed, e
          rescue UncaughtThrowError
            e
          end

          # Throw ValidationFailed with the generated error. If the throw is not
          # caught, just return the originally generated error.
          def validation_failed_error
            e = super
            throw ValidationFailed, e
          rescue UncaughtThrowError
            e
          end
        else
          # UncaughtThrowError was added in Ruby 2.2. Older Ruby versions
          # used ArgumentError with "uncaught throw" at the start of the message.
          # :nocov:
          def hook_failed_error(msg)
            e = super
            throw HookFailed, e
          rescue ArgumentError => e2
            raise e2 unless e2.message.start_with?('uncaught throw')
            e
          end

          def validation_failed_error
            e = super
            throw ValidationFailed, e
          rescue ArgumentError => e2
            raise e2 unless e2.message.start_with?('uncaught throw')
            e
          end
          # :nocov:
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/timestamps.rb000066400000000000000000000112231434214120600213370ustar00rootroot00000000000000# frozen-string-literal: true

module Sequel
  module Plugins
    # The timestamps plugin creates hooks that automatically set create and
    # update timestamp fields. Both field names used are configurable, and you
    # can also set whether to overwrite existing create timestamps (false
    # by default), or whether to set the update timestamp when creating (also
    # false by default).
    #
    # Usage:
    #
    #   # Timestamp all model instances using +created_at+ and +updated_at+
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :timestamps
    #
    #   # Timestamp Album instances, with custom column names
    #   Album.plugin :timestamps, create: :created_on, update: :updated_on
    #
    #   # Timestamp Artist instances, forcing an overwrite of the create
    #   # timestamp, and setting the update timestamp when creating
    #   Artist.plugin :timestamps, force: true, update_on_create: true
    module Timestamps
      # Configure the plugin by setting the available options. Note that
      # if this method is run more than once, previous settings are ignored,
      # and it will just use the settings given or the default settings. Options:
      # :allow_manual_update :: Whether to skip setting the update timestamp if it has been modified manually (default: false)
      # :create :: The field to hold the create timestamp (default: :created_at)
      # :force :: Whether to overwrite an existing create timestamp (default: false)
      # :update :: The field to hold the update timestamp (default: :updated_at)
      # :update_on_create :: Whether to set the update timestamp to the create timestamp when creating (default: false)
      def self.configure(model, opts=OPTS)
        model.instance_exec do
          @allow_manual_timestamp_update = opts[:allow_manual_update]||false
          @create_timestamp_field = opts[:create]||:created_at
          @update_timestamp_field = opts[:update]||:updated_at
          @create_timestamp_overwrite = opts[:force]||false
          @set_update_timestamp_on_create = opts[:update_on_create]||false
        end
      end

      module ClassMethods
        # The field to store the create timestamp
        attr_reader :create_timestamp_field

        # The field to store the update timestamp
        attr_reader :update_timestamp_field

        # Whether to overwrite the create timestamp if it already exists
        def create_timestamp_overwrite?
@create_timestamp_overwrite end Plugins.inherited_instance_variables(self, :@allow_manual_timestamp_update=>nil, :@create_timestamp_field=>nil, :@create_timestamp_overwrite=>nil, :@set_update_timestamp_on_create=>nil, :@update_timestamp_field=>nil) # Whether to allow manual setting of the update timestamp when creating def allow_manual_timestamp_update? @allow_manual_timestamp_update end # Whether to set the update timestamp to the create timestamp when creating def set_update_timestamp_on_create? @set_update_timestamp_on_create end end module InstanceMethods # Set the update timestamp when updating def before_update set_update_timestamp super end # Set the create timestamp when creating def before_validation set_create_timestamp if new? super end private # If the object has accessor methods for the create timestamp field, and # the create timestamp value is nil or overwriting it is allowed, set the # create timestamp field to the time given or the current time. If setting # the update timestamp on creation is configured, set the update timestamp # as well. def set_create_timestamp(time=nil) field = model.create_timestamp_field meth = :"#{field}=" set_column_value(meth, time||=model.dataset.current_datetime) if respond_to?(field) && respond_to?(meth) && (model.create_timestamp_overwrite? || get_column_value(field).nil?) set_update_timestamp(time) if model.set_update_timestamp_on_create? end # Set the update timestamp to the time given or the current time if the # object has a setter method for the update timestamp field. def set_update_timestamp(time=nil) return if model.allow_manual_timestamp_update? && modified?(model.update_timestamp_field) meth = :"#{model.update_timestamp_field}=" set_column_value(meth, time||model.dataset.current_datetime) if respond_to?(meth) end end end end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/touch.rb�����������������������������������������������������������0000664�0000000�0000000�00000014152�14342141206�0020277�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The touch plugin adds a touch method to model instances, which saves # the object with a modified timestamp. By default, it uses the # :updated_at column, but you can set which column to use. # It also supports touching of associations, so that when the current # model object is updated or destroyed, the associated rows in the # database can have their modified timestamp updated to the current # timestamp. # # Since the instance touch method works on model instances, # it uses Time.now for the timestamp. The association touching works # on datasets, so it updates all related rows in a single query, using # the SQL standard CURRENT_TIMESTAMP. Both of these can be overridden # easily if necessary. 
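    #
    # For example, to change the timestamp used when touching associations,
    # you can override the private +touch_association_value+ hook defined
    # below (a sketch; assumes the database supports a localtimestamp
    # function):
    #
    #   class Artist < Sequel::Model
    #     plugin :touch, associations: :albums
    #
    #     private
    #
    #     # Use the database's localtimestamp instead of CURRENT_TIMESTAMP
    #     def touch_association_value
    #       Sequel.function(:localtimestamp)
    #     end
    #   end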
    #
    # Usage:
    #
    #   # Allow touching of all model instances (called before loading subclasses)
    #   Sequel::Model.plugin :touch
    #
    #   # Allow touching of Album instances, with a custom column
    #   Album.plugin :touch, column: :updated_on
    #
    #   # Allow touching of Artist instances, updating the albums and tags
    #   # associations when touching, touching the +updated_on+ column for
    #   # albums and the +updated_at+ column for tags
    #   Artist.plugin :touch, associations: [{albums: :updated_on}, :tags]
    module Touch
      def self.apply(model, opts=OPTS)
        model.instance_variable_set(:@touched_associations, {})
      end

      # Set the touch_column and touched_associations variables for the model.
      # Options:
      # :associations :: The associations to touch when a model instance is
      #                  updated or destroyed. Can be a symbol for a single association,
      #                  a hash with association keys and column values, or an array of
      #                  symbols and/or hashes. If a symbol is used, the column used
      #                  when updating the associated objects is the model's touch_column.
      #                  If a hash is used, the value is used as the column to update.
      # :column :: The column to modify when touching a model instance.
      def self.configure(model, opts=OPTS)
        model.touch_column = opts[:column] || :updated_at if opts[:column] || !model.touch_column
        model.touch_associations(opts[:associations]) if opts[:associations]
      end

      module ClassMethods
        # The column to modify when touching a model instance, as a symbol. Also used
        # as the default column when touching associations, if
        # the associations don't specify a column.
        attr_accessor :touch_column

        # A hash specifying the associations to touch when instances are
        # updated or destroyed. Keys are association name symbols and values
        # are column name symbols.
        attr_reader :touched_associations

        Plugins.inherited_instance_variables(self, :@touched_associations=>:dup, :@touch_column=>nil)

        # Freeze the touched associations when freezing the model class.
        def freeze
          @touched_associations.freeze

          super
        end

        # Add additional associations to be touched. See the :associations option
        # of the Sequel::Plugin::Touch.configure method for the format of the associations
        # arguments.
        def touch_associations(*associations)
          associations.flatten.each do |a|
            a = {a=>touch_column} if a.is_a?(Symbol)
            a.each do |k,v|
              raise(Error, "invalid association: #{k}") unless association_reflection(k)
              touched_associations[k] = v
            end
          end
        end
      end

      module InstanceMethods
        # Touch all of the model's touched_associations when creating the object.
        def after_create
          super
          touch_associations
        end

        # Touch all of the model's touched_associations when destroying the object.
        def after_destroy
          super
          touch_associations
        end

        # Touch all of the model's touched_associations when updating the object.
        def after_update
          super
          touch_associations
        end

        # Touch the model object. If a column is not given, use the model's touch_column
        # as the column. If the column to use is not one of the model's columns, just
        # save the changes to the object instead of attempting to set a value that doesn't
        # exist.
        def touch(column=nil)
          if column
            set(column=>touch_instance_value)
          else
            column = model.touch_column
            set(column=>touch_instance_value) if columns.include?(column)
          end
          save_changes
        end

        private

        # The value to use when modifying the touch column for the association datasets. Uses
        # the SQL standard CURRENT_TIMESTAMP.
        def touch_association_value
          Sequel::CURRENT_TIMESTAMP
        end

        # Update the touch column for all associated objects that should be touched.
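        # For a touched one_to_many association that is not a joined dataset,
        # the single query issued looks roughly like this sketch:
        #
        #   # UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE (artist_id = 1)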
        def touch_associations
          model.touched_associations.each do |assoc, column|
            r = model.association_reflection(assoc)
            next unless r.can_have_associated_objects?(self)
            ds = public_send(r[:dataset_method])

            if ds.send(:joined_dataset?)
              # Can't update all values at once, so update each instance individually.
              # Instead of doing a simple save, update via the instance's dataset,
              # to avoid going into an infinite loop in some cases.
              public_send(assoc).each{|x| x.this.update(column=>touch_association_value)}
            else
              # Update all values at once for performance reasons.
              ds.update(column=>touch_association_value)
              associations.delete(assoc)
            end
          end
        end

        # The value to use when modifying the touch column for the model instance.
        # Uses Time/DateTime.now to work well with typecasting.
        def touch_instance_value
          model.dataset.current_datetime
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/tree.rb000066400000000000000000000140021434214120600201100ustar00rootroot00000000000000# frozen-string-literal: true

module Sequel
  module Plugins
    # The tree plugin adds additional associations and methods that allow you to
    # treat a Model as a tree.
    #
    # A column for holding the parent key is required and is :parent_id by default.
    # This may be overridden by passing column name via :key.
    #
    # Optionally, a column to control order of nodes returned can be specified
    # by passing column name via :order.
    #
    # If you pass true for the :single_root option, the class will ensure there is
    # only ever one root in the tree.
    #
    # Examples:
    #
    #   class Node < Sequel::Model
    #     plugin :tree
    #   end
    #
    #   class Node < Sequel::Model
    #     plugin :tree, key: :parentid, order: :position
    #   end
    module Tree
      # Create parent and children associations. Any options
      # specified are passed to both associations. You can also
      # specify options to use for just the parent association
      # using a :parent option, and options to use for just the
      # children association using a :children option.
      def self.apply(model, opts=OPTS)
        opts = opts.dup
        opts[:class] = model
        opts[:key] ||= :parent_id

        par = opts.merge(opts.fetch(:parent, OPTS))
        parent = par.fetch(:name, :parent)

        chi = opts.merge(opts.fetch(:children, OPTS))
        children = chi.fetch(:name, :children)

        par[:reciprocal] = children
        chi[:reciprocal] = parent

        model.instance_exec do
          @parent_column = opts[:key]
          @qualified_parent_column = Sequel.deep_qualify(table_name, opts[:key])
          @tree_order = opts[:order]
          @parent_association_name = parent
          @children_association_name = children
          many_to_one parent, par
          one_to_many children, chi
          plugin SingleRoot if opts[:single_root]
        end
      end

      module ClassMethods
        # The column symbol or array of column symbols on which to order the tree.
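        # For example, this would be +:position+ when the plugin is loaded
        # with <tt>plugin :tree, order: :position</tt>.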
        attr_accessor :tree_order

        # The symbol or array of symbols for the column containing the value pointing to the
        # parent of the node.
        attr_accessor :parent_column

        # The qualified identifier or array of qualified identifiers for the column
        # containing the value pointing to the parent of the node.
        attr_accessor :qualified_parent_column

        # The association name for the parent association
        attr_reader :parent_association_name

        # The association name for the children association
        attr_reader :children_association_name

        Plugins.inherited_instance_variables(self, :@parent_column=>nil, :@qualified_parent_column=>nil, :@tree_order=>nil, :@parent_association_name=>nil, :@children_association_name=>nil)
        Plugins.def_dataset_methods(self, [:roots, :roots_dataset])

        # Freeze the tree order if it is an array when freezing the model class.
        def freeze
          @tree_order.freeze if @tree_order.is_a?(Array)

          super
        end
      end

      module InstanceMethods
        # Returns list of ancestors, starting from parent until root.
        #
        #   subchild1.ancestors # => [child1, root]
        def ancestors
          node, nodes = self, []
          meth = model.parent_association_name
          while par = node.send(meth)
            nodes << node = par
          end
          nodes
        end

        # Returns list of descendants
        #
        #   node.descendants # => [child1, child2, subchild1_1, subchild1_2, subchild2_1, subchild2_2]
        def descendants
          nodes = send(model.children_association_name).dup
          send(model.children_association_name).each{|child| nodes.concat(child.descendants)}
          nodes
        end

        # Returns the root node of the tree that this node descends from.
        # This node is returned if it is a root node itself.
        def root
          ancestors.last || self
        end

        # Returns true if this is a root node, false otherwise.
        def root?
          !new? && possible_root?
        end

        # Returns all siblings and a reference to the current node.
        #
        #   subchild1.self_and_siblings # => [subchild1, subchild2]
        def self_and_siblings
          if parent = send(model.parent_association_name)
            parent.send(model.children_association_name)
          else
            model.roots
          end
        end

        # Returns all siblings of the current node.
        #
        #   subchild1.siblings # => [subchild2]
        def siblings
          self_and_siblings - [self]
        end

        private

        # True if any parent column value is NULL, meaning this node can be a root.
        def possible_root?
          !Array(model.parent_column).map{|c| self[c]}.all?
        end
      end

      module DatasetMethods
        # Returns list of all root nodes (those with no parent nodes).
        #
        #   TreeClass.roots # => [root1, root2]
        def roots
          roots_dataset.all
        end

        # Returns the dataset for retrieval of all root nodes
        #
        #   TreeClass.roots_dataset # => Sequel::Dataset instance
        def roots_dataset
          ds = where(Sequel.or(Array(model.qualified_parent_column).zip([])))
          ds = ds.order(*model.tree_order) if model.tree_order
          ds
        end
      end

      # Plugin included when :single_root option is passed.
      module SingleRoot
        module ClassMethods
          # Returns the single root node.
          def root
            roots_dataset.first
          end
        end

        module InstanceMethods
          # Hook that prevents a second root from being created.
          def before_save
            if possible_root?
&& (root = model.root) && pk != root.pk raise TreeMultipleRootError, "there is already a root #{model.name} defined" end super end end end class TreeMultipleRootError < Error; end end end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/typecast_on_load.rb������������������������������������������������0000664�0000000�0000000�00000005717�14342141206�0022513�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The typecast_on_load plugin exists because most of Sequel's database adapters don't # have complete control over typecasting, and may return columns that aren't # typecast correctly (with correct being defined as how the model object # would typecast the same column values). # # This plugin makes model loading call the setter methods (which typecast # by default) for all columns given. You can either specify the columns to # typecast on load in the plugin call itself, or afterwards using # add_typecast_on_load_columns: # # Album.plugin :typecast_on_load, :release_date, :record_date # # or: # Album.plugin :typecast_on_load # Album.add_typecast_on_load_columns :release_date, :record_date # # If the database returns release_date and record_date columns as strings # instead of dates, this will ensure that if you access those columns through # the model object, you'll get Date objects instead of strings. module TypecastOnLoad # Call add_typecast_on_load_columns on the passed column arguments. def self.configure(model, *columns) model.instance_exec do @typecast_on_load_columns ||= [] add_typecast_on_load_columns(*columns) end end module ClassMethods # The columns to typecast on load for this model. attr_reader :typecast_on_load_columns # Add additional columns to typecast on load for this model. def add_typecast_on_load_columns(*columns) @typecast_on_load_columns.concat(columns) end # Typecast values using #load_typecast when the values are retrieved # from the database. def call(values) o = super.load_typecast o.send(:_clear_changed_columns, :initialize) o end # Freeze typecast on load columns when freezing model class. def freeze @typecast_on_load_columns.freeze super end Plugins.inherited_instance_variables(self, :@typecast_on_load_columns=>:dup) end module InstanceMethods # Call the setter method for each of the model's typecast_on_load_columns # with the current value, so it can be typecasted correctly. def load_typecast model.typecast_on_load_columns.each do |c| if v = values[c] set_column_value("#{c}=", v) end end self end private # Typecast values using #load_typecast when the values are refreshed manually. 
def _refresh_set_values(values) ret = super load_typecast ret end # Typecast values using #load_typecast when the values are refreshed # automatically after a save. def _save_set_values(values) ret = super load_typecast ret end end end end end �������������������������������������������������sequel-5.63.0/lib/sequel/plugins/unlimited_update.rb������������������������������������������������0000664�0000000�0000000�00000001257�14342141206�0022513�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The unlimited_update plugin is designed to work around a # MySQL warning in replicated environments, which occurs if # you issue an UPDATE with a LIMIT clause. # # Usage: # # # Make all model subclass not use a limit for update # Sequel::Model.plugin :unlimited_update # # # Make the Album class not use a limit for update # Album.plugin :unlimited_update module UnlimitedUpdate module InstanceMethods private # Use an unlimited dataset for updates. def _update_dataset super.unlimited end end end end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/unused_associations.rb���������������������������������������������0000664�0000000�0000000�00000056446�14342141206�0023253�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true # :nocov: # This entire file is excluded from coverage testing. This is because it # requires coverage testing to work, and if you've already loaded Sequel # without enabling coverage, then coverage testing won't work correctly # for methods defined by Sequel. # # While automated coverage testing is disabled, manual coverage testing # was used during spec development to make sure this code is 100% covered. if RUBY_VERSION < '2.5' raise LoadError, "The Sequel unused_associations plugin depends on Ruby 2.5+ method coverage" end require 'coverage' require 'json' module Sequel module Plugins # The unused_associations plugin detects which model associations are not # used and can be removed, and which model association methods are not used # and can skip being defined. The advantage of removing unused associations # and unused association methods is decreased memory usage, since each # method defined takes memory and adds more work for the garbage collector. # # In order to detect which associations are used, this relies on the method # coverage support added in Ruby 2.5. To allow flexibility to override # association methods, the association methods that Sequel defines are # defined in a module included in the class instead of directly in the # class. 
    # Unfortunately, that makes it difficult to directly use the
    # coverage data to find unused associations. The advantage of this plugin
    # is that it is able to figure out from the coverage information whether
    # the association methods Sequel defines are actually used.
    #
    # = Basic Usage
    #
    # The expected usage of the unused_associations plugin is to load it
    # into the base class for models in your application, which will often
    # be Sequel::Model:
    #
    #   Sequel::Model.plugin :unused_associations
    #
    # Then you run your test suite with method coverage enabled, passing the
    # coverage result to +update_associations_coverage+.
    # +update_associations_coverage+ returns a data structure containing
    # method coverage information for all subclasses of the base class.
    # You can pass the coverage information to
    # +update_unused_associations_data+, which will return a data structure
    # with information on unused associations.
    #
    #   require 'coverage'
    #   Coverage.start(methods: true)
    #   # load sequel after starting coverage, then run your tests
    #   cov_data = Sequel::Model.update_associations_coverage
    #   unused_associations_data = Sequel::Model.update_unused_associations_data(coverage_data: cov_data)
    #
    # You can take that unused association data and pass it to the
    # +unused_associations+ method to get an array of information on
    # associations which have not been used. Each entry in the array
    # will contain a class name and association name for each unused
    # association, both as a string:
    #
    #   Sequel::Model.unused_associations(unused_associations_data: unused_associations_data)
    #   # => [["Class1", "assoc1"], ...]
    #
    # You can use the output of the +unused_associations+ method to determine
    # which associations are not used at all in your application, and can
    # be eliminated.
    #
    # You can also take that unused association data and pass it to the
    # +unused_association_options+ method, which will return an array of
    # information on associations which are used, but have related methods
    # defined that are not used. The first two entries in each array are
    # the class name and association name as strings, and the third
    # entry is a hash of association options:
    #
    #   Sequel::Model.unused_association_options(unused_associations_data: unused_associations_data)
    #   # => [["Class2", "assoc2", {:read_only=>true}], ...]
    #
    # You can use the output of the +unused_association_options+ method to
    # find out which association options can be provided when defining
    # the association so that the association method will not define
    # methods that are not used.
    #
    # = Combining Coverage Results
    #
    # It is common to want to combine results from multiple separate
    # coverage runs. For example, if you have multiple test suites
    # for your application, one for model or unit tests and one for
    # web or integration tests, you would want to combine the
    # coverage information from all test suites before determining
    # that the associations are not used.
    #
    # The unused_associations plugin supports combining multiple
    # coverage results using the :coverage_file plugin option:
    #
    #   Sequel::Model.plugin :unused_associations,
    #     coverage_file: 'unused_associations_coverage.json'
    #
    # With the coverage file option, +update_associations_coverage+
    # will look in the given file for existing coverage information,
    # if it exists. If the file exists, the data from it will be
    # merged with the coverage result passed to the method.
    # Before returning, the coverage file will be updated with the
    # merged result.
    # When using the :coverage_file plugin option, you can have each
    # of your test suites update the coverage information:
    #
    #   require 'coverage'
    #   Coverage.start(methods: true)
    #   # run this test suite
    #   Sequel::Model.update_associations_coverage
    #
    # After all test suites have been run, you can run
    # +update_unused_associations_data+ without an argument:
    #
    #   unused_associations_data = Sequel::Model.update_unused_associations_data
    #
    # With no argument, +update_unused_associations_data+ will get
    # the coverage data from the coverage file, and then use that
    # to prepare the information. You can then use the returned
    # value the same as before to get the data on unused associations.
    # To prevent stale coverage information, calling
    # +update_unused_associations_data+ when using the :coverage_file
    # plugin option will remove the coverage file by default (you can
    # use the :keep_coverage option to prevent the deletion of the
    # coverage file).
    #
    # = Automatic Usage of Unused Association Data
    #
    # Since it can be a pain to manually update all of your code
    # to remove unused associations or add options to prevent the
    # definition of unused associations, the unused_associations
    # plugin comes with support to take previously saved unused
    # association data, and use it to not create unused associations,
    # and to automatically use the appropriate options so that unused
    # association methods are not created.
    #
    # To use this option, you first need to save the unused association
    # data previously prepared. You can do this by passing a
    # :file option when loading the plugin:
    #
    #   Sequel::Model.plugin :unused_associations,
    #     file: 'unused_associations.json'
    #
    # With the :file option provided, you no longer need to use
    # the return value of +update_unused_associations_data+, as
    # the file will be updated with the information:
    #
    #   Sequel::Model.update_unused_associations_data(coverage_data: cov_data)
    #
    # Then, to use the saved unused associations data, add the
    # :modify_associations plugin option:
    #
    #   Sequel::Model.plugin :unused_associations,
    #     file: 'unused_associations.json',
    #     modify_associations: true
    #
    # With the :modify_associations option used and the unused
    # association data file available, any attempt by a subclass to
    # create an unused association will be ignored. If a subclass
    # attempts to create an association where not
    # all association methods are used, the plugin will automatically
    # set the appropriate options so that the unused association
    # methods are not defined.
    #
    # When you are testing which associations are used, make sure
    # not to set the :modify_associations plugin option, or make sure
    # that the unused associations data file does not exist.
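    #
    # For example, you can delete any stale files before starting a new
    # round of coverage testing (a sketch, using the method defined below):
    #
    #   Sequel::Model.delete_unused_associations_files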
    #
    # == Automatic Usage with Combined Coverage Results
    #
    # If you have multiple test suites and want to automatically
    # use the unused association data, you should provide both
    # :file and :coverage_file options when loading the plugin:
    #
    #   Sequel::Model.plugin :unused_associations,
    #     file: 'unused_associations.json',
    #     coverage_file: 'unused_associations_coverage.json'
    #
    # Then each test suite just needs to run
    # +update_associations_coverage+ to update the coverage information:
    #
    #   Sequel::Model.update_associations_coverage
    #
    # After all test suites have been run, you can run
    # +update_unused_associations_data+ to update the unused
    # association data file (and remove the coverage file):
    #
    #   Sequel::Model.update_unused_associations_data
    #
    # Then you can add the :modify_associations plugin option to
    # automatically use the unused association data.
    #
    # = Caveats
    #
    # Since this plugin is based on coverage information, if you do
    # not have tests that cover all usage of associations in your
    # application, you can end up with coverage that shows the
    # association is not used, when it is used in code that is not
    # covered. The output of the plugin can still be useful in such cases,
    # as long as you are manually checking it. However, you should
    # avoid using the :modify_associations option unless you have
    # confidence that your tests cover all usage of associations
    # in your application. You can specify the :is_used association
    # option for any association that you know is used. If an
    # association uses the :is_used association option, this plugin
    # will not modify it if the :modify_associations option is used.
    #
    # This plugin does not handle anonymous classes. Any unused
    # associations defined in anonymous classes will not be
    # reported by this plugin.
    #
    # This plugin only considers the public instance methods the
    # association defines, and direct access to the related
    # association reflection via Sequel::Model.association_reflection,
    # to determine if the association was used. If the association
    # metadata was accessed another way, it's possible this plugin
    # will show the association as unused.
    #
    # As this relies on the method coverage added in Ruby 2.5, it does
    # not work on older versions of Ruby. It also does not work on
    # JRuby, as JRuby does not implement method coverage.
    module UnusedAssociations
      # Load the subclasses plugin, as the unused associations plugin
      # is designed to handle all subclasses of the class it is loaded
      # into.
      def self.apply(mod, opts=OPTS)
        mod.plugin :subclasses
      end

      # Plugin options:
      # :coverage_file :: The file to store the coverage information,
      #                   when combining coverage information from
      #                   multiple test suites.
      # :file :: The file to store and/or load the unused associations data.
      # :modify_associations :: Whether to use the unused associations data
      #                         to skip defining associations or association
      #                         methods.
      # :unused_associations_data :: The unused associations data to use if the
      #                              :modify_associations option is used (by default, the
      #                              :modify_associations option will use the data from
      #                              the file specified by the :file option). This is the
      #                              same data returned by the
      #                              +update_unused_associations_data+ method.
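      # For example, to load the plugin with all of the file-related options
      # described above (a sketch):
      #
      #   Sequel::Model.plugin :unused_associations,
      #     file: 'unused_associations.json',
      #     coverage_file: 'unused_associations_coverage.json',
      #     modify_associations: true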
      def self.configure(mod, opts=OPTS)
        mod.instance_exec do
          @unused_associations_coverage_file = opts[:coverage_file]
          @unused_associations_file = opts[:file]
          @unused_associations_data = if opts[:modify_associations]
            if opts[:unused_associations_data]
              opts[:unused_associations_data]
            elsif File.file?(opts[:file])
              Sequel.parse_json(File.binread(opts[:file]))
            end
          end
        end
      end

      module ClassMethods
        # Only the data is copied to subclasses, to allow the :modify_associations
        # plugin option to affect them. The :file and :coverage_file options are not copied
        # to subclasses, as users are expected to call methods such as
        # unused_associations only on the class that is loading the plugin.
        Plugins.inherited_instance_variables(self, :@unused_associations_data=>nil)

        # Synchronize access to the used association reflections.
        def used_association_reflections
          Sequel.synchronize{@used_association_reflections ||= {}}
        end

        # Record access to association reflections to determine which associations are not used.
        def association_reflection(association)
          uar = used_association_reflections
          Sequel.synchronize{uar[association] ||= true}
          super
        end

        # If modifying associations, and this association is marked as not used,
        # and the association does not include the specific :is_used option,
        # skip defining the association.
        def associate(type, assoc_name, opts=OPTS)
          if !opts[:is_used] && @unused_associations_data && (data = @unused_associations_data[name]) && data[assoc_name.to_s] == 'unused'
            return
          end

          super
        end

        # Set up the used_association_reflections storage before freezing.
        def freeze
          used_association_reflections

          super
        end

        # Parse the coverage result, and return the coverage data for the
        # associations for descendants of this class. If the plugin
        # uses the :coverage_file option, the existing coverage file will be loaded
        # if present, and before the method returns, the coverage file will be updated.
        #
        # Options:
        # :coverage_result :: The coverage result to use. This defaults to +Coverage.result+.
        def update_associations_coverage(opts=OPTS)
          coverage_result = opts[:coverage_result] || Coverage.result
          module_mapping = {}
          file = @unused_associations_coverage_file

          coverage_data = if file && File.file?(file)
            Sequel.parse_json(File.binread(file))
          else
            {}
          end

          ([self] + descendants).each do |sc|
            next if sc.associations.empty? || !sc.name
            module_mapping[sc.send(:overridable_methods_module)] = sc
            cov_data = coverage_data[sc.name] ||= {''=>[]}
            cov_data[''].concat(sc.used_association_reflections.keys.map(&:to_s).sort).uniq!
          end

          coverage_result.each do |file, coverage|
            coverage[:methods].each do |(mod, meth), times|
              next unless sc = module_mapping[mod]
              coverage_data[sc.name][meth.to_s] ||= 0
              coverage_data[sc.name][meth.to_s] += times
            end
          end

          if file
            File.binwrite(file, Sequel.object_to_json(coverage_data))
          end

          coverage_data
        end

        # Parse the coverage data returned by #update_associations_coverage,
        # and return data on unused associations and unused association methods.
        #
        # Options:
        # :coverage_data :: The coverage data to use. If not given, it is taken
        #                   from the file specified by the :coverage_file plugin option.
        # :keep_coverage :: Do not delete the file specified by the :coverage_file plugin
        #                   option, even if it exists.
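        # A typical call after all test suites have run, assuming the
        # :coverage_file plugin option was set (a sketch):
        #
        #   unused_data = Sequel::Model.update_unused_associations_data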
        def update_unused_associations_data(options=OPTS)
          coverage_data = options[:coverage_data] || Sequel.parse_json(File.binread(@unused_associations_coverage_file))

          unused_associations_data = {}

          ([self] + descendants).each do |sc|
            next unless cov_data = coverage_data[sc.name]
            reflection_data = cov_data[''] || []

            sc.association_reflections.each do |assoc, ref|
              # Only report associations for the class they are defined in
              next unless ref[:model] == sc

              # Do not report associations using methods_module option, because this plugin only
              # looks in the class's overridable_methods_module
              next if ref[:methods_module]

              info = {}
              if reflection_data.include?(assoc.to_s)
                info[:used] = [:reflection]
              end

              _update_association_coverage_info(info, cov_data, ref.dataset_method, :dataset_method)
              _update_association_coverage_info(info, cov_data, ref.association_method, :association_method)

              unless ref[:orig_opts][:read_only]
                if ref.returns_array?
                  _update_association_coverage_info(info, cov_data, ref[:add_method], :adder)
                  _update_association_coverage_info(info, cov_data, ref[:remove_method], :remover)
                  _update_association_coverage_info(info, cov_data, ref[:remove_all_method], :clearer)
                else
                  _update_association_coverage_info(info, cov_data, ref[:setter_method], :setter)
                end
              end

              next if info.keys == [:missing]

              if !info[:used]
                (unused_associations_data[sc.name] ||= {})[assoc.to_s] = 'unused'
              elsif unused = info[:unused]
                if unused.include?(:setter) || [:adder, :remover, :clearer].all?{|k| unused.include?(k)}
                  [:setter, :adder, :remover, :clearer].each do |k|
                    unused.delete(k)
                  end
                  unused << :read_only
                end
                (unused_associations_data[sc.name] ||= {})[assoc.to_s] = unused.map(&:to_s)
              end
            end
          end

          if @unused_associations_file
            File.binwrite(@unused_associations_file, Sequel.object_to_json(unused_associations_data))
          end

          unless options[:keep_coverage]
            _delete_unused_associations_file(@unused_associations_coverage_file)
          end

          unused_associations_data
        end

        # Return an array of unused associations. These are associations where none of the
        # association methods are used, according to the coverage information. Each entry
        # in the array is an array of two strings, with the first string being the class name
        # and the second string being the association name.
        #
        # Options:
        # :unused_associations_data :: The data to use for determining which associations
        #                              are unused, which is returned from
        #                              +update_unused_associations_data+. If not given,
        #                              loads the data from the file specified by the :file
        #                              plugin option.
        def unused_associations(opts=OPTS)
          unused_associations_data = opts[:unused_associations_data] || Sequel.parse_json(File.binread(@unused_associations_file))

          unused_associations = []
          unused_associations_data.each do |sc, associations|
            associations.each do |assoc, unused|
              if unused == 'unused'
                unused_associations << [sc, assoc]
              end
            end
          end
          unused_associations
        end

        # Return an array of unused association options. These are associations where some
        # but not all of the association methods are used, according to the coverage
        # information. Each entry in the array is an array of three elements. The first
        # element is the class name string, the second element is the association name
        # string, and the third element is a hash of association options that can be used
        # in the association so it does not define methods that are not used.
        #
        # Options:
        # :unused_associations_data :: The data to use for determining which associations
        #                              are unused, which is returned from
        #                              +update_unused_associations_data+. If not given,
        #                              loads the data from the file specified by the :file
        #                              plugin option.
        def unused_association_options(opts=OPTS)
          unused_associations_data = opts[:unused_associations_data] || Sequel.parse_json(File.binread(@unused_associations_file))

          unused_association_methods = []
          unused_associations_data.each do |sc, associations|
            associations.each do |assoc, unused|
              unless unused == 'unused'
                unused_association_methods << [sc, assoc, set_unused_options_for_association({}, unused)]
              end
            end
          end
          unused_association_methods
        end

        # Delete the unused associations coverage file and unused associations data file,
        # if either exists.
        def delete_unused_associations_files
          _delete_unused_associations_file(@unused_associations_coverage_file)
          _delete_unused_associations_file(@unused_associations_file)
        end

        private

        # Delete the given file if it exists.
        def _delete_unused_associations_file(file)
          if file && File.file?(file)
            File.unlink(file)
          end
        end

        # Update the info hash with information on whether the given method was
        # called, according to the coverage information.
        def _update_association_coverage_info(info, coverage_data, meth, key)
          type = case coverage_data[meth.to_s]
          when 0
            :unused
          when Integer
            :used
          else
            # Missing here means there is no coverage information for the
            # method, which indicates the expected method was never
            # defined. In that case, it can be ignored.
            :missing
          end

          (info[type] ||= []) << key
        end

        # Based on the value of +unused+, update the opts hash with association
        # options that will prevent unused association methods from being
        # defined.
        def set_unused_options_for_association(opts, unused)
          opts[:read_only] = true if unused.include?('read_only')
          opts[:no_dataset_method] = true if unused.include?('dataset_method')
          opts[:no_association_method] = true if unused.include?('association_method')
          opts[:adder] = nil if unused.include?('adder')
          opts[:remover] = nil if unused.include?('remover')
          opts[:clearer] = nil if unused.include?('clearer')
          opts
        end

        # If modifying associations, and this association has unused association
        # methods, automatically set the appropriate options so the unused association
        # methods are not defined, unless the association explicitly uses the :is_used
        # option.
        def def_association(opts)
          if !opts[:is_used] && @unused_associations_data && (data = @unused_associations_data[name]) && (unused = data[opts[:name].to_s])
            set_unused_options_for_association(opts, unused)
          end

          super
        end
      end
    end
  end
end
# :nocov:
sequel-5.63.0/lib/sequel/plugins/update_or_create.rb000066400000000000000000000050371434214120600224640ustar00rootroot00000000000000# frozen-string-literal: true

module Sequel
  module Plugins
    # The update_or_create plugin adds methods that make it easier
    # to deal with objects which may or may not yet exist in the database.
    # The first method is update_or_create, which updates an object if it
    # exists in the database, or creates the object if it does not.
    #
    # You can call update_or_create with a block:
    #
    #   Album.update_or_create(name: 'Hello') do |album|
    #     album.num_copies_sold = 1000
    #   end
    #
    # or provide two hashes, with the second one being the attributes
    # to set.
    #
    #   Album.update_or_create({name: 'Hello'}, num_copies_sold: 1000)
    #
    # In both cases, this will check the database to find the album with
    # the name "Hello". If such an album exists, it will be updated to set
    # num_copies_sold to 1000. If no such album exists, an album with the
    # name "Hello" and num_copies_sold 1000 will be created.
    #
    # The second method is find_or_new, which returns the object from the
    # database if it exists, or returns a new (unsaved) object if not. It
    # has the same API as update_or_create, and operates identically to
    # update_or_create except that it doesn't persist any changes.
    #
    # Usage:
    #
    #   # Make all model subclasses support update_or_create
    #   Sequel::Model.plugin :update_or_create
    #
    #   # Make the Album class support update_or_create
    #   Album.plugin :update_or_create
    module UpdateOrCreate
      module ClassMethods
        # Attempt to find a record with the +attrs+, which should be a
        # hash with column symbol keys. If such a record exists, update it
        # with the values given in +set_attrs+. If no such record exists,
        # create a new record with the columns specified by both +attrs+ and
        # +set_attrs+, with the ones in +set_attrs+ taking priority. If
        # a block is given, the object is yielded to the block before the
        # object is saved. Returns the new or updated object.
        def update_or_create(attrs, set_attrs=nil, &block)
          obj = find_or_new(attrs, set_attrs, &block)
          obj.save_changes
          obj
        end

        # Operates the same as +update_or_create+, but returns the object
        # without persisting changes (no UPDATE/INSERT queries).
        def find_or_new(attrs, set_attrs=nil)
          obj = find(attrs) || new(attrs)
          obj.set(set_attrs) if set_attrs
          yield obj if defined?(yield)
          obj
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/update_primary_key.rb000066400000000000000000000044771434214120600230570ustar00rootroot00000000000000# frozen-string-literal: true

module Sequel
  module Plugins
    # The update_primary_key plugin allows you to modify an object's
    # primary key and then save the record. Sequel does not work
    # correctly with primary key modifications by default. Sequel
    # is designed to work with surrogate primary keys that never need to be
    # modified, but this plugin makes it work correctly with natural
    # primary keys that may need to be modified. Example:
    #
    #   album = Album[1]
    #   album.id = 2
    #   album.save
    #
    # Usage:
    #
    #   # Make all model subclasses support primary key updates
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :update_primary_key
    #
    #   # Make the Album class support primary key updates
    #   Album.plugin :update_primary_key
    module UpdatePrimaryKey
      module InstanceMethods
        # Clear the cached primary key.
        def after_update
          super
          @pk_hash = nil
        end

        # Use the cached primary key if one is present.
        def pk_hash
          @pk_hash || super
        end

        private

        # If the primary key column changes, clear related associations and cache
        # the previous primary key values.
        def change_column_value(column, value)
          pk = primary_key
          if (pk.is_a?(Array) ? pk.include?(column) : pk == column)
            @pk_hash ||= pk_hash unless new?
            clear_associations_using_primary_key
          end
          super
        end

        # Clear associations that are likely to be tied to the primary key.
        # Note that this currently can clear additional associations that don't reference
        # the primary key (such as one_to_many associations referencing a column other than the
        # primary key).
        def clear_associations_using_primary_key
          associations.keys.each do |k|
            associations.delete(k) if model.association_reflection(k)[:type] != :many_to_one
          end
        end

        # Do not use prepared statements for update queries, since they don't work
        # in the case where the primary key has changed.
        def use_prepared_statements_for?(type)
          if type == :update
            false
          else
            super if defined?(super)
          end
        end
      end
    end
  end
end
sequel-5.63.0/lib/sequel/plugins/update_refresh.rb000066400000000000000000000055131434214120600221560ustar00rootroot00000000000000# frozen-string-literal: true

module Sequel
  module Plugins
    # The update_refresh plugin makes the model class refresh
    # the object after updating. By default, Sequel only
    # refreshes automatically after inserting new rows, not
    # after updating. However, if you are using triggers
    # to modify the contents of updated rows, it can be
    # helpful to immediately get the current data after
    # updating.
    #
    # If the dataset supports UPDATE RETURNING, this
    # plugin will use it so that it can retrieve the current
    # data in the same query it uses for the update.
    #
    # Usage:
    #
    #   # Make all model subclasses refresh after update
    #   Sequel::Model.plugin :update_refresh
    #
    #   # Make the Album class refresh after update
    #   Album.plugin :update_refresh
    #
    # As a performance optimisation, if you know only specific
    # columns will have changed, you can specify them via the
    # :columns option. This can be a performance gain if it
    # would avoid pointlessly comparing many other columns.
    # Note that this option currently only has an effect if the
    # dataset supports RETURNING.
    #
    #   # Only include the artist column in RETURNING
    #   Album.plugin :update_refresh, columns: :artist
    #
    #   # Only include the artist and title columns in RETURNING
    #   Album.plugin :update_refresh, columns: [:artist, :title]
    module UpdateRefresh
      # Set the specific columns to refresh, if the :columns option
      # is provided.
      def self.configure(model, opts=OPTS)
        model.instance_exec do
          @update_refresh_columns = Array(opts[:columns])
        end
      end

      module ClassMethods
        # The specific columns to refresh when updating, if UPDATE RETURNING is supported.
        attr_reader :update_refresh_columns

        # Freeze the update refresh columns when freezing the model class.
        def freeze
          @update_refresh_columns.freeze

          super
        end
      end

      module InstanceMethods
        # If the dataset does not support UPDATE RETURNING, then refresh after an update.
        def after_update
          super
          unless this.supports_returning?(:update)
            refresh
          end
        end

        private

        # If the dataset supports UPDATE RETURNING, use it to do the refresh in the same
        # query as the update.
        def _update_without_checking(columns)
          ds = _update_dataset
          if ds.supports_returning?(:update)
            ds = ds.opts[:returning] ? ds : ds.returning(*self.class.update_refresh_columns)
            rows = ds.update(columns)
            n = rows.length
            if n == 1
              @values.merge!(rows.first)
            end
            n
          else
            super
          end
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/uuid.rb

# frozen-string-literal: true

require 'securerandom'

module Sequel
  module Plugins
    # The uuid plugin creates hooks that automatically create a uuid for every
    # instance.
    #
    # Usage:
    #
    #   # Uuid all model instances using +uuid+
    #   # (called before loading subclasses)
    #   Sequel::Model.plugin :uuid
    #
    #   # Uuid Album instances, with custom column name
    #   Album.plugin :uuid, field: :my_uuid
    module Uuid
      # Configure the plugin by setting the available options.  Note that
      # if this method is run more than once, previous settings are ignored,
      # and it will just use the settings given or the default settings.  Options:
      # :field :: The field to hold the uuid (default: :uuid)
      # :force :: Whether to overwrite an existing uuid (default: false)
      def self.configure(model, opts=OPTS)
        model.instance_exec do
          @uuid_field = opts[:field]||:uuid
          @uuid_overwrite = opts[:force]||false
        end
      end

      module ClassMethods
        # The field to store the uuid
        attr_reader :uuid_field

        # Whether to overwrite the created uuid if it already exists
        def uuid_overwrite?
          @uuid_overwrite
        end

        Plugins.inherited_instance_variables(self, :@uuid_field=>nil, :@uuid_overwrite=>nil)
      end

      module InstanceMethods
        # Set the uuid when creating
        def before_validation
          set_uuid if new?
          super
        end

        private

        # Create a new UUID.  This method can be overridden to use a separate
        # method for creating UUIDs.
        def create_uuid
          SecureRandom.uuid
        end

        # If the object has accessor methods for the uuid field, and the uuid
        # value is nil or overwriting it is allowed, set the uuid.
        def set_uuid(uuid=create_uuid)
          field = model.uuid_field
          meth = :"#{field}="
          if respond_to?(field) && respond_to?(meth) && (model.uuid_overwrite? || get_column_value(field).nil?)
            set_column_value(meth, uuid)
          end
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/validate_associated.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The validate_associated plugin allows you to validate associated
    # objects.  It also offers the ability to delay the validation of
    # associated objects until the current object is validated.
    # If the associated object is invalid, validation error messages
    # from the associated object will be added to the current object's
    # validation errors.
    #
    # Usage:
    #
    #   # Make all model subclasses support validating associated objects
    #   Sequel::Model.plugin :validate_associated
    #
    #   # Make the Album class support validating associated objects
    #   Album.plugin :validate_associated
    #
    #   class Album
    #     many_to_one :artist
    #     many_to_many :tags
    #
    #     # Always validate associated artist when saving the album
    #     def validate
    #       super
    #       if artist
    #         validate_associated_object(model.association_reflection(:artist), artist)
    #       end
    #     end
    #
    #     # When saving after calling this method, validate the given tag as well.
    #     def check_tag!(tag)
    #       delay_validate_associated_object(model.association_reflection(:tags), tag)
    #     end
    #   end
    module ValidateAssociated
      # Depend on the instance_hooks plugin.
      def self.apply(mod)
        mod.plugin :instance_hooks
      end

      module InstanceMethods
        private

        # Delay validating the associated object until validating the current object.
        def delay_validate_associated_object(reflection, obj)
          after_validation_hook{validate_associated_object(reflection, obj)}
        end

        # Validate the given associated object, adding any validation error messages from the
        # given object to the parent object.
        def validate_associated_object(reflection, obj)
          return if reflection[:validate] == false
          association = reflection[:name]
          if (reflection[:type] == :one_to_many || reflection[:type] == :one_to_one) && (key = reflection[:key]).is_a?(Symbol) && !(pk_val = obj.values[key])
            p_key = pk unless pk.is_a?(Array)
            if p_key
              obj.values[key] = p_key
            else
              ignore_key_errors = true
            end
          end

          unless obj.valid?
            if ignore_key_errors
              # Ignore errors on the key column in the associated object.  This column
              # will be set when saving to a presumably valid value using a column
              # in the current object (which may not be available until after the current
              # object is saved).
              obj.errors.delete(key)
              obj.errors.delete_if{|k,| Array === k && k.include?(key)}
            end

            obj.errors.full_messages.each do |m|
              errors.add(association, m)
            end
          end
          nil
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/validation_class_methods.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # Sequel's built-in validation_class_methods plugin adds backwards compatibility
    # for the legacy class-level validation methods (e.g. validates_presence_of :column).
    #
    # It is recommended to use the validation_helpers plugin instead of this one,
    # as it is less complex and more flexible.  However, this plugin provides reflection
    # support, since it is class-level, while the instance-level validation_helpers
    # plugin does not.
    #
    # Usage:
    #
    #   # Add the validation class methods to all model subclasses (called before loading subclasses)
    #   Sequel::Model.plugin :validation_class_methods
    #
    #   # Add the validation class methods to the Album class
    #   Album.plugin :validation_class_methods
    module ValidationClassMethods
      # Setup the validations hash for the given model.
      def self.apply(model)
        model.class_eval do
          @validations = {}
          @validation_reflections = {}
        end
      end

      module ClassMethods
        # A hash of validations for this model class.  Keys are column symbols,
        # values are arrays of validation procs.
        attr_reader :validations

        # A hash of validation reflections for this model class.  Keys are column
        # symbols, values are an array of two element arrays, with the first element
        # being the validation type symbol and the second being a hash of validation
        # options.
        attr_reader :validation_reflections

        # Freeze validation metadata when freezing model class.
        def freeze
          @validations.freeze.each_value(&:freeze)
          @validation_reflections.freeze.each_value do |vs|
            vs.freeze.each do |v|
              v.freeze
              v.last.freeze
            end
          end

          super
        end

        # The Generator class is used to generate validation definitions using
        # the validates {} idiom.
        class Generator
          # Initializes a new generator.
          def initialize(receiver, &block)
            @receiver = receiver
            instance_exec(&block)
          end

          # Delegates method calls to the receiver by calling receiver.validates_xxx.
          def method_missing(m, *args, &block)
            @receiver.send(:"validates_#{m}", *args, &block)
          end

          # This object responds to all validates_* methods the model responds to.
          def respond_to_missing?(meth, include_private)
            @receiver.respond_to?(:"validates_#{meth}", include_private)
          end
        end

        # Returns true if validations are defined.
        def has_validations?
          !validations.empty?
        end

        Plugins.inherited_instance_variables(self, :@validations=>:hash_dup, :@validation_reflections=>:hash_dup)

        # Instructs the model to skip validations defined in superclasses
        def skip_superclass_validations
          superclass.validations.each do |att, procs|
            if @validations[att]
              @validations[att] -= procs
            end
          end
          @skip_superclass_validations = true
        end

        # Whether validations defined in superclasses should be skipped
        def skip_superclass_validations?
@skip_superclass_validations end # Defines validations by converting a longhand block into a series of # shorthand definitions. For example: # # class MyClass < Sequel::Model # validates do # length_of :name, minimum: 6 # length_of :password, minimum: 8 # end # end # # is equivalent to: # # class MyClass < Sequel::Model # validates_length_of :name, minimum: 6 # validates_length_of :password, minimum: 8 # end def validates(&block) Generator.new(self, &block) end # Validates the given instance. def validate(o) validations.each do |att, procs| v = case att when Array att.map{|a| o.get_column_value(a)} else o.get_column_value(att) end procs.each {|tag, p| p.call(o, att, v)} end end # Validates acceptance of an attribute. Just checks that the value # is equal to the :accept option. This method is unique in that # :allow_nil is assumed to be true instead of false. # # Possible Options: # :accept :: The value required for the object to be valid (default: '1') # :message :: The message to use (default: 'is not accepted') def validates_acceptance_of(*atts) opts = { :message => 'is not accepted', :allow_nil => true, :accept => '1', :tag => :acceptance, }.merge!(extract_options!(atts)) reflect_validation(:acceptance, opts, atts) atts << opts validates_each(*atts) do |o, a, v| o.errors.add(a, opts[:message]) unless v == opts[:accept] end end # Validates confirmation of an attribute. Checks that the object has # a _confirmation value matching the current value. For example: # # validates_confirmation_of :blah # # Just makes sure that object.blah = object.blah_confirmation. Often used for passwords # or email addresses on web forms. # # Possible Options: # :message :: The message to use (default: 'is not confirmed') def validates_confirmation_of(*atts) opts = { :message => 'is not confirmed', :tag => :confirmation, }.merge!(extract_options!(atts)) reflect_validation(:confirmation, opts, atts) atts << opts validates_each(*atts) do |o, a, v| o.errors.add(a, opts[:message]) unless v == o.get_column_value(:"#{a}_confirmation") end end # Adds a validation for each of the given attributes using the supplied # block. The block must accept three arguments: instance, attribute and # value, e.g.: # # validates_each :name, :password do |object, attribute, value| # object.errors.add(attribute, 'is not nice') unless value.nice? # end # # Possible Options: # :allow_blank :: Whether to skip the validation if the value is blank. # :allow_missing :: Whether to skip the validation if the attribute isn't a key in the # values hash. This is different from allow_nil, because Sequel only sends the attributes # in the values when doing an insert or update. If the attribute is not present, Sequel # doesn't specify it, so the database will use the table's default value. This is different # from having an attribute in values with a value of nil, which Sequel will send as NULL. # If your database table has a non NULL default, this may be a good option to use. You # don't want to use allow_nil, because if the attribute is in values but has a value nil, # Sequel will attempt to insert a NULL value into the database, instead of using the # database's default. # :allow_nil :: Whether to skip the validation if the value is nil. # :if :: A symbol (indicating an instance_method) or proc (which is used to define an instance method) # skipping this validation if it returns nil or false. # :tag :: The tag to use for this validation. 
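        #
        # For example, a validation that combines several of these options might
        # look like this (the +website+ column and +published?+ method here are
        # hypothetical):
        #
        #   validates_each :website, allow_nil: true, if: :published? do |obj, att, value|
        #     obj.errors.add(att, 'is not an HTTP(S) URL') unless value =~ %r{\Ahttps?://}
        #   end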
        def validates_each(*atts, &block)
          opts = extract_options!(atts)
          blank_meth = db.method(:blank_object?).to_proc
          i = opts[:if]
          am = opts[:allow_missing]
          an = opts[:allow_nil]
          ab = opts[:allow_blank]
          blk = if i || am || an || ab
            if i.is_a?(Proc)
              i = Plugins.def_sequel_method(self, "validation_class_methods_if", 0, &i)
            end

            proc do |o,a,v|
              next if i && !validation_if_proc(o, i)
              next if an && Array(v).all?(&:nil?)
              next if ab && Array(v).all?(&blank_meth)
              next if am && Array(a).all?{|x| !o.values.has_key?(x)}
              block.call(o,a,v)
            end
          else
            block
          end
          tag = opts[:tag]
          atts.each do |a|
            a_vals = Sequel.synchronize{validations[a] ||= []}
            if tag && (old = a_vals.find{|x| x[0] == tag})
              old[1] = blk
            else
              a_vals << [tag, blk]
            end
          end
        end

        # Validates the format of an attribute, checking the string representation of the
        # value against the regular expression provided by the :with option.
        #
        # Possible Options:
        # :message :: The message to use (default: 'is invalid')
        # :with :: The regular expression to validate the value with (required).
        def validates_format_of(*atts)
          opts = {
            :message => 'is invalid',
            :tag => :format,
          }.merge!(extract_options!(atts))

          unless opts[:with].is_a?(Regexp)
            raise ArgumentError, "A regular expression must be supplied as the :with option of the options hash"
          end

          reflect_validation(:format, opts, atts)
          atts << opts
          validates_each(*atts) do |o, a, v|
            o.errors.add(a, opts[:message]) unless v.to_s =~ opts[:with]
          end
        end

        # Validates the length of an attribute.
        #
        # Possible Options:
        # :is :: The exact size required for the value to be valid (no default)
        # :maximum :: The maximum size allowed for the value (no default)
        # :message :: The message to use (no default, overrides :nil_message, :too_long, :too_short, and :wrong_length
        #             options if present)
        # :minimum :: The minimum size allowed for the value (no default)
        # :nil_message :: The message to use if the :maximum option is used and the value is nil (default: 'is not present')
        # :too_long :: The message to use if the value is too long (default: 'is too long')
        # :too_short :: The message to use if the value is too short (default: 'is too short')
        # :within :: The array/range that must include the size of the value for it to be valid (no default)
        # :wrong_length :: The message to use if the value is not valid (default: 'is the wrong length')
        def validates_length_of(*atts)
          opts = {
            :nil_message => 'is not present',
            :too_long => 'is too long',
            :too_short => 'is too short',
            :wrong_length => 'is the wrong length'
          }.merge!(extract_options!(atts))

          opts[:tag] ||= ([:length] + [:maximum, :minimum, :is, :within].reject{|x| !opts.include?(x)}).join('-').to_sym
          reflect_validation(:length, opts, atts)
          atts << opts
          validates_each(*atts) do |o, a, v|
            if m = opts[:maximum]
              o.errors.add(a, opts[:message] || (v ? opts[:too_long] : opts[:nil_message])) unless v && v.size <= m
            end
            if m = opts[:minimum]
              o.errors.add(a, opts[:message] || opts[:too_short]) unless v && v.size >= m
            end
            if i = opts[:is]
              o.errors.add(a, opts[:message] || opts[:wrong_length]) unless v && v.size == i
            end
            if w = opts[:within]
              o.errors.add(a, opts[:message] || opts[:wrong_length]) unless v && w.public_send(w.respond_to?(:cover?) ? :cover? : :include?, v.size)
            end
          end
        end

        # Validates whether an attribute is a number.
# # Possible Options: # :message :: The message to use (default: 'is not a number') # :only_integer :: Whether only integers are valid values (default: false) def validates_numericality_of(*atts) opts = { :message => 'is not a number', :tag => :numericality, }.merge!(extract_options!(atts)) reflect_validation(:numericality, opts, atts) atts << opts validates_each(*atts) do |o, a, v| begin if opts[:only_integer] Kernel.Integer(v.to_s) else Kernel.Float(v.to_s) end rescue o.errors.add(a, opts[:message]) end end end # Validates the presence of an attribute. Requires the value not be blank, # with false considered present instead of absent. # # Possible Options: # :message :: The message to use (default: 'is not present') def validates_presence_of(*atts) opts = { :message => 'is not present', :tag => :presence, }.merge!(extract_options!(atts)) reflect_validation(:presence, opts, atts) atts << opts validates_each(*atts) do |o, a, v| o.errors.add(a, opts[:message]) if db.send(:blank_object?, v) && v != false end end # Validates that an attribute is within a specified range or set of values. # # Possible Options: # :in :: An array or range of values to check for validity (required) # :message :: The message to use (default: 'is not in range or set: <specified range>') def validates_inclusion_of(*atts) opts = extract_options!(atts) n = opts[:in] unless n && (n.respond_to?(:cover?) || n.respond_to?(:include?)) raise ArgumentError, "The :in parameter is required, and must respond to cover? or include?" end opts[:message] ||= "is not in range or set: #{n.inspect}" reflect_validation(:inclusion, opts, atts) atts << opts validates_each(*atts) do |o, a, v| o.errors.add(a, opts[:message]) unless n.public_send(n.respond_to?(:cover?) ? :cover? : :include?, v) end end # Validates whether an attribute has the correct ruby type for the associated # database type. This is generally useful in conjunction with # raise_on_typecast_failure = false, to handle typecasting errors at validation # time instead of at setter time. # # Possible Options: # :message :: The message to use (default: 'is not a valid (integer|datetime|etc.)') def validates_schema_type(*atts) opts = { :tag => :schema_type, }.merge!(extract_options!(atts)) reflect_validation(:schema_type, opts, atts) atts << opts validates_each(*atts) do |o, a, v| next if v.nil? || (klass = o.send(:schema_type_class, a)).nil? if klass.is_a?(Array) ? !klass.any?{|kls| v.is_a?(kls)} : !v.is_a?(klass) message = opts[:message] || "is not a valid #{Array(klass).join(" or ").downcase}" o.errors.add(a, message) end end end # Validates only if the fields in the model (specified by atts) are # unique in the database. Pass an array of fields instead of multiple # fields to specify that the combination of fields must be unique, # instead of that each field should have a unique value. # # This means that the code: # validates_uniqueness_of([:column1, :column2]) # validates the grouping of column1 and column2 while # validates_uniqueness_of(:column1, :column2) # validates them separately. # # You should also add a unique index in the # database, as this suffers from a fairly obvious race condition. # # Possible Options: # :message :: The message to use (default: 'is already taken') def validates_uniqueness_of(*atts) opts = { :message => 'is already taken', :tag => :uniqueness, }.merge!(extract_options!(atts)) reflect_validation(:uniqueness, opts, atts) atts << opts validates_each(*atts) do |o, a, v| error_field = a a = Array(a) v = Array(v) next if v.empty? || !v.all? 
            ds = o.class.where(a.zip(v))
            num_dups = ds.count
            allow = if num_dups == 0
              # No unique value in the database
              true
            elsif num_dups > 1
              # Multiple "unique" values in the database!!
              # Someone didn't add a unique index
              false
            elsif o.new?
              # New record, but unique value already exists in the database
              false
            elsif ds.first === o
              # Unique value exists in database, but for the same record, so the update won't cause a duplicate record
              true
            else
              false
            end
            o.errors.add(error_field, opts[:message]) unless allow
          end
        end

        private

        # Removes and returns the last member of the array if it is a hash.  Otherwise,
        # an empty hash is returned.  This method is useful when writing methods that
        # take an options hash as the last parameter.
        def extract_options!(array)
          array.last.is_a?(Hash) ? array.pop : OPTS
        end

        # Add the validation reflection to the class's validations.
        def reflect_validation(type, opts, atts)
          atts.each do |att|
            (validation_reflections[att] ||= []) << [type, opts]
          end
        end

        # Handle the :if option for validations
        def validation_if_proc(o, i)
          case i
          when Symbol
            o.get_column_value(i)
          else
            raise(::Sequel::Error, "invalid value for :if validation option")
          end
        end
      end

      module InstanceMethods
        # Validates the object.
        def validate
          model.validate(self)
          super
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/validation_contexts.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The validation_contexts plugin adds support for a validation_context method inside a validate
    # method.  You pass in the validation context to use via the :validation_context option to
    # Sequel::Model#save and Sequel::Model#valid?:
    #
    #   class Album < Sequel::Model
    #     plugin :validation_contexts
    #     def validate
    #       super
    #       errors.add(:status_id, 'not 1') if status_id != 1 && validation_context == :initial
    #       errors.add(:status_id, 'not 2') if status_id != 2 && validation_context == :approve
    #     end
    #   end
    #
    #   Album.new(status_id: 1).valid?(validation_context: :initial) # => true
    #   Album.new(status_id: 2).valid?(validation_context: :initial) # => false
    #
    #   Album.new(status_id: 1).valid?(validation_context: :approve) # => false
    #   Album.new(status_id: 2).valid?(validation_context: :approve) # => true
    #
    # There is no validation context used by default, so validation_context will be
    # +nil+ if one is not specified.  If you want to differentiate between creating new
    # objects and updating existing objects, just use +new?+.
    #
    # Once this plugin is loaded into a model, after you freeze an instance
    # of that model, you can no longer specify a validation context when
    # validating the instance.
    module ValidationContexts
      module InstanceMethods
        # The validation context to use for the current validation.
        # Set via the :validation_context option passed to save/valid?.
        attr_reader :validation_context

        private

        # Set validation context before running validations
        def _valid?(opts)
          @validation_context = opts[:validation_context] if opts[:validation_context]
          super
        ensure
          @validation_context = nil if @validation_context
        end
      end
    end
  end
end

sequel-5.63.0/lib/sequel/plugins/validation_helpers.rb

# frozen-string-literal: true

module Sequel
  module Plugins
    # The validation_helpers plugin contains validate_* methods designed to be called inside Model#validate
    # to perform validations:
    #
    #   Sequel::Model.plugin :validation_helpers
    #   class Album < Sequel::Model
    #     def validate
    #       super
    #       validates_min_length 1, :num_tracks
    #     end
    #   end
    #
    # The validates_unique method has a unique API, but the other validations have the API explained here:
    #
    # Arguments:
    # atts :: Single attribute symbol or an array of attribute symbols specifying the
    #         attribute(s) to validate.
    # Options:
    # :allow_blank :: Whether to skip the validation if the value is blank.
    # :allow_missing :: Whether to skip the validation if the attribute isn't a key in the
    #                   values hash.  This is different from allow_nil, because Sequel only sends the attributes
    #                   in the values when doing an insert or update.  If the attribute is not present, Sequel
    #                   doesn't specify it, so the database will use the table's default value.  This is different
    #                   from having an attribute in values with a value of nil, which Sequel will send as NULL.
    #                   If your database table has a non NULL default, this may be a good option to use.  You
    #                   don't want to use allow_nil, because if the attribute is in values but has a value nil,
    #                   Sequel will attempt to insert a NULL value into the database, instead of using the
    #                   database's default.
    # :allow_nil :: Whether to skip the validation if the value is nil.
    # :from :: Set to :values to pull column values from the values hash instead of calling the related method.
    #          Allows for setting up methods on the underlying column values, in the cases where the model
    #          transforms the underlying value before returning it, such as when using serialization.
    # :message :: The message to use.  Can be a string which is used directly, or a
    #             proc which is called.  If the validation method takes an argument before the array of attributes,
    #             that argument is passed as an argument to the proc.
    # :skip_invalid :: Do not try to validate columns that are already invalid.
    #
    # The default validation options for all models can be modified by
    # overriding the Model#default_validation_helpers_options private method.
    # By changing the default options, you can set up internationalization of the
    # error messages.  For example, you could modify the default options:
    #
    #   class Sequel::Model
    #     private
    #
    #     def default_validation_helpers_options(type)
    #       case type
    #       when :exact_length
    #         {message: lambda{|exact| I18n.t("errors.exact_length", exact: exact)}}
    #       when :integer
    #         {message: lambda{I18n.t("errors.integer")}}
    #       else
    #         super
    #       end
    #     end
    #   end
    #
    # and then use something like this in your yaml translation file:
    #
    #   en:
    #     errors:
    #       exact_length: "is not %{exact} characters"
    #       integer: "is not a number"
    #
    # Note that if you want to support internationalization of Errors#full_messages,
    # it is easiest to override Errors#full_message (note singular form and not plural form).
    # Here's an example:
    #
    #   class Sequel::Model::Errors
    #     private
    #     def full_message(attribute, error_msg)
    #       "#{Array(attribute).join(I18n.t('errors.joiner'))} #{error_msg}"
    #     end
    #   end
    module ValidationHelpers
      DEFAULT_OPTIONS = {
        :exact_length=>{:message=>lambda{|exact| "is not #{exact} characters"}},
        :format=>{:message=>lambda{|with| 'is invalid'}},
        :includes=>{:message=>lambda{|set| "is not in range or set: #{set.inspect}"}},
        :integer=>{:message=>lambda{"is not a number"}},
        :length_range=>{:message=>lambda{|range| "is too short or too long"}},
        :max_length=>{:message=>lambda{|max| "is longer than #{max} characters"}, :nil_message=>lambda{"is not present"}},
        :max_value=>{:message=>lambda{|max| "is greater than maximum allowed value"}},
        :min_length=>{:message=>lambda{|min| "is shorter than #{min} characters"}},
        :min_value=>{:message=>lambda{|min| "is less than minimum allowed value"}},
        :not_null=>{:message=>lambda{"is not present"}},
        :no_null_byte=>{:message=>lambda{"contains a null byte"}},
        :numeric=>{:message=>lambda{"is not a number"}},
        :operator=>{:message=>lambda{|operator, rhs| "is not #{operator} #{rhs}"}},
        :type=>{:message=>lambda{|klass| klass.is_a?(Array) ? "is not a valid #{klass.join(" or ").downcase}" : "is not a valid #{klass.to_s.downcase}"}},
        :presence=>{:message=>lambda{"is not present"}},
        :unique=>{:message=>lambda{'is already taken'}}
      }.freeze
      DEFAULT_OPTIONS.each_value(&:freeze)

      module InstanceMethods
        # Check that the attribute values are the given exact length.
        def validates_exact_length(exact, atts, opts=OPTS)
          validatable_attributes_for_type(:exact_length, atts, opts){|a,v,m| validation_error_message(m, exact) if v.nil? || v.length != exact}
        end

        # Check the string representation of the attribute value(s) against the regular expression +with+.
        def validates_format(with, atts, opts=OPTS)
          validatable_attributes_for_type(:format, atts, opts){|a,v,m| validation_error_message(m, with) unless v.to_s =~ with}
        end

        # Check attribute value(s) is included in the given set.
        def validates_includes(set, atts, opts=OPTS)
          validatable_attributes_for_type(:includes, atts, opts){|a,v,m| validation_error_message(m, set) unless set.public_send(set.respond_to?(:cover?) ? :cover? : :include?, v)}
        end

        # Check attribute value(s) string representation is a valid integer.
        def validates_integer(atts, opts=OPTS)
          validatable_attributes_for_type(:integer, atts, opts) do |a,v,m|
            begin
              Kernel.Integer(v.to_s)
              nil
            rescue
              validation_error_message(m)
            end
          end
        end

        # Check that the attribute values' length is in the specified range.
        def validates_length_range(range, atts, opts=OPTS)
          validatable_attributes_for_type(:length_range, atts, opts){|a,v,m| validation_error_message(m, range) if v.nil? || !range.cover?(v.length)}
        end

        # Check that the attribute values are not longer than the given max length.
        #
        # Accepts a :nil_message option that is the error message to use when the
        # value is nil instead of being too long.
        def validates_max_length(max, atts, opts=OPTS)
          validatable_attributes_for_type(:max_length, atts, opts) do |a,v,m|
            if v.nil?
              validation_error_message(opts[:nil_message] || default_validation_helpers_options(:max_length)[:nil_message])
            elsif v.length > max
              validation_error_message(m, max)
            end
          end
        end

        # Check that the attribute values are not greater than the given maximum value.
        # Does not perform validation if attribute value is nil.
        # You should only call this if you have checked the attribute value has the expected type.
        def validates_max_value(max, atts, opts=OPTS)
          validatable_attributes_for_type(:max_value, atts, opts) do |a,v,m|
            validation_error_message(m, max) if !v.nil? && v > max
          end
        end

        # Check that the attribute values are not shorter than the given min length.
        def validates_min_length(min, atts, opts=OPTS)
          validatable_attributes_for_type(:min_length, atts, opts){|a,v,m| validation_error_message(m, min) if v.nil? || v.length < min}
        end

        # Check that the attribute values are not less than the given minimum value.
        # Does not perform validation if attribute value is nil.
        # You should only call this if you have checked the attribute value has the expected type.
        def validates_min_value(min, atts, opts=OPTS)
          validatable_attributes_for_type(:min_value, atts, opts) do |a,v,m|
            validation_error_message(m, min) if !v.nil? && v < min
          end
        end

        # Check attribute value(s) are not NULL/nil.
        def validates_not_null(atts, opts=OPTS)
          validatable_attributes_for_type(:not_null, atts, opts){|a,v,m| validation_error_message(m) if v.nil?}
        end

        # Check attribute value(s) does not contain a null ("\0", ASCII NUL) byte.
        def validates_no_null_byte(atts, opts=OPTS)
          validatable_attributes_for_type(:no_null_byte, atts, opts){|a,v,m| validation_error_message(m) if String === v && v.include?("\0")}
        end

        # Check attribute value(s) string representation is a valid float.
        def validates_numeric(atts, opts=OPTS)
          validatable_attributes_for_type(:numeric, atts, opts) do |a,v,m|
            begin
              Kernel.Float(v.to_s)
              nil
            rescue
              validation_error_message(m)
            end
          end
        end

        # Check attribute value(s) against a specified value and operation, e.g.
        # validates_operator(:>, 3, :value) validates that value > 3.
        def validates_operator(operator, rhs, atts, opts=OPTS)
          validatable_attributes_for_type(:operator, atts, opts){|a,v,m| validation_error_message(m, operator, rhs) if v.nil? || !v.public_send(operator, rhs)}
        end

        # Validates for all of the model columns (or just the given columns)
        # that the column value is an instance of the expected class based on
        # the column's schema type.
        def validates_schema_types(atts=keys, opts=OPTS)
          Array(atts).each do |k|
            if type = schema_type_class(k)
              validates_type(type, k, {:allow_nil=>true}.merge!(opts))
            end
          end
        end

        # Check if value is an instance of a class.  If +klass+ is an array,
        # the value must be an instance of one of the classes in the array.
        def validates_type(klass, atts, opts=OPTS)
          klass = klass.to_s.constantize if klass.is_a?(String) || klass.is_a?(Symbol)
          validatable_attributes_for_type(:type, atts, opts) do |a,v,m|
            if klass.is_a?(Array) ? !klass.any?{|kls| v.is_a?(kls)} : !v.is_a?(klass)
              validation_error_message(m, klass)
            end
          end
        end

        # Check attribute value(s) is not considered blank by the database, but allow false values.
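        #
        # For example, assuming the model has name and artist_id columns:
        #
        #   validates_presence [:name, :artist_id], message: 'is required'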
        def validates_presence(atts, opts=OPTS)
          validatable_attributes_for_type(:presence, atts, opts){|a,v,m| validation_error_message(m) if model.db.send(:blank_object?, v) && v != false}
        end

        # Checks that there are no duplicate values in the database for the given
        # attributes.  Pass an array of fields instead of multiple
        # fields to specify that the combination of fields must be unique,
        # instead of that each field should have a unique value.
        #
        # This means that the code:
        #   validates_unique([:column1, :column2])
        # validates the grouping of column1 and column2 while
        #   validates_unique(:column1, :column2)
        # validates them separately.
        #
        # You can pass a block, which is yielded the dataset in which the columns
        # must be unique.  So if you are doing a soft delete of records, in which
        # the name must be unique, but only for active records:
        #
        #   validates_unique(:name){|ds| ds.where(:active)}
        #
        # You should also add a unique index in the
        # database, as this suffers from a fairly obvious race condition.
        #
        # This validation does not respect the :allow_* options that the other validations accept,
        # since it can deal with a grouping of multiple attributes.
        #
        # Possible Options:
        # :dataset :: The base dataset to use for the unique query, defaults to the
        #             model's dataset.
        # :message :: The message to use (default: 'is already taken')
        # :only_if_modified :: Only check the uniqueness if the object is new or
        #                      one of the columns has been modified, true by default.
        # :where :: A callable object where call takes three arguments, a dataset,
        #           the current object, and an array of columns, and should return
        #           a modified dataset that is filtered to include only rows with
        #           the same values as the current object for each column in the array.
        #
        # If you want to do a case insensitive uniqueness validation on a database that
        # is case sensitive by default, you can use:
        #
        #   validates_unique :column, where:(lambda do |ds, obj, cols|
        #     ds.where(cols.map do |c|
        #       v = obj.public_send(c)
        #       v = v.downcase if v
        #       [Sequel.function(:lower, c), v]
        #     end)
        #   end)
        def validates_unique(*atts)
          opts = default_validation_helpers_options(:unique)
          if atts.last.is_a?(Hash)
            opts = opts.merge(atts.pop)
          end
          message = validation_error_message(opts[:message])
          from_values = opts[:from] == :values
          where = opts[:where]
          atts.each do |a|
            arr = Array(a)
            next if arr.any?{|x| errors.on(x)}
            cc = changed_columns
            next if opts.fetch(:only_if_modified, true) && !new? && !arr.any?{|x| cc.include?(x)}
            ds = opts[:dataset] || model.dataset
            ds = if where
              where.call(ds, self, arr)
            else
              vals = arr.map{|x| from_values ? values[x] : get_column_value(x)}
              next if vals.any?(&:nil?)
              ds.where(arr.zip(vals))
            end
            ds = yield(ds) if defined?(yield)
            unless new?
              h = ds.joined_dataset? ? qualified_pk_hash : pk_hash
              ds = ds.exclude(h)
            end
            errors.add(a, message) unless ds.count == 0
          end
        end

        private

        # The default options hash for the given type of validation.  Can
        # be overridden on a per-model basis for different per model defaults.
        # The hash returned must include a :message option that is either a
        # proc or string.
        def default_validation_helpers_options(type)
          DEFAULT_OPTIONS[type]
        end

        # Skip validating any attribute that matches one of the allow_* options,
        # or already has an error if the skip_invalid option is given.
        #
        # Otherwise, yield the attribute, value, and passed option :message to
        # the block.  If the block returns anything except nil or false, add it as
        # an error message for that attribute.
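        #
        # For example, validates_not_null above passes a block equivalent to:
        #
        #   {|a, v, m| validation_error_message(m) if v.nil?}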
def validatable_attributes(atts, opts) am, an, ab, m, si = opts.values_at(:allow_missing, :allow_nil, :allow_blank, :message, :skip_invalid) from_values = opts[:from] == :values Array(atts).each do |a| next if si && errors.on(a) next if am && !values.has_key?(a) v = from_values ? values[a] : get_column_value(a) next if an && v.nil? next if ab && model.db.send(:blank_object?, v) if message = yield(a, v, m) errors.add(a, message) end end end # Merge the given options with the default options for the given type # and call validatable_attributes with the merged options. def validatable_attributes_for_type(type, atts, opts, &block) validatable_attributes(atts, default_validation_helpers_options(type).merge(opts), &block) end # The validation error message to use, as a string. If message # is a Proc, call it with the args. Otherwise, assume it is a string and # return it. def validation_error_message(message, *args) message.is_a?(Proc) ? message.call(*args) : message end end end end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/whitelist_security.rb����������������������������������������������0000664�0000000�0000000�00000010530�14342141206�0023114�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel module Plugins # The whitelist_security plugin contains whitelist-based support for # mass assignment, explicitly specifying which columns to allow mass assignment for, # disallowing mass assignment for columns not listed. This exists mostly for backwards # compatibility, it's best to use Sequel::Model#set_fields and Sequel::Model#update_fields # to decide which fields to allow on a per-call basis. # # Usage: # # # Make all model subclasses support allowed_columns # Sequel::Model.plugin :whitelist_security # # # Make the Album class support allowed_columns # Album.plugin :whitelist_security module WhitelistSecurity module ClassMethods # Which columns should be the only columns allowed in a call to a mass assignment method (e.g. set) # (default: not set, so all columns not otherwise restricted are allowed). attr_reader :allowed_columns Plugins.inherited_instance_variables(self, :@allowed_columns=>:dup) # Freeze allowed columns when freezing model class. def freeze @allowed_columns.freeze super end # Set the columns to allow when using mass assignment (e.g. +set+). Using this means that # any columns not listed here will not be modified. If you have any virtual # setter methods (methods that end in =) that you want to be used during # mass assignment, they need to be listed here as well (without the =). # # It may be better to use +set_fields+ which lets you specify # the allowed fields per call. # # Artist.set_allowed_columns(:name, :hometown) # Artist.set(name: 'Bob', hometown: 'Sactown') # No Error # Artist.set(name: 'Bob', records_sold: 30000) # Error def set_allowed_columns(*cols) clear_setter_methods_cache @allowed_columns = cols end private # If allowed_columns is set, only allow those columns. 
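        # For example, after Artist.set_allowed_columns(:name, :hometown), this
        # returns ["name=", "hometown="].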
def get_setter_methods if allowed_columns allowed_columns.map{|x| "#{x}="} else super end end end module InstanceMethods # Set all values using the entries in the hash, ignoring any setting of # allowed_columns in the model. # # Artist.set_allowed_columns(:num_albums) # artist.set_all(name: 'Jim') # artist.name # => 'Jim' def set_all(hash) set_restricted(hash, :all) end # Set the values using the entries in the hash, only if the key # is included in only. It may be a better idea to use +set_fields+ # instead of this method. # # artist.set_only({name: 'Jim'}, :name) # artist.name # => 'Jim' # # artist.set_only({hometown: 'LA'}, :name) # Raise Error def set_only(hash, *only) set_restricted(hash, only.flatten) end # Update all values using the entries in the hash, ignoring any setting of # +allowed_columns+ in the model. # # Artist.set_allowed_columns(:num_albums) # artist.update_all(name: 'Jim') # UPDATE artists SET name = 'Jim' WHERE (id = 1) def update_all(hash) update_restricted(hash, :all) end # Update the values using the entries in the hash, only if the key # is included in only. It may be a better idea to use +update_fields+ # instead of this method. # # artist.update_only({name: 'Jim'}, :name) # # UPDATE artists SET name = 'Jim' WHERE (id = 1) # # artist.update_only({hometown: 'LA'}, :name) # Raise Error def update_only(hash, *only) update_restricted(hash, only.flatten) end private # If allowed_columns is set and set/update is called, only allow those columns. def setter_methods(type) if type == :default && model.allowed_columns model.setter_methods elsif type.is_a?(Array) type.map{|x| "#{x}="} elsif type == :all && primary_key && model.restrict_primary_key? super + Array(primary_key).map{|x| "#{x}="} else super end end end end end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/plugins/xml_serializer.rb��������������������������������������������������0000664�0000000�0000000�00000036252�14342141206�0022213�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true require 'nokogiri' module Sequel module Plugins # The xml_serializer plugin handles serializing entire Sequel::Model # objects to XML, and deserializing XML into a single Sequel::Model # object or an array of Sequel::Model objects. It requires the # nokogiri library. 
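    # If nokogiri is not available, requiring this plugin raises a LoadError;
    # it can typically be installed with:
    #
    #   gem install nokogiri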
    #
    # Basic Example:
    #
    #   album = Album[1]
    #   puts album.to_xml
    #   # Output:
    #   #   <?xml version="1.0"?>
    #   #   <album>
    #   #     <id>1</id>
    #   #     <name>RF</name>
    #   #     <artist_id>2</artist_id>
    #   #   </album>
    #
    # You can provide options to control the XML output:
    #
    #   puts album.to_xml(only: :name)
    #   puts album.to_xml(except: [:id, :artist_id])
    #   # Output:
    #   #   <?xml version="1.0"?>
    #   #   <album>
    #   #     <name>RF</name>
    #   #   </album>
    #
    #   album.to_xml(include: :artist)
    #   # Output:
    #   #   <?xml version="1.0"?>
    #   #   <album>
    #   #     <id>1</id>
    #   #     <name>RF</name>
    #   #     <artist_id>2</artist_id>
    #   #     <artist>
    #   #       <id>2</id>
    #   #       <name>YJM</name>
    #   #     </artist>
    #   #   </album>
    #
    # You can use a hash value with <tt>:include</tt> to pass options
    # to associations:
    #
    #   album.to_xml(include: {artist: {only: :name}})
    #   # Output:
    #   #   <?xml version="1.0"?>
    #   #   <album>
    #   #     <id>1</id>
    #   #     <name>RF</name>
    #   #     <artist_id>2</artist_id>
    #   #     <artist>
    #   #       <name>YJM</name>
    #   #     </artist>
    #   #   </album>
    #
    # +to_xml+ also exists as a class and dataset method, both
    # of which return all objects in the dataset:
    #
    #   Album.to_xml
    #   Album.where(artist_id: 1).to_xml(include: :tags)
    #
    # If you have an existing array of model instances you want to convert to
    # XML, you can call the class to_xml method with the :array option:
    #
    #   Album.to_xml(array: [Album[1], Album[2]])
    #
    # In addition to creating XML, this plugin also enables Sequel::Model
    # classes to create instances directly from XML using the from_xml class
    # method:
    #
    #   xml = album.to_xml
    #   album = Album.from_xml(xml)
    #
    # The array_from_xml class method exists to parse arrays of model instances
    # from xml:
    #
    #   xml = Album.where(artist_id: 1).to_xml
    #   albums = Album.array_from_xml(xml)
    #
    # These do not necessarily round trip, since doing so would let users
    # create model objects with arbitrary values.  By default, from_xml will
    # call set using values from the tags in the xml.  If you want to specify the allowed
    # fields, you can use the :fields option, which will call set_fields with
    # the given fields:
    #
    #   Album.from_xml(album.to_xml, fields: %w'id name')
    #
    # If you want to update an existing instance, you can use the from_xml
    # instance method:
    #
    #   album.from_xml(xml)
    #
    # Both of these allow creation of cached associated objects, if you provide
    # the :associations option:
    #
    #   album.from_xml(xml, associations: :artist)
    #
    # You can even provide options when setting up the associated objects:
    #
    #   album.from_xml(xml, associations: {artist: {fields: %w'id name', associations: :tags}})
    #
    # Usage:
    #
    #   # Add XML output capability to all model subclass instances (called before loading subclasses)
    #   Sequel::Model.plugin :xml_serializer
    #
    #   # Add XML output capability to Album class instances
    #   Album.plugin :xml_serializer
    module XmlSerializer
      module ClassMethods
        # Proc that camelizes the input string, used for the :camelize option
        CAMELIZE = :camelize.to_proc

        # Proc that dasherizes the input string, used for the :dasherize option
        DASHERIZE = :dasherize.to_proc

        # Proc that returns the input string as is, used if
        # no :name_proc, :dasherize, or :camelize option is used.
        IDENTITY = proc{|s| s}

        # Proc that underscores the input string, used for the :underscore option
        UNDERSCORE = :underscore.to_proc

        # Return an array of instances of this class based on
        # the provided XML.
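        #
        # For example, round tripping a dataset's XML (assuming the Album
        # class from the examples above):
        #
        #   albums = Album.array_from_xml(Album.where(artist_id: 1).to_xml)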
        def array_from_xml(xml, opts=OPTS)
          node = Nokogiri::XML(xml).children.first
          unless node
            raise Error, "Malformed XML used"
          end
          node.children.reject{|c| c.is_a?(Nokogiri::XML::Text)}.map{|c| from_xml_node(c, opts)}
        end

        # Return an instance of this class based on the provided XML.
        def from_xml(xml, opts=OPTS)
          from_xml_node(Nokogiri::XML(xml).children.first, opts)
        end

        # Return an instance of this class based on the given
        # XML node, which should be a Nokogiri::XML::Node instance.
        # This should not be used directly by user code.
        def from_xml_node(parent, opts=OPTS)
          new.from_xml_node(parent, opts)
        end

        # Return an appropriate Nokogiri::XML::Builder instance
        # used to create the XML.  This should not be used
        # directly by user code.
        def xml_builder(opts=OPTS)
          if opts[:builder]
            opts[:builder]
          else
            builder_opts = if opts[:builder_opts]
              Hash[opts[:builder_opts]]
            else
              {}
            end
            builder_opts[:encoding] = opts[:encoding] if opts.has_key?(:encoding)
            Nokogiri::XML::Builder.new(builder_opts)
          end
        end

        # Return a proc (or any other object that responds to []),
        # used for formatting XML tag names when deserializing from XML.
        # This should not be used directly by user code.
        def xml_deserialize_name_proc(opts=OPTS)
          if opts[:name_proc]
            opts[:name_proc]
          elsif opts[:underscore]
            UNDERSCORE
          else
            IDENTITY
          end
        end

        # Return a proc (or any other object that responds to []),
        # used for formatting XML tag names when serializing to XML.
        # This should not be used directly by user code.
        def xml_serialize_name_proc(opts=OPTS)
          pr = if opts[:name_proc]
            opts[:name_proc]
          elsif opts[:dasherize]
            DASHERIZE
          elsif opts[:camelize]
            CAMELIZE
          else
            IDENTITY
          end
          proc{|s| "#{pr[s]}_"}
        end

        Plugins.def_dataset_methods(self, :to_xml)
      end

      module InstanceMethods
        # Update the contents of this instance based on the given XML.
        # Accepts the following options:
        #
        # :name_proc :: Proc or Hash that accepts a string and returns
        #               a string, used to convert tag names to column or
        #               association names.
        # :underscore :: Sets the :name_proc option to one that calls +underscore+
        #                on the input string.  Requires that you load the inflector
        #                extension or another library that adds String#underscore.
        def from_xml(xml, opts=OPTS)
          from_xml_node(Nokogiri::XML(xml).children.first, opts)
        end

        # Update the contents of this instance based on the given
        # XML node, which should be a Nokogiri::XML::Node instance.
        # By default, just calls set with a hash created from the content of the node.
        #
        # Options:
        # :associations :: Indicates that the associations cache should be updated by creating
        #                  a new associated object using data from the hash.  Should be a Symbol
        #                  for a single association, an array of symbols for multiple associations,
        #                  or a hash with symbol keys and dependent association option hash values.
        # :fields :: Changes the behavior to call set_fields using the provided fields, instead of calling set.
        def from_xml_node(parent, opts=OPTS)
          unless parent
            raise Error, "Malformed XML used"
          end
          if !parent.children.empty? && parent.children.all?{|node| node.is_a?(Nokogiri::XML::Text)}
            raise Error, "XML consisting of just text nodes used"
          end

          if assocs = opts[:associations]
            assocs = case assocs
            when Symbol
              {assocs=>OPTS}
            when Array
              assocs_tmp = {}
              assocs.each{|v| assocs_tmp[v] = OPTS}
              assocs_tmp
            when Hash
              assocs
            else
              raise Error, ":associations should be Symbol, Array, or Hash if present"
            end

            assocs_hash = {}
            assocs.each{|k,v| assocs_hash[k.to_s] = v}
            assocs_present = []
          end

          hash = {}
          populate_associations = {}
          name_proc = model.xml_deserialize_name_proc(opts)
          parent.children.each do |node|
            next if node.is_a?(Nokogiri::XML::Text)
            k = name_proc[node.name]
            if assocs_hash && assocs_hash[k]
              assocs_present << [k.to_sym, node]
            else
              hash[k] = node.key?('nil') ? nil : node.children.first.to_s
            end
          end

          if assocs_present
            assocs_present.each do |assoc, node|
              assoc_opts = assocs[assoc]
              unless r = model.association_reflection(assoc)
                raise Error, "Association #{assoc} is not defined for #{model}"
              end

              populate_associations[assoc] = if r.returns_array?
                node.children.reject{|c| c.is_a?(Nokogiri::XML::Text)}.map{|c| r.associated_class.from_xml_node(c, assoc_opts)}
              else
                r.associated_class.from_xml_node(node, assoc_opts)
              end
            end
          end

          if fields = opts[:fields]
            set_fields(hash, fields, opts)
          else
            set(hash)
          end

          populate_associations.each do |assoc, values|
            associations[assoc] = values
          end

          self
        end

        # Return a string in XML format.  If a block is given, yields the XML
        # builder object so you can add additional XML tags.
        # Accepts the following options:
        #
        # :builder :: The builder instance used to build the XML,
        #             which should be an instance of Nokogiri::XML::Node.  This
        #             is necessary if you are serializing entire object graphs,
        #             like associated objects.
        # :builder_opts :: Options to pass to the Nokogiri::XML::Builder
        #                  initializer, if the :builder option is not provided.
        # :camelize :: Sets the :name_proc option to one that calls +camelize+
        #              on the input string.  Requires that you load the inflector
        #              extension or another library that adds String#camelize.
        # :dasherize :: Sets the :name_proc option to one that calls +dasherize+
        #               on the input string.  Requires that you load the inflector
        #               extension or another library that adds String#dasherize.
        # :encoding :: The encoding to use for the XML output, passed
        #              to the Nokogiri::XML::Builder initializer.
        # :except :: Symbol or Array of Symbols of columns not
        #            to include in the XML output.
        # :include :: Symbol, Array of Symbols, or a Hash with
        #             Symbol keys and Hash values specifying
        #             associations or other non-column attributes
        #             to include in the XML output.  Using a nested
        #             hash, you can pass options to associations
        #             to affect the XML used for associated objects.
        # :name_proc :: Proc or Hash that accepts a string and returns
        #               a string, used to format tag names.
        # :only :: Symbol or Array of Symbols of columns to only
        #          include in the XML output, ignoring all other
        #          columns.
        # :root_name :: The base name to use for the XML tag that
        #               contains the data for this instance.  This will
        #               be the name of the root node if you are only serializing
        #               a single object, but not if you are serializing
        #               an array of objects using Model.to_xml or Dataset#to_xml.
        # :types :: Set to true to include type information for
        #           all of the columns, pulled from the db_schema.
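        #
        # For example, assuming the Album class from the examples above:
        #
        #   album.to_xml(only: :name, types: true)
        #   # Output:
        #   #   <?xml version="1.0"?>
        #   #   <album>
        #   #     <name type="string">RF</name>
        #   #   </album>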
def to_xml(opts=OPTS) vals = values types = opts[:types] inc = opts[:include] cols = if only = opts[:only] Array(only) else vals.keys - Array(opts[:except]) end name_proc = model.xml_serialize_name_proc(opts) x = model.xml_builder(opts) x.public_send(name_proc[opts.fetch(:root_name, model.send(:underscore, model.name).gsub('/', '__')).to_s]) do |x1| cols.each do |c| attrs = {} if types attrs[:type] = db_schema.fetch(c, OPTS)[:type] end v = vals[c] if v.nil? attrs[:nil] = '' end x1.public_send(name_proc[c.to_s], v, attrs) end if inc.is_a?(Hash) inc.each{|k, v| to_xml_include(x1, k, v)} else Array(inc).each{|i| to_xml_include(x1, i)} end yield x1 if defined?(yield) end x.to_xml end private # Handle associated objects and virtual attributes when creating # the xml. def to_xml_include(node, i, opts=OPTS) name_proc = model.xml_serialize_name_proc(opts) objs = public_send(i) if objs.is_a?(Array) && objs.all?{|x| x.is_a?(Sequel::Model)} node.public_send(name_proc[i.to_s]) do |x2| objs.each{|obj| obj.to_xml(opts.merge(:builder=>x2))} end elsif objs.is_a?(Sequel::Model) objs.to_xml(opts.merge(:builder=>node, :root_name=>i)) else node.public_send(name_proc[i.to_s], objs) end end end module DatasetMethods # Return an XML string containing all model objects specified with # this dataset. Takes all of the options available to Model#to_xml, # as well as the :array_root_name option for specifying the name of # the root node that contains the nodes for all of the instances. def to_xml(opts=OPTS) raise(Sequel::Error, "Dataset#to_xml") unless row_proc || @opts[:eager_graph] x = model.xml_builder(opts) name_proc = model.xml_serialize_name_proc(opts) array = if opts[:array] opts = opts.dup opts.delete(:array) else all end x.public_send(name_proc[opts.fetch(:array_root_name, model.send(:pluralize, model.send(:underscore, model.name))).to_s]) do |x1| array.each do |obj| obj.to_xml(opts.merge(:builder=>x1)) end end x.to_xml end end end end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/sql.rb���������������������������������������������������������������������0000664�0000000�0000000�00000223645�14342141206�0016304�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel # The <tt>Sequel::BasicObject</tt> class is just like the # default +BasicObject+ class, except that missing constants are resolved in # +Object+. This allows the virtual row support to work with classes # without prefixing them with ::, such as: # # DB[:bonds].where{maturity_date > Time.now} class BasicObject < ::BasicObject # Lookup missing constants in <tt>::Object</tt> def self.const_missing(name) ::Object.const_get(name) end end class LiteralString < ::String end # Time subclass that gets literalized with only the time value, so it operates # like a standard SQL time type. 
This type does not support timezones, by design, # so it will not work correctly with <tt>time with time zone</tt> types. class SQLTime < ::Time @date = nil class << self # Set the date used for SQLTime instances. attr_writer :date # Use the date explicitly set, or the current date if there is not a # date set. def date @date || now end # Set the correct date and timezone when parsing times. def parse(*) t = super utc = Sequel.application_timezone == :utc d = @date if d || utc meth = utc ? :utc : :local d ||= t t = public_send(meth, d.year, d.month, d.day, t.hour, t.min, t.sec, t.usec) end t end # Create a new SQLTime instance given an hour, minute, second, and usec. def create(hour, minute, second, usec = 0) t = date meth = Sequel.application_timezone == :utc ? :utc : :local public_send(meth, t.year, t.month, t.day, hour, minute, second, usec) end end # Show that this is an SQLTime, and the time represented def inspect "#<#{self.class} #{to_s}>" end # Return a string in HH:MM:SS format representing the time. def to_s(*args) if args.empty? strftime('%H:%M:%S') else # Superclass may have defined a method that takes a format string, # and we shouldn't override in that case. super end end end # The SQL module holds classes whose instances represent SQL fragments. # It also holds modules that are used by these classes. module SQL # Base class for all SQL expression objects. class Expression @comparison_attrs = [].freeze class << self # All attributes used for equality and hash methods. attr_reader :comparison_attrs # Expression objects are assumed to be value objects, where their # attribute values can't change after assignment. In order to make # it easy to define equality and hash methods, subclass # instances assume that the only values that affect the results of # such methods are the values of the object's attributes. def attr_reader(*args) super comparison_attrs.concat(args) end # Copy the comparison_attrs into the subclass. def inherited(subclass) super subclass.instance_variable_set(:@comparison_attrs, comparison_attrs.dup) end private # Create a to_s instance method that takes a dataset, and calls # the method provided on the dataset with args as the argument (self by default). # Used to DRY up some code. # # Do not call this method with untrusted input, as that can result in # arbitrary code execution. def to_s_method(meth, args=:self) # :nodoc: class_eval("def to_s_append(ds, sql) ds.#{meth}_append(sql, #{args}) end", __FILE__, __LINE__) @comparison_attrs.freeze end end # Make clone/dup return self, since Expression objects are supposed to # be frozen value objects def clone self end alias dup clone # Alias of <tt>eql?</tt> def ==(other) eql?(other) end # Returns true if the receiver is the same expression as the # the +other+ expression. def eql?(other) other.is_a?(self.class) && !self.class.comparison_attrs.find{|a| public_send(a) != other.public_send(a)} end # Make sure that the hash value is the same if the attributes are the same. def hash ([self.class] + self.class.comparison_attrs.map{|x| public_send(x)}).hash end # Show the class name and instance variables for the object. def inspect "#<#{self.class} #{instance_variables.map{|iv| "#{iv}=>#{instance_variable_get(iv).inspect}"}.join(', ')}>" end end # Represents a SQL expression, with a given operator and one # or more attributes (which may also be ComplexExpressions, forming # a tree). This class is the backbone of Sequel's ruby expression DSL. # # This is an abstract class that is not that useful by itself. 
The # subclasses +BooleanExpression+, +NumericExpression+, and +StringExpression+ # define the behavior of the DSL via operators. class ComplexExpression < Expression # A hash of the opposite for each operator symbol, used for inverting # objects. OPERTATOR_INVERSIONS = {:AND => :OR, :OR => :AND, :< => :>=, :> => :<=, :<= => :>, :>= => :<, :'=' => :'!=' , :'!=' => :'=', :LIKE => :'NOT LIKE', :'NOT LIKE' => :LIKE, :~ => :'!~', :'!~' => :~, :IN => :'NOT IN', :'NOT IN' => :IN, :IS => :'IS NOT', :'IS NOT' => :IS, :'~*' => :'!~*', :'!~*' => :'~*', :NOT => :NOOP, :NOOP => :NOT, :ILIKE => :'NOT ILIKE', :'NOT ILIKE'=>:ILIKE}.freeze # Standard mathematical operators used in +NumericMethods+ MATHEMATICAL_OPERATORS = [:+, :-, :/, :*, :**].freeze # Bitwise mathematical operators used in +BitwiseMethods+ BITWISE_OPERATORS = [:&, :|, :^, :<<, :>>, :%].freeze # Operators that check for equality EQUALITY_OPERATORS = [:'=', :'!='].freeze # Inequality operators used in +InequalityMethods+ INEQUALITY_OPERATORS = [:<, :>, :<=, :>=].freeze # Hash of ruby operator symbols to SQL operators, used in +BooleanMethods+ BOOLEAN_OPERATOR_METHODS = {:& => :AND, :| =>:OR}.freeze # Operators that use IN/NOT IN for inclusion/exclusion IN_OPERATORS = [:IN, :'NOT IN'].freeze # Operators that use IS, used for special casing to override literal true/false values IS_OPERATORS = [:IS, :'IS NOT'].freeze # Operators that do pattern matching via regular expressions REGEXP_OPERATORS = [:~, :'!~', :'~*', :'!~*'].freeze # Operators that do pattern matching via LIKE LIKE_OPERATORS = [:LIKE, :'NOT LIKE', :ILIKE, :'NOT ILIKE'].freeze # Operator symbols that take exactly two arguments TWO_ARITY_OPERATORS = (EQUALITY_OPERATORS + INEQUALITY_OPERATORS + IS_OPERATORS + IN_OPERATORS + REGEXP_OPERATORS + LIKE_OPERATORS + [:**]).freeze # Operator symbols that take one or more arguments N_ARITY_OPERATORS = ([:AND, :OR, :'||'] + MATHEMATICAL_OPERATORS + BITWISE_OPERATORS - [:**]).freeze # Operator symbols that are associative ASSOCIATIVE_OPERATORS = [:AND, :OR, :'||', :+, :*, :&, :|].freeze # Operator symbols that take only a single argument ONE_ARITY_OPERATORS = [:NOT, :NOOP, :'B~'].freeze # Custom expressions that may have different syntax on different databases CUSTOM_EXPRESSIONS = [:extract].freeze # The operator symbol for this object attr_reader :op # An array of args for this object attr_reader :args # Set the operator symbol and arguments for this object to the ones given. # Convert all args that are hashes or arrays of two element arrays to +BooleanExpressions+, # other than the second arg for an IN/NOT IN operator. # Raise an +Error+ if the operator doesn't allow boolean input and a boolean argument is given. # Raise an +Error+ if the wrong number of arguments for a given operator is used. def initialize(op, *args) orig_args = args args = args.map{|a| Sequel.condition_specifier?(a) ? SQL::BooleanExpression.from_value_pairs(a) : a} case op when *N_ARITY_OPERATORS raise(Error, "The #{op} operator requires at least 1 argument") unless args.length >= 1 args.map!{|a| a.is_a?(self.class) && a.op == :NOOP ? a.args.first : a} if ASSOCIATIVE_OPERATORS.include?(op) old_args = args args = [] old_args.each{|a| a.is_a?(self.class) && a.op == op ? 
args.concat(a.args) : args.push(a)} end when *TWO_ARITY_OPERATORS raise(Error, "The #{op} operator requires precisely 2 arguments") unless args.length == 2 # With IN/NOT IN, even if the second argument is an array of two element arrays, # don't convert it into a boolean expression, since it's definitely being used # as a value list. args[1] = orig_args[1] if IN_OPERATORS.include?(op) when *ONE_ARITY_OPERATORS raise(Error, "The #{op} operator requires a single argument") unless args.length == 1 when *CUSTOM_EXPRESSIONS # nothing else raise(Error, "Invalid operator #{op}") end @op = op @args = args.freeze freeze end to_s_method :complex_expression_sql, '@op, @args' end # The base class for expressions that can be used in multiple places in # an SQL query. class GenericExpression < Expression end # Includes an +as+ method that creates an SQL alias. module AliasMethods # Create an SQL alias (+AliasedExpression+) of the receiving column or expression to the given alias. # # Sequel.function(:func).as(:alias) # func() AS "alias" # Sequel.function(:func).as(:alias, [:col_alias1, :col_alias2]) # func() AS "alias"("col_alias1", "col_alias2") def as(aliaz, columns=nil) AliasedExpression.new(self, aliaz, columns) end end # This defines the bitwise methods: &, |, ^, ~, <<, and >>. Because these # methods overlap with the standard +BooleanMethods+ methods, and they only # make sense for integers, they are only included in +NumericExpression+. # # Sequel[:a].sql_number & :b # "a" & "b" # Sequel[:a].sql_number | :b # "a" | "b" # Sequel[:a].sql_number ^ :b # "a" ^ "b" # Sequel[:a].sql_number << :b # "a" << "b" # Sequel[:a].sql_number >> :b # "a" >> "b" # ~Sequel[:a].sql_number # ~"a" module BitwiseMethods ComplexExpression::BITWISE_OPERATORS.each do |o| module_eval("def #{o}(o) NumericExpression.new(#{o.inspect}, self, o) end", __FILE__, __LINE__) end # Do the bitwise complement of self # # ~(Sequel[:a].sql_number) # ~"a" def ~ NumericExpression.new(:'B~', self) end end # This module includes the boolean/logical AND (&), OR (|) and NOT (~) operators # that are defined on objects that can be used in a boolean context in SQL. # # Sequel[:a] & Sequel[:b] # "a" AND "b" # Sequel[:a] | Sequel[:b] # "a" OR "b" # ~Sequel[:a] # NOT "a" # # One exception to this is when a NumericExpression or Integer is the argument # to & or |, in which case a bitwise method will be used: # # Sequel[:a] & 1 # "a" & 1 # Sequel[:a] | (Sequel[:b] + 1) # "a" | ("b" + 1) module BooleanMethods ComplexExpression::BOOLEAN_OPERATOR_METHODS.each do |m, o| module_eval(<<-END, __FILE__, __LINE__+1) def #{m}(o) case o when NumericExpression, Integer NumericExpression.new(#{m.inspect}, self, o) else BooleanExpression.new(#{o.inspect}, self, o) end end END end # Create a new BooleanExpression with NOT, representing the inversion of whatever self represents. # # ~Sequel[:a] # NOT "a" def ~ BooleanExpression.invert(self) end end # These methods make it easier to create Sequel expressions without # using the core extensions. module Builders # Create an SQL::AliasedExpression for the given expression and alias. # # Sequel.as(:column, :alias) # "column" AS "alias" # Sequel.as(:column, :alias, [:col_alias1, :col_alias2]) # "column" AS "alias"("col_alias1", "col_alias2") def as(exp, aliaz, columns=nil) SQL::AliasedExpression.new(exp, aliaz, columns) end # Order the given argument ascending.
# Options: # # :nulls :: Set to :first to use NULLS FIRST (so NULL values are ordered # before other values), or :last to use NULLS LAST (so NULL values # are ordered after other values). # # Sequel.asc(:a) # a ASC # Sequel.asc(:b, nulls: :last) # b ASC NULLS LAST def asc(arg, opts=OPTS) SQL::OrderedExpression.new(arg, false, opts) end # Return an <tt>SQL::Blob</tt> that holds the same data as this string. # Blobs provide proper escaping of binary data. If given a blob, returns it # directly. def blob(s) if s.is_a?(SQL::Blob) s else SQL::Blob.new(s) end end # Return an <tt>SQL::CaseExpression</tt> created with the given arguments. # The first argument is the <tt>WHEN</tt>/<tt>THEN</tt> conditions, # specified as an array or a hash. The second argument is the # <tt>ELSE</tt> default value. The third optional argument is the # <tt>CASE</tt> expression. # # Sequel.case({a: 1}, 0) # SQL: CASE WHEN a THEN 1 ELSE 0 END # Sequel.case({a: 1}, 0, :b) # SQL: CASE b WHEN a THEN 1 ELSE 0 END # Sequel.case({{a: [2,3]} => 1}, 0) # SQL: CASE WHEN a IN (2, 3) THEN 1 ELSE 0 END # Sequel.case([[{a: [2,3]}, 1]], 0) # SQL: CASE WHEN a IN (2, 3) THEN 1 ELSE 0 END def case(*args) SQL::CaseExpression.new(*args) end # Cast the receiver to the given SQL type. You can specify a ruby class as a type, # and it is handled similarly to using a database independent type in the schema methods. # # Sequel.cast(:a, :integer) # CAST(a AS integer) # Sequel.cast(:a, String) # CAST(a AS varchar(255)) def cast(arg, sql_type) SQL::Cast.new(arg, sql_type) end # Cast the receiver to the given SQL type (or the database's default Integer type if none given), # and return the result as a +NumericExpression+, so you can use the bitwise operators # on the result. # # Sequel.cast_numeric(:a) # CAST(a AS integer) # Sequel.cast_numeric(:a, Float) # CAST(a AS double precision) def cast_numeric(arg, sql_type = nil) cast(arg, sql_type || Integer).sql_number end # Cast the receiver to the given SQL type (or the database's default String type if none given), # and return the result as a +StringExpression+, so you can use + # directly on the result for SQL string concatenation. # # Sequel.cast_string(:a) # CAST(a AS varchar(255)) # Sequel.cast_string(:a, :text) # CAST(a AS text) def cast_string(arg, sql_type = nil) cast(arg, sql_type || String).sql_string end # Return an emulated function call for getting the number of characters # in the argument: # # Sequel.char_length(:a) # char_length(a) -- Most databases # Sequel.char_length(:a) # length(a) -- SQLite def char_length(arg) SQL::Function.new!(:char_length, [arg], :emulate=>true) end # Do a deep qualification of the argument using the qualifier. This recurses into # nested structures. # # Sequel.deep_qualify(:table, :column) # "table"."column" # Sequel.deep_qualify(:table, Sequel[:column] + 1) # "table"."column" + 1 # Sequel.deep_qualify(:table, Sequel[:a].like('b')) # "table"."a" LIKE 'b' ESCAPE '\' def deep_qualify(qualifier, expr) Sequel::Qualifier.new(qualifier).transform(expr) end # Return a delayed evaluation that uses the passed block. This is used # to delay evaluations of the code to runtime. For example, with # the following code: # # ds = DB[:table].where{column > Time.now} # # The filter is fixed to the time that where was called. Unless you are # only using the dataset once immediately after creating it, that's # probably not desired.
If you just want to set it to the time when the # query is sent to the database, you can wrap it in Sequel.delay: # # ds = DB[:table].where{column > Sequel.delay{Time.now}} # # Note that for dates and timestamps, you are probably better off using # Sequel::CURRENT_DATE and Sequel::CURRENT_TIMESTAMP instead of this # generic delayed evaluation facility. def delay(&block) raise(Error, "Sequel.delay requires a block") unless block SQL::DelayedEvaluation.new(block) end # Order the given argument descending. # Options: # # :nulls :: Set to :first to use NULLS FIRST (so NULL values are ordered # before other values), or :last to use NULLS LAST (so NULL values # are ordered after other values). # # Sequel.desc(:a) # a DESC # Sequel.desc(:b, nulls: :first) # b DESC NULLS FIRST def desc(arg, opts=OPTS) SQL::OrderedExpression.new(arg, true, opts) end # Wraps the given object in an appropriate Sequel wrapper. # If the given object is already a Sequel object, return it directly. # For condition specifiers (hashes and arrays of two pairs), true, and false, # return a boolean expression. For numeric objects, return a numeric # expression. For strings, return a string expression. For procs or when # the method is passed a block, evaluate it as a virtual row and wrap it # appropriately. In all other cases, use a generic wrapper. # # This method allows you to construct SQL expressions that are difficult # to construct via other methods. For example: # # Sequel.expr(1) - :a # SQL: (1 - a) # # On the Sequel module, this is aliased as #[], for easier use: # # Sequel[1] - :a # SQL: (1 - a) def expr(arg=(no_arg=true), &block) if defined?(yield) if no_arg return expr(block) else raise Error, 'cannot provide both an argument and a block to Sequel.expr' end elsif no_arg raise Error, 'must provide either an argument or a block to Sequel.expr' end case arg when Symbol t, c, a = Sequel.split_symbol(arg) arg = if t SQL::QualifiedIdentifier.new(t, c) else SQL::Identifier.new(c) end if a arg = SQL::AliasedExpression.new(arg, a) end arg when SQL::Expression, LiteralString, SQL::Blob arg when Hash SQL::BooleanExpression.from_value_pairs(arg, :AND) when Array if condition_specifier?(arg) SQL::BooleanExpression.from_value_pairs(arg, :AND) else SQL::Wrapper.new(arg) end when Numeric SQL::NumericExpression.new(:NOOP, arg) when String SQL::StringExpression.new(:NOOP, arg) when TrueClass, FalseClass SQL::BooleanExpression.new(:NOOP, arg) when Proc expr(virtual_row(&arg)) else SQL::Wrapper.new(arg) end end # Extract a datetime_part (e.g. year, month) from the given # expression: # # Sequel.extract(:year, :date) # extract(year FROM "date") def extract(datetime_part, exp) SQL::NumericExpression.new(:extract, datetime_part, exp) end # Returns a <tt>Sequel::SQL::Function</tt> with the function name # and the given arguments. # # Sequel.function(:now) # SQL: now() # Sequel.function(:substr, :a, 1) # SQL: substr(a, 1) def function(name, *args) SQL::Function.new(name, *args) end # Return the argument wrapped as an <tt>SQL::Identifier</tt>. # # Sequel.identifier(:a) # "a" def identifier(name) SQL::Identifier.new(name) end # Return a <tt>Sequel::SQL::StringExpression</tt> representing an SQL string made up of the # concatenation of the given array's elements. If an argument is passed, # it is used in between each element of the array in the SQL # concatenation.
# # Sequel.join([:a]) # SQL: a # Sequel.join([:a, :b]) # SQL: a || b # Sequel.join([:a, 'b']) # SQL: a || 'b' # Sequel.join(['a', :b], ' ') # SQL: 'a' || ' ' || b def join(args, joiner=nil) raise Error, 'argument to Sequel.join must be an array' unless args.is_a?(Array) if joiner args = args.zip([joiner]*args.length).flatten args.pop end return SQL::StringExpression.new(:NOOP, '') if args.empty? args = args.map do |a| case a when Symbol, ::Sequel::SQL::Expression, ::Sequel::LiteralString, TrueClass, FalseClass, NilClass a else a.to_s end end SQL::StringExpression.new(:'||', *args) end # Create a <tt>BooleanExpression</tt> case insensitive (if the database supports it) pattern match of the receiver with # the given patterns. See <tt>SQL::StringExpression.like</tt>. # # Sequel.ilike(:a, 'A%') # "a" ILIKE 'A%' ESCAPE '\' def ilike(*args) SQL::StringExpression.like(*(args << {:case_insensitive=>true})) end # Create a <tt>SQL::BooleanExpression</tt> case sensitive (if the database supports it) pattern match of the receiver with # the given patterns. See <tt>SQL::StringExpression.like</tt>. # # Sequel.like(:a, 'A%') # "a" LIKE 'A%' ESCAPE '\' def like(*args) SQL::StringExpression.like(*args) end # Converts a string into a <tt>Sequel::LiteralString</tt>, in order to override string # literalization, e.g.: # # DB[:items].where(abc: 'def').sql #=> # "SELECT * FROM items WHERE (abc = 'def')" # # DB[:items].where(abc: Sequel.lit('def')).sql #=> # "SELECT * FROM items WHERE (abc = def)" # # You can also provide arguments, to create a <tt>Sequel::SQL::PlaceholderLiteralString</tt>: # # DB[:items].select{|o| o.count(Sequel.lit('DISTINCT ?', :a))}.sql #=> # "SELECT count(DISTINCT a) FROM items" def lit(s, *args) if args.empty? if s.is_a?(LiteralString) s else LiteralString.new(s) end else SQL::PlaceholderLiteralString.new(s, args) end end # Return a <tt>Sequel::SQL::BooleanExpression</tt> created from the condition # specifier, matching none of the conditions. # # Sequel.negate(a: true) # SQL: a IS NOT TRUE # Sequel.negate([[:a, true]]) # SQL: a IS NOT TRUE # Sequel.negate([[:a, 1], [:b, 2]]) # SQL: ((a != 1) AND (b != 2)) def negate(arg) if condition_specifier?(arg) SQL::BooleanExpression.from_value_pairs(arg, :AND, true) else raise Error, 'must pass a conditions specifier to Sequel.negate' end end # Return a <tt>Sequel::SQL::BooleanExpression</tt> created from the condition # specifier, matching any of the conditions. # # Sequel.or(a: true) # SQL: a IS TRUE # Sequel.or([[:a, true]]) # SQL: a IS TRUE # Sequel.or([[:a, 1], [:b, 2]]) # SQL: ((a = 1) OR (b = 2)) def or(arg) if condition_specifier?(arg) SQL::BooleanExpression.from_value_pairs(arg, :OR, false) else raise Error, 'must pass a conditions specifier to Sequel.or' end end # Create a qualified identifier with the given qualifier and identifier # # Sequel.qualify(:table, :column) # "table"."column" # Sequel.qualify(:schema, :table) # "schema"."table" # Sequel.qualify(:table, :column).qualify(:schema) # "schema"."table"."column" def qualify(qualifier, identifier) SQL::QualifiedIdentifier.new(qualifier, identifier) end # Return an <tt>SQL::Subscript</tt> with the given arguments, representing an # SQL array access. 
# # Sequel.subscript(:array, 1) # array[1] # Sequel.subscript(:array, 1, 2) # array[1, 2] # Sequel.subscript(:array, [1, 2]) # array[1, 2] # Sequel.subscript(:array, 1..2) # array[1:2] # Sequel.subscript(:array, 1...3) # array[1:2] def subscript(exp, *subs) SQL::Subscript.new(exp, subs.flatten) end # Return an emulated function call for trimming a string of spaces from # both sides (similar to ruby's String#strip). # # Sequel.trim(:a) # trim(a) -- Most databases # Sequel.trim(:a) # ltrim(rtrim(a)) -- Microsoft SQL Server def trim(arg) SQL::Function.new!(:trim, [arg], :emulate=>true) end # Return a <tt>SQL::ValueList</tt> created from the given array. Used if the array contains # all two element arrays and you want it treated as an SQL value list (IN predicate) # instead of as a conditions specifier (similar to a hash). This is not necessary if you are using # this array as a value in a filter, but may be necessary if you are using it as a # value with placeholder SQL: # # DB[:a].where([:a, :b]=>[[1, 2], [3, 4]]) # SQL: (a, b) IN ((1, 2), (3, 4)) # DB[:a].where('(a, b) IN ?', [[1, 2], [3, 4]]) # SQL: (a, b) IN ((1 = 2) AND (3 = 4)) # DB[:a].where('(a, b) IN ?', Sequel.value_list([[1, 2], [3, 4]])) # SQL: (a, b) IN ((1, 2), (3, 4)) def value_list(arg) raise Error, 'argument to Sequel.value_list must be an array' unless arg.is_a?(Array) SQL::ValueList.new(arg) end end # Holds methods that are used to cast objects to different SQL types. module CastMethods # Cast the receiver to the given SQL type. You can specify a ruby class as a type, # and it is handled similarly to using a database independent type in the schema methods. # # Sequel.function(:func).cast(:integer) # CAST(func() AS integer) # Sequel.function(:func).cast(String) # CAST(func() AS varchar(255)) def cast(sql_type) Cast.new(self, sql_type) end # Cast the receiver to the given SQL type (or the database's default Integer type if none given), # and return the result as a +NumericExpression+, so you can use the bitwise operators # on the result. # # Sequel.function(:func).cast_numeric # CAST(func() AS integer) # Sequel.function(:func).cast_numeric(Float) # CAST(func() AS double precision) def cast_numeric(sql_type = nil) Cast.new(self, sql_type || Integer).sql_number end # Cast the receiver to the given SQL type (or the database's default String type if none given), # and return the result as a +StringExpression+, so you can use + # directly on the result for SQL string concatenation. # # Sequel.function(:func).cast_string # CAST(func() AS varchar(255)) # Sequel.function(:func).cast_string(:text) # CAST(func() AS text) def cast_string(sql_type = nil) Cast.new(self, sql_type || String).sql_string end end # Adds methods that allow you to treat an object as an instance of a specific # +ComplexExpression+ subclass. module ComplexExpressionMethods # Extract a datetime part (e.g. year, month) from self: # # Sequel[:date].extract(:year) # extract(year FROM "date") # # Also has the benefit of returning the result as a # NumericExpression instead of a generic ComplexExpression. def extract(datetime_part) NumericExpression.new(:extract, datetime_part, self) end # Return a BooleanExpression representation of +self+. def sql_boolean BooleanExpression.new(:NOOP, self) end # Return a NumericExpression representation of +self+. # # ~Sequel[:a] # NOT "a" # ~(Sequel[:a].sql_number) # ~"a" def sql_number NumericExpression.new(:NOOP, self) end # Return a StringExpression representation of +self+.
# # Sequel[:a] + :b # "a" + "b" # Sequel[:a].sql_string + :b # "a" || "b" def sql_string StringExpression.new(:NOOP, self) end end # This module includes the inequality methods (>, <, >=, <=) that are defined on objects that can be # used in a numeric or string context in SQL. # # Sequel[:a] > :b # a > "b" # Sequel[:a] < :b # a < "b" # Sequel[:a] >= :b # a >= "b" # Sequel[:a] <= :b # a <= "b" module InequalityMethods ComplexExpression::INEQUALITY_OPERATORS.each do |o| module_eval("def #{o}(o) BooleanExpression.new(#{o.inspect}, self, o) end", __FILE__, __LINE__) end end # This module includes the standard mathematical methods (+, -, *, and /) # that are defined on objects that can be used in a numeric context in SQL # (+Symbol+, +LiteralString+, and +SQL::GenericExpression+). # # Sequel[:a] + :b # "a" + "b" # Sequel[:a] - :b # "a" - "b" # Sequel[:a] * :b # "a" * "b" # Sequel[:a] / :b # "a" / "b" # # One exception to this is if + is called with a +String+ or +StringExpression+, # in which case the || operator is used instead of the + operator: # # Sequel[:a] + 'b' # "a" || 'b' module NumericMethods (ComplexExpression::MATHEMATICAL_OPERATORS - [:+]).each do |o| module_eval("def #{o}(o) NumericExpression.new(#{o.inspect}, self, o) end", __FILE__, __LINE__) end # If the argument given is Numeric, treat it as a NumericExpression, # allowing code such as: # # 1 + Sequel[:x] # SQL: (1 + x) # Sequel.expr{1 - x(y)} # SQL: (1 - x(y)) def coerce(other) if other.is_a?(Numeric) [SQL::NumericExpression.new(:NOOP, other), self] elsif defined?(super) super else [self, other] end end # Use || as the operator when called with StringExpression and String instances, # and the + operator for LiteralStrings and all other types. def +(ce) case ce when LiteralString NumericExpression.new(:+, self, ce) when StringExpression, String StringExpression.new(:'||', self, ce) else NumericExpression.new(:+, self, ce) end end end # This module includes methods for overriding the =~ method for SQL equality, # inclusion, and pattern matching. It returns the same result that Sequel would # return when using a hash with a single entry, where the receiver was the key # and the argument was the value. Example: # # Sequel[:a] =~ 1 # (a = 1) # Sequel[:a] =~ [1, 2] # (a IN (1, 2)) # Sequel[:a] =~ nil # (a IS NULL) # # This also adds the !~ method, for easily setting up not equals, # exclusion, and inverse pattern matching. This is the same as inverting the # result of the =~ method. # # Sequel[:a] !~ 1 # (a != 1) # Sequel[:a] !~ [1, 2] # (a NOT IN (1, 2)) # Sequel[:a] !~ nil # (a IS NOT NULL) module PatternMatchMethods # Set up an equality, inclusion, or pattern match operation, based on the type # of the argument.
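#
# As an illustrative sketch (not from the original docs; DB[:t] is a
# hypothetical dataset), the argument's type selects the operator:
#
#   DB[:t].where{a =~ 1}.sql      # SELECT * FROM t WHERE (a = 1)
#   DB[:t].where{a =~ [1, 2]}.sql # SELECT * FROM t WHERE (a IN (1, 2))
#   DB[:t].where{a =~ nil}.sql    # SELECT * FROM t WHERE (a IS NULL)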
def =~(other) BooleanExpression.send(:from_value_pair, self, other) end def !~(other) ~(self =~ other) end end # This adds methods to create SQL expressions using operators: # # Sequel.+(1, :a) # (1 + a) # Sequel.-(1, :a) # (1 - a) # Sequel.*(1, :a) # (1 * a) # Sequel./(1, :a) # (1 / a) # Sequel.&(:b, :a) # (b AND a) # Sequel.|(:b, :a) # (b OR a) module OperatorBuilders {'::Sequel::SQL::NumericExpression'=>{'+'=>'+', '-'=>'-', '*'=>'*', '/'=>'/'}, '::Sequel::SQL::BooleanExpression'=>{'&'=>'AND', '|'=>'OR'}}.each do |klass, ops| ops.each do |m, op| class_eval(<<-END, __FILE__, __LINE__ + 1) def #{m}(*args) if (args.length == 1) if (v = args.first).is_a?(#{klass}) v else #{klass}.new(:NOOP, v) end else #{klass}.new(:#{op}, *args) end end END end end # Return NumericExpression for the exponentiation: # # Sequel.**(2, 3) # SQL: power(2, 3) def **(a, b) SQL::NumericExpression.new(:**, a, b) end # Invert the given expression. Returns a <tt>Sequel::SQL::BooleanExpression</tt> # created from this argument, not matching all of the conditions. # # Sequel.~(nil) # SQL: NOT NULL # Sequel.~([[:a, true]]) # SQL: a IS NOT TRUE # Sequel.~([[:a, 1], [:b, [2, 3]]]) # SQL: a != 1 OR b NOT IN (2, 3) def ~(arg) if condition_specifier?(arg) SQL::BooleanExpression.from_value_pairs(arg, :OR, true) else SQL::BooleanExpression.invert(arg) end end end # Methods that create +OrderedExpressions+, used for sorting by columns # or more complex expressions. module OrderMethods # Mark the receiving SQL column as sorting in an ascending fashion (generally a no-op). # Options: # # :nulls :: Set to :first to use NULLS FIRST (so NULL values are ordered # before other values), or :last to use NULLS LAST (so NULL values # are ordered after other values). def asc(opts=OPTS) OrderedExpression.new(self, false, opts) end # Mark the receiving SQL column as sorting in a descending fashion. # Options: # # :nulls :: Set to :first to use NULLS FIRST (so NULL values are ordered # before other values), or :last to use NULLS LAST (so NULL values # are ordered after other values). def desc(opts=OPTS) OrderedExpression.new(self, true, opts) end end # Includes +qualify+ and <tt>[]</tt> methods that create <tt>QualifiedIdentifier</tt>s, used for qualifying column # names with a table or table names with a schema, and the * method for returning all columns in # the identifier if no arguments are given. module QualifyingMethods # If no arguments are given, return an SQL::ColumnAll: # # Sequel[:a].* # a.* def *(ce=(arg=false;nil)) if arg == false Sequel::SQL::ColumnAll.new(self) else super(ce) end end # Qualify the receiver with the given +qualifier+ (table for column/schema for table). # # Sequel[:column].qualify(:table) # "table"."column" # Sequel[:table].qualify(:schema) # "schema"."table" # Sequel.qualify(:table, :column).qualify(:schema) # "schema"."table"."column" def qualify(qualifier) QualifiedIdentifier.new(qualifier, self) end # Qualify the receiver with the given +qualifier+ (table for column/schema for table). # # Sequel[:table][:column] # "table"."column" # Sequel[:schema][:table] # "schema"."table" # Sequel[:schema][:table][:column] # "schema"."table"."column" def [](identifier) QualifiedIdentifier.new(self, identifier) end end # This module includes the +like+ and +ilike+ methods used for pattern matching that are defined on objects that can be # used in a string context in SQL (+Symbol+, +LiteralString+, <tt>SQL::GenericExpression</tt>).
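#
# A hedged usage sketch (DB[:t] and the column name are hypothetical):
#
#   DB[:t].where{name.like('A%')}.sql
#   # SELECT * FROM t WHERE (name LIKE 'A%' ESCAPE '\')
#   DB[:t].where{name.ilike('a%', 'b%')}.sql
#   # SELECT * FROM t WHERE ((name ILIKE 'a%' ESCAPE '\') OR (name ILIKE 'b%' ESCAPE '\'))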
module StringMethods # Create a +BooleanExpression+ case insensitive pattern match of the receiver # with the given patterns. See <tt>StringExpression.like</tt>. # # Sequel[:a].ilike('A%') # "a" ILIKE 'A%' ESCAPE '\' def ilike(*ces) StringExpression.like(self, *(ces << {:case_insensitive=>true})) end # Create a +BooleanExpression+ case sensitive (if the database supports it) pattern match of the receiver with # the given patterns. See <tt>StringExpression.like</tt>. # # Sequel[:a].like('A%') # "a" LIKE 'A%' ESCAPE '\' def like(*ces) StringExpression.like(self, *ces) end end # This module includes the <tt>+</tt> method. It is included in +StringExpression+ and can be included elsewhere # to allow the use of the + operator to represent concatenation of SQL Strings: module StringConcatenationMethods # Return a +StringExpression+ representing the concatenation of the receiver # with the given argument. # # Sequel[:x].sql_string + :y # => "x" || "y" def +(ce) StringExpression.new(:'||', self, ce) end end # This module includes the +sql_subscript+ method, representing SQL array accesses. module SubscriptMethods # Return a <tt>Subscript</tt> with the given arguments, representing an # SQL array access. # # Sequel[:array].sql_subscript(1) # array[1] # Sequel[:array].sql_subscript(1, 2) # array[1, 2] # Sequel[:array].sql_subscript([1, 2]) # array[1, 2] # Sequel[:array].sql_subscript(1..2) # array[1:2] # Sequel[:array].sql_subscript(1...3) # array[1:2] def sql_subscript(*sub) Subscript.new(self, sub.flatten) end end # Represents an aliasing of an expression to a given alias. class AliasedExpression < Expression # The expression to alias attr_reader :expression # The alias to use for the expression. attr_reader :alias # The column aliases (derived column list) to use, for when the aliased expression is # a record or set of records (such as a dataset). attr_reader :columns # Create an object with the given expression, alias, and optional column aliases. def initialize(expression, aliaz, columns=nil) @expression = expression @alias = aliaz @columns = columns freeze end to_s_method :aliased_expression_sql end # +Blob+ is used to represent binary data in the Ruby environment that is # stored as a blob type in the database. Sequel represents binary data as a Blob object because # most database engines require binary data to be escaped differently than regular strings. class Blob < ::String include SQL::AliasMethods include SQL::CastMethods class << self # Alias new to call for usage in conversion procs alias call new end # Return a LiteralString with the same content if no args are given, otherwise # return a SQL::PlaceholderLiteralString with the current string and the given args. def lit(*args) args.empty? ? LiteralString.new(self) : SQL::PlaceholderLiteralString.new(self, args) end # Return a string showing that this is a blob, the size, and some or all of the content, # depending on the size. def inspect size = length content = if size > 20 "start=#{self[0...10].to_s.inspect} end=#{self[-10..-1].to_s.inspect}" else "content=#{super}" end "#<#{self.class}:0x#{"%x" % object_id} bytes=#{size} #{content}>" end # Returns +self+, since it is already a blob. def to_sequel_blob self end end # Subclass of +ComplexExpression+ where the expression results # in a boolean value in SQL. class BooleanExpression < ComplexExpression include BooleanMethods # Take pairs of values (e.g. a hash or array of two element arrays) # and convert them to a +BooleanExpression+.
The operator and args # used depend on the case of the right (2nd) argument: # # 0..10 :: left >= 0 AND left <= 10 # [1,2] :: left IN (1,2) # nil :: left IS NULL # true :: left IS TRUE # false :: left IS FALSE # /as/ :: left ~ 'as' # :blah :: left = blah # 'blah' :: left = 'blah' # # If multiple arguments are given, they are joined with the op given (AND # by default, OR possible). If negate is set to true, # all subexpressions are inverted before being used. Therefore, the following # expressions are equivalent: # # ~from_value_pairs(hash) # from_value_pairs(hash, :OR, true) def self.from_value_pairs(pairs, op=:AND, negate=false) pairs = pairs.map{|l,r| from_value_pair(l, r)} pairs.map!{|ce| invert(ce)} if negate pairs.length == 1 ? pairs[0] : new(op, *pairs) end # Return a BooleanExpression based on the right side of the pair. def self.from_value_pair(l, r) case r when Range unless r.begin.nil? begin_expr = new(:>=, l, r.begin) end unless r.end.nil? end_expr = new(r.exclude_end? ? :< : :<=, l, r.end) end if begin_expr if end_expr new(:AND, begin_expr, end_expr) else begin_expr end elsif end_expr end_expr else new(:'=', 1, 1) end when ::Array r = r.dup.freeze unless r.frozen? new(:IN, l, r) when ::String r = r.dup.freeze unless r.frozen? new(:'=', l, r) when ::Sequel::Dataset new(:IN, l, r) when NegativeBooleanConstant new(:"IS NOT", l, r.constant) when BooleanConstant new(:IS, l, r.constant) when NilClass, TrueClass, FalseClass new(:IS, l, r) when Regexp StringExpression.like(l, r) when DelayedEvaluation Sequel.delay{|ds| from_value_pair(l, r.call(ds))} when Dataset::PlaceholderLiteralizer::Argument r.transform{|v| from_value_pair(l, v)} else new(:'=', l, r) end end private_class_method :from_value_pair # Invert the expression, if possible. If the expression cannot # be inverted, raise an error. An inverted expression should match everything that the # uninverted expression did not match, and vice-versa, except for possible issues with # SQL NULL (i.e. 1 == NULL is NULL and 1 != NULL is also NULL). # # BooleanExpression.invert(:a) # NOT "a" def self.invert(ce) case ce when BooleanExpression case op = ce.op when :AND, :OR BooleanExpression.new(OPERTATOR_INVERSIONS[op], *ce.args.map{|a| BooleanExpression.invert(a)}) when :IN, :"NOT IN" BooleanExpression.new(OPERTATOR_INVERSIONS[op], *ce.args.dup) else if ce.args.length == 2 case ce.args[1] when Function, LiteralString, PlaceholderLiteralString # Special behavior to not push down inversion in this case because doing so # can result in incorrect behavior for ANY/SOME/ALL operators. BooleanExpression.new(:NOT, ce) else BooleanExpression.new(OPERTATOR_INVERSIONS[op], *ce.args.dup) end else BooleanExpression.new(OPERTATOR_INVERSIONS[op], *ce.args.dup) end end when StringExpression, NumericExpression raise(Sequel::Error, "cannot invert #{ce.inspect}") when Constant CONSTANT_INVERSIONS[ce] || raise(Sequel::Error, "cannot invert #{ce.inspect}") else BooleanExpression.new(:NOT, ce) end end # Always use an AND operator for & on BooleanExpressions def &(ce) BooleanExpression.new(:AND, self, ce) end # Always use an OR operator for | on BooleanExpressions def |(ce) BooleanExpression.new(:OR, self, ce) end # Return self instead of creating a new object to save on memory. def sql_boolean self end end # Represents an SQL CASE expression, used for conditional branching in SQL.
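#
# For illustration (a hedged sketch; the SQL shown assumes the default
# literalization):
#
#   Sequel.case({{status: 'new'} => 1}, 0)
#   # CASE WHEN (status = 'new') THEN 1 ELSE 0 END
#   Sequel.case({1 => 'a', 2 => 'b'}, 'c', :x)
#   # CASE x WHEN 1 THEN 'a' WHEN 2 THEN 'b' ELSE 'c' END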
class CaseExpression < GenericExpression # An array of two element arrays, with the first element specifying the # condition and the second element specifying the result if the # condition matches. attr_reader :conditions # The default value if no conditions match. attr_reader :default # An optional expression to test the conditions against attr_reader :expression # Create an object with the given conditions and # default value, and optional expression. An expression can be provided to # test each condition against, instead of having # all conditions represent their own boolean expression. def initialize(conditions, default, expression=(no_expression=true; nil)) raise(Sequel::Error, 'CaseExpression conditions must be a hash or array of all two pairs') unless Sequel.condition_specifier?(conditions) @conditions = conditions.to_a.dup.freeze @default = default @expression = expression @no_expression = no_expression freeze end # Whether to use an expression for this CASE expression. def expression? !@no_expression end # Merge the CASE expression into the conditions, useful for databases that # don't support CASE expressions. def with_merged_expression if expression? e = expression CaseExpression.new(conditions.map{|c, r| [::Sequel::SQL::BooleanExpression.new(:'=', e, c), r]}, default) else self end end to_s_method :case_expression_sql end # Represents a cast of an SQL expression to a specific type. class Cast < GenericExpression # The expression to cast attr_reader :expr # The type to which to cast the expression attr_reader :type # Set the expression and type for the cast def initialize(expr, type) @expr = expr @type = type freeze end to_s_method :cast_sql, '@expr, @type' end # Represents all columns in a given table, table.* in SQL class ColumnAll < Expression # The table containing the columns being selected attr_reader :table # Create an object with the given table def initialize(table) @table = table freeze end to_s_method :column_all_sql end class ComplexExpression include AliasMethods include CastMethods include OrderMethods include PatternMatchMethods include SubscriptMethods # Return a BooleanExpression with the same op and args. def sql_boolean BooleanExpression.new(op, *args) end # Return a NumericExpression with the same op and args. def sql_number NumericExpression.new(op, *args) end # Return a StringExpression with the same op and args. def sql_string StringExpression.new(op, *args) end end # Represents constants or pseudo-constants (e.g. +CURRENT_DATE+) in SQL. class Constant < GenericExpression # The underlying constant related to this object. attr_reader :constant # Create a constant with the given value def initialize(constant) @constant = constant freeze end to_s_method :constant_sql, '@constant' end # Represents boolean constants such as +NULL+, +TRUE+, and +FALSE+. class BooleanConstant < Constant to_s_method :boolean_constant_sql, '@constant' end # Represents inverse boolean constants (currently only +NOTNULL+). A # special class to allow for special behavior. class NegativeBooleanConstant < Constant to_s_method :negative_boolean_constant_sql, '@constant' end # Holds default generic constants that can be referenced. These # are included in the Sequel top level module and are also available # in this module which can be required at the top level to get # direct access to the constants.
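#
# A brief hedged example (DB[:events] is a hypothetical dataset):
#
#   DB[:events].where{created_at > Sequel::CURRENT_TIMESTAMP}.sql
#   # SELECT * FROM events WHERE (created_at > CURRENT_TIMESTAMP)
#   DB[:events].insert(note: Sequel::DEFAULT) # insert the column's database default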
module Constants CURRENT_DATE = Constant.new(:CURRENT_DATE) CURRENT_TIME = Constant.new(:CURRENT_TIME) CURRENT_TIMESTAMP = Constant.new(:CURRENT_TIMESTAMP) DEFAULT = Constant.new(:DEFAULT) SQLTRUE = TRUE = BooleanConstant.new(true) SQLFALSE = FALSE = BooleanConstant.new(false) NULL = BooleanConstant.new(nil) NOTNULL = NegativeBooleanConstant.new(nil) end class ComplexExpression # A hash of the opposite for each constant, used for inverting constants. CONSTANT_INVERSIONS = {Constants::TRUE=>Constants::FALSE, Constants::FALSE=>Constants::TRUE, Constants::NULL=>Constants::NOTNULL, Constants::NOTNULL=>Constants::NULL}.freeze end # Represents a delayed evaluation, encapsulating a callable # object which returns the value to use when called. class DelayedEvaluation < GenericExpression # A callable object that returns the value of the evaluation # when called. attr_reader :callable # Set the callable object def initialize(callable) @callable = callable freeze end # Call the underlying callable and return the result. If the # underlying callable only accepts a single argument, call it # with the given dataset. def call(ds) if @callable.respond_to?(:arity) && @callable.arity == 1 @callable.call(ds) else @callable.call end end to_s_method :delayed_evaluation_sql end # Represents an SQL function call. class Function < GenericExpression WILDCARD = LiteralString.new('*').freeze DISTINCT = ["DISTINCT ".freeze].freeze COMMA_ARRAY = [LiteralString.new(', ').freeze].freeze # The SQL function to call attr_reader :name # The array of arguments to pass to the function (may be blank) attr_reader :args # Options for this function attr_reader :opts # Set the name and args for the function def initialize(name, *args) _initialize(name, args, OPTS) end # Set the name, args, and options, for internal use only. def self.new!(name, args, opts) # :nodoc: allocate.send(:_initialize, name, args, opts) end # If no arguments are given, return a new function with the wildcard prepended to the arguments. # # Sequel.function(:count).* # count(*) def *(ce=(arg=false;nil)) if arg == false raise Error, "Cannot apply * to functions with arguments" unless args.empty? with_opts(:"*"=>true) else super(ce) end end # Return a new function with DISTINCT before the method arguments. # # Sequel.function(:count, :col).distinct # count(DISTINCT col) def distinct with_opts(:distinct=>true) end # Return a new function with FILTER added to it, for filtered # aggregate functions: # # Sequel.function(:foo, :col).filter(a: 1) # foo(col) FILTER (WHERE (a = 1)) def filter(*args, &block) if args.length == 1 args = args.first else args.freeze end with_opts(:filter=>args, :filter_block=>block) end # Return a function which will use LATERAL when literalized: # # Sequel.function(:foo, :col).lateral # LATERAL foo(col) def lateral with_opts(:lateral=>true) end # Return a new function where the function will be ordered. Only useful for aggregate # functions that are order dependent. # # Sequel.function(:foo, :a).order(:a, Sequel.desc(:b)) # foo(a ORDER BY a, b DESC) def order(*args) with_opts(:order=>args.freeze) end # Return a new function with an OVER clause (making it a window function). # See Sequel::SQL::Window for the list of options +over+ can receive. 
# # Sequel.function(:row_number).over(partition: :col) # row_number() OVER (PARTITION BY col) def over(window=OPTS) raise Error, "function already has a window applied to it" if opts[:over] window = Window.new(window) unless window.is_a?(Window) with_opts(:over=>window) end # Return a new function where the function name will be quoted if the database supports # quoted functions: # # Sequel.function(:foo).quoted # "foo"() def quoted with_opts(:quoted=>true) end # Return a new function where the function name will not be quoted even # if the database supports quoted functions: # # Sequel[:foo][:bar].function.unquoted # foo.bar() def unquoted with_opts(:quoted=>false) end # Return a new function that will use WITH ORDINALITY to also return # a row number for every row the function returns: # # Sequel.function(:foo).with_ordinality # foo() WITH ORDINALITY def with_ordinality with_opts(:with_ordinality=>true) end # Return a new function that uses WITHIN GROUP ordered by the given expression, # useful for ordered-set and hypothetical-set aggregate functions: # # Sequel.function(:rank, :a).within_group(:b, :c) # # rank(a) WITHIN GROUP (ORDER BY b, c) def within_group(*expressions) with_opts(:within_group=>expressions.freeze) end to_s_method :function_sql private # Set name, args, and opts def _initialize(name, args, opts) @name = name @args = args.freeze @opts = opts.freeze freeze end # Return a new function call with the given opts merged into the current opts. def with_opts(opts) self.class.new!(name, args, @opts.merge(opts)) end end class GenericExpression include AliasMethods include BooleanMethods include CastMethods include ComplexExpressionMethods include InequalityMethods include NumericMethods include OrderMethods include PatternMatchMethods include StringMethods include SubscriptMethods end # Represents an identifier (column, table, schema, etc.). class Identifier < GenericExpression include QualifyingMethods # The identifier to reference attr_reader :value # Set the identifier to the given argument def initialize(value) @value = value freeze end # Create a Function using this identifier as the function's name, with # the given args. def function(*args) Function.new(self, *args) end to_s_method :quote_identifier, '@value' end # Represents an SQL JOIN clause, used for joining tables. class JoinClause < Expression # The type of join to do attr_reader :join_type # The expression representing the table/set related to the JOIN. # Is an AliasedExpression if the JOIN uses an alias. attr_reader :table_expr # Create an object with the given join_type and table expression. def initialize(join_type, table_expr) @join_type = join_type @table_expr = table_expr freeze end # The table/set related to the JOIN, without any alias. def table if @table_expr.is_a?(AliasedExpression) @table_expr.expression else @table_expr end end # The table alias to use for the JOIN, or nil if the # JOIN does not alias the table. def table_alias if @table_expr.is_a?(AliasedExpression) @table_expr.alias end end # The column aliases to use for the JOIN, or nil if the # JOIN does not use a derived column list. def column_aliases if @table_expr.is_a?(AliasedExpression) @table_expr.columns end end to_s_method :join_clause_sql end # Represents an SQL JOIN clause with ON conditions. class JoinOnClause < JoinClause # The conditions for the join attr_reader :on # Create an object with the ON conditions and call super with the # remaining args.
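# These objects are usually built for you by Dataset#join_table and its
# variants rather than constructed directly; an illustrative sketch (table
# and column names are hypothetical):
#
#   DB[:a].join(:b, id: :b_id).sql
#   # SELECT * FROM a INNER JOIN b ON (b.id = a.b_id)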
def initialize(on, *args) @on = on super(*args) end to_s_method :join_on_clause_sql end # Represents an SQL JOIN clause with USING conditions. class JoinUsingClause < JoinClause # The columns that appear in both tables that should be equal # for the conditions to match. attr_reader :using # Create an object with the given USING conditions and call super # with the remaining args. def initialize(cols, *args) @using = cols super(*args) end to_s_method :join_using_clause_sql end # Represents a literal string with placeholders and arguments. # This is necessary to ensure delayed literalization of the arguments # required for the prepared statement support and for database-specific # literalization. class PlaceholderLiteralString < GenericExpression # The literal string containing placeholders. This can also be an array # of strings, where each arg in args goes between the string elements. attr_reader :str # The arguments that will be substituted into the placeholders. # Either an array of unnamed placeholders (which will be substituted in # order for ? characters), or a hash of named placeholders (which will be # substituted for :key phrases). attr_reader :args # Whether to surround the expression with parentheses attr_reader :parens # Create an object with the given string, placeholder arguments, and parens flag. def initialize(str, args, parens=false) @str = str @args = args.is_a?(Array) && args.length == 1 && (v = args[0]).is_a?(Hash) ? v : args @parens = parens freeze end # Return a copy of the receiver that will be surrounded by parentheses. def with_parens @parens ? self : self.class.new(@str, @args, true) end to_s_method :placeholder_literal_string_sql end # Subclass of +ComplexExpression+ where the expression results # in a numeric value in SQL. class NumericExpression < ComplexExpression include BitwiseMethods include NumericMethods include InequalityMethods # Always use + for + operator for NumericExpressions. def +(ce) NumericExpression.new(:+, self, ce) end # Return self instead of creating a new object to save on memory. def sql_number self end end # Represents a column/expression to order the result set by. class OrderedExpression < Expression INVERT_NULLS = {:first=>:last, :last=>:first}.freeze # The expression to order the result set by. attr_reader :expression # Whether the expression should order the result set in a descending manner attr_reader :descending # Whether to sort NULLS FIRST/LAST attr_reader :nulls # Set the expression and descending attributes to the given values. # Options: # # :nulls :: Can be :first/:last for NULLS FIRST/LAST. def initialize(expression, descending = true, opts=OPTS) @expression = expression @descending = descending @nulls = opts[:nulls] freeze end # Return a copy that is ordered ASC def asc OrderedExpression.new(@expression, false, :nulls=>@nulls) end # Return a copy that is ordered DESC def desc OrderedExpression.new(@expression, true, :nulls=>@nulls) end # Return an inverted expression, changing ASC to DESC and NULLS FIRST to NULLS LAST. def invert OrderedExpression.new(@expression, !@descending, :nulls=>INVERT_NULLS.fetch(@nulls, @nulls)) end to_s_method :ordered_expression_sql end # Represents a qualified identifier (column with table or table with schema).
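#
# A short construction sketch (names are purely illustrative):
#
#   Sequel[:table][:column]          # "table"."column"
#   Sequel.qualify(:schema, :table)  # "schema"."table"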
class QualifiedIdentifier < GenericExpression include QualifyingMethods # The table/schema qualifying the reference attr_reader :table # The column/table referenced attr_reader :column # Set the table and column to the given arguments def initialize(table, column) @table = convert_identifier(table) @column = convert_identifier(column) freeze end # Create a Function using this identifier as the function's name, with # the given args. def function(*args) Function.new(self, *args) end to_s_method :qualified_identifier_sql, "@table, @column" private # Automatically convert SQL::Identifiers to strings def convert_identifier(identifier) case identifier when SQL::Identifier identifier.value.to_s else identifier end end end # Subclass of +ComplexExpression+ where the expression results # in a text/string/varchar value in SQL. class StringExpression < ComplexExpression include StringMethods include StringConcatenationMethods include InequalityMethods # Map of [regexp, case_insensitive] to +ComplexExpression+ operator symbol LIKE_MAP = {[true, true]=>:'~*', [true, false]=>:~, [false, true]=>:ILIKE, [false, false]=>:LIKE}.freeze LIKE_MAP.each_key(&:freeze) # Creates an SQL pattern match expression. left (l) is the SQL string we # are matching against, and ces are the patterns we are matching. # The match succeeds if any of the patterns match (SQL OR). # # If a regular expression is used as a pattern, an SQL regular expression will be # used, which is currently only supported on some databases. Be aware # that SQL regular expression syntax is similar to ruby # regular expression syntax, but it is not exactly the same, especially for # advanced regular expression features. Sequel just uses the source of the # ruby regular expression verbatim as the SQL regular expression string. # # If any other object is used as a pattern, the SQL LIKE operator will # be used, and should be supported by most databases. # # The pattern match will be case insensitive if the last argument is a hash # with a key of :case_insensitive that is not false or nil. Also, # if a case insensitive regular expression is used (//i), that particular # pattern will always be case insensitive. # # StringExpression.like(:a, 'a%') # ("a" LIKE 'a%' ESCAPE '\') # StringExpression.like(:a, 'a%', case_insensitive: true) # ("a" ILIKE 'a%' ESCAPE '\') # StringExpression.like(:a, 'a%', /^a/i) # (("a" LIKE 'a%' ESCAPE '\') OR ("a" ~* '^a')) def self.like(l, *ces) l, lre, lci = like_element(l) lci = (ces.last.is_a?(Hash) ? ces.pop : OPTS)[:case_insensitive] ? true : lci ces.map! do |ce| r, rre, rci = like_element(ce) BooleanExpression.new(LIKE_MAP[[lre||rre, lci||rci]], l, r) end ces.length == 1 ? ces[0] : BooleanExpression.new(:OR, *ces) end # Returns a three element array, made up of: # * The object to use # * Whether it is a regular expression # * Whether it is case insensitive def self.like_element(re) # :nodoc: if re.is_a?(Regexp) [re.source, true, re.casefold?] else [re, false, false] end end private_class_method :like_element # Return self instead of creating a new object to save on memory. def sql_string self end end # Represents an SQL array access, with multiple possible arguments.
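#
# An illustrative sketch (array_col is a hypothetical column):
#
#   Sequel.subscript(:array_col, 1)          # array_col[1]
#   Sequel[:array_col].sql_subscript(1) | 2  # array_col[1, 2]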
class Subscript < GenericExpression # The SQL array column attr_reader :expression alias f expression # The array of subscripts to use (should be an array of numbers) attr_reader :sub # Set the array column and subscripts to the given arguments def initialize(expression, sub) @expression = expression @sub = sub freeze end # Create a new +Subscript+ appending the given subscript(s) # to the current array of subscripts. # # Sequel[:a].sql_subscript(2) # a[2] # Sequel[:a].sql_subscript(2) | 1 # a[2, 1] def |(sub) Subscript.new(@expression, @sub + Array(sub)) end # Create a new +Subscript+ by accessing a subarray of a multidimensional # array. # # Sequel[:a].sql_subscript(2) # a[2] # Sequel[:a].sql_subscript(2)[1] # a[2][1] def [](sub) Subscript.new(self, Array(sub)) end to_s_method :subscript_sql end # Represents an SQL value list (IN/NOT IN predicate value). Added so it is possible to deal with a # ruby array of two element arrays as an SQL value list instead of an ordered # hash-like conditions specifier. class ValueList < ::Array # Show that this is a value list and not just an array def inspect "#<#{self.class} #{super}>" end end # The purpose of the +VirtualRow+ class is to allow the easy creation of SQL identifiers and functions, # in a way that leads to more compact code. # # An instance of this class is yielded to the block supplied to <tt>Dataset#where</tt>, <tt>Dataset#order</tt>, and <tt>Dataset#select</tt> # (and the other methods that accept a block and pass it to one of those methods). # If the block doesn't take an argument, the block is instance_execed in the context of # an instance of this class. # # +VirtualRow+ uses +method_missing+ to return either an +Identifier+ or a +Function+, # depending on how it is called. # # +Function+ :: Returned if any arguments are supplied, using the method name # as the function name, and the arguments as the function arguments. # +Identifier+ :: Returned otherwise, using the method name. # # If splitting symbols has been enabled (not the default), then method calls without # arguments will return +QualifiedIdentifier+ instances if the method call includes a # double underscore.
# # Examples: # # ds = DB[:t] # # # Argument yielded to block # ds.where{|r| r.name < 2} # SELECT * FROM t WHERE (name < 2) # # # Block without argument (instance_exec) # ds.where{name < 2} # SELECT * FROM t WHERE (name < 2) # # # Functions # ds.where{is_active(1, 'arg2')} # SELECT * FROM t WHERE is_active(1, 'arg2') # ds.select{version.function} # SELECT version() FROM t # ds.select{count.function.*} # SELECT count(*) FROM t # ds.select{count(col1).distinct} # SELECT count(DISTINCT col1) FROM t # # # Math Operators # ds.select{|o| o.+(1, :a).as(:b)} # SELECT (1 + a) AS b FROM t # ds.select{|o| o.-(2, :a).as(:b)} # SELECT (2 - a) AS b FROM t # ds.select{|o| o.*(3, :a).as(:b)} # SELECT (3 * a) AS b FROM t # ds.select{|o| o./(4, :a).as(:b)} # SELECT (4 / a) AS b FROM t # # # Boolean Operators # ds.where{|o| o.&({a: 1}, :b)} # SELECT * FROM t WHERE ((a = 1) AND b) # ds.where{|o| o.|({a: 1}, :b)} # SELECT * FROM t WHERE ((a = 1) OR b) # ds.where{|o| o.~(a: 1)} # SELECT * FROM t WHERE (a != 1) # ds.where{|o| o.~(a: 1, b: 2)} # SELECT * FROM t WHERE ((a != 1) OR (b != 2)) # # # Inequality Operators # ds.where{|o| o.>(1, :a)} # SELECT * FROM t WHERE (1 > a) # ds.where{|o| o.<(2, :a)} # SELECT * FROM t WHERE (2 < a) # ds.where{|o| o.>=(3, :a)} # SELECT * FROM t WHERE (3 >= a) # ds.where{|o| o.<=(4, :a)} # SELECT * FROM t WHERE (4 <= a) # # For a more detailed explanation, see the {Virtual Rows guide}[rdoc-ref:doc/virtual_rows.rdoc]. class VirtualRow < BasicObject include OperatorBuilders %w'> < >= <='.each do |op| class_eval(<<-END, __FILE__, __LINE__ + 1) def #{op}(*args) SQL::BooleanExpression.new(:#{op}, *args) end END end def initialize freeze end m = Module.new do # Return an +Identifier+, +QualifiedIdentifier+, or +Function+, depending # on arguments and whether a block is provided. Does not currently call the block. # See the class level documentation. def method_missing(m, *args) if args.empty? if Sequel.split_symbols? table, column = m.to_s.split('__', 2) column ? QualifiedIdentifier.new(table, column) : Identifier.new(m) else Identifier.new(m) end else Function.new(m, *args) end end end include m Sequel::VIRTUAL_ROW = new end # A +Window+ is part of a window function specifying the window over which a window function operates. 
# # Sequel::SQL::Window.new(partition: :col1) # # (PARTITION BY col1) # Sequel::SQL::Window.new(partition: [:col2, :col3]) # # (PARTITION BY col2, col3) # # Sequel::SQL::Window.new(order: :col4) # # (ORDER BY col4) # Sequel::SQL::Window.new(order: [:col5, Sequel.desc(:col6)]) # # (ORDER BY col5, col6 DESC) # # Sequel::SQL::Window.new(partition: :col7, frame: :all) # # (PARTITION BY col7 ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) # Sequel::SQL::Window.new(partition: :col7, frame: :rows) # # (PARTITION BY col7 ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) # Sequel::SQL::Window.new(partition: :col7, frame: {type: :range, start: :current}) # # (PARTITION BY col7 RANGE CURRENT ROW) # Sequel::SQL::Window.new(partition: :col7, frame: {type: :range, start: 1, end: 1}) # # (PARTITION BY col7 RANGE BETWEEN 1 PRECEDING AND 1 FOLLOWING) # Sequel::SQL::Window.new(partition: :col7, frame: {type: :range, start: 2, end: [1, :preceding]}) # # (PARTITION BY col7 RANGE BETWEEN 2 PRECEDING AND 1 PRECEDING) # Sequel::SQL::Window.new(partition: :col7, frame: {type: :range, start: 1, end: [2, :following]}) # # (PARTITION BY col7 RANGE BETWEEN 1 FOLLOWING AND 2 FOLLOWING) # Sequel::SQL::Window.new(partition: :col7, frame: {type: :range, start: :preceding, exclude: :current}) # # (PARTITION BY col7 RANGE UNBOUNDED PRECEDING EXCLUDE CURRENT ROW) # # Sequel::SQL::Window.new(window: :named_window) # you can create a named window with Dataset#window # # (named_window) class Window < Expression # The options for this window. Options currently supported: # :frame :: if specified, should be :all, :rows, :range, :groups, a String, or a Hash. # :all :: Always operates over all rows in the partition # :rows :: Includes rows in the partition up to and including the current row # :range, :groups :: Includes rows in the partition up to and including the current group # String :: Used as literal SQL code, try to avoid # Hash :: Hash of options for the frame: # :type :: The type of frame, must be :rows, :range, or :groups (required) # :start :: The start of the frame (required). Possible values: # :preceding :: UNBOUNDED PRECEDING # :following :: UNBOUNDED FOLLOWING # :current :: CURRENT ROW # String, Numeric, or Cast :: Used as the offset of rows/values preceding # Array :: Must have two elements, with first element being String, Numeric, or # Cast and second element being :preceding or :following # :end :: The end of the frame. Can be left out. If present, takes the same values as # :start, except that when a String, Numeric, or Cast, it is used as the offset # for rows following # :exclude :: Which rows to exclude. Possible values are :current, :ties, :group, # and :no_others. # :order :: order on the column(s) given # :partition :: partition/group on the column(s) given # :window :: base results on a previously specified named window attr_reader :opts # Set the options to the options given def initialize(opts=OPTS) @opts = opts.frozen? ? opts : Hash[opts].freeze freeze end to_s_method :window_sql, '@opts' end # A +Wrapper+ is a simple way to wrap an existing object so that it supports # the Sequel DSL. class Wrapper < GenericExpression # The underlying value wrapped by this object. attr_reader :value # Set the value wrapped by the object. def initialize(value) @value = value freeze end to_s_method :literal, '@value' end end # +LiteralString+ is used to represent literal SQL expressions. A # +LiteralString+ is copied verbatim into an SQL statement.
Instances of # +LiteralString+ can be created by calling <tt>Sequel.lit</tt>. class LiteralString include SQL::OrderMethods include SQL::ComplexExpressionMethods include SQL::BooleanMethods include SQL::NumericMethods include SQL::StringMethods include SQL::InequalityMethods include SQL::AliasMethods include SQL::CastMethods # Show that the current string is a literal string in addition to the output. def inspect "#<#{self.class} #{super}>" end # Return self if no args are given, otherwise return a SQL::PlaceholderLiteralString # with the current string and the given args. def lit(*args) args.empty? ? self : SQL::PlaceholderLiteralString.new(self, args) end # Convert a literal string to a SQL::Blob. def to_sequel_blob SQL::Blob.new(self) end end include SQL::Constants extend SQL::Builders extend SQL::OperatorBuilders end �������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/timezones.rb���������������������������������������������������������������0000664�0000000�0000000�00000023521�14342141206�0017511�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel @application_timezone = nil @database_timezone = nil @typecast_timezone = nil @local_offsets = {} # Backwards compatible alias Timezones = SequelMethods Deprecation.deprecate_constant(self, :Timezones) # Sequel doesn't pay much attention to timezones by default, but you can set it to # handle timezones if you want. There are three separate timezone settings: # # * application_timezone # * database_timezone # * typecast_timezone # # All three timezones have getter and setter methods. # You can set all three timezones to the same value at once via <tt>Sequel.default_timezone=</tt>. # # The only timezone values that are supported by default are <tt>:utc</tt> (convert to UTC), # <tt>:local</tt> (convert to local time), and +nil+ (don't convert). If you need to # convert to a specific timezone, or need the timezones being used to change based # on the environment (e.g. current user), you need to use the +named_timezones+ extension (and use # +DateTime+ as the +datetime_class+). Sequel also ships with a +thread_local_timezones+ extensions # which allows each thread to have its own timezone values for each of the timezones. module SequelMethods # The timezone you want the application to use. This is the timezone # that incoming times from the database and typecasting are converted to. attr_reader :application_timezone # The timezone for storage in the database. This is the # timezone to which Sequel will convert timestamps before literalizing them # for storage in the database. It is also the timezone that Sequel will assume # database timestamp values are already in (if they don't include an offset). attr_reader :database_timezone # The timezone that incoming data that Sequel needs to typecast # is assumed to be already in (if they don't include an offset). 
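# A minimal sketch of how this setting interacts with the application timezone (assuming no other timezone settings are in effect): # #   Sequel.typecast_timezone = :utc #   Sequel.application_timezone = :local #   Sequel.typecast_to_application_timestamp('2020-01-01 12:00:00') #   # => time assumed to be in UTC, then converted to the local zone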
attr_reader :typecast_timezone %w'application database typecast'.each do |t| class_eval("def #{t}_timezone=(tz); @#{t}_timezone = convert_timezone_setter_arg(tz) end", __FILE__, __LINE__) end # Convert the given +Time+/+DateTime+ object into the database timezone, used when # literalizing objects in an SQL string. def application_to_database_timestamp(v) convert_output_timestamp(v, Sequel.database_timezone) end # Converts the object to the given +output_timezone+. def convert_output_timestamp(v, output_timezone) if output_timezone if v.is_a?(DateTime) case output_timezone when :utc v.new_offset(0) when :local v.new_offset(local_offset_for_datetime(v)) else convert_output_datetime_other(v, output_timezone) end else case output_timezone when :utc v.getutc when :local v.getlocal else convert_output_time_other(v, output_timezone) end end else v end end # Converts the given object from the given input timezone to the # +application_timezone+ using +convert_input_timestamp+ and # +convert_output_timestamp+. def convert_timestamp(v, input_timezone) if v.is_a?(Date) && !v.is_a?(DateTime) # Dates handled specially as they are assumed to already be in the application_timezone if datetime_class == DateTime DateTime.civil(v.year, v.month, v.day, 0, 0, 0, application_timezone == :local ? Rational(Time.local(v.year, v.month, v.day).utc_offset, 86400) : 0) else Time.public_send(application_timezone == :utc ? :utc : :local, v.year, v.month, v.day) end else convert_output_timestamp(convert_input_timestamp(v, input_timezone), application_timezone) end rescue InvalidValue raise rescue => e raise convert_exception_class(e, InvalidValue) end # Convert the given object into an object of <tt>Sequel.datetime_class</tt> in the # +application_timezone+. Used when converting datetime/timestamp columns # returned by the database. def database_to_application_timestamp(v) convert_timestamp(v, Sequel.database_timezone) end # Sets the database, application, and typecasting timezones to the given timezone. def default_timezone=(tz) self.database_timezone = tz self.application_timezone = tz self.typecast_timezone = tz end # Convert the given object into an object of <tt>Sequel.datetime_class</tt> in the # +application_timezone+. Used when typecasting values when assigning them # to model datetime attributes. def typecast_to_application_timestamp(v) convert_timestamp(v, Sequel.typecast_timezone) end private # Convert the given +DateTime+ to the given input_timezone, keeping the # same time and just modifying the timezone. def convert_input_datetime_no_offset(v, input_timezone) case input_timezone when nil, :utc v # DateTime assumes UTC if no offset is given when :local offset = local_offset_for_datetime(v) v.new_offset(offset) - offset else convert_input_datetime_other(v, input_timezone) end end # Convert the given +DateTime+ to the given input_timezone that is not supported # by default (i.e. one other than +nil+, <tt>:local</tt>, or <tt>:utc</tt>). Raises an +InvalidValue+ by default. # Can be overridden in extensions. def convert_input_datetime_other(v, input_timezone) raise InvalidValue, "Invalid input_timezone: #{input_timezone.inspect}" end # Convert the given +Time+ to the given input_timezone that is not supported # by default (i.e. one other than +nil+, <tt>:local</tt>, or <tt>:utc</tt>). Raises an +InvalidValue+ by default. # Can be overridden in extensions. 
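# A hypothetical override sketch (assuming the tzinfo gem is loaded and input_timezone is a TZInfo identifier string such as "America/New_York"): # #   def convert_input_time_other(v, input_timezone) #     TZInfo::Timezone.get(input_timezone).local_to_utc(v) #   end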
def convert_input_time_other(v, input_timezone) raise InvalidValue, "Invalid input_timezone: #{input_timezone.inspect}" end # Converts the object from a +String+, +Array+, +Date+, +DateTime+, or +Time+ into an # instance of <tt>Sequel.datetime_class</tt>. If given an array or a string that doesn't # contain an offset, assume that the array/string is already in the given +input_timezone+. def convert_input_timestamp(v, input_timezone) case v when String v2 = Sequel.string_to_datetime(v) if !input_timezone || _date_parse(v).has_key?(:offset) v2 else # Correct for potentially wrong offset if string doesn't include offset if v2.is_a?(DateTime) convert_input_datetime_no_offset(v2, input_timezone) else case input_timezone when nil, :local v2 when :utc (v2 + v2.utc_offset).utc else convert_input_time_other((v2 + v2.utc_offset).utc, input_timezone) end end end when Array y, mo, d, h, mi, s, ns, off = v if datetime_class == DateTime s += Rational(ns, 1000000000) if ns if off DateTime.civil(y, mo, d, h, mi, s, off) else convert_input_datetime_no_offset(DateTime.civil(y, mo, d, h, mi, s), input_timezone) end elsif off s += Rational(ns, 1000000000) if ns Time.new(y, mo, d, h, mi, s, (off*86400).to_i) else case input_timezone when nil, :local Time.local(y, mo, d, h, mi, s, (ns ? ns / 1000.0 : 0)) when :utc Time.utc(y, mo, d, h, mi, s, (ns ? ns / 1000.0 : 0)) else convert_input_time_other(Time.utc(y, mo, d, h, mi, s, (ns ? ns / 1000.0 : 0)), input_timezone) end end when Hash ary = [:year, :month, :day, :hour, :minute, :second, :nanos].map{|x| (v[x] || v[x.to_s]).to_i} if (offset = (v[:offset] || v['offset'])) ary << offset end convert_input_timestamp(ary, input_timezone) when Time if datetime_class == DateTime v.to_datetime else v end when DateTime if datetime_class == DateTime v else v.to_time end else raise InvalidValue, "Invalid convert_input_timestamp type: #{v.inspect}" end end # Convert the given +DateTime+ to the given output_timezone that is not supported # by default (i.e. one other than +nil+, <tt>:local</tt>, or <tt>:utc</tt>). Raises an +InvalidValue+ by default. # Can be overridden in extensions. def convert_output_datetime_other(v, output_timezone) raise InvalidValue, "Invalid output_timezone: #{output_timezone.inspect}" end # Convert the given +Time+ to the given output_timezone that is not supported # by default (i.e. one other than +nil+, <tt>:local</tt>, or <tt>:utc</tt>). Raises an +InvalidValue+ by default. # Can be overridden in extensions. def convert_output_time_other(v, output_timezone) raise InvalidValue, "Invalid output_timezone: #{output_timezone.inspect}" end # Convert the timezone setter argument. Returns argument given by default, # exists for easier overriding in extensions. def convert_timezone_setter_arg(tz) tz end # Takes a DateTime dt, and returns the correct local offset for that dt, daylight savings included, in fraction of a day. def local_offset_for_datetime(dt) time_offset_to_datetime_offset Time.local(dt.year, dt.month, dt.day, dt.hour, dt.min, dt.sec).utc_offset end # Caches offset conversions to avoid excess Rational math. 
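# For example, a -05:00 offset (-18000 seconds) is memoized as Rational(-18000, 86400) == Rational(-5, 24), the fraction-of-a-day form that DateTime#new_offset expects.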
def time_offset_to_datetime_offset(offset_secs) if offset = Sequel.synchronize{@local_offsets[offset_secs]} return offset end Sequel.synchronize{@local_offsets[offset_secs] = Rational(offset_secs, 86400)} end end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/lib/sequel/version.rb�����������������������������������������������������������������0000664�0000000�0000000�00000001345�14342141206�0017161�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������# frozen-string-literal: true module Sequel # The major version of Sequel. Only bumped for major changes. MAJOR = 5 # The minor version of Sequel. Bumped for every non-patch level # release, generally around once a month. MINOR = 63 # The tiny version of Sequel. Usually 0, only bumped for bugfix # releases that fix regressions from previous versions. TINY = 0 # The version of Sequel you are using, as a string (e.g. "2.11.0") VERSION = [MAJOR, MINOR, TINY].join('.').freeze # The version of Sequel you are using, as a number (2.11.0 -> 20110) VERSION_NUMBER = MAJOR*10000 + MINOR*10 + TINY # The version of Sequel you are using, as a string (e.g. "2.11.0") def self.version VERSION end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/sequel.gemspec������������������������������������������������������������������������0000664�0000000�0000000�00000003121�14342141206�0015740�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require File.expand_path("../lib/sequel/version", __FILE__) SEQUEL_GEMSPEC = Gem::Specification.new do |s| s.name = 'sequel' s.version = Sequel.version s.platform = Gem::Platform::RUBY s.extra_rdoc_files = ["README.rdoc", "CHANGELOG", "MIT-LICENSE"] + Dir["doc/*.rdoc"] + Dir['doc/release_notes/5.*.txt'] s.rdoc_options += ["--quiet", "--line-numbers", "--inline-source", '--title', 'Sequel: The Database Toolkit for Ruby', '--main', 'README.rdoc'] s.summary = "The Database Toolkit for Ruby" s.description = s.summary s.author = "Jeremy Evans" s.email = "code@jeremyevans.net" s.homepage = "https://sequel.jeremyevans.net" s.license = 'MIT' s.metadata = { 'bug_tracker_uri' => 'https://github.com/jeremyevans/sequel/issues', 'changelog_uri' => 'https://sequel.jeremyevans.net/rdoc/files/CHANGELOG.html', 'documentation_uri' => 'https://sequel.jeremyevans.net/documentation.html', 'mailing_list_uri' => 'https://github.com/jeremyevans/sequel/discussions', 'source_code_uri' => 'https://github.com/jeremyevans/sequel', } s.required_ruby_version = ">= 1.9.2" s.files = %w(MIT-LICENSE CHANGELOG README.rdoc bin/sequel) + Dir["doc/*.rdoc"] + 
Dir["doc/release_notes/5.*.txt"] + Dir["lib/**/*.rb"] s.require_path = "lib" s.bindir = 'bin' s.executables << 'sequel' s.add_development_dependency "minitest", '>=5.7.0' s.add_development_dependency "minitest-hooks" s.add_development_dependency "minitest-global_expectations" s.add_development_dependency "tzinfo" s.add_development_dependency "activemodel" s.add_development_dependency "nokogiri" end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/���������������������������������������������������������������������������������0000775�0000000�0000000�00000000000�14342141206�0014032�5����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/adapter_spec.rb������������������������������������������������������������������0000664�0000000�0000000�00000000225�14342141206�0017010�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������if !ARGV.empty? 
&& ARGV.first != 'none' require_relative "adapters/#{ARGV.first}_spec" end Dir['./spec/integration/*_test.rb'].each{|f| require f} ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/adapters/������������������������������������������������������������������������0000775�0000000�0000000�00000000000�14342141206�0015635�5����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/adapters/db2_spec.rb�������������������������������������������������������������0000664�0000000�0000000�00000011424�14342141206�0017645�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������SEQUEL_ADAPTER_TEST = :db2 require_relative 'spec_helper' if DB.table_exists?(:test) DB.drop_table(:test) end describe Sequel::Database do before do @db = DB @db.create_table(:test){String :a} @ds = @db[:test] end after do @db.drop_table(:test) end it "should provide disconnect functionality after preparing a connection" do @ds.prepare(:first, :a).call @db.disconnect @db.pool.size.must_equal 0 end it "should return version correctly" do @db.db2_version.must_match(/DB2 v/i) end end describe "Simple Dataset operations" do before(:all) do DB.use_clob_as_blob = true DB.create_table!(:items) do Integer :id, :primary_key => true Integer :number column :bin_string, 'varchar(20) for bit data' column :bin_clob, 'clob' end @ds = DB[:items] end after(:each) do @ds.delete end after(:all) do DB.use_clob_as_blob = false DB.drop_table(:items) end it "should insert with a primary key specified" do @ds.insert(:id => 1, :number => 10) @ds.insert(:id => 100, :number => 20) @ds.select_hash(:id, :number).must_equal(1 => 10, 100 => 20) end it "should insert into binary columns" do @ds.insert(:id => 1, :bin_string => Sequel.blob("\1"), :bin_clob => Sequel.blob("\2")) @ds.select(:bin_string, :bin_clob).first.must_equal(:bin_string => "\1", :bin_clob => "\2") end end describe Sequel::Database do before do @db = DB end after do @db.drop_table(:items) end it "should parse primary keys from the schema properly" do @db.create_table!(:items){Integer :number} @db.schema(:items).collect{|k,v| k if v[:primary_key]}.compact.must_equal [] @db.create_table!(:items){primary_key :number} @db.schema(:items).collect{|k,v| k if v[:primary_key]}.compact.must_equal [:number] @db.create_table!(:items){Integer :number1, :null => false; Integer :number2, :null => false; primary_key [:number1, :number2]} @db.schema(:items).collect{|k,v| k if v[:primary_key]}.compact.must_equal [:number1, :number2] end it "should not error on alter_table operations that need REORG" do @db.create_table!(:items) do varchar :a end @db.alter_table(:items) do add_column 
:b, :varchar, :null => true set_column_allow_null :a, false add_index :a, :unique => true end end end describe "Sequel::IBMDB::Database#convert_smallint_to_bool" do before do @db = DB @db.create_table!(:booltest){column :b, 'smallint'; column :i, 'integer'} @ds = @db[:booltest] end after do @db.convert_smallint_to_bool = true @db.drop_table(:booltest) end it "should consider smallint datatypes as boolean if set, but not larger smallints" do @db.schema(:booltest, :reload=>true).first.last[:type].must_equal :boolean @db.schema(:booltest, :reload=>true).first.last[:db_type].must_match(/smallint/i) @db.convert_smallint_to_bool = false @db.schema(:booltest, :reload=>true).first.last[:type].must_equal :integer @db.schema(:booltest, :reload=>true).first.last[:db_type].must_match(/smallint/i) end it "should return smallints as bools and integers as integers when set" do @db.convert_smallint_to_bool = true @ds.delete @ds.insert(:b=>true, :i=>10) @ds.all.must_equal [{:b=>true, :i=>10}] @ds.delete @ds.insert(:b=>false, :i=>0) @ds.all.must_equal [{:b=>false, :i=>0}] @ds.delete @ds.insert(:b=>true, :i=>1) @ds.all.must_equal [{:b=>true, :i=>1}] @ds = @ds.with_convert_smallint_to_bool(false) @ds.delete @ds.insert(:b=>true, :i=>10) @ds.all.must_equal [{:b=>1, :i=>10}] end it "should return all smallints as integers when unset" do @db.convert_smallint_to_bool = false @ds.delete @ds.insert(:b=>true, :i=>10) @ds.all.must_equal [{:b=>1, :i=>10}] @ds.delete @ds.insert(:b=>false, :i=>0) @ds.all.must_equal [{:b=>0, :i=>0}] @ds.delete @ds.insert(:b=>1, :i=>10) @ds.all.must_equal [{:b=>1, :i=>10}] @ds.delete @ds.insert(:b=>0, :i=>0) @ds.all.must_equal [{:b=>0, :i=>0}] @ds = @ds.with_convert_smallint_to_bool(true) @ds.delete @ds.insert(:b=>true, :i=>10) @ds.all.must_equal [{:b=>true, :i=>10}] end end if DB.adapter_scheme == :ibmdb describe "Simple Dataset operations in transactions" do before do DB.create_table!(:items_insert_in_transaction) do Integer :id, :primary_key => true integer :number end @ds = DB[:items_insert_in_transaction] end after do DB.drop_table(:items_insert_in_transaction) end it "should insert correctly with a primary key specified inside a transaction" do DB.transaction do @ds.insert(:id=>100, :number=>20) @ds.count.must_equal 1 @ds.order(:id).all.must_equal [{:id=>100, :number=>20}] end end end ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/adapters/mssql_spec.rb�����������������������������������������������������������0000664�0000000�0000000�00000077474�14342141206�0020356�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������SEQUEL_ADAPTER_TEST = :mssql require_relative 'spec_helper' describe "A MSSQL database" do before do @db = DB end it "should be able to read fractional part of timestamp" do rs = @db["select getutcdate() as full_date, cast(round(datepart(millisecond, getutcdate()), 0) as int) as milliseconds"].first rs[:milliseconds].must_be_close_to(rs[:full_date].usec/1000, 2) end it "should be able to write fractional part of timestamp" do t = Time.utc(2001, 12, 31, 23, 59, 59, 
996000) (t.usec/1000).must_equal @db["select cast(round(datepart(millisecond, ?), 0) as int) as milliseconds", t].get end it "should not raise an error when getting the server version" do @db.server_version @db.dataset.server_version end it "should support LIKE without COLLATE" do begin like_without_collate = @db.like_without_collate @db.like_without_collate = true @db.get(Sequel.case({Sequel.like('a', 'a')=>1}, 2).as(:v)).must_equal 1 ensure @db.like_without_collate = like_without_collate end end end describe "A MSSQL database" do before do @db = DB @db.create_table! :test3 do Integer :value TrueClass :b end end after do @db.drop_table?(:test3) end it "should work with NOLOCK" do @db.transaction{@db[:test3].nolock.all.must_equal []} end it "should emulate boolean columns" do ds = @db[:test3] ds.insert(:value=>1, :b=>true) ds.insert(:value=>2, :b=>false) ds.insert(:value=>3, :b=>nil) ds.order(:value).select_map(:b).must_equal [true, false, nil] end end describe "A MSSQL database" do before do @db = DB end after do @db.drop_table?(:test3) end cspecify "should not modify column type when adding primary key", [:odbc] do @db.create_table(:test3) do column :row_id, "int", null: false, auto_increment: true column :deviceid, "binary", size: 7, null: false index [:row_id], type: :clustered, unique: true end @db.alter_table(:test3) do add_primary_key [:deviceid] end @db[:test3].insert(:deviceid=>Sequel.blob('abcdefg')) @db[:test3].get(:deviceid).must_equal 'abcdefg' end it "should allow creating clustered and non-clustered primary keys" do [true, false].each do |clustered| @db.create_table!(:test3) do primary_key :row_id, :clustered=>clustered String :name end end end it "should allow creating clustered and non-clustered unique constraints" do [true, false].each do |clustered| @db.create_table!(:test3) do Integer :row_id String :name unique [:name], :clustered=>clustered end end end it "should allow adding clustered and non-clustered primary keys" do [true, false].each do |clustered| @db.create_table!(:test3) do Integer :row_id, :null=>false String :name end @db.alter_table(:test3) do add_primary_key [:row_id], :clustered=>clustered end end end it "should allow adding clustered and non-clustered unique constraints" do [true, false].each do |clustered| @db.create_table!(:test3) do Integer :row_id, :null=>false String :name end @db.alter_table(:test3) do add_unique_constraint [:row_id], :clustered=>clustered end end end end describe "MSSQL decimal locale handling" do before do @locale = WIN32OLE.locale @decimal = BigDecimal('1234.56') end after do WIN32OLE.locale = @locale end it "should work with current locale" do DB.get(Sequel.cast(@decimal, 'decimal(16,4)').as(:v)).must_equal @decimal end it "should work with 1031 locale" do WIN32OLE.locale = 1031 DB.get(Sequel.cast(@decimal, 'decimal(16,4)').as(:v)).must_equal @decimal end it "should work with 1033 locale" do WIN32OLE.locale = 1033 DB.get(Sequel.cast(@decimal, 'decimal(16,4)').as(:v)).must_equal @decimal end end if DB.adapter_scheme == :ado describe "MSSQL" do before(:all) do @db = DB @db.create_table!(:test3){Integer :v3} @db.create_table!(:test4){Integer :v4} @db[:test3].import([:v3], [[1], [2]]) @db[:test4].import([:v4], [[1], [3]]) end after(:all) do @db.drop_table?(:test3, :test4) end it "should support CROSS APPLY" do @db[:test3].cross_apply(@db[:test4].where(Sequel[:test3][:v3]=>Sequel[:test4][:v4])).select_order_map([:v3, :v4]).must_equal [[1,1]] end it "should support OUTER APPLY" do 
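# OUTER APPLY works like CROSS APPLY, but preserves left-side rows with no match (similar to a LEFT JOIN LATERAL on other databases), which is why the v3=2 row appears below with a nil v4.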
@db[:test3].outer_apply(@db[:test4].where(Sequel[:test3][:v3]=>Sequel[:test4][:v4])).select_order_map([:v3, :v4]).must_equal [[1,1], [2, nil]] end cspecify "should handle time values with fractional seconds", [:ado] do # ado: Returns nil values t = Sequel::SQLTime.create(10, 20, 30, 999900) v = @db.get(Sequel.cast(t, 'time')) v = Sequel.string_to_time(v) if v.is_a?(String) pr = lambda{|x| [:hour, :min, :sec, :usec].map{|m| x.send(m)}} pr[v].must_equal(pr[t]) end cspecify "should get datetimeoffset values as Time with fractional seconds", [:odbc], [:ado], [:tinytds, proc{|db| TinyTds::VERSION < '0.9'}] do # odbc: Returns string rounded to nearest second # ado: Returns nil values # tiny_tds < 0.9: Returns wrong value for hour t = Time.local(2010, 11, 12, 10, 20, 30, 999000) v = @db.get(Sequel.cast(t, 'datetimeoffset')) v = Sequel.string_to_datetime(v) if v.is_a?(String) pr = lambda{|x| [:year, :month, :day, :hour, :min, :sec, :usec].map{|m| x.send(m)}} pr[v].must_equal(pr[t]) end end # This spec is currently disabled as the SQL Server 2008 R2 Express doesn't support # full text searching. Even if full text searching is supported, # you may need to create a full text catalog on the database first via: # CREATE FULLTEXT CATALOG ftscd AS DEFAULT describe "MSSQL full_text_search" do before do @db = DB @db.drop_table?(:posts) end after do @db.drop_table?(:posts) end it "should support fulltext indexes and full_text_search" do log do @db.create_table(:posts){Integer :id, :null=>false; String :title; String :body; index :id, :name=>:fts_id_idx, :unique=>true; full_text_index :title, :key_index=>:fts_id_idx; full_text_index [:title, :body], :key_index=>:fts_id_idx} @db[:posts].insert(:title=>'ruby rails', :body=>'y') @db[:posts].insert(:title=>'sequel', :body=>'ruby') @db[:posts].insert(:title=>'ruby scooby', :body=>'x') @db[:posts].full_text_search(:title, 'rails').all.must_equal [{:title=>'ruby rails', :body=>'y'}] @db[:posts].full_text_search([:title, :body], ['sequel', 'ruby']).all.must_equal [{:title=>'sequel', :body=>'ruby'}] @db[:posts].full_text_search(:title, :$n).call(:select, :n=>'rails').must_equal [{:title=>'ruby rails', :body=>'y'}] @db[:posts].full_text_search(:title, :$n).prepare(:select, :fts_select).call(:n=>'rails').must_equal [{:title=>'ruby rails', :body=>'y'}] end end end if false describe "MSSQL Dataset#output" do before(:all) do @db = DB @db.create_table!(:items){String :name; Integer :value} @db.create_table!(:out){String :name; Integer :value} @ds = @db[:items] end after do @ds.delete @db[:out].delete end after(:all) do @db.drop_table?(:items, :out) end it "should handle OUTPUT clauses without INTO for DELETE statements" do @ds.insert(:name=>'a', :value=>1) @ds.output(nil, [Sequel[:deleted][:name], Sequel[:deleted][:value]]).with_sql(:delete_sql).all.must_equal [{:name=>"a", :value=>1}] @ds.insert(:name=>'a', :value=>1) @ds.output(nil, [Sequel[:deleted][:name]]).with_sql(:delete_sql).all.must_equal [{:name=>"a"}] @ds.insert(:name=>'a', :value=>1) @ds.output(nil, [Sequel::SQL::ColumnAll.new(:deleted)]).with_sql(:delete_sql).all.must_equal [{:name=>"a", :value=>1}] end it "should handle OUTPUT clauses with INTO for DELETE statements" do @ds.insert(:name=>'a', :value=>1) @ds.output(:out, {:name => Sequel[:deleted][:name], :value => Sequel[:deleted][:value]}).delete @db[:out].all.must_equal [{:name=>"a", :value=>1}] end it "should handle OUTPUT clauses without INTO for INSERT statements" do @ds.output(nil, [Sequel[:inserted][:name], 
Sequel[:inserted][:value]]).with_sql(:insert_sql, :name => "name", :value => 1).all.must_equal [{:name=>"name", :value=>1}] @ds.all.must_equal [{:name=>"name", :value=>1}] end it "should handle OUTPUT clauses with INTO for INSERT statements" do @ds.output(:out, {:name => Sequel[:inserted][:name], :value => Sequel[:inserted][:value]}).insert(:name => "name", :value => 1) @db[:out].all.must_equal [{:name=>"name", :value=>1}] end it "should handle OUTPUT clauses without INTO for UPDATE statements" do @ds.insert(:name=>'a', :value=>1) @ds.output(nil, [Sequel[:inserted][:name], Sequel[:deleted][:value]]).with_sql(:update_sql, :value => 2).all.must_equal [{:name=>"a", :value=>1}] @ds.all.must_equal [{:name=>"a", :value=>2}] @ds.output(nil, [Sequel[:inserted][:name]]).with_sql(:update_sql, :value => 3).all.must_equal [{:name=>"a"}] @ds.all.must_equal [{:name=>"a", :value=>3}] @ds.output(nil, [Sequel::SQL::ColumnAll.new(:inserted)]).with_sql(:update_sql, :value => 4).all.must_equal [{:name=>"a", :value=>4}] end it "should handle OUTPUT clauses with INTO for UPDATE statements" do @ds.insert(:name=>'a', :value=>1) @ds.output(:out, {:name => Sequel[:inserted][:name], :value => Sequel[:deleted][:value]}).update(:value => 2) @db[:out].all.must_equal [{:name=>"a", :value=>1}] end it "should execute OUTPUT clauses in DELETE statements" do @ds.insert(:name => "name", :value => 1) @ds.output(:out, [Sequel[:deleted][:name], Sequel[:deleted][:value]]).delete @db[:out].all.must_equal [{:name => "name", :value => 1}] @ds.insert(:name => "name", :value => 2) @ds.output(:out, {:name => Sequel[:deleted][:name], :value => Sequel[:deleted][:value]}).delete @db[:out].order(:value).all.must_equal [{:name => "name", :value => 1}, {:name => "name", :value => 2}] end it "should execute OUTPUT clauses in INSERT statements" do @ds.output(:out, [Sequel[:inserted][:name], Sequel[:inserted][:value]]).insert(:name => "name", :value => 1) @db[:out].all.must_equal [{:name => "name", :value => 1}] @ds.output(:out, {:name => Sequel[:inserted][:name], :value => Sequel[:inserted][:value]}).insert(:name => "name", :value => 2) @db[:out].order(:value).all.must_equal [{:name => "name", :value => 1}, {:name => "name", :value => 2}] end it "should execute OUTPUT clauses in UPDATE statements" do @ds.insert(:name => "name", :value => 1) @ds.output(:out, [Sequel[:inserted][:name], Sequel[:deleted][:value]]).update(:value => 2) @db[:out].all.must_equal [{:name => "name", :value => 1}] @ds.output(:out, {:name => Sequel[:inserted][:name], :value => Sequel[:deleted][:value]}).update(:value => 3) @db[:out].order(:value).all.must_equal [{:name => "name", :value => 1}, {:name => "name", :value => 2}] end end describe "MSSQL dataset using #with and #with_recursive" do before(:all) do @db = DB @ds = DB[:x] @ds1 = @ds.with(:t, @db[:x]) @ds2 = @ds.with_recursive(:t, @db[:x], @db[:t].where(false)) @db.create_table!(:x){Integer :v; Integer :y} end before do @db[:x].insert(:v=>1, :y=>2) end after do @db[:x].delete end after(:all) do @db.drop_table?(:x) end it "should handle CTEs in UPDATE queries" do @ds1.update(:v => @db[:t].select(:y)) @ds.all.must_equal [{:v=>2, :y=>2}] @ds2.update(:v => Sequel.+(@db[:t].select(:y), 1)) @ds.all.must_equal [{:v=>3, :y=>2}] end it "should handle CTEs in DELETE queries" do @ds1.where(@db[:t].select(:y)=>1).delete @ds.all.must_equal [{:v=>1, :y=>2}] @ds1.where(@db[:t].select(:y)=>2).delete @ds.all.must_equal [] @db[:x].insert(:v=>1, :y=>2) @ds2.where(@db[:t].select(:y)=>1).delete @ds.all.must_equal [{:v=>1, :y=>2}] 
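# When the CTE subselect value does match (y = 2), the CTE-scoped delete removes the row: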
@ds2.where(@db[:t].select(:y)=>2).delete @ds.all.must_equal [] end it "should handle CTEs in INSERT queries" do @ds1.insert(:v => @db[:t].select(:y), :y => @db[:t].select(:v)) @ds.select_order_map([:v, :y]).must_equal [[1, 2], [2, 1]] @ds1.insert(:v => Sequel.+(@db[:t].where(:v=>1).select(:y), 2), :y => Sequel.+(@db[:t].where(:y=>1).select(:v), 3)) @ds.select_order_map([:v, :y]).must_equal [[1, 2], [2, 1], [4, 5]] end it "should handle WITH clause on joined dataset" do @ds.cross_join(@ds1.select(Sequel[:v].as(:v1), Sequel[:y].as(:y1))).all.must_equal [{:v=>1, :y=>2, :v1=>1, :y1=>2}] @ds.cross_join(@ds2.select(Sequel[:v].as(:v1), Sequel[:y].as(:y1))).all.must_equal [{:v=>1, :y=>2, :v1=>1, :y1=>2}] end end describe "MSSQL::Dataset#import" do before do @db = DB @ds = @db[:test] end after do @db.drop_table?(:test) end it "#import should work correctly with an arbitrary output value" do @db.create_table!(:test){primary_key :x; Integer :y} @ds.output(nil, [Sequel[:inserted][:y], Sequel[:inserted][:x]]).import([:y], [[3], [4]]).must_equal [{:y=>3, :x=>1}, {:y=>4, :x=>2}] @ds.all.must_equal [{:x=>1, :y=>3}, {:x=>2, :y=>4}] end it "should handle WITH statements" do @db.create_table!(:test){Integer :x; Integer :y} @db[:testx].with(:testx, @db[:test]).import([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice => 2) @ds.select_order_map([:x, :y]).must_equal [[1, 2], [3, 4], [5, 6]] end end describe "MSSQL joined datasets" do before do @db = DB @db.create_table!(:a){Integer :v} @db[:a].insert(:v=>1) end after do @db.drop_table?(:a) end it "should handle DELETE statements" do @db[:a].inner_join(Sequel[:a].as(:b), :v=>:v).delete.must_equal 1 @db[:a].empty?.must_equal true end it "should handle UPDATE statements" do @db[:a].inner_join(Sequel[:a].as(:b), :v=>:v).update(:v=>2).must_equal 1 @db[:a].all.must_equal [{:v=>2}] end end describe "Offset support" do before do @db = DB @db.create_table!(:i){Integer :id; Integer :parent_id} @ds = @db[:i].order(:id) @hs = [] @ds = @ds.with_row_proc(proc{|r| @hs << r.dup; r[:id] *= 2; r[:parent_id] *= 3; r}) @ds.import [:id, :parent_id], [[1,nil],[2,nil],[3,1],[4,1],[5,3],[6,5]] end after do @db.drop_table?(:i) end it "should return correct rows" do @ds.limit(2, 2).all.must_equal [{:id=>6, :parent_id=>3}, {:id=>8, :parent_id=>3}] end it "should not include offset column in hashes passed to row_proc" do @ds.limit(2, 2).all @hs.must_equal [{:id=>3, :parent_id=>1}, {:id=>4, :parent_id=>1}] end it "should support returning limited results with ties" do @ds.delete @ds.insert(:id => 1) @ds.insert(:id => 1) @ds.insert(:id => 2) @ds.insert(:id => 2) @ds.order(:id).select(:id).limit(1).select_order_map(:id).must_equal [1] @ds.order(:id).select(:id).limit(1).with_ties.select_order_map(:id).must_equal [1, 1] @ds.order(:id).select(:id).limit(2, 1).select_order_map(:id).must_equal [1, 2] proc{@ds.order(:id).select(:id).limit(2, 1).with_ties.select_order_map(:id)}.must_raise Sequel::Error end end describe "Update/Delete on limited datasets" do before do @db = DB @db.create_table!(:i){Integer :id} @ds = @db[:i] @ds.import [:id], [[1], [2]] end after do @db.drop_table?(:i) end it "should handle deletes and updates on limited datasets" do @ds.limit(1).update(:id=>Sequel[:id]+10) [[2, 11], [1, 12]].must_include @ds.select_order_map(:id) @ds.limit(1).delete [[1], [2]].must_include @ds.select_order_map(:id) end it "should raise error for updates and deletes on datasets with offsets or limits with orders" do 
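# Note on the expectation here: MSSQL's TOP-based UPDATE/DELETE emulation cannot express an OFFSET, or an ORDER BY combined with a limit, so Sequel raises InvalidOperation rather than emitting invalid SQL: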
proc{@ds.offset(1).delete}.must_raise Sequel::InvalidOperation proc{@ds.offset(1).update(:id=>Sequel[:id]+10)}.must_raise Sequel::InvalidOperation proc{@ds.limit(1, 1).delete}.must_raise Sequel::InvalidOperation proc{@ds.limit(1, 1).update(:id=>Sequel[:id]+10)}.must_raise Sequel::InvalidOperation proc{@ds.order(:id).limit(1).update(:id=>Sequel[:id]+10)}.must_raise Sequel::InvalidOperation proc{@ds.order(:id).limit(1).delete}.must_raise Sequel::InvalidOperation end end if DB.dataset.send(:is_2012_or_later?) describe "Common Table Expressions" do before do @db = DB @db.create_table!(:i1){Integer :id; Integer :parent_id} @db.create_table!(:i2){Integer :id; Integer :parent_id} @ds = @db[:i1] @ds2 = @db[:i2] @ds.import [:id, :parent_id], [[1,nil],[2,nil],[3,1],[4,1],[5,3],[6,5]] end after do @db.drop_table?(:i1, :i2) end it "using #with should be able to update" do @ds.insert(:id=>1) @ds2.insert(:id=>2, :parent_id=>1) @ds2.insert(:id=>3, :parent_id=>2) @ds.with(:t, @ds2).filter(:id => @db[:t].select(:id)).update(:parent_id => @db[:t].filter(:id => Sequel[:i1][:id]).select(:parent_id).limit(1)) @ds[:id => 1].must_equal(:id => 1, :parent_id => nil) @ds[:id => 2].must_equal(:id => 2, :parent_id => 1) @ds[:id => 3].must_equal(:id => 3, :parent_id => 2) @ds[:id => 4].must_equal(:id => 4, :parent_id => 1) end it "using #with_recursive should be able to update" do ds = @ds.with_recursive(:t, @ds.filter(:parent_id=>1).or(:id => 1), @ds.join(:t, :i=>:parent_id).select(Sequel[:i1][:id], Sequel[:i1][:parent_id]), :args=>[:i, :pi]) ds.exclude(:id => @db[:t].select(:i)).update(:parent_id => 1) @ds[:id => 1].must_equal(:id => 1, :parent_id => nil) @ds[:id => 2].must_equal(:id => 2, :parent_id => 1) @ds[:id => 5].must_equal(:id => 5, :parent_id => 3) end it "using #with should be able to insert" do @ds2.insert(:id=>7) @ds.with(:t, @ds2).insert(@db[:t]) @ds[:id => 7].must_equal(:id => 7, :parent_id => nil) end it "using #with_recursive should be able to insert" do ds = @ds2.with_recursive(:t, @ds.filter(:parent_id=>1), @ds.join(:t, :i=>:parent_id).select(Sequel[:i1][:id], Sequel[:i1][:parent_id]), :args=>[:i, :pi]) ds.insert @db[:t] @ds2.all.must_equal [{:id => 3, :parent_id => 1}, {:id => 4, :parent_id => 1}, {:id => 5, :parent_id => 3}, {:id => 6, :parent_id => 5}] end it "using #with should be able to delete" do @ds2.insert(:id=>6) @ds2.insert(:id=>5) @ds2.insert(:id=>4) @ds.with(:t, @ds2).filter(:id => @db[:t].select(:id)).delete @ds.all.must_equal [{:id => 1, :parent_id => nil}, {:id => 2, :parent_id => nil}, {:id => 3, :parent_id => 1}] end it "using #with_recursive should be able to delete" do @ds.insert(:id=>7, :parent_id=>2) ds = @ds.with_recursive(:t, @ds.filter(:parent_id=>1), @ds.join(:t, :i=>:parent_id).select(Sequel[:i1][:id], Sequel[:i1][:parent_id]), :args=>[:i, :pi]) ds.filter(Sequel[:i1][:id] => @db[:t].select(:i)).delete @ds.all.must_equal [{:id => 1, :parent_id => nil}, {:id => 2, :parent_id => nil}, {:id => 7, :parent_id => 2}] end it "using #with should be able to import" do @ds2.insert(:id=>7) @ds.with(:t, @ds2).import [:id, :parent_id], @db[:t].select(:id, :parent_id) @ds[:id => 7].must_equal(:id => 7, :parent_id => nil) end it "using #with_recursive should be able to import" do ds = @ds2.with_recursive(:t, @ds.filter(:parent_id=>1), @ds.join(:t, :i=>:parent_id).select(Sequel[:i1][:id], Sequel[:i1][:parent_id]), :args=>[:i, :pi]) ds.import [:id, :parent_id], @db[:t].select(:i, :pi) @ds2.all.must_equal [{:id => 3, :parent_id => 1}, {:id => 4, :parent_id => 1}, {:id => 5, :parent_id => 
3}, {:id => 6, :parent_id => 5}] end end describe "MSSQL::Dataset#insert" do before do @db = DB @db.create_table!(:test5){primary_key :xid; Integer :value} @db.create_table! :test4 do String :name, :size => 20 column :value, 'varbinary(max)' end @ds = @db[:test5] end after do @db.drop_table?(:test5, :test4) end it "should have insert_select return nil if disable_insert_output is used" do @ds.disable_insert_output.insert_select(:value=>10).must_be_nil end it "should have insert_select return nil if the server version is not 2005+" do @ds = @ds.with_extend do def server_version() 8000760 end end @ds.insert_select(:value=>10).must_be_nil end it "should have insert_select insert the record and return the inserted record" do h = @ds.insert_select(:value=>10) h[:value].must_equal 10 @ds.first(:xid=>h[:xid])[:value].must_equal 10 end cspecify "should allow large text and binary values", [:odbc] do blob = Sequel::SQL::Blob.new("0" * (65*1024)) @db[:test4].insert(:name => 'max varbinary test', :value => blob) b = @db[:test4].where(:name => 'max varbinary test').get(:value) b.length.must_equal blob.length b.must_equal blob end it "should play nicely with simple_select_all?" do DB[:test4].disable_insert_output.send(:simple_select_all?).must_equal true end end describe "MSSQL::Dataset#into" do before do @db = DB @db.drop_table?(:t, :new) end after do @db.drop_table?(:t, :new) end it "should select rows into a new table" do @db.create_table!(:t) {Integer :id; String :value} @db[:t].insert(:id => 1, :value => "test") @db[:t].into(:new).with_sql(:select_sql).insert @db[:new].all.must_equal [{:id => 1, :value => "test"}] end end describe "A MSSQL database" do before do @db = DB end after do @db.drop_table?(:a) end it "should handle many existing types for set_column_allow_null" do @db.create_table!(:a){column :a, 'integer'} @db.alter_table(:a){set_column_allow_null :a, false} @db.create_table!(:a){column :a, 'decimal(24, 2)'} @db.alter_table(:a){set_column_allow_null :a, false} @db.schema(:a).first.last[:column_size].must_equal 24 @db.schema(:a).first.last[:scale].must_equal 2 @db.create_table!(:a){column :a, 'decimal(10)'} @db.schema(:a).first.last[:column_size].must_equal 10 @db.schema(:a).first.last[:scale].must_equal 0 @db.alter_table(:a){set_column_allow_null :a, false} @db.create_table!(:a){column :a, 'nchar(2)'} @db.alter_table(:a){set_column_allow_null :a, false} s = @db.schema(:a).first.last (s[:max_chars] || s[:column_size]).must_equal 2 end end describe "MSSQL::Database#rename_table" do it "should work on non-schema bound tables which need escaping" do DB.create_table! :'foo bar' do text :name end DB.rename_table 'foo bar', 'foo' DB.drop_table :foo end it "should work on schema bound tables within the same schema" do DB.execute(<<-SQL) IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = 'MY') EXECUTE sp_executesql N'create schema MY' SQL DB.create_table! 
Sequel[:MY][:foo] do text :name end DB.rename_table Sequel[:MY][:foo], Sequel[:MY][:bar] DB.rename_table Sequel[:MY][:bar], :foo DB.drop_table Sequel[:MY][:foo] end end describe "MSSQL::Dataset#count" do it "should work with a distinct query with an order clause" do DB.create_table!(:items){String :name; Integer :value} DB[:items].insert(:name => "name", :value => 1) DB[:items].insert(:name => "name", :value => 1) DB[:items].select(:name, :value).distinct.order(:name).count.must_equal 1 DB[:items].select(:name, :value).group(:name, :value).order(:name).count.must_equal 1 end end describe "MSSQL::Database#create_table" do it "should support collate with various other column options" do DB.create_table!(:items){ String :name, :size => 128, :collate => :sql_latin1_general_cp1_ci_as, :default => 'foo', :null => false, :unique => true} DB[:items].insert DB[:items].select_map(:name).must_equal ["foo"] end end describe "MSSQL::Database#mssql_unicode_strings = false" do before do DB.mssql_unicode_strings = false end after do DB.drop_table?(:items) DB.mssql_unicode_strings = true end it "should work correctly" do DB.create_table!(:items){String :name} DB[:items].mssql_unicode_strings.must_equal false DB[:items].insert(:name=>'foo') DB[:items].select_map(:name).must_equal ['foo'] end it "should be overridable at the dataset level" do DB.create_table!(:items){String :name} ds = DB[:items] ds.mssql_unicode_strings.must_equal false ds1 = ds.with_mssql_unicode_strings(true) ds.mssql_unicode_strings.must_equal false ds1.mssql_unicode_strings.must_equal true ds1.insert(:name=>'foo') ds1.select_map(:name).must_equal ['foo'] end end describe "A MSSQL database adds index with include" do before :all do @table_name = :test_index_include @db = DB @db.create_table! @table_name do integer :col1 integer :col2 integer :col3 end end after :all do @db.drop_table? @table_name end it "should be able add index with include" do @db.alter_table @table_name do add_index [:col1], :include => [:col2,:col3] end @db.indexes(@table_name).keys.must_include(:"#{@table_name}_col1_index") end end describe "MSSQL set_column_allow_null" do before do @db = DB end after do @db.drop_table?(:test3) end it "should work with nvarchar(MAX) columns" do @db.create_table!(:test3) do column :t, 'nvarchar(MAX)' end @db.alter_table(:test3) do set_column_not_null :t end end it "should work with text columns" do @db.create_table!(:test3) do column :t, 'text' end @db.alter_table(:test3) do set_column_not_null :t end end end describe "MSSQL::Database#drop_column with a schema" do before do DB.run "create schema test" rescue nil end after do DB.drop_table(Sequel[:test][:items]) DB.run "drop schema test" rescue nil end it "drops columns with a default value" do DB.create_table!(Sequel[:test][:items]){ Integer :id; String :name, :default => 'widget' } DB.drop_column(Sequel[:test][:items], :name) DB[Sequel[:test][:items]].columns.must_equal [:id] end end describe "Database#foreign_key_list" do before(:all) do DB.create_table! :items do primary_key :id Integer :sku end DB.create_table! :prices do Integer :item_id datetime :valid_from float :price primary_key [:item_id, :valid_from] foreign_key [:item_id], :items, :key => :id, :name => :fk_prices_items end DB.create_table! 
:sales do Integer :id Integer :price_item_id datetime :price_valid_from foreign_key [:price_item_id, :price_valid_from], :prices, :key => [:item_id, :valid_from], :name => :fk_sales_prices, :on_delete => :cascade end end after(:all) do DB.drop_table :sales DB.drop_table :prices DB.drop_table :items end it "should support typical foreign keys" do DB.foreign_key_list(:prices).must_equal [{:name => :fk_prices_items, :table => :items, :columns => [:item_id], :key => [:id], :on_update => :no_action, :on_delete => :no_action }] end it "should support a foreign key with multiple columns" do DB.foreign_key_list(:sales).must_equal [{:name => :fk_sales_prices, :table => :prices, :columns => [:price_item_id, :price_valid_from], :key => [:item_id, :valid_from], :on_update => :no_action, :on_delete => :cascade }] end describe "with multiple schemas" do before(:all) do DB.execute_ddl "create schema vendor" DB.create_table! Sequel[:vendor][:vendors] do primary_key :id varchar :name end DB.create_table! Sequel[:vendor][:mapping] do Integer :vendor_id Integer :item_id foreign_key [:vendor_id], Sequel[:vendor][:vendors], :name => :fk_mapping_vendor foreign_key [:item_id], :items, :name => :fk_mapping_item end end after(:all) do DB.drop_table? Sequel[:vendor][:mapping] DB.drop_table? Sequel[:vendor][:vendors] DB.execute_ddl "drop schema vendor" end it "should support mixed schema bound tables" do DB.foreign_key_list(Sequel[:vendor][:mapping]).sort_by{|h| h[:name].to_s}.must_equal [{:name => :fk_mapping_item, :table => :items, :columns => [:item_id], :key => [:id], :on_update => :no_action, :on_delete => :no_action }, {:name => :fk_mapping_vendor, :table => Sequel.qualify(:vendor, :vendors), :columns => [:vendor_id], :key => [:id], :on_update => :no_action, :on_delete => :no_action }] end end end describe "MSSQL optimistic locking plugin" do before do @db = DB @db.create_table! 
:items do primary_key :id String :name, :size => 20 column :timestamp, 'timestamp' end end after do @db.drop_table?(:items) end it "should not allow stale updates" do c = Class.new(Sequel::Model(:items)) c.plugin :mssql_optimistic_locking o = c.create(:name=>'test') o2 = c.first ts = o.timestamp ts.wont_equal nil o.name = 'test2' o.save o.timestamp.wont_equal ts proc{o2.save}.must_raise(Sequel::NoExistingObject) end end unless DB.adapter_scheme == :odbc describe "MSSQL Stored Procedure support" do before do @db = DB @now = DateTime.now.to_s @db.execute('CREATE PROCEDURE dbo.SequelTest (@Input varchar(25), @IntegerInput int, @Output varchar(25) OUTPUT, @IntegerOutput int OUTPUT) AS BEGIN SET @Output = @Input SET @IntegerOutput = @IntegerInput RETURN @IntegerInput END') end after do @db.execute('DROP PROCEDURE dbo.SequelTest') end describe "with unnamed parameters" do it "should return a hash of output variables" do r = @db.call_mssql_sproc(:SequelTest, {:args => [@now, 1, :output, :output]}) r.must_be_kind_of(Hash) r.values_at(:var2, :var3).must_equal [@now, '1'] end it "should support typed output variables" do @db.call_mssql_sproc(:SequelTest, {:args => [@now, 1, :output, [:output, 'int']]})[:var3].must_equal 1 end it "should support named output variables" do @db.call_mssql_sproc(:SequelTest, {:args => [@now, 1, [:output, nil, 'output'], :output]})[:output].must_equal @now end it "should return the number of Affected Rows" do @db.call_mssql_sproc(:SequelTest, {:args => [@now, 1, :output, :output]})[:numrows].must_equal 1 end it "should return the Result Code" do @db.call_mssql_sproc(:SequelTest, {:args => [@now, 1, :output, :output]})[:result].must_equal 1 end end describe "with named parameters" do it "should return a hash of output variables" do r = @db.call_mssql_sproc(:SequelTest, :args => { 'Input' => @now, 'IntegerInput' => 1, 'Output' => [:output, nil, 'output'], 'IntegerOutput' => [:output, nil, 'integer_output'] }) r.must_be_kind_of(Hash) r.values_at(:output, :integer_output).must_equal [@now, '1'] end it "should support typed output variables" do @db.call_mssql_sproc(:SequelTest, :args => { 'Input' => @now, 'IntegerInput' => 1, 'Output' => [:output, nil, 'output'], 'IntegerOutput' => [:output, 'int', 'integer_output'] })[:integer_output].must_equal 1 end it "should return the number of Affected Rows" do @db.call_mssql_sproc(:SequelTest, :args => { 'Input' => @now, 'IntegerInput' => 1, 'Output' => [:output, nil, 'output'], 'IntegerOutput' => [:output, nil, 'integer_output'] })[:numrows].must_equal 1 end it "should return the Result Code" do @db.call_mssql_sproc(:SequelTest, :args => { 'Input' => @now, 'IntegerInput' => 1, 'Output' => [:output, nil, 'output'], 'IntegerOutput' => [:output, nil, 'integer_output'] })[:result].must_equal 1 end end end unless DB.adapter_scheme == :odbc 
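# A condensed usage sketch of the stored procedure API exercised above (hypothetical input values; unnamed output variables are keyed by argument position as :var0, :var1, ...): # #   r = DB.call_mssql_sproc(:SequelTest, :args=>['x', 1, :output, [:output, 'int']]) #   r[:var2]    # => 'x' (string output variable) #   r[:var3]    # => 1 (typed integer output variable) #   r[:numrows] # => 1 (affected rows) #   r[:result]  # => 1 (procedure return code)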
����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/adapters/mysql_spec.rb�����������������������������������������������������������0000664�0000000�0000000�00000121016�14342141206�0020342�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������SEQUEL_ADAPTER_TEST = :mysql require_relative 'spec_helper' describe "MySQL", '#create_table' do before do @db = DB @db.test_connection end after do @db.drop_table?(:dolls) end it "should create a temporary table" do @db.disconnect @db.create_table(:tmp_dolls, :temp => true, :engine => 'MyISAM', :charset => 'latin2'){text :name} @db.table_exists?(:tmp_dolls).must_equal true @db.disconnect @db.table_exists?(:tmp_dolls).must_equal false end it "should not use a default for a String :text=>true type" do @db.create_table(:dolls){String :name, :text=>true, :default=>'blah'} @db[:dolls].insert @db[:dolls].all.must_equal [{:name=>nil}] end it "should not use a default for a File type" do @db.create_table(:dolls){File :name, :default=>'blah'} @db[:dolls].insert @db[:dolls].all.must_equal [{:name=>nil}] end it "should respect the size option for File type" do @db.create_table(:dolls) do File :n1 File :n2, :size=>:tiny File :n3, :size=>:medium File :n4, :size=>:long File :n5, :size=>255 end @db.schema(:dolls).map{|k, v| v[:db_type]}.must_equal %w"blob tinyblob mediumblob longblob blob" end it "should include an :auto_increment schema attribute if auto incrementing" do @db.create_table(:dolls) do primary_key :n4 Integer :n2 String :n3 end @db.schema(:dolls).map{|k, v| v[:auto_increment]}.must_equal [true, nil, nil] end it "should support :on_update_current_timestamp column option" do t = Time.now-60 @db.create_table(:dolls) do Integer :id timestamp :ts, :default=>t, :on_update_current_timestamp=>true datetime :dt, :default=>t, :on_update_current_timestamp=>true end @db[:dolls].insert(:id=>1) ts1, dt1 = @db[:dolls].get([:ts, :dt]) @db[:dolls].update(:id=>2) ts2, dt2 = @db[:dolls].get([:ts, :dt]) ts1.wont_equal ts2 dt1.wont_equal dt2 end it "should support collate with various other column options" do @db.create_table!(:dolls){ String :name, :size=>128, :collate=>:utf8_bin, :default=>'foo', :null=>false, :unique=>true} @db[:dolls].insert @db[:dolls].select_map(:name).must_equal ["foo"] end it "should be able to parse the default value for set and enum types" do @db.create_table!(:dolls){column :t, "set('a', 'b', 'c', 'd')", :default=>'a,b'} @db.schema(:dolls).first.last[:ruby_default].must_equal 'a,b' @db.create_table!(:dolls){column :t, "enum('a', 'b', 'c', 'd')", :default=>'b'} @db.schema(:dolls).first.last[:ruby_default].must_equal 'b' end it "should allow setting auto_increment for existing column" do @db.create_table(:dolls){Integer :a, :primary_key=>true} @db.schema(:dolls).first.last[:auto_increment].must_equal false @db.set_column_type :dolls, :a, Integer, :auto_increment=>true @db.schema(:dolls).first.last[:auto_increment].must_equal true end it "should create generated column" do skip("generated columns not supported, skipping test") unless @db.supports_generated_columns? 
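# Generated columns (GENERATED ALWAYS AS ...) are computed by the server from other columns; Sequel's schema hash exposes them via the :generated entry checked below.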
@db.create_table(:dolls){String :a; String :b, generated_always_as: Sequel.function(:CONCAT, :a, 'plus')} @db.schema(:dolls).map{|_,v| v[:generated]}.must_equal [false, true] end it "should include an :extra schema attribute" do @db.create_table(:dolls) {Integer :a, :primary_key => true } assert @db.schema(:dolls).first.last.key?(:extra) end end if [:mysql, :mysql2].include?(DB.adapter_scheme) describe "Sequel::MySQL::Database#convert_tinyint_to_bool" do before do @db = DB @db.create_table(:booltest){column :b, 'tinyint(1)'; column :i, 'tinyint(4)'} @ds = @db[:booltest] end after do @db.convert_tinyint_to_bool = true @db.drop_table?(:booltest) end it "should consider tinyint(1) datatypes as boolean if set, but not larger tinyints" do @db.schema(:booltest, :reload=>true).map{|_, s| s[:type]}.must_equal [:boolean, :integer] @db.convert_tinyint_to_bool = false @db.schema(:booltest, :reload=>true).map{|_, s| s[:type]}.must_equal [:integer, :integer] end it "should return tinyint(1)s as bools and tinyint(4)s as integers when set" do @db.convert_tinyint_to_bool = true @ds.delete @ds.insert(:b=>true, :i=>10) @ds.all.must_equal [{:b=>true, :i=>10}] @ds.delete @ds.insert(:b=>false, :i=>0) @ds.all.must_equal [{:b=>false, :i=>0}] @ds.delete @ds.insert(:b=>true, :i=>1) @ds.all.must_equal [{:b=>true, :i=>1}] end it "should return all tinyints as integers when unset" do @db.convert_tinyint_to_bool = false @ds.delete @ds.insert(:b=>true, :i=>10) @ds.all.must_equal [{:b=>1, :i=>10}] @ds.delete @ds.insert(:b=>false, :i=>0) @ds.all.must_equal [{:b=>0, :i=>0}] @ds.delete @ds.insert(:b=>1, :i=>10) @ds.all.must_equal [{:b=>1, :i=>10}] @ds.delete @ds.insert(:b=>0, :i=>0) @ds.all.must_equal [{:b=>0, :i=>0}] end it "should allow disabling the conversion on a per-dataset basis" do @db.convert_tinyint_to_bool = true ds = @ds.with_extend do def cast_tinyint_integer?(f) true end #mysql def convert_tinyint_to_bool?() false end #mysql2 end ds.delete ds.insert(:b=>true, :i=>10) ds.all.must_equal [{:b=>1, :i=>10}] @ds.all.must_equal [{:b=>true, :i=>10}] end end end describe "A MySQL dataset" do before do DB.create_table(:items){String :name; Integer :value} @d = DB[:items] end after do DB.drop_table?(:items) end it "should handle large unsigned smallint/integer values" do DB.alter_table(:items){set_column_type :value, 'smallint unsigned'} @d.insert(:value=>(1 << 15) + 1) @d.get(:value).must_equal((1 << 15) + 1) DB.alter_table(:items){set_column_type :value, 'integer unsigned'} @d.update(:value=>(1 << 31) + 1) @d.get(:value).must_equal((1 << 31) + 1) DB.alter_table(:items){set_column_type :value, 'bigint unsigned'} @d.update(:value=>(1 << 63) + 1) @d.get(:value).must_equal((1 << 63) + 1) end it "should support ORDER clause in UPDATE statements" do @d.order(:name).update_sql(:value => 1).must_equal 'UPDATE `items` SET `value` = 1 ORDER BY `name`' end it "should support updating a limited dataset" do @d.import [:value], [[2], [3]] @d.limit(1).update(:value => 4).must_equal 1 [[2,4], [3,4]].must_include @d.select_order_map(:value) end it "should support updating a ordered, limited dataset" do @d.import [:value], [[2], [3]] @d.order(:value).limit(1).update(:value => 4).must_equal 1 @d.select_order_map(:value).must_equal [3,4] end it "should raise error for updating a dataset with an offset" do proc{@d.offset(1).update(:value => 4)}.must_raise Sequel::InvalidOperation proc{@d.order(:value).offset(1).update(:value => 4)}.must_raise Sequel::InvalidOperation end it "should support regexps" do @d.insert(:name => 'abc', :value => 
1) @d.insert(:name => 'bcd', :value => 2) @d.filter(:name => /bc/).count.must_equal 2 @d.filter(:name => /^bc/).count.must_equal 1 end it "should have explain output" do @d.explain.must_be_kind_of(String) @d.explain(:extended=>true).must_be_kind_of(String) end it "should correctly literalize strings with comment backslashes in them" do @d.delete @d.insert(:name => ':\\') @d.first[:name].must_equal ':\\' end it "should handle prepared statements with on_duplicate_key_update" do @d.db.add_index :items, :value, :unique=>true ds = @d.on_duplicate_key_update ps = ds.prepare(:insert, :insert_user_id_feature_name, :value => :$v, :name => :$n) ps.call(:v => 1, :n => 'a') ds.all.must_equal [{:value=>1, :name=>'a'}] ps.call(:v => 1, :n => 'b') ds.all.must_equal [{:value=>1, :name=>'b'}] end it "should support generated columns" do skip("generated columns not supported, skipping test") unless DB.supports_generated_columns? DB.alter_table(:items) {add_column :b, String, :generated_always_as => Sequel.function(:CONCAT, :name, 'plus')} @d.insert(name: 'hello') @d.first[:b].must_equal 'helloplus' end end describe "Dataset#distinct" do before do @db = DB @db.create_table!(:a) do Integer :a Integer :b end @ds = @db[:a] end after do @db.drop_table?(:a) end it "#distinct with arguments should return results distinct on those arguments" do skip("ONLY_FULL_GROUP_BY sql_mode set, skipping DISTINCT ON emulation test") if @db.get(Sequel.lit '@@sql_mode').include?('ONLY_FULL_GROUP_BY') @ds.insert(20, 10) @ds.insert(30, 10) @ds.order(:b, :a).distinct.map(:a).must_equal [20, 30] @ds.order(:b, Sequel.desc(:a)).distinct.map(:a).must_equal [30, 20] # MySQL doesn't respect orders when using the nonstandard GROUP BY [[20], [30]].must_include(@ds.order(:b, :a).distinct(:b).map(:a)) end end describe "MySQL join expressions" do before(:all) do @ds = DB[:nodes] DB.create_table!(:nodes){Integer :id; Integer :y} DB.create_table!(:n1){Integer :id} DB.create_table!(:n2){Integer :y} @ds.insert(:id=>1, :y=>2) DB[:n1].insert(1) DB[:n2].insert(2) end after(:all) do DB.drop_table?(:n2, :n1, :nodes) end it "should support straight joins (force left table to be read before right)" do @ds.join_table(:straight, :n1).all.must_equal [{:id=>1, :y=>2}] end it "should support natural joins on multiple tables." do @ds.join_table(:natural_left_outer, [:n1, :n2]).all.must_equal [{:id=>1, :y=>2}] end it "should support straight joins on multiple tables." do @ds.join_table(:straight, [:n1, :n2]).all.must_equal [{:id=>1, :y=>2}] end end describe "A MySQL database" do after do DB.drop_table?(:test_innodb) end it "should handle the creation and dropping of an InnoDB table with foreign keys" do DB.create_table!(:test_innodb, :engine=>:InnoDB){primary_key :id; foreign_key :fk, :test_innodb, :key=>:id} end it "should handle qualified tables in #indexes" do DB.create_table!(:test_innodb){primary_key :id; String :name; index :name, :unique=>true, :name=>:test_innodb_name_idx} DB.indexes(Sequel.qualify(DB.get{database.function}, :test_innodb)).must_equal(:test_innodb_name_idx=>{:unique=>true, :columns=>[:name]}) end end describe "A MySQL database" do before(:all) do @db = DB @db.create_table! 
:test2 do text :name Integer :value end end after(:all) do @db.drop_table?(:test2) end it "should provide the server version" do @db.server_version.must_be :>=, 40000 end it "should support for_share" do @db[:test2].delete @db.transaction{@db[:test2].for_share.all.must_equal []} end it "should support column operations" do @db.add_column :test2, :xyz, :text @db[:test2].columns.must_equal [:name, :value, :xyz] @db[:test2].insert(:name => 'mmm', :value => 111, :xyz => '000') @db[:test2].first[:xyz].must_equal '000' @db[:test2].columns.must_equal [:name, :value, :xyz] @db.drop_column :test2, :xyz @db[:test2].columns.must_equal [:name, :value] @db[:test2].delete @db.add_column :test2, :xyz, :text @db[:test2].insert(:name => 'mmm', :value => 111, :xyz => 'qqqq') @db[:test2].columns.must_equal [:name, :value, :xyz] @db.rename_column :test2, :xyz, :zyx, :type => :text @db[:test2].columns.must_equal [:name, :value, :zyx] @db[:test2].first[:zyx].must_equal 'qqqq' @db[:test2].delete @db.add_column :test2, :tre, :text @db[:test2].insert(:name => 'mmm', :value => 111, :tre => 'qqqq') @db[:test2].columns.must_equal [:name, :value, :zyx, :tre] @db.rename_column :test2, :tre, :ert, :type => :varchar, :size=>255 @db[:test2].columns.must_equal [:name, :value, :zyx, :ert] @db[:test2].first[:ert].must_equal 'qqqq' @db.add_column :test2, :xyz, :float @db[:test2].delete @db[:test2].insert(:name => 'mmm', :value => 111, :xyz => 56.78) @db.set_column_type :test2, :xyz, :integer @db[:test2].first[:xyz].must_equal 57 @db.alter_table :test2 do add_index :value, :unique=>true add_foreign_key :value2, :test2, :key=>:value end @db[:test2].columns.must_equal [:name, :value, :zyx, :ert, :xyz, :value2] @db.alter_table :test2 do drop_foreign_key :value2 drop_index :value end end end describe "A MySQL database with table options" do before do @options = {:engine=>'MyISAM', :charset=>'latin1', :collate => 'latin1_swedish_ci'} @db = DB @db.default_engine = 'InnoDB' @db.default_charset = 'utf8' @db.default_collate = 'utf8_general_ci' @db.drop_table?(:items) end after do @db.drop_table?(:items) @db.default_engine = nil @db.default_charset = nil @db.default_collate = nil end it "should allow to pass custom options (engine, charset, collate) for table creation" do @db.create_table(:items, @options){Integer :size; text :name} @db.transaction(:rollback=>:always) do @db[:items].insert(:size=>1) end @db[:items].all.must_equal [{:size=>1, :name=>nil}] end it "should use default options if specified (engine, charset, collate) for table creation" do @db.create_table(:items){Integer :size; text :name} @db.transaction(:rollback=>:always) do @db[:items].insert(:size=>1) end @db[:items].all.must_equal [] end it "should not use default if option has a nil value" do @db.default_engine = 'non_existent_engine' @db.create_table(:items, :engine=>nil, :charset=>nil, :collate=>nil){Integer :size; text :name} end end describe "A MySQL database" do before do @db = DB @db.drop_table?(:items) end after do @db.drop_table?(:items, :users) end it "should support defaults for boolean columns" do @db.create_table(:items){TrueClass :active1, :default=>true; FalseClass :active2, :default => false} @db[:items].insert @db[:items].get([:active1, :active2]).must_equal [true, false] @db[:items].get([Sequel.cast(:active1, Integer).as(:v1), Sequel.cast(:active2, Integer).as(:v2)]).must_equal [1, 0] end it "should correctly handle CREATE TABLE statements with foreign keys" do @db.create_table(:items){primary_key :id; foreign_key :p_id, :items, :key => :id, :null => 
false, :on_delete => :cascade} @db[:items].insert(:id=>1, :p_id=>1) @db[:items].insert(:id=>2, :p_id=>1) @db[:items].where(:id=>1).delete @db[:items].count.must_equal 0 end it "should correctly handle CREATE TABLE statements with foreign keys, when :key != the default (:id)" do @db.create_table(:items){primary_key :id; Integer :other_than_id; foreign_key :p_id, :items, :key => :other_than_id, :null => false, :on_delete => :cascade} @db[:items].insert(:id=>1, :other_than_id=>2, :p_id=>2) @db[:items].insert(:id=>2, :other_than_id=>3, :p_id=>2) @db[:items].where(:id=>1).delete @db[:items].count.must_equal 0 end it "should correctly handle ALTER TABLE statements with foreign keys" do @db.create_table(:items){Integer :id} @db.create_table(:users){primary_key :id} @db.alter_table(:items){add_foreign_key :p_id, :users, :key => :id, :null => false, :on_delete => :cascade} @db[:users].insert(:id=>1) @db[:items].insert(:id=>2, :p_id=>1) @db[:users].where(:id=>1).delete @db[:items].count.must_equal 0 end it "should correctly format ALTER TABLE statements with named foreign keys" do @db.create_table(:items){Integer :id} @db.create_table(:users){primary_key :id} @db.alter_table(:items){add_foreign_key :p_id, :users, :key => :id, :null => false, :on_delete => :cascade, :foreign_key_constraint_name => :pk_items__users } @db[:users].insert(:id=>1) @db[:items].insert(:id=>2, :p_id=>1) @db[:users].where(:id=>1).delete @db[:items].count.must_equal 0 end it "should correctly handle add_column :after option" do @db.create_table(:items){Integer :id; Integer :value} @db.alter_table(:items){add_column :name, String, :after=>:id} @db[:items].columns.must_equal [:id, :name, :value] end it "should correctly handle add_column :first option" do @db.create_table(:items){Integer :id; Integer :value} @db.alter_table(:items){add_column :name, String, :first => true} @db[:items].columns.must_equal [:name, :id, :value] end it "should correctly handle add_foreign_key :first option" do @db.create_table(:items){primary_key :id; Integer :value} @db.alter_table(:items){add_foreign_key :parent_id, :items, :first => true} @db[:items].columns.must_equal [:parent_id, :id, :value] end it "should have rename_column support keep existing options" do @db.create_table(:items){String :id, :null=>false, :default=>'blah'} @db.alter_table(:items){rename_column :id, :nid} @db[:items].insert @db[:items].all.must_equal [{:nid=>'blah'}] proc{@db[:items].insert(:nid=>nil)}.must_raise(Sequel::NotNullConstraintViolation) end it "should have set_column_type support keep existing options" do @db.create_table(:items){Integer :id, :null=>false, :default=>5} @db.alter_table(:items){set_column_type :id, :Bignum} @db[:items].insert @db[:items].all.must_equal [{:id=>5}] proc{@db[:items].insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation) @db[:items].delete @db[:items].insert(2**40) @db[:items].all.must_equal [{:id=>2**40}] end it "should have set_column_type pass through options" do @db.create_table(:items){integer :id; enum :list, :elements=>%w[one]} @db.alter_table(:items){set_column_type :id, :int, :unsigned=>true, :size=>8; set_column_type :list, :enum, :elements=>%w[two]} @db.schema(:items)[1][1][:db_type].must_equal "enum('two')" end it "should have set_column_default support keep existing options" do @db.create_table(:items){Integer :id, :null=>false, :default=>5} @db.alter_table(:items){set_column_default :id, 6} @db[:items].insert @db[:items].all.must_equal [{:id=>6}] 
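    # Note: these "keep existing options" specs guard against the column's other
    # attributes being silently dropped, presumably because MySQL alters columns
    # via CHANGE/MODIFY COLUMN, which restates the full column definition.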
proc{@db[:items].insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation) end it "should have set_column_allow_null support keep existing options" do @db.create_table(:items){Integer :id, :null=>false, :default=>5} @db.alter_table(:items){set_column_allow_null :id, true} @db[:items].insert @db[:items].all.must_equal [{:id=>5}] @db[:items].insert(:id=>nil) end it "should accept repeated raw sql statements using Database#<<" do @db.create_table(:items){String :name; Integer :value} @db << 'DELETE FROM items' @db[:items].count.must_equal 0 @db << "INSERT INTO items (name, value) VALUES ('tutu', 1234)" @db[:items].first.must_equal(:name => 'tutu', :value => 1234) @db << 'DELETE FROM items' @db[:items].first.must_be_nil end it "should have schema handle generated columns" do skip("generated columns not supported, skipping test") unless @db.supports_generated_columns? @db.create_table(:items) {String :a} @db.alter_table(:items){add_column :b, String, :generated_always_as=>Sequel.function(:CONCAT, :a, 'plus'), :generated_type=>:stored, :unique=>true} @db.schema(:items)[1][1][:generated].must_equal true @db.schema(:items)[1][1][:extra].must_equal "STORED GENERATED" @db.alter_table(:items){add_column :c, String, :generated_always_as=>Sequel.function(:CONCAT, :a, 'minus'), :generated_type=>:virtual} @db.schema(:items)[2][1][:generated].must_equal true @db.schema(:items)[2][1][:extra].must_equal "VIRTUAL GENERATED" end if !DB.mariadb? || DB.server_version >= 100200 end # Socket tests should only be run if the MySQL server is on localhost if DB.adapter_scheme == :mysql && %w'localhost 127.0.0.1 ::1'.include?(URI.parse(DB.uri).host) describe "A MySQL database" do socket_file = defined?(MYSQL_SOCKET_FILE) ? MYSQL_SOCKET_FILE : '/tmp/mysql.sock' it "should accept a socket option" do Sequel.mysql(DB.opts[:database], :host => 'localhost', :user => DB.opts[:user], :password => DB.opts[:password], :socket => socket_file, :keep_reference=>false) end it "should accept a socket option without host option" do Sequel.mysql(DB.opts[:database], :user => DB.opts[:user], :password => DB.opts[:password], :socket => socket_file, :keep_reference=>false) end it "should fail to connect with invalid socket" do proc{Sequel.mysql(DB.opts[:database], :user => DB.opts[:user], :password => DB.opts[:password], :socket =>'blah', :keep_reference=>false)}.must_raise Sequel::DatabaseConnectionError end end end describe "A MySQL database" do it "should accept a read_timeout option when connecting" do db = Sequel.connect(DB.opts.merge(:read_timeout=>22342)) db.test_connection end end describe "MySQL foreign key support" do after do DB.drop_table?(:testfk, :testpk) end it "should create table without :key" do DB.create_table!(:testpk){primary_key :id} DB.create_table!(:testfk){foreign_key :fk, :testpk} end it "should create table with composite keys without :key" do DB.create_table!(:testpk){Integer :id; Integer :id2; primary_key([:id, :id2])} DB.create_table!(:testfk){Integer :fk; Integer :fk2; foreign_key([:fk, :fk2], :testpk)} end it "should create table with self referential without :key" do DB.create_table!(:testfk){primary_key :id; foreign_key :fk, :testfk} end it "should create table with self referential with non-autoincrementing key without :key" do DB.create_table!(:testfk){Integer :id, :primary_key=>true; foreign_key :fk, :testfk} end it "should create table with self referential with composite keys without :key" do DB.create_table!(:testfk){Integer :id; Integer :id2; Integer :fk; Integer :fk2; primary_key([:id, 
:id2]); foreign_key([:fk, :fk2], :testfk)} end it "should alter table without :key" do DB.create_table!(:testpk){primary_key :id} DB.create_table!(:testfk){Integer :id} DB.alter_table(:testfk){add_foreign_key :fk, :testpk} end it "should alter table with composite keys without :key" do DB.create_table!(:testpk){Integer :id; Integer :id2; primary_key([:id, :id2])} DB.create_table!(:testfk){Integer :fk; Integer :fk2} DB.alter_table(:testfk){add_foreign_key([:fk, :fk2], :testpk)} end it "should alter table with self referential without :key" do DB.create_table!(:testfk){primary_key :id} DB.alter_table(:testfk){add_foreign_key :fk, :testfk} end it "should alter table with self referential with composite keys without :key" do DB.create_table!(:testfk){Integer :id; Integer :id2; Integer :fk; Integer :fk2; primary_key([:id, :id2])} DB.alter_table(:testfk){add_foreign_key [:fk, :fk2], :testfk} end end describe "A grouped MySQL dataset" do before do DB.create_table! :test2 do text :name integer :value end DB[:test2].insert(:name => '11', :value => 10) DB[:test2].insert(:name => '11', :value => 20) DB[:test2].insert(:name => '11', :value => 30) DB[:test2].insert(:name => '12', :value => 10) DB[:test2].insert(:name => '12', :value => 20) DB[:test2].insert(:name => '13', :value => 10) end after do DB.drop_table?(:test2) end it "should return the correct count for raw sql query" do ds = DB["select name FROM test2 WHERE name = '11' GROUP BY name"] ds.count.must_equal 1 end it "should return the correct count for a normal dataset" do ds = DB[:test2].select(:name).where(:name => '11').group(:name) ds.count.must_equal 1 end end describe "A MySQL database" do before do @db = DB @db.drop_table?(:posts) end after do @db.drop_table?(:posts) end it "should support fulltext indexes and full_text_search" do @db.create_table(:posts, :engine=>:MyISAM){text :title; text :body; full_text_index :title; full_text_index [:title, :body]} @db[:posts].insert(:title=>'ruby rails', :body=>'y') @db[:posts].insert(:title=>'sequel', :body=>'ruby') @db[:posts].insert(:title=>'ruby scooby', :body=>'x') @db[:posts].full_text_search(:title, 'rails').all.must_equal [{:title=>'ruby rails', :body=>'y'}] @db[:posts].full_text_search([:title, :body], ['sequel', 'ruby']).all.must_equal [{:title=>'sequel', :body=>'ruby'}] @db[:posts].full_text_search(:title, '+ruby -rails', :boolean => true).all.must_equal [{:title=>'ruby scooby', :body=>'x'}] @db[:posts].full_text_search(:title, :$n).call(:select, :n=>'rails').must_equal [{:title=>'ruby rails', :body=>'y'}] @db[:posts].full_text_search(:title, :$n).prepare(:select, :fts_select).call(:n=>'rails').must_equal [{:title=>'ruby rails', :body=>'y'}] end it "should support spatial indexes" do @db.create_table(:posts, :engine=>:MyISAM){point :geom, :null=>false; spatial_index [:geom]} end it "should support indexes with index type" do @db.create_table(:posts){Integer :id; index :id, :type => :btree} @db[:posts].insert(1) @db[:posts].where(:id=>1).count.must_equal 1 end it "should support unique indexes with index type" do @db.create_table(:posts){Integer :id; index :id, :type => :btree, :unique => true} @db[:posts].insert(1) proc{@db[:posts].insert(1)}.must_raise Sequel::UniqueConstraintViolation end it "should not dump partial indexes" do @db.create_table(:posts){text :id} @db << "CREATE INDEX posts_id_index ON posts (id(10))" @db.indexes(:posts).must_equal({}) end it "should dump partial indexes if :partial option is set to true" do @db.create_table(:posts){text :id} @db << "CREATE INDEX 
posts_id_index ON posts (id(10))" @db.indexes(:posts, :partial => true).must_equal(:posts_id_index => {:columns => [:id], :unique => false}) end end describe "MySQL::Dataset#insert and related methods" do before do DB.create_table(:items){String :name, :unique=>true; Integer :value} @d = DB[:items].order(:name) end after do DB.drop_table?(:items) end it "#insert should insert record with default values when no arguments given" do @d.insert @d.all.must_equal [{:name => nil, :value => nil}] end it "#insert should insert record with default values when empty hash given" do @d.insert({}) @d.all.must_equal [{:name => nil, :value => nil}] end it "#insert should insert record with default values when empty array given" do @d.insert [] @d.all.must_equal [{:name => nil, :value => nil}] end it "#on_duplicate_key_update should work with regular inserts" do DB.add_index :items, :name, :unique=>true @d.insert(:name => 'abc', :value => 1) @d.on_duplicate_key_update(:name, :value => 6).insert(:name => 'abc', :value => 1) @d.on_duplicate_key_update(:name, :value => 6).insert(:name => 'def', :value => 2) @d.all.must_equal [{:name => 'abc', :value => 6}, {:name => 'def', :value => 2}] end it "#multi_replace should replace multiple records in a single statement" do @d.multi_replace([{:name => 'abc'}, {:name => 'def'}]) @d.all.must_equal [ {:name => 'abc', :value => nil}, {:name => 'def', :value => nil} ] @d.multi_replace([{:name => 'abc', :value=>1}, {:name => 'ghi', :value=>3}]) @d.all.must_equal [ {:name => 'abc', :value => 1}, {:name => 'def', :value => nil}, {:name => 'ghi', :value=>3} ] end it "#multi_replace should support :commit_every option" do @d.multi_replace([{:value => 1}, {:value => 2}, {:value => 3}, {:value => 4}], :commit_every => 2) @d.all.must_equal [ {:name => nil, :value => 1}, {:name => nil, :value => 2}, {:name => nil, :value => 3}, {:name => nil, :value => 4} ] end it "#multi_replace should support :slice option" do @d.multi_replace([{:value => 1}, {:value => 2}, {:value => 3}, {:value => 4}], :slice => 2) @d.all.must_equal [ {:name => nil, :value => 1}, {:name => nil, :value => 2}, {:name => nil, :value => 3}, {:name => nil, :value => 4} ] end it "#multi_insert should insert multiple records in a single statement" do @d.multi_insert([{:name => 'abc'}, {:name => 'def'}]) @d.all.must_equal [ {:name => 'abc', :value => nil}, {:name => 'def', :value => nil} ] end it "#multi_insert should support :commit_every option" do @d.multi_insert([{:value => 1}, {:value => 2}, {:value => 3}, {:value => 4}], :commit_every => 2) @d.all.must_equal [ {:name => nil, :value => 1}, {:name => nil, :value => 2}, {:name => nil, :value => 3}, {:name => nil, :value => 4} ] end it "#multi_insert should support :slice option" do @d.multi_insert([{:value => 1}, {:value => 2}, {:value => 3}, {:value => 4}], :slice => 2) @d.all.must_equal [ {:name => nil, :value => 1}, {:name => nil, :value => 2}, {:name => nil, :value => 3}, {:name => nil, :value => 4} ] end it "#import should support inserting using columns and values arrays" do @d.import([:name, :value], [['abc', 1], ['def', 2]]) @d.all.must_equal [ {:name => 'abc', :value => 1}, {:name => 'def', :value => 2} ] end it "#insert_ignore should ignore existing records when used with multi_insert" do @d.insert_ignore.multi_insert([{:name => 'abc'}, {:name => 'def'}]) @d.all.must_equal [ {:name => 'abc', :value => nil}, {:name => 'def', :value => nil} ] @d.insert_ignore.multi_insert([{:name => 'abc', :value=>1}, {:name => 'ghi', :value=>3}]) @d.all.must_equal [ {:name 
=> 'abc', :value => nil}, {:name => 'def', :value => nil}, {:name => 'ghi', :value=>3} ] end it "#insert_ignore should ignore single records when used with insert" do @d.insert_ignore.insert(:name => 'ghi') @d.all.must_equal [{:name => 'ghi', :value => nil}] @d.insert_ignore.insert(:name => 'ghi', :value=>2) @d.all.must_equal [{:name => 'ghi', :value => nil}] end it "#on_duplicate_key_update should handle inserts with duplicate keys" do @d.on_duplicate_key_update.import([:name,:value], [['abc', 1], ['def',2]]) @d.all.must_equal [ {:name => 'abc', :value => 1}, {:name => 'def', :value => 2} ] @d.on_duplicate_key_update.import([:name,:value], [['abc', 2], ['ghi',3]]) @d.all.must_equal [ {:name => 'abc', :value => 2}, {:name => 'def', :value => 2}, {:name => 'ghi', :value=>3} ] end it "#on_duplicate_key_update should add the ON DUPLICATE KEY UPDATE and columns specified when args are given" do @d.on_duplicate_key_update(:value).import([:name,:value], [['abc', 1], ['def',2]]) @d.all.must_equal [ {:name => 'abc', :value => 1}, {:name => 'def', :value => 2} ] @d.on_duplicate_key_update(:value).import([:name,:value], [['abc', 2], ['ghi',3]]) @d.all.must_equal [ {:name => 'abc', :value => 2}, {:name => 'def', :value => 2}, {:name => 'ghi', :value=>3} ] @d.on_duplicate_key_update(:name).import([:name,:value], [['abc', 5], ['ghi',6]]) @d.all.must_equal [ {:name => 'abc', :value => 2}, {:name => 'def', :value => 2}, {:name => 'ghi', :value=>3} ] end end describe "MySQL::Dataset#update and related methods" do before do DB.create_table(:items){String :name; Integer :value; index :name, :unique=>true} @d = DB[:items] end after do DB.drop_table?(:items) end it "#update_ignore should not raise error where normal update would fail" do @d.insert(:name => 'cow', :value => 0) @d.insert(:name => 'cat', :value => 1) proc{@d.where(:value => 1).update(:name => 'cow')}.must_raise(Sequel::UniqueConstraintViolation) @d.update_ignore.where(:value => 1).update(:name => 'cow') @d.order(:name).all.must_equal [{:name => 'cat', :value => 1}, {:name => 'cow', :value => 0}] end end describe "MySQL::Dataset#replace" do before do DB.create_table(:items){Integer :id, :unique=>true; Integer :value} @d = DB[:items] end after do DB.drop_table?(:items) end it "should use default values if they exist" do DB.alter_table(:items){set_column_default :id, 1; set_column_default :value, 2} @d.replace @d.all.must_equal [{:id=>1, :value=>2}] @d.replace([]) @d.all.must_equal [{:id=>1, :value=>2}] @d.replace({}) @d.all.must_equal [{:id=>1, :value=>2}] end end describe "MySQL::Dataset#complex_expression_sql" do before do @d = DB.dataset end it "should handle string concatenation with CONCAT if more than one record" do @d.literal(Sequel.join([:x, :y])).must_equal "CONCAT(`x`, `y`)" @d.literal(Sequel.join([:x, :y], ' ')).must_equal "CONCAT(`x`, ' ', `y`)" @d.literal(Sequel.join([Sequel.function(:x, :y), 1, Sequel.lit('z')], Sequel.subscript(:y, 1))).must_equal "CONCAT(x(`y`), `y`[1], '1', `y`[1], z)" end it "should handle string concatenation as simple string if just one record" do @d.literal(Sequel.join([:x])).must_equal "`x`" @d.literal(Sequel.join([:x], ' ')).must_equal "`x`" end end describe "MySQL::Dataset#calc_found_rows" do before do DB.create_table!(:items){Integer :a} end after do DB.drop_table?(:items) end it "should count matching rows disregarding LIMIT clause" do DB[:items].multi_insert([{:a => 1}, {:a => 1}, {:a => 2}]) DB.synchronize do DB[:items].calc_found_rows.filter(:a => 1).limit(1).all.must_equal [{:a => 1}] 
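      # FOUND_ROWS() is connection-local state, so it must run on the same
      # connection that executed the SQL_CALC_FOUND_ROWS query; DB.synchronize
      # pins a single connection from the pool for both statements.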
      DB.dataset.select(Sequel.function(:FOUND_ROWS).as(:rows)).all.must_equal [{:rows => 2 }]
    end
  end
end

if DB.adapter_scheme == :mysql or DB.adapter_scheme == :jdbc or DB.adapter_scheme == :mysql2
  describe "MySQL Stored Procedures" do
    before do
      DB.create_table(:items){Integer :id; Integer :value}
      @d = DB[:items]
    end
    after do
      DB.drop_table?(:items)
      DB.execute('DROP PROCEDURE test_sproc')
    end

    it "should be callable on the database object" do
      DB.execute_ddl('CREATE PROCEDURE test_sproc() BEGIN DELETE FROM items; END')
      DB[:items].delete
      DB[:items].insert(:value=>1)
      DB[:items].count.must_equal 1
      DB.call_sproc(:test_sproc)
      DB[:items].count.must_equal 0
    end

    # Mysql2 doesn't support stored procedures that return result sets, probably because
    # CLIENT_MULTI_RESULTS is not set.
    unless DB.adapter_scheme == :mysql2
      it "should be callable on the dataset object" do
        DB.execute_ddl('CREATE PROCEDURE test_sproc(a INTEGER) BEGIN SELECT *, a AS b FROM items; END')
        DB[:items].delete
        @d = DB[:items]
        @d.call_sproc(:select, :test_sproc, 3).must_equal []
        @d.insert(:value=>1)
        @d.call_sproc(:select, :test_sproc, 4).must_equal [{:id=>nil, :value=>1, :b=>4}]
        @d = @d.with_row_proc(proc{|r| r.keys.each{|k| r[k] *= 2 if r[k].is_a?(Integer)}; r})
        @d.call_sproc(:select, :test_sproc, 3).must_equal [{:id=>nil, :value=>2, :b=>6}]
      end

      it "should be callable on the dataset object with multiple arguments" do
        DB.execute_ddl('CREATE PROCEDURE test_sproc(a INTEGER, c INTEGER) BEGIN SELECT *, a AS b, c AS d FROM items; END')
        DB[:items].delete
        @d = DB[:items]
        @d.call_sproc(:select, :test_sproc, 3, 4).must_equal []
        @d.insert(:value=>1)
        @d.call_sproc(:select, :test_sproc, 4, 5).must_equal [{:id=>nil, :value=>1, :b=>4, :d=>5}]
        @d = @d.with_row_proc(proc{|r| r.keys.each{|k| r[k] *= 2 if r[k].is_a?(Integer)}; r})
        @d.call_sproc(:select, :test_sproc, 3, 4).must_equal [{:id=>nil, :value=>2, :b=>6, :d => 8}]
      end
    end

    it "should deal with nil values" do
      DB.execute_ddl('CREATE PROCEDURE test_sproc(i INTEGER, v INTEGER) BEGIN INSERT INTO items VALUES (i, v); END')
      DB[:items].delete
      DB.call_sproc(:test_sproc, :args=>[1, nil])
      DB[:items].all.must_equal [{:id=>1, :value=>nil}]
    end
  end
end

if DB.adapter_scheme == :mysql
  describe "MySQL bad date/time conversions" do
    after do
      DB.convert_invalid_date_time = false
    end

    it "should raise an exception when a bad date/time is used and convert_invalid_date_time is false" do
      DB.convert_invalid_date_time = false
      proc{DB["SELECT CAST('0000-00-00' AS date)"].single_value}.must_raise(Sequel::InvalidValue)
      proc{DB["SELECT CAST('0000-00-00 00:00:00' AS datetime)"].single_value}.must_raise(Sequel::InvalidValue)
      proc{DB["SELECT CAST('25:00:00' AS time)"].single_value}.must_raise(Sequel::InvalidValue)
    end

    it "should use a nil value when a bad date/time is used and convert_invalid_date_time is nil or :nil" do
      DB.convert_invalid_date_time = nil
      DB["SELECT CAST('0000-00-00' AS date)"].single_value.must_be_nil
      DB["SELECT CAST('0000-00-00 00:00:00' AS datetime)"].single_value.must_be_nil
      DB["SELECT CAST('25:00:00' AS time)"].single_value.must_be_nil
      DB.convert_invalid_date_time = :nil
      DB["SELECT CAST('0000-00-00' AS date)"].single_value.must_be_nil
      DB["SELECT CAST('0000-00-00 00:00:00' AS datetime)"].single_value.must_be_nil
      DB["SELECT CAST('25:00:00' AS time)"].single_value.must_be_nil
    end

    it "should use the string value when a bad date/time is used and convert_invalid_date_time is :string" do
      DB.convert_invalid_date_time = :string
      DB["SELECT CAST('0000-00-00' AS date)"].single_value.must_equal '0000-00-00'
      DB["SELECT CAST('0000-00-00
00:00:00' AS datetime)"].single_value.must_equal '0000-00-00 00:00:00' DB["SELECT CAST('25:00:00' AS time)"].single_value.must_equal '25:00:00' end end describe "MySQL multiple result sets" do before do DB.create_table!(:a){Integer :a} DB.create_table!(:b){Integer :b} @ds = DB['SELECT * FROM a; SELECT * FROM b'] DB[:a].insert(10) DB[:a].insert(15) DB[:b].insert(20) DB[:b].insert(25) end after do DB.drop_table?(:a, :b) end it "should combine all results by default" do @ds.all.must_equal [{:a=>10}, {:a=>15}, {:b=>20}, {:b=>25}] end it "should work with Database#run" do DB.run('SELECT * FROM a; SELECT * FROM b') DB.run('SELECT * FROM a; SELECT * FROM b') end it "should work with Database#run and other statements" do DB.run('UPDATE a SET a = 1; SELECT * FROM a; DELETE FROM b') DB[:a].select_order_map(:a).must_equal [1, 1] DB[:b].all.must_equal [] end it "should split results returned into arrays if split_multiple_result_sets is used" do @ds.split_multiple_result_sets.all.must_equal [[{:a=>10}, {:a=>15}], [{:b=>20}, {:b=>25}]] end it "should have regular row_procs work when splitting multiple result sets" do @ds = @ds.with_row_proc(proc{|x| x[x.keys.first] *= 2; x}) @ds.split_multiple_result_sets.all.must_equal [[{:a=>20}, {:a=>30}], [{:b=>40}, {:b=>50}]] end it "should use the columns from the first result set when splitting result sets" do @ds.split_multiple_result_sets.columns.must_equal [:a] end it "should not allow graphing a dataset that splits multiple statements" do proc{@ds.split_multiple_result_sets.graph(:b, :b=>:a)}.must_raise(Sequel::Error) end it "should not allow splitting a graphed dataset" do proc{DB[:a].graph(:b, :b=>:a).split_multiple_result_sets}.must_raise(Sequel::Error) end end end if DB.adapter_scheme == :mysql2 describe "Mysql2 streaming" do before(:all) do DB.create_table!(:a){Integer :a} DB.transaction do 1000.times do |i| DB[:a].insert(i) end end @ds = DB[:a].stream.order(:a) end after(:all) do DB.drop_table?(:a) end it "should correctly stream results" do @ds.map(:a).must_equal((0...1000).to_a) end it "should correctly handle early returning when streaming results" do 3.times{@ds.each{|r| break r[:a]}.must_equal 0} end it "#paged_each should bypass streaming when :stream => false passed in" do DB[:a].order(:a).paged_each(:stream => false){|x| DB[:a].first; break} end end end describe "MySQL joined datasets" do before do @db = DB @db.create_table!(:a) do Integer :id end @db.create_table!(:b) do Integer :id Integer :a_id end @db[:a].insert(1) @db[:a].insert(2) @db[:b].insert(3, 1) @db[:b].insert(4, 1) @db[:b].insert(5, 2) @ds = @db[:a].join(:b, :a_id=>:id) end after do @db.drop_table?(:a, :b) end it "should support deletions from a single table" do @ds.where(Sequel[:a][:id]=>1).delete @db[:a].select_order_map(:id).must_equal [2] @db[:b].select_order_map(:id).must_equal [3, 4, 5] end it "should support deletions from multiple tables" do @ds.delete_from(:a, :b).where(Sequel[:a][:id]=>1).delete @db[:a].select_order_map(:id).must_equal [2] @db[:b].select_order_map(:id).must_equal [5] end end describe "MySQL::Database#rename_tables" do before do @db = DB end after do DB.drop_table?(:posts1, :messages1, :posts, :messages) end it "should rename multiple tables" do @db.create_table!(:posts1){primary_key :a} @db.create_table!(:messages1){primary_key :a} @db.rename_tables([:posts1, :posts], [:messages1, :messages]) @db.table_exists?(:posts1).must_equal false @db.table_exists?(:messages1).must_equal false @db.table_exists?(:posts).must_equal true 
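    # Database#rename_tables issues both renames in a single RENAME TABLE
    # statement, which MySQL applies together, so the new names should appear
    # (and the old names disappear) as a unit.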
    @db.table_exists?(:messages).must_equal true
  end
end

sequel-5.63.0/spec/adapters/oracle_spec.rb

SEQUEL_ADAPTER_TEST = :oracle

require_relative 'spec_helper'

unless DB.opts[:autosequence]
  warn "Running oracle adapter specs without :autosequence Database option results in many errors, use the :autosequence Database option when testing"
end

describe "An Oracle database" do
  before(:all) do
    DB.create_table!(:items) do
      String :name, :size => 50
      Integer :value
      Date :date_created
      index :value
    end
    DB.create_table!(:books) do
      Integer :id
      String :title, :size => 50
      Integer :category_id
    end
    DB.create_table!(:categories) do
      Integer :id
      String :cat_name, :size => 50
    end
    DB.create_table!(:notes) do
      Integer :id
      String :title, :size => 50
      String :content, :text => true
    end
    @d = DB[:items]
  end
  after do
    @d.delete
  end
  after(:all) do
    DB.drop_table?(:items, :books, :categories, :notes)
  end

  it "should allow limit and offset with clob columns" do
    notes = []
    notes << {:id => 1, :title => 'abc', :content => 'zyx'}
    notes << {:id => 2, :title => 'def', :content => 'wvu'}
    notes << {:id => 3, :title => 'ghi', :content => 'tsr'}
    notes << {:id => 4, :title => 'jkl', :content => 'qpo'}
    notes << {:id => 5, :title => 'mno', :content => 'nml'}
    DB[:notes].multi_insert(notes)
    DB[:notes].sort_by{|x| x[:id]}.must_equal notes
    rows = DB[:notes].limit(3, 0).all
    rows.length.must_equal 3
    rows.all?{|v| notes.must_include(v)}
  end

  it "should provide disconnect functionality" do
    DB.execute("select user from dual")
    DB.pool.size.must_equal 1
    DB.disconnect
    DB.pool.size.must_equal 0
  end

  it "should have working view_exists?" do
    begin
      DB.view_exists?(:cats).must_equal false
      DB.view_exists?(:cats, :current_schema=>true).must_equal false
      DB.create_view(:cats, DB[:categories])
      DB.view_exists?(:cats).must_equal true
      DB.view_exists?(:cats, :current_schema=>true).must_equal true
      if IDENTIFIER_MANGLING && !DB.frozen?
        om = DB.identifier_output_method
        im = DB.identifier_input_method
        DB.identifier_output_method = :reverse
        DB.identifier_input_method = :reverse
        DB.view_exists?(:STAC).must_equal true
        DB.view_exists?(:STAC, :current_schema=>true).must_equal true
        DB.view_exists?(:cats).must_equal false
        DB.view_exists?(:cats, :current_schema=>true).must_equal false
      end
    ensure
      if IDENTIFIER_MANGLING && !DB.frozen?
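        # Restore the original identifier input/output methods so the
        # reversal used above doesn't leak into later specs.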
DB.identifier_output_method = om DB.identifier_input_method = im end DB.drop_view(:cats) end end it "should be able to get current sequence value with SQL" do begin DB.create_table!(:foo){primary_key :id} DB.fetch('SELECT seq_foo_id.nextval FROM DUAL').single_value.must_equal 1 ensure DB.drop_table(:foo) end end it "should provide schema information" do books_schema = [[:id, [:integer, false, true, nil]], [:title, [:string, false, true, nil]], [:category_id, [:integer, false, true, nil]]] categories_schema = [[:id, [:integer, false, true, nil]], [:cat_name, [:string, false, true, nil]]] items_schema = [[:name, [:string, false, true, nil]], [:value, [:integer, false, true, nil]], [:date_created, [:datetime, false, true, nil]]] {:books => books_schema, :categories => categories_schema, :items => items_schema}.each_pair do |table, expected_schema| schema = DB.schema(table) schema.wont_equal nil schema.map{|c, s| [c, s.values_at(:type, :primary_key, :allow_null, :ruby_default)]}.must_equal expected_schema end end it "should create a temporary table" do DB.create_table! :test_tmp, :temp => true do varchar2 :name, :size => 50 primary_key :id, :null => false index :name, :unique => true end DB.drop_table?(:test_tmp) end it "should return the correct record count" do @d.count.must_equal 0 @d.insert(:name => 'abc', :value => 123) @d.insert(:name => 'abc', :value => 456) @d.insert(:name => 'def', :value => 789) @d.count.must_equal 3 end it "should return the correct records" do @d.to_a.must_equal [] @d.insert(:name => 'abc', :value => 123) @d.insert(:name => 'abc', :value => 456) @d.insert(:name => 'def', :value => 789) @d.order(:value).to_a.must_equal [ {:date_created=>nil, :name => 'abc', :value => 123}, {:date_created=>nil, :name => 'abc', :value => 456}, {:date_created=>nil, :name => 'def', :value => 789} ] @d.select(:name).distinct.order_by(:name).to_a.must_equal [ {:name => 'abc'}, {:name => 'def'} ] @d.order(Sequel.desc(:value)).limit(1).to_a.must_equal [ {:date_created=>nil, :name => 'def', :value => 789} ] @d.filter(:name => 'abc').order(:value).to_a.must_equal [ {:date_created=>nil, :name => 'abc', :value => 123}, {:date_created=>nil, :name => 'abc', :value => 456} ] @d.order(Sequel.desc(:value)).filter(:name => 'abc').to_a.must_equal [ {:date_created=>nil, :name => 'abc', :value => 456}, {:date_created=>nil, :name => 'abc', :value => 123} ] @d.filter(:name => 'abc').order(:value).limit(1).to_a.must_equal [ {:date_created=>nil, :name => 'abc', :value => 123} ] @d.filter(:name => 'abc').order(Sequel.desc(:value)).limit(1).to_a.must_equal [ {:date_created=>nil, :name => 'abc', :value => 456} ] @d.filter(:name => 'abc').order(:value).limit(1).to_a.must_equal [ {:date_created=>nil, :name => 'abc', :value => 123} ] @d.order(:value).limit(1).to_a.must_equal [ {:date_created=>nil, :name => 'abc', :value => 123} ] @d.order(:value).limit(1, 1).to_a.must_equal [ {:date_created=>nil, :name => 'abc', :value => 456} ] @d.order(:value).limit(1, 2).to_a.must_equal [ {:date_created=>nil, :name => 'def', :value => 789} ] @d.avg(:value).to_i.must_equal((789+123+456)/3) @d.max(:value).to_i.must_equal 789 @d.select(:name, Sequel.function(:AVG, :value).as(:avg)).filter(:name => 'abc').group(:name).to_a.must_equal [ {:name => 'abc', :avg => (456+123)/2.0} ] @d.select(Sequel.function(:AVG, :value).as(:avg)).group(:name).order(:name).limit(1).to_a.must_equal [ {:avg => (456+123)/2.0} ] @d.select(:name, Sequel.function(:AVG, :value).as(:avg)).group(:name).order(:name).to_a.must_equal [ {:name => 'abc', :avg => 
(456+123)/2.0}, {:name => 'def', :avg => 789*1.0} ] @d.select(:name, Sequel.function(:AVG, :value).as(:avg)).group(:name).order(:name).to_a.must_equal [ {:name => 'abc', :avg => (456+123)/2.0}, {:name => 'def', :avg => 789*1.0} ] @d.select(:name, Sequel.function(:AVG, :value).as(:avg)).group(:name).having(:name => ['abc', 'def']).order(:name).to_a.must_equal [ {:name => 'abc', :avg => (456+123)/2.0}, {:name => 'def', :avg => 789*1.0} ] @d.select(:name, :value).filter(:name => 'abc').union(@d.select(:name, :value).filter(:name => 'def')).order(:value).to_a.must_equal [ {:name => 'abc', :value => 123}, {:name => 'abc', :value => 456}, {:name => 'def', :value => 789} ] end it "should update records correctly" do @d.insert(:name => 'abc', :value => 123) @d.insert(:name => 'abc', :value => 456) @d.insert(:name => 'def', :value => 789) @d.filter(:name => 'abc').update(:value => 530) @d[:name => 'def'][:value].must_equal 789 @d.filter(:value => 530).count.must_equal 2 end it "should translate values correctly" do @d.insert(:name => 'abc', :value => 456) @d.insert(:name => 'def', :value => 789) @d.filter{value > 500}.update(:date_created => Sequel.lit("to_timestamp('2009-09-09', 'YYYY-MM-DD')")) @d[:name => 'def'][:date_created].strftime('%F').must_equal '2009-09-09' end it "should delete records correctly" do @d.insert(:name => 'abc', :value => 123) @d.insert(:name => 'abc', :value => 456) @d.insert(:name => 'def', :value => 789) @d.filter(:name => 'abc').delete @d.count.must_equal 1 @d.first[:name].must_equal 'def' end it "should be able to literalize booleans" do @d.literal(true) @d.literal(false) end it "should support transactions" do DB.transaction do @d.insert(:name => 'abc', :value => 1) end @d.count.must_equal 1 end it "should return correct result" do @d1 = DB[:books] @d1.delete @d1.insert(:id => 1, :title => 'aaa', :category_id => 100) @d1.insert(:id => 2, :title => 'bbb', :category_id => 100) @d1.insert(:id => 3, :title => 'ccc', :category_id => 101) @d1.insert(:id => 4, :title => 'ddd', :category_id => 102) @d2 = DB[:categories] @d2.delete @d2.insert(:id => 100, :cat_name => 'ruby') @d2.insert(:id => 101, :cat_name => 'rails') @d1.join(:categories, :id => :category_id).select(Sequel[:books][:id], :title, :cat_name).order(Sequel[:books][:id]).to_a.must_equal [ {:id => 1, :title => 'aaa', :cat_name => 'ruby'}, {:id => 2, :title => 'bbb', :cat_name => 'ruby'}, {:id => 3, :title => 'ccc', :cat_name => 'rails'} ] @d1.join(:categories, :id => :category_id).select(Sequel[:books][:id], :title, :cat_name).order(Sequel[:books][:id]).limit(2, 1).to_a.must_equal [ {:id => 2, :title => 'bbb', :cat_name => 'ruby'}, {:id => 3, :title => 'ccc', :cat_name => 'rails'}, ] @d1.left_outer_join(:categories, :id => :category_id).select(Sequel[:books][:id], :title, :cat_name).order(Sequel[:books][:id]).to_a.must_equal [ {:id => 1, :title => 'aaa', :cat_name => 'ruby'}, {:id => 2, :title => 'bbb', :cat_name => 'ruby'}, {:id => 3, :title => 'ccc', :cat_name => 'rails'}, {:id => 4, :title => 'ddd', :cat_name => nil} ] @d1.left_outer_join(:categories, :id => :category_id).select(Sequel[:books][:id], :title, :cat_name).reverse_order(Sequel[:books][:id]).limit(2, 0).to_a.must_equal [ {:id => 4, :title => 'ddd', :cat_name => nil}, {:id => 3, :title => 'ccc', :cat_name => 'rails'} ] end it "should allow columns to be renamed" do @d1 = DB[:books] @d1.delete @d1.insert(:id => 1, :title => 'aaa', :category_id => 100) @d1.insert(:id => 2, :title => 'bbb', :category_id => 100) @d1.insert(:id => 3, :title => 'bbb', 
:category_id => 100) @d1.select(Sequel.as(:title, :name)).order_by(:id).to_a.must_equal [ { :name => 'aaa' }, { :name => 'bbb' }, { :name => 'bbb' }, ] DB[:books].select(:title).group_by(:title).count.must_equal 2 end end describe "An Oracle database with xml types" do before(:all) do DB.create_table!(:xml_test){xmltype :xml_col} end after(:all) do DB.drop_table(:xml_test) end it "should work correctly with temporary clobs" do DB[:xml_test].insert("<a href='b'>c</a>") DB.from(Sequel.lit('xml_test x')).select(Sequel.lit("x.xml_col.getCLOBVal() v")).all.must_equal [{:v=>"<a href=\"b\">c</a>\n"}] end end describe "Clob Bound Argument Type" do before(:all) do @db = DB @db.create_table!(:items) do primary_key :id clob :c end @ds = @db[:items] end before do @ds.delete end after(:all) do @db.drop_table?(:items) end it "should handle clob type in prepared statement arguments" do @ds.delete clob = "\"'[]`a0 " @ds.prepare(:insert, :ps_clob, {:c=>@db.adapter_scheme == :oracle ? :$c__clob : :$c}).call(:c=>clob) @ds.get(:c).must_equal clob end end describe "CLOB Returning Procedure" do before do DB.run <<SQL CREATE OR REPLACE PROCEDURE testCLOB(outParam OUT CLOB) IS BEGIN outParam := 'Hello World CLOB OUT parameter'; END; SQL end after do DB.run("DROP PROCEDURE testCLOB") end it "should work correctly with output clobs" do res = DB.execute("begin testCLOB(:1); end;", {:arguments => [[nil, 'clob']]}) {|c| c[1].read } res.must_equal 'Hello World CLOB OUT parameter' end end if DB.adapter_scheme == :oracle describe "Oracle non-standard MERGE" do before(:all) do @db = DB @db.create_table!(:m1){Integer :i1; Integer :a} @db.create_table!(:m2){Integer :i2; Integer :b} @m1 = @db[:m1] @m2 = @db[:m2] end after do @m1.delete @m2.delete end after(:all) do @db.drop_table?(:m1, :m2) end it "should allow inserts, updates, and deletes based on conditions in a single MERGE statement" do ds = @m1. merge_using(:m2, :i1=>:i2). merge_insert(:i1=>Sequel[:i2], :a=>Sequel[:b]+11){b <= 50}. merge_delete{{:a => 40..70}}. 
      merge_update(:a=>Sequel[:a]+:b+20){a <= 50}

    @m2.insert(1, 2)
    @m1.all.must_equal []

    # INSERT
    ds.merge
    @m1.all.must_equal [{:i1=>1, :a=>13}]

    # UPDATE
    ds.merge
    @m1.all.must_equal [{:i1=>1, :a=>35}]

    # DELETE MATCHING current row, INSERT NOT MATCHED new row
    # (in Oracle's MERGE the DELETE condition is checked against the
    # post-update value: 45+3+20 = 68, which falls in 40..70)
    @m1.update(:i1=>12, :a=>45)
    @m2.insert(12, 3)
    ds.merge
    @m1.all.must_equal [{:i1=>1, :a=>13}]

    # MATCHED DO NOTHING: Oracle's DELETE clause only applies to rows the
    # UPDATE actually modified, and a=51 fails the update condition (a <= 50)
    @m2.where(:i2=>12).delete
    @m1.update(:a=>51)
    ds.merge
    @m1.all.must_equal [{:i1=>1, :a=>51}]

    # NOT MATCHED DO NOTHING: b=51 fails the insert condition (b <= 50)
    @m1.delete
    @m2.update(:b=>51)
    ds.merge
    @m1.all.must_equal []
  end

  it "should call inserts, updates, and deletes without conditions" do
    @m2.insert(1, 2)
    ds = @m1.merge_using(:m2, :i1=>:i2)
    ds.merge_insert(:i2, :b).merge
    @m1.all.must_equal [{:i1=>1, :a=>2}]
    ds.merge_update(:a=>Sequel[:a]+1).merge
    @m1.all.must_equal [{:i1=>1, :a=>3}]
    ds.merge_update(:a=>Sequel[:a]+1).merge_delete{true}.merge
    @m1.all.must_equal []
  end

  it "should raise if a merge is attempted without WHEN clauses" do
    proc{@m1.merge_using(:m2, :i1=>:i2).merge}.must_raise Sequel::Error
  end

  it "should raise if a merge is attempted without a merge source" do
    proc{@m1.merge_insert(:a=>1).merge}.must_raise Sequel::Error
  end

  it "should raise if multiple merge operations of the same type are used" do
    ds = @m1.merge_using(:m2, :i1=>:i2).merge_insert(:a=>1){true}.merge_insert(:a=>1){true}
    proc{ds.merge}.must_raise Sequel::Error
  end

  it "should raise if a delete is attempted without an update" do
    proc{@m1.merge_using(:m2, :i1=>:i2).merge_delete.merge}.must_raise Sequel::Error
  end

  it "should handle uncachable SQL" do
    v = true
    @m2.insert(1, 2)
    ds = @m1.
      merge_using(:m2, :i1=>:i2).
      merge_insert(Sequel[:i2], Sequel[:b]+11){Sequel.delay{v}}
    ds.merge
    @m1.all.must_equal [{:i1=>1, :a=>13}]
    @m1.delete
    v = false
    ds.merge
    @m1.all.must_equal []
  end
end

sequel-5.63.0/spec/adapters/postgres_spec.rb

SEQUEL_ADAPTER_TEST = :postgres

require_relative 'spec_helper'

uses_pg = Sequel::Postgres::USES_PG if DB.adapter_scheme == :postgres
uses_pg_or_jdbc = uses_pg || DB.adapter_scheme == :jdbc

DB.extension :pg_array, :pg_range, :pg_row, :pg_inet, :pg_json, :pg_enum
begin
  DB.extension :pg_interval
rescue LoadError
end
DB.extension :pg_hstore if DB.type_supported?('hstore')
DB.extension :pg_multirange if DB.server_version >= 140000
DB.extension :pg_auto_parameterize if uses_pg && ENV['SEQUEL_PG_AUTO_PARAMETERIZE']

describe 'PostgreSQL adapter' do
  before do
    @db = DB
    @db.disconnect
  end
  after do
    @db.disconnect
  end

  it "should handle case where status raises PGError" do
    proc do
      @db.synchronize do |c|
        def c.status; raise Sequel::Postgres::PGError end
        def c.execute_query(*); raise Sequel::Postgres::PGError end
        c.execute('SELECT 1')
      end
    end.must_raise Sequel::DatabaseDisconnectError
  end

  it "should handle case where execute_query returns nil" do
    @db.synchronize do |c|
      def
c.execute_query(*); super; nil end c.execute('SELECT 1 AS v'){|v| v.must_be_nil} end end it "should handle prepared statement case where exec_prepared returns nil" do @db.synchronize do |c| def c.exec_prepared(*); super; nil end @db['SELECT 1 AS v'].prepare(:all, :test_prepared) @db.execute(:test_prepared){|v| v.must_be_nil} end end it "should handle prepared statement case where same variable is used more than once" do @db['SELECT (?::integer + ?::integer) AS v', :$v, :$v].prepare(:single_value, :test_prepared).call(:v=>2).must_equal 4 end end if uses_pg describe 'A PostgreSQL database' do before do @db = DB end after do @db.drop_table?(:test) end it "should provide a list of existing ordinary tables" do @db.create_table(:test){Integer :id} @db.tables.must_include :test end it "should handle providing a block to tables" do @db.create_table(:test){Integer :id} @db.tables{|ds| ds.where(:relkind=>'r').map{|r| r[:relname]}}.must_include 'test' @db.tables{|ds| ds.where(:relkind=>'p').map{|r| r[:relname]}}.wont_include 'test' end it "should handle blobs" do @db.create_table(:test){File :blob} blob = Sequel.blob("\0\1\254\255").force_encoding('BINARY') @db[:test].insert(blob) @db[:test].get(:blob).force_encoding('BINARY').must_equal blob end it "should provide a list of existing partitioned tables" do @db.create_table(:test, :partition_by => :id, :partition_type => :range){Integer :id} @db.tables.must_include :test end if DB.server_version >= 100000 it "should provide a list of existing ordinary and partitioned tables" do @db.create_table(:test, :partition_by => :id, :partition_type => :range){Integer :id} @db.create_table(:test_1, :partition_of => :test){from 1; to 3} @db.create_table(:test_2, :partition_of => :test){from 3; to 4} @db.tables.must_include :test @db.tables.must_include :test_1 @db.tables.must_include :test_2 end if DB.server_version >= 100000 it "should provide a list of foreign keys on partitioned tables" do begin @db.create_table(:test){primary_key :i} @db.create_table(:test2, :partition_by => :id, :partition_type => :range){foreign_key :id, :test} fks = @db.foreign_key_list(:test2) fks.length.must_equal 1 fks[0][:table].must_equal :test fks[0][:columns].must_equal [:id] fks[0][:key].must_equal [:i] ensure @db.drop_table?(:test2) end end if DB.server_version >= 110000 end describe "PostgreSQL", '#create_table' do before do @db = DB end after do @db.drop_table?(:tmp_dolls, :unlogged_dolls) @db.default_string_column_size = nil end it "should support multiple types of string columns" do @db.default_string_column_size = 50 @db.create_table(:tmp_dolls) do String :a1 String :a2, :size=>10 String :a3, :fixed=>true String :a4, :fixed=>true, :size=>10 String :a5, :text=>false String :a6, :text=>false, :size=>10 String :a7, :text=>true end @db.schema(:tmp_dolls).map{|k, v| v[:max_length]}.must_equal [nil, 10, 50, 10, 50, 10, nil] end it "should support range partitioned tables for single columns with :partition_* options" do @db.create_table(:tmp_dolls, :partition_by => :id, :partition_type=>:range){Integer :id} @db.create_table(:tmp_dolls_1, :partition_of => :tmp_dolls){from 1; to 3} @db.create_table(:tmp_dolls_2, :partition_of => :tmp_dolls){from 3; to 4} @db[:tmp_dolls].insert(1) @db[:tmp_dolls].insert(2) @db[:tmp_dolls].insert(3) proc{@db[:tmp_dolls].insert(0)}.must_raise Sequel::DatabaseError proc{@db[:tmp_dolls].insert(5)}.must_raise Sequel::DatabaseError @db.create_table(:tmp_dolls_0, :partition_of => :tmp_dolls){from minvalue; to 1} @db.create_table(:tmp_dolls_3, :partition_of => 
:tmp_dolls){from 4; to maxvalue} @db[:tmp_dolls].insert(0) @db[:tmp_dolls].insert(5) @db[:tmp_dolls].order(:id).select_order_map(:id).must_equal [0,1,2,3,5] @db[:tmp_dolls_0].order(:id).select_order_map(:id).must_equal [0] @db[:tmp_dolls_1].order(:id).select_order_map(:id).must_equal [1,2] @db[:tmp_dolls_2].order(:id).select_order_map(:id).must_equal [3] @db[:tmp_dolls_3].order(:id).select_order_map(:id).must_equal [5] end if DB.server_version >= 100000 it "should support range partitioned tables for multiple columns with :partition_* options" do @db.create_table(:tmp_dolls, :partition_by => [:id, :id2], :partition_type=>:range){Integer :id; Integer :id2} @db.create_table(:tmp_dolls_1, :partition_of => :tmp_dolls){from 1, 1; to 3, 3} @db.create_table(:tmp_dolls_2, :partition_of => :tmp_dolls){from 3, 3; to 4, 4} @db[:tmp_dolls].insert(1, 1) @db[:tmp_dolls].insert(2, 2) @db[:tmp_dolls].insert(3, 3) proc{@db[:tmp_dolls].insert(0, 0)}.must_raise Sequel::DatabaseError proc{@db[:tmp_dolls].insert(5, 5)}.must_raise Sequel::DatabaseError @db.create_table(:tmp_dolls_0, :partition_of => :tmp_dolls){from minvalue, minvalue; to 1, 1} @db.create_table(:tmp_dolls_3, :partition_of => :tmp_dolls){from 4, 4; to maxvalue, maxvalue} @db[:tmp_dolls].insert(0, 0) @db[:tmp_dolls].insert(5, 5) @db[:tmp_dolls].order(:id).select_order_map([:id, :id2]).must_equal [0,1,2,3,5].map{|x| [x,x]} @db[:tmp_dolls_0].order(:id).select_order_map([:id, :id2]).must_equal [0].map{|x| [x,x]} @db[:tmp_dolls_1].order(:id).select_order_map([:id, :id2]).must_equal [1,2].map{|x| [x,x]} @db[:tmp_dolls_2].order(:id).select_order_map([:id, :id2]).must_equal [3].map{|x| [x,x]} @db[:tmp_dolls_3].order(:id).select_order_map([:id, :id2]).must_equal [5].map{|x| [x,x]} end if DB.server_version >= 100000 it "should support list partitioned tables for single column with :partition_* options" do @db.create_table(:tmp_dolls, :partition_by => :id, :partition_type=>:list){Integer :id} @db.create_table(:tmp_dolls_1, :partition_of => :tmp_dolls){values_in 1, 2} @db.create_table(:tmp_dolls_2, :partition_of => :tmp_dolls){values_in 3} @db[:tmp_dolls].insert(1) @db[:tmp_dolls].insert(2) @db[:tmp_dolls].insert(3) proc{@db[:tmp_dolls].insert(0)}.must_raise Sequel::DatabaseError proc{@db[:tmp_dolls].insert(5)}.must_raise Sequel::DatabaseError @db.create_table(:tmp_dolls_0, :partition_of => :tmp_dolls){values_in 0} @db.create_table(:tmp_dolls_3, :partition_of => :tmp_dolls){default} @db[:tmp_dolls].insert(0) @db[:tmp_dolls].insert(5) @db[:tmp_dolls].order(:id).select_order_map(:id).must_equal [0,1,2,3,5] @db[:tmp_dolls_0].order(:id).select_order_map(:id).must_equal [0] @db[:tmp_dolls_1].order(:id).select_order_map(:id).must_equal [1,2] @db[:tmp_dolls_2].order(:id).select_order_map(:id).must_equal [3] @db[:tmp_dolls_3].order(:id).select_order_map(:id).must_equal [5] end if DB.server_version >= 110000 it "should support hash partitioned tables for single column with :partition_* options" do @db.create_table(:tmp_dolls, :partition_by => :id, :partition_type=>:hash){Integer :id} @db.create_table(:tmp_dolls_0, :partition_of => :tmp_dolls){modulus 4; remainder 0} @db.create_table(:tmp_dolls_1, :partition_of => :tmp_dolls){modulus 4; remainder 1} @db.create_table(:tmp_dolls_2, :partition_of => :tmp_dolls){modulus 4; remainder 2} @db.create_table(:tmp_dolls_3, :partition_of => :tmp_dolls){modulus 4; remainder 3} @db[:tmp_dolls].insert(1) @db[:tmp_dolls].insert(2) @db[:tmp_dolls].insert(3) @db[:tmp_dolls].insert(4) 
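      # Which partition a given value hashes to is an implementation detail,
      # so the spec only checks that every row lands in exactly one of the
      # four hash partitions, not which one.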
@db[:tmp_dolls].order(:id).select_order_map(:id).must_equal [1,2,3,4] [0,1,2,3].flat_map{|i| @db[:"tmp_dolls_#{i}"].select_order_map(:id)}.sort.must_equal [1,2,3,4] end if DB.server_version >= 110000 it "should support partitioned tables with create_table?" do @db.create_table(:tmp_dolls, :partition_by => :id, :partition_type=>:range){Integer :id} @db.create_table?(:tmp_dolls_1, :partition_of => :tmp_dolls){from 1; to 3} @db.create_table?(:tmp_dolls_2, :partition_of => :tmp_dolls){from 3; to 4} @db[:tmp_dolls].insert(1) @db[:tmp_dolls].insert(2) @db[:tmp_dolls].insert(3) proc{@db[:tmp_dolls].insert(0)}.must_raise Sequel::DatabaseError proc{@db[:tmp_dolls].insert(5)}.must_raise Sequel::DatabaseError @db.create_table?(:tmp_dolls_0, :partition_of => :tmp_dolls){from minvalue; to 1} @db.create_table?(:tmp_dolls_3, :partition_of => :tmp_dolls){from 4; to maxvalue} @db[:tmp_dolls].insert(0) @db[:tmp_dolls].insert(5) @db[:tmp_dolls].order(:id).select_order_map(:id).must_equal [0,1,2,3,5] @db[:tmp_dolls_0].order(:id).select_order_map(:id).must_equal [0] @db[:tmp_dolls_1].order(:id).select_order_map(:id).must_equal [1,2] @db[:tmp_dolls_2].order(:id).select_order_map(:id).must_equal [3] @db[:tmp_dolls_3].order(:id).select_order_map(:id).must_equal [5] end if DB.server_version >= 100000 it "should support partitioned tables with create_table!" do @db.create_table(:tmp_dolls, :partition_by => :id, :partition_type=>:range){Integer :id} @db.create_table!(:tmp_dolls_1, :partition_of => :tmp_dolls){from 1; to 3} @db.create_table!(:tmp_dolls_1, :partition_of => :tmp_dolls){from 1; to 3} @db.create_table!(:tmp_dolls_2, :partition_of => :tmp_dolls){from 3; to 4} @db.create_table!(:tmp_dolls_2, :partition_of => :tmp_dolls){from 3; to 4} @db[:tmp_dolls].insert(1) @db[:tmp_dolls].insert(2) @db[:tmp_dolls].insert(3) proc{@db[:tmp_dolls].insert(0)}.must_raise Sequel::DatabaseError proc{@db[:tmp_dolls].insert(5)}.must_raise Sequel::DatabaseError @db.create_table!(:tmp_dolls_0, :partition_of => :tmp_dolls){from minvalue; to 1} @db.create_table!(:tmp_dolls_0, :partition_of => :tmp_dolls){from minvalue; to 1} @db.create_table!(:tmp_dolls_3, :partition_of => :tmp_dolls){from 4; to maxvalue} @db.create_table!(:tmp_dolls_3, :partition_of => :tmp_dolls){from 4; to maxvalue} @db[:tmp_dolls].insert(0) @db[:tmp_dolls].insert(5) @db[:tmp_dolls].order(:id).select_order_map(:id).must_equal [0,1,2,3,5] @db[:tmp_dolls_0].order(:id).select_order_map(:id).must_equal [0] @db[:tmp_dolls_1].order(:id).select_order_map(:id).must_equal [1,2] @db[:tmp_dolls_2].order(:id).select_order_map(:id).must_equal [3] @db[:tmp_dolls_3].order(:id).select_order_map(:id).must_equal [5] end if DB.server_version >= 100000 it "should raise for unsupported partition types" do @db.create_table(:tmp_dolls, :partition_by => [:id, :id2], :partition_type=>:range){Integer :id; Integer :id2} proc{@db.create_table(:tmp_dolls_1, :partition_of => :tmp_dolls){from 1, 1; to 3, 3; modulus 10}}.must_raise Sequel::Error proc{@db.create_table(:tmp_dolls_1, :partition_of => :tmp_dolls){from 1, 1}}.must_raise Sequel::Error proc{@db.create_table(:tmp_dolls_1, :partition_of => :tmp_dolls){modulus 10}}.must_raise Sequel::Error proc{@db.create_table(:tmp_dolls_1, :partition_of => :tmp_dolls){}}.must_raise Sequel::Error end if DB.server_version >= 100000 it "should not use a size for text columns" do @db.create_table(:tmp_dolls){String :description, text: true, size: :long} @db.tables.must_include :tmp_dolls end it "should create an unlogged table" do 
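    # UNLOGGED tables (PostgreSQL 9.1+) skip write-ahead logging, trading
    # crash safety for speed; only successful creation is asserted here.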
@db.create_table(:unlogged_dolls, :unlogged => true){text :name} end if DB.server_version >= 90100 it "should create a table inheriting from another table" do @db.create_table(:unlogged_dolls){text :name} @db.create_table(:tmp_dolls, :inherits=>:unlogged_dolls){} @db[:tmp_dolls].insert('a') @db[:unlogged_dolls].all.must_equal [{:name=>'a'}] end it "should create a table inheriting from multiple tables" do begin @db.create_table(:unlogged_dolls){text :name} @db.create_table(:tmp_dolls){text :bar} @db.create_table!(:items, :inherits=>[:unlogged_dolls, :tmp_dolls]){text :foo} @db[:items].insert(:name=>'a', :bar=>'b', :foo=>'c') @db[:unlogged_dolls].all.must_equal [{:name=>'a'}] @db[:tmp_dolls].all.must_equal [{:bar=>'b'}] @db[:items].all.must_equal [{:name=>'a', :bar=>'b', :foo=>'c'}] ensure @db.drop_table?(:items) end end it "should have #check_constraints method for getting check constraints" do @db.create_table(:tmp_dolls) do Integer :i Integer :j constraint(:ic, Sequel[:i] > 2) constraint(:jc, Sequel[:j] > 2) constraint(:ijc, Sequel[:i] - Sequel[:j] > 2) end @db.check_constraints(:tmp_dolls).must_equal(:ic=>{:definition=>"CHECK ((i > 2))", :columns=>[:i]}, :jc=>{:definition=>"CHECK ((j > 2))", :columns=>[:j]}, :ijc=>{:definition=>"CHECK (((i - j) > 2))", :columns=>[:i, :j]}) end it "should have #check_constraints return check constraints where columns are unknown" do begin @db.create_table(:tmp_dolls) do Integer :i Integer :j end @db.run "CREATE OR REPLACE FUNCTION valid_tmp_dolls(t1 tmp_dolls) RETURNS boolean AS 'SELECT false' LANGUAGE SQL;" @db.alter_table(:tmp_dolls) do add_constraint(:valid_tmp_dolls, Sequel.function(:valid_tmp_dolls, :tmp_dolls)) end @db.check_constraints(:tmp_dolls).must_equal(:valid_tmp_dolls=>{:definition=>"CHECK (valid_tmp_dolls(tmp_dolls.*))", :columns=>[]}) ensure @db.run "ALTER TABLE tmp_dolls DROP CONSTRAINT IF EXISTS valid_tmp_dolls" @db.run "DROP FUNCTION IF EXISTS valid_tmp_dolls(tmp_dolls)" end end if DB.server_version >= 90000 it "should support :if_exists option to drop_column" do @db.create_table(:tmp_dolls){Integer :a; Integer :b} 2.times do @db.drop_column :tmp_dolls, :b, :if_exists=>true @db[:tmp_dolls].columns.must_equal [:a] end end if DB.server_version >= 90000 it "should support primary_key with :type=>:smallserial, :type=>:serial or :type=>:bigserial" do [:smallserial, :serial, :bigserial, 'smallserial', 'serial', 'bigserial'].each do |type| @db.create_table!(:tmp_dolls){primary_key :id, :type=>type} @db[:tmp_dolls].insert @db[:tmp_dolls].get(:id).must_equal 1 end end if DB.server_version >= 100002 it "should support primary_key with :serial=>true" do @db.create_table!(:tmp_dolls){primary_key :id, :serial=>true} @db[:tmp_dolls].insert @db[:tmp_dolls].get(:id).must_equal 1 end if DB.server_version >= 100002 it "should support primary_key with :Bignum type and serial=>true" do @db.create_table!(:tmp_dolls){primary_key :id, :type=>:Bignum, :serial=>true} @db[:tmp_dolls].insert(2**48) @db[:tmp_dolls].get(:id).must_equal(2**48) end it "should support Bignum column with serial=>true" do @db.create_table!(:tmp_dolls){Bignum :id, :serial=>true, :primary_key=>true} @db[:tmp_dolls].insert(2**48) @db[:tmp_dolls].get(:id).must_equal(2**48) end it "should support creating identity columns on non-primary key tables" do @db.create_table(:tmp_dolls){Integer :a, :identity=>true} 2.times do @db[:tmp_dolls].insert end @db[:tmp_dolls].select_order_map(:a).must_equal [1, 2] @db[:tmp_dolls].insert(:a=>2) @db[:tmp_dolls].select_order_map(:a).must_equal [1, 2, 2] 
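# The identity column here is GENERATED BY DEFAULT, so explicit values such
# as the duplicate 2 above are accepted as-is. overriding_user_value below
# discards the supplied 5 and uses the next sequence value (3) instead,
# generating SQL roughly like:
#   INSERT INTO "tmp_dolls" ("a") OVERRIDING USER VALUE VALUES (5)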
@db[:tmp_dolls].insert(:a=>4) @db[:tmp_dolls].select_order_map(:a).must_equal [1, 2, 2, 4] @db[:tmp_dolls].overriding_user_value.insert(:a=>5) @db[:tmp_dolls].select_order_map(:a).must_equal [1, 2, 2, 3, 4] end if DB.server_version >= 100002 it "should support creating identity columns generated always" do @db.create_table(:tmp_dolls){primary_key :id, :identity=>:always} 2.times do @db[:tmp_dolls].insert end @db[:tmp_dolls].select_order_map(:id).must_equal [1, 2] proc{@db[:tmp_dolls].insert(:id=>2)}.must_raise Sequel::DatabaseError @db[:tmp_dolls].overriding_system_value.insert(:id=>4) @db[:tmp_dolls].select_order_map(:id).must_equal [1, 2, 4] @db[:tmp_dolls].insert @db[:tmp_dolls].select_order_map(:id).must_equal [1, 2, 3, 4] end if DB.server_version >= 100002 it "should handle generated column overrides using override value at time of merge_insert call" do @db.create_table(:tmp_dolls){primary_key :id, :identity=>:always} @db.create_table(:unlogged_dolls){Integer :i} @db[:unlogged_dolls].insert(10) @db[:unlogged_dolls].insert(20) @db[:tmp_dolls]. merge_using(:unlogged_dolls, :id=>:i). overriding_system_value. merge_insert(:i){i > 15}. overriding_user_value. merge_insert(:i). merge @db[:tmp_dolls].select_order_map(:id).must_equal [1, 20] end if DB.server_version >= 150000 it "should support converting serial columns to identity columns" do @db.create_table(:tmp_dolls){primary_key :id, :identity=>false, :serial=>true} sch = @db.schema(:tmp_dolls)[0][1] sch[:default].must_match(/nextval/) sch[:auto_increment].must_equal true 2.times do @db[:tmp_dolls].insert end @db.convert_serial_to_identity(:tmp_dolls) sch = @db.schema(:tmp_dolls)[0][1] sch[:default].must_be_nil sch[:auto_increment].must_equal true @db[:tmp_dolls].insert @db[:tmp_dolls].insert(5) @db[:tmp_dolls].select_order_map(:id).must_equal [1, 2, 3, 5] # Make sure it doesn't break if already converted @db.convert_serial_to_identity(:tmp_dolls) end if DB.server_version >= 100002 && DB.get{current_setting('is_superuser')} == 'on' it "should support converting serial columns to identity columns when using the :column option" do @db.create_table(:tmp_dolls){Integer :i, :primary_key=>true; serial :id} sch = @db.schema(:tmp_dolls)[1][1] sch[:default].must_match(/nextval/) 2.times do |i| @db[:tmp_dolls].insert(:i=>-i) end # Automatic conversion should not work proc{@db.convert_serial_to_identity(:tmp_dolls)}.must_raise Sequel::Error # Conversion of type without related sequence should not work proc{@db.convert_serial_to_identity(:tmp_dolls, :column=>:i)}.must_raise Sequel::Error @db.convert_serial_to_identity(:tmp_dolls, :column=>:id) sch = @db.schema(:tmp_dolls)[1][1] sch[:default].must_be_nil @db[:tmp_dolls].insert(:i=>200) @db[:tmp_dolls].insert(:i=>300, :id=>5) @db[:tmp_dolls].select_order_map(:id).must_equal [1, 2, 3, 5] # Make sure it doesn't break if already converted @db.convert_serial_to_identity(:tmp_dolls, :column=>:id) end if DB.server_version >= 100002 && DB.get{current_setting('is_superuser')} == 'on' it "should support creating generated columns" do @db.create_table(:tmp_dolls){Integer :a; Integer :b; Integer :c, :generated_always_as=>Sequel[:a] * 2 + :b + 1} @db[:tmp_dolls].insert(:a=>100, :b=>10) @db[:tmp_dolls].select_order_map([:a, :b, :c]).must_equal [[100, 10, 211]] end if DB.server_version >= 120000 it "should include :generated entry in schema for whether the column is generated" do @db.create_table(:tmp_dolls){Integer :a; Integer :b; Integer :c, :generated_always_as=>Sequel[:a] * 2 + :b + 1} 
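# On PostgreSQL 12+, :generated_always_as produces a stored generated column,
# roughly: "c" integer GENERATED ALWAYS AS (a * 2 + b + 1) STORED. The schema
# hash below reports such columns via the :generated entry.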
@db.schema(:tmp_dolls).map{|_,v| v[:generated]}.must_equal [false, false, true] end if DB.server_version >= 120000 it "should support deferred primary key and unique constraints on columns" do @db.create_table(:tmp_dolls){primary_key :id, :primary_key_deferrable=>true; Integer :i, :unique=>true, :unique_deferrable=>true} @db[:tmp_dolls].insert(:i=>10) DB.transaction do @db[:tmp_dolls].insert(:id=>1, :i=>1) @db[:tmp_dolls].insert(:id=>10, :i=>10) @db[:tmp_dolls].where(:i=>1).update(:id=>2) @db[:tmp_dolls].where(:id=>10).update(:i=>2) end @db[:tmp_dolls].select_order_map([:id, :i]).must_equal [[1, 10], [2, 1], [10, 2]] end if DB.server_version >= 90000 it "should support pg_loose_count extension" do @db.extension :pg_loose_count @db.create_table(:tmp_dolls){text :name} @db.loose_count(:tmp_dolls).must_be_kind_of(Integer) [0, -1].must_include @db.loose_count(:tmp_dolls) [0, -1].must_include @db.loose_count(Sequel[:public][:tmp_dolls]) @db[:tmp_dolls].insert('a') @db << 'VACUUM ANALYZE tmp_dolls' @db.loose_count(:tmp_dolls).must_equal 1 @db.loose_count(Sequel[:public][:tmp_dolls]).must_equal 1 end end describe "PostgreSQL temporary table/view support" do before(:all) do @db = DB @db.disconnect end after do @db.drop_view(:tmp_dolls_view, :if_exists=>true, :cascade=>true) rescue nil @db.drop_table?(:tmp_dolls) end it "should create a temporary table" do @db.create_table(:tmp_dolls, :temp => true){text :name} @db.table_exists?(:tmp_dolls).must_equal true @db.disconnect @db.table_exists?(:tmp_dolls).must_equal false end it "temporary table should support :on_commit option" do @db.drop_table?(:some_table) @db.transaction do @db.create_table(:some_table, :temp => true, :on_commit => :drop){text :name} end @db.table_exists?(:some_table).must_equal false @db.transaction do @db.create_table(:some_table, :temp => true, :on_commit => :delete_rows){text :name} @db[:some_table].insert('a') end @db.table_exists?(:some_table).must_equal true @db[:some_table].empty?.must_equal true @db.drop_table(:some_table) @db.transaction do @db.create_table(:some_table, :temp => true, :on_commit => :preserve_rows){text :name} @db[:some_table].insert('a') end @db.table_exists?(:some_table).must_equal true @db[:some_table].count.must_equal 1 @db.drop_table(:some_table) end it "temporary table should accept :on_commit with :as option" do @db.drop_table?(:some_table) @db.transaction do @db.create_table(:some_table, :temp => true, :on_commit => :drop, :as => 'select 1') end @db.table_exists?(:some_table).must_equal false end it ":on_commit should raise error if not used on a temporary table" do proc{@db.create_table(:some_table, :on_commit => :drop)}.must_raise(Sequel::Error) end it ":on_commit should raise error if given unsupported value" do proc{@db.create_table(:some_table, :temp => true, :on_commit => :unsupported){text :name}}.must_raise(Sequel::Error) end it "should not allow passing both :temp and :unlogged" do proc do @db.create_table(:temp_unlogged_dolls, :temp => true, :unlogged => true){text :name} end.must_raise(Sequel::Error, "can't provide both :temp and :unlogged to create_table") end it "should support temporary views" do @db.create_table(:tmp_dolls, :temp => true){Integer :number} @db[:tmp_dolls].insert(10) @db[:tmp_dolls].insert(20) @db.create_view(:tmp_dolls_view, @db[:tmp_dolls].where(:number=>10), :temp=>true) @db[:tmp_dolls_view].map(:number).must_equal [10] @db.create_or_replace_view(:tmp_dolls_view, @db[:tmp_dolls].where(:number=>20), :temp=>true) @db[:tmp_dolls_view].map(:number).must_equal [20] end
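# The :security_invoker option (PostgreSQL 15+) makes the view check table
# privileges as the querying user rather than the view owner, roughly:
#   CREATE VIEW ... WITH (security_invoker) AS SELECT ...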
it "should support security_invoker view option" do @db.create_table(:tmp_dolls, :temp => true){Integer :number} @db[:tmp_dolls].insert(10) @db.create_view(:tmp_dolls_view, @db[:tmp_dolls].where(:number=>10), :temp=>true, :security_invoker=>true) @db[:tmp_dolls_view].count.must_equal 1 end if DB.server_version >= 150000 end describe "PostgreSQL views" do before do @db = DB @db.drop_table?(:items, :cascade=>true) @db.create_table(:items){Integer :number} @db[:items].insert(10) @db[:items].insert(20) end after do @opts ||={} @db.drop_view(:items_view, @opts.merge(:if_exists=>true, :cascade=>true)) rescue nil @db.drop_table?(:items) end it "should support recursive views" do @db.create_view(:items_view, @db[:items].where(:number=>10).union(@db[:items, :items_view].where(Sequel.-(:number, 5)=>:n).select(:number), :all=>true, :from_self=>false), :recursive=>[:n]) @db[:items_view].select_order_map(:n).must_equal [10] @db[:items].insert(15) @db[:items_view].select_order_map(:n).must_equal [10, 15, 20] end if DB.server_version >= 90300 it "should support materialized views" do @opts = {:materialized=>true} @db.create_view(:items_view, @db[:items].where{number >= 10}, @opts) @db[:items_view].select_order_map(:number).must_equal [10, 20] @db[:items].insert(15) @db[:items_view].select_order_map(:number).must_equal [10, 20] @db.refresh_view(:items_view) @db[:items_view].select_order_map(:number).must_equal [10, 15, 20] @db.views.wont_include :items_view @db.views(@opts).must_include :items_view end if DB.server_version >= 90300 it "should support refreshing materialized views concurrently" do @opts = {:materialized=>true} @db.create_view(:items_view, @db[:items].where{number >= 10}, @opts) @db.refresh_view(:items_view) proc{@db.refresh_view(:items_view, :concurrently=>true)}.must_raise(Sequel::DatabaseError) @db.add_index :items_view, :number, :unique=>true @db.refresh_view(:items_view, :concurrently=>true) end if DB.server_version >= 90400 it "should support specifying tablespaces for materialized views" do @opts = {:materialized=>true} @db.create_view(:items_view, @db[:items].where{number >= 10}, :materialized=>true, :tablespace=>:pg_default) end if DB.server_version >= 90300 it "should support :if_exists=>true for not raising an error if the view does not exist" do @db.drop_view(:items_view, :if_exists=>true) end end describe "PostgreSQL", 'INSERT ON CONFLICT' do before(:all) do @db = DB @db.create_table!(:ic_test){Integer :a; Integer :b; Integer :c; TrueClass :c_is_unique, :default=>false; unique :a, :name=>:ic_test_a_uidx; unique [:b, :c], :name=>:ic_test_b_c_uidx; index [:c], :where=>:c_is_unique, :unique=>true} @ds = @db[:ic_test] end before do @ds.delete end after(:all) do @db.drop_table?(:ic_test) end it "Dataset#supports_insert_conflict? 
should be true" do @ds.supports_insert_conflict?.must_equal true end it "Dataset#insert_ignore and insert_conflict should ignore uniqueness violations" do @ds.insert(1, 2, 3) @ds.insert(10, 11, 3, true) proc{@ds.insert(1, 3, 4)}.must_raise Sequel::UniqueConstraintViolation proc{@ds.insert(11, 12, 3, true)}.must_raise Sequel::UniqueConstraintViolation @ds.insert_ignore.insert(1, 3, 4).must_be_nil @ds.insert_conflict.insert(1, 3, 4).must_be_nil @ds.insert_conflict.insert(11, 12, 3, true).must_be_nil @ds.insert_conflict(:target=>:a).insert(1, 3, 4).must_be_nil @ds.insert_conflict(:target=>:c, :conflict_where=>:c_is_unique).insert(11, 12, 3, true).must_be_nil @ds.insert_conflict(:constraint=>:ic_test_a_uidx).insert(1, 3, 4).must_be_nil @ds.all.must_equal [{:a=>1, :b=>2, :c=>3, :c_is_unique=>false}, {:a=>10, :b=>11, :c=>3, :c_is_unique=>true}] end it "Dataset#insert_ignore and insert_conflict should work with multi_insert/import" do @ds.insert(1, 2, 3) @ds.insert_ignore.multi_insert([{:a=>1, :b=>3, :c=>4}]) @ds.insert_ignore.import([:a, :b, :c], [[1, 3, 4]]) @ds.all.must_equal [{:a=>1, :b=>2, :c=>3, :c_is_unique=>false}] @ds.insert_conflict(:target=>:a, :update=>{:b=>3}).import([:a, :b, :c], [[1, 3, 4]]) @ds.all.must_equal [{:a=>1, :b=>3, :c=>3, :c_is_unique=>false}] @ds.insert_conflict(:target=>:a, :update=>{:b=>4}).multi_insert([{:a=>1, :b=>5, :c=>6}]) @ds.all.must_equal [{:a=>1, :b=>4, :c=>3, :c_is_unique=>false}] end it "Dataset#insert_conflict should handle upserts" do @ds.insert(1, 2, 3) @ds.insert_conflict(:target=>:a, :update=>{:b=>3}).insert(1, 3, 4).must_be_nil @ds.all.must_equal [{:a=>1, :b=>3, :c=>3, :c_is_unique=>false}] @ds.insert_conflict(:target=>[:b, :c], :update=>{:c=>5}).insert(5, 3, 3).must_be_nil @ds.all.must_equal [{:a=>1, :b=>3, :c=>5, :c_is_unique=>false}] @ds.insert_conflict(:constraint=>:ic_test_a_uidx, :update=>{:b=>4}).insert(1, 3).must_be_nil @ds.all.must_equal [{:a=>1, :b=>4, :c=>5, :c_is_unique=>false}] @ds.insert_conflict(:constraint=>:ic_test_a_uidx, :update=>{:b=>5}, :update_where=>{Sequel[:ic_test][:b]=>4}).insert(1, 3, 4).must_be_nil @ds.all.must_equal [{:a=>1, :b=>5, :c=>5, :c_is_unique=>false}] @ds.insert_conflict(:constraint=>:ic_test_a_uidx, :update=>{:b=>6}, :update_where=>{Sequel[:ic_test][:b]=>4}).insert(1, 3, 4).must_be_nil @ds.all.must_equal [{:a=>1, :b=>5, :c=>5, :c_is_unique=>false}] end it "Dataset#insert_conflict should support table aliases" do @ds = @db[Sequel[:ic_test].as(:foo)] @ds.insert(1, 2, 5) proc{@ds.insert(1, 3, 4)}.must_raise Sequel::UniqueConstraintViolation @ds.insert_conflict(:target=>:a, :update=>{:b=>Sequel[:foo][:c] + Sequel[:excluded][:c]}).insert(1, 7, 10) @ds.all.must_equal [{:a=>1, :b=>15, :c=>5, :c_is_unique=>false}] end end if DB.server_version >= 90500 describe "A PostgreSQL database" do before(:all) do @db = DB @db.create_table!(Sequel[:public][:testfk]){primary_key :id; foreign_key :i, Sequel[:public][:testfk]} end after(:all) do @db.drop_table?(Sequel[:public][:testfk]) end it "should provide the server version" do @db.server_version.must_be :>, 70000 end it "should create a dataset using the VALUES clause via #values" do @db.values([[1, 2], [3, 4]]).map([:column1, :column2]).must_equal [[1, 2], [3, 4]] end it "should correctly handle various numbers of columns" do [1, 2, 15, 16, 17, 63, 64, 65, 255, 256, 257, 1663, 1664].each do |i| DB.get((1..i).map{|j| Sequel.as(j, "c#{j}")}).must_equal((1..i).to_a) end proc{DB.get((1..1665).map{|j| Sequel.as(j, "c#{j}")})}.must_raise Sequel::DatabaseError end it "should support 
ordering in aggregate functions" do @db.from(@db.values([['1'], ['2']]).as(:t, [:a])).get{string_agg(:a, '-').order(Sequel.desc(:a)).as(:c)}.must_equal '2-1' end if DB.server_version >= 90000 it "should support ordering and limiting with #values" do @db.values([[1, 2], [3, 4]]).reverse(:column2, :column1).limit(1).map([:column1, :column2]).must_equal [[3, 4]] @db.values([[1, 2], [3, 4]]).reverse(:column2, :column1).offset(1).map([:column1, :column2]).must_equal [[1, 2]] end it "should support subqueries with #values" do @db.values([[1, 2]]).from_self.cross_join(@db.values([[3, 4]]).as(:x, [:c1, :c2])).map([:column1, :column2, :c1, :c2]).must_equal [[1, 2, 3, 4]] end it "should support JOIN USING" do @db.from(@db.values([[1, 2]]).as(:x, [:c1, :c2])).join(@db.values([[1, 2]]).as(:y, [:c1, :c2]), [:c1, :c2]).all.must_equal [{:c1=>1, :c2=>2}] end it "should support column aliases for JOIN USING" do @db.from(@db.values([[1, 2]]).as(:x, [:c1, :c2])).join(@db.values([[1, 2]]).as(:y, [:c1, :c2]), Sequel.as([:c1, :c2], :foo)).select_all(:foo).all.must_equal [{:c1=>1, :c2=>2}] end if DB.server_version >= 140000 it "should respect the :read_only option per-savepoint" do proc{@db.transaction{@db.transaction(:savepoint=>true, :read_only=>true){@db[Sequel[:public][:testfk]].insert}}}.must_raise(Sequel::DatabaseError) proc{@db.transaction(:auto_savepoint=>true, :read_only=>true){@db.transaction(:read_only=>false){@db[Sequel[:public][:testfk]].insert}}}.must_raise(Sequel::DatabaseError) @db[Sequel[:public][:testfk]].delete @db.transaction{@db[Sequel[:public][:testfk]].insert; @db.transaction(:savepoint=>true, :read_only=>true){@db[Sequel[:public][:testfk]].all;}} @db.transaction{@db.transaction(:savepoint=>true, :read_only=>true){}; @db[Sequel[:public][:testfk]].insert} @db.transaction{@db[Sequel[:public][:testfk]].all; @db.transaction(:savepoint=>true, :read_only=>true){@db[Sequel[:public][:testfk]].all;}} end it "should support disable_insert_returning" do ds = @db[Sequel[:public][:testfk]].disable_insert_returning ds.delete ds.insert.must_be_nil id = ds.max(:id) ds.select_order_map([:id, :i]).must_equal [[id, nil]] ds.insert(:i=>id).must_be_nil ds.select_order_map([:id, :i]).must_equal [[id, nil], [id+1, id]] ds.insert_select(:i=>ds.max(:id)).must_be_nil ds.select_order_map([:id, :i]).must_equal [[id, nil], [id+1, id]] c = Class.new(Sequel::Model(ds)) c.class_eval do def before_create self.id = model.max(:id)+1 super end end c.create(:i=>id+1).must_equal c.load(:id=>id+2, :i=>id+1) ds.select_order_map([:id, :i]).must_equal [[id, nil], [id+1, id], [id+2, id+1]] ds.delete end it "should support functions with and without quoting" do ds = @db[Sequel[:public][:testfk]] ds.delete ds.insert ds.get{sum(1)}.must_equal 1 ds.get{Sequel.function('pg_catalog.sum', 1)}.must_equal 1 ds.get{sum.function(1)}.must_equal 1 ds.get{pg_catalog[:sum].function(1)}.must_equal 1 ds.delete end it "should support a :qualify option to tables and views" do @db.tables(:qualify=>true).must_include(Sequel.qualify('public', 'testfk')) begin @db.create_view(:testfkv, @db[:testfk]) @db.views(:qualify=>true).must_include(Sequel.qualify('public', 'testfkv')) ensure @db.drop_view(:testfkv) end end it "should handle double underscores in tables when using the qualify option" do @db.create_table!(Sequel.qualify(:public, 'test__fk')){Integer :a} @db.tables(:qualify=>true).must_include(Sequel.qualify('public', 'test__fk')) @db.drop_table(Sequel.qualify(:public, 'test__fk')) end it "should not typecast the int2vector type incorrectly" do 
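# int2vector values are returned as space-separated strings ('10 20');
# typecasting them like a plain integer would silently truncate to 10,
# which is what the assertion below guards against.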
@db.get(Sequel.cast('10 20', :int2vector)).wont_equal 10 end it "should not typecast the money type incorrectly" do @db.get(Sequel.cast('10.01', :money)).wont_equal 0 end it "should correctly parse the schema" do [[[:id, 23], [:i, 23]], [[:id, 20], [:i, 20]]].must_include @db.schema(Sequel[:public][:testfk], :reload=>true).map{|c,s| [c, s[:oid]]} end it "should parse foreign keys for tables in a schema" do @db.foreign_key_list(Sequel[:public][:testfk]).must_equal [{:on_delete=>:no_action, :on_update=>:no_action, :columns=>[:i], :key=>[:id], :deferrable=>false, :table=>Sequel.qualify(:public, :testfk), :name=>:testfk_i_fkey}] @db.foreign_key_list(Sequel[:public][:testfk], :schema=>false).must_equal [{:on_delete=>:no_action, :on_update=>:no_action, :columns=>[:i], :key=>[:id], :deferrable=>false, :table=>:testfk, :name=>:testfk_i_fkey, :schema=>:public}] end it "should return uuid fields as strings" do @db.get(Sequel.cast('550e8400-e29b-41d4-a716-446655440000', :uuid)).must_equal '550e8400-e29b-41d4-a716-446655440000' end it "should handle inserts with placeholder literal string tables" do ds = @db.from(Sequel.lit('?', :testfk)) ds.delete ds.insert(:id=>1) ds.select_map(:id).must_equal [1] end it "should have notice receiver receive notices" do a = nil Sequel.connect(DB.opts.merge(:notice_receiver=>proc{|r| a = r.result_error_message})){|db| db.do("BEGIN\nRAISE WARNING 'foo';\nEND;")} a.must_equal "WARNING: foo\n" end if uses_pg && DB.server_version >= 90000 end describe "A PostgreSQL database " do after do DB.drop_table?(:b, :a) end it "should handle non-ASCII column aliases" do s = String.new("\u00E4").force_encoding(DB.get(Sequel.cast('a', :text)).encoding) k, v = DB.select(Sequel.as(Sequel.cast(s, :text), s)).first.shift k.to_s.must_equal v end it "should parse foreign keys referencing current table using :reverse option" do DB.create_table!(:a) do primary_key :id Integer :i Integer :j foreign_key :a_id, :a, :foreign_key_constraint_name=>:a_a unique [:i, :j] end DB.create_table!(:b) do foreign_key :a_id, :a, :foreign_key_constraint_name=>:a_a Integer :c Integer :d foreign_key [:c, :d], :a, :key=>[:j, :i], :name=>:a_c_d end DB.foreign_key_list(:a, :reverse=>true).must_equal [ {:name=>:a_a, :columns=>[:a_id], :key=>[:id], :on_update=>:no_action, :on_delete=>:no_action, :deferrable=>false, :table=>:a, :schema=>:public}, {:name=>:a_a, :columns=>[:a_id], :key=>[:id], :on_update=>:no_action, :on_delete=>:no_action, :deferrable=>false, :table=>:b, :schema=>:public}, {:name=>:a_c_d, :columns=>[:c, :d], :key=>[:j, :i], :on_update=>:no_action, :on_delete=>:no_action, :deferrable=>false, :table=>:b, :schema=>:public}] end end describe "A PostgreSQL database with domain types" do before(:all) do @db = DB @db << "DROP DOMAIN IF EXISTS positive_number CASCADE" @db << "CREATE DOMAIN positive_number AS numeric(10,2) CHECK (VALUE > 0)" @db.create_table!(:testfk){positive_number :id, :primary_key=>true} end after(:all) do @db.drop_table?(:testfk) @db << "DROP DOMAIN positive_number" end it "should correctly parse the schema" do sch = @db.schema(:testfk, :reload=>true) sch.first.last.delete(:domain_oid).must_be_kind_of(Integer) sch.first.last[:db_domain_type].must_equal 'positive_number' end end describe "A PostgreSQL dataset" do before(:all) do @db = DB @d = @db[:test] @db.create_table! 
:test do text :name integer :value, :index => true end end before do @d.delete end after do @db.drop_table?(:atest) end after(:all) do @db.drop_table?(:test) end it "should support returning limited results with ties" do @d.insert(:value => 1) @d.insert(:value => 1) @d.insert(:value => 2) @d.insert(:value => 2) @d.order(:value).select(:value).limit(1).select_order_map(:value).must_equal [1] @d.order(:value).select(:value).limit(1).with_ties.select_order_map(:value).must_equal [1, 1] @d.order(:value).select(:value).limit(2, 1).select_order_map(:value).must_equal [1, 2] @d.order(:value).select(:value).limit(2, 1).with_ties.select_order_map(:value).must_equal [1, 2, 2] @d.order(:value).select(:value).offset(1).select_order_map(:value).must_equal [1, 2, 2] @d.order(:value).select(:value).offset(1).with_ties.select_order_map(:value).must_equal [1, 2, 2] end if DB.server_version >= 130000 it "should support regexps" do @d.insert(:name => 'abc', :value => 1) @d.insert(:name => 'bcd', :value => 2) @d.filter(:name => /bc/).count.must_equal 2 @d.filter(:name => /^bc/).count.must_equal 1 end it "should support NULLS FIRST and NULLS LAST" do @d.insert(:name => 'abc') @d.insert(:name => 'bcd') @d.insert(:name => 'bcd', :value => 2) @d.order(Sequel.asc(:value, :nulls=>:first), :name).select_map(:name).must_equal %w[abc bcd bcd] @d.order(Sequel.asc(:value, :nulls=>:last), :name).select_map(:name).must_equal %w[bcd abc bcd] @d.order(Sequel.asc(:value, :nulls=>:first), :name).reverse.select_map(:name).must_equal %w[bcd bcd abc] end it "should support selecting from LATERAL functions" do @d.from{[generate_series(1,3,1).as(:a), pow(:a, 2).lateral.as(:b)]}.select_map([:a, :b]).must_equal [[1, 1], [2, 4], [3, 9]] end if DB.server_version >= 90300 it "should support ordered-set and hypothetical-set aggregate functions" do @d.from{generate_series(1,3,1).as(:a)}.select{(a.sql_number % 2).as(:a)}.from_self.get{mode.function.within_group(:a)}.must_equal 1 end if DB.server_version >= 90400 it "should support functions with ordinality" do @d.from{generate_series(1,10,3).with_ordinality}.select_map([:generate_series, :ordinality]).must_equal [[1, 1], [4, 2], [7, 3], [10, 4]] end if DB.server_version >= 90400 it "#lock should lock tables and yield if a block is given" do @d.lock('EXCLUSIVE'){@d.insert(:name=>'a')} end it "#lock should raise Error for unsupported lock mode" do proc{@d.lock('BAD'){}}.must_raise Sequel::Error end it "should support exclusion constraints when creating or altering tables" do @db.create_table!(:atest){Integer :t; exclude [[Sequel.desc(:t, :nulls=>:last), '=']], :using=>:btree, :where=>proc{t > 0}} @db[:atest].insert(1) @db[:atest].insert(2) proc{@db[:atest].insert(2)}.must_raise(Sequel::Postgres::ExclusionConstraintViolation) @db.create_table!(:atest){Integer :t} @db.alter_table(:atest){add_exclusion_constraint [[:t, '=']], :using=>:btree, :name=>'atest_ex'} @db[:atest].insert(1) @db[:atest].insert(2) proc{@db[:atest].insert(2)}.must_raise(Sequel::Postgres::ExclusionConstraintViolation) @db.alter_table(:atest){drop_constraint 'atest_ex'} end if DB.server_version >= 90000 it "should support deferrable exclusion constraints" do @db.create_table!(:atest){Integer :t; exclude [[Sequel.desc(:t, :nulls=>:last), '=']], :using=>:btree, :where=>proc{t > 0}, :deferrable => true} proc do @db.transaction do @db[:atest].insert(2) @db[:atest].insert(2) end end.must_raise(Sequel::Postgres::ExclusionConstraintViolation) end if DB.server_version >= 90000 it "should support Database#error_info for getting info hash on
the given error" do @db.create_table!(:atest){Integer :t; Integer :t2, :null=>false, :default=>1; constraint :f, :t=>0} begin @db[:atest].insert(1) rescue => e end e.wont_equal nil info = @db.error_info(e) info[:schema].must_equal 'public' info[:table].must_equal 'atest' info[:constraint].must_equal 'f' info[:column].must_be_nil info[:type].must_be_nil begin @db[:atest].insert(0, nil) rescue => e end e.wont_equal nil info = @db.error_info(e.wrapped_exception) info[:schema].must_equal 'public' info[:table].must_equal 'atest' info[:constraint].must_be_nil info[:column].must_equal 't2' info[:type].must_be_nil end if DB.server_version >= 90300 && uses_pg && Object.const_defined?(:PG) && ::PG.const_defined?(:Constants) && ::PG::Constants.const_defined?(:PG_DIAG_SCHEMA_NAME) it "should support Database#do for executing anonymous code blocks" do @db.drop_table?(:btest) @db.do "BEGIN EXECUTE 'CREATE TABLE btest (a INTEGER)'; EXECUTE 'INSERT INTO btest VALUES (1)'; END" @db[:btest].select_map(:a).must_equal [1] @db.do "BEGIN EXECUTE 'DROP TABLE btest; CREATE TABLE atest (a INTEGER)'; EXECUTE 'INSERT INTO atest VALUES (1)'; END", :language=>:plpgsql @db[:atest].select_map(:a).must_equal [1] end if DB.server_version >= 90000 it "should support adding foreign key constarints that are not yet valid, and validating them later" do @db.create_table!(:atest){primary_key :id; Integer :fk} @db[:atest].insert(1, 5) @db.alter_table(:atest){add_foreign_key [:fk], :atest, :not_valid=>true, :name=>:atest_fk} @db[:atest].insert(2, 1) proc{@db[:atest].insert(3, 4)}.must_raise(Sequel::ForeignKeyConstraintViolation) proc{@db.alter_table(:atest){validate_constraint :atest_fk}}.must_raise(Sequel::ForeignKeyConstraintViolation) @db[:atest].where(:id=>1).update(:fk=>2) @db.alter_table(:atest){validate_constraint :atest_fk} @db.alter_table(:atest){validate_constraint :atest_fk} end if DB.server_version >= 90200 it "should support adding check constarints that are not yet valid, and validating them later" do @db.create_table!(:atest){Integer :a} @db[:atest].insert(5) @db.alter_table(:atest){add_constraint({:name=>:atest_check, :not_valid=>true}){a >= 10}} @db[:atest].insert(10) proc{@db[:atest].insert(6)}.must_raise(Sequel::CheckConstraintViolation) proc{@db.alter_table(:atest){validate_constraint :atest_check}}.must_raise(Sequel::CheckConstraintViolation, Sequel::DatabaseError) @db[:atest].where{a < 10}.update(:a=>Sequel.+(:a, 10)) @db.alter_table(:atest){validate_constraint :atest_check} @db.alter_table(:atest){validate_constraint :atest_check} end if DB.server_version >= 90200 it "should support :using when altering a column's type" do @db.create_table!(:atest){Integer :t} @db[:atest].insert(1262404000) @db.alter_table(:atest){set_column_type :t, Time, :using=>Sequel.cast('epoch', Time) + Sequel.cast('1 second', :interval) * :t} @db[:atest].get(Sequel.extract(:year, :t)).must_equal 2010 end it "should support :using with a string when altering a column's type" do @db.create_table!(:atest){Integer :t} @db[:atest].insert(1262304000) @db.alter_table(:atest){set_column_type :t, Time, :using=>"'epoch'::timestamp + '1 second'::interval * t"} @db[:atest].get(Sequel.extract(:year, :t)).must_equal 2010 end it "should have #transaction support various types of synchronous options" do @db.transaction(:synchronous=>:on){} @db.transaction(:synchronous=>true){} @db.transaction(:synchronous=>:off){} @db.transaction(:synchronous=>false){} @db.transaction(:synchronous=>nil){} if @db.server_version >= 90100 
@db.transaction(:synchronous=>:local){} if @db.server_version >= 90200 @db.transaction(:synchronous=>:remote_write){} end end end it "should have #transaction support read only transactions" do @db.transaction(:read_only=>true){} @db.transaction(:read_only=>false){} @db.transaction(:isolation=>:serializable, :read_only=>true){} @db.transaction(:isolation=>:serializable, :read_only=>false){} end it "should have #transaction support deferrable transactions" do @db.transaction(:deferrable=>true){} @db.transaction(:deferrable=>false){} @db.transaction(:deferrable=>true, :read_only=>true){} @db.transaction(:deferrable=>false, :read_only=>false){} @db.transaction(:isolation=>:serializable, :deferrable=>true, :read_only=>true){} @db.transaction(:isolation=>:serializable, :deferrable=>false, :read_only=>false){} end if DB.server_version >= 90100 it "should support parsing partial indexes with :include_partial option" do @db.add_index :test, [:name, :value], :where=>(Sequel[:value] > 10), :name=>:tnv_partial @db.indexes(:test)[:tnv_partial].must_be_nil @db.indexes(:test, :include_partial=>true)[:tnv_partial].must_equal(:columns=>[:name, :value], :unique=>false, :deferrable=>nil) end it "should support creating indexes concurrently" do @db.add_index :test, [:name, :value], :concurrently=>true, :name=>'tnv0' end it "should support dropping indexes only if they already exist" do proc{@db.drop_index :test, [:name, :value], :name=>'tnv1'}.must_raise Sequel::DatabaseError @db.drop_index :test, [:name, :value], :if_exists=>true, :name=>'tnv1' @db.add_index :test, [:name, :value], :name=>'tnv1' @db.drop_index :test, [:name, :value], :if_exists=>true, :name=>'tnv1' end it "should support CASCADE when dropping indexes" do @db.add_index :test, [:name, :value], :name=>'tnv2', :unique=>true @db.create_table(:atest){text :name; integer :value; foreign_key [:name, :value], :test, :key=>[:name, :value]} @db.foreign_key_list(:atest).length.must_equal 1 @db.drop_index :test, [:name, :value], :cascade=>true, :name=>'tnv2' @db.foreign_key_list(:atest).length.must_equal 0 end it "should support dropping indexes concurrently" do @db.add_index :test, [:name, :value], :name=>'tnv2' @db.drop_index :test, [:name, :value], :concurrently=>true, :name=>'tnv2' end if DB.server_version >= 90200 it "should support creating indexes only if they do not exist" do @db.add_index :test, [:name, :value], :name=>'tnv3' proc{@db.add_index :test, [:name, :value], :name=>'tnv3'}.must_raise Sequel::DatabaseError @db.add_index :test, [:name, :value], :if_not_exists=>true, :name=>'tnv3' end if DB.server_version >= 90500 it "should support specifying whether NULLS are distinct in unique indexes" do @db.create_table(:atest){Integer :a} @db.add_index :atest, :a, :nulls_distinct=>true, :unique=>true @db[:atest].insert @db[:atest].insert @db[:atest].count.must_equal 2 @db.create_table!(:atest){Integer :a} @db.add_index :atest, :a, :nulls_distinct=>false, :unique=>true @db[:atest].insert proc{@db[:atest].insert}.must_raise Sequel::DatabaseError end if DB.server_version >= 150000 it "should support including columns in indexes" do @db.create_table(:atest){Integer :a; Integer :b; Integer :c} @db.add_index :atest, :a, :include=>[:b, :c] @db.add_index :atest, :b, :include=>:a end if DB.server_version >= 110000 it "should support specifying tablespaces for tables" do @db.create_table(:atest, :tablespace=>:pg_default){Integer :a} end it "should support specifying tablespaces for indexes" do @db.create_table(:atest){Integer :a} @db.add_index :atest, :a, 
:tablespace=>:pg_default end it "#lock should lock table if inside a transaction" do @db.transaction{@d.lock('EXCLUSIVE'); @d.insert(:name=>'a')} end it "#lock should return nil" do @d.lock('EXCLUSIVE'){@d.insert(:name=>'a')}.must_be_nil @db.transaction{@d.lock('EXCLUSIVE').must_be_nil; @d.insert(:name=>'a')} end it "should raise an error if attempting to update a joined dataset with a single FROM table" do proc{@db[:test].join(:test, [:name]).update(:name=>'a')}.must_raise(Sequel::Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') end it "should truncate with options" do @d.insert( :name => 'abc', :value => 1) @d.count.must_equal 1 @d.truncate(:cascade => true) @d.count.must_equal 0 if @d.db.server_version > 80400 @d.insert( :name => 'abc', :value => 1) @d.truncate(:cascade => true, :only=>true, :restart=>true) @d.count.must_equal 0 end end it "should truncate multiple tables at once" do tables = [:test, :test] tables.each{|t| @d.from(t).insert} @d.from(:test, :test).truncate tables.each{|t| @d.from(t).count.must_equal 0} end it "should not allow truncate for grouped or joined datasets" do proc{@d.from(:test).cross_join(:test).truncate}.must_raise Sequel::InvalidOperation proc{@d.from(:test).group(:test).truncate}.must_raise Sequel::InvalidOperation end it "should raise when attempting to insert with 0 or multiple tables" do proc{@d.from(:test, :test).insert}.must_raise Sequel::InvalidOperation proc{@d.from.insert}.must_raise Sequel::Error end end describe "Dataset#distinct" do before do @db = DB @db.create_table!(:a) do Integer :a Integer :b end @ds = @db[:a] end after do @db.drop_table?(:a) end it "#distinct with arguments should return results distinct on those arguments" do @ds.insert(20, 10) @ds.insert(30, 10) @ds.order(:b, :a).distinct.map(:a).must_equal [20, 30] @ds.order(:b, Sequel.desc(:a)).distinct.map(:a).must_equal [30, 20] @ds.order(:b, :a).distinct(:b).map(:a).must_equal [20] @ds.order(:b, Sequel.desc(:a)).distinct(:b).map(:a).must_equal [30] end end if DB.pool.respond_to?(:max_size) and DB.pool.max_size > 1 describe "Dataset#for_update support" do before do @db = DB.create_table!(:items) do primary_key :id Integer :number String :name end @ds = DB[:items] end after do DB.drop_table?(:items) DB.disconnect end it "should handle FOR UPDATE" do @ds.insert(:number=>20) c, t = nil, nil q = Queue.new DB.transaction do @ds.for_update.first(:id=>1) t = Thread.new do DB.transaction do q.push nil @ds.filter(:id=>1).update(:name=>'Jim') c = @ds.first(:id=>1) q.push nil end end q.pop @ds.filter(:id=>1).update(:number=>30) end q.pop t.join c.must_equal(:id=>1, :number=>30, :name=>'Jim') end it "should handle FOR SHARE" do @ds.insert(:number=>20) c, t = nil q = Queue.new DB.transaction do @ds.for_share.first(:id=>1) t = Thread.new do DB.transaction do c = @ds.for_share.filter(:id=>1).first q.push nil end end q.pop @ds.filter(:id=>1).update(:name=>'Jim') c.must_equal(:id=>1, :number=>20, :name=>nil) end t.join end end end describe "A PostgreSQL dataset with a timestamp field" do before(:all) do @db = DB @db.create_table! 
:test3 do Date :date DateTime :time end @d = @db[:test3] if @db.adapter_scheme == :postgres @db.convert_infinite_timestamps.must_equal false @db.convert_infinite_timestamps = false @db.convert_infinite_timestamps = true @db.convert_infinite_timestamps = false else @db.extension :pg_extended_date_support end end before do @d.delete end after do @db.convert_infinite_timestamps = false Sequel.datetime_class = Time Sequel::SQLTime.date = nil Sequel.default_timezone = nil end after(:all) do @db.drop_table?(:test3) end it "should store milliseconds in time fields for Time objects" do t = Time.now @d.insert(:time=>t) t2 = @d.get(:time) @d.literal(t2).must_equal @d.literal(t) t2.strftime('%Y-%m-%d %H:%M:%S').must_equal t.strftime('%Y-%m-%d %H:%M:%S') (t2.is_a?(Time) ? t2.usec : t2.strftime('%N').to_i/1000).must_equal t.usec end it "should store milliseconds in time fields for DateTime objects" do t = DateTime.now @d.insert(:time=>t) t2 = @d.get(:time) @d.literal(t2).must_equal @d.literal(t) t2.strftime('%Y-%m-%d %H:%M:%S').must_equal t.strftime('%Y-%m-%d %H:%M:%S') (t2.is_a?(Time) ? t2.usec : t2.strftime('%N').to_i/1000).must_equal t.strftime('%N').to_i/1000 end it "should respect SQLTime.date setting for time columns" do Sequel::SQLTime.date = Time.local(2000, 1, 2) d = Sequel::SQLTime.create(10, 11, 12) @db.get(Sequel.cast(d, :time)).must_equal d @db.get(Sequel.cast(d, :timetz)).must_equal d end it "should respect Sequel.application_timezone for time columns" do d = Sequel::SQLTime.create(10, 11, 12) Sequel.application_timezone = :local @db.get(Sequel.cast(d, :time)).utc_offset.must_equal Time.now.utc_offset @db.get(Sequel.cast(d, :timetz)).utc_offset.must_equal Time.now.utc_offset Sequel.application_timezone = :utc @db.get(Sequel.cast(d, :time)).utc_offset.must_equal 0 @db.get(Sequel.cast(d, :timetz)).utc_offset.must_equal 0 end it "should handle parsing dates and timestamps with 1, 2, and 3 digit years" do [1, 10, 100, -2, -20, -200].each do |year| d = Date.new(year, 2, 3) @db.get(Sequel.cast(d, Date)).must_equal d begin Sequel.default_timezone = :utc d = Time.utc(year, 2, 3, 10, 11, 12) @db.get(Sequel.cast(d, Time)).must_equal d Sequel.datetime_class = DateTime d = DateTime.new(year, 2, 3, 10, 11, 12) @db.get(Sequel.cast(d, Time)).must_equal d ensure Sequel.datetime_class = Time Sequel.default_timezone = nil end end end it "should handle parsing dates and timestamps in the distant future" do d = Date.new(5874896, 2, 3) @db.get(Sequel.cast(d, Date)).must_equal d d = Time.local(294275, 2, 3, 10, 11, 12) @db.get(Sequel.cast(d, Time)).must_equal d Sequel.datetime_class = DateTime d = DateTime.new(294275, 2, 3, 10, 11, 12) @db.get(Sequel.cast(d, Time)).must_equal d end it "should handle BC times and dates" do d = Date.new(-1234, 2, 3) @db.get(Sequel.cast(d, Date)).must_equal d Sequel.default_timezone = :utc t = Time.at(-100000000000).utc + 0.5 @db.get(Sequel.cast(t, Time)).must_equal t @db.get(Sequel.cast(t, :timestamptz)).must_equal t Sequel.datetime_class = DateTime dt = DateTime.new(-1234, 2, 3, 10, 20, Rational(30, 20)) @db.get(Sequel.cast(dt, DateTime)).must_equal dt @db.get(Sequel.cast(dt, :timestamptz)).must_equal dt end it "should handle BC times and dates in bound variables" do d = Date.new(-1234, 2, 3) @db.select(Sequel.cast(:$d, Date)).call(:single_value, :d=>d).must_equal d Sequel.default_timezone = :utc t = Time.at(-100000000000).utc + 0.5 @db.select(Sequel.cast(:$t, Time)).call(:single_value, :t=>t).must_equal t @db.select(Sequel.cast(:$t, :timestamptz)).call(:single_value,
:t=>t).must_equal t Sequel.datetime_class = DateTime dt = DateTime.new(-1234, 2, 3, 10, 20, Rational(30, 20)) @db.select(Sequel.cast(:$dt, DateTime)).call(:single_value, :dt=>dt).must_equal dt @db.select(Sequel.cast(:$dt, :timestamptz)).call(:single_value, :dt=>dt).must_equal dt end if uses_pg_or_jdbc it "should handle infinite timestamps if convert_infinite_timestamps is set" do @d.insert(:time=>Sequel.cast('infinity', DateTime)) @db.convert_infinite_timestamps = :nil @db[:test3].get(:time).must_be_nil @db.convert_infinite_timestamps = :string @db[:test3].get(:time).must_equal 'infinity' @db.convert_infinite_timestamps = :float @db[:test3].get(:time).must_equal 1.0/0.0 @db.convert_infinite_timestamps = 'nil' @db[:test3].get(:time).must_be_nil @db.convert_infinite_timestamps = 'string' @db[:test3].get(:time).must_equal 'infinity' @db.convert_infinite_timestamps = 'date' @db[:test3].get(:time).must_equal Date::Infinity.new @db.convert_infinite_timestamps = 'float' @db[:test3].get(:time).must_equal 1.0/0.0 @db.convert_infinite_timestamps = 't' @db[:test3].get(:time).must_equal 1.0/0.0 @db.convert_infinite_timestamps = true @db[:test3].get(:time).must_equal 1.0/0.0 @db.convert_infinite_timestamps = 'f' proc{@db[:test3].get(:time)}.must_raise ArgumentError, Sequel::InvalidValue @db.convert_infinite_timestamps = nil proc{@db[:test3].get(:time)}.must_raise ArgumentError, Sequel::InvalidValue @db.convert_infinite_timestamps = false proc{@db[:test3].get(:time)}.must_raise ArgumentError, Sequel::InvalidValue @d.update(:time=>Sequel.cast('-infinity', DateTime)) @db.convert_infinite_timestamps = :nil @db[:test3].get(:time).must_be_nil @db.convert_infinite_timestamps = :string @db[:test3].get(:time).must_equal '-infinity' @db.convert_infinite_timestamps = :date @db[:test3].get(:time).must_equal(-Date::Infinity.new) @db.convert_infinite_timestamps = :float @db[:test3].get(:time).must_equal(-1.0/0.0) end it "should handle infinite dates if convert_infinite_timestamps is set" do @d.insert(:time=>Sequel.cast('infinity', DateTime)) @db.convert_infinite_timestamps = :nil @db[:test3].get(Sequel.cast(:time, Date)).must_be_nil @db.convert_infinite_timestamps = :string @db[:test3].get(Sequel.cast(:time, Date)).must_equal 'infinity' @db.convert_infinite_timestamps = :float @db[:test3].get(Sequel.cast(:time, Date)).must_equal 1.0/0.0 @db.convert_infinite_timestamps = 'nil' @db[:test3].get(Sequel.cast(:time, Date)).must_be_nil @db.convert_infinite_timestamps = 'string' @db[:test3].get(Sequel.cast(:time, Date)).must_equal 'infinity' @db.convert_infinite_timestamps = 'float' @db[:test3].get(Sequel.cast(:time, Date)).must_equal 1.0/0.0 @db.convert_infinite_timestamps = 't' @db[:test3].get(Sequel.cast(:time, Date)).must_equal 1.0/0.0 @db.convert_infinite_timestamps = true @db[:test3].get(Sequel.cast(:time, Date)).must_equal 1.0/0.0 @db.convert_infinite_timestamps = 'f' proc{@db[:test3].get(Sequel.cast(:time, Date))}.must_raise ArgumentError, Sequel::InvalidValue @db.convert_infinite_timestamps = nil proc{@db[:test3].get(Sequel.cast(:time, Date))}.must_raise ArgumentError, Sequel::InvalidValue @db.convert_infinite_timestamps = false proc{@db[:test3].get(Sequel.cast(:time, Date))}.must_raise ArgumentError, Sequel::InvalidValue @d.update(:time=>Sequel.cast('-infinity', DateTime)) @db.convert_infinite_timestamps = :nil @db[:test3].get(Sequel.cast(:time, Date)).must_be_nil @db.convert_infinite_timestamps = :string @db[:test3].get(Sequel.cast(:time, Date)).must_equal '-infinity' @db.convert_infinite_timestamps = :float 
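# Under the :float setting, '-infinity' maps to -Float::INFINITY, mirroring
# the positive infinity case checked above.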
@db[:test3].get(Sequel.cast(:time, Date)).must_equal(-1.0/0.0) end it "should handle conversions from infinite strings/floats in models" do c = Class.new(Sequel::Model(:test3)) @db.convert_infinite_timestamps = :float c.new(:time=>'infinity').time.must_equal 'infinity' c.new(:time=>'-infinity').time.must_equal '-infinity' c.new(:time=>1.0/0.0).time.must_equal 1.0/0.0 c.new(:time=>-1.0/0.0).time.must_equal(-1.0/0.0) end it "should handle infinite dates if convert_infinite_timestamps is set" do @d.insert(:date=>Sequel.cast('infinity', Date)) @db.convert_infinite_timestamps = :nil @db[:test3].get(:date).must_be_nil @db.convert_infinite_timestamps = :string @db[:test3].get(:date).must_equal 'infinity' @db.convert_infinite_timestamps = :float @db[:test3].get(:date).must_equal 1.0/0.0 @d.update(:date=>Sequel.cast('-infinity', :timestamp)) @db.convert_infinite_timestamps = :nil @db[:test3].get(:date).must_be_nil @db.convert_infinite_timestamps = :string @db[:test3].get(:date).must_equal '-infinity' @db.convert_infinite_timestamps = :float @db[:test3].get(:date).must_equal(-1.0/0.0) end it "should handle conversions from infinite strings/floats in models" do c = Class.new(Sequel::Model(:test3)) @db.convert_infinite_timestamps = :float c.new(:date=>'infinity').date.must_equal 'infinity' c.new(:date=>'-infinity').date.must_equal '-infinity' c.new(:date=>1.0/0.0).date.must_equal 1.0/0.0 c.new(:date=>-1.0/0.0).date.must_equal(-1.0/0.0) end it "explain and analyze should not raise errors" do @d = DB[:test3] @d.explain @d.analyze end it "#locks should be a dataset returning database locks " do @db.locks.must_be_kind_of(Sequel::Dataset) @db.locks.all.must_be_kind_of(Array) end end describe "A PostgreSQL database" do before do @db = DB @db.create_table! :test2 do text :name integer :value end end after do @db.drop_table?(:test2) end it "should support column operations" do @db.create_table!(:test2){text :name; integer :value} @db[:test2].insert({}) @db[:test2].columns.must_equal [:name, :value] @db.add_column :test2, :xyz, :text, :default => '000' @db[:test2].columns.must_equal [:name, :value, :xyz] @db[:test2].insert(:name => 'mmm', :value => 111) @db[:test2].first[:xyz].must_equal '000' @db[:test2].columns.must_equal [:name, :value, :xyz] @db.drop_column :test2, :xyz @db[:test2].columns.must_equal [:name, :value] @db[:test2].delete @db.add_column :test2, :xyz, :text, :default => '000' @db[:test2].insert(:name => 'mmm', :value => 111, :xyz => 'qqqq') @db[:test2].columns.must_equal [:name, :value, :xyz] @db.rename_column :test2, :xyz, :zyx @db[:test2].columns.must_equal [:name, :value, :zyx] @db[:test2].first[:zyx].must_equal 'qqqq' @db.add_column :test2, :xyz, :float @db[:test2].delete @db[:test2].insert(:name => 'mmm', :value => 111, :xyz => 56.78) @db.set_column_type :test2, :xyz, :integer @db[:test2].first[:xyz].must_equal 57 end end describe "A PostgreSQL database" do before do @db = DB @db.drop_table?(:posts) end after do @db.drop_table?(:posts) end it "should support resetting the primary key sequence" do @db.create_table(:posts){primary_key :a} @db[:posts].insert(:a=>20).must_equal 20 @db[:posts].insert.must_equal 1 @db[:posts].insert.must_equal 2 @db[:posts].insert(:a=>10).must_equal 10 @db.reset_primary_key_sequence(:posts).must_equal 21 @db[:posts].insert.must_equal 21 @db[:posts].order(:a).map(:a).must_equal [1, 2, 10, 20, 21] end it "should support specifying Integer/Bignum types in primary keys and have them be auto incrementing" do @db.create_table(:posts){primary_key :a, :type=>Integer} 
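# Even with an explicit Integer/:Bignum type, Sequel creates the primary key
# as an auto-incrementing column (serial/bigserial) on PostgreSQL, so plain
# inserts below still receive generated values.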
@db[:posts].insert.must_equal 1 @db[:posts].insert.must_equal 2 @db.create_table!(:posts){primary_key :a, :type=>:Bignum} @db[:posts].insert.must_equal 1 @db[:posts].insert.must_equal 2 end it "should not raise an error if attempting to reset the primary key sequence for a table without a primary key" do @db.create_table(:posts){Integer :a} @db.reset_primary_key_sequence(:posts).must_be_nil end it "should support opclass specification" do @db.create_table(:posts){text :title; text :body; integer :user_id; index(:user_id, :opclass => :int4_ops, :type => :btree)} proc{@db.create_table(:posts){text :title; text :body; integer :user_id; index(:user_id, :opclass => :bogus_opclass, :type => :btree)}}.must_raise Sequel::DatabaseError end it "should support fulltext indexes and searching" do @db.create_table(:posts){text :title; text :body; full_text_index [:title, :body]; full_text_index :title, :language => 'french', :index_type=>:gist} @db[:posts].insert(:title=>'ruby rails', :body=>'yowsa') @db[:posts].insert(:title=>'sequel', :body=>'ruby') @db[:posts].insert(:title=>'ruby scooby', :body=>'x') @db[:posts].full_text_search(:title, 'rails').all.must_equal [{:title=>'ruby rails', :body=>'yowsa'}] @db[:posts].full_text_search(:title, 'rails', :headline=>true).all.must_equal [{:title=>'ruby rails', :body=>'yowsa', :headline=>'ruby <b>rails</b>'}] @db[:posts].full_text_search([:title, :body], ['yowsa', 'rails']).all.must_equal [:title=>'ruby rails', :body=>'yowsa'] @db[:posts].full_text_search(:title, 'scooby', :language => 'french').all.must_equal [{:title=>'ruby scooby', :body=>'x'}] @db[:posts].full_text_search(:title, :$n).call(:select, :n=>'rails').must_equal [{:title=>'ruby rails', :body=>'yowsa'}] @db[:posts].full_text_search(:title, :$n).prepare(:select, :fts_select).call(:n=>'rails').must_equal [{:title=>'ruby rails', :body=>'yowsa'}] @db[:posts].insert(:title=>'jruby rubinius ruby maglev mri iron') @db[:posts].insert(:title=>'ruby jruby maglev mri rubinius iron') @db[:posts].full_text_search(:title, 'rubinius ruby', :phrase=>true).select_order_map(:title).must_equal ['jruby rubinius ruby maglev mri iron'] @db[:posts].full_text_search(:title, 'jruby maglev', :phrase=>true).select_order_map(:title).must_equal ['ruby jruby maglev mri rubinius iron'] @db[:posts].full_text_search(:title, 'rubinius ruby', :plain=>true).select_order_map(:title).must_equal ['jruby rubinius ruby maglev mri iron', 'ruby jruby maglev mri rubinius iron'] @db[:posts].full_text_search(:title, 'jruby maglev', :plain=>true).select_order_map(:title).must_equal ['jruby rubinius ruby maglev mri iron', 'ruby jruby maglev mri rubinius iron'] if DB.server_version >= 90600 @db[:posts].full_text_search(:title, 'rubinius ruby', :to_tsquery=>:phrase).select_order_map(:title).must_equal ['jruby rubinius ruby maglev mri iron'] @db[:posts].full_text_search(:title, 'jruby maglev', :to_tsquery=>:phrase).select_order_map(:title).must_equal ['ruby jruby maglev mri rubinius iron'] end @db[:posts].full_text_search(Sequel.function(:to_tsvector, 'simple', :title), 'rails', :tsvector=>true).all.must_equal [{:title=>'ruby rails', :body=>'yowsa'}] @db[:posts].full_text_search(:title, Sequel.function(:to_tsquery, 'simple', 'rails'), :tsquery=>true).all.must_equal [{:title=>'ruby rails', :body=>'yowsa'}] proc{@db[:posts].full_text_search(Sequel.function(:to_tsvector, 'simple', :title), 'rubinius ruby', :tsvector=>true, :phrase=>true)}.must_raise(Sequel::Error) proc{@db[:posts].full_text_search(:title, Sequel.function(:to_tsquery, 'simple',
'rails'), :tsquery=>true, :phrase=>true)}.must_raise(Sequel::Error) @db[:posts].delete t1 = "bork " * 1000 + "ruby sequel" t2 = "ruby sequel " * 1000 @db[:posts].insert(:title=>t1) @db[:posts].insert(:title=>t2) @db[:posts].full_text_search(:title, 'ruby & sequel', :rank=>true).select_map(:title).must_equal [t2, t1] end if DB.server_version >= 80300 it "should support spatial indexes" do @db.create_table(:posts){box :geom; spatial_index [:geom]} end it "should support indexes with index type" do @db.create_table(:posts){box :geom; index :geom, :type => 'gist'} end it "should support unique indexes with index type" do @db.create_table(:posts){varchar :title, :size => 5; index :title, :type => 'btree', :unique => true, :name=>:post_index_foo} @db.indexes(:posts).length.must_equal 1 @db.indexes(:posts)[:post_index_foo][:unique].must_equal true end it "should support partial indexes" do @db.create_table(:posts){varchar :title, :size => 5; index :title, :where => {:title => '5'}} end it "should support identifiers for table names when creating indexes" do @db.create_table(Sequel::SQL::Identifier.new(:posts)){varchar :title, :size => 5; index :title} @db.indexes(:posts).length.must_equal 1 end it "should support renaming tables" do @db.create_table!(:posts1){primary_key :a} @db.rename_table(:posts1, :posts) end it "should support adding a column only if it does not already exist" do @db.create_table(:posts){Integer :a} @db.alter_table(:posts){add_column :b, Integer} @db.alter_table(:posts){add_column :b, Integer, :if_not_exists=>true} proc{@db.alter_table(:posts){add_column :b, Integer}}.must_raise Sequel::DatabaseError end if DB.server_version >= 90600 end describe "Sequel::Postgres::Database" do before do @db = DB @db.create_table!(:posts){Integer :a} end after do @db.run("DROP PROCEDURE test_procedure_posts(#{@args || "int, int"})") @db.drop_table?(:posts) end it "#call_procedure should call a procedure that returns a row" do @db.run <<SQL CREATE OR REPLACE PROCEDURE test_procedure_posts(inout a int, inout b int) LANGUAGE SQL AS $$ INSERT INTO posts VALUES (a) RETURNING *; INSERT INTO posts VALUES (a * 2) RETURNING *; SELECT max(posts.a), min(posts.a) FROM posts; $$; SQL @db.call_procedure(:test_procedure_posts, 1, nil).must_equal(:a=>2, :b=>1) @db.call_procedure(:test_procedure_posts, 3, nil).must_equal(:a=>6, :b=>1) end # Remove after release of pg 1.3.5 skip_due_to_pg_bug = defined?(Sequel::Postgres::USES_PG) && Sequel::Postgres::USES_PG && DB.respond_to?(:stream_all_queries) && DB.stream_all_queries && defined?(PG::VERSION) && PG::VERSION == '1.3.4' it "#call_procedure should call a procedure without arguments" do @args = '' @db.run <<SQL CREATE OR REPLACE PROCEDURE test_procedure_posts() LANGUAGE SQL AS $$ INSERT INTO posts VALUES (1) RETURNING *; INSERT INTO posts VALUES (2) RETURNING *; SELECT max(posts.a), min(posts.a) FROM posts; $$; SQL @db.call_procedure(:test_procedure_posts).must_be_nil @db[:posts].select_order_map(:a).must_equal [1, 2] end unless skip_due_to_pg_bug it "#call_procedure should call a procedure with output parameters" do @db.run <<SQL CREATE OR REPLACE PROCEDURE test_procedure_posts(out a int, out b int) LANGUAGE SQL AS $$ INSERT INTO posts VALUES (1) RETURNING *; INSERT INTO posts VALUES (2) RETURNING *; SELECT max(posts.a), min(posts.a) FROM posts; $$; SQL @db.call_procedure(:test_procedure_posts, nil, nil).must_equal(:a=>2, :b=>1) end if DB.server_version >= 140000 it "#call_procedure should call a procedure that doesn't return a row" do @db.run <<SQL CREATE OR
REPLACE PROCEDURE test_procedure_posts(int, int) LANGUAGE SQL AS $$ INSERT INTO posts VALUES ($1) RETURNING *; INSERT INTO posts VALUES ($1 * 2) RETURNING *; $$; SQL @db.call_procedure(:test_procedure_posts, 1, nil).must_be_nil @db.call_procedure(:test_procedure_posts, 3, nil).must_be_nil end unless skip_due_to_pg_bug it "#call_procedure should call a procedure that accepts text" do @args = 'text' @db.run <<SQL CREATE OR REPLACE PROCEDURE test_procedure_posts(inout t text) LANGUAGE SQL AS $$ SELECT 'a' || t; $$; SQL @db.call_procedure(:test_procedure_posts, 'b').must_equal(:t=>'ab') end end if DB.adapter_scheme == :postgres && DB.server_version >= 110000 describe "Postgres::Dataset#import" do before do @db = DB @db.create_table!(:test){primary_key :x; Integer :y} @ds = @db[:test] end after do @db.drop_table?(:test) end it "#import should use a single insert statement" do @ds.import([:x, :y], [[1, 2], [3, 4]]) @ds.all.must_equal [{:x=>1, :y=>2}, {:x=>3, :y=>4}] end it "#import should work correctly when returning primary keys" do @ds.import([:x, :y], [[1, 2], [3, 4]], :return=>:primary_key).must_equal [1, 3] @ds.all.must_equal [{:x=>1, :y=>2}, {:x=>3, :y=>4}] end it "#import should work correctly when returning primary keys with :slice option" do @ds.import([:x, :y], [[1, 2], [3, 4]], :return=>:primary_key, :slice=>1).must_equal [1, 3] @ds.all.must_equal [{:x=>1, :y=>2}, {:x=>3, :y=>4}] end it "#import should work correctly with an arbitrary returning value" do @ds.returning(:y, :x).import([:x, :y], [[1, 2], [3, 4]]).must_equal [{:y=>2, :x=>1}, {:y=>4, :x=>3}] @ds.all.must_equal [{:x=>1, :y=>2}, {:x=>3, :y=>4}] end end describe "Postgres::Dataset#insert" do before do @db = DB @db.create_table!(:test5){primary_key :xid; Integer :value} @ds = @db[:test5] end after do @db.drop_table?(:test5) end it "should work with static SQL" do @ds.with_sql('INSERT INTO test5 (value) VALUES (10)').insert.must_be_nil @db['INSERT INTO test5 (value) VALUES (20)'].insert.must_be_nil @ds.all.must_equal [{:xid=>1, :value=>10}, {:xid=>2, :value=>20}] end it "should insert correctly if using a column array and a value array" do @ds.insert([:value], [10]).must_equal 1 @ds.all.must_equal [{:xid=>1, :value=>10}] end it "should have insert return primary key value" do @ds.insert(:value=>10).must_equal 1 end it "should have insert_select insert the record and return the inserted record" do h = @ds.insert_select(:value=>10) h[:value].must_equal 10 @ds.first(:xid=>h[:xid])[:value].must_equal 10 end it "should have insert_select respect existing returning clause" do h = @ds.returning(Sequel[:value].as(:v), Sequel[:xid].as(:x)).insert_select(:value=>10) h[:v].must_equal 10 @ds.first(:xid=>h[:x])[:value].must_equal 10 end it "should have prepared insert_select respect existing returning clause" do h = @ds.returning(Sequel[:value].as(:v), Sequel[:xid].as(:x)).prepare(:insert_select, :insert_select, :value=>10).call h[:v].must_equal 10 @ds.first(:xid=>h[:x])[:value].must_equal 10 end it "should correctly return the inserted record's primary key value" do value1 = 10 id1 = @ds.insert(:value=>value1) @ds.first(:xid=>id1)[:value].must_equal value1 value2 = 20 id2 = @ds.insert(:value=>value2) @ds.first(:xid=>id2)[:value].must_equal value2 end it "should return nil if the table has no primary key" do @db.create_table!(:test5){String :name; Integer :value} @ds.delete @ds.insert(:name=>'a').must_be_nil end end describe "Postgres::Database schema qualified tables" do before do @db = DB @db << "CREATE SCHEMA schema_test"
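# Clear Sequel's cached primary key and sequence metadata so the tests below
# re-introspect tables in the freshly created schema.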
@db.instance_variable_set(:@primary_keys, {}) @db.instance_variable_set(:@primary_key_sequences, {}) end after do @db << "DROP SCHEMA schema_test CASCADE" end it "should be able to create, drop, select and insert into tables in a given schema" do @db.create_table(Sequel[:schema_test][:schema_test]){primary_key :i} @db[Sequel[:schema_test][:schema_test]].first.must_be_nil @db[Sequel[:schema_test][:schema_test]].insert(:i=>1).must_equal 1 @db[Sequel[:schema_test][:schema_test]].first.must_equal(:i=>1) @db.from(Sequel.lit('schema_test.schema_test')).first.must_equal(:i=>1) @db.drop_table(Sequel[:schema_test][:schema_test]) @db.create_table(Sequel.qualify(:schema_test, :schema_test)){integer :i} @db[Sequel[:schema_test][:schema_test]].first.must_be_nil @db.from(Sequel.lit('schema_test.schema_test')).first.must_be_nil @db.drop_table(Sequel.qualify(:schema_test, :schema_test)) end it "#tables should not include tables in a default non-public schema" do @db.create_table(Sequel[:schema_test][:schema_test]){integer :i} @db.tables(:schema=>:schema_test).must_include(:schema_test) @db.tables.wont_include(:pg_am) @db.tables.wont_include(:domain_udt_usage) end it "#tables should return tables in the schema provided by the :schema argument" do @db.create_table(Sequel[:schema_test][:schema_test]){integer :i} @db.tables(:schema=>:schema_test).must_equal [:schema_test] end it "#schema should not include columns from tables in a default non-public schema" do @db.create_table(Sequel[:schema_test][:domains]){integer :i} sch = @db.schema(Sequel[:schema_test][:domains]) cs = sch.map{|x| x.first} cs.first.must_equal :i cs.wont_include(:data_type) end it "#schema should only include columns from the table in the given :schema argument" do @db.create_table!(:domains){integer :d} @db.create_table(Sequel[:schema_test][:domains]){integer :i} sch = @db.schema(:domains, :schema=>:schema_test) cs = sch.map{|x| x.first} cs.first.must_equal :i cs.wont_include(:d) @db.drop_table(:domains) end it "#schema should not include columns from tables with the same name in other schemas by default" do @db.create_table!(Sequel[:public][:domains]){integer :d} @db.create_table(Sequel[:schema_test][:domains]){integer :i} begin @db.schema(:domains).map{|x| x.first}.must_equal [:d] @db.schema(Sequel[:schema_test][:domains]).map{|x| x.first}.must_equal [:i] ensure @db.drop_table?(Sequel[:public][:domains]) end end it "#table_exists?
should see if the table is in a given schema" do @db.create_table(Sequel[:schema_test][:schema_test]){integer :i} @db.table_exists?(Sequel[:schema_test][:schema_test]).must_equal true end it "should be able to add and drop indexes in a schema" do @db.create_table(Sequel[:schema_test][:schema_test]){Integer :i, :index=>true} @db.indexes(Sequel[:schema_test][:schema_test]).keys.must_equal [:schema_test_schema_test_i_index] @db.drop_index Sequel[:schema_test][:schema_test], :i @db.indexes(Sequel[:schema_test][:schema_test]).keys.must_equal [] end it "should be able to get primary keys for tables in a given schema" do @db.create_table(Sequel[:schema_test][:schema_test]){primary_key :i} @db.primary_key(Sequel[:schema_test][:schema_test]).must_equal 'i' end it "should be able to get serial sequences for tables in a given schema" do @db.create_table(Sequel[:schema_test][:schema_test]){primary_key :i} 2.times{@db.primary_key_sequence(Sequel[:schema_test][:schema_test]).must_equal '"schema_test"."schema_test_i_seq"'} end it "should be able to get serial sequences for tables that have spaces in the name in a given schema" do @db.create_table(Sequel[:schema_test][:"schema test"]){primary_key :i} @db.primary_key_sequence(Sequel[:schema_test][:"schema test"]).must_equal '"schema_test"."schema test_i_seq"' end it "should be able to get custom sequences for tables in a given schema" do @db << "CREATE SEQUENCE schema_test.kseq" @db.create_table(Sequel[:schema_test][:schema_test]){integer :j; primary_key :k, :type=>:integer, :default=>Sequel.lit("nextval('schema_test.kseq'::regclass)")} @db.primary_key_sequence(Sequel[:schema_test][:schema_test]).must_equal '"schema_test".kseq' end it "should be able to get custom sequences for tables that have spaces in the name in a given schema" do @db << "CREATE SEQUENCE schema_test.\"ks eq\"" @db.create_table(Sequel[:schema_test][:"schema test"]){integer :j; primary_key :k, :type=>:integer, :default=>Sequel.lit("nextval('schema_test.\"ks eq\"'::regclass)")} @db.primary_key_sequence(Sequel[:schema_test][:"schema test"]).must_equal '"schema_test"."ks eq"' end it "should handle schema introspection cases with tables with the same name in multiple schemas" do begin @db.create_table(Sequel[:schema_test][:schema_test]) do primary_key :id foreign_key :i, Sequel[:schema_test][:schema_test], :index=>{:name=>:schema_test_sti} end @db.create_table!(Sequel[:public][:schema_test]) do primary_key :id foreign_key :j, Sequel[:public][:schema_test], :index=>{:name=>:public_test_sti} end h = @db.schema(:schema_test) h.length.must_equal 2 h.last.first.must_equal :j @db.indexes(:schema_test).must_equal(:public_test_sti=>{:unique=>false, :columns=>[:j], :deferrable=>nil}) @db.foreign_key_list(:schema_test).must_equal [{:on_update=>:no_action, :columns=>[:j], :deferrable=>false, :key=>[:id], :table=>:schema_test, :on_delete=>:no_action, :name=>:schema_test_j_fkey, :schema=>:public}] ensure @db.drop_table?(Sequel[:public][:schema_test]) end end it "should support resetting the primary key sequence" do name = Sequel[:schema_test][:schema_test] @db.create_table(name){primary_key :id} @db[name].insert(:id=>10).must_equal 10 @db.reset_primary_key_sequence(name).must_equal 11 @db[name].insert.must_equal 11 @db[name].select_order_map(:id).must_equal [10, 11] end end describe "Postgres::Database schema qualified tables and eager graphing" do before(:all) do @db = DB @db.run "DROP SCHEMA s CASCADE" rescue nil @db.run "CREATE SCHEMA s" @db.create_table(Sequel[:s][:bands]){primary_key :id; String :name}
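# Fixture tables for the eager graphing specs: albums and members belong to
# bands, and tracks belong to albums, all inside the s schema.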
@db.create_table(Sequel[:s][:albums]){primary_key :id; String :name; foreign_key :band_id, Sequel[:s][:bands]} @db.create_table(Sequel[:s][:tracks]){primary_key :id; String :name; foreign_key :album_id, Sequel[:s][:albums]} @db.create_table(Sequel[:s][:members]){primary_key :id; String :name; foreign_key :band_id, Sequel[:s][:bands]} @Band = Class.new(Sequel::Model(Sequel[:s][:bands])) @Album = Class.new(Sequel::Model(Sequel[:s][:albums])) @Track = Class.new(Sequel::Model(Sequel[:s][:tracks])) @Member = Class.new(Sequel::Model(Sequel[:s][:members])) def @Band.name; :Band; end def @Album.name; :Album; end def @Track.name; :Track; end def @Member.name; :Member; end @Band.one_to_many :albums, :class=>@Album, :order=>:name @Band.one_to_many :members, :class=>@Member, :order=>:name @Album.many_to_one :band, :class=>@Band, :order=>:name @Album.one_to_many :tracks, :class=>@Track, :order=>:name @Track.many_to_one :album, :class=>@Album, :order=>:name @Member.many_to_one :band, :class=>@Band, :order=>:name @Member.many_to_many :members, :class=>@Member, :join_table=>Sequel[:s][:bands], :right_key=>:id, :left_key=>:id, :left_primary_key=>:band_id, :right_primary_key=>:band_id, :order=>:name @Band.many_to_many :tracks, :class=>@Track, :join_table=>Sequel[:s][:albums], :right_key=>:id, :right_primary_key=>:album_id, :order=>:name @b1 = @Band.create(:name=>"BM") @b2 = @Band.create(:name=>"J") @a1 = @Album.create(:name=>"BM1", :band=>@b1) @a2 = @Album.create(:name=>"BM2", :band=>@b1) @a3 = @Album.create(:name=>"GH", :band=>@b2) @a4 = @Album.create(:name=>"GHL", :band=>@b2) @t1 = @Track.create(:name=>"BM1-1", :album=>@a1) @t2 = @Track.create(:name=>"BM1-2", :album=>@a1) @t3 = @Track.create(:name=>"BM2-1", :album=>@a2) @t4 = @Track.create(:name=>"BM2-2", :album=>@a2) @m1 = @Member.create(:name=>"NU", :band=>@b1) @m2 = @Member.create(:name=>"TS", :band=>@b1) @m3 = @Member.create(:name=>"NS", :band=>@b2) @m4 = @Member.create(:name=>"JC", :band=>@b2) end after(:all) do @db.run "DROP SCHEMA s CASCADE" end it "should return all eager graphs correctly" do bands = @Band.order(Sequel[:bands][:name]).eager_graph(:albums).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] bands = @Band.order(Sequel[:bands][:name]).eager_graph(:albums=>:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] bands.map{|x| x.albums.map{|y| y.tracks}}.must_equal [[[@t1, @t2], [@t3, @t4]], [[], []]] bands = @Band.order(Sequel[:bands][:name]).eager_graph({:albums=>:tracks}, :members).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] bands.map{|x| x.albums.map{|y| y.tracks}}.must_equal [[[@t1, @t2], [@t3, @t4]], [[], []]] bands.map{|x| x.members}.must_equal [[@m1, @m2], [@m4, @m3]] end it "should have eager graphs work with previous joins" do bands = @Band.order(Sequel[:bands][:name]).select_all(Sequel[:s][:bands]).join(Sequel[:s][:members], :band_id=>:id).from_self(:alias=>:bands0).eager_graph(:albums=>:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] bands.map{|x| x.albums.map{|y| y.tracks}}.must_equal [[[@t1, @t2], [@t3, @t4]], [[], []]] end it "should have eager graphs work with joins with the same tables" do bands = @Band.order(Sequel[:bands][:name]).select_all(Sequel[:s][:bands]).join(Sequel[:s][:members], :band_id=>:id).eager_graph({:albums=>:tracks}, :members).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] 
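# Only the BM albums have tracks, so the nested track arrays for J's albums
# (GH and GHL) are empty.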
bands.map{|x| x.albums.map{|y| y.tracks}}.must_equal [[[@t1, @t2], [@t3, @t4]], [[], []]] bands.map{|x| x.members}.must_equal [[@m1, @m2], [@m4, @m3]] end it "should have eager graphs work with self referential associations" do bands = @Band.order(Sequel[:bands][:name]).eager_graph(:tracks=>{:album=>:band}).all bands.must_equal [@b1, @b2] bands.map{|x| x.tracks}.must_equal [[@t1, @t2, @t3, @t4], []] bands.map{|x| x.tracks.map{|y| y.album}}.must_equal [[@a1, @a1, @a2, @a2], []] bands.map{|x| x.tracks.map{|y| y.album.band}}.must_equal [[@b1, @b1, @b1, @b1], []] members = @Member.order(Sequel[:members][:name]).eager_graph(:members).all members.must_equal [@m4, @m3, @m1, @m2] members.map{|x| x.members}.must_equal [[@m4, @m3], [@m4, @m3], [@m1, @m2], [@m1, @m2]] members = @Member.order(Sequel[:members][:name]).eager_graph(:band, :members=>:band).all members.must_equal [@m4, @m3, @m1, @m2] members.map{|x| x.band}.must_equal [@b2, @b2, @b1, @b1] members.map{|x| x.members}.must_equal [[@m4, @m3], [@m4, @m3], [@m1, @m2], [@m1, @m2]] members.map{|x| x.members.map{|y| y.band}}.must_equal [[@b2, @b2], [@b2, @b2], [@b1, @b1], [@b1, @b1]] end it "should have eager graphs work with a from_self dataset" do bands = @Band.order(Sequel[:bands][:name]).from_self.eager_graph(:tracks=>{:album=>:band}).all bands.must_equal [@b1, @b2] bands.map{|x| x.tracks}.must_equal [[@t1, @t2, @t3, @t4], []] bands.map{|x| x.tracks.map{|y| y.album}}.must_equal [[@a1, @a1, @a2, @a2], []] bands.map{|x| x.tracks.map{|y| y.album.band}}.must_equal [[@b1, @b1, @b1, @b1], []] end it "should have eager graphs work with different types of aliased from tables" do bands = @Band.order(Sequel[:tracks][:name]).from(Sequel[:s][:bands].as(:tracks)).eager_graph(:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.tracks}.must_equal [[@t1, @t2, @t3, @t4], []] bands = @Band.order(Sequel[:tracks][:name]).from(Sequel.expr(Sequel[:s][:bands]).as(:tracks)).eager_graph(:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.tracks}.must_equal [[@t1, @t2, @t3, @t4], []] bands = @Band.order(Sequel[:tracks][:name]).from(Sequel.expr(Sequel[:s][:bands]).as(Sequel.identifier(:tracks))).eager_graph(:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.tracks}.must_equal [[@t1, @t2, @t3, @t4], []] bands = @Band.order(Sequel[:tracks][:name]).from(Sequel.expr(Sequel[:s][:bands]).as('tracks')).eager_graph(:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.tracks}.must_equal [[@t1, @t2, @t3, @t4], []] end it "should have eager graphs work with join tables with aliases" do bands = @Band.order(Sequel[:bands][:name]).eager_graph(:members).join(Sequel[:s][:albums].as(:tracks), :band_id=>Sequel.qualify(Sequel[:s][:bands], :id)).eager_graph(:albums=>:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] bands.map{|x| x.members}.must_equal [[@m1, @m2], [@m4, @m3]] bands = @Band.order(Sequel[:bands][:name]).eager_graph(:members).join(Sequel.as(Sequel[:s][:albums], :tracks), :band_id=>Sequel.qualify(Sequel[:s][:bands], :id)).eager_graph(:albums=>:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] bands.map{|x| x.members}.must_equal [[@m1, @m2], [@m4, @m3]] bands = @Band.order(Sequel[:bands][:name]).eager_graph(:members).join(Sequel.as(Sequel[:s][:albums], 'tracks'), :band_id=>Sequel.qualify(Sequel[:s][:bands], :id)).eager_graph(:albums=>:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] bands.map{|x| 
x.members}.must_equal [[@m1, @m2], [@m4, @m3]] bands = @Band.order(Sequel[:bands][:name]).eager_graph(:members).join(Sequel.as(Sequel[:s][:albums], Sequel.identifier(:tracks)), :band_id=>Sequel.qualify(Sequel[:s][:bands], :id)).eager_graph(:albums=>:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] bands.map{|x| x.members}.must_equal [[@m1, @m2], [@m4, @m3]] bands = @Band.order(Sequel[:bands][:name]).eager_graph(:members).join(Sequel[:s][:albums], {:band_id=>Sequel.qualify(Sequel[:s][:bands], :id)}, :table_alias=>:tracks).eager_graph(:albums=>:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] bands.map{|x| x.members}.must_equal [[@m1, @m2], [@m4, @m3]] bands = @Band.order(Sequel[:bands][:name]).eager_graph(:members).join(Sequel[:s][:albums], {:band_id=>Sequel.qualify(Sequel[:s][:bands], :id)}, :table_alias=>'tracks').eager_graph(:albums=>:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] bands.map{|x| x.members}.must_equal [[@m1, @m2], [@m4, @m3]] bands = @Band.order(Sequel[:bands][:name]).eager_graph(:members).join(Sequel[:s][:albums], {:band_id=>Sequel.qualify(Sequel[:s][:bands], :id)}, :table_alias=>Sequel.identifier(:tracks)).eager_graph(:albums=>:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.albums}.must_equal [[@a1, @a2], [@a3, @a4]] bands.map{|x| x.members}.must_equal [[@m1, @m2], [@m4, @m3]] end it "should have eager graphs work with different types of qualified from tables" do bands = @Band.order(Sequel[:bands][:name]).from(Sequel.qualify(:s, :bands)).eager_graph(:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.tracks}.must_equal [[@t1, @t2, @t3, @t4], []] bands = @Band.order(Sequel[:bands][:name]).from(Sequel.identifier(:bands).qualify(:s)).eager_graph(:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.tracks}.must_equal [[@t1, @t2, @t3, @t4], []] bands = @Band.order(Sequel[:bands][:name]).from(Sequel::SQL::QualifiedIdentifier.new(:s, 'bands')).eager_graph(:tracks).all bands.must_equal [@b1, @b2] bands.map{|x| x.tracks}.must_equal [[@t1, @t2, @t3, @t4], []] end end if DB.server_version >= 80300 describe "PostgreSQL tsearch2" do before(:all) do DB.create_table! 
:test6 do text :title text :body full_text_index [:title, :body] end @ds = DB[:test6] end after do DB[:test6].delete end after(:all) do DB.drop_table?(:test6) end it "should search by indexed column" do record = {:title => "oopsla conference", :body => "test"} @ds.insert(record) @ds.full_text_search(:title, "oopsla").all.must_equal [record] end it "should join multiple columns with spaces to search by last words in row" do record = {:title => "multiple words", :body => "are easy to search"} @ds.insert(record) @ds.full_text_search([:title, :body], "words").all.must_equal [record] end it "should return rows with a NULL in one column if there is a match in another column" do record = {:title => "multiple words", :body =>nil} @ds.insert(record) @ds.full_text_search([:title, :body], "words").all.must_equal [record] end end end describe "Postgres::Database functions, languages, schemas, and triggers" do before do @d = DB end after do @d.drop_function('tf', :if_exists=>true, :cascade=>true) @d.drop_function('tf', :if_exists=>true, :cascade=>true, :args=>%w'integer integer') @d.drop_language(:plpgsql, :if_exists=>true, :cascade=>true) if @d.server_version < 90000 @d.drop_schema(:sequel, :if_exists=>true, :cascade=>true) @d.drop_table?(:test) end it "#create_function and #drop_function should create and drop functions" do proc{@d['SELECT tf()'].all}.must_raise(Sequel::DatabaseError) @d.create_function('tf', 'SELECT 1', :returns=>:integer) @d['SELECT tf()'].all.must_equal [{:tf=>1}] @d.drop_function('tf') proc{@d['SELECT tf()'].all}.must_raise(Sequel::DatabaseError) end it "#create_function and #drop_function should support options" do args = ['tf', 'SELECT $1 + $2', {:args=>[[:integer, :a], :integer], :replace=>true, :returns=>:integer, :language=>'SQL', :behavior=>:immutable, :strict=>true, :security_definer=>true, :cost=>2, :parallel=>(:unsafe if @d.server_version >= 90600), :set=>{:search_path => 'public'}}] @d.create_function(*args) # Make sure replace works @d.create_function(*args) @d['SELECT tf(1, 2)'].all.must_equal [{:tf=>3}] args = ['tf', {:if_exists=>true, :cascade=>true, :args=>[[:integer, :a], :integer]}] @d.drop_function(*args) # Make sure if exists works @d.drop_function(*args) end it "#create_language and #drop_language should create and drop languages" do @d.create_language(:plpgsql, :replace=>true) if @d.server_version < 90000 proc{@d.create_language(:plpgsql)}.must_raise(Sequel::DatabaseError) @d.drop_language(:plpgsql) if @d.server_version < 90000 proc{@d.drop_language(:plpgsql)}.must_raise(Sequel::DatabaseError) if @d.server_version < 90000 # Make sure if exists works @d.drop_language(:plpgsql, :if_exists=>true, :cascade=>true) if @d.server_version < 90000 end it "#create_schema and #drop_schema should create and drop schemas" do @d.create_schema(:sequel) @d.create_schema(:sequel, :if_not_exists=>true) if @d.server_version >= 90300 @d.create_table(Sequel[:sequel][:test]){Integer :a} @d.drop_schema(:sequel, :if_exists=>true, :cascade=>true) end it "#create_trigger and #drop_trigger should create and drop triggers" do @d.create_language(:plpgsql) if @d.server_version < 90000 @d.create_function(:tf, 'BEGIN IF NEW.value IS NULL THEN RAISE EXCEPTION \'Blah\'; END IF; RETURN NEW; END;', :language=>:plpgsql, :returns=>:trigger) @d.create_table(:test){String :name; Integer :value} @d.create_trigger(:test, :identity, :tf, :each_row=>true) @d[:test].insert(:name=>'a', :value=>1) @d[:test].filter(:name=>'a').all.must_equal [{:name=>'a', :value=>1}]
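# The tf trigger function raises if NEW.value IS NULL, so setting value to
# NULL must fail and leave the row unchanged.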
proc{@d[:test].filter(:name=>'a').update(:value=>nil)}.must_raise(Sequel::DatabaseError) @d[:test].filter(:name=>'a').all.must_equal [{:name=>'a', :value=>1}] @d[:test].filter(:name=>'a').update(:value=>3) @d[:test].filter(:name=>'a').all.must_equal [{:name=>'a', :value=>3}] @d.drop_trigger(:test, :identity) # Make sure if exists works @d.drop_trigger(:test, :identity, :if_exists=>true, :cascade=>true) if @d.supports_trigger_conditions? @d.create_trigger(:test, :identity, :tf, :each_row=>true, :events => :update, :when=> {Sequel[:new][:name] => 'b'}) @d[:test].filter(:name=>'a').update(:value=>nil) @d[:test].filter(:name=>'a').all.must_equal [{:name=>'a', :value=>nil}] proc{@d[:test].filter(:name=>'a').update(:name=>'b')}.must_raise(Sequel::DatabaseError) @d[:test].filter(:name=>'a').all.must_equal [{:name=>'a', :value=>nil}] if @d.server_version >= 140000 proc{@d[:test].filter(:name=>'a').update(:name=>'b')}.must_raise(Sequel::DatabaseError) @d[:test].filter(:name=>'a').update(:name=>'c') @d.create_trigger(:test, :identity, :tf, :each_row=>true, :events => :update, :when=> {Sequel[:new][:name] => 'c'}, :replace=>true) proc{@d[:test].filter(:name=>'c').update(:name=>'c')}.must_raise(Sequel::DatabaseError) @d[:test].filter(:name=>'c').update(:name=>'b') end @d.drop_trigger(:test, :identity) end end end if DB.adapter_scheme == :postgres describe "Postgres::Dataset #use_cursor" do before(:all) do @db = DB @db.create_table!(:test_cursor){Integer :x} @ds = @db[:test_cursor] @db.transaction{1001.times{|i| @ds.insert(i)}} end after(:all) do @db.drop_table?(:test_cursor) end it "should return the same results as the non-cursor use" do @ds.all.must_equal @ds.use_cursor.all end it "should not swallow errors if closing cursor raises an error" do proc do @db.synchronize do |c| @ds.use_cursor.each do |r| @db.run "CLOSE sequel_cursor" raise ArgumentError end end end.must_raise(ArgumentError) end it "should handle errors raised while closing cursor after successful cursor fetch" do proc do closed = false @db.synchronize do |c| @ds.use_cursor(:rows_per_fetch=>2000).each do |r| if closed == false @db.run "CLOSE sequel_cursor" closed = true end end end end.must_raise(Sequel::DatabaseError) end it "should respect the :rows_per_fetch option" do i = 0 @ds = @ds.with_extend{define_method(:execute){|*a, &block| i+=1; super(*a, &block);}} @ds.use_cursor.all i.must_equal 2 i = 0 @ds.use_cursor(:rows_per_fetch=>100).all i.must_equal 11 i = 0 @ds.use_cursor(:rows_per_fetch=>0).all i.must_equal 2 i = 0 @ds.use_cursor(:rows_per_fetch=>2000).all i.must_equal 1 end it "should respect the :hold=>true option for creating the cursor WITH HOLD and not using a transaction" do @ds.use_cursor.each{@db.in_transaction?.must_equal true} @ds.use_cursor(:hold=>true).each{@db.in_transaction?.must_equal false} end it "should support updating individual rows based on a cursor" do @db.transaction(:rollback=>:always) do @ds.use_cursor(:rows_per_fetch=>1).each do |row| @ds.where_current_of.update(:x=>Sequel.*(row[:x], 10)) end @ds.select_order_map(:x).must_equal((0..1000).map{|x| x * 10}) end @ds.select_order_map(:x).must_equal((0..1000).to_a) end it "should respect the :cursor_name option" do one_rows = [] two_rows = [] @ds.order(:x).use_cursor(:cursor_name => 'cursor_one').each do |one| one_rows << one if one[:x] % 1000 == 500 two_rows = [] @ds.order(:x).use_cursor(:cursor_name => 'cursor_two').each do |two| two_rows << two end end end one_rows.must_equal two_rows end it "should handle returning inside block" do ds = @ds.with_extend do 
def check_return use_cursor.each{|r| return} end end ds.check_return ds.all.must_equal ds.use_cursor.all end end describe "Database#add_named_conversion_proc" do before(:all) do @db = DB @old_cp = @db.conversion_procs[1013] @db.conversion_procs.delete(1013) @db.add_named_conversion_proc(:oidvector, &:reverse) end after(:all) do @db.conversion_procs.delete(30) @db.conversion_procs[1013] = @old_cp @db.drop_table?(:foo) @db.drop_enum(:foo_enum) rescue nil end it "should work for scalar types" do @db.create_table!(:foo){oidvector :bar} @db[:foo].insert(Sequel.cast('21', :oidvector)) @db[:foo].get(:bar).must_equal '12' end it "should work for array types" do @db.create_table!(:foo){column :bar, 'oidvector[]'} @db[:foo].insert(Sequel.pg_array(['21'], :oidvector)) @db[:foo].get(:bar).must_equal ['12'] end it "should work for enums" do @db.drop_enum(:foo_enum) rescue nil @db.create_enum(:foo_enum, %w(foo bar)) @db.add_named_conversion_proc(:foo_enum, &:reverse) @db.create_table!(:foo){foo_enum :bar} @db[:foo].insert(:bar => 'foo') @db[:foo].get(:bar).must_equal 'foo'.reverse end it "should raise error for unsupported type" do proc{@db.add_named_conversion_proc(:nonexistent_type, &:reverse)}.must_raise Sequel::Error end end end if uses_pg_or_jdbc && DB.server_version >= 90000 describe "Postgres::Database#copy_into" do before(:all) do @db = DB @db.create_table!(:test_copy){Integer :x; Integer :y} @ds = @db[:test_copy].order(:x, :y) end before do @db[:test_copy].delete end after(:all) do @db.drop_table?(:test_copy) end it "should work with a :data option containing data in PostgreSQL text format" do @db.copy_into(:test_copy, :data=>"1\t2\n3\t4\n") @ds.select_map([:x, :y]).must_equal [[1, 2], [3, 4]] end it "should work with :format=>:csv option and :data option containing data in CSV format" do @db.copy_into(:test_copy, :format=>:csv, :data=>"1,2\n3,4\n") @ds.select_map([:x, :y]).must_equal [[1, 2], [3, 4]] end it "should respect given :options" do @db.copy_into(:test_copy, :options=>"FORMAT csv, HEADER TRUE", :data=>"x,y\n1,2\n3,4\n") @ds.select_map([:x, :y]).must_equal [[1, 2], [3, 4]] end it "should respect given :options options when :format is used" do @db.copy_into(:test_copy, :options=>"QUOTE '''', DELIMITER '|'", :format=>:csv, :data=>"'1'|'2'\n'3'|'4'\n") @ds.select_map([:x, :y]).must_equal [[1, 2], [3, 4]] end it "should accept :columns option to only copy the given columns" do @db.copy_into(:test_copy, :data=>"1\t2\n3\t4\n", :columns=>[:y, :x]) @ds.select_map([:x, :y]).must_equal [[2, 1], [4, 3]] end it "should accept a block and use returned values for the copy in data stream" do buf = ["1\t2\n", "3\t4\n"] @db.copy_into(:test_copy){buf.shift} @ds.select_map([:x, :y]).must_equal [[1, 2], [3, 4]] end it "should work correctly with a block and :format=>:csv" do buf = ["1,2\n", "3,4\n"] @db.copy_into(:test_copy, :format=>:csv){buf.shift} @ds.select_map([:x, :y]).must_equal [[1, 2], [3, 4]] end it "should accept an enumerable as the :data option" do @db.copy_into(:test_copy, :data=>["1\t2\n", "3\t4\n"]) @ds.select_map([:x, :y]).must_equal [[1, 2], [3, 4]] end it "should handle exceptions raised by the block with a rollback of copied data and still have a usable connection" do 2.times do sent = false proc{@db.copy_into(:test_copy){raise ArgumentError if sent; sent = true; "1\t2\n"}}.must_raise(ArgumentError) @ds.select_map([:x, :y]).must_equal [] end end it "should handle database errors with a rollback of copied data and still have a usable connection" do 2.times do proc{@db.copy_into(:test_copy,
:data=>["1\t2\n", "3\ta\n"])}.must_raise(Sequel::DatabaseError) @ds.select_map([:x, :y]).must_equal [] end end it "should raise an Error if both :data and a block are provided" do proc{@db.copy_into(:test_copy, :data=>["1\t2\n", "3\t4\n"]){}}.must_raise(Sequel::Error) end it "should raise an Error if neither :data or a block are provided" do proc{@db.copy_into(:test_copy)}.must_raise(Sequel::Error) end end describe "Postgres::Database#copy_into using UTF-8 encoding" do before(:all) do @db = DB @db.create_table!(:test_copy){String :t} @ds = @db[:test_copy].order(:t) end before do @db[:test_copy].delete end after(:all) do @db.drop_table?(:test_copy) end it "should work with UTF-8 characters using the :data option" do @db.copy_into(:test_copy, :data=>(["\u00E4\n"]*2)) @ds.select_map([:t]).map{|a| a.map{|s| s.force_encoding('UTF-8')}}.must_equal([["\u00E4"]] * 2) end it "should work with UTF-8 characters using a block" do buf = (["\u00E4\n"]*2) @db.copy_into(:test_copy){buf.shift} @ds.select_map([:t]).map{|a| a.map{|s| s.force_encoding('UTF-8')}}.must_equal([["\u00E4"]] * 2) end end describe "Postgres::Database#copy_table" do before(:all) do @db = DB @db.create_table!(:test_copy){Integer :x; Integer :y} ds = @db[:test_copy] ds.insert(1, 2) ds.insert(3, 4) end after(:all) do @db.drop_table?(:test_copy) end it "without a block or options should return a text version of the table as a single string" do @db.copy_table(:test_copy).must_equal "1\t2\n3\t4\n" end it "without a block and with :format=>:csv should return a csv version of the table as a single string" do @db.copy_table(:test_copy, :format=>:csv).must_equal "1,2\n3,4\n" end it "should treat string as SQL code" do @db.copy_table('COPY "test_copy" TO STDOUT').must_equal "1\t2\n3\t4\n" end it "should respect given :options options" do @db.copy_table(:test_copy, :options=>"FORMAT csv, HEADER TRUE").must_equal "x,y\n1,2\n3,4\n" end it "should respect given :options options when :format is used" do @db.copy_table(:test_copy, :format=>:csv, :options=>"QUOTE '''', FORCE_QUOTE *").must_equal "'1','2'\n'3','4'\n" end it "should accept dataset as first argument" do @db.copy_table(@db[:test_copy].cross_join(Sequel[:test_copy].as(:tc)).order(Sequel[:test_copy][:x], Sequel[:test_copy][:y], Sequel[:tc][:x], Sequel[:tc][:y])).must_equal "1\t2\t1\t2\n1\t2\t3\t4\n3\t4\t1\t2\n3\t4\t3\t4\n" end it "with a block and no options should yield each row as a string in text format" do buf = [] @db.copy_table(:test_copy){|b| buf << b} buf.must_equal ["1\t2\n", "3\t4\n"] end it "with a block and :format=>:csv should yield each row as a string in csv format" do buf = [] @db.copy_table(:test_copy, :format=>:csv){|b| buf << b} buf.must_equal ["1,2\n", "3,4\n"] end it "should work fine when using a block that is terminated early with a following copy_table" do buf = [] proc{@db.copy_table(:test_copy, :format=>:csv){|b| buf << b; break}}.must_raise(Sequel::DatabaseDisconnectError) buf.must_equal ["1,2\n"] buf.clear proc{@db.copy_table(:test_copy, :format=>:csv){|b| buf << b; raise ArgumentError}}.must_raise(Sequel::DatabaseDisconnectError) buf.must_equal ["1,2\n"] buf.clear @db.copy_table(:test_copy){|b| buf << b} buf.must_equal ["1\t2\n", "3\t4\n"] end it "should work fine when using a block that is terminated early with a following regular query" do buf = [] proc{@db.copy_table(:test_copy, :format=>:csv){|b| buf << b; break}}.must_raise(Sequel::DatabaseDisconnectError) buf.must_equal ["1,2\n"] buf.clear proc{@db.copy_table(:test_copy, :format=>:csv){|b| buf << b; raise 
ArgumentError}}.must_raise(Sequel::DatabaseDisconnectError) buf.must_equal ["1,2\n"] @db[:test_copy].select_order_map(:x).must_equal [1, 3] end it "should not swallow error raised by block" do begin @db.copy_table(:test_copy){|b| raise ArgumentError, "foo"} rescue => e end e.must_be_kind_of Sequel::DatabaseDisconnectError e.wrapped_exception.must_be_kind_of ArgumentError e.message.must_include "foo" end it "should handle errors raised during row processing" do proc{@db.copy_table(@db[:test_copy].select(Sequel[1]/(Sequel[:x] - 3)))}.must_raise Sequel::DatabaseError @db.get(1).must_equal 1 end it "should disconnect if the result status after the copy is not expected" do proc do @db.synchronize do |c| def c.get_last_result; end @db.copy_table(:test_copy) end end.must_raise Sequel::DatabaseDisconnectError end if uses_pg end end if uses_pg && DB.server_version >= 90000 describe "Postgres::Database LISTEN/NOTIFY" do before(:all) do @db = DB end it "should support listen and notify" do # Spec assumes only one connection currently in the pool (otherwise notify_pid will not be deterministic) @db.disconnect notify_pid = @db.synchronize{|conn| conn.backend_pid} called = false @db.listen('foo', :after_listen=>proc{@db.notify('foo')}) do |ev, pid, payload| ev.must_equal 'foo' pid.must_equal notify_pid ['', nil].must_include(payload) called = true end.must_equal 'foo' called.must_equal true # Check weird identifier names called = false @db.listen('FOO bar', :after_listen=>proc{@db.notify('FOO bar')}) do |ev, pid, payload| ev.must_equal 'FOO bar' pid.must_equal notify_pid ['', nil].must_include(payload) called = true end.must_equal 'FOO bar' called.must_equal true # Check identifier symbols called = false @db.listen(:foo, :after_listen=>proc{@db.notify(:foo)}) do |ev, pid, payload| ev.must_equal 'foo' pid.must_equal notify_pid ['', nil].must_include(payload) called = true end.must_equal 'foo' called.must_equal true called = false @db.listen('foo', :after_listen=>proc{@db.notify('foo', :payload=>'bar')}) do |ev, pid, payload| ev.must_equal 'foo' pid.must_equal notify_pid payload.must_equal 'bar' called = true end.must_equal 'foo' called.must_equal true @db.listen('foo', :after_listen=>proc{@db.notify('foo')}).must_equal 'foo' called = false called2 = false i = 0 @db.listen(['foo', 'bar'], :after_listen=>proc{@db.notify('foo', :payload=>'bar'); @db.notify('bar', :payload=>'foo')}, :loop=>proc{i+=1}) do |ev, pid, payload| if !called ev.must_equal 'foo' pid.must_equal notify_pid payload.must_equal 'bar' called = true else ev.must_equal 'bar' pid.must_equal notify_pid payload.must_equal 'foo' called2 = true break end end.must_be_nil called.must_equal true called2.must_equal true i.must_equal 1 proc{@db.listen('foo', :loop=>true)}.must_raise Sequel::Error end it "should accept a :timeout option in listen" do @db.listen('foo2', :timeout=>0.001).must_be_nil called = false @db.listen('foo2', :timeout=>0.001){|ev, pid, payload| called = true}.must_be_nil called.must_equal false i = 0 @db.listen('foo2', :timeout=>0.001, :loop=>proc{i+=1; throw :stop if i > 3}){|ev, pid, payload| called = true}.must_be_nil i.must_equal 4 called = false i = 0 @db.listen('foo2', :timeout=>proc{i+=1; 0.001}){|ev, pid, payload| called = true}.must_be_nil called.must_equal false i.must_equal 1 i = 0 t = 0 @db.listen('foo2', :timeout=>proc{t+=1; 0.001}, :loop=>proc{i+=1; throw :stop if i > 3}){|ev, pid, payload| called = true}.must_be_nil called.must_equal false t.must_equal 4 t = 0 @db.listen('foo2', :timeout=>proc{t+=1; throw :stop if t 
== 4; 0.001}, :loop=>true){|ev, pid, payload| called = true}.must_be_nil called.must_equal false t.must_equal 4 end unless RUBY_PLATFORM =~ /mingw/ # Ruby freezes on this spec on this platform/version end end describe 'PostgreSQL special float handling' do before do @db = DB @db.create_table!(:test5){Float :value} @ds = @db[:test5] end after do @db.drop_table?(:test5) end it 'inserts NaN' do nan = 0.0/0.0 @ds.insert(:value=>nan) @ds.all[0][:value].nan?.must_equal true end it 'inserts +Infinity' do inf = 1.0/0.0 @ds.insert(:value=>inf) @ds.all[0][:value].infinite?.must_be :>, 0 end it 'inserts -Infinity' do inf = -1.0/0.0 @ds.insert(:value=>inf) @ds.all[0][:value].infinite?.must_be :<, 0 end end if DB.adapter_scheme == :postgres describe 'PostgreSQL array handling' do before(:all) do @db = DB @ds = @db[:items] @tp = lambda{@db.schema(:items).map{|a| a.last[:type]}} end after do @db.drop_table?(:items) end it 'insert and retrieve integer and float arrays of various sizes' do @db.create_table!(:items) do column :i2, 'int2[]' column :i4, 'int4[]' column :i8, 'int8[]' column :r, 'real[]' column :dp, 'double precision[]' end @tp.call.must_equal [:smallint_array, :integer_array, :bigint_array, :real_array, :float_array] @ds.insert(Sequel.pg_array([1], :int2), Sequel.pg_array([nil, 2], :int4), Sequel.pg_array([3, nil], :int8), Sequel.pg_array([4, nil, 4.5], :real), Sequel.pg_array([5, nil, 5.5], "double precision")) @ds.count.must_equal 1 rs = @ds.all rs.must_equal [{:i2=>[1], :i4=>[nil, 2], :i8=>[3, nil], :r=>[4.0, nil, 4.5], :dp=>[5.0, nil, 5.5]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @ds.delete @ds.insert(Sequel.pg_array([[1], [2]], :int2), Sequel.pg_array([[nil, 2], [3, 4]], :int4), Sequel.pg_array([[3, nil], [nil, nil]], :int8), Sequel.pg_array([[4, nil], [nil, 4.5]], :real), Sequel.pg_array([[5, nil], [nil, 5.5]], "double precision")) rs = @ds.all rs.must_equal [{:i2=>[[1], [2]], :i4=>[[nil, 2], [3, 4]], :i8=>[[3, nil], [nil, nil]], :r=>[[4, nil], [nil, 4.5]], :dp=>[[5, nil], [nil, 5.5]]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it 'insert and retrieve decimal arrays' do @db.create_table!(:items) do column :n, 'numeric[]' end @tp.call.must_equal [:decimal_array] @ds.insert(Sequel.pg_array([BigDecimal('1.000000000000000000001'), nil, BigDecimal('1')], :numeric)) @ds.count.must_equal 1 rs = @ds.all rs.must_equal [{:n=>[BigDecimal('1.000000000000000000001'), nil, BigDecimal('1')]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @ds.delete @ds.insert(Sequel.pg_array([[BigDecimal('1.0000000000000000000000000000001'), nil], [nil, BigDecimal('1')]], :numeric)) rs = @ds.all rs.must_equal [{:n=>[[BigDecimal('1.0000000000000000000000000000001'), nil], [nil, BigDecimal('1')]]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it 'insert and retrieve string arrays' do @db.create_table!(:items) do column :c, 'char(4)[]' column :vc, 'varchar[]' column :t, 'text[]' end @tp.call.must_equal [:character_array, :varchar_array, :string_array] 
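# These values exercise array literal quoting edge cases: SQL NULL versus the
# string 'NULL', embedded quotes, and space padding from the char(4) column.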
@ds.insert(Sequel.pg_array(['a', nil, 'NULL', 'b"\'c'], 'char(4)'), Sequel.pg_array(['a', nil, 'NULL', 'b"\'c', '', ''], :varchar), Sequel.pg_array(['a', nil, 'NULL', 'b"\'c'], :text)) @ds.count.must_equal 1 rs = @ds.all rs.must_equal [{:c=>['a ', nil, 'NULL', 'b"\'c'], :vc=>['a', nil, 'NULL', 'b"\'c', '', ''], :t=>['a', nil, 'NULL', 'b"\'c']}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @ds.delete @ds.insert(Sequel.pg_array([[['a'], [nil]], [['NULL'], ['b"\'c']]], 'char(4)'), Sequel.pg_array([[['a[],\\[\\]\\,\\""NULL",'], ['']], [['NULL'], ['b"\'c']]], :varchar), Sequel.pg_array([[['a'], [nil]], [['NULL'], ['b"\'c']]], :text)) rs = @ds.all rs.must_equal [{:c=>[[['a '], [nil]], [['NULL'], ['b"\'c']]], :vc=>[[['a[],\\[\\]\\,\\""NULL",'], ['']], [['NULL'], ['b"\'c']]], :t=>[[['a'], [nil]], [['NULL'], ['b"\'c']]]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it 'insert and retrieve arrays of other types' do @db.create_table!(:items) do column :b, 'bool[]' column :d, 'date[]' column :t, 'time[]' column :ts, 'timestamp[]' column :tstz, 'timestamptz[]' end @tp.call.must_equal [:boolean_array, :date_array, :time_array, :datetime_array, :datetime_timezone_array] d = Date.today t = Sequel::SQLTime.create(10, 20, 30) ts = Time.local(2011, 1, 2, 3, 4, 5) @ds.insert(Sequel.pg_array([true, false], :bool), Sequel.pg_array([d, nil], :date), Sequel.pg_array([t, nil], :time), Sequel.pg_array([ts, nil], :timestamp), Sequel.pg_array([ts, nil], :timestamptz)) @ds.count.must_equal 1 rs = @ds.all rs.must_equal [{:b=>[true, false], :d=>[d, nil], :t=>[t, nil], :ts=>[ts, nil], :tstz=>[ts, nil]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @db.create_table!(:items) do column :ba, 'bytea[]' column :tz, 'timetz[]' column :o, 'oid[]' end @tp.call.must_equal [:blob_array, :time_timezone_array, :oid_array] @ds.insert(Sequel.pg_array([Sequel.blob("a\0"), nil], :bytea), Sequel.pg_array([t, nil], :timetz), Sequel.pg_array([1, 2, 3], :oid)) @ds.count.must_equal 1 rs = @ds.all rs.must_equal [{:ba=>[Sequel.blob("a\0"), nil], :tz=>[t, nil], :o=>[1, 2, 3]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @db.create_table!(:items) do column :x, 'xml[]' column :m, 'money[]' column :b, 'bit[]' column :vb, 'bit varying[]' column :u, 'uuid[]' column :xi, 'xid[]' column :c, 'cid[]' column :n, 'name[]' column :o, 'oidvector[]' end @tp.call.must_equal [:xml_array, :money_array, :bit_array, :varbit_array, :uuid_array, :xid_array, :cid_array, :name_array, :oidvector_array] @ds.insert(Sequel.pg_array(['<a></a>'], :xml), Sequel.pg_array(['1'], :money), Sequel.pg_array(['1'], :bit), Sequel.pg_array(['10'], :varbit), Sequel.pg_array(['c0f24910-39e7-11e4-916c-0800200c9a66'], :uuid), Sequel.pg_array(['12'], :xid), Sequel.pg_array(['12'], :cid), Sequel.pg_array(['N'], :name), Sequel.pg_array(['1 2'], :oidvector)) @ds.count.must_equal 1 rs = @ds.all r = rs.first m = r.delete(:m) m.class.must_equal(Sequel::Postgres::PGArray) m.to_a.must_be_kind_of(Array) m.first.must_be_kind_of(String) r.must_be(:==, 
:x=>['<a></a>'], :b=>['1'], :vb=>['10'], :u=>['c0f24910-39e7-11e4-916c-0800200c9a66'], :xi=>['12'], :c=>['12'], :n=>['N'], :o=>['1 2']) rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} r[:m] = m @ds.delete @ds.insert(r) @ds.all.must_equal rs end it 'insert and retrieve empty arrays' do @db.create_table!(:items) do column :n, 'integer[]' end @ds.insert(:n=>Sequel.pg_array([], :integer)) @ds.count.must_equal 1 rs = @ds.all rs.must_equal [{:n=>[]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it 'convert ruby array :default values' do @db.create_table!(:items) do column :n, 'integer[]', :default=>[] end @ds.insert @ds.count.must_equal 1 rs = @ds.all rs.must_equal [{:n=>[]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it 'insert and retrieve custom array types' do point= Class.new do attr_reader :array def initialize(array) @array = array end def sql_literal_append(ds, sql) sql << "'(#{array.join(',')})'" end def ==(other) if other.is_a?(self.class) array == other.array else super end end end @db.register_array_type(:point){|s| point.new(s[1...-1].split(',').map{|i| i.to_i})} @db.create_table!(:items) do column :b, 'point[]' end @tp.call.must_equal [:point_array] pv = point.new([1, 2]) @ds.insert(Sequel.pg_array([pv], :point)) @ds.count.must_equal 1 rs = @ds.all rs.must_equal [{:b=>[pv]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it 'retrieve arrays with explicit bounds' do @db.create_table!(:items) do column :n, 'integer[]' end @ds.insert(:n=>"[0:1]={2,3}") rs = @ds.all rs.must_equal [{:n=>[2,3]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @ds.delete @ds.insert(:n=>"[0:1][0:0]={{2},{3}}") rs = @ds.all rs.must_equal [{:n=>[[2], [3]]}] rs.first.values.each{|v| v.class.must_equal(Sequel::Postgres::PGArray)} rs.first.values.each{|v| v.to_a.must_be_kind_of(Array)} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it 'use arrays in bound variables' do @db.create_table!(:items) do column :i, 'int4[]' end @ds.call(:insert, {:i=>[1,2]}, :i=>:$i) @ds.get(:i).must_equal [1, 2] @ds.filter(:i=>:$i).call(:first, :i=>[1,2]).must_equal(:i=>[1,2]) @ds.filter(:i=>:$i).call(:first, :i=>[1,3]).must_be_nil # NULL values @ds.delete @ds.call(:insert, {:i=>[nil,nil]}, :i=>:$i) @ds.first.must_equal(:i=>[nil, nil]) @db.create_table!(:items) do column :i, 'text[]' end a = ["\"\\\\\"{}\n\t\r \v\b123afP", 'NULL', nil, ''] @ds.call(:insert, {:i=>Sequel.pg_array(a)}, :i=>:$i) @ds.get(:i).must_equal a @ds.filter(:i=>:$i).call(:first, :i=>a).must_equal(:i=>a) @ds.filter(:i=>:$i).call(:first, :i=>['', nil, nil, 'a']).must_be_nil @db.create_table!(:items) do column :i, 'date[]' end a = [Date.today] @ds.call(:insert, {:i=>Sequel.pg_array(a, 'date')}, :i=>:$i) @ds.get(:i).must_equal a @ds.filter(:i=>:$i).call(:first, :i=>a).must_equal(:i=>a) @ds.filter(:i=>:$i).call(:first, :i=>Sequel.pg_array([Date.today-1], 'date')).must_be_nil @db.create_table!(:items) do column :i, 'timestamp[]' end a 
= [Time.local(2011, 1, 2, 3, 4, 5)] @ds.call(:insert, {:i=>Sequel.pg_array(a, 'timestamp')}, :i=>:$i) @ds.get(:i).must_equal a @ds.filter(:i=>:$i).call(:first, :i=>a).must_equal(:i=>a) @ds.filter(:i=>:$i).call(:first, :i=>Sequel.pg_array([a.first-1], 'timestamp')).must_be_nil @db.create_table!(:items) do column :i, 'boolean[]' end a = [true, false] @ds.call(:insert, {:i=>Sequel.pg_array(a, 'boolean')}, :i=>:$i) @ds.get(:i).must_equal a @ds.filter(:i=>:$i).call(:first, :i=>a).must_equal(:i=>a) @ds.filter(:i=>:$i).call(:first, :i=>Sequel.pg_array([false, true], 'boolean')).must_be_nil @db.create_table!(:items) do column :i, 'bytea[]' end a = [Sequel.blob("a\0'\"")] @ds.call(:insert, {:i=>Sequel.pg_array(a, 'bytea')}, :i=>:$i) @ds.get(:i).must_equal a @ds.filter(:i=>:$i).call(:first, :i=>a).must_equal(:i=>a) @ds.filter(:i=>:$i).call(:first, :i=>Sequel.pg_array([Sequel.blob("b\0")], 'bytea')).must_be_nil end if uses_pg_or_jdbc it 'with models' do @db.create_table!(:items) do primary_key :id column :i, 'integer[]' column :f, 'double precision[]' column :d, 'numeric[]' column :t, 'text[]' end c = Class.new(Sequel::Model(@db[:items])) h = {:i=>[1,2, nil], :f=>[[1, 2.5], [3, 4.5]], :d=>[1, BigDecimal('1.000000000000000000001')], :t=>[%w'a b c', ['NULL', nil, '1']]} o = c.create(h) o.i.must_equal [1, 2, nil] o.f.must_equal [[1, 2.5], [3, 4.5]] o.d.must_equal [BigDecimal('1'), BigDecimal('1.000000000000000000001')] o.t.must_equal [%w'a b c', ['NULL', nil, '1']] c.where(:i=>o.i, :f=>o.f, :d=>o.d, :t=>o.t).all.must_equal [o] o2 = c.new(h) c.where(:i=>o2.i, :f=>o2.f, :d=>o2.d, :t=>o2.t).all.must_equal [o] @db.create_table!(:items) do primary_key :id column :i, 'int2[]' column :f, 'real[]' column :d, 'numeric(30,28)[]' column :t, 'varchar[]' end c = Class.new(Sequel::Model(@db[:items])) o = c.create(:i=>[1,2, nil], :f=>[[1, 2.5], [3, 4.5]], :d=>[1, BigDecimal('1.000000000000000000001')], :t=>[%w'a b c', ['NULL', nil, '1']]) o.i.must_equal [1, 2, nil] o.f.must_equal [[1, 2.5], [3, 4.5]] o.d.must_equal [BigDecimal('1'), BigDecimal('1.000000000000000000001')] o.t.must_equal [%w'a b c', ['NULL', nil, '1']] c.where(:i=>o.i, :f=>o.f, :d=>o.d, :t=>o.t).all.must_equal [o] o2 = c.new(h) c.where(:i=>o2.i, :f=>o2.f, :d=>o2.d, :t=>o2.t).all.must_equal [o] end it 'with empty array default values and defaults_setter plugin' do @db.create_table!(:items) do column :n, 'integer[]', :default=>[] end c = Class.new(Sequel::Model(@db[:items])) c.plugin :defaults_setter, :cache=>true o = c.new o.n.class.must_equal(Sequel::Postgres::PGArray) o.n.to_a.must_be_same_as(o.n.to_a) o.n << 1 o.save.n.must_equal [1] end it 'operations/functions with pg_array_ops' do Sequel.extension :pg_array_ops @db.create_table!(:items){column :i, 'integer[]'; column :i2, 'integer[]'; column :i3, 'integer[]'; column :i4, 'integer[]'; column :i5, 'integer[]'} @ds.insert(Sequel.pg_array([1, 2, 3]), Sequel.pg_array([2, 1]), Sequel.pg_array([4, 4]), Sequel.pg_array([[5, 5], [4, 3]]), Sequel.pg_array([1, nil, 5])) @ds.get(Sequel.pg_array(:i) > :i3).must_equal false @ds.get(Sequel.pg_array(:i3) > :i).must_equal true @ds.get(Sequel.pg_array(:i) >= :i3).must_equal false @ds.get(Sequel.pg_array(:i) >= :i).must_equal true @ds.get(Sequel.pg_array(:i3) < :i).must_equal false @ds.get(Sequel.pg_array(:i) < :i3).must_equal true @ds.get(Sequel.pg_array(:i3) <= :i).must_equal false @ds.get(Sequel.pg_array(:i) <= :i).must_equal true @ds.get(Sequel.expr(5=>Sequel.pg_array(:i).any)).must_equal false @ds.get(Sequel.expr(1=>Sequel.pg_array(:i).any)).must_equal true 
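# ANY/ALL operators: i is [1, 2, 3] and i3 is [4, 4], so 1 matches ANY(i) but
# only 4 matches ALL(i3). These generate SQL roughly like (1 = ANY("i")).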
@ds.get(Sequel.expr(1=>Sequel.pg_array(:i3).all)).must_equal false @ds.get(Sequel.expr(4=>Sequel.pg_array(:i3).all)).must_equal true @ds.get(Sequel.expr(1=>Sequel.pg_array(:i)[1..1].any)).must_equal true @ds.get(Sequel.expr(2=>Sequel.pg_array(:i)[1..1].any)).must_equal false @ds.get(Sequel.pg_array(:i2)[1]).must_equal 2 @ds.get(Sequel.pg_array(:i2)[1]).must_equal 2 @ds.get(Sequel.pg_array(:i2)[2]).must_equal 1 @ds.get(Sequel.pg_array(:i4)[2][1]).must_equal 4 @ds.get(Sequel.pg_array(:i4)[2][2]).must_equal 3 @ds.get(Sequel.pg_array(:i).contains(:i2)).must_equal true @ds.get(Sequel.pg_array(:i).contains(:i3)).must_equal false @ds.get(Sequel.pg_array(:i2).contained_by(:i)).must_equal true @ds.get(Sequel.pg_array(:i).contained_by(:i2)).must_equal false @ds.get(Sequel.pg_array(:i).overlaps(:i2)).must_equal true @ds.get(Sequel.pg_array(:i2).overlaps(:i3)).must_equal false @ds.get(Sequel.pg_array(:i).dims).must_equal '[1:3]' @ds.get(Sequel.pg_array(:i).length).must_equal 3 @ds.get(Sequel.pg_array(:i).lower).must_equal 1 if @db.server_version >= 80400 @ds.select(Sequel.pg_array(:i).unnest).from_self.count.must_equal 3 end if @db.server_version >= 90000 @ds.get(Sequel.pg_array(:i5).join).must_equal '15' @ds.get(Sequel.pg_array(:i5).join(':')).must_equal '1:5' end if @db.server_version >= 90100 @ds.get(Sequel.pg_array(:i5).join(':', '*')).must_equal '1:*:5' end if @db.server_version >= 90300 @ds.get(Sequel.pg_array(:i5).remove(1).length).must_equal 2 @ds.get(Sequel.pg_array(:i5).replace(1, 4).contains([1])).must_equal false @ds.get(Sequel.pg_array(:i5).replace(1, 4).contains([4])).must_equal true end if @db.server_version >= 90400 @ds.get(Sequel.pg_array(:i).cardinality).must_equal 3 @ds.get(Sequel.pg_array(:i4).cardinality).must_equal 4 @ds.get(Sequel.pg_array(:i5).cardinality).must_equal 3 @ds.from{Sequel.pg_array([1,2,3]).op.unnest([4,5,6], [7,8]).as(:t1, [:a, :b, :c])}.select_order_map([:a, :b, :c]).must_equal [[1, 4, 7], [2, 5, 8], [3, 6, nil]] end @ds.get(Sequel.pg_array(:i).push(4)).must_equal [1, 2, 3, 4] @ds.get(Sequel.pg_array(:i).unshift(4)).must_equal [4, 1, 2, 3] @ds.get(Sequel.pg_array(:i).concat(:i2)).must_equal [1, 2, 3, 2, 1] if @db.type_supported?(:hstore) Sequel.extension :pg_hstore_ops @db.get(Sequel.pg_array(['a', 'b']).op.hstore['a']).must_equal 'b' @db.get(Sequel.pg_array(['a', 'b']).op.hstore(['c', 'd'])['a']).must_equal 'c' end end end describe 'PostgreSQL hstore handling' do before(:all) do @db = DB @ds = @db[:items] @h = {'a'=>'b', 'c'=>nil, 'd'=>'NULL', 'e'=>'\\\\" \\\' ,=>'} end after do @db.drop_table?(:items) end it 'insert and retrieve hstore values' do @db.create_table!(:items) do column :h, :hstore end @ds.insert(Sequel.hstore(@h)) @ds.count.must_equal 1 rs = @ds.all v = rs.first[:h] v.must_equal @h v.class.must_equal(Sequel::Postgres::HStore) v.to_hash.must_be_kind_of(Hash) v.to_hash.must_equal @h @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it 'insert and retrieve hstore[] values' do @db.create_table!(:items) do column :h, 'hstore[]' end @ds.insert(Sequel.pg_array([Sequel.hstore(@h)], :hstore)) @ds.count.must_equal 1 rs = @ds.all v = rs.first[:h].first v.class.must_equal(Sequel::Postgres::HStore) v.to_hash.must_be_kind_of(Hash) v.to_hash.must_equal @h @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it 'use hstore in bound variables' do @db.create_table!(:items) do column :i, :hstore end @ds.call(:insert, {:i=>Sequel.hstore(@h)}, {:i=>:$i}) @ds.get(:i).must_equal @h @ds.filter(:i=>:$i).call(:first, :i=>Sequel.hstore(@h)).must_equal(:i=>@h) 
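# A bound empty hstore does not match the stored row, so no row is returned.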
@ds.filter(:i=>:$i).call(:first, :i=>Sequel.hstore({})).must_be_nil @ds.delete @ds.call(:insert, {:i=>Sequel.hstore('a'=>nil)}, {:i=>:$i}) @ds.get(:i).must_equal Sequel.hstore('a'=>nil) @ds.delete @ds.call(:insert, {:i=>@h}, {:i=>:$i}) @ds.get(:i).must_equal @h @ds.filter(:i=>:$i).call(:first, :i=>@h).must_equal(:i=>@h) @ds.filter(:i=>:$i).call(:first, :i=>{}).must_be_nil @db.create_table!(:items) do column :i, 'hstore[]' end @ds.call(:insert, {:i=>Sequel.pg_array([Sequel.hstore(@h)], :hstore)}, {:i=>:$i}) @ds.get(:i).must_equal [@h] @ds.filter(:i=>:$i).call(:first, :i=>Sequel.pg_array([Sequel.hstore(@h)], :hstore)).must_equal(:i=>[@h]) @ds.filter(:i=>:$i).call(:first, :i=>Sequel.pg_array([Sequel.hstore({})], :hstore)).must_be_nil @ds.delete @ds.call(:insert, {:i=>Sequel.pg_array([Sequel.hstore('a'=>nil)], :hstore)}, {:i=>:$i}) @ds.get(:i).must_equal [Sequel.hstore('a'=>nil)] end if uses_pg_or_jdbc it 'with models and associations' do @db.create_table!(:items) do primary_key :id column :h, :hstore end c = Class.new(Sequel::Model(@db[:items])) do def self.name 'Item' end unrestrict_primary_key def item_id h['item_id'].to_i if h end def left_item_id h['left_item_id'].to_i if h end end Sequel.extension :pg_hstore_ops c.plugin :many_through_many h = {'item_id'=>"2", 'left_item_id'=>"1"} o2 = c.create(:id=>2) o = c.create(:id=>1, :h=>h) o.h.must_equal h c.many_to_one :item, :class=>c, :key_column=>Sequel.cast(Sequel.hstore(:h)['item_id'], Integer) c.one_to_many :items, :class=>c, :key=>Sequel.cast(Sequel.hstore(:h)['item_id'], Integer), :key_method=>:item_id c.many_to_many :related_items, :class=>c, :join_table=>Sequel[:items].as(:i), :left_key=>Sequel.cast(Sequel.hstore(:h)['left_item_id'], Integer), :right_key=>Sequel.cast(Sequel.hstore(:h)['item_id'], Integer) c.many_to_one :other_item, :class=>c, :key=>:id, :primary_key_method=>:item_id, :primary_key=>Sequel.cast(Sequel.hstore(:h)['item_id'], Integer), :reciprocal=>:other_items c.one_to_many :other_items, :class=>c, :primary_key=>:item_id, :key=>:id, :primary_key_column=>Sequel.cast(Sequel.hstore(:h)['item_id'], Integer), :reciprocal=>:other_item c.many_to_many :other_related_items, :class=>c, :join_table=>Sequel[:items].as(:i), :left_key=>:id, :right_key=>:id, :left_primary_key_column=>Sequel.cast(Sequel.hstore(:h)['left_item_id'], Integer), :left_primary_key=>:left_item_id, :right_primary_key=>Sequel.cast(Sequel.hstore(:h)['left_item_id'], Integer), :right_primary_key_method=>:left_item_id c.many_through_many :mtm_items, [ [:items, Sequel.cast(Sequel.hstore(:h)['item_id'], Integer), Sequel.cast(Sequel.hstore(:h)['left_item_id'], Integer)], [:items, Sequel.cast(Sequel.hstore(:h)['left_item_id'], Integer), Sequel.cast(Sequel.hstore(:h)['left_item_id'], Integer)] ], :class=>c, :left_primary_key_column=>Sequel.cast(Sequel.hstore(:h)['item_id'], Integer), :left_primary_key=>:item_id, :right_primary_key=>Sequel.cast(Sequel.hstore(:h)['left_item_id'], Integer), :right_primary_key_method=>:left_item_id # Lazily Loading o.item.must_equal o2 o2.items.must_equal [o] o.related_items.must_equal [o2] o2.other_item.must_equal o o.other_items.must_equal [o2] o.other_related_items.must_equal [o] o.mtm_items.must_equal [o] # Eager Loading via eager os = c.eager(:item, :related_items, :other_items, :other_related_items, :mtm_items).where(:id=>1).all.first os.item.must_equal o2 os.related_items.must_equal [o2] os.other_items.must_equal [o2] os.other_related_items.must_equal [o] os.mtm_items.must_equal [o] os = c.eager(:items, 
:other_item).where(:id=>2).all.first os.items.must_equal [o] os.other_item.must_equal o # Eager Loading via eager_graph c.eager_graph(:item).where(Sequel[:items][:id]=>1).all.first.item.must_equal o2 c.eager_graph(:items).where(Sequel[:items][:id]=>2).all.first.items.must_equal [o] c.eager_graph(:related_items).where(Sequel[:items][:id]=>1).all.first.related_items.must_equal [o2] c.eager_graph(:other_item).where(Sequel[:items][:id]=>2).all.first.other_item.must_equal o c.eager_graph(:other_items).where(Sequel[:items][:id]=>1).all.first.other_items.must_equal [o2] c.eager_graph(:other_related_items).where(Sequel[:items][:id]=>1).all.first.other_related_items.must_equal [o] c.eager_graph(:mtm_items).where(Sequel[:items][:id]=>1).all.first.mtm_items.must_equal [o] # Filter By Associations - Model Instances c.filter(:item=>o2).all.must_equal [o] c.filter(:items=>o).all.must_equal [o2] c.filter(:related_items=>o2).all.must_equal [o] c.filter(:other_item=>o).all.must_equal [o2] c.filter(:other_items=>o2).all.must_equal [o] c.filter(:other_related_items=>o).all.must_equal [o] c.filter(:mtm_items=>o).all.must_equal [o] # Filter By Associations - Model Datasets c.filter(:item=>c.filter(:id=>o2.id)).all.must_equal [o] c.filter(:items=>c.filter(:id=>o.id)).all.must_equal [o2] c.filter(:related_items=>c.filter(:id=>o2.id)).all.must_equal [o] c.filter(:other_item=>c.filter(:id=>o.id)).all.must_equal [o2] c.filter(:other_items=>c.filter(:id=>o2.id)).all.must_equal [o] c.filter(:other_related_items=>c.filter(:id=>o.id)).all.must_equal [o] c.filter(:mtm_items=>c.filter(:id=>o.id)).all.must_equal [o] end it 'with empty hstore default values and defaults_setter plugin' do @db.create_table!(:items) do hstore :h, :default=>Sequel.hstore({}) end c = Class.new(Sequel::Model(@db[:items])) c.plugin :defaults_setter, :cache=>true o = c.new o.h.class.must_equal(Sequel::Postgres::HStore) o.h.to_hash.must_be_same_as(o.h.to_hash) o.h['a'] = 'b' o.save.h.must_equal('a'=>'b') end it 'operations/functions with pg_hstore_ops' do Sequel.extension :pg_hstore_ops, :pg_array_ops @db.create_table!(:items){hstore :h1; hstore :h2; hstore :h3; String :t} @ds.insert(Sequel.hstore('a'=>'b', 'c'=>nil), Sequel.hstore('a'=>'b'), Sequel.hstore('d'=>'e')) h1 = Sequel.hstore(:h1) h2 = Sequel.hstore(:h2) h3 = Sequel.hstore(:h3) @ds.get(h1['a']).must_equal 'b' @ds.get(h1['d']).must_be_nil @ds.get(h2.concat(h3).keys.length).must_equal 2 @ds.get(h1.concat(h3).keys.length).must_equal 3 @ds.get(h2.merge(h3).keys.length).must_equal 2 @ds.get(h1.merge(h3).keys.length).must_equal 3 @ds.get(h1.contain_all(%w'a c')).must_equal true @ds.get(h1.contain_all(%w'a d')).must_equal false @ds.get(h1.contain_any(%w'a d')).must_equal true @ds.get(h1.contain_any(%w'e d')).must_equal false @ds.get(h1.contains(h2)).must_equal true @ds.get(h1.contains(h3)).must_equal false @ds.get(h2.contained_by(h1)).must_equal true @ds.get(h2.contained_by(h3)).must_equal false @ds.get(h1.defined('a')).must_equal true @ds.get(h1.defined('c')).must_equal false @ds.get(h1.defined('d')).must_equal false @ds.get(h1.delete('a')['c']).must_be_nil @ds.get(h1.delete(%w'a d')['c']).must_be_nil @ds.get(h1.delete(h2)['c']).must_be_nil @ds.from(Sequel.hstore('a'=>'b', 'c'=>nil).op.each).order(:key).all.must_equal [{:key=>'a', :value=>'b'}, {:key=>'c', :value=>nil}] @ds.get(h1.has_key?('c')).must_equal true @ds.get(h1.include?('c')).must_equal true @ds.get(h1.key?('c')).must_equal true @ds.get(h1.member?('c')).must_equal true @ds.get(h1.exist?('c')).must_equal true 
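# has_key? and its include?/key?/member?/exist? aliases use the hstore exist
# operator: 'c' is present in h1 (with a NULL value) while 'd' is not.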
@ds.get(h1.has_key?('d')).must_equal false @ds.get(h1.include?('d')).must_equal false @ds.get(h1.key?('d')).must_equal false @ds.get(h1.member?('d')).must_equal false @ds.get(h1.exist?('d')).must_equal false @ds.get(h1.hstore.hstore.hstore.keys.length).must_equal 2 @ds.get(h1.keys.length).must_equal 2 @ds.get(h2.keys.length).must_equal 1 @ds.get(h1.akeys.length).must_equal 2 @ds.get(h2.akeys.length).must_equal 1 @ds.from(Sequel.hstore('t'=>'s').op.populate(Sequel::SQL::Cast.new(nil, :items))).select_map(:t).must_equal ['s'] @ds.from(Sequel[:items].as(:i)).select(Sequel.hstore('t'=>'s').op.record_set(:i).as(:r)).from_self(:alias=>:s).select(Sequel.lit('(r).*')).from_self.select_map(:t).must_equal ['s'] @ds.from(Sequel.hstore('t'=>'s', 'a'=>'b').op.skeys.as(:s)).select_order_map(:s).must_equal %w'a t' @ds.from((Sequel.hstore('t'=>'s', 'a'=>'b').op - 'a').skeys.as(:s)).select_order_map(:s).must_equal %w't' @ds.get(h1.slice(%w'a c').keys.length).must_equal 2 @ds.get(h1.slice(%w'd c').keys.length).must_equal 1 @ds.get(h1.slice(%w'd e').keys.length).must_be_nil @ds.from(Sequel.hstore('t'=>'s', 'a'=>'b').op.svals.as(:s)).select_order_map(:s).must_equal %w'b s' @ds.get(h1.to_array.length).must_equal 4 @ds.get(h2.to_array.length).must_equal 2 @ds.get(h1.to_matrix.length).must_equal 2 @ds.get(h2.to_matrix.length).must_equal 1 @ds.get(h1.values.length).must_equal 2 @ds.get(h2.values.length).must_equal 1 @ds.get(h1.avals.length).must_equal 2 @ds.get(h2.avals.length).must_equal 1 if DB.server_version >= 140000 @ds.update(h1['a'] => '2', h1['b'] => '3', h1['c'] => '4') @ds.get(:h1).must_equal("a"=>"2", "b"=>"3", "c"=>"4") end end end if DB.type_supported?(:hstore) describe 'PostgreSQL' do before(:all) do @db = DB @ds = @db[:items] @a = [1, 2, {'a'=>'b'}, 3.0] @h = {'a'=>'b', '1'=>[3, 4, 5]} end after do @db.wrap_json_primitives = nil @db.typecast_json_strings = nil @db.drop_table?(:items) end json_types = [:json] json_types << :jsonb if DB.server_version >= 90400 json_types.each do |json_type| json_array_type = "#{json_type}[]" pg_json = Sequel.method(:"pg_#{json_type}") pg_json_wrap = Sequel.method(:"pg_#{json_type}_wrap") hash_class = json_type == :jsonb ? Sequel::Postgres::JSONBHash : Sequel::Postgres::JSONHash array_class = json_type == :jsonb ? Sequel::Postgres::JSONBArray : Sequel::Postgres::JSONArray str_class = json_type == :jsonb ? Sequel::Postgres::JSONBString : Sequel::Postgres::JSONString object_class = json_type == :jsonb ? Sequel::Postgres::JSONBObject : Sequel::Postgres::JSONObject Sequel.extension :pg_json_ops jo = pg_json.call('a'=>1, 'b'=>{'c'=>2, 'd'=>{'e'=>3}}).op ja = pg_json.call([2, 3, %w'a b']).op it "insert and retrieve #{json_type} values" do @db.create_table!(:items){column :j, json_type} @ds.insert(pg_json.call(@h)) @ds.count.must_equal 1 rs = @ds.all v = rs.first[:j] v.class.must_equal(hash_class) v.to_hash.must_be_kind_of(Hash) v.must_equal @h v.to_hash.must_equal @h @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @ds.delete @ds.insert(pg_json.call(@a)) @ds.count.must_equal 1 rs = @ds.all v = rs.first[:j] v.class.must_equal(array_class) v.to_a.must_be_kind_of(Array) v.must_equal @a v.to_a.must_equal @a @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it "insert and retrieve #{json_type} primitive values" do @db.create_table!(:items){column :j, json_type} ['str', 1, 2.5, nil, true, false].each do |rv| @ds.delete @ds.insert(pg_json_wrap.call(rv)) @ds.count.must_equal 1 rs = @ds.all v = rs.first[:j] v.class.must_equal(rv.class) if rv.nil? 
v.must_be_nil else v.must_equal rv end end @db.wrap_json_primitives = true ['str', 1, 2.5, nil, true, false].each do |rv| @ds.delete @ds.insert(pg_json_wrap.call(rv)) @ds.count.must_equal 1 rs = @ds.all v = rs.first[:j] v.class.ancestors.must_include(object_class) v.__getobj__.must_be_kind_of(rv.class) if rv.nil? v.must_be_nil v.__getobj__.must_be_nil else v.must_equal rv v.__getobj__.must_equal rv end @ds.delete @ds.insert(rs.first) @ds.all[0][:j].must_equal rs[0][:j] end end it "insert and retrieve #{json_type}[] values" do @db.create_table!(:items){column :j, json_array_type} j = Sequel.pg_array([pg_json.call('a'=>1), pg_json.call(['b', 2])]) @ds.insert(j) @ds.count.must_equal 1 rs = @ds.all v = rs.first[:j] v.class.must_equal(Sequel::Postgres::PGArray) v.to_a.must_be_kind_of(Array) v.must_equal j v.to_a.must_equal j @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it "insert and retrieve #{json_type}[] values with json primitives" do @db.create_table!(:items){column :j, json_array_type} raw = ['str', 1, 2.5, nil, true, false] j = Sequel.pg_array(raw.map(&pg_json_wrap), json_type) @ds.insert(j) @ds.count.must_equal 1 rs = @ds.all v = rs.first[:j] v.class.must_equal(Sequel::Postgres::PGArray) v.to_a.must_be_kind_of(Array) v.map(&:class).must_equal raw.map(&:class) v.must_equal raw v.to_a.must_equal raw @db.wrap_json_primitives = true j = Sequel.pg_array(raw.map(&pg_json_wrap), json_type) @ds.insert(j) rs = @ds.all v = rs.first[:j] v.class.must_equal(Sequel::Postgres::PGArray) v.to_a.must_be_kind_of(Array) v.map(&:class).each{|c| c.ancestors.must_include(object_class)} [v, v.to_a].each do |v0| v0.zip(raw) do |v1, r1| if r1.nil? v1.must_be_nil v1.__getobj__.must_be_nil else v1.must_equal r1 v1.__getobj__.must_equal r1 end end end @ds.delete @ds.insert(rs.first) @ds.all[0][:j].zip(rs[0][:j]) do |v1, r1| if v1.__getobj__.nil? v1.must_be_nil v1.__getobj__.must_be_nil else v1.must_equal r1 v1.must_equal r1.__getobj__ v1.__getobj__.must_equal r1 v1.__getobj__.must_equal r1.__getobj__ end end end it "models with #{json_type} columns" do @db.create_table!(:items) do primary_key :id column :h, json_type end c = Class.new(Sequel::Model(@db[:items])) c.create(:h=>@h).h.must_equal @h c.create(:h=>@a).h.must_equal @a c.create(:h=>pg_json.call(@h)).h.must_equal @h c.create(:h=>pg_json.call(@a)).h.must_equal @a end it "models with #{json_type} primitives" do @db.create_table!(:items) do primary_key :id column :h, json_type end c = Class.new(Sequel::Model(@db[:items])) ['str', 1, 2.5, nil, true, false].each do |v| @db.wrap_json_primitives = nil cv = c[c.insert(:h=>pg_json_wrap.call(v))] cv.h.class.ancestors.wont_include(object_class) if v.nil? cv.h.must_be_nil else cv.h.must_equal v end @db.wrap_json_primitives = true cv.refresh cv.h.class.ancestors.must_include(object_class) cv.save cv.refresh cv.h.class if v.nil? 
cv.h.must_be_nil else cv.h.must_equal v end c.new(:h=>cv.h).h.class.ancestors.must_include(object_class) end v = c.new(:h=>'{}').h v.class.must_equal hash_class v.must_equal({}) @db.typecast_json_strings = true v = c.new(:h=>'{}').h v.class.must_equal str_class v.must_equal '{}' c.new(:h=>'str').h.class.ancestors.must_include(object_class) c.new(:h=>'str').h.must_equal 'str' c.new(:h=>1).h.class.ancestors.must_include(object_class) c.new(:h=>1).h.must_equal 1 c.new(:h=>2.5).h.class.ancestors.must_include(object_class) c.new(:h=>2.5).h.must_equal 2.5 c.new(:h=>true).h.class.ancestors.must_include(object_class) c.new(:h=>true).h.must_equal true c.new(:h=>false).h.class.ancestors.must_include(object_class) c.new(:h=>false).h.must_equal false c.new(:h=>nil).h.class.ancestors.wont_include(object_class) c.new(:h=>nil).h.must_be_nil end it "with empty #{json_type} default values and defaults_setter plugin" do @db.create_table!(:items) do column :h, json_type, :default=>hash_class.new({}) column :a, json_type, :default=>array_class.new([]) end c = Class.new(Sequel::Model(@db[:items])) c.plugin :defaults_setter, :cache=>true o = c.new o.h.class.must_equal(hash_class) o.a.class.must_equal(array_class) o.h.to_hash.must_be_same_as(o.h.to_hash) o.a.to_a.must_be_same_as(o.a.to_a) o.h['a'] = 'b' o.a << 1 o.save o.h.must_equal('a'=>'b') o.a.must_equal([1]) end it "use #{json_type} in bound variables" do @db.create_table!(:items){column :i, json_type} @ds.call(:insert, {:i=>pg_json.call(@h)}, {:i=>:$i}) @ds.get(:i).must_equal @h @ds.delete @ds.call(:insert, {:i=>pg_json.call('a'=>nil)}, {:i=>:$i}) @ds.get(:i).must_equal pg_json.call('a'=>nil) @db.create_table!(:items){column :i, json_array_type} j = Sequel.pg_array([pg_json.call('a'=>1), pg_json.call(['b', 2])], json_type) @ds.call(:insert, {:i=>j}, {:i=>:$i}) @ds.get(:i).must_equal j end if uses_pg_or_jdbc it "use #{json_type} primitives in bound variables" do @db.create_table!(:items){column :i, json_type} @db.wrap_json_primitives = true raw = ['str', 1, 2.5, nil, true, false] raw.each do |v| @ds.delete @ds.call(:insert, {:i=>@db.get(pg_json_wrap.call(v))}, {:i=>:$i}) rv = @ds.get(:i) rv.class.ancestors.must_include(object_class) if v.nil? rv.must_be_nil else rv.must_equal v end end @db.create_table!(:items){column :i, json_array_type} j = Sequel.pg_array(raw.map(&pg_json_wrap), json_type) @ds.call(:insert, {:i=>j}, {:i=>:$i}) @ds.all[0][:i].zip(raw) do |v1, r1| if v1.__getobj__.nil? 
v1.must_be_nil v1.__getobj__.must_be_nil else v1.must_equal r1 v1.__getobj__.must_equal r1 end end end if uses_pg_or_jdbc it "9.3 #{json_type} operations/functions with pg_json_ops" do @db.get(jo['a']).must_equal 1 @db.get(jo['b']['c']).must_equal 2 @db.get(jo[%w'b c']).must_equal 2 @db.get(jo['b'].get_text(%w'd e')).must_equal "3" @db.get(jo[%w'b d'].get_text('e')).must_equal "3" @db.get(ja[1]).must_equal 3 @db.get(ja[%w'2 1']).must_equal 'b' @db.get(jo.extract('a')).must_equal 1 @db.get(jo.extract('b').extract('c')).must_equal 2 @db.get(jo.extract('b', 'c')).must_equal 2 @db.get(jo.extract('b', 'd', 'e')).must_equal 3 @db.get(jo.extract_text('b', 'd')).gsub(' ', '').must_equal '{"e":3}' @db.get(jo.extract_text('b', 'd', 'e')).must_equal '3' @db.get(ja.array_length).must_equal 3 @db.from(ja.array_elements.as(:v)).select_map(:v).must_equal [2, 3, %w'a b'] @db.from(jo.keys.as(:k)).select_order_map(:k).must_equal %w'a b' @db.from(jo.each).select_order_map(:key).must_equal %w'a b' @db.from(jo.each).order(:key).select_map(:value).must_equal [1, {'c'=>2, 'd'=>{'e'=>3}}] @db.from(jo.each_text).select_order_map(:key).must_equal %w'a b' @db.from(jo.each_text).order(:key).where(:key=>'b').get(:value).gsub(' ', '').must_match(/\{"d":\{"e":3\},"c":2\}|\{"c":2,"d":\{"e":3\}\}/) Sequel.extension :pg_row_ops @db.create_table!(:items) do Integer :a String :b end j = Sequel.pg_json('a'=>1, 'b'=>'c').op @db.get(j.populate(Sequel.cast(nil, :items)).pg_row[:a]).must_equal 1 @db.get(j.populate(Sequel.cast(nil, :items)).pg_row[:b]).must_equal 'c' j = Sequel.pg_json([{'a'=>1, 'b'=>'c'}, {'a'=>2, 'b'=>'d'}]).op @db.from(j.populate_set(Sequel.cast(nil, :items))).select_order_map(:a).must_equal [1, 2] @db.from(j.populate_set(Sequel.cast(nil, :items))).select_order_map(:b).must_equal %w'c d' end if DB.server_version >= 90300 it "9.4 #{json_type} operations/functions with pg_json_ops" do @db.get(jo.typeof).must_equal 'object' @db.get(ja.typeof).must_equal 'array' @db.from(ja.array_elements_text.as(:v)).select_map(:v).map{|s| s.gsub(' ', '')}.must_equal ['2', '3', '["a","b"]'] @db.from(jo.to_record.as(:v, [Sequel.lit('a integer'), Sequel.lit('b text')])).select_map(:a).must_equal [1] @db.from(pg_json.call([{'a'=>1, 'b'=>1}]).op.to_recordset.as(:v, [Sequel.lit('a integer'), Sequel.lit('b integer')])).select_map(:a).must_equal [1] if json_type == :jsonb @db.get(jo.has_key?('a')).must_equal true @db.get(jo.has_key?('c')).must_equal false @db.get(pg_json.call(['2', '3', %w'a b']).op.include?('2')).must_equal true @db.get(pg_json.call(['2', '3', %w'a b']).op.include?('4')).must_equal false @db.get(jo.contain_all(['a', 'b'])).must_equal true @db.get(jo.contain_all(['a', 'c'])).must_equal false @db.get(jo.contain_all(['d', 'c'])).must_equal false @db.get(jo.contain_any(['a', 'b'])).must_equal true @db.get(jo.contain_any(['a', 'c'])).must_equal true @db.get(jo.contain_any(['d', 'c'])).must_equal false @db.get(jo.contains(jo)).must_equal true @db.get(jo.contained_by(jo)).must_equal true @db.get(jo.contains('a'=>1)).must_equal true @db.get(jo.contained_by('a'=>1)).must_equal false @db.get(pg_json.call('a'=>1).op.contains(jo)).must_equal false @db.get(pg_json.call('a'=>1).op.contained_by(jo)).must_equal true @db.get(ja.contains(ja)).must_equal true @db.get(ja.contained_by(ja)).must_equal true @db.get(ja.contains([2,3])).must_equal true @db.get(ja.contained_by([2,3])).must_equal false @db.get(pg_json.call([2,3]).op.contains(ja)).must_equal false @db.get(pg_json.call([2,3]).op.contained_by(ja)).must_equal true end end if 
DB.server_version >= 90400 it '9.5 jsonb operations/functions with pg_json_ops' do @db.get(pg_json.call([nil, 2]).op.strip_nulls[1]).must_equal 2 @db.get(pg_json.call([nil, 2]).op.pretty).must_equal "[\n null,\n 2\n]" @db.from((jo - 'b').keys.as(:k)).select_order_map(:k).must_equal %w'a' @db.from(jo.delete_path(['b','c'])['b'].keys.as(:k)).select_order_map(:k).must_equal %w'd' @db.from(jo.concat('c'=>'d').keys.as(:k)).select_order_map(:k).must_equal %w'a b c' @db.get(jo.set(%w'a', 'f'=>'g')['a']['f']).must_equal 'g' end if DB.server_version >= 90500 && json_type == :jsonb it '9.6 jsonb operations/functions with pg_json_ops' do @db.get(pg_json.call([3]).op.insert(['0'], {'a'=>2})[0]['a']).must_equal 2 @db.get(pg_json.call([3]).op.insert(['0'], {'a'=>2}, false)[0]['a']).must_equal 2 @db.get(pg_json.call([3]).op.insert(['0'], {'a'=>2}, true)[0]).must_equal 3 @db.get(pg_json.call([3]).op.insert(['0'], {'a'=>2}, true)[1]['a']).must_equal 2 end if DB.server_version >= 90600 && json_type == :jsonb it '12 jsonb operations/functions with pg_json_ops' do @db.get(jo.path_exists('$.b.d.e')).must_equal true @db.get(jo.path_exists('$.b.d.f')).must_equal false @db.get(jo.path_exists!('$.b.d.e')).must_equal true @db.get(jo.path_exists!('$.b.d.f')).must_equal false @db.get(jo.path_exists!('$.b.d.e ? (@ > $x)', '{"x":2}')).must_equal true @db.get(jo.path_exists!('$.b.d.e ? (@ > $x)', '{"x":4}')).must_equal false @db.get(jo.path_exists!('$.b.d.e ? (@ > $x)', x: 2)).must_equal true @db.get(jo.path_exists!('$.b.d.e ? (@ > $x)', x: 4)).must_equal false @db.get(jo.path_exists!('$.b.d.e ? (@ > $x)', {x: 2}, true)).must_equal true @db.get(jo.path_exists!('$.b.d.e ? (@ > $x)', {x: 4}, false)).must_equal false @db.get(jo.path_match('$.b.d.e')).must_be_nil @db.get(jo.path_match('$.b.d.f')).must_be_nil @db.get(pg_json.call('b'=>{'d'=>{'e'=>true}}).op.path_match('$.b.d.e')).must_equal true @db.get(pg_json.call('b'=>{'d'=>{'e'=>false}}).op.path_match('$.b.d.e')).must_equal false proc{@db.get(jo.path_match!('$.b.d.e'))}.must_raise(Sequel::DatabaseError) proc{@db.get(jo.path_match!('$.b.d.f'))}.must_raise(Sequel::DatabaseError) @db.get(jo.path_match!('$.b.d.e', {}, true)).must_be_nil @db.get(jo.path_match!('$.b.d.f', {}, true)).must_be_nil @db.get(pg_json.call('b'=>{'d'=>{'e'=>true}}).op.path_match!('$.b.d.e')).must_equal true @db.get(pg_json.call('b'=>{'d'=>{'e'=>false}}).op.path_match!('$.b.d.e')).must_equal false @db.get(jo.path_match!('$.b.d.e > $x', '{"x":2}')).must_equal true @db.get(jo.path_match!('$.b.d.e > $x', '{"x":4}')).must_equal false @db.get(jo.path_match!('$.b.d.e > $x', x: 2)).must_equal true @db.get(jo.path_match!('$.b.d.e > $x', x: 4)).must_equal false @db.get(jo.path_match!('$.b.d.e > $x', {x: 2}, false)).must_equal true @db.get(jo.path_match!('$.b.d.e > $x', {x: 4}, true)).must_equal false @db.get(jo.path_query_first('$.b.d.e')).must_equal 3 @db.get(jo.path_query_first('$.b.d.f')).must_be_nil @db.get(jo.path_query_first('$.b.d.e ? (@ > $x)', '{"x":2}')).must_equal 3 @db.get(jo.path_query_first('$.b.d.e ? (@ > $x)', '{"x":4}')).must_be_nil @db.get(jo.path_query_first('$.b.d.e ? (@ > $x)', x: 2)).must_equal 3 @db.get(jo.path_query_first('$.b.d.e ? (@ > $x)', x: 4)).must_be_nil @db.get(jo.path_query_first('$.b.d.e ? (@ > $x)', {x: 2}, true)).must_equal 3 @db.get(jo.path_query_first('$.b.d.e ? (@ > $x)', {x: 4}, false)).must_be_nil @db.get(jo.path_query_array('$.b.d.e')).must_equal [3] @db.get(jo.path_query_array('$.b.d.f')).must_equal [] @db.get(jo.path_query_array('$.b.d.e ? 
(@ > $x)', '{"x":2}')).must_equal [3] @db.get(jo.path_query_array('$.b.d.e ? (@ > $x)', '{"x":4}')).must_equal [] @db.get(jo.path_query_array('$.b.d.e ? (@ > $x)', x: 2)).must_equal [3] @db.get(jo.path_query_array('$.b.d.e ? (@ > $x)', x: 4)).must_equal [] @db.get(jo.path_query_array('$.b.d.e ? (@ > $x)', {x: 2}, true)).must_equal [3] @db.get(jo.path_query_array('$.b.d.e ? (@ > $x)', {x: 4}, false)).must_equal [] @db.from(jo.path_query('$.b.d.e').as(:a, [:b])).get(:b).must_equal 3 @db.from(jo.path_query('$.b.d.f').as(:a, [:b])).get(:b).must_be_nil @db.from(jo.path_query('$.b.d.e ? (@ > $x)', '{"x":2}').as(:a, [:b])).get(:b).must_equal 3 @db.from(jo.path_query('$.b.d.e ? (@ > $x)', '{"x":4}').as(:a, [:b])).get(:b).must_be_nil @db.from(jo.path_query('$.b.d.e ? (@ > $x)', x: 2).as(:a, [:b])).get(:b).must_equal 3 @db.from(jo.path_query('$.b.d.e ? (@ > $x)', x: 4).as(:a, [:b])).get(:b).must_be_nil @db.from(jo.path_query('$.b.d.e ? (@ > $x)', {x: 2}, true).as(:a, [:b])).get(:b).must_equal 3 @db.from(jo.path_query('$.b.d.e ? (@ > $x)', {x: 4}, false).as(:a, [:b])).get(:b).must_be_nil end if DB.server_version >= 120000 && json_type == :jsonb it '13 jsonb operations/functions with pg_json_ops' do @db.get(jo.set_lax(%w'a', 'f'=>'g')['a']['f']).must_equal 'g' @db.get(jo.path_exists_tz!('$.b.d.e')).must_equal true @db.get(jo.path_exists_tz!('$.b.d.f')).must_equal false @db.get(jo.path_exists_tz!('$.b.d.e ? (@ > $x)', '{"x":2}')).must_equal true @db.get(jo.path_exists_tz!('$.b.d.e ? (@ > $x)', '{"x":4}')).must_equal false @db.get(jo.path_exists_tz!('$.b.d.e ? (@ > $x)', x: 2)).must_equal true @db.get(jo.path_exists_tz!('$.b.d.e ? (@ > $x)', x: 4)).must_equal false @db.get(jo.path_exists_tz!('$.b.d.e ? (@ > $x)', {x: 2}, true)).must_equal true @db.get(jo.path_exists_tz!('$.b.d.e ? (@ > $x)', {x: 4}, false)).must_equal false proc{@db.get(jo.path_match_tz!('$.b.d.e'))}.must_raise(Sequel::DatabaseError) proc{@db.get(jo.path_match_tz!('$.b.d.f'))}.must_raise(Sequel::DatabaseError) @db.get(jo.path_match_tz!('$.b.d.e', {}, true)).must_be_nil @db.get(jo.path_match_tz!('$.b.d.f', {}, true)).must_be_nil @db.get(pg_json.call('b'=>{'d'=>{'e'=>true}}).op.path_match_tz!('$.b.d.e')).must_equal true @db.get(pg_json.call('b'=>{'d'=>{'e'=>false}}).op.path_match_tz!('$.b.d.e')).must_equal false @db.get(jo.path_match_tz!('$.b.d.e > $x', '{"x":2}')).must_equal true @db.get(jo.path_match_tz!('$.b.d.e > $x', '{"x":4}')).must_equal false @db.get(jo.path_match_tz!('$.b.d.e > $x', x: 2)).must_equal true @db.get(jo.path_match_tz!('$.b.d.e > $x', x: 4)).must_equal false @db.get(jo.path_match_tz!('$.b.d.e > $x', {x: 2}, false)).must_equal true @db.get(jo.path_match_tz!('$.b.d.e > $x', {x: 4}, true)).must_equal false @db.get(jo.path_query_first_tz('$.b.d.e')).must_equal 3 @db.get(jo.path_query_first_tz('$.b.d.f')).must_be_nil @db.get(jo.path_query_first_tz('$.b.d.e ? (@ > $x)', '{"x":2}')).must_equal 3 @db.get(jo.path_query_first_tz('$.b.d.e ? (@ > $x)', '{"x":4}')).must_be_nil @db.get(jo.path_query_first_tz('$.b.d.e ? (@ > $x)', x: 2)).must_equal 3 @db.get(jo.path_query_first_tz('$.b.d.e ? (@ > $x)', x: 4)).must_be_nil @db.get(jo.path_query_first_tz('$.b.d.e ? (@ > $x)', {x: 2}, true)).must_equal 3 @db.get(jo.path_query_first_tz('$.b.d.e ? (@ > $x)', {x: 4}, false)).must_be_nil @db.get(jo.path_query_array_tz('$.b.d.e')).must_equal [3] @db.get(jo.path_query_array_tz('$.b.d.f')).must_equal [] @db.get(jo.path_query_array_tz('$.b.d.e ? (@ > $x)', '{"x":2}')).must_equal [3] @db.get(jo.path_query_array_tz('$.b.d.e ? 
(@ > $x)', '{"x":4}')).must_equal [] @db.get(jo.path_query_array_tz('$.b.d.e ? (@ > $x)', x: 2)).must_equal [3] @db.get(jo.path_query_array_tz('$.b.d.e ? (@ > $x)', x: 4)).must_equal [] @db.get(jo.path_query_array_tz('$.b.d.e ? (@ > $x)', {x: 2}, true)).must_equal [3] @db.get(jo.path_query_array_tz('$.b.d.e ? (@ > $x)', {x: 4}, false)).must_equal [] @db.from(jo.path_query_tz('$.b.d.e').as(:a, [:b])).get(:b).must_equal 3 @db.from(jo.path_query_tz('$.b.d.f').as(:a, [:b])).get(:b).must_be_nil @db.from(jo.path_query_tz('$.b.d.e ? (@ > $x)', '{"x":2}').as(:a, [:b])).get(:b).must_equal 3 @db.from(jo.path_query_tz('$.b.d.e ? (@ > $x)', '{"x":4}').as(:a, [:b])).get(:b).must_be_nil @db.from(jo.path_query_tz('$.b.d.e ? (@ > $x)', x: 2).as(:a, [:b])).get(:b).must_equal 3 @db.from(jo.path_query_tz('$.b.d.e ? (@ > $x)', x: 4).as(:a, [:b])).get(:b).must_be_nil @db.from(jo.path_query_tz('$.b.d.e ? (@ > $x)', {x: 2}, true).as(:a, [:b])).get(:b).must_equal 3 @db.from(jo.path_query_tz('$.b.d.e ? (@ > $x)', {x: 4}, false).as(:a, [:b])).get(:b).must_be_nil end if DB.server_version >= 130000 && json_type == :jsonb it '14 jsonb operations/functions with pg_json_ops' do @db.create_table!(:items){column :i, json_type} @db[:items].delete @db[:items].insert(:i=>Sequel.pg_jsonb('a'=>{'b'=>1})) @db[:items].update(Sequel.pg_jsonb_op(:i)['a']['b'] => '2', Sequel.pg_jsonb_op(:i)['a']['c'] => '3', Sequel.pg_jsonb_op(Sequel[:i])['d'] => Sequel.pg_jsonb('e'=>4)) @db[:items].all.must_equal [{:i=>{'a'=>{'b'=>2, 'c'=>3}, 'd'=>{'e'=>4}}}] end if DB.server_version >= 140000 && json_type == :jsonb end end if DB.server_version >= 90200 describe 'PostgreSQL inet/cidr types' do ipv6_broken = (IPAddr.new('::1'); false) rescue true before(:all) do @db = DB @ds = @db[:items] @v4 = '127.0.0.1' @v4nm = '127.0.0.0/8' @v6 = '2001:4f8:3:ba:2e0:81ff:fe22:d1f1' @v6nm = '2001:4f8:3:ba::/64' @ipv4 = IPAddr.new(@v4) @ipv4nm = IPAddr.new(@v4nm) unless ipv6_broken @ipv6 = IPAddr.new(@v6) @ipv6nm = IPAddr.new(@v6nm) end end after do @db.drop_table?(:items) end it 'insert and retrieve inet/cidr values' do @db.create_table!(:items){inet :i; cidr :c} @ds.insert(@ipv4, @ipv4nm) @ds.count.must_equal 1 rs = @ds.all rs.first[:i].must_equal @ipv4 rs.first[:c].must_equal @ipv4nm rs.first[:i].must_be_kind_of(IPAddr) rs.first[:c].must_be_kind_of(IPAddr) @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs unless ipv6_broken @ds.delete @ds.insert(@ipv6, @ipv6nm) @ds.count.must_equal 1 rs = @ds.all rs.first[:j] rs.first[:i].must_equal @ipv6 rs.first[:c].must_equal @ipv6nm rs.first[:i].must_be_kind_of(IPAddr) rs.first[:c].must_be_kind_of(IPAddr) @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end end it 'allow overridding type conversions of inet/cidr types' do cp = @db.conversion_procs[869] acp = @db.conversion_procs[1041] begin @db.add_conversion_proc(1041, lambda{|s| s}) @db.add_conversion_proc(869, lambda{|s| s}) @db.get(Sequel.cast('127.0.0.1', :inet)).must_equal '127.0.0.1' @db.get(Sequel.pg_array(['127.0.0.1'], :inet)).must_equal '{127.0.0.1}' ensure @db.add_conversion_proc(869, cp) @db.add_conversion_proc(1041, acp) end end it 'insert and retrieve inet/cidr/macaddr array values' do @db.create_table!(:items){column :i, 'inet[]'; column :c, 'cidr[]'; column :m, 'macaddr[]'} @ds.insert(Sequel.pg_array([@ipv4], 'inet'), Sequel.pg_array([@ipv4nm], 'cidr'), Sequel.pg_array(['12:34:56:78:90:ab'], 'macaddr')) @ds.count.must_equal 1 rs = @ds.all rs.first.values.all?{|c| c.is_a?(Sequel::Postgres::PGArray)}.must_equal true rs.first[:i].first.must_equal 
@ipv4 rs.first[:c].first.must_equal @ipv4nm rs.first[:m].first.must_equal '12:34:56:78:90:ab' rs.first[:i].first.must_be_kind_of(IPAddr) rs.first[:c].first.must_be_kind_of(IPAddr) @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it 'use ipaddr in bound variables' do @db.create_table!(:items){inet :i; cidr :c} @ds.call(:insert, {:i=>@ipv4, :c=>@ipv4nm}, {:i=>:$i, :c=>:$c}) @ds.get(:i).must_equal @ipv4 @ds.get(:c).must_equal @ipv4nm @ds.filter(:i=>:$i, :c=>:$c).call(:first, :i=>@ipv4, :c=>@ipv4nm).must_equal(:i=>@ipv4, :c=>@ipv4nm) @ds.filter(:i=>:$i, :c=>:$c).call(:first, :i=>@ipv6, :c=>@ipv6nm).must_be_nil @ds.filter(:i=>:$i, :c=>:$c).call(:delete, :i=>@ipv4, :c=>@ipv4nm).must_equal 1 unless ipv6_broken @ds.call(:insert, {:i=>@ipv6, :c=>@ipv6nm}, {:i=>:$i, :c=>:$c}) @ds.get(:i).must_equal @ipv6 @ds.get(:c).must_equal @ipv6nm @ds.filter(:i=>:$i, :c=>:$c).call(:first, :i=>@ipv6, :c=>@ipv6nm).must_equal(:i=>@ipv6, :c=>@ipv6nm) @ds.filter(:i=>:$i, :c=>:$c).call(:first, :i=>@ipv4, :c=>@ipv4nm).must_be_nil @ds.filter(:i=>:$i, :c=>:$c).call(:delete, :i=>@ipv6, :c=>@ipv6nm).must_equal 1 end @db.create_table!(:items){column :i, 'inet[]'; column :c, 'cidr[]'; column :m, 'macaddr[]'} @ds.call(:insert, {:i=>[@ipv4], :c=>[@ipv4nm], :m=>['12:34:56:78:90:ab']}, {:i=>:$i, :c=>:$c, :m=>:$m}) @ds.filter(:i=>:$i, :c=>:$c, :m=>:$m).call(:first, :i=>[@ipv4], :c=>[@ipv4nm], :m=>['12:34:56:78:90:ab']).must_equal(:i=>[@ipv4], :c=>[@ipv4nm], :m=>['12:34:56:78:90:ab']) @ds.filter(:i=>:$i, :c=>:$c, :m=>:$m).call(:first, :i=>[], :c=>[], :m=>[]).must_be_nil @ds.filter(:i=>:$i, :c=>:$c, :m=>:$m).call(:delete, :i=>[@ipv4], :c=>[@ipv4nm], :m=>['12:34:56:78:90:ab']).must_equal 1 end if uses_pg_or_jdbc it 'parse default values for schema' do @db.create_table!(:items) do inet :i, :default=>IPAddr.new('127.0.0.1') cidr :c, :default=>IPAddr.new('127.0.0.1') end @db.schema(:items)[0][1][:ruby_default].must_equal IPAddr.new('127.0.0.1') @db.schema(:items)[1][1][:ruby_default].must_equal IPAddr.new('127.0.0.1') end it 'with models' do @db.create_table!(:items) do primary_key :id inet :i cidr :c end c = Class.new(Sequel::Model(@db[:items])) c.create(:i=>@v4, :c=>@v4nm).values.values_at(:i, :c).must_equal [@ipv4, @ipv4nm] unless ipv6_broken c.create(:i=>@ipv6, :c=>@ipv6nm).values.values_at(:i, :c).must_equal [@ipv6, @ipv6nm] end end it 'operations/functions with pg_inet_ops' do Sequel.extension :pg_inet_ops @db.get(Sequel.pg_inet_op('1.2.3.4') << '1.2.3.0/24').must_equal true @db.get(Sequel.pg_inet_op('1.2.3.4') << '1.2.3.4/32').must_equal false @db.get(Sequel.pg_inet_op('1.2.3.4') << '1.2.2.0/24').must_equal false @db.get(Sequel.pg_inet_op('1.2.3.4').contained_by('1.2.3.0/24')).must_equal true @db.get(Sequel.pg_inet_op('1.2.3.4').contained_by('1.2.3.4/32')).must_equal false @db.get(Sequel.pg_inet_op('1.2.3.4').contained_by('1.2.2.0/24')).must_equal false @db.get(Sequel.pg_inet_op('1.2.3.4').contained_by_or_equals('1.2.3.0/24')).must_equal true @db.get(Sequel.pg_inet_op('1.2.3.4').contained_by_or_equals('1.2.3.4/32')).must_equal true @db.get(Sequel.pg_inet_op('1.2.3.4').contained_by_or_equals('1.2.2.0/24')).must_equal false @db.get(Sequel.pg_inet_op('1.2.3.0/24') >> '1.2.3.4').must_equal true @db.get(Sequel.pg_inet_op('1.2.3.0/24') >> '1.2.2.4').must_equal false @db.get(Sequel.pg_inet_op('1.2.3.0/24').contains('1.2.3.4')).must_equal true @db.get(Sequel.pg_inet_op('1.2.3.0/24').contains('1.2.2.4')).must_equal false @db.get(Sequel.pg_inet_op('1.2.3.0/24').contains_or_equals('1.2.3.4')).must_equal true 
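# The methods exercised in this spec map to PostgreSQL's network address
# operators; a rough sketch of the assumed correspondence (for
# illustration only, not asserted by these tests):
#
#   x.contained_by(y)            # x << y   (strict)
#   x.contained_by_or_equals(y)  # x <<= y
#   x.contains(y)                # x >> y   (strict)
#   x.contains_or_equals(y)      # x >>= y
#   x + 1, x - 1                 # address arithmetic on inet values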
@db.get(Sequel.pg_inet_op('1.2.3.0/24').contains_or_equals('1.2.2.4')).must_equal false @db.get(Sequel.pg_inet_op('1.2.3.0/24').contains_or_equals('1.2.3.0/24')).must_equal true @db.get(Sequel.pg_inet_op('1.2.3.0/32') + 1).must_equal IPAddr.new('1.2.3.1/32') @db.get(Sequel.pg_inet_op('1.2.3.1/32') - 1).must_equal IPAddr.new('1.2.3.0/32') @db.get(Sequel.pg_inet_op('1.2.3.1/32') - '1.2.3.0/32').must_equal 1 @db.get(Sequel.pg_inet_op('1.2.3.0/32') & '1.2.0.4/32').must_equal IPAddr.new('1.2.0.0/32') @db.get(Sequel.pg_inet_op('1.2.0.0/32') | '0.0.3.4/32').must_equal IPAddr.new('1.2.3.4/32') @db.get(~Sequel.pg_inet_op('0.0.0.0/32')).must_equal IPAddr.new('255.255.255.255/32') @db.get(Sequel.pg_inet_op('1.2.3.4/24').abbrev).must_equal '1.2.3.4/24' @db.get(Sequel.pg_inet_op('1.2.3.4/24').broadcast).must_equal IPAddr.new('1.2.3.255/24') @db.get(Sequel.pg_inet_op('1234:3456:5678:789a:9abc:bced:edf0:f012/96').broadcast).must_equal IPAddr.new('1234:3456:5678:789a:9abc:bced::/96') @db.get(Sequel.pg_inet_op('1234:3456:5678:789a:9abc:bced:edf0:f012/128').broadcast).must_equal IPAddr.new('1234:3456:5678:789a:9abc:bced:edf0:f012/128') @db.get(Sequel.pg_inet_op('1234:3456:5678:789a:9abc:bced:edf0:f012/64').broadcast).must_equal IPAddr.new('1234:3456:5678:789a::/64') @db.get(Sequel.pg_inet_op('1234:3456:5678:789a:9abc:bced:edf0:f012/32').broadcast).must_equal IPAddr.new('1234:3456::/32') @db.get(Sequel.pg_inet_op('1234:3456:5678:789a:9abc:bced:edf0:f012/0').broadcast).must_equal IPAddr.new('::/0') @db.get(Sequel.pg_inet_op('1.2.3.4/24').family).must_equal 4 @db.get(Sequel.pg_inet_op('1.2.3.4/24').host).must_equal '1.2.3.4' @db.get(Sequel.pg_inet_op('1.2.3.4/24').hostmask).must_equal IPAddr.new('0.0.0.255/32') @db.get(Sequel.pg_inet_op('1.2.3.4/24').masklen).must_equal 24 @db.get(Sequel.pg_inet_op('1.2.3.4/24').netmask).must_equal IPAddr.new('255.255.255.0/32') @db.get(Sequel.pg_inet_op('1.2.3.4/24').network).must_equal IPAddr.new('1.2.3.0/24') @db.get(Sequel.pg_inet_op('1.2.3.4/24').set_masklen(16)).must_equal IPAddr.new('1.2.3.4/16') @db.get(Sequel.pg_inet_op('1.2.3.4/32').text).must_equal '1.2.3.4/32' if @db.server_version >= 90400 @db.get(Sequel.pg_inet_op('1.2.3.0/24').contains_or_contained_by('1.2.0.0/16')).must_equal true @db.get(Sequel.pg_inet_op('1.2.0.0/16').contains_or_contained_by('1.2.3.0/24')).must_equal true @db.get(Sequel.pg_inet_op('1.3.0.0/16').contains_or_contained_by('1.2.3.0/24')).must_equal false end end end describe 'PostgreSQL custom range types' do after do @db.run "DROP TYPE timerange"; end it "should allow registration and use" do @db = DB @db.run "CREATE TYPE timerange AS range (subtype = time)" @db.register_range_type('timerange') r = Sequel::SQLTime.create(10, 11, 12)..Sequel::SQLTime.create(11, 12, 13) @db.get(Sequel.pg_range(r, :timerange)).to_range.must_equal r if DB.server_version >= 140000 @db.register_multirange_type('timemultirange') r = [Sequel::SQLTime.create(1, 2, 3)..Sequel::SQLTime.create(7, 8, 9), Sequel::SQLTime.create(10, 11, 12)..Sequel::SQLTime.create(11, 12, 13)] @db.get(Sequel.pg_multirange(r, :timemultirange)).map(&:to_range).must_equal r end end end if DB.server_version >= 90200 describe 'PostgreSQL range types' do before(:all) do @db = DB @ds = @db[:items] @map = {:i4=>'int4range', :i8=>'int8range', :n=>'numrange', :d=>'daterange', :t=>'tsrange', :tz=>'tstzrange'} @r = {:i4=>1...2, :i8=>2...3, :n=>BigDecimal('1.0')..BigDecimal('2.0'), :d=>Date.today...(Date.today+1), :t=>Time.local(2011, 1)..Time.local(2011, 2), :tz=>Time.local(2011, 1)..Time.local(2011, 2)} 
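# @r holds plain Ruby Range objects keyed by an abbreviation of the
# PostgreSQL range type; the loops below build PGRange wrappers (@pgr)
# and single-element arrays of both (@ra/@pgra) from it. A minimal usage
# sketch (the literal SQL form shown is an assumption for illustration):
#
#   Sequel.pg_range(1...2, :int4range)
#   # would literalize roughly as: int4range(1,2,'[)')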
@ra = {} @pgr = {} @pgra = {} @r.each{|k, v| @ra[k] = Sequel.pg_array([v], @map[k])} @r.each{|k, v| @pgr[k] = Sequel.pg_range(v)} @r.each{|k, v| @pgra[k] = Sequel.pg_array([Sequel.pg_range(v)], @map[k])} end after do @db.drop_table?(:items) end it 'insert and retrieve range type values' do @db.create_table!(:items){int4range :i4; int8range :i8; numrange :n; daterange :d; tsrange :t; tstzrange :tz} [@r, @pgr].each do |input| h = {} input.each{|k, v| h[k] = Sequel.cast(v, @map[k])} @ds.insert(h) @ds.count.must_equal 1 rs = @ds.all rs.first.each do |k, v| v.class.must_equal(Sequel::Postgres::PGRange) v.to_range.must_be_kind_of(Range) v.must_be :==, @r[k] v.to_range.must_equal @r[k] end @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @ds.delete end end it 'insert and retrieve arrays of range type values' do @db.create_table!(:items){column :i4, 'int4range[]'; column :i8, 'int8range[]'; column :n, 'numrange[]'; column :d, 'daterange[]'; column :t, 'tsrange[]'; column :tz, 'tstzrange[]'} [@ra, @pgra].each do |input| @ds.insert(input) @ds.count.must_equal 1 rs = @ds.all rs.first.each do |k, v| v.class.must_equal(Sequel::Postgres::PGArray) v.to_a.must_be_kind_of(Array) v.first.class.must_equal(Sequel::Postgres::PGRange) v.first.to_range.must_be_kind_of(Range) v.must_be :==, @ra[k].to_a v.first.must_be :==, @r[k] end @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @ds.delete end end it 'use range types in bound variables' do @db.create_table!(:items){int4range :i4; int8range :i8; numrange :n; daterange :d; tsrange :t; tstzrange :tz} h = {} @r.keys.each{|k| h[k] = :"$#{k}"} r2 = {} @r.each{|k, v| r2[k] = Range.new(v.begin, v.end+2)} @ds.call(:insert, @r, h) @ds.first.must_be :==, @r @ds.filter(h).call(:first, @r).must_be :==, @r @ds.filter(h).call(:first, @pgr).must_be :==, @r @ds.filter(h).call(:first, r2).must_be_nil @ds.filter(h).call(:delete, @r).must_equal 1 @db.create_table!(:items){column :i4, 'int4range[]'; column :i8, 'int8range[]'; column :n, 'numrange[]'; column :d, 'daterange[]'; column :t, 'tsrange[]'; column :tz, 'tstzrange[]'} @r.each{|k, v| r2[k] = [Range.new(v.begin, v.end+2)]} @ds.call(:insert, @ra, h) @ds.filter(h).call(:first, @ra).each{|k, v| v.must_be :==, @ra[k].to_a} @ds.filter(h).call(:first, @pgra).each{|k, v| v.must_be :==, @ra[k].to_a} @ds.filter(h).call(:first, r2).must_be_nil @ds.filter(h).call(:delete, @ra).must_equal 1 end if uses_pg_or_jdbc it 'handle endless ranges' do @db.get(Sequel.cast(eval('(1...)'), :int4range)).must_be :==, eval('(1...)') @db.get(Sequel.cast(eval('(1...)'), :int4range)).wont_be :==, eval('(2...)') @db.get(Sequel.cast(eval('(1...)'), :int4range)).wont_be :==, eval('(1..)') @db.get(Sequel.cast(eval('(2...)'), :int4range)).must_be :==, eval('(2...)') @db.get(Sequel.cast(eval('(2...)'), :int4range)).wont_be :==, eval('(2..)') @db.get(Sequel.cast(eval('(2...)'), :int4range)).wont_be :==, eval('(1...)') end if RUBY_VERSION >= '2.6' it 'handle startless ranges' do @db.get(Sequel.cast(eval('(...1)'), :int4range)).must_be :==, Sequel::Postgres::PGRange.new(nil, 1, :exclude_begin=>true, :exclude_end=>true, :db_type=>"int4range") @db.get(Sequel.cast(eval('(...1)'), :int4range)).wont_be :==, Sequel::Postgres::PGRange.new(nil, 2, :exclude_begin=>true, :exclude_end=>true, :db_type=>"int4range") @db.get(Sequel.cast(eval('(...1)'), :int4range)).wont_be :==, Sequel::Postgres::PGRange.new(nil, 1, :exclude_end=>true, :db_type=>"int4range") @db.get(Sequel.cast(eval('(...1)'), :int4range)).wont_be :==, Sequel::Postgres::PGRange.new(nil, 1, 
:exclude_begin=>true, :db_type=>"int4range") end if RUBY_VERSION >= '2.7' it 'handle startless, endless ranges' do @db.get(Sequel.cast(eval('nil...nil'), :int4range)).must_be :==, Sequel::Postgres::PGRange.new(nil, nil, :exclude_begin=>true, :exclude_end=>true, :db_type=>"int4range") @db.get(Sequel.cast(eval('nil...nil'), :int4range)).wont_be :==, Sequel::Postgres::PGRange.new(nil, nil, :exclude_begin=>true, :db_type=>"int4range") @db.get(Sequel.cast(eval('nil...nil'), :int4range)).wont_be :==, Sequel::Postgres::PGRange.new(nil, nil, :exclude_end=>true, :db_type=>"int4range") @db.get(Sequel.cast(eval('nil...nil'), :int4range)).wont_be :==, Sequel::Postgres::PGRange.new(1, nil, :exclude_begin=>true, :db_type=>"int4range") @db.get(Sequel.cast(eval('nil...nil'), :int4range)).wont_be :==, Sequel::Postgres::PGRange.new(nil, 1, :exclude_begin=>true, :db_type=>"int4range") end if RUBY_VERSION >= '2.7' it 'parse default values for schema' do @db.create_table!(:items) do Integer :j int4range :i, :default=>1..4 end @db.schema(:items)[0][1][:ruby_default].must_be_nil @db.schema(:items)[1][1][:ruby_default].must_equal Sequel::Postgres::PGRange.new(1, 5, :exclude_end=>true, :db_type=>'int4range') end it 'with models' do @db.create_table!(:items){primary_key :id; int4range :i4; int8range :i8; numrange :n; daterange :d; tsrange :t; tstzrange :tz} c = Class.new(Sequel::Model(@db[:items])) v = c.create(@r).values v.delete(:id) v.must_be :==, @r @db.create_table!(:items){primary_key :id; column :i4, 'int4range[]'; column :i8, 'int8range[]'; column :n, 'numrange[]'; column :d, 'daterange[]'; column :t, 'tsrange[]'; column :tz, 'tstzrange[]'} c = Class.new(Sequel::Model(@db[:items])) v = c.create(@ra).values v.delete(:id) v.each{|k,v1| v1.must_be :==, @ra[k].to_a} end it 'works with current_datetime_timestamp extension' do ds = @db.dataset.extension(:current_datetime_timestamp) tsr = ds.get(Sequel.pg_range(ds.current_datetime..ds.current_datetime, :tstzrange)) tsr.begin.must_be_kind_of Time tsr.end.must_be_kind_of Time end it 'operations/functions with pg_range_ops' do Sequel.extension :pg_range_ops @db.get(Sequel.pg_range(1..5, :int4range).op.contains(2..4)).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.contains(3..6)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.contains(0..6)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.contained_by(0..6)).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.contained_by(3..6)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.contained_by(2..4)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.overlaps(5..6)).must_equal true @db.get(Sequel.pg_range(1...5, :int4range).op.overlaps(5..6)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.left_of(6..10)).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.left_of(5..10)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.left_of(-1..0)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.left_of(-1..3)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.right_of(6..10)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.right_of(5..10)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.right_of(-1..0)).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.right_of(-1..3)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.ends_before(6..10)).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.ends_before(5..10)).must_equal true 
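# The pg_range_ops method names used throughout this spec correspond to
# PostgreSQL's range operators; the assumed mapping (standard operators,
# listed for illustration):
#
#   contains / contained_by   # @> / <@
#   overlaps                  # &&
#   left_of / right_of        # << / >>
#   ends_before               # &<  (does not extend to the right of)
#   starts_after              # &>  (does not extend to the left of)
#   adjacent_to               # -|-
#   + / * / -                 # union / intersection / difference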
@db.get(Sequel.pg_range(1..5, :int4range).op.ends_before(-1..0)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.ends_before(-1..3)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.ends_before(-1..7)).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.starts_after(6..10)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.starts_after(5..10)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.starts_after(3..10)).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.starts_after(-1..10)).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.starts_after(-1..0)).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.starts_after(-1..3)).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.starts_after(-5..-1)).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.adjacent_to(6..10)).must_equal true @db.get(Sequel.pg_range(1...5, :int4range).op.adjacent_to(6..10)).must_equal false @db.get((Sequel.pg_range(1..5, :int4range).op + (6..10)).adjacent_to(6..10)).must_equal false @db.get((Sequel.pg_range(1..5, :int4range).op + (6..10)).adjacent_to(11..20)).must_equal true @db.get((Sequel.pg_range(1..5, :int4range).op * (2..6)).adjacent_to(6..10)).must_equal true @db.get((Sequel.pg_range(1..4, :int4range).op * (2..6)).adjacent_to(6..10)).must_equal false @db.get((Sequel.pg_range(1..5, :int4range).op - (2..6)).adjacent_to(2..10)).must_equal true @db.get((Sequel.pg_range(0..4, :int4range).op - (3..6)).adjacent_to(4..10)).must_equal false @db.get(Sequel.pg_range(0..4, :int4range).op.lower).must_equal 0 @db.get(Sequel.pg_range(0..4, :int4range).op.upper).must_equal 5 @db.get(Sequel.pg_range(0..4, :int4range).op.isempty).must_equal false @db.get(Sequel::Postgres::PGRange.empty(:int4range).op.isempty).must_equal true @db.get(Sequel.pg_range(1..5, :numrange).op.lower_inc).must_equal true @db.get(Sequel::Postgres::PGRange.new(1, 5, :exclude_begin=>true, :db_type=>:numrange).op.lower_inc).must_equal false @db.get(Sequel.pg_range(1..5, :numrange).op.upper_inc).must_equal true @db.get(Sequel.pg_range(1...5, :numrange).op.upper_inc).must_equal false @db.get(Sequel::Postgres::PGRange.new(1, 5, :db_type=>:int4range).op.lower_inf).must_equal false @db.get(Sequel::Postgres::PGRange.new(nil, 5, :db_type=>:int4range).op.lower_inf).must_equal true @db.get(Sequel::Postgres::PGRange.new(1, 5, :db_type=>:int4range).op.upper_inf).must_equal false @db.get(Sequel::Postgres::PGRange.new(1, nil, :db_type=>:int4range).op.upper_inf).must_equal true end end if DB.server_version >= 90200 describe 'PostgreSQL multirange types' do before(:all) do @db = DB @ds = @db[:items] @map = {:i4=>'int4multirange', :i8=>'int8multirange', :n=>'nummultirange', :d=>'datemultirange', :t=>'tsmultirange', :tz=>'tstzmultirange'} @r = { :i4=>[1...2, 4...5], :i8=>[2...3, 5...6], :n=>[BigDecimal('1.0')..BigDecimal('2.0'), BigDecimal('4.0')..BigDecimal('5.0')], :d=>[Date.today...(Date.today+1), (Date.today+3)...(Date.today+4)], :t=>[Time.local(2011, 1)..Time.local(2011, 2), Time.local(2011, 4)..Time.local(2011, 5)], :tz=>[Time.local(2011, 1)..Time.local(2011, 2), Time.local(2011, 4)..Time.local(2011, 5)], } @pgr = {} @pgra = {} @r.each do |k, v| type = @map[k] val = @pgr[k] = Sequel.pg_multirange(v.map{|range| Sequel::Postgres::PGRange.new(range.begin, range.end, :exclude_end => range.exclude_end?, :db_type => type.sub('multi', ''))}, type) @pgra[k] = Sequel.pg_array([val], type) end end after do @db.drop_table?(:items) end it 'insert and retrieve multirange 
type values' do @db.create_table!(:items){int4multirange :i4; int8multirange :i8; nummultirange :n; datemultirange :d; tsmultirange :t; tstzmultirange :tz} h = {} @pgr.each{|k, v| h[k] = Sequel.cast(v, @map[k])} @ds.insert(h) @ds.count.must_equal 1 rs = @ds.all rs.first.each do |k, v| v.class.must_equal(Sequel::Postgres::PGMultiRange) v.each{|range| range.must_be_kind_of(Sequel::Postgres::PGRange)} v.must_be :==, @r[k] end @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @ds.delete end it 'insert and retrieve arrays of multirange type values' do @db.create_table!(:items){column :i4, 'int4multirange[]'; column :i8, 'int8multirange[]'; column :n, 'nummultirange[]'; column :d, 'datemultirange[]'; column :t, 'tsmultirange[]'; column :tz, 'tstzmultirange[]'} @ds.insert(@pgra) @ds.count.must_equal 1 rs = @ds.all rs.first.each do |k, v| v.class.must_equal(Sequel::Postgres::PGArray) v.to_a.must_be_kind_of(Array) v.first.class.must_equal(Sequel::Postgres::PGMultiRange) v.first.first.class.must_equal(Sequel::Postgres::PGRange) v.first.first.to_range.must_be_kind_of(Range) v.must_be :==, @pgra[k] v.first.must_be :==, @pgr[k] v.first.must_be :==, @r[k] end @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @ds.delete end it 'use multirange types in bound variables' do @db.create_table!(:items){int4multirange :i4; int8multirange :i8; nummultirange :n; datemultirange :d; tsmultirange :t; tstzmultirange :tz} h = {} @r.keys.each{|k| h[k] = :"$#{k}"} r2 = {} @r.each{|k, v| r2[k] = Sequel.pg_multirange(v.map{|range| Range.new(range.begin+2, range.end+2)}, @map[k])} @ds.call(:insert, @pgr, h) @ds.first.must_be :==, @r @ds.filter(h).call(:first, @pgr).must_be :==, @r @ds.filter(h).call(:first, r2).must_be_nil @ds.filter(h).call(:delete, @pgr).must_equal 1 @db.create_table!(:items){column :i4, 'int4multirange[]'; column :i8, 'int8multirange[]'; column :n, 'nummultirange[]'; column :d, 'datemultirange[]'; column :t, 'tsmultirange[]'; column :tz, 'tstzmultirange[]'} @r.each{|k, v| r2[k] = Sequel.pg_array([Sequel.pg_multirange(v.map{|range| Range.new(range.begin+2, range.end+2)}, @map[k])])} @ds.call(:insert, @pgra, h) @ds.filter(h).call(:first, @pgra).each{|k, v| v.must_be :==, @pgra[k].to_a} @ds.filter(h).call(:first, r2).must_be_nil @ds.filter(h).call(:delete, @pgra).must_equal 1 end if uses_pg_or_jdbc it 'parse multiranges containing empty ranges' do @db.get(Sequel::Postgres::PGMultiRange.new([], 'int4multirange')).must_be_empty @db.get(Sequel::Postgres::PGMultiRange.new([Sequel::Postgres::PGRange.empty('int4range')], 'int4multirange')).must_be_empty end it 'parse default values for schema' do @db.create_table!(:items) do Integer :j int4multirange :i, :default=>Sequel.pg_multirange([1..4], :int4multirange) end @db.schema(:items)[0][1][:ruby_default].must_be_nil @db.schema(:items)[1][1][:ruby_default].must_equal Sequel::Postgres::PGMultiRange.new([Sequel::Postgres::PGRange.new(1, 5, :exclude_end=>true, :db_type=>'int4range')], 'int4multirange') @db.schema(:items)[1][1][:ruby_default].first.must_equal Sequel::Postgres::PGRange.new(1, 5, :exclude_end=>true, :db_type=>'int4range') end it 'with models' do @db.create_table!(:items){primary_key :id; int4multirange :i4; int8multirange :i8; nummultirange :n; datemultirange :d; tsmultirange :t; tstzmultirange :tz} c = Class.new(Sequel::Model(@db[:items])) v = c.create(@pgr).values v.delete(:id) v.must_be :==, @pgr @db.create_table!(:items){primary_key :id; column :i4, 'int4multirange[]'; column :i8, 'int8multirange[]'; column :n, 'nummultirange[]'; column :d, 
'datemultirange[]'; column :t, 'tsmultirange[]'; column :tz, 'tstzmultirange[]'} c = Class.new(Sequel::Model(@db[:items])) v = c.create(@pgra).values v.delete(:id) v.each{|k,v1| v1.must_be :==, @pgra[k].to_a} end it 'operations/functions with pg_range_ops' do Sequel.extension :pg_range_ops mr = lambda do |range| type = (Range === range ? 'int4multirange' : range.db_type.to_s.sub('range', 'multirange')) Sequel.pg_multirange([range], type) end @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contains(mr[2..4])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contains(mr[3..6])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contains(mr[0..6])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contains(Sequel.pg_range(1..5, :int4range))).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contains(Sequel.pg_range(3..6, :int4range))).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contains(Sequel.pg_range(0..6, :int4range))).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contains(3)).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contains(6)).must_equal false @db.get(Sequel.pg_range(0..6, :int4range).op.contains(mr[Sequel.pg_range(1..5, :int4range)])).must_equal true @db.get(Sequel.pg_range(3..6, :int4range).op.contains(mr[Sequel.pg_range(1..5, :int4range)])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contained_by(mr[0..6])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contained_by(mr[3..6])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contained_by(mr[2..4])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contained_by(Sequel.pg_range(0..6, :int4range))).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.contained_by(Sequel.pg_range(3..6, :int4range))).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.contained_by(mr[Sequel.pg_range(0..6, :int4range)])).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.contained_by(mr[Sequel.pg_range(3..6, :int4range)])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.overlaps(mr[5..6])).must_equal true @db.get(mr[Sequel.pg_range(1...5, :int4range)].op.overlaps(mr[5..6])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.overlaps(Sequel.pg_range(5..6, :int4range))).must_equal true @db.get(mr[Sequel.pg_range(1...5, :int4range)].op.overlaps(Sequel.pg_range(5..6, :int4range))).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.overlaps(mr[Sequel.pg_range(5..6, :int4range)])).must_equal true @db.get(Sequel.pg_range(1...5, :int4range).op.overlaps(mr[Sequel.pg_range(5..6, :int4range)])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.left_of(mr[6..10])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.left_of(mr[5..10])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.left_of(mr[-1..0])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.left_of(mr[-1..3])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.left_of(Sequel.pg_range(6..10, :int4range))).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.left_of(Sequel.pg_range(5..10, :int4range))).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.left_of(mr[Sequel.pg_range(6..10, :int4range)])).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.left_of(mr[Sequel.pg_range(5..10, :int4range)])).must_equal false 
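# As the surrounding assertions show, the same pg_range_ops methods work
# with multirange/multirange, multirange/range, and range/multirange
# operand combinations; PostgreSQL 14+ defines its range operators for
# all of these pairings, and contains also accepts a bare element value.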
@db.get(mr[Sequel.pg_range(1..5, :int4range)].op.right_of(mr[6..10])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.right_of(mr[5..10])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.right_of(mr[-1..0])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.right_of(mr[-1..3])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.right_of(Sequel.pg_range(6..10, :int4range))).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.right_of(Sequel.pg_range(-1..0, :int4range))).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.right_of(mr[Sequel.pg_range(6..10, :int4range)])).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.right_of(mr[Sequel.pg_range(-1..0, :int4range)])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.ends_before(mr[6..10])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.ends_before(mr[5..10])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.ends_before(mr[-1..0])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.ends_before(mr[-1..3])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.ends_before(mr[-1..7])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.ends_before(Sequel.pg_range(5..10, :int4range))).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.ends_before(Sequel.pg_range(-1..0, :int4range))).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.ends_before(mr[Sequel.pg_range(5..10, :int4range)])).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.ends_before(mr[Sequel.pg_range(-1..0, :int4range)])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.starts_after(mr[6..10])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.starts_after(mr[5..10])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.starts_after(mr[3..10])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.starts_after(mr[-1..10])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.starts_after(mr[-1..0])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.starts_after(mr[-1..3])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.starts_after(mr[-5..-1])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.starts_after(Sequel.pg_range(3..10, :int4range))).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.starts_after(Sequel.pg_range(-1..10, :int4range))).must_equal true @db.get(Sequel.pg_range(1..5, :int4range).op.starts_after(mr[Sequel.pg_range(3..10, :int4range)])).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.starts_after(mr[Sequel.pg_range(-1..10, :int4range)])).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.adjacent_to(mr[6..10])).must_equal true @db.get(mr[Sequel.pg_range(1...5, :int4range)].op.adjacent_to(mr[6..10])).must_equal false @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.adjacent_to(Sequel.pg_range(6..10, :int4range))).must_equal true @db.get(mr[Sequel.pg_range(1...5, :int4range)].op.adjacent_to(Sequel.pg_range(6..10, :int4range))).must_equal false @db.get(Sequel.pg_range(1..5, :int4range).op.adjacent_to(mr[Sequel.pg_range(6..10, :int4range)])).must_equal true @db.get(Sequel.pg_range(1...5, :int4range).op.adjacent_to(mr[Sequel.pg_range(6..10, :int4range)])).must_equal false @db.get((mr[Sequel.pg_range(1..5, :int4range)].op + (mr[6..10])).adjacent_to(mr[6..10])).must_equal false 
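# Note on the union (+) cases here: PostgreSQL normalizes multiranges, so
# for the discrete int4 base type the union of 1..5 and 6..10 collapses
# into the single range [1,11). That merged range overlaps 6..10 (hence
# not adjacent, above) but is adjacent to 11..20 (below).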
@db.get((mr[Sequel.pg_range(1..5, :int4range)].op + (mr[6..10])).adjacent_to(mr[11..20])).must_equal true @db.get((mr[Sequel.pg_range(1..5, :int4range)].op * (mr[2..6])).adjacent_to(mr[6..10])).must_equal true @db.get((mr[Sequel.pg_range(1..4, :int4range)].op * (mr[2..6])).adjacent_to(mr[6..10])).must_equal false @db.get((mr[Sequel.pg_range(1..5, :int4range)].op - (mr[2..6])).adjacent_to(mr[2..10])).must_equal true @db.get((mr[Sequel.pg_range(0..4, :int4range)].op - (mr[3..6])).adjacent_to(mr[4..10])).must_equal false @db.get(mr[Sequel.pg_range(0..4, :int4range)].op.lower).must_equal 0 @db.get(mr[Sequel.pg_range(0..4, :int4range)].op.upper).must_equal 5 @db.get(mr[Sequel.pg_range(0..4, :int4range)].op.isempty).must_equal false @db.get(mr[Sequel::Postgres::PGRange.empty(:int4range)].op.isempty).must_equal true @db.get(mr[Sequel.pg_range(1..5, :numrange)].op.lower_inc).must_equal true @db.get(mr[Sequel::Postgres::PGRange.new(1, 5, :exclude_begin=>true, :db_type=>:numrange)].op.lower_inc).must_equal false @db.get(mr[Sequel.pg_range(1..5, :numrange)].op.upper_inc).must_equal true @db.get(mr[Sequel.pg_range(1...5, :numrange)].op.upper_inc).must_equal false @db.get(mr[Sequel::Postgres::PGRange.new(1, 5, :db_type=>:int4range)].op.lower_inf).must_equal false @db.get(mr[Sequel::Postgres::PGRange.new(nil, 5, :db_type=>:int4range)].op.lower_inf).must_equal true @db.get(mr[Sequel::Postgres::PGRange.new(1, 5, :db_type=>:int4range)].op.upper_inf).must_equal false @db.get(mr[Sequel::Postgres::PGRange.new(1, nil, :db_type=>:int4range)].op.upper_inf).must_equal true @db.get(mr[Sequel.pg_range(1..5, :int4range)].op.range_merge.adjacent_to(mr[6..10])).must_equal true @db.get(Sequel.pg_range(1...5, :int4range).op.multirange.adjacent_to(mr[6..10])).must_equal false @db.get(Sequel.pg_range(1...5, :int4range).op.multirange.unnest).to_range.must_equal 1...5 end end if DB.server_version >= 140000 describe 'PostgreSQL interval types' do before(:all) do @db = DB @ds = @db[:items] m = Sequel::Postgres::IntervalDatabaseMethods::Parser @year = m::SECONDS_PER_YEAR @month = m::SECONDS_PER_MONTH end after do @db.drop_table?(:items) end it 'insert and retrieve interval values' do @db.create_table!(:items){interval :i} [ ['0', '00:00:00', 0, []], ['1', '00:00:01', 1, [[:seconds, 1]]], ['1 microsecond', '00:00:00.000001', 0.000001, [[:seconds, 0.000001]]], ['1 millisecond', '00:00:00.001', 0.001, [[:seconds, 0.001]]], ['1 second', '00:00:01', 1, [[:seconds, 1]]], ['1 minute', '00:01:00', 60, [[:seconds, 60]]], ['1 hour', '01:00:00', 3600, [[:seconds, 3600]]], ['123000 hours', '123000:00:00', 442800000, [[:seconds, 442800000]]], ['1 day', '1 day', 86400, [[:days, 1]]], ['1 week', '7 days', 86400*7, [[:days, 7]]], ['1 month', '1 mon', @month, [[:months, 1]]], ['1 year', '1 year', @year, [[:years, 1]]], ['1 decade', '10 years', @year*10, [[:years, 10]]], ['1 century', '100 years', @year*100, [[:years, 100]]], ['1 millennium', '1000 years', @year*1000, [[:years, 1000]]], ['1 year 2 months 3 weeks 4 days 5 hours 6 minutes 7 seconds', '1 year 2 mons 25 days 05:06:07', @year + 2*@month + 3*86400*7 + 4*86400 + 5*3600 + 6*60 + 7, [[:years, 1], [:months, 2], [:days, 25], [:seconds, 18367]]], ['-1 year +2 months -3 weeks +4 days -5 hours +6 minutes -7 seconds', '-10 mons -17 days -04:54:07', -10*@month - 3*86400*7 + 4*86400 - 5*3600 + 6*60 - 7, [[:months, -10], [:days, -17], [:seconds, -17647]]], ['+2 years -1 months +3 weeks -4 days +5 hours -6 minutes +7 seconds', '1 year 11 mons 17 days 04:54:07', @year + 11*@month + 3*86400*7 - 
4*86400 + 5*3600 - 6*60 + 7, [[:years, 1], [:months, 11], [:days, 17], [:seconds, 17647]]], ].each do |instr, outstr, value, parts| @ds.insert(instr) @ds.count.must_equal 1 @ds.get(Sequel.cast(:i, String)).must_equal outstr rs = @ds.all rs.first[:i].is_a?(ActiveSupport::Duration).must_equal true rs.first[:i].must_equal ActiveSupport::Duration.new(value, parts) rs.first[:i].parts.sort_by{|k,v| k.to_s}.reject{|k,v| v == 0}.must_equal parts.sort_by{|k,v| k.to_s} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs @ds.delete end end it 'insert and retrieve interval array values' do @db.create_table!(:items){column :i, 'interval[]'} @ds.insert(Sequel.pg_array(['1 year 2 months 3 weeks 4 days 5 hours 6 minutes 7 seconds'], 'interval')) @ds.count.must_equal 1 rs = @ds.all rs.first[:i].is_a?(Sequel::Postgres::PGArray).must_equal true rs.first[:i].first.is_a?(ActiveSupport::Duration).must_equal true rs.first[:i].first.must_equal ActiveSupport::Duration.new(@year + 2*@month + 3*86400*7 + 4*86400 + 5*3600 + 6*60 + 7, [[:years, 1], [:months, 2], [:days, 25], [:seconds, 18367]]) rs.first[:i].first.parts.sort_by{|k,v| k.to_s}.must_equal [[:years, 1], [:months, 2], [:days, 25], [:seconds, 18367]].sort_by{|k,v| k.to_s} @ds.delete @ds.insert(rs.first) @ds.all.must_equal rs end it 'use intervals in bound variables' do @db.create_table!(:items){interval :i} @ds.insert('1 year 2 months 3 weeks 4 days 5 hours 6 minutes 7 seconds') d = @ds.get(:i) @ds.delete @ds.call(:insert, {:i=>d}, {:i=>:$i}) @ds.get(:i).must_equal d @ds.filter(:i=>:$i).call(:first, :i=>d).must_equal(:i=>d) @ds.filter(:i=>Sequel.cast(:$i, :interval)).call(:first, :i=>'0').must_be_nil @ds.filter(:i=>:$i).call(:delete, :i=>d).must_equal 1 @db.create_table!(:items){column :i, 'interval[]'} @ds.call(:insert, {:i=>[d]}, {:i=>:$i}) @ds.filter(:i=>:$i).call(:first, :i=>[d]).must_equal(:i=>[d]) @ds.filter(:i=>:$i).call(:first, :i=>[]).must_be_nil @ds.filter(:i=>:$i).call(:delete, :i=>[d]).must_equal 1 end if uses_pg_or_jdbc it 'parse default values for schema' do @db.create_table!(:items) do Integer :j interval :i, :default=>ActiveSupport::Duration.new(3*86400, :days=>3) end @db.schema(:items)[0][1][:ruby_default].must_be_nil @db.schema(:items)[1][1][:ruby_default].must_equal ActiveSupport::Duration.new(3*86400, :days=>3) end it 'correctly handles round tripping interval values' do @db.create_table!(:items) do interval :i interval :j end d = Sequel.cast(Date.new(2020, 2, 1), Date) {'30 days'=>Date.new(2020, 3, 2), '1 month'=>Date.new(2020, 3, 1)}.each do |interval, result| @ds.insert(:i=>interval) @ds.update(:j=>@ds.get(:i)) @ds.where(:i=>:j).count.must_equal 1 @ds.get{(d+:i).cast(Date).as(:v)}.must_equal result @ds.get{(d+:j).cast(Date).as(:v)}.must_equal result @ds.delete end end it 'with models' do @db.create_table!(:items) do primary_key :id interval :i end c = Class.new(Sequel::Model(@db[:items])) v = c.create(:i=>'1 year 2 mons 25 days 05:06:07').i v.is_a?(ActiveSupport::Duration).must_equal true v.must_equal ActiveSupport::Duration.new(@year + 2*@month + 3*86400*7 + 4*86400 + 5*3600 + 6*60 + 7, [[:years, 1], [:months, 2], [:days, 25], [:seconds, 18367]]) v.parts.sort_by{|k,_| k.to_s}.must_equal [[:years, 1], [:months, 2], [:days, 25], [:seconds, 18367]].sort_by{|k,_| k.to_s} end end if (begin require 'active_support'; require 'active_support/duration'; require 'active_support/inflector'; require 'active_support/core_ext/string/inflections'; true; rescue LoadError; false end) describe 'PostgreSQL row-valued/composite types' do before(:all) 
describe 'PostgreSQL row-valued/composite types' do
  before(:all) do
    @db = DB
    Sequel.extension :pg_array_ops, :pg_row_ops
    @ds = @db[:person]

    @db.drop_table?(:company, :person, :address)

    @db.create_table(:address) do
      String :street
      String :city
      String :zip
    end
    @db.create_table(:person) do
      Integer :id
      address :address
    end
    @db.create_table(:company) do
      Integer :id
      column :employees, 'person[]'
    end
    oids = @db.conversion_procs.keys
    @db.register_row_type(:address)
    @db.register_row_type(Sequel.qualify(:public, :person))
    @db.register_row_type(Sequel[:public][:company])
    @new_oids = @db.conversion_procs.keys - oids
  end
  after(:all) do
    @new_oids.each{|oid| @db.conversion_procs.delete(oid)}
    @db.row_types.clear
    @db.drop_table?(:company, :person, :address)
  end
  after do
    [:company, :person, :address].each{|t| @db[t].delete}
  end

  it 'insert and retrieve row types' do
    @ds.insert(:id=>1, :address=>Sequel.pg_row(['123 Sesame St', 'Somewhere', '12345']))
    @ds.count.must_equal 1

    # Single row valued type
    rs = @ds.all
    v = rs.first[:address]
    v.class.superclass.must_equal(Sequel::Postgres::PGRow::HashRow)
    v.to_hash.must_be_kind_of(Hash)
    v.to_hash.must_equal(:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345')
    @ds.delete
    @ds.insert(rs.first)
    @ds.all.must_equal rs

    # Nested row value type
    p = @ds.get(:person)
    p[:id].must_equal 1
    p[:address].must_equal v
  end

  it 'insert and retrieve row types containing domains' do
    begin
      @db << "DROP DOMAIN IF EXISTS positive_integer CASCADE"
      @db << "CREATE DOMAIN positive_integer AS integer CHECK (VALUE > 0)"
      @db.create_table!(:domain_check) do
        positive_integer :id
      end
      @db.register_row_type(:domain_check)
      @db.get(@db.row_type(:domain_check, [1])).must_equal(:id=>1)
      @db.register_row_type(Sequel[:public][:domain_check])
      @db.get(@db.row_type(Sequel[:public][:domain_check], [1])).must_equal(:id=>1)
      @db.get(@db.row_type(Sequel.qualify(:public, :domain_check), [1])).must_equal(:id=>1)
    ensure
      @db.drop_table(:domain_check)
      @db << "DROP DOMAIN positive_integer"
    end
  end

  it 'insert and retrieve arrays of row types' do
    @ds = @db[:company]
    @ds.insert(:id=>1, :employees=>Sequel.pg_array([@db.row_type(:person, [1, Sequel.pg_row(['123 Sesame St', 'Somewhere', '12345'])])]))
    @ds.count.must_equal 1

    v = @ds.get(:company)
    v.class.superclass.must_equal(Sequel::Postgres::PGRow::HashRow)
    v.to_hash.must_be_kind_of(Hash)
    v[:id].must_equal 1
    employees = v[:employees]
    employees.class.must_equal(Sequel::Postgres::PGArray)
    employees.to_a.must_be_kind_of(Array)
    employees.must_equal [{:id=>1, :address=>{:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345'}}]
    @ds.delete
    @ds.insert(v[:id], v[:employees])
    @ds.get(:company).must_equal v
  end

  it 'use row types in bound variables' do
    @ds.call(:insert, {:address=>Sequel.pg_row(['123 Sesame St', 'Somewhere', '12345'])}, {:address=>:$address, :id=>1})
    @ds.get(:address).must_equal(:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345')
    @ds.filter(:address=>Sequel.cast(:$address, :address)).call(:first, :address=>Sequel.pg_row(['123 Sesame St', 'Somewhere', '12345']))[:id].must_equal 1
    @ds.filter(:address=>Sequel.cast(:$address, :address)).call(:first, :address=>Sequel.pg_row(['123 Sesame St', 'Somewhere', '12356'])).must_be_nil

    @ds.delete
    @ds.call(:insert, {:address=>Sequel.pg_row([nil, nil, nil])}, {:address=>:$address, :id=>1})
    @ds.get(:address).must_equal(:street=>nil, :city=>nil, :zip=>nil)
  end if uses_pg_or_jdbc
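  # A hedged sketch of registering and using a composite type (assumes the
  # pg_row Database extension and the +address+ type created in the setup
  # above):
  #
  #   DB.extension :pg_row
  #   DB.register_row_type(:address)
  #   DB[:person].insert(:id=>1, :address=>Sequel.pg_row(['123 Sesame St', 'Somewhere', '12345']))
  #   DB[:person].get(:address)[:city]  # => 'Somewhere' (a HashRow subclass)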
  it 'use arrays of row types in bound variables' do
    @ds = @db[:company]
    @ds.call(:insert, {:employees=>Sequel.pg_array([@db.row_type(:person, [1, Sequel.pg_row(['123 Sesame St', 'Somewhere', '12345'])])])}, {:employees=>:$employees, :id=>1})
    @ds.get(:company).must_equal(:id=>1, :employees=>[{:id=>1, :address=>{:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345'}}])
    @ds.filter(:employees=>Sequel.cast(:$employees, 'person[]')).call(:first, :employees=>Sequel.pg_array([@db.row_type(:person, [1, Sequel.pg_row(['123 Sesame St', 'Somewhere', '12345'])])]))[:id].must_equal 1
    @ds.filter(:employees=>Sequel.cast(:$employees, 'person[]')).call(:first, :employees=>Sequel.pg_array([@db.row_type(:person, [1, Sequel.pg_row(['123 Sesame St', 'Somewhere', '12356'])])])).must_be_nil

    @ds.delete
    @ds.call(:insert, {:employees=>Sequel.pg_array([@db.row_type(:person, [1, Sequel.pg_row([nil, nil, nil])])])}, {:employees=>:$employees, :id=>1})
    @ds.get(:employees).must_equal [{:address=>{:city=>nil, :zip=>nil, :street=>nil}, :id=>1}]
  end if uses_pg_or_jdbc

  it 'operations/functions with pg_row_ops' do
    @ds.insert(:id=>1, :address=>Sequel.pg_row(['123 Sesame St', 'Somewhere', '12345']))
    @ds.get(Sequel.pg_row(:address)[:street]).must_equal '123 Sesame St'
    @ds.get(Sequel.pg_row(:address)[:city]).must_equal 'Somewhere'
    @ds.get(Sequel.pg_row(:address)[:zip]).must_equal '12345'

    @ds = @db[:company]
    @ds.insert(:id=>1, :employees=>Sequel.pg_array([@db.row_type(:person, [1, Sequel.pg_row(['123 Sesame St', 'Somewhere', '12345'])])]))
    @ds.get(Sequel.pg_row(:company)[:id]).must_equal 1
    @ds.get(Sequel.pg_row(:company)[:employees]).must_equal [{:id=>1, :address=>{:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345'}}]
    @ds.get(Sequel.pg_row(:company)[:employees][1]).must_equal(:id=>1, :address=>{:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345'})
    @ds.get(Sequel.pg_row(:company)[:employees][1][:address]).must_equal(:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345')
    @ds.get(Sequel.pg_row(:company)[:employees][1][:id]).must_equal 1
    @ds.get(Sequel.pg_row(:company)[:employees][1][:address][:street]).must_equal '123 Sesame St'
    @ds.get(Sequel.pg_row(:company)[:employees][1][:address][:city]).must_equal 'Somewhere'
    @ds.get(Sequel.pg_row(:company)[:employees][1][:address][:zip]).must_equal '12345'
    @db.get(@ds.get(Sequel.pg_row(:company)[:employees][1][:address]).op[:street]).must_equal '123 Sesame St'

    if DB.server_version >= 130000
      @db.get(Sequel.pg_row([1,2,3]).op[:f1]).must_equal 1
    end
  end

  describe "#splat and #*" do
    before(:all) do
      @db.create_table!(:a){Integer :a}
      @db.create_table!(:b){a :b; Integer :a}
      @db.register_row_type(:a)
      @db.register_row_type(:b)
      @db[:b].insert(:a=>1, :b=>@db.row_type(:a, [2]))
    end
    after(:all) do
      @db.drop_table?(:b, :a)
    end

    it "splat should reference the table type" do
      @db[:b].select(:a).first.must_equal(:a=>1)
      @db[:b].select(Sequel[:b][:a]).first.must_equal(:a=>1)
      @db[:b].select(Sequel.pg_row(:b)[:a]).first.must_equal(:a=>2)
      @db[:b].select(Sequel.pg_row(:b).splat[:a]).first.must_equal(:a=>1)

      @db[:b].select(:b).first.must_equal(:b=>{:a=>2})
      @db[:b].select(Sequel.pg_row(:b).splat).first.must_equal(:a=>1, :b=>{:a=>2})
      @db[:b].select(Sequel.pg_row(:b).splat(:b)).first.must_equal(:b=>{:a=>1, :b=>{:a=>2}})
    end

    it "* should expand the table type into separate columns" do
      ds = @db[:b].select(Sequel.pg_row(:b).splat(:b)).from_self(:alias=>:t)
      ds.first.must_equal(:b=>{:a=>1, :b=>{:a=>2}})
      ds.select(Sequel.pg_row(:b).*).first.must_equal(:a=>1, :b=>{:a=>2})
      ds.select(Sequel.pg_row(:b)[:b]).first.must_equal(:b=>{:a=>2})
      ds.select(Sequel.pg_row(Sequel[:t][:b]).*).first.must_equal(:a=>1, :b=>{:a=>2})
      ds.select(Sequel.pg_row(Sequel[:t][:b])[:b]).first.must_equal(:b=>{:a=>2})
      ds.select(Sequel.pg_row(:b)[:a]).first.must_equal(:a=>1)
      ds.select(Sequel.pg_row(Sequel[:t][:b])[:a]).first.must_equal(:a=>1)
    end
  end
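  # splat references a table argument as a composite value, while * expands
  # a composite value back into its member columns. A hedged sketch of the
  # literal SQL forms involved (as documented for pg_row_ops; output shapes
  # are assumptions, not asserted by these specs):
  #
  #   DB.literal(Sequel.pg_row(:b)[:a])    # "(b).a"
  #   DB.literal(Sequel.pg_row(:b).splat)  # "(b.*)"
  #   DB.literal(Sequel.pg_row(:b).*)      # "(b).*"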
  describe "with models" do
    before(:all) do
      class Address < Sequel::Model(:address)
        plugin :pg_row
      end
      class Person < Sequel::Model(:person)
        plugin :pg_row
      end
      class Company < Sequel::Model(:company)
        plugin :pg_row
      end
      @a = Address.new(:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345')
      @es = Sequel.pg_array([Person.new(:id=>1, :address=>@a)])
    end
    after(:all) do
      Object.send(:remove_const, :Address) rescue nil
      Object.send(:remove_const, :Person) rescue nil
      Object.send(:remove_const, :Company) rescue nil
    end

    it 'create model objects whose values are model instances using pg_row' do
      person = Person.create(:id=>1, :address=>@a)
      person.address.must_equal @a
      Person.count.must_equal 1

      company = Company.create(:employees=>[person])
      company.employees.must_equal [person]
      Company.count.must_equal 1
    end

    it 'insert and retrieve row types as model objects' do
      @ds.insert(:id=>1, :address=>@a)
      @ds.count.must_equal 1

      # Single row valued type
      rs = @ds.all
      v = rs.first[:address]
      v.must_be_kind_of(Address)
      v.must_equal @a
      @ds.delete
      @ds.insert(rs.first)
      @ds.all.must_equal rs

      # Nested row value type
      p = @ds.get(:person)
      p.must_be_kind_of(Person)
      p.id.must_equal 1
      p.address.must_be_kind_of(Address)
      p.address.must_equal @a
    end

    it 'insert and retrieve arrays of row types as model objects' do
      @ds = @db[:company]
      @ds.insert(:id=>1, :employees=>@es)
      @ds.count.must_equal 1

      v = @ds.get(:company)
      v.must_be_kind_of(Company)
      v.id.must_equal 1
      employees = v[:employees]
      employees.class.must_equal(Sequel::Postgres::PGArray)
      employees.to_a.must_be_kind_of(Array)
      employees.must_equal @es
      @ds.delete
      @ds.insert(v.id, v.employees)
      @ds.get(:company).must_equal v
    end

    it 'use model objects in bound variables' do
      @ds.call(:insert, {:address=>@a}, {:address=>:$address, :id=>1})
      @ds.get(:address).must_equal @a
      @ds.filter(:address=>Sequel.cast(:$address, :address)).call(:first, :address=>@a)[:id].must_equal 1
      @ds.filter(:address=>Sequel.cast(:$address, :address)).call(:first, :address=>Address.new(:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12356')).must_be_nil
    end if uses_pg_or_jdbc

    it 'use arrays of model objects in bound variables' do
      @ds = @db[:company]
      @ds.call(:insert, {:employees=>@es}, {:employees=>:$employees, :id=>1})
      @ds.get(:company).must_equal Company.new(:id=>1, :employees=>@es)
      @ds.filter(:employees=>Sequel.cast(:$employees, 'person[]')).call(:first, :employees=>@es)[:id].must_equal 1
      @ds.filter(:employees=>Sequel.cast(:$employees, 'person[]')).call(:first, :employees=>Sequel.pg_array([@db.row_type(:person, [1, Sequel.pg_row(['123 Sesame St', 'Somewhere', '12356'])])])).must_be_nil
    end if uses_pg_or_jdbc

    it 'model typecasting' do
      a = Address.new(:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345')
      o = Person.create(:id=>1, :address=>['123 Sesame St', 'Somewhere', '12345'])
      o.address.must_equal a
      o = Person.create(:id=>1, :address=>{:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345'})
      o.address.must_equal a
      o = Person.create(:id=>1, :address=>a)
      o.address.must_equal a

      e = Person.new(:id=>1, :address=>a)
      o = Company.create(:id=>1, :employees=>[{:id=>1, :address=>{:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345'}}])
      o.employees.must_equal [e]
      o = Company.create(:id=>1, :employees=>[e])
      o.employees.must_equal [e]
    end
  end
end
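# A hedged sketch of the pg_row model plugin shown above: a model backed by
# a composite type can be used as a column value, and hashes/arrays are
# typecast to model instances on assignment:
#
#   class Address < Sequel::Model(:address)
#     plugin :pg_row
#   end
#   Person.create(:id=>1, :address=>{:street=>'123 Sesame St', :city=>'Somewhere', :zip=>'12345'})
#   # the hash (or an array of member values) is typecast to an Address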
describe 'pg_static_cache_updater extension' do
  before(:all) do
    @db = DB
    @db.extension :pg_static_cache_updater
    @db.drop_function(@db.default_static_cache_update_name, :cascade=>true, :if_exists=>true)
    @db.create_static_cache_update_function

    @db.create_table!(:things) do
      primary_key :id
      String :name
    end
    @Thing = Class.new(Sequel::Model(:things))
    @Thing.plugin :static_cache
    @db.create_static_cache_update_trigger(:things)
  end
  after(:all) do
    @db.drop_table(:things)
    @db.drop_function(@db.default_static_cache_update_name)
  end

  it "should reload model static cache when underlying table changes" do
    @Thing.all.must_equal []
    q = Queue.new
    q1 = Queue.new

    @db.listen_for_static_cache_updates(@Thing, :timeout=>0, :loop=>proc{q.push(nil); q1.pop.call}, :before_thread_exit=>proc{q.push(nil)})

    q.pop
    q1.push(proc{@db[:things].insert(1, 'A')})
    q.pop
    @Thing.all.must_equal [@Thing.load(:id=>1, :name=>'A')]

    q1.push(proc{@db[:things].update(:name=>'B')})
    q.pop
    @Thing.all.must_equal [@Thing.load(:id=>1, :name=>'B')]

    q1.push(proc{@db[:things].delete})
    q.pop
    @Thing.all.must_equal []

    q1.push(proc{throw :stop})
    q.pop
  end
end if uses_pg && DB.server_version >= 90000
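# The extension is built on LISTEN/NOTIFY: create_static_cache_update_function
# defines a trigger function that sends a notification identifying the
# changed table, the per-table trigger fires it on data changes, and
# listen_for_static_cache_updates runs a listener thread that reloads the
# matching static_cache model. A hedged sketch of typical application setup
# (model/table names illustrative):
#
#   DB.extension :pg_static_cache_updater
#   DB.create_static_cache_update_function           # once per database
#   DB.create_static_cache_update_trigger(:things)   # once per cached table
#   DB.listen_for_static_cache_updates([Thing])      # at application boot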
describe 'PostgreSQL enum types' do
  before do
    @db = DB
    @initial_enum_values = %w'a b c d'
    @db.create_enum(:test_enum, @initial_enum_values)

    @db.create_table!(:test_enumt) do
      test_enum :t
    end
  end
  after do
    @db.drop_table?(:test_enumt)
    @db.drop_enum(:test_enum)
  end

  it "should return correct entries in the schema" do
    s = @db.schema(:test_enumt)
    s.first.last[:type].must_equal :enum
    s.first.last[:enum_values].must_equal @initial_enum_values
  end

  it "should add array parsers for enum values" do
    @db.get(Sequel.pg_array(%w'a b', :test_enum)).must_equal %w'a b'
  end

  it "should set up model typecasting correctly" do
    c = Class.new(Sequel::Model(:test_enumt))
    o = c.new
    o.t = :a
    o.t.must_equal 'a'
  end

  it "should add values to existing enum" do
    @db.add_enum_value(:test_enum, 'e')
    @db.add_enum_value(:test_enum, 'f', :after=>'a')
    @db.add_enum_value(:test_enum, 'g', :before=>'b')
    @db.add_enum_value(:test_enum, 'a', :if_not_exists=>true) if @db.server_version >= 90300
    @db.schema(:test_enumt, :reload=>true).first.last[:enum_values].must_equal %w'a f g b c d e'
  end if DB.server_version >= 90100

  it "should rename existing enum" do
    @db.rename_enum(:test_enum, :new_enum)
    @db.schema(:test_enumt, :reload=>true).first.last[:db_type].must_equal 'new_enum'
    @db.schema(:test_enumt, :reload=>true).first.last[:enum_values].must_equal @initial_enum_values
    @db.rename_enum(:new_enum, :test_enum)
  end

  it "should rename enum values" do
    @db.rename_enum_value(:test_enum, :b, :x)
    new_enum_values = @initial_enum_values
    new_enum_values[new_enum_values.index('b')] = 'x'
    @db.schema(:test_enumt, :reload=>true).first.last[:enum_values].must_equal new_enum_values
    @db.rename_enum_value(:test_enum, :x, :b)
  end if DB.server_version >= 100000
end
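# A hedged sketch of the pg_enum extension API exercised above (enum and
# table names here are illustrative):
#
#   DB.extension :pg_enum
#   DB.create_enum(:mood, %w'sad ok happy')
#   DB.create_table(:people){mood :current_mood}
#   DB.add_enum_value(:mood, 'ecstatic', :after=>'happy')
#   DB.rename_enum_value(:mood, 'ok', 'fine')   # PostgreSQL 10+
#   DB.drop_enum(:mood)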
describe "PostgreSQL stored procedures for datasets" do
  before do
    @db = DB
    @db.create_table!(:items) do
      primary_key :id
      integer :numb
    end
    @db.execute(<<-SQL)
      create or replace function insert_item(numb bigint)
      returns items.id%type as $$
      declare
        l_id items.id%type;
      begin
        l_id := 1;
        insert into items(id, numb) values(l_id, numb);
        return l_id;
      end;
      $$ language plpgsql;
    SQL
    @ds = @db[:items]
  end
  after do
    @db.drop_function("insert_item", :if_exists=>true)
    @db.drop_table?(:items)
  end

  it "should correctly call stored procedure for inserting record" do
    result = @ds.call_sproc(:insert, :insert_item, 100)
    result.must_be_nil

    @ds.call(:all).must_equal [{:id=>1, :numb=>100}]
  end
end if DB.adapter_scheme == :jdbc

describe "pg_auto_constraint_validations plugin" do
  before(:all) do
    @db = DB
    @db.create_table!(:test1) do
      Integer :id, :primary_key=>true
      Integer :i, :unique=>true, :null=>false
      constraint :valid_i, Sequel[:i] < 10
      constraint(:valid_i_id, Sequel[:i] + Sequel[:id] < 20)
    end
    @db.run "CREATE OR REPLACE FUNCTION valid_test1(t1 test1) RETURNS boolean AS 'SELECT t1.i != -100' LANGUAGE SQL;"
    @db.alter_table(:test1) do
      add_constraint(:valid_test1, Sequel.function(:valid_test1, :test1))
    end
    @db.create_table!(:test2) do
      Integer :test2_id, :primary_key=>true
      foreign_key :test1_id, :test1
      index [:test1_id], :unique=>true, :where=>(Sequel[:test1_id] < 10)
    end
    @c1 = Sequel::Model(:test1)
    @c2 = Sequel::Model(:test2)
    @c1.plugin :update_primary_key
    @c1.plugin :pg_auto_constraint_validations
    @c2.plugin :pg_auto_constraint_validations
    @c1.unrestrict_primary_key
    @c2.unrestrict_primary_key
  end
  before do
    @c2.dataset.delete
    @c1.dataset.delete
    @c1.insert(:id=>1, :i=>2)
    @c2.insert(:test2_id=>3, :test1_id=>1)
  end
  after(:all) do
    @db.run "ALTER TABLE test1 DROP CONSTRAINT IF EXISTS valid_test1"
    @db.run "DROP FUNCTION IF EXISTS valid_test1(test1)"
    @db.drop_table?(:test2, :test1)
  end

  it "should handle check constraint failures as validation errors when creating" do
    o = @c1.new(:id=>5, :i=>12)
    proc{o.save}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:i=>['is invalid'])
  end

  it "should handle check constraint failures where the columns are unknown, if columns are explicitly specified" do
    o = @c1.new(:id=>5, :i=>-100)
    proc{o.save}.must_raise Sequel::CheckConstraintViolation
    @c1.pg_auto_constraint_validation_override(:valid_test1, :i, "should not be -100")
    proc{o.save}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:i=>['should not be -100'])
  end

  it "should handle check constraint failures as validation errors when updating" do
    o = @c1.new(:id=>5, :i=>3)
    o.save
    proc{o.update(:i=>12)}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:i=>['is invalid'])
  end

  it "should handle unique constraint failures as validation errors when creating" do
    o = @c1.new(:id=>5, :i=>2)
    proc{o.save}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:i=>['is already taken'])
  end

  it "should handle unique constraint failures as validation errors when updating" do
    o = @c1.new(:id=>5, :i=>3)
    o.save
    proc{o.update(:i=>2)}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:i=>['is already taken'])
  end

  it "should handle unique constraint failures as validation errors for partial unique indexes" do
    @c1.create(:id=>2, :i=>3)
    @c2.create(:test2_id=>6, :test1_id=>2)
    o = @c2.new(:test2_id=>5, :test1_id=>2)
    proc{o.save}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:test1_id=>['is already taken'])
  end

  it "should handle not null constraint failures as validation errors when creating" do
    o = @c1.new(:id=>5)
    proc{o.save}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:i=>['is not present'])
  end

  it "should handle not null constraint failures as validation errors when updating" do
    o = @c1.new(:id=>5, :i=>3)
    o.save
    proc{o.update(:i=>nil)}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:i=>['is not present'])
  end

  it "should handle foreign key constraint failures as validation errors when creating" do
    o = @c2.new(:test2_id=>4, :test1_id=>2)
    proc{o.save}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:test1_id=>['is invalid'])
  end

  it "should handle foreign key constraint failures as validation errors when updating" do
    o = @c2.first
    proc{o.update(:test1_id=>2)}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:test1_id=>['is invalid'])
  end

  it "should handle foreign key constraint failures in other tables as validation errors when updating" do
    o = @c1[1]
    proc{o.update(:id=>2)}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:id=>['cannot be changed currently'])
  end

  it "should handle multi-column constraint failures as validation errors" do
    c = Class.new(@c1)
    o = c.new(:id=>18, :i=>8)
    proc{o.save}.must_raise Sequel::ValidationFailed
    [{[:i, :id]=>['is invalid']}, {[:id, :i]=>['is invalid']}].must_include o.errors
  end

  it "should handle multi-column constraint failures as validation errors when using the error_splitter plugin" do
    c = Class.new(@c1)
    c.plugin :error_splitter
    o = c.new(:id=>18, :i=>8)
    proc{o.save}.must_raise Sequel::ValidationFailed
    o.errors.must_equal(:i=>['is invalid'], :id=>['is invalid'])
  end

  it "should handle dumping cached metadata and loading metadata from cache" do
    cache_file = "spec/files/pgacv-#{$$}.cache"
    begin
      c = Class.new(Sequel::Model)
      c.plugin :pg_auto_constraint_validations, :cache_file=>cache_file
      c1 = Class.new(c)
      def c1.name; 'Foo' end
      c1.dataset = DB[:test1]
      c2 = Class.new(c)
      def c2.name; 'Bar' end
      c2.dataset = DB[:test2]
      c1.unrestrict_primary_key
      c2.unrestrict_primary_key

      o = c1.new(:id=>5, :i=>12)
      proc{o.save}.must_raise Sequel::ValidationFailed
      o.errors.must_equal(:i=>['is invalid'])
      o = c2.new(:test2_id=>4, :test1_id=>2)
      proc{o.save}.must_raise Sequel::ValidationFailed
      o.errors.must_equal(:test1_id=>['is invalid'])

      c.dump_pg_auto_constraint_validations_cache

      c = Class.new(Sequel::Model)
      c.plugin :pg_auto_constraint_validations, :cache_file=>cache_file
      c1 = Class.new(c)
      def c1.name; 'Foo' end
      c1.dataset = DB[:test1]
      c2 = Class.new(c)
      def c2.name; 'Bar' end
      c2.dataset = DB[:test2]
      c1.unrestrict_primary_key
      c2.unrestrict_primary_key

      o = c1.new(:id=>5, :i=>12)
      proc{o.save}.must_raise Sequel::ValidationFailed
      o.errors.must_equal(:i=>['is invalid'])
      o = c2.new(:test2_id=>4, :test1_id=>2)
      proc{o.save}.must_raise Sequel::ValidationFailed
      o.errors.must_equal(:test1_id=>['is invalid'])
    ensure
      File.delete(cache_file) if File.file?(cache_file)
    end
  end
end if DB.respond_to?(:error_info) && DB.server_version >= 90300
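# A hedged summary of the plugin behavior tested above: it rescues
# PostgreSQL constraint violations (check, not-null, unique, foreign key)
# raised on save and converts them to validation errors on the relevant
# columns, using the constraint metadata the adapter exposes. Sketch:
#
#   class Item < Sequel::Model
#     plugin :pg_auto_constraint_validations
#   end
#   item.save  # raises Sequel::ValidationFailed (with item.errors set)
#              # instead of, e.g., Sequel::CheckConstraintViolation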
describe "Common Table Expression SEARCH" do
  before(:all) do
    @db = DB
    @db.create_table!(:i1){Integer :id; Integer :parent_id}
    @ds = @db[:i1]
    @ds.insert(:id=>1)
    @ds.insert(:id=>2)
    @ds.insert(:id=>3, :parent_id=>1)
    @ds.insert(:id=>4, :parent_id=>1)
    @ds.insert(:id=>5, :parent_id=>3)
    @ds.insert(:id=>6, :parent_id=>5)
  end
  after(:all) do
    @db.drop_table?(:i1)
  end

  it "should support :search option for depth/breadth first ordering" do
    @db[:t].with_recursive(:t,
        @ds.filter(:parent_id=>nil),
        @ds.join(:t, :id=>:parent_id).select_all(:i1),
        :search=>{:by=>:id}).
      order(:ordercol, :id).
      select_map([:id, :ordercol]).must_equal [
        [1, [["1"]]],
        [3, [["1"], ["3"]]],
        [5, [["1"], ["3"], ["5"]]],
        [6, [["1"], ["3"], ["5"], ["6"]]],
        [4, [["1"], ["4"]]],
        [2, [["2"]]]
      ]

    @db[:t].with_recursive(:t,
        @ds.filter(:parent_id=>nil),
        @ds.join(:t, :id=>:parent_id).select_all(:i1),
        :search=>{:type=>:breadth, :by=>[:id, :parent_id], :set=>:c},
        :args=>[:id, :parent_id]).
      order(:c, :id).
      select_map([:id, :c]).must_equal [
        [1, ["0", "1", nil]],
        [2, ["0", "2", nil]],
        [3, ["1", "3", "1"]],
        [4, ["1", "4", "1"]],
        [5, ["2", "5", "3"]],
        [6, ["3", "6", "5"]]
      ]
  end
end if DB.server_version >= 140000

describe "Common Table Expression CYCLE" do
  before(:all) do
    @db = DB
    @db.create_table!(:i1){Integer :id; Integer :parent_id}
    @ds = @db[:i1]
    @ds.insert(:id=>1, :parent_id=>6)
    @ds.insert(:id=>2)
    @ds.insert(:id=>3, :parent_id=>1)
    @ds.insert(:id=>4, :parent_id=>1)
    @ds.insert(:id=>5, :parent_id=>3)
    @ds.insert(:id=>6, :parent_id=>5)
  end
  after(:all) do
    @db.drop_table?(:i1)
  end

  it "should support :cycle option for detecting cycles" do
    @db[:t].with_recursive(:t,
        @ds.filter(:id=>[1,2]),
        @ds.join(:t, :id=>:parent_id).select_all(:i1),
        :cycle=>{:columns=>:id},
        :args=>[:id, :parent_id]).
      order(:id).
      exclude(:is_cycle).
      select_map([:id, :is_cycle, :path]).must_equal [
        [1, false, [["1"]]],
        [2, false, [["2"]]],
        [3, false, [["1"], ["3"]]],
        [4, false, [["1"], ["4"]]],
        [5, false, [["1"], ["3"], ["5"]]],
        [6, false, [["1"], ["3"], ["5"], ["6"]]]
      ]

    @db[:t].with_recursive(:t,
        @ds.filter(:id=>[1,2]),
        @ds.join(:t, :id=>:parent_id).select_all(:i1),
        :cycle=>{:columns=>[:id, :parent_id], :path_column=>:pc, :cycle_column=>:cc, :cycle_value=>1, :noncycle_value=>0}).
      order(:id).
      where(:cc=>0).
      select_map([:id, :cc, :pc]).must_equal [
        [1, 0, [["1", "6"]]],
        [2, 0, [["2", nil]]],
        [3, 0, [["1", "6"], ["3", "1"]]],
        [4, 0, [["1", "6"], ["4", "1"]]],
        [5, 0, [["1", "6"], ["3", "1"], ["5", "3"]]],
        [6, 0, [["1", "6"], ["3", "1"], ["5", "3"], ["6", "5"]]]
      ]
  end

  it "should support both :search and :cycle options together" do
    @db[:t].with_recursive(:t,
        @ds.filter(:id=>[1,2]),
        @ds.join(:t, :id=>:parent_id).select_all(:i1),
        :cycle=>{:columns=>:id},
        :search=>{:by=>:id},
        :args=>[:id, :parent_id]).
      order(:ordercol, :id).
      exclude(:is_cycle).
      select_map([:id, :is_cycle, :path, :ordercol]).must_equal [
        [1, false, [["1"]], [["1"]]],
        [3, false, [["1"], ["3"]], [["1"], ["3"]]],
        [5, false, [["1"], ["3"], ["5"]], [["1"], ["3"], ["5"]]],
        [6, false, [["1"], ["3"], ["5"], ["6"]], [["1"], ["3"], ["5"], ["6"]]],
        [4, false, [["1"], ["4"]], [["1"], ["4"]]],
        [2, false, [["2"]], [["2"]]]
      ]

    @db[:t].with_recursive(:t,
        @ds.filter(:id=>[1,2]),
        @ds.join(:t, :id=>:parent_id).select_all(:i1),
        :cycle=>{:columns=>:id},
        :search=>{:type=>:breadth, :by=>:id},
        :args=>[:id, :parent_id]).
      order(:ordercol, :id).
      exclude(:is_cycle).
      select_map([:id, :is_cycle, :path, :ordercol]).must_equal [
        [1, false, [["1"]], ["0", "1"]],
        [2, false, [["2"]], ["0", "2"]],
        [3, false, [["1"], ["3"]], ["1", "3"]],
        [4, false, [["1"], ["4"]], ["1", "4"]],
        [5, false, [["1"], ["3"], ["5"]], ["2", "5"]],
        [6, false, [["1"], ["3"], ["5"], ["6"]], ["3", "6"]]
      ]
  end
end if DB.server_version >= 140000
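# A hedged summary of the API shown above: on PostgreSQL 14+, with_recursive
# accepts :search (SEARCH DEPTH/BREADTH FIRST BY ... SET col, default set
# column :ordercol) and :cycle (CYCLE cols SET col ... USING path, default
# columns :is_cycle and :path) options, which can be combined:
#
#   DB[:t].with_recursive(:t, base_ds, recursive_ds,
#     :search=>{:type=>:breadth, :by=>:id, :set=>:ordercol},
#     :cycle=>{:columns=>:id, :cycle_column=>:is_cycle, :path_column=>:path})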
describe "MERGE DO NOTHING" do
  before(:all) do
    @db = DB
    @db.create_table!(:m1){Integer :i1; Integer :a}
    @db.create_table!(:m2){Integer :i2; Integer :b}
    @m1 = @db[:m1]
    @m2 = @db[:m2]
  end
  after do
    @m1.delete
    @m2.delete
  end
  after(:all) do
    @db.drop_table?(:m1, :m2)
  end

  it "should allow inserts, updates, do nothings, and deletes based on conditions in a single MERGE statement" do
    ds = @m1.
      merge_using(:m2, :i1=>:i2).
      merge_do_nothing_when_not_matched{b > 50}.
      merge_insert(:i1=>Sequel[:i2], :a=>Sequel[:b]+11).
      merge_do_nothing_when_matched{a > 50}.
      merge_delete{a > 30}.
      merge_update(:i1=>Sequel[:i1]+:i2+10, :a=>Sequel[:a]+:b+20)

    @m2.insert(1, 2)
    @m1.all.must_equal []

    # INSERT
    ds.merge
    @m1.all.must_equal [{:i1=>1, :a=>13}]

    # UPDATE
    ds.merge
    @m1.all.must_equal [{:i1=>12, :a=>35}]

    # DELETE MATCHING current row, INSERT NOT MATCHED new row
    @m2.insert(12, 3)
    ds.merge
    @m1.all.must_equal [{:i1=>1, :a=>13}]

    # MATCHED DO NOTHING
    @m2.where(:i2=>12).delete
    @m1.update(:a=>51)
    ds.merge
    @m1.all.must_equal [{:i1=>1, :a=>51}]

    # NOT MATCHED DO NOTHING
    @m1.delete
    @m2.update(:b=>51)
    ds.merge
    @m1.all.must_equal []

    @m2.insert(1, 2)
    @m2.insert(11, 22)
    ds = @m1.
      merge_using(:m2, :i1=>:i2).
      merge_insert(:i2, :b){b <= 10}.
      merge_update(:a=>Sequel[:a]+1){false}.
      merge_delete{false}
    ds.merge
    @m1.all.must_equal [{:i1=>1, :a=>2}]
    ds.merge
    @m1.all.must_equal [{:i1=>1, :a=>2}]
  end

  it "should consider condition blocks that return nil as NULL" do
    @m2.insert(1, 2)
    @m1.
      merge_using(:m2, :i1=>:i2).
      merge_insert(Sequel[:i2], Sequel[:b]+11){nil}.
      merge
    @m1.all.must_equal []
  end

  it "supports static SQL" do
    @m2.insert(1, 2)
    @m1.with_sql(<<SQL).merge
MERGE INTO m1 USING m2 ON (i1 = i2)
WHEN NOT MATCHED AND (b > 50) THEN DO NOTHING
WHEN NOT MATCHED THEN INSERT (i1, a) VALUES (i2, (b + 11))
WHEN MATCHED AND (a > 50) THEN DO NOTHING
WHEN MATCHED AND (a > 30) THEN DELETE
WHEN MATCHED THEN UPDATE SET i1 = (i1 + i2 + 10), a = (a + b + 20)
SQL
    @m1.all.must_equal [{:i1=>1, :a=>13}]
  end

  it "should support merge_do_nothing_* without blocks" do
    @m2.insert(1, 2)
    ds = @m1.
      merge_using(:m2, :i1=>:i2).
      merge_do_nothing_when_matched.
      merge_do_nothing_when_not_matched

    ds.merge
    @m1.all.must_equal []

    @m1.insert(1, 3)
    ds.merge
    @m1.all.must_equal [{:i1=>1, :a=>3}]
  end
end if DB.server_version >= 150000
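# A hedged summary of the MERGE dataset API used above (PostgreSQL 15+):
# merge_using sets the source table and join condition, each merge_* call
# adds a WHEN [NOT] MATCHED clause in the order defined (blocks supply AND
# conditions), and #merge runs the statement:
#
#   DB[:m1].
#     merge_using(:m2, :i1=>:i2).
#     merge_insert(:i1=>Sequel[:i2]).   # WHEN NOT MATCHED THEN INSERT
#     merge_update(:a=>Sequel[:a]+1).   # WHEN MATCHED THEN UPDATE
#     merge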
"SEQUEL_#{SEQUEL_ADAPTER_TEST.to_s.upcase}_URL" env_var = ENV.has_key?(env_var) ? env_var : 'SEQUEL_INTEGRATION_URL' DB = Sequel.connect(ENV[env_var]) end require_relative "../visibility_checking" if ENV['CHECK_METHOD_VISIBILITY'] IDENTIFIER_MANGLING = !!ENV['SEQUEL_IDENTIFIER_MANGLING'] unless defined?(IDENTIFIER_MANGLING) DB.extension(:identifier_mangling) if IDENTIFIER_MANGLING require_relative '../async_spec_helper' DB.extension :pg_timestamptz if ENV['SEQUEL_PG_TIMESTAMPTZ'] DB.extension :integer64 if ENV['SEQUEL_INTEGER64'] DB.extension :index_caching if ENV['SEQUEL_INDEX_CACHING'] if dch = ENV['SEQUEL_DUPLICATE_COLUMNS_HANDLER'] DB.extension :duplicate_columns_handler DB.opts[:on_duplicate_columns] = dch.to_sym unless dch.empty? end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/adapters/sqlanywhere_spec.rb�����������������������������������������������������0000664�0000000�0000000�00000005570�14342141206�0021545�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������SEQUEL_ADAPTER_TEST = :sqlanywhere require_relative 'spec_helper' describe "convert_smallint_to_bool" do before do @db = DB @ds = @db[:booltest] end after do @db.convert_smallint_to_bool = true end describe "Database#convert_smallint_to_bool" do before do @db.create_table!(:booltest){column :b, 'smallint'; column :i, 'integer'} end after do @db.drop_table(:booltest) end it "should consider smallint datatypes as boolean if set, but not larger smallints" do @db.schema(:booltest, :reload=>true).first.last[:type].must_equal :boolean @db.schema(:booltest, :reload=>true).first.last[:db_type].must_match(/smallint/i) @db.convert_smallint_to_bool = false @db.schema(:booltest, :reload=>true).first.last[:type].must_equal :integer @db.schema(:booltest, :reload=>true).first.last[:db_type].must_match(/smallint/i) end it "should return smallints as bools and integers as integers when set" do @ds.delete @ds.insert(:b=>true, :i=>10) @ds.all.must_equal [{:b=>true, :i=>10}] @ds.delete @ds.insert(:b=>false, :i=>0) @ds.all.must_equal [{:b=>false, :i=>0}] @ds.delete @ds.insert(:b=>true, :i=>1) @ds.all.must_equal [{:b=>true, :i=>1}] end it "should return all smallints as integers when unset" do @db.convert_smallint_to_bool = false @ds.delete @ds.insert(:b=>true, :i=>10) @ds.all.must_equal [{:b=>1, :i=>10}] @ds.delete @ds.insert(:b=>false, :i=>0) @ds.all.must_equal [{:b=>0, :i=>0}] @ds.delete @ds.insert(:b=>1, :i=>10) @ds.all.must_equal [{:b=>1, :i=>10}] @ds.delete @ds.insert(:b=>0, :i=>0) @ds.all.must_equal [{:b=>0, :i=>0}] end end describe "Dataset#convert_smallint_to_bool" do before do @db.create_table!(:booltest){column :b, 'smallint'; column :i, 'integer'} end after do @db.drop_table(:booltest) end it "should return smallints as bools and integers as integers when set" do @ds.delete @ds.insert(:b=>true, :i=>10) @ds.all.must_equal [{:b=>true, :i=>10}] @ds.delete @ds.insert(:b=>false, :i=>0) @ds.all.must_equal [{:b=>false, :i=>0}] @ds.delete @ds.insert(:b=>true, :i=>1) @ds.all.must_equal [{:b=>true, :i=>1}] end it "should support with_convert_smallint_to_bool for returning modified dataset with setting 
changed" do @ds = @ds.with_convert_smallint_to_bool(false) @ds.delete @ds.insert(:b=>true, :i=>10) @ds.all.must_equal [{:b=>1, :i=>10}] @ds.delete @ds.insert(:b=>false, :i=>0) @ds.all.must_equal [{:b=>0, :i=>0}] @ds.delete @ds.insert(:b=>1, :i=>10) @ds.all.must_equal [{:b=>1, :i=>10}] @ds.delete @ds.insert(:b=>0, :i=>0) @ds.all.must_equal [{:b=>0, :i=>0}] end end end ����������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/adapters/sqlite_spec.rb����������������������������������������������������������0000664�0000000�0000000�00000100331�14342141206�0020473�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������SEQUEL_ADAPTER_TEST = :sqlite require_relative 'spec_helper' describe "An SQLite database" do before do @db = DB end after do @db.drop_table?(:fk) @db.use_timestamp_timezones = false Sequel.datetime_class = Time end it "should support casting to Date by using the date function" do @db.get(Sequel.cast('2012-10-20 11:12:13', Date)).must_equal '2012-10-20' end it "should support casting to Time or DateTime by using the datetime function" do @db.get(Sequel.cast('2012-10-20', Time)).must_equal '2012-10-20 00:00:00' @db.get(Sequel.cast('2012-10-20', DateTime)).must_equal '2012-10-20 00:00:00' end it "should provide the SQLite version as an integer" do @db.sqlite_version.must_be_kind_of(Integer) end it "should support dropping noncomposite unique constraint" do @db.create_table(:fk) do primary_key :id String :name, null: false, unique: true end # Find name of unique index, as SQLite does not use a given constraint name name_constraint = @db.indexes(:fk).find do |_, properties| properties[:unique] == true && properties[:columns] == [:name] end || [:missing] @db.alter_table(:fk) do drop_constraint(name_constraint.first, type: :unique) end @db[:fk].insert(:name=>'a') @db[:fk].insert(:name=>'a') end it "should keep composite unique constraint when changing a column default" do @db.create_table(:fk) do Bignum :id, null: false, unique: true Bignum :another_id, null: false String :name, size: 50, null: false String :test unique [:another_id, :name], :name=>:fk_uidx end @db.alter_table(:fk) do set_column_default :test, 'test' end @db[:fk].insert(:id=>1, :another_id=>2, :name=>'a') @db[:fk].insert(:id=>2, :another_id=>3, :name=>'a') @db[:fk].insert(:id=>3, :another_id=>2, :name=>'b') proc{@db[:fk].insert(:id=>4, :another_id=>2, :name=>'a')}.must_raise Sequel::ConstraintViolation end it "should keep composite primary key when changing a column default" do @db.create_table(:fk) do Bignum :id, null: false, unique: true Bignum :another_id, null: false String :name, size: 50, null: false String :test primary_key [:another_id, :name] end @db.alter_table(:fk) do set_column_default :test, 'test' end @db[:fk].insert(:id=>1, :another_id=>2, :name=>'a') @db[:fk].insert(:id=>2, :another_id=>3, :name=>'a') @db[:fk].insert(:id=>3, :another_id=>2, :name=>'b') proc{@db[:fk].insert(:id=>4, :another_id=>2, :name=>'a')}.must_raise Sequel::ConstraintViolation end it "should allow setting current_timestamp_utc to keep CURRENT_* in UTC" do begin v = @db.current_timestamp_utc @db.current_timestamp_utc = true 
sequel-5.63.0/spec/adapters/sqlite_spec.rb000066400000000000000000001003311434214120600204730ustar00rootroot00000000000000SEQUEL_ADAPTER_TEST = :sqlite

require_relative 'spec_helper'

describe "An SQLite database" do
  before do
    @db = DB
  end
  after do
    @db.drop_table?(:fk)
    @db.use_timestamp_timezones = false
    Sequel.datetime_class = Time
  end

  it "should support casting to Date by using the date function" do
    @db.get(Sequel.cast('2012-10-20 11:12:13', Date)).must_equal '2012-10-20'
  end

  it "should support casting to Time or DateTime by using the datetime function" do
    @db.get(Sequel.cast('2012-10-20', Time)).must_equal '2012-10-20 00:00:00'
    @db.get(Sequel.cast('2012-10-20', DateTime)).must_equal '2012-10-20 00:00:00'
  end

  it "should provide the SQLite version as an integer" do
    @db.sqlite_version.must_be_kind_of(Integer)
  end

  it "should support dropping noncomposite unique constraint" do
    @db.create_table(:fk) do
      primary_key :id
      String :name, null: false, unique: true
    end

    # Find name of unique index, as SQLite does not use a given constraint name
    name_constraint = @db.indexes(:fk).find do |_, properties|
      properties[:unique] == true && properties[:columns] == [:name]
    end || [:missing]

    @db.alter_table(:fk) do
      drop_constraint(name_constraint.first, type: :unique)
    end
    @db[:fk].insert(:name=>'a')
    @db[:fk].insert(:name=>'a')
  end

  it "should keep composite unique constraint when changing a column default" do
    @db.create_table(:fk) do
      Bignum :id, null: false, unique: true
      Bignum :another_id, null: false
      String :name, size: 50, null: false
      String :test
      unique [:another_id, :name], :name=>:fk_uidx
    end
    @db.alter_table(:fk) do
      set_column_default :test, 'test'
    end
    @db[:fk].insert(:id=>1, :another_id=>2, :name=>'a')
    @db[:fk].insert(:id=>2, :another_id=>3, :name=>'a')
    @db[:fk].insert(:id=>3, :another_id=>2, :name=>'b')
    proc{@db[:fk].insert(:id=>4, :another_id=>2, :name=>'a')}.must_raise Sequel::ConstraintViolation
  end

  it "should keep composite primary key when changing a column default" do
    @db.create_table(:fk) do
      Bignum :id, null: false, unique: true
      Bignum :another_id, null: false
      String :name, size: 50, null: false
      String :test
      primary_key [:another_id, :name]
    end
    @db.alter_table(:fk) do
      set_column_default :test, 'test'
    end
    @db[:fk].insert(:id=>1, :another_id=>2, :name=>'a')
    @db[:fk].insert(:id=>2, :another_id=>3, :name=>'a')
    @db[:fk].insert(:id=>3, :another_id=>2, :name=>'b')
    proc{@db[:fk].insert(:id=>4, :another_id=>2, :name=>'a')}.must_raise Sequel::ConstraintViolation
  end

  it "should allow setting current_timestamp_utc to keep CURRENT_* in UTC" do
    begin
      v = @db.current_timestamp_utc
      @db.current_timestamp_utc = true
      Time.parse(@db.get(Sequel::CURRENT_TIMESTAMP)).strftime('%Y%m%d%H%M').must_equal Time.now.utc.strftime('%Y%m%d%H%M')
      Time.parse(@db.get(Sequel::CURRENT_DATE)).strftime('%Y%m%d').must_equal Time.now.utc.strftime('%Y%m%d')
      Time.parse(@db.get(Sequel::CURRENT_TIME)).strftime('%H%M').must_equal Time.now.utc.strftime('%H%M')
    ensure
      @db.current_timestamp_utc = v
    end
  end

  it "should support a use_timestamp_timezones setting" do
    @db.use_timestamp_timezones = true
    @db.create_table!(:fk){Time :time}
    @db[:fk].insert(Time.now)
    @db[:fk].get(Sequel.cast(:time, String)).must_match(/[-+]\d\d\d\d\z/)
    @db.use_timestamp_timezones = false
    @db[:fk].delete
    @db[:fk].insert(Time.now)
    @db[:fk].get(Sequel.cast(:time, String)).wont_match(/[-+]\d\d\d\d\z/)
  end

  it "should provide a list of existing tables" do
    @db.drop_table?(:fk)
    @db.tables.must_be_kind_of(Array)
    @db.tables.wont_include(:fk)
    @db.create_table!(:fk){String :name}
    @db.tables.must_include(:fk)
  end

  cspecify "should support timestamps and datetimes and respect datetime_class", [:jdbc] do
    @db.create_table!(:fk){timestamp :t; datetime :d}
    @db.use_timestamp_timezones = true
    t1 = Time.at(1)
    @db[:fk].insert(:t => t1, :d => t1)
    @db[:fk].map(:t).must_equal [t1]
    @db[:fk].map(:d).must_equal [t1]
    Sequel.datetime_class = DateTime
    t2 = Sequel.string_to_datetime(t1.iso8601)
    @db[:fk].map(:t).must_equal [t2]
    @db[:fk].map(:d).must_equal [t2]
  end

  it "should support sequential primary keys" do
    @db.create_table!(:fk) {primary_key :id; text :name}
    @db[:fk].insert(:name => 'abc')
    @db[:fk].insert(:name => 'def')
    @db[:fk].insert(:name => 'ghi')
    @db[:fk].order(:name).all.must_equal [
      {:id => 1, :name => 'abc'},
      {:id => 2, :name => 'def'},
      {:id => 3, :name => 'ghi'}
    ]
  end

  it "should correctly parse the schema" do
    @db.create_table!(:fk) {timestamp :t}
    h = {:generated=>false, :type=>:datetime, :allow_null=>true, :default=>nil, :ruby_default=>nil, :db_type=>"timestamp", :primary_key=>false}
    h.delete(:generated) if @db.sqlite_version < 33100
    @db.schema(:fk, :reload=>true).must_equal [[:t, h]]
  end

  it "should handle and return BigDecimal values for numeric columns" do
    DB.create_table!(:fk){numeric :d}
    d = DB[:fk]
    d.insert(:d=>BigDecimal('80.0'))
    d.insert(:d=>BigDecimal('NaN'))
    d.insert(:d=>BigDecimal('Infinity'))
    d.insert(:d=>BigDecimal('-Infinity'))
    ds = d.all
    ds.shift.must_equal(:d=>BigDecimal('80.0'))
    ds.map{|x| x[:d].to_s}.must_equal %w'NaN Infinity -Infinity'
    DB
  end
  it "should support creating and parsing generated columns" do
    @db.create_table!(:fk) do
      Integer :a
      Integer :b
      Integer :c, :generated_always_as=>Sequel[:a] * 2 + :b + 1
      Integer :d, :generated_always_as=>Sequel[:a] * 2 + :b + 2, :generated_type=>:stored
      Integer :e, :generated_always_as=>Sequel[:a] * 2 + :b + 3, :generated_type=>:virtual
    end
    @db[:fk].insert(:a=>100, :b=>10)
    @db[:fk].select_order_map([:a, :b, :c, :d, :e]).must_equal [[100, 10, 211, 212, 213]]

    # Generated columns do not show up in schema on SQLite 3.37.0 (or maybe 3.38.0)
    expected = DB.sqlite_version >= 33700 ? [false, false] : [false, false, true, true, true]
    @db.schema(:fk).map{|_,v| v[:generated]}.must_equal expected
  end if DB.sqlite_version >= 33100
end

describe "SQLite temporary views" do
  before do
    @db = DB
    @db.drop_view(:items) rescue nil
    @db.create_table!(:items){Integer :number}
    @db[:items].insert(10)
    @db[:items].insert(20)
  end
  after do
    @db.drop_table?(:items)
  end

  it "should be supported" do
    @db.create_view(:items_view, @db[:items].where(:number=>10), :temp=>true)
    @db[:items_view].map(:number).must_equal [10]
    @db.disconnect
    lambda{@db[:items_view].map(:number)}.must_raise(Sequel::DatabaseError)
  end
end

describe "SQLite VALUES support" do
  before do
    @db = DB
  end

  it "should create a dataset using the VALUES clause via #values" do
    @db.values([[1, 2], [3, 4]]).map([:column1, :column2]).must_equal [[1, 2], [3, 4]]
  end

  it "should support VALUES with unions" do
    @db.values([[1]]).union(@db.values([[3]])).map(&:values).map(&:first).must_equal [1, 3]
  end

  it "should support VALUES in CTEs" do
    @db[:a].cross_join(:b).with(:a, @db.values([[1, 2]]), :args=>[:c1, :c2]).with(:b, @db.values([[3, 4]]), :args=>[:c3, :c4]).map([:c1, :c2, :c3, :c4]).must_equal [[1, 2, 3, 4]]
  end
end if DB.sqlite_version >= 30803
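# A hedged sketch of Database#values (SQLite 3.8.3+): it builds a dataset
# from a literal VALUES clause, with columns named column1, column2, ...:
#
#   DB.values([[1, 'a'], [2, 'b']]).select_map(:column2)  # => ['a', 'b']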
describe "SQLite type conversion" do
  before do
    @db = DB
    @integer_booleans = @db.integer_booleans
    @db.integer_booleans = true
    @ds = @db[:items]
    @db.drop_table?(:items)
  end
  after do
    @db.integer_booleans = @integer_booleans
    Sequel.datetime_class = Time
    @db.drop_table?(:items)
  end

  it "should handle integers in boolean columns" do
    @db.create_table(:items){TrueClass :a}
    @db[:items].insert(false)
    @db[:items].select_map(:a).must_equal [false]
    @db[:items].select_map(Sequel.expr(:a)+:a).must_equal [0]
    @db[:items].update(:a=>true)
    @db[:items].select_map(:a).must_equal [true]
    @db[:items].select_map(Sequel.expr(:a)+:a).must_equal [2]
  end

  it "should handle integers/floats/strings/decimals in numeric/decimal columns" do
    @db.create_table(:items){Numeric :a}
    @db[:items].insert(100)
    @db[:items].select_map(:a).must_equal [BigDecimal('100')]
    @db[:items].get(:a).must_be_kind_of(BigDecimal)

    @db[:items].update(:a=>100.1)
    @db[:items].select_map(:a).must_equal [BigDecimal('100.1')]
    @db[:items].get(:a).must_be_kind_of(BigDecimal)

    @db[:items].update(:a=>'100.1')
    @db[:items].select_map(:a).must_equal [BigDecimal('100.1')]
    @db[:items].get(:a).must_be_kind_of(BigDecimal)

    @db[:items].update(:a=>BigDecimal('100.1'))
    @db[:items].select_map(:a).must_equal [BigDecimal('100.1')]
    @db[:items].get(:a).must_be_kind_of(BigDecimal)
  end

  it "should handle integer/float date columns as julian date" do
    @db.create_table(:items){Date :a}
    i = 2455979
    @db[:items].insert(i)
    @db[:items].first.must_equal(:a=>Date.jd(i))
    @db[:items].update(:a=>2455979.1)
    @db[:items].first.must_equal(:a=>Date.jd(i))
  end

  it "should handle integer/float time columns as seconds" do
    @db.create_table(:items){Time :a, :only_time=>true}
    @db[:items].insert(3661)
    @db[:items].first.must_equal(:a=>Sequel::SQLTime.create(1, 1, 1))
    @db[:items].update(:a=>3661.000001)
    @db[:items].first.must_equal(:a=>Sequel::SQLTime.create(1, 1, 1, 1))
  end

  it "should handle integer datetime columns as unix timestamp" do
    @db.create_table(:items){DateTime :a}
    i = 1329860756
    @db[:items].insert(i)
    @db[:items].first.must_equal(:a=>Time.at(i))
    Sequel.datetime_class = DateTime
    @db[:items].first.must_equal(:a=>DateTime.strptime(i.to_s, '%s'))
  end

  it "should handle float datetime columns as julian date" do
    @db.create_table(:items){DateTime :a}
    i = 2455979.5
    @db[:items].insert(i)
    @db[:items].first.must_equal(:a=>Time.at(1329825600))
    Sequel.datetime_class = DateTime
    @db[:items].first.must_equal(:a=>DateTime.jd(2455979.5))
  end

  it "should handle integer/float blob columns" do
    @db.create_table(:items){File :a}
    @db[:items].insert(1)
    @db[:items].first.must_equal(:a=>Sequel::SQL::Blob.new('1'))
    @db[:items].update(:a=>'1.1')
    @db[:items].first.must_equal(:a=>Sequel::SQL::Blob.new(1.1.to_s))
  end
end if DB.adapter_scheme == :sqlite

describe "An SQLite dataset" do
  before do
    @d = DB.dataset
  end

  it "should raise errors if given a regexp pattern match" do
    proc{@d.literal(Sequel.expr(:x).like(/a/))}.must_raise(Sequel::InvalidOperation)
    proc{@d.literal(~Sequel.expr(:x).like(/a/))}.must_raise(Sequel::InvalidOperation)
    proc{@d.literal(Sequel.expr(:x).like(/a/i))}.must_raise(Sequel::InvalidOperation)
    proc{@d.literal(~Sequel.expr(:x).like(/a/i))}.must_raise(Sequel::InvalidOperation)
  end
end unless DB.adapter_scheme == :sqlite && DB.opts[:setup_regexp_function]

describe "SQLite::Dataset#delete" do
  before do
    DB.create_table! :items do
      primary_key :id
      String :name
      Float :value
    end
    @d = DB[:items]
    @d.delete # remove all records
    @d.insert(:name => 'abc', :value => 1.23)
    @d.insert(:name => 'def', :value => 4.56)
    @d.insert(:name => 'ghi', :value => 7.89)
  end
  after do
    DB.drop_table?(:items)
  end

  it "should return the number of records affected when filtered" do
    @d.count.must_equal 3
    @d.filter{value < 3}.delete.must_equal 1
    @d.count.must_equal 2

    @d.filter{value < 3}.delete.must_equal 0
    @d.count.must_equal 2
  end

  it "should return the number of records affected when unfiltered" do
    @d.count.must_equal 3
    @d.delete.must_equal 3
    @d.count.must_equal 0

    @d.delete.must_equal 0
  end
end

describe "SQLite::Dataset#update" do
  before do
    DB.create_table! :items do
      primary_key :id
      String :name
      Float :value
    end
    @d = DB[:items]
    @d.delete # remove all records
    @d.insert(:name => 'abc', :value => 1.23)
    @d.insert(:name => 'def', :value => 4.56)
    @d.insert(:name => 'ghi', :value => 7.89)
  end

  it "should return the number of records affected" do
    @d.filter(:name => 'abc').update(:value => 2).must_equal 1
    @d.update(:value => 10).must_equal 3
    @d.filter(:name => 'xxx').update(:value => 23).must_equal 0
  end
end

describe "SQLite::Dataset#insert_conflict" do
  before(:all) do
    DB.create_table! :ic_test do
      primary_key :id
      String :name
    end
  end
  after(:each) do
    DB[:ic_test].delete
  end
  after(:all) do
    DB.drop_table?(:ic_test)
  end

  it "Dataset#insert_ignore and insert_constraint should ignore uniqueness violations" do
    DB[:ic_test].insert(:id => 1, :name => "one")
    proc {DB[:ic_test].insert(:id => 1, :name => "one")}.must_raise Sequel::ConstraintViolation

    DB[:ic_test].insert_ignore.insert(:id => 1, :name => "one")
    DB[:ic_test].all.must_equal([{:id => 1, :name => "one"}])

    DB[:ic_test].insert_conflict(:ignore).insert(:id => 1, :name => "one")
    DB[:ic_test].all.must_equal([{:id => 1, :name => "one"}])
  end

  it "Dataset#insert_constraint should handle replacement" do
    DB[:ic_test].insert(:id => 1, :name => "one")

    DB[:ic_test].insert_conflict(:replace).insert(:id => 1, :name => "two")
    DB[:ic_test].all.must_equal([{:id => 1, :name => "two"}])
  end
end
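# A hedged sketch of the SQLite conflict-resolution forms shown above:
#
#   DB[:ic_test].insert_ignore.insert(:id=>1)              # INSERT OR IGNORE
#   DB[:ic_test].insert_conflict(:replace).insert(:id=>1)  # INSERT OR REPLACE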
:items do primary_key :id String :name Float :value end @d = DB[:items] @d.insert(:name => 'abc', :value => 1.23) @d.insert(:name => 'def', :value => 4.56) @d.insert(:name => 'ghi', :value => 7.89) end after do DB.drop_table?(:test, :items) end it "should be able to insert from a subquery" do DB[:test].insert(@d) DB[:test].count.must_equal 3 DB[:test].select(:name, :value).order(:value).to_a.must_equal \ @d.select(:name, :value).order(:value).to_a end it "should support #explain" do DB[:test].explain.must_be_kind_of(String) end it "should have #explain work when identifier_output_method is modified" do DB[:test].with_identifier_output_method(:upcase).explain.must_be_kind_of(String) end if IDENTIFIER_MANGLING end describe "A SQLite database" do before do @db = DB @db.create_table! :test2 do text :name integer :value end end after do @db.drop_table?(:test, :test2, :test3, :test3_backup0, :test3_backup1, :test3_backup2) end it "should support add_column operations" do @db.add_column :test2, :xyz, :text @db[:test2].columns.must_equal [:name, :value, :xyz] @db[:test2].insert(:name => 'mmm', :value => 111, :xyz=>'000') @db[:test2].first.must_equal(:name => 'mmm', :value => 111, :xyz=>'000') end it "should support drop_column operations" do @db.drop_column :test2, :value @db[:test2].columns.must_equal [:name] @db[:test2].insert(:name => 'mmm') @db[:test2].first.must_equal(:name => 'mmm') end it "should support drop_column operations in a transaction" do @db.transaction{@db.drop_column :test2, :value} @db[:test2].columns.must_equal [:name] @db[:test2].insert(:name => 'mmm') @db[:test2].first.must_equal(:name => 'mmm') end it "should keep a composite primary key when dropping columns" do @db.create_table!(:test2){Integer :a; Integer :b; Integer :c; primary_key [:a, :b]} @db.drop_column :test2, :c @db[:test2].columns.must_equal [:a, :b] @db[:test2].insert(:a=>1, :b=>2) @db[:test2].insert(:a=>2, :b=>3) proc{@db[:test2].insert(:a=>2, :b=>3)}.must_raise(Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) end it "should keep column attributes when dropping a column" do @db.create_table! :test3 do primary_key :id text :name integer :value end # This lame set of additions and deletions are to test that the primary keys # don't get messed up when we recreate the database. @db[:test3].insert( :name => "foo", :value => 1) @db[:test3].insert( :name => "foo", :value => 2) @db[:test3].insert( :name => "foo", :value => 3) @db[:test3].filter(:id => 2).delete @db.drop_column :test3, :value @db['PRAGMA table_info(?)', :test3][:id][:pk].to_i.must_equal 1 @db[:test3].select(:id).all.must_equal [{:id => 1}, {:id => 3}] end it "should keep foreign keys when dropping a column" do @db.create_table! :test do primary_key :id String :name Integer :value end @db.create_table! 
  it "should keep foreign keys when dropping a column" do
    @db.create_table! :test do
      primary_key :id
      String :name
      Integer :value
    end
    @db.create_table! :test3 do
      String :name
      Integer :value
      foreign_key :test_id, :test, :on_delete => :set_null, :on_update => :cascade
    end

    @db[:test3].insert(:name => "abc", :test_id => @db[:test].insert(:name => "foo", :value => 3))
    @db[:test3].insert(:name => "def", :test_id => @db[:test].insert(:name => "bar", :value => 4))

    @db.drop_column :test3, :value

    @db[:test].filter(:name => 'bar').delete
    @db[:test3][:name => 'def'][:test_id].must_be_nil

    @db[:test].filter(:name => 'foo').update(:id=>100)
    @db[:test3][:name => 'abc'][:test_id].must_equal 100
  end

  it "should support rename_column operations" do
    @db[:test2].delete
    @db.add_column :test2, :xyz, :text
    @db[:test2].insert(:name => 'mmm', :value => 111, :xyz => 'qqqq')

    @db[:test2].columns.must_equal [:name, :value, :xyz]
    @db.rename_column :test2, :xyz, :zyx, :type => :text
    @db[:test2].columns.must_equal [:name, :value, :zyx]
    @db[:test2].first[:zyx].must_equal 'qqqq'
    @db[:test2].count.must_equal 1
  end

  it "should preserve defaults when dropping or renaming columns" do
    @db.create_table! :test3 do
      String :s, :default=>'a'
      Integer :i
    end

    @db[:test3].insert
    @db[:test3].first[:s].must_equal 'a'
    @db[:test3].delete
    @db.drop_column :test3, :i

    @db[:test3].insert
    @db[:test3].first[:s].must_equal 'a'
    @db[:test3].delete
    @db.rename_column :test3, :s, :t

    @db[:test3].insert
    @db[:test3].first[:t].must_equal 'a'
    @db[:test3].delete
  end

  it "should preserve autoincrement after table modification" do
    @db.create_table!(:test2) do
      primary_key :id
      Integer :val, :null => false
    end

    @db.rename_column(:test2, :val, :value)

    t = @db[:test2]
    id1 = t.insert(:value=>1)
    t.delete
    id2 = t.insert(:value=>1)
    id2.must_be :>, id1
  end
  it "should handle quoted tables when dropping or renaming columns" do
    table_name = "T T"
    @db.drop_table?(table_name)
    @db.create_table! table_name do
      Integer :"s s"
      Integer :"i i"
    end

    @db.from(table_name).insert(:"s s"=>1, :"i i"=>2)
    @db.from(table_name).all.must_equal [{:"s s"=>1, :"i i"=>2}]
    @db.drop_column table_name, :"i i"
    @db.from(table_name).all.must_equal [{:"s s"=>1}]
    @db.rename_column table_name, :"s s", :"t t"
    @db.from(table_name).all.must_equal [{:"t t"=>1}]
    @db.drop_table?(table_name)
  end

  it "should choose a temporary table name that isn't already used when dropping or renaming columns" do
    @db.tables.each{|t| @db.drop_table(t) if t.to_s =~ /test3/}
    @db.create_table :test3 do
      Integer :h
      Integer :i
    end
    @db.create_table :test3_backup0 do
      Integer :j
    end
    @db.create_table :test3_backup1 do
      Integer :k
    end

    @db[:test3].columns.must_equal [:h, :i]
    @db[:test3_backup0].columns.must_equal [:j]
    @db[:test3_backup1].columns.must_equal [:k]

    @db.drop_column(:test3, :i)

    @db[:test3].columns.must_equal [:h]
    @db[:test3_backup0].columns.must_equal [:j]
    @db[:test3_backup1].columns.must_equal [:k]

    @db.create_table :test3_backup2 do
      Integer :l
    end

    @db.rename_column(:test3, :h, :i)

    @db[:test3].columns.must_equal [:i]
    @db[:test3_backup0].columns.must_equal [:j]
    @db[:test3_backup1].columns.must_equal [:k]
    @db[:test3_backup2].columns.must_equal [:l]
  end

  it "should support add_index" do
    @db.add_index :test2, :value, :unique => true
    @db.add_index :test2, [:name, :value]
  end

  it "should support drop_index" do
    @db.add_index :test2, :value, :unique => true
    @db.drop_index :test2, :value
  end

  it "should keep applicable indexes when emulating schema methods" do
    @db.create_table!(:test3){Integer :a; Integer :b}
    @db.add_index :test3, :a
    @db.add_index :test3, :b
    @db.add_index :test3, [:b, :a]
    @db.rename_column :test3, :b, :c
    @db.indexes(:test3)[:test3_a_index].must_equal(:unique=>false, :columns=>[:a])
  end

  it "should have support for various #transaction modes" do
    @db.transaction(:mode => :immediate){}
    @db.transaction(:mode => :exclusive){}
    @db.transaction(:mode => :deferred){}
    @db.transaction{}

    @db.transaction_mode.must_be_nil
    @db.transaction_mode = :immediate
    @db.transaction_mode.must_equal :immediate
    @db.transaction{}
    @db.transaction(:mode => :exclusive){}
    proc {@db.transaction_mode = :invalid}.must_raise(Sequel::Error)
    @db.transaction_mode.must_equal :immediate
    proc {@db.transaction(:mode => :invalid) {}}.must_raise(Sequel::Error)
  end

  it "should keep unique constraints when copying tables" do
    @db.alter_table(:test2){add_unique_constraint :name}
    @db.alter_table(:test2){drop_column :value}
    @db[:test2].insert(:name=>'a')
    proc{@db[:test2].insert(:name=>'a')}.must_raise(Sequel::ConstraintViolation, Sequel::UniqueConstraintViolation)
  end

  it "should not ignore adding new constraints when adding not null constraints" do
    @db.alter_table :test2 do
      set_column_not_null :value
      add_constraint(:value_range1, :value => 3..5)
      add_constraint(:value_range2, :value => 0..9)
    end

    @db[:test2].insert(:value => 4)
    proc{@db[:test2].insert(:value => 1)}.must_raise(Sequel::ConstraintViolation)
    proc{@db[:test2].insert(:value => nil)}.must_raise(Sequel::ConstraintViolation)
    @db[:test2].select_order_map(:value).must_equal [4]
  end

  it "should show unique constraints in Database#indexes" do
    @db.alter_table(:test2){add_unique_constraint :name}
    @db.indexes(:test2).values.first[:columns].must_equal [:name]
  end if DB.sqlite_version >= 30808
end
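# A hedged sketch of SQLite transaction modes: pass :mode per call, or set a
# database-wide default via transaction_mode=:
#
#   DB.transaction(:mode=>:immediate){ ... }  # BEGIN IMMEDIATE TRANSACTION
#   DB.transaction_mode = :exclusive          # default for later transactions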
describe "SQLite", 'INSERT ON CONFLICT' do
  before(:all) do
    @db = DB
    @db.create_table!(:ic_test){Integer :a; Integer :b; Integer :c; TrueClass :c_is_unique, :default=>false; unique :a, :name=>:ic_test_a_uidx; unique [:b, :c], :name=>:ic_test_b_c_uidx; index [:c], :where=>:c_is_unique, :unique=>true}
    @ds = @db[:ic_test]
  end
  before do
    @ds.delete
  end
  after(:all) do
    @db.drop_table?(:ic_test)
  end

  it "Dataset#insert_ignore and insert_conflict should ignore uniqueness violations" do
    @ds.insert(1, 2, 3, false)
    @ds.insert(10, 11, 3, true)
    proc{@ds.insert(1, 3, 4, false)}.must_raise Sequel::UniqueConstraintViolation
    proc{@ds.insert(11, 12, 3, true)}.must_raise Sequel::UniqueConstraintViolation

    @ds.insert_ignore.insert(1, 3, 4, false)
    @ds.insert_conflict.insert(1, 3, 4, false)
    @ds.insert_conflict.insert(11, 12, 3, true)
    @ds.insert_conflict(:target=>:a).insert(1, 3, 4, false)
    @ds.insert_conflict(:target=>:c, :conflict_where=>:c_is_unique).insert(11, 12, 3, true)
    @ds.all.must_equal [{:a=>1, :b=>2, :c=>3, :c_is_unique=>false}, {:a=>10, :b=>11, :c=>3, :c_is_unique=>true}]
  end unless DB.adapter_scheme == :amalgalite

  it "Dataset#insert_ignore and insert_conflict should work with multi_insert/import" do
    @ds.insert(1, 2, 3, false)
    @ds.insert_ignore.multi_insert([{:a=>1, :b=>3, :c=>4}])
    @ds.insert_ignore.import([:a, :b, :c], [[1, 3, 4]])
    @ds.all.must_equal [{:a=>1, :b=>2, :c=>3, :c_is_unique=>false}]

    @ds.insert_conflict(:target=>:a, :update=>{:b=>3}).import([:a, :b, :c], [[1, 3, 4]])
    @ds.all.must_equal [{:a=>1, :b=>3, :c=>3, :c_is_unique=>false}]

    @ds.insert_conflict(:target=>:a, :update=>{:b=>4}).multi_insert([{:a=>1, :b=>5, :c=>6}])
    @ds.all.must_equal [{:a=>1, :b=>4, :c=>3, :c_is_unique=>false}]
  end

  it "Dataset#insert_conflict should handle upserts" do
    @ds.insert(1, 2, 3, false)
    @ds.insert_conflict(:target=>:a, :update=>{:b=>3}).insert(1, 3, 4, false)
    @ds.all.must_equal [{:a=>1, :b=>3, :c=>3, :c_is_unique=>false}]

    @ds.insert_conflict(:target=>[:b, :c], :update=>{:c=>5}).insert(5, 3, 3, false)
    @ds.all.must_equal [{:a=>1, :b=>3, :c=>5, :c_is_unique=>false}]

    @ds.insert_conflict(:target=>:a, :update=>{:b=>4}).insert(1, 3, nil, false)
    @ds.all.must_equal [{:a=>1, :b=>4, :c=>5, :c_is_unique=>false}]

    @ds.insert_conflict(:target=>:a, :update=>{:b=>5}, :update_where=>{Sequel[:ic_test][:b]=>4}).insert(1, 3, 4, false)
    @ds.all.must_equal [{:a=>1, :b=>5, :c=>5, :c_is_unique=>false}]

    @ds.insert_conflict(:target=>:a, :update=>{:b=>6}, :update_where=>{Sequel[:ic_test][:b]=>4}).insert(1, 3, 4, false)
    @ds.all.must_equal [{:a=>1, :b=>5, :c=>5, :c_is_unique=>false}]
  end
end if DB.sqlite_version >= 32400

describe 'SQLite STRICT tables' do
  before do
    @db = DB
  end
  after do
    @db.drop_table?(:strict_table)
  end

  it "supports creation via :strict option" do
    @db = DB
    @db.create_table(:strict_table, :strict=>true) do
      primary_key :id
      int :a
      integer :b
      real :c
      text :d
      blob :e
      any :f
    end
    ds = @db[:strict_table]
    ds.insert(:id=>1, :a=>2, :b=>3, :c=>1.2, :d=>'foo', :e=>Sequel.blob("\0\1\2\3"), :f=>'f')
    ds.all.must_equal [{:id=>1, :a=>2, :b=>3, :c=>1.2, :d=>'foo', :e=>Sequel.blob("\0\1\2\3"), :f=>'f'}]
    proc{ds.insert(:a=>'a')}.must_raise Sequel::ConstraintViolation
    proc{ds.insert(:b=>'a')}.must_raise Sequel::ConstraintViolation
    proc{ds.insert(:c=>'a')}.must_raise Sequel::ConstraintViolation
    proc{ds.insert(:d=>Sequel.blob("\0\1\2\3"))}.must_raise Sequel::ConstraintViolation
    proc{ds.insert(:e=>1)}.must_raise Sequel::ConstraintViolation
  end
end if DB.sqlite_version >= 33700
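# A hedged sketch of STRICT table creation (SQLite 3.37+): the :strict
# option adds the STRICT keyword so SQLite enforces the declared column
# types (table/column names illustrative):
#
#   DB.create_table(:strict_table, :strict=>true) do
#     primary_key :id
#     int :a
#     any :f   # ANY columns still accept any value
#   end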
describe 'SQLite Database' do
  it 'supports operations/functions with sqlite_json_ops' do
    Sequel.extension :sqlite_json_ops
    @db = DB
    jo = Sequel.sqlite_json_op('{"a": 1 ,"b": {"c": 2, "d": {"e": 3}}}')
    ja = Sequel.sqlite_json_op('[2, 3, ["a", "b"]]')

    @db.get(jo['a']).must_equal 1
    @db.get(jo.get('b')['c']).must_equal 2
    @db.get(jo['$.b.c']).must_equal 2
    @db.get(jo['b'].get_json('$.d.e')).must_equal "3"
    @db.get(jo['$.b.d'].get_json('e')).must_equal "3"
    @db.get(ja[1]).must_equal 3
    @db.get(ja['$[2][1]']).must_equal 'b'

    @db.get(ja.get_json(1)).must_equal '3'
    @db.get(ja.get_json('$[2][1]')).must_equal '"b"'

    @db.get(jo.extract('$.a')).must_equal 1
    @db.get(jo.extract('$.a', '$.b.c')).must_equal '[1,2]'
    @db.get(jo.extract('$.a', '$.b.d.e')).must_equal '[1,3]'

    @db.get(ja.array_length).must_equal 3
    @db.get(ja.array_length('$[2]')).must_equal 2

    @db.get(jo.type).must_equal 'object'
    @db.get(ja.type).must_equal 'array'
    @db.get(jo.typeof).must_equal 'object'
    @db.get(ja.typeof).must_equal 'array'
    @db.get(jo.type('$.a')).must_equal 'integer'
    @db.get(ja.typeof('$[2][1]')).must_equal 'text'

    @db.from(jo.each).all.must_equal [
      {:key=>"a", :value=>1, :type=>"integer", :atom=>1, :id=>2, :parent=>nil, :fullkey=>"$.a", :path=>"$"},
      {:key=>"b", :value=>"{\"c\":2,\"d\":{\"e\":3}}", :type=>"object", :atom=>nil, :id=>4, :parent=>nil, :fullkey=>"$.b", :path=>"$"}]
    @db.from(jo.each('$.b')).all.must_equal [
      {:key=>"c", :value=>2, :type=>"integer", :atom=>2, :id=>6, :parent=>nil, :fullkey=>"$.b.c", :path=>"$.b"},
      {:key=>"d", :value=>"{\"e\":3}", :type=>"object", :atom=>nil, :id=>8, :parent=>nil, :fullkey=>"$.b.d", :path=>"$.b"}]
    @db.from(ja.each).all.must_equal [
      {:key=>0, :value=>2, :type=>"integer", :atom=>2, :id=>1, :parent=>nil, :fullkey=>"$[0]", :path=>"$"},
      {:key=>1, :value=>3, :type=>"integer", :atom=>3, :id=>2, :parent=>nil, :fullkey=>"$[1]", :path=>"$"},
      {:key=>2, :value=>"[\"a\",\"b\"]", :type=>"array", :atom=>nil, :id=>3, :parent=>nil, :fullkey=>"$[2]", :path=>"$"}]
    @db.from(ja.each('$[2]')).all.must_equal [
      {:key=>0, :value=>"a", :type=>"text", :atom=>"a", :id=>4, :parent=>nil, :fullkey=>"$[2][0]", :path=>"$[2]"},
      {:key=>1, :value=>"b", :type=>"text", :atom=>"b", :id=>5, :parent=>nil, :fullkey=>"$[2][1]", :path=>"$[2]"}]

    @db.from(jo.tree).all.must_equal [
      {:key=>nil, :value=>"{\"a\":1,\"b\":{\"c\":2,\"d\":{\"e\":3}}}", :type=>"object", :atom=>nil, :id=>0, :parent=>nil, :fullkey=>"$", :path=>"$"},
      {:key=>"a", :value=>1, :type=>"integer", :atom=>1, :id=>2, :parent=>0, :fullkey=>"$.a", :path=>"$"},
      {:key=>"b", :value=>"{\"c\":2,\"d\":{\"e\":3}}", :type=>"object", :atom=>nil, :id=>4, :parent=>0, :fullkey=>"$.b", :path=>"$"},
      {:key=>"c", :value=>2, :type=>"integer", :atom=>2, :id=>6, :parent=>4, :fullkey=>"$.b.c", :path=>"$.b"},
      {:key=>"d", :value=>"{\"e\":3}", :type=>"object", :atom=>nil, :id=>8, :parent=>4, :fullkey=>"$.b.d", :path=>"$.b"},
      {:key=>"e", :value=>3, :type=>"integer", :atom=>3, :id=>10, :parent=>8, :fullkey=>"$.b.d.e", :path=>"$.b.d"}]
    @db.from(jo.tree('$.b')).all.must_equal [
      {:key=>"b", :value=>"{\"c\":2,\"d\":{\"e\":3}}", :type=>"object", :atom=>nil, :id=>4, :parent=>nil, :fullkey=>"$.b", :path=>"$"},
      {:key=>"c", :value=>2, :type=>"integer", :atom=>2, :id=>6, :parent=>4, :fullkey=>"$.b.c", :path=>"$.b"},
      {:key=>"d", :value=>"{\"e\":3}", :type=>"object", :atom=>nil, :id=>8, :parent=>4, :fullkey=>"$.b.d", :path=>"$.b"},
      {:key=>"e", :value=>3, :type=>"integer", :atom=>3, :id=>10, :parent=>8, :fullkey=>"$.b.d.e", :path=>"$.b.d"}]
{:key=>2, :value=>"[\"a\",\"b\"]", :type=>"array", :atom=>nil, :id=>3, :parent=>0, :fullkey=>"$[2]", :path=>"$"},
      {:key=>0, :value=>"a", :type=>"text", :atom=>"a", :id=>4, :parent=>3, :fullkey=>"$[2][0]", :path=>"$[2]"},
      {:key=>1, :value=>"b", :type=>"text", :atom=>"b", :id=>5, :parent=>3, :fullkey=>"$[2][1]", :path=>"$[2]"}]
    @db.from(ja.tree('$[2]')).all.must_equal [
      {:key=>nil, :value=>"[\"a\",\"b\"]", :type=>"array", :atom=>nil, :id=>3, :parent=>nil, :fullkey=>"$[0]", :path=>"$"},
      {:key=>0, :value=>"a", :type=>"text", :atom=>"a", :id=>4, :parent=>3, :fullkey=>"$[0][0]", :path=>"$[0]"},
      {:key=>1, :value=>"b", :type=>"text", :atom=>"b", :id=>5, :parent=>3, :fullkey=>"$[0][1]", :path=>"$[0]"}]

    @db.get(jo.json).must_equal '{"a":1,"b":{"c":2,"d":{"e":3}}}'
    @db.get(ja.minify).must_equal '[2,3,["a","b"]]'

    @db.get(ja.insert('$[1]', 5)).must_equal '[2,3,["a","b"]]'
    @db.get(ja.replace('$[1]', 5)).must_equal '[2,5,["a","b"]]'
    @db.get(ja.set('$[1]', 5)).must_equal '[2,5,["a","b"]]'
    @db.get(ja.insert('$[3]', 5)).must_equal '[2,3,["a","b"],5]'
    @db.get(ja.replace('$[3]', 5)).must_equal '[2,3,["a","b"]]'
    @db.get(ja.set('$[3]', 5)).must_equal '[2,3,["a","b"],5]'
    @db.get(ja.insert('$[1]', 5, '$[3]', 6)).must_equal '[2,3,["a","b"],6]'
    @db.get(ja.replace('$[1]', 5, '$[3]', 6)).must_equal '[2,5,["a","b"]]'
    @db.get(ja.set('$[1]', 5, '$[3]', 6)).must_equal '[2,5,["a","b"],6]'

    @db.get(jo.insert('$.f', 4)).must_equal '{"a":1,"b":{"c":2,"d":{"e":3}},"f":4}'
    @db.get(jo.replace('$.f', 4)).must_equal '{"a":1,"b":{"c":2,"d":{"e":3}}}'
    @db.get(jo.set('$.f', 4)).must_equal '{"a":1,"b":{"c":2,"d":{"e":3}},"f":4}'
    @db.get(jo.insert('$.a', 4)).must_equal '{"a":1,"b":{"c":2,"d":{"e":3}}}'
    @db.get(jo.replace('$.a', 4)).must_equal '{"a":4,"b":{"c":2,"d":{"e":3}}}'
    @db.get(jo.set('$.a', 4)).must_equal '{"a":4,"b":{"c":2,"d":{"e":3}}}'
    @db.get(jo.insert('$.f', 4, '$.a', 5)).must_equal '{"a":1,"b":{"c":2,"d":{"e":3}},"f":4}'
    @db.get(jo.replace('$.f', 4, '$.a', 5)).must_equal '{"a":5,"b":{"c":2,"d":{"e":3}}}'
    @db.get(jo.set('$.f', 4, '$.a', 5)).must_equal '{"a":5,"b":{"c":2,"d":{"e":3}},"f":4}'

    @db.get(jo.patch('{"e": 4, "b": 5, "a": null}')).must_equal '{"b":5,"e":4}'

    @db.get(ja.remove('$[1]')).must_equal '[2,["a","b"]]'
    @db.get(ja.remove('$[1]', '$[1]')).must_equal '[2]'
    @db.get(jo.remove('$.a')).must_equal '{"b":{"c":2,"d":{"e":3}}}'
    @db.get(jo.remove('$.a', '$.b.c')).must_equal '{"b":{"d":{"e":3}}}'

    @db.get(jo.valid).must_equal 1
    @db.get(ja.valid).must_equal 1
  end
end if DB.sqlite_version >= 33800

sequel-5.63.0/spec/async_spec_helper.rb

class Minitest::Spec
  if ENV['SEQUEL_ASYNC_THREAD_POOL'] || ENV['SEQUEL_ASYNC_THREAD_POOL_PREEMPT'] || ENV['SEQUEL_EAGER_ASYNC']
    use_async = true
    if ENV['SEQUEL_ASYNC_THREAD_POOL_PREEMPT']
      ::DB.opts[:preempt_async_thread] = true
    end
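    # This helper enables Sequel's async_thread_pool Database extension for the
    # suite. As an illustrative sketch only (not part of this helper, and
    # assuming an albums table), the extension is used by calling #async on a
    # dataset, which returns a proxy whose result is loaded on a background
    # thread; calling any method on the proxy blocks until the result is ready:
    #
    #   DB.extension :async_thread_pool
    #   rows = DB[:albums].async.all   # returns immediately
    #   rows.size                      # blocks until the background load finishes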
    ::DB.opts[:num_async_threads] = 12
    ::DB.extension :async_thread_pool
    if ENV['SEQUEL_EAGER_ASYNC']
      Sequel::Model.plugin :concurrent_eager_loading, :always=>true
    end
  end

  if use_async && DB.pool.pool_type == :threaded && (!DB.opts[:max_connections] || DB.opts[:max_connections] >= 4)
    def async?
      true
    end

    def wait
      yield.tap{}
    end
  else
    def async?
      false
    end

    def wait
      yield
    end
  end
end

sequel-5.63.0/spec/bin_shim

load(File.join(Dir.pwd, 'bin/sequel'))

sequel-5.63.0/spec/bin_spec.rb

require 'rbconfig'
require 'yaml'

if ENV['COVERAGE']
  require_relative "sequel_coverage"
  SimpleCov.sequel_coverage(:subprocesses=>true)
end

RUBY = File.join(RbConfig::CONFIG['bindir'], RbConfig::CONFIG['RUBY_INSTALL_NAME'])
OUTPUT = "spec/bin-sequel-spec-output-#{$$}.log"
TMP_FILE = "spec/bin-sequel-tmp-#{$$}.rb"
BIN_SPEC_DB = "spec/bin-sequel-spec-db-#{$$}.sqlite3"
BIN_SPEC_DB2 = "spec/bin-sequel-spec-db2-#{$$}.sqlite3"

if defined?(RUBY_ENGINE) && RUBY_ENGINE == 'jruby'
  CONN_PREFIX = 'jdbc:sqlite:'
  CONN_HASH = {:adapter=>'jdbc', :uri=>"#{CONN_PREFIX}#{BIN_SPEC_DB}"}
else
  CONN_PREFIX = 'sqlite://'
  CONN_HASH = {:adapter=>'sqlite', :database=>BIN_SPEC_DB}
end

require_relative '../lib/sequel'
Sequel::DB = nil
File.delete(BIN_SPEC_DB) if File.file?(BIN_SPEC_DB)
File.delete(BIN_SPEC_DB2) if File.file?(BIN_SPEC_DB2)
DB = Sequel.connect("#{CONN_PREFIX}#{BIN_SPEC_DB}", :test=>false)
DB2 = Sequel.connect("#{CONN_PREFIX}#{BIN_SPEC_DB2}", :test=>false)

ENV['MT_NO_PLUGINS'] = '1' # Work around stupid autoloading of plugins
gem 'minitest'
require 'minitest/global_expectations/autorun'

describe "bin/sequel" do
  def bin(opts={})
    cmd = "#{opts[:pre]}\"#{RUBY}\" -I lib spec/bin_shim #{opts[:args]} #{"#{CONN_PREFIX}#{BIN_SPEC_DB}" unless opts[:no_conn]} #{opts[:post]}> #{OUTPUT}#{" 2>&1" if opts[:stderr]}"
    system(cmd)
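    # system runs bin/sequel (through spec/bin_shim) with the given arguments,
    # redirecting its output to OUTPUT; the file is read back below so each
    # spec can assert against the command's output.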
File.read(OUTPUT) end int_type = DB.sqlite_version >= 33700 ? "INTEGER" : 'integer' after do DB.disconnect DB2.disconnect [BIN_SPEC_DB, BIN_SPEC_DB2, TMP_FILE, OUTPUT].each do |file| if File.file?(file) begin File.delete(file) rescue Errno::ENOENT nil end end end end it "-h should print the help" do help = bin(:args=>"-h", :no_conn=>true) help.must_match(/\ASequel: The Database Toolkit for Ruby/) help.must_match(/^Usage: sequel /) end it "-c should run code" do bin(:args=>'-c "print DB.tables.inspect"').must_equal '[]' DB.create_table(:a){Integer :a} bin(:args=>'-c "print DB.tables.inspect"').must_equal '[:a]' bin(:args=>'-v -c "print DB.tables.inspect"').strip.must_equal "sequel #{Sequel.version}\n[:a]" end it "-C should copy databases" do DB.create_table(:a) do primary_key :a String :name end DB.create_table(:b) do foreign_key :a, :a index :a end DB[:a].insert(1, 'foo') DB[:b].insert(1) bin(:args=>'-C', :post=>"#{CONN_PREFIX}#{BIN_SPEC_DB2}").must_match Regexp.new(<<END) Databases connections successful Migrations dumped successfully Tables created Begin copying data Begin copying records for table: a Finished copying 1 records for table: a Begin copying records for table: b Finished copying 1 records for table: b Finished copying data Begin creating indexes Finished creating indexes Begin adding foreign key constraints Finished adding foreign key constraints Database copy finished in \\d+\\.\\d+ seconds END DB2.tables.sort_by{|t| t.to_s}.must_equal [:a, :b] DB[:a].all.must_equal [{:a=>1, :name=>'foo'}] DB[:b].all.must_equal [{:a=>1}] DB2.schema(:a).map{|col, sch| [col, *sch.values_at(:allow_null, :default, :primary_key, :db_type, :type, :ruby_default)]}.must_equal [[:a, false, nil, true, int_type, :integer, nil], [:name, true, nil, false, "varchar(255)", :string, nil]] DB2.schema(:b).map{|col, sch| [col, *sch.values_at(:allow_null, :default, :primary_key, :db_type, :type, :ruby_default)]}.must_equal [[:a, true, nil, false, int_type, :integer, nil]] DB2.indexes(:a).must_equal({}) DB2.indexes(:b).must_equal(:b_a_index=>{:unique=>false, :columns=>[:a]}) DB2.foreign_key_list(:a).must_equal [] DB2.foreign_key_list(:b).must_equal [{:columns=>[:a], :table=>:a, :key=>nil, :on_update=>:no_action, :on_delete=>:no_action}] end it "-C should copy databases showing status while iterating over tables" do DB.create_table(:a) do primary_key :a String :name end DB.create_table(:b) do foreign_key :a, :a index :a end DB[:a].insert(1, 'foo') begin ENV['SEQUEL_BIN_STATUS_ALL_LINES'] = '1' bin(:args=>'-C', :post=>"#{CONN_PREFIX}#{BIN_SPEC_DB2}").must_match Regexp.new(<<END) Databases connections successful Migrations dumped successfully Tables created Begin copying data Begin copying records for table: a Status: 1 records copied Finished copying 1 records for table: a Begin copying records for table: b Finished copying 0 records for table: b Finished copying data Begin creating indexes Finished creating indexes Begin adding foreign key constraints Finished adding foreign key constraints Database copy finished in \\d+\\.\\d+ seconds END ensure ENV.delete('SEQUEL_BIN_STATUS_ALL_LINES') end DB2.tables.sort_by{|t| t.to_s}.must_equal [:a, :b] DB[:a].all.must_equal [{:a=>1, :name=>'foo'}] DB[:b].all.must_equal [] DB2.schema(:a).map{|col, sch| [col, *sch.values_at(:allow_null, :default, :primary_key, :db_type, :type, :ruby_default)]}.must_equal [[:a, false, nil, true, int_type, :integer, nil], [:name, true, nil, false, "varchar(255)", :string, nil]] DB2.schema(:b).map{|col, sch| [col, *sch.values_at(:allow_null, 
:default, :primary_key, :db_type, :type, :ruby_default)]}.must_equal [[:a, true, nil, false, int_type, :integer, nil]] DB2.indexes(:a).must_equal({}) DB2.indexes(:b).must_equal(:b_a_index=>{:unique=>false, :columns=>[:a]}) DB2.foreign_key_list(:a).must_equal [] DB2.foreign_key_list(:b).must_equal [{:columns=>[:a], :table=>:a, :key=>nil, :on_update=>:no_action, :on_delete=>:no_action}] end it "-C should display error if not given second database" do bin(:args=>'-C', :stderr=>true).must_include 'Error: Must specify database connection string or path to yaml file as second argument for database you want to copy to' end it "-C should convert integer to bigint when copying from SQLite to other databases" do DB.create_table(:a) do Integer :id end bin(:args=>'-EC', :post=>"mock://postgres").must_include 'CREATE TABLE "a" ("id" bigint)' end it "-d and -D should dump generic and specific migrations" do DB.create_table(:a) do primary_key :a String :name end DB.create_table(:b) do foreign_key :a, :a index :a end bin(:args=>'-d').must_equal <<END Sequel.migration do change do create_table(:a) do primary_key :a String :name, :size=>255 end create_table(:b, :ignore_index_errors=>true) do foreign_key :a, :a index [:a] end end end END bin(:args=>'-D').must_equal <<END Sequel.migration do change do create_table(:a) do primary_key :a column :name, "varchar(255)" end create_table(:b) do foreign_key :a, :a index [:a] end end end END end it "-E should echo SQL statements to stdout" do bin(:args=>'-E -c DB.tables').must_include "SELECT * FROM `sqlite_master` WHERE ((`name` != 'sqlite_sequence') AND (`type` = 'table'))" end it "-I should include directory in load path" do bin(:args=>'-Ifoo -c "p 1 if $:.include?(\'foo\')"').must_equal "1\n" end it "-l should log SQL statements to file" do bin(:args=>"-l #{TMP_FILE} -c DB.tables").must_equal '' File.read(TMP_FILE).must_include "SELECT * FROM `sqlite_master` WHERE ((`name` != 'sqlite_sequence') AND (`type` = 'table'))" end it "-L should load all *.rb files in given directory" do bin(:args=>'-r ./lib/sequel/extensions/migration -L ./spec/files/integer_migrations -c "p Sequel::Migration.descendants.length"').must_equal "3\n" end it "-m should migrate database up" do bin(:args=>"-m spec/files/integer_migrations").must_equal '' DB.tables.sort_by{|t| t.to_s}.must_equal [:schema_info, :sm1111, :sm2222, :sm3333] end it "-M should specify version to migrate to" do bin(:args=>"-m spec/files/integer_migrations -M 2").must_equal '' DB.tables.sort_by{|t| t.to_s}.must_equal [:schema_info, :sm1111, :sm2222] end it "-N should not test for a valid connection" do bin(:no_conn=>true, :args=>"-c '' -N #{CONN_PREFIX}spec/nonexistent/foo").must_equal '' bin(:no_conn=>true, :args=>"-c '' #{CONN_PREFIX}spec/nonexistent/foo", :stderr=>true).must_match(/\AError: Sequel::DatabaseConnectionError: /) end it "-r should require a given library" do bin(:args=>'-rsequel/extensions/sql_expr -c "print DB.literal(1.sql_expr)"').must_equal "1" end it "-S should dump the schema cache" do bin(:args=>"-S #{TMP_FILE}").must_equal '' Marshal.load(File.read(TMP_FILE)).must_equal({}) DB.create_table(:a){Integer :a} bin(:args=>"-S #{TMP_FILE}").must_equal '' h = Marshal.load(File.read(TMP_FILE)) h.keys.must_equal ['`a`'] column, schema = h.values.first.first column.must_equal :a schema[:type].must_equal :integer schema[:db_type].must_equal int_type schema.fetch(:ruby_default).must_be_nil schema.fetch(:default).must_be_nil schema[:allow_null].must_equal true schema[:primary_key].must_equal false end it "-X 
should dump the index cache" do bin(:args=>"-X #{TMP_FILE}").must_equal '' Marshal.load(File.read(TMP_FILE)).must_equal({}) DB.create_table(:a){Integer :id} DB.create_table(:b){Integer :b, index: {name: "idx_test", unique: true}} bin(:args=>"-X #{TMP_FILE}").must_equal '' Marshal.load(File.read(TMP_FILE)).must_equal("`a`"=>{}, "`b`"=>{:idx_test=>{:unique=>true, :columns=>[:b]}}) end it "-t should output full backtraces on error" do bin(:args=>'-c "lambda{lambda{lambda{raise \'foo\'}.call}.call}.call"', :stderr=>true).count("\n").must_be :<, 3 bin(:args=>'-t -c "lambda{lambda{lambda{raise \'foo\'}.call}.call}.call"', :stderr=>true).count("\n").must_be :>, 3 end it "-v should output the Sequel version and exit if database is not given" do bin(:args=>"-v", :no_conn=>true).strip.must_equal "sequel #{Sequel.version}" end it "should error if using -M without -m" do bin(:args=>'-M 2', :stderr=>true).must_equal "Error: Must specify -m if using -M\n" end it "should error if using mutually exclusive options together" do bin(:args=>'-c foo -d', :stderr=>true).must_equal "Error: Cannot specify -c and -d together\n" bin(:args=>'-D -d', :stderr=>true).must_equal "Error: Cannot specify -D and -d together\n" bin(:args=>'-m foo -d', :stderr=>true).must_equal "Error: Cannot specify -m and -d together\n" bin(:args=>'-S foo -d', :stderr=>true).must_equal "Error: Cannot specify -S and -d together\n" bin(:args=>'-S foo -C', :stderr=>true).must_equal "Error: Cannot specify -S and -C together\n" end it "should warn if providing too many arguments" do bin(:args=>'-c "" "" 1 2 3 4', :stderr=>true).must_equal "Warning: last 5 arguments ignored\n" end it "should use a mock database if no database is given" do bin(:args=>'-c "print DB.adapter_scheme"', :no_conn=>true).must_equal "mock" end it "should work with a yaml config file" do File.open(TMP_FILE, 'wb'){|f| f.write(YAML.dump(CONN_HASH))} bin(:args=>"-c \"print DB.tables.inspect\" #{TMP_FILE}", :no_conn=>true).must_equal "[]" DB.create_table(:a){Integer :a} bin(:args=>"-c \"print DB.tables.inspect\" #{TMP_FILE}", :no_conn=>true).must_equal "[:a]" end it "should work with a yaml config file with string keys" do h = {} CONN_HASH.each{|k,v| h[k.to_s] = v} File.open(TMP_FILE, 'wb'){|f| f.write(YAML.dump(h))} DB.create_table(:a){Integer :a} bin(:args=>"-c \"print DB.tables.inspect\" #{TMP_FILE}", :no_conn=>true).must_equal "[:a]" end it "should work with a yaml config file with environments" do File.open(TMP_FILE, 'wb'){|f| f.write(YAML.dump(:development=>CONN_HASH))} bin(:args=>"-c \"print DB.tables.inspect\" #{TMP_FILE}", :no_conn=>true).must_equal "[]" DB.create_table(:a){Integer :a} bin(:args=>"-c \"print DB.tables.inspect\" #{TMP_FILE}", :no_conn=>true).must_equal "[:a]" end it "-e should set environment for yaml config file" do File.open(TMP_FILE, 'wb'){|f| f.write(YAML.dump(:foo=>CONN_HASH))} bin(:args=>"-c \"print DB.tables.inspect\" -e foo #{TMP_FILE}", :no_conn=>true).must_equal "[]" DB.create_table(:a){Integer :a} bin(:args=>"-c \"print DB.tables.inspect\" -e foo #{TMP_FILE}", :no_conn=>true).must_equal "[:a]" File.open(TMP_FILE, 'wb'){|f| f.write(YAML.dump('foo'=>CONN_HASH))} bin(:args=>"-c \"print DB.tables.inspect\" -e foo #{TMP_FILE}", :no_conn=>true).must_equal "[:a]" end it "should run code in given filenames" do File.open(TMP_FILE, 'wb'){|f| f.write('print DB.tables.inspect')} bin(:post=>TMP_FILE).must_equal '[]' DB.create_table(:a){Integer :a} bin(:post=>TMP_FILE).must_equal '[:a]' bin(:post=>TMP_FILE, :args=>'-v').strip.must_equal "sequel 
#{Sequel.version}\n[:a]"
  end

  it "should run code provided on stdin" do
    bin(:pre=>'echo print DB.tables.inspect | ').must_equal '[]'
    DB.create_table(:a){Integer :a}
    bin(:pre=>'echo print DB.tables.inspect | ').must_equal '[:a]'
  end
end

sequel-5.63.0/spec/core/

sequel-5.63.0/spec/core/connection_pool_spec.rb

require_relative "spec_helper"
require_relative '../../lib/sequel/connection_pool/sharded_threaded'

connection_pool_defaults = {:pool_timeout=>5, :max_connections=>4}
st_connection_pool_defaults = connection_pool_defaults.merge(:single_threaded=>true)

mock_db = lambda do |a=nil, opts={}, &b|
  db = Sequel.mock(opts)
  db.define_singleton_method(:connect){|c| b.arity == 1 ? b.call(c) : b.call} if b
  if b2 = a
    db.define_singleton_method(:disconnect_connection){|c| b2.arity == 1 ?
b2.call(c) : b2.call} end # Work around JRuby Issue #3854 db.singleton_class.send(:public, :connect, :disconnect_connection) db end describe "An empty ConnectionPool" do before do @cpool = Sequel::ConnectionPool.get_pool(mock_db.call, connection_pool_defaults) end it "should have no available connections" do @cpool.available_connections.must_equal [] end it "should have no allocated connections" do @cpool.allocated.must_equal({}) end it "should have a size of zero" do @cpool.size.must_equal 0 end it "should support specific pool class" do pool = Sequel::ConnectionPool.get_pool(mock_db.call, :pool_class=>Sequel::ShardedThreadedConnectionPool) pool.must_be_instance_of Sequel::ShardedThreadedConnectionPool end it "should raise Error for bad pool class" do proc{Sequel::ConnectionPool.get_pool(mock_db.call, :pool_class=>:foo)}.must_raise Sequel::Error end end describe "ConnectionPool options" do it "should support string option values" do cpool = Sequel::ConnectionPool.get_pool(mock_db.call, {:max_connections=>'5', :pool_timeout=>'3'}) cpool.max_size.must_equal 5 cpool.instance_variable_get(:@timeout).must_equal 3 end it "should raise an error unless size is positive" do lambda{Sequel::ConnectionPool.get_pool(mock_db.call{1}, :max_connections=>0)}.must_raise(Sequel::Error) lambda{Sequel::ConnectionPool.get_pool(mock_db.call{1}, :max_connections=>-10)}.must_raise(Sequel::Error) lambda{Sequel::ConnectionPool.get_pool(mock_db.call{1}, :max_connections=>'-10')}.must_raise(Sequel::Error) lambda{Sequel::ConnectionPool.get_pool(mock_db.call{1}, :max_connections=>'0')}.must_raise(Sequel::Error) end end describe "A connection pool handling connections" do before do @max_size = 2 msp = proc{@max_size=3} @cpool = Sequel::ConnectionPool.get_pool(mock_db.call(proc{|c| msp.call}){:got_connection}, connection_pool_defaults.merge(:max_connections=>@max_size)) end it "#hold should increment #size" do @cpool.hold do @cpool.size.must_equal 1 @cpool.hold {@cpool.hold {@cpool.size.must_equal 1}} Thread.new{@cpool.hold {_(@cpool.size).must_equal 2}}.join end end it "#hold should add the connection to the #allocated array" do @cpool.hold do @cpool.allocated.size.must_equal 1 Hash[@cpool.allocated.to_a].must_equal(Thread.current=>:got_connection) end end it "#hold should yield a new connection" do @cpool.hold {|conn| conn.must_equal :got_connection} end it "a connection should be de-allocated after it has been used in #hold" do @cpool.hold {} @cpool.allocated.size.must_equal 0 end it "#hold should return the value of its block" do @cpool.hold {:block_return}.must_equal :block_return end it "#make_new should not make more than max_size connections" do q = Queue.new 50.times{Thread.new{@cpool.hold{q.pop}}} 50.times{q.push nil} @cpool.size.must_be :<=, @max_size end it "database's disconnect connection method should be called when a disconnect is detected" do @max_size.must_equal 2 proc{@cpool.hold{raise Sequel::DatabaseDisconnectError}}.must_raise(Sequel::DatabaseDisconnectError) @max_size.must_equal 3 end it "#hold should remove the connection if a DatabaseDisconnectError is raised" do @cpool.size.must_equal 0 q, q1 = Queue.new, Queue.new @cpool.hold{Thread.new{@cpool.hold{q1.pop; q.push nil}; q1.pop; q.push nil}; q1.push nil; q.pop; q1.push nil; q.pop} @cpool.size.must_equal 2 proc{@cpool.hold{raise Sequel::DatabaseDisconnectError}}.must_raise(Sequel::DatabaseDisconnectError) @cpool.size.must_equal 1 proc{@cpool.hold{raise Sequel::DatabaseDisconnectError}}.must_raise(Sequel::DatabaseDisconnectError) 
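    # Each DatabaseDisconnectError raised inside hold above removes one pooled
    # connection (via the database's disconnect_connection method), so the pool
    # has shrunk from 2 to 1 to 0; a further disconnect below leaves it at 0.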
    @cpool.size.must_equal 0
    proc{@cpool.hold{raise Sequel::DatabaseDisconnectError}}.must_raise(Sequel::DatabaseDisconnectError)
    @cpool.size.must_equal 0
  end
end

describe "A connection pool handling connection errors" do
  it "#hold should raise a Sequel::DatabaseConnectionError if an exception is raised by the connection_proc" do
    cpool = Sequel::ConnectionPool.get_pool(mock_db.call{raise Interrupt}, connection_pool_defaults)
    proc{cpool.hold{:block_return}}.must_raise(Sequel::DatabaseConnectionError)
    cpool.size.must_equal 0
  end

  it "#hold should raise a Sequel::DatabaseConnectionError if nil is returned by the connection_proc" do
    cpool = Sequel::ConnectionPool.get_pool(mock_db.call{nil}, connection_pool_defaults)
    proc{cpool.hold{:block_return}}.must_raise(Sequel::DatabaseConnectionError)
    cpool.size.must_equal 0
  end
end

describe "ConnectionPool#hold" do
  before do
    value = 0
    c = @c = Class.new do
      define_method(:initialize){value += 1}
      define_method(:value){value}
    end
    @pool = Sequel::ConnectionPool.get_pool(mock_db.call{c.new}, connection_pool_defaults)
  end

  it "should use the database's connect method to get new connections" do
    res = nil
    @pool.hold {|c| res = c}
    res.must_be_kind_of(@c)
    res.value.must_equal 1
    @pool.hold {|c| res = c}
    res.must_be_kind_of(@c)
    res.value.must_equal 1 # the connection maker is invoked only once
  end

  it "should be re-entrant by the same thread" do
    cc = nil
    @pool.hold {|c| @pool.hold {|c1| @pool.hold {|c2| cc = c2}}}
    cc.must_be_kind_of(@c)
  end

  it "should catch exceptions and reraise them" do
    proc {@pool.hold {|c| c.foobar}}.must_raise(NoMethodError)
  end
end

describe "A connection pool with a max size of 1" do
  before do
    @invoked_count = 0
    icp = proc{@invoked_count += 1}
    @pool = Sequel::ConnectionPool.get_pool(mock_db.call{icp.call; 'herro'.dup}, connection_pool_defaults.merge(:max_connections=>1))
  end

  it "should let only one thread access the connection at any time" do
    cc, c1, c2 = nil
    q, q1 = Queue.new, Queue.new

    t1 = Thread.new {@pool.hold {|c| cc = c; c1 = c.dup; q1.push nil; q.pop}}
    q1.pop
    cc.must_equal 'herro'
    c1.must_equal 'herro'

    t2 = Thread.new {@pool.hold {|c| c2 = c.dup; q1.push nil; q.pop;}} # connection held by t1

    t1.must_be :alive?
    t2.must_be :alive?

    cc.must_equal 'herro'
    c1.must_equal 'herro'
    c2.must_be_nil

    @pool.available_connections.must_be :empty?
    Hash[@pool.allocated.to_a].must_equal(t1=>cc)

    cc.gsub!('rr', 'll')
    q.push nil
    q1.pop
    t1.join
    t2.must_be :alive?

    c2.must_equal 'hello'

    @pool.available_connections.must_be :empty?
    Hash[@pool.allocated.to_a].must_equal(t2=>cc)

    # connection released
    q.push nil
    t2.join

    @invoked_count.must_equal 1
    @pool.size.must_equal 1
    @pool.available_connections.must_equal [cc]
    @pool.allocated.must_be :empty?
  end

  it "should let the same thread reenter #hold" do
    c1, c2, c3 = nil
    @pool.hold do |c|
      c1 = c
      @pool.hold do |cc2|
        c2 = cc2
        @pool.hold do |cc3|
          c3 = cc3
        end
      end
    end
    c1.must_equal 'herro'
    c2.must_equal 'herro'
    c3.must_equal 'herro'
    @invoked_count.must_equal 1
    @pool.size.must_equal 1
    @pool.available_connections.size.must_equal 1
    @pool.allocated.must_be :empty?
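    # hold is re-entrant for the same thread: all three nested holds above
    # yield the single pooled connection, so only one connection is created.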
end end concurrent_connection_pool_specs = Module.new do extend Minitest::Spec::DSL it "should raise error if max_connections is not positive" do proc{get_pool(:max_connections=>0)}.must_raise Sequel::Error end it "should not have all_connections yield connections allocated to other threads" do pool = get_pool(:max_connections=>2, :pool_timeout=>0) q, q1 = Queue.new, Queue.new t = Thread.new do pool.hold do |c1| q1.push nil q.pop end end pool.hold do |c1| q1.pop pool.all_connections{|c| c.must_equal c1} q.push nil end t.join end it "should work when acquire fails and then succeeds" do pool = get_pool(:max_connections=>2, :pool_timeout=>0) def pool._acquire(*) if @called super else @called = true nil end end c = nil pool.hold do |c1| c = c1 end c.wont_be_nil end it "should wait until a connection is available if all are checked out" do pool = get_pool(:max_connections=>1, :pool_timeout=>0.1) q, q1 = Queue.new, Queue.new t = Thread.new do pool.hold do |c| q1.push nil 3.times{Thread.pass} q.pop end end q1.pop proc{pool.hold{}}.must_raise(Sequel::PoolTimeout) q.push nil t.join end it "should not have all_connections yield all available connections" do pool = get_pool(:max_connections=>2, :pool_timeout=>0) q, q1 = Queue.new, Queue.new b = [] t = Thread.new do pool.hold do |c1| @m.synchronize{b << c1} q1.push nil q.pop end end pool.hold do |c1| q1.pop @m.synchronize{b << c1} q.push nil end t.join a = [] pool.all_connections{|c| a << c} a.sort.must_equal b.sort end it "should raise a PoolTimeout error if a connection couldn't be acquired before timeout" do q, q1 = Queue.new, Queue.new db = mock_db.call(&@icpp) db.opts[:name] = 'testing' pool = get_pool(:db=>db, :max_connections=>1, :pool_timeout=>0) t = Thread.new{pool.hold{|c| q1.push nil; q.pop}} q1.pop e = proc{pool.hold{|c|}}.must_raise(Sequel::PoolTimeout) e.message.must_include "name: testing" e.message.must_include "server: default" if pool.is_a?(Sequel::ShardedThreadedConnectionPool) q.push nil t.join end it "should not add a disconnected connection back to the pool if the disconnection_proc raises an error" do pool = get_pool(:max_connections=>1, :pool_timeout=>0, :mock_db_call_args=>[proc{|c| raise Sequel::Error}]) proc{pool.hold{raise Sequel::DatabaseDisconnectError}}.must_raise(Sequel::Error) pool.available_connections.length.must_equal 0 end it "should let five threads simultaneously access separate connections" do cc = {} threads = [] q, q1, q2 = Queue.new, Queue.new, Queue.new 5.times{|i| threads << Thread.new{@pool.hold{|c| q.pop; @m.synchronize{cc[i] = c}; q1.push nil; q2.pop}}; q.push nil; q1.pop} threads.each {|t| t.must_be :alive?} cc.size.must_equal 5 @invoked_count.must_equal 5 @pool.size.must_equal 5 @pool.available_connections.must_be :empty? h = {} i = 0 threads.each{|t| h[t] = (i+=1)} Hash[@pool.allocated.to_a].must_equal h @pool.available_connections.must_equal [] 5.times{q2.push nil} threads.each{|t| t.join} @pool.available_connections.size.must_equal 5 @pool.allocated.must_be :empty? 
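    # All five connections are checked back in once the blocks return. As a
    # sketch of the timeout behavior exercised earlier in this group (an
    # illustration, not part of the specs): when every connection is checked
    # out, hold waits up to :pool_timeout seconds, then raises Sequel::PoolTimeout:
    #
    #   db = Sequel.connect('mock://', :max_connections=>1, :pool_timeout=>0.1)
    #   db.synchronize do
    #     t = Thread.new do
    #       begin
    #         db.synchronize{}   # pool is empty, so this times out
    #       rescue Sequel::PoolTimeout => e
    #         e.message          # includes the database :name option, if set
    #       end
    #     end
    #     t.join
    #   end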
end it "should allow simultaneous connections without preconnecting" do @pool.disconnect b = @icpp time = Time.now cc = {} threads = [] results = [] j = 0 q, q1, q2, q3, q4 = Queue.new, Queue.new, Queue.new, Queue.new, Queue.new m = @m @pool.db.singleton_class.send(:alias_method, :connect, :connect) @pool.db.define_singleton_method(:connect) do |server| q1.pop m.synchronize{q3.push(j += 1)} q4.pop b.call end 5.times{|i| threads << Thread.new{@pool.hold{|c| m.synchronize{i -= 1; cc[i] = c}; q2.pop; q.push nil}}} 5.times{|i| q1.push nil} 5.times{|i| results << q3.pop} 5.times{|i| q4.push nil} 5.times{|i| q2.push nil} 5.times{|i| q.pop} results.sort.must_equal (1..5).to_a threads.each(&:join) (Time.now - time).must_be :<, 0.75 threads.each{|t| t.wont_be :alive?} cc.size.must_equal 5 @invoked_count.must_equal 5 @pool.size.must_equal 5 @pool.available_connections.sort.must_equal (1..5).to_a end it "should block threads until a connection becomes available" do cc = {} threads = [] q, q1 = Queue.new, Queue.new 5.times{|i| threads << Thread.new{@pool.hold{|c| @m.synchronize{cc[i] = c}; q1.push nil; q.pop}}} 5.times{q1.pop} threads.each {|t| t.must_be :alive?} @pool.available_connections.must_be :empty? 3.times {|i| threads << Thread.new {@pool.hold {|c| @m.synchronize{cc[i + 5] = c}; q1.push nil}}} threads[5].must_be :alive? threads[6].must_be :alive? threads[7].must_be :alive? cc.size.must_equal 5 cc[5].must_be_nil cc[6].must_be_nil cc[7].must_be_nil 5.times{q.push nil} 5.times{|i| threads[i].join} 3.times{q1.pop} 3.times{|i| threads[i+5].join} threads.each {|t| t.wont_be :alive?} cc.values.uniq.length.must_equal 5 @pool.size.must_equal 5 @invoked_count.must_equal 5 @pool.available_connections.size.must_equal 5 @pool.allocated.must_be :empty? end it "should block threads until a connection becomes available, when assign connection returns nil" do # Shorten pool timeout, as making assign_connection return nil when there are # connections in the pool can make the pool later block until the timeout expires, # since then the pool will not be signalled correctly. # This spec is only added for coverage purposes, to ensure that fallback code is tested. @pool = get_pool(:pool_timeout=>0.25) cc = {} threads = [] q, q1 = Queue.new, Queue.new 5.times{|i| threads << Thread.new{@pool.hold{|c| @m.synchronize{cc[i] = c}; q1.push nil; q.pop}}} 5.times{q1.pop} threads.each {|t| t.must_be :alive?} @pool.available_connections.must_be :empty? def @pool.assign_connection(*) nil end 3.times {|i| threads << Thread.new {@pool.hold {|c| @m.synchronize{cc[i + 5] = c}; q1.push nil}}} threads[5].must_be :alive? threads[6].must_be :alive? threads[7].must_be :alive? cc.size.must_equal 5 cc[5].must_be_nil cc[6].must_be_nil cc[7].must_be_nil 5.times{q.push nil} 5.times{|i| threads[i].join} 3.times{q1.pop} 3.times{|i| threads[i+5].join} threads.each {|t| t.wont_be :alive?} cc.values.uniq.length.must_equal 5 @pool.size.must_equal 5 @invoked_count.must_equal 5 @pool.available_connections.size.must_equal 5 @pool.allocated.must_be :empty? 
end it "should block threads until a connection becomes available, and reconnect on disconnection" do cc = {} threads = [] exceptions = [] q, q1, q2, q3 = Queue.new, Queue.new, Queue.new, Queue.new b = @icpp @pool.db.singleton_class.send(:alias_method, :connect, :connect) @pool.db.define_singleton_method(:connect) do |server| b.call Object.new end 5.times{|i| threads << Thread.new{@pool.hold{|c| @m.synchronize{cc[i] = c}; q1.push nil; q.pop; raise Sequel::DatabaseDisconnectError} rescue q2.push($!)}} 5.times{q1.pop} threads.each {|t| t.must_be :alive?} @pool.available_connections.must_be :empty? 3.times {|i| threads << Thread.new {@pool.hold {|c| @m.synchronize{cc[i + 5] = c}; q1.push nil; q3.pop}}} threads[5].must_be :alive? threads[6].must_be :alive? threads[7].must_be :alive? cc.size.must_equal 5 cc[5].must_be_nil cc[6].must_be_nil cc[7].must_be_nil 5.times{q.push nil} 5.times{|i| threads[i].join} 5.times{exceptions << q2.pop} 3.times{q1.pop} 3.times{q3.push nil} 3.times{|i| threads[i+5].join} threads.each {|t| t.wont_be :alive?} exceptions.length.must_equal 5 cc.values.uniq.length.must_equal 8 size = @pool.size # Timed Queue pool can use up to 5 because it eagerly sets up additional connections, # while other threads are waiting on the queue. # This is not a bug as long as the number of connections it sets up is still # within the maximum number of connections in the pool. [3,4,5].must_include(size) @invoked_count.must_equal(size+5) @pool.available_connections.size.must_equal size @pool.allocated.must_be :empty? end it "should store connections in a queue" do c2 = nil c = @pool.hold{|cc| Thread.new{@pool.hold{|cc2| c2 = cc2}}.join; cc} @pool.size.must_equal 2 @pool.hold{|cc| cc.must_equal c2} @pool.hold{|cc| cc.must_equal c} @pool.hold do |cc| cc.must_equal c2 Thread.new{@pool.hold{|cc2| _(cc2).must_equal c}}.join end end it "should handle dead threads with checked out connections" do pool = get_pool(:max_connections=>1) skip = true # Leave allocated connection to emulate dead thread with checked out connection pool.define_singleton_method(:release){|*a| return if skip; super(*a)} Thread.new{pool.hold{Thread.current.kill}}.join skip = false pool.allocated.wont_be :empty? pool.available_connections.must_be :empty? pool.hold{|c1| c1} pool.allocated.must_be :empty? pool.available_connections.wont_be :empty? pool.disconnect pool.allocated.must_be :empty? pool.available_connections.must_be :empty? 
end end threaded_connection_pool_specs = Module.new do extend Minitest::Spec::DSL it "should store connections in a stack if :connection_handling=>:stack" do @pool = get_pool(:connection_handling=>:stack) c2 = nil c = @pool.hold{|cc| Thread.new{@pool.hold{|cc2| c2 = cc2}}.join; cc} @pool.size.must_equal 2 @pool.hold{|cc| cc.must_equal c} @pool.hold{|cc| cc.must_equal c} @pool.hold do |cc| cc.must_equal c Thread.new{@pool.hold{|cc2| _(cc2).must_equal c2}}.join end end it "should not store connections if :connection_handling=>:disconnect" do @pool = get_pool(:connection_handling=>:disconnect) d = [] m = @m @pool.db.define_singleton_method(:disconnect_connection){|c| m.synchronize{d << c}} @pool.hold do |cc| cc.must_equal 1 Thread.new{@pool.hold{|cc2| _(cc2).must_equal 2}}.join d.must_equal [2] @pool.hold{|cc3| cc3.must_equal 1} end @pool.size.must_equal 0 d.must_equal [2, 1] @pool.hold{|cc| cc.must_equal 3} @pool.size.must_equal 0 d.must_equal [2, 1, 3] @pool.hold{|cc| cc.must_equal 4} @pool.size.must_equal 0 d.must_equal [2, 1, 3, 4] end end describe "Connection Pool" do before do @m = Mutex.new @invoked_count = 0 @icpp = proc{@m.synchronize{@invoked_count += 1}} end define_method(:get_pool) do |opts={}| args = opts[:mock_db_call_args] || [] Sequel::ConnectionPool.get_pool(opts[:db] || mock_db.call(*args, &@icpp), @cp_opts.merge(opts)) end describe "Threaded Unsharded" do before do @cp_opts = connection_pool_defaults.merge(:max_connections=>5) @pool = get_pool end include concurrent_connection_pool_specs include threaded_connection_pool_specs it "should work correctly if acquire raises an exception" do @pool.hold{} def @pool.acquire(_) raise Sequel::DatabaseDisconnectError; end proc{@pool.hold{}}.must_raise(Sequel::DatabaseDisconnectError) end end describe "Threaded Sharded" do before do @cp_opts = connection_pool_defaults.merge(:max_connections=>5, :servers=>{}) @pool = get_pool end include concurrent_connection_pool_specs include threaded_connection_pool_specs end describe "Timed Queue" do def get_pool(opts={}) pool = super def pool.allocated; @allocated; end def pool.available_connections conns = [] while conn = @queue.pop(timeout: 0) conns << conn end conns.each{|conn| @queue.push(conn)} end pool end before do @cp_opts = connection_pool_defaults.merge(:max_connections=>5, :pool_class=>:timed_queue) @pool = get_pool end include concurrent_connection_pool_specs it "should handle preconnect(true) where a connection cannot be made due to maximum pool size being reached" do m = Mutex.new called = false @pool.define_singleton_method(:try_make_new){super() if m.synchronize{c = called; called = true; c}} i = 0 @pool.send(:preconnect, true) @pool.all_connections{|c1| i+=1} i.must_equal(@pool.max_size - 1) i = 0 @pool.send(:preconnect, true) @pool.all_connections{|c1| i+=1} i.must_equal @pool.max_size end it "should work correctly if acquire raises an exception" do @pool.hold{} def @pool.acquire(_) raise Sequel::DatabaseDisconnectError; end proc{@pool.hold{}}.must_raise(Sequel::DatabaseDisconnectError) end end if RUBY_VERSION >= '3.2' end describe "ConnectionPool#disconnect" do before do @count = 0 cp = proc{@count += 1} @pool = Sequel::ConnectionPool.get_pool(mock_db.call{{:id => cp.call}}, connection_pool_defaults.merge(:max_connections=>5, :servers=>{})) threads = [] q, q1 = Queue.new, Queue.new 5.times {|i| threads << Thread.new {@pool.hold {|c| q1.push nil; q.pop}}} 5.times{q1.pop} 5.times{q.push nil} threads.each {|t| t.join} end it "should invoke the given block for each available 
connection" do @pool.size.must_equal 5 @pool.available_connections.size.must_equal 5 @pool.available_connections.each {|c| c[:id].wont_equal nil} conns = [] @pool.db.define_singleton_method(:disconnect_connection){|c| conns << c} @pool.disconnect conns.size.must_equal 5 end it "should remove all available connections" do @pool.size.must_equal 5 @pool.disconnect @pool.size.must_equal 0 end it "should disconnect connections in use as soon as they are no longer in use" do @pool.size.must_equal 5 @pool.hold do |conn| @pool.available_connections.size.must_equal 4 @pool.available_connections.each {|c| c.wont_be_same_as(conn)} conns = [] @pool.db.define_singleton_method(:disconnect_connection){|c| conns << c} @pool.disconnect conns.size.must_equal 4 @pool.size.must_equal 1 end @pool.size.must_equal 0 end end describe "A connection pool with multiple servers" do before do ic = @invoked_counts = Hash.new(0) @pool = Sequel::ConnectionPool.get_pool(mock_db.call{|server| "#{server}#{ic[server] += 1}"}, connection_pool_defaults.merge(:servers=>{:read_only=>{}})) end it "should support preconnect method that immediately creates the maximum number of connections" do @pool.send(:preconnect) i = 0 @pool.all_connections{|c1| i+=1} i.must_equal(@pool.max_size * 2) end it "should support preconnect method that immediately creates the maximum number of connections concurrently" do @pool.send(:preconnect, true) i = 0 @pool.all_connections{|c1| i+=1} i.must_equal(@pool.max_size * 2) end it "#all_connections should return connections for all servers" do @pool.hold{} @pool.all_connections{|c1| c1.must_equal "default1"} a = [] @pool.hold(:read_only) do |c| @pool.all_connections{|c1| a << c1} end a.sort_by{|c| c.to_s}.must_equal ["default1", "read_only1"] end it "#servers should return symbols for all servers" do @pool.servers.sort_by{|s| s.to_s}.must_equal [:default, :read_only] end it "should use the :default server by default" do @pool.size.must_equal 0 @pool.hold do |c| c.must_equal "default1" Hash[@pool.allocated.to_a].must_equal(Thread.current=>"default1") end @pool.available_connections.must_equal ["default1"] @pool.size.must_equal 1 @invoked_counts.must_equal(:default=>1) end it "should use the :default server an invalid server is used" do @pool.hold do |c1| c1.must_equal "default1" @pool.hold(:blah) do |c2| c2.must_equal c1 @pool.hold(:blah2) do |c3| c2.must_equal c3 end end end end it "should support a :servers_hash option used for converting the server argument" do ic = @invoked_counts @pool = Sequel::ConnectionPool.get_pool(mock_db.call{|server| "#{server}#{ic[server] += 1}"}, connection_pool_defaults.merge(:servers_hash=>Hash.new(:read_only), :servers=>{:read_only=>{}})) @pool.hold(:blah) do |c1| c1.must_equal "read_only1" @pool.hold(:blah) do |c2| c2.must_equal c1 @pool.hold(:blah2) do |c3| c2.must_equal c3 end end end @pool = Sequel::ConnectionPool.get_pool(mock_db.call{|server| "#{server}#{ic[server] += 1}"}, connection_pool_defaults.merge(:servers_hash=>Hash.new{|h,k| raise Sequel::Error}, :servers=>{:read_only=>{}})) proc{@pool.hold(:blah){|c1|}}.must_raise(Sequel::Error) end it "should use the requested server if server is given" do @pool.size(:read_only).must_equal 0 @pool.hold(:read_only) do |c| c.must_equal "read_only1" Hash[@pool.allocated(:read_only).to_a].must_equal(Thread.current=>"read_only1") end @pool.available_connections(:read_only).must_equal ["read_only1"] @pool.size(:read_only).must_equal 1 @invoked_counts.must_equal(:read_only=>1) end it "#hold should only yield connections for the 
server requested" do @pool.hold(:read_only) do |c| c.must_equal "read_only1" Hash[@pool.allocated(:read_only).to_a].must_equal(Thread.current=>"read_only1") @pool.hold do |d| d.must_equal "default1" @pool.hold do |e| e.must_equal d @pool.hold(:read_only){|b| b.must_equal c} end Hash[@pool.allocated.to_a].must_equal(Thread.current=>"default1") end end @invoked_counts.must_equal(:read_only=>1, :default=>1) end it "#disconnect should disconnect from all servers" do @pool.hold(:read_only){} @pool.hold{} conns = [] @pool.size.must_equal 1 @pool.size(:read_only).must_equal 1 @pool.db.define_singleton_method(:disconnect_connection){|c| conns << c} @pool.disconnect conns.sort.must_equal %w'default1 read_only1' @pool.size.must_equal 0 @pool.size(:read_only).must_equal 0 @pool.hold(:read_only){|c| c.must_equal 'read_only2'} @pool.hold{|c| c.must_equal 'default2'} end it "#disconnect with :server should disconnect from specific servers" do @pool.hold(:read_only){} @pool.hold{} conns = [] @pool.size.must_equal 1 @pool.size(:read_only).must_equal 1 @pool.db.define_singleton_method(:disconnect_connection){|c| conns << c} @pool.disconnect(:server=>:default) conns.sort.must_equal %w'default1' @pool.size.must_equal 0 @pool.size(:read_only).must_equal 1 @pool.hold(:read_only){|c| c.must_equal 'read_only1'} @pool.hold{|c| c.must_equal 'default2'} end it "#disconnect with invalid :server should raise error" do proc{@pool.disconnect(:server=>:foo)}.must_raise Sequel::Error end it "#add_servers should add new servers to the pool" do pool = Sequel::ConnectionPool.get_pool(mock_db.call{|s| s}, :servers=>{:server1=>{}}) pool.hold{} pool.hold(:server2){} pool.hold(:server3){} pool.hold(:server1) do pool.allocated.length.must_equal 0 pool.allocated(:server1).length.must_equal 1 pool.allocated(:server2).must_be_nil pool.allocated(:server3).must_be_nil pool.available_connections.length.must_equal 1 pool.available_connections(:server1).length.must_equal 0 pool.available_connections(:server2).must_be_nil pool.available_connections(:server3).must_be_nil pool.add_servers([:server2, :server3]) pool.hold(:server2){} pool.hold(:server3) do pool.allocated.length.must_equal 0 pool.allocated(:server1).length.must_equal 1 pool.allocated(:server2).length.must_equal 0 pool.allocated(:server3).length.must_equal 1 pool.available_connections.length.must_equal 1 pool.available_connections(:server1).length.must_equal 0 pool.available_connections(:server2).length.must_equal 1 pool.available_connections(:server3).length.must_equal 0 end end end it "#add_servers should ignore existing keys" do pool = Sequel::ConnectionPool.get_pool(mock_db.call{|s| s}, :servers=>{:server1=>{}}) pool.allocated.length.must_equal 0 pool.allocated(:server1).length.must_equal 0 pool.available_connections.length.must_equal 0 pool.available_connections(:server1).length.must_equal 0 pool.hold do |c1| c1.must_equal :default pool.allocated.length.must_equal 1 pool.allocated(:server1).length.must_equal 0 pool.available_connections.length.must_equal 0 pool.available_connections(:server1).length.must_equal 0 pool.hold(:server1) do |c2| c2.must_equal :server1 pool.allocated.length.must_equal 1 pool.allocated(:server1).length.must_equal 1 pool.available_connections.length.must_equal 0 pool.available_connections(:server1).length.must_equal 0 pool.add_servers([:default, :server1]) pool.allocated.length.must_equal 1 pool.allocated(:server1).length.must_equal 1 pool.available_connections.length.must_equal 0 pool.available_connections(:server1).length.must_equal 0 end 
pool.allocated.length.must_equal 1 pool.allocated(:server1).length.must_equal 0 pool.available_connections.length.must_equal 0 pool.available_connections(:server1).length.must_equal 1 pool.add_servers([:default, :server1]) pool.allocated.length.must_equal 1 pool.allocated(:server1).length.must_equal 0 pool.available_connections.length.must_equal 0 pool.available_connections(:server1).length.must_equal 1 end pool.allocated.length.must_equal 0 pool.allocated(:server1).length.must_equal 0 pool.available_connections.length.must_equal 1 pool.available_connections(:server1).length.must_equal 1 pool.add_servers([:default, :server1]) pool.allocated.length.must_equal 0 pool.allocated(:server1).length.must_equal 0 pool.available_connections.length.must_equal 1 pool.available_connections(:server1).length.must_equal 1 end it "#remove_servers should disconnect available connections immediately" do pool = Sequel::ConnectionPool.get_pool(mock_db.call{|s| s}, :max_connections=>5, :servers=>{:server1=>{}}) threads = [] q, q1 = Queue.new, Queue.new 5.times {|i| threads << Thread.new {pool.hold(:server1){|c| q1.push nil; q.pop}}} 5.times{q1.pop} 5.times{q.push nil} threads.each {|t| t.join} pool.size(:server1).must_equal 5 pool.remove_servers([:server1]) pool.size(:server1).must_equal 0 end it "#remove_servers should disconnect connections in use as soon as they are returned to the pool" do dc = [] pool = Sequel::ConnectionPool.get_pool(mock_db.call(proc{|c| dc << c}){|c| c}, :servers=>{:server1=>{}}) c1 = nil pool.hold(:server1) do |c| pool.size(:server1).must_equal 1 dc.must_equal [] pool.remove_servers([:server1]) pool.size(:server1).must_equal 0 dc.must_equal [] c1 = c end pool.size(:server1).must_equal 0 dc.must_equal [c1] end it "#remove_servers should remove server related data structures immediately" do pool = Sequel::ConnectionPool.get_pool(mock_db.call{|s| s}, :servers=>{:server1=>{}}) pool.available_connections(:server1).must_equal [] pool.allocated(:server1).must_equal({}) pool.remove_servers([:server1]) pool.available_connections(:server1).must_be_nil pool.allocated(:server1).must_be_nil end it "#remove_servers should not allow the removal of the default server" do pool = Sequel::ConnectionPool.get_pool(mock_db.call{|s| s}, :servers=>{:server1=>{}}) pool.remove_servers([:server1]) proc{pool.remove_servers([:default])}.must_raise(Sequel::Error) end it "#remove_servers should ignore servers that have already been removed" do dc = [] pool = Sequel::ConnectionPool.get_pool(mock_db.call(proc{|c| dc << c}){|c| c}, :servers=>{:server1=>{}}) c1 = nil pool.hold(:server1) do |c| pool.size(:server1).must_equal 1 dc.must_equal [] pool.remove_servers([:server1]) pool.remove_servers([:server1]) pool.size(:server1).must_equal 0 dc.must_equal [] c1 = c end pool.size(:server1).must_equal 0 dc.must_equal [c1] end end describe "SingleConnectionPool" do before do @pool = Sequel::ConnectionPool.get_pool(mock_db.call{1234}, st_connection_pool_defaults) end it "should provide a #hold method" do conn = nil @pool.hold{|c| conn = c} conn.must_equal 1234 end it "should provide a #disconnect method" do conn = nil x = nil pool = Sequel::ConnectionPool.get_pool(mock_db.call(proc{|c| conn = c; c.must_be_kind_of(Integer)}){1234}, st_connection_pool_defaults) pool.hold{|c| x = c} x.must_equal 1234 pool.disconnect conn.must_equal 1234 pool.disconnect end it "should have #all_connections not yield if not connected" do called = false @pool.all_connections{called = true} called.must_equal false end end describe "A single threaded 
pool with multiple servers" do before do @max_size=2 msp = proc{@max_size += 1} @pool = Sequel::ConnectionPool.get_pool(mock_db.call(proc{|c| msp.call}){|c| c}, st_connection_pool_defaults.merge(:servers=>{:read_only=>{}})) end it "should support preconnect method that immediately creates the maximum number of connections" do @pool.send(:preconnect) i = 0 @pool.all_connections{|c1| i+=1} i.must_equal 2 end it "should support preconnect method that immediately creates the maximum number of connections, ignoring concurrent param" do @pool.send(:preconnect, true) i = 0 @pool.all_connections{|c1| i+=1} i.must_equal 2 end it "#all_connections should return connections for all servers" do @pool.hold{} @pool.all_connections{|c1| c1.must_equal :default} a = [] @pool.hold(:read_only) do @pool.all_connections{|c1| a << c1} end a.sort_by{|c| c.to_s}.must_equal [:default, :read_only] end it "#servers should return symbols for all servers" do @pool.servers.sort_by{|s| s.to_s}.must_equal [:default, :read_only] end it "#add_servers should add new servers to the pool" do @pool.hold(:blah){|c| c.must_equal :default} @pool.add_servers([:blah]) @pool.hold(:blah){|c| c.must_equal :blah} end it "#add_servers should ignore keys already existing" do @pool.hold{|c| c.must_equal :default} @pool.hold(:read_only){|c| c.must_equal :read_only} @pool.add_servers([:default, :read_only]) @pool.conn.must_equal :default @pool.conn(:read_only).must_equal :read_only end it "#remove_servers should remove servers from the pool" do @pool.hold(:read_only){|c| c.must_equal :read_only} @pool.remove_servers([:read_only]) @pool.hold(:read_only){|c| c.must_equal :default} end it "#remove_servers should not allow the removal of the default server" do proc{@pool.remove_servers([:default])}.must_raise(Sequel::Error) end it "#remove_servers should disconnect connection immediately" do @pool.hold(:read_only){|c| c.must_equal :read_only} @pool.conn(:read_only).must_equal :read_only @pool.remove_servers([:read_only]) @pool.conn(:read_only).must_be_nil @pool.hold{} @pool.conn(:read_only).must_equal :default end it "#remove_servers should ignore keys that do not exist" do @pool.remove_servers([:blah]) end it "should use the :default server by default" do @pool.hold{|c| c.must_equal :default} @pool.conn.must_equal :default end it "should use the :default server an invalid server is used" do @pool.hold do |c1| c1.must_equal :default @pool.hold(:blah) do |c2| c2.must_equal c1 @pool.hold(:blah2) do |c3| c2.must_equal c3 end end end end it "should use the requested server if server is given" do @pool.hold(:read_only){|c| c.must_equal :read_only} @pool.conn(:read_only).must_equal :read_only end it "#hold should only yield connections for the server requested" do @pool.hold(:read_only) do |c| c.must_equal :read_only @pool.hold do |d| d.must_equal :default @pool.hold do |e| e.must_equal d @pool.hold(:read_only){|b| b.must_equal c} end end end @pool.conn.must_equal :default @pool.conn(:read_only).must_equal :read_only end it "#disconnect should disconnect from all servers" do @pool.hold(:read_only){} @pool.hold{} @pool.conn.must_equal :default @pool.conn(:read_only).must_equal :read_only @pool.disconnect @max_size.must_equal 4 @pool.conn.must_be_nil @pool.conn(:read_only).must_be_nil end it "#disconnect with :server should disconnect from specific servers" do @pool.hold(:read_only){} @pool.hold{} @pool.conn.must_equal :default @pool.conn(:read_only).must_equal :read_only @pool.disconnect(:server=>:default) @max_size.must_equal 3 @pool.conn.must_be_nil 
@pool.conn(:read_only).must_equal :read_only end it "#disconnect with invalid :server should raise error" do proc{@pool.disconnect(:server=>:foo)}.must_raise Sequel::Error end it ":disconnection_proc option should set the disconnection proc to use" do @max_size.must_equal 2 proc{@pool.hold{raise Sequel::DatabaseDisconnectError}}.must_raise(Sequel::DatabaseDisconnectError) @max_size.must_equal 3 end it "#hold should remove the connection if a DatabaseDisconnectError is raised" do @pool.instance_variable_get(:@conns).length.must_equal 0 @pool.hold{} @pool.instance_variable_get(:@conns).length.must_equal 1 proc{@pool.hold{raise Sequel::DatabaseDisconnectError}}.must_raise(Sequel::DatabaseDisconnectError) @pool.instance_variable_get(:@conns).length.must_equal 0 end end all_pools = [] [true, false].each do |k| [true, false].each do |v| all_pools << {:single_threaded=>k, :servers=>(v ? {} : nil)} end end all_pools << {:pool_class=>:timed_queue} if RUBY_VERSION >= '3.2' all_pools.each do |opts| describe "Connection pool with #{opts.inspect}" do before(:all) do Sequel::ConnectionPool.send(:get_pool, mock_db.call, opts) end before do @class = Sequel::ConnectionPool.send(:connection_pool_class, opts) end it "should work correctly after being frozen" do o = Object.new db = mock_db.call{o} cp = @class.new(db, {}) db.instance_variable_set(:@pool, cp) db.freeze cp.frozen?.must_equal true db.synchronize{|c| c.must_be_same_as o} end it "should have pool correctly handle disconnect errors not raised as DatabaseDisconnectError" do db = mock_db.call{Object.new} def db.dec; @dec ||= Class.new(StandardError) end def db.database_error_classes; super + [dec] end def db.disconnect_error?(e, opts); e.message =~ /foo/ end cp = @class.new(db, {}) conn = nil cp.hold do |c| conn = c end proc do cp.hold do |c| c.must_equal conn raise db.dec, "bar" end end.must_raise db.dec proc do cp.hold do |c| c.must_equal conn raise StandardError end end.must_raise StandardError cp.hold do |c| c.must_equal conn end proc do cp.hold do |c| c.must_equal conn raise db.dec, "foo" end end.must_raise db.dec cp.hold do |c| c.wont_equal conn end end it "should have pool_type return a symbol" do @class.new(mock_db.call{123}, {}).pool_type.must_be_kind_of(Symbol) end it "should support :pool_class option given as a string" do type = @class.new(mock_db.call{123}, {}).pool_type pool = Sequel::ConnectionPool.send(:connection_pool_class, opts.merge(:pool_class=>type.to_s)).new(mock_db.call{123}, {}) pool.pool_type.must_equal type end it "should have all_connections yield current and available connections" do p = @class.new(mock_db.call{123}, {}) p.hold{|c| p.all_connections{|c1| c.must_equal c1}} end it "should have a size method that gives the current size of the pool" do p = @class.new(mock_db.call{123}, {}) p.size.must_equal 0 p.hold{} p.size.must_equal 1 end it "should have a max_size method that gives the maximum size of the pool" do @class.new(mock_db.call{123}, {}).max_size.must_be :>=, 1 end it "should support preconnect method that immediately creates the maximum number of connections" do p = @class.new(mock_db.call{Object.new}, {}) p.send(:preconnect) i = 0 p.all_connections{|c1| i+=1} i.must_equal p.max_size p.send(:preconnect) i.must_equal p.max_size end it "should support preconnect method that immediately creates the maximum number of connections concurrently" do p = @class.new(mock_db.call{Object.new}, {}) p.send(:preconnect, true) i = 0 p.all_connections{|c1| i+=1} i.must_equal p.max_size p.send(:preconnect, true) i.must_equal 
p.max_size
  end

  it "should be able to modify after_connect proc after the pool is created" do
    a = []
    p = @class.new(mock_db.call{123}, {})
    p.after_connect = pr = proc{|c| a << c}
    p.after_connect.must_equal pr
    a.must_equal []
    p.hold{}
    a.must_equal [123]

    p.after_connect = proc{|c, s| a = [c, s]}
    p.disconnect
    p.hold{}
    a.must_equal [123, :default]
  end

  it "should be able to modify connect_sqls after the pool is created" do
    db = mock_db.call
    p = @class.new(db, {})
    p.connect_sqls = ['SELECT 1']
    p.connect_sqls.must_equal ['SELECT 1']
    db.disconnect
    p.hold{}
    db.sqls.must_equal ['SELECT 1']
  end

  it "should not raise an error when disconnecting twice" do
    c = @class.new(mock_db.call{123}, {})
    c.disconnect
    c.disconnect
  end

  it "should yield a connection created by the initialize block to hold" do
    x = nil
    @class.new(mock_db.call{123}, {}).hold{|c| x = c}
    x.must_equal 123
  end

  it "should have the initialize block accept a shard/server argument" do
    x = nil
    @class.new(mock_db.call{|c| [c, c]}, {}).hold{|c| x = c}
    x.must_equal [:default, :default]
  end

  it "should respect an :after_connect proc that is called with each newly created connection" do
    x = nil
    db = mock_db.call(nil, :after_connect=>proc{|c| x = [c, c]}){123}
    @class.new(db, db.opts).hold{}
    x.must_equal [123, 123]

    x = nil
    db = mock_db.call(nil, :after_connect=>lambda{|c| x = [c, c]}){123}
    @class.new(db, db.opts).hold{}
    x.must_equal [123, 123]

    x = nil
    db = mock_db.call(nil, :after_connect=>proc{|c, s| x = [c, s]}){123}
    @class.new(db, db.opts).hold{}
    x.must_equal [123, :default]

    x = nil
    db = mock_db.call(nil, :after_connect=>lambda{|c, s| x = [c, s]}){123}
    @class.new(db, db.opts).hold{}
    x.must_equal [123, :default]
  end

  it "should raise a DatabaseConnectionError if the connection raises an exception" do
    proc{@class.new(mock_db.call{|c| raise Exception}, {}).hold{}}.must_raise(Sequel::DatabaseConnectionError)
  end

  it "should raise a DatabaseConnectionError if the initialize block returns nil" do
    proc{@class.new(mock_db.call{}, {}).hold{}}.must_raise(Sequel::DatabaseConnectionError)
  end

  it "should call the disconnection_proc option if the hold block raises a DatabaseDisconnectError" do
    x = nil
    proc{@class.new(mock_db.call(proc{|c| x = c}){123}).hold{raise Sequel::DatabaseDisconnectError}}.must_raise(Sequel::DatabaseDisconnectError)
    x.must_equal 123
  end

  it "should have a disconnect method that disconnects the connection" do
    x = nil
    c = @class.new(mock_db.call(proc{|c1| x = c1}){123})
    c.hold{}
    x.must_be_nil
    c.disconnect
    x.must_equal 123
  end

  it "should have a reentrant hold method" do
    o = Object.new
    c = @class.new(mock_db.call{o}, {})
    c.hold do |x|
      x.must_equal o
      c.hold do |x1|
        x1.must_equal o
        c.hold do |x2|
          x2.must_equal o
        end
      end
    end
  end

  it "should have a servers method that returns an array of shard/server symbols" do
    @class.new(mock_db.call{123}, {}).servers.must_equal [:default]
  end

  it "should have a size method that returns the number of connections in the pool" do
    c = @class.new(mock_db.call{123}, {})
    c.size.must_equal 0
    c.hold{}
    c.size.must_equal 1
  end
end
end
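# An illustrative sketch (not part of the specs) of how the pool options
# exercised above are passed to Sequel.connect; :pool_class=>:timed_queue
# requires Ruby 3.2+, and a symbol, string, or pool class is accepted:
#
#   db = Sequel.connect('mock://',
#     :max_connections=>10,       # cap on the number of pooled connections
#     :pool_timeout=>5,           # seconds to wait before Sequel::PoolTimeout
#     :pool_class=>:timed_queue)  # Queue-backed pool for Ruby 3.2+
#
#   # Passing :servers instead selects a sharded pool:
#   db = Sequel.connect('mock://', :servers=>{:read_only=>{}})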
sequel-5.63.0/spec/core/database_spec.rb000066400000000000000000004036761434214120600201050ustar00rootroot00000000000000require_relative "spec_helper" describe "A new Database" do before do @db = Sequel::Database.new(1 => 2, :logger => 3) end it "should not allow dup/clone" do proc{@db.dup}.must_raise NoMethodError proc{@db.clone}.must_raise NoMethodError end it "should receive options" do @db.opts[1].must_equal 2 @db.opts[:logger].must_equal 3 end it "should set the logger from opts[:logger] and opts[:loggers]" do @db.loggers.must_equal [3] Sequel::Database.new(1 => 2, :loggers => 3).loggers.must_equal [3] Sequel::Database.new(1 => 2, :loggers => [3]).loggers.must_equal [3] Sequel::Database.new(1 => 2, :logger => 4, :loggers => 3).loggers.must_equal [4,3] Sequel::Database.new(1 => 2, :logger => [4], :loggers => [3]).loggers.must_equal [4,3] end it "should support :preconnect option to preconnect to database" do @db.pool.size.must_equal 0 c = Class.new(Sequel::Database) do def dataset_class_default; Sequel::Dataset end def connect(_) :connect end end db = c.new(1 => 2, :logger => 3, :preconnect=>true) db.pool.size.must_equal db.pool.max_size db = c.new(1 => 2, :logger => 3, :preconnect=>:concurrently) db.pool.size.must_equal db.pool.max_size end it "should handle the default string column size" do @db.default_string_column_size.must_equal 255 db = Sequel::Database.new(:default_string_column_size=>50) db.default_string_column_size.must_equal 50 db.default_string_column_size = 2 db.default_string_column_size.must_equal 2 end it "should handle checking string bytesize before typecasting" do @db.check_string_typecast_bytesize.must_equal true db = Sequel::Database.new(:check_string_typecast_bytesize=>'f') db.check_string_typecast_bytesize.must_equal false db.check_string_typecast_bytesize = true db.check_string_typecast_bytesize.must_equal true end it "should set the sql_log_level from opts[:sql_log_level]" do Sequel::Database.new(1 => 2, :sql_log_level=>:debug).sql_log_level.must_equal :debug Sequel::Database.new(1 => 2, :sql_log_level=>'debug').sql_log_level.must_equal :debug end it "should create a connection pool" do @db.pool.must_be_kind_of(Sequel::ConnectionPool) @db.pool.max_size.must_equal 4 Sequel::Database.new(:max_connections => 10).pool.max_size.must_equal 10 end it "should have the connection pool use the connect method to get connections" do cc = nil d = Sequel::Database.new d.define_singleton_method(:connect){|c| 1234} d.synchronize {|c| cc = c} cc.must_equal 1234 end it "should not add the instance to Sequel::DATABASES if testing the connection during initialization fails" do c = Class.new(Sequel::Database) do def connect(*) raise end end num_dbs = Sequel::DATABASES.size proc{c.new}.must_raise Sequel::DatabaseConnectionError Sequel::DATABASES.size.must_equal num_dbs db = c.new(:test=>false) Sequel::DATABASES.size.must_equal(num_dbs+1) Sequel::DATABASES[-1].must_equal db end it "should respect the :single_threaded option" do db = Sequel::Database.new(:single_threaded=>true){123} db.pool.must_be_kind_of(Sequel::SingleConnectionPool) db = Sequel::Database.new(:single_threaded=>'t'){123} 
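# String option values are typecast: 't'/'1' (and 'f'/'0' below) should behave like true/false.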
db.pool.must_be_kind_of(Sequel::SingleConnectionPool) db = Sequel::Database.new(:single_threaded=>'1'){123} db.pool.must_be_kind_of(Sequel::SingleConnectionPool) db = Sequel::Database.new(:single_threaded=>false){123} db.pool.must_be_kind_of(Sequel::ConnectionPool) db = Sequel::Database.new(:single_threaded=>'f'){123} db.pool.must_be_kind_of(Sequel::ConnectionPool) db = Sequel::Database.new(:single_threaded=>'0'){123} db.pool.must_be_kind_of(Sequel::ConnectionPool) end it "should just use a :uri option for jdbc with the full connection string" do db = Sequel::Database.stub(:adapter_class, Class.new(Sequel::Database){def connect(*); Object.new end}) do Sequel.connect('jdbc:test://host/db_name') end db.must_be_kind_of(Sequel::Database) db.opts[:uri].must_equal 'jdbc:test://host/db_name' end it "should populate :adapter option when using connection string" do Sequel.connect('mock:/').opts[:adapter].must_equal "mock" end it "should respect the :keep_reference option for not keeping a reference in Sequel::DATABASES" do db = Sequel.connect('mock:///?keep_reference=f') Sequel::DATABASES.wont_include(db) end it 'should strip square brackets for ipv6 hosts' do Sequel.connect('mock://[::1]').opts[:host].must_equal "::1" end it 'should ignore options with an empty key' do Sequel.connect('mock:///?=foo').opts.has_key?(:"").must_equal false end it 'should translate username option into user' do Sequel.connect('mock:///?username=foo').opts[:user].must_equal 'foo' end it 'should set up a shared database if servers key is given' do db = Sequel.connect('mock:///?servers=t') db.opts[:servers].must_equal({}) db.sharded?.must_equal true end it "should not keep a reference if initialization fails" do length = Sequel::DATABASES.length klass = Class.new(Sequel::Database.adapter_class('mock')) do def initialize_load_extensions(_) raise ArgumentError end end proc{Sequel.connect(:adapter_class=>klass)}.must_raise ArgumentError Sequel::DATABASES.length.must_equal length proc{Sequel.connect(:adapter_class=>klass, :keep_reference=>false)}.must_raise ArgumentError Sequel::DATABASES.length.must_equal length proc{Sequel.connect(:adapter_class=>klass){}}.must_raise ArgumentError Sequel::DATABASES.length.must_equal length proc{Sequel.connect(:adapter_class=>klass, :keep_reference=>false){}}.must_raise ArgumentError Sequel::DATABASES.length.must_equal length end it ".adapter_class should return the class of the adapter" do klass = Sequel::Database.adapter_class('mock') klass.must_be_same_as(Sequel::Database.adapter_class(klass)) end it 'should log exceptions without messages correctly' do logger = [] def logger.error(x) self << x end db = Sequel::Database.new db.loggers << logger e = Sequel::Error.new def e.message; end db.log_exception(e, 'x') logger.must_equal ["Sequel::Error: : x"] end end describe "Database :after_connect option" do it "should be called for each new connection" do db = Sequel.mock(:after_connect=>proc{|c| c.execute('SELECT 1'); c.execute('SELECT 2');}) db.sqls.must_equal ['SELECT 1', 'SELECT 2'] db['SELECT 3'].get db.sqls.must_equal ['SELECT 3'] db.disconnect db['SELECT 3'].get db.sqls.must_equal ['SELECT 1', 'SELECT 2', 'SELECT 3'] end it "should pass shard as second argument if supported" do db = Sequel.mock(:servers=>{:a=>{}}, :after_connect=>proc{|c, s| c.execute("SELECT #{s}")}) db.sqls.must_equal ['SELECT default'] db['SELECT 3'].get db.sqls.must_equal ['SELECT 3'] db.disconnect db['SELECT 3'].server(:a).get db.sqls.must_equal ['SELECT a -- a', 'SELECT 3 -- a'] end it "should allow for 
server/shard specific :after_connect" do db = Sequel.mock( :after_connect=>proc{|c, s| c.execute("SELECT #{s}")}, :servers=>{ :a=>{:after_connect=>proc{|c, s| c.execute("SELECT A #{s}")}}, :b=>{:after_connect=>proc{|c, s| c.execute("SELECT B #{s}")}} }) db.sqls.must_equal ['SELECT default'] db['SELECT 3'].get db.sqls.must_equal ['SELECT 3'] db.disconnect db['SELECT 3'].server(:a).get db.sqls.must_equal ['SELECT A a -- a', 'SELECT 3 -- a'] db['SELECT 3'].server(:b).get db.sqls.must_equal ['SELECT B b -- b', 'SELECT 3 -- b'] end end describe "Database :connect_sqls option" do it "should issue each SQL query for each new connection" do db = Sequel.mock(:connect_sqls=>['SELECT 1', 'SELECT 2']) db.sqls.must_equal ['SELECT 1', 'SELECT 2'] db['SELECT 3'].get db.sqls.must_equal ['SELECT 3'] db.disconnect db['SELECT 3'].get db.sqls.must_equal ['SELECT 1', 'SELECT 2', 'SELECT 3'] end it "should allow for server/shard specific :connect_sqls" do db = Sequel.mock(:connect_sqls=>['SELECT 1'], :servers=>{:a=>{:connect_sqls=>['SELECT A']}, :b=>{:connect_sqls=>['SELECT B']}}) db.sqls.must_equal ['SELECT 1'] db['SELECT 3'].get db.sqls.must_equal ['SELECT 3'] db.disconnect db['SELECT 3'].server(:a).get db.sqls.must_equal ['SELECT A -- a', 'SELECT 3 -- a'] db['SELECT 3'].server(:b).get db.sqls.must_equal ['SELECT B -- b', 'SELECT 3 -- b'] end end describe "Database#freeze" do before do @db = Sequel.mock.freeze end it "should freeze internal structures" do @db.instance_exec do frozen?.must_equal true opts.frozen?.must_equal true pool.frozen?.must_equal true loggers.frozen?.must_equal true @dataset_class.frozen?.must_equal true @dataset_modules.frozen?.must_equal true @schema_type_classes.frozen?.must_equal true from(:a).frozen?.must_equal true metadata_dataset.frozen?.must_equal true end proc{@db.extend_datasets{}}.must_raise RuntimeError, TypeError end end describe "Database#disconnect" do it "should call pool.disconnect" do d = Sequel::Database.new p = d.pool def p.disconnect(h) raise unless h == {} 2 end d.disconnect.must_equal 2 end end describe "Sequel.extension" do it "should attempt to load the given extension" do proc{Sequel.extension :blah}.must_raise(LoadError) end end describe "Database#log_info" do before do @o = Object.new def @o.logs; @logs || []; end def @o.to_ary; [self]; end def @o.method_missing(*args); (@logs ||= []) << args; end @db = Sequel::Database.new(:logger=>@o) end it "should log message at info level to all loggers" do @db.log_info('blah') @o.logs.must_equal [[:info, 'blah']] end it "should log message with args at info level to all loggers" do @db.log_info('blah', [1, 2]) @o.logs.must_equal [[:info, 'blah; [1, 2]']] end end describe "Database#log_connection_yield" do before do @o = Object.new def @o.logs; @logs || []; end def @o.to_ary; [self]; end def @o.warn(*args); (@logs ||= []) << [:warn] + args; end def @o.method_missing(*args); (@logs ||= []) << args; end @conn = Object.new @db = Sequel::Database.new(:logger=>@o) end it "should log SQL to the loggers" do @db.log_connection_yield("some SQL", @conn){} @o.logs.length.must_equal 1 @o.logs.first.length.must_equal 2 @o.logs.first.first.must_equal :info @o.logs.first.last.must_match(/some SQL\z/) @o.logs.first.last.wont_match(/\(conn: -?\d+\) some SQL\z/) end it "should include connection information when logging" do @db.log_connection_info = true @db.log_connection_yield("some SQL", @conn){} @o.logs.length.must_equal 1 @o.logs.first.length.must_equal 2 @o.logs.first.first.must_equal :info @o.logs.first.last.must_match(/\(conn: 
-?\d+\) some SQL\z/) end it "should yield to the passed block" do a = nil @db.log_connection_yield('blah', @conn){a = 1} a.must_equal 1 end it "should raise an exception if a block is not passed" do proc{@db.log_connection_yield('blah', @conn)}.must_raise LocalJumpError end it "should log message with duration at info level to all loggers" do @db.log_connection_yield('blah', @conn){} @o.logs.length.must_equal 1 @o.logs.first.length.must_equal 2 @o.logs.first.first.must_equal :info @o.logs.first.last.must_match(/\A\(\d\.\d{6}s\) blah\z/) end it "should respect sql_log_level setting" do @db.sql_log_level = :debug @db.log_connection_yield('blah', @conn){} @o.logs.length.must_equal 1 @o.logs.first.length.must_equal 2 @o.logs.first.first.must_equal :debug @o.logs.first.last.must_match(/\A\(\d\.\d{6}s\) blah\z/) end it "should log message with duration at warn level if duration greater than log_warn_duration" do @db.log_warn_duration = 0 @db.log_connection_yield('blah', @conn){} @o.logs.length.must_equal 1 @o.logs.first.length.must_equal 2 @o.logs.first.first.must_equal :warn @o.logs.first.last.must_match(/\A\(\d\.\d{6}s\) blah\z/) end it "should log message with duration at info level if duration less than log_warn_duration" do @db.log_warn_duration = 1000 @db.log_connection_yield('blah', @conn){} @o.logs.length.must_equal 1 @o.logs.first.length.must_equal 2 @o.logs.first.first.must_equal :info @o.logs.first.last.must_match(/\A\(\d\.\d{6}s\) blah\z/) end it "should log message at error level if block raises an error" do @db.log_warn_duration = 0 proc{@db.log_connection_yield('blah', @conn){raise Sequel::Error, 'adsf'}}.must_raise Sequel::Error @o.logs.length.must_equal 1 @o.logs.first.length.must_equal 2 @o.logs.first.first.must_equal :error @o.logs.first.last.must_match(/\ASequel::Error: adsf: blah\z/) end it "should include args with message if args passed" do @db.log_connection_yield('blah', @conn, [1, 2]){} @o.logs.length.must_equal 1 @o.logs.first.length.must_equal 2 @o.logs.first.first.must_equal :info @o.logs.first.last.must_match(/\A\(\d\.\d{6}s\) blah; \[1, 2\]\z/) end it "should log without a logger defined by forcing skip_logging? to return false" do @db.logger = nil @db.extend(Module.new do def skip_logging? 
false end def log_duration(*) self.did_log = true end attr_accessor :did_log end) @db.log_connection_yield('some sql', @conn) {} @db.did_log.must_equal true end end describe "Database#uri" do before do @c = Class.new(Sequel::Database) do def dataset_class_default; Sequel::Dataset end def connect(*); Object.new end set_adapter_scheme :mau end @db = Sequel.connect('mau://user:pass@localhost:9876/maumau') end it "should return the connection URI for the database" do @db.uri.must_equal 'mau://user:pass@localhost:9876/maumau' end it "should return nil if a connection uri was not used" do Sequel.mock.uri.must_be_nil end it "should be aliased as #url" do @db.url.must_equal 'mau://user:pass@localhost:9876/maumau' end end describe "Database.adapter_scheme and #adapter_scheme" do it "should return the database scheme" do Sequel::Database.adapter_scheme.must_be_nil @c = Class.new(Sequel::Database) do def dataset_class_default; Sequel::Dataset end set_adapter_scheme :mau end @c.adapter_scheme.must_equal :mau @c.new({}).adapter_scheme.must_equal :mau end end describe "Database#dataset" do before do @db = Sequel.mock @ds = @db.dataset end it "should provide a blank dataset through #dataset" do @ds.must_be_kind_of(Sequel::Dataset) @ds.opts.must_equal({}) @ds.db.must_be_same_as(@db) end it "should provide a #from dataset" do d = @db.from(:mau) d.must_be_kind_of(Sequel::Dataset) d.sql.must_equal 'SELECT * FROM mau' e = @db[:miu] e.must_be_kind_of(Sequel::Dataset) e.sql.must_equal 'SELECT * FROM miu' end it "should provide a #from dataset that supports virtual row blocks" do @db.from{a(b)}.sql.must_equal 'SELECT * FROM a(b)' end it "should provide a #select dataset" do d = @db.select(:a, :b, :c).from(:mau) d.must_be_kind_of(Sequel::Dataset) d.sql.must_equal 'SELECT a, b, c FROM mau' end it "should allow #select to take a block" do d = @db.select(:a, :b){c}.from(:mau) d.must_be_kind_of(Sequel::Dataset) d.sql.must_equal 'SELECT a, b, c FROM mau' end end describe "Database#dataset_class" do before do @db = Sequel::Database.new @dsc = Class.new(Sequel::Dataset) end it "should have setter set the class to use to create datasets" do @db.dataset_class = @dsc ds = @db.dataset ds.must_be_kind_of(@dsc) ds.opts.must_equal({}) ds.db.must_be_same_as(@db) end it "should have getter return the class to use to create datasets" do [@db.dataset_class, @db.dataset_class.superclass].must_include(Sequel::Dataset) @db.dataset_class = @dsc [@db.dataset_class, @db.dataset_class.superclass].must_include(@dsc) end end describe "Database#extend_datasets" do before do @db = Sequel::Database.new @m = Module.new{def foo() [3] end} @m2 = Module.new{def foo() [4] + super end} @db.extend_datasets(@m) end it "should clear a cached dataset" do @db = Sequel::Database.new @db.literal(1).must_equal '1' @db.extend_datasets{def literal(v) '2' end} @db.literal(1).must_equal '2' end it "should change the dataset class to a subclass the first time it is called" do @db.dataset_class.superclass.must_equal Sequel::Dataset end it "should not create a subclass of the dataset class if called more than once" do @db.extend_datasets(@m2) @db.dataset_class.superclass.must_equal Sequel::Dataset end it "should make the dataset class include the module" do @db.dataset_class.ancestors.must_include(@m) @db.dataset_class.ancestors.wont_include(@m2) @db.extend_datasets(@m2) @db.dataset_class.ancestors.must_include(@m) @db.dataset_class.ancestors.must_include(@m2) end it "should have datasets respond to the module's methods" do @db.dataset.foo.must_equal [3] 
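# A second extend_datasets call layers @m2 over @m, so @m2#foo's super resolves to @m#foo.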
@db.extend_datasets(@m2) @db.dataset.foo.must_equal [4, 3] end it "should take a block and create a module from it to use" do @db.dataset.foo.must_equal [3] @db.extend_datasets{def foo() [5] + super end} @db.dataset.foo.must_equal [5, 3] end it "should raise an error if both a module and a block are provided" do proc{@db.extend_datasets(@m2){def foo() [5] + super end}}.must_raise(Sequel::Error) end it "should be able to override methods defined in the original Dataset class" do @db.extend_datasets do def select(*a, &block) super.order(*a, &block) end def input_identifier(v) v.to_s end end @db[:t].with_quote_identifiers(false).select(:a, :b).sql.must_equal 'SELECT a, b FROM t ORDER BY a, b' end it "should reapply settings if dataset_class is changed" do c = Class.new(Sequel::Dataset) @db.dataset_class = c @db.dataset_class.superclass.must_equal c @db.dataset_class.ancestors.must_include(@m) @db.dataset.foo.must_equal [3] end end describe "Database#extend_datasets custom methods" do before do @db = Sequel.mock end def ds @db[:items] end it "should have dataset_module support a where method" do @db.extend_datasets{where :released, :released} ds.released.sql.must_equal 'SELECT * FROM items WHERE released' ds.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE (foo AND released)' end it "should have dataset_module support a having method" do @db.extend_datasets{having(:released){released}} ds.released.sql.must_equal 'SELECT * FROM items HAVING released' ds.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE foo HAVING released' end it "should have dataset_module support an exclude method" do @db.extend_datasets{exclude :released, :released} ds.released.sql.must_equal 'SELECT * FROM items WHERE NOT released' ds.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE (foo AND NOT released)' end it "should have dataset_module support an exclude_having method" do @db.extend_datasets{exclude_having :released, :released} ds.released.sql.must_equal 'SELECT * FROM items HAVING NOT released' ds.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE foo HAVING NOT released' end it "should have dataset_module support a distinct method" do @db.extend_datasets{def supports_distinct_on?; true end; distinct :foo, :baz} ds.foo.sql.must_equal 'SELECT DISTINCT ON (baz) * FROM items' ds.where(:bar).foo.sql.must_equal 'SELECT DISTINCT ON (baz) * FROM items WHERE bar' end it "should have dataset_module support a grep method" do @db.extend_datasets{grep :foo, :baz, 'quux%'} ds.foo.sql.must_equal 'SELECT * FROM items WHERE ((baz LIKE \'quux%\' ESCAPE \'\\\'))' ds.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE (bar AND ((baz LIKE \'quux%\' ESCAPE \'\\\')))' end it "should have dataset_module support a group method" do @db.extend_datasets{group :foo, :baz} ds.foo.sql.must_equal 'SELECT * FROM items GROUP BY baz' ds.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar GROUP BY baz' end it "should have dataset_module support a group_and_count method" do @db.extend_datasets{group_and_count :foo, :baz} ds.foo.sql.must_equal 'SELECT baz, count(*) AS count FROM items GROUP BY baz' ds.where(:bar).foo.sql.must_equal 'SELECT baz, count(*) AS count FROM items WHERE bar GROUP BY baz' end it "should have dataset_module support a group_append method" do @db.extend_datasets{group_append :foo, :baz} ds.foo.sql.must_equal 'SELECT * FROM items GROUP BY baz' ds.group(:bar).foo.sql.must_equal 'SELECT * FROM items GROUP BY bar, baz' end it "should have dataset_module support a limit 
method" do @db.extend_datasets{limit :foo, 1} ds.foo.sql.must_equal 'SELECT * FROM items LIMIT 1' ds.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar LIMIT 1' end it "should have dataset_module support a offset method" do @db.extend_datasets{offset :foo, 1} ds.foo.sql.must_equal 'SELECT * FROM items OFFSET 1' ds.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar OFFSET 1' end it "should have dataset_module support a order method" do @db.extend_datasets{order(:foo){:baz}} ds.foo.sql.must_equal 'SELECT * FROM items ORDER BY baz' ds.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar ORDER BY baz' end it "should have dataset_module support a order_append method" do @db.extend_datasets{order_append :foo, :baz} ds.foo.sql.must_equal 'SELECT * FROM items ORDER BY baz' ds.order(:bar).foo.sql.must_equal 'SELECT * FROM items ORDER BY bar, baz' end it "should have dataset_module support a order_prepend method" do @db.extend_datasets{order_prepend :foo, :baz} ds.foo.sql.must_equal 'SELECT * FROM items ORDER BY baz' ds.order(:bar).foo.sql.must_equal 'SELECT * FROM items ORDER BY baz, bar' end it "should have dataset_module support a reverse method" do @db.extend_datasets{reverse(:foo){:baz}} ds.foo.sql.must_equal 'SELECT * FROM items ORDER BY baz DESC' ds.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar ORDER BY baz DESC' end it "should have dataset_module support a select method" do @db.extend_datasets{select :foo, :baz} ds.foo.sql.must_equal 'SELECT baz FROM items' ds.where(:bar).foo.sql.must_equal 'SELECT baz FROM items WHERE bar' end it "should have dataset_module support a select_all method" do @db.extend_datasets{select_all :foo, :baz} ds.foo.sql.must_equal 'SELECT baz.* FROM items' ds.where(:bar).foo.sql.must_equal 'SELECT baz.* FROM items WHERE bar' end it "should have dataset_module support a select_append method" do @db.extend_datasets{select_append :foo, :baz} ds.foo.sql.must_equal 'SELECT *, baz FROM items' ds.where(:bar).foo.sql.must_equal 'SELECT *, baz FROM items WHERE bar' end it "should have dataset_module support a select_group method" do @db.extend_datasets{select_group :foo, :baz} ds.foo.sql.must_equal 'SELECT baz FROM items GROUP BY baz' ds.where(:bar).foo.sql.must_equal 'SELECT baz FROM items WHERE bar GROUP BY baz' end it "should have dataset_module support a server method" do @db.extend_datasets{server :foo, :baz} ds.foo.opts[:server].must_equal :baz ds.where(:bar).foo.opts[:server].must_equal :baz end end describe "Database#disconnect_connection" do it "should call close on the connection" do o = Object.new def o.close() @closed=true end Sequel::Database.new.disconnect_connection(o) o.instance_variable_get(:@closed).must_equal true end end describe "Database#valid_connection?" 
do it "should issue a query to validate the connection" do db = Sequel.mock db.synchronize{|c| db.valid_connection?(c)}.must_equal true db.synchronize do |c| def c.execute(*) raise Sequel::DatabaseError, "error" end db.valid_connection?(c) end.must_equal false end end describe "Database#run" do before do @db = Sequel.mock(:servers=>{:s1=>{}}) end it "should execute the code on the database" do @db.run("DELETE FROM items") @db.sqls.must_equal ["DELETE FROM items"] end it "should handle placeholder literal strings" do @db.run(Sequel.lit("DELETE FROM ?", :items)) @db.sqls.must_equal ["DELETE FROM items"] end it "should return nil" do @db.run("DELETE FROM items").must_be_nil end it "should accept options passed to execute_ddl" do @db.run("DELETE FROM items", :server=>:s1) @db.sqls.must_equal ["DELETE FROM items -- s1"] end end describe "Database#<<" do before do @db = Sequel.mock end it "should execute the code on the database" do @db << "DELETE FROM items" @db.sqls.must_equal ["DELETE FROM items"] end it "should handle placeholder literal strings" do @db << Sequel.lit("DELETE FROM ?", :items) @db.sqls.must_equal ["DELETE FROM items"] end it "should be chainable" do @db << "DELETE FROM items" << "DELETE FROM items2" @db.sqls.must_equal ["DELETE FROM items", "DELETE FROM items2"] end end describe "Database#synchronize" do before do @db = Sequel::Database.new(:max_connections => 1) @db.define_singleton_method(:connect){|c| 12345} end it "should wrap the supplied block in pool.hold" do q, q1, q2 = Queue.new, Queue.new, Queue.new c1, c2 = nil t1 = Thread.new{@db.synchronize{|c| c1 = c; q.push nil; q1.pop}; q.push nil} q.pop c1.must_equal 12345 t2 = Thread.new{@db.synchronize{|c| c2 = c; q2.push nil}} @db.pool.available_connections.must_be :empty? c2.must_be_nil q1.push nil q.pop q2.pop c2.must_equal 12345 t1.join t2.join end end describe "Database#test_connection" do before do @db = Sequel::Database.new pr = proc{@test = rand(100)} @db.define_singleton_method(:connect){|c| pr.call} end it "should attempt to get a connection" do @db.test_connection @test.wont_equal nil end it "should return true if successful" do @db.test_connection.must_equal true end it "should raise an error if the attempting to connect raises an error" do @db.singleton_class.send(:alias_method, :connect, :connect) def @db.connect(*) raise Sequel::Error end proc{@db.test_connection}.must_raise(Sequel::DatabaseConnectionError) end end describe "Database#table_exists?" 
do it "should test existence by selecting a row from the table's dataset" do db = Sequel.mock(:fetch=>[Sequel::Error, [], [{:a=>1}]]) db.table_exists?(:a).must_equal false db.sqls.must_equal ["SELECT NULL AS nil FROM a LIMIT 1"] db.table_exists?(:b).must_equal true db.table_exists?(:c).must_equal true end it "should work for a schema qualified table" do db = Sequel.mock(:fetch=>[[{:a=>1}]]) db.table_exists?(Sequel[:a][:b]).must_equal true db.sqls.must_equal ["SELECT NULL AS nil FROM a.b LIMIT 1"] end it "should use a savepoint if inside a transaction" do db = Sequel.mock(:fetch=>[Sequel::Error, [], [{:a=>1}]]) def db.supports_savepoints?; true end db.transaction do db.table_exists?(:a).must_equal false end db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "SELECT NULL AS nil FROM a LIMIT 1", "ROLLBACK TO SAVEPOINT autopoint_1", "COMMIT"] db.table_exists?(:b).must_equal true db.table_exists?(:c).must_equal true end end database_transaction_specs = Module.new do extend Minitest::Spec::DSL it "should wrap the supplied block with BEGIN + COMMIT statements" do @db.transaction{@db.execute 'DROP TABLE test;'} @db.sqls.must_equal ['BEGIN', 'DROP TABLE test;', 'COMMIT'] end it "should support transaction isolation levels" do @db.define_singleton_method(:supports_transaction_isolation_levels?){true} [:uncommitted, :committed, :repeatable, :serializable].each do |l| @db.transaction(:isolation=>l){@db.run "DROP TABLE #{l}"} end @db.sqls.must_equal ['BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED', 'DROP TABLE uncommitted', 'COMMIT', 'BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'DROP TABLE committed', 'COMMIT', 'BEGIN', 'SET TRANSACTION ISOLATION LEVEL REPEATABLE READ', 'DROP TABLE repeatable', 'COMMIT', 'BEGIN', 'SET TRANSACTION ISOLATION LEVEL SERIALIZABLE', 'DROP TABLE serializable', 'COMMIT'] end it "should allow specifying a default transaction isolation level" do @db.define_singleton_method(:supports_transaction_isolation_levels?){true} [:uncommitted, :committed, :repeatable, :serializable].each do |l| @db.transaction_isolation_level = l @db.transaction{@db.run "DROP TABLE #{l}"} end @db.sqls.must_equal ['BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ UNCOMMITTED', 'DROP TABLE uncommitted', 'COMMIT', 'BEGIN', 'SET TRANSACTION ISOLATION LEVEL READ COMMITTED', 'DROP TABLE committed', 'COMMIT', 'BEGIN', 'SET TRANSACTION ISOLATION LEVEL REPEATABLE READ', 'DROP TABLE repeatable', 'COMMIT', 'BEGIN', 'SET TRANSACTION ISOLATION LEVEL SERIALIZABLE', 'DROP TABLE serializable', 'COMMIT'] end it "should support :retry_on option for automatically retrying transactions" do a = [] @db.transaction(:retry_on=>Sequel::DatabaseDisconnectError){a << 1; raise Sequel::DatabaseDisconnectError if a.length < 2} @db.sqls.must_equal ['BEGIN', 'ROLLBACK', 'BEGIN', 'COMMIT'] a.must_equal [1, 1] a = [] @db.transaction(:retry_on=>[Sequel::ConstraintViolation, Sequel::SerializationFailure]) do a << 1 raise Sequel::SerializationFailure if a.length == 1 raise Sequel::ConstraintViolation if a.length == 2 end @db.sqls.must_equal ['BEGIN', 'ROLLBACK', 'BEGIN', 'ROLLBACK', 'BEGIN', 'COMMIT'] a.must_equal [1, 1, 1] end it "should support :num_retries option for limiting the number of retry times" do a = [] lambda do @db.transaction(:num_retries=>1, :retry_on=>[Sequel::ConstraintViolation, Sequel::SerializationFailure]) do a << 1 raise Sequel::SerializationFailure if a.length == 1 raise Sequel::ConstraintViolation if a.length == 2 end end.must_raise(Sequel::ConstraintViolation) @db.sqls.must_equal ['BEGIN', 
'ROLLBACK', 'BEGIN', 'ROLLBACK'] a.must_equal [1, 1] end it "should support :num_retries=>nil option to retry indefinitely" do a = [] lambda do @db.transaction(:num_retries=>nil, :retry_on=>[Sequel::ConstraintViolation]) do a << 1 raise Sequel::SerializationFailure if a.length >= 100 raise Sequel::ConstraintViolation end end.must_raise(Sequel::SerializationFailure) @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] * 100 a.must_equal [1] * 100 end it "should support :before_retry option for invoking callback before retrying" do a, errs, calls = [], [], [] retryer = proc{|n, err| calls << n; errs << err } @db.transaction(:retry_on=>Sequel::DatabaseDisconnectError, :before_retry => retryer) do a << 1; raise Sequel::DatabaseDisconnectError if a.length < 3 end @db.sqls.must_equal ['BEGIN', 'ROLLBACK', 'BEGIN', 'ROLLBACK', 'BEGIN', 'COMMIT'] a.must_equal [1, 1, 1] errs.count.must_equal 2 errs.each { |e| e.class.must_equal Sequel::DatabaseDisconnectError } calls.must_equal [1, 2] end it "should support :before_retry option for invoking callback before retrying when doing it indefinitely" do a, errs, calls = [], [], [] retryer = proc{|n, err| calls << n; errs << err } @db.transaction(:num_retries => nil, :retry_on=>Sequel::DatabaseDisconnectError, :before_retry => retryer) do a << 1; raise Sequel::DatabaseDisconnectError if a.length < 3 end @db.sqls.must_equal ['BEGIN', 'ROLLBACK', 'BEGIN', 'ROLLBACK', 'BEGIN', 'COMMIT'] a.must_equal [1, 1, 1] errs.count.must_equal 2 errs.each { |e| e.class.must_equal Sequel::DatabaseDisconnectError } calls.must_equal [1, 2] end it "should raise an error if attempting to use :retry_on inside another transaction" do proc{@db.transaction{@db.transaction(:retry_on=>Sequel::ConstraintViolation){}}}.must_raise(Sequel::Error) @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] end it "should handle returning inside of the block by committing" do def @db.ret_commit transaction do execute 'DROP TABLE test;' return end end @db.ret_commit @db.sqls.must_equal ['BEGIN', 'DROP TABLE test;', 'COMMIT'] end it "should issue ROLLBACK if an exception is raised, and re-raise" do @db.transaction {@db.execute 'DROP TABLE test'; raise RuntimeError} rescue nil @db.sqls.must_equal ['BEGIN', 'DROP TABLE test', 'ROLLBACK'] proc {@db.transaction {raise RuntimeError}}.must_raise(RuntimeError) end it "should handle errors when sending BEGIN" do ec = Class.new(StandardError) @db.define_singleton_method(:database_error_classes){[ec]} @db.define_singleton_method(:log_connection_execute){|c, sql| sql =~ /BEGIN/ ? raise(ec, 'bad') : super(c, sql)} begin @db.transaction{@db.execute 'DROP TABLE test;'} rescue Sequel::DatabaseError => e end e.wont_equal nil e.wrapped_exception.must_be_kind_of(ec) @db.sqls.must_equal ['ROLLBACK'] end it "should handle errors when sending COMMIT" do ec = Class.new(StandardError) @db.define_singleton_method(:database_error_classes){[ec]} @db.define_singleton_method(:log_connection_execute){|c, sql| sql =~ /COMMIT/ ? raise(ec, 'bad') : super(c, sql)} begin @db.transaction{@db.execute 'DROP TABLE test;'} rescue Sequel::DatabaseError => e end e.wont_equal nil e.wrapped_exception.must_be_kind_of(ec) @db.sqls.must_equal ['BEGIN', 'DROP TABLE test;', 'ROLLBACK'] end it "should raise original exception if there is an exception raised when rolling back" do ec = Class.new(StandardError) @db.define_singleton_method(:database_error_classes){[ec]} @db.define_singleton_method(:log_connection_execute){|c, sql| sql =~ /ROLLBACK/ ? 
raise(ec, 'bad') : super(c, sql)} begin @db.transaction{raise ArgumentError, 'asdf'} rescue => e end e.must_be_kind_of(ArgumentError) @db.sqls.must_equal ['BEGIN'] end it "should raise original exception if there is an exception raised when rolling back when using :rollback=>:always" do ec = Class.new(StandardError) @db.define_singleton_method(:database_error_classes){[ec]} @db.define_singleton_method(:log_connection_execute){|c, sql| sql =~ /ROLLBACK/ ? raise(ec, 'bad') : super(c, sql)} begin @db.transaction(:rollback=>:always){} rescue => e end e.must_be_kind_of(ec) @db.sqls.must_equal ['BEGIN'] end it "should issue ROLLBACK if Sequel::Rollback is called in the transaction" do @db.transaction do @db.drop_table(:a) raise Sequel::Rollback @db.drop_table(:b) end @db.sqls.must_equal ['BEGIN', 'DROP TABLE a', 'ROLLBACK'] end it "should have in_transaction? return true if inside a transaction" do c = nil @db.transaction{c = @db.in_transaction?} c.must_equal true end it "should have in_transaction? handle sharding correctly" do c = [] @db.transaction(:server=>:test){c << @db.in_transaction?} @db.transaction(:server=>:test){c << @db.in_transaction?(:server=>:test)} c.must_equal [false, true] end it "should have in_transaction? return false if not in a transaction" do @db.in_transaction?.must_equal false end it "should have rollback_checker return a proc which returns whether the transaction was rolled back" do proc{@db.rollback_checker}.must_raise Sequel::Error proc{@db.transaction(:server=>:test){@db.rollback_checker}}.must_raise Sequel::Error rbc = nil @db.transaction do rbc = @db.rollback_checker rbc.call.must_be_nil end rbc.call.must_equal false @db.transaction(:rollback=>:always) do rbc = @db.rollback_checker rbc.call.must_be_nil end rbc.call.must_equal true proc do @db.transaction do rbc = @db.rollback_checker raise end end.must_raise RuntimeError rbc.call.must_equal true @db.transaction(:server=>:test){rbc = @db.rollback_checker(:server=>:test)} rbc.call.must_equal false end it "should return nil if Sequel::Rollback is called in the transaction" do @db.transaction{raise Sequel::Rollback}.must_be_nil end it "should reraise Sequel::Rollback errors when the :rollback=>:reraise option is given" do proc {@db.transaction(:rollback=>:reraise){raise Sequel::Rollback}}.must_raise(Sequel::Rollback) @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] proc {@db.transaction(:rollback=>:reraise){raise ArgumentError}}.must_raise(ArgumentError) @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] @db.transaction(:rollback=>:reraise){1}.must_equal 1 @db.sqls.must_equal ['BEGIN', 'COMMIT'] end it "should always rollback if :rollback=>:always option is given" do proc {@db.transaction(:rollback=>:always){raise ArgumentError}}.must_raise(ArgumentError) @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] @db.transaction(:rollback=>:always){raise Sequel::Rollback}.must_be_nil @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] @db.transaction(:rollback=>:always){1}.must_equal 1 @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] catch(:foo) do @db.transaction(:rollback=>:always){throw :foo} end @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] end it "should raise database errors when committing a transaction as Sequel::DatabaseError" do @db.define_singleton_method(:commit_transaction){raise ArgumentError} lambda{@db.transaction{}}.must_raise(ArgumentError) @db.define_singleton_method(:database_error_classes){[ArgumentError]} lambda{@db.transaction{}}.must_raise(Sequel::DatabaseError) end it "should be re-entrant" do q, q1 = Queue.new, Queue.new cc = nil t = 
Thread.new do @db.transaction {@db.transaction {@db.transaction {|c| cc = c q.pop q1.push nil q.pop }}} end q.push nil q1.pop cc.must_be_kind_of(Sequel::Mock::Connection) tr = @db.instance_variable_get(:@transactions) tr.keys.must_equal [cc] q.push nil t.join tr.must_be :empty? end it "should correctly handle nested transaction use with separate shards" do @db.transaction do |c1| @db.transaction(:server=>:test) do |c2| c1.wont_equal c2 @db.execute 'DROP TABLE test;' end end @db.sqls.must_equal ['BEGIN', 'BEGIN -- test', 'DROP TABLE test;', 'COMMIT -- test', 'COMMIT'] end if (!defined?(RUBY_ENGINE) or RUBY_ENGINE == 'ruby') and !RUBY_VERSION.start_with?('1.9') it "should handle Thread#kill for transactions inside threads" do q = Queue.new q1 = Queue.new t = Thread.new do @db.transaction do @db.execute 'DROP TABLE test' q1.push nil q.pop @db.execute 'DROP TABLE test2' end end q1.pop t.kill t.join @db.sqls.must_equal ['BEGIN', 'DROP TABLE test', 'ROLLBACK'] end end it "should raise an Error if after_commit or after_rollback is called without a block" do proc{@db.after_commit}.must_raise(Sequel::Error) proc{@db.after_rollback}.must_raise(Sequel::Error) end it "should have after_commit and after_rollback respect :server option" do @db.transaction(:server=>:test){@db.after_commit(:server=>:test){@db.execute('foo', :server=>:test)}} @db.sqls.must_equal ['BEGIN -- test', 'COMMIT -- test', 'foo -- test'] @db.transaction(:server=>:test){@db.after_rollback(:server=>:test){@db.execute('foo', :server=>:test)}; raise Sequel::Rollback} @db.sqls.must_equal ['BEGIN -- test', 'ROLLBACK -- test', 'foo -- test'] end it "should execute after_commit outside transactions" do @db.after_commit{@db.execute('foo')} @db.sqls.must_equal ['foo'] end it "should ignore after_rollback outside transactions" do @db.after_rollback{@db.execute('foo')} @db.sqls.must_equal [] end it "should support after_commit inside transactions" do @db.transaction{@db.after_commit{@db.execute('foo')}} @db.sqls.must_equal ['BEGIN', 'COMMIT', 'foo'] end it "should support after_rollback inside transactions" do @db.transaction{@db.after_rollback{@db.execute('foo')}} @db.sqls.must_equal ['BEGIN', 'COMMIT'] end it "should have transaction inside after_commit work correctly" do @db.transaction{@db.after_commit{@db.transaction{@db.execute('foo')}}} @db.sqls.must_equal ['BEGIN', 'COMMIT', 'BEGIN', 'foo', 'COMMIT'] end it "should have transaction inside after_rollback work correctly" do @db.transaction(:rollback=>:always){@db.after_rollback{@db.transaction{@db.execute('foo')}}} @db.sqls.must_equal ['BEGIN', 'ROLLBACK', 'BEGIN', 'foo', 'COMMIT'] end it "should not call after_commit if the transaction rolls back" do @db.transaction{@db.after_commit{@db.execute('foo')}; raise Sequel::Rollback} @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] end it "should call after_rollback if the transaction rolls back" do @db.transaction{@db.after_rollback{@db.execute('foo')}; raise Sequel::Rollback} @db.sqls.must_equal ['BEGIN', 'ROLLBACK', 'foo'] end it "should call multiple after_commit blocks in order if called inside transactions" do @db.transaction{@db.after_commit{@db.execute('foo')}; @db.after_commit{@db.execute('bar')}} @db.sqls.must_equal ['BEGIN', 'COMMIT', 'foo', 'bar'] end it "should call multiple after_rollback blocks in order if called inside transactions" do @db.transaction{@db.after_rollback{@db.execute('foo')}; @db.after_rollback{@db.execute('bar')}; raise Sequel::Rollback} @db.sqls.must_equal ['BEGIN', 'ROLLBACK', 'foo', 'bar'] end it "should support 
after_commit inside nested transactions" do @db.transaction{@db.transaction{@db.after_commit{@db.execute('foo')}}} @db.sqls.must_equal ['BEGIN', 'COMMIT', 'foo'] end it "should support after_rollback inside nested transactions" do @db.transaction{@db.transaction{@db.after_rollback{@db.execute('foo')}}; raise Sequel::Rollback} @db.sqls.must_equal ['BEGIN', 'ROLLBACK', 'foo'] end it "should raise an error if you attempt to use after_commit inside a prepared transaction" do @db.define_singleton_method(:supports_prepared_transactions?){true} proc{@db.transaction(:prepare=>'XYZ'){@db.after_commit{@db.execute('foo')}}}.must_raise(Sequel::Error) @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] end it "should raise an error if you attempt to use after_rollback inside a prepared transaction" do @db.define_singleton_method(:supports_prepared_transactions?){true} proc{@db.transaction(:prepare=>'XYZ'){@db.after_rollback{@db.execute('foo')}}}.must_raise(Sequel::Error) @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] end it "should have rollback_on_exit cause the transaction to rollback on exit" do @db.transaction{@db.rollback_on_exit}.must_be_nil @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] catch(:foo){@db.transaction{@db.rollback_on_exit; throw :foo}} @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] lambda{@db.transaction{@db.rollback_on_exit; return true}}.call @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] end it "should have rollback_on_exit with :cancel option will cause the transaction to commit on exit" do @db.transaction{@db.rollback_on_exit(:cancel=>true)}.must_be_nil @db.sqls.must_equal ['BEGIN', 'COMMIT'] @db.transaction{@db.rollback_on_exit; @db.rollback_on_exit(:cancel=>true)}.must_be_nil @db.sqls.must_equal ['BEGIN', 'COMMIT'] end it "should have rollback_on_exit raise error outside a transaction" do proc{@db.rollback_on_exit}.must_raise Sequel::Error @db.sqls.must_equal [] end end describe "Database#transaction with savepoint support" do before do @db = Sequel.mock(:servers=>{:test=>{}}) end include database_transaction_specs it "should support :retry_on option for automatically retrying transactions when using :savepoint option" do a = [] @db.transaction do @db.transaction(:retry_on=>Sequel::SerializationFailure, :savepoint=>true) do a << 1 raise Sequel::SerializationFailure if a.length == 1 end end @db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "ROLLBACK TO SAVEPOINT autopoint_1", "SAVEPOINT autopoint_1", "RELEASE SAVEPOINT autopoint_1", "COMMIT"] a.must_equal [1, 1] end it "should automatically use a savepoint if :rollback=>:always given inside a transaction" do @db.transaction do @db.transaction(:rollback=>:always) do @db.get(1) end end @db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "SELECT 1 AS v LIMIT 1", "ROLLBACK TO SAVEPOINT autopoint_1", "COMMIT"] end it "should support :retry_on option for automatically retrying transactions inside an :auto_savepoint transaction" do a = [] @db.transaction(:auto_savepoint=>true) do @db.transaction(:retry_on=>Sequel::SerializationFailure) do a << 1 raise Sequel::SerializationFailure if a.length == 1 end end @db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "ROLLBACK TO SAVEPOINT autopoint_1", "SAVEPOINT autopoint_1", "RELEASE SAVEPOINT autopoint_1", "COMMIT"] a.must_equal [1, 1] end it "should support after_commit inside savepoints" do @db.transaction do @db.after_commit{@db.execute('foo')} @db.transaction(:savepoint=>true){@db.after_commit{@db.execute('bar')}} @db.after_commit{@db.execute('baz')} end @db.sqls.must_equal ['BEGIN', 'SAVEPOINT 
autopoint_1', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT', 'foo', 'bar', 'baz'] end it "should support after_rollback inside savepoints" do @db.transaction(:rollback=>:always) do @db.after_rollback{@db.execute('foo')} @db.transaction(:savepoint=>true){@db.after_rollback{@db.execute('bar')}} @db.after_rollback{@db.execute('baz')} end @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'RELEASE SAVEPOINT autopoint_1', 'ROLLBACK', 'foo', 'bar', 'baz'] end it "should run after_commit if savepoint rolled back" do @db.transaction do @db.after_commit{@db.execute('foo')} @db.transaction(:savepoint=>true, :rollback=>:always){@db.after_commit{@db.execute('bar')}} end @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'ROLLBACK TO SAVEPOINT autopoint_1', 'COMMIT', 'foo', 'bar'] end it "should not run after_commit if savepoint rolled back and :savepoint option used" do @db.transaction do @db.after_commit{@db.execute('foo')} @db.transaction(:savepoint=>true, :rollback=>:always){@db.after_commit(:savepoint=>true){@db.execute('bar')}} end @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'ROLLBACK TO SAVEPOINT autopoint_1', 'COMMIT', 'foo'] end it "should not run after_commit if higher-level savepoint rolled back and :savepoint option used" do @db.transaction do @db.after_commit{@db.execute('foo')} @db.transaction(:savepoint=>true, :rollback=>:always){@db.transaction(:savepoint=>true){@db.after_commit(:savepoint=>true){@db.execute('bar')}}} end @db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "SAVEPOINT autopoint_2", "RELEASE SAVEPOINT autopoint_2", "ROLLBACK TO SAVEPOINT autopoint_1", "COMMIT", "foo"] end it "should not run after_commit if transaction rolled back and :savepoint option used" do @db.transaction(:rollback=>:always) do @db.after_commit{@db.execute('foo')} @db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.after_commit(:savepoint=>true){@db.execute('bar')}}} end @db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "SAVEPOINT autopoint_2", "RELEASE SAVEPOINT autopoint_2", "RELEASE SAVEPOINT autopoint_1", "ROLLBACK"] end it "should run after_rollback if savepoint rolls back" do @db.transaction(:rollback=>:always) do @db.after_rollback{@db.execute('foo')} @db.transaction(:savepoint=>true, :rollback=>:always){@db.after_rollback{@db.execute('bar')}} @db.after_rollback{@db.execute('baz')} end @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'ROLLBACK TO SAVEPOINT autopoint_1', 'ROLLBACK', 'foo', 'bar', 'baz'] end it "should run after_rollback when savepoint rolls back if :savepoint option used" do @db.transaction(:rollback=>:always) do @db.after_rollback{@db.execute('foo')} @db.transaction(:savepoint=>true, :rollback=>:always){@db.after_rollback(:savepoint=>true){@db.execute('bar')}} @db.after_rollback{@db.execute('baz')} end @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'ROLLBACK TO SAVEPOINT autopoint_1', 'bar', 'ROLLBACK', 'foo', 'baz'] end it "should run after_rollback if savepoint rolled back and :savepoint option used, even if transaction commits" do @db.transaction do @db.after_commit{@db.execute('foo')} @db.transaction(:savepoint=>true, :rollback=>:always){@db.after_rollback(:savepoint=>true){@db.execute('bar')}} end @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'ROLLBACK TO SAVEPOINT autopoint_1', 'bar', 'COMMIT', 'foo'] end it "should run after_rollback if higher-level savepoint rolled back and :savepoint option used" do @db.transaction do @db.transaction(:savepoint=>true, 
:rollback=>:always){@db.transaction(:savepoint=>true){@db.after_rollback(:savepoint=>true){@db.execute('bar')}}} end @db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "SAVEPOINT autopoint_2", "RELEASE SAVEPOINT autopoint_2", "ROLLBACK TO SAVEPOINT autopoint_1", "bar", "COMMIT"] end it "should run after_rollback if transaction rolled back and :savepoint option used" do @db.transaction(:rollback=>:always) do @db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.after_rollback(:savepoint=>true){@db.execute('bar')}}} end @db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "SAVEPOINT autopoint_2", "RELEASE SAVEPOINT autopoint_2", "RELEASE SAVEPOINT autopoint_1", "ROLLBACK", "bar"] end it "should raise an error if you attempt to use after_commit inside a savepoint in a prepared transaction" do @db.define_singleton_method(:supports_prepared_transactions?){true} proc{@db.transaction(:prepare=>'XYZ'){@db.transaction(:savepoint=>true){@db.after_commit{@db.execute('foo')}}}}.must_raise(Sequel::Error) @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','ROLLBACK TO SAVEPOINT autopoint_1', 'ROLLBACK'] end it "should raise an error if you attempt to use after_rollback inside a savepoint in a prepared transaction" do @db.define_singleton_method(:supports_prepared_transactions?){true} proc{@db.transaction(:prepare=>'XYZ'){@db.transaction(:savepoint=>true){@db.after_rollback{@db.execute('foo')}}}}.must_raise(Sequel::Error) @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','ROLLBACK TO SAVEPOINT autopoint_1', 'ROLLBACK'] end it "should create savepoint if inside a transaction when :savepoint=>:only is used" do @db.transaction{@db.transaction(:savepoint=>:only){}} @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','RELEASE SAVEPOINT autopoint_1', 'COMMIT'] end it "should not create transaction if not inside a transaction when :savepoint=>:only is used" do @db.transaction(:savepoint=>:only){} @db.sqls.must_equal [] end it "should have rollback_on_exit with :savepoint option inside transaction cause the transaction to rollback on exit" do @db.transaction{@db.rollback_on_exit(:savepoint=>true)}.must_be_nil @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] catch(:foo){@db.transaction{@db.rollback_on_exit(:savepoint=>true); throw :foo}} @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] lambda{@db.transaction{@db.rollback_on_exit(:savepoint=>true); return true}}.call @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] end it "should have rollback_on_exit with :savepoint option inside savepoint cause the savepoint to rollback on exit" do @db.transaction{@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true)}}.must_be_nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','ROLLBACK TO SAVEPOINT autopoint_1', 'COMMIT'] catch(:foo){@db.transaction{@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true); throw :foo}}} @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','ROLLBACK TO SAVEPOINT autopoint_1', 'COMMIT'] lambda{@db.transaction{@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true); return true}}}.call @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','ROLLBACK TO SAVEPOINT autopoint_1', 'COMMIT'] end it "should have rollback_on_exit with :savepoint option inside nested savepoint cause the current savepoint to rollback on exit" do @db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true)}}}.must_be_nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT 
autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] catch(:foo){@db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true); throw :foo}}}} @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] lambda{@db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true); return true}}}}.call @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] end it "should have rollback_on_exit with :savepoint=>1 option inside nested savepoint cause the current savepoint to rollback on exit" do @db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>1)}}}.must_be_nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] catch(:foo){@db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>1); throw :foo}}}} @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] lambda{@db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>1); return true}}}}.call @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] end it "should have rollback_on_exit with :savepoint=>2 option inside nested savepoint cause the current and next savepoint to rollback on exit" do @db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>2)}}}.must_be_nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'ROLLBACK TO SAVEPOINT autopoint_1', 'COMMIT'] catch(:foo){@db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>2); throw :foo}}}} @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'ROLLBACK TO SAVEPOINT autopoint_1', 'COMMIT'] lambda{@db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>2); return true}}}}.call @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'ROLLBACK TO SAVEPOINT autopoint_1', 'COMMIT'] end it "should have rollback_on_exit with :savepoint=>3 option inside nested savepoint cause the three enclosing savepoints/transaction to rollback on exit" do @db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>3)}}}.must_be_nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'ROLLBACK TO SAVEPOINT autopoint_1', 'ROLLBACK'] catch(:foo){@db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>3); throw :foo}}}} @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'ROLLBACK 
TO SAVEPOINT autopoint_1', 'ROLLBACK'] lambda{@db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>3); return true}}}}.call @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'ROLLBACK TO SAVEPOINT autopoint_1', 'ROLLBACK'] end it "should have rollback_on_exit with :savepoint and :cancel option will cause the transaction to commit on exit" do @db.transaction{@db.rollback_on_exit(:savepoint=>true, :cancel=>true)}.must_be_nil @db.sqls.must_equal ['BEGIN', 'COMMIT'] @db.transaction{@db.rollback_on_exit(:savepoint=>true); @db.rollback_on_exit(:savepoint=>true, :cancel=>true)}.must_be_nil @db.sqls.must_equal ['BEGIN', 'COMMIT'] end it "should have rollback_on_exit with :savepoint option called at different levels work correctly" do @db.transaction{@db.rollback_on_exit(:savepoint=>true); @db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true)}}.must_be_nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1','ROLLBACK TO SAVEPOINT autopoint_1', 'ROLLBACK'] @db.transaction{@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true); @db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true)}}}.must_be_nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'SAVEPOINT autopoint_2','ROLLBACK TO SAVEPOINT autopoint_2', 'ROLLBACK TO SAVEPOINT autopoint_1', 'COMMIT'] @db.transaction{@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true); @db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true, :cancel=>true)}}}.must_be_nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'SAVEPOINT autopoint_2','RELEASE SAVEPOINT autopoint_2', 'ROLLBACK TO SAVEPOINT autopoint_1', 'COMMIT'] @db.transaction{@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true); @db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>2, :cancel=>true)}}}.must_be_nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'SAVEPOINT autopoint_2','RELEASE SAVEPOINT autopoint_2', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] @db.transaction{@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true); @db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>3, :cancel=>true)}}}.must_be_nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'SAVEPOINT autopoint_2','RELEASE SAVEPOINT autopoint_2', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] @db.transaction{@db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>true); @db.transaction(:savepoint=>true){@db.rollback_on_exit(:savepoint=>4, :cancel=>true)}}}.must_be_nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'SAVEPOINT autopoint_2','RELEASE SAVEPOINT autopoint_2', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] end it "should have rollback_on_exit raise error when using invalid :savepoint option" do proc{@db.transaction{@db.rollback_on_exit(:savepoint=>Object.new)}}.must_raise Sequel::Error @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] proc{@db.transaction{@db.rollback_on_exit(:savepoint=>-1)}}.must_raise Sequel::Error @db.sqls.must_equal ['BEGIN', 'ROLLBACK'] end end describe "Database#transaction without savepoint support" do before do @db = Sequel.mock(:servers=>{:test=>{}}) @db.define_singleton_method(:supports_savepoints?){false} end it "should not create savepoint if inside a transaction when :savepoint=>:only is used" do @db.transaction{@db.transaction(:savepoint=>:only){}} @db.sqls.must_equal ['BEGIN', 
'COMMIT'] end it "should not automatically use a savepoint if :rollback=>:always given inside a transaction" do proc do @db.transaction do @db.transaction(:rollback=>:always) do @db.get(1) end end end.must_raise Sequel::Error @db.sqls.must_equal ["BEGIN", "ROLLBACK"] end include database_transaction_specs end describe "Sequel.transaction" do before do @sqls = [] @db1 = Sequel.mock(:append=>'1', :sqls=>@sqls) @db2 = Sequel.mock(:append=>'2', :sqls=>@sqls) @db3 = Sequel.mock(:append=>'3', :sqls=>@sqls) end it "should run the block inside transacitons on all three databases" do Sequel.transaction([@db1, @db2, @db3]){1}.must_equal 1 @sqls.must_equal ['BEGIN -- 1', 'BEGIN -- 2', 'BEGIN -- 3', 'COMMIT -- 3', 'COMMIT -- 2', 'COMMIT -- 1'] end it "should pass options to all the blocks" do Sequel.transaction([@db1, @db2, @db3], :rollback=>:always){1}.must_equal 1 @sqls.must_equal ['BEGIN -- 1', 'BEGIN -- 2', 'BEGIN -- 3', 'ROLLBACK -- 3', 'ROLLBACK -- 2', 'ROLLBACK -- 1'] end it "should handle Sequel::Rollback exceptions raised by the block to rollback on all databases" do Sequel.transaction([@db1, @db2, @db3]){raise Sequel::Rollback}.must_be_nil @sqls.must_equal ['BEGIN -- 1', 'BEGIN -- 2', 'BEGIN -- 3', 'ROLLBACK -- 3', 'ROLLBACK -- 2', 'ROLLBACK -- 1'] end it "should handle nested transactions" do Sequel.transaction([@db1, @db2, @db3]){Sequel.transaction([@db1, @db2, @db3]){1}}.must_equal 1 @sqls.must_equal ['BEGIN -- 1', 'BEGIN -- 2', 'BEGIN -- 3', 'COMMIT -- 3', 'COMMIT -- 2', 'COMMIT -- 1'] end it "should handle savepoints" do Sequel.transaction([@db1, @db2, @db3]){Sequel.transaction([@db1, @db2, @db3], :savepoint=>true){1}}.must_equal 1 @sqls.must_equal ['BEGIN -- 1', 'BEGIN -- 2', 'BEGIN -- 3', 'SAVEPOINT autopoint_1 -- 1', 'SAVEPOINT autopoint_1 -- 2', 'SAVEPOINT autopoint_1 -- 3', 'RELEASE SAVEPOINT autopoint_1 -- 3', 'RELEASE SAVEPOINT autopoint_1 -- 2', 'RELEASE SAVEPOINT autopoint_1 -- 1', 'COMMIT -- 3', 'COMMIT -- 2', 'COMMIT -- 1'] end end describe "Database#transaction with savepoints" do before do @db = Sequel.mock end it "should wrap the supplied block with BEGIN + COMMIT statements" do @db.transaction {@db.execute 'DROP TABLE test;'} @db.sqls.must_equal ['BEGIN', 'DROP TABLE test;', 'COMMIT'] end it "should use savepoints if given the :savepoint option" do @db.transaction{@db.transaction(:savepoint=>true){@db.execute 'DROP TABLE test;'}} @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'DROP TABLE test;', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] end it "should use savepoints if surrounding transaction uses :auto_savepoint option" do @db.transaction(:auto_savepoint=>true){@db.transaction{@db.execute 'DROP TABLE test;'}} @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'DROP TABLE test;', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] @db.transaction(:auto_savepoint=>true){@db.transaction{@db.transaction{@db.execute 'DROP TABLE test;'}}} @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'DROP TABLE test;', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] @db.transaction(:auto_savepoint=>true){@db.transaction(:auto_savepoint=>true){@db.transaction{@db.execute 'DROP TABLE test;'}}} @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'SAVEPOINT autopoint_2', 'DROP TABLE test;', 'RELEASE SAVEPOINT autopoint_2', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] @db.transaction{@db.transaction(:auto_savepoint=>true, :savepoint=>true){@db.transaction{@db.execute 'DROP TABLE test;'}}} @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'SAVEPOINT autopoint_2', 'DROP TABLE test;', 
'RELEASE SAVEPOINT autopoint_2', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] end it "should not use savepoints if surrounding transaction uses :auto_savepoint and current transaction uses :savepoint=>false option" do @db.transaction(:auto_savepoint=>true){@db.transaction(:savepoint=>false){@db.execute 'DROP TABLE test;'}} @db.sqls.must_equal ['BEGIN', 'DROP TABLE test;', 'COMMIT'] end it "should not use a savepoint if no transaction is in progress" do @db.transaction(:savepoint=>true){@db.execute 'DROP TABLE test;'} @db.sqls.must_equal ['BEGIN', 'DROP TABLE test;', 'COMMIT'] end it "should reuse the current transaction if no :savepoint option is given" do @db.transaction{@db.transaction{@db.execute 'DROP TABLE test;'}} @db.sqls.must_equal ['BEGIN', 'DROP TABLE test;', 'COMMIT'] end it "should handle returning inside of the block by committing" do def @db.ret_commit transaction do execute 'DROP TABLE test;' return end end @db.ret_commit @db.sqls.must_equal ['BEGIN', 'DROP TABLE test;', 'COMMIT'] end it "should handle returning inside of a savepoint by committing" do def @db.ret_commit transaction do transaction(:savepoint=>true) do execute 'DROP TABLE test;' return end end end @db.ret_commit @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'DROP TABLE test;', 'RELEASE SAVEPOINT autopoint_1', 'COMMIT'] end it "should issue ROLLBACK if an exception is raised, and re-raise" do @db.transaction {@db.execute 'DROP TABLE test'; raise RuntimeError} rescue nil @db.sqls.must_equal ['BEGIN', 'DROP TABLE test', 'ROLLBACK'] proc {@db.transaction {raise RuntimeError}}.must_raise(RuntimeError) end it "should issue ROLLBACK SAVEPOINT if an exception is raised inside a savepoint, and re-raise" do @db.transaction{@db.transaction(:savepoint=>true){@db.execute 'DROP TABLE test'; raise RuntimeError}} rescue nil @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'DROP TABLE test', 'ROLLBACK TO SAVEPOINT autopoint_1', 'ROLLBACK'] proc {@db.transaction {raise RuntimeError}}.must_raise(RuntimeError) end it "should issue ROLLBACK if Sequel::Rollback is raised in the transaction" do @db.transaction do @db.drop_table(:a) raise Sequel::Rollback @db.drop_table(:b) end @db.sqls.must_equal ['BEGIN', 'DROP TABLE a', 'ROLLBACK'] end it "should issue ROLLBACK SAVEPOINT if Sequel::Rollback is raised in a savepoint" do @db.transaction do @db.transaction(:savepoint=>true) do @db.drop_table(:a) raise Sequel::Rollback end @db.drop_table(:b) end @db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'DROP TABLE a', 'ROLLBACK TO SAVEPOINT autopoint_1', 'DROP TABLE b', 'COMMIT'] end it "should raise database errors when committing a transaction as Sequel::DatabaseError" do @db.define_singleton_method(:commit_transaction){raise ArgumentError} lambda{@db.transaction{}}.must_raise(ArgumentError) lambda{@db.transaction{@db.transaction(:savepoint=>true){}}}.must_raise(ArgumentError) @db.define_singleton_method(:database_error_classes){[ArgumentError]} lambda{@db.transaction{}}.must_raise(Sequel::DatabaseError) lambda{@db.transaction{@db.transaction(:savepoint=>true){}}}.must_raise(Sequel::DatabaseError) end end describe "A Database adapter with a scheme" do before do require_relative '../../lib/sequel/adapters/mock' @ccc = Class.new(Sequel::Mock::Database) @ccc.send(:set_adapter_scheme, :ccc) end it "should be registered in the ADAPTER_MAP" do Sequel::ADAPTER_MAP[:ccc].must_equal @ccc end it "should give the database_type as the adapter scheme by default" do @ccc.new.database_type.must_equal :ccc end it "should be instantiated
when its scheme is specified" do c = Sequel::Database.connect('ccc://localhost/db') c.must_be_kind_of(@ccc) c.opts[:host].must_equal 'localhost' c.opts[:database].must_equal 'db' end it "should be accessible through Sequel.connect" do c = Sequel.connect 'ccc://localhost/db' c.must_be_kind_of(@ccc) c.opts[:host].must_equal 'localhost' c.opts[:database].must_equal 'db' end it "should be accessible through Sequel.connect via a block" do x = nil y = nil z = nil returnValue = 'anything' p = proc do |c| c.must_be_kind_of(@ccc) c.opts[:host].must_equal 'localhost' c.opts[:database].must_equal 'db' z = y y = x x = c returnValue end @ccc.class_eval do self::DISCONNECTS = [] def disconnect self.class::DISCONNECTS << self end end Sequel::Database.connect('ccc://localhost/db', &p).must_equal returnValue @ccc::DISCONNECTS.must_equal [x] Sequel.connect('ccc://localhost/db', &p).must_equal returnValue @ccc::DISCONNECTS.must_equal [y, x] Sequel.send(:def_adapter_method, :ccc) Sequel.ccc('db', :host=>'localhost', &p).must_equal returnValue @ccc::DISCONNECTS.must_equal [z, y, x] Sequel.singleton_class.send(:remove_method, :ccc) end it "should be accessible through Sequel.<adapter>" do Sequel.send(:def_adapter_method, :ccc) # invalid parameters proc {Sequel.ccc('abc', 'def')}.must_raise(Sequel::Error) proc {Sequel.ccc(1)}.must_raise(Sequel::Error) c = Sequel.ccc('mydb') c.must_be_kind_of(@ccc) c.opts.values_at(:adapter, :database, :adapter_class).must_equal [:ccc, 'mydb', @ccc] c = Sequel.ccc('mydb', :host => 'localhost') c.must_be_kind_of(@ccc) c.opts.values_at(:adapter, :database, :host, :adapter_class).must_equal [:ccc, 'mydb', 'localhost', @ccc] c = Sequel.ccc c.must_be_kind_of(@ccc) c.opts.values_at(:adapter, :adapter_class).must_equal [:ccc, @ccc] c = Sequel.ccc(:database => 'mydb', :host => 'localhost') c.must_be_kind_of(@ccc) c.opts.values_at(:adapter, :database, :host, :adapter_class).must_equal [:ccc, 'mydb', 'localhost', @ccc] Sequel.singleton_class.send(:remove_method, :ccc) end it "should be accessible through Sequel.connect with options" do c = Sequel.connect(:adapter => :ccc, :database => 'mydb') c.must_be_kind_of(@ccc) c.opts[:adapter].must_equal :ccc end it "should be accessible through Sequel.connect with URL parameters" do c = Sequel.connect 'ccc:///db?host=/tmp&user=test' c.must_be_kind_of(@ccc) c.opts[:host].must_equal '/tmp' c.opts[:database].must_equal 'db' c.opts[:user].must_equal 'test' end it "should have URL parameters take precedence over fixed URL parts" do c = Sequel.connect 'ccc://localhost/db?host=a&database=b' c.must_be_kind_of(@ccc) c.opts[:host].must_equal 'a' c.opts[:database].must_equal 'b' end it "should have hash options take predence over URL parameters or parts" do c = Sequel.connect 'ccc://localhost/db?host=/tmp', :host=>'a', :database=>'b', :user=>'c' c.must_be_kind_of(@ccc) c.opts[:host].must_equal 'a' c.opts[:database].must_equal 'b' c.opts[:user].must_equal 'c' end it "should unescape values of URL parameters and parts" do c = Sequel.connect 'ccc:///d%5bb%5d?host=domain%5cinstance' c.must_be_kind_of(@ccc) c.opts[:database].must_equal 'd[b]' c.opts[:host].must_equal 'domain\\instance' end it "should test the connection if test parameter is truthy" do @ccc.send(:define_method, :connect){} proc{Sequel.connect 'ccc:///d%5bb%5d?test=t'}.must_raise(Sequel::DatabaseConnectionError) proc{Sequel.connect 'ccc:///d%5bb%5d?test=1'}.must_raise(Sequel::DatabaseConnectionError) proc{Sequel.connect 'ccc:///d%5bb%5d', :test=>true}.must_raise(Sequel::DatabaseConnectionError) 
proc{Sequel.connect 'ccc:///d%5bb%5d', :test=>'t'}.must_raise(Sequel::DatabaseConnectionError) end it "should not test the connection if test parameter is not truthy" do Sequel.connect 'ccc:///d%5bb%5d?test=f' Sequel.connect 'ccc:///d%5bb%5d?test=0' Sequel.connect 'ccc:///d%5bb%5d', :test=>false Sequel.connect 'ccc:///d%5bb%5d', :test=>'f' end end describe "Sequel::Database.connect" do it "should raise an Error if not given a String or Hash" do proc{Sequel::Database.connect(nil)}.must_raise(Sequel::Error) proc{Sequel::Database.connect(Object.new)}.must_raise(Sequel::Error) end end describe "An unknown database scheme" do it "should raise an error in Sequel::Database.connect" do proc {Sequel::Database.connect('ddd://localhost/db')}.must_raise(Sequel::AdapterNotFound) end it "should raise an error in Sequel.connect" do proc {Sequel.connect('ddd://localhost/db')}.must_raise(Sequel::AdapterNotFound) end end describe "A broken adapter (lib is there but the class is not)" do before do @adapter = "blah#{$$}" @fn = File.join(File.dirname(__FILE__), "../../lib/sequel/adapters/#{@adapter}.rb") File.open(@fn,'a'){} end after do File.delete(@fn) end it "should raise an error" do proc {Sequel.connect("#{@adapter}://blow")}.must_raise(Sequel::AdapterNotFound) end end describe "Sequel::Database.load_adapter" do it "should not raise an error if subadapter does not exist" do Sequel::Database.load_adapter(:foo, :subdir=>'bar').must_be_nil end end describe "A single threaded database" do after do Sequel.single_threaded = false end it "should use a SingleConnectionPool instead of a ConnectionPool" do db = Sequel::Database.new(:single_threaded => true){123} db.pool.must_be_kind_of(Sequel::SingleConnectionPool) end it "should be constructable using :single_threaded => true option" do db = Sequel::Database.new(:single_threaded => true){123} db.pool.must_be_kind_of(Sequel::SingleConnectionPool) end it "should be constructable using Sequel.single_threaded = true" do Sequel.single_threaded = true Sequel.single_threaded.must_equal true db = Sequel::Database.new{123} db.pool.must_be_kind_of(Sequel::SingleConnectionPool) end end describe "A single threaded database" do before do conn = 1234567 @db = Sequel::Database.new(:single_threaded => true) @db.define_singleton_method(:connect) do |c| conn += 1 end end it "should invoke connection_proc only once" do @db.pool.hold {|c| c.must_equal 1234568} @db.pool.hold {|c| c.must_equal 1234568} end it "should disconnect correctly" do def @db.disconnect_connection(c); @dc = c end def @db.dc; @dc end x = nil @db.pool.hold{|c| x = c} @db.pool.hold{|c| c.must_equal x} @db.disconnect @db.dc.must_equal x end it "should convert an Exception on connection into a DatabaseConnectionError" do db = Class.new(Sequel::Database){def connect(*) raise Exception end}.new(:single_threaded => true, :servers=>{}, :test=>false) proc {db.pool.hold {|c|}}.must_raise(Sequel::DatabaseConnectionError) end it "should not convert a DatabaseConnectionError on connection into a DatabaseConnectionError" do e = Sequel::DatabaseConnectionError.new db = Class.new(Sequel::Database){define_method(:connect){|*| raise e}}.new(:single_threaded => true, :servers=>{}, :test=>false) (db.pool.hold {|c|} rescue $!).must_be_same_as e end it "should raise a DatabaseConnectionError if the connection proc returns nil" do db = Class.new(Sequel::Database){def connect(*) end}.new(:single_threaded => true, :servers=>{}, :test=>false) proc {db.pool.hold {|c|}}.must_raise(Sequel::DatabaseConnectionError) end end describe "A
database" do after do Sequel.single_threaded = false end it "should have single_threaded? respond to true if in single threaded mode" do db = Sequel::Database.new(:single_threaded => true){1234} db.must_be :single_threaded? db = Sequel::Database.new(:max_options => 1) db.wont_be :single_threaded? db = Sequel::Database.new db.wont_be :single_threaded? Sequel.single_threaded = true db = Sequel::Database.new{123} db.must_be :single_threaded? db = Sequel::Database.new(:max_options => 4){123} db.must_be :single_threaded? end it "should be able to set loggers via the logger= and loggers= methods" do db = Sequel::Database.new s = "I'm a logger" db.logger = s db.loggers.must_equal [s] db.logger = nil db.loggers.must_equal [] db.loggers = [s] db.loggers.must_equal [s] db.loggers = [] db.loggers.must_equal [] t = "I'm also a logger" db.loggers = [s, t] db.loggers.must_equal [s,t] end end describe "Database#fetch" do before do @db = Sequel.mock(:fetch=>proc{|sql| {:sql => sql}}) end it "should create a dataset and invoke its fetch_rows method with the given sql" do sql = nil @db.fetch('select * from xyz') {|r| sql = r[:sql]} sql.must_equal 'select * from xyz' end it "should format the given sql with any additional arguments" do sql = nil @db.fetch('select * from xyz where x = ? and y = ?', 15, 'abc') {|r| sql = r[:sql]} sql.must_equal "select * from xyz where x = 15 and y = 'abc'" @db.fetch('select name from table where name = ? or id in ?', 'aman', [3,4,7]) {|r| sql = r[:sql]} sql.must_equal "select name from table where name = 'aman' or id in (3, 4, 7)" end it "should format the given sql with named arguments" do sql = nil @db.fetch('select * from xyz where x = :x and y = :y', :x=>15, :y=>'abc') {|r| sql = r[:sql]} sql.must_equal "select * from xyz where x = 15 and y = 'abc'" end it "should return the dataset if no block is given" do @db.fetch('select * from xyz').must_be_kind_of(Sequel::Dataset) @db.fetch('select a from b').map {|r| r[:sql]}.must_equal ['select a from b'] @db.fetch('select c from d').inject([]) {|m, r| m << r; m}.must_equal \ [{:sql => 'select c from d'}] end it "should return a dataset that always uses the given sql for SELECTs" do ds = @db.fetch('select * from xyz') ds.select_sql.must_equal 'select * from xyz' ds.sql.must_equal 'select * from xyz' ds = ds.where{price.sql_number < 100} ds.select_sql.must_equal 'select * from xyz' ds.sql.must_equal 'select * from xyz' end end describe "Database#[]" do before do @db = Sequel.mock end it "should return a dataset when symbols are given" do ds = @db[:items] ds.must_be_kind_of(Sequel::Dataset) ds.opts[:from].must_equal [:items] end it "should return a dataset when a string is given" do @db.fetch = proc{|sql| {:sql=>sql}} sql = nil @db['select * from xyz where x = ? 
and y = ?', 15, 'abc'].each {|r| sql = r[:sql]} sql.must_equal "select * from xyz where x = 15 and y = 'abc'" end end describe "Database#inspect" do it "should include the class name and the connection url" do Sequel.connect('mock://foo/bar').inspect.must_equal '#<Sequel::Mock::Database: "mock://foo/bar">' end it "should include the class name and the connection options if an options hash was given" do Sequel.connect(:adapter=>:mock).inspect.must_equal '#<Sequel::Mock::Database: {:adapter=>:mock}>' end it "should include the class name, uri, and connection options if uri and options hash was given" do Sequel.connect('mock://foo', :database=>'bar').inspect.must_equal '#<Sequel::Mock::Database: "mock://foo" {:database=>"bar"}>' end end describe "Database#get" do before do @db = Sequel.mock(:fetch=>{:a=>1}) end it "should use Dataset#get to get a single value" do @db.get(:a).must_equal 1 @db.sqls.must_equal ['SELECT a LIMIT 1'] @db.get(Sequel.function(:version).as(:version)) @db.sqls.must_equal ['SELECT version() AS version LIMIT 1'] end it "should accept a block" do @db.get{a} @db.sqls.must_equal ['SELECT a LIMIT 1'] @db.get{version(a).as(version)} @db.sqls.must_equal ['SELECT version(a) AS version LIMIT 1'] end it "should work when an alias cannot be determined" do @db.get(1).must_equal 1 @db.sqls.must_equal ['SELECT 1 AS v LIMIT 1'] end end describe "Database#call" do it "should call the prepared statement with the given name" do db = Sequel.mock(:fetch=>{:id => 1, :x => 1}) db[:items].prepare(:select, :select_all) db.call(:select_all).must_equal [{:id => 1, :x => 1}] db[:items].filter(:n=>:$n).prepare(:select, :select_n) db.call(:select_n, :n=>1).must_equal [{:id => 1, :x => 1}] db.sqls.must_equal ['SELECT * FROM items', 'SELECT * FROM items WHERE (n = 1)'] end end describe "Database#server_opts" do it "should return the general opts if no :servers option is used" do opts = {:host=>1, :database=>2} Sequel::Database.new(opts).send(:server_opts, :server1)[:host].must_equal 1 end it "should return the general opts if no entry for the server is present in the :servers option" do opts = {:host=>1, :database=>2, :servers=>{}} Sequel::Database.new(opts).send(:server_opts, :server1)[:host].must_equal 1 end it "should return the general opts merged with the specific opts if given as a hash" do opts = {:host=>1, :database=>2, :servers=>{:server1=>{:host=>3}}} Sequel::Database.new(opts).send(:server_opts, :server1)[:host].must_equal 3 end it "should return the general opts merged with the specific opts if given as a proc" do opts = {:host=>1, :database=>2, :servers=>{:server1=>proc{|db| {:host=>4}}}} Sequel::Database.new(opts).send(:server_opts, :server1)[:host].must_equal 4 end it "should raise an error if the specific opts is not a proc or hash" do opts = {:host=>1, :database=>2, :servers=>{:server1=>2}} proc{Sequel::Database.new(opts).send(:server_opts, :server1)}.must_raise(Sequel::Error) end it "should return the general opts merged with given opts if given opts is a Hash" do opts = {:host=>1, :database=>2} Sequel::Database.new(opts).send(:server_opts, :host=>2)[:host].must_equal 2 end end describe "Database#add_servers" do before do @db = Sequel.mock(:host=>1, :database=>2, :servers=>{:server1=>{:host=>3}}) end it "should add new servers to the connection pool" do @db.synchronize{|c| c.opts[:host].must_equal 1} @db.synchronize(:server1){|c| c.opts[:host].must_equal 3} @db.synchronize(:server2){|c| c.opts[:host].must_equal 1} @db.add_servers(:server2=>{:host=>6}) @db.synchronize{|c|
c.opts[:host].must_equal 1} @db.synchronize(:server1){|c| c.opts[:host].must_equal 3} @db.synchronize(:server2){|c| c.opts[:host].must_equal 6} @db.disconnect @db.synchronize{|c| c.opts[:host].must_equal 1} @db.synchronize(:server1){|c| c.opts[:host].must_equal 3} @db.synchronize(:server2){|c| c.opts[:host].must_equal 6} end it "should replace options for future connections to existing servers" do @db.synchronize{|c| c.opts[:host].must_equal 1} @db.synchronize(:server1){|c| c.opts[:host].must_equal 3} @db.synchronize(:server2){|c| c.opts[:host].must_equal 1} @db.add_servers(:default=>proc{{:host=>4}}, :server1=>{:host=>8}) @db.synchronize{|c| c.opts[:host].must_equal 1} @db.synchronize(:server1){|c| c.opts[:host].must_equal 3} @db.synchronize(:server2){|c| c.opts[:host].must_equal 1} @db.disconnect @db.synchronize{|c| c.opts[:host].must_equal 4} @db.synchronize(:server1){|c| c.opts[:host].must_equal 8} @db.synchronize(:server2){|c| c.opts[:host].must_equal 4} end it "should raise error for unsharded pool" do proc{Sequel.mock.add_servers(:server1=>{})}.must_raise Sequel::Error end end describe "Database#remove_servers" do before do @db = Sequel.mock(:host=>1, :database=>2, :servers=>{:server1=>{:host=>3}, :server2=>{:host=>4}}) end it "should remove servers from the connection pool" do @db.synchronize{|c| c.opts[:host].must_equal 1} @db.synchronize(:server1){|c| c.opts[:host].must_equal 3} @db.synchronize(:server2){|c| c.opts[:host].must_equal 4} @db.remove_servers(:server1, :server2) @db.synchronize{|c| c.opts[:host].must_equal 1} @db.synchronize(:server1){|c| c.opts[:host].must_equal 1} @db.synchronize(:server2){|c| c.opts[:host].must_equal 1} end it "should accept arrays of symbols" do @db.remove_servers([:server1, :server2]) @db.synchronize{|c| c.opts[:host].must_equal 1} @db.synchronize(:server1){|c| c.opts[:host].must_equal 1} @db.synchronize(:server2){|c| c.opts[:host].must_equal 1} end it "should allow removal while connections are still open" do @db.synchronize do |c1| c1.opts[:host].must_equal 1 @db.synchronize(:server1) do |c2| c2.opts[:host].must_equal 3 @db.synchronize(:server2) do |c3| c3.opts[:host].must_equal 4 @db.remove_servers(:server1, :server2) @db.synchronize(:server1) do |c4| c4.wont_equal c2 c4.must_equal c1 c4.opts[:host].must_equal 1 @db.synchronize(:server2) do |c5| c5.wont_equal c3 c5.must_equal c1 c5.opts[:host].must_equal 1 end end c3.opts[:host].must_equal 4 end c2.opts[:host].must_equal 3 end c1.opts[:host].must_equal 1 end end it "should raise error for unsharded pool" do proc{Sequel.mock.remove_servers(:server1)}.must_raise Sequel::Error end end describe "Database#raise_error" do before do @db = Sequel.mock end it "should reraise if the exception class is not in opts[:classes]" do e = Class.new(StandardError) proc{@db.send(:raise_error, e.new(''), :classes=>[])}.must_raise(e) end it "should convert the exception to a DatabaseError if the exception class is in opts[:classes]" do proc{@db.send(:raise_error, Interrupt.new(''), :classes=>[Interrupt])}.must_raise(Sequel::DatabaseError) end it "should convert the exception to a DatabaseError if opts[:classes] is not present" do proc{@db.send(:raise_error, Interrupt.new(''))}.must_raise(Sequel::DatabaseError) end it "should convert the exception to a DatabaseDisconnectError if opts[:disconnect] is true" do proc{@db.send(:raise_error, Interrupt.new(''), :disconnect=>true)}.must_raise(Sequel::DatabaseDisconnectError) end it "should convert the exception to an appropriate error if exception message matches regexp" do
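# Commentary (not part of the upstream spec): adapters customize error translation
# by overriding database_error_regexps with a map of message patterns to exception
# classes; raise_error raises the class whose pattern matches the wrapped
# exception's message, falling back to plain Sequel::DatabaseError. The singleton
# method below stubs that adapter hook directly.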
def @db.database_error_regexps {/foo/ => Sequel::DatabaseDisconnectError, /bar/ => Sequel::ConstraintViolation} end e = Class.new(StandardError) proc{@db.send(:raise_error, e.new('foo'))}.must_raise(Sequel::DatabaseDisconnectError) proc{@db.send(:raise_error, e.new('bar'))}.must_raise(Sequel::ConstraintViolation) end end describe "Database#typecast_value" do before do @db = Sequel::Database.new end it "should raise an InvalidValue when given an invalid value" do proc{@db.typecast_value(:integer, "13a")}.must_raise(Sequel::InvalidValue) proc{@db.typecast_value(:float, "4.e2")}.must_raise(Sequel::InvalidValue) proc{@db.typecast_value(:decimal, :invalid_value)}.must_raise(Sequel::InvalidValue) proc{@db.typecast_value(:date, Object.new)}.must_raise(Sequel::InvalidValue) proc{@db.typecast_value(:date, 'a')}.must_raise(Sequel::InvalidValue) proc{@db.typecast_value(:time, Date.new)}.must_raise(Sequel::InvalidValue) proc{@db.typecast_value(:datetime, 4)}.must_raise(Sequel::InvalidValue) end it "should raise an InvalidValue when given an invalid timezone value" do begin Sequel.default_timezone = :blah proc{@db.typecast_value(:datetime, [2019, 2, 3, 4, 5, 6])}.must_raise(Sequel::InvalidValue) Sequel.datetime_class = DateTime proc{@db.typecast_value(:datetime, [2019, 2, 3, 4, 5, 6])}.must_raise(Sequel::InvalidValue) ensure Sequel.default_timezone = nil Sequel.datetime_class = Time end end it "should handle integers with leading 0 as base 10" do @db.typecast_value(:integer, "013").must_equal 13 @db.typecast_value(:integer, "08").must_equal 8 @db.typecast_value(:integer, "000013").must_equal 13 @db.typecast_value(:integer, "000008").must_equal 8 end it "should handle integers with leading 0x as base 16" do @db.typecast_value(:integer, "0x013").must_equal 19 @db.typecast_value(:integer, "0x80").must_equal 128 @db.typecast_value(:integer, "-0x013").must_equal(-19) @db.typecast_value(:integer, "-0x80").must_equal(-128) end it "should typecast blobs as Sequel::SQL::Blob" do v = @db.typecast_value(:blob, "0x013") v.must_be_kind_of(Sequel::SQL::Blob) v.must_equal Sequel::SQL::Blob.new("0x013") @db.typecast_value(:blob, v).object_id.must_equal v.object_id end it "should typecast boolean values to true, false, or nil" do @db.typecast_value(:boolean, false).must_equal false @db.typecast_value(:boolean, 0).must_equal false @db.typecast_value(:boolean, "0").must_equal false @db.typecast_value(:boolean, 'f').must_equal false @db.typecast_value(:boolean, 'false').must_equal false @db.typecast_value(:boolean, true).must_equal true @db.typecast_value(:boolean, 1).must_equal true @db.typecast_value(:boolean, '1').must_equal true @db.typecast_value(:boolean, 't').must_equal true @db.typecast_value(:boolean, 'true').must_equal true @db.typecast_value(:boolean, '').must_be_nil end it "should typecast date values to Date" do @db.typecast_value(:date, Date.today).must_equal Date.today @db.typecast_value(:date, DateTime.now).must_equal Date.today @db.typecast_value(:date, Time.now).must_equal Date.today @db.typecast_value(:date, Date.today.to_s).must_equal Date.today @db.typecast_value(:date, :year=>Date.today.year, :month=>Date.today.month, :day=>Date.today.day).must_equal Date.today end it "should have Sequel.application_to_database_timestamp convert to Sequel.database_timezone" do begin t = Time.utc(2011, 1, 2, 3, 4, 5) # UTC Time t2 = Time.mktime(2011, 1, 2, 3, 4, 5) # Local Time t3 = Time.utc(2011, 1, 2, 3, 4, 5) - (t - t2) # Local Time in UTC Time t4 = Time.mktime(2011, 1, 2, 3, 4, 5) + (t - t2) # UTC Time in Local
Time Sequel.application_timezone = :utc Sequel.database_timezone = :local Sequel.application_to_database_timestamp(t).must_equal t4 Sequel.application_timezone = :local Sequel.database_timezone = :utc Sequel.application_to_database_timestamp(t2).must_equal t3 ensure Sequel.default_timezone = nil end end it "should have Database#to_application_timestamp convert values using the database's timezone" do begin t = Time.utc(2011, 1, 2, 3, 4, 5) # UTC Time t2 = Time.mktime(2011, 1, 2, 3, 4, 5) # Local Time t3 = Time.utc(2011, 1, 2, 3, 4, 5) - (t - t2) # Local Time in UTC Time t4 = Time.mktime(2011, 1, 2, 3, 4, 5) + (t - t2) # UTC Time in Local Time Sequel.default_timezone = :utc @db.to_application_timestamp('2011-01-02 03:04:05').must_equal t Sequel.database_timezone = :local @db.to_application_timestamp('2011-01-02 03:04:05').must_equal t3 Sequel.default_timezone = :local @db.to_application_timestamp('2011-01-02 03:04:05').must_equal t2 Sequel.database_timezone = :utc @db.to_application_timestamp('2011-01-02 03:04:05').must_equal t4 Sequel.default_timezone = :utc @db.timezone = :local @db.to_application_timestamp('2011-01-02 03:04:05').must_equal t3 Sequel.default_timezone = :local @db.timezone = :utc @db.to_application_timestamp('2011-01-02 03:04:05').must_equal t4 ensure Sequel.default_timezone = nil end end it "should typecast datetime values to Sequel.datetime_class with correct timezone handling" do t = Time.utc(2011, 1, 2, 3, 4, 5, 500000) # UTC Time t2 = Time.mktime(2011, 1, 2, 3, 4, 5, 500000) # Local Time t3 = Time.utc(2011, 1, 2, 3, 4, 5, 500000) - (t - t2) # Local Time in UTC Time t4 = Time.mktime(2011, 1, 2, 3, 4, 5, 500000) + (t - t2) # UTC Time in Local Time secs = Rational(11, 2) r1 = Rational(t2.utc_offset, 86400) r2 = Rational((t - t2).to_i, 86400) dt = DateTime.civil(2011, 1, 2, 3, 4, secs) dt2 = DateTime.civil(2011, 1, 2, 3, 4, secs, r1) dt3 = DateTime.civil(2011, 1, 2, 3, 4, secs) - r2 dt4 = DateTime.civil(2011, 1, 2, 3, 4, secs, r1) + r2 t.must_equal t4 t2.must_equal t3 dt.must_equal dt4 dt2.must_equal dt3 check = proc do |i, o| v = @db.typecast_value(:datetime, i) v.must_equal o if o.is_a?(Time) v.utc_offset.must_equal o.utc_offset else v.offset.must_equal o.offset end end @db.extend_datasets(Module.new{def supports_timestamp_timezones?; true; end}) begin @db.typecast_value(:datetime, dt).must_equal t @db.typecast_value(:datetime, dt2).must_equal t2 @db.typecast_value(:datetime, t).must_equal t @db.typecast_value(:datetime, t2).must_equal t2 @db.typecast_value(:datetime, @db.literal(dt)[1...-1]).must_equal t @db.typecast_value(:datetime, dt.strftime('%F %T.%N')).must_equal t2 @db.typecast_value(:datetime, Date.civil(2011, 1, 2)).must_equal Time.mktime(2011, 1, 2, 0, 0, 0) @db.typecast_value(:datetime, :year=>dt.year, :month=>dt.month, :day=>dt.day, :hour=>dt.hour, :minute=>dt.min, :second=>dt.sec, :nanos=>500000000).must_equal t2 Sequel.datetime_class = DateTime @db.typecast_value(:datetime, dt).must_equal dt @db.typecast_value(:datetime, dt2).must_equal dt2 @db.typecast_value(:datetime, t).must_equal dt @db.typecast_value(:datetime, t2).must_equal dt2 @db.typecast_value(:datetime, @db.literal(dt)[1...-1]).must_equal dt @db.typecast_value(:datetime, dt.strftime('%F %T.%N')).must_equal dt @db.typecast_value(:datetime, Date.civil(2011, 1, 2)).must_equal DateTime.civil(2011, 1, 2, 0, 0, 0) @db.typecast_value(:datetime, :year=>dt.year, :month=>dt.month, :day=>dt.day, :hour=>dt.hour, :minute=>dt.min, :second=>dt.sec, :nanos=>500000000).must_equal dt Sequel.application_timezone 
= :utc Sequel.typecast_timezone = :local Sequel.datetime_class = Time check[dt, t] check[dt2, t3] check[t, t] check[t2, t3] check[@db.literal(dt)[1...-1], t] check[dt.strftime('%F %T.%N'), t3] check[Date.civil(2011, 1, 2), Time.utc(2011, 1, 2, 0, 0, 0)] check[{:year=>dt.year, :month=>dt.month, :day=>dt.day, :hour=>dt.hour, :minute=>dt.min, :second=>dt.sec, :nanos=>500000000}, t3] Sequel.datetime_class = DateTime check[dt, dt] check[dt2, dt3] check[t, dt] check[t2, dt3] check[@db.literal(dt)[1...-1], dt] check[dt.strftime('%F %T.%N'), dt3] check[Date.civil(2011, 1, 2), DateTime.civil(2011, 1, 2, 0, 0, 0)] check[{:year=>dt.year, :month=>dt.month, :day=>dt.day, :hour=>dt.hour, :minute=>dt.min, :second=>dt.sec, :nanos=>500000000}, dt3] Sequel.typecast_timezone = :utc Sequel.datetime_class = Time check[dt, t] check[dt2, t3] check[t, t] check[t2, t3] check[@db.literal(dt)[1...-1], t] check[dt.strftime('%F %T.%N'), t] check[Date.civil(2011, 1, 2), Time.utc(2011, 1, 2, 0, 0, 0)] check[{:year=>dt.year, :month=>dt.month, :day=>dt.day, :hour=>dt.hour, :minute=>dt.min, :second=>dt.sec, :nanos=>500000000}, t] Sequel.datetime_class = DateTime check[dt, dt] check[dt2, dt3] check[t, dt] check[t2, dt3] check[@db.literal(dt)[1...-1], dt] check[dt.strftime('%F %T.%N'), dt] check[Date.civil(2011, 1, 2), DateTime.civil(2011, 1, 2, 0, 0, 0)] check[{:year=>dt.year, :month=>dt.month, :day=>dt.day, :hour=>dt.hour, :minute=>dt.min, :second=>dt.sec, :nanos=>500000000}, dt] Sequel.application_timezone = :local Sequel.datetime_class = Time check[dt, t4] check[dt2, t2] check[t, t4] check[t2, t2] check[@db.literal(dt)[1...-1], t4] check[dt.strftime('%F %T.%N'), t4] check[Date.civil(2011, 1, 2), Time.local(2011, 1, 2, 0, 0, 0)] check[{:year=>dt.year, :month=>dt.month, :day=>dt.day, :hour=>dt.hour, :minute=>dt.min, :second=>dt.sec, :nanos=>500000000}, t4] Sequel.datetime_class = DateTime check[dt, dt4] check[dt2, dt2] check[t, dt4] check[t2, dt2] check[@db.literal(dt)[1...-1], dt4] check[dt.strftime('%F %T.%N'), dt4] check[Date.civil(2011, 1, 2), DateTime.civil(2011, 1, 2, 0, 0, 0, r1)] check[{:year=>dt.year, :month=>dt.month, :day=>dt.day, :hour=>dt.hour, :minute=>dt.min, :second=>dt.sec, :nanos=>500000000}, dt4] Sequel.typecast_timezone = :local Sequel.datetime_class = Time check[dt, t4] check[dt2, t2] check[t, t4] check[t2, t2] check[@db.literal(dt)[1...-1], t4] check[dt.strftime('%F %T.%N'), t2] check[Date.civil(2011, 1, 2), Time.local(2011, 1, 2, 0, 0, 0)] check[{:year=>dt.year, :month=>dt.month, :day=>dt.day, :hour=>dt.hour, :minute=>dt.min, :second=>dt.sec, :nanos=>500000000}, t2] Sequel.datetime_class = DateTime check[dt, dt4] check[dt2, dt2] check[t, dt4] check[t2, dt2] check[@db.literal(dt)[1...-1], dt4] check[dt.strftime('%F %T.%N'), dt2] check[Date.civil(2011, 1, 2), DateTime.civil(2011, 1, 2, 0, 0, 0, r1)] check[{:year=>dt.year, :month=>dt.month, :day=>dt.day, :hour=>dt.hour, :minute=>dt.min, :second=>dt.sec, :nanos=>500000000}, dt2] ensure Sequel.default_timezone = nil Sequel.datetime_class = Time end end it "should handle arrays when typecasting timestamps" do begin @db.typecast_value(:datetime, [2011, 10, 11, 12, 13, 14]).must_equal Time.local(2011, 10, 11, 12, 13, 14) @db.typecast_value(:datetime, [2011, 10, 11, 12, 13, 14, 500000000]).must_equal Time.local(2011, 10, 11, 12, 13, 14, 500000) @db.typecast_value(:datetime, [2011, 10, 11, 12, 13, 14, 500000000, Rational(1, 2)]).must_equal Time.parse('2011-10-11 12:13:14.5+12:00') @db.typecast_value(:datetime, [2011, 10, 11, 12, 13, 14, nil, Rational(1, 
2)]).must_equal Time.parse('2011-10-11 12:13:14+12:00') Sequel.datetime_class = DateTime @db.typecast_value(:datetime, [2011, 10, 11, 12, 13, 14]).must_equal DateTime.civil(2011, 10, 11, 12, 13, 14) @db.typecast_value(:datetime, [2011, 10, 11, 12, 13, 14, 500000000]).must_equal DateTime.civil(2011, 10, 11, 12, 13, Rational(29, 2)) @db.typecast_value(:datetime, [2011, 10, 11, 12, 13, 14, 500000000, Rational(1, 2)]).must_equal DateTime.civil(2011, 10, 11, 12, 13, Rational(29, 2), Rational(1, 2)) ensure Sequel.datetime_class = Time end end it "should handle hashes when typecasting timestamps" do begin @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14).must_equal Time.local(2011, 10, 11, 12, 13, 14) @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14, :nanos=>500000000).must_equal Time.local(2011, 10, 11, 12, 13, 14, 500000) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14).must_equal Time.local(2011, 10, 11, 12, 13, 14) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14, 'nanos'=>500000000).must_equal Time.local(2011, 10, 11, 12, 13, 14, 500000) @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14, :offset=>Rational(1, 2)).must_equal Time.new(2011, 10, 11, 12, 13, 14, 43200) @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14, :nanos=>500000000, :offset=>Rational(1, 2)).must_equal Time.new(2011, 10, 11, 12, 13, 14.5, 43200) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14, 'offset'=>Rational(1, 2)).must_equal Time.new(2011, 10, 11, 12, 13, 14, 43200) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14, 'nanos'=>500000000, 'offset'=>Rational(1, 2)).must_equal Time.new(2011, 10, 11, 12, 13, 14.5, 43200) Sequel.default_timezone = :foo proc{@db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14, 'nanos'=>500000000)}.must_raise Sequel::InvalidValue Sequel.default_timezone = :utc @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14).must_equal Time.utc(2011, 10, 11, 12, 13, 14) @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14, :nanos=>500000000).must_equal Time.utc(2011, 10, 11, 12, 13, 14, 500000) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14).must_equal Time.utc(2011, 10, 11, 12, 13, 14) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14, 'nanos'=>500000000).must_equal Time.utc(2011, 10, 11, 12, 13, 14, 500000) Sequel.datetime_class = DateTime @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14).must_equal DateTime.civil(2011, 10, 11, 12, 13, 14) @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14, :nanos=>500000000).must_equal DateTime.civil(2011, 10, 11, 12, 13, Rational(29, 2)) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14).must_equal DateTime.civil(2011, 10, 11, 12, 13, 14) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 
'minute'=>13, 'second'=>14, 'nanos'=>500000000).must_equal DateTime.civil(2011, 10, 11, 12, 13, Rational(29, 2)) @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14, :offset=>Rational(1, 2)).must_equal DateTime.civil(2011, 10, 11, 12, 13, 14, Rational(1, 2)) @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14, :nanos=>500000000, :offset=>Rational(1, 2)).must_equal DateTime.civil(2011, 10, 11, 12, 13, Rational(29, 2), Rational(1, 2)) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14, 'offset'=>Rational(1, 2)).must_equal DateTime.civil(2011, 10, 11, 12, 13, 14, Rational(1, 2)) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14, 'nanos'=>500000000, 'offset'=>Rational(1, 2)).must_equal DateTime.civil(2011, 10, 11, 12, 13, Rational(29, 2), Rational(1, 2)) Sequel.default_timezone = :local offset = Rational(Time.local(2011, 10, 11, 12, 13, 14).utc_offset, 86400) @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14).must_equal DateTime.civil(2011, 10, 11, 12, 13, 14, offset) @db.typecast_value(:datetime, :year=>2011, :month=>10, :day=>11, :hour=>12, :minute=>13, :second=>14, :nanos=>500000000).must_equal DateTime.civil(2011, 10, 11, 12, 13, Rational(29, 2), offset) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14).must_equal DateTime.civil(2011, 10, 11, 12, 13, 14, offset) @db.typecast_value(:datetime, 'year'=>2011, 'month'=>10, 'day'=>11, 'hour'=>12, 'minute'=>13, 'second'=>14, 'nanos'=>500000000).must_equal DateTime.civil(2011, 10, 11, 12, 13, Rational(29, 2), offset) ensure Sequel.datetime_class = Time Sequel.default_timezone = nil end end it "should typecast decimal values to BigDecimal" do [1.0, 1, '1.0', BigDecimal('1.0')].each do |i| v = @db.typecast_value(:decimal, i) v.must_be_kind_of(BigDecimal) v.must_equal BigDecimal('1.0') end end it "should typecast float values to Float" do [1.0, 1, '1.0', BigDecimal('1.0')].each do |i| v = @db.typecast_value(:float, i) v.must_be_kind_of(Float) v.must_equal 1.0 end end it "should typecast string values to String" do [1.0, '1.0', Sequel.blob('1.0')].each do |i| v = @db.typecast_value(:string, i) v.must_be_instance_of(String) v.must_equal "1.0" end end it "should raise errors when typecasting hash and array values to String" do [[], {}].each do |i| proc{@db.typecast_value(:string, i)}.must_raise(Sequel::InvalidValue) end end it "should typecast time values to SQLTime" do t = Time.now st = Sequel::SQLTime.local(t.year, t.month, t.day, 1, 2, 3) [st, Time.utc(t.year, t.month, t.day, 1, 2, 3), Time.local(t.year, t.month, t.day, 1, 2, 3), '01:02:03', {:hour=>1, :minute=>2, :second=>3}].each do |i| v = @db.typecast_value(:time, i) v.must_be_instance_of(Sequel::SQLTime) v.must_equal st end end it "should correctly handle time value conversion to SQLTime with fractional seconds" do t = Time.now st = Sequel::SQLTime.local(t.year, t.month, t.day, 1, 2, 3, 500000) t = Time.local(t.year, t.month, t.day, 1, 2, 3, 500000) @db.typecast_value(:time, t).must_equal st end it "should enforce bytesize limits when typecasting strings" do s = ' '*100 proc{@db.typecast_value(:date, '2010-10-30'+s)}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:date, 'year'=>'2010'+s, 'month'=>'10', 'day'=>'30')}.must_raise Sequel::InvalidValue 
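# Commentary (not part of the upstream spec): these assertions rely on
# Database#check_string_typecast_bytesize being true (the default), which caps
# the bytesize of string input per type, including string values nested inside
# date/time hashes; the 100-space suffix in s pushes each value past its limit,
# so typecasting must raise Sequel::InvalidValue.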
proc{@db.typecast_value(:date, 'year'=>'2010', 'month'=>'10'+s, 'day'=>'30')}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:date, 'year'=>'2010', 'month'=>'10', 'day'=>'30'+s)}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:datetime, '2010-10-30 10:20:30'+s)}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:datetime, 'year'=>'2010'+s, 'month'=>'10', 'day'=>'30', 'hour'=>'1', 'minute'=>'2', 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000')}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10'+s, 'day'=>'30', 'hour'=>'1', 'minute'=>'2', 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000')}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10', 'day'=>'30'+s, 'hour'=>'1', 'minute'=>'2', 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000')}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10', 'day'=>'30', 'hour'=>'1'+s, 'minute'=>'2', 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000')}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10', 'day'=>'30', 'hour'=>'1', 'minute'=>'2'+s, 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000')}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10', 'day'=>'30', 'hour'=>'1', 'minute'=>'2', 'second'=>'3'+s, 'nanos'=>'123456789'+s, 'offset'=>'+0000')}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10', 'day'=>'30', 'hour'=>'1', 'minute'=>'2', 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000'+s)}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:decimal, '1'*1001)}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:float, '1.'+'0'*1000)}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:integer, '1'*101)}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:time, '10:20:30'+s)}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:time, 'hour'=>'10'+s, 'minute'=>'20', 'second'=>'30')}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:time, 'hour'=>'10', 'minute'=>'20'+s, 'second'=>'30')}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:time, 'hour'=>'10', 'minute'=>'20', 'second'=>'30'+s)}.must_raise Sequel::InvalidValue end it "should not enforce bytesize limits when typecasting strings if check_string_typecast_bytesize = false" do @db.check_string_typecast_bytesize = false s = ' '*100 @db.typecast_value(:date, '2010-10-30'+s).must_equal Date.new(2010, 10, 30) @db.typecast_value(:date, 'year'=>'2010'+s, 'month'=>'10', 'day'=>'30').must_equal Date.new(2010, 10, 30) @db.typecast_value(:date, 'year'=>'2010', 'month'=>'10'+s, 'day'=>'30').must_equal Date.new(2010, 10, 30) @db.typecast_value(:date, 'year'=>'2010', 'month'=>'10', 'day'=>'30'+s).must_equal Date.new(2010, 10, 30) @db.typecast_value(:datetime, '2010-10-30 10:20:30'+s).must_equal Time.local(2010, 10, 30, 10, 20, 30) @db.typecast_value(:datetime, 'year'=>'2010'+s, 'month'=>'10', 'day'=>'30', 'hour'=>'1', 'minute'=>'2', 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000').must_be_kind_of Time @db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10'+s, 'day'=>'30', 'hour'=>'1', 'minute'=>'2', 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000').must_be_kind_of Time @db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10', 'day'=>'30'+s, 'hour'=>'1', 'minute'=>'2', 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000').must_be_kind_of 
Time @db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10', 'day'=>'30', 'hour'=>'1'+s, 'minute'=>'2', 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000').must_be_kind_of Time @db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10', 'day'=>'30', 'hour'=>'1', 'minute'=>'2'+s, 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000').must_be_kind_of Time @db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10', 'day'=>'30', 'hour'=>'1', 'minute'=>'2', 'second'=>'3'+s, 'nanos'=>'123456789'+s, 'offset'=>'+0000').must_be_kind_of Time @db.typecast_value(:datetime, 'year'=>'2010', 'month'=>'10', 'day'=>'30', 'hour'=>'1', 'minute'=>'2', 'second'=>'3', 'nanos'=>'123456789', 'offset'=>'+0000'+s).must_be_kind_of Time @db.typecast_value(:decimal, '1'*1001).must_equal BigDecimal('1'*1001) @db.typecast_value(:float, '1.'+'0'*1000).must_equal 1.0 @db.typecast_value(:integer, '1'*101).must_equal Integer('1'*101) @db.typecast_value(:time, '10:20:30'+s).must_equal Sequel::SQLTime.create(10, 20, 30) @db.typecast_value(:time, 'hour'=>'10'+s, 'minute'=>'20', 'second'=>'30').must_equal Sequel::SQLTime.create(10, 20, 30) @db.typecast_value(:time, 'hour'=>'10', 'minute'=>'20'+s, 'second'=>'30').must_equal Sequel::SQLTime.create(10, 20, 30) @db.typecast_value(:time, 'hour'=>'10', 'minute'=>'20', 'second'=>'30'+s).must_equal Sequel::SQLTime.create(10, 20, 30) end it "should have an underlying exception class available at wrapped_exception" do begin @db.typecast_value(:date, 'a') true.must_equal false rescue Sequel::InvalidValue => e e.wrapped_exception.must_be_kind_of(ArgumentError) end end it "should have an underlying exception class available at cause" do begin @db.typecast_value(:date, 'a') true.must_equal false rescue Sequel::InvalidValue => e e.cause.must_be_kind_of(ArgumentError) end end if RUBY_VERSION >= '2.1' it "should have an underlying exception class available at cause when using nested exceptions" do begin begin raise ArgumentError rescue => e1 begin raise RuntimeError rescue @db.send(:raise_error, e1) end end rescue Sequel::DatabaseError => e e.cause.must_be_kind_of(ArgumentError) end end if RUBY_VERSION >= '2.1' it "should include underlying exception class in #inspect" do begin @db.typecast_value(:date, 'a') true.must_equal false rescue Sequel::InvalidValue => e e.inspect.must_match(/\A#\<Sequel::InvalidValue: (Argument|Date::)Error: invalid date\>\z/) end end end describe "Database#blank_object?" do it "should return whether the object is considered blank" do db = Sequel::Database.new c = lambda{|meth, value| Class.new{define_method(meth){value}}.new} db.send(:blank_object?, "").must_equal true db.send(:blank_object?, " ").must_equal true db.send(:blank_object?, nil).must_equal true db.send(:blank_object?, false).must_equal true db.send(:blank_object?, []).must_equal true db.send(:blank_object?, {}).must_equal true db.send(:blank_object?, c[:empty?, true]).must_equal true db.send(:blank_object?, c[:blank?, true]).must_equal true db.send(:blank_object?, c[:foo, true]).must_equal false db.send(:blank_object?, " a ").must_equal false db.send(:blank_object?, 1).must_equal false db.send(:blank_object?, 1.0).must_equal false db.send(:blank_object?, true).must_equal false db.send(:blank_object?, [1]).must_equal false db.send(:blank_object?, {1.0=>2.0}).must_equal false db.send(:blank_object?, c[:empty?, false]).must_equal false db.send(:blank_object?, c[:blank?, false]).must_equal false end end describe "Database#schema_autoincrementing_primary_key?" 
do it "should indicate whether the parsed schema row indicates a primary key" do m = Sequel::Database.new.method(:schema_autoincrementing_primary_key?) m.call(:primary_key=>true, :auto_increment=>true).must_equal true m.call(:primary_key=>true, :auto_increment=>false).must_equal false m.call(:primary_key=>false).must_equal false end end describe "Database#supports_schema_parsing?" do it "should be false by default" do Sequel::Database.new.supports_schema_parsing?.must_equal false end it "should be true if the database implements schema_parse_table" do db = Sequel::Database.new def db.schema_parse_table(*) end db.supports_schema_parsing?.must_equal true end end describe "Database#supports_foreign_key_parsing?" do it "should be false by default" do Sequel::Database.new.supports_foreign_key_parsing?.must_equal false end it "should be true if the database implements foreign_key_list" do db = Sequel::Database.new def db.foreign_key_list(*) end db.supports_foreign_key_parsing?.must_equal true end end describe "Database#supports_index_parsing?" do it "should be false by default" do Sequel::Database.new.supports_index_parsing?.must_equal false end it "should be true if the database implements indexes" do db = Sequel::Database.new def db.indexes(*) end db.supports_index_parsing?.must_equal true end end describe "Database#supports_table_listing?" do it "should be false by default" do Sequel::Database.new.supports_table_listing?.must_equal false end it "should be true if the database implements tables" do db = Sequel::Database.new def db.tables(*) end db.supports_table_listing?.must_equal true end end describe "Database#supports_view_listing?" do it "should be false by default" do Sequel::Database.new.supports_view_listing?.must_equal false end it "should be true if the database implements views" do db = Sequel::Database.new def db.views(*) end db.supports_view_listing?.must_equal true end end describe "Database#supports_deferrable_constraints?" do it "should be false by default" do Sequel::Database.new.supports_deferrable_constraints?.must_equal false end end describe "Database#supports_deferrable_foreign_key_constraints?" do it "should be false by default" do Sequel::Database.new.supports_deferrable_foreign_key_constraints?.must_equal false end end describe "Database#supports_transactional_ddl?" do it "should be false by default" do Sequel::Database.new.supports_transactional_ddl?.must_equal false end end describe "Database#global_index_namespace?" do it "should be true by default" do Sequel::Database.new.global_index_namespace?.must_equal true end end describe "Database#supports_savepoints?" do it "should be false by default" do Sequel::Database.new.supports_savepoints?.must_equal false end end describe "Database#supports_views_with_check_option?" do it "should be false by default" do Sequel::Database.new.supports_views_with_check_option?.must_equal false end end describe "Database#supports_views_with_local_check_option?" do it "should be false by default" do Sequel::Database.new.supports_views_with_local_check_option?.must_equal false end end describe "Database#supports_savepoints_in_prepared_transactions?" 
do it "should be false by default" do Sequel::Database.new.supports_savepoints_in_prepared_transactions?.must_equal false end it "should be true if both savepoints and prepared transactions are supported" do db = Sequel::Database.new db.define_singleton_method(:supports_savepoints?){true} db.define_singleton_method(:supports_prepared_transactions?){true} db.supports_savepoints_in_prepared_transactions?.must_equal true end end describe "Database#supports_prepared_transactions?" do it "should be false by default" do Sequel::Database.new.supports_prepared_transactions?.must_equal false end end describe "Database#supports_transaction_isolation_levels?" do it "should be false by default" do Sequel::Database.new.supports_transaction_isolation_levels?.must_equal false end end describe "Database#schema min/max integer values" do def min_max(type) @db = Sequel::Database.new @db.define_singleton_method(:schema_parse_table){|*| [[:a, {:db_type=>type, :type=>type == 'varchar' ? :string : :integer}]]} yield @db if block_given? sch = @db.schema(:t)[0][1] [sch.fetch(:min_value, :none), sch.fetch(:max_value, :none)] end it "should parse minimum and maximum values for regular integer types" do min_max('integer').must_equal [-2147483648, 2147483647] min_max('int4').must_equal [-2147483648, 2147483647] min_max('bigint').must_equal [-9223372036854775808, 9223372036854775807] min_max('int8').must_equal [-9223372036854775808, 9223372036854775807] min_max('smallint').must_equal [-32768, 32767] min_max('int2').must_equal [-32768, 32767] min_max('tinyint').must_equal [-128, 127] min_max('mediumint').must_equal [-8388608, 8388607] end it "should parse minimum and maximum values for unsigned integer types" do min_max('integer unsigned').must_equal [0, 4294967295] min_max('unsigned int4').must_equal [0, 4294967295] min_max('bigint unsigned').must_equal [0, 18446744073709551615] min_max('int8 unsigned').must_equal [0, 18446744073709551615] min_max('smallint unsigned').must_equal [0, 65535] min_max('int2 unsigned').must_equal [0, 65535] min_max('tinyint unsigned').must_equal [0, 255] min_max('mediumint unsigned').must_equal [0, 16777215] end it "should parse minimum and maximum values for tinyint types where database tinyint type is unsigned by default" do min_max('tinyint'){|db| def db.column_schema_tinyint_type_is_unsigned?; true end}.must_equal [0, 255] end it "should not parse minimum and maximum values for non-integer types" do min_max('varchar').must_equal [:none, :none] end end describe "Database#column_schema_to_ruby_default" do it "should handle converting many default formats" do db = Sequel::Database.new p = lambda{|d,t| db.send(:column_schema_to_ruby_default, d, t)} p['any', :unknown].must_be_nil p[nil, :integer].must_be_nil p[1, :integer].must_equal 1 p['1', :integer].must_equal 1 p['-1', :integer].must_equal(-1) p[1.0, :float].must_equal 1.0 p['1.0', :float].must_equal 1.0 p['-1.0', :float].must_equal(-1.0) p['1.0', :decimal].must_equal BigDecimal('1.0') p['-1.0', :decimal].must_equal BigDecimal('-1.0') p[true, :boolean].must_equal true p[false, :boolean].must_equal false p['1', :boolean].must_equal true p['0', :boolean].must_equal false p['true', :boolean].must_equal true p['false', :boolean].must_equal false p["'t'", :boolean].must_equal true p["'f'", :boolean].must_equal false p["'x'", :boolean].must_be_nil p["'a'", :string].must_equal 'a' p["'a'", :blob].must_equal Sequel.blob('a') p["'a'", :blob].must_be_kind_of(Sequel::SQL::Blob) p["''", :string].must_equal '' p["'\\a''b'", :string].must_equal 
"\\a'b" p["a", :string].must_be_nil p["'NULL'", :string].must_equal "NULL" p[Date.today, :date].must_equal Date.today p["'2009-10-29'", :date].must_equal Date.new(2009,10,29) p["CURRENT_TIMESTAMP", :date].must_equal Sequel::CURRENT_DATE p["CURRENT_DATE", :date].must_equal Sequel::CURRENT_DATE p["now()", :date].must_equal Sequel::CURRENT_DATE p["getdate()", :date].must_equal Sequel::CURRENT_DATE p["CURRENT_TIMESTAMP", :datetime].must_equal Sequel::CURRENT_TIMESTAMP p["CURRENT_DATE", :datetime].must_equal Sequel::CURRENT_TIMESTAMP p["now()", :datetime].must_equal Sequel::CURRENT_TIMESTAMP p["getdate()", :datetime].must_equal Sequel::CURRENT_TIMESTAMP p["'2009-10-29T10:20:30-07:00'", :datetime].must_equal Time.parse('2009-10-29T10:20:30-07:00') p["'2009-10-29 10:20:30'", :datetime].must_equal Time.parse('2009-10-29 10:20:30') p["'10:20:30'", :time].must_equal Sequel::SQLTime.create(10, 20, 30) p["NaN", :float].must_be_nil begin Sequel.datetime_class = DateTime p["'2009-10-29 10:20:30'", :datetime].must_equal DateTime.parse('2009-10-29 10:20:30') ensure Sequel.datetime_class = Time end db = Sequel.mock(:host=>'postgres') p["''::text", :string].must_equal "" p["'\\a''b'::character varying", :string].must_equal "\\a'b" p["'a'::bpchar", :string].must_equal "a" p["(-1)", :integer].must_equal(-1) p["(-1.0)", :float].must_equal(-1.0) p['(-1.0)', :decimal].must_equal BigDecimal('-1.0') p["'a'::bytea", :blob].must_equal Sequel.blob('a') p["'a'::bytea", :blob].must_be_kind_of(Sequel::SQL::Blob) p["'2009-10-29'::date", :date].must_equal Date.new(2009,10,29) p["'2009-10-29 10:20:30.241343'::timestamp without time zone", :datetime].must_equal Time.parse('2009-10-29 10:20:30.241343') p["'10:20:30'::time without time zone", :time].must_equal Sequel::SQLTime.create(10, 20, 30) db = Sequel.mock(:host=>'mysql') p["\\a'b", :string].must_equal "\\a'b" p["a", :string].must_equal "a" p["NULL", :string].must_equal "NULL" p["-1", :float].must_equal(-1.0) p['-1', :decimal].must_equal BigDecimal('-1.0') p["2009-10-29", :date].must_equal Date.new(2009,10,29) p["2009-10-29 10:20:30", :datetime].must_equal Time.parse('2009-10-29 10:20:30') p["10:20:30", :time].must_equal Sequel::SQLTime.create(10, 20, 30) p["a", :enum].must_equal "a" p["a,b", :set].must_equal "a,b" db = Sequel.mock(:host=>'mssql') p["(N'a')", :string].must_equal "a" p["((-12))", :integer].must_equal(-12) p["((12.1))", :float].must_equal 12.1 p["((-12.1))", :decimal].must_equal BigDecimal('-12.1') end end describe "Database extensions" do before(:all) do class << Sequel def extension(*) end end end after(:all) do class << Sequel remove_method :extension end end before do @db = Sequel.mock end after do Sequel::Database.instance_variable_set(:@initialize_hook, proc{|db| }) end it "should be able to register an extension with a module have Database#extension extend the module" do Sequel::Database.register_extension(:foo, Module.new{def a; 1; end}) @db.extension(:foo).a.must_equal 1 end it "should not call the block multiple times if extension loaded more than once" do @db.opts[:foo] = [] Sequel::Database.register_extension(:foo){|db| db.opts[:foo] << 1} @db.extension(:foo).opts[:foo].must_equal [1] @db.extension(:foo).opts[:foo].must_equal [1] end it "should be able to register an extension with a block and have Database#extension call the block" do Sequel::Database.register_extension(:foo){|db| db.opts[:foo] = 1} @db.extension(:foo).opts[:foo].must_equal 1 end it "should be able to register an extension with a callable and Database#extension call the 
callable" do Sequel::Database.register_extension(:foo, proc{|db| db.opts[:foo] = 1}) @db.extension(:foo).opts[:foo].must_equal 1 end it "should be able to load multiple extensions in the same call" do a = [] Sequel::Database.register_extension(:foo, proc{|db| a << db.opts[:foo] = 1}) Sequel::Database.register_extension(:bar, proc{|db| a << db.opts[:bar] = 2}) @db.extension(:foo, :bar).opts.values_at(:foo, :bar).must_equal [1, 2] a.must_equal [1, 2] end it "should return the receiver" do Sequel::Database.register_extension(:foo, Module.new{def a; 1; end}) @db.extension(:foo).must_be_same_as(@db) end it "should raise an Error if registering with both a module and a block" do proc{Sequel::Database.register_extension(:foo, Module.new){}}.must_raise(Sequel::Error) end it "should raise an Error if attempting to load an incompatible extension" do proc{@db.extension(:foo2)}.must_raise(Sequel::Error) end it "should be able to load an extension into all future Databases with Database.extension" do Sequel::Database.register_extension(:foo, Module.new{def a; 1; end}) Sequel::Database.register_extension(:bar, Module.new{def b; 2; end}) Sequel::Database.extension(:foo, :bar) @db.wont_respond_to(:a) @db.wont_respond_to(:b) Sequel.mock.a.must_equal 1 Sequel.mock.b.must_equal 2 end it "should be loadable via the :extensions Database option" do Sequel::Database.register_extension(:a, Module.new{def a; 1; end}) Sequel::Database.register_extension(:b, Module.new{def b; 2; end}) Sequel.mock(:extensions=>:a).a.must_equal 1 db = Sequel.mock(:extensions=>'a,b') db.a.must_equal 1 db.b.must_equal 2 db = Sequel.mock(:extensions=>[:a, :b]) db.a.must_equal 1 db.b.must_equal 2 proc{Sequel.mock(:extensions=>nil).a}.must_raise NoMethodError proc{Sequel.mock(:extensions=>Object.new)}.must_raise Sequel::Error end it "should support :preconnect_extensions Database option to load extensions before :preconnect" do x = [] Sequel::Database.register_extension(:a, Module.new{define_singleton_method(:extended){|_| x << :a}}) Sequel::Database.register_extension(:b, Module.new{define_singleton_method(:extended){|_| x << :b}}) m = Mutex.new c = Class.new(Sequel::Database) do def dataset_class_default; Sequel::Dataset end define_method(:connect) do |_| m.synchronize{x << :c} :connect end end db = c.new(:max_connections=>2, :preconnect=>true, :preconnect_extensions=>:a, :extensions=>:b) db.pool.size.must_equal db.pool.max_size x.must_equal [:a, :c, :c, :b] x.clear db = c.new(:max_connections=>3, :preconnect=>:concurrently, :preconnect_extensions=>:b, :extensions=>:a) x.must_equal [:b, :c, :c, :c, :a] db.pool.size.must_equal db.pool.max_size end it "should support :before_preconnect Database option to configure extensions before :preconnect" do x = [] Sequel::Database.register_extension(:a, Module.new do attr_accessor :z define_singleton_method(:extended){|db| db.z = 0} end) m = Mutex.new c = Class.new(Sequel::Database) do def dataset_class_default; Sequel::Dataset end define_method(:connect) do |_| m.synchronize{x << z} :connect end end db = c.new(:max_connections=>2, :preconnect=>true, :preconnect_extensions=>:a, :before_preconnect=> proc{|d| d.z = 1}) db.pool.size.must_equal db.pool.max_size x.must_equal [1, 1] x.clear db = c.new(:max_connections=>2, :preconnect_extensions=>:a, :before_preconnect=> proc{|d| d.z = 1}) db.pool.size.must_equal 1 x.must_equal [1] x.clear db = c.new(:max_connections=>2, :preconnect=>true, :preconnect_extensions=>:a) db.pool.size.must_equal db.pool.max_size x.must_equal [0, 0] end end describe "Database 
specific exception classes" do before do @db = Sequel.mock class << @db attr_accessor :sql_state def database_exception_sqlstate(exception, opts={}) @sql_state end end end it "should use appropriate exception classes for given SQL states" do @db.fetch = ArgumentError @db.sql_state = '23502' proc{@db.get(:a)}.must_raise(Sequel::NotNullConstraintViolation) @db.sql_state = '23503' proc{@db.get(:a)}.must_raise(Sequel::ForeignKeyConstraintViolation) @db.sql_state = '23505' proc{@db.get(:a)}.must_raise(Sequel::UniqueConstraintViolation) @db.sql_state = '23513' proc{@db.get(:a)}.must_raise(Sequel::CheckConstraintViolation) @db.sql_state = '40001' proc{@db.get(:a)}.must_raise(Sequel::SerializationFailure) @db.sql_state = '41245' proc{@db.get(:a)}.must_raise(Sequel::DatabaseError) def @db.database_specific_error_class_from_sqlstate(_) end (@db.get(:a) rescue $!.class).must_equal(Sequel::DatabaseError) @db.sql_state = nil (@db.get(:a) rescue $!.class).must_equal(Sequel::DatabaseError) end end describe "Database.after_initialize" do after do Sequel::Database.instance_variable_set(:@initialize_hook, proc{|db| }) end it "should allow a block to be run after each new instance is created" do Sequel::Database.after_initialize{|db| db.sql_log_level = :debug } db = Sequel.mock db.sql_log_level.must_equal :debug end it "should allow multiple hooks to be registered" do Sequel::Database.after_initialize{|db| db.sql_log_level = :debug } Sequel::Database.after_initialize{|db| db.loggers << 11 } db = Sequel.mock db.sql_log_level.must_equal :debug db.loggers.must_include(11) end it "should raise an error if registration is called without a block" do proc { Sequel::Database.after_initialize }.must_raise(Sequel::Error, /must provide block/i) end end describe "Database#schema_type_class" do it "should return the class or array of classes for the given type symbol" do db = Sequel.mock {:string=>String, :integer=>Integer, :date=>Date, :datetime=>[Time, DateTime], :time=>Sequel::SQLTime, :boolean=>[TrueClass, FalseClass], :float=>Float, :decimal=>BigDecimal, :blob=>Sequel::SQL::Blob}.each do |sym, klass| db.schema_type_class(sym).must_equal klass end end end describe "Database#execute_{dui,ddl,insert}" do before do @db = Sequel::Database.new def @db.execute(sql, opts={}) (@sqls ||= []) << sql end def @db.sqls @sqls end end it "should execute the SQL" do @db.execute_dui "DELETE FROM table" @db.execute_ddl "SET foo" @db.execute_insert "INSERT INTO table DEFAULT VALUES" @db.sqls.must_equal ["DELETE FROM table", "SET foo", "INSERT INTO table DEFAULT VALUES"] end end describe "Dataset identifier folding" do it "should fold to uppercase by default, as per SQL" do Sequel::Database.new.send(:folds_unquoted_identifiers_to_uppercase?).must_equal true end end ������������������������������������������������������������������sequel-5.63.0/spec/core/dataset_spec.rb�������������������������������������������������������������0000664�0000000�0000000�00000745771�14342141206�0017773�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Dataset" do before do @dataset = Sequel.mock.dataset end it "should accept database in initialize" do db = "db" d = Sequel::Dataset.new(db) d.db.must_be_same_as(db) 
d.opts.must_equal({}) end it "should provide clone for chainability" do d1 = @dataset.clone(:from => [:test]) d1.class.must_equal @dataset.class d1.wont_equal @dataset d1.db.must_be_same_as(@dataset.db) d1.opts[:from].must_equal [:test] @dataset.opts[:from].must_be_nil d2 = d1.clone(:order => [:name]) d2.class.must_equal @dataset.class d2.wont_equal d1 d2.wont_equal @dataset d2.db.must_be_same_as(@dataset.db) d2.opts[:from].must_equal [:test] d2.opts[:order].must_equal [:name] d1.opts[:order].must_be_nil end it "should include Enumerable" do Sequel::Dataset.included_modules.must_include(Enumerable) end it "should yield rows to each" do ds = Sequel.mock[:t].with_fetch(:x=>1) called = false ds.each{|a| called = true; a.must_equal(:x=>1)} called.must_equal true end end describe "Dataset#clone" do before do @dataset = Sequel.mock.dataset.from(:items) end it "should create an exact copy of the dataset" do @dataset = @dataset.with_row_proc(proc{|r| r}) clone = @dataset.clone @dataset.dup.must_be_same_as @dataset if RUBY_VERSION >= '2.4' clone.must_be_same_as @dataset else clone.object_id.wont_equal @dataset.object_id clone.class.must_equal @dataset.class clone.db.must_equal @dataset.db clone.opts.must_equal @dataset.opts end end it "should create an exact copy of the dataset when given an empty hash" do @dataset = @dataset.with_row_proc(proc{|r| r}) clone = @dataset.clone({}) clone.object_id.wont_equal @dataset.object_id clone.class.must_equal @dataset.class clone.db.must_equal @dataset.db clone.opts.must_equal @dataset.opts end it "should copy the dataset opts" do clone = @dataset.clone(:from => [:other]) @dataset.opts[:from].must_equal [:items] clone.opts[:from].must_equal [:other] end it "should merge the specified options" do clone = @dataset.clone(1 => 2) clone.opts.must_equal(1 => 2, :from => [:items]) end it "should overwrite existing options" do clone = @dataset.clone(:from => [:other]) clone.opts.must_equal(:from => [:other]) end it "should return an object with the same modules included" do m = Module.new do def __xyz__; "xyz"; end end @dataset.with_extend(m).clone({}).must_respond_to(:__xyz__) end end describe "Dataset#==" do before do @db = Sequel.mock @h = {} end it "should be the true for dataset with the same db, opts, and SQL" do @db[:t].must_equal @db[:t] end it "should be different for datasets with different dbs" do @db[:t].wont_equal Sequel.mock[:t] end it "should be different for datasets with different opts" do @db[:t].wont_equal @db[:t].clone(:blah=>1) end it "should be different for datasets with different SQL" do ds = @db[:t] ds.with_quote_identifiers(true).wont_equal ds end end describe "Dataset#hash" do before do @db = Sequel.mock @h = {} end it "should be the same for dataset with the same db, opts, and SQL" do @db[:t].hash.must_equal @db[:t].hash @h[@db[:t]] = 1 @h[@db[:t]].must_equal 1 end it "should be different for datasets with different dbs" do @db[:t].hash.wont_equal Sequel.mock[:t].hash end it "should be different for datasets with different opts" do @db[:t].hash.wont_equal @db[:t].clone(:blah=>1).hash end it "should be different for datasets with different SQL" do ds = @db[:t] ds.with_quote_identifiers(true).hash.wont_equal ds.hash end end describe "A simple dataset" do before do @dataset = Sequel.mock.dataset.from(:test) end it "should format a select statement" do @dataset.select_sql.must_equal 'SELECT * FROM test' end it "should format a delete statement" do @dataset.delete_sql.must_equal 'DELETE FROM test' end it "should format a truncate statement" do 
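# Context sketch (comment only): the *_sql methods render a statement string
# without executing anything, e.g. for this dataset:
#
#   @dataset.select_sql            # => "SELECT * FROM test"
#   @dataset.update_sql(:a => 1)   # => "UPDATE test SET a = 1"
#
# The truncate form is asserted below.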
@dataset.truncate_sql.must_equal 'TRUNCATE TABLE test' end it "should format a truncate statement with multiple tables if supported" do @dataset.with_extend{def check_truncation_allowed!; end}.from(:test, :test2).truncate_sql.must_equal 'TRUNCATE TABLE test, test2' end it "should format an insert statement with default values" do @dataset.insert_sql.must_equal 'INSERT INTO test DEFAULT VALUES' end it "should use a single column with a default value when the dataset doesn't support using insert statement with default values" do @dataset.with_extend do def insert_supports_empty_values?; false end def columns; [:a, :b] end end.insert_sql.must_equal 'INSERT INTO test (b) VALUES (DEFAULT)' end it "should format an insert statement with hash" do @dataset.insert_sql(:name => 'wxyz', :price => 342).must_equal 'INSERT INTO test (name, price) VALUES (\'wxyz\', 342)' @dataset.insert_sql({}).must_equal "INSERT INTO test DEFAULT VALUES" end it "should format an insert statement with string keys" do @dataset.insert_sql('name' => 'wxyz', 'price' => 342).must_equal 'INSERT INTO test (name, price) VALUES (\'wxyz\', 342)' end it "should format an insert statement with an arbitrary value" do @dataset.insert_sql(123).must_equal "INSERT INTO test VALUES (123)" end it "should format an insert statement with sub-query" do @dataset.insert_sql(@dataset.from(:something).filter(:x => 2)).must_equal "INSERT INTO test SELECT * FROM something WHERE (x = 2)" end it "should format an insert statement with array" do @dataset.insert_sql('a', 2, 6.5).must_equal "INSERT INTO test VALUES ('a', 2, 6.5)" end it "should format an update statement" do @dataset.update_sql(:name => 'abc').must_equal "UPDATE test SET name = 'abc'" end it "should be able to return rows for arbitrary SQL" do @dataset.clone(:sql => 'xxx yyy zzz').select_sql.must_equal "xxx yyy zzz" end it "should use the :sql option for all sql methods" do sql = "X" ds = @dataset.clone(:sql=>sql) ds.sql.must_equal sql ds.select_sql.must_equal sql ds.insert_sql.must_equal sql ds.delete_sql.must_equal sql ds.update_sql.must_equal sql ds.truncate_sql.must_equal sql end end describe "A dataset with multiple tables in its FROM clause" do before do @dataset = Sequel.mock.dataset.from(:t1, :t2) end it "should raise on #update_sql" do proc {@dataset.update_sql(:a=>1)}.must_raise(Sequel::InvalidOperation) end it "should raise on #delete_sql" do proc {@dataset.delete_sql}.must_raise(Sequel::InvalidOperation) end it "should raise on #truncate_sql" do proc {@dataset.truncate_sql}.must_raise(Sequel::InvalidOperation) end it "should raise on #insert_sql" do proc {@dataset.insert_sql}.must_raise(Sequel::InvalidOperation) end it "should generate a select query FROM all specified tables" do @dataset.select_sql.must_equal "SELECT * FROM t1, t2" end end describe "A dataset with a limit" do before do @dataset = Sequel.mock[:a].limit(1) end it "should ignore limit if skip_limit_check is used before #update" do @dataset.skip_limit_check.update(:a=>1) @dataset.db.sqls.must_equal ['UPDATE a SET a = 1'] end it "should ignore limit if skip_limit_check is used before #delete" do @dataset.skip_limit_check.delete @dataset.db.sqls.must_equal ['DELETE FROM a'] end it "should raise on #update" do proc{@dataset.update(:a=>1)}.must_raise(Sequel::InvalidOperation) end it "should raise on #delete" do proc{@dataset.delete}.must_raise(Sequel::InvalidOperation) end it "should raise on #truncate" do proc{@dataset.truncate}.must_raise(Sequel::InvalidOperation) 
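# Hedged aside (illustration, not an assertion): update/delete on a limited
# dataset raise Sequel::InvalidOperation because many databases silently
# ignore LIMIT/OFFSET in those statements; #skip_limit_check opts out:
#
#   Sequel.mock[:a].limit(1).skip_limit_check.delete   # runs "DELETE FROM a"
#
# but #truncate raises even with skip_limit_check, as asserted next.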
proc{@dataset.skip_limit_check.truncate}.must_raise(Sequel::InvalidOperation) end end describe "A dataset with an offset" do before do @dataset = Sequel.mock[:a].offset(1) end it "should ignore offset if skip_limit_check is used before #update" do @dataset.skip_limit_check.update(:a=>1) @dataset.db.sqls.must_equal ['UPDATE a SET a = 1'] end it "should ignore offset if skip_limit_check is used before #delete" do @dataset.skip_limit_check.delete @dataset.db.sqls.must_equal ['DELETE FROM a'] end it "should raise on #update" do proc{@dataset.update(:a=>1)}.must_raise(Sequel::InvalidOperation) end it "should raise on #delete" do proc{@dataset.delete}.must_raise(Sequel::InvalidOperation) end it "should raise on #truncate" do proc{@dataset.truncate}.must_raise(Sequel::InvalidOperation) proc{@dataset.skip_limit_check.truncate}.must_raise(Sequel::InvalidOperation) end end describe "Dataset#unused_table_alias" do before do @ds = Sequel.mock.dataset.from(:test) end it "should return given symbol if it hasn't already been used and dataset has no table" do @ds.from.unused_table_alias(:blah).must_equal :blah end it "should return given symbol if it hasn't already been used" do @ds.unused_table_alias(:blah).must_equal :blah end it "should return a symbol specifying an alias that hasn't already been used if it has already been used" do @ds.unused_table_alias(:test).must_equal :test_0 @ds.from(:test, :test_0).unused_table_alias(:test).must_equal :test_1 @ds.from(:test, :test_0).cross_join(:test_1).unused_table_alias(:test).must_equal :test_2 end with_symbol_splitting "should return an appropriate symbol if given splittable symbol" do @ds.unused_table_alias(:b__t___test).must_equal :test_0 @ds.unused_table_alias(:b__test).must_equal :test_0 end it "should return an appropriate symbol if given other forms of identifiers" do @ds.unused_table_alias('test').must_equal :test_0 @ds.unused_table_alias(Sequel.qualify(:b, :test)).must_equal :test_0 @ds.unused_table_alias(Sequel.expr(:b).as(:test)).must_equal :test_0 @ds.unused_table_alias(Sequel.expr(:b).as(Sequel.identifier(:test))).must_equal :test_0 @ds.unused_table_alias(Sequel.expr(:b).as('test')).must_equal :test_0 @ds.unused_table_alias(Sequel.identifier(:test)).must_equal :test_0 end end describe "Dataset#exists" do before do @ds1 = Sequel.mock[:test] @ds2 = @ds1.filter(Sequel.expr(:price) < 100) @ds3 = @ds1.filter(Sequel.expr(:price) > 50) end it "should work in filters" do @ds1.filter(@ds2.exists).sql. must_equal 'SELECT * FROM test WHERE (EXISTS (SELECT * FROM test WHERE (price < 100)))' @ds1.filter(@ds2.exists & @ds3.exists).sql. must_equal 'SELECT * FROM test WHERE ((EXISTS (SELECT * FROM test WHERE (price < 100))) AND (EXISTS (SELECT * FROM test WHERE (price > 50))))' end it "should work in select" do @ds1.select(@ds2.exists.as(:a), @ds3.exists.as(:b)).sql. 
must_equal 'SELECT (EXISTS (SELECT * FROM test WHERE (price < 100))) AS a, (EXISTS (SELECT * FROM test WHERE (price > 50))) AS b FROM test' end end describe "Dataset#where" do before do @dataset = Sequel.mock[:test] @d1 = @dataset.where(:region => 'Asia') end it "should raise Error if given no arguments or block" do proc{@dataset.where}.must_raise Sequel::Error end it "should raise Error for arrays/multiple arguments that are not condition specifiers" do proc{@dataset.where('a = ?', 1)}.must_raise Sequel::Error proc{@dataset.where(['a = ?', 1])}.must_raise Sequel::Error proc{@dataset.where({:a=>1}, {:b=>2})}.must_raise Sequel::Error proc{@dataset.where([{:a=>1}, {:b=>2}])}.must_raise Sequel::Error end it "should handle nil argument if block is given" do @d1.where(nil){a}.sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND NULL AND a)" end it "should handle nil argument if block has no existing filter" do @dataset.where(nil).sql.must_equal "SELECT * FROM test WHERE NULL" end it "should handle nil block result has no existing filter" do @dataset.where{nil}.sql.must_equal "SELECT * FROM test WHERE NULL" end it "should just clone if given an empty array or hash argument" do @dataset.where({}).sql.must_equal @dataset.sql @dataset.where([]).sql.must_equal @dataset.sql @dataset.filter({}).sql.must_equal @dataset.sql @dataset.filter([]).sql.must_equal @dataset.sql end it "should raise if no arguments or block" do proc{@dataset.where}.must_raise Sequel::Error proc{@dataset.filter}.must_raise Sequel::Error end it "should treat nil as NULL argument if block has no existing filter" do @dataset.where(nil).sql.must_equal "SELECT * FROM test WHERE NULL" @d1.where(nil).sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND NULL)" end it "should work with hashes" do @dataset.where(:name => 'xyz', :price => 342).select_sql.must_equal 'SELECT * FROM test WHERE ((name = \'xyz\') AND (price = 342))' end it "should work with a placeholder literal string" do @dataset.where(Sequel.lit('price < ? AND id in ?', 100, [1, 2, 3])).select_sql.must_equal "SELECT * FROM test WHERE (price < 100 AND id in (1, 2, 3))" end it "should work with literal strings" do @dataset.where(Sequel.lit('(a = 1 AND b = 2)')).select_sql.must_equal "SELECT * FROM test WHERE ((a = 1 AND b = 2))" end it "should work with named placeholder strings" do @dataset.where(Sequel.lit('price < :price AND id in :ids', :price=>100, :ids=>[1, 2, 3])).select_sql.must_equal "SELECT * FROM test WHERE (price < 100 AND id in (1, 2, 3))" end it "should not replace named placeholders that don't exist in the hash when using placeholder strings" do @dataset.where(Sequel.lit('price < :price AND id in :ids', :price=>100)).select_sql.must_equal "SELECT * FROM test WHERE (price < 100 AND id in :ids)" end it "should raise an error for a mismatched number of placeholders in placeholder literal strings" do proc{@dataset.where(Sequel.lit('price < ? AND id in ?', 100)).select_sql}.must_raise(Sequel::Error) proc{@dataset.where(Sequel.lit('price < ? 
AND id in ?', 100, [1, 2, 3], 4)).select_sql}.must_raise(Sequel::Error) end it "should handle placeholders when using an array" do @dataset.where(Sequel.lit(['price < ', ' AND id in '], 100, [1, 2, 3])).select_sql.must_equal "SELECT * FROM test WHERE (price < 100 AND id in (1, 2, 3))" @dataset.where(Sequel.lit(['price < ', ' AND id in '], 100)).select_sql.must_equal "SELECT * FROM test WHERE (price < 100 AND id in )" end it "should handle a mismatched number of placeholders when using an array" do proc{@dataset.where(Sequel.lit(['a = ', ' AND price < ', ' AND id in '], 100)).select_sql}.must_raise(Sequel::Error) proc{@dataset.where(Sequel.lit(['price < ', ' AND id in '], 100, [1, 2, 3], 4)).select_sql}.must_raise(Sequel::Error) end it "should handle partial names when using placeholder literal strings" do @dataset.where(Sequel.lit('price < :price AND id = :p', :p=>2, :price=>100)).select_sql.must_equal "SELECT * FROM test WHERE (price < 100 AND id = 2)" end it "should handle ::cast syntax when no parameters are supplied when using placeholder strings" do @dataset.where(Sequel.lit('price::float = 10', {})).select_sql.must_equal "SELECT * FROM test WHERE (price::float = 10)" @dataset.where(Sequel.lit('price::float ? 10', {})).select_sql.must_equal "SELECT * FROM test WHERE (price::float ? 10)" end it "should affect select, delete and update statements" do @d1.select_sql.must_equal "SELECT * FROM test WHERE (region = 'Asia')" @d1.delete_sql.must_equal "DELETE FROM test WHERE (region = 'Asia')" @d1.update_sql(:GDP => 0).must_equal "UPDATE test SET GDP = 0 WHERE (region = 'Asia')" end it "should affect select, delete and update statements when using literal strings" do @d2 = @dataset.where(Sequel.lit('region = ?', 'Asia')) @d2.select_sql.must_equal "SELECT * FROM test WHERE (region = 'Asia')" @d2.delete_sql.must_equal "DELETE FROM test WHERE (region = 'Asia')" @d2.update_sql(:GDP => 0).must_equal "UPDATE test SET GDP = 0 WHERE (region = 'Asia')" @d3 = @dataset.where(Sequel.lit("a = 1")) @d3.select_sql.must_equal "SELECT * FROM test WHERE (a = 1)" @d3.delete_sql.must_equal "DELETE FROM test WHERE (a = 1)" @d3.update_sql(:GDP => 0).must_equal "UPDATE test SET GDP = 0 WHERE (a = 1)" end it "should be composable using AND operator (for scoping)" do @d1.where(:size => 'big').select_sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND (size = 'big'))" @d1.where{population > 1000}.select_sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND (population > 1000))" @d1.where{(a > 1) | (b < 2)}.select_sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND ((a > 1) OR (b < 2)))" @d1.where{GDP() > 1000}.select_sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND (GDP > 1000))" end it "should be composable using AND operator (for scoping) when using literal strings" do @d2 = @dataset.where(Sequel.lit('region = ?', 'Asia')) @d2.where(Sequel.lit('GDP > ?', 1000)).select_sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND (GDP > 1000))" @d2.where(:name => ['Japan', 'China']).select_sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND (name IN ('Japan', 'China')))" @d2.where(Sequel.lit('GDP > ?')).select_sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND (GDP > ?))" @d3 = @dataset.where(Sequel.lit("a = 1")) @d3.where(Sequel.lit('b = 2')).select_sql.must_equal "SELECT * FROM test WHERE ((a = 1) AND (b = 2))" @d3.where(:c => 3).select_sql.must_equal "SELECT * FROM test WHERE ((a = 1) AND (c = 3))" @d3.where(Sequel.lit('d = ?', 
4)).select_sql.must_equal "SELECT * FROM test WHERE ((a = 1) AND (d = 4))" end it "should be composable using AND operator (for scoping) with block and literal string" do @dataset.where(Sequel.lit("a = 1")).where{e < 5}.select_sql.must_equal "SELECT * FROM test WHERE ((a = 1) AND (e < 5))" end it "should accept ranges" do @dataset.filter(:id => 4..7).sql.must_equal 'SELECT * FROM test WHERE ((id >= 4) AND (id <= 7))' @dataset.filter(:id => 4...7).sql.must_equal 'SELECT * FROM test WHERE ((id >= 4) AND (id < 7))' @dataset.filter(:id => 4..7).sql.must_equal 'SELECT * FROM test WHERE ((id >= 4) AND (id <= 7))' @dataset.filter(:id => 4...7).sql.must_equal 'SELECT * FROM test WHERE ((id >= 4) AND (id < 7))' end it "should accept nil" do @dataset.filter(:owner_id => nil).sql.must_equal 'SELECT * FROM test WHERE (owner_id IS NULL)' end it "should not accept unexpected value for IS operator" do ds = @dataset.filter(Sequel::SQL::ComplexExpression.new(:IS, :x, :y)) proc{ds.sql}.must_raise Sequel::InvalidOperation end it "should accept a subquery" do @dataset.filter{|o| o.gdp > @d1.select(Sequel.function(:avg, :gdp))}.sql.must_equal "SELECT * FROM test WHERE (gdp > (SELECT avg(gdp) FROM test WHERE (region = 'Asia')))" end it "should handle all types of IN/NOT IN queries with empty arrays" do @dataset.filter(:id => []).sql.must_equal "SELECT * FROM test WHERE (1 = 0)" @dataset.filter([:id1, :id2] => []).sql.must_equal "SELECT * FROM test WHERE (1 = 0)" @dataset.exclude(:id => []).sql.must_equal "SELECT * FROM test WHERE (1 = 1)" @dataset.exclude([:id1, :id2] => []).sql.must_equal "SELECT * FROM test WHERE (1 = 1)" end it "should handle all types of IN/NOT IN queries" do @dataset.filter(:id => @d1.select(:id)).sql.must_equal "SELECT * FROM test WHERE (id IN (SELECT id FROM test WHERE (region = 'Asia')))" @dataset.filter(:id => [1, 2]).sql.must_equal "SELECT * FROM test WHERE (id IN (1, 2))" @dataset.filter([:id1, :id2] => @d1.select(:id1, :id2)).sql.must_equal "SELECT * FROM test WHERE ((id1, id2) IN (SELECT id1, id2 FROM test WHERE (region = 'Asia')))" @dataset.filter([:id1, :id2] => Sequel.value_list([[1, 2], [3,4]])).sql.must_equal "SELECT * FROM test WHERE ((id1, id2) IN ((1, 2), (3, 4)))" @dataset.filter([:id1, :id2] => [[1, 2], [3,4]]).sql.must_equal "SELECT * FROM test WHERE ((id1, id2) IN ((1, 2), (3, 4)))" @dataset.exclude(:id => @d1.select(:id)).sql.must_equal "SELECT * FROM test WHERE (id NOT IN (SELECT id FROM test WHERE (region = 'Asia')))" @dataset.exclude(:id => [1, 2]).sql.must_equal "SELECT * FROM test WHERE (id NOT IN (1, 2))" @dataset.exclude([:id1, :id2] => @d1.select(:id1, :id2)).sql.must_equal "SELECT * FROM test WHERE ((id1, id2) NOT IN (SELECT id1, id2 FROM test WHERE (region = 'Asia')))" @dataset.exclude([:id1, :id2] => Sequel.value_list([[1, 2], [3,4]])).sql.must_equal "SELECT * FROM test WHERE ((id1, id2) NOT IN ((1, 2), (3, 4)))" @dataset.exclude([:id1, :id2] => [[1, 2], [3,4]]).sql.must_equal "SELECT * FROM test WHERE ((id1, id2) NOT IN ((1, 2), (3, 4)))" end it "should handle IN/NOT IN queries with multiple columns and an array where the database doesn't support it" do @dataset = @dataset.with_extend{def supports_multiple_column_in?; false end} @dataset.filter([:id1, :id2] => [[1, 2], [3,4]]).sql.must_equal "SELECT * FROM test WHERE (((id1 = 1) AND (id2 = 2)) OR ((id1 = 3) AND (id2 = 4)))" @dataset.exclude([:id1, :id2] => [[1, 2], [3,4]]).sql.must_equal "SELECT * FROM test WHERE (((id1 != 1) OR (id2 != 2)) AND ((id1 != 3) OR (id2 != 4)))" @dataset.filter([:id1, :id2] => 
Sequel.value_list([[1, 2], [3,4]])).sql.must_equal "SELECT * FROM test WHERE (((id1 = 1) AND (id2 = 2)) OR ((id1 = 3) AND (id2 = 4)))" @dataset.exclude([:id1, :id2] => Sequel.value_list([[1, 2], [3,4]])).sql.must_equal "SELECT * FROM test WHERE (((id1 != 1) OR (id2 != 2)) AND ((id1 != 3) OR (id2 != 4)))" end it "should handle IN/NOT IN queries with multiple columns and a dataset where the database doesn't support it" do @dataset = @dataset.with_extend{def supports_multiple_column_in?; false end} db = Sequel.mock(:fetch=>[{:id1=>1, :id2=>2}, {:id1=>3, :id2=>4}]) d1 = db[:test].select(:id1, :id2).filter(:region=>'Asia').columns(:id1, :id2) @dataset.filter([:id1, :id2] => d1).sql.must_equal "SELECT * FROM test WHERE (((id1 = 1) AND (id2 = 2)) OR ((id1 = 3) AND (id2 = 4)))" db.sqls.must_equal ["SELECT id1, id2 FROM test WHERE (region = 'Asia')"] @dataset.exclude([:id1, :id2] => d1).sql.must_equal "SELECT * FROM test WHERE (((id1 != 1) OR (id2 != 2)) AND ((id1 != 3) OR (id2 != 4)))" db.sqls.must_equal ["SELECT id1, id2 FROM test WHERE (region = 'Asia')"] end it "should handle IN/NOT IN queries with multiple columns and a non-array/non-dataset where the database doesn't support it" do @dataset = @dataset.with_extend{def supports_multiple_column_in?; false end} db = Sequel.mock(:fetch=>[{:id1=>1, :id2=>2}, {:id1=>3, :id2=>4}]) d1 = Class.new(Sequel::SQL::Wrapper) do def to_a; @value.to_a; end def columns; @value.columns; end end.new(db[:test].select(:id1, :id2).filter(:region=>'Asia').columns(:id1, :id2)) @dataset.where(Sequel::SQL::ComplexExpression.new(:IN, [:id1, :id2], d1)).sql.must_equal "SELECT * FROM test WHERE (((id1 = 1) AND (id2 = 2)) OR ((id1 = 3) AND (id2 = 4)))" db.sqls.must_equal ["SELECT id1, id2 FROM test WHERE (region = 'Asia')"] end it "should handle IN/NOT IN queries with multiple columns and an empty dataset where the database doesn't support it" do @dataset = @dataset.with_extend{def supports_multiple_column_in?; false end} db = Sequel.mock d1 = db[:test].select(:id1, :id2).filter(:region=>'Asia').columns(:id1, :id2) @dataset.filter([:id1, :id2] => d1).sql.must_equal "SELECT * FROM test WHERE (1 = 0)" db.sqls.must_equal ["SELECT id1, id2 FROM test WHERE (region = 'Asia')"] @dataset.exclude([:id1, :id2] => d1).sql.must_equal "SELECT * FROM test WHERE (1 = 1)" db.sqls.must_equal ["SELECT id1, id2 FROM test WHERE (region = 'Asia')"] end it "should handle IN/NOT IN queries for datasets with row_procs" do @dataset = @dataset.with_extend{def supports_multiple_column_in?; false end} db = Sequel.mock(:fetch=>[{:id1=>1, :id2=>2}, {:id1=>3, :id2=>4}]) d1 = db[:test].select(:id1, :id2).filter(:region=>'Asia').columns(:id1, :id2).with_row_proc(proc{|h| Object.new}) @dataset.filter([:id1, :id2] => d1).sql.must_equal "SELECT * FROM test WHERE (((id1 = 1) AND (id2 = 2)) OR ((id1 = 3) AND (id2 = 4)))" db.sqls.must_equal ["SELECT id1, id2 FROM test WHERE (region = 'Asia')"] @dataset.exclude([:id1, :id2] => d1).sql.must_equal "SELECT * FROM test WHERE (((id1 != 1) OR (id2 != 2)) AND ((id1 != 3) OR (id2 != 4)))" db.sqls.must_equal ["SELECT id1, id2 FROM test WHERE (region = 'Asia')"] end it "should accept a subquery for an EXISTS clause" do a = @dataset.filter(Sequel.expr(:price) < 100) @dataset.filter(a.exists).sql.must_equal 'SELECT * FROM test WHERE (EXISTS (SELECT * FROM test WHERE (price < 100)))' end it "should accept proc expressions" do d = @d1.select(Sequel.function(:avg, :gdp)) @dataset.filter{gdp > d}.sql.must_equal "SELECT * FROM test WHERE (gdp > (SELECT avg(gdp) FROM test WHERE 
(region = 'Asia')))" @dataset.filter{a < 1}.sql.must_equal 'SELECT * FROM test WHERE (a < 1)' @dataset.filter{(a >= 1) & (b <= 2)}.sql.must_equal 'SELECT * FROM test WHERE ((a >= 1) AND (b <= 2))' @dataset.filter{c.like 'ABC%'}.sql.must_equal "SELECT * FROM test WHERE (c LIKE 'ABC%' ESCAPE '\\')" @dataset.filter{c.like 'ABC%', '%XYZ'}.sql.must_equal "SELECT * FROM test WHERE ((c LIKE 'ABC%' ESCAPE '\\') OR (c LIKE '%XYZ' ESCAPE '\\'))" end it "should work for grouped datasets" do @dataset.group(:a).filter(:b => 1).sql.must_equal 'SELECT * FROM test WHERE (b = 1) GROUP BY a' end it "should accept true and false as arguments" do @dataset.filter(true).sql.must_equal "SELECT * FROM test WHERE 't'" @dataset.filter(Sequel::SQLTRUE).sql.must_equal "SELECT * FROM test WHERE 't'" @dataset.filter(false).sql.must_equal "SELECT * FROM test WHERE 'f'" @dataset.filter(Sequel::SQLFALSE).sql.must_equal "SELECT * FROM test WHERE 'f'" end it "should use boolean expression if dataset does not support where true/false" do @dataset = @dataset.with_extend{def supports_where_true?() false end} @dataset.filter(true).sql.must_equal "SELECT * FROM test WHERE (1 = 1)" @dataset.filter(Sequel::SQLTRUE).sql.must_equal "SELECT * FROM test WHERE (1 = 1)" @dataset.filter(false).sql.must_equal "SELECT * FROM test WHERE (1 = 0)" @dataset.filter(Sequel::SQLFALSE).sql.must_equal "SELECT * FROM test WHERE (1 = 0)" end it "should allow the use of blocks and arguments simultaneously" do @dataset.filter(Sequel.expr(:zz) < 3){yy > 3}.sql.must_equal 'SELECT * FROM test WHERE ((zz < 3) AND (yy > 3))' end it "should yield a VirtualRow to the block" do x = nil @dataset.filter{|r| x = r; false} x.must_be_kind_of(Sequel::SQL::VirtualRow) @dataset.filter{|r| ((r.name < 'b') & {r.table_id => 1}) | r.is_active(r.blah, r.xx, r.x_y_z)}.sql. must_equal "SELECT * FROM test WHERE (((name < 'b') AND (table_id = 1)) OR is_active(blah, xx, x_y_z))" end it "should instance_eval the block in the context of a VirtualRow if the block doesn't request an argument" do x = nil @dataset.filter{x = self; false} x.must_be_kind_of(Sequel::SQL::VirtualRow) @dataset.filter{((name < 'b') & {table_id => 1}) | is_active(blah, xx, x_y_z)}.sql. 
must_equal "SELECT * FROM test WHERE (((name < 'b') AND (table_id = 1)) OR is_active(blah, xx, x_y_z))" end it "should handle arbitrary objects" do o = Object.new def o.sql_literal(ds) "foo" end @dataset.filter(o).sql.must_equal 'SELECT * FROM test WHERE foo' end it "should raise an error if an numeric is used" do proc{@dataset.filter(1)}.must_raise(Sequel::Error) proc{@dataset.filter(1.0)}.must_raise(Sequel::Error) proc{@dataset.filter(BigDecimal('1.0'))}.must_raise(Sequel::Error) end it "should raise an error if a NumericExpression or StringExpression is used" do proc{@dataset.filter(Sequel.expr(:x) + 1)}.must_raise(Sequel::Error) proc{@dataset.filter(Sequel.expr(:x).sql_string)}.must_raise(Sequel::Error) end end describe "Dataset#or" do before do @dataset = Sequel.mock.dataset.from(:test) @d1 = @dataset.where(:x => 1) end it "should just clone if no where clause exists" do @dataset.or(:a => 1).sql.must_equal 'SELECT * FROM test' end it "should just clone if given an empty array or hash argument" do @d1.or({}).sql.must_equal @d1.sql @d1.or([]).sql.must_equal @d1.sql end it "should add an alternative expression to the where clause" do @d1.or(:y => 2).sql.must_equal 'SELECT * FROM test WHERE ((x = 1) OR (y = 2))' end it "should accept literal string filters" do @d1.or(Sequel.lit('y > ?', 2)).sql.must_equal 'SELECT * FROM test WHERE ((x = 1) OR (y > 2))' end it "should accept expression filters" do @d1.or(Sequel.expr(:yy) > 3).sql.must_equal 'SELECT * FROM test WHERE ((x = 1) OR (yy > 3))' end it "should accept blocks passed to filter" do @d1.or{yy > 3}.sql.must_equal 'SELECT * FROM test WHERE ((x = 1) OR (yy > 3))' end it "should correctly add parens to give predictable results" do @d1.filter(:y => 2).or(:z => 3).sql.must_equal 'SELECT * FROM test WHERE (((x = 1) AND (y = 2)) OR (z = 3))' @d1.or(:y => 2).filter(:z => 3).sql.must_equal 'SELECT * FROM test WHERE (((x = 1) OR (y = 2)) AND (z = 3))' end it "should allow the use of blocks and arguments simultaneously" do @d1.or(Sequel.expr(:zz) < 3){yy > 3}.sql.must_equal 'SELECT * FROM test WHERE ((x = 1) OR ((zz < 3) AND (yy > 3)))' end end describe "Dataset#exclude" do before do @dataset = Sequel.mock.dataset.from(:test) end it "should correctly negate the expression when one condition is given" do @dataset.exclude(:region=>'Asia').select_sql.must_equal "SELECT * FROM test WHERE (region != 'Asia')" end it "should take multiple conditions as a hash and express the logic correctly in SQL" do @dataset.exclude(:region => 'Asia', :name => 'Japan').select_sql.must_equal 'SELECT * FROM test WHERE ((region != \'Asia\') OR (name != \'Japan\'))' end it "should parenthesize a single literal string condition correctly" do @dataset.exclude(Sequel.lit("region = 'Asia' AND name = 'Japan'")).select_sql.must_equal "SELECT * FROM test WHERE NOT (region = 'Asia' AND name = 'Japan')" end it "should parenthesize a placeholder literal string condition correctly" do @dataset.exclude(Sequel.lit('region = ? 
AND name = ?', 'Asia', 'Japan')).select_sql.must_equal "SELECT * FROM test WHERE NOT (region = 'Asia' AND name = 'Japan')" end it "should correctly parenthesize when it is used twice" do @dataset.exclude(:region => 'Asia').exclude(:name => 'Japan').select_sql.must_equal "SELECT * FROM test WHERE ((region != 'Asia') AND (name != 'Japan'))" end it "should support proc expressions" do @dataset.exclude{id < 6}.sql.must_equal 'SELECT * FROM test WHERE (id >= 6)' end it "should allow the use of blocks and arguments simultaneously" do @dataset.exclude(:id => (7..11)){id < 6}.sql.must_equal 'SELECT * FROM test WHERE ((id < 7) OR (id > 11) OR (id >= 6))' @dataset.exclude([:id, 1], [:x, 3]){id < 6}.sql.must_equal 'SELECT * FROM test WHERE ((id != 1) OR (x != 3) OR (id >= 6))' end end describe "Dataset#exclude_having" do it "should correctly negate the expression and add it to the having clause" do Sequel.mock.dataset.from(:test).exclude_having{count > 2}.exclude_having{count < 0}.sql.must_equal "SELECT * FROM test HAVING ((count <= 2) AND (count >= 0))" end end describe "Dataset#invert" do before do @d = Sequel.mock.dataset.from(:test) end it "should return a dataset that selects no rows if dataset is not filtered" do 3.times do @d.invert.sql.must_equal "SELECT * FROM test WHERE 'f'" end end it "should invert current filter if dataset is filtered" do @d.filter(:x).invert.sql.must_equal 'SELECT * FROM test WHERE NOT x' end it "should invert both having and where if both are present" do @d.filter(:x).group(:x).having(:x).invert.sql.must_equal 'SELECT * FROM test WHERE NOT x GROUP BY x HAVING NOT x' end it "should invert having if where not present" do @d.group(:x).having(:x).invert.sql.must_equal 'SELECT * FROM test GROUP BY x HAVING NOT x' end end describe "Dataset#having" do before do @dataset = Sequel.mock.dataset.from(:test) @grouped = @dataset.group(:region).select(:region, Sequel.function(:sum, :population), Sequel.function(:avg, :gdp)) end it "should just clone if given an empty array or hash argument" do @dataset.having({}).sql.must_equal @dataset.sql @dataset.having([]).sql.must_equal @dataset.sql end it "should handle literal string arguments" do @grouped.having(Sequel.lit('sum(population) > 10')).select_sql.must_equal "SELECT region, sum(population), avg(gdp) FROM test GROUP BY region HAVING (sum(population) > 10)" end it "should support proc expressions" do @grouped.having{Sequel.function(:sum, :population) > 10}.sql.must_equal "SELECT region, sum(population), avg(gdp) FROM test GROUP BY region HAVING (sum(population) > 10)" end end describe "a grouped dataset" do before do @dataset = Sequel.mock.dataset.from(:test).group(:type_id) end it "should raise when trying to generate an update statement" do proc {@dataset.update_sql(:id => 0)}.must_raise Sequel::InvalidOperation end it "should raise when trying to generate a delete statement" do proc {@dataset.delete_sql}.must_raise Sequel::InvalidOperation end it "should raise when trying to generate a truncate statement" do proc {@dataset.truncate_sql}.must_raise Sequel::InvalidOperation end it "should raise when trying to generate an insert statement" do proc {@dataset.insert_sql}.must_raise Sequel::InvalidOperation end it "should specify the grouping in generated select statement" do @dataset.select_sql.must_equal "SELECT * FROM test GROUP BY type_id" end it "should format the right statement for counting (as a subquery)" do db = Sequel.mock db[:test].select(:name).group(:name).count db.sqls.must_equal ["SELECT count(*) AS count FROM (SELECT 
name FROM test GROUP BY name) AS t1 LIMIT 1"] end end describe "Dataset#group_by" do before do @dataset = Sequel.mock[:test].group_by(:type_id) end it "should raise when trying to generate an update statement" do proc {@dataset.update_sql(:id => 0)}.must_raise Sequel::InvalidOperation end it "should raise when trying to generate a delete statement" do proc {@dataset.delete_sql}.must_raise Sequel::InvalidOperation end it "should specify the grouping in generated select statement" do @dataset.select_sql.must_equal "SELECT * FROM test GROUP BY type_id" @dataset.group_by(:a, :b).select_sql.must_equal "SELECT * FROM test GROUP BY a, b" @dataset.group_by(:type_id=>nil).select_sql.must_equal "SELECT * FROM test GROUP BY (type_id IS NULL)" end it "should ungroup when passed nil or no arguments" do @dataset.group_by.select_sql.must_equal "SELECT * FROM test" @dataset.group_by(nil).select_sql.must_equal "SELECT * FROM test" end it "should undo previous grouping" do @dataset.group_by(:a).group_by(:b).select_sql.must_equal "SELECT * FROM test GROUP BY b" @dataset.group_by(:a, :b).group_by.select_sql.must_equal "SELECT * FROM test" end it "should be aliased as #group" do @dataset.group(:type_id=>nil).select_sql.must_equal "SELECT * FROM test GROUP BY (type_id IS NULL)" end it "should take a virtual row block" do @dataset.group{type_id > 1}.sql.must_equal "SELECT * FROM test GROUP BY (type_id > 1)" @dataset.group_by{type_id > 1}.sql.must_equal "SELECT * FROM test GROUP BY (type_id > 1)" @dataset.group{[type_id > 1, type_id < 2]}.sql.must_equal "SELECT * FROM test GROUP BY (type_id > 1), (type_id < 2)" @dataset.group(:foo){type_id > 1}.sql.must_equal "SELECT * FROM test GROUP BY foo, (type_id > 1)" end it "should support a #group_rollup method if the database supports it" do @dataset = @dataset.with_extend{def supports_group_rollup?; true end} @dataset.group(:type_id).group_rollup.select_sql.must_equal "SELECT * FROM test GROUP BY ROLLUP(type_id)" @dataset.group(:type_id, :b).group_rollup.select_sql.must_equal "SELECT * FROM test GROUP BY ROLLUP(type_id, b)" @dataset = @dataset.with_extend{def uses_with_rollup?; true end} @dataset.group(:type_id).group_rollup.select_sql.must_equal "SELECT * FROM test GROUP BY type_id WITH ROLLUP" @dataset.group(:type_id, :b).group_rollup.select_sql.must_equal "SELECT * FROM test GROUP BY type_id, b WITH ROLLUP" end it "should support a #group_cube method if the database supports it" do @dataset = @dataset.with_extend{def supports_group_cube?; true end} @dataset.group(:type_id).group_cube.select_sql.must_equal "SELECT * FROM test GROUP BY CUBE(type_id)" @dataset.group(:type_id, :b).group_cube.select_sql.must_equal "SELECT * FROM test GROUP BY CUBE(type_id, b)" @dataset = @dataset.with_extend{def uses_with_rollup?; true end} @dataset.group(:type_id).group_cube.select_sql.must_equal "SELECT * FROM test GROUP BY type_id WITH CUBE" @dataset.group(:type_id, :b).group_cube.select_sql.must_equal "SELECT * FROM test GROUP BY type_id, b WITH CUBE" end it "should support a #grouping_sets method if the database supports it" do @dataset = @dataset.with_extend{def supports_grouping_sets?; true end} @dataset.group(:type_id).grouping_sets.select_sql.must_equal "SELECT * FROM test GROUP BY GROUPING SETS((type_id))" @dataset.group([:type_id, :b], :type_id, []).grouping_sets.select_sql.must_equal "SELECT * FROM test GROUP BY GROUPING SETS((type_id, b), (type_id), ())" end it "should have #group_* methods raise an Error if not supported" do
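# Sketch for context (mirroring the capability flags toggled above; the
# with_extend block is illustrative): ROLLUP/CUBE/GROUPING SETS are emitted
# only when the adapter advertises support; otherwise, as asserted below,
# the #group_* methods raise Sequel::Error:
#
#   ds = Sequel.mock[:test].with_extend{def supports_group_rollup?; true end}
#   ds.group(:a, :b).group_rollup.sql
#   # => "SELECT * FROM test GROUP BY ROLLUP(a, b)"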
proc{@dataset.group(:type_id).group_rollup}.must_raise(Sequel::Error) proc{@dataset.group(:type_id).group_cube}.must_raise(Sequel::Error) proc{@dataset.group(:type_id).grouping_sets}.must_raise(Sequel::Error) end end describe "Dataset#group_append" do before do @d = Sequel.mock.dataset.from(:test) end it "should group by the given columns if no existing columns are present" do @d.group_append(:a).sql.must_equal 'SELECT * FROM test GROUP BY a' end it "should add to the currently grouped columns" do @d.group(:a).group_append(:b).sql.must_equal 'SELECT * FROM test GROUP BY a, b' end it "should accept a block that yields a virtual row" do @d.group(:a).group_append{:b}.sql.must_equal 'SELECT * FROM test GROUP BY a, b' @d.group(:a).group_append(:c){b}.sql.must_equal 'SELECT * FROM test GROUP BY a, c, b' end end describe "Dataset#as" do before do @ds = Sequel.mock.dataset.from(:test) end it "should set up an alias" do @ds.select(@ds.limit(1).select(:name).as(:n)).sql.must_equal 'SELECT (SELECT name FROM test LIMIT 1) AS n FROM test' @ds.select(@ds.limit(1).select(:name).as(:n, [:nm])).sql.must_equal 'SELECT (SELECT name FROM test LIMIT 1) AS n(nm) FROM test' end it "should error if the database does not support derived column lists and one is given" do @ds = @ds.with_extend{def supports_derived_column_lists?; false end} @ds = @ds.select(@ds.limit(1).select(:name).as(:n, [:nm])) proc{@ds.sql}.must_raise Sequel::Error end end describe "Dataset#literal with expressions" do before do @ds = Sequel.mock.dataset end it "should convert qualified identifiers into dot notation" do @ds.literal(Sequel[:abc][:def]).must_equal 'abc.def' end it "should convert aliased expressions into SQL AS notation" do @ds.literal(Sequel[:xyz].as(:x)).must_equal 'xyz AS x' @ds.literal(Sequel[:abc][:def].as(:x)).must_equal 'abc.def AS x' end it "should support names with digits" do @ds.literal(:abc2).must_equal 'abc2' @ds.literal(Sequel[:xx][:yy3]).must_equal 'xx.yy3' @ds.literal(Sequel[:ab34][:temp3_4ax]).must_equal 'ab34.temp3_4ax' @ds.literal(Sequel[:x1].as(:y2)).must_equal 'x1 AS y2' @ds.literal(Sequel[:abc2][:def3].as(:ggg4)).must_equal 'abc2.def3 AS ggg4' end it "should support upper case and lower case" do @ds.literal(:ABC).must_equal 'ABC' @ds.literal(Sequel[:Zvashtoy][:aBcD]).must_equal 'Zvashtoy.aBcD' end it "should support spaces inside column names" do @ds = @ds.with_quote_identifiers(true) @ds.literal(:"AB C").must_equal '"AB C"' @ds.literal(Sequel[:"Zvas htoy"][:"aB cD"]).must_equal '"Zvas htoy"."aB cD"' @ds.literal(Sequel[:"aB cD"].as(:"XX XX")).must_equal '"aB cD" AS "XX XX"' @ds.literal(Sequel[:"Zva shtoy"][:"aB cD"].as("XX XX")).must_equal '"Zva shtoy"."aB cD" AS "XX XX"' end end describe "Dataset#literal with splittable symbols" do before do @ds = Sequel.mock.dataset end with_symbol_splitting "should convert qualified symbol notation into dot notation" do @ds.literal(:abc__def).must_equal 'abc.def' end with_symbol_splitting "should convert AS symbol notation into SQL AS notation" do @ds.literal(:xyz___x).must_equal 'xyz AS x' @ds.literal(:abc__def___x).must_equal 'abc.def AS x' end with_symbol_splitting "should support names with digits" do @ds.literal(:abc2).must_equal 'abc2' @ds.literal(:xx__yy3).must_equal 'xx.yy3' @ds.literal(:ab34__temp3_4ax).must_equal 'ab34.temp3_4ax' @ds.literal(:x1___y2).must_equal 'x1 AS y2' @ds.literal(:abc2__def3___ggg4).must_equal 'abc2.def3 AS ggg4' end with_symbol_splitting "should support upper case and lower case" do @ds.literal(:ABC).must_equal 'ABC' 
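# Aside (illustrative; symbol splitting is legacy behavior that these
# with_symbol_splitting specs enable explicitly): #literal converts a Ruby
# object into its SQL fragment, and the modern spelling of a qualified name
# avoids embedded double-underscore splitting entirely:
#
#   ds = Sequel.mock.dataset
#   ds.literal("a'b")                      # => "'a''b'" (quote doubled)
#   ds.literal(Sequel[:Zvashtoy][:aBcD])   # => "Zvashtoy.aBcD"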
@ds.literal(:Zvashtoy__aBcD).must_equal 'Zvashtoy.aBcD' end with_symbol_splitting "should support spaces inside column names" do @ds = @ds.with_quote_identifiers(true) @ds.literal(:"AB C").must_equal '"AB C"' @ds.literal(:"Zvas htoy__aB cD").must_equal '"Zvas htoy"."aB cD"' @ds.literal(:"aB cD___XX XX").must_equal '"aB cD" AS "XX XX"' @ds.literal(:"Zva shtoy__aB cD___XX XX").must_equal '"Zva shtoy"."aB cD" AS "XX XX"' end end describe "Dataset#literal" do before do @dataset = Sequel.mock[:test] end it "should escape strings properly" do @dataset.literal('abc').must_equal "'abc'" @dataset.literal('a"x"bc').must_equal "'a\"x\"bc'" @dataset.literal("a'bc").must_equal "'a''bc'" @dataset.literal("a''bc").must_equal "'a''''bc'" @dataset.literal("a\\bc").must_equal "'a\\bc'" @dataset.literal("a\\\\bc").must_equal "'a\\\\bc'" @dataset.literal("a\\'bc").must_equal "'a\\''bc'" end it "should escape blobs as strings by default" do @dataset.literal(Sequel.blob('abc')).must_equal "'abc'" end it "should literalize numbers properly" do @dataset.literal(1).must_equal "1" @dataset.literal(1.5).must_equal "1.5" end it "should literalize nil as NULL" do @dataset.literal(nil).must_equal "NULL" end it "should literalize an array properly" do @dataset.literal([]).must_equal "(NULL)" @dataset.literal([1, 'abc', 3]).must_equal "(1, 'abc', 3)" @dataset.literal([1, "a'b''c", 3]).must_equal "(1, 'a''b''''c', 3)" end it "should literalize symbols as column references" do @dataset.literal(:name).must_equal "name" end with_symbol_splitting "should literalize symbols with embedded qualifiers as column references" do @dataset.literal(:items__name).must_equal "items.name" @dataset.literal(:"items__na#m$e").must_equal "items.na#m$e" end it "should call sql_literal_append with dataset and sql on type if not natively supported and the object responds to it" do @a = Class.new do def sql_literal_append(ds, sql) sql << "called #{ds.blah}" end def sql_literal(ds) "not called #{ds.blah}" end end @dataset.with_extend{def blah; "ds" end}.literal(@a.new).must_equal "called ds" end it "should call sql_literal with dataset on type if not natively supported and the object responds to it" do @a = Class.new do def sql_literal(ds) "called #{ds.blah}" end end @dataset.with_extend{def blah; "ds" end}.literal(@a.new).must_equal "called ds" end it "should literalize datasets as subqueries" do d = @dataset.from(:test) d.literal(d).must_equal "(#{d.sql})" end it "should literalize times properly" do @dataset.literal(Sequel::SQLTime.create(1, 2, 3, 500000)).must_equal "'01:02:03.500000'" @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5, 500000)).must_equal "'2010-01-02 03:04:05.500000'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(55, 10))).must_equal "'2010-01-02 03:04:05.500000'" end it "should literalize times properly for databases not supporting fractional seconds" do @dataset = @dataset.with_extend{def supports_timestamp_usecs?; false end} @dataset.literal(Sequel::SQLTime.create(1, 2, 3, 500000)).must_equal "'01:02:03'" @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5, 500000)).must_equal "'2010-01-02 03:04:05'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(55, 10))).must_equal "'2010-01-02 03:04:05'" end it "should literalize times properly for databases supporting millisecond precision" do @dataset = @dataset.with_extend{def timestamp_precision; 3 end} @dataset.literal(Sequel::SQLTime.create(1, 2, 3, 500000)).must_equal "'01:02:03.500'" @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5, 500000)).must_equal "'2010-01-02 
03:04:05.500'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(55, 10))).must_equal "'2010-01-02 03:04:05.500'" end it "should literalize times properly for databases with different time and timestamp precision" do @dataset = @dataset.with_extend{def timestamp_precision; 3 end; def sqltime_precision; 6 end} @dataset.literal(Sequel::SQLTime.create(1, 2, 3, 500000)).must_equal "'01:02:03.500000'" @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5, 500000)).must_equal "'2010-01-02 03:04:05.500'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(55, 10))).must_equal "'2010-01-02 03:04:05.500'" end it "should return 0 for timestamp precision if not supporting fractional seconds" do @dataset.with_extend{def supports_timestamp_usecs?; false end}.send(:timestamp_precision).must_equal 0 end it "should literalize Date properly" do d = Date.today s = d.strftime("'%Y-%m-%d'") @dataset.literal(d).must_equal s end it "should literalize Date properly, even if to_s is overridden" do d = Date.today def d.to_s; "adsf" end s = d.strftime("'%Y-%m-%d'") @dataset.literal(d).must_equal s end it "should literalize Time, DateTime, Date properly if SQL standard format is required" do @dataset = @dataset.with_extend{def requires_sql_standard_datetimes?; true end} @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5, 500000)).must_equal "TIMESTAMP '2010-01-02 03:04:05.500000'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(55, 10))).must_equal "TIMESTAMP '2010-01-02 03:04:05.500000'" @dataset.literal(Date.new(2010, 1, 2)).must_equal "DATE '2010-01-02'" end it "should literalize Time and DateTime properly if the database support timezones in timestamps" do @dataset = @dataset.with_extend{def supports_timestamp_timezones?; true end} @dataset.literal(Time.utc(2010, 1, 2, 3, 4, 5, 500000)).must_equal "'2010-01-02 03:04:05.500000+0000'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(55, 10))).must_equal "'2010-01-02 03:04:05.500000+0000'" @dataset = @dataset.with_extend{def supports_timestamp_usecs?; false end} @dataset.literal(Time.utc(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05+0000'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05+0000'" end it "should not modify literal strings" do @dataset = @dataset.with_quote_identifiers(true) @dataset.literal(Sequel.lit('col1 + 2')).must_equal 'col1 + 2' @dataset.update_sql(Sequel::SQL::Identifier.new(Sequel.lit('a')) => Sequel.lit('a + 2')).must_equal 'UPDATE "test" SET a = a + 2' end it "should literalize BigDecimal instances correctly" do @dataset.literal(BigDecimal("80")).must_equal "80.0" @dataset.literal(BigDecimal("NaN")).must_equal "'NaN'" @dataset.literal(BigDecimal("Infinity")).must_equal "'Infinity'" @dataset.literal(BigDecimal("-Infinity")).must_equal "'-Infinity'" end it "should literalize PlaceholderLiteralStrings correctly" do @dataset.literal(Sequel::SQL::PlaceholderLiteralString.new('? = ?', [1, 2])).must_equal '1 = 2' @dataset.literal(Sequel::SQL::PlaceholderLiteralString.new('? 
= ?', [1, 2], true)).must_equal '(1 = 2)' @dataset.literal(Sequel::SQL::PlaceholderLiteralString.new(':a = :b', :a=>1, :b=>2)).must_equal '1 = 2' @dataset.literal(Sequel::SQL::PlaceholderLiteralString.new(':a = :b', {:a=>1, :b=>2}, true)).must_equal '(1 = 2)' @dataset.literal(Sequel::SQL::PlaceholderLiteralString.new(['', ' = ', ''], [1, 2])).must_equal '1 = 2' @dataset.literal(Sequel::SQL::PlaceholderLiteralString.new(['', ' = ', ''], [1, 2], true)).must_equal '(1 = 2)' @dataset.literal(Sequel::SQL::PlaceholderLiteralString.new(['', ' = '], [1, 2])).must_equal '1 = 2' @dataset.literal(Sequel::SQL::PlaceholderLiteralString.new(['', ' = '], [1, 2], true)).must_equal '(1 = 2)' end it "should raise an Error if the object can't be literalized" do proc{@dataset.literal(Object.new)}.must_raise(Sequel::Error) end end describe "Dataset#from" do before do @dataset = Sequel.mock.dataset end it "should accept a Dataset" do @dataset.from(@dataset) end it "should format a Dataset as a subquery if it has had options set" do @dataset.from(@dataset.from(:a).where(:a=>1)).select_sql.must_equal "SELECT * FROM (SELECT * FROM a WHERE (a = 1)) AS t1" end it "should automatically alias sub-queries" do @dataset.from(@dataset.from(:a).group(:b)).select_sql.must_equal "SELECT * FROM (SELECT * FROM a GROUP BY b) AS t1" d1 = @dataset.from(:a).group(:b) d2 = @dataset.from(:c).group(:d) @dataset.from(d1, d2).sql.must_equal "SELECT * FROM (SELECT * FROM a GROUP BY b) AS t1, (SELECT * FROM c GROUP BY d) AS t2" end it "should always use a subquery if given a dataset" do @dataset.from(@dataset.from(:a)).select_sql.must_equal "SELECT * FROM (SELECT * FROM a) AS t1" end it "should treat string arguments as identifiers" do @dataset.with_quote_identifiers(true).from('a').select_sql.must_equal "SELECT * FROM \"a\"" end it "should not treat literal strings or blobs as identifiers" do @dataset = @dataset.with_quote_identifiers(true) @dataset.from(Sequel.lit('a')).select_sql.must_equal "SELECT * FROM a" @dataset.from(Sequel.blob('a')).select_sql.must_equal "SELECT * FROM 'a'" end it "should remove all FROM tables if called with no arguments" do @dataset.from.sql.must_equal 'SELECT *' end it "should accept sql functions" do @dataset.from(Sequel.function(:abc, :def)).select_sql.must_equal "SELECT * FROM abc(def)" @dataset.from(Sequel.function(:a, :i)).select_sql.must_equal "SELECT * FROM a(i)" end it "should accept virtual row blocks" do @dataset.from{abc(de)}.select_sql.must_equal "SELECT * FROM abc(de)" @dataset.from{[i, abc(de)]}.select_sql.must_equal "SELECT * FROM i, abc(de)" @dataset.from(:a){i}.select_sql.must_equal "SELECT * FROM a, i" @dataset.from(:a, :b){i}.select_sql.must_equal "SELECT * FROM a, b, i" @dataset.from(:a, :b){[i, abc(de)]}.select_sql.must_equal "SELECT * FROM a, b, i, abc(de)" end it "should handle LATERAL subqueries" do @dataset.from(:a, @dataset.from(:b).lateral).select_sql.must_equal "SELECT * FROM a, LATERAL (SELECT * FROM b) AS t1" end it "should automatically use a default from table if no from table is present" do @dataset = @dataset.with_extend{def empty_from_sql; ' FROM DEFFROM'; end} @dataset.select_sql.must_equal "SELECT * FROM DEFFROM" end with_symbol_splitting "should accept :schema__table___alias symbol format" do @dataset.from(:abc__def).select_sql.must_equal "SELECT * FROM abc.def" @dataset.from(:a_b__c).select_sql.must_equal "SELECT * FROM a_b.c" @dataset.from(:'#__#').select_sql.must_equal 'SELECT * FROM #.#' @dataset.from(:abc__def___d).select_sql.must_equal "SELECT * FROM abc.def AS d" 
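# Aside (illustrative): :abc__def___d is the legacy splittable-symbol form
# for schema/table qualification plus aliasing; the equivalent without
# symbol splitting uses explicit qualification and aliasing:
#
#   Sequel.mock.dataset.from(Sequel[:abc][:def].as(:d)).sql
#   # => "SELECT * FROM abc.def AS d"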
@dataset.from(:a_b__d_e___f_g).select_sql.must_equal "SELECT * FROM a_b.d_e AS f_g" @dataset.from(:'#__#___#').select_sql.must_equal 'SELECT * FROM #.# AS #' @dataset.from(:abc___def).select_sql.must_equal "SELECT * FROM abc AS def" @dataset.from(:a_b___c_d).select_sql.must_equal "SELECT * FROM a_b AS c_d" @dataset.from(:'#___#').select_sql.must_equal 'SELECT * FROM # AS #' end with_symbol_splitting "should not handle multi level qualification in embedded symbols specially" do @dataset.from(:foo__schema__table___alias).select_sql.must_equal "SELECT * FROM foo.schema__table AS alias" end it "should hoist WITH clauses from subqueries if the dataset doesn't support CTEs in subselects" do @dataset = @dataset.with_extend do def supports_cte?; true end def supports_cte_in_subselect?; false end end @dataset.from(@dataset.from(:a).with(:a, @dataset.from(:b))).sql.must_equal 'WITH a AS (SELECT * FROM b) SELECT * FROM (SELECT * FROM a) AS t1' @dataset.from(@dataset.from(:a).with(:a, @dataset.from(:b)), @dataset.from(:c).with(:c, @dataset.from(:d))).sql.must_equal 'WITH a AS (SELECT * FROM b), c AS (SELECT * FROM d) SELECT * FROM (SELECT * FROM a) AS t1, (SELECT * FROM c) AS t2' end end describe "Dataset#select" do before do @d = Sequel.mock.dataset.from(:test) end it "should accept variable arity" do @d.select(:name).sql.must_equal 'SELECT name FROM test' @d.select(:a, :b, Sequel[:test][:c]).sql.must_equal 'SELECT a, b, test.c FROM test' end with_symbol_splitting "should accept symbols with embedded qualification and aliasing" do @d.select(:test__cc).sql.must_equal 'SELECT test.cc FROM test' @d.select(:test___cc).sql.must_equal 'SELECT test AS cc FROM test' @d.select(:test__name___n).sql.must_equal 'SELECT test.name AS n FROM test' end it "should accept symbols and literal strings" do @d.select(Sequel.lit('aaa')).sql.must_equal 'SELECT aaa FROM test' @d.select(:a, Sequel.lit('b')).sql.must_equal 'SELECT a, b FROM test' @d.select(:test, Sequel.lit('test.d AS e')).sql.must_equal 'SELECT test, test.d AS e FROM test' @d.select(Sequel.lit('test.d AS e'), :test).sql.must_equal 'SELECT test.d AS e, test FROM test' end it "should accept ColumnAlls" do @d.select(Sequel::SQL::ColumnAll.new(:test)).sql.must_equal 'SELECT test.* FROM test' end it "should accept QualifiedIdentifiers" do @d.select(Sequel.expr(Sequel[:test][:name]).as(:n)).sql.must_equal 'SELECT test.name AS n FROM test' end with_symbol_splitting "should accept qualified identifiers in symbols in expressions" do @d.select(Sequel.expr(:test__name).as(:n)).sql.must_equal 'SELECT test.name AS n FROM test' end it "should use the wildcard if no arguments are given" do @d.select.sql.must_equal 'SELECT * FROM test' end it "should handle array condition specifiers that are aliased" do @d.select(Sequel.as([[:b, :c]], :n)).sql.must_equal 'SELECT (b = c) AS n FROM test' end it "should handle hashes returned from virtual row blocks" do @d.select{{:b=>:c}}.sql.must_equal 'SELECT (b = c) FROM test' end it "should override the previous select option" do @d.select(:a, :b, :c).select.sql.must_equal 'SELECT * FROM test' @d.select(:price).select(:name).sql.must_equal 'SELECT name FROM test' end it "should accept arbitrary objects and literalize them correctly" do @d.select(1, :a, 't').sql.must_equal "SELECT 1, a, 't' FROM test" @d.select(nil, Sequel.function(:sum, :t), Sequel[:x].as(:y)).sql.must_equal "SELECT NULL, sum(t), x AS y FROM test" @d.select(nil, 1, Sequel.as(:x, :y)).sql.must_equal "SELECT NULL, 1, x AS y FROM test" end it "should accept a block that 
yields a virtual row" do @d.select{|o| o.a}.sql.must_equal 'SELECT a FROM test' @d.select{a(1)}.sql.must_equal 'SELECT a(1) FROM test' @d.select{|o| o.a(1, 2)}.sql.must_equal 'SELECT a(1, 2) FROM test' @d.select{[a, a(1, 2)]}.sql.must_equal 'SELECT a, a(1, 2) FROM test' end it "should merge regular arguments with argument returned from block" do @d.select(:b){a}.sql.must_equal 'SELECT b, a FROM test' @d.select(:b, :c){|o| o.a(1)}.sql.must_equal 'SELECT b, c, a(1) FROM test' @d.select(:b){[a, a(1, 2)]}.sql.must_equal 'SELECT b, a, a(1, 2) FROM test' @d.select(:b, :c){|o| [o.a, o.a(1, 2)]}.sql.must_equal 'SELECT b, c, a, a(1, 2) FROM test' end end describe "Dataset#select_group" do before do @d = Sequel.mock.dataset.from(:test) end it "should set both SELECT and GROUP" do @d.select_group(:name).sql.must_equal 'SELECT name FROM test GROUP BY name' @d.select_group(:a, Sequel[:b][:c], Sequel[:d].as(:e)).sql.must_equal 'SELECT a, b.c, d AS e FROM test GROUP BY a, b.c, d' end with_symbol_splitting "should set both SELECT and GROUP when using splittable symbols" do @d.select_group(:a, :b__c, :d___e).sql.must_equal 'SELECT a, b.c, d AS e FROM test GROUP BY a, b.c, d' end it "should remove from both SELECT and GROUP if no arguments" do @d.select_group(:name).select_group.sql.must_equal 'SELECT * FROM test' end it "should accept virtual row blocks" do @d.select_group{name}.sql.must_equal 'SELECT name FROM test GROUP BY name' @d.select_group{[name, f(v).as(a)]}.sql.must_equal 'SELECT name, f(v) AS a FROM test GROUP BY name, f(v)' @d.select_group(:name){f(v).as(a)}.sql.must_equal 'SELECT name, f(v) AS a FROM test GROUP BY name, f(v)' end end describe "Dataset#select_all" do before do @d = Sequel.mock.dataset.from(:test) end it "should select the wildcard" do @d.select_all.sql.must_equal 'SELECT * FROM test' end it "should override the previous select option" do @d.select(:a, :b, :c).select_all.sql.must_equal 'SELECT * FROM test' end it "should select all columns in a table if given an argument" do @d.select_all(:test).sql.must_equal 'SELECT test.* FROM test' end it "should select all columns in all tables if given multiple arguments" do @d.select_all(:test, :foo).sql.must_equal 'SELECT test.*, foo.* FROM test' end with_symbol_splitting "should work correctly with qualified symbols" do @d.select_all(:sch__test).sql.must_equal 'SELECT sch.test.* FROM test' end with_symbol_splitting "should work correctly with aliased symbols" do @d.select_all(:test___al).sql.must_equal 'SELECT al.* FROM test' @d.select_all(:sch__test___al).sql.must_equal 'SELECT al.* FROM test' end it "should work correctly with SQL::Identifiers" do @d.select_all(Sequel.identifier(:test)).sql.must_equal 'SELECT test.* FROM test' end it "should work correctly with SQL::QualifiedIdentifier" do @d.select_all(Sequel.qualify(:sch, :test)).sql.must_equal 'SELECT sch.test.* FROM test' end it "should work correctly with SQL::AliasedExpressions" do @d.select_all(Sequel.expr(:test).as(:al)).sql.must_equal 'SELECT al.* FROM test' end with_symbol_splitting "should work correctly with SQL::JoinClauses with splittable symbols" do d = @d.cross_join(:foo).cross_join(:test___al) @d.select_all(*d.opts[:join]).sql.must_equal 'SELECT foo.*, al.* FROM test' end it "should work correctly with SQL::JoinClauses" do d = @d.cross_join(:foo).cross_join(Sequel[:test].as(:al)) @d.select_all(*d.opts[:join]).sql.must_equal 'SELECT foo.*, al.* FROM test' end end describe "Dataset#select_more" do before do @d = Sequel.mock.dataset.from(:test) end it "should act like
#select_append for datasets with no selection" do @d.select_more(:a, :b).sql.must_equal 'SELECT *, a, b FROM test' @d.select_all.select_more(:a, :b).sql.must_equal 'SELECT *, a, b FROM test' @d.select(:blah).select_all.select_more(:a, :b).sql.must_equal 'SELECT *, a, b FROM test' end it "should add to the currently selected columns" do @d.select(:a).select_more(:b).sql.must_equal 'SELECT a, b FROM test' @d.select(Sequel::SQL::ColumnAll.new(:a)).select_more(Sequel::SQL::ColumnAll.new(:b)).sql.must_equal 'SELECT a.*, b.* FROM test' end it "should accept a block that yields a virtual row" do @d.select(:a).select_more{|o| o.b}.sql.must_equal 'SELECT a, b FROM test' @d.select(Sequel::SQL::ColumnAll.new(:a)).select_more(Sequel::SQL::ColumnAll.new(:b)){b(1)}.sql.must_equal 'SELECT a.*, b.*, b(1) FROM test' end end describe "Dataset#select_append" do before do @d = Sequel.mock.dataset.from(:test) end it "should select * in addition to columns if no columns selected" do @d.select_append(:a, :b).sql.must_equal 'SELECT *, a, b FROM test' @d.select_all.select_append(:a, :b).sql.must_equal 'SELECT *, a, b FROM test' @d.select(:blah).select_all.select_append(:a, :b).sql.must_equal 'SELECT *, a, b FROM test' end it "should add to the currently selected columns" do @d.select(:a).select_append(:b).sql.must_equal 'SELECT a, b FROM test' @d.select(Sequel::SQL::ColumnAll.new(:a)).select_append(Sequel::SQL::ColumnAll.new(:b)).sql.must_equal 'SELECT a.*, b.* FROM test' end it "should accept a block that yields a virtual row" do @d.select(:a).select_append{|o| o.b}.sql.must_equal 'SELECT a, b FROM test' @d.select(Sequel::SQL::ColumnAll.new(:a)).select_append(Sequel::SQL::ColumnAll.new(:b)){b(1)}.sql.must_equal 'SELECT a.*, b.*, b(1) FROM test' end it "should select from all from and join tables if SELECT *, column not supported" do @d = @d.with_extend{def supports_select_all_and_column?; false end} @d.select_append(:b).sql.must_equal 'SELECT test.*, b FROM test' @d.from(:test, :c).select_append(:b).sql.must_equal 'SELECT test.*, c.*, b FROM test, c' @d.cross_join(:c).select_append(:b).sql.must_equal 'SELECT test.*, c.*, b FROM test CROSS JOIN c' @d.cross_join(:c).cross_join(:d).select_append(:b).sql.must_equal 'SELECT test.*, c.*, d.*, b FROM test CROSS JOIN c CROSS JOIN d' end end describe "Dataset#order" do before do @dataset = Sequel.mock.dataset.from(:test) end it "should include an ORDER BY clause in the select statement" do @dataset.order(:name).sql.must_equal 'SELECT * FROM test ORDER BY name' end it "should accept multiple arguments" do @dataset.order(:name, Sequel.desc(:price)).sql.must_equal 'SELECT * FROM test ORDER BY name, price DESC' end it "should accept :nulls options for asc and desc" do @dataset.order(Sequel.asc(:name, :nulls=>:last), Sequel.desc(:price, :nulls=>:first)).sql.must_equal 'SELECT * FROM test ORDER BY name ASC NULLS LAST, price DESC NULLS FIRST' end it "should emulate :nulls options for asc and desc if not natively supported" do @dataset.with_extend{def requires_emulating_nulls_first?; true end}.order(Sequel.asc(:name, :nulls=>:last), Sequel.desc(:price, :nulls=>:first), Sequel.desc(:foo, :nulls=>nil)).sql.must_equal 'SELECT * FROM test ORDER BY (CASE WHEN (name IS NULL) THEN 2 ELSE 1 END), name ASC, (CASE WHEN (price IS NULL) THEN 0 ELSE 1 END), price DESC, foo DESC' end it "should override a previous ordering" do @dataset.order(:name).order(:stamp).sql.must_equal 'SELECT * FROM test ORDER BY stamp' end it "should accept a literal string" do @dataset.order(Sequel.lit('dada 
ASC')).sql.must_equal 'SELECT * FROM test ORDER BY dada ASC' end it "should accept a hash as an expression" do @dataset.order(:name=>nil).sql.must_equal 'SELECT * FROM test ORDER BY (name IS NULL)' end it "should accept a nil to remove ordering" do @dataset.order(:bah).order(nil).sql.must_equal 'SELECT * FROM test' end it "should accept a block that yields a virtual row" do @dataset.order{|o| o.a}.sql.must_equal 'SELECT * FROM test ORDER BY a' @dataset.order{a(1)}.sql.must_equal 'SELECT * FROM test ORDER BY a(1)' @dataset.order{|o| o.a(1, 2)}.sql.must_equal 'SELECT * FROM test ORDER BY a(1, 2)' @dataset.order{[a, a(1, 2)]}.sql.must_equal 'SELECT * FROM test ORDER BY a, a(1, 2)' end it "should merge regular arguments with argument returned from block" do @dataset.order(:b){a}.sql.must_equal 'SELECT * FROM test ORDER BY b, a' @dataset.order(:b, :c){|o| o.a(1)}.sql.must_equal 'SELECT * FROM test ORDER BY b, c, a(1)' @dataset.order(:b){[a, a(1, 2)]}.sql.must_equal 'SELECT * FROM test ORDER BY b, a, a(1, 2)' @dataset.order(:b, :c){|o| [o.a, o.a(1, 2)]}.sql.must_equal 'SELECT * FROM test ORDER BY b, c, a, a(1, 2)' end end describe "Dataset#unfiltered" do it "should remove filtering from the dataset" do ds = Sequel.mock.dataset.from(:test).filter(:score=>1) 3.times do ds.unfiltered.sql.must_equal 'SELECT * FROM test' end end end describe "Dataset#unlimited" do it "should remove limit and offset from the dataset" do ds = Sequel.mock.dataset.from(:test).limit(1, 2) 3.times do ds.unlimited.sql.must_equal 'SELECT * FROM test' end end end describe "Dataset#ungrouped" do it "should remove group and having clauses from the dataset" do ds = Sequel.mock.dataset.from(:test).group(:a).having(:b) 3.times do ds.ungrouped.sql.must_equal 'SELECT * FROM test' end end end describe "Dataset#unordered" do it "should remove ordering from the dataset" do ds = Sequel.mock.dataset.from(:test).order(:name) 3.times do ds.unordered.sql.must_equal 'SELECT * FROM test' end end end describe "Dataset#with_sql" do before do @dataset = Sequel.mock.dataset.from(:test) end it "should use static sql" do @dataset.with_sql('SELECT 1 FROM test').sql.must_equal 'SELECT 1 FROM test' end it "should work with placeholders" do @dataset.with_sql('SELECT ? 
FROM test', 1).sql.must_equal 'SELECT 1 FROM test' end it "should work with named placeholders" do @dataset.with_sql('SELECT :x FROM test', :x=>1).sql.must_equal 'SELECT 1 FROM test' end it "should keep row_proc" do @dataset.with_sql('SELECT 1 FROM test').row_proc.must_be_nil p = lambda{} @dataset.with_row_proc(p).with_sql('SELECT 1 FROM test').row_proc.must_equal p end it "should work with method symbols and arguments" do @dataset.with_sql(:delete_sql).sql.must_equal 'DELETE FROM test' @dataset.with_sql(:insert_sql, :b=>1).sql.must_equal 'INSERT INTO test (b) VALUES (1)' @dataset.with_sql(:update_sql, :b=>1).sql.must_equal 'UPDATE test SET b = 1' end end describe "Dataset#order_by" do before do @dataset = Sequel.mock.dataset.from(:test) end it "should include an ORDER BY clause in the select statement" do @dataset.order_by(:name).sql.must_equal 'SELECT * FROM test ORDER BY name' end it "should accept multiple arguments" do @dataset.order_by(:name, Sequel.desc(:price)).sql.must_equal 'SELECT * FROM test ORDER BY name, price DESC' end it "should override a previous ordering" do @dataset.order_by(:name).order(:stamp).sql.must_equal 'SELECT * FROM test ORDER BY stamp' end it "should accept a string" do @dataset.order_by(Sequel.lit('dada ASC')).sql.must_equal 'SELECT * FROM test ORDER BY dada ASC' end it "should accept a nil to remove ordering" do @dataset.order_by(:bah).order_by(nil).sql.must_equal 'SELECT * FROM test' end end describe "Dataset#order_more and order_append" do before do @dataset = Sequel.mock.dataset.from(:test) end it "should include an ORDER BY clause in the select statement" do @dataset.order_more(:name).sql.must_equal 'SELECT * FROM test ORDER BY name' @dataset.order_append(:name).sql.must_equal 'SELECT * FROM test ORDER BY name' end it "should add to the end of a previous ordering" do @dataset.order(:name).order_more(Sequel.desc(:stamp)).sql.must_equal 'SELECT * FROM test ORDER BY name, stamp DESC' @dataset.order(:name).order_append(Sequel.desc(:stamp)).sql.must_equal 'SELECT * FROM test ORDER BY name, stamp DESC' end it "should accept a block that yields a virtual row" do @dataset.order(:a).order_more{|o| o.b}.sql.must_equal 'SELECT * FROM test ORDER BY a, b' @dataset.order(:a, :b).order_more(:c, :d){[e, f(1, 2)]}.sql.must_equal 'SELECT * FROM test ORDER BY a, b, c, d, e, f(1, 2)' @dataset.order(:a).order_append{|o| o.b}.sql.must_equal 'SELECT * FROM test ORDER BY a, b' @dataset.order(:a, :b).order_append(:c, :d){[e, f(1, 2)]}.sql.must_equal 'SELECT * FROM test ORDER BY a, b, c, d, e, f(1, 2)' end end describe "Dataset#order_prepend" do before do @dataset = Sequel.mock.dataset.from(:test) end it "should include an ORDER BY clause in the select statement" do @dataset.order_prepend(:name).sql.must_equal 'SELECT * FROM test ORDER BY name' end it "should add to the beginning of a previous ordering" do @dataset.order(:name).order_prepend(Sequel.desc(:stamp)).sql.must_equal 'SELECT * FROM test ORDER BY stamp DESC, name' end it "should accept a block that yields a virtual row" do @dataset.order(:a).order_prepend{|o| o.b}.sql.must_equal 'SELECT * FROM test ORDER BY b, a' @dataset.order(:a, :b).order_prepend(:c, :d){[e, f(1, 2)]}.sql.must_equal 'SELECT * FROM test ORDER BY c, d, e, f(1, 2), a, b' end end describe "Dataset#reverse" do before do @dataset = Sequel.mock.dataset.from(:test) end it "should use DESC as default order" do @dataset.reverse(:name).sql.must_equal 'SELECT * FROM test ORDER BY name DESC' end it "should invert the order given" do 
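# reverse flips each ORDER BY term: unadorned and ASC expressions become DESC, and DESC expressions become ASC, as the assertions below show.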
@dataset.reverse(Sequel.desc(:name)).sql.must_equal 'SELECT * FROM test ORDER BY name ASC' end it "should invert the order for ASC expressions" do @dataset.reverse(Sequel.asc(:name)).sql.must_equal 'SELECT * FROM test ORDER BY name DESC' end it "should accept multiple arguments" do @dataset.reverse(:name, Sequel.desc(:price)).sql.must_equal 'SELECT * FROM test ORDER BY name DESC, price ASC' end it "should handle NULLS ordering correctly when reversing" do @dataset.reverse(Sequel.asc(:name, :nulls=>:first), Sequel.desc(:price, :nulls=>:last)).sql.must_equal 'SELECT * FROM test ORDER BY name DESC NULLS LAST, price ASC NULLS FIRST' end it "should reverse a previous ordering if no arguments are given" do ds1 = @dataset.order(:name) ds2 = @dataset.order(Sequel.desc(:clumsy), :fool) 3.times do ds1.reverse.sql.must_equal 'SELECT * FROM test ORDER BY name DESC' ds2.reverse.sql.must_equal 'SELECT * FROM test ORDER BY clumsy ASC, fool DESC' ds1.reverse{[]}.sql.must_equal 'SELECT * FROM test ORDER BY name DESC' ds2.reverse{[]}.sql.must_equal 'SELECT * FROM test ORDER BY clumsy ASC, fool DESC' end end it "should return an unordered dataset for a dataset with no order" do @dataset.unordered.reverse.sql.must_equal 'SELECT * FROM test' end it "should have #reverse_order alias" do @dataset.order(:name).reverse_order.sql.must_equal 'SELECT * FROM test ORDER BY name DESC' end it "should accept a block" do @dataset.reverse{name}.sql.must_equal 'SELECT * FROM test ORDER BY name DESC' @dataset.reverse_order{name}.sql.must_equal 'SELECT * FROM test ORDER BY name DESC' @dataset.reverse(:foo){name}.sql.must_equal 'SELECT * FROM test ORDER BY foo DESC, name DESC' @dataset.reverse_order(:foo){name}.sql.must_equal 'SELECT * FROM test ORDER BY foo DESC, name DESC' @dataset.reverse(Sequel.desc(:foo)){name}.sql.must_equal 'SELECT * FROM test ORDER BY foo ASC, name DESC' @dataset.reverse_order(Sequel.desc(:foo)){name}.sql.must_equal 'SELECT * FROM test ORDER BY foo ASC, name DESC' end end describe "Dataset#limit" do before do @dataset = Sequel.mock.dataset.from(:test) end it "should include a LIMIT clause in the select statement" do @dataset.limit(10).sql.must_equal 'SELECT * FROM test LIMIT 10' end it "should accept ranges" do @dataset.limit(3..7).sql.must_equal 'SELECT * FROM test LIMIT 5 OFFSET 3' @dataset.limit(3...7).sql.must_equal 'SELECT * FROM test LIMIT 4 OFFSET 3' end it "should include an offset if a second argument is given" do @dataset.limit(6, 10).sql.must_equal 'SELECT * FROM test LIMIT 6 OFFSET 10' end it "should convert regular strings to integers" do @dataset.limit('6', 'a() - 1').sql.must_equal 'SELECT * FROM test LIMIT 6 OFFSET 0' end it "should not convert literal strings to integers" do @dataset.limit(Sequel.lit('6'), Sequel.lit('a() - 1')).sql.must_equal 'SELECT * FROM test LIMIT 6 OFFSET a() - 1' end it "should not convert other objects" do @dataset.limit(6, Sequel.function(:a) - 1).sql.must_equal 'SELECT * FROM test LIMIT 6 OFFSET (a() - 1)' end it "should be able to reset limit and offset with nil values" do @dataset.limit(6).limit(nil).sql.must_equal 'SELECT * FROM test' @dataset.limit(6, 1).limit(nil).sql.must_equal 'SELECT * FROM test OFFSET 1' @dataset.limit(6, 1).limit(nil, nil).sql.must_equal 'SELECT * FROM test' end it "should work with fixed sql datasets" do @dataset.with_sql('select * from cccc').limit(6, 10).sql.must_equal 'SELECT * FROM (select * from cccc) AS t1 LIMIT 6 OFFSET 10' end it "should raise an error if an invalid limit or offset is used" do
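# A limit must be at least 1 and an offset at least 0; anything lower raises Sequel::Error immediately, before any SQL is generated.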
proc{@dataset.limit(-1)}.must_raise(Sequel::Error) proc{@dataset.limit(0)}.must_raise(Sequel::Error) @dataset.limit(1) proc{@dataset.limit(1, -1)}.must_raise(Sequel::Error) @dataset.limit(1, 0) @dataset.limit(1, 1) end end describe "Dataset#offset" do before do @dataset = Sequel.mock.dataset.from(:test) end it "should include an OFFSET clause in the select statement" do @dataset.offset(10).sql.must_equal 'SELECT * FROM test OFFSET 10' end it "should convert regular strings to integers" do @dataset.offset('a() - 1').sql.must_equal 'SELECT * FROM test OFFSET 0' end it "should raise an error if a negative offset is used" do proc{@dataset.offset(-1)}.must_raise(Sequel::Error) end it "should be able to reset offset with nil values" do @dataset.offset(6).offset(nil).sql.must_equal 'SELECT * FROM test' end it "should not convert literal strings to integers" do @dataset.offset(Sequel.lit('a() - 1')).sql.must_equal 'SELECT * FROM test OFFSET a() - 1' end it "should not convert other objects" do @dataset.offset(Sequel.function(:a) - 1).sql.must_equal 'SELECT * FROM test OFFSET (a() - 1)' end it "should override offset given to limit" do @dataset.limit(nil, 5).offset(6).sql.must_equal 'SELECT * FROM test OFFSET 6' end it "should not be overridable by limit if limit is not given an offset" do @dataset.offset(6).limit(nil).sql.must_equal 'SELECT * FROM test OFFSET 6' end it "should be overridable by limit if limit is given an offset" do @dataset.offset(6).limit(nil, nil).sql.must_equal 'SELECT * FROM test' @dataset.offset(6).limit(nil, 5).sql.must_equal 'SELECT * FROM test OFFSET 5' end end describe "Dataset#with_extend" do it "should return a cloned dataset extended with the given modules" do d = Sequel.mock.dataset m1 = Module.new{def a; 2**super end} m2 = Module.new{def a; 3 end} d.with_extend(m1, m2){def a; 4**super end}.a.must_equal 65536 d.respond_to?(:a).must_equal false ds = d.with_extend(m1, m2){def a; 4**super end} ds.a.must_equal 65536 ds.frozen?.must_equal true end it "should work with just a block" do Sequel.mock.dataset.with_extend{def a; 1 end}.a.must_equal 1 end end describe "Dataset#with_extend custom methods" do before do @ds = Sequel.mock[:items] end it "should have dataset_module support a where method" do @ds = @ds.with_extend{where :released, :released} @ds.released.sql.must_equal 'SELECT * FROM items WHERE released' @ds.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE (foo AND released)' end it "should have dataset_module support a having method" do @ds = @ds.with_extend{having(:released){released}} @ds.released.sql.must_equal 'SELECT * FROM items HAVING released' @ds.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE foo HAVING released' end it "should have dataset_module support an exclude method" do @ds = @ds.with_extend{exclude :released, :released} @ds.released.sql.must_equal 'SELECT * FROM items WHERE NOT released' @ds.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE (foo AND NOT released)' end it "should have dataset_module support an exclude_having method" do @ds = @ds.with_extend{exclude_having :released, :released} @ds.released.sql.must_equal 'SELECT * FROM items HAVING NOT released' @ds.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE foo HAVING NOT released' end it "should have dataset_module support a distinct method" do @ds = @ds.with_extend{def supports_distinct_on?; true end; distinct :foo, :baz} @ds.foo.sql.must_equal 'SELECT DISTINCT ON (baz) * FROM items' @ds.where(:bar).foo.sql.must_equal 'SELECT DISTINCT ON
(baz) * FROM items WHERE bar' end it "should have dataset_module support a grep method" do @ds = @ds.with_extend{grep :foo, :baz, 'quux%'} @ds.foo.sql.must_equal 'SELECT * FROM items WHERE ((baz LIKE \'quux%\' ESCAPE \'\\\'))' @ds.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE (bar AND ((baz LIKE \'quux%\' ESCAPE \'\\\')))' end it "should have dataset_module support a group method" do @ds = @ds.with_extend{group :foo, :baz} @ds.foo.sql.must_equal 'SELECT * FROM items GROUP BY baz' @ds.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar GROUP BY baz' end it "should have dataset_module support a group_and_count method" do @ds = @ds.with_extend{group_and_count :foo, :baz} @ds.foo.sql.must_equal 'SELECT baz, count(*) AS count FROM items GROUP BY baz' @ds.where(:bar).foo.sql.must_equal 'SELECT baz, count(*) AS count FROM items WHERE bar GROUP BY baz' end it "should have dataset_module support a group_append method" do @ds = @ds.with_extend{group_append :foo, :baz} @ds.foo.sql.must_equal 'SELECT * FROM items GROUP BY baz' @ds.group(:bar).foo.sql.must_equal 'SELECT * FROM items GROUP BY bar, baz' end it "should have dataset_module support a limit method" do @ds = @ds.with_extend{limit :foo, 1} @ds.foo.sql.must_equal 'SELECT * FROM items LIMIT 1' @ds.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar LIMIT 1' end it "should have dataset_module support an offset method" do @ds = @ds.with_extend{offset :foo, 1} @ds.foo.sql.must_equal 'SELECT * FROM items OFFSET 1' @ds.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar OFFSET 1' end it "should have dataset_module support an order method" do @ds = @ds.with_extend{order(:foo){:baz}} @ds.foo.sql.must_equal 'SELECT * FROM items ORDER BY baz' @ds.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar ORDER BY baz' end it "should have dataset_module support an order_append method" do @ds = @ds.with_extend{order_append :foo, :baz} @ds.foo.sql.must_equal 'SELECT * FROM items ORDER BY baz' @ds.order(:bar).foo.sql.must_equal 'SELECT * FROM items ORDER BY bar, baz' end it "should have dataset_module support an order_prepend method" do @ds = @ds.with_extend{order_prepend :foo, :baz} @ds.foo.sql.must_equal 'SELECT * FROM items ORDER BY baz' @ds.order(:bar).foo.sql.must_equal 'SELECT * FROM items ORDER BY baz, bar' end it "should have dataset_module support a select method" do @ds = @ds.with_extend{select :foo, :baz} @ds.foo.sql.must_equal 'SELECT baz FROM items' @ds.where(:bar).foo.sql.must_equal 'SELECT baz FROM items WHERE bar' end it "should have dataset_module support a select_all method" do @ds = @ds.with_extend{select_all :foo, :baz} @ds.foo.sql.must_equal 'SELECT baz.* FROM items' @ds.where(:bar).foo.sql.must_equal 'SELECT baz.* FROM items WHERE bar' end it "should have dataset_module support a select_append method" do @ds = @ds.with_extend{select_append :foo, :baz} @ds.foo.sql.must_equal 'SELECT *, baz FROM items' @ds.where(:bar).foo.sql.must_equal 'SELECT *, baz FROM items WHERE bar' end it "should have dataset_module support a select_group method" do @ds = @ds.with_extend{select_group :foo, :baz} @ds.foo.sql.must_equal 'SELECT baz FROM items GROUP BY baz' @ds.where(:bar).foo.sql.must_equal 'SELECT baz FROM items WHERE bar GROUP BY baz' end it "should have dataset_module support a server method" do @ds = @ds.with_extend{server :foo, :baz} @ds.foo.opts[:server].must_equal :baz @ds.where(:bar).foo.opts[:server].must_equal :baz end end describe "Dataset#with_row_proc" do it "should return a cloned dataset with
the given row_proc" do d = Sequel.mock.dataset l = lambda{|r| r} d.with_row_proc(l).row_proc.must_equal l d.row_proc.must_be_nil ds = d.with_row_proc(l) ds.frozen?.must_equal true ds.row_proc.must_equal l end end describe "Dataset#naked" do it "should returned clone dataset without row_proc" do d = Sequel.mock.dataset.with_row_proc(proc{|r| r}) d.naked.row_proc.must_be_nil d.row_proc.wont_be_nil end end describe "Dataset#qualified_column_name" do before do @dataset = Sequel.mock.dataset.from(:test) end it "should return the literal value if not given a symbol" do @dataset.literal(@dataset.send(:qualified_column_name, 'ccc__b', :items)).must_equal "'ccc__b'" @dataset.literal(@dataset.send(:qualified_column_name, 3, :items)).must_equal '3' @dataset.literal(@dataset.send(:qualified_column_name, Sequel.lit('a'), :items)).must_equal 'a' end it "should qualify the column with the supplied table name if given an unqualified symbol" do @dataset.literal(@dataset.send(:qualified_column_name, :b1, :items)).must_equal 'items.b1' end with_symbol_splitting "should not changed the qualifed column's table if given a qualified symbol" do @dataset.literal(@dataset.send(:qualified_column_name, :ccc__b, :items)).must_equal 'ccc.b' end it "should not changed the qualifed column's table if given a qualified identifier" do @dataset.literal(@dataset.send(:qualified_column_name, Sequel[:ccc][:b], :items)).must_equal 'ccc.b' end it "should handle an aliased identifier" do @dataset.literal(@dataset.send(:qualified_column_name, :ccc, Sequel.expr(:items).as(:i))).must_equal 'i.ccc' end end describe "Dataset#map" do before do @d = Sequel.mock(:fetch=>[{:a => 1, :b => 2}, {:a => 3, :b => 4}, {:a => 5, :b => 6}])[:items] end it "should provide the usual functionality if no argument is given" do @d.map{|n| n[:a] + n[:b]}.must_equal [3, 7, 11] end it "should map using #[column name] if column name is given" do @d.map(:a).must_equal [1, 3, 5] end it "should support multiple column names if an array of column names is given" do @d.map([:a, :b]).must_equal [[1, 2], [3, 4], [5, 6]] end it "should not call the row_proc if an argument is given" do @d = @d.with_row_proc(proc{|r| h = {}; r.keys.each{|k| h[k] = r[k] * 2}; h}) @d.map(:a).must_equal [1, 3, 5] @d.map([:a, :b]).must_equal [[1, 2], [3, 4], [5, 6]] end it "should call the row_proc if no argument is given" do @d = @d.with_row_proc(proc{|r| h = {}; r.keys.each{|k| h[k] = r[k] * 2}; h}) @d.map{|n| n[:a] + n[:b]}.must_equal [6, 14, 22] end it "should return the complete dataset values if nothing is given" do @d.map.to_a.must_equal [{:a => 1, :b => 2}, {:a => 3, :b => 4}, {:a => 5, :b => 6}] end it "should raise an error if calling with both an argument and block" do proc{@d.map(:a){}}.must_raise Sequel::Error end end describe "Dataset#as_hash" do before do @d = Sequel.mock(:fetch=>[{:a => 1, :b => 2}, {:a => 3, :b => 4}, {:a => 5, :b => 6}])[:items] end it "should provide a hash with the first column as key and the second as value" do @d.as_hash(:a, :b).must_equal(1 => 2, 3 => 4, 5 => 6) @d.as_hash(:b, :a).must_equal(2 => 1, 4 => 3, 6 => 5) end it "should be aliased as #to_hash" do @d.to_hash(:a, :b).must_equal(1 => 2, 3 => 4, 5 => 6) @d.to_hash(:b, :a).must_equal(2 => 1, 4 => 3, 6 => 5) end it "should provide a hash with the first column as key and the entire hash as value if the value column is blank or nil" do @d.as_hash(:a).must_equal(1 => {:a => 1, :b => 2}, 3 => {:a => 3, :b => 4}, 5 => {:a => 5, :b => 6}) @d.as_hash(:b).must_equal(2 => {:a => 1, :b => 2}, 4 => {:a => 
3, :b => 4}, 6 => {:a => 5, :b => 6}) end it "should accept an optional :hash parameter into which entries can be merged" do @d.as_hash(:a, :b, :hash => (tmp = {})).must_be_same_as(tmp) end it "should support using an array of columns as either the key or the value" do @d.as_hash([:a, :b], :b).must_equal([1, 2] => 2, [3, 4] => 4, [5, 6] => 6) @d.as_hash(:b, [:a, :b]).must_equal(2 => [1, 2], 4 => [3, 4], 6 => [5, 6]) @d.as_hash([:b, :a], [:a, :b]).must_equal([2, 1] => [1, 2], [4, 3] => [3, 4], [6, 5] => [5, 6]) @d.as_hash([:a, :b]).must_equal([1, 2] => {:a => 1, :b => 2}, [3, 4] => {:a => 3, :b => 4}, [5, 6] => {:a => 5, :b => 6}) end it "should not call the row_proc if two arguments are given" do @d = @d.with_row_proc(proc{|r| h = {}; r.keys.each{|k| h[k] = r[k] * 2}; h}) @d.as_hash(:a, :b).must_equal(1 => 2, 3 => 4, 5 => 6) @d.as_hash(:b, :a).must_equal(2 => 1, 4 => 3, 6 => 5) @d.as_hash([:a, :b], :b).must_equal([1, 2] => 2, [3, 4] => 4, [5, 6] => 6) @d.as_hash(:b, [:a, :b]).must_equal(2 => [1, 2], 4 => [3, 4], 6 => [5, 6]) @d.as_hash([:b, :a], [:a, :b]).must_equal([2, 1] => [1, 2], [4, 3] => [3, 4], [6, 5] => [5, 6]) end it "should call the row_proc if only a single argument is given" do @d = @d.with_row_proc(proc{|r| h = {}; r.keys.each{|k| h[k] = r[k] * 2}; h}) @d.as_hash(:a).must_equal(2 => {:a => 2, :b => 4}, 6 => {:a => 6, :b => 8}, 10 => {:a => 10, :b => 12}) @d.as_hash(:b).must_equal(4 => {:a => 2, :b => 4}, 8 => {:a => 6, :b => 8}, 12 => {:a => 10, :b => 12}) @d.as_hash([:a, :b]).must_equal([2, 4] => {:a => 2, :b => 4}, [6, 8] => {:a => 6, :b => 8}, [10, 12] => {:a => 10, :b => 12}) end it "should handle a single composite key when using a row_proc" do c = Class.new do def self.call(h); new(h); end def initialize(h); @h = h; end def [](k) @h[k]; end def h; @h; end def ==(o) @h == o.h; end end @d.with_row_proc(c).as_hash([:a, :b]).must_equal([1, 2] => c.call(:a => 1, :b => 2), [3, 4] => c.call(:a => 3, :b => 4), [5, 6] => c.call(:a => 5, :b => 6)) end end describe "Dataset#to_hash_groups" do before do @d = Sequel.mock(:fetch=>[{:a => 1, :b => 2}, {:a => 3, :b => 4}, {:a => 1, :b => 6}, {:a => 7, :b => 4}])[:items] end it "should provide a hash with the first column as key and the second as arrays of matching values" do @d.to_hash_groups(:a, :b).must_equal(1 => [2, 6], 3 => [4], 7 => [4]) @d.to_hash_groups(:b, :a).must_equal(2 => [1], 4=>[3, 7], 6=>[1]) end it "should provide a hash with the first column as key and the entire hash as value if the value column is blank or nil" do @d.to_hash_groups(:a).must_equal(1 => [{:a => 1, :b => 2}, {:a => 1, :b => 6}], 3 => [{:a => 3, :b => 4}], 7 => [{:a => 7, :b => 4}]) @d.to_hash_groups(:b).must_equal(2 => [{:a => 1, :b => 2}], 4 => [{:a => 3, :b => 4}, {:a => 7, :b => 4}], 6 => [{:a => 1, :b => 6}]) end it "should support using an array of columns as either the key or the value" do @d.to_hash_groups([:a, :b], :b).must_equal([1, 2] => [2], [3, 4] => [4], [1, 6] => [6], [7, 4]=>[4]) @d.to_hash_groups(:b, [:a, :b]).must_equal(2 => [[1, 2]], 4 => [[3, 4], [7, 4]], 6 => [[1, 6]]) @d.to_hash_groups([:b, :a], [:a, :b]).must_equal([2, 1] => [[1, 2]], [4, 3] => [[3, 4]], [6, 1] => [[1, 6]], [4, 7]=>[[7, 4]]) @d.to_hash_groups([:a, :b]).must_equal([1, 2] => [{:a => 1, :b => 2}], [3, 4] => [{:a => 3, :b => 4}], [1, 6] => [{:a => 1, :b => 6}], [7, 4] => [{:a => 7, :b => 4}]) end it "should accept a :hash option into which entries can be merged" do @d.to_hash_groups(:a, :b, :hash => (tmp = {})).must_be_same_as(tmp) end it "should accept an :all option 
to use all instead of each to retrieve the records" do called = false @d.with_extend{define_method(:post_load){|_| called = true}}.to_hash_groups(:a, :b, :all=>true) called.must_equal true end it "should not call the row_proc if two arguments are given" do @d = @d.with_row_proc(proc{|r| h = {}; r.keys.each{|k| h[k] = r[k] * 2}; h}) @d.to_hash_groups(:a, :b).must_equal(1 => [2, 6], 3 => [4], 7 => [4]) @d.to_hash_groups(:b, :a).must_equal(2 => [1], 4=>[3, 7], 6=>[1]) @d.to_hash_groups([:a, :b], :b).must_equal([1, 2] => [2], [3, 4] => [4], [1, 6] => [6], [7, 4]=>[4]) @d.to_hash_groups(:b, [:a, :b]).must_equal(2 => [[1, 2]], 4 => [[3, 4], [7, 4]], 6 => [[1, 6]]) @d.to_hash_groups([:b, :a], [:a, :b]).must_equal([2, 1] => [[1, 2]], [4, 3] => [[3, 4]], [6, 1] => [[1, 6]], [4, 7]=>[[7, 4]]) end it "should call the row_proc if only a single argument is given" do @d = @d.with_row_proc(proc{|r| h = {}; r.keys.each{|k| h[k] = r[k] * 2}; h}) @d.to_hash_groups(:a).must_equal(2 => [{:a => 2, :b => 4}, {:a => 2, :b => 12}], 6 => [{:a => 6, :b => 8}], 14 => [{:a => 14, :b => 8}]) @d.to_hash_groups(:b).must_equal(4 => [{:a => 2, :b => 4}], 8 => [{:a => 6, :b => 8}, {:a => 14, :b => 8}], 12 => [{:a => 2, :b => 12}]) @d.to_hash_groups([:a, :b]).must_equal([2, 4] => [{:a => 2, :b => 4}], [6, 8] => [{:a => 6, :b => 8}], [2, 12] => [{:a => 2, :b => 12}], [14, 8] => [{:a => 14, :b => 8}]) end it "should handle a single composite key when using a row_proc" do c = Class.new do def self.call(h); new(h); end def initialize(h); @h = h; end def [](k) @h[k]; end def h; @h; end def ==(o) @h == o.h; end end @d.with_row_proc(c).to_hash_groups([:a, :b]).must_equal([1, 2] => [c.call(:a => 1, :b => 2)], [3, 4] => [c.call(:a => 3, :b => 4)], [1, 6] => [c.call(:a => 1, :b => 6)], [7, 4] => [c.call(:a => 7, :b => 4)]) end end describe "Dataset#distinct" do before do @db = Sequel.mock @dataset = @db[:test].select(:name) end it "should include a DISTINCT clause in the statement" do @dataset.distinct.sql.must_equal 'SELECT DISTINCT name FROM test' end it "should raise an error if columns given and DISTINCT ON is not supported" do @dataset.distinct proc{@dataset.distinct(:a)}.must_raise(Sequel::InvalidOperation) end it "should use DISTINCT ON if columns are given and DISTINCT ON is supported" do @dataset = @dataset.with_extend{def supports_distinct_on?; true end} @dataset.distinct(:a, :b).sql.must_equal 'SELECT DISTINCT ON (a, b) name FROM test' @dataset.distinct(Sequel.cast(:stamp, :integer), :node_id=>nil).sql.must_equal 'SELECT DISTINCT ON (CAST(stamp AS integer), (node_id IS NULL)) name FROM test' end it "should use DISTINCT ON if columns are given in a virtual row block and DISTINCT ON is supported" do @dataset = @dataset.with_extend{def supports_distinct_on?; true end} @dataset.distinct{func(:id)}.sql.must_equal 'SELECT DISTINCT ON (func(id)) name FROM test' end it "should do a subselect for count" do @dataset.distinct.count @db.sqls.must_equal ['SELECT count(*) AS count FROM (SELECT DISTINCT name FROM test) AS t1 LIMIT 1'] end end describe "Dataset#count" do before do @db = Sequel.mock(:fetch=>{:count=>1}) @dataset = @db.from(:test).columns(:count) end it "should format SQL properly" do 5.times do @dataset.count.must_equal 1 @db.sqls.must_equal ['SELECT count(*) AS count FROM test LIMIT 1'] end end it "should accept an argument" do 5.times do @dataset.count(:foo).must_equal 1 @db.sqls.must_equal ['SELECT count(foo) AS count FROM test LIMIT 1'] end end it "should work with a nil argument" do 5.times do @dataset.count(nil).must_equal 1
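# A nil argument is literalized as NULL, so the query uses count(NULL) rather than count(*); on a real database COUNT(NULL) is 0, since NULL values are never counted.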
@db.sqls.must_equal ['SELECT count(NULL) AS count FROM test LIMIT 1'] end end it "should accept a virtual row block" do 5.times do @dataset.count{foo(bar)}.must_equal 1 @db.sqls.must_equal ['SELECT count(foo(bar)) AS count FROM test LIMIT 1'] end end it "should raise an Error if given an argument and a block" do proc{@dataset.count(:foo){foo(bar)}}.must_raise(Sequel::Error) end it "should include the where clause if it's there" do @dataset.filter(Sequel.expr(:abc) < 30).count.must_equal 1 @db.sqls.must_equal ['SELECT count(*) AS count FROM test WHERE (abc < 30) LIMIT 1'] end it "should count properly for datasets with fixed sql" do @dataset.with_sql("select abc from xyz").count.must_equal 1 @db.sqls.must_equal ["SELECT count(*) AS count FROM (select abc from xyz) AS t1 LIMIT 1"] end it "should count properly when using UNION, INTERSECT, or EXCEPT" do @dataset.union(@dataset).count.must_equal 1 @db.sqls.must_equal ["SELECT count(*) AS count FROM (SELECT * FROM test UNION SELECT * FROM test) AS t1 LIMIT 1"] @dataset.intersect(@dataset).count.must_equal 1 @db.sqls.must_equal ["SELECT count(*) AS count FROM (SELECT * FROM test INTERSECT SELECT * FROM test) AS t1 LIMIT 1"] @dataset.except(@dataset).count.must_equal 1 @db.sqls.must_equal ["SELECT count(*) AS count FROM (SELECT * FROM test EXCEPT SELECT * FROM test) AS t1 LIMIT 1"] end it "should return limit if count is greater than it" do @dataset.limit(5).count.must_equal 1 @db.sqls.must_equal ["SELECT count(*) AS count FROM (SELECT * FROM test LIMIT 5) AS t1 LIMIT 1"] end it "should work correctly with offsets" do @dataset.limit(nil, 5).count.must_equal 1 @db.sqls.must_equal ["SELECT count(*) AS count FROM (SELECT * FROM test OFFSET 5) AS t1 LIMIT 1"] end it "should work on a graphed_dataset" do ds = @dataset.with_extend{ def columns; [:a] end} ds.graph(@dataset, [:a], :table_alias=>:test2).count.must_equal 1 @dataset.graph(ds, [:a], :table_alias=>:test2).count.must_equal 1 @db.sqls.must_equal(['SELECT count(*) AS count FROM test LEFT OUTER JOIN test AS test2 USING (a) LIMIT 1'] * 2) end it "should not cache the columns value" do ds = @dataset.from(:blah).columns(:a) ds.columns.must_equal [:a] ds.count.must_equal 1 @db.sqls.must_equal ['SELECT count(*) AS count FROM blah LIMIT 1'] ds.columns.must_equal [:a] end end describe "Dataset#group_and_count" do before do @ds = Sequel.mock.dataset.from(:test) end it "should format SQL properly" do @ds.group_and_count(:name).sql.must_equal "SELECT name, count(*) AS count FROM test GROUP BY name" end it "should accept multiple columns for grouping" do @ds.group_and_count(:a, :b).sql.must_equal "SELECT a, b, count(*) AS count FROM test GROUP BY a, b" end it "should format column aliases in the select clause but not in the group clause" do @ds.group_and_count(Sequel[:name].as(:n)).sql.must_equal "SELECT name AS n, count(*) AS count FROM test GROUP BY name" @ds.group_and_count(Sequel[:name][:n]).sql.must_equal "SELECT name.n, count(*) AS count FROM test GROUP BY name.n" end with_symbol_splitting "should format column aliases in the select clause but not in the group clause when using splittable symbols" do @ds.group_and_count(:name___n).sql.must_equal "SELECT name AS n, count(*) AS count FROM test GROUP BY name" @ds.group_and_count(:name__n).sql.must_equal "SELECT name.n, count(*) AS count FROM test GROUP BY name.n" end it "should handle identifiers" do @ds.group_and_count(Sequel.identifier(:name___n)).sql.must_equal "SELECT name___n, count(*) AS count FROM test GROUP BY name___n" end it "should handle 
literal strings" do @ds.group_and_count(Sequel.lit("name")).sql.must_equal "SELECT name, count(*) AS count FROM test GROUP BY name" end it "should handle aliased expressions" do @ds.group_and_count(Sequel.expr(:name).as(:n)).sql.must_equal "SELECT name AS n, count(*) AS count FROM test GROUP BY name" @ds.group_and_count(Sequel.identifier(:name).as(:n)).sql.must_equal "SELECT name AS n, count(*) AS count FROM test GROUP BY name" end it "should take a virtual row block" do @ds.group_and_count{(type_id > 1).as(t)}.sql.must_equal "SELECT (type_id > 1) AS t, count(*) AS count FROM test GROUP BY (type_id > 1)" @ds.group_and_count{[(type_id > 1).as(t), type_id < 2]}.sql.must_equal "SELECT (type_id > 1) AS t, (type_id < 2), count(*) AS count FROM test GROUP BY (type_id > 1), (type_id < 2)" @ds.group_and_count(:foo){type_id > 1}.sql.must_equal "SELECT foo, (type_id > 1), count(*) AS count FROM test GROUP BY foo, (type_id > 1)" end end describe "Dataset#empty?" do it "should return true if no records exist in the dataset" do db = Sequel.mock(:fetch=>proc{|sql| {1=>1} unless sql =~ /WHERE 'f'/}) db.from(:test).wont_be :empty? db.sqls.must_equal ['SELECT 1 AS one FROM test LIMIT 1'] db.from(:test).filter(false).must_be :empty? db.sqls.must_equal ["SELECT 1 AS one FROM test WHERE 'f' LIMIT 1"] end it "should ignore order" do db = Sequel.mock(:fetch=>proc{|sql| {1=>1}}) db.from(:test).wont_be :empty? without_order = db.sqls db.from(:test).order(:the_order_column).wont_be :empty? with_order = db.sqls without_order.must_equal with_order end end describe "Dataset#first_source_alias" do before do @ds = Sequel.mock.dataset end it "should be the entire first source if not aliased" do @ds.from(:s__t).first_source_alias.must_equal :s__t @ds.clone(:from=>[:s__t]).first_source_alias.must_equal :s__t end with_symbol_splitting "should be the alias if aliased when using symbol splitting" do @ds.from(:t___a).first_source_alias.must_equal :a @ds.from(:s__t___a).first_source_alias.must_equal :a @ds.clone(:from=>[:t___a]).first_source_alias.must_equal :a @ds.clone(:from=>[:s__t___a]).first_source_alias.must_equal :a end with_symbol_splitting "should be aliased as first_source when using symbol splitting" do @ds.from(:s__t___a).first_source.must_equal :a end it "should be the entire first source if not aliased" do @ds.from(:t).first_source_alias.must_equal :t @ds.from(Sequel.identifier(:t__a)).first_source_alias.must_equal Sequel.identifier(:t__a) @ds.from(Sequel.qualify(:s, :t)).first_source_alias.must_equal Sequel.qualify(:s, :t) end it "should be the alias if aliased" do @ds.from(Sequel.expr(:t).as(:a)).first_source_alias.must_equal :a end it "should be aliased as first_source" do @ds.from(:t).first_source.must_equal :t @ds.from(Sequel.identifier(:t__a)).first_source.must_equal Sequel.identifier(:t__a) @ds.from(Sequel.expr(:t).as(:a)).first_source.must_equal :a end it "should raise exception if table doesn't have a source" do proc{@ds.first_source_alias}.must_raise(Sequel::Error) end end describe "Dataset#first_source_table" do before do @ds = Sequel.mock.dataset end it "should be the entire first source if not aliased" do @ds.from(:t).first_source_table.must_equal :t @ds.from(:s__t).first_source_table.must_equal :s__t @ds.clone(:from=>[:s__t]).first_source_table.must_equal :s__t end it "should be the entire first source if not aliased" do @ds.from(Sequel.identifier(:t__a)).first_source_table.must_equal Sequel.identifier(:t__a) @ds.from(Sequel.qualify(:s, :t)).first_source_table.must_equal Sequel.qualify(:s, :t) end 
with_symbol_splitting "should be the unaliased part if given aliased symbols with embedded aliasing" do @ds.literal(@ds.from(:t___a).first_source_table).must_equal "t" @ds.literal(@ds.from(:s__t___a).first_source_table).must_equal "s.t" @ds.literal(@ds.clone(:from=>[:t___a]).first_source_table).must_equal "t" @ds.literal(@ds.clone(:from=>[:s__t___a]).first_source_table).must_equal "s.t" end it "should be the unaliased part if aliased" do @ds.literal(@ds.from(Sequel.expr(:t).as(:a)).first_source_table).must_equal "t" end it "should raise exception if table doesn't have a source" do proc{@ds.first_source_table}.must_raise(Sequel::Error) end end describe "Dataset#from_self" do before do @ds = Sequel.mock.dataset.from(:test).select(:name).limit(1) end it "should set up a default alias" do @ds.from_self.sql.must_equal 'SELECT * FROM (SELECT name FROM test LIMIT 1) AS t1' end it "should keep any existing columns" do @ds.columns(:id, :a) @ds.from_self.columns.must_equal [:id, :a] end it "should modify only the new dataset" do @ds.from_self.select(:bogus).sql.must_equal 'SELECT bogus FROM (SELECT name FROM test LIMIT 1) AS t1' end it "should use the user-specified alias" do @ds.from_self(:alias=>:some_name).sql.must_equal 'SELECT * FROM (SELECT name FROM test LIMIT 1) AS some_name' end it "should use the user-specified column aliases" do @ds.from_self(:alias=>:some_name, :column_aliases=>[:c1, :c2]).sql.must_equal 'SELECT * FROM (SELECT name FROM test LIMIT 1) AS some_name(c1, c2)' end it "should use the user-specified alias" do @ds.from_self(:alias=>:some_name).sql.must_equal 'SELECT * FROM (SELECT name FROM test LIMIT 1) AS some_name' @ds.from_self(:alias=>:some_name1).sql.must_equal 'SELECT * FROM (SELECT name FROM test LIMIT 1) AS some_name1' end it "should use the user-specified alias for joins" do @ds.from_self(:alias=>:some_name).inner_join(:posts, :alias=>:name).sql.must_equal \ 'SELECT * FROM (SELECT name FROM test LIMIT 1) AS some_name INNER JOIN posts ON (posts.alias = some_name.name)' end it "should not remove non-SQL options such as :server" do @ds.server(:blah).from_self(:alias=>:some_name).opts[:server].must_equal :blah end it "should work correctly when a delayed evaluation is used" do a = true ds = @ds.where(Sequel.delay{a}).from_self ds.sql.must_equal "SELECT * FROM (SELECT name FROM test WHERE 't' LIMIT 1) AS t1" a = false ds.sql.must_equal "SELECT * FROM (SELECT name FROM test WHERE 'f' LIMIT 1) AS t1" end it "should hoist WITH clauses in current dataset if dataset doesn't support WITH in subselect" do ds = Sequel.mock.dataset ds = ds.with_extend do def supports_cte?; true end def supports_cte_in_subselect?; false end end ds.from(:a).with(:a, ds.from(:b)).from_self.sql.must_equal 'WITH a AS (SELECT * FROM b) SELECT * FROM (SELECT * FROM a) AS t1' ds.from(:a, :c).with(:a, ds.from(:b)).with(:c, ds.from(:d)).from_self.sql.must_equal 'WITH a AS (SELECT * FROM b), c AS (SELECT * FROM d) SELECT * FROM (SELECT * FROM a, c) AS t1' end end describe "Dataset#join_table" do before do @d = Sequel.mock.dataset.from(:items).with_quote_identifiers(true) end it "should format the JOIN clause properly" do @d.join_table(:left_outer, :categories, :category_id => :id).sql.must_equal 'SELECT * FROM "items" LEFT OUTER JOIN "categories" ON ("categories"."category_id" = "items"."id")' end it "should handle multiple conditions on the same join table column" do @d.join_table(:left_outer, :categories, [[:category_id, :id], [:category_id, 0..100]]).sql.
must_equal 'SELECT * FROM "items" LEFT OUTER JOIN "categories" ON (("categories"."category_id" = "items"."id") AND ("categories"."category_id" >= 0) AND ("categories"."category_id" <= 100))' end it "should include WHERE clause if applicable" do @d.filter(Sequel.expr(:price) < 100).join_table(:right_outer, :categories, :category_id => :id).sql. must_equal 'SELECT * FROM "items" RIGHT OUTER JOIN "categories" ON ("categories"."category_id" = "items"."id") WHERE ("price" < 100)' end it "should include ORDER BY clause if applicable" do @d.order(:stamp).join_table(:full_outer, :categories, :category_id => :id).sql.must_equal 'SELECT * FROM "items" FULL OUTER JOIN "categories" ON ("categories"."category_id" = "items"."id") ORDER BY "stamp"' end it "should support multiple joins" do @d.join_table(:inner, :b, :items_id=>:id).join_table(:left_outer, :c, :b_id => Sequel[:b][:id]).sql.must_equal 'SELECT * FROM "items" INNER JOIN "b" ON ("b"."items_id" = "items"."id") LEFT OUTER JOIN "c" ON ("c"."b_id" = "b"."id")' end with_symbol_splitting "should support multiple joins with splittable symbols" do @d.join_table(:inner, :b, :items_id=>:id).join_table(:left_outer, :c, :b_id => :b__id).sql.must_equal 'SELECT * FROM "items" INNER JOIN "b" ON ("b"."items_id" = "items"."id") LEFT OUTER JOIN "c" ON ("c"."b_id" = "b"."id")' end it "should handle LATERAL subqueries" do @d.join(@d.lateral, :a=>:b).select_sql.must_equal 'SELECT * FROM "items" INNER JOIN LATERAL (SELECT * FROM "items") AS "t1" ON ("t1"."a" = "items"."b")' @d.left_join(@d.lateral, :a=>:b).select_sql.must_equal 'SELECT * FROM "items" LEFT JOIN LATERAL (SELECT * FROM "items") AS "t1" ON ("t1"."a" = "items"."b")' @d.cross_join(@d.lateral).select_sql.must_equal 'SELECT * FROM "items" CROSS JOIN LATERAL (SELECT * FROM "items") AS "t1"' end it "should support arbitrary join types" do @d.join_table(:magic, :categories, :category_id=>:id).sql.must_equal 'SELECT * FROM "items" MAGIC JOIN "categories" ON ("categories"."category_id" = "items"."id")' end it "should support many join methods" do @d.left_outer_join(:categories, :category_id=>:id).sql.must_equal 'SELECT * FROM "items" LEFT OUTER JOIN "categories" ON ("categories"."category_id" = "items"."id")' @d.right_outer_join(:categories, :category_id=>:id).sql.must_equal 'SELECT * FROM "items" RIGHT OUTER JOIN "categories" ON ("categories"."category_id" = "items"."id")' @d.full_outer_join(:categories, :category_id=>:id).sql.must_equal 'SELECT * FROM "items" FULL OUTER JOIN "categories" ON ("categories"."category_id" = "items"."id")' @d.inner_join(:categories, :category_id=>:id).sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" ON ("categories"."category_id" = "items"."id")' @d.left_join(:categories, :category_id=>:id).sql.must_equal 'SELECT * FROM "items" LEFT JOIN "categories" ON ("categories"."category_id" = "items"."id")' @d.right_join(:categories, :category_id=>:id).sql.must_equal 'SELECT * FROM "items" RIGHT JOIN "categories" ON ("categories"."category_id" = "items"."id")' @d.full_join(:categories, :category_id=>:id).sql.must_equal 'SELECT * FROM "items" FULL JOIN "categories" ON ("categories"."category_id" = "items"."id")' @d.natural_join(:categories).sql.must_equal 'SELECT * FROM "items" NATURAL JOIN "categories"' @d.natural_left_join(:categories).sql.must_equal 'SELECT * FROM "items" NATURAL LEFT JOIN "categories"' @d.natural_right_join(:categories).sql.must_equal 'SELECT * FROM "items" NATURAL RIGHT JOIN "categories"' @d.natural_full_join(:categories).sql.must_equal 'SELECT * FROM 
"items" NATURAL FULL JOIN "categories"' @d.cross_join(:categories).sql.must_equal 'SELECT * FROM "items" CROSS JOIN "categories"' end it "should support options hashes for join methods that don't take conditions" do @d.natural_join(:categories, :table_alias=>:a).sql.must_equal 'SELECT * FROM "items" NATURAL JOIN "categories" AS "a"' @d.natural_left_join(:categories, :table_alias=>:a).sql.must_equal 'SELECT * FROM "items" NATURAL LEFT JOIN "categories" AS "a"' @d.natural_right_join(:categories, :table_alias=>:a).sql.must_equal 'SELECT * FROM "items" NATURAL RIGHT JOIN "categories" AS "a"' @d.natural_full_join(:categories, :table_alias=>:a).sql.must_equal 'SELECT * FROM "items" NATURAL FULL JOIN "categories" AS "a"' @d.cross_join(:categories, :table_alias=>:a).sql.must_equal 'SELECT * FROM "items" CROSS JOIN "categories" AS "a"' end it "should raise an error if non-hash arguments are provided to join methods that don't take conditions" do proc{@d.natural_join(:categories, nil)}.must_raise(Sequel::Error) proc{@d.natural_left_join(:categories, nil)}.must_raise(Sequel::Error) proc{@d.natural_right_join(:categories, nil)}.must_raise(Sequel::Error) proc{@d.natural_full_join(:categories, nil)}.must_raise(Sequel::Error) proc{@d.cross_join(:categories, nil)}.must_raise(Sequel::Error) end it "should raise an error if blocks are provided to join methods that don't pass them" do proc{@d.natural_join(:categories){}}.must_raise(Sequel::Error) proc{@d.natural_left_join(:categories){}}.must_raise(Sequel::Error) proc{@d.natural_right_join(:categories){}}.must_raise(Sequel::Error) proc{@d.natural_full_join(:categories){}}.must_raise(Sequel::Error) proc{@d.cross_join(:categories){}}.must_raise(Sequel::Error) end it "should default to a plain join if nil is used for the type" do @d.join_table(nil, :categories, :category_id=>:id).sql.must_equal 'SELECT * FROM "items" JOIN "categories" ON ("categories"."category_id" = "items"."id")' end it "should use an inner join for Dataset#join" do @d.join(:categories, :category_id=>:id).sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" ON ("categories"."category_id" = "items"."id")' end it "should support aliased tables using the :table_alias option" do @d.from('stats').join('players', {:id => :player_id}, :table_alias=>:p).sql.must_equal 'SELECT * FROM "stats" INNER JOIN "players" AS "p" ON ("p"."id" = "stats"."player_id")' end it "should support aliased tables using an implicit alias" do @d.from('stats').join(Sequel.expr(:players).as(:p), {:id => :player_id}).sql.must_equal 'SELECT * FROM "stats" INNER JOIN "players" AS "p" ON ("p"."id" = "stats"."player_id")' end it "should support aliased tables with an implicit column aliases" do @d.from('stats').join(Sequel.expr(:players).as(:p, [:c1, :c2]), {:id => :player_id}).sql.must_equal 'SELECT * FROM "stats" INNER JOIN "players" AS "p"("c1", "c2") ON ("p"."id" = "stats"."player_id")' end it "should support aliased tables with an implicit column aliases where table alias is the same" do @d.from('stats').join(Sequel.expr(:players).as(Sequel[:players], [:c1, :c2]), {:id => :player_id}).sql.must_equal 'SELECT * FROM "stats" INNER JOIN "players" AS "players"("c1", "c2") ON ("players"."id" = "stats"."player_id")' end it "should support using an alias for the FROM when doing the first join with unqualified condition columns" do @d.from(Sequel.as(:foo, :f)).join_table(:inner, :bar, :id => :bar_id).sql.must_equal 'SELECT * FROM "foo" AS "f" INNER JOIN "bar" ON ("bar"."id" = "f"."bar_id")' end with_symbol_splitting "should 
support implicit schemas in from table symbols" do @d.from(:s__t).join(:u__v, {:id => :player_id}).sql.must_equal 'SELECT * FROM "s"."t" INNER JOIN "u"."v" ON ("u"."v"."id" = "s"."t"."player_id")' end with_symbol_splitting "should support implicit aliases in from table symbols" do @d.from(:t___z).join(:v___y, {:id => :player_id}).sql.must_equal 'SELECT * FROM "t" AS "z" INNER JOIN "v" AS "y" ON ("y"."id" = "z"."player_id")' @d.from(:s__t___z).join(:u__v___y, {:id => :player_id}).sql.must_equal 'SELECT * FROM "s"."t" AS "z" INNER JOIN "u"."v" AS "y" ON ("y"."id" = "z"."player_id")' end it "should support AliasedExpressions" do @d.from(Sequel.expr(:s).as(:t)).join(Sequel.expr(:u).as(:v), {:id => :player_id}).sql.must_equal 'SELECT * FROM "s" AS "t" INNER JOIN "u" AS "v" ON ("v"."id" = "t"."player_id")' end it "should support the :implicit_qualifier option" do @d.from('stats').join('players', {:id => :player_id}, :implicit_qualifier=>:p).sql.must_equal 'SELECT * FROM "stats" INNER JOIN "players" ON ("players"."id" = "p"."player_id")' end it "should support the :reset_implicit_qualifier option" do @d.from(:stats).join(:a, [:b], :reset_implicit_qualifier=>false).join(:players, {:id => :player_id}).sql.must_equal 'SELECT * FROM "stats" INNER JOIN "a" USING ("b") INNER JOIN "players" ON ("players"."id" = "stats"."player_id")' end it "should default :qualify option to default_join_table_qualification" do @d = @d.with_extend{def default_join_table_qualification; false end} @d.from('stats').join(:players, :id => :player_id).sql.must_equal 'SELECT * FROM "stats" INNER JOIN "players" ON ("id" = "player_id")' end it "should not qualify if :qualify=>false option is given" do @d.from('stats').join(:players, {:id => :player_id}, :qualify=>false).sql.must_equal 'SELECT * FROM "stats" INNER JOIN "players" ON ("id" = "player_id")' end it "should do deep qualification if :qualify=>:deep option is given" do @d.from('stats').join(:players, {Sequel.function(:f, :id) => Sequel.subscript(:player_id, 0)}, :qualify=>:deep).sql.must_equal 'SELECT * FROM "stats" INNER JOIN "players" ON (f("players"."id") = "stats"."player_id"[0])' end it "should do only qualification if :qualify=>:symbol option is given" do @d.from('stats').join(:players, {Sequel.function(:f, :id) => :player_id}, :qualify=>:symbol).sql.must_equal 'SELECT * FROM "stats" INNER JOIN "players" ON (f("id") = "stats"."player_id")' end it "should allow for arbitrary conditions in the JOIN clause" do @d.join_table(:left_outer, :categories, :status => 0).sql.must_equal 'SELECT * FROM "items" LEFT OUTER JOIN "categories" ON ("categories"."status" = 0)' @d.join_table(:left_outer, :categories, :categorizable_type => "Post").sql.must_equal 'SELECT * FROM "items" LEFT OUTER JOIN "categories" ON ("categories"."categorizable_type" = \'Post\')' @d.join_table(:left_outer, :categories, :timestamp => Sequel::CURRENT_TIMESTAMP).sql.must_equal 'SELECT * FROM "items" LEFT OUTER JOIN "categories" ON ("categories"."timestamp" = CURRENT_TIMESTAMP)' @d.join_table(:left_outer, :categories, :status => [1, 2, 3]).sql.must_equal 'SELECT * FROM "items" LEFT OUTER JOIN "categories" ON ("categories"."status" IN (1, 2, 3))' end it "should raise error for a table without a source" do proc {Sequel.mock.dataset.join('players', :id => :player_id)}.must_raise(Sequel::Error) end it "should support joining datasets" do ds = Sequel.mock.dataset.from(:categories) @d.join_table(:left_outer, ds, :item_id => :id).sql.must_equal 'SELECT * FROM "items" LEFT OUTER JOIN (SELECT * FROM categories) AS 
"t1" ON ("t1"."item_id" = "items"."id")' ds = ds.where(:active => true) @d.join_table(:left_outer, ds, :item_id => :id).sql.must_equal 'SELECT * FROM "items" LEFT OUTER JOIN (SELECT * FROM categories WHERE (active IS TRUE)) AS "t1" ON ("t1"."item_id" = "items"."id")' @d.from_self.join_table(:left_outer, ds, :item_id => :id).sql.must_equal 'SELECT * FROM (SELECT * FROM "items") AS "t1" LEFT OUTER JOIN (SELECT * FROM categories WHERE (active IS TRUE)) AS "t2" ON ("t2"."item_id" = "t1"."id")' end it "should support joining datasets and aliasing the join" do ds = Sequel.mock.dataset.from(:categories) @d.join_table(:left_outer, ds, {Sequel[:ds][:item_id] => :id}, :table_alias=>:ds).sql.must_equal 'SELECT * FROM "items" LEFT OUTER JOIN (SELECT * FROM categories) AS "ds" ON ("ds"."item_id" = "items"."id")' end with_symbol_splitting "should support joining datasets and aliasing the join when using symbols with embedded qualification" do ds = Sequel.mock.dataset.from(:categories) @d.join_table(:left_outer, ds, {:ds__item_id => :id}, :table_alias=>:ds).sql.must_equal 'SELECT * FROM "items" LEFT OUTER JOIN (SELECT * FROM categories) AS "ds" ON ("ds"."item_id" = "items"."id")' end it "should support joining multiple datasets" do ds = Sequel.mock.dataset.from(:categories) ds2 = Sequel.mock.dataset.from(:nodes).select(:name) ds3 = Sequel.mock.dataset.from(:attributes).where(Sequel.lit("name = 'blah'")) @d.join_table(:left_outer, ds, :item_id => :id).join_table(:inner, ds2, :node_id=>:id).join_table(:right_outer, ds3, :attribute_id=>:id).sql. must_equal 'SELECT * FROM "items" LEFT OUTER JOIN (SELECT * FROM categories) AS "t1" ON ("t1"."item_id" = "items"."id") ' \ 'INNER JOIN (SELECT name FROM nodes) AS "t2" ON ("t2"."node_id" = "t1"."id") ' \ 'RIGHT OUTER JOIN (SELECT * FROM attributes WHERE (name = \'blah\')) AS "t3" ON ("t3"."attribute_id" = "t2"."id")' end it "should support using a literal string as the join condition" do @d.join(:categories, Sequel.lit("c.item_id = items.id"), :table_alias=>:c).sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" AS "c" ON (c.item_id = items.id)' end it "should support using a boolean column as the join condition" do @d.join(:categories, :active).sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" ON "active"' end it "should support using an expression as the join condition" do @d.join(:categories, Sequel.expr(:number) > 10).sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" ON ("number" > 10)' end it "should support natural and cross joins" do @d.join_table(:natural, :categories).sql.must_equal 'SELECT * FROM "items" NATURAL JOIN "categories"' @d.join_table(:cross, :categories, nil).sql.must_equal 'SELECT * FROM "items" CROSS JOIN "categories"' @d.join_table(:natural, :categories, nil, :table_alias=>:c).sql.must_equal 'SELECT * FROM "items" NATURAL JOIN "categories" AS "c"' end it "should support joins with a USING clause if an array of symbols is used" do @d.join(:categories, [:id]).sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" USING ("id")' @d.join(:categories, [:id1, :id2]).sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" USING ("id1", "id2")' end it "should emulate JOIN USING (poorly) if the dataset doesn't support it" do @d = @d.with_extend{def supports_join_using?; false end} @d.join(:categories, [:id]).sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" ON ("categories"."id" = "items"."id")' end it "should be able to force JOIN USING without an array of symbols using :join_using" do 
@d.join(:categories, [Sequel.identifier(:id)], :join_using=>true).sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" USING ("id")' end it "should hoist WITH clauses from subqueries if the dataset doesn't support CTEs in subselects" do @d = @d.with_extend do def supports_cte?; true end def supports_cte_in_subselect?; false end end ds = Sequel.mock.dataset.from(:categories) @d.join(ds.with_extend{def supports_cte?; true end}.with(:a, Sequel.mock.dataset.from(:b)), [:id]).sql.must_equal 'WITH "a" AS (SELECT * FROM b) SELECT * FROM "items" INNER JOIN (SELECT * FROM categories) AS "t1" USING ("id")' end it "should raise an error if using an array of symbols with a block" do proc{@d.join(:categories, [:id]){|j,lj,js|}}.must_raise(Sequel::Error) end with_symbol_splitting "should support using a block that receives the join table/alias, last join table/alias, and array of previous joins when using splittable symbols" do @d.from(:items___i).join(:categories, nil, :table_alias=>:c) do |join_alias, last_join_alias, joins| join_alias.must_equal :c last_join_alias.must_equal :i joins.must_equal [] end end it "should support using a block that receives the join table/alias, last join table/alias, and array of previous joins" do @d.join(:categories) do |join_alias, last_join_alias, joins| join_alias.must_equal :categories last_join_alias.must_equal :items joins.must_equal [] end @d.from(Sequel.as(:items, :i)).join(:categories, nil, :table_alias=>:c) do |join_alias, last_join_alias, joins| join_alias.must_equal :c last_join_alias.must_equal :i joins.must_equal [] end @d.join(:blah).join(:categories, nil, :table_alias=>:c) do |join_alias, last_join_alias, joins| join_alias.must_equal :c last_join_alias.must_equal :blah joins.must_be_kind_of(Array) joins.length.must_equal 1 joins.first.must_be_kind_of(Sequel::SQL::JoinClause) joins.first.join_type.must_equal :inner joins.last.table.must_equal :blah joins.last.table_alias.must_be_nil joins.last.column_aliases.must_be_nil end @d.join_table(:natural, :blah, nil, :table_alias=>:b).join(:categories, nil, :table_alias=>:c) do |join_alias, last_join_alias, joins| join_alias.must_equal :c last_join_alias.must_equal :b joins.must_be_kind_of(Array) joins.length.must_equal 1 joins.first.must_be_kind_of(Sequel::SQL::JoinClause) joins.first.join_type.must_equal :natural end @d.join(:blah).join(:categories).join(:blah2) do |join_alias, last_join_alias, joins| join_alias.must_equal :blah2 last_join_alias.must_equal :categories joins.must_be_kind_of(Array) joins.length.must_equal 2 joins.first.must_be_kind_of(Sequel::SQL::JoinClause) joins.first.table.must_equal :blah joins.last.must_be_kind_of(Sequel::SQL::JoinClause) joins.last.table.must_equal :categories end end it "should use the block result as the only condition if no condition is given" do @d.join(:categories){|j,lj,js| {Sequel.qualify(j, :b)=>Sequel.qualify(lj, :c)}}.sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" ON ("categories"."b" = "items"."c")' @d.join(:categories){|j,lj,js| Sequel.qualify(j, :b) > Sequel.qualify(lj, :c)}.sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" ON ("categories"."b" > "items"."c")' end it "should combine the block conditions and argument conditions if both given" do @d.join(:categories, :a=>:d){|j,lj,js| {Sequel.qualify(j, :b)=>Sequel.qualify(lj, :c)}}.sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" ON (("categories"."a" = "items"."d") AND ("categories"."b" = "items"."c"))' @d.join(:categories, :a=>:d){|j,lj,js| Sequel.qualify(j,
:b) > Sequel.qualify(lj, :c)}.sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" ON (("categories"."a" = "items"."d") AND ("categories"."b" > "items"."c"))' end it "should prefer explicit aliases over implicit" do @d.from(Sequel[:items].as(:i)).join(Sequel[:categories].as(:c), {:category_id => :id}, {:table_alias=>:c2, :implicit_qualifier=>:i2}).sql.must_equal 'SELECT * FROM "items" AS "i" INNER JOIN "categories" AS "c2" ON ("c2"."category_id" = "i2"."id")' @d.from(Sequel.expr(:items).as(:i)).join(Sequel.expr(:categories).as(:c), {:category_id => :id}, {:table_alias=>:c2, :implicit_qualifier=>:i2}).sql. must_equal 'SELECT * FROM "items" AS "i" INNER JOIN "categories" AS "c2" ON ("c2"."category_id" = "i2"."id")' end with_symbol_splitting "should prefer explicit aliases over implicit when using splittable symbols" do @d.from(:items___i).join(:categories___c, {:category_id => :id}, {:table_alias=>:c2, :implicit_qualifier=>:i2}).sql.must_equal 'SELECT * FROM "items" AS "i" INNER JOIN "categories" AS "c2" ON ("c2"."category_id" = "i2"."id")' @d.from(Sequel.expr(:items).as(:i)).join(Sequel.expr(:categories).as(:c), {:category_id => :id}, {:table_alias=>:c2, :implicit_qualifier=>:i2}).sql. must_equal 'SELECT * FROM "items" AS "i" INNER JOIN "categories" AS "c2" ON ("c2"."category_id" = "i2"."id")' end it "should not allow insert, update, delete, or truncate" do proc{@d.join(:categories, :a=>:d).insert_sql}.must_raise(Sequel::InvalidOperation) proc{@d.join(:categories, :a=>:d).update_sql(:a=>1)}.must_raise(Sequel::InvalidOperation) proc{@d.join(:categories, :a=>:d).delete_sql}.must_raise(Sequel::InvalidOperation) proc{@d.join(:categories, :a=>:d).truncate_sql}.must_raise(Sequel::InvalidOperation) end end describe "Dataset aggregate methods" do before do @d = Sequel.mock(:fetch=>proc{|s| {1=>s}})[:test] end it "should include min" do 5.times do @d.min(:a).must_equal 'SELECT min(a) AS min FROM test LIMIT 1' end end it "should include max" do 5.times do @d.max(:b).must_equal 'SELECT max(b) AS max FROM test LIMIT 1' end end it "should include sum" do 5.times do @d.sum(:c).must_equal 'SELECT sum(c) AS sum FROM test LIMIT 1' end end it "should include avg" do 5.times do @d.avg(:d).must_equal 'SELECT avg(d) AS avg FROM test LIMIT 1' end end it "should accept qualified columns" do 5.times do @d.avg(Sequel[:test][:bc]).must_equal 'SELECT avg(test.bc) AS avg FROM test LIMIT 1' end end it "should use a subselect for the same conditions as count" do d = @d.order(:a).limit(5) 5.times do d.avg(:a).must_equal 'SELECT avg(a) AS avg FROM (SELECT * FROM test ORDER BY a LIMIT 5) AS t1 LIMIT 1' d.sum(:a).must_equal 'SELECT sum(a) AS sum FROM (SELECT * FROM test ORDER BY a LIMIT 5) AS t1 LIMIT 1' d.min(:a).must_equal 'SELECT min(a) AS min FROM (SELECT * FROM test ORDER BY a LIMIT 5) AS t1 LIMIT 1' d.max(:a).must_equal 'SELECT max(a) AS max FROM (SELECT * FROM test ORDER BY a LIMIT 5) AS t1 LIMIT 1' end end it "should accept virtual row blocks" do 5.times do @d.avg{a(b)}.must_equal 'SELECT avg(a(b)) AS avg FROM test LIMIT 1' @d.sum{a(b)}.must_equal 'SELECT sum(a(b)) AS sum FROM test LIMIT 1' @d.min{a(b)}.must_equal 'SELECT min(a(b)) AS min FROM test LIMIT 1' @d.max{a(b)}.must_equal 'SELECT max(a(b)) AS max FROM test LIMIT 1' end end end describe "Dataset #first and #last" do before do @d = Sequel.mock(:fetch=>proc{|s| {:s=>s}})[:test] end it "should return a single record if no argument is given" do ds = @d.order(:a) 3.times do ds.first.must_equal(:s=>'SELECT * FROM test ORDER BY a LIMIT 1') 
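# The expected SQL below shows that #last uses the same query as #first with the order inverted (ASC becomes DESC).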
ds.last.must_equal(:s=>'SELECT * FROM test ORDER BY a DESC LIMIT 1') end end it "should handle empty arrays and hashes" do ds = @d.order(:a) 3.times do ds.first({}).must_equal(:s=>'SELECT * FROM test ORDER BY a LIMIT 1') ds.last({}).must_equal(:s=>'SELECT * FROM test ORDER BY a DESC LIMIT 1') ds.first([]).must_equal(:s=>'SELECT * FROM test ORDER BY a LIMIT 1') ds.last([]).must_equal(:s=>'SELECT * FROM test ORDER BY a DESC LIMIT 1') end end it "should return the first/last matching record if argument is not an Integer" do ds = @d.order(:a) 5.times do ds.first(:z => 26).must_equal(:s=>'SELECT * FROM test WHERE (z = 26) ORDER BY a LIMIT 1') ds.first([[:z, 15]]).must_equal(:s=>'SELECT * FROM test WHERE (z = 15) ORDER BY a LIMIT 1') ds.last(:z => 26).must_equal(:s=>'SELECT * FROM test WHERE (z = 26) ORDER BY a DESC LIMIT 1') ds.last([[:z, 15]]).must_equal(:s=>'SELECT * FROM test WHERE (z = 15) ORDER BY a DESC LIMIT 1') end end it "should return the first/last matching record if argument is not an Integer when not caching SQL" do ds = @d.order(:a).clone(:no_cache_sql=>true) 5.times do ds.first(:z => 26).must_equal(:s=>'SELECT * FROM test WHERE (z = 26) ORDER BY a LIMIT 1') ds.first([[:z, 15]]).must_equal(:s=>'SELECT * FROM test WHERE (z = 15) ORDER BY a LIMIT 1') ds.last(:z => 26).must_equal(:s=>'SELECT * FROM test WHERE (z = 26) ORDER BY a DESC LIMIT 1') ds.last([[:z, 15]]).must_equal(:s=>'SELECT * FROM test WHERE (z = 15) ORDER BY a DESC LIMIT 1') end end it "should set the limit and return an array of records if the given number is > 1" do ds = @d.order(:a) 5.times do i = rand(10) + 10 ds.first(i).must_equal [{:s=>"SELECT * FROM test ORDER BY a LIMIT #{i}"}] ds.last(i).must_equal [{:s=>"SELECT * FROM test ORDER BY a DESC LIMIT #{i}"}] end end it "should return the first matching record if a block is given without an argument" do ds = @d.order(:name) 5.times do @d.first{z > 26}.must_equal(:s=>'SELECT * FROM test WHERE (z > 26) LIMIT 1') ds.last{z > 26}.must_equal(:s=>'SELECT * FROM test WHERE (z > 26) ORDER BY name DESC LIMIT 1') end end it "should combine block and standard argument filters if argument is not an Integer" do ds = @d.order(:name) 5.times do @d.first(:y=>25){z > 26}.must_equal(:s=>'SELECT * FROM test WHERE ((y = 25) AND (z > 26)) LIMIT 1') ds.last(:y=>16){z > 26}.must_equal(:s=>'SELECT * FROM test WHERE ((y = 16) AND (z > 26)) ORDER BY name DESC LIMIT 1') end end it "should combine block and standard argument filters if argument is a literal string" do ds = @d.order(:name) 5.times do @d.first(Sequel.lit('y = 25')){z > 26}.must_equal(:s=>'SELECT * FROM test WHERE ((y = 25) AND (z > 26)) LIMIT 1') ds.last(Sequel.lit('y = 16')){z > 26}.must_equal(:s=>'SELECT * FROM test WHERE ((y = 16) AND (z > 26)) ORDER BY name DESC LIMIT 1') @d.first(Sequel.lit('y = ?', 25)){z > 26}.must_equal(:s=>'SELECT * FROM test WHERE ((y = 25) AND (z > 26)) LIMIT 1') ds.last(Sequel.lit('y = ?', 16)){z > 26}.must_equal(:s=>'SELECT * FROM test WHERE ((y = 16) AND (z > 26)) ORDER BY name DESC LIMIT 1') end end it "should filter and return an array of records if an Integer argument is provided and a block is given" do ds = @d.order(:a) 5.times do i = rand(10) + 10 ds.first(i){z > 26}.must_equal [{:s=>"SELECT * FROM test WHERE (z > 26) ORDER BY a LIMIT #{i}"}] ds.last(i){z > 26}.must_equal [{:s=>"SELECT * FROM test WHERE (z > 26) ORDER BY a DESC LIMIT #{i}"}] end end it "should return nil if no records match" do Sequel.mock[:t].first.must_be_nil end it "#last should raise if no order is given" do proc
{@d.last}.must_raise(Sequel::Error) proc {@d.last(2)}.must_raise(Sequel::Error) @d.order(:a).last @d.order(:a).last(2) end it "#last should invert the order" do @d.order(:a).last.must_equal(:s=>'SELECT * FROM test ORDER BY a DESC LIMIT 1') @d.order(Sequel.desc(:b)).last.must_equal(:s=>'SELECT * FROM test ORDER BY b ASC LIMIT 1') @d.order(:c, :d).last.must_equal(:s=>'SELECT * FROM test ORDER BY c DESC, d DESC LIMIT 1') @d.order(Sequel.desc(:e), :f).last.must_equal(:s=>'SELECT * FROM test ORDER BY e ASC, f DESC LIMIT 1') end it "should raise an error for multiple arguments" do proc{@d.first(1, :z=>10)}.must_raise Sequel::Error proc{@d.last(1, :z=>10)}.must_raise Sequel::Error end end describe "Dataset #first!" do before do @db = Sequel.mock(:fetch=>proc{|s| {:s=>s}}) @d = @db[:test] end it "should return a single record if no argument is given" do @d.order(:a).first!.must_equal(:s=>'SELECT * FROM test ORDER BY a LIMIT 1') end it "should return the first! matching record if argument is not an Integer" do @d.order(:a).first!(:z => 26).must_equal(:s=>'SELECT * FROM test WHERE (z = 26) ORDER BY a LIMIT 1') @d.order(:a).first!(Sequel.lit('z = ?', 15)).must_equal(:s=>'SELECT * FROM test WHERE (z = 15) ORDER BY a LIMIT 1') end it "should set the limit and return an array of records if the given number is > 1" do i = rand(10) + 10 @d.order(:a).first!(i).must_equal [{:s=>"SELECT * FROM test ORDER BY a LIMIT #{i}"}] end it "should return the first! matching record if a block is given without an argument" do @d.first!{z > 26}.must_equal(:s=>'SELECT * FROM test WHERE (z > 26) LIMIT 1') end it "should combine block and standard argument filters if argument is not an Integer" do @d.first!(:y=>25){z > 26}.must_equal(:s=>'SELECT * FROM test WHERE ((y = 25) AND (z > 26)) LIMIT 1') end it "should filter and return an array of records if an Integer argument is provided and a block is given" do i = rand(10) + 10 @d.order(:a).first!(i){z > 26}.must_equal [{:s=>"SELECT * FROM test WHERE (z > 26) ORDER BY a LIMIT #{i}"}] end it "should raise NoMatchingRow exception if no rows match" do proc{Sequel.mock[:t].first!}.must_raise(Sequel::NoMatchingRow) end it "saves a reference to the dataset with the exception to allow further processing" do dataset = Sequel.mock[:t] begin dataset.first! 
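# The freshly mocked dataset returns no rows, so first! raises NoMatchingRow; the rescue below verifies the exception carries a reference to the dataset that raised it.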
rescue Sequel::NoMatchingRow => e e.dataset.must_equal(dataset) end proc{raise Sequel::NoMatchingRow, 'test'}.must_raise Sequel::NoMatchingRow proc{raise Sequel::NoMatchingRow.new('test')}.must_raise Sequel::NoMatchingRow end end describe "Dataset compound operations" do before do @a = Sequel.mock.dataset.from(:a).filter(:z => 1) @b = Sequel.mock.dataset.from(:b).filter(:z => 2) end it "should support UNION and UNION ALL" do @a.union(@b).sql.must_equal "SELECT * FROM (SELECT * FROM a WHERE (z = 1) UNION SELECT * FROM b WHERE (z = 2)) AS t1" @b.union(@a, :all=>true).sql.must_equal "SELECT * FROM (SELECT * FROM b WHERE (z = 2) UNION ALL SELECT * FROM a WHERE (z = 1)) AS t1" end it "should support INTERSECT and INTERSECT ALL" do @a.intersect(@b).sql.must_equal "SELECT * FROM (SELECT * FROM a WHERE (z = 1) INTERSECT SELECT * FROM b WHERE (z = 2)) AS t1" @b.intersect(@a, :all=>true).sql.must_equal "SELECT * FROM (SELECT * FROM b WHERE (z = 2) INTERSECT ALL SELECT * FROM a WHERE (z = 1)) AS t1" end it "should support EXCEPT and EXCEPT ALL" do @a.except(@b).sql.must_equal "SELECT * FROM (SELECT * FROM a WHERE (z = 1) EXCEPT SELECT * FROM b WHERE (z = 2)) AS t1" @b.except(@a, :all=>true).sql.must_equal "SELECT * FROM (SELECT * FROM b WHERE (z = 2) EXCEPT ALL SELECT * FROM a WHERE (z = 1)) AS t1" end it "should support :alias option for specifying identifier" do @a.union(@b, :alias=>:xx).sql.must_equal "SELECT * FROM (SELECT * FROM a WHERE (z = 1) UNION SELECT * FROM b WHERE (z = 2)) AS xx" @a.intersect(@b, :alias=>:xx).sql.must_equal "SELECT * FROM (SELECT * FROM a WHERE (z = 1) INTERSECT SELECT * FROM b WHERE (z = 2)) AS xx" @a.except(@b, :alias=>:xx).sql.must_equal "SELECT * FROM (SELECT * FROM a WHERE (z = 1) EXCEPT SELECT * FROM b WHERE (z = 2)) AS xx" end it "should support :from_self=>false option to not wrap the compound in a SELECT * FROM (...)" do @b.union(@a, :from_self=>false).sql.must_equal "SELECT * FROM b WHERE (z = 2) UNION SELECT * FROM a WHERE (z = 1)" @b.intersect(@a, :from_self=>false).sql.must_equal "SELECT * FROM b WHERE (z = 2) INTERSECT SELECT * FROM a WHERE (z = 1)" @b.except(@a, :from_self=>false).sql.must_equal "SELECT * FROM b WHERE (z = 2) EXCEPT SELECT * FROM a WHERE (z = 1)" @b.union(@a, :from_self=>false, :all=>true).sql.must_equal "SELECT * FROM b WHERE (z = 2) UNION ALL SELECT * FROM a WHERE (z = 1)" @b.intersect(@a, :from_self=>false, :all=>true).sql.must_equal "SELECT * FROM b WHERE (z = 2) INTERSECT ALL SELECT * FROM a WHERE (z = 1)" @b.except(@a, :from_self=>false, :all=>true).sql.must_equal "SELECT * FROM b WHERE (z = 2) EXCEPT ALL SELECT * FROM a WHERE (z = 1)" end it "should raise an InvalidOperation if INTERSECT or EXCEPT is used and they are not supported" do @a = @a.with_extend{def supports_intersect_except?; false end} proc{@a.intersect(@b)}.must_raise(Sequel::InvalidOperation) proc{@a.intersect(@b,:all=> true)}.must_raise(Sequel::InvalidOperation) proc{@a.except(@b)}.must_raise(Sequel::InvalidOperation) proc{@a.except(@b, :all=>true)}.must_raise(Sequel::InvalidOperation) end it "should raise an InvalidOperation if INTERSECT ALL or EXCEPT ALL is used and they are not supported" do @a = @a.with_extend{def supports_intersect_except_all?; false end} @a.intersect(@b) proc{@a.intersect(@b, :all=>true)}.must_raise(Sequel::InvalidOperation) @a.except(@b) proc{@a.except(@b, :all=>true)}.must_raise(Sequel::InvalidOperation) end it "should handle chained compound operations" do @a.union(@b).union(@a, :all=>true).sql.must_equal "SELECT * FROM (SELECT * FROM (SELECT 
* FROM a WHERE (z = 1) UNION SELECT * FROM b WHERE (z = 2)) AS t1 UNION ALL SELECT * FROM a WHERE (z = 1)) AS t1" @a.intersect(@b, :all=>true).intersect(@a).sql.must_equal "SELECT * FROM (SELECT * FROM (SELECT * FROM a WHERE (z = 1) INTERSECT ALL SELECT * FROM b WHERE (z = 2)) AS t1 INTERSECT SELECT * FROM a WHERE (z = 1)) AS t1" @a.except(@b).except(@a, :all=>true).sql.must_equal "SELECT * FROM (SELECT * FROM (SELECT * FROM a WHERE (z = 1) EXCEPT SELECT * FROM b WHERE (z = 2)) AS t1 EXCEPT ALL SELECT * FROM a WHERE (z = 1)) AS t1" end it "should use a subselect when using a compound operation with a dataset that already has a compound operation" do @a.union(@b.union(@a, :all=>true)).sql.must_equal "SELECT * FROM (SELECT * FROM a WHERE (z = 1) UNION SELECT * FROM (SELECT * FROM b WHERE (z = 2) UNION ALL SELECT * FROM a WHERE (z = 1)) AS t1) AS t1" @a.intersect(@b.intersect(@a), :all=>true).sql.must_equal "SELECT * FROM (SELECT * FROM a WHERE (z = 1) INTERSECT ALL SELECT * FROM (SELECT * FROM b WHERE (z = 2) INTERSECT SELECT * FROM a WHERE (z = 1)) AS t1) AS t1" @a.except(@b.except(@a, :all=>true)).sql.must_equal "SELECT * FROM (SELECT * FROM a WHERE (z = 1) EXCEPT SELECT * FROM (SELECT * FROM b WHERE (z = 2) EXCEPT ALL SELECT * FROM a WHERE (z = 1)) AS t1) AS t1" end it "should order and limit properly when using UNION, INTERSECT, or EXCEPT" do @dataset = Sequel.mock.dataset.from(:test) @dataset.union(@dataset).limit(2).sql.must_equal "SELECT * FROM (SELECT * FROM test UNION SELECT * FROM test) AS t1 LIMIT 2" @dataset.limit(2).intersect(@dataset).sql.must_equal "SELECT * FROM (SELECT * FROM (SELECT * FROM test LIMIT 2) AS t1 INTERSECT SELECT * FROM test) AS t1" @dataset.except(@dataset.limit(2)).sql.must_equal "SELECT * FROM (SELECT * FROM test EXCEPT SELECT * FROM (SELECT * FROM test LIMIT 2) AS t1) AS t1" @dataset.union(@dataset).order(:num).sql.must_equal "SELECT * FROM (SELECT * FROM test UNION SELECT * FROM test) AS t1 ORDER BY num" @dataset.order(:num).intersect(@dataset).sql.must_equal "SELECT * FROM (SELECT * FROM (SELECT * FROM test ORDER BY num) AS t1 INTERSECT SELECT * FROM test) AS t1" @dataset.except(@dataset.order(:num)).sql.must_equal "SELECT * FROM (SELECT * FROM test EXCEPT SELECT * FROM (SELECT * FROM test ORDER BY num) AS t1) AS t1" @dataset.limit(2).order(:a).union(@dataset.limit(3).order(:b)).order(:c).limit(4).sql. 
must_equal "SELECT * FROM (SELECT * FROM (SELECT * FROM test ORDER BY a LIMIT 2) AS t1 UNION SELECT * FROM (SELECT * FROM test ORDER BY b LIMIT 3) AS t1) AS t1 ORDER BY c LIMIT 4" end it "should handle raw SQL datasets properly when using UNION, INTERSECT, or EXCEPT" do @dataset = Sequel.mock['SELECT 1'] @dataset.union(@dataset).sql.must_equal "SELECT * FROM (SELECT * FROM (SELECT 1) AS t1 UNION SELECT * FROM (SELECT 1) AS t1) AS t1" @dataset.intersect(@dataset).sql.must_equal "SELECT * FROM (SELECT * FROM (SELECT 1) AS t1 INTERSECT SELECT * FROM (SELECT 1) AS t1) AS t1" @dataset.except(@dataset).sql.must_equal "SELECT * FROM (SELECT * FROM (SELECT 1) AS t1 EXCEPT SELECT * FROM (SELECT 1) AS t1) AS t1" end it "should hoist WITH clauses in given dataset if dataset doesn't support WITH in subselect" do ds = Sequel.mock.dataset ds = ds.with_extend do def supports_cte?; true end def supports_cte_in_subselect?; false end end ds.from(:a).union(ds.from(:c).with(:c, ds.from(:d)), :from_self=>false).sql.must_equal 'WITH c AS (SELECT * FROM d) SELECT * FROM a UNION SELECT * FROM c' ds.from(:a).except(ds.from(:c).with(:c, ds.from(:d))).sql.must_equal 'WITH c AS (SELECT * FROM d) SELECT * FROM (SELECT * FROM a EXCEPT SELECT * FROM c) AS t1' ds.from(:a).with(:a, ds.from(:b)).intersect(ds.from(:c).with(:c, ds.from(:d)), :from_self=>false).sql.must_equal 'WITH a AS (SELECT * FROM b), c AS (SELECT * FROM d) SELECT * FROM a INTERSECT SELECT * FROM c' end end describe "Dataset#[]" do before do @db = Sequel.mock(:fetch=>{1 => 2, 3 => 4}) @d = @db[:items] end it "should return a single record filtered according to the given conditions" do @d[:name => 'didi'].must_equal(1 => 2, 3 => 4) @db.sqls.must_equal ["SELECT * FROM items WHERE (name = 'didi') LIMIT 1"] @d[:id => 5..45].must_equal(1 => 2, 3 => 4) @db.sqls.must_equal ["SELECT * FROM items WHERE ((id >= 5) AND (id <= 45)) LIMIT 1"] end it "should raise an error for no arguments or a single integer argument" do proc{@d[]}.must_raise Sequel::Error proc{@d[1]}.must_raise Sequel::Error end end describe "Dataset#single_record" do before do @db = Sequel.mock end it "should call each with a limit of 1 and return the record" do @db.fetch = {:a=>1} @db[:test].single_record.must_equal(:a=>1) @db.sqls.must_equal ['SELECT * FROM test LIMIT 1'] end it "should return nil if no record is present" do @db[:test].single_record.must_be_nil @db.sqls.must_equal ['SELECT * FROM test LIMIT 1'] end end describe "Dataset#single_record!" do before do @db = Sequel.mock end it "should call each and return the first record" do @db.fetch = [{:a=>1}, {:a=>2}] @db[:test].single_record!.must_equal(:a=>1) @db.sqls.must_equal ['SELECT * FROM test'] end it "should return nil if no record is present" do @db[:test].single_record!.must_be_nil @db.sqls.must_equal ['SELECT * FROM test'] end end describe "Dataset#single_value" do before do @db = Sequel.mock end it "should call each and return the first value of the first record" do @db.fetch = {:a=>1} @db[:test].single_value.must_equal 1 @db.sqls.must_equal ['SELECT * FROM test LIMIT 1'] end it "should return nil if no records" do @db[:test].single_value.must_be_nil @db.sqls.must_equal ['SELECT * FROM test LIMIT 1'] end it "should work on a graphed_dataset" do @db.fetch = {:a=>1} ds = @db[:test].columns(:a) ds.graph(ds, [:a], :table_alias=>:test2).single_value.must_equal 1 @db.sqls.must_equal ['SELECT test.a, test2.a AS test2_a FROM test LEFT OUTER JOIN test AS test2 USING (a) LIMIT 1'] end end describe "Dataset#single_value!" 
do before do @db = Sequel.mock end it "should call each and return the first value of the first record" do @db.fetch = [{:a=>1, :b=>2}, {:a=>3, :b=>4}] @db[:test].single_value!.must_equal 1 @db.sqls.must_equal ['SELECT * FROM test'] end it "should return nil if no records" do @db[:test].single_value!.must_be_nil @db.sqls.must_equal ['SELECT * FROM test'] end end describe "Dataset#get" do before do @d = Sequel.mock(:fetch=>proc{|s| {:name=>s}})[:test] end it "should select the specified column and fetch its value" do 5.times do @d.get(:name).must_equal "SELECT name FROM test LIMIT 1" @d.get(:abc).must_equal "SELECT abc FROM test LIMIT 1" end end it "should work with filters" do @d.filter(:id => 1).get(:name).must_equal "SELECT name FROM test WHERE (id = 1) LIMIT 1" end it "should work with aliased fields" do 5.times do @d.get(Sequel.expr(Sequel[:x][:b]).as(:name)).must_equal "SELECT x.b AS name FROM test LIMIT 1" end end it "should work with plain strings" do 5.times do @d.get('a').must_equal "SELECT 'a' AS v FROM test LIMIT 1" end end it "should accept a block that yields a virtual row" do @d.get{|o| o.x_b.as(:name)}.must_equal "SELECT x_b AS name FROM test LIMIT 1" @d.get{x(1).as(:name)}.must_equal "SELECT x(1) AS name FROM test LIMIT 1" end it "should raise an error if both a regular argument and block argument are used" do proc{@d.get(:name){|o| o.x__b.as(:name)}}.must_raise(Sequel::Error) end it "should support false and nil values" do @d.get(false).must_equal "SELECT 'f' AS v FROM test LIMIT 1" @d.get(nil).must_equal "SELECT NULL AS v FROM test LIMIT 1" end it "should support an array of expressions to get an array of results" do @d = @d.with_fetch(:name=>1, :abc=>2) @d.get([:name, :abc]).must_equal [1, 2] @d.db.sqls.must_equal ['SELECT name, abc FROM test LIMIT 1'] end it "should support an array with a single expression" do @d.get([:name]).must_equal ['SELECT name FROM test LIMIT 1'] end it "should handle an array with aliased expressions" do @d = @d.with_fetch(:name=>1, :abc=>2) @d.get([Sequel[:n].as(:name), Sequel.as(:a, :abc)]).must_equal [1, 2] @d.db.sqls.must_equal ['SELECT n AS name, a AS abc FROM test LIMIT 1'] end with_symbol_splitting "should handle an array with symbols with embedded aliases" do @d = @d.with_fetch(:name=>1, :abc=>2) @d.get([:n___name, :a__b___abc]).must_equal [1, 2] @d.db.sqls.must_equal ['SELECT n AS name, a.b AS abc FROM test LIMIT 1'] end it "should raise an Error if an alias cannot be determined" do proc{@d.with_fetch(:name=>1, :abc=>2).get([Sequel.+(:a, 1), :a])}.must_raise(Sequel::Error) end it "should support an array of expressions in a virtual row" do @d = @d.with_fetch(:name=>1, :abc=>2) @d.get{[name, n[abc]]}.must_equal [1, 2] @d.db.sqls.must_equal ['SELECT name, n.abc FROM test LIMIT 1'] end it "should work with static SQL" do @d.with_sql('SELECT foo').get(:name).must_equal "SELECT foo" @d = @d.with_fetch(:name=>1, :abc=>2) @d.with_sql('SELECT foo').get{[name, n[abc]]}.must_equal [1, 2] @d.db.sqls.must_equal ['SELECT foo'] * 2 end it "should handle cases where no rows are returned" do @d = @d.with_fetch([]) @d.get(:n).must_be_nil @d.get([:n, :a]).must_be_nil @d.db.sqls.must_equal ['SELECT n FROM test LIMIT 1', 'SELECT n, a FROM test LIMIT 1'] end end describe "Dataset#with_row_proc" do before do @db = Sequel.mock(:fetch=>[{:a=>1}, {:a=>2}]) @dataset = @db[:items].with_row_proc(proc{|h| h[:der] = h[:a] + 2; h}) end it "should cause dataset to pass all rows through the filter" do rows = @dataset.all rows.map{|h| h[:der]}.must_equal [3, 4] 
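# The row proc runs in Ruby for each fetched row, so :der is computed client-side and the generated SQL (checked below) is unchanged.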
@db.sqls.must_equal ['SELECT * FROM items'] end it "should be copied over when dataset is cloned" do @dataset.filter(:a => 1).all.must_equal [{:a=>1, :der=>3}, {:a=>2, :der=>4}] end end describe "Dataset#<<" do before do @db = Sequel.mock end it "should call #insert" do @db[:items] << {:name => 1} @db.sqls.must_equal ['INSERT INTO items (name) VALUES (1)'] end it "should be chainable" do @db[:items] << {:name => 1} << @db[:old_items].select(:name) @db.sqls.must_equal ['INSERT INTO items (name) VALUES (1)', 'INSERT INTO items SELECT name FROM old_items'] end end describe "Dataset#columns" do before do @dataset = Sequel.mock[:items] end it "should return the value of @columns if @columns is not nil" do @dataset.columns(:a, :b, :c).columns.must_equal [:a, :b, :c] @dataset.db.sqls.must_equal [] end it "should attempt to get a single record and return @columns if @columns is nil" do @dataset.db.columns = [:a] @dataset.columns.must_equal [:a] @dataset.db.sqls.must_equal ['SELECT * FROM items LIMIT 0'] end it "should be cleared if you change the selected columns" do @dataset.db.columns = [[:a], [:b]] @dataset.columns.must_equal [:a] @dataset.db.sqls.must_equal ['SELECT * FROM items LIMIT 0'] @dataset.columns.must_equal [:a] @dataset.db.sqls.must_equal [] ds = @dataset.select{foo.function} ds.columns.must_equal [:b] @dataset.db.sqls.must_equal ['SELECT foo() FROM items LIMIT 0'] end it "should be cleared if you change the FROM table" do @dataset.db.columns = [[:a], [:b]] @dataset.columns.must_equal [:a] @dataset.db.sqls.must_equal ['SELECT * FROM items LIMIT 0'] ds = @dataset.from(:foo) ds.columns.must_equal [:b] @dataset.db.sqls.must_equal ['SELECT * FROM foo LIMIT 0'] end it "should be cleared if you join a table to the dataset" do @dataset.db.columns = [[:a], [:a, :b]] @dataset.columns.must_equal [:a] @dataset.db.sqls.must_equal ['SELECT * FROM items LIMIT 0'] ds = @dataset.cross_join(:foo) ds.columns.must_equal [:a, :b] @dataset.db.sqls.must_equal ['SELECT * FROM items CROSS JOIN foo LIMIT 0'] end it "should be cleared if you set custom SQL for the dataset" do @dataset.db.columns = [[:a], [:b]] @dataset.columns.must_equal [:a] @dataset.db.sqls.must_equal ['SELECT * FROM items LIMIT 0'] ds = @dataset.with_sql('SELECT b FROM foo') ds.columns.must_equal [:b] @dataset.db.sqls.must_equal ['SELECT b FROM foo'] end it "should ignore any filters, orders, or DISTINCT clauses" do @dataset.db.columns = [:a] @dataset = @dataset.where(:b=>100).order(:b).distinct @dataset.columns.must_equal [:a] @dataset.db.sqls.must_equal ['SELECT * FROM items LIMIT 0'] end end describe "Dataset#columns!" 
do it "should always attempt to get a record and return @columns" do ds = Sequel.mock(:columns=>[[:a, :b, :c], [:d, :e, :f]])[:items] ds.columns!.must_equal [:a, :b, :c] ds.db.sqls.must_equal ['SELECT * FROM items LIMIT 0'] ds.columns!.must_equal [:d, :e, :f] ds.db.sqls.must_equal ['SELECT * FROM items LIMIT 0'] end end describe "Dataset#import" do before do @db = Sequel.mock @ds = @db[:items] end it "should return nil without a query if no values" do @ds.import(['x', 'y'], []).must_be_nil @db.sqls.must_equal [] end it "should accept string keys as column names" do @ds.import(['x', 'y'], [[1, 2], [3, 4]]) @db.sqls.must_equal ['BEGIN', "INSERT INTO items (x, y) VALUES (1, 2)", "INSERT INTO items (x, y) VALUES (3, 4)", 'COMMIT'] end it "should accept a columns array and a values array" do @ds.import([:x, :y], [[1, 2], [3, 4]]) @db.sqls.must_equal ['BEGIN', "INSERT INTO items (x, y) VALUES (1, 2)", "INSERT INTO items (x, y) VALUES (3, 4)", 'COMMIT'] end it "should accept a columns array and a dataset" do @ds2 = @ds.from(:cats).filter(:purr => true).select(:a, :b) @ds.import([:x, :y], @ds2) @db.sqls.must_equal ['BEGIN', "INSERT INTO items (x, y) SELECT a, b FROM cats WHERE (purr IS TRUE)", 'COMMIT'] end it "should slice based on the default_import_slice option" do @ds = @ds.with_extend{def default_import_slice; 2 end} @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]]) @db.sqls.must_equal ['BEGIN', "INSERT INTO items (x, y) VALUES (1, 2)", "INSERT INTO items (x, y) VALUES (3, 4)", 'COMMIT', "INSERT INTO items (x, y) VALUES (5, 6)"] @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice=>nil) @db.sqls.must_equal ['BEGIN', "INSERT INTO items (x, y) VALUES (1, 2)", "INSERT INTO items (x, y) VALUES (3, 4)", "INSERT INTO items (x, y) VALUES (5, 6)", 'COMMIT'] end it "should accept a columns array and a values array with :commit_every option" do @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]], :commit_every => 2) @db.sqls.must_equal ['BEGIN', "INSERT INTO items (x, y) VALUES (1, 2)", "INSERT INTO items (x, y) VALUES (3, 4)", 'COMMIT', "INSERT INTO items (x, y) VALUES (5, 6)"] end it "should accept a columns array and a values array with :slice option" do @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice => 2) @db.sqls.must_equal ['BEGIN', "INSERT INTO items (x, y) VALUES (1, 2)", "INSERT INTO items (x, y) VALUES (3, 4)", 'COMMIT', "INSERT INTO items (x, y) VALUES (5, 6)"] end it "should use correct sql for :values strategy" do @ds = @ds.with_extend{def multi_insert_sql_strategy; :values end} @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]]) @db.sqls.must_equal ["INSERT INTO items (x, y) VALUES (1, 2), (3, 4), (5, 6)"] @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice=>2) @db.sqls.must_equal ["INSERT INTO items (x, y) VALUES (1, 2), (3, 4)", "INSERT INTO items (x, y) VALUES (5, 6)"] end it "should use correct sql for :union strategy" do @ds = @ds.with_extend{def multi_insert_sql_strategy; :union end} @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]]) @db.sqls.must_equal ["INSERT INTO items (x, y) SELECT 1, 2 UNION ALL SELECT 3, 4 UNION ALL SELECT 5, 6"] @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice=>2) @db.sqls.must_equal ["INSERT INTO items (x, y) SELECT 1, 2 UNION ALL SELECT 3, 4", "INSERT INTO items (x, y) SELECT 5, 6"] end it "should use correct sql for :union strategy when FROM is required" do @ds = @ds.with_extend do def empty_from_sql; ' FROM foo' end def multi_insert_sql_strategy; :union end end @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]]) @db.sqls.must_equal ["INSERT INTO items (x, y) 
SELECT 1, 2 FROM foo UNION ALL SELECT 3, 4 FROM foo UNION ALL SELECT 5, 6 FROM foo"] @ds.import([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice=>2) @db.sqls.must_equal ["INSERT INTO items (x, y) SELECT 1, 2 FROM foo UNION ALL SELECT 3, 4 FROM foo", "INSERT INTO items (x, y) SELECT 5, 6 FROM foo"] end it "should raise an error if columns are empty and values are not empty" do proc{@ds.import([], [[]])}.must_raise Sequel::Error end end describe "Dataset#multi_insert" do before do @db = Sequel.mock(:servers=>{:s1=>{}}) @ds = @db[:items] @list = [{:name => 'abc'}, {:name => 'def'}, {:name => 'ghi'}] end it "should return nil without a query if no values" do @ds.multi_insert([]).must_be_nil @db.sqls.must_equal [] end it "should issue multiple insert statements inside a transaction" do @ds.multi_insert(@list) @db.sqls.must_equal ['BEGIN', "INSERT INTO items (name) VALUES ('abc')", "INSERT INTO items (name) VALUES ('def')", "INSERT INTO items (name) VALUES ('ghi')", 'COMMIT'] end it "should respect :server option" do @ds.multi_insert(@list, :server=>:s1) @db.sqls.must_equal ['BEGIN -- s1', "INSERT INTO items (name) VALUES ('abc') -- s1", "INSERT INTO items (name) VALUES ('def') -- s1", "INSERT INTO items (name) VALUES ('ghi') -- s1", 'COMMIT -- s1'] end it "should respect existing :server option on dataset" do @ds.server(:s1).multi_insert(@list) @db.sqls.must_equal ['BEGIN -- s1', "INSERT INTO items (name) VALUES ('abc') -- s1", "INSERT INTO items (name) VALUES ('def') -- s1", "INSERT INTO items (name) VALUES ('ghi') -- s1", 'COMMIT -- s1'] end it "should respect :return=>:primary_key option" do @db.autoid = 1 @ds.multi_insert(@list, :return=>:primary_key).must_equal [1, 2, 3] @db.sqls.must_equal ['BEGIN', "INSERT INTO items (name) VALUES ('abc')", "INSERT INTO items (name) VALUES ('def')", "INSERT INTO items (name) VALUES ('ghi')", 'COMMIT'] end it "should handle :return=>:primary_key option if dataset has a row_proc" do @db.autoid = 1 @ds.with_row_proc(lambda{|h| Object.new}).multi_insert(@list, :return=>:primary_key).must_equal [1, 2, 3] @db.sqls.must_equal ['BEGIN', "INSERT INTO items (name) VALUES ('abc')", "INSERT INTO items (name) VALUES ('def')", "INSERT INTO items (name) VALUES ('ghi')", 'COMMIT'] end it "should not use transaction when using :return=>:primary_key option with single row" do @db.autoid = 1 @ds.multi_insert(@list[0,1], :return=>:primary_key).must_equal [1] @db.sqls.must_equal ["INSERT INTO items (name) VALUES ('abc')"] end with_symbol_splitting "should handle splittable symbols for tables" do @ds = @ds.from(:sch__tab) @ds.multi_insert(@list) @db.sqls.must_equal ['BEGIN', "INSERT INTO sch.tab (name) VALUES ('abc')", "INSERT INTO sch.tab (name) VALUES ('def')", "INSERT INTO sch.tab (name) VALUES ('ghi')", 'COMMIT'] end it "should handle SQL::QualifiedIdentifier for tables" do @ds = @ds.from(Sequel.qualify(:sch, :tab)) @ds.multi_insert(@list) @db.sqls.must_equal ['BEGIN', "INSERT INTO sch.tab (name) VALUES ('abc')", "INSERT INTO sch.tab (name) VALUES ('def')", "INSERT INTO sch.tab (name) VALUES ('ghi')", 'COMMIT'] end it "should handle SQL::Identifier for tables" do @ds = @ds.from(Sequel.identifier(:sch__tab)) @ds.multi_insert(@list) @db.sqls.must_equal ['BEGIN', "INSERT INTO sch__tab (name) VALUES ('abc')", "INSERT INTO sch__tab (name) VALUES ('def')", "INSERT INTO sch__tab (name) VALUES ('ghi')", 'COMMIT'] end it "should accept the :commit_every option for committing every x records" do @ds.multi_insert(@list, :commit_every => 1) @db.sqls.must_equal ["INSERT INTO items (name) 
VALUES ('abc')", "INSERT INTO items (name) VALUES ('def')", "INSERT INTO items (name) VALUES ('ghi')"] end it "should accept the :slice option for committing every x records" do @ds.multi_insert(@list, :slice => 2) @db.sqls.must_equal ['BEGIN', "INSERT INTO items (name) VALUES ('abc')", "INSERT INTO items (name) VALUES ('def')", 'COMMIT', "INSERT INTO items (name) VALUES ('ghi')"] end it "should accept string keys as column names" do @ds.multi_insert([{'x'=>1, 'y'=>2}, {'x'=>3, 'y'=>4}]) @db.sqls.must_equal ['BEGIN', "INSERT INTO items (x, y) VALUES (1, 2)", "INSERT INTO items (x, y) VALUES (3, 4)", 'COMMIT'] end it "should not do anything if no hashes are provided" do @ds.multi_insert([]) @db.sqls.must_equal [] end end describe "Dataset#update_sql" do before do @ds = Sequel.mock.dataset.from(:items) end it "should raise Error for plain strings" do proc{@ds.update_sql("a = b")}.must_raise Sequel::Error end it "should accept literal strings" do @ds.update_sql(Sequel.lit("a = b")).must_equal "UPDATE items SET a = b" end it "should handle qualified identifiers" do @ds.update_sql(Sequel[:items][:a]=>:b).must_equal "UPDATE items SET items.a = b" end with_symbol_splitting "should handle implicitly qualified symbols" do @ds.update_sql(:items__a=>:b).must_equal "UPDATE items SET items.a = b" end it "should accept hash with string keys" do @ds.update_sql('c' => 'd').must_equal "UPDATE items SET c = 'd'" end it "should accept array subscript references" do @ds.update_sql((Sequel.subscript(:day, 1)) => 'd').must_equal "UPDATE items SET day[1] = 'd'" end end describe "Dataset#insert_sql" do before do @ds = Sequel.mock.dataset.from(:items) end it "should accept hash with symbol keys" do @ds.insert_sql(:c => 'd').must_equal "INSERT INTO items (c) VALUES ('d')" end it "should accept hash with string keys" do @ds.insert_sql('c' => 'd').must_equal "INSERT INTO items (c) VALUES ('d')" end it "should quote string keys" do @ds.with_quote_identifiers(true).insert_sql('c' => 'd').must_equal "INSERT INTO \"items\" (\"c\") VALUES ('d')" end it "should accept array subscript references" do @ds.insert_sql((Sequel.subscript(:day, 1)) => 'd').must_equal "INSERT INTO items (day[1]) VALUES ('d')" end it "should raise an Error if the dataset has no sources" do proc{Sequel::Database.new.dataset.insert_sql}.must_raise(Sequel::Error) end it "should accept datasets" do @ds.insert_sql(@ds).must_equal "INSERT INTO items SELECT * FROM items" end it "should accept datasets with columns" do @ds.insert_sql([:a, :b], @ds).must_equal "INSERT INTO items (a, b) SELECT * FROM items" end it "should raise if given bad values" do proc{@ds.clone(:values=>'a').send(:_insert_sql)}.must_raise(Sequel::Error) end it "should accept separate values" do @ds.insert_sql(1).must_equal "INSERT INTO items VALUES (1)" @ds.insert_sql(1, 2).must_equal "INSERT INTO items VALUES (1, 2)" @ds.insert_sql(1, 2, 3).must_equal "INSERT INTO items VALUES (1, 2, 3)" end it "should accept a single array of values" do @ds.insert_sql([1, 2, 3]).must_equal "INSERT INTO items VALUES (1, 2, 3)" end it "should accept an array of columns and an array of values" do @ds.insert_sql([:a, :b, :c], [1, 2, 3]).must_equal "INSERT INTO items (a, b, c) VALUES (1, 2, 3)" end it "should raise an array if the columns and values differ in size" do proc{@ds.insert_sql([:a, :b], [1, 2, 3])}.must_raise(Sequel::Error) end it "should accept a single LiteralString" do @ds.insert_sql(Sequel.lit('VALUES (1, 2, 3)')).must_equal "INSERT INTO items VALUES (1, 2, 3)" end it "should accept an array 
of columns and a LiteralString" do @ds.insert_sql([:a, :b, :c], Sequel.lit('VALUES (1, 2, 3)')).must_equal "INSERT INTO items (a, b, c) VALUES (1, 2, 3)" end with_symbol_splitting "should use unaliased table name when using splittable symbol" do @ds.from(:items___i).insert_sql(1).must_equal "INSERT INTO items VALUES (1)" end it "should use unaliased table name" do @ds.from(Sequel.as(:items, :i)).insert_sql(1).must_equal "INSERT INTO items VALUES (1)" end it "should hoist WITH clauses from query if the dataset doesn't support CTEs in subselects" do @ds = @ds.with_extend do Sequel::Dataset.def_sql_method(self, :insert, %w'with insert into columns values') def supports_cte?(type=:select); true end def supports_cte_in_subselect?; false end end @ds.insert_sql(@ds.from(:foo).with(:foo, @ds.select(:bar))).must_equal 'WITH foo AS (SELECT bar FROM items) INSERT INTO items SELECT * FROM foo' @ds.insert_sql([:a], @ds.from(:foo).with(:foo, @ds.select(:bar))).must_equal 'WITH foo AS (SELECT bar FROM items) INSERT INTO items (a) SELECT * FROM foo' end end describe "Dataset#inspect" do before do class ::InspectDataset < Sequel::Dataset; end end after do Object.send(:remove_const, :InspectDataset) if defined?(::InspectDataset) end it "should include the class name and the corresponding SQL statement" do Sequel::Dataset.new(Sequel.mock).from(:blah).inspect.must_equal '#<Sequel::Dataset: "SELECT * FROM \\"BLAH\\"">' InspectDataset.new(Sequel.mock).from(:blah).inspect.must_equal '#<InspectDataset: "SELECT * FROM \\"BLAH\\"">' end it "should skip anonymous classes" do Class.new(Class.new(Sequel::Dataset)).new(Sequel.mock).from(:blah).inspect.must_equal '#<Sequel::Dataset: "SELECT * FROM \\"BLAH\\"">' Class.new(InspectDataset).new(Sequel.mock).from(:blah).inspect.must_equal '#<InspectDataset: "SELECT * FROM \\"BLAH\\"">' end end describe "Dataset#all" do before do @dataset = Sequel.mock(:fetch=>[{:x => 1, :y => 2}, {:x => 3, :y => 4}])[:items] end it "should return an array with all records" do @dataset.all.must_equal [{:x => 1, :y => 2}, {:x => 3, :y => 4}] @dataset.db.sqls.must_equal ["SELECT * FROM items"] end it "should iterate over the array if a block is given" do a = [] @dataset.all{|r| a << r.values_at(:x, :y)}.must_equal [{:x => 1, :y => 2}, {:x => 3, :y => 4}] a.must_equal [[1, 2], [3, 4]] @dataset.db.sqls.must_equal ["SELECT * FROM items"] end end describe "Dataset#grep" do before do @ds = Sequel.mock[:posts] end it "should format a filter correctly" do @ds.grep(:title, 'ruby').sql.must_equal "SELECT * FROM posts WHERE ((title LIKE 'ruby' ESCAPE '\\'))" end it "should support multiple columns" do @ds.grep([:title, :body], 'ruby').sql.must_equal "SELECT * FROM posts WHERE ((title LIKE 'ruby' ESCAPE '\\') OR (body LIKE 'ruby' ESCAPE '\\'))" end it "should support multiple search terms" do @ds.grep(:title, ['abc', 'def']).sql.must_equal "SELECT * FROM posts WHERE ((title LIKE 'abc' ESCAPE '\\') OR (title LIKE 'def' ESCAPE '\\'))" end it "should support multiple columns and search terms" do @ds.grep([:title, :body], ['abc', 'def']).sql.must_equal "SELECT * FROM posts WHERE ((title LIKE 'abc' ESCAPE '\\') OR (title LIKE 'def' ESCAPE '\\') OR (body LIKE 'abc' ESCAPE '\\') OR (body LIKE 'def' ESCAPE '\\'))" end it "should support the :all_patterns option" do @ds.grep([:title, :body], ['abc', 'def'], :all_patterns=>true).sql.must_equal "SELECT * FROM posts WHERE (((title LIKE 'abc' ESCAPE '\\') OR (body LIKE 'abc' ESCAPE '\\')) AND ((title LIKE 'def' ESCAPE '\\') OR (body LIKE 'def' ESCAPE '\\')))" end it
"should support the :all_columns option" do @ds.grep([:title, :body], ['abc', 'def'], :all_columns=>true).sql.must_equal "SELECT * FROM posts WHERE (((title LIKE 'abc' ESCAPE '\\') OR (title LIKE 'def' ESCAPE '\\')) AND ((body LIKE 'abc' ESCAPE '\\') OR (body LIKE 'def' ESCAPE '\\')))" end it "should support the :case_insensitive option" do @ds.grep([:title, :body], ['abc', 'def'], :case_insensitive=>true).sql.must_equal "SELECT * FROM posts WHERE ((UPPER(title) LIKE UPPER('abc') ESCAPE '\\') OR (UPPER(title) LIKE UPPER('def') ESCAPE '\\') OR (UPPER(body) LIKE UPPER('abc') ESCAPE '\\') OR (UPPER(body) LIKE UPPER('def') ESCAPE '\\'))" end it "should support the :all_patterns and :all_columns options together" do @ds.grep([:title, :body], ['abc', 'def'], :all_patterns=>true, :all_columns=>true).sql.must_equal "SELECT * FROM posts WHERE ((title LIKE 'abc' ESCAPE '\\') AND (body LIKE 'abc' ESCAPE '\\') AND (title LIKE 'def' ESCAPE '\\') AND (body LIKE 'def' ESCAPE '\\'))" end it "should support the :all_patterns and :case_insensitive options together" do @ds.grep([:title, :body], ['abc', 'def'], :all_patterns=>true, :case_insensitive=>true).sql.must_equal "SELECT * FROM posts WHERE (((UPPER(title) LIKE UPPER('abc') ESCAPE '\\') OR (UPPER(body) LIKE UPPER('abc') ESCAPE '\\')) AND ((UPPER(title) LIKE UPPER('def') ESCAPE '\\') OR (UPPER(body) LIKE UPPER('def') ESCAPE '\\')))" end it "should support the :all_columns and :case_insensitive options together" do @ds.grep([:title, :body], ['abc', 'def'], :all_columns=>true, :case_insensitive=>true).sql.must_equal "SELECT * FROM posts WHERE (((UPPER(title) LIKE UPPER('abc') ESCAPE '\\') OR (UPPER(title) LIKE UPPER('def') ESCAPE '\\')) AND ((UPPER(body) LIKE UPPER('abc') ESCAPE '\\') OR (UPPER(body) LIKE UPPER('def') ESCAPE '\\')))" end it "should support the :all_patterns, :all_columns, and :case_insensitive options together" do @ds.grep([:title, :body], ['abc', 'def'], :all_patterns=>true, :all_columns=>true, :case_insensitive=>true).sql.must_equal "SELECT * FROM posts WHERE ((UPPER(title) LIKE UPPER('abc') ESCAPE '\\') AND (UPPER(body) LIKE UPPER('abc') ESCAPE '\\') AND (UPPER(title) LIKE UPPER('def') ESCAPE '\\') AND (UPPER(body) LIKE UPPER('def') ESCAPE '\\'))" end it "should not support regexps if the database doesn't supports it" do proc{@ds.grep(:title, /ruby/).sql}.must_raise(Sequel::InvalidOperation) proc{@ds.grep(:title, [/^ruby/, 'ruby']).sql}.must_raise(Sequel::InvalidOperation) end it "should support regexps if the database supports it" do @ds = @ds.with_extend{def supports_regexp?; true end} @ds.grep(:title, /ruby/).sql.must_equal "SELECT * FROM posts WHERE ((title ~ 'ruby'))" @ds.grep(:title, [/^ruby/, 'ruby']).sql.must_equal "SELECT * FROM posts WHERE ((title ~ '^ruby') OR (title LIKE 'ruby' ESCAPE '\\'))" end it "should support searching against other columns" do @ds.grep(:title, :body).sql.must_equal "SELECT * FROM posts WHERE ((title LIKE body ESCAPE '\\'))" end it "should support databases where LIKE cannot be escaped" do @ds = @ds.with_extend{private; def requires_like_escape?; false end} @ds.grep(:baz, 'quux%').sql.must_equal 'SELECT * FROM posts WHERE ((baz LIKE \'quux%\'))' end end describe "Dataset default #fetch_rows, #insert, #update, #delete, #truncate, #execute" do before do @db = Sequel.mock(:servers=>{:read_only=>{}, :r1=>{}}, :autoid=>1) @ds = @db[:items] end it "#delete should execute delete SQL" do @ds.delete.must_equal 0 @db.sqls.must_equal ["DELETE FROM items"] end it "#delete should execute delete SQL even without a 
table" do @ds.from.delete.must_equal 0 @db.sqls.must_equal ["DELETE"] end it "#insert should execute insert SQL" do @ds.insert([]).must_equal 1 @db.sqls.must_equal ["INSERT INTO items DEFAULT VALUES"] end it "#update should execute update SQL" do @ds.update(:number=>1).must_equal 0 @db.sqls.must_equal ["UPDATE items SET number = 1"] end it "#truncate should execute truncate SQL" do @ds.truncate.must_be_nil @db.sqls.must_equal ["TRUNCATE TABLE items"] end it "#truncate should raise an InvalidOperation exception if the dataset is filtered" do proc{@ds.filter(:a=>1).truncate}.must_raise(Sequel::InvalidOperation) proc{@ds.having(:a=>1).truncate}.must_raise(Sequel::InvalidOperation) end it "#execute should execute the SQL on the read_only database" do @ds.send(:execute, 'SELECT 1') @db.sqls.must_equal ["SELECT 1 -- read_only"] end it "#execute should execute the SQL on the default database if locking is used" do @ds.for_update.send(:execute, 'SELECT 1') @db.sqls.must_equal ["SELECT 1"] end [:execute, :execute_dui, :execute_insert, :execute_ddl].each do |meth| it "##{meth} should respect explicit :server option" do @ds.send(meth, 'SELECT 1', :server=>:r1) @db.sqls.must_equal ["SELECT 1 -- r1"] end it "##{meth} should respect dataset's :server option if :server option not given" do @ds.server(:r1).send(meth, 'SELECT 1') @db.sqls.must_equal ["SELECT 1 -- r1"] end end end describe "Dataset#merge*" do before do @db = Sequel.mock @db.extend_datasets{def supports_merge?; true end} @ds = @db[:t1].merge_using(:t2, :c1=>:c2) end it "#merge_delete should set a DELETE clause for the merge" do @ds.merge_delete.merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN MATCHED THEN DELETE" end it "#merge_delete should accept a virtual row block for conditions" do @ds.merge_delete{a > 30}.merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN MATCHED AND (a > 30) THEN DELETE" end it "#merge_insert should set an INSERT clause for the merge" do @ds.merge_insert(:a=>1).merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN NOT MATCHED THEN INSERT (a) VALUES (1)" end it "#merge_insert should accept a virtual row block for condition" do @ds.merge_insert(:a=>1){b > 30}.merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN NOT MATCHED AND (b > 30) THEN INSERT (a) VALUES (1)" end it "#merge_insert should accept other arguments acceptable by insert" do @ds.merge_insert.merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN NOT MATCHED THEN INSERT DEFAULT VALUES" @ds.merge_insert(1).merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN NOT MATCHED THEN INSERT VALUES (1)" @ds.merge_insert([1]).merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN NOT MATCHED THEN INSERT VALUES (1)" @ds.merge_insert(1, 2).merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN NOT MATCHED THEN INSERT VALUES (1, 2)" @ds.merge_insert([:a], [1]).merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN NOT MATCHED THEN INSERT (a) VALUES (1)" end it "#merge_update should set an UPDATE clause for the merge" do @ds.merge_update(:a=>1).merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN MATCHED THEN UPDATE SET a = 1" end it "#merge_update should accept a virtual row block for condition" do @ds.merge_update(:a=>1){b > 30}.merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN MATCHED AND (b > 30) THEN UPDATE SET a = 1" end it "#merge_{insert,update,delete} methods should add SQL clauses in the order they are called" do @ds. merge_delete{a > 30}. 
merge_insert(:a=>1). merge_update(:a=>1). merge_sql. must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN MATCHED AND (a > 30) THEN DELETE WHEN NOT MATCHED THEN INSERT (a) VALUES (1) WHEN MATCHED THEN UPDATE SET a = 1" @ds. merge_update(:a=>1){b > 30}. merge_insert(:a=>1). merge_delete. merge_sql. must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN MATCHED AND (b > 30) THEN UPDATE SET a = 1 WHEN NOT MATCHED THEN INSERT (a) VALUES (1) WHEN MATCHED THEN DELETE" end it "#merge should execute the MERGE statement" do @ds.merge_delete.merge.must_be_nil @db.sqls.must_equal ["MERGE INTO t1 USING t2 ON (c1 = c2) WHEN MATCHED THEN DELETE"] end it "#merge_sql should support static SQL" do @ds.with_sql('M').merge_sql.must_equal 'M' end it "#merge_sql should cache SQL by default" do ds = @ds.merge_delete ds.merge_sql.must_be_same_as ds.merge_sql end it "#merge_sql should not cache SQL if it shouldn't be cached" do v = true ds = @ds.merge_delete{Sequel.delay{v}} sql = ds.merge_sql sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN MATCHED AND 't' THEN DELETE" ds.merge_sql.wont_be_same_as sql v = false ds.merge_sql.must_equal "MERGE INTO t1 USING t2 ON (c1 = c2) WHEN MATCHED AND 'f' THEN DELETE" end it "#merge_sql should raise for MERGE without source" do proc{@db[:a].merge_delete.merge_sql}.must_raise Sequel::Error end it "#merge_sql should raise for MERGE without WHEN clauses" do proc{@ds.merge_sql}.must_raise Sequel::Error end it "#merge_sql should raise if MERGE is not supported" do proc{@ds.merge_delete.with_extend{def supports_merge?; false end}.merge_sql}.must_raise Sequel::Error end it "does not support MERGE by default" do Sequel.mock.dataset.supports_merge?.must_equal false end end describe "Dataset#with_sql_*" do before do @db = Sequel.mock(:servers=>{:read_only=>{}}, :autoid=>1, :fetch=>{:id=>1}) @ds = @db[:items] end it "#with_sql_insert should execute given insert SQL" do @ds.with_sql_insert('INSERT INTO foo (1)').must_equal 1 @db.sqls.must_equal ["INSERT INTO foo (1)"] end it "#with_sql_delete should execute given delete SQL" do @ds.with_sql_delete('DELETE FROM foo').must_equal 0 @db.sqls.must_equal ["DELETE FROM foo"] end it "#with_sql_update should execute given update SQL" do @ds.with_sql_update('UPDATE foo SET a = 1').must_equal 0 @db.sqls.must_equal ["UPDATE foo SET a = 1"] end it "#with_sql_all should return all rows from running the SQL" do @ds.with_sql_all('SELECT * FROM foo').must_equal [{:id=>1}] @db.sqls.must_equal ["SELECT * FROM foo -- read_only"] end it "#with_sql_all should yield each row to the block" do a = [] @ds.with_sql_all('SELECT * FROM foo'){|r| a << r} a.must_equal [{:id=>1}] @db.sqls.must_equal ["SELECT * FROM foo -- read_only"] end it "#with_sql_each should yield each row to the block" do a = [] @ds.with_sql_each('SELECT * FROM foo'){|r| a << r} a.must_equal [{:id=>1}] @db.sqls.must_equal ["SELECT * FROM foo -- read_only"] end it "#with_sql_first should return first row" do @ds.with_sql_first('SELECT * FROM foo').must_equal(:id=>1) @db.sqls.must_equal ["SELECT * FROM foo -- read_only"] end it "#with_sql_first should return nil if no rows returned" do @db.fetch = [] @ds.with_sql_first('SELECT * FROM foo').must_be_nil @db.sqls.must_equal ["SELECT * FROM foo -- read_only"] end it "#with_sql_single_value should return first value from first row" do @ds.with_sql_single_value('SELECT * FROM foo').must_equal 1 @db.sqls.must_equal ["SELECT * FROM foo -- read_only"] end it "#with_sql_single_value should return nil if no rows returned" do @db.fetch = [] 
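# An empty fetch result simulates a query that matches no rows.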
@ds.with_sql_single_value('SELECT * FROM foo').must_be_nil @db.sqls.must_equal ["SELECT * FROM foo -- read_only"] end it "#with_sql_* should not modify the columns of the receiver" do @ds = @ds.with_extend{def fetch_rows(sql) self.columns = [:id]; super end} @ds.send(:columns=, [:x]) @ds.with_sql_all('SELECT * FROM foo') @ds.with_sql_each('SELECT * FROM foo'){}.must_be_same_as @ds @ds.with_sql_first('SELECT * FROM foo') @ds.with_sql_single_value('SELECT * FROM foo') @ds.columns.must_equal [:x] end it "#_with_sql_dataset (private) should return a clone that doesn't use separate dataset for columns" do @ds = @ds.with_extend{def fetch_rows(sql) self.columns = [:id]; super end} @ds.send(:cache_set, :_columns, [:foo]) ds = @ds.send(:_with_sql_dataset) ds.must_be_same_as ds.send(:_with_sql_dataset) ds.with_sql_first('SELECT * FROM foo').must_equal(:id=>1) ds.columns.must_equal [:id] @ds.with_sql_first('SELECT * FROM foo').must_equal(:id=>1) @ds.columns.must_equal [:foo] end end describe "Dataset prepared statements and bound variables " do before do @db = Sequel.mock @ds = @db[:items].with_extend{def insert_select_sql(*v) insert_sql(*v) << " RETURNING *" end} end it "#call should take a type and bind hash and interpolate it" do @ds.filter(:num=>:$n).bind({:n=>1}.freeze).call(:each) @ds.filter(:num=>:$n).call(:select, :n=>1) @ds.filter(:num=>:$n).call([:map, :a], :n=>1) @ds.filter(:num=>:$n).call([:as_hash, :a, :b], :n=>1) @ds.filter(:num=>:$n).call([:to_hash, :a, :b], :n=>1) @ds.filter(:num=>:$n).call([:to_hash_groups, :a, :b], :n=>1) @ds.filter(:num=>:$n).call(:first, :n=>1) @ds.filter(:num=>:$n).call(:single_value, :n=>1) @ds.filter(:num=>:$n).call(:delete, :n=>1) @ds.filter(:num=>:$n).call(:update, {:n=>1, :n2=>2}, :num=>:$n2) @ds.call(:insert, {:n=>1}, :num=>:$n) @ds.call(:insert_pk, {:n=>1}, :num=>:$n) @ds.call(:insert_select, {:n=>1}, :num=>:$n) @db.sqls.must_equal [ 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1) LIMIT 1', 'SELECT * FROM items WHERE (num = 1) LIMIT 1', 'DELETE FROM items WHERE (num = 1)', 'UPDATE items SET num = 2 WHERE (num = 1)', 'INSERT INTO items (num) VALUES (1)', 'INSERT INTO items (num) VALUES (1)', 'INSERT INTO items (num) VALUES (1) RETURNING *'] end it "#prepare should take a type and name and store it in the database for later use with call" do pss = [] pss << @ds.filter(:num=>:$n).prepare(:each, :en) pss << @ds.filter(:num=>:$n).prepare(:select, :sn) pss << @ds.filter(:num=>:$n).prepare([:map, :a], :sm) pss << @ds.filter(:num=>:$n).prepare([:as_hash, :a, :b], :ah) pss << @ds.filter(:num=>:$n).prepare([:to_hash, :a, :b], :sh) pss << @ds.filter(:num=>:$n).prepare([:to_hash_groups, :a, :b], :shg) pss << @ds.filter(:num=>:$n).prepare(:first, :fn) pss << @ds.filter(:num=>:$n).prepare(:single_value, :svn) pss << @ds.filter(:num=>:$n).prepare(:delete, :dn) pss << @ds.filter(:num=>:$n).prepare(:update, :un, :num=>:$n2) pss << @ds.prepare(:insert, :in, :num=>:$n) pss << @ds.prepare(:insert_pk, :inp, :num=>:$n) pss << @ds.prepare(:insert_select, :ins, :num=>:$n) @db.prepared_statements.keys.sort_by{|k| k.to_s}.must_equal [:ah, :dn, :en, :fn, :in, :inp, :ins, :sh, :shg, :sm, :sn, :svn, :un] [:en, :sn, :sm, :ah, :sh, :shg, :fn, :svn, :dn, :un, :in, :inp, :ins].each_with_index{|x, i| @db.prepared_statements[x].must_equal pss[i]} @db.call(:en, :n=>1){} 
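# Each prepared statement stored above is now executed by name via Database#call, with bind variables supplied at call time.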
@db.call(:sn, :n=>1) @db.call(:sm, :n=>1) @db.call(:ah, :n=>1) @db.call(:sh, :n=>1) @db.call(:shg, :n=>1) @db.call(:fn, :n=>1) @db.call(:svn, :n=>1) @db.call(:dn, :n=>1) @db.call(:un, :n=>1, :n2=>2) @db.call(:in, :n=>1) @db.call(:inp, :n=>1) @db.call(:ins, :n=>1) @db.sqls.must_equal [ 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1)', 'SELECT * FROM items WHERE (num = 1) LIMIT 1', 'SELECT * FROM items WHERE (num = 1) LIMIT 1', 'DELETE FROM items WHERE (num = 1)', 'UPDATE items SET num = 2 WHERE (num = 1)', 'INSERT INTO items (num) VALUES (1)', 'INSERT INTO items (num) VALUES (1)', 'INSERT INTO items (num) VALUES (1) RETURNING *'] end it "should give correct results for recursive WITH" do ps = @ds.with_extend{def supports_cte?(type=nil) true end}. select(Sequel[:i].as(:id), Sequel[:pi].as(:parent_id)). with_recursive(:t, @ds.filter(:parent_id=>:$n), @ds.join(:t, :i=>:parent_id).filter(Sequel[:t][:i]=>:parent_id). select(Sequel[:i1][:id], Sequel[:i1][:parent_id]), :args=>[:i, :pi]). order(:id). prepare(:select, :cte_sel) ps.call(:n=>1).must_equal [] @db.sqls.must_equal ["WITH t(i, pi) AS (SELECT * FROM items WHERE (parent_id = 1) UNION ALL SELECT i1.id, i1.parent_id FROM items INNER JOIN t ON (t.i = items.parent_id) WHERE (t.i = parent_id)) SELECT i AS id, pi AS parent_id FROM items ORDER BY id"] end it "#call and #prepare should handle returning" do @ds = @ds.with_extend do def supports_returning?(_) true end def insert_sql(*) super << " RETURNING *" end def update_sql(*) super << " RETURNING *" end def delete_sql; super << " RETURNING *" end end @ds = @ds.returning @ds.call(:insert, {:n=>1}, :num=>:$n) @ds.filter(:num=>:$n).call(:update, {:n=>1, :n2=>2}, :num=>:$n2) @ds.filter(:num=>:$n).call(:delete, :n=>1) @ds.prepare(:insert, :insert_rn, :num=>:$n).call(:n=>1) @ds.filter(:num=>:$n).prepare(:update, :update_rn, :num=>:$n2).call(:n=>1, :n2=>2) @ds.filter(:num=>:$n).prepare(:delete, :delete_rn).call(:n=>1) @db.sqls.must_equal([ 'INSERT INTO items (num) VALUES (1) RETURNING *', 'UPDATE items SET num = 2 WHERE (num = 1) RETURNING *', 'DELETE FROM items WHERE (num = 1) RETURNING *', ]*2) end it "Dataset#prepare with a delayed evaluation should raise an error" do proc{@ds.where(Sequel.delay{{:n=>1}}).prepare(:select, :select_n)}.must_raise Sequel::Error end it "Dataset#call with a delayed evaluation should work" do @ds.where(Sequel.delay{{:n=>1}}).call(:select).must_equal [] @db.sqls.must_equal ["SELECT * FROM items WHERE (n = 1)"] end it "PreparedStatement#prepare should raise an error" do ps = @ds.prepare(:select, :select_n) proc{ps.prepare(:select, :select_n2)}.must_raise Sequel::Error end it "PreparedStatement#prepare should not raise an error if preparing prepared statements is allowed" do ps = @ds.prepare(:select, :select_n).with_extend{def allow_preparing_prepared_statements?; true end} ps.prepare(:select, :select_n2).call @db.sqls.must_equal ["SELECT * FROM items"] end it "#call should raise Error if an invalid type is given" do proc{@ds.filter(:num=>:$n).call(:select_all, :n=>1)}.must_raise Sequel::Error end it "#inspect should indicate it is a prepared statement with the prepared SQL" do @ds.filter(:num=>:$n).prepare(:select, :sn).inspect.must_equal \ '<Sequel::Mock::Dataset/PreparedStatement "SELECT * FROM items WHERE (num = $n)">' end it "should handle literal strings" do @ds.filter(Sequel.lit("num = 
?", :$n)).call(:select, :n=>1) @db.sqls.must_equal ['SELECT * FROM items WHERE (num = 1)'] end it "should handle columns on prepared statements correctly" do @db.columns = [:num] @ds = @ds.with_extend{def select_where_sql(sql) super(sql); sql << " OR #{columns.first} = 1" if opts[:where] end} @ds.filter(:num=>:$n).prepare(:select, :sn).sql.must_equal 'SELECT * FROM items WHERE (num = $n) OR num = 1' @db.sqls.must_equal ['SELECT * FROM items LIMIT 0'] end it "should handle datasets using static sql and placeholders" do @db["SELECT * FROM items WHERE (num = ?)", :$n].call(:select, :n=>1) @db.sqls.must_equal ['SELECT * FROM items WHERE (num = 1)'] end it "should handle subselects" do @ds.filter(:$b).filter(:num=>@ds.select(:num).filter(:num=>:$n)).filter(:$c).call(:select, :n=>1, :b=>0, :c=>2) @db.sqls.must_equal ['SELECT * FROM items WHERE (0 AND (num IN (SELECT num FROM items WHERE (num = 1))) AND 2)'] end it "should handle subselects in subselects" do @ds.filter(:$b).filter(:num=>@ds.select(:num).filter(:num=>@ds.select(:num).filter(:num=>:$n))).call(:select, :n=>1, :b=>0) @db.sqls.must_equal ['SELECT * FROM items WHERE (0 AND (num IN (SELECT num FROM items WHERE (num IN (SELECT num FROM items WHERE (num = 1))))))'] end it "should handle subselects with literal strings" do @ds.filter(:$b).filter(:num=>@ds.select(:num).filter(Sequel.lit("num = ?", :$n))).call(:select, :n=>1, :b=>0) @db.sqls.must_equal ['SELECT * FROM items WHERE (0 AND (num IN (SELECT num FROM items WHERE (num = 1))))'] end it "should handle subselects with static sql and placeholders" do @ds.filter(:$b).filter(:num=>@db["SELECT num FROM items WHERE (num = ?)", :$n]).call(:select, :n=>1, :b=>0) @db.sqls.must_equal ['SELECT * FROM items WHERE (0 AND (num IN (SELECT num FROM items WHERE (num = 1))))'] end it "should handle usage with Dataset.prepared_statements_module without a block" do @ds = @ds.with_extend(Sequel::Dataset.send(:prepared_statements_module, :prepare_bind, [Sequel::Dataset::ArgumentMapper, Sequel::Dataset::PreparedStatementMethods])) @ds = @ds.clone(:prepared_statement_name => 'foo', :prepared_type=>:select) @ds.call(:a=>1) @db.sqls.must_equal ["foo"] end it "should handle usage with Dataset.prepared_statements_module" do @ds = @ds.with_extend(Sequel::Dataset.send(:prepared_statements_module, :prepare_bind, [Sequel::Dataset::ArgumentMapper, Sequel::Dataset::PreparedStatementMethods]){def foo; :bar; end}) @ds.foo.must_equal :bar @ds = @ds.clone(:prepared_statement_name => 'foo', :prepared_type=>:select) @ds.call(:a=>1) @db.sqls.must_equal ["foo"] end it "should support log_sql option" do @ds.prepare(:select, :foo).log_sql.must_be_nil @ds.clone(:log_sql=>true).prepare(:select, :foo).log_sql.must_equal true end end describe Sequel::Dataset::UnnumberedArgumentMapper do before do @db = Sequel.mock @ds = @db[:items].filter(:num=>:$n).with_extend do def execute(sql, opts={}, &block) super(sql, opts.merge({:arguments=>bind_arguments}), &block) end def execute_dui(sql, opts={}, &block) super(sql, opts.merge({:arguments=>bind_arguments}), &block) end def execute_insert(sql, opts={}, &block) super(sql, opts.merge({:arguments=>bind_arguments}), &block) end def prepared_statement_modules [Sequel::Dataset::UnnumberedArgumentMapper] end end @ps = [] @ps << @ds.prepare(:select, :s) @ps << @ds.prepare(:all, :a) @ps << @ds.prepare(:first, :f) @ps << @ds.prepare(:single_value, :sv) @ps << @ds.prepare(:delete, :d) @ps << @ds.prepare(:insert, :i, :num=>:$n) @ps << @ds.prepare(:update, :u, :num=>:$n) end it "#inspect should show 
the actual SQL submitted to the database" do @ps.first.inspect.must_equal '<Sequel::Mock::Dataset/PreparedStatement "SELECT * FROM items WHERE (num = ?)">' end it "should submit the SQL to the database with placeholders and bind variables" do @ps.each{|p| p.prepared_sql; p.call(:n=>1)} @db.sqls.must_equal ["SELECT * FROM items WHERE (num = ?) -- args: [1]", "SELECT * FROM items WHERE (num = ?) -- args: [1]", "SELECT * FROM items WHERE (num = ?) LIMIT 1 -- args: [1]", "SELECT * FROM items WHERE (num = ?) LIMIT 1 -- args: [1]", "DELETE FROM items WHERE (num = ?) -- args: [1]", "INSERT INTO items (num) VALUES (?) -- args: [1]", "UPDATE items SET num = ? WHERE (num = ?) -- args: [1, 1]"] end it "should raise Error for unrecognized statement types" do ps = @ds.prepare(:select_all, :s) ps = ps.with_extend(Sequel::Dataset::UnnumberedArgumentMapper) sql = ps.prepared_sql ps.prepared_sql.must_be_same_as(sql) proc{ps.call(:n=>1)}.must_raise Sequel::Error end end describe "Sequel::Dataset#server" do it "should set the server to use for the dataset" do @db = Sequel.mock(:servers=>{:s=>{}, :i=>{}, :d=>{}, :u=>{}}) @ds = @db[:items].server(:s) @ds.all @ds.server(:i).insert(:a=>1) @ds.server(:d).delete @ds.server(:u).update(:a=>Sequel.expr(:a)+1) @db.sqls.must_equal ['SELECT * FROM items -- s', 'INSERT INTO items (a) VALUES (1) -- i', 'DELETE FROM items -- d', 'UPDATE items SET a = (a + 1) -- u'] end end describe "Sequel::Dataset#each_server" do it "should yield a dataset for each server" do @db = Sequel.mock(:servers=>{:s=>{}, :i=>{}}) @ds = @db[:items] @ds.each_server do |ds| ds.must_be_kind_of(Sequel::Dataset) ds.wont_equal @ds ds.sql.must_equal @ds.sql ds.all end @db.sqls.sort.must_equal ['SELECT * FROM items', 'SELECT * FROM items -- i', 'SELECT * FROM items -- s'] end end describe "Sequel::Dataset#qualify" do before do @ds = Sequel.mock[:t] end it "should return self if raw SQL is used" do ds = @ds.with_sql('A') ds.qualify.must_be_same_as ds end it "should qualify to the table if one is given" do @ds.filter{a<b}.qualify(:e).sql.must_equal 'SELECT e.* FROM t WHERE (e.a < e.b)' end it "should handle the select, order, where, having, and group options/clauses" do @ds.select(:a).filter(:a=>1).order(:a).group(:a).having(:a).qualify.sql.must_equal 'SELECT t.a FROM t WHERE (t.a = 1) GROUP BY t.a HAVING t.a ORDER BY t.a' end it "should handle the select using a table.* if all columns are currently selected" do @ds.filter(:a=>1).order(:a).group(:a).having(:a).qualify.sql.must_equal 'SELECT t.* FROM t WHERE (t.a = 1) GROUP BY t.a HAVING t.a ORDER BY t.a' end it "should handle hashes in select option" do @ds.select(:a=>:b).qualify.sql.must_equal 'SELECT (t.a = t.b) FROM t' end it "should handle symbols" do @ds.select(:a).qualify.sql.must_equal 'SELECT t.a FROM t' end with_symbol_splitting "should handle splittable symbols" do @ds.select(:a, :b__c, :d___e, :f__g___h).qualify.sql.must_equal 'SELECT t.a, b.c, t.d AS e, f.g AS h FROM t' end it "should handle arrays" do @ds.filter(:a=>[:b, :c]).qualify.sql.must_equal 'SELECT t.* FROM t WHERE (t.a IN (t.b, t.c))' end it "should handle hashes" do @ds.select(Sequel.case({:b=>{:c=>1}}, false)).qualify.sql.must_equal "SELECT (CASE WHEN t.b THEN (t.c = 1) ELSE 'f' END) FROM t" end it "should handle SQL::Identifiers" do @ds.select{a}.qualify.sql.must_equal 'SELECT t.a FROM t' end it "should handle SQL::OrderedExpressions" do @ds.order(Sequel.desc(:a), Sequel.asc(:b)).qualify.sql.must_equal 'SELECT t.* FROM t ORDER BY t.a DESC, t.b ASC' end it "should handle 
SQL::AliasedExpressions" do @ds.select(Sequel.expr(:a).as(:b)).qualify.sql.must_equal 'SELECT t.a AS b FROM t' @ds.select(Sequel.expr(:a).as(:b, [:c, :d])).qualify.sql.must_equal 'SELECT t.a AS b(c, d) FROM t' end it "should handle SQL::CaseExpressions" do @ds.filter{Sequel.case({a=>b}, c, d)}.qualify.sql.must_equal 'SELECT t.* FROM t WHERE (CASE t.d WHEN t.a THEN t.b ELSE t.c END)' end it "should handle SQL:Casts" do @ds.filter{a.cast(:boolean)}.qualify.sql.must_equal 'SELECT t.* FROM t WHERE CAST(t.a AS boolean)' end it "should handle SQL::Functions" do @ds.filter{a(b, 1)}.qualify.sql.must_equal 'SELECT t.* FROM t WHERE a(t.b, 1)' end it "should handle SQL::ComplexExpressions" do @ds.filter{(a+b)<(c-3)}.qualify.sql.must_equal 'SELECT t.* FROM t WHERE ((t.a + t.b) < (t.c - 3))' end it "should handle SQL::ValueLists" do @ds.filter(:a=>Sequel.value_list([:b, :c])).qualify.sql.must_equal 'SELECT t.* FROM t WHERE (t.a IN (t.b, t.c))' end it "should handle SQL::Subscripts" do @ds.filter{a.sql_subscript(b,3)}.qualify.sql.must_equal 'SELECT t.* FROM t WHERE t.a[t.b, 3]' end it "should handle SQL::PlaceholderLiteralStrings" do @ds.filter(Sequel.lit('? > ?', :a, 1)).qualify.sql.must_equal 'SELECT t.* FROM t WHERE (t.a > 1)' end it "should handle SQL::PlaceholderLiteralStrings with named placeholders" do @ds.filter(Sequel.lit(':a > :b', :a=>:c, :b=>1)).qualify.sql.must_equal 'SELECT t.* FROM t WHERE (t.c > 1)' end it "should handle SQL::Wrappers" do @ds.filter(Sequel::SQL::Wrapper.new(:a)).qualify.sql.must_equal 'SELECT t.* FROM t WHERE t.a' end it "should handle SQL::Functions with windows" do @ds = @ds.with_extend{def supports_window_functions?; true end} @ds.select{sum(:a).over(:partition=>:b, :order=>:c)}.qualify.sql.must_equal 'SELECT sum(t.a) OVER (PARTITION BY t.b ORDER BY t.c) FROM t' @ds.select{sum(:a).over(:partition=>:b)}.qualify.sql.must_equal 'SELECT sum(t.a) OVER (PARTITION BY t.b) FROM t' @ds.select{sum(:a).over(:order=>:c)}.qualify.sql.must_equal 'SELECT sum(t.a) OVER (ORDER BY t.c) FROM t' end it "should handle SQL::Functions with orders" do @ds.select{sum(:a).order(:a)}.qualify.sql.must_equal 'SELECT sum(t.a ORDER BY t.a) FROM t' end it "should handle Sequel.extract" do @ds.select(Sequel.extract(:year, :d)).qualify.sql.must_equal 'SELECT extract(year FROM t.d) FROM t' end it "should handle SQL::DelayedEvaluation" do t = :a ds = @ds.filter(Sequel.delay{t}).qualify ds.sql.must_equal 'SELECT t.* FROM t WHERE t.a' t = :b ds.sql.must_equal 'SELECT t.* FROM t WHERE t.b' end it "should handle SQL::DelayedEvaluations that take dataset arguments" do ds = @ds.filter(Sequel.delay{|x| x.first_source}).qualify ds.sql.must_equal 'SELECT t.* FROM t WHERE t.t' end it "should handle all other objects by returning them unchanged" do @ds.select("a").filter{a(3)}.filter(Sequel.lit('blah')).order(Sequel.lit('true')).group(Sequel.lit('a > ?', 1)).having(false).qualify.sql.must_equal "SELECT 'a' FROM t WHERE (a(3) AND (blah)) GROUP BY a > 1 HAVING 'f' ORDER BY true" end end describe "Dataset#with and #with_recursive" do before do @db = Sequel.mock @ds = @db[:t].with_extend{def supports_cte?(*) true end} end it "#with should take a name and dataset and use a WITH clause" do @ds.with(:t, @db[:x]).sql.must_equal 'WITH t AS (SELECT * FROM x) SELECT * FROM t' end it "#with should support materialized CTEs" do @ds.with(:t, @db[:x], :materialized=>true).sql.must_equal 'WITH t AS MATERIALIZED (SELECT * FROM x) SELECT * FROM t' end it "#with should support not materialized CTEs" do @ds.with(:t, @db[:x], 
:materialized=>false).sql.must_equal 'WITH t AS NOT MATERIALIZED (SELECT * FROM x) SELECT * FROM t' end it "#with_recursive should take a name, nonrecursive dataset, and recursive dataset, and use a WITH clause" do @ds.with_recursive(:t, @db[:x], @db[:t]).sql.must_equal 'WITH t AS (SELECT * FROM x UNION ALL SELECT * FROM t) SELECT * FROM t' end it "#with and #with_recursive should add to existing WITH clause if called multiple times" do @ds.with(:t, @db[:x]).with(:j, @db[:y]).sql.must_equal 'WITH t AS (SELECT * FROM x), j AS (SELECT * FROM y) SELECT * FROM t' @ds.with_recursive(:t, @db[:x], @db[:t]).with_recursive(:j, @db[:y], @db[:j]).sql.must_equal 'WITH t AS (SELECT * FROM x UNION ALL SELECT * FROM t), j AS (SELECT * FROM y UNION ALL SELECT * FROM j) SELECT * FROM t' @ds.with(:t, @db[:x]).with_recursive(:j, @db[:y], @db[:j]).sql.must_equal 'WITH t AS (SELECT * FROM x), j AS (SELECT * FROM y UNION ALL SELECT * FROM j) SELECT * FROM t' end it "#with and #with_recursive should take an :args option" do @ds.with(:t, @db[:x], :args=>[:b]).sql.must_equal 'WITH t(b) AS (SELECT * FROM x) SELECT * FROM t' @ds.with_recursive(:t, @db[:x], @db[:t], :args=>[:b, :c]).sql.must_equal 'WITH t(b, c) AS (SELECT * FROM x UNION ALL SELECT * FROM t) SELECT * FROM t' end it "#with and #with_recursive should quote the columns in the :args option" do @ds = @ds.with_quote_identifiers(true) @ds.with(:t, @db[:x], :args=>[:b]).sql.must_equal 'WITH "t"("b") AS (SELECT * FROM x) SELECT * FROM "t"' @ds.with_recursive(:t, @db[:x], @db[:t], :args=>[:b, :c]).sql.must_equal 'WITH "t"("b", "c") AS (SELECT * FROM x UNION ALL SELECT * FROM t) SELECT * FROM "t"' end it "#with_recursive should take a :union_all=>false option" do @ds.with_recursive(:t, @db[:x], @db[:t], :union_all=>false).sql.must_equal 'WITH t AS (SELECT * FROM x UNION SELECT * FROM t) SELECT * FROM t' end it "#with and #with_recursive should raise an error unless the dataset supports CTEs" do @ds = @ds.with_extend{def supports_cte?; false end} proc{@ds.with(:t, @db[:x], :args=>[:b])}.must_raise(Sequel::Error) proc{@ds.with_recursive(:t, @db[:x], @db[:t], :args=>[:b, :c])}.must_raise(Sequel::Error) end it "#with should work on insert, update, and delete statements if they support it" do @ds = @ds.with_extend do Sequel::Dataset.def_sql_method(self, :delete, %w'with delete from where') Sequel::Dataset.def_sql_method(self, :insert, %w'with insert into columns values') Sequel::Dataset.def_sql_method(self, :update, %w'with update table set where') end @ds.with(:t, @db[:x]).insert_sql(1).must_equal 'WITH t AS (SELECT * FROM x) INSERT INTO t VALUES (1)' @ds.with(:t, @db[:x]).update_sql(:foo=>1).must_equal 'WITH t AS (SELECT * FROM x) UPDATE t SET foo = 1' @ds.with(:t, @db[:x]).delete_sql.must_equal 'WITH t AS (SELECT * FROM x) DELETE FROM t' end it "should hoist WITH clauses in given dataset(s) if dataset doesn't support WITH in subselect" do @ds = @ds.with_extend do def supports_cte?; true end def supports_cte_in_subselect?; false end end @ds.with(:t, @ds.from(:s).with(:s, @ds.from(:r))).sql.must_equal 'WITH s AS (SELECT * FROM r), t AS (SELECT * FROM s) SELECT * FROM t' @ds.with_recursive(:t, @ds.from(:s).with(:s, @ds.from(:r)), @ds.from(:q).with(:q, @ds.from(:p))).sql.must_equal 'WITH s AS (SELECT * FROM r), q AS (SELECT * FROM p), t AS (SELECT * FROM s UNION ALL SELECT * FROM q) SELECT * FROM t' end end describe "Dataset#window" do before do @db = Sequel.mock @ds = @db[:t].with_extend do Sequel::Dataset.def_sql_method(self, :select, %w'select columns from
window') def supports_window_clause?; true end def supports_window_functions?; true end end end it "should not support window clause by default" do @db.dataset.supports_window_clause?.must_equal false end it "should take a name and hash of window options" do ds = @ds.window(:w, :partition=>:a, :order=>:b) ds.sql.must_equal 'SELECT * FROM t WINDOW w AS (PARTITION BY a ORDER BY b)' ds.window(:w2, :partition=>:c, :order=>:d).sql.must_equal 'SELECT * FROM t WINDOW w AS (PARTITION BY a ORDER BY b), w2 AS (PARTITION BY c ORDER BY d)' end end describe Sequel::SQL::Constants do before do @db = Sequel::Database.new end it "should have CURRENT_DATE" do @db.literal(Sequel::SQL::Constants::CURRENT_DATE).must_equal 'CURRENT_DATE' @db.literal(Sequel::CURRENT_DATE).must_equal 'CURRENT_DATE' end it "should have CURRENT_TIME" do @db.literal(Sequel::SQL::Constants::CURRENT_TIME).must_equal 'CURRENT_TIME' @db.literal(Sequel::CURRENT_TIME).must_equal 'CURRENT_TIME' end it "should have CURRENT_TIMESTAMP" do @db.literal(Sequel::SQL::Constants::CURRENT_TIMESTAMP).must_equal 'CURRENT_TIMESTAMP' @db.literal(Sequel::CURRENT_TIMESTAMP).must_equal 'CURRENT_TIMESTAMP' end it "should have NULL" do @db.literal(Sequel::SQL::Constants::NULL).must_equal 'NULL' @db.literal(Sequel::NULL).must_equal 'NULL' end it "should have NOTNULL" do @db.literal(Sequel::SQL::Constants::NOTNULL).must_equal 'NOT NULL' @db.literal(Sequel::NOTNULL).must_equal 'NOT NULL' end it "should have TRUE and SQLTRUE" do @db.literal(Sequel::SQL::Constants::TRUE).must_equal "'t'" @db.literal(Sequel::TRUE).must_equal "'t'" @db.literal(Sequel::SQL::Constants::SQLTRUE).must_equal "'t'" @db.literal(Sequel::SQLTRUE).must_equal "'t'" end it "should have FALSE and SQLFALSE" do @db.literal(Sequel::SQL::Constants::FALSE).must_equal "'f'" @db.literal(Sequel::FALSE).must_equal "'f'" @db.literal(Sequel::SQL::Constants::SQLFALSE).must_equal "'f'" @db.literal(Sequel::SQLFALSE).must_equal "'f'" end end describe "Sequel timezone support" do before do @db = Sequel::Database.new @dataset = @db.dataset.with_extend do def supports_timestamp_timezones?; true end def supports_timestamp_usecs?; false end end @utc_time = Time.utc(2010, 1, 2, 3, 4, 5) @local_time = Time.local(2010, 1, 2, 3, 4, 5) @offset = sprintf("%+03i%02i", *(@local_time.utc_offset/60).divmod(60)) @dt_offset = @local_time.utc_offset/Rational(86400, 1) @utc_datetime = DateTime.new(2010, 1, 2, 3, 4, 5) @local_datetime = DateTime.new(2010, 1, 2, 3, 4, 5, @dt_offset) end after do Sequel.default_timezone = nil Sequel.datetime_class = Time end it "should handle a database timezone of :utc when literalizing values" do Sequel.database_timezone = :utc @dataset.literal(Time.utc(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05+0000'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05+0000'" end it "should handle a database timezone of :local when literalizing values" do Sequel.database_timezone = :local @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05#{@offset}'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, 5, @dt_offset)).must_equal "'2010-01-02 03:04:05#{@offset}'" end it "should have Database#timezone override Sequel.database_timezone" do Sequel.database_timezone = :local @db.timezone = :utc @dataset.literal(Time.utc(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05+0000'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05+0000'" Sequel.database_timezone = :utc @db.timezone = :local
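# Reversed configuration: with the process-wide default now :utc, the per-Database :local timezone should still take precedence when literalizing.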
@dataset.literal(Time.local(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05#{@offset}'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, 5, @dt_offset)).must_equal "'2010-01-02 03:04:05#{@offset}'" end it "should handle converting database timestamps into application timestamps" do Sequel.database_timezone = :utc Sequel.application_timezone = :local t = Time.now.utc Sequel.database_to_application_timestamp(t).to_s.must_equal t.getlocal.to_s Sequel.database_to_application_timestamp(t.to_s).to_s.must_equal t.getlocal.to_s Sequel.database_to_application_timestamp(t.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal t.getlocal.to_s Sequel.datetime_class = DateTime dt = DateTime.now dt2 = dt.new_offset(0) Sequel.database_to_application_timestamp(dt2).to_s.must_equal dt.to_s Sequel.database_to_application_timestamp(dt2.to_s).to_s.must_equal dt.to_s Sequel.database_to_application_timestamp(dt2.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal dt.to_s Sequel.datetime_class = Time Sequel.database_timezone = :local Sequel.application_timezone = :utc Sequel.database_to_application_timestamp(t.getlocal).to_s.must_equal t.to_s Sequel.database_to_application_timestamp(t.getlocal.to_s).to_s.must_equal t.to_s Sequel.database_to_application_timestamp(t.getlocal.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal t.to_s Sequel.datetime_class = DateTime Sequel.database_to_application_timestamp(dt).to_s.must_equal dt2.to_s Sequel.database_to_application_timestamp(dt.to_s).to_s.must_equal dt2.to_s Sequel.database_to_application_timestamp(dt.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal dt2.to_s end it "should handle typecasting timestamp columns" do Sequel.typecast_timezone = :utc Sequel.application_timezone = :local t = Time.now.utc @db.typecast_value(:datetime, t).to_s.must_equal t.getlocal.to_s @db.typecast_value(:datetime, t.to_s).to_s.must_equal t.getlocal.to_s @db.typecast_value(:datetime, t.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal t.getlocal.to_s Sequel.datetime_class = DateTime dt = DateTime.now dt2 = dt.new_offset(0) @db.typecast_value(:datetime, dt2).to_s.must_equal dt.to_s @db.typecast_value(:datetime, dt2.to_s).to_s.must_equal dt.to_s @db.typecast_value(:datetime, dt2.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal dt.to_s Sequel.datetime_class = Time Sequel.typecast_timezone = :local Sequel.application_timezone = :utc @db.typecast_value(:datetime, t.getlocal).to_s.must_equal t.to_s @db.typecast_value(:datetime, t.getlocal.to_s).to_s.must_equal t.to_s @db.typecast_value(:datetime, t.getlocal.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal t.to_s Sequel.datetime_class = DateTime @db.typecast_value(:datetime, dt).to_s.must_equal dt2.to_s @db.typecast_value(:datetime, dt.to_s).to_s.must_equal dt2.to_s @db.typecast_value(:datetime, dt.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal dt2.to_s end it "should handle converting database timestamp columns from an array of values" do Sequel.database_timezone = :utc Sequel.application_timezone = :local t = Time.now.utc Sequel.database_to_application_timestamp([t.year, t.mon, t.day, t.hour, t.min, t.sec]).to_s.must_equal t.getlocal.to_s Sequel.datetime_class = DateTime dt = DateTime.now dt2 = dt.new_offset(0) Sequel.database_to_application_timestamp([dt2.year, dt2.mon, dt2.day, dt2.hour, dt2.min, dt2.sec]).to_s.must_equal dt.to_s Sequel.datetime_class = Time Sequel.database_timezone = :local Sequel.application_timezone = :utc t = t.getlocal Sequel.database_to_application_timestamp([t.year, t.mon, t.day, t.hour, t.min, t.sec]).to_s.must_equal t.getutc.to_s 
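# Repeat the array-based conversion with DateTime as the datetime class, covering the local-to-UTC direction for both supported classes.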
Sequel.datetime_class = DateTime Sequel.database_to_application_timestamp([dt.year, dt.mon, dt.day, dt.hour, dt.min, dt.sec]).to_s.must_equal dt2.to_s end it "should raise an InvalidValue error when an error occurs while converting a timestamp" do proc{Sequel.database_to_application_timestamp([0, 0, 0, 0, 0, 0])}.must_raise(Sequel::InvalidValue) end it "should raise an error when attempting to typecast to a timestamp from an unsupported type" do proc{Sequel.database_to_application_timestamp(Object.new)}.must_raise(Sequel::InvalidValue) end it "should raise an InvalidValue error when the Time class is used and a bad application timezone is used when attempting to convert timestamps" do Sequel.application_timezone = :blah proc{Sequel.database_to_application_timestamp('2009-06-01 10:20:30')}.must_raise(Sequel::InvalidValue) end it "should raise an InvalidValue error when the Time class is used and a bad database timezone is used when attempting to convert timestamps" do Sequel.database_timezone = :blah proc{Sequel.database_to_application_timestamp('2009-06-01 10:20:30')}.must_raise(Sequel::InvalidValue) end it "should raise an InvalidValue error when the DateTime class is used and a bad application timezone is used when attempting to convert timestamps" do Sequel.application_timezone = :blah Sequel.datetime_class = DateTime proc{Sequel.database_to_application_timestamp('2009-06-01 10:20:30')}.must_raise(Sequel::InvalidValue) end it "should raise an InvalidValue error when the DateTime class is used and a bad database timezone is used when attempting to convert timestamps" do Sequel.database_timezone = :blah Sequel.datetime_class = DateTime proc{Sequel.database_to_application_timestamp('2009-06-01 10:20:30')}.must_raise(Sequel::InvalidValue) end it "should have Sequel.default_timezone= set all other timezones" do Sequel.database_timezone.must_be_nil Sequel.application_timezone.must_be_nil Sequel.typecast_timezone.must_be_nil Sequel.default_timezone = :utc Sequel.database_timezone.must_equal :utc Sequel.application_timezone.must_equal :utc Sequel.typecast_timezone.must_equal :utc end end describe "Sequel::Dataset#select_map" do before do @ds = Sequel.mock(:fetch=>[{:c=>1}, {:c=>2}])[:t] end it "should do select and map in one step" do @ds.select_map(:a).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a FROM t'] end it "should handle qualified identifiers in arguments" do @ds.select_map(Sequel[:a][:b]).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a.b FROM t'] end with_symbol_splitting "should handle implicit qualifiers in arguments" do @ds.select_map(:a__b).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a.b FROM t'] end it "should raise if multiple arguments and can't determine alias" do proc{@ds.select_map([Sequel.function(:a), :b])}.must_raise(Sequel::Error) proc{@ds.select_map(Sequel.function(:a)){b}}.must_raise(Sequel::Error) proc{@ds.select_map{[a.function, b]}}.must_raise(Sequel::Error) end with_symbol_splitting "should handle implicit aliases in arguments" do @ds.select_map(:a___b).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a AS b FROM t'] end it "should handle aliased expressions in arguments" do @ds.select_map(Sequel[:a].as(:b)).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a AS b FROM t'] end it "should handle other objects" do @ds.select_map(Sequel.lit("a").as(:b)).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a AS b FROM t'] end it "should handle identifiers with strings" do @ds.select_map([Sequel::SQL::Identifier.new('c'),
:c]).must_equal [[1, 1], [2, 2]] @ds.db.sqls.must_equal ['SELECT c, c FROM t'] end it "should raise an error for plain strings" do proc{@ds.select_map(['c', :c])}.must_raise(Sequel::Error) @ds.db.sqls.must_equal [] end it "should handle an expression without a determinable alias" do @ds.select_map{a(t[c])}.must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a(t.c) AS v FROM t'] end it "should accept a block" do @ds.select_map{a(t[c]).as(b)}.must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a(t.c) AS b FROM t'] end it "should accept a block with an array of columns" do @ds.select_map{[a(t[c]).as(c), a(t[c]).as(c)]}.must_equal [[1, 1], [2, 2]] @ds.db.sqls.must_equal ['SELECT a(t.c) AS c, a(t.c) AS c FROM t'] end it "should accept a block with a column" do @ds.select_map(:c){a(t[c]).as(c)}.must_equal [[1, 1], [2, 2]] @ds.db.sqls.must_equal ['SELECT c, a(t.c) AS c FROM t'] end it "should accept a block and array of arguments" do @ds.select_map([:c, :c]){[a(t[c]).as(c), a(t[c]).as(c)]}.must_equal [[1, 1, 1, 1], [2, 2, 2, 2]] @ds.db.sqls.must_equal ['SELECT c, c, a(t.c) AS c, a(t.c) AS c FROM t'] end it "should handle an array of columns" do @ds.select_map([:c, :c]).must_equal [[1, 1], [2, 2]] @ds.db.sqls.must_equal ['SELECT c, c FROM t'] @ds.select_map([Sequel.expr(:d).as(:c), Sequel.qualify(:b, :c), Sequel.identifier(:c), Sequel.identifier(:c).qualify(:b)]).must_equal [[1, 1, 1, 1], [2, 2, 2, 2]] @ds.db.sqls.must_equal ['SELECT d AS c, b.c, c, b.c FROM t'] end with_symbol_splitting "should handle an array of columns with splittable symbols" do @ds.select_map([:a__c, :a__d___c]).must_equal [[1, 1], [2, 2]] @ds.db.sqls.must_equal ['SELECT a.c, a.d AS c FROM t'] end it "should handle an array with a single element" do @ds.select_map([:c]).must_equal [[1], [2]] @ds.db.sqls.must_equal ['SELECT c FROM t'] end end describe "Sequel::Dataset#select_order_map" do before do @ds = Sequel.mock(:fetch=>[{:c=>1}, {:c=>2}])[:t] end it "should do select and map in one step" do @ds.select_order_map(:a).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a FROM t ORDER BY a'] end it "should handle qualified identifiers in arguments" do @ds.select_order_map(Sequel[:a][:b]).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a.b FROM t ORDER BY a.b'] end with_symbol_splitting "should handle implicit qualifiers in arguments" do @ds.select_order_map(:a__b).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a.b FROM t ORDER BY a.b'] end it "should raise if multiple arguments and can't determine alias" do proc{@ds.select_order_map([Sequel.function(:a), :b])}.must_raise(Sequel::Error) proc{@ds.select_order_map(Sequel.function(:a)){b}}.must_raise(Sequel::Error) proc{@ds.select_order_map{[a.function, b]}}.must_raise(Sequel::Error) end with_symbol_splitting "should handle implicit aliases in arguments" do @ds.select_order_map(:a___b).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a AS b FROM t ORDER BY a'] end with_symbol_splitting "should handle implicit qualifiers and aliases in arguments" do @ds.select_order_map(:t__a___b).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT t.a AS b FROM t ORDER BY t.a'] end it "should handle AliasedExpressions" do @ds.select_order_map(Sequel.lit("a").as(:b)).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a AS b FROM t ORDER BY a'] @ds.select_order_map(Sequel[:a].as(:b)).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a AS b FROM t ORDER BY a'] @ds.select_order_map(Sequel[:t][:a].as(:b)).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT t.a AS b FROM t ORDER BY t.a'] end it "should 
handle OrderedExpressions" do @ds.select_order_map(Sequel.desc(:a)).must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a FROM t ORDER BY a DESC'] end it "should handle an expression without a determinable alias" do @ds.select_order_map{a(t[c])}.must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a(t.c) AS v FROM t ORDER BY a(t.c)'] end it "should accept a block" do @ds.select_order_map{a(t[c]).as(b)}.must_equal [1, 2] @ds.db.sqls.must_equal ['SELECT a(t.c) AS b FROM t ORDER BY a(t.c)'] end it "should accept a block with an array of columns" do @ds.select_order_map{[c.desc, a(t[c]).as(c)]}.must_equal [[1, 1], [2, 2]] @ds.db.sqls.must_equal ['SELECT c, a(t.c) AS c FROM t ORDER BY c DESC, a(t.c)'] end it "should accept a block with a column" do @ds.select_order_map(:c){a(t[c]).as(c)}.must_equal [[1, 1], [2, 2]] @ds.db.sqls.must_equal ['SELECT c, a(t.c) AS c FROM t ORDER BY c, a(t.c)'] end it "should accept a block and array of arguments" do @ds.select_order_map([:c, :c]){[a(t[c]).as(c), c.desc]}.must_equal [[1, 1, 1, 1], [2, 2, 2, 2]] @ds.db.sqls.must_equal ['SELECT c, c, a(t.c) AS c, c FROM t ORDER BY c, c, a(t.c), c DESC'] end it "should handle an array of columns" do @ds.select_order_map([:c, :c]).must_equal [[1, 1], [2, 2]] @ds.db.sqls.must_equal ['SELECT c, c FROM t ORDER BY c, c'] end it "should handle an array of columns" do @ds.select_order_map([Sequel.expr(:d).as(:c), Sequel.qualify(:b, :c), Sequel.identifier(:c), Sequel.identifier(:c).qualify(:b), Sequel.identifier(:c).qualify(:b).desc]).must_equal [[1, 1, 1, 1, 1], [2, 2, 2, 2, 2]] @ds.db.sqls.must_equal ['SELECT d AS c, b.c, c, b.c, b.c FROM t ORDER BY d, b.c, c, b.c, b.c DESC'] end with_symbol_splitting "should handle an array of columns with splittable symbols" do @ds.select_order_map([:a__c, Sequel.desc(:a__d___c), Sequel.desc(Sequel.expr(:a__d___c))]).must_equal [[1, 1, 1], [2, 2, 2]] @ds.db.sqls.must_equal ['SELECT a.c, a.d AS c, a.d AS c FROM t ORDER BY a.c, a.d DESC, a.d DESC'] end it "should handle an array with a single element" do @ds.select_order_map([:c]).must_equal [[1], [2]] @ds.db.sqls.must_equal ['SELECT c FROM t ORDER BY c'] end end describe "Sequel::Dataset#select_hash" do before do @db = Sequel.mock(:fetch=>[{:a=>1, :b=>2}, {:a=>3, :b=>4}]) @ds = @db[:t] end it "should do select and to_hash in one step" do @ds.select_hash(:a, :b).must_equal(1=>2, 3=>4) @ds.db.sqls.must_equal ['SELECT a, b FROM t'] end it "should handle qualified identifiers in arguments" do @ds.select_hash(Sequel[:t][:a], Sequel[:t][:b]).must_equal(1=>2, 3=>4) @ds.db.sqls.must_equal ['SELECT t.a, t.b FROM t'] end with_symbol_splitting "should handle implicit qualifiers in arguments" do @ds.select_hash(:t__a, :t__b).must_equal(1=>2, 3=>4) @ds.db.sqls.must_equal ['SELECT t.a, t.b FROM t'] end it "should handle aliased expresssions in arguments" do @ds.select_hash(Sequel[:c].as(:a), Sequel[:d].as(:b)).must_equal(1=>2, 3=>4) @ds.db.sqls.must_equal ['SELECT c AS a, d AS b FROM t'] end with_symbol_splitting "should handle implicit aliases in arguments" do @ds.select_hash(:c___a, :d___b).must_equal(1=>2, 3=>4) @ds.db.sqls.must_equal ['SELECT c AS a, d AS b FROM t'] end it "should handle qualified identifiers and aliased expressions in arguments" do @ds.select_hash(Sequel[:t][:c].as(:a), Sequel[:t][:d].as(:b)).must_equal(1=>2, 3=>4) @ds.db.sqls.must_equal ['SELECT t.c AS a, t.d AS b FROM t'] end with_symbol_splitting "should handle implicit qualifiers and aliases in arguments" do @ds.select_hash(:t__c___a, :t__d___b).must_equal(1=>2, 3=>4) 
@ds.db.sqls.must_equal ['SELECT t.c AS a, t.d AS b FROM t'] end it "should handle SQL::Identifiers in arguments" do @ds.select_hash(Sequel.identifier(:a), Sequel.identifier(:b)).must_equal(1=>2, 3=>4) @ds.db.sqls.must_equal ['SELECT a, b FROM t'] end it "should handle SQL::QualifiedIdentifiers in arguments" do @ds.select_hash(Sequel.qualify(:t, :a), Sequel.identifier(:b).qualify(:t)).must_equal(1=>2, 3=>4) @ds.db.sqls.must_equal ['SELECT t.a, t.b FROM t'] end it "should handle SQL::AliasedExpressions in arguments" do @ds.select_hash(Sequel.expr(:c).as(:a), Sequel.expr(:t).as(:b)).must_equal(1=>2, 3=>4) @ds.db.sqls.must_equal ['SELECT c AS a, t AS b FROM t'] end it "should work with arrays of columns" do @db.fetch = [{:a=>1, :b=>2, :c=>3}, {:a=>4, :b=>5, :c=>6}] @ds.select_hash([:a, :c], :b).must_equal([1, 3]=>2, [4, 6]=>5) @ds.db.sqls.must_equal ['SELECT a, c, b FROM t'] @ds.select_hash(:a, [:b, :c]).must_equal(1=>[2, 3], 4=>[5, 6]) @ds.db.sqls.must_equal ['SELECT a, b, c FROM t'] @ds.select_hash([:a, :b], [:b, :c]).must_equal([1, 2]=>[2, 3], [4, 5]=>[5, 6]) @ds.db.sqls.must_equal ['SELECT a, b, b, c FROM t'] end it "should raise an error if the resulting symbol cannot be determined" do proc{@ds.select_hash(Sequel.expr(:c).as(:a), Sequel.function(:b))}.must_raise(Sequel::Error) end end describe "Sequel::Dataset#select_hash_groups" do before do @db = Sequel.mock(:fetch=>[{:a=>1, :b=>2}, {:a=>3, :b=>4}]) @ds = @db[:t] end it "should do select and to_hash_groups in one step" do @ds.select_hash_groups(:a, :b).must_equal(1=>[2], 3=>[4]) @ds.db.sqls.must_equal ['SELECT a, b FROM t'] end with_symbol_splitting "should handle implicit qualifiers in arguments" do @ds.select_hash_groups(:t__a, :t__b).must_equal(1=>[2], 3=>[4]) @ds.db.sqls.must_equal ['SELECT t.a, t.b FROM t'] end with_symbol_splitting "should handle implicit aliases in arguments" do @ds.select_hash_groups(:c___a, :d___b).must_equal(1=>[2], 3=>[4]) @ds.db.sqls.must_equal ['SELECT c AS a, d AS b FROM t'] end with_symbol_splitting "should handle implicit qualifiers and aliases in arguments" do @ds.select_hash_groups(:t__c___a, :t__d___b).must_equal(1=>[2], 3=>[4]) @ds.db.sqls.must_equal ['SELECT t.c AS a, t.d AS b FROM t'] end it "should handle SQL::Identifiers in arguments" do @ds.select_hash_groups(Sequel.identifier(:a), Sequel.identifier(:b)).must_equal(1=>[2], 3=>[4]) @ds.db.sqls.must_equal ['SELECT a, b FROM t'] end it "should handle SQL::QualifiedIdentifiers in arguments" do @ds.select_hash_groups(Sequel.qualify(:t, :a), Sequel.identifier(:b).qualify(:t)).must_equal(1=>[2], 3=>[4]) @ds.db.sqls.must_equal ['SELECT t.a, t.b FROM t'] end it "should handle SQL::AliasedExpressions in arguments" do @ds.select_hash_groups(Sequel.expr(:c).as(:a), Sequel.expr(:t).as(:b)).must_equal(1=>[2], 3=>[4]) @ds.db.sqls.must_equal ['SELECT c AS a, t AS b FROM t'] end it "should handle SQL::QualifiedIdentifiers and SQL::AliasedExpressions in arguments" do @ds.select_hash_groups(Sequel[:t][:c].as(:a), Sequel[:t][:d].as(:b)).must_equal(1=>[2], 3=>[4]) @ds.db.sqls.must_equal ['SELECT t.c AS a, t.d AS b FROM t'] end it "should work with arrays of columns" do @db.fetch = [{:a=>1, :b=>2, :c=>3}, {:a=>4, :b=>5, :c=>6}] @ds.select_hash_groups([:a, :c], :b).must_equal([1, 3]=>[2], [4, 6]=>[5]) @ds.db.sqls.must_equal ['SELECT a, c, b FROM t'] @ds.select_hash_groups(:a, [:b, :c]).must_equal(1=>[[2, 3]], 4=>[[5, 6]]) @ds.db.sqls.must_equal ['SELECT a, b, c FROM t'] @ds.select_hash_groups([:a, :b], [:b, :c]).must_equal([1, 2]=>[[2, 3]], [4, 5]=>[[5, 6]])
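# Array keys and array values can be combined, yielding composite hash keys that map to arrays of row-value arrays.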
@ds.db.sqls.must_equal ['SELECT a, b, b, c FROM t'] end it "should raise an error if the resulting symbol cannot be determined" do proc{@ds.select_hash_groups(Sequel.expr(:c).as(:a), Sequel.function(:b))}.must_raise(Sequel::Error) end end describe "Modifying joined datasets" do before do @ds2 = Sequel.mock.from(:b, :c).join(:d, [:id]).where(:id => 2) @ds = @ds2.with_extend{def supports_modifying_joins?; true end} end it "should not allow inserting into joined datasets" do proc{@ds.insert(:a=>1)}.must_raise Sequel::InvalidOperation proc{@ds2.insert(:a=>1)}.must_raise Sequel::InvalidOperation end it "should not allow truncating joined datasets" do proc{@ds.truncate}.must_raise Sequel::InvalidOperation proc{@ds2.truncate}.must_raise Sequel::InvalidOperation end it "should allow deleting from joined datasets if supported" do @ds.delete @ds.db.sqls.must_equal ['DELETE FROM b, c WHERE (id = 2)'] proc{@ds2.delete}.must_raise Sequel::InvalidOperation @ds.db.sqls.must_equal [] end it "should allow updating joined datasets if supported" do @ds.update(:a=>1) @ds.db.sqls.must_equal ['UPDATE b, c INNER JOIN d USING (id) SET a = 1 WHERE (id = 2)'] proc{@ds2.update(:a=>1)}.must_raise Sequel::InvalidOperation @ds.db.sqls.must_equal [] end deprecated "should have check_modification_allowed! private method" do @ds.send(:check_modification_allowed!).must_be_nil proc{@ds2.send(:check_modification_allowed!)}.must_raise Sequel::InvalidOperation end end describe "Dataset#lock_style and for_update" do before do @ds = Sequel.mock.dataset.from(:t) end it "#for_update should use FOR UPDATE" do 3.times do @ds.for_update.sql.must_equal "SELECT * FROM t FOR UPDATE" end end it "#lock_style should accept symbols" do @ds.lock_style(:update).sql.must_equal "SELECT * FROM t FOR UPDATE" end it "#lock_style should accept strings for arbitrary SQL" do @ds.lock_style("FOR SHARE").sql.must_equal "SELECT * FROM t FOR SHARE" end end describe "Dataset#nowait" do before do @ds = Sequel.mock.dataset.from(:t).for_update end it "should raise an error if not supported" do proc{@ds.nowait}.must_raise Sequel::Error end it "should use the nowait syntax if supported" do @ds = @ds.with_extend do def supports_nowait?; true end def select_lock_sql(sql) super; sql << " NOWAIT" if @opts[:nowait] end end @ds.sql.must_equal "SELECT * FROM t FOR UPDATE" 3.times do @ds.nowait.sql.must_equal "SELECT * FROM t FOR UPDATE NOWAIT" end end end describe "Dataset#skip_locked" do before do @ds = Sequel.mock.dataset.from(:t).for_update end it "should raise an error if not supported" do proc{@ds.skip_locked}.must_raise Sequel::Error end it "should skip locked rows if supported" do @ds = @ds.with_extend do def supports_skip_locked?; true end def select_lock_sql(sql) super; sql << " SKIP LOCKED" if @opts[:skip_locked] end end @ds.sql.must_equal "SELECT * FROM t FOR UPDATE" 3.times do @ds.skip_locked.sql.must_equal "SELECT * FROM t FOR UPDATE SKIP LOCKED" end end end describe "Custom ASTTransformer" do before do @c = Class.new(Sequel::ASTTransformer) do def v(s) (s.is_a?(Symbol) || s.is_a?(String)) ?
:"#{s}#{s}" : super end end.new end it "should transform given objects" do ds = Sequel.mock.dataset.from(:t).cross_join(Sequel[:a].as(:g)).join(Sequel[:b].as(:h), [:c]).join(Sequel[:d].as(:i), :e=>:f) ds.sql.must_equal 'SELECT * FROM t CROSS JOIN a AS g INNER JOIN b AS h USING (c) INNER JOIN d AS i ON (i.e = h.f)' ds.clone(:from=>@c.transform(ds.opts[:from]), :join=>@c.transform(ds.opts[:join])).sql.must_equal 'SELECT * FROM tt CROSS JOIN aa AS g INNER JOIN bb AS h USING (cc) INNER JOIN dd AS i ON (ii.ee = hh.ff)' end with_symbol_splitting "should transform given objects with splittable symbols" do ds = Sequel.mock.dataset.from(:t).cross_join(:a___g).join(:b___h, [:c]).join(:d___i, :e=>:f) ds.sql.must_equal 'SELECT * FROM t CROSS JOIN a AS g INNER JOIN b AS h USING (c) INNER JOIN d AS i ON (i.e = h.f)' ds.clone(:from=>@c.transform(ds.opts[:from]), :join=>@c.transform(ds.opts[:join])).sql.must_equal 'SELECT * FROM tt CROSS JOIN aa AS g INNER JOIN bb AS h USING (cc) INNER JOIN dd AS i ON (ii.ee = hh.ff)' end it "should support sequel_ast_transform on objects for custom transforms" do obj = Sequel::SQL::Expression.new def obj.sequel_ast_transform(transformer); transformer.call(:a) end Sequel.mock.literal(@c.transform(obj)).must_equal "aa" obj = Sequel::SQL::Expression.new def obj.to_s_append(ds, sql) sql << 'a' end Sequel.mock.literal(@c.transform(obj)).must_equal "a" end end describe "Dataset#returning" do before do @db = Sequel.mock(:fetch=>proc{|s| {:foo=>s}}) @db.extend_datasets{def supports_returning?(type) true end} @ds = @db[:t].returning(:foo) @pr = proc do @ds = @ds.with_extend do Sequel::Dataset.def_sql_method(self, :delete, %w'delete from where returning') Sequel::Dataset.def_sql_method(self, :insert, %w'insert into columns values returning') Sequel::Dataset.def_sql_method(self, :update, %w'update table set where returning') end end end it "should use RETURNING clause in the SQL if the dataset supports it" do @pr.call @ds.delete_sql.must_equal "DELETE FROM t RETURNING foo" @ds.insert_sql(1).must_equal "INSERT INTO t VALUES (1) RETURNING foo" @ds.update_sql(:foo=>1).must_equal "UPDATE t SET foo = 1 RETURNING foo" end it "should not use RETURNING clause in the SQL if the dataset does not support it" do @ds.delete_sql.must_equal "DELETE FROM t" @ds.insert_sql(1).must_equal "INSERT INTO t VALUES (1)" @ds.update_sql(:foo=>1).must_equal "UPDATE t SET foo = 1" end it "should have insert, update, and delete yield to blocks if RETURNING is used" do @pr.call h = {} @ds.delete{|r| h = r} h.must_equal(:foo=>"DELETE FROM t RETURNING foo") @ds.insert(1){|r| h = r} h.must_equal(:foo=>"INSERT INTO t VALUES (1) RETURNING foo") @ds.update(:foo=>1){|r| h = r} h.must_equal(:foo=>"UPDATE t SET foo = 1 RETURNING foo") end it "should have insert, update, and delete return arrays of hashes if RETURNING is used and a block is not given" do @pr.call @ds.delete.must_equal [{:foo=>"DELETE FROM t RETURNING foo"}] @ds.insert(1).must_equal [{:foo=>"INSERT INTO t VALUES (1) RETURNING foo"}] @ds.update(:foo=>1).must_equal [{:foo=>"UPDATE t SET foo = 1 RETURNING foo"}] end it "should raise an error if RETURNING is not supported" do @db.extend_datasets{def supports_returning?(type) false end} proc{@db[:t].returning}.must_raise(Sequel::Error) proc{@db[:t].returning(:id)}.must_raise(Sequel::Error) end end describe "Dataset emulating bitwise operator support" do before do @ds = Sequel.mock.dataset.with_quote_identifiers(true).with_extend do def complex_expression_sql_append(sql, op, args) 
complex_expression_arg_pairs_append(sql, args){|a, b| Sequel.function(:bitand, a, b)} end end end it "should work with any number of arguments for operators" do @ds.select(Sequel::SQL::ComplexExpression.new(:&, :x)).sql.must_equal 'SELECT "x"' @ds.select(Sequel.expr(:x) & 1).sql.must_equal 'SELECT bitand("x", 1)' @ds.select(Sequel.expr(:x) & 1 & 2).sql.must_equal 'SELECT bitand(bitand("x", 1), 2)' end end describe "Dataset feature defaults" do it "should not require aliases for recursive CTEs by default" do Sequel::Database.new.dataset.recursive_cte_requires_column_aliases?.must_equal false end it "should not require placeholder type specifiers by default" do Sequel::Database.new.dataset.requires_placeholder_type_specifiers?.must_equal false end end describe "Dataset extensions" do before(:all) do class << Sequel def extension(*) end end end after(:all) do class << Sequel remove_method :extension end end before do @ds = Sequel.mock.dataset end it "should be able to register an extension with a module and have Dataset#extension extend the module" do Sequel::Dataset.register_extension(:foo, Module.new{def a; 1; end}) @ds.extension(:foo).a.must_equal 1 end it "should be able to register an extension with a block and have Dataset#extension call the block" do Sequel::Dataset.register_extension(:foo){|ds| ds.extend(Module.new{def a; 1; end})} @ds.extension(:foo).a.must_equal 1 end it "should be able to register an extension with a callable and have Dataset#extension call the callable" do Sequel::Dataset.register_extension(:foo, proc{|ds| ds.extend(Module.new{def a; 1; end})}) @ds.extension(:foo).a.must_equal 1 end it "should be able to load multiple extensions in the same call" do Sequel::Dataset.register_extension(:foo, proc{|ds| ds.send(:cache_set, :_columns, ds.columns + [:a])}) Sequel::Dataset.register_extension(:bar, proc{|ds| ds.send(:cache_set, :_columns, ds.columns + [:b])}) @ds.extension(:foo, :bar).columns.must_equal [:a, :b] end it "should have #extension not modify the receiver" do Sequel::Dataset.register_extension(:foo, Module.new{def a; 1; end}) @ds.extension(:foo) proc{@ds.a}.must_raise(NoMethodError) end it "should have #extension not return a cloned dataset" do @ds = @ds.with_extend(Module.new{def b; 2; end}) Sequel::Dataset.register_extension(:foo, Module.new{def a; 1; end}) v = @ds.extension(:foo) v.must_equal(@ds) v.must_be_kind_of(Sequel::Dataset) v.b.must_equal 2 end it "should register a Database extension for modifying all datasets when registering with a module" do Sequel::Dataset.register_extension(:foo, Module.new{def a; 1; end}) Sequel.mock.extension(:foo).dataset.a.must_equal 1 end it "should raise an Error if registering with both a module and a block" do proc{Sequel::Dataset.register_extension(:foo, Module.new){}}.must_raise(Sequel::Error) end it "should raise an Error if attempting to load an incompatible extension" do proc{@ds.extension(:foo2)}.must_raise(Sequel::Error) end end describe "Dataset#schema_and_table" do before do @ds = Sequel.mock[:test] end with_symbol_splitting "should correctly handle symbols" do @ds.schema_and_table(:s).must_equal [nil, 's'] @ds.schema_and_table(:s___a).must_equal [nil, 's'] @ds.schema_and_table(:t__s).must_equal ['t', 's'] @ds.schema_and_table(:t__s___a).must_equal ['t', 's'] end it "should correctly handle strings" do @ds.schema_and_table('s').must_equal [nil, 's'] end it "should correctly handle literal strings" do s = Sequel.lit('s') @ds.schema_and_table(s).last.must_be_same_as(s) end it "should correctly handle identifiers" do
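# A bare SQL::Identifier has no qualifier, so the schema part should be nil.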
@ds.schema_and_table(Sequel.identifier(:s)).must_equal [nil, 's'] end it "should correctly handle qualified identifiers" do @ds.schema_and_table(Sequel.qualify(:t, :s)).must_equal ['t', 's'] end it "should correctly handle given schema" do @ds.schema_and_table(Sequel.qualify(:t, :s), 'x').must_equal ['t', 's'] @ds.schema_and_table(:s, 'x').must_equal ['x', 's'] end end describe "Dataset#split_qualifiers" do before do @ds = Sequel.mock[:test] end it "should correctly handle symbols" do @ds.split_qualifiers(:s).must_equal ['s'] end with_symbol_splitting "should correctly handle splittable symbols" do @ds.split_qualifiers(:s___a).must_equal ['s'] @ds.split_qualifiers(:t__s).must_equal ['t', 's'] @ds.split_qualifiers(:t__s___a).must_equal ['t', 's'] end it "should correctly handle strings" do @ds.split_qualifiers('s').must_equal ['s'] end it "should correctly handle identifiers" do @ds.split_qualifiers(Sequel.identifier(:s)).must_equal ['s'] end it "should correctly handle simple qualified identifiers" do @ds.split_qualifiers(Sequel.qualify(:t, :s)).must_equal ['t', 's'] end with_symbol_splitting "should correctly handle complex qualified identifiers with splittable symbols" do @ds.split_qualifiers(Sequel.qualify(:d__t, :s)).must_equal ['d', 't', 's'] @ds.split_qualifiers(Sequel.qualify(:d, :t__s)).must_equal ['d', 't', 's'] @ds.split_qualifiers(Sequel.qualify(:d__t, :s__s2)).must_equal ['d', 't', 's', 's2'] end it "should correctly handle complex qualified identifiers" do @ds.split_qualifiers(Sequel.qualify(Sequel.qualify(:d, :t), :s)).must_equal ['d', 't', 's'] @ds.split_qualifiers(Sequel.qualify(:d, Sequel.qualify(:t, :s))).must_equal ['d', 't', 's'] @ds.split_qualifiers(Sequel.qualify(Sequel.qualify(:d, :t), Sequel.qualify(:s, :s2))).must_equal ['d', 't', 's', 's2'] end end describe "Dataset#paged_each" do before do @db = (0...10).map{|i| {:x=>i}} @ds = Sequel.mock[:test].order(:x).with_fetch(@db) @rows = [] @proc = lambda{|row| @rows << row} end it "should yield rows to the passed block" do @ds.paged_each(&@proc) @rows.must_equal @db end it "should return enumerator when called without block" do @ds.paged_each.each(&@proc) @rows.must_equal @db end it "should respect the row_proc" do @ds = @ds.with_row_proc(lambda{|row| {:x=>row[:x]*2}}) @ds.paged_each(&@proc) @rows.must_equal @db.map{|row| {:x=>row[:x]*2}} end it "should use a transaction to ensure consistent results" do @ds.paged_each(&@proc) sqls = @ds.db.sqls sqls[0].must_equal 'BEGIN' sqls[-1].must_equal 'COMMIT' end it "should use a limit and offset to go through the dataset in chunks" do @ds.paged_each(&@proc) @ds.db.sqls[1...-1].must_equal ['SELECT * FROM test ORDER BY x LIMIT 1000 OFFSET 0'] end it "should accept a :rows_per_fetch option to change the number of rows per fetch" do @ds = @ds.with_fetch(@db.each_slice(3).to_a) @ds.paged_each(:rows_per_fetch=>3, &@proc) @rows.must_equal @db @ds.db.sqls[1...-1].must_equal ['SELECT * FROM test ORDER BY x LIMIT 3 OFFSET 0', 'SELECT * FROM test ORDER BY x LIMIT 3 OFFSET 3', 'SELECT * FROM test ORDER BY x LIMIT 3 OFFSET 6', 'SELECT * FROM test ORDER BY x LIMIT 3 OFFSET 9'] end it "should handle cases where the last query returns nothing" do @ds = @ds.with_fetch(@db.each_slice(5).to_a) @ds.paged_each(:rows_per_fetch=>5, &@proc) @rows.must_equal @db @ds.db.sqls[1...-1].must_equal ['SELECT * FROM test ORDER BY x LIMIT 5 OFFSET 0', 'SELECT * FROM test ORDER BY x LIMIT 5 OFFSET 5', 'SELECT * FROM test ORDER BY x LIMIT 5 OFFSET 10'] end it "should respect an existing server option to
use" do @ds = Sequel.mock(:servers=>{:foo=>{}})[:test].order(:x) @ds = @ds.with_fetch(@db) @ds.server(:foo).paged_each(&@proc) @rows.must_equal @db @ds.db.sqls.must_equal ["BEGIN -- foo", "SELECT * FROM test ORDER BY x LIMIT 1000 OFFSET 0 -- foo", "COMMIT -- foo"] end it "should require an order" do lambda{@ds.unordered.paged_each(&@proc)}.must_raise(Sequel::Error) end it "should handle an existing limit and/or offset" do @ds = @ds.with_fetch(@db.each_slice(3).to_a) @ds.limit(5).paged_each(:rows_per_fetch=>3, &@proc) @ds.db.sqls[1...-1].must_equal ["SELECT * FROM test ORDER BY x LIMIT 3 OFFSET 0", "SELECT * FROM test ORDER BY x LIMIT 2 OFFSET 3"] @ds = @ds.with_fetch(@db.each_slice(3).to_a) @ds.limit(5, 2).paged_each(:rows_per_fetch=>3, &@proc) @ds.db.sqls[1...-1].must_equal ["SELECT * FROM test ORDER BY x LIMIT 3 OFFSET 2", "SELECT * FROM test ORDER BY x LIMIT 2 OFFSET 5"] @ds = @ds.with_fetch(@db.each_slice(3).to_a) @ds.limit(nil, 2).paged_each(:rows_per_fetch=>3, &@proc) @ds.db.sqls[1...-1].must_equal ["SELECT * FROM test ORDER BY x LIMIT 3 OFFSET 2", "SELECT * FROM test ORDER BY x LIMIT 3 OFFSET 5", "SELECT * FROM test ORDER BY x LIMIT 3 OFFSET 8", "SELECT * FROM test ORDER BY x LIMIT 3 OFFSET 11"] end it "should support :strategy=>:filter" do @ds = @ds.with_fetch(@db.each_slice(5).to_a) @ds.paged_each(:rows_per_fetch=>5, :strategy=>:filter, &@proc) @ds.db.sqls[1...-1].must_equal ["SELECT * FROM test ORDER BY x LIMIT 5", "SELECT * FROM test WHERE (x > 4) ORDER BY x LIMIT 5", "SELECT * FROM test WHERE (x > 9) ORDER BY x LIMIT 5"] @rows.must_equal @db @rows = [] db = @db.map{|h| h[:y] = h[:x] % 5; h[:z] = h[:x] % 9; h}.sort_by{|h| [h[:z], -h[:y], h[:x]]} @ds = @ds.with_fetch(db.each_slice(5).to_a) @ds.order(Sequel.identifier(:z), Sequel.desc(Sequel.qualify(:test, :y)), Sequel.asc(:x)).paged_each(:rows_per_fetch=>5, :strategy=>:filter, &@proc) @ds.db.sqls[1...-1].must_equal ["SELECT * FROM test ORDER BY z, test.y DESC, x ASC LIMIT 5", "SELECT * FROM test WHERE ((z > 3) OR ((z = 3) AND (test.y < 3)) OR ((z = 3) AND (test.y = 3) AND (x > 3))) ORDER BY z, test.y DESC, x ASC LIMIT 5", "SELECT * FROM test WHERE ((z > 8) OR ((z = 8) AND (test.y < 3)) OR ((z = 8) AND (test.y = 3) AND (x > 8))) ORDER BY z, test.y DESC, x ASC LIMIT 5"] @rows.must_equal db end it "should support :strategy=>:filter with :filter_values option" do db = @db.map{|h| h[:y] = h[:x] % 5; h[:z] = h[:x] % 9; h}.sort_by{|h| [h[:z], -h[:y], h[:x]]} @ds = @ds.with_fetch(db.each_slice(5).to_a) @ds.order(Sequel.identifier(:z), Sequel.desc(Sequel.qualify(:test, :y) * 2), Sequel.asc(:x)).paged_each(:rows_per_fetch=>5, :strategy=>:filter, :filter_values=>proc{|row, expr| [row[expr[0].value], row[expr[1].args.first.column] * expr[1].args.last, row[expr[2]]]}, &@proc) @ds.db.sqls[1...-1].must_equal ["SELECT * FROM test ORDER BY z, (test.y * 2) DESC, x ASC LIMIT 5", "SELECT * FROM test WHERE ((z > 3) OR ((z = 3) AND ((test.y * 2) < 6)) OR ((z = 3) AND ((test.y * 2) = 6) AND (x > 3))) ORDER BY z, (test.y * 2) DESC, x ASC LIMIT 5", "SELECT * FROM test WHERE ((z > 8) OR ((z = 8) AND ((test.y * 2) < 6)) OR ((z = 8) AND ((test.y * 2) = 6) AND (x > 8))) ORDER BY z, (test.y * 2) DESC, x ASC LIMIT 5"] @rows.must_equal db end end describe "Dataset#current_datetime" do after do Sequel.datetime_class = Time end it "should return an instance of Sequel.datetime_class for the current datetime" do t = Sequel::Dataset.new(nil).current_datetime t.must_be_kind_of(Time) (Time.now - t < 0.1).must_equal true Sequel.datetime_class = DateTime t = 
Sequel::Dataset.new(nil).current_datetime t.must_be_kind_of(DateTime) (DateTime.now - t < (0.1/86400)).must_equal true end end describe "Dataset#escape_like" do before do @ds = Sequel.mock[:test] end it "should escape % and _ and \\ characters" do @ds.escape_like("foo\\%_bar").must_equal "foo\\\\\\%\\_bar" end end describe "Dataset#supports_replace?" do it "should be false by default" do Sequel::Dataset.new(nil).supports_replace?.must_equal false end end describe "Dataset#supports_lateral_subqueries?" do it "should be false by default" do Sequel::Dataset.new(nil).supports_lateral_subqueries?.must_equal false end end describe "Frozen Datasets" do before do @ds = Sequel.mock[:test] end it "datasets should be frozen by default" do @ds.must_be :frozen? end it "should have Dataset#freeze return receiver" do @ds.freeze.must_be_same_as(@ds) end it "should have clones be frozen" do @ds.clone.must_be :frozen? end end describe "Dataset emulated complex expression operators" do before do @ds = Sequel.mock[:test].with_extend do def complex_expression_sql_append(sql, op, args) case op when :&, :|, :^, :%, :<<, :>>, :'B~' complex_expression_emulate_append(sql, op, args) else super end end end @n = Sequel.expr(:x).sql_number end it "should emulate &" do @ds.literal(Sequel::SQL::NumericExpression.new(:&, @n)).must_equal "x" @ds.literal(@n & 1).must_equal "BITAND(x, 1)" @ds.literal(@n & 1 & 2).must_equal "BITAND(BITAND(x, 1), 2)" end it "should emulate |" do @ds.literal(Sequel::SQL::NumericExpression.new(:|, @n)).must_equal "x" @ds.literal(@n | 1).must_equal "BITOR(x, 1)" @ds.literal(@n | 1 | 2).must_equal "BITOR(BITOR(x, 1), 2)" end it "should emulate ^" do @ds.literal(Sequel::SQL::NumericExpression.new(:^, @n)).must_equal "x" @ds.literal(@n ^ 1).must_equal "BITXOR(x, 1)" @ds.literal(@n ^ 1 ^ 2).must_equal "BITXOR(BITXOR(x, 1), 2)" end it "should emulate %" do @ds.literal(Sequel::SQL::NumericExpression.new(:%, @n)).must_equal "x" @ds.literal(@n % 1).must_equal "MOD(x, 1)" @ds.literal(@n % 1 % 2).must_equal "MOD(MOD(x, 1), 2)" end it "should emulate >>" do @ds.literal(Sequel::SQL::NumericExpression.new(:>>, @n)).must_equal "x" @ds.literal(@n >> 1).must_equal "(x / power(2, 1))" @ds.literal(@n >> 1 >> 2).must_equal "((x / power(2, 1)) / power(2, 2))" end it "should emulate <<" do @ds.literal(Sequel::SQL::NumericExpression.new(:<<, @n)).must_equal "x" @ds.literal(@n << 1).must_equal "(x * power(2, 1))" @ds.literal(@n << 1 << 2).must_equal "((x * power(2, 1)) * power(2, 2))" end it "should emulate B~" do @ds.literal(~@n).must_equal "((0 - x) - 1)" end end describe "#joined_dataset?" 
do before do @ds = Sequel.mock.dataset end it "should be false if the dataset has 0 or 1 from table" do @ds.joined_dataset?.must_equal false @ds.from(:a).joined_dataset?.must_equal false end it "should be true if the dataset has 2 or more from tables" do @ds.from(:a, :b).joined_dataset?.must_equal true @ds.from(:a, :b, :c).joined_dataset?.must_equal true end it "should be true if the dataset has any join tables" do @ds.from(:a).cross_join(:b).joined_dataset?.must_equal true end end describe "#unqualified_column_for" do before do @ds = Sequel.mock.dataset end it "should handle Symbols" do @ds.unqualified_column_for(:a).must_equal Sequel.identifier('a') end with_symbol_splitting "should handle splittable symbols" do @ds.unqualified_column_for(:b__a).must_equal Sequel.identifier('a') @ds.unqualified_column_for(:a___c).must_equal Sequel.identifier('a').as('c') @ds.unqualified_column_for(:b__a___c).must_equal Sequel.identifier('a').as('c') end it "should handle SQL::Identifiers" do @ds.unqualified_column_for(Sequel.identifier(:a)).must_equal Sequel.identifier(:a) end it "should handle SQL::QualifiedIdentifiers" do @ds.unqualified_column_for(Sequel.qualify(:b, :a)).must_equal Sequel.identifier('a') @ds.unqualified_column_for(Sequel.qualify(:b, 'a')).must_equal Sequel.identifier('a') end it "should handle SQL::AliasedExpressions" do @ds.unqualified_column_for(Sequel.qualify(:b, :a).as(:c)).must_equal Sequel.identifier('a').as(:c) end it "should return nil for other objects" do @ds.unqualified_column_for(Object.new).must_be_nil @ds.unqualified_column_for('a').must_be_nil end it "should return nil for other objects inside SQL::AliasedExpressions" do @ds.unqualified_column_for(Sequel.as(Object.new, 'a')).must_be_nil @ds.unqualified_column_for(Sequel.as('a', 'b')).must_be_nil end end describe "Dataset#output_identifier" do it "should handle empty identifiers and uppercase identifiers" do meth = Sequel::Database.new.dataset.method(:output_identifier) meth.call('').must_equal :untitled meth.call('A').must_equal :a end end describe "Dataset#where_all" do before do @ds = Sequel.mock(:fetch=>{:id=>1})[:items] end it "should filter dataset with condition, and return related rows" do 5.times do @ds.where_all(:id=>1).must_equal [{:id=>1}] @ds.db.sqls.must_equal ['SELECT * FROM items WHERE (id = 1)'] end end it "should handle empty arrays and hashes" do 5.times do @ds.where_all([]).must_equal [{:id=>1}] @ds.db.sqls.must_equal ['SELECT * FROM items'] @ds.where_all({}).must_equal [{:id=>1}] @ds.db.sqls.must_equal ['SELECT * FROM items'] end end it "should yield each row to the given block" do 5.times do a = [] @ds.where_all(:id=>1){|r| a << r}.must_equal [{:id=>1}] a.must_equal [{:id=>1}] @ds.db.sqls.must_equal ['SELECT * FROM items WHERE (id = 1)'] end end end describe "Dataset#where_each" do before do @ds = Sequel.mock(:fetch=>{:id=>1})[:items] end it "should handle empty arrays and hashes" do [[], {}].each do |arg| 5.times do a = [] @ds.where_each(arg){|r| a << r} a.must_equal [{:id=>1}] @ds.db.sqls.must_equal ['SELECT * FROM items'] end end end it "should yield each row to the given block" do 5.times do a = [] @ds.where_each(:id=>1){|r| a << r} a.must_equal [{:id=>1}] @ds.db.sqls.must_equal ['SELECT * FROM items WHERE (id = 1)'] end end end describe "Dataset#where_single_value" do before do @ds = Sequel.mock(:fetch=>{:id=>1})[:items].with_extend do select :only_id, :id end end it "should handle empty arrays and hashes" do [[], {}].each do |arg| 5.times do @ds.only_id.where_single_value(arg).must_equal 1 
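# Note on the helper used above: with_extend given a block creates a
# Sequel::Dataset::DatasetModule, and the `select :only_id, :id` call in the
# before block defines the only_id method used here, which returns the
# dataset with the SELECT id selection applied.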
@ds.db.sqls.must_equal ['SELECT id FROM items LIMIT 1'] end end end it "should return single value" do 5.times do @ds.only_id.where_single_value(:id=>1).must_equal 1 @ds.db.sqls.must_equal ['SELECT id FROM items WHERE (id = 1) LIMIT 1'] end end end
sequel-5.63.0/spec/core/deprecated_spec.rb
require_relative "spec_helper" describe "Sequel::Deprecated" do before do @d = Sequel::Deprecation @prev_prefix = @d.prefix @prev_output = @d.output @prev_backtrace_filter = @d.backtrace_filter @output = [] def @output.puts(s) self << s end @d.prefix = false @d.output = @output @d.backtrace_filter = false end after do @d.prefix = @prev_prefix @d.output = @prev_output @d.backtrace_filter = @prev_backtrace_filter end it "should output full messages to the given output" do @d.deprecate("foo") @output.must_equal ['foo'] end it "should consider two arguments to be a method name and additional text" do @d.deprecate("foo", "Use bar instead") @output.must_equal ['foo is deprecated and will be removed in Sequel 6. Use bar instead.'] end it "should include a prefix if set" do @d.prefix = "DEPWARN: " @d.deprecate("foo") @output.must_equal ['DEPWARN: foo'] end it "should not output anything if output is false" do @d.output = false @d.deprecate("foo") end it "should include full backtrace if backtrace_filter is true" do @d.backtrace_filter = true @d.deprecate("foo") @output.first.must_equal 'foo' (4..100).must_include(@output.count) end it "should include given lines of backtrace if backtrace_filter is an integer" do @d.backtrace_filter = 1 @d.deprecate("foo") @output.first.must_equal 'foo' @output.count.must_equal 2 @output.clear @d.backtrace_filter = 3 @d.deprecate("foo") @output.first.must_equal 'foo' @output.count.must_equal 4 end it "should select backtrace lines if backtrace_filter is a proc" do @d.backtrace_filter = lambda{|line, line_no| line_no < 3 && line =~ /./} @d.deprecate("foo") @output.first.must_equal 'foo' @output.count.must_equal 4 end end
sequel-5.63.0/spec/core/expression_filters_spec.rb
require_relative "spec_helper" describe "Blockless Ruby Filters" do before do db = Sequel.mock @d = db[:items].with_extend do def l(*args, &block) literal(filter_expr(*args, &block)) end def lit(*args) literal(*args) end end end it "should have expression dup and clone return self, since expressions are frozen" do x = Sequel[:x] x.dup.must_be_same_as x x.clone.must_be_same_as x end it "should support boolean columns directly" do @d.l(:x).must_equal 'x' end
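# Illustrative sketch, not part of the original suite: the l helper defined
# in the before block literalizes any filter expression, so additional
# one-off checks can be written in the same style as the surrounding tests.
it "should literalize a single-entry hash as a simple equality (illustrative)" do
  @d.l(:x => 1).must_equal '(x = 1)'
end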
with_symbol_splitting "should support qualified columns and aliased columns using symbols" do @d.l(:x__y).must_equal 'x.y' @d.l(:x___y).must_equal 'x AS y' @d.l(:x__y___z).must_equal 'x.y AS z' end with_symbol_splitting "should support qualified columns using virtual rows" do @d.l(Sequel.expr{x__y}).must_equal 'x.y' end it "should not split symbols or virtual row methods if symbol splitting is disabled" do @d.l(:x__y).must_equal 'x__y' @d.l(:x___y).must_equal 'x___y' @d.l(:x__y___z).must_equal 'x__y___z' @d.l(Sequel.expr{x__y}).must_equal 'x__y' end it "should support NOT with SQL functions" do @d.l(~Sequel.function(:is_blah)).must_equal 'NOT is_blah()' @d.l(~Sequel.function(:is_blah, :x)).must_equal 'NOT is_blah(x)' @d.l(~Sequel.function(:is_blah, Sequel[:x][:y])).must_equal 'NOT is_blah(x.y)' @d.l(~Sequel.function(:is_blah, :x, Sequel[:x][:y])).must_equal 'NOT is_blah(x, x.y)' end it "should handle multiple ~" do @d.l(~Sequel.~(:x)).must_equal 'x' @d.l(~~Sequel.~(:x)).must_equal 'NOT x' @d.l(~~Sequel.&(:x, :y)).must_equal '(x AND y)' @d.l(~~Sequel.|(:x, :y)).must_equal '(x OR y)' end it "should not modifying boolean expression created from array if array is modified" do a = [1] expr = Sequel.expr(:b=>a) @d.l(expr).must_equal '(b IN (1))' a << 2 @d.l(expr).must_equal '(b IN (1))' end it "should not modifying boolean expression created from string if string is modified" do a = '1'.dup expr = Sequel.expr(:b=>a) @d.l(expr).must_equal "(b = '1')" a << '2' @d.l(expr).must_equal "(b = '1')" end it "should handle already frozen arrays in boolean expressions" do @d.l(Sequel.expr(:b=>[1].freeze)).must_equal '(b IN (1))' end it "should handle already frozen strings in boolean expressions" do @d.l(Sequel.expr(:b=>'1'.freeze)).must_equal "(b = '1')" end it "should support = via Hash" do @d.l(:x => 100).must_equal '(x = 100)' @d.l(:x => 'a').must_equal '(x = \'a\')' @d.l(:x => true).must_equal '(x IS TRUE)' @d.l(:x => false).must_equal '(x IS FALSE)' @d.l(:x => nil).must_equal '(x IS NULL)' @d.l(:x => [1,2,3]).must_equal '(x IN (1, 2, 3))' end it "should use = 't' and != 't' OR IS NULL if IS TRUE is not supported" do @d = @d.with_extend{def supports_is_true?; false end} @d.l(:x => true).must_equal "(x = 't')" @d.l(~Sequel.expr(:x => true)).must_equal "((x != 't') OR (x IS NULL))" @d.l(:x => false).must_equal "(x = 'f')" @d.l(~Sequel.expr(:x => false)).must_equal "((x != 'f') OR (x IS NULL))" end it "should support != via inverted Hash" do @d.l(~Sequel.expr(:x => 100)).must_equal '(x != 100)' @d.l(~Sequel.expr(:x => 'a')).must_equal '(x != \'a\')' @d.l(~Sequel.expr(:x => true)).must_equal '(x IS NOT TRUE)' @d.l(~Sequel.expr(:x => false)).must_equal '(x IS NOT FALSE)' @d.l(~Sequel.expr(:x => nil)).must_equal '(x IS NOT NULL)' end it "should use NOT for inverting boolean expressions where right hand side is function or literal strings" do @d.l(~Sequel.expr(:x => Sequel.function(:any))).must_equal 'NOT (x = any())' @d.l(~Sequel.expr(:x => Sequel.lit('any()'))).must_equal 'NOT (x = any())' @d.l(~Sequel.expr(:x => Sequel.lit('any(?)', 1))).must_equal 'NOT (x = any(1))' end it "should support = and similar operations via =~ method" do @d.l{x =~ 100}.must_equal '(x = 100)' @d.l{x =~ 'a'}.must_equal '(x = \'a\')' @d.l{x =~ true}.must_equal '(x IS TRUE)' @d.l{x =~ false}.must_equal '(x IS FALSE)' @d.l{x =~ nil}.must_equal '(x IS NULL)' @d.l{x =~ (1...5)}.must_equal '((x >= 1) AND (x < 5))' @d.l{x =~ [1,2,3]}.must_equal '(x IN (1, 2, 3))' @d.l{(x + y) =~ 100}.must_equal '((x + y) = 100)' @d.l{(x + y) =~ 
'a'}.must_equal '((x + y) = \'a\')' @d.l{(x + y) =~ true}.must_equal '((x + y) IS TRUE)' @d.l{(x + y) =~ false}.must_equal '((x + y) IS FALSE)' @d.l{(x + y) =~ nil}.must_equal '((x + y) IS NULL)' @d.l{(x + y) =~ (1...5)}.must_equal '(((x + y) >= 1) AND ((x + y) < 5))' @d.l{(x + y) =~ [1,2,3]}.must_equal '((x + y) IN (1, 2, 3))' @d = @d.with_extend{def supports_regexp?; true end} @d.l{x =~ /blah/}.must_equal '(x ~ \'blah\')' @d.l{(x + y) =~ /blah/}.must_equal '((x + y) ~ \'blah\')' end it "should support != and similar inversions via !~ method" do @d.l{x !~ 100}.must_equal '(x != 100)' @d.l{x !~ 'a'}.must_equal '(x != \'a\')' @d.l{x !~ true}.must_equal '(x IS NOT TRUE)' @d.l{x !~ false}.must_equal '(x IS NOT FALSE)' @d.l{x !~ nil}.must_equal '(x IS NOT NULL)' @d.l{x !~ (1...5)}.must_equal '((x < 1) OR (x >= 5))' @d.l{x !~ [1,2,3]}.must_equal '(x NOT IN (1, 2, 3))' @d.l{(x + y) !~ 100}.must_equal '((x + y) != 100)' @d.l{(x + y) !~ 'a'}.must_equal '((x + y) != \'a\')' @d.l{(x + y) !~ true}.must_equal '((x + y) IS NOT TRUE)' @d.l{(x + y) !~ false}.must_equal '((x + y) IS NOT FALSE)' @d.l{(x + y) !~ nil}.must_equal '((x + y) IS NOT NULL)' @d.l{(x + y) !~ (1...5)}.must_equal '(((x + y) < 1) OR ((x + y) >= 5))' @d.l{(x + y) !~ [1,2,3]}.must_equal '((x + y) NOT IN (1, 2, 3))' @d = @d.with_extend{def supports_regexp?; true end} @d.l{x !~ /blah/}.must_equal '(x !~ \'blah\')' @d.l{(x + y) !~ /blah/}.must_equal '((x + y) !~ \'blah\')' end it "should support ~ via Hash and Regexp (if supported by database)" do @d = @d.with_extend{def supports_regexp?; true end} @d.l(:x => /blah/).must_equal '(x ~ \'blah\')' end it "should support !~ via inverted Hash and Regexp" do @d = @d.with_extend{def supports_regexp?; true end} @d.l(~Sequel.expr(:x => /blah/)).must_equal '(x !~ \'blah\')' end it "should support negating ranges" do @d.l(~Sequel.expr(:x => 1..5)).must_equal '((x < 1) OR (x > 5))' @d.l(~Sequel.expr(:x => 1...5)).must_equal '((x < 1) OR (x >= 5))' end it "should support negating IN with Dataset or Array" do @d.l(~Sequel.expr(:x => @d.select(:i))).must_equal '(x NOT IN (SELECT i FROM items))' @d.l(~Sequel.expr(:x => [1,2,3])).must_equal '(x NOT IN (1, 2, 3))' end it "should not add ~ method to string expressions" do proc{~Sequel.expr(:x).sql_string}.must_raise(NoMethodError) end it "should only allow combining associative operators" do @d.lit(Sequel.expr{a + b + c}).must_equal '(a + b + c)' @d.lit(Sequel.expr{a - b - c}).must_equal '((a - b) - c)' @d.lit(Sequel.expr{a * b * c}).must_equal '(a * b * c)' @d.lit(Sequel.expr{a / b / c}).must_equal '((a / b) / c)' @d.lit(Sequel.expr{a & b & c}).must_equal '(a AND b AND c)' @d.lit(Sequel.expr{a | b | c}).must_equal '(a OR b OR c)' @d.lit(Sequel.expr{a.sql_string + b + c}).must_equal '(a || b || c)' @d.lit(Sequel.expr{a.sql_number >> b >> c}).must_equal '((a >> b) >> c)' @d.lit(Sequel.expr{a.sql_number << b << c}).must_equal '((a << b) << c)' @d.lit(Sequel.expr{a.sql_number % b % c}).must_equal '((a % b) % c)' @d.lit(Sequel.expr{a.sql_number & b & c}).must_equal '(a & b & c)' @d.lit(Sequel.expr{a.sql_number | b | c}).must_equal '(a | b | c)' end it "should allow mathematical or string operations on true, false, or nil" do @d.lit(Sequel.expr(:x) + 1).must_equal '(x + 1)' @d.lit(Sequel.expr(:x) - true).must_equal "(x - 't')" @d.lit(Sequel.expr(:x) / false).must_equal "(x / 'f')" @d.lit(Sequel.expr(:x) * nil).must_equal '(x * NULL)' @d.lit(Sequel.expr(:x) ** 1).must_equal 'power(x, 1)' @d.lit(Sequel.join([:x, nil])).must_equal '(x || NULL)' end it "should allow 
mathematical or string operations on boolean complex expressions" do @d.lit(Sequel.expr(:x) + (Sequel.expr(:y) + 1)).must_equal '(x + y + 1)' @d.lit(Sequel.expr(:x) - ~Sequel.expr(:y)).must_equal '(x - NOT y)' @d.lit(Sequel.expr(:x) / (Sequel.expr(:y) & :z)).must_equal '(x / (y AND z))' @d.lit(Sequel.expr(:x) * (Sequel.expr(:y) | :z)).must_equal '(x * (y OR z))' @d.lit(Sequel.expr(:x) + Sequel.expr(:y).like('a')).must_equal "(x + (y LIKE 'a' ESCAPE '\\'))" @d.lit(Sequel.expr(:x) + Sequel.expr(:y).ilike('a')).must_equal "(x + (UPPER(y) LIKE UPPER('a') ESCAPE '\\'))" @d.lit(Sequel.expr(:x) - ~Sequel.expr(:y).like('a')).must_equal "(x - (y NOT LIKE 'a' ESCAPE '\\'))" @d.lit(Sequel.join([:x, ~Sequel.expr(:y).like('a')])).must_equal "(x || (y NOT LIKE 'a' ESCAPE '\\'))" @d.lit(Sequel.expr(:x) - ~Sequel.expr(:y).ilike('a')).must_equal "(x - (UPPER(y) NOT LIKE UPPER('a') ESCAPE '\\'))" @d.lit(Sequel.expr(:x) ** (Sequel.expr(:y) + 1)).must_equal 'power(x, (y + 1))' end it "should allow mathematical or string operations on numerics when argument is a generic or numeric expression" do @d.lit(1 + Sequel.expr(:x)).must_equal '(1 + x)' @d.lit(2**65 - Sequel.+(:x, 1)).must_equal "(#{2**65} - (x + 1))" @d.lit(1.0 / Sequel.function(:x)).must_equal '(1.0 / x())' @d.lit(BigDecimal('1.0') * Sequel[:a][:y]).must_equal '(1.0 * a.y)' @d.lit(2 ** Sequel.cast(:x, Integer)).must_equal 'power(2, CAST(x AS integer))' @d.lit(1 + Sequel.lit('x')).must_equal '(1 + x)' @d.lit(1 + Sequel.lit('?', :x)).must_equal '(1 + x)' end it "should not break Date/DateTime equality" do (Date.today == Sequel.expr(:x)).must_equal false (DateTime.now == Sequel.expr(:x)).must_equal false end it "should have coerce return super if called with non-numeric when coerce is already implemented" do Class.new(Sequel::SQL::Expression) do include(Module.new do def coerce(other) [:y, other] end end) include Sequel::SQL::NumericMethods end.new.coerce(:a).must_equal [:y, :a] end it "should have coerce return array if called with a non-numeric" do Sequel.expr(:x).coerce(:a).must_equal [Sequel.expr(:x), :a] end it "should support AND conditions via &" do @d.l(Sequel.expr(:x) & :y).must_equal '(x AND y)' @d.l(Sequel.expr(:x).sql_boolean & :y).must_equal '(x AND y)' @d.l(Sequel.expr(:x) & :y & :z).must_equal '(x AND y AND z)' @d.l(Sequel.expr(:x) & {:y => :z}).must_equal '(x AND (y = z))' @d.l((Sequel.expr(:x) + 200 < 0) & (Sequel.expr(:y) - 200 < 0)).must_equal '(((x + 200) < 0) AND ((y - 200) < 0))' @d.l(Sequel.expr(:x) & ~Sequel.expr(:y)).must_equal '(x AND NOT y)' @d.l(~Sequel.expr(:x) & :y).must_equal '(NOT x AND y)' @d.l(~Sequel.expr(:x) & ~Sequel.expr(:y)).must_equal '(NOT x AND NOT y)' end it "should support OR conditions via |" do @d.l(Sequel.expr(:x) | :y).must_equal '(x OR y)' @d.l(Sequel.expr(:x).sql_boolean | :y).must_equal '(x OR y)' @d.l(Sequel.expr(:x) | :y | :z).must_equal '(x OR y OR z)' @d.l(Sequel.expr(:x) | {:y => :z}).must_equal '(x OR (y = z))' @d.l((Sequel.expr(:x).sql_number > 200) | (Sequel.expr(:y).sql_number < 200)).must_equal '((x > 200) OR (y < 200))' end it "should support & | combinations" do @d.l((Sequel.expr(:x) | :y) & :z).must_equal '((x OR y) AND z)' @d.l(Sequel.expr(:x) | (Sequel.expr(:y) & :z)).must_equal '(x OR (y AND z))' @d.l((Sequel.expr(:x) & :w) | (Sequel.expr(:y) & :z)).must_equal '((x AND w) OR (y AND z))' end it "should support & | with ~" do @d.l(~((Sequel.expr(:x) | :y) & :z)).must_equal '((NOT x AND NOT y) OR NOT z)' @d.l(~(Sequel.expr(:x) | (Sequel.expr(:y) & :z))).must_equal '(NOT x AND (NOT y OR NOT
z))' @d.l(~((Sequel.expr(:x) & :w) | (Sequel.expr(:y) & :z))).must_equal '((NOT x OR NOT w) AND (NOT y OR NOT z))' @d.l(~((Sequel.expr(:x).sql_number > 200) | (Sequel.expr(:y) & :z))).must_equal '((x <= 200) AND (NOT y OR NOT z))' end it "should support LiteralString" do @d.l(Sequel.lit('x')).must_equal '(x)' @d.l(~Sequel.lit('x')).must_equal 'NOT x' @d.l(~~Sequel.lit('x')).must_equal 'x' @d.l(~((Sequel.lit('x') | :y) & :z)).must_equal '((NOT x AND NOT y) OR NOT z)' @d.l(~(Sequel.expr(:x) | Sequel.lit('y'))).must_equal '(NOT x AND NOT y)' @d.l(~(Sequel.lit('x') & Sequel.lit('y'))).must_equal '(NOT x OR NOT y)' @d.l(Sequel.expr(Sequel.lit('y') => Sequel.lit('z')) & Sequel.lit('x')).must_equal '((y = z) AND x)' @d.l((Sequel.lit('x') > 200) & (Sequel.lit('y') < 200)).must_equal '((x > 200) AND (y < 200))' @d.l(~(Sequel.lit('x') + 1 > 100)).must_equal '((x + 1) <= 100)' @d.l(Sequel.lit('x').like('a')).must_equal '(x LIKE \'a\' ESCAPE \'\\\')' @d.l(Sequel.lit('x') + 1 > 100).must_equal '((x + 1) > 100)' @d.l((Sequel.lit('x') * :y) < 100.01).must_equal '((x * y) < 100.01)' @d.l((Sequel.lit('x') ** :y) < 100.01).must_equal '(power(x, y) < 100.01)' @d.l((Sequel.lit('x') - Sequel.expr(:y)/2) >= 100000000000000000000000000000000000).must_equal '((x - (y / 2)) >= 100000000000000000000000000000000000)' @d.l((Sequel.lit('z') * ((Sequel.lit('x') / :y)/(Sequel.expr(:x) + :y))) <= 100).must_equal '((z * ((x / y) / (x + y))) <= 100)' @d.l(~((((Sequel.lit('x') - :y)/(Sequel.expr(:x) + :y))*:z) <= 100)).must_equal '((((x - y) / (x + y)) * z) > 100)' end it "should have LiteralString#inspect show it is a literal string" do Sequel.lit('x').inspect.must_equal "#<Sequel::LiteralString \"x\">" end it "should support hashes by ANDing the conditions" do @d.l(:x => 100, :y => 'a')[1...-1].split(' AND ').sort.must_equal ['(x = 100)', '(y = \'a\')'] @d.l(:x => true, :y => false)[1...-1].split(' AND ').sort.must_equal ['(x IS TRUE)', '(y IS FALSE)'] @d.l(:x => nil, :y => [1,2,3])[1...-1].split(' AND ').sort.must_equal ['(x IS NULL)', '(y IN (1, 2, 3))'] end it "should support arrays of two-element pairs the same as hashes" do @d.l([[:x, 100],[:y, 'a']]).must_equal '((x = 100) AND (y = \'a\'))' @d.l([[:x, true], [:y, false]]).must_equal '((x IS TRUE) AND (y IS FALSE))' @d.l([[:x, nil], [:y, [1,2,3]]]).must_equal '((x IS NULL) AND (y IN (1, 2, 3)))' end it "should emulate columns for array values" do @d.l([:x, :y]=>Sequel.value_list([[1,2], [3,4]])).must_equal '((x, y) IN ((1, 2), (3, 4)))' @d.l([:x, :y, :z]=>[[1,2,5], [3,4,6]]).must_equal '((x, y, z) IN ((1, 2, 5), (3, 4, 6)))' end it "should emulate multiple column IN if not supported" do @d = @d.with_extend{def supports_multiple_column_in?; false end} @d.l([:x, :y]=>Sequel.value_list([[1,2], [3,4]])).must_equal '(((x = 1) AND (y = 2)) OR ((x = 3) AND (y = 4)))' @d.l([:x, :y, :z]=>[[1,2,5], [3,4,6]]).must_equal '(((x = 1) AND (y = 2) AND (z = 5)) OR ((x = 3) AND (y = 4) AND (z = 6)))' end it "should have SQL::ValueList#inspect show it is a value list" do Sequel.value_list([[1,2], [3,4]]).inspect.must_equal "#<Sequel::SQL::ValueList [[1, 2], [3, 4]]>" end it "should support StringExpression#+ for concatenation of SQL strings" do @d.lit(Sequel.expr(:x).sql_string + :y).must_equal '(x || y)' @d.lit(Sequel.join([:x]) + :y).must_equal '(x || y)' @d.lit(Sequel.join([:x, :z], ' ') + :y).must_equal "(x || ' ' || z || y)" end it "should be supported inside blocks" do @d.l{Sequel.or([[:x, nil], [:y, [1,2,3]]])}.must_equal '((x IS NULL) OR (y IN (1, 2, 3)))' @d.l{Sequel.~([[:x,
nil], [:y, [1,2,3]]])}.must_equal '((x IS NOT NULL) OR (y NOT IN (1, 2, 3)))' @d.l{~((((Sequel.lit('x') - :y)/(Sequel.expr(:x) + :y))*:z) <= 100)}.must_equal '((((x - y) / (x + y)) * z) > 100)' @d.l{Sequel.&({:x => :a}, {:y => :z})}.must_equal '((x = a) AND (y = z))' end it "should support &, |, ^, ~, <<, and >> for NumericExpressions" do @d.l(Sequel.expr(:x).sql_number & 1 > 100).must_equal '((x & 1) > 100)' @d.l(Sequel.expr(:x).sql_number | 1 > 100).must_equal '((x | 1) > 100)' @d.l(Sequel.expr(:x).sql_number ^ 1 > 100).must_equal '((x ^ 1) > 100)' @d.l(~Sequel.expr(:x).sql_number > 100).must_equal '(~x > 100)' @d.l(Sequel.expr(:x).sql_number << 1 > 100).must_equal '((x << 1) > 100)' @d.l(Sequel.expr(:x).sql_number >> 1 > 100).must_equal '((x >> 1) > 100)' @d.l((Sequel.expr(:x) + 1) & 1 > 100).must_equal '(((x + 1) & 1) > 100)' @d.l((Sequel.expr(:x) + 1) | 1 > 100).must_equal '(((x + 1) | 1) > 100)' @d.l((Sequel.expr(:x) + 1) ^ 1 > 100).must_equal '(((x + 1) ^ 1) > 100)' @d.l(~(Sequel.expr(:x) + 1) > 100).must_equal '(~(x + 1) > 100)' @d.l((Sequel.expr(:x) + 1) << 1 > 100).must_equal '(((x + 1) << 1) > 100)' @d.l((Sequel.expr(:x) + 1) >> 1 > 100).must_equal '(((x + 1) >> 1) > 100)' @d.l((Sequel.expr(:x) + 1) & (Sequel.expr(:x) + 2) > 100).must_equal '(((x + 1) & (x + 2)) > 100)' end it "should allow using a Bitwise method on a ComplexExpression that isn't a NumericExpression" do @d.lit((Sequel.expr(:x) + 1) & (Sequel.expr(:x) + '2')).must_equal "((x + 1) & (x || '2'))" end it "should allow using a Boolean method on a ComplexExpression that isn't a BooleanExpression" do @d.l(Sequel.expr(:x) & (Sequel.expr(:x) + '2')).must_equal "(x AND (x || '2'))" end it "should raise an error if attempting to invert a ComplexExpression that isn't a BooleanExpression" do proc{Sequel::SQL::BooleanExpression.invert(Sequel.expr(:x) + 2)}.must_raise(Sequel::Error) end it "should support SQL::Constants" do @d.l({:x => Sequel::NULL}).must_equal '(x IS NULL)' @d.l({:x => Sequel::NOTNULL}).must_equal '(x IS NOT NULL)' @d.l({:x => Sequel::TRUE}).must_equal '(x IS TRUE)' @d.l({:x => Sequel::FALSE}).must_equal '(x IS FALSE)' @d.l({:x => Sequel::SQLTRUE}).must_equal '(x IS TRUE)' @d.l({:x => Sequel::SQLFALSE}).must_equal '(x IS FALSE)' @d.l({:x => Sequel::CURRENT_DATE}).must_equal '(x = CURRENT_DATE)' @d.l({:x => Sequel::CURRENT_TIME}).must_equal '(x = CURRENT_TIME)' @d.l({:x => Sequel::CURRENT_TIMESTAMP}).must_equal '(x = CURRENT_TIMESTAMP)' @d.l({:x => Sequel::DEFAULT}).must_equal '(x = DEFAULT)' end it "should support negation of SQL::Constants" do @d.l(Sequel.~(:x => Sequel::NULL)).must_equal '(x IS NOT NULL)' @d.l(Sequel.~(:x => Sequel::NOTNULL)).must_equal '(x IS NULL)' @d.l(Sequel.~(:x => Sequel::TRUE)).must_equal '(x IS NOT TRUE)' @d.l(Sequel.~(:x => Sequel::FALSE)).must_equal '(x IS NOT FALSE)' @d.l(Sequel.~(:x => Sequel::SQLTRUE)).must_equal '(x IS NOT TRUE)' @d.l(Sequel.~(:x => Sequel::SQLFALSE)).must_equal '(x IS NOT FALSE)' end it "should support direct negation of SQL::Constants" do @d.l({:x => ~Sequel::NULL}).must_equal '(x IS NOT NULL)' @d.l({:x => ~Sequel::NOTNULL}).must_equal '(x IS NULL)' @d.l({:x => ~Sequel::TRUE}).must_equal '(x IS FALSE)' @d.l({:x => ~Sequel::FALSE}).must_equal '(x IS TRUE)' @d.l({:x => ~Sequel::SQLTRUE}).must_equal '(x IS FALSE)' @d.l({:x => ~Sequel::SQLFALSE}).must_equal '(x IS TRUE)' end it "should raise an error if trying to invert an invalid SQL::Constant" do proc{~Sequel::CURRENT_DATE}.must_raise(Sequel::Error) end it "should raise an error if trying to create an invalid 
complex expression" do proc{Sequel::SQL::ComplexExpression.new(:BANG, 1, 2)}.must_raise(Sequel::Error) end it "should use a string concatentation for + if given a string" do @d.lit(Sequel.expr(:x) + '1').must_equal "(x || '1')" @d.lit(Sequel.expr(:x) + '1' + '1').must_equal "(x || '1' || '1')" end it "should use an addition for + if given a literal string" do @d.lit(Sequel.expr(:x) + Sequel.lit('1')).must_equal "(x + 1)" @d.lit(Sequel.expr(:x) + Sequel.lit('1') + Sequel.lit('1')).must_equal "(x + 1 + 1)" end it "should use a bitwise operator for & and | if given an integer" do @d.lit(Sequel.expr(:x) & 1).must_equal "(x & 1)" @d.lit(Sequel.expr(:x) | 1).must_equal "(x | 1)" @d.lit(Sequel.expr(:x) & 1 & 1).must_equal "(x & 1 & 1)" @d.lit(Sequel.expr(:x) | 1 | 1).must_equal "(x | 1 | 1)" end it "should allow adding a string to an integer expression" do @d.lit(Sequel.expr(:x) + 1 + 'a').must_equal "(x + 1 + 'a')" end it "should allow adding an integer to an string expression" do @d.lit(Sequel.expr(:x) + 'a' + 1).must_equal "(x || 'a' || 1)" end it "should allow adding a boolean to an integer expression" do @d.lit(Sequel.expr(:x) + 1 + true).must_equal "(x + 1 + 't')" end it "should allow adding a boolean to an string expression" do @d.lit(Sequel.expr(:x) + 'a' + true).must_equal "(x || 'a' || 't')" end it "should allow using a boolean operation with an integer on an boolean expression" do @d.lit(Sequel.expr(:x) & :a & 1).must_equal "(x AND a AND 1)" end it "should allow using a boolean operation with a string on an boolean expression" do @d.lit(Sequel.expr(:x) & :a & 'a').must_equal "(x AND a AND 'a')" end it "should allowing AND of boolean expression and literal string" do @d.lit(Sequel.expr(:x) & :a & Sequel.lit('a')).must_equal "(x AND a AND a)" end it "should allowing + of integer expression and literal string" do @d.lit(Sequel.expr(:x) + :a + Sequel.lit('a')).must_equal "(x + a + a)" end it "should allowing + of string expression and literal string" do @d.lit(Sequel.expr(:x) + 'a' + Sequel.lit('a')).must_equal "(x || 'a' || a)" end it "should allow sql_{string,boolean,number} methods on numeric expressions" do @d.lit((Sequel.expr(:x) + 1).sql_string + 'a').must_equal "((x + 1) || 'a')" @d.lit((Sequel.expr(:x) + 1).sql_boolean & 1).must_equal "((x + 1) AND 1)" @d.lit((Sequel.expr(:x) + 1).sql_number + 'a').must_equal "(x + 1 + 'a')" end it "should allow sql_{string,boolean,number} methods on string expressions" do @d.lit((Sequel.expr(:x) + 'a').sql_string + 'a').must_equal "(x || 'a' || 'a')" @d.lit((Sequel.expr(:x) + 'a').sql_boolean & 1).must_equal "((x || 'a') AND 1)" @d.lit((Sequel.expr(:x) + 'a').sql_number + 'a').must_equal "((x || 'a') + 'a')" end it "should allow sql_{string,boolean,number} methods on boolean expressions" do @d.lit((Sequel.expr(:x) & :y).sql_string + 'a').must_equal "((x AND y) || 'a')" @d.lit((Sequel.expr(:x) & :y).sql_boolean & 1).must_equal "(x AND y AND 1)" @d.lit((Sequel.expr(:x) & :y).sql_number + 'a').must_equal "((x AND y) + 'a')" end it "should raise an error if trying to literalize an invalid complex expression" do ce = Sequel::SQL::ComplexExpression.allocate ce.instance_eval do @op = :BANG @args = [:x, 1] end proc{@d.lit(ce)}.must_raise(Sequel::InvalidOperation) end it "should support equality comparison of two expressions" do e1 = ~Sequel.like(:comment, '%:hidden:%') e2 = ~Sequel.like(:comment, '%:hidden:%') e1.must_equal e2 end it "should support expression filter methods on Datasets" do d = @d.select(:a) @d.lit(d + 1).must_equal '((SELECT a FROM 
items) + 1)' @d.lit(d - 1).must_equal '((SELECT a FROM items) - 1)' @d.lit(d * 1).must_equal '((SELECT a FROM items) * 1)' @d.lit(d / 1).must_equal '((SELECT a FROM items) / 1)' @d.lit(d ** 1).must_equal 'power((SELECT a FROM items), 1)' @d.lit(d => 1).must_equal '((SELECT a FROM items) = 1)' @d.lit(Sequel.~(d => 1)).must_equal '((SELECT a FROM items) != 1)' @d.lit(d > 1).must_equal '((SELECT a FROM items) > 1)' @d.lit(d < 1).must_equal '((SELECT a FROM items) < 1)' @d.lit(d >= 1).must_equal '((SELECT a FROM items) >= 1)' @d.lit(d <= 1).must_equal '((SELECT a FROM items) <= 1)' @d.lit(d.as(:b)).must_equal '(SELECT a FROM items) AS b' @d.lit(d & :b).must_equal '((SELECT a FROM items) AND b)' @d.lit(d | :b).must_equal '((SELECT a FROM items) OR b)' @d.lit(~d).must_equal 'NOT (SELECT a FROM items)' @d.lit(d.cast(Integer)).must_equal 'CAST((SELECT a FROM items) AS integer)' @d.lit(d.cast_numeric).must_equal 'CAST((SELECT a FROM items) AS integer)' @d.lit(d.cast_string).must_equal 'CAST((SELECT a FROM items) AS varchar(255))' @d.lit(d.cast_numeric << :b).must_equal '(CAST((SELECT a FROM items) AS integer) << b)' @d.lit(d.cast_string + :b).must_equal '(CAST((SELECT a FROM items) AS varchar(255)) || b)' @d.lit(d.extract(:year)).must_equal 'extract(year FROM (SELECT a FROM items))' @d.lit(d.sql_boolean & :b).must_equal '((SELECT a FROM items) AND b)' @d.lit(d.sql_number << :b).must_equal '((SELECT a FROM items) << b)' @d.lit(d.sql_string + :b).must_equal '((SELECT a FROM items) || b)' @d.lit(d.asc).must_equal '(SELECT a FROM items) ASC' @d.lit(d.desc).must_equal '(SELECT a FROM items) DESC' @d.lit(d.like(:b)).must_equal '((SELECT a FROM items) LIKE b ESCAPE \'\\\')' @d.lit(d.ilike(:b)).must_equal '(UPPER((SELECT a FROM items)) LIKE UPPER(b) ESCAPE \'\\\')' end it "should handle emulated char_length function" do @d.lit(Sequel.char_length(:a)).must_equal 'char_length(a)' end it "should handle emulated trim function" do @d.lit(Sequel.trim(:a)).must_equal 'trim(a)' end it "should handle emulated function where only name is emulated" do ds = Sequel.mock[:a] ds.literal(Sequel.trim(:a)).must_equal 'trim(a)' ds.with_extend{def native_function_name(f) 'foo' end}.literal(Sequel.trim(:a)).must_equal 'foo(a)' end it "should handle emulated function needing full emulation" do dsc = Class.new(Sequel::Dataset) do def emulate_function?(n) n == :trim end def emulate_function_sql_append(sql, f) sql << "#{f.name}FOO(lower(#{f.args.first}))" end end dsc.new(@d.db).literal(Sequel.trim(:a)).must_equal 'trimFOO(lower(a))' end it "should raise error when providing the wrong arity when directly creating ComplexExpression instances" do proc{Sequel::SQL::ComplexExpression.new(:IS)}.must_raise Sequel::Error proc{Sequel::SQL::ComplexExpression.new(:IS, :x)}.must_raise Sequel::Error proc{Sequel::SQL::ComplexExpression.new(:IS, :x, :y, :z)}.must_raise Sequel::Error proc{Sequel::SQL::ComplexExpression.new(:NOT)}.must_raise Sequel::Error proc{Sequel::SQL::ComplexExpression.new(:NOT, :x, :y)}.must_raise Sequel::Error end it "should handle endless ranges" do endless = eval('(1..)') @d.l{x =~ endless}.must_equal '(x >= 1)' @d.l(:x =>
endless).must_equal '(x < 1)' end if RUBY_VERSION >= '2.7' it "should handle startless, endless ranges" do endless = eval('nil..nil') @d.l{x =~ endless}.must_equal '(1 = 1)' @d.l(:x => endless).must_equal '(1 = 1)' endless = eval('nil...nil') @d.l{x =~ endless}.must_equal '(1 = 1)' @d.l(:x => endless).must_equal '(1 = 1)' end if RUBY_VERSION >= '2.7' end describe Sequel::SQL::VirtualRow do before do @d = Sequel.mock[:items].with_quote_identifiers(true).with_extend do def supports_window_functions?; true end def l(*args, &block) literal(filter_expr(*args, &block)) end end end it "should treat methods without arguments as identifiers" do @d.l{column}.must_equal '"column"' end with_symbol_splitting "should treat methods without arguments that have embedded double underscores as qualified identifiers" do @d.l{table__column}.must_equal '"table"."column"' end it "should treat methods with arguments as functions with the arguments" do @d.l{function(arg1, 10, 'arg3')}.must_equal 'function("arg1", 10, \'arg3\')' end it "should treat methods followed by function as a function call with no arguments" do @d.l{version.function}.must_equal 'version()' end it "should treat methods followed by function.* as a function call with * argument" do @d.l{count.function.*}.must_equal 'count(*)' end it "should support * method on functions to raise error if function already has an argument" do proc{@d.l{count(1).*}}.must_raise(Sequel::Error) end it "should support * method on functions to use * as the argument" do @d.l{count.function.*}.must_equal 'count(*)' @d.literal(Sequel.expr{sum(1) * 2}).must_equal '(sum(1) * 2)' end it "should support distinct methods on functions to use DISTINCT before the arguments" do @d.l{count(column1).distinct}.must_equal 'count(DISTINCT "column1")' @d.l{count(column1, column2).distinct}.must_equal 'count(DISTINCT "column1", "column2")' end it "should handle method.function.over as a window function call" do @d.l{rank.function.over}.must_equal 'rank() OVER ()' end it "should handle method.function.over(:partition) as a window function call" do @d.l{rank.function.over(:partition=>column1)}.must_equal 'rank() OVER (PARTITION BY "column1")' @d.l{rank.function.over(:partition=>[column1, column2])}.must_equal 'rank() OVER (PARTITION BY "column1", "column2")' end it "should handle method(arg).over options as a window function call" do @d.l{avg(column1).over}.must_equal 'avg("column1") OVER ()' @d.l{avg(column1, column2).over}.must_equal 'avg("column1", "column2") OVER ()' end it "should handle method.function.over(:order) as a window function call" do @d.l{rank.function.over(:order=>column1)}.must_equal 'rank() OVER (ORDER BY "column1")' @d.l{rank.function.over(:order=>[column1, column2])}.must_equal 'rank() OVER (ORDER BY "column1", "column2")' end it "should handle method.function.over(:window) as a window function call" do @d.l{rank.function.over(:window=>:win)}.must_equal 'rank() OVER ("win")' end it "should handle method.function.*.over as a window function call" do @d.l{count.function.*.over}.must_equal 'count(*) OVER ()' end it "should handle method.function.over(:frame=>:all) as a window function call with frame for all rows" do @d.l{rank.function.over(:frame=>:all)}.must_equal 'rank() OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)' end it "should handle method.function.over(:frame=>:rows) as a window function call with frame for all rows before current row" do @d.l{rank.function.over(:frame=>:rows)}.must_equal 'rank() OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND 
CURRENT ROW)' end it "should handle method.function.over(:frame=>:groups) as a window function call with frame for all groups before current row" do @d.l{rank.function.over(:frame=>:groups)}.must_equal 'rank() OVER (GROUPS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)' end it "should handle method.function.over(:frame=>:range) as a window function call with frame for all groups before current row" do @d.l{rank.function.over(:frame=>:range)}.must_equal 'rank() OVER (RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)' end it "should handle window function with :frame hash argument with :type option" do @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding})}.must_equal 'rank() OVER (ROWS UNBOUNDED PRECEDING)' @d.l{rank.function.over(:frame=>{:type=>:range, :start=>:preceding})}.must_equal 'rank() OVER (RANGE UNBOUNDED PRECEDING)' @d.l{rank.function.over(:frame=>{:type=>:groups, :start=>:preceding})}.must_equal 'rank() OVER (GROUPS UNBOUNDED PRECEDING)' end it "should handle window function with :frame hash argument with :start option" do @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding})}.must_equal 'rank() OVER (ROWS UNBOUNDED PRECEDING)' @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:following})}.must_equal 'rank() OVER (ROWS UNBOUNDED FOLLOWING)' @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:current})}.must_equal 'rank() OVER (ROWS CURRENT ROW)' @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>1})}.must_equal 'rank() OVER (ROWS 1 PRECEDING)' @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>[1, :following]})}.must_equal 'rank() OVER (ROWS 1 FOLLOWING)' end it "should handle window function with :frame hash argument with :end option" do @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :end=>:preceding})}.must_equal 'rank() OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED PRECEDING)' @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :end=>:following})}.must_equal 'rank() OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)' @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :end=>:current})}.must_equal 'rank() OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)' @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :end=>1})}.must_equal 'rank() OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND 1 FOLLOWING)' @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :end=>[1, :preceding]})}.must_equal 'rank() OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND 1 PRECEDING)' end it "should handle window function with :frame hash argument with :exclude option" do @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding,:exclude=>:current})}.must_equal 'rank() OVER (ROWS UNBOUNDED PRECEDING EXCLUDE CURRENT ROW)' @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :exclude=>:group})}.must_equal 'rank() OVER (ROWS UNBOUNDED PRECEDING EXCLUDE GROUP)' @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :exclude=>:ties})}.must_equal 'rank() OVER (ROWS UNBOUNDED PRECEDING EXCLUDE TIES)' @d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :exclude=>:no_others})}.must_equal 'rank() OVER (ROWS UNBOUNDED PRECEDING EXCLUDE NO OTHERS)' end it "should handle window function with :frame hash argument with invalid options" do proc{@d.l{rank.function.over(:frame=>{:type=>:blah, :start=>:preceding})}}.must_raise Sequel::Error proc{@d.l{rank.function.over(:frame=>{:type=>:rows})}}.must_raise Sequel::Error 
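# The must_raise assertions in this test (continued below) exercise each
# validation branch of the :frame hash in turn: an unknown :type, a missing
# :start, an unknown :start or :end value, a malformed [offset, direction]
# pair, and an unknown :exclude value must all raise Sequel::Error.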
proc{@d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:blah})}}.must_raise Sequel::Error proc{@d.l{rank.function.over(:frame=>{:type=>:rows, :start=>[1, :blah]})}}.must_raise Sequel::Error proc{@d.l{rank.function.over(:frame=>{:type=>:rows, :start=>[1, :preceding, 3]})}}.must_raise Sequel::Error proc{@d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :end=>:blah})}}.must_raise Sequel::Error proc{@d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :end=>[1, :blah]})}}.must_raise Sequel::Error proc{@d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :end=>[1, :following, 3]})}}.must_raise Sequel::Error proc{@d.l{rank.function.over(:frame=>{:type=>:rows, :start=>:preceding, :exclude=>:blah})}}.must_raise Sequel::Error end it "should handle method.function.over(:frame=>'some string') as a window function call with explicit frame" do @d.l{rank.function.over(:frame=>'RANGE BETWEEN 3 PRECEDING AND CURRENT ROW')}.must_equal 'rank() OVER (RANGE BETWEEN 3 PRECEDING AND CURRENT ROW)' end it "should support window function frame options" do @d.supports_window_function_frame_option?(:rows).must_equal true @d.supports_window_function_frame_option?(:range).must_equal true @d.supports_window_function_frame_option?(:groups).must_equal false @d.supports_window_function_frame_option?(:offset).must_equal true @d.supports_window_function_frame_option?(:exclude).must_equal false end it "should raise an error if an invalid :frame option is used" do proc{@d.l{rank.function.over(:frame=>:blah)}}.must_raise(Sequel::Error) end it "should support all over options together" do @d.l{count.function.*.over(:partition=>a, :order=>b, :window=>:win, :frame=>:rows)}.must_equal 'count(*) OVER ("win" PARTITION BY "a" ORDER BY "b" ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)' end it "should support order method on functions to specify orders for aggregate functions" do @d.l{rank(:c).order(:a, :b)}.must_equal 'rank("c" ORDER BY "a", "b")' end it "should support over method on functions to create window functions" do @d.l{sum(c).over(:partition=>a, :order=>b, :window=>:win, :frame=>:rows)}.must_equal 'sum("c") OVER ("win" PARTITION BY "a" ORDER BY "b" ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)' end it "should support over method with a Window argument" do @d.l{sum(c).over(Sequel::SQL::Window.new(:partition=>a, :order=>b, :window=>:win, :frame=>:rows))}.must_equal 'sum("c") OVER ("win" PARTITION BY "a" ORDER BY "b" ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)' end it "should raise error if over is called on a function that already has a window" do proc{@d.l{rank.function.over.over}}.must_raise(Sequel::Error) end it "should raise an error if window functions are not supported" do proc{@d.with_extend{def supports_window_functions?; false end}.l{count.function.*.over(:partition=>a, :order=>b, :window=>:win, :frame=>:rows)}}.must_raise(Sequel::Error) proc{Sequel.mock.dataset.filter{count.function.*.over(:partition=>a, :order=>b, :window=>:win, :frame=>:rows)}.sql}.must_raise(Sequel::Error) end it "should handle lateral function calls" do @d.l{rank.function.lateral}.must_equal 'LATERAL rank()' end it "should handle ordered-set and hypothetical-set function calls" do @d.l{mode.function.within_group(:a)}.must_equal 'mode() WITHIN GROUP (ORDER BY "a")' @d.l{mode.function.within_group(:a, :b)}.must_equal 'mode() WITHIN GROUP (ORDER BY "a", "b")' end it "should handle emulated filtered aggregate function calls" do @d.l{count.function.*.filter(Sequel.&(:a, :b))}.must_equal
'count((CASE WHEN ("a" AND "b") THEN 1 ELSE NULL END))' @d.l{count.function.*.filter(:a=>1)}.must_equal 'count((CASE WHEN ("a" = 1) THEN 1 ELSE NULL END))' @d.l{count(:a).filter{b > 1}}.must_equal 'count((CASE WHEN ("b" > 1) THEN "a" ELSE NULL END))' @d.l{count(:a).filter(:a=>1){b > 1}}.must_equal 'count((CASE WHEN (("a" = 1) AND ("b" > 1)) THEN "a" ELSE NULL END))' end it "should handle filtered aggregate function calls" do @d = @d.with_extend{def supports_filtered_aggregates?; true end} @d.l{count.function.*.filter(Sequel.&(:a, :b))}.must_equal 'count(*) FILTER (WHERE ("a" AND "b"))' @d.l{count.function.*.filter(:a=>1)}.must_equal 'count(*) FILTER (WHERE ("a" = 1))' @d.l{count.function.*.filter{b > 1}}.must_equal 'count(*) FILTER (WHERE ("b" > 1))' @d.l{count.function.*.filter(:a=>1){b > 1}}.must_equal 'count(*) FILTER (WHERE (("a" = 1) AND ("b" > 1)))' end it "should handle filtered ordered-set and hypothetical-set function calls" do @d = @d.with_extend{def supports_filtered_aggregates?; true end} @d.l{mode.function.within_group(:a).filter(:a=>1)}.must_equal 'mode() WITHIN GROUP (ORDER BY "a") FILTER (WHERE ("a" = 1))' end it "should handle function calls with ordinality" do @d.l{foo.function.with_ordinality}.must_equal 'foo() WITH ORDINALITY' end it "should support function method on identifiers to create functions" do @d.l{rank.function}.must_equal 'rank()' @d.l{sum.function(c)}.must_equal 'sum("c")' @d.l{sum.function(c, 1)}.must_equal 'sum("c", 1)' end with_symbol_splitting "should support function method on foo__bar methods to create functions" do @d.l{sch__rank.function}.must_equal 'sch.rank()' @d.l{sch__sum.function(c)}.must_equal 'sch.sum("c")' @d.l{sch__sum.function(c, 1)}.must_equal 'sch.sum("c", 1)' @d.l{Sequel.qualify(sch[:sum], x[:y]).function(c, 1)}.must_equal 'sch.sum.x.y("c", 1)' end it "should support function method on qualified identifiers to create functions" do @d.l{sch[rank].function}.must_equal 'sch.rank()' @d.l{sch[sum].function(c)}.must_equal 'sch.sum("c")' @d.l{sch[sum].function(c, 1)}.must_equal 'sch.sum("c", 1)' @d.l{Sequel.qualify(sch[:sum], x[:y]).function(c, 1)}.must_equal 'sch.sum.x.y("c", 1)' end with_symbol_splitting "should support function method on qualified identifiers to create functions" do @d.l{Sequel.qualify(sch__sum, :x__y).function(c, 1)}.must_equal 'sch.sum.x.y("c", 1)' end it "should not quote function names created from identifiers by default" do @d = @d.with_extend{def supports_quoted_function_names?; true end} @d.l{rank.function}.must_equal 'rank()' end with_symbol_splitting "should handle quoted function names when using double underscores" do @d = @d.with_extend{def supports_quoted_function_names?; true end} @d.l{sch__rank.function}.must_equal '"sch"."rank"()' end it "should quote function names if a quoted function is used and database supports quoted function names" do @d = @d.with_extend{def supports_quoted_function_names?; true end} @d.l{rank(1).quoted}.must_equal '"rank"(1)' @d.l{rank.function.quoted}.must_equal '"rank"()' @d.l{sch__rank(1).quoted}.must_equal '"sch__rank"(1)' end it "should not quote function names created from qualified identifiers if an unquoted function is used" do @d = @d.with_extend{def supports_quoted_function_names?; true end} @d.l{sch[rank].function.unquoted}.must_equal 'sch.rank()' end it "should deal with classes without requiring :: prefix" do @d.l{date < Date.today}.must_equal "(\"date\" < '#{Date.today}')" @d.l{date < Sequel::CURRENT_DATE}.must_equal "(\"date\" < CURRENT_DATE)" @d.l{num < 
Math::PI.to_i}.must_equal "(\"num\" < 3)" end it "should have operator methods defined that produce Sequel expression objects" do @d.l{|o| o.&({:a=>1}, :b)}.must_equal '(("a" = 1) AND "b")' @d.l{|o| o.|({:a=>1}, :b)}.must_equal '(("a" = 1) OR "b")' @d.l{|o| o.+(1, :b) > 2}.must_equal '((1 + "b") > 2)' @d.l{|o| o.-(1, :b) < 2}.must_equal '((1 - "b") < 2)' @d.l{|o| o.*(1, :b) >= 2}.must_equal '((1 * "b") >= 2)' @d.l{|o| o.**(1, :b) >= 2}.must_equal '(power(1, "b") >= 2)' @d.l{|o| o./(1, :b) <= 2}.must_equal '((1 / "b") <= 2)' @d.l{|o| o.~(:a=>1)}.must_equal '("a" != 1)' @d.l{|o| o.~([[:a, 1], [:b, 2]])}.must_equal '(("a" != 1) OR ("b" != 2))' @d.l{|o| o.<(1, :b)}.must_equal '(1 < "b")' @d.l{|o| o.>(1, :b)}.must_equal '(1 > "b")' @d.l{|o| o.<=(1, :b)}.must_equal '(1 <= "b")' @d.l{|o| o.>=(1, :b)}.must_equal '(1 >= "b")' end end describe "Sequel core extension replacements" do before do @db = Sequel.mock @ds = @db.dataset.with_extend{def supports_regexp?; true end} @o = Object.new def @o.sql_literal(ds) 'foo' end end def l(arg, should) @ds.literal(arg).must_equal should end it "Sequel.expr should return items wrapped in Sequel objects" do Sequel.expr(1).must_be_kind_of(Sequel::SQL::NumericExpression) Sequel.expr('a').must_be_kind_of(Sequel::SQL::StringExpression) Sequel.expr(true).must_be_kind_of(Sequel::SQL::BooleanExpression) Sequel.expr(nil).must_be_kind_of(Sequel::SQL::Wrapper) Sequel.expr({1=>2}).must_be_kind_of(Sequel::SQL::BooleanExpression) Sequel.expr([[1, 2]]).must_be_kind_of(Sequel::SQL::BooleanExpression) Sequel.expr([1]).must_be_kind_of(Sequel::SQL::Wrapper) Sequel.expr{|o| o.a}.must_be_kind_of(Sequel::SQL::Identifier) Sequel.expr{a}.must_be_kind_of(Sequel::SQL::Identifier) Sequel.expr(:a).must_be_kind_of(Sequel::SQL::Identifier) end with_symbol_splitting "Sequel.expr should return items wrapped in Sequel objects for splittable symbols" do Sequel.expr(:a__b).must_be_kind_of(Sequel::SQL::QualifiedIdentifier) Sequel.expr(:a___c).must_be_kind_of(Sequel::SQL::AliasedExpression) Sequel.expr(:a___c).expression.must_be_kind_of(Sequel::SQL::Identifier) Sequel.expr(:a__b___c).must_be_kind_of(Sequel::SQL::AliasedExpression) Sequel.expr(:a__b___c).expression.must_be_kind_of(Sequel::SQL::QualifiedIdentifier) end it "Sequel.expr should return an appropriate wrapped object" do l(Sequel.expr(1) + 1, "(1 + 1)") l(Sequel.expr('a') + 'b', "('a' || 'b')") l(Sequel.expr(:b) & nil, "(b AND NULL)") l(Sequel.expr(nil) & true, "(NULL AND 't')") l(Sequel.expr(false) & true, "('f' AND 't')") l(Sequel.expr(true) | false, "('t' OR 'f')") l(Sequel.expr(@o) + 1, "(foo + 1)") end it "Sequel.expr should handle condition specifiers" do l(Sequel.expr(:a=>1) & nil, "((a = 1) AND NULL)") l(Sequel.expr([[:a, 1]]) & nil, "((a = 1) AND NULL)") l(Sequel.expr([[:a, 1], [:b, 2]]) & nil, "((a = 1) AND (b = 2) AND NULL)") end it "Sequel.expr should handle arrays that are not condition specifiers" do l(Sequel.expr([1]), "(1)") l(Sequel.expr([1, 2]), "(1, 2)") end it "Sequel.expr should treat blocks/procs as virtual rows and wrap the output" do l(Sequel.expr{1} + 1, "(1 + 1)") l(Sequel.expr{o[a]} + 1, "(o.a + 1)") l(Sequel.expr{[[:a, 1]]} & nil, "((a = 1) AND NULL)") l(Sequel.expr{|v| @o} + 1, "(foo + 1)") l(Sequel.expr(proc{1}) + 1, "(1 + 1)") l(Sequel.expr(proc{o[a]}) + 1, "(o.a + 1)") l(Sequel.expr(proc{[[:a, 1]]}) & nil, "((a = 1) AND NULL)") l(Sequel.expr(proc{|v| @o}) + 1, "(foo + 1)") end it "Sequel.expr should handle lambda proc virtual rows" do l(Sequel.expr(&lambda{1}), "1") l(Sequel.expr(&lambda{|| 1}), "1") end it 
"Sequel.expr should raise an error if given an argument and a block" do proc{Sequel.expr(nil){}}.must_raise(Sequel::Error) end it "Sequel.expr should raise an error if given neither an argument nor a block" do proc{Sequel.expr}.must_raise(Sequel::Error) end it "Sequel.expr should return existing Sequel expressions directly" do o = Sequel.expr(1) Sequel.expr(o).must_be_same_as(o) o = Sequel.lit('1') Sequel.expr(o).must_be_same_as(o) end it "Sequel.~ should invert the given object" do l(Sequel.~(nil), 'NOT NULL') l(Sequel.~(:a=>1), "(a != 1)") l(Sequel.~([[:a, 1]]), "(a != 1)") l(Sequel.~([[:a, 1], [:b, 2]]), "((a != 1) OR (b != 2))") l(Sequel.~(Sequel.expr([[:a, 1], [:b, 2]]) & nil), "((a != 1) OR (b != 2) OR NOT NULL)") end it "Sequel.case should use a CASE expression" do l(Sequel.case({:a=>1}, 2), "(CASE WHEN a THEN 1 ELSE 2 END)") l(Sequel.case({:a=>1}, 2, :b), "(CASE b WHEN a THEN 1 ELSE 2 END)") l(Sequel.case([[:a, 1]], 2), "(CASE WHEN a THEN 1 ELSE 2 END)") l(Sequel.case([[:a, 1]], 2, :b), "(CASE b WHEN a THEN 1 ELSE 2 END)") l(Sequel.case([[:a, 1], [:c, 3]], 2), "(CASE WHEN a THEN 1 WHEN c THEN 3 ELSE 2 END)") l(Sequel.case([[:a, 1], [:c, 3]], 2, :b), "(CASE b WHEN a THEN 1 WHEN c THEN 3 ELSE 2 END)") end it "Sequel.case should raise an error if not given a condition specifier" do proc{Sequel.case(1, 2)}.must_raise(Sequel::Error) end it "Sequel.value_list should use an SQL value list" do l(Sequel.value_list([[1, 2]]), "((1, 2))") end it "Sequel.value_list raise an error if not given an array" do proc{Sequel.value_list(1)}.must_raise(Sequel::Error) end it "Sequel.negate should negate all entries in conditions specifier and join with AND" do l(Sequel.negate(:a=>1), "(a != 1)") l(Sequel.negate([[:a, 1]]), "(a != 1)") l(Sequel.negate([[:a, 1], [:b, 2]]), "((a != 1) AND (b != 2))") end it "Sequel.negate should raise an error if not given a conditions specifier" do proc{Sequel.negate(1)}.must_raise(Sequel::Error) end it "Sequel.or should join all entries in conditions specifier with OR" do l(Sequel.or(:a=>1), "(a = 1)") l(Sequel.or([[:a, 1]]), "(a = 1)") l(Sequel.or([[:a, 1], [:b, 2]]), "((a = 1) OR (b = 2))") end it "Sequel.or should raise an error if not given a conditions specifier" do proc{Sequel.or(1)}.must_raise(Sequel::Error) end it "Sequel.join should should use SQL string concatenation to join array" do l(Sequel.join([]), "''") l(Sequel.join(['a']), "('a')") l(Sequel.join(['a', 'b']), "('a' || 'b')") l(Sequel.join(['a', 'b'], 'c'), "('a' || 'c' || 'b')") l(Sequel.join([true, :b], :c), "('t' || c || b)") l(Sequel.join([false, nil], Sequel.lit('c')), "('f' || c || NULL)") l(Sequel.join([Sequel.expr('a'), Sequel.lit('d')], 'c'), "('a' || 'c' || d)") end it "Sequel.join should raise an error if not given an array" do proc{Sequel.join(1)}.must_raise(Sequel::Error) end it "Sequel.& should join all arguments given with AND" do l(Sequel.&(:a), "a") l(Sequel.&(:a, :b=>:c), "(a AND (b = c))") l(Sequel.&(:a, {:b=>:c}, Sequel.lit('d')), "(a AND (b = c) AND d)") end it "Sequel.& should raise an error if given no arguments" do proc{Sequel.&}.must_raise(Sequel::Error) end it "Sequel.| should join all arguments given with OR" do l(Sequel.|(:a), "a") l(Sequel.|(:a, :b=>:c), "(a OR (b = c))") l(Sequel.|(:a, {:b=>:c}, Sequel.lit('d')), "(a OR (b = c) OR d)") end it "Sequel.| should raise an error if given no arguments" do proc{Sequel.|}.must_raise(Sequel::Error) end it "Sequel.as should return an aliased expression" do l(Sequel.as(:a, :b), "a AS b") end it "Sequel.cast should return a CAST 
expression" do l(Sequel.cast(:a, :int), "CAST(a AS int)") l(Sequel.cast(:a, Integer), "CAST(a AS integer)") end it "Sequel.cast_numeric should return a CAST expression treated as a number" do l(Sequel.cast_numeric(:a), "CAST(a AS integer)") l(Sequel.cast_numeric(:a, :int), "CAST(a AS int)") l(Sequel.cast_numeric(:a) << 2, "(CAST(a AS integer) << 2)") end it "Sequel.cast_string should return a CAST expression treated as a string" do l(Sequel.cast_string(:a), "CAST(a AS varchar(255))") l(Sequel.cast_string(:a, :text), "CAST(a AS text)") l(Sequel.cast_string(:a) + 'a', "(CAST(a AS varchar(255)) || 'a')") end it "Sequel.lit should return a literal string" do l(Sequel.lit('a'), "a") end it "Sequel.lit should return the argument if given a single literal string" do o = Sequel.lit('a') Sequel.lit(o).must_be_same_as(o) end it "Sequel.lit should accept multiple arguments for a placeholder literal string" do l(Sequel.lit('a = ?', 1), "a = 1") l(Sequel.lit('? = ?', :a, 1), "a = 1") l(Sequel.lit('a = :a', :a=>1), "a = 1") end it "Sequel.lit should work with an array for the placeholder string" do l(Sequel.lit(['a = '], 1), "a = 1") l(Sequel.lit(['', ' = '], :a, 1), "a = 1") end it "Sequel.blob should return an SQL::Blob" do l(Sequel.blob('a'), "'a'") Sequel.blob('a').must_be_kind_of(Sequel::SQL::Blob) end it "Sequel.blob should return the given argument if given a blob" do o = Sequel.blob('a') Sequel.blob(o).must_be_same_as(o) end it "Sequel.blob#inspect output should indicate it is a blob and the size" do o = Sequel.blob('a') o.inspect.must_equal "#<Sequel::SQL::Blob:0x#{'%x' % o.object_id} bytes=1 content=\"a\">" o = Sequel.blob(('a'..'z').to_a.join) o.inspect.must_equal "#<Sequel::SQL::Blob:0x#{'%x' % o.object_id} bytes=26 start=\"abcdefghij\" end=\"qrstuvwxyz\">" o = Sequel.blob(255.chr) o.inspect.must_equal "#<Sequel::SQL::Blob:0x#{'%x' % o.object_id} bytes=1 content=\"\\xFF\">" o = Sequel.blob((230..255).map(&:chr).join) o.inspect.must_equal "#<Sequel::SQL::Blob:0x#{'%x' % o.object_id} bytes=26 start=\"\\xE6\\xE7\\xE8\\xE9\\xEA\\xEB\\xEC\\xED\\xEE\\xEF\" end=\"\\xF6\\xF7\\xF8\\xF9\\xFA\\xFB\\xFC\\xFD\\xFE\\xFF\">" end it "Sequel.deep_qualify should do a deep qualification into nested structors" do l(Sequel.deep_qualify(:t, Sequel.+(:c, 1)), "(t.c + 1)") end it "Sequel.qualify should return a qualified identifier" do l(Sequel.qualify(:t, :c), "t.c") end it "Sequel::SQL::Identifier#[] should return a qualified identifier" do l(Sequel[:t][:c], "t.c") end it "Sequel::SQL::QualifiedIdentifier#[] should return a nested qualified identifier" do l(Sequel[:s][:t][:c], "s.t.c") end it "Sequel.identifier should return an identifier" do l(Sequel.identifier(:t__c), "t__c") end it "Sequel.asc should return an ASC ordered expression" do l(Sequel.asc(:a), "a ASC") l(Sequel.asc(:a, :nulls=>:first), "a ASC NULLS FIRST") end it "Sequel.desc should return a DESC ordered expression " do l(Sequel.desc(:a), "a DESC") l(Sequel.desc(:a, :nulls=>:last), "a DESC NULLS LAST") end it "Sequel.{+,-,*,/} should accept arguments and use the appropriate operator" do %w'+ - * /'.each do |op| l(Sequel.send(op, 1), '1') l(Sequel.send(op, 1, 2), "(1 #{op} 2)") l(Sequel.send(op, 1, 2, 3), "(1 #{op} 2 #{op} 3)") end end it "Sequel.{+,-,*,/} should raise if given no arguments" do %w'+ - * /'.each do |op| proc{Sequel.send(op)}.must_raise(Sequel::Error) end end it "Sequel.** should use power function if given 2 arguments" do l(Sequel.**(1, 2), 'power(1, 2)') end it "Sequel.** should raise if not given 2 arguments" do 
proc{Sequel.**}.must_raise(ArgumentError) proc{Sequel.**(1)}.must_raise(ArgumentError) proc{Sequel.**(1, 2, 3)}.must_raise(ArgumentError) end it "Sequel.like should use a LIKE expression" do l(Sequel.like('a', 'b'), "('a' LIKE 'b' ESCAPE '\\')") l(Sequel.like(:a, :b), "(a LIKE b ESCAPE '\\')") l(Sequel.like(:a, /b/), "(a ~ 'b')") l(Sequel.like(:a, 'c', /b/), "((a LIKE 'c' ESCAPE '\\') OR (a ~ 'b'))") end it "Sequel.ilike should use an ILIKE expression" do l(Sequel.ilike('a', 'b'), "(UPPER('a') LIKE UPPER('b') ESCAPE '\\')") l(Sequel.ilike(:a, :b), "(UPPER(a) LIKE UPPER(b) ESCAPE '\\')") l(Sequel.ilike(:a, /b/), "(a ~* 'b')") l(Sequel.ilike(:a, 'c', /b/), "((UPPER(a) LIKE UPPER('c') ESCAPE '\\') OR (a ~* 'b'))") end it "Sequel.subscript should use an SQL subscript" do l(Sequel.subscript(:a, 1), 'a[1]') l(Sequel.subscript(:a, 1, 2), 'a[1, 2]') l(Sequel.subscript(:a, [1, 2]), 'a[1, 2]') l(Sequel.subscript(:a, 1..2), 'a[1:2]') l(Sequel.subscript(:a, 1...3), 'a[1:2]') end it "Sequel.subscript.f should be subscript expression for backwards compatibility" do Sequel.subscript(:a, 1).f.must_equal :a end it "Sequel.function should return an SQL function" do l(Sequel.function(:a), 'a()') l(Sequel.function(:a, 1), 'a(1)') l(Sequel.function(:a, :b, 2), 'a(b, 2)') end it "Sequel.extract should use a date/time extraction" do l(Sequel.extract(:year, :a), 'extract(year FROM a)') end it "#* with no arguments should use a ColumnAll for Identifier and QualifiedIdentifier" do l(Sequel.expr(:a).*, 'a.*') l(Sequel[:a][:b].*, 'a.b.*') end it "SQL::Blob should be aliasable and castable by default" do b = Sequel.blob('a') l(b.as(:a), "'a' AS a") l(b.cast(Integer), "CAST('a' AS integer)") end it "SQL::Blob should be convertable to a literal string by default" do b = Sequel.blob('a ?') l(b.lit, "a ?") l(b.lit(1), "a 1") end end describe "Sequel::SQL::Function#==" do it "should be true for functions with the same name and arguments, false otherwise" do a = Sequel.function(:date, :t) b = Sequel.function(:date, :t) a.must_equal b (a == b).must_equal true c = Sequel.function(:date, :c) a.wont_equal c (a == c).must_equal false d = Sequel.function(:time, :c) a.wont_equal d c.wont_equal d (a == d).must_equal false (c == d).must_equal false end end describe "Sequel::SQL::OrderedExpression" do it "should #desc" do @oe = Sequel.asc(:column) @oe.descending.must_equal false @oe.desc.descending.must_equal true end it "should #asc" do @oe = Sequel.desc(:column) @oe.descending.must_equal true @oe.asc.descending.must_equal false end it "should #invert" do @oe = Sequel.desc(:column) @oe.invert.descending.must_equal false @oe.invert.invert.descending.must_equal true end end describe "Expression" do it "should consider objects == only if they have the same attributes" do Sequel.qualify(:table, :column).cast(:type).*(:numeric_column).asc.must_equal Sequel.qualify(:table, :column).cast(:type).*(:numeric_column).asc Sequel.qualify(:table, :other_column).cast(:type).*(:numeric_column).asc.wont_equal Sequel.qualify(:table, :column).cast(:type).*(:numeric_column).asc Sequel.qualify(:table, :column).cast(:type).*(:numeric_column).asc.must_equal(Sequel.qualify(:table, :column).cast(:type).*(:numeric_column).asc) Sequel.qualify(:table, :other_column).cast(:type).*(:numeric_column).asc.wont_equal(Sequel.qualify(:table, :column).cast(:type).*(:numeric_column).asc) end it "should use the same hash value for objects that have the same attributes" do Sequel.qualify(:table, :column).cast(:type).*(:numeric_column).asc.hash.must_equal 
Sequel.qualify(:table, :column).cast(:type).*(:numeric_column).asc.hash Sequel.qualify(:table, :other_column).cast(:type).*(:numeric_column).asc.hash.wont_equal Sequel.qualify(:table, :column).cast(:type).*(:numeric_column).asc.hash h = {} a = Sequel.qualify(:table, :column).cast(:type).*(:numeric_column).asc b = Sequel.qualify(:table, :column).cast(:type).*(:numeric_column).asc h[a] = 1 h[b] = 2 h[a].must_equal 2 h[b].must_equal 2 end end describe "Sequel::SQLTime" do before do @db = Sequel.mock end after do Sequel::application_timezone = Sequel::SQLTime.date = nil end it ".create should create from hour, minutes, seconds and optional microseconds" do @db.literal(Sequel::SQLTime.create(1, 2, 3)).must_equal "'01:02:03.000000'" @db.literal(Sequel::SQLTime.create(1, 2, 3, 500000)).must_equal "'01:02:03.500000'" end it ".create should use utc if that is the application_timezone setting" do Sequel::SQLTime.create(1, 2, 3).utc?.must_equal false Sequel::application_timezone = :local Sequel::SQLTime.create(1, 2, 3).utc?.must_equal false Sequel::application_timezone = :utc Sequel::SQLTime.create(1, 2, 3).utc?.must_equal true end it ".create should use today's date by default" do Sequel::SQLTime.create(1, 2, 3).strftime('%Y-%m-%d').must_equal Date.today.strftime('%Y-%m-%d') end it ".create should use specific date if set" do Sequel::SQLTime.date = Date.new(2000) Sequel::SQLTime.create(1, 2, 3).strftime('%Y-%m-%d').must_equal Date.new(2000).strftime('%Y-%m-%d') end it ".parse should respect SQLTime.date setting" do Sequel::SQLTime.date = Date.new(2000, 2, 3) Sequel::SQLTime.parse('10:11:12').strftime('%F').must_equal "2000-02-03" end it ".parse should respect application_timezone setting" do Sequel::application_timezone = :utc Sequel::SQLTime.parse('10:11:12').utc_offset.must_equal 0 end it "#inspect should show class and time by default" do Sequel::SQLTime.create(1, 2, 3).inspect.must_equal "#<Sequel::SQLTime 01:02:03>" Sequel::SQLTime.create(13, 24, 35).inspect.must_equal "#<Sequel::SQLTime 13:24:35>" end it "#to_s should include hour, minute, and second by default" do Sequel::SQLTime.create(1, 2, 3).to_s.must_equal "01:02:03" Sequel::SQLTime.create(1, 2, 3, 500000).to_s.must_equal "01:02:03" end it "#to_s should handle arguments with super" do t = Sequel::SQLTime.create(1, 2, 3) begin Time.now.to_s('%F') rescue proc{t.to_s('%F')}.must_raise ArgumentError else t.to_s('%F') end end end describe "Sequel::SQL::Wrapper" do before do @ds = Sequel.mock.dataset end it "should wrap objects so they can be used by the Sequel DSL" do o = Object.new def o.sql_literal(ds) 'foo' end s = Sequel::SQL::Wrapper.new(o) @ds.literal(s).must_equal "foo" @ds.literal(s+1).must_equal "(foo + 1)" @ds.literal(s**1).must_equal "power(foo, 1)" @ds.literal(s & true).must_equal "(foo AND 't')" @ds.literal(s < 1).must_equal "(foo < 1)" @ds.literal(s.sql_subscript(1)).must_equal "(foo)[1]" @ds.literal(s.like('a')).must_equal "(foo LIKE 'a' ESCAPE '\\')" @ds.literal(s.as(:a)).must_equal "foo AS a" @ds.literal(s.cast(Integer)).must_equal "CAST(foo AS integer)" @ds.literal(s.desc).must_equal "foo DESC" @ds.literal(s.sql_string + '1').must_equal "(foo || '1')" end end describe "Sequel::SQL::Blob" do it ".call should be an alias for .new" do Sequel::SQL::Blob.call('a').must_equal Sequel::SQL::Blob.new('a') end it "#to_sequel_blob should return self" do c = Sequel::SQL::Blob.new('a') c.to_sequel_blob.must_be_same_as(c) end end describe Sequel::SQL::Subscript do before do @s = Sequel::SQL::Subscript.new(:a, [1]) @ds =
Sequel.mock.dataset end it "should have | return a new non-nested subscript" do s = (@s | 2) @ds.literal(s).must_equal 'a[1, 2]' end it "should have [] return a new nested subscript" do s = @s[2] @ds.literal(s).must_equal 'a[1][2]' end it "should not wrap identifiers in parentheses" do @ds.literal(Sequel::SQL::Subscript.new(:a, [1])).must_equal 'a[1]' @ds.literal(Sequel::SQL::Subscript.new(Sequel[:a], [1])).must_equal 'a[1]' @ds.literal(Sequel::SQL::Subscript.new(Sequel[:a][:b], [1])).must_equal 'a.b[1]' end it "should wrap other expression types in parentheses" do @ds.literal(Sequel::SQL::Subscript.new(Sequel.function('a'), [1])).must_equal '(a())[1]' @ds.literal(Sequel::SQL::Subscript.new(Sequel.lit('a'), [1])).must_equal '(a)[1]' @ds.literal(Sequel::SQL::Subscript.new(Sequel.lit('a(?)', 2), [1])).must_equal '(a(2))[1]' end end describe Sequel::SQL::CaseExpression, "#with_merged_expression" do it "should return self if it has no expression" do c = Sequel.case({1=>0}, 3) c.with_merged_expression.must_be_same_as(c) end it "should merge expression into conditions if it has an expression" do db = Sequel::Database.new c = Sequel.case({1=>0}, 3, 4) db.literal(c.with_merged_expression).must_equal db.literal(Sequel.case({{4=>1}=>0}, 3)) end end describe "Sequel.recursive_map" do it "should recursively convert an array using a callable" do Sequel.recursive_map(['1'], proc{|s| s.to_i}).must_equal [1] Sequel.recursive_map([['1']], proc{|s| s.to_i}).must_equal [[1]] end it "should not call callable if value is nil" do Sequel.recursive_map([nil], proc{|s| s.to_i}).must_equal [nil] Sequel.recursive_map([[nil]], proc{|s| s.to_i}).must_equal [[nil]] end it "should call callable for falsey value" do Sequel.recursive_map([false], proc{|s| s.to_s}).must_equal ['false'] Sequel.recursive_map([[false]], proc{|s| s.to_s}).must_equal [['false']] end end describe "Sequel.delay" do before do @o = Class.new do def a @a ||= 0 @a += 1 end def _a @a if defined?(@a) end attr_accessor :b end.new end it "should delay calling the block until literalization" do ds = Sequel.mock[:b].where(:a=>Sequel.delay{@o.a}) @o._a.must_be_nil ds.sql.must_equal "SELECT * FROM b WHERE (a = 1)" @o._a.must_equal 1 ds.sql.must_equal "SELECT * FROM b WHERE (a = 2)" @o._a.must_equal 2 end it "should call the block with the current dataset if it accepts one argument" do ds = Sequel.mock[:b].where(Sequel.delay{|x| x.first_source}) ds.sql.must_equal "SELECT * FROM b WHERE b" ds.from(:c).sql.must_equal "SELECT * FROM c WHERE c" end it "should have the condition specifier handling respect delayed evaluations" do ds = Sequel.mock[:b].where(:a=>Sequel.delay{@o.b}) ds.sql.must_equal "SELECT * FROM b WHERE (a IS NULL)" @o.b = 1 ds.sql.must_equal "SELECT * FROM b WHERE (a = 1)" @o.b = [1, 2] ds.sql.must_equal "SELECT * FROM b WHERE (a IN (1, 2))" end it "should have the condition specifier handling call block with the current dataset if it accepts one argument" do ds = Sequel.mock[:b].where(:a=>Sequel.delay{|x| x.first_source}) ds.sql.must_equal "SELECT * FROM b WHERE (a = b)" ds.from(:c).sql.must_equal "SELECT * FROM c WHERE (a = c)" end it "should raise if called without a block" do proc{Sequel.delay}.must_raise(Sequel::Error) end end describe Sequel do before do Sequel::JSON = Class.new do self::ParserError = Sequel def self.parse(json, opts={}) [json, opts] end end end after do Sequel.send(:remove_const, :JSON) end it ".parse_json should parse json correctly" do Sequel.parse_json('[]').must_equal ['[]', {:create_additions=>false}] end it 
".json_parser_error_class should return the related parser error class" do Sequel.json_parser_error_class.must_equal Sequel end it ".object_to_json should return a json version of the object" do o = Object.new def o.to_json(*args); [1, args]; end Sequel.object_to_json(o, :foo).must_equal [1, [:foo]] end end describe "Sequel::LiteralString" do before do @s = Sequel::LiteralString.new("? = ?") end it "should have lit return self if no arguments" do @s.lit.must_be_same_as(@s) end it "should have lit return self if return a placeholder literal string if arguments" do @s.lit(1, 2).must_be_kind_of(Sequel::SQL::PlaceholderLiteralString) Sequel.mock.literal(@s.lit(1, :a)).must_equal '1 = a' end it "should have to_sequel_blob convert to blob" do @s.to_sequel_blob.must_equal @s @s.to_sequel_blob.must_be_kind_of(Sequel::SQL::Blob) end end describe "Sequel core extensions" do it "should have Sequel.core_extensions? be false by default" do Sequel.core_extensions?.must_equal false end end describe "Sequel.synchronize_with" do it "should yield if given nil" do Sequel.synchronize_with(nil){1}.must_equal 1 end it "should synchronize with given mutex" do m = Mutex.new Sequel.synchronize_with(m){1}.must_equal 1 proc do Sequel.synchronize_with(m) do Sequel.synchronize_with(m){} end end.must_raise ThreadError end end ��������������������������������������sequel-5.63.0/spec/core/mock_adapter_spec.rb��������������������������������������������������������0000664�0000000�0000000�00000121710�14342141206�0020754�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel Mock Adapter" do it "should have an adapter method" do db = Sequel.mock db.must_be_kind_of(Sequel::Mock::Database) db.adapter_scheme.must_equal :mock end it "should support registering mock adapter type" do begin Module.new do Sequel::Database.set_shared_adapter_scheme(:foo, self) def self.mock_adapter_setup(db) db.instance_variable_set(:@foo, :foo) end module self::DatabaseMethods def foo @foo end end module self::DatasetMethods def foo db.foo end end end Sequel.connect('mock://foo') do |db| db.foo.must_equal :foo db.dataset.foo.must_equal :foo end ensure Sequel.synchronize{Sequel::SHARED_ADAPTER_MAP.delete(:foo)} end end it "should have constructor accept no arguments" do Sequel.require 'adapters/mock' Sequel::Mock::Database.new.must_be_kind_of(Sequel::Mock::Database) Sequel.require 'mock', 'adapters' Sequel::Mock::Database.new.must_be_kind_of(Sequel::Mock::Database) end it "should each not return any rows by default" do called = false Sequel.mock[:t].each{|r| called = true} called.must_equal false end it "should return 0 for update/delete/with_sql_delete/execute_dui by default" do Sequel.mock[:t].update(:a=>1).must_equal 0 Sequel.mock[:t].delete.must_equal 0 Sequel.mock[:t].with_sql_delete('DELETE FROM t').must_equal 0 Sequel.mock.execute_dui('DELETE FROM t').must_equal 0 end it "should handle return results for Database#execute without use of dataset" do a = [] db = Sequel.mock(:fetch=>{:a=>1}) db.execute('SELECT 1') do |r| a << r end a.must_equal [{:a=>1}] db.sqls.must_equal ["SELECT 1"] end it "should handle empty columns when fetching" do db = Sequel.mock(:columns=>[]) ds = db[:t] ds.each do |r| raise 
end db.sqls.must_equal ["SELECT * FROM t"] ds.columns.must_equal [] end it "should return nil for insert/execute_insert by default" do Sequel.mock[:t].insert(:a=>1).must_be_nil Sequel.mock.execute_insert('INSERT INTO a () DEFAULT VALUES').must_be_nil end it "should be able to set the rows returned by each using :fetch option with a single hash" do rs = [] db = Sequel.mock(:fetch=>{:a=>1}) db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}] db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>1}] db[:t].each{|r| r[:a] = 2; rs << r} rs.must_equal [{:a=>1}, {:a=>1}, {:a=>2}] db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>1}, {:a=>2}, {:a=>1}] end it "should be able to set the rows returned by each using :fetch option with an array of hashes" do rs = [] db = Sequel.mock(:fetch=>[{:a=>1}, {:a=>2}]) db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>2}] db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>2}, {:a=>1}, {:a=>2}] db[:t].each{|r| r[:a] += 2; rs << r} rs.must_equal [{:a=>1}, {:a=>2}, {:a=>1}, {:a=>2}, {:a=>3}, {:a=>4}] db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>2}, {:a=>1}, {:a=>2}, {:a=>3}, {:a=>4}, {:a=>1}, {:a=>2}] end it "should be able to set the rows returned by each using :fetch option with an array of arrays of hashes" do rs = [] db = Sequel.mock(:fetch=>[[{:a=>1}, {:a=>2}], [{:a=>3}, {:a=>4}]]) db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>2}] db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>2}, {:a=>3}, {:a=>4}] db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>2}, {:a=>3}, {:a=>4}] end it "should be able to set the rows returned by each using :fetch option with a proc that takes sql" do rs = [] db = Sequel.mock(:fetch=>proc{|sql| sql =~ /FROM t/ ? {:b=>1} : [{:a=>1}, {:a=>2}]}) db[:t].each{|r| rs << r} rs.must_equal [{:b=>1}] db[:b].each{|r| rs << r} rs.must_equal [{:b=>1}, {:a=>1}, {:a=>2}] db[:t].each{|r| r[:b] += 1; rs << r} db[:b].each{|r| r[:a] += 2; rs << r} rs.must_equal [{:b=>1}, {:a=>1}, {:a=>2}, {:b=>2}, {:a=>3}, {:a=>4}] db[:t].each{|r| rs << r} db[:b].each{|r| rs << r} rs.must_equal [{:b=>1}, {:a=>1}, {:a=>2}, {:b=>2}, {:a=>3}, {:a=>4}, {:b=>1}, {:a=>1}, {:a=>2}] end it "should have a fetch= method for setting rows returned by each after the fact" do rs = [] db = Sequel.mock db.fetch = {:a=>1} db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}] db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}] * 2 end it "should be able to set an exception to raise by setting the :fetch option to an exception class " do db = Sequel.mock(:fetch=>ArgumentError) proc{db[:t].all}.must_raise(Sequel::DatabaseError) begin db[:t].all rescue => e end e.must_be_kind_of(Sequel::DatabaseError) e.wrapped_exception.must_be_kind_of(ArgumentError) end it "should be able to set separate kinds of results for fetch using an array" do rs = [] db = Sequel.mock(:fetch=>[{:a=>1}, [{:a=>2}, {:a=>3}], proc{|s| {:a=>4}}, proc{|s| }, nil, ArgumentError]) db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}] db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>2}, {:a=>3}] db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>2}, {:a=>3}, {:a=>4}] db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>2}, {:a=>3}, {:a=>4}] db[:t].each{|r| rs << r} rs.must_equal [{:a=>1}, {:a=>2}, {:a=>3}, {:a=>4}] proc{db[:t].all}.must_raise(Sequel::DatabaseError) end it "should be able to set the rows returned by each on a per dataset basis using with_fetch" do rs = [] db = Sequel.mock(:fetch=>{:a=>1}) ds = db[:t] ds.each{|r| rs << r} rs.must_equal [{:a=>1}] ds = ds.with_fetch(:b=>2)
ds.each{|r| rs << r} rs.must_equal [{:a=>1}, {:b=>2}] end it "should raise Error if given an invalid object to fetch" do proc{Sequel.mock(:fetch=>Class.new).get(:a)}.must_raise(Sequel::DatabaseError) proc{Sequel.mock(:fetch=>Object.new).get(:a)}.must_raise(Sequel::DatabaseError) end it "should be able to set the number of rows modified by update and delete using :numrows option as an integer" do db = Sequel.mock(:numrows=>2) db[:t].update(:a=>1).must_equal 2 db[:t].delete.must_equal 2 db[:t].update(:a=>1).must_equal 2 db[:t].delete.must_equal 2 end it "should be able to set the number of rows modified by update and delete using :numrows option as an array of integers" do db = Sequel.mock(:numrows=>[2, 1]) db[:t].update(:a=>1).must_equal 2 db[:t].delete.must_equal 1 db[:t].update(:a=>1).must_equal 0 db[:t].delete.must_equal 0 end it "should be able to set the number of rows modified by update and delete using :numrows option as a proc" do db = Sequel.mock(:numrows=>proc{|sql| sql =~ / t/ ? 2 : 1}) db[:t].update(:a=>1).must_equal 2 db[:t].delete.must_equal 2 db[:b].update(:a=>1).must_equal 1 db[:b].delete.must_equal 1 end it "should be able to set an exception to raise by setting the :numrows option to an exception class " do db = Sequel.mock(:numrows=>ArgumentError) proc{db[:t].update(:a=>1)}.must_raise(Sequel::DatabaseError) begin db[:t].delete rescue => e end e.must_be_kind_of(Sequel::DatabaseError) e.wrapped_exception.must_be_kind_of(ArgumentError) end it "should be able to set separate kinds of results for numrows using an array" do db = Sequel.mock(:numrows=>[1, proc{|s| 2}, nil, ArgumentError]) db[:t].delete.must_equal 1 db[:t].update(:a=>1).must_equal 2 db[:t].delete.must_equal 0 proc{db[:t].delete}.must_raise(Sequel::DatabaseError) end it "should have a numrows= method to set the number of rows modified by update and delete after the fact" do db = Sequel.mock db.numrows = 2 db[:t].update(:a=>1).must_equal 2 db[:t].delete.must_equal 2 db[:t].update(:a=>1).must_equal 2 db[:t].delete.must_equal 2 end it "should be able to set the number of rows modified by update and delete on a per dataset basis" do db = Sequel.mock(:numrows=>2) ds = db[:t] ds.update(:a=>1).must_equal 2 ds.delete.must_equal 2 ds = ds.with_numrows(3) ds.update(:a=>1).must_equal 3 ds.delete.must_equal 3 end it "should raise Error if given an invalid object for numrows or autoid" do proc{Sequel.mock(:numrows=>Class.new)[:a].delete}.must_raise(Sequel::DatabaseError) proc{Sequel.mock(:numrows=>Object.new)[:a].delete}.must_raise(Sequel::DatabaseError) proc{Sequel.mock(:autoid=>Class.new)[:a].insert}.must_raise(Sequel::DatabaseError) proc{Sequel.mock(:autoid=>Object.new)[:a].insert}.must_raise(Sequel::DatabaseError) end it "should be able to set the autogenerated primary key returned by insert using :autoid option as an integer" do db = Sequel.mock(:autoid=>1) db[:t].insert(:a=>1).must_equal 1 db[:t].insert(:a=>1).must_equal 2 db[:t].insert(:a=>1).must_equal 3 end it "should be able to set the autogenerated primary key returned by insert using :autoid option as an array of integers" do db = Sequel.mock(:autoid=>[1, 3, 5]) db[:t].insert(:a=>1).must_equal 1 db[:t].insert(:a=>1).must_equal 3 db[:t].insert(:a=>1).must_equal 5 db[:t].insert(:a=>1).must_be_nil end it "should be able to set the autogenerated primary key returned by insert using :autoid option as a proc" do db = Sequel.mock(:autoid=>proc{|sql| sql =~ /INTO t / ? 
2 : 1}) db[:t].insert(:a=>1).must_equal 2 db[:t].insert(:a=>1).must_equal 2 db[:b].insert(:a=>1).must_equal 1 db[:b].insert(:a=>1).must_equal 1 end it "should be able to set an exception to raise by setting the :autoid option to an exception class " do db = Sequel.mock(:autoid=>ArgumentError) proc{db[:t].insert(:a=>1)}.must_raise(Sequel::DatabaseError) begin db[:t].insert rescue => e end e.must_be_kind_of(Sequel::DatabaseError) e.wrapped_exception.must_be_kind_of(ArgumentError) end it "should be able to set separate kinds of results for autoid using an array" do db = Sequel.mock(:autoid=>[1, proc{|s| 2}, nil, ArgumentError]) db[:t].insert.must_equal 1 db[:t].insert.must_equal 2 db[:t].insert.must_be_nil proc{db[:t].insert}.must_raise(Sequel::DatabaseError) end it "should have an autoid= method to set the autogenerated primary key returned by insert after the fact" do db = Sequel.mock db.autoid = 1 db[:t].insert(:a=>1).must_equal 1 db[:t].insert(:a=>1).must_equal 2 db[:t].insert(:a=>1).must_equal 3 end it "should be able to set the autogenerated primary key returned by insert on a per dataset basis" do db = Sequel.mock(:autoid=>1) ds = db[:t] ds.insert(:a=>1).must_equal 1 ds = ds.with_autoid(5) ds.insert(:a=>1).must_equal 5 ds.insert(:a=>1).must_equal 6 db[:t].insert(:a=>1).must_equal 2 end it "should be able to set the columns to set in the dataset as an array of symbols" do db = Sequel.mock(:columns=>[:a, :b]) db[:t].columns.must_equal [:a, :b] db.sqls.must_equal ["SELECT * FROM t LIMIT 0"] ds = db[:t] ds.all db.sqls.must_equal ["SELECT * FROM t"] ds.columns.must_equal [:a, :b] db.sqls.must_equal [] db[:t].columns.must_equal [:a, :b] end it "should be able to set the columns to set in the dataset as an array of arrays of symbols" do db = Sequel.mock(:columns=>[[:a, :b], [:c, :d]]) db[:t].columns.must_equal [:a, :b] db[:x].columns.must_equal [:c, :d] end it "should be able to set the columns to set in the dataset as a proc" do db = Sequel.mock(:columns=>proc{|sql| (sql =~ / t/) ? 
[:a, :b] : [:c, :d]}) db[:b].columns.must_equal [:c, :d] db[:t].columns.must_equal [:a, :b] end it "should have a columns= method to set the columns to set after the fact" do db = Sequel.mock db.columns = [[:a, :b], [:c, :d]] db[:t].columns.must_equal [:a, :b] db[:x].columns.must_equal [:c, :d] end it "should raise Error if given invalid columns" do proc{Sequel.mock(:columns=>Object.new)[:a].columns}.must_raise(Sequel::DatabaseError) end it "should not quote identifiers by default" do Sequel.mock.send(:quote_identifiers_default).must_equal false end it "should allow overriding of server_version" do db = Sequel.mock db.server_version.must_be_nil db.server_version = 80102 db.server_version.must_equal 80102 end it "should not fold to uppercase by default" do Sequel.mock.send(:folds_unquoted_identifiers_to_uppercase?).must_equal false end it "should keep a record of all executed SQL in #sqls" do db = Sequel.mock db[:t].all db[:b].delete db[:c].insert(:a=>1) db[:d].update(:a=>1) db.sqls.must_equal ['SELECT * FROM t', 'DELETE FROM b', 'INSERT INTO c (a) VALUES (1)', 'UPDATE d SET a = 1'] end it "should clear sqls on retrieval" do db = Sequel.mock db[:t].all db.sqls.must_equal ['SELECT * FROM t'] db.sqls.must_equal [] end it "should also log SQL executed to the given loggers" do a = [] def a.method_missing(m, *x) push(*x) end db = Sequel.mock(:loggers=>[a]) db[:t].all db[:b].delete db[:c].insert(:a=>1) db[:d].update(:a=>1) a.zip(['SELECT * FROM t', 'DELETE FROM b', 'INSERT INTO c (a) VALUES (1)', 'UPDATE d SET a = 1']).each do |is, should| is.must_match should end end it "should correctly handle transactions" do db = Sequel.mock db.transaction{db[:a].all} db.sqls.must_equal ['BEGIN', 'SELECT * FROM a', 'COMMIT'] db.transaction{db[:a].all; raise Sequel::Rollback} db.sqls.must_equal ['BEGIN', 'SELECT * FROM a', 'ROLLBACK'] proc{db.transaction{db[:a].all; raise ArgumentError}}.must_raise(ArgumentError) db.sqls.must_equal ['BEGIN', 'SELECT * FROM a', 'ROLLBACK'] proc{db.transaction(:rollback=>:reraise){db[:a].all; raise Sequel::Rollback}}.must_raise(Sequel::Rollback) db.sqls.must_equal ['BEGIN', 'SELECT * FROM a', 'ROLLBACK'] db.transaction(:rollback=>:always){db[:a].all} db.sqls.must_equal ['BEGIN', 'SELECT * FROM a', 'ROLLBACK'] db.transaction{db.transaction{db[:a].all; raise Sequel::Rollback}} db.sqls.must_equal ['BEGIN', 'SELECT * FROM a', 'ROLLBACK'] db.transaction{db.transaction(:savepoint=>true){db[:a].all; raise Sequel::Rollback}} db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'SELECT * FROM a', 'ROLLBACK TO SAVEPOINT autopoint_1', 'COMMIT'] db.transaction{db.transaction(:savepoint=>true){db[:a].all}; raise Sequel::Rollback} db.sqls.must_equal ['BEGIN', 'SAVEPOINT autopoint_1', 'SELECT * FROM a', 'RELEASE SAVEPOINT autopoint_1', 'ROLLBACK'] end it "should correctly handle transactions when sharding" do db = Sequel.mock(:servers=>{:test=>{}, 'test'=>{}}) db.transaction{db.transaction(:server=>:test){db[:a].all; db[:t].server(:test).all}} db.sqls.must_equal ['BEGIN', 'BEGIN -- test', 'SELECT * FROM a', 'SELECT * FROM t -- test', 'COMMIT -- test', 'COMMIT'] db.transaction{db.transaction(:server=>'test'){db[:a].all; db[:t].server("test").all}} db.sqls.must_equal ['BEGIN', 'BEGIN -- "test"', 'SELECT * FROM a', 'SELECT * FROM t -- "test"', 'COMMIT -- "test"', 'COMMIT'] end it "should yield a mock connection object from synchronize" do c = Sequel.mock.synchronize{|conn| conn} c.must_be_kind_of(Sequel::Mock::Connection) end it "should deal correctly with sharding" do db =
Sequel.mock(:servers=>{:test=>{}}) c1 = db.synchronize{|conn| conn} c2 = db.synchronize(:test){|conn| conn} c1.server.must_equal :default c2.server.must_equal :test end it "should disconnect correctly" do db = Sequel.mock db.test_connection db.disconnect end it "should accept :extend option for extending the object with a module" do Sequel.mock(:extend=>Module.new{def foo(v) v * 2 end}).foo(3).must_equal 6 end it "should accept :sqls option for where to store the SQL queries" do a = [] Sequel.mock(:sqls=>a)[:t].all a.must_equal ['SELECT * FROM t'] end it "should include :append option in SQL if it is given" do db = Sequel.mock(:append=>'a') db[:t].all db.sqls.must_equal ['SELECT * FROM t -- a'] end it "should append :arguments option to execute to the SQL if present" do db = Sequel.mock db.execute('SELECT * FROM t', :arguments=>[1, 2]) db.sqls.must_equal ['SELECT * FROM t -- args: [1, 2]'] end it "should have Dataset#columns take columns to set and return self" do db = Sequel.mock ds = db[:t].columns(:id, :a, :b) ds.must_be_kind_of(Sequel::Mock::Dataset) ds.columns.must_equal [:id, :a, :b] end it "should be able to load dialects based on the database name" do Sequel.mock(:host=>'access').select(Date.new(2011, 12, 13)).sql.must_equal 'SELECT #2011-12-13#' Sequel.mock(:host=>'db2').select(1).sql.must_equal 'SELECT 1 FROM "SYSIBM"."SYSDUMMY1"' Sequel.mock(:host=>'mssql')[:A].full_text_search(:B, 'C').with_quote_identifiers(false).sql.must_equal "SELECT * FROM A WHERE (CONTAINS (B, 'C'))" Sequel.mock(:host=>'mysql')[:a].full_text_search(:b, 'c').with_quote_identifiers(false).sql.must_equal "SELECT * FROM a WHERE (MATCH (b) AGAINST ('c'))" Sequel.mock(:host=>'oracle')[:a].limit(1).with_quote_identifiers(false).sql.upcase.must_equal 'SELECT * FROM (SELECT * FROM A) T1 WHERE (ROWNUM <= 1)' Sequel.mock(:host=>'postgres')[:a].full_text_search(:b, 'c').with_quote_identifiers(false).sql.must_equal "SELECT * FROM a WHERE (to_tsvector(CAST('simple' AS regconfig), (COALESCE(b, ''))) @@ to_tsquery(CAST('simple' AS regconfig), 'c'))" Sequel.mock(:host=>'sqlanywhere').from(:A).offset(1).with_quote_identifiers(false).sql.must_equal 'SELECT TOP 2147483647 START AT (1 + 1) * FROM A' Sequel.mock(:host=>'sqlite')[Sequel[:a].as(:b)].with_quote_identifiers(false).sql.must_equal "SELECT * FROM a AS 'b'" end it "should be able to mock schema calls" do Sequel.mock(:host=>'mysql', :fetch=>{:Field=>'a'}).schema(:a).first.first.must_equal :a end it "should automatically set version for adapters needing versions" do Sequel.mock(:host=>'postgres').server_version.must_be :>=, 90400 Sequel.mock(:host=>'mssql').server_version.must_be :>=, 11000000 Sequel.mock(:host=>'mysql').server_version.must_be :>=, 50617 Sequel.mock(:host=>'sqlite').sqlite_version.must_be :>=, 30804 Sequel.mock(:host=>'oracle').server_version.must_be :>=, 11000000 end it "should stub out the primary_key method for postgres" do Sequel.mock(:host=>'postgres').primary_key(:t).must_equal :id end it "should stub out the bound_variable_arg method for postgres" do Sequel.mock(:host=>'postgres').bound_variable_arg(:t, nil).must_equal :t end it "should handle creating tables on oracle" do Sequel.mock(:host=>'oracle').create_table(:a){String :b} end end describe "PostgreSQL support" do before do @db = Sequel.mock(:host=>'postgres') end it "should support savepoints" do @db.transaction{@db.transaction(:savepoint=>true){}} @db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "RELEASE SAVEPOINT autopoint_1", "COMMIT"] end it "should create an unlogged table" do 
@db.create_table(:unlogged_dolls, :unlogged => true){text :name} @db.sqls.must_equal ['CREATE UNLOGGED TABLE "unlogged_dolls" ("name" text)'] end it "should support spatial indexes" do @db.alter_table(:posts){add_spatial_index [:geom]} @db.sqls.must_equal ['CREATE INDEX "posts_geom_index" ON "posts" USING gist ("geom")'] end it "should support indexes with index type" do @db.alter_table(:posts){add_index :p, :type => 'gist'} @db.sqls.must_equal ['CREATE INDEX "posts_p_index" ON "posts" USING gist ("p")'] end it "should have #transaction support various types of synchronous options" do @db.transaction(:synchronous=>:on){} @db.transaction(:synchronous=>true){} @db.transaction(:synchronous=>:off){} @db.transaction(:synchronous=>false){} @db.sqls.grep(/synchronous/).must_equal ["SET LOCAL synchronous_commit = on", "SET LOCAL synchronous_commit = on", "SET LOCAL synchronous_commit = off", "SET LOCAL synchronous_commit = off"] @db.transaction(:synchronous=>nil){} @db.sqls.must_equal ['BEGIN', 'COMMIT'] if @db.server_version >= 90100 @db.transaction(:synchronous=>:local){} @db.sqls.grep(/synchronous/).must_equal ["SET LOCAL synchronous_commit = local"] if @db.server_version >= 90200 @db.transaction(:synchronous=>:remote_write){} @db.sqls.grep(/synchronous/).must_equal ["SET LOCAL synchronous_commit = remote_write"] end end end it "should have #transaction support read only transactions" do @db.transaction(:read_only=>true){} @db.transaction(:read_only=>false){} @db.transaction(:isolation=>:serializable, :read_only=>true){} @db.transaction(:isolation=>:serializable, :read_only=>false){} @db.sqls.grep(/READ/).must_equal ["SET TRANSACTION READ ONLY", "SET TRANSACTION READ WRITE", "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ ONLY", "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ WRITE"] end it "should have #transaction support deferrable transactions" do @db.transaction(:deferrable=>true){} @db.transaction(:deferrable=>false){} @db.transaction(:deferrable=>true, :read_only=>true){} @db.transaction(:deferrable=>false, :read_only=>false){} @db.transaction(:isolation=>:serializable, :deferrable=>true, :read_only=>true){} @db.transaction(:isolation=>:serializable, :deferrable=>false, :read_only=>false){} @db.sqls.grep(/DEF/).must_equal ["SET TRANSACTION DEFERRABLE", "SET TRANSACTION NOT DEFERRABLE", "SET TRANSACTION READ ONLY DEFERRABLE", "SET TRANSACTION READ WRITE NOT DEFERRABLE", "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ ONLY DEFERRABLE", "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ WRITE NOT DEFERRABLE"] end it "should support creating indexes concurrently" do @db.add_index :test, [:name, :value], :concurrently=>true @db.sqls.must_equal ['CREATE INDEX CONCURRENTLY "test_name_value_index" ON "test" ("name", "value")'] end it "should support dropping indexes concurrently" do @db.drop_index :test, [:name, :value], :concurrently=>true, :name=>'tnv2' @db.sqls.must_equal ['DROP INDEX CONCURRENTLY "tnv2"'] end it "should use INSERT RETURNING for inserts" do @db[:test5].insert(:value=>10) @db.sqls.must_equal ['INSERT INTO "test5" ("value") VALUES (10) RETURNING "id"'] end it "should support opclass specification" do @db.alter_table(:posts){add_index(:user_id, :opclass => :int4_ops, :type => :btree)} @db.sqls.must_equal ['CREATE INDEX "posts_user_id_index" ON "posts" USING btree ("user_id" int4_ops)'] end it 'should quote NaN' do nan = 0.0/0.0 @db[:test5].insert_sql(:value => nan).must_equal %q{INSERT INTO "test5" ("value") VALUES ('NaN')} end it 'should quote +Infinity' do inf = 
1.0/0.0 @db[:test5].insert_sql(:value => inf).must_equal %q{INSERT INTO "test5" ("value") VALUES ('Infinity')} end it 'should quote -Infinity' do inf = -1.0/0.0 @db[:test5].insert_sql(:value => inf).must_equal %q{INSERT INTO "test5" ("value") VALUES ('-Infinity')} end it "Dataset#insert_conflict should respect expressions in the target argument" do @ds = @db[:ic_test] @ds.insert_conflict(:target=>:a).insert_sql(1, 2, 3).must_equal "INSERT INTO \"ic_test\" VALUES (1, 2, 3) ON CONFLICT (\"a\") DO NOTHING" @ds.insert_conflict(:target=>:c, :conflict_where=>{:c_is_unique=>true}).insert_sql(1, 2, 3).must_equal "INSERT INTO \"ic_test\" VALUES (1, 2, 3) ON CONFLICT (\"c\") WHERE (\"c_is_unique\" IS TRUE) DO NOTHING" @ds.insert_conflict(:target=>[:b, :c]).insert_sql(1, 2, 3).must_equal "INSERT INTO \"ic_test\" VALUES (1, 2, 3) ON CONFLICT (\"b\", \"c\") DO NOTHING" @ds.insert_conflict(:target=>[:b, Sequel.function(:round, :c)]).insert_sql(1, 2, 3).must_equal "INSERT INTO \"ic_test\" VALUES (1, 2, 3) ON CONFLICT (\"b\", round(\"c\")) DO NOTHING" @ds.insert_conflict(:target=>[:b, Sequel.virtual_row{|o| o.round(:c)}]).insert_sql(1, 2, 3).must_equal "INSERT INTO \"ic_test\" VALUES (1, 2, 3) ON CONFLICT (\"b\", round(\"c\")) DO NOTHING" end it "should support creating and dropping foreign tables" do @db.create_table(:t, :foreign=>:f, :options=>{:o=>1}){Integer :a} @db.drop_table(:t, :foreign=>true) @db.sqls.must_equal ['CREATE FOREIGN TABLE "t" ("a" integer) SERVER "f" OPTIONS (o \'1\')', 'DROP FOREIGN TABLE "t"'] @db.create_table(:t, :foreign=>:f){Integer :a} @db.sqls.must_equal ['CREATE FOREIGN TABLE "t" ("a" integer) SERVER "f"'] end it "should not support creating temporary foreign tables" do proc{@db.create_table(:t, :foreign=>:f, :temp=>true){Integer :a}}.must_raise Sequel::Error end it "should not support creating unlogged foreign tables" do proc{@db.create_table(:t, :foreign=>:f, :unlogged=>true){Integer :a}}.must_raise Sequel::Error end it "#create_function and #drop_function should create and drop functions" do @db.create_function('tf', 'SELECT 1', :returns=>:integer) @db.drop_function('tf') @db.sqls.map{|s| s.gsub(/\s+/, ' ').strip}.must_equal ["CREATE FUNCTION tf() RETURNS integer LANGUAGE SQL AS 'SELECT 1'", 'DROP FUNCTION tf()'] end it "#create_function and #drop_function should support options" do @db.create_function('tf', 'SELECT $1 + $2', :args=>[[:integer, :a], :integer], :replace=>true, :returns=>:integer, :language=>'SQL', :behavior=>:immutable, :strict=>true, :security_definer=>true, :cost=>2, :set=>{:search_path => 'public'}) @db.drop_function('tf', :if_exists=>true, :cascade=>true, :args=>[[:integer, :a], :integer]) @db.sqls.map{|s| s.gsub(/\s+/, ' ').strip}.must_equal ["CREATE OR REPLACE FUNCTION tf(a integer, integer) RETURNS integer LANGUAGE SQL IMMUTABLE STRICT SECURITY DEFINER COST 2 SET search_path = public AS 'SELECT $1 + $2'", 'DROP FUNCTION IF EXISTS tf(a integer, integer) CASCADE'] @db.create_function('tf', 'myfunc.so', :language=>'SQL', :rows=>20, :link_symbol=>:ls) @db.sqls.map{|s| s.gsub(/\s+/, ' ').strip}.must_equal ["CREATE FUNCTION tf() RETURNS void LANGUAGE SQL ROWS 20 AS 'myfunc.so', 'ls'"] @db.create_function('tf', 'SELECT $1 + $2', :args=>[[:integer, :a, 'OUT']], :language=>'SQL') @db.sqls.map{|s| s.gsub(/\s+/, ' ').strip}.must_equal ["CREATE FUNCTION tf(OUT a integer) LANGUAGE SQL AS 'SELECT $1 + $2'"] end it "#create_language and #drop_language should create and drop languages" do @db.create_language(:plpgsql) @db.create_language(:plpgsql, :replace=>true, 
:trusted=>true, :handler=>:a, :validator=>:b) @db.drop_language(:plpgsql) @db.drop_language(:plpgsql, :if_exists=>true, :cascade=>true) @db.sqls.map{|s| s.gsub(/\s+/, ' ').strip}.must_equal ['CREATE LANGUAGE plpgsql', 'CREATE OR REPLACE TRUSTED LANGUAGE plpgsql HANDLER a VALIDATOR b', 'DROP LANGUAGE plpgsql', 'DROP LANGUAGE IF EXISTS plpgsql CASCADE'] end it "#create_schema and #drop_schema should create and drop schemas" do @db.create_schema(:sequel) @db.create_schema(:sequel, :if_not_exists=>true, :owner=>:foo) @db.drop_schema(:sequel) @db.drop_schema(:sequel, :if_exists=>true, :cascade=>true) @db.sqls.must_equal ['CREATE SCHEMA "sequel"', 'CREATE SCHEMA IF NOT EXISTS "sequel" AUTHORIZATION "foo"', 'DROP SCHEMA "sequel"', 'DROP SCHEMA IF EXISTS "sequel" CASCADE'] end it "#create_trigger and #drop_trigger should create and drop triggers" do @db.create_trigger(:test, :identity, :tf, :each_row=>true) @db.create_trigger(:test, :identity, :tf, :after=>true, :events=>:insert, :args=>[1, 'a']) @db.create_trigger(:test, :identity, :tf, :each_row=>true, :when=> {Sequel[:new][:name] => 'b'}) @db.drop_trigger(:test, :identity) @db.drop_trigger(:test, :identity, :if_exists=>true, :cascade=>true) @db.sqls.map{|s| s.gsub(/\s+/, ' ').strip}.must_equal ['CREATE TRIGGER identity BEFORE INSERT OR UPDATE OR DELETE ON "test" FOR EACH ROW EXECUTE PROCEDURE tf()', 'CREATE TRIGGER identity AFTER INSERT ON "test" EXECUTE PROCEDURE tf(1, \'a\')', %q{CREATE TRIGGER identity BEFORE INSERT OR UPDATE OR DELETE ON "test" FOR EACH ROW WHEN ("new"."name" = 'b') EXECUTE PROCEDURE tf()}, 'DROP TRIGGER identity ON "test"', 'DROP TRIGGER IF EXISTS identity ON "test" CASCADE'] end it "#create_trigger should raise exception if using trigger conditions on an unsupported version" do def @db.server_version; 80400 end proc{@db.create_trigger(:test, :identity, :tf, :when=>true)}.must_raise Sequel::Error end it "should attempt to remove aliases when inserting on PostgreSQL <9.5" do def @db.server_version(*); 90400 end @db.from{a.as(:b)}.returning.insert @db.sqls.must_equal ["INSERT INTO \"a\" DEFAULT VALUES RETURNING *"] end it "should support adding columns with COLLATE" do @db.alter_table(:t){add_column :x, String, :collate=>'s'} @db.sqls.must_equal ["ALTER TABLE \"t\" ADD COLUMN \"x\" text COLLATE s"] @db.alter_table(:t){add_column :x, String, :collate=>:s} @db.sqls.must_equal ["ALTER TABLE \"t\" ADD COLUMN \"x\" text COLLATE \"s\""] end it "should support dropping columns with CASCADE and IF EXISTS" do @db.alter_table(:t){drop_column :x, :cascade=>true, :if_exists=>true} @db.sqls.must_equal ["ALTER TABLE \"t\" DROP COLUMN IF EXISTS \"x\" CASCADE"] end it "should support transaction isolation levels" do @db.supports_transaction_isolation_levels?.must_equal true end it "should not support arbitrary window function frame options" do @db.dataset.supports_window_function_frame_option?(:bad).must_equal false end it "should support getting server version from specific server" do begin version = @db.server_version @db.instance_variable_set(:@server_version, nil) @db.fetch = {:v=>version} @db.server_version(:default) @db.sqls.must_equal ["SELECT CAST(current_setting('server_version_num') AS integer) AS v"] ensure @db.instance_variable_set(:@server_version, version) end end it "should support bytea unescaping" do bytea = @db.conversion_procs[17] bytea.call('\\x70').ord.must_equal 112 bytea.call('\\070').ord.must_equal 56 bytea.call('\\\\').ord.must_equal 92 end it "should raise for integers outside of PostgreSQL bigint range" do 
proc{@db.literal(2**63)}.must_raise Sequel::InvalidValue end it "should support converting serial columns to identity" do @db.fetch= [[{:v=>'on'}], [{:attnum=>1}], [{:objid=>2345, :v=>false}]] def @db.regclass_oid(x) 1234 end def @db.schema(x) [[:id, {:primary_key=>true, :auto_increment=>true}]] end @db.convert_serial_to_identity(:table).must_be_nil @db.sqls.must_equal [ "SELECT current_setting('is_superuser') LIMIT 1", "SELECT \"attnum\" FROM \"pg_attribute\" WHERE ((\"attrelid\" = 1234) AND (\"attname\" = 'id')) LIMIT 1", "SELECT \"objid\", (\"deptype\" = 'i') AS \"v\" FROM \"pg_depend\" WHERE ((\"refclassid\" = CAST('pg_class' AS regclass)) AND (\"refobjid\" = 1234) AND (\"refobjsubid\" = 1) AND (\"classid\" = CAST('pg_class' AS regclass)) AND (\"objsubid\" = 0) AND (\"deptype\" IN ('a', 'i')))", "BEGIN", "ALTER TABLE \"table\" ALTER COLUMN \"id\" DROP DEFAULT", "UPDATE \"pg_depend\" SET \"deptype\" = 'i' WHERE ((\"classid\" = CAST('pg_class' AS regclass)) AND (\"objid\" = 2345) AND (\"objsubid\" = 0) AND (\"deptype\" = 'a'))", "UPDATE \"pg_attribute\" SET \"attidentity\" = 'd' WHERE ((\"attrelid\" = 1234) AND (\"attname\" = 'id'))", "COMMIT" ] end it "should support :column option when converting serial columns to identity" do @db.fetch= [[{:v=>'on'}], [{:attnum=>1}], [{:objid=>2345, :v=>false}]] def @db.regclass_oid(x) 1234 end @db.convert_serial_to_identity(:table, :column=>'id').must_be_nil @db.sqls.must_equal [ "SELECT current_setting('is_superuser') LIMIT 1", "SELECT \"attnum\" FROM \"pg_attribute\" WHERE ((\"attrelid\" = 1234) AND (\"attname\" = 'id')) LIMIT 1", "SELECT \"objid\", (\"deptype\" = 'i') AS \"v\" FROM \"pg_depend\" WHERE ((\"refclassid\" = CAST('pg_class' AS regclass)) AND (\"refobjid\" = 1234) AND (\"refobjsubid\" = 1) AND (\"classid\" = CAST('pg_class' AS regclass)) AND (\"objsubid\" = 0) AND (\"deptype\" IN ('a', 'i')))", "BEGIN", "ALTER TABLE \"table\" ALTER COLUMN \"id\" DROP DEFAULT", "UPDATE \"pg_depend\" SET \"deptype\" = 'i' WHERE ((\"classid\" = CAST('pg_class' AS regclass)) AND (\"objid\" = 2345) AND (\"objsubid\" = 0) AND (\"deptype\" = 'a'))", "UPDATE \"pg_attribute\" SET \"attidentity\" = 'd' WHERE ((\"attrelid\" = 1234) AND (\"attname\" = 'id'))", "COMMIT" ] end it "should support :server option converting serial columns to identity" do @db.fetch= [[{:v=>'on'}], [{:attnum=>1}], [{:objid=>2345, :v=>false}]] def @db.regclass_oid(x) 1234 end @db.convert_serial_to_identity(:table, :column=>'id', :server=>:default).must_be_nil @db.sqls.must_equal [ "SELECT current_setting('is_superuser') LIMIT 1", "SELECT \"attnum\" FROM \"pg_attribute\" WHERE ((\"attrelid\" = 1234) AND (\"attname\" = 'id')) LIMIT 1", "SELECT \"objid\", (\"deptype\" = 'i') AS \"v\" FROM \"pg_depend\" WHERE ((\"refclassid\" = CAST('pg_class' AS regclass)) AND (\"refobjid\" = 1234) AND (\"refobjsubid\" = 1) AND (\"classid\" = CAST('pg_class' AS regclass)) AND (\"objsubid\" = 0) AND (\"deptype\" IN ('a', 'i')))", "BEGIN", "ALTER TABLE \"table\" ALTER COLUMN \"id\" DROP DEFAULT", "UPDATE \"pg_depend\" SET \"deptype\" = 'i' WHERE ((\"classid\" = CAST('pg_class' AS regclass)) AND (\"objid\" = 2345) AND (\"objsubid\" = 0) AND (\"deptype\" = 'a'))", "UPDATE \"pg_attribute\" SET \"attidentity\" = 'd' WHERE ((\"attrelid\" = 1234) AND (\"attname\" = 'id'))", "COMMIT" ] end it "should handle case where column is already identity when converting serial columns to identity" do @db.fetch= [[{:v=>'on'}], [{:attnum=>1}], [{:objid=>2345, :v=>true}]] def @db.regclass_oid(x) 1234 end 
@db.convert_serial_to_identity(:table, :column=>'id').must_be_nil @db.sqls.must_equal [ "SELECT current_setting('is_superuser') LIMIT 1", "SELECT \"attnum\" FROM \"pg_attribute\" WHERE ((\"attrelid\" = 1234) AND (\"attname\" = 'id')) LIMIT 1", "SELECT \"objid\", (\"deptype\" = 'i') AS \"v\" FROM \"pg_depend\" WHERE ((\"refclassid\" = CAST('pg_class' AS regclass)) AND (\"refobjid\" = 1234) AND (\"refobjsubid\" = 1) AND (\"classid\" = CAST('pg_class' AS regclass)) AND (\"objsubid\" = 0) AND (\"deptype\" IN ('a', 'i')))", ] end it "should handle case where multiple serial columns exist when converting serial columns to identity" do @db.fetch= [[{:v=>'on'}], [{:attnum=>1}], [{:objid=>2345, :v=>false}, {:objid=>3456, :v=>false}]] def @db.regclass_oid(x) 1234 end proc{@db.convert_serial_to_identity(:table, :column=>'id')}.must_raise Sequel::Error @db.sqls.must_equal [ "SELECT current_setting('is_superuser') LIMIT 1", "SELECT \"attnum\" FROM \"pg_attribute\" WHERE ((\"attrelid\" = 1234) AND (\"attname\" = 'id')) LIMIT 1", "SELECT \"objid\", (\"deptype\" = 'i') AS \"v\" FROM \"pg_depend\" WHERE ((\"refclassid\" = CAST('pg_class' AS regclass)) AND (\"refobjid\" = 1234) AND (\"refobjsubid\" = 1) AND (\"classid\" = CAST('pg_class' AS regclass)) AND (\"objsubid\" = 0) AND (\"deptype\" IN ('a', 'i')))", ] end it "should handle case where no serial columns exist when converting serial columns to identity" do @db.fetch= [[{:v=>'on'}], [{:attnum=>1}], []] def @db.regclass_oid(x) 1234 end proc{@db.convert_serial_to_identity(:table, :column=>'id')}.must_raise Sequel::Error @db.sqls.must_equal [ "SELECT current_setting('is_superuser') LIMIT 1", "SELECT \"attnum\" FROM \"pg_attribute\" WHERE ((\"attrelid\" = 1234) AND (\"attname\" = 'id')) LIMIT 1", "SELECT \"objid\", (\"deptype\" = 'i') AS \"v\" FROM \"pg_depend\" WHERE ((\"refclassid\" = CAST('pg_class' AS regclass)) AND (\"refobjid\" = 1234) AND (\"refobjsubid\" = 1) AND (\"classid\" = CAST('pg_class' AS regclass)) AND (\"objsubid\" = 0) AND (\"deptype\" IN ('a', 'i')))", ] end it "should not support converting serial columns to identity on PostgreSQL <10.2" do def @db.server_version; 100000; end proc{@db.convert_serial_to_identity(:table)}.must_raise Sequel::Error end it "should not support converting serial columns to identity if not a superuser" do @db.fetch = {:v=>'off'} proc{@db.convert_serial_to_identity(:table)}.must_raise Sequel::Error end it "should not support converting serial columns to identity if column cannot be determined" do @db.fetch = {:v=>'on'} def @db.schema(x) [[:id, {:primary_key=>false, :auto_increment=>false}]] end proc{@db.convert_serial_to_identity(:table)}.must_raise Sequel::Error end it "should have connection_configuration_sqls return SQLs to use for new connections" do @db.send(:connection_configuration_sqls).must_equal [ "SET standard_conforming_strings = ON", "SET client_min_messages = 'WARNING'" ] h = {:force_standard_strings=>false, :client_min_messages=>:notice} @db.send(:connection_configuration_sqls, h).must_equal ["SET client_min_messages = 'NOTICE'"] h[:client_min_messages] = :notice2 proc{@db.send(:connection_configuration_sqls, h)}.must_raise Sequel::Error h[:client_min_messages] = nil @db.send(:connection_configuration_sqls, h).must_equal [] h[:search_path] = Object.new proc{@db.send(:connection_configuration_sqls, h)}.must_raise Sequel::Error h[:search_path] = 'public,foo' @db.send(:connection_configuration_sqls, h).must_equal ["SET search_path = \"public\",\"foo\""] h[:search_path] = %w'public foo2' 
@db.send(:connection_configuration_sqls, h).must_equal ["SET search_path = \"public\",\"foo2\""] end it "should recognize 40P01 SQL state as a serialization failure" do @db.send(:database_specific_error_class_from_sqlstate, '40P01').must_equal Sequel::SerializationFailure end end describe "MySQL support" do before do @db = Sequel.mock(:host=>'mysql') end it "should support spatial indexes" do @db.alter_table(:posts){add_spatial_index [:geom]} @db.sqls.must_equal ['CREATE SPATIAL INDEX `posts_geom_index` ON `posts` (`geom`)'] end it "should support fulltext indexes and full_text_search" do @db.alter_table(:posts){add_full_text_index :title; add_full_text_index [:title, :body]} @db.sqls.must_equal [ "CREATE FULLTEXT INDEX `posts_title_index` ON `posts` (`title`)", "CREATE FULLTEXT INDEX `posts_title_body_index` ON `posts` (`title`, `body`)" ] end it "should support indexes with index type" do @db.alter_table(:posts){add_index :id, :type => :btree} @db.sqls.must_equal ["CREATE INDEX `posts_id_index` USING btree ON `posts` (`id`)"] end end describe "SQLite support" do before do @db = Sequel.mock(:host=>'sqlite') end it "should use a string literal for Sequel[:col].as(:alias)" do @db.literal(Sequel[:c].as(:a)).must_equal "`c` AS 'a'" end it "should use a string literal for Sequel[:table][:col].as(:alias)" do @db.literal(Sequel[:t][:c].as(:a)).must_equal "`t`.`c` AS 'a'" end it "should use a string literal for :column.as(:alias)" do @db.literal(Sequel.as(:c, :a)).must_equal "`c` AS 'a'" end it "should use a string literal in the SELECT clause" do @db[:t].select(Sequel[:c].as(:a)).sql.must_equal "SELECT `c` AS 'a' FROM `t`" end it "should use a string literal in the FROM clause" do @db[Sequel[:t].as(:a)].sql.must_equal "SELECT * FROM `t` AS 'a'" end it "should use a string literal in the JOIN clause" do @db[:t].join_table(:natural, :j, nil, :table_alias=>:a).sql.must_equal "SELECT * FROM `t` NATURAL JOIN `j` AS 'a'" end it "should have support for various #transaction modes" do @db.transaction{} @db.transaction(:mode => :immediate){} @db.transaction(:mode => :exclusive){} @db.transaction(:mode => :deferred){} @db.sqls.must_equal ["BEGIN", "COMMIT", "BEGIN IMMEDIATE TRANSACTION", "COMMIT", "BEGIN EXCLUSIVE TRANSACTION", "COMMIT", "BEGIN DEFERRED TRANSACTION", "COMMIT"] @db.transaction_mode.must_be_nil @db.transaction_mode = :immediate @db.transaction_mode.must_equal :immediate @db.transaction{} @db.transaction(:mode => :exclusive){} @db.sqls.must_equal ["BEGIN IMMEDIATE TRANSACTION", "COMMIT", "BEGIN EXCLUSIVE TRANSACTION", "COMMIT"] end it "should choose a temporary table name that isn't already used when dropping or renaming columns" do exists = [true, true, false] @db.define_singleton_method(:table_exists?){|x| exists.shift} @db.drop_column(:test3, :i) @db.sqls.grep(/ALTER/).must_equal ["ALTER TABLE `test3` RENAME TO `test3_backup2`"] exists = [true, true, true, false] @db.rename_column(:test3, :h, :i) @db.sqls.grep(/ALTER/).must_equal ["ALTER TABLE `test3` RENAME TO `test3_backup3`"] end end describe "Oracle support" do before do @db = Sequel.mock(:host=>'oracle') end it "should not support savepoints" do proc{@db.transaction{@db.transaction(:savepoint=>true){}}}.must_raise Sequel::InvalidOperation end end 
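# A minimal usage sketch (not part of the spec suite) summarizing the mock
# adapter behavior exercised above: the :fetch option seeds rows returned by
# SELECT, :autoid seeds values returned by INSERT, :numrows seeds row counts
# returned by UPDATE/DELETE, and Database#sqls returns (and clears) the SQL
# executed so far. The :items table name here is illustrative only.
#
#   db = Sequel.mock(:fetch=>{:id=>1}, :autoid=>1, :numrows=>1)
#   db[:items].all            # => [{:id=>1}]
#   db[:items].insert(:x=>2)  # => 1 (from :autoid)
#   db[:items].update(:x=>3)  # => 1 (from :numrows)
#   db.sqls                   # => ["SELECT * FROM items",
#                             #     "INSERT INTO items (x) VALUES (2)",
#                             #     "UPDATE items SET x = 3"]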
sequel-5.63.0/spec/core/object_graph_spec.rb
require_relative "spec_helper" describe Sequel::Dataset do before do @db = Sequel.mock(:columns=>proc do |sql| case sql when /points/ [:id, :x, :y] when /lines|foo/ [:id, :x, :y, :graph_id] else [:id, :name, :x, :y, :lines_x] end end) @ds1 = @db.from(:points) @ds2 = @db.from(:lines) @ds3 = @db.from(:graphs) [@ds1, @ds2, @ds3].each{|ds| ds.columns} @db.sqls end it "#graph should not modify the current dataset's opts" do o1 = @ds1.opts o2 = o1.dup ds1 = @ds1.graph(@ds2, :x=>:id) @ds1.opts.must_equal o1 @ds1.opts.must_equal o2 ds1.opts.wont_equal o1 end it "#graph should not modify the current dataset's opts if current dataset is already graphed" do ds2 = @ds1.graph(@ds2) @ds1.graph(@ds2) ds2.graph(@ds3) ds2.graph(@ds3) end it "#graph should accept a simple dataset and pass the table to join" do ds = @ds1.graph(@ds2, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' end it "#graph should use currently selected columns as the basis for the selected columns in a new graph" do ds = @ds1.select(:id).graph(@ds2, :x=>:id) ds.sql.must_equal 'SELECT points.id, lines.id AS lines_id, lines.x, lines.y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' ds = @ds1.select(:id, :x).graph(@ds2, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, lines.id AS lines_id, lines.x AS lines_x, lines.y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' ds = @ds1.select(Sequel.identifier(:id), Sequel.qualify(:points, :x)).graph(@ds2, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, lines.id AS lines_id, lines.x AS lines_x, lines.y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' ds = @ds1.select(Sequel.identifier(:id).qualify(:points), Sequel.identifier(:x).as(:y)).graph(@ds2, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x AS y, lines.id AS lines_id, lines.x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' ds = @ds1.select(:id, Sequel.identifier(:x).qualify(Sequel.identifier(:points)).as(Sequel.identifier(:y))).graph(@ds2, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x AS y, lines.id AS lines_id, lines.x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' end it "#graph should requalify currently selected columns in new graph if current dataset joins tables" do ds = @ds1.cross_join(:lines).select(Sequel[:points][:id], Sequel[:lines][:id].as(:lid), Sequel[:lines][:x], Sequel[:lines][:y]).graph(@ds3, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.lid, points.x, points.y, graphs.id AS graphs_id, graphs.name, graphs.x AS graphs_x, graphs.y AS graphs_y, graphs.lines_x FROM (SELECT points.id, lines.id AS lid, lines.x, lines.y FROM points CROSS JOIN lines) AS points LEFT OUTER JOIN graphs ON (graphs.x = points.id)' end
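# Note on the aliasing asserted throughout this file: #graph LEFT OUTER JOINs
# the graphed dataset and, when a joined column would clash with a column
# already selected, aliases it by prefixing the table alias (e.g. lines.id AS
# lines_id), so each table's values can be kept separate when the rows are
# later split per table.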
with_symbol_splitting "#graph should requalify currently selected columns in new graph if current dataset joins tables with splittable symbols" do ds = @ds1.cross_join(:lines).select(:points__id, :lines__id___lid, :lines__x, :lines__y).graph(@ds3, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.lid, points.x, points.y, graphs.id AS graphs_id, graphs.name, graphs.x AS graphs_x, graphs.y AS graphs_y, graphs.lines_x FROM (SELECT points.id, lines.id AS lid, lines.x, lines.y FROM points CROSS JOIN lines) AS points LEFT OUTER JOIN graphs ON (graphs.x = points.id)' end it "#graph should handle selection expression without introspectable alias using a subselect" do ds = @ds1.select(Sequel.lit('1 AS v')) ds.columns :v ds.graph(@ds2, :x=>:v).sql.must_equal "SELECT points.v, lines.id, lines.x, lines.y, lines.graph_id FROM (SELECT 1 AS v FROM points) AS points LEFT OUTER JOIN lines ON (lines.x = points.v)" end it "#graph should accept a complex dataset and pass it directly to join" do ds = @ds1.graph(@ds2.select_all(:lines), {:x=>:id}) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' end it "#graph should accept a complex dataset and pass it directly to join" do ds = @ds1.graph(@ds2.filter(:x=>1), {:x=>:id}) ds.sql.must_equal 'SELECT points.id, points.x, points.y, t1.id AS t1_id, t1.x AS t1_x, t1.y AS t1_y, t1.graph_id FROM points LEFT OUTER JOIN (SELECT * FROM lines WHERE (x = 1)) AS t1 ON (t1.x = points.id)' ds = @ds1.graph(@ds2.select_all(:lines).filter(:x=>1), {:x=>:id}) ds.sql.must_equal 'SELECT points.id, points.x, points.y, t1.id AS t1_id, t1.x AS t1_x, t1.y AS t1_y, t1.graph_id FROM points LEFT OUTER JOIN (SELECT lines.* FROM lines WHERE (x = 1)) AS t1 ON (t1.x = points.id)' end it "#graph should work on from_self datasets" do ds = @ds1.from_self.graph(@ds2, :x=>:id) ds.sql.must_equal 'SELECT t1.id, t1.x, t1.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM (SELECT * FROM points) AS t1 LEFT OUTER JOIN lines ON (lines.x = t1.id)' ds = @ds1.graph(@ds2.from_self, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, t1.id AS t1_id, t1.x AS t1_x, t1.y AS t1_y, t1.graph_id FROM points LEFT OUTER JOIN (SELECT * FROM (SELECT * FROM lines) AS t1) AS t1 ON (t1.x = points.id)' ds = @ds1.from_self.from_self.graph(@ds2.from_self.from_self, :x=>:id) ds.sql.must_equal 'SELECT t1.id, t1.x, t1.y, t2.id AS t2_id, t2.x AS t2_x, t2.y AS t2_y, t2.graph_id FROM (SELECT * FROM (SELECT * FROM points) AS t1) AS t1 LEFT OUTER JOIN (SELECT * FROM (SELECT * FROM (SELECT * FROM lines) AS t1) AS t1) AS t2 ON (t2.x = t1.id)' ds = @ds1.from(@ds1, @ds3).graph(@ds2.from_self, :x=>:id) ds.sql.must_equal 'SELECT t1.id, t1.x, t1.y, t3.id AS t3_id, t3.x AS t3_x, t3.y AS t3_y, t3.graph_id FROM (SELECT * FROM (SELECT * FROM points) AS t1, (SELECT * FROM graphs) AS t2) AS t1 LEFT OUTER JOIN (SELECT * FROM (SELECT * FROM lines) AS t1) AS t3 ON (t3.x = t1.id)' end it "#graph should accept a symbol table name as the dataset" do ds = @ds1.graph(:lines, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' end with_symbol_splitting "#graph should accept a schema qualified symbolic table name as the dataset" do ds = @ds1.graph(:schema__lines, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, 
points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN schema.lines AS lines ON (lines.x = points.id)' end it "#graph should accept a qualified identifier table name as the dataset" do ds = @ds1.graph(Sequel[:schema][:lines], :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN schema.lines AS lines ON (lines.x = points.id)' end with_symbol_splitting "#graph allows giving table alias in symbolic argument" do ds = @ds1.graph(:lines___sketch, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, sketch.id AS sketch_id, sketch.x AS sketch_x, sketch.y AS sketch_y, sketch.graph_id FROM points LEFT OUTER JOIN lines AS sketch ON (sketch.x = points.id)' ds = @ds1.graph(:schema__lines___sketch, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, sketch.id AS sketch_id, sketch.x AS sketch_x, sketch.y AS sketch_y, sketch.graph_id FROM points LEFT OUTER JOIN schema.lines AS sketch ON (sketch.x = points.id)' end it "#graph should accept a SQL::Identifier as the dataset" do ds = @ds1.graph(Sequel.identifier(:lines), :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' ds = @ds1.graph(Sequel.identifier('lines'), :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines AS lines ON (lines.x = points.id)' end it "#graph should handle a SQL::Identifier with double underscores correctly" do ds = @ds1.graph(Sequel.identifier(:lin__es), :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lin__es.id AS lin__es_id, lin__es.name, lin__es.x AS lin__es_x, lin__es.y AS lin__es_y, lin__es.lines_x FROM points LEFT OUTER JOIN lin__es ON (lin__es.x = points.id)' ds = @ds1.from(Sequel.identifier(:poi__nts)).graph(Sequel.identifier(:lin__es), :x=>:id) ds.sql.must_equal 'SELECT poi__nts.id, poi__nts.name, poi__nts.x, poi__nts.y, poi__nts.lines_x, lin__es.id AS lin__es_id, lin__es.name AS lin__es_name, lin__es.x AS lin__es_x, lin__es.y AS lin__es_y, lin__es.lines_x AS lin__es_lines_x FROM poi__nts LEFT OUTER JOIN lin__es ON (lin__es.x = poi__nts.id)' ds = @ds1.from(Sequel.identifier(:poi__nts).qualify(:foo)).graph(Sequel.identifier(:lin__es).qualify(:bar), :x=>:id) ds.sql.must_equal 'SELECT foo.poi__nts.id, foo.poi__nts.x, foo.poi__nts.y, foo.poi__nts.graph_id, lin__es.id AS lin__es_id, lin__es.name, lin__es.x AS lin__es_x, lin__es.y AS lin__es_y, lin__es.lines_x FROM foo.poi__nts LEFT OUTER JOIN bar.lin__es AS lin__es ON (lin__es.x = foo.poi__nts.id)' end it "#graph should accept a SQL::QualifiedIdentifier as the dataset" do ds = @ds1.graph(Sequel.qualify(:schema, :lines), :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN schema.lines AS lines ON (lines.x = points.id)' ds = @ds1.graph(Sequel.qualify('schema', 'lines'), :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN schema.lines AS lines ON (lines.x = points.id)' ds = @ds1.graph(Sequel.qualify(Sequel.identifier(:schema), Sequel.identifier(:lines)), 
:x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN schema.lines AS lines ON (lines.x = points.id)' ds = @ds1.graph(Sequel.qualify(Sequel.identifier('schema'), Sequel.identifier('lines')), :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN schema.lines AS lines ON (lines.x = points.id)' end with_symbol_splitting "#graph should handle a qualified identifier symbol as the source" do ds = @ds1.from(:schema__points).graph(:lines, :x=>:id) ds.sql.must_equal 'SELECT schema.points.id, schema.points.x, schema.points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM schema.points LEFT OUTER JOIN lines ON (lines.x = schema.points.id)' end it "#graph should handle a qualified identifier as the source" do ds = @ds1.from(Sequel.qualify(:schema, :points)).graph(:lines, :x=>:id) ds.sql.must_equal 'SELECT schema.points.id, schema.points.x, schema.points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM schema.points LEFT OUTER JOIN lines ON (lines.x = schema.points.id)' end with_symbol_splitting "#graph should accept a SQL::AliasedExpression with splittable symbol as the dataset" do ds = @ds1.graph(Sequel.as(:schema__lines, :foo), :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, foo.id AS foo_id, foo.x AS foo_x, foo.y AS foo_y, foo.graph_id FROM points LEFT OUTER JOIN schema.lines AS foo ON (foo.x = points.id)' end it "#graph should accept a SQL::AliasedExpression as the dataset" do ds = @ds1.graph(Sequel.as(:lines, :foo), :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, foo.id AS foo_id, foo.x AS foo_x, foo.y AS foo_y, foo.graph_id FROM points LEFT OUTER JOIN lines AS foo ON (foo.x = points.id)' ds = @ds1.graph(Sequel.as(Sequel.identifier(:lines), :foo), :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, foo.id AS foo_id, foo.x AS foo_x, foo.y AS foo_y, foo.graph_id FROM points LEFT OUTER JOIN lines AS foo ON (foo.x = points.id)' ds = @ds1.graph(Sequel.as(Sequel.qualify(:schema, :lines), :foo), :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, foo.id AS foo_id, foo.x AS foo_x, foo.y AS foo_y, foo.graph_id FROM points LEFT OUTER JOIN schema.lines AS foo ON (foo.x = points.id)' end it "#graph should raise an error if a symbol, dataset, or model is not used" do proc{@ds1.graph(Object.new, :x=>:id)}.must_raise(Sequel::Error) end it "#graph should accept a :table_alias option" do ds = @ds1.graph(:lines, {:x=>:id}, :table_alias=>:planes) ds.sql.must_equal 'SELECT points.id, points.x, points.y, planes.id AS planes_id, planes.x AS planes_x, planes.y AS planes_y, planes.graph_id FROM points LEFT OUTER JOIN lines AS planes ON (planes.x = points.id)' end it "#graph should accept a :implicit_qualifier option" do ds = @ds1.graph(:lines, {:x=>:id}, :implicit_qualifier=>:planes) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = planes.id)' end it "#graph should accept a :join_type option" do ds = @ds1.graph(:lines, {:x=>:id}, :join_type=>:inner) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points INNER JOIN lines ON (lines.x = 
points.id)' end it "#graph should accept a :join_only option" do ds = @ds1.graph(:lines, {:x=>:id}, :join_only=>true) ds.sql.must_equal 'SELECT * FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' end it "#graph should not select any columns from the graphed table if :select option is false" do ds = @ds1.graph(:lines, {:x=>:id}, :select=>false).graph(:graphs, :id=>:graph_id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, graphs.id AS graphs_id, graphs.name, graphs.x AS graphs_x, graphs.y AS graphs_y, graphs.lines_x FROM points LEFT OUTER JOIN lines ON (lines.x = points.id) LEFT OUTER JOIN graphs ON (graphs.id = lines.graph_id)' end it "#graph should use the given columns if :select option is used" do ds = @ds1.graph(:lines, {:x=>:id}, :select=>[:x, :graph_id]).graph(:graphs, :id=>:graph_id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.x AS lines_x, lines.graph_id, graphs.id AS graphs_id, graphs.name, graphs.x AS graphs_x, graphs.y AS graphs_y, graphs.lines_x AS graphs_lines_x FROM points LEFT OUTER JOIN lines ON (lines.x = points.id) LEFT OUTER JOIN graphs ON (graphs.id = lines.graph_id)' end it "#graph should pass all join_conditions to join_table" do ds = @ds1.graph(@ds2, [[:x, :id], [:y, :id]]) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON ((lines.x = points.id) AND (lines.y = points.id))' end it "#graph should accept a block instead of conditions and pass it to join_table" do ds = @ds1.graph(@ds2){|ja, lja, js| [[Sequel.qualify(ja, :x), Sequel.qualify(lja, :id)], [Sequel.qualify(ja, :y), Sequel.qualify(lja, :id)]]} ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON ((lines.x = points.id) AND (lines.y = points.id))' end it "#graph should raise Error if set_graph_aliases is called on an ungraphed dataset" do proc{@ds1.set_graph_aliases([[:x,[:points, :x]], [:y,[:lines, :y]]])}.must_raise Sequel::Error end it "#graph should allow graphing of multiple datasets" do ds = @ds1.graph(@ds2, :x=>:id).graph(@ds3, :id=>:graph_id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id, graphs.id AS graphs_id, graphs.name, graphs.x AS graphs_x, graphs.y AS graphs_y, graphs.lines_x AS graphs_lines_x FROM points LEFT OUTER JOIN lines ON (lines.x = points.id) LEFT OUTER JOIN graphs ON (graphs.id = lines.graph_id)' end it "#graph should allow graphing of the same dataset multiple times" do ds = @ds1.graph(@ds2, :x=>:id).graph(@ds2, {:y=>Sequel[:points][:id]}, :table_alias=>:graph) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id, graph.id AS graph_id_0, graph.x AS graph_x, graph.y AS graph_y, graph.graph_id AS graph_graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id) LEFT OUTER JOIN lines AS graph ON (graph.y = points.id)' end with_symbol_splitting "#graph should allow graphing of the same dataset multiple times when using splittable symbols" do ds = @ds1.graph(@ds2, :x=>:id).graph(@ds2, {:y=>:points__id}, :table_alias=>:graph) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id, graph.id AS graph_id_0, graph.x AS graph_x, graph.y AS graph_y, graph.graph_id AS 
graph_graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id) LEFT OUTER JOIN lines AS graph ON (graph.y = points.id)' end it "#graph should raise an error if the table/table alias has already been used" do proc{@ds1.graph(@ds1, :x=>:id)}.must_raise(Sequel::Error) @ds1.graph(@ds2, :x=>:id) proc{@ds1.graph(@ds2, :x=>:id).graph(@ds2, :x=>:id)}.must_raise(Sequel::Error) @ds1.graph(@ds2, :x=>:id).graph(@ds2, {:x=>:id}, :table_alias=>:blah) end it "#graph should raise an error if the table/table alias has already been used and an explicit alias is given" do proc{@ds1.graph(@ds2, {:x=>:id}, :table_alias=>:points)}.must_raise(Sequel::Error) proc{@ds1.graph(@ds2, :x=>:id).graph(@ds3, {:x=>:id}, :table_alias=>:lines)}.must_raise(Sequel::Error) end it "#graph should handle ColumnAll values in selections" do @ds1.select_all(:points).graph(:lines, :x=>:id).sql.must_equal "SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)" @ds1.from{points}.select_all(:points).graph(:lines, :x=>:id).sql.must_equal "SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)" @ds1.from_self(:alias=>:p).select_all(:p).graph(:lines, :x=>:id).sql.must_equal "SELECT p.id, p.x, p.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM (SELECT * FROM points) AS p LEFT OUTER JOIN lines ON (lines.x = p.id)" @ds1.from{points.as(p)}.select_all(:p).graph(:lines, :x=>:id).sql.must_equal "SELECT p.id, p.x, p.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points AS p LEFT OUTER JOIN lines ON (lines.x = p.id)" @ds1.from(Sequel[:s][:points]).select_all(Sequel[:s][:points]).graph(:lines, :x=>:id).sql.must_equal "SELECT s.points.id, s.points.x, s.points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM s.points LEFT OUTER JOIN lines ON (lines.x = s.points.id)" @ds1.from(Sequel[:s][:points].as(:p)).select_all(:p).graph(:lines, :x=>:id).sql.must_equal "SELECT p.id, p.x, p.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM s.points AS p LEFT OUTER JOIN lines ON (lines.x = p.id)" @ds1.select_all('points').graph(:lines, :x=>:id).sql.must_equal "SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)" @ds1.from_self(:alias=>'p').select_all(:p).graph(:lines, :x=>:id).sql.must_equal "SELECT p.id, p.x, p.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM (SELECT * FROM points) AS p LEFT OUTER JOIN lines ON (lines.x = p.id)" @ds1.select_all(Sequel.identifier('points')).graph(:lines, :x=>:id).sql.must_equal "SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)" @ds1.from_self(:alias=>Sequel.identifier(:p)).select_all(:p).graph(:lines, :x=>:id).sql.must_equal "SELECT p.id, p.x, p.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM
(SELECT * FROM points) AS p LEFT OUTER JOIN lines ON (lines.x = p.id)" ds = @ds1.select_all(:points).select_append{(points[:id]+lines[:id]).as(:id2)}.join(:lines, :x=>:id) ds.columns :id, :x, :y, :id2 ds.graph(:graphs, :id=>:graph_id).sql.must_equal "SELECT points.id, points.x, points.y, points.id2, graphs.id AS graphs_id, graphs.name, graphs.x AS graphs_x, graphs.y AS graphs_y, graphs.lines_x FROM (SELECT points.*, (points.id + lines.id) AS id2 FROM points INNER JOIN lines ON (lines.x = points.id)) AS points LEFT OUTER JOIN graphs ON (graphs.id = points.graph_id)" ds = @ds1.select_all(:lines).select_append{(points[:id]+lines[:id]).as(:id2)}.join(:lines, :x=>:id) ds.columns :id, :x, :y, :graph_id, :id2 ds.graph(:graphs, :id=>:graph_id).sql.must_equal "SELECT points.id, points.x, points.y, points.graph_id, points.id2, graphs.id AS graphs_id, graphs.name, graphs.x AS graphs_x, graphs.y AS graphs_y, graphs.lines_x FROM (SELECT lines.*, (points.id + lines.id) AS id2 FROM points INNER JOIN lines ON (lines.x = points.id)) AS points LEFT OUTER JOIN graphs ON (graphs.id = points.graph_id)" ds = @ds1.select_all(:l).select_append{(points[:id]+lines[:id]).as(:id2)}.join(Sequel[:lines].as(:l), :x=>:id) ds.columns :id, :x, :y, :graph_id, :id2 ds.graph(:graphs, :id=>:graph_id).sql.must_equal "SELECT points.id, points.x, points.y, points.graph_id, points.id2, graphs.id AS graphs_id, graphs.name, graphs.x AS graphs_x, graphs.y AS graphs_y, graphs.lines_x FROM (SELECT l.*, (points.id + lines.id) AS id2 FROM points INNER JOIN lines AS l ON (l.x = points.id)) AS points LEFT OUTER JOIN graphs ON (graphs.id = points.graph_id)" ds = @ds1.select_all(:l).select_append{(points[:id]+lines[:id]).as(:id2)}.join(Sequel.as(:lines, :l), :x=>:id) ds.columns :id, :x, :y, :graph_id, :id2 ds.graph(:graphs, :id=>:graph_id).sql.must_equal "SELECT points.id, points.x, points.y, points.graph_id, points.id2, graphs.id AS graphs_id, graphs.name, graphs.x AS graphs_x, graphs.y AS graphs_y, graphs.lines_x FROM (SELECT l.*, (points.id + lines.id) AS id2 FROM points INNER JOIN lines AS l ON (l.x = points.id)) AS points LEFT OUTER JOIN graphs ON (graphs.id = points.graph_id)" ds = @ds1.select_all(:l).select_append{(points[:id]+lines[:id]).as(:id2)}.join(@ds1.db[:graphs].as(:l), :id=>:y) ds.columns :id, :name, :x, :y, :lines_x, :id2 ds.graph(:lines, :x=>:id).sql.must_equal "SELECT points.id, points.name, points.x, points.y, points.lines_x, points.id2, lines.id AS lines_id, lines.x AS lines_x_0, lines.y AS lines_y, lines.graph_id FROM (SELECT l.*, (points.id + lines.id) AS id2 FROM points INNER JOIN (SELECT * FROM graphs) AS l ON (l.id = points.y)) AS points LEFT OUTER JOIN lines ON (lines.x = points.id)" end it "#set_graph_aliases should not modify the current dataset's opts" do o1 = @ds1.opts o2 = o1.dup ds1 = @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x=>[:graphs,:id]) @ds1.opts.must_equal o1 @ds1.opts.must_equal o2 ds1.opts.wont_equal o1 end it "#set_graph_aliases should specify the graph mapping" do ds = @ds1.graph(:lines, :x=>:id) ds.sql.must_equal 'SELECT points.id, points.x, points.y, lines.id AS lines_id, lines.x AS lines_x, lines.y AS lines_y, lines.graph_id FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' ds.set_graph_aliases(:x=>[:points, :x], :y=>[:lines, :y]).sql.must_equal 'SELECT points.x, lines.y FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' end it "#set_graph_aliases should allow a third entry to specify an expression to use other than the default" do @ds1.graph(:lines, 
:x=>:id).set_graph_aliases(:x=>[:points, :x, 1], :y=>[:lines, :y, Sequel.function(:random)]).sql.must_equal 'SELECT 1 AS x, random() AS y FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' end it "#set_graph_aliases should allow a single array entry to specify a table, assuming the same column as the key" do @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x=>[:points], :y=>[:lines]).sql.must_equal 'SELECT points.x, lines.y FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' end it "#set_graph_aliases should allow hash values to be symbols specifying table, assuming the same column as the key" do @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x=>:points, :y=>:lines).sql.must_equal 'SELECT points.x, lines.y FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' end it "#set_graph_aliases should only alias columns if necessary" do @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x=>[:points, :x], :y=>[:lines, :y]).sql.must_equal 'SELECT points.x, lines.y FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x1=>[:points, :x], :y=>[:lines, :y]).sql.must_equal 'SELECT points.x AS x1, lines.y FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' end it "#add_graph_aliases should not modify the current dataset's opts" do ds1 = @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x=>[:graphs,:id]) o1 = ds1.opts o2 = o1.dup ds2 = ds1.add_graph_aliases(:y=>[:blah,:id]) ds1.opts.must_equal o1 ds1.opts.must_equal o2 ds2.opts.wont_equal o1 end it "#add_graph_aliases should add columns to the graph mapping" do @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x=>[:points, :q]).add_graph_aliases(:y=>[:lines, :r]).sql.must_equal 'SELECT points.q AS x, lines.r AS y FROM points LEFT OUTER JOIN lines ON (lines.x = points.id)' end it "#add_graph_aliases should raise an error if called without existing graph aliases" do proc{@ds1.add_graph_aliases(:y=>[:lines, :r])}.must_raise(Sequel::Error) end it "#ungraphed should remove the splitting of result sets into component tables" do @db.fetch = {:id=>1,:x=>2,:y=>3,:lines_id=>4,:lines_x=>5,:lines_y=>6,:graph_id=>7} @ds1.graph(@ds2, :x=>:id).ungraphed.all.must_equal [{:id=>1,:x=>2,:y=>3,:lines_id=>4,:lines_x=>5,:lines_y=>6,:graph_id=>7}] end end sequel-5.63.0/spec/core/placeholder_literalizer_spec.rb000066400000000000000000000155561434214120600232150ustar00rootroot00000000000000require_relative "spec_helper" describe "Dataset::PlaceholderLiteralizer" do before do @c = Sequel::Dataset::PlaceholderLiteralizer @db =
Sequel.mock @ds = @db[:items] @h = {:id=>1} @ds.db.fetch = @h end it "should handle calls with no placeholders" do loader = @c.loader(@ds){|pl, ds| ds.where(:a=>1)} loader.first.must_equal @h @db.sqls.must_equal ["SELECT * FROM items WHERE (a = 1)"] end it "should handle calls with a single placeholder" do loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg)} loader.first(1).must_equal @h loader.first(2).must_equal @h @db.sqls.must_equal ["SELECT * FROM items WHERE (a = 1)", "SELECT * FROM items WHERE (a = 2)"] end it "should handle calls with multiple placeholders" do loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg).where(:b=>Sequel.+(pl.arg, 1)).where(pl.arg)} loader.first(1, :c, :id=>1).must_equal @h loader.first(2, :d, :id=>2).must_equal @h @db.sqls.must_equal ["SELECT * FROM items WHERE ((a = 1) AND (b = (c + 1)) AND (id = 1))", "SELECT * FROM items WHERE ((a = 2) AND (b = (d + 1)) AND (id = 2))"] end it "should handle calls with placeholders and delayed arguments" do h = :h s = :s d = @ds.having(Sequel.delay{h}).select(Sequel.delay{s}) loader = @c.loader(d){|pl, ds| ds.where(:a=>pl.arg).where(:b=>Sequel.+(pl.arg, 1)).where(pl.arg)} loader.first(1, :c, :id=>1).must_equal @h h = :h2 s = :s2 loader.first(2, :d, :id=>2).must_equal @h @db.sqls.must_equal ["SELECT s FROM items WHERE ((a = 1) AND (b = (c + 1)) AND (id = 1)) HAVING h", "SELECT s2 FROM items WHERE ((a = 2) AND (b = (d + 1)) AND (id = 2)) HAVING h2"] end it "should handle calls with placeholders and delayed arguments that take a dataset argument" do d = @ds.select(Sequel.delay{|ds| ds.first_source}) loader = @c.loader(d){|pl, ds| ds.where(:a=>pl.arg).where(:b=>Sequel.+(pl.arg, 1)).where(pl.arg)} loader.first(1, :c, :id=>1).must_equal @h loader.first(2, :d, :id=>2).must_equal @h @db.sqls.must_equal ["SELECT items FROM items WHERE ((a = 1) AND (b = (c + 1)) AND (id = 1))", "SELECT items FROM items WHERE ((a = 2) AND (b = (d + 1)) AND (id = 2))"] end it "should handle calls with placeholders used as filter arguments" do loader = @c.loader(@ds){|pl, ds| ds.where(pl.arg)} loader.first(:id=>1).must_equal @h loader.first(Sequel.expr{a(b)}).must_equal @h @db.sqls.must_equal ["SELECT * FROM items WHERE (id = 1)", "SELECT * FROM items WHERE a(b)"] end it "should handle calls with literal strings used as filter arguments" do loader = @c.loader(@ds){|pl, ds| ds.where(pl.arg)} loader.first(Sequel.lit("a = 1")).must_equal @h @db.sqls.must_equal ["SELECT * FROM items WHERE (a = 1)"] end it "should handle calls with placeholders used as the right hand side of condition specifiers" do loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg)} loader.first(1).must_equal @h loader.first([1, 2]).must_equal @h loader.first(nil).must_equal @h @db.sqls.must_equal ["SELECT * FROM items WHERE (a = 1)", "SELECT * FROM items WHERE (a IN (1, 2))", "SELECT * FROM items WHERE (a IS NULL)"] end it "should handle calls with a placeholder used multiple times" do loader = @c.loader(@ds){|pl, ds| a = pl.arg; ds.where(:a=>a).where(:b=>a)} loader.first(1).must_equal @h loader.first(2).must_equal @h @db.sqls.must_equal ["SELECT * FROM items WHERE ((a = 1) AND (b = 1))", "SELECT * FROM items WHERE ((a = 2) AND (b = 2))"] end it "should handle calls with a placeholder used multiple times in different capacities" do loader = @c.loader(@ds){|pl, ds| a = pl.arg; ds.select(a).where(:b=>a)} loader.first("a").must_equal @h loader.first(["a = ?", 2]).must_equal @h @db.sqls.must_equal ["SELECT 'a' FROM items WHERE (b = 'a')", "SELECT ('a = ?', 2) FROM items WHERE (b IN
('a = ?', 2))"] end it "should handle calls with manually specified argument positions" do loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg(1)).where(:b=>pl.arg(0))} loader.first(1, 2).must_equal @h loader.first(2, 1).must_equal @h @db.sqls.must_equal ["SELECT * FROM items WHERE ((a = 2) AND (b = 1))", "SELECT * FROM items WHERE ((a = 1) AND (b = 2))"] end it "should handle dataset with row procs" do @ds = @ds.with_row_proc(proc{|r| {:foo=>r[:id]+1}}) loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg)} loader.first(1).must_equal(:foo=>2) @db.sqls.must_equal ["SELECT * FROM items WHERE (a = 1)"] end it "should return all rows for #all" do loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg)} loader.all(1).must_equal [@h] @db.sqls.must_equal ["SELECT * FROM items WHERE (a = 1)"] end it "should iterate over block for #all" do a = [] loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg)} loader.all(1){|r| a << r}.must_equal [@h] a.must_equal [@h] @db.sqls.must_equal ["SELECT * FROM items WHERE (a = 1)"] end it "should iterate over block for #each" do a = [] loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg)} loader.each(1){|r| a << r} a.must_equal [@h] @db.sqls.must_equal ["SELECT * FROM items WHERE (a = 1)"] end it "should return first value for #get" do loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg)} loader.get(2).must_equal 1 @db.sqls.must_equal ["SELECT * FROM items WHERE (a = 2)"] end it "should support modifying dataset used on per-call basis with #run" do loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg)} loader.with_dataset do |ds| ds.with_row_proc(lambda{|row| [row]}) end.all(1).must_equal [[@h]] @db.sqls.must_equal ["SELECT * FROM items WHERE (a = 1)"] end it "should literalize args as NULL if :placeholder_literal_null is set" do loader = @c.loader(@ds){|pl, ds| ds.where(pl.arg=>:a).clone(:placeholder_literal_null=>true)} loader.sql(1).must_equal "SELECT * FROM items WHERE (NULL = a)" end it "should raise an error if called with an incorrect number of arguments" do loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg)} proc{loader.first}.must_raise(Sequel::Error) proc{loader.first(1, 2)}.must_raise(Sequel::Error) end it "should raise an error if called with an incorrect number of arguments when manually providing argument positions" do loader = @c.loader(@ds){|pl, ds| ds.where(:a=>pl.arg(1))} proc{loader.first}.must_raise(Sequel::Error) proc{loader.first(1)}.must_raise(Sequel::Error) proc{loader.first(1, 2, 3)}.must_raise(Sequel::Error) end it "should raise an error if argument literalized into a different string than returned by query" do o = Object.new def o.wrap(v) @v = v self end def o.sql_literal(ds) ds.literal(@v) end proc{@c.loader(@ds){|pl, ds| ds.where(o.wrap(pl.arg))}}.must_raise(Sequel::Error) end end ��������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/core/schema_generator_spec.rb����������������������������������������������������0000664�0000000�0000000�00000022736�14342141206�0021641�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe Sequel::Schema::CreateTableGenerator do before do @generator = 
Sequel::Schema::CreateTableGenerator.new(Sequel.mock) do string(:title).must_be_nil column(:body, :text).must_be_nil foreign_key(:parent_id).must_be_nil primary_key(:id).must_be_nil check('price > 100').must_be_nil constraint(:xxx){{:yyy => :zzz}}.must_be_nil index(:title).must_be_nil index([:title, :body], :unique => true).must_be_nil foreign_key(:node_id, :nodes).must_be_nil foreign_key(:deferrable_node_id, :nodes, :deferrable => true).must_be_nil primary_key([:title, :parent_id], :name => :cpk).must_be_nil foreign_key([:node_id, :prop_id], :nodes_props, :name => :cfk).must_be_nil end @columns, @indexes, @constraints = @generator.columns, @generator.indexes, @generator.constraints end it "should respond to everything" do @generator.respond_to?(:foo).must_equal true end it "should support adding type methods" do c = Class.new(Sequel::Schema::CreateTableGenerator) c2 = Class.new def c2.to_s; 'Foo' end c.add_type_method(c2) gen = c.new(Sequel.mock) do Foo :bar end gen.columns.first.values_at(:name, :type).must_equal [:bar, c2] end it "should place the primary key column first" do @columns.first[:name].must_equal :id @columns.first[:primary_key].must_equal true @columns[3][:name].must_equal :parent_id @columns[3][:primary_key].must_be_nil end it "should respect existing column order if primary_key :keep_order is used" do generator = Sequel::Schema::CreateTableGenerator.new(Sequel.mock) do string :title primary_key :id, :keep_order=>true end columns = generator.columns columns.last[:name].must_equal :id columns.last[:primary_key].must_equal true columns.first[:name].must_equal :title columns.first[:primary_key].must_be_nil end it "should handle SQL::Identifier and SQL::QualifiedIdentifier as foreign_key arguments" do generator = Sequel::Schema::CreateTableGenerator.new(Sequel.mock) do foreign_key :a_id, Sequel.identifier(:as) foreign_key :b_id, Sequel.qualify(:c, :b) end columns = generator.columns columns.first.values_at(:name, :table).must_equal [:a_id, Sequel.identifier(:as)] columns.last.values_at(:name, :table).must_equal [:b_id, Sequel.qualify(:c, :b)] end it "counts definitions correctly" do @columns.size.must_equal 6 @indexes.size.must_equal 2 @constraints.size.must_equal 4 end it "retrieves primary key name" do @generator.primary_key_name.must_equal :id end it "keeps columns in order" do @columns[1][:name].must_equal :title @columns[1][:type].must_equal :string @columns[2][:name].must_equal :body @columns[2][:type].must_equal :text end it "creates foreign key column" do @columns[3][:name].must_equal :parent_id @columns[3][:type].must_equal Integer @columns[4][:name].must_equal :node_id @columns[4][:type].must_equal Integer end it "creates deferrable foreign key column" do @columns[5][:name].must_equal :deferrable_node_id @columns[5][:type].must_equal Integer @columns[5][:deferrable].must_equal true end it "uses table for foreign key columns, if specified" do @columns[3][:table].must_be_nil @columns[4][:table].must_equal :nodes @constraints[3][:table].must_equal :nodes_props end it "finds columns" do [:title, :body, :parent_id, :id].each do |col| @generator.has_column?(col).must_equal true end @generator.has_column?(:foo).wont_equal true end it "creates constraints" do @constraints[0][:name].must_be_nil @constraints[0][:type].must_equal :check @constraints[0][:check].must_equal ['price > 100'] @constraints[1][:name].must_equal :xxx @constraints[1][:type].must_equal :check @constraints[1][:check].must_be_kind_of Proc @constraints[2][:name].must_equal :cpk @constraints[2][:type].must_equal
:primary_key @constraints[2][:columns].must_equal [ :title, :parent_id ] @constraints[3][:name].must_equal :cfk @constraints[3][:type].must_equal :foreign_key @constraints[3][:columns].must_equal [ :node_id, :prop_id ] @constraints[3][:table].must_equal :nodes_props end it "creates indexes" do @indexes[0][:columns].must_equal [:title] @indexes[1][:columns].must_equal [:title, :body] end end describe Sequel::Schema::AlterTableGenerator do before do @generator = Sequel::Schema::AlterTableGenerator.new(Sequel.mock) do add_column(:aaa, :text).must_be_nil drop_column(:bbb).must_be_nil rename_column(:ccc, :ho).must_be_nil set_column_type(:ddd, :float).must_be_nil set_column_default(:eee, 1).must_be_nil add_index([:fff, :ggg]).must_be_nil drop_index(:hhh).must_be_nil drop_index(:hhh, :name=>:blah_blah).must_be_nil add_full_text_index(:blah).must_be_nil add_spatial_index(:geom).must_be_nil add_index(:blah, :type => :hash).must_be_nil add_index(:blah, :where => {:something => true}).must_be_nil add_constraint(:con1, 'fred > 100').must_be_nil drop_constraint(:con2).must_be_nil add_unique_constraint([:aaa, :bbb, :ccc], :name => :con3).must_be_nil add_primary_key(:id).must_be_nil add_foreign_key(:node_id, :nodes).must_be_nil add_primary_key([:aaa, :bbb]).must_be_nil add_foreign_key([:node_id, :prop_id], :nodes_props).must_be_nil add_foreign_key([:node_id, :prop_id], :nodes_props, :name => :fkey).must_be_nil drop_foreign_key(:node_id).must_be_nil drop_foreign_key([:node_id, :prop_id]).must_be_nil drop_foreign_key([:node_id, :prop_id], :name => :fkey).must_be_nil add_column(:iii, :text, :index=>true).must_be_nil add_column(:jjj, :text, :index=>{:name=>:jjj_index}).must_be_nil end end it "should generate operation records" do @generator.operations.must_equal [ {:op => :add_column, :name => :aaa, :type => :text}, {:op => :drop_column, :name => :bbb}, {:op => :rename_column, :name => :ccc, :new_name => :ho}, {:op => :set_column_type, :name => :ddd, :type => :float}, {:op => :set_column_default, :name => :eee, :default => 1}, {:op => :add_index, :columns => [:fff, :ggg]}, {:op => :drop_index, :columns => [:hhh]}, {:op => :drop_index, :columns => [:hhh], :name=>:blah_blah}, {:op => :add_index, :columns => [:blah], :type => :full_text}, {:op => :add_index, :columns => [:geom], :type => :spatial}, {:op => :add_index, :columns => [:blah], :type => :hash}, {:op => :add_index, :columns => [:blah], :where => {:something => true}}, {:op => :add_constraint, :type => :check, :name => :con1, :check => ['fred > 100']}, {:op => :drop_constraint, :name => :con2}, {:op => :add_constraint, :type => :unique, :name => :con3, :columns => [:aaa, :bbb, :ccc]}, {:op => :add_column, :name => :id, :type => Integer, :primary_key=>true, :auto_increment=>true}, {:op => :add_column, :name => :node_id, :type => Integer, :table=>:nodes}, {:op => :add_constraint, :type => :primary_key, :columns => [:aaa, :bbb]}, {:op => :add_constraint, :type => :foreign_key, :columns => [:node_id, :prop_id], :table => :nodes_props}, {:op => :add_constraint, :type => :foreign_key, :columns => [:node_id, :prop_id], :table => :nodes_props, :name => :fkey}, {:op => :drop_constraint, :type => :foreign_key, :columns => [:node_id]}, {:op => :drop_column, :name => :node_id}, {:op => :drop_constraint, :type => :foreign_key, :columns => [:node_id, :prop_id]}, {:op => :drop_constraint, :type => :foreign_key, :columns => [:node_id, :prop_id], :name => :fkey}, {:op => :add_column, :name => :iii, :type => :text}, {:op => :add_index, :columns => [:iii]}, {:op => 
:add_column, :name => :jjj, :type => :text}, {:op => :add_index, :columns => [:jjj], :name => :jjj_index}, ] end end describe "Sequel::Schema::CreateTableGenerator generic type methods" do it "should store the type class in :type for each column" do Sequel::Schema::CreateTableGenerator.new(Sequel.mock) do String :a Integer :b Fixnum :c Bignum :d Float :e BigDecimal :f Date :g DateTime :h Time :i Numeric :j File :k TrueClass :l FalseClass :m end.columns.map{|c| c[:type]}.must_equal [String, Integer, Integer, :Bignum, Float, BigDecimal, Date, DateTime, Time, Numeric, File, TrueClass, FalseClass] end end describe Sequel::Schema::CreateTableGenerator do before do @generator = Sequel::Schema::CreateTableGenerator.new(Sequel.mock) end it "should support usage without a block" do @generator.columns.must_be_empty @generator.indexes.must_be_empty @generator.constraints.must_be_empty @generator.column :a, String @generator.columns.wont_be_empty end it "should not handle method calls without a name argument" do proc{@generator.foo123}.must_raise NoMethodError end it "should return name of the primary key" do @generator.primary_key_name.must_be_nil @generator.primary_key :id @generator.primary_key_name.must_equal :id end end describe Sequel::Schema::AlterTableGenerator do before do @generator = Sequel::Schema::AlterTableGenerator.new(Sequel.mock) end it "should support usage without a block" do @generator.operations.must_be_empty @generator.add_column :a, String @generator.operations.wont_be_empty end end sequel-5.63.0/spec/core/schema_spec.rb000066400000000000000000002233111434214120600175630ustar00rootroot00000000000000require_relative "spec_helper" describe "DB#create_table" do before do @db = Sequel.mock end it "should accept the table name" do @db.create_table(:cats){}.must_be_nil @db.sqls.must_equal ['CREATE TABLE cats ()'] end with_symbol_splitting "should accept the table name with splittable symbols" do @db.create_table(:cats__cats) {} @db.sqls.must_equal ['CREATE TABLE cats.cats ()'] end it "should accept the table name in multiple formats" do @db.create_table(Sequel[:cats][:cats]) {} @db.create_table("cats__cats1") {} @db.create_table(Sequel.identifier(:cats__cats2)) {} @db.create_table(Sequel.qualify(:cats3, :cats)) {} @db.sqls.must_equal ['CREATE TABLE cats.cats ()', 'CREATE TABLE cats__cats1 ()', 'CREATE TABLE cats__cats2 ()', 'CREATE TABLE cats3.cats ()'] end it "should raise an error if the table name argument is not valid" do proc{@db.create_table(1) {}}.must_raise(Sequel::Error) proc{@db.create_table(Sequel.as(:cats, :c)) {}}.must_raise(Sequel::Error) end it "should remove cached schema entry" do @db.instance_variable_set(:@schemas, {'cats'=>[]}) @db.create_table(:cats){Integer :a} @db.instance_variable_get(:@schemas).must_be :empty?
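# the assertion above verifies that create_table invalidated the cached schema entry for the table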
end it "should accept multiple columns" do @db.create_table(:cats) do column :id, :integer column :name, :text end @db.sqls.must_equal ['CREATE TABLE cats (id integer, name text)'] end it "should accept method calls as data types" do @db.create_table(:cats) do integer :id text :name end @db.sqls.must_equal ['CREATE TABLE cats (id integer, name text)'] end it "should transform types given as ruby classes to database-specific types" do @db.create_table(:cats) do String :a Integer :b Fixnum :c Bignum :d Float :e BigDecimal :f Date :g DateTime :h Time :i Numeric :j File :k TrueClass :l FalseClass :m column :n, Integer primary_key :o, :type=>String foreign_key :p, :f, :type=>Date end @db.sqls.must_equal ['CREATE TABLE cats (o varchar(255) PRIMARY KEY AUTOINCREMENT, a varchar(255), b integer, c integer, d bigint, e double precision, f numeric, g date, h timestamp, i timestamp, j numeric, k blob, l boolean, m boolean, n integer, p date REFERENCES f)'] end it "should transform types given as ruby classes to database-specific types" do @db.default_string_column_size = 50 @db.create_table(:cats) do String :a String :a2, :size=>13 String :a3, :fixed=>true String :a4, :size=>13, :fixed=>true String :a5, :text=>true varchar :a6 varchar :a7, :size=>13 end @db.sqls.must_equal ['CREATE TABLE cats (a varchar(50), a2 varchar(13), a3 char(50), a4 char(13), a5 text, a6 varchar(50), a7 varchar(13))'] end it "should allow the use of modifiers with ruby class types" do @db.create_table(:cats) do String :a, :size=>50 String :b, :text=>true String :c, :fixed=>true, :size=>40 Time :d, :only_time=>true BigDecimal :e, :size=>[11,2] end @db.sqls.must_equal ['CREATE TABLE cats (a varchar(50), b text, c char(40), d time, e numeric(11, 2))'] end it "should use clob type for String columns if database uses clob for text" do @db.extend(Module.new{private; def uses_clob_for_text?; true; end}) @db.create_table(:cats) do String :b, :text=>true end @db.sqls.must_equal ['CREATE TABLE cats (b clob)'] end it "should allow the use of modifiers with ruby class types" do c = Class.new def c.name; 'Fixnum'; end @db.create_table(:cats) do column :a, c end @db.sqls.must_equal ['CREATE TABLE cats (a integer)'] end it "should raise an error if you use a ruby class that isn't handled" do proc{@db.create_table(:cats){column :a, Class}}.must_raise(Sequel::Error) end it "should accept primary key definition" do @db.create_table(:cats) do primary_key :id end @db.sqls.must_equal ['CREATE TABLE cats (id integer PRIMARY KEY AUTOINCREMENT)'] @db.create_table(:cats) do primary_key :id, :serial, :auto_increment => false end @db.sqls.must_equal ['CREATE TABLE cats (id serial PRIMARY KEY)'] @db.create_table(:cats) do primary_key :id, :type => :serial, :auto_increment => false end @db.sqls.must_equal ['CREATE TABLE cats (id serial PRIMARY KEY)'] @db.create_table(:cats) do Integer :a primary_key :id end @db.sqls.must_equal ['CREATE TABLE cats (id integer PRIMARY KEY AUTOINCREMENT, a integer)'] @db.create_table(:cats) do Integer :a primary_key :id, :Bignum end @db.sqls.must_equal ['CREATE TABLE cats (id bigint PRIMARY KEY AUTOINCREMENT, a integer)'] @db.create_table(:cats) do Integer :a primary_key :id, :keep_order=>true end @db.sqls.must_equal ['CREATE TABLE cats (a integer, id integer PRIMARY KEY AUTOINCREMENT)'] end it "should allow naming primary key constraint with :primary_key_constraint_name option" do @db.create_table(:cats) do primary_key :id, :primary_key_constraint_name=>:foo end @db.sqls.must_equal ['CREATE TABLE cats (id integer CONSTRAINT 
foo PRIMARY KEY AUTOINCREMENT)'] end it "should automatically set primary key column NOT NULL if database doesn't do it automatically" do def @db.can_add_primary_key_constraint_on_nullable_columns?; false end @db.create_table(:cats) do primary_key :id end @db.sqls.must_equal ['CREATE TABLE cats (id integer NOT NULL PRIMARY KEY AUTOINCREMENT)'] end it "should automatically set primary key column NOT NULL when adding constraint if database doesn't do it automatically" do def @db.can_add_primary_key_constraint_on_nullable_columns?; false end @db.create_table(:cats) do String :id primary_key [:id] end @db.sqls.must_equal ['CREATE TABLE cats (id varchar(255) NOT NULL, PRIMARY KEY (id))'] end it "should handle case where the primary key column cannot be found when adding NOT NULL constraint if database doesn't do it automatically" do def @db.can_add_primary_key_constraint_on_nullable_columns?; false end @db.create_table(:cats) do String Sequel[:id] primary_key [:id] end @db.sqls.must_equal ['CREATE TABLE cats (id varchar(255), PRIMARY KEY (id))'] end it "should handle splitting named column constraints into table constraints if unsupported" do def @db.supports_named_column_constraints?; false end @db.create_table(:cats) do primary_key :id, :primary_key_constraint_name=>:foo foreign_key :cat_id, :cats, :unique=>true, :unique_constraint_name=>:bar, :foreign_key_constraint_name=>:baz, :deferrable=>true, :key=>:foo_id, :on_delete=>:cascade, :on_update=>:restrict end @db.sqls.must_equal ['CREATE TABLE cats (id integer AUTOINCREMENT, cat_id integer, CONSTRAINT foo PRIMARY KEY (id), CONSTRAINT baz FOREIGN KEY (cat_id) REFERENCES cats(foo_id) ON DELETE CASCADE ON UPDATE RESTRICT DEFERRABLE INITIALLY DEFERRED, CONSTRAINT bar UNIQUE (cat_id))'] end it "should accept and literalize default values" do @db.create_table(:cats) do integer :id, :default => 123 text :name, :default => "abc'def" end @db.sqls.must_equal ["CREATE TABLE cats (id integer DEFAULT 123, name text DEFAULT 'abc''def')"] end it "should accept not null definition" do @db.create_table(:cats) do integer :id text :name, :null => false text :name2, :allow_null => false end @db.sqls.must_equal ["CREATE TABLE cats (id integer, name text NOT NULL, name2 text NOT NULL)"] end it "should accept null definition" do @db.create_table(:cats) do integer :id text :name, :null => true text :name2, :allow_null => true end @db.sqls.must_equal ["CREATE TABLE cats (id integer, name text NULL, name2 text NULL)"] end it "should accept unique definition" do @db.create_table(:cats) do integer :id text :name, :unique => true end @db.sqls.must_equal ["CREATE TABLE cats (id integer, name text UNIQUE)"] end it "should allow naming unique constraint with :unique_constraint_name option" do @db.create_table(:cats) do text :name, :unique => true, :unique_constraint_name=>:foo end @db.sqls.must_equal ["CREATE TABLE cats (name text CONSTRAINT foo UNIQUE)"] end it "should handle not deferred unique constraints" do @db.create_table(:cats) do integer :id text :name unique :name, :deferrable=>false end @db.sqls.must_equal ["CREATE TABLE cats (id integer, name text, UNIQUE (name) NOT DEFERRABLE)"] end it "should handle deferred unique constraints" do @db.create_table(:cats) do integer :id text :name unique :name, :deferrable=>true end @db.sqls.must_equal ["CREATE TABLE cats (id integer, name text, UNIQUE (name) DEFERRABLE INITIALLY DEFERRED)"] end it "should handle deferred initially immediate unique constraints" do @db.create_table(:cats) do integer :id text :name unique
:name, :deferrable=>:immediate end @db.sqls.must_equal ["CREATE TABLE cats (id integer, name text, UNIQUE (name) DEFERRABLE INITIALLY IMMEDIATE)"] end it "should handle deferred unique column constraints" do @db.create_table(:cats) do integer :id, :unique=>true, :unique_deferrable=>true integer :i, :unique=>true, :unique_deferrable=>:immediate integer :j, :unique=>true, :unique_deferrable=>false end @db.sqls.must_equal ["CREATE TABLE cats (id integer UNIQUE DEFERRABLE INITIALLY DEFERRED, i integer UNIQUE DEFERRABLE INITIALLY IMMEDIATE, j integer UNIQUE NOT DEFERRABLE)"] end it "should handle deferred primary key column constraints" do @db.create_table(:cats) do integer :id, :primary_key=>true, :primary_key_deferrable=>true integer :i, :primary_key=>true, :primary_key_deferrable=>:immediate integer :j, :primary_key=>true, :primary_key_deferrable=>false end @db.sqls.must_equal ["CREATE TABLE cats (id integer PRIMARY KEY DEFERRABLE INITIALLY DEFERRED, i integer PRIMARY KEY DEFERRABLE INITIALLY IMMEDIATE, j integer PRIMARY KEY NOT DEFERRABLE)"] end it "should accept unsigned definition" do @db.create_table(:cats) do integer :value, :unsigned => true end @db.sqls.must_equal ["CREATE TABLE cats (value integer UNSIGNED)"] end it "should accept [SET|ENUM](...) types" do @db.create_table(:cats) do set :color, :elements => ['black', 'tricolor', 'grey'] end @db.sqls.must_equal ["CREATE TABLE cats (color set('black', 'tricolor', 'grey'))"] end it "should accept varchar size" do @db.create_table(:cats) do varchar :name end @db.sqls.must_equal ["CREATE TABLE cats (name varchar(255))"] @db.create_table(:cats) do varchar :name, :size => 51 end @db.sqls.must_equal ["CREATE TABLE cats (name varchar(51))"] end it "should use double precision for double type" do @db.create_table(:cats) do double :name end @db.sqls.must_equal ["CREATE TABLE cats (name double precision)"] end it "should accept foreign keys without options" do @db.create_table(:cats) do foreign_key :project_id end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer)"] end it "should accept foreign keys with options" do @db.create_table(:cats) do foreign_key :project_id, :table => :projects end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects)"] end it "should accept foreign keys with separate table argument" do @db.create_table(:cats) do foreign_key :project_id, :projects, :default=>3 end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer DEFAULT 3 REFERENCES projects)"] end it "should allow naming foreign key constraint with :foreign_key_constraint_name option" do @db.create_table(:cats) do foreign_key :project_id, :projects, :foreign_key_constraint_name=>:foo end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer CONSTRAINT foo REFERENCES projects)"] end it "should raise an error if the table argument to foreign_key isn't a hash, symbol, or nil" do proc{@db.create_table(:cats){foreign_key :project_id, Object.new, :default=>3}}.must_raise(Sequel::Error) end it "should accept foreign keys with arbitrary keys" do @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :key => :id end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects(id))"] @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :key => :zzz end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects(zzz))"] end it "should accept foreign keys with ON DELETE clause" do @db.create_table(:cats) do foreign_key :project_id, :table =>
:projects, :on_delete => :restrict end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE RESTRICT)"] @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_delete => :cascade end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE CASCADE)"] @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_delete => :no_action end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE NO ACTION)"] @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_delete => :set_null end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE SET NULL)"] @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_delete => :set_default end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE SET DEFAULT)"] @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_delete => 'NO ACTION FOO' end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE NO ACTION FOO)"] end it "should accept foreign keys with ON UPDATE clause" do @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_update => :restrict end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE RESTRICT)"] @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_update => :cascade end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE CASCADE)"] @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_update => :no_action end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE NO ACTION)"] @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_update => :set_null end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE SET NULL)"] @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_update => :set_default end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE SET DEFAULT)"] @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_update => 'SET DEFAULT FOO' end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON UPDATE SET DEFAULT FOO)"] end it "should accept foreign keys with deferrable option" do @db.create_table(:cats) do foreign_key :project_id, :projects, :deferrable=>true end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects DEFERRABLE INITIALLY DEFERRED)"] end it "should accept collation" do @db.create_table(:cats) do String :name, :collate => :utf8_bin end @db.sqls.must_equal ['CREATE TABLE cats (name varchar(255) COLLATE utf8_bin)'] end it "should accept collation as a String, treated literally" do @db.create_table(:cats) do String :name, :collate => '"utf8_bin"' end @db.sqls.must_equal ['CREATE TABLE cats (name varchar(255) COLLATE "utf8_bin")'] end it "should accept inline index definition" do @db.create_table(:cats) do integer :id, :index => true end @db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)"] end it "should accept inline index definition for qualified table" do @db.create_table(Sequel[:sch][:cats]) do integer :id, :index => true end @db.sqls.must_equal ["CREATE TABLE sch.cats (id integer)", "CREATE 
INDEX sch_cats_id_index ON sch.cats (id)"] end it "should accept inline index definition with a hash of options" do @db.create_table(:cats) do integer :id, :index => {:unique=>true} end @db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE UNIQUE INDEX cats_id_index ON cats (id)"] end it "should accept inline index definition for foreign keys" do @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_delete => :cascade, :index => true end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE CASCADE)", "CREATE INDEX cats_project_id_index ON cats (project_id)"] end it "should accept inline index definition for foreign keys with a hash of options" do @db.create_table(:cats) do foreign_key :project_id, :table => :projects, :on_delete => :cascade, :index => {:unique=>true} end @db.sqls.must_equal ["CREATE TABLE cats (project_id integer REFERENCES projects ON DELETE CASCADE)", "CREATE UNIQUE INDEX cats_project_id_index ON cats (project_id)"] end it "should accept index definitions" do @db.create_table(:cats) do integer :id index :id end @db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)"] end it "should accept unique constraint definitions" do @db.create_table(:cats) do text :name unique :name end @db.sqls.must_equal ["CREATE TABLE cats (name text, UNIQUE (name))"] end it "should accept partial index definitions" do def @db.supports_partial_indexes?() true end @db.create_table(:cats) do integer :id index :id, :where=>proc{id > 1} end @db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id) WHERE (id > 1)"] end it "should raise an error if partial indexes are not supported" do proc do @db.create_table(:cats) do integer :id index :id, :where=>proc{id > 1} end end.must_raise(Sequel::Error) end it "should not raise on index error for unsupported index definitions if ignore_index_errors is used" do @db.create_table(:cats, :ignore_index_errors=>true) do text :name full_text_index :name end end it "should raise on full-text index definitions" do proc { @db.create_table(:cats) do text :name full_text_index :name end }.must_raise(Sequel::Error) end it "should raise on spatial index definitions" do proc { @db.create_table(:cats) do point :geom spatial_index :geom end }.must_raise(Sequel::Error) end it "should raise on partial index definitions" do proc { @db.create_table(:cats) do text :name index :name, :where => {:something => true} end }.must_raise(Sequel::Error) end it "should raise index definitions with type" do proc { @db.create_table(:cats) do text :name index :name, :type => :hash end }.must_raise(Sequel::Error) end it "should ignore errors if the database raises an error on an index creation statement and the :ignore_index_errors option is used" do @db.define_singleton_method(:execute_ddl){|*a| raise Sequel::DatabaseError if /blah/.match(a.first); super(*a)} lambda{@db.create_table(:cats){Integer :id; index :blah; index :id}}.must_raise(Sequel::DatabaseError) @db.sqls.must_equal ['CREATE TABLE cats (id integer)'] @db.create_table(:cats, :ignore_index_errors=>true){Integer :id; index :blah; index :id} @db.sqls.must_equal ['CREATE TABLE cats (id integer)', 'CREATE INDEX cats_id_index ON cats (id)'] end it "should not use savepoints around index creation if running inside a transaction if :ignore_index_errors option is used" do @db.define_singleton_method(:execute_ddl){|*a| super(*a); raise Sequel::DatabaseError if /blah/.match(a.first)} 
@db.transaction{@db.create_table(:cats, :ignore_index_errors=>true){Integer :id; index :blah; index :id}} @db.sqls.must_equal ["BEGIN", "CREATE TABLE cats (id integer)", "CREATE INDEX cats_blah_index ON cats (blah)", "CREATE INDEX cats_id_index ON cats (id)", "COMMIT"] end it "should use savepoints around index creation if running inside a transaction if :ignore_index_errors option is used and transactional schema modifications are supported" do @db.define_singleton_method(:supports_transactional_ddl?){true} @db.define_singleton_method(:execute_ddl){|*a| super(*a); raise Sequel::DatabaseError if /blah/.match(a.first)} @db.transaction{@db.create_table(:cats, :ignore_index_errors=>true){Integer :id; index :blah; index :id}} @db.sqls.must_equal ["BEGIN", "CREATE TABLE cats (id integer)", "SAVEPOINT autopoint_1", "CREATE INDEX cats_blah_index ON cats (blah)", "ROLLBACK TO SAVEPOINT autopoint_1", "SAVEPOINT autopoint_1", "CREATE INDEX cats_id_index ON cats (id)", "RELEASE SAVEPOINT autopoint_1", "COMMIT"] end it "should accept multiple index definitions" do @db.create_table(:cats) do integer :id index :id index :name end @db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_id_index ON cats (id)", "CREATE INDEX cats_name_index ON cats (name)"] end it "should accept functional indexes" do @db.create_table(:cats) do integer :id index Sequel.function(:lower, :name) end @db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_lower_name__index ON cats (lower(name))"] end it "should accept indexes with identifiers" do @db.create_table(:cats) do integer :id index Sequel.identifier(:lower__name) end @db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX cats_lower__name_index ON cats (lower__name)"] end it "should accept custom index names" do @db.create_table(:cats) do integer :id index :id, :name => 'abc' end @db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE INDEX abc ON cats (id)"] end it "should accept unique index definitions" do @db.create_table(:cats) do integer :id index :id, :unique => true end @db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE UNIQUE INDEX cats_id_index ON cats (id)"] end it "should accept composite index definitions" do @db.create_table(:cats) do integer :id index [:id, :name], :unique => true end @db.sqls.must_equal ["CREATE TABLE cats (id integer)", "CREATE UNIQUE INDEX cats_id_name_index ON cats (id, name)"] end it "should accept unnamed constraint definitions with blocks" do @db.create_table(:cats) do integer :score check{(x > 0) & (y < 1)} end @db.sqls.must_equal ["CREATE TABLE cats (score integer, CHECK ((x > 0) AND (y < 1)))"] end it "should accept unnamed constraint definitions with function calls" do @db.create_table(:cats) do integer :score check{f(x)} end @db.sqls.must_equal ["CREATE TABLE cats (score integer, CHECK (f(x)))"] end it "should accept unnamed constraint definitions" do @db.create_table(:cats) do check 'price < ?', 100 end @db.sqls.must_equal ["CREATE TABLE cats (CHECK (price < 100))"] end it "should accept arrays of pairs constraints" do @db.create_table(:cats) do check [[:price, 100]] end @db.sqls.must_equal ["CREATE TABLE cats (CHECK (price = 100))"] end it "should accept hash constraints" do @db.create_table(:cats) do check :price=>100 end @db.sqls.must_equal ["CREATE TABLE cats (CHECK (price = 100))"] end it "should accept array constraints" do @db.create_table(:cats) do check [Sequel.expr(:x) > 0, Sequel.expr(:y) < 1] end @db.sqls.must_equal ["CREATE TABLE cats (CHECK 
((x > 0) AND (y < 1)))"] end it "should accept expression constraints" do @db.create_table(:cats) do check Sequel.&(Sequel.expr(:x) > 0, Sequel.expr(:y) < 1) end @db.sqls.must_equal ["CREATE TABLE cats (CHECK ((x > 0) AND (y < 1)))"] end it "should accept named constraint definitions" do @db.create_table(:cats) do integer :score constraint :valid_score, 'score <= 100' end @db.sqls.must_equal ["CREATE TABLE cats (score integer, CONSTRAINT valid_score CHECK (score <= 100))"] end it "should accept named constraint definitions with options" do @db.create_table(:cats) do integer :score constraint({:name=>:valid_score, :deferrable=>true}, 'score <= 100') end @db.sqls.must_equal ["CREATE TABLE cats (score integer, CONSTRAINT valid_score CHECK (score <= 100) DEFERRABLE INITIALLY DEFERRED)"] end it "should accept named constraint definitions with block" do @db.create_table(:cats) do constraint(:blah_blah){(x.sql_number > 0) & (y.sql_number < 1)} end @db.sqls.must_equal ["CREATE TABLE cats (CONSTRAINT blah_blah CHECK ((x > 0) AND (y < 1)))"] end it "should raise an error if an invalid constraint type is used" do proc{@db.create_table(:cats){unique [:a, :b], :type=>:bb}}.must_raise(Sequel::Error) end it "should accept composite primary keys" do @db.create_table(:cats) do integer :a integer :b primary_key [:a, :b] end @db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, PRIMARY KEY (a, b))"] end it "should accept named composite primary keys" do @db.create_table(:cats) do integer :a integer :b primary_key [:a, :b], :name => :cpk end @db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, CONSTRAINT cpk PRIMARY KEY (a, b))"] end it "should accept composite foreign keys" do @db.create_table(:cats) do integer :a integer :b foreign_key [:a, :b], :abc end @db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc)"] end it "should accept named composite foreign keys" do @db.create_table(:cats) do integer :a integer :b foreign_key [:a, :b], :abc, :name => :cfk end @db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, CONSTRAINT cfk FOREIGN KEY (a, b) REFERENCES abc)"] end it "should accept composite foreign keys with arbitrary keys" do @db.create_table(:cats) do integer :a integer :b foreign_key [:a, :b], :abc, :key => [:real_a, :real_b] end @db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(real_a, real_b))"] @db.create_table(:cats) do integer :a integer :b foreign_key [:a, :b], :abc, :key => [:z, :x] end @db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(z, x))"] end it "should accept composite foreign keys with on delete and on update clauses" do @db.create_table(:cats) do integer :a integer :b foreign_key [:a, :b], :abc, :on_delete => :cascade end @db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON DELETE CASCADE)"] @db.create_table(:cats) do integer :a integer :b foreign_key [:a, :b], :abc, :on_update => :no_action end @db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON UPDATE NO ACTION)"] @db.create_table(:cats) do integer :a integer :b foreign_key [:a, :b], :abc, :on_delete => :restrict, :on_update => :set_default end @db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc ON DELETE RESTRICT ON UPDATE SET DEFAULT)"] @db.create_table(:cats) do integer :a integer :b foreign_key [:a, :b], :abc, :key => [:x, :y], 
:on_delete => :set_null, :on_update => :set_null end @db.sqls.must_equal ["CREATE TABLE cats (a integer, b integer, FOREIGN KEY (a, b) REFERENCES abc(x, y) ON DELETE SET NULL ON UPDATE SET NULL)"] end it "should accept an :as option to create a table from the results of a dataset" do @db.create_table(:cats, :as=>@db[:a]) @db.sqls.must_equal ['CREATE TABLE cats AS SELECT * FROM a'] end it "should accept an :as option to create a table from a SELECT string" do @db.create_table(:cats, :as=>'SELECT * FROM a') @db.sqls.must_equal ['CREATE TABLE cats AS SELECT * FROM a'] end it "should raise an Error if both a block and an :as argument are given" do proc{@db.create_table(:cats, :as=>@db[:a]){}}.must_raise(Sequel::Error) end end describe "DB#create_table!" do before do @db = Sequel.mock end it "should create the table if it does not exist" do @db.define_singleton_method(:table_exists?){|a| false} @db.create_table!(:cats){|*a|}.must_be_nil @db.sqls.must_equal ['CREATE TABLE cats ()'] end it "should drop the table before creating it if it already exists" do @db.define_singleton_method(:table_exists?){|a| true} @db.create_table!(:cats){|*a|} @db.sqls.must_equal ['DROP TABLE cats', 'CREATE TABLE cats ()'] end it "should use IF EXISTS if the database supports it" do @db.define_singleton_method(:supports_drop_table_if_exists?){true} @db.create_table!(:cats){|*a|} @db.sqls.must_equal ['DROP TABLE IF EXISTS cats', 'CREATE TABLE cats ()'] end end describe "DB#create_table?" do before do @db = Sequel.mock end it "should not create the table if the table already exists" do @db.define_singleton_method(:table_exists?){|a| true} @db.create_table?(:cats){|*a|}.must_be_nil @db.sqls.must_equal [] end it "should create the table if the table doesn't already exist" do @db.define_singleton_method(:table_exists?){|a| false} @db.create_table?(:cats){|*a|} @db.sqls.must_equal ['CREATE TABLE cats ()'] end it "should use IF NOT EXISTS if the database supports that" do @db.define_singleton_method(:supports_create_table_if_not_exists?){true} @db.create_table?(:cats){|*a|} @db.sqls.must_equal ['CREATE TABLE IF NOT EXISTS cats ()'] end it "should not use IF NOT EXISTS if the indexes are created" do @db.define_singleton_method(:table_exists?){|a| false} @db.define_singleton_method(:supports_create_table_if_not_exists?){true} @db.create_table?(:cats){|*a| Integer :a, :index=>true} @db.sqls.must_equal ['CREATE TABLE cats (a integer)', 'CREATE INDEX cats_a_index ON cats (a)'] @db.singleton_class.send(:alias_method, :table_exists?, :table_exists?) 
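# Note: aliasing the singleton method to itself above avoids Ruby's "method redefined" warning when table_exists? is redefined on the next line.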
@db.define_singleton_method(:table_exists?){|a| true} @db.create_table?(:cats){|*a| Integer :a, :index=>true} @db.sqls.must_equal [] end end describe "DB#create_join_table" do before do @db = Sequel.mock end it "should take a hash with foreign keys and table name values" do @db.create_join_table(:cat_id=>:cats, :dog_id=>:dogs).must_be_nil @db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)'] end it "should be able to have values be a hash of options" do @db.create_join_table(:cat_id=>{:table=>:cats, :null=>true}, :dog_id=>{:table=>:dogs, :default=>0}) @db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NULL REFERENCES cats, dog_id integer DEFAULT 0 NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)'] end it "should be able to pass a second hash of table options" do @db.create_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :temp=>true) @db.sqls.must_equal ['CREATE TEMPORARY TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)'] end it "should recognize :name option in table options" do @db.create_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :name=>:f) @db.sqls.must_equal ['CREATE TABLE f (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX f_dog_id_cat_id_index ON f (dog_id, cat_id)'] end it "should recognize :index_options option in table options" do @db.create_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :index_options=>{:name=>:foo_index}) @db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX foo_index ON cats_dogs (dog_id, cat_id)'] end it "should recognize :no_index option in table options" do @db.create_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :no_index=>true) @db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))'] end it "should recognize :no_primary_key option in table options" do @db.create_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :no_primary_key=>true) @db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs)', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)'] end it "should raise an error if the hash doesn't have 2 entries with table names" do proc{@db.create_join_table({})}.must_raise(Sequel::Error) proc{@db.create_join_table({:cat_id=>:cats})}.must_raise(Sequel::Error) proc{@db.create_join_table({:cat_id=>:cats, :human_id=>:humans, :dog_id=>:dog})}.must_raise(Sequel::Error) proc{@db.create_join_table({:cat_id=>:cats, :dog_id=>{}})}.must_raise(Sequel::Error) end end describe "DB#create_join_table?" 
do before do @db = Sequel.mock end it "should create the table if it does not already exist" do @db.define_singleton_method(:table_exists?){|a| false} @db.create_join_table?(:cat_id=>:cats, :dog_id=>:dogs).must_be_nil @db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)'] end it "should not create the table if it already exists" do @db.define_singleton_method(:table_exists?){|a| true} @db.create_join_table?(:cat_id=>:cats, :dog_id=>:dogs) @db.sqls.must_equal [] end it "should not use IF NOT EXISTS" do @db.define_singleton_method(:table_exists?){|a| false} @db.define_singleton_method(:supports_create_table_if_not_exists?){true} @db.create_join_table?(:cat_id=>:cats, :dog_id=>:dogs) @db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)'] @db.singleton_class.send(:alias_method, :table_exists?, :table_exists?) @db.define_singleton_method(:table_exists?){|a| true} @db.create_join_table?(:cat_id=>:cats, :dog_id=>:dogs) @db.sqls.must_equal [] end it "should use IF NOT EXISTS if no_index is used" do @db.define_singleton_method(:supports_create_table_if_not_exists?){true} @db.create_join_table?({:cat_id=>:cats, :dog_id=>:dogs}, :no_index=>true) @db.sqls.must_equal ['CREATE TABLE IF NOT EXISTS cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))'] end end describe "DB#create_join_table!" do before do @db = Sequel.mock end it "should drop the table first if it already exists" do @db.define_singleton_method(:table_exists?){|a| true} @db.create_join_table!(:cat_id=>:cats, :dog_id=>:dogs).must_be_nil @db.sqls.must_equal ['DROP TABLE cats_dogs', 'CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)'] end it "should not drop the table if it doesn't exist" do @db.define_singleton_method(:table_exists?){|a| false} @db.create_join_table!(:cat_id=>:cats, :dog_id=>:dogs) @db.sqls.must_equal ['CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)'] end it "should use IF EXISTS if the database supports it" do @db.define_singleton_method(:supports_drop_table_if_exists?){true} @db.create_join_table!(:cat_id=>:cats, :dog_id=>:dogs) @db.sqls.must_equal ['DROP TABLE IF EXISTS cats_dogs', 'CREATE TABLE cats_dogs (cat_id integer NOT NULL REFERENCES cats, dog_id integer NOT NULL REFERENCES dogs, PRIMARY KEY (cat_id, dog_id))', 'CREATE INDEX cats_dogs_dog_id_cat_id_index ON cats_dogs (dog_id, cat_id)'] end end describe "DB#drop_join_table" do before do @db = Sequel.mock end it "should take a hash with foreign keys and table name values and drop the table" do @db.drop_join_table(:cat_id=>:cats, :dog_id=>:dogs).must_be_nil @db.sqls.must_equal ['DROP TABLE cats_dogs'] end it "should be able to have values be a hash of options" do @db.drop_join_table(:cat_id=>{:table=>:cats, :null=>true}, :dog_id=>{:table=>:dogs, :default=>0}) @db.sqls.must_equal ['DROP TABLE cats_dogs'] end it "should 
respect a second hash of table options" do @db.drop_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :cascade=>true) @db.sqls.must_equal ['DROP TABLE cats_dogs CASCADE'] end it "should respect :name option for table name" do @db.drop_join_table({:cat_id=>:cats, :dog_id=>:dogs}, :name=>:f) @db.sqls.must_equal ['DROP TABLE f'] end it "should raise an error if the hash doesn't have 2 entries with table names" do proc{@db.drop_join_table({})}.must_raise(Sequel::Error) proc{@db.drop_join_table({:cat_id=>:cats})}.must_raise(Sequel::Error) proc{@db.drop_join_table({:cat_id=>:cats, :human_id=>:humans, :dog_id=>:dog})}.must_raise(Sequel::Error) proc{@db.drop_join_table({:cat_id=>:cats, :dog_id=>{}})}.must_raise(Sequel::Error) end end describe "DB#drop_table" do before do @db = Sequel.mock end it "should generate a DROP TABLE statement" do @db.drop_table(:cats).must_be_nil @db.sqls.must_equal ['DROP TABLE cats'] end it "should drop multiple tables at once" do @db.drop_table :cats, :dogs @db.sqls.must_equal ['DROP TABLE cats', 'DROP TABLE dogs'] end it "should take an options hash and support the :cascade option" do @db.drop_table :cats, :dogs, :cascade=>true @db.sqls.must_equal ['DROP TABLE cats CASCADE', 'DROP TABLE dogs CASCADE'] end end describe "DB#drop_table?" do before do @db = Sequel.mock end it "should drop the table if it exists" do @db.define_singleton_method(:table_exists?){|a| true} @db.drop_table?(:cats).must_be_nil @db.sqls.must_equal ["DROP TABLE cats"] end it "should do nothing if the table does not exist" do @db.define_singleton_method(:table_exists?){|a| false} @db.drop_table?(:cats) @db.sqls.must_equal [] end it "should operate on multiple tables at once" do @db.define_singleton_method(:table_exists?){|a| a == :cats} @db.drop_table? :cats, :dogs @db.sqls.must_equal ['DROP TABLE cats'] end it "should take an options hash and support the :cascade option" do @db.define_singleton_method(:table_exists?){|a| true} @db.drop_table? :cats, :dogs, :cascade=>true @db.sqls.must_equal ['DROP TABLE cats CASCADE', 'DROP TABLE dogs CASCADE'] end it "should use IF NOT EXISTS if the database supports that" do @db.define_singleton_method(:supports_drop_table_if_exists?){true} @db.drop_table? :cats, :dogs @db.sqls.must_equal ['DROP TABLE IF EXISTS cats', 'DROP TABLE IF EXISTS dogs'] end it "should use IF NOT EXISTS with CASCADE if the database supports that" do @db.define_singleton_method(:supports_drop_table_if_exists?){true} @db.drop_table? 
:cats, :dogs, :cascade=>true @db.sqls.must_equal ['DROP TABLE IF EXISTS cats CASCADE', 'DROP TABLE IF EXISTS dogs CASCADE'] end end describe "DB#alter_table" do before do @db = Sequel.mock end it "should allow adding not null constraint via set_column_allow_null with false argument" do @db.alter_table(:cats) do set_column_allow_null :score, false end.must_be_nil @db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score SET NOT NULL"] end it "should allow removing not null constraint via set_column_allow_null with true argument" do @db.alter_table(:cats) do set_column_allow_null :score, true end @db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score DROP NOT NULL"] end it "should allow adding not null constraint via set_column_not_null" do @db.alter_table(:cats) do set_column_not_null :score end @db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score SET NOT NULL"] end it "should allow removing not null constraint via set_column_allow_null without argument" do @db.alter_table(:cats) do set_column_allow_null :score end @db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score DROP NOT NULL"] end it "should support add_column" do @db.alter_table(:cats) do add_column :score, :integer end @db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN score integer"] end it "should support add_constraint" do @db.alter_table(:cats) do add_constraint :valid_score, 'score <= 100' end @db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT valid_score CHECK (score <= 100)"] end it "should support add_constraint with options" do @db.alter_table(:cats) do add_constraint({:name=>:valid_score, :deferrable=>true}, 'score <= 100') end @db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT valid_score CHECK (score <= 100) DEFERRABLE INITIALLY DEFERRED"] end it "should support add_constraint with block" do @db.alter_table(:cats) do add_constraint(:blah_blah){(x.sql_number > 0) & (y.sql_number < 1)} end @db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT blah_blah CHECK ((x > 0) AND (y < 1))"] end it "should support add_unique_constraint" do @db.alter_table(:cats) do add_unique_constraint [:a, :b] end @db.sqls.must_equal ["ALTER TABLE cats ADD UNIQUE (a, b)"] @db.alter_table(:cats) do add_unique_constraint [:a, :b], :name => :ab_uniq end @db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT ab_uniq UNIQUE (a, b)"] end it "should support add_foreign_key" do @db.alter_table(:cats) do add_foreign_key :node_id, :nodes end @db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN node_id integer REFERENCES nodes"] end it "should support add_foreign_key with :index option" do @db.alter_table(:cats) do add_foreign_key :node_id, :nodes, :index=>true end @db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN node_id integer REFERENCES nodes", "CREATE INDEX cats_node_id_index ON cats (node_id)"] end it "should support add_foreign_key with composite foreign keys" do @db.alter_table(:cats) do add_foreign_key [:node_id, :prop_id], :nodes_props end @db.sqls.must_equal ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props"] @db.alter_table(:cats) do add_foreign_key [:node_id, :prop_id], :nodes_props, :name => :cfk end @db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT cfk FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props"] @db.alter_table(:cats) do add_foreign_key [:node_id, :prop_id], :nodes_props, :key => [:nid, :pid] end @db.sqls.must_equal ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props(nid, pid)"] @db.alter_table(:cats) do add_foreign_key [:node_id, :prop_id], :nodes_props, 
:on_delete => :restrict, :on_update => :cascade end @db.sqls.must_equal ["ALTER TABLE cats ADD FOREIGN KEY (node_id, prop_id) REFERENCES nodes_props ON DELETE RESTRICT ON UPDATE CASCADE"] end it "should support add_column with :index=>true option" do @db.alter_table(:cats) do add_column :name, Integer, :index=>true end @db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN name integer", "CREATE INDEX cats_name_index ON cats (name)"] end it "should support add_column with :index=>hash option" do @db.alter_table(:cats) do add_column :name, Integer, :index=>{:name=>:foo} end @db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN name integer", "CREATE INDEX foo ON cats (name)"] end it "should support add_index" do @db.alter_table(:cats) do add_index :name end @db.sqls.must_equal ["CREATE INDEX cats_name_index ON cats (name)"] end it "should ignore errors if the database raises an error on an add_index call and the :ignore_errors option is used" do @db.define_singleton_method(:execute_ddl){|*a| raise Sequel::DatabaseError} lambda{@db.add_index(:cats, :id)}.must_raise(Sequel::DatabaseError) @db.add_index(:cats, :id, :ignore_errors=>true) @db.sqls.must_equal [] end it "should support add_primary_key" do @db.alter_table(:cats) do add_primary_key :id end @db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN id integer PRIMARY KEY AUTOINCREMENT"] end it "should support add_primary_key with composite primary keys" do @db.alter_table(:cats) do add_primary_key [:id, :type] end @db.sqls.must_equal ["ALTER TABLE cats ADD PRIMARY KEY (id, type)"] @db.alter_table(:cats) do add_primary_key [:id, :type], :name => :cpk end @db.sqls.must_equal ["ALTER TABLE cats ADD CONSTRAINT cpk PRIMARY KEY (id, type)"] end it "should set primary key column NOT NULL when using add_primary_key if database doesn't handle it" do def @db.can_add_primary_key_constraint_on_nullable_columns?; false end @db.alter_table(:cats) do add_primary_key :id end @db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN id integer NOT NULL PRIMARY KEY AUTOINCREMENT"] end it "should set primary key column NOT NULL when adding primary key constraint if database doesn't handle it" do def @db.can_add_primary_key_constraint_on_nullable_columns?; false end @db.alter_table(:cats) do add_primary_key [:id, :type] end @db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN id SET NOT NULL", "ALTER TABLE cats ALTER COLUMN type SET NOT NULL", "ALTER TABLE cats ADD PRIMARY KEY (id, type)"] end it "should support drop_column" do @db.alter_table(:cats) do drop_column :score end @db.sqls.must_equal ["ALTER TABLE cats DROP COLUMN score"] end it "should support drop_column with :cascade=>true option" do @db.alter_table(:cats) do drop_column :score, :cascade=>true end @db.sqls.must_equal ["ALTER TABLE cats DROP COLUMN score CASCADE"] end it "should support drop_constraint" do @db.alter_table(:cats) do drop_constraint :valid_score end @db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT valid_score"] end it "should support drop_constraint with :cascade=>true option" do @db.alter_table(:cats) do drop_constraint :valid_score, :cascade=>true end @db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT valid_score CASCADE"] end it "should support drop_foreign_key" do def @db.foreign_key_list(table_name) [{:name=>:cats_node_id_fkey, :columns=>[:node_id]}] end @db.alter_table(:cats) do drop_foreign_key :node_id end @db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT cats_node_id_fkey", "ALTER TABLE cats DROP COLUMN node_id"] end it "should support drop_foreign_key with 
:foreign_key_constraint_name option" do @db.alter_table(:cats) do drop_foreign_key :node_id, :foreign_key_constraint_name=>:foo end @db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT foo", "ALTER TABLE cats DROP COLUMN node_id"] end it "should support drop_foreign_key with :name option" do @db.alter_table(:cats) do drop_foreign_key :node_id, :name=>:foo end @db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT foo", "ALTER TABLE cats DROP COLUMN node_id"] end it "should support drop_foreign_key with composite foreign keys" do def @db.foreign_key_list(table_name) [{:name=>:cats_node_id_prop_id_fkey, :columns=>[:node_id, :prop_id]}] end @db.alter_table(:cats) do drop_foreign_key [:node_id, :prop_id] end @db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT cats_node_id_prop_id_fkey"] @db.alter_table(:cats) do drop_foreign_key [:node_id, :prop_id], :name => :cfk end @db.sqls.must_equal ["ALTER TABLE cats DROP CONSTRAINT cfk"] end it "should have drop_foreign_key raise Error if no name is found" do def @db.foreign_key_list(table_name) [{:name=>:cats_node_id_fkey, :columns=>[:foo_id]}] end lambda{@db.alter_table(:cats){drop_foreign_key :node_id}}.must_raise(Sequel::Error) end it "should have drop_foreign_key raise Error if multiple foreign keys found" do def @db.foreign_key_list(table_name) [{:name=>:cats_node_id_fkey, :columns=>[:node_id]}, {:name=>:cats_node_id_fkey2, :columns=>[:node_id]}] end lambda{@db.alter_table(:cats){drop_foreign_key :node_id}}.must_raise(Sequel::Error) end it "should support drop_index" do @db.alter_table(:cats) do drop_index :name end @db.sqls.must_equal ["DROP INDEX cats_name_index"] end it "should support drop_index with a given name" do @db.alter_table(:cats) do drop_index :name, :name=>:blah_blah end @db.sqls.must_equal ["DROP INDEX blah_blah"] end it "should support rename_column" do @db.alter_table(:cats) do rename_column :name, :old_name end @db.sqls.must_equal ["ALTER TABLE cats RENAME COLUMN name TO old_name"] end it "should support set_column_default" do @db.alter_table(:cats) do set_column_default :score, 3 end @db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score SET DEFAULT 3"] end it "should support set_column_type" do @db.alter_table(:cats) do set_column_type :score, :real end @db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score TYPE real"] end it "should support set_column_type with options" do @db.alter_table(:cats) do set_column_type :score, :integer, :unsigned=>true set_column_type :score, :varchar, :size=>30 set_column_type :score, :enum, :elements=>['a', 'b'] end @db.sqls.must_equal ["ALTER TABLE cats ALTER COLUMN score TYPE integer UNSIGNED", "ALTER TABLE cats ALTER COLUMN score TYPE varchar(30)", "ALTER TABLE cats ALTER COLUMN score TYPE enum('a', 'b')"] end it "should combine operations into a single query if the database supports it" do @db.define_singleton_method(:supports_combining_alter_table_ops?){true} @db.alter_table(:cats) do add_column :a, Integer drop_column :b set_column_not_null :c rename_column :d, :e set_column_default :f, 'g' set_column_type :h, Integer add_constraint(:i){a > 1} drop_constraint :j end @db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN a integer, DROP COLUMN b, ALTER COLUMN c SET NOT NULL, RENAME COLUMN d TO e, ALTER COLUMN f SET DEFAULT 'g', ALTER COLUMN h TYPE integer, ADD CONSTRAINT i CHECK (a > 1), DROP CONSTRAINT j"] end it "should combine operations into consecutive groups of combinable operations if the database supports combining operations" do 
@db.define_singleton_method(:supports_combining_alter_table_ops?){true} @db.alter_table(:cats) do add_column :a, Integer drop_column :b set_column_not_null :c rename_column :d, :e add_index :e set_column_default :f, 'g' set_column_type :h, Integer add_constraint(:i){a > 1} drop_constraint :j end @db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN a integer, DROP COLUMN b, ALTER COLUMN c SET NOT NULL, RENAME COLUMN d TO e", "CREATE INDEX cats_e_index ON cats (e)", "ALTER TABLE cats ALTER COLUMN f SET DEFAULT 'g', ALTER COLUMN h TYPE integer, ADD CONSTRAINT i CHECK (a > 1), DROP CONSTRAINT j"] end it "should handle operations that don't emit SQL when combining" do @db.define_singleton_method(:supports_combining_alter_table_ops?){true} @db.define_singleton_method(:combinable_alter_table_op?){|op| super(op) && (op[:op] != :rename_column || op[:name] == :d2)} @db.define_singleton_method(:alter_table_op_sql){|t, op| super(t, op) unless op[:op] == :rename_column} @db.alter_table(:cats) do rename_column :d, :e add_column :a, Integer drop_column :b set_column_not_null :c rename_column :d2, :e2 add_index :e set_column_default :f, 'g' set_column_type :h, Integer add_constraint(:i){a > 1} drop_constraint :j end @db.sqls.must_equal ["ALTER TABLE cats ADD COLUMN a integer, DROP COLUMN b, ALTER COLUMN c SET NOT NULL", "CREATE INDEX cats_e_index ON cats (e)", "ALTER TABLE cats ALTER COLUMN f SET DEFAULT 'g', ALTER COLUMN h TYPE integer, ADD CONSTRAINT i CHECK (a > 1), DROP CONSTRAINT j"] end end describe "Database#create_table" do before do @db = Sequel.mock end it "should construct proper SQL" do @db.create_table :test do primary_key :id, :integer, :null => false column :name, :text index :name, :unique => true end @db.sqls.must_equal ['CREATE TABLE test (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, name text)', 'CREATE UNIQUE INDEX test_name_index ON test (name)'] end it "should create a temporary table" do @db.create_table :test_tmp, :temp => true do primary_key :id, :integer, :null => false column :name, :text index :name, :unique => true end @db.sqls.must_equal ['CREATE TEMPORARY TABLE test_tmp (id integer NOT NULL PRIMARY KEY AUTOINCREMENT, name text)', 'CREATE UNIQUE INDEX test_tmp_name_index ON test_tmp (name)'] end end describe "Database#alter_table" do before do @db = Sequel.mock end it "should construct proper SQL" do @db.alter_table :xyz do add_column :aaa, :text, :null => false, :unique => true drop_column :bbb rename_column :ccc, :ddd set_column_type :eee, :integer set_column_default :hhh, 'abcd' add_index :fff, :unique => true drop_index :ggg end @db.sqls.must_equal ['ALTER TABLE xyz ADD COLUMN aaa text NOT NULL UNIQUE', 'ALTER TABLE xyz DROP COLUMN bbb', 'ALTER TABLE xyz RENAME COLUMN ccc TO ddd', 'ALTER TABLE xyz ALTER COLUMN eee TYPE integer', "ALTER TABLE xyz ALTER COLUMN hhh SET DEFAULT 'abcd'", 'CREATE UNIQUE INDEX xyz_fff_index ON xyz (fff)', 'DROP INDEX xyz_ggg_index'] end end describe "Database#add_column" do it "should construct proper SQL" do db = Sequel.mock db.add_column(:test, :name, :text, :unique => true).must_be_nil db.sqls.must_equal ['ALTER TABLE test ADD COLUMN name text UNIQUE'] end end describe "Database#drop_column" do before do @db = Sequel.mock end it "should construct proper SQL" do @db.drop_column(:test, :name).must_be_nil @db.sqls.must_equal ['ALTER TABLE test DROP COLUMN name'] end it "should use CASCADE for :cascade=>true option" do @db.drop_column :test, :name, :cascade=>true @db.sqls.must_equal ['ALTER TABLE test DROP COLUMN name CASCADE'] end end describe 
"Database#rename_column" do before do @db = Sequel.mock end it "should construct proper SQL" do @db.rename_column(:test, :abc, :def).must_be_nil @db.sqls.must_equal ['ALTER TABLE test RENAME COLUMN abc TO def'] end end describe "Database#set_column_type" do before do @db = Sequel.mock end it "should construct proper SQL" do @db.set_column_type(:test, :name, :integer).must_be_nil @db.sqls.must_equal ['ALTER TABLE test ALTER COLUMN name TYPE integer'] end end describe "Database#set_column_default" do before do @db = Sequel.mock end it "should construct proper SQL" do @db.set_column_default(:test, :name, 'zyx').must_be_nil @db.sqls.must_equal ["ALTER TABLE test ALTER COLUMN name SET DEFAULT 'zyx'"] end end describe "Database#add_index" do before do @db = Sequel.mock end it "should construct proper SQL" do @db.add_index(:test, :name, :unique => true).must_be_nil @db.sqls.must_equal ['CREATE UNIQUE INDEX test_name_index ON test (name)'] end it "should accept multiple columns" do @db.add_index :test, [:one, :two] @db.sqls.must_equal ['CREATE INDEX test_one_two_index ON test (one, two)'] end end describe "Database#drop_index" do before do @db = Sequel.mock end it "should construct proper SQL" do @db.drop_index(:test, :name).must_be_nil @db.sqls.must_equal ['DROP INDEX test_name_index'] end end describe "Database#drop_table" do before do @db = Sequel.mock end it "should construct proper SQL" do @db.drop_table(:test).must_be_nil @db.sqls.must_equal ['DROP TABLE test'] end it "should accept multiple table names" do @db.drop_table :a, :bb, :ccc @db.sqls.must_equal ['DROP TABLE a', 'DROP TABLE bb', 'DROP TABLE ccc'] end end describe "Database#rename_table" do before do @db = Sequel.mock end it "should construct proper SQL" do @db.rename_table(:abc, :xyz).must_be_nil @db.sqls.must_equal ['ALTER TABLE abc RENAME TO xyz'] end end describe "Database#create_view" do before do @db = Sequel.mock end it "should construct proper SQL with raw SQL" do @db.create_view(:test, "SELECT * FROM xyz").must_be_nil @db.sqls.must_equal ['CREATE VIEW test AS SELECT * FROM xyz'] @db.create_view Sequel.identifier(:test), "SELECT * FROM xyz" @db.sqls.must_equal ['CREATE VIEW test AS SELECT * FROM xyz'] end it "should construct proper SQL with dataset" do @db.create_view :test, @db[:items].select(:a, :b).order(:c) @db.sqls.must_equal ['CREATE VIEW test AS SELECT a, b FROM items ORDER BY c'] end it "should handle :columns option" do @db.create_view :test, @db[:items].select(:a, :b).order(:c), :columns=>[:d, :e] @db.sqls.must_equal ['CREATE VIEW test (d, e) AS SELECT a, b FROM items ORDER BY c'] @db.create_view :test, @db[:items].select(:a, :b).order(:c), :columns=>%w'd e' @db.sqls.must_equal ['CREATE VIEW test (d, e) AS SELECT a, b FROM items ORDER BY c'] @db.create_view :test, @db[:items].select(:a, :b).order(:c), :columns=>[Sequel.identifier('d'), Sequel.lit('e')] @db.sqls.must_equal ['CREATE VIEW test (d, e) AS SELECT a, b FROM items ORDER BY c'] end it "should handle :check option" do @db.create_view :test, @db[:items].select(:a, :b).order(:c), :check=>true @db.sqls.must_equal ['CREATE VIEW test AS SELECT a, b FROM items ORDER BY c WITH CHECK OPTION'] @db.create_view :test, @db[:items].select(:a, :b).order(:c), :check=>:local @db.sqls.must_equal ['CREATE VIEW test AS SELECT a, b FROM items ORDER BY c WITH LOCAL CHECK OPTION'] end with_symbol_splitting "should handle create_or_replace_view with splittable symbols" do @db.create_or_replace_view :sch__test, "SELECT * FROM xyz" @db.sqls.must_equal ['DROP VIEW sch.test', 'CREATE 
VIEW sch.test AS SELECT * FROM xyz'] end it "should handle create_or_replace_view" do @db.create_or_replace_view :test, @db[:items].select(:a, :b).order(:c) @db.sqls.must_equal ['DROP VIEW test', 'CREATE VIEW test AS SELECT a, b FROM items ORDER BY c'] @db.create_or_replace_view Sequel.identifier(:test), @db[:items].select(:a, :b).order(:c) @db.sqls.must_equal ['DROP VIEW test', 'CREATE VIEW test AS SELECT a, b FROM items ORDER BY c'] end it "should handle create_or_replace_view when DROP VIEW raises a database error" do def @db.drop_view(*) super; raise Sequel::DatabaseError end @db.create_or_replace_view :test, @db[:items].select(:a, :b).order(:c) @db.sqls.must_equal ['DROP VIEW test', 'CREATE VIEW test AS SELECT a, b FROM items ORDER BY c'] end it "should raise in create_or_replace_view when DROP VIEW raises a disconnect error" do def @db.drop_view(*) super; raise Sequel::DatabaseDisconnectError end proc{@db.create_or_replace_view :test, @db[:items].select(:a, :b).order(:c)}.must_raise Sequel::DatabaseDisconnectError @db.sqls.must_equal ['DROP VIEW test'] end it "should raise in create_or_replace_view when DROP VIEW raises a connect error" do def @db.drop_view(*) super; raise Sequel::DatabaseConnectionError end proc{@db.create_or_replace_view :test, @db[:items].select(:a, :b).order(:c)}.must_raise Sequel::DatabaseConnectionError @db.sqls.must_equal ['DROP VIEW test'] end it "should use CREATE OR REPLACE VIEW if such syntax is supported" do def @db.supports_create_or_replace_view?() true end @db.create_or_replace_view :test, @db[:items] @db.sqls.must_equal ['CREATE OR REPLACE VIEW test AS SELECT * FROM items'] end end describe "Database#drop_view" do before do @db = Sequel.mock end with_symbol_splitting "should construct proper SQL for splittable symbols" do @db.drop_view(:sch__test).must_be_nil @db.sqls.must_equal ['DROP VIEW sch.test'] end it "should construct proper SQL" do @db.drop_view :test @db.drop_view Sequel.identifier(:test) @db.drop_view Sequel.qualify(:sch, :test) @db.sqls.must_equal ['DROP VIEW test', 'DROP VIEW test', 'DROP VIEW sch.test'] end it "should drop multiple views at once" do @db.drop_view :cats, :dogs @db.sqls.must_equal ['DROP VIEW cats', 'DROP VIEW dogs'] end it "should support the :cascade option" do @db.drop_view :cats, :dogs, :cascade=>true @db.sqls.must_equal ['DROP VIEW cats CASCADE', 'DROP VIEW dogs CASCADE'] end it "should support the :if_exists option" do @db.drop_view :cats, :dogs, :if_exists=>true @db.sqls.must_equal ['DROP VIEW IF EXISTS cats', 'DROP VIEW IF EXISTS dogs'] end end describe "Database#alter_table_sql" do it "should raise error for an invalid op" do proc {Sequel.mock.send(:alter_table_sql, :mau, :op => :blah)}.must_raise(Sequel::Error) end end describe "Schema Parser" do before do @sqls = [] @db = Sequel::Database.new end it "should raise an error if there are no columns" do @db.define_singleton_method(:schema_parse_table) do |t, opts| [] end proc{@db.schema(:x)}.must_raise(Sequel::Error) end it "should cache data by default" do @db.define_singleton_method(:schema_parse_table) do |t, opts| [[:a, {}]] end @db.schema(:x).must_be_same_as(@db.schema(:x)) end it "should not cache data if :reload=>true is given" do @db.define_singleton_method(:schema_parse_table) do |t, opts| [[:a, {}]] end @db.schema(:x).wont_be_same_as(@db.schema(:x, :reload=>true)) end it "should not cache schema metadata if cache_schema is false" do @db.cache_schema = false @db.define_singleton_method(:schema_parse_table) do |t, opts| [[:a, {}]] end 
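# With cache_schema set to false, each Database#schema call re-parses the table, so repeated calls return distinct objects.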
@db.schema(:x).wont_be_same_as(@db.schema(:x)) end it "should freeze string values in resulting hash" do @db.define_singleton_method(:schema_parse_table) do |t, opts| [[:a, {:oid=>1, :db_type=>'integer'.dup, :default=>"'a'".dup, :ruby_default=>'a'.dup}]] end c = @db.schema(:x)[0][1] c[:db_type].frozen?.must_equal true c[:default].frozen?.must_equal true c[:ruby_default].frozen?.must_equal true end it "should provide options if given a table name" do c = nil @db.define_singleton_method(:schema_parse_table) do |t, opts| c = [t, opts] [[:a, {:db_type=>t.to_s}]] end @db.schema(:x) c.must_equal ["x", {}] @db.schema(Sequel[:s][:x]) c.must_equal ["x", {:schema=>"s"}] ds = @db[Sequel[:s][:y]] @db.schema(ds) c.must_equal ["y", {:schema=>"s", :dataset=>ds}] end it "should raise error if asked to schema parse a dataset not involving a single table" do @db.define_singleton_method(:schema_parse_table) do |t, opts| [[:a, {:db_type=>t.to_s}]] end proc{@db.schema(@db.from)}.must_raise Sequel::Error proc{@db.schema(@db.from(:x, :y))}.must_raise Sequel::Error proc{@db.schema(@db.from(:x).cross_join(:y))}.must_raise Sequel::Error end with_symbol_splitting "should provide options if given a table name with splittable symbols" do c = nil @db.define_singleton_method(:schema_parse_table) do |t, opts| c = [t, opts] [[:a, {:db_type=>t.to_s}]] end @db.schema(:s__x) c.must_equal ["x", {:schema=>"s"}] ds = @db[:s__y] @db.schema(ds) c.must_equal ["y", {:schema=>"s", :dataset=>ds}] end it "should parse the schema correctly for a single table" do sqls = @sqls proc{@db.schema(:x)}.must_raise(Sequel::Error) @db.define_singleton_method(:schema_parse_table) do |t, opts| sqls << t [[:a, {:db_type=>t.to_s}]] end @db.schema(:x).must_equal [[:a, {:db_type=>"x", :ruby_default=>nil}]] @sqls.must_equal ['x'] @db.schema(:x).must_equal [[:a, {:db_type=>"x", :ruby_default=>nil}]] @sqls.must_equal ['x'] @db.schema(:x, :reload=>true).must_equal [[:a, {:db_type=>"x", :ruby_default=>nil}]] @sqls.must_equal ['x', 'x'] end it "should dedup :db_type strings" do @db.define_singleton_method(:schema_parse_table) do |t, opts| [[:a, {:db_type=>t.to_s.dup}], [:b, {:db_type=>t.to_s.dup}]] end sch = @db.schema(:x) sch.must_equal [[:a, {:db_type=>"x", :ruby_default=>nil}], [:b, {:db_type=>"x", :ruby_default=>nil}]] sch[0][1][:db_type].must_be_same_as(sch[1][1][:db_type]) end if RUBY_VERSION >= '2.5' it "should set :auto_increment to true by default if unset and a single integer primary key is used" do @db.define_singleton_method(:schema_parse_table){|*| [[:a, {:primary_key=>true, :db_type=>'integer'}]]} @db.schema(:x).first.last[:auto_increment].must_equal true end it "should not set :auto_increment if already set" do @db.define_singleton_method(:schema_parse_table){|*| [[:a, {:primary_key=>true, :db_type=>'integer', :auto_increment=>false}]]} @db.schema(:x).first.last[:auto_increment].must_equal false end it "should set :auto_increment to false by default if unset and a single nonintegery primary key is used" do @db.define_singleton_method(:schema_parse_table){|*| [[:a, {:primary_key=>true, :db_type=>'varchar'}]]} @db.schema(:x).first.last[:auto_increment].must_equal false end it "should set :auto_increment to false by default if unset and a composite primary key" do @db.define_singleton_method(:schema_parse_table){|*| [[:a, {:primary_key=>true, :db_type=>'integer'}], [:b, {:primary_key=>true, :db_type=>'integer'}]]} @db.schema(:x).first.last[:auto_increment].must_equal false @db.schema(:x).last.last[:auto_increment].must_equal false end it "should 
set :auto_increment to true by default if unset and the integer primary key is not the first column" do @db.define_singleton_method(:schema_parse_table){|*| [[:b, {}], [:a, {:primary_key=>true, :db_type=>'integer'}]]} @db.schema(:x).last.last[:auto_increment].must_equal true end it "should convert various types of table name arguments" do @db.define_singleton_method(:schema_parse_table) do |t, opts| [[t, opts]] end s1 = @db.schema(:x) s1.must_equal [['x', {:ruby_default=>nil}]] @db.schema(:x).object_id.must_equal s1.object_id @db.schema(Sequel.identifier(:x)).object_id.must_equal s1.object_id s2 = @db.schema(Sequel[:x][:y]) s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil}]] @db.schema(Sequel[:x][:y]).object_id.must_equal s2.object_id @db.schema(Sequel.qualify(:x, :y)).object_id.must_equal s2.object_id s2 = @db.schema(Sequel.qualify(:v, Sequel[:x][:y])) s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil, :information_schema_schema=>Sequel.identifier('v')}]] @db.schema(Sequel.qualify(:v, Sequel[:x][:y])).object_id.must_equal s2.object_id @db.schema(Sequel.qualify(Sequel[:v][:x], :y)).object_id.must_equal s2.object_id s2 = @db.schema(Sequel.qualify(Sequel[:u][:v], Sequel[:x][:y])) s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil, :information_schema_schema=>Sequel.qualify('u', 'v')}]] @db.schema(Sequel.qualify(Sequel[:u][:v], Sequel[:x][:y])).object_id.must_equal s2.object_id @db.schema(Sequel.qualify(Sequel.qualify(:u, :v), Sequel.qualify(:x, :y))).object_id.must_equal s2.object_id end with_symbol_splitting "should convert splittable symbol arguments" do @db.define_singleton_method(:schema_parse_table) do |t, opts| [[t, opts]] end s1 = @db.schema(:x) s1.must_equal [['x', {:ruby_default=>nil}]] @db.schema(:x).object_id.must_equal s1.object_id @db.schema(Sequel.identifier(:x)).object_id.must_equal s1.object_id s2 = @db.schema(:x__y) s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil}]] @db.schema(:x__y).object_id.must_equal s2.object_id @db.schema(Sequel.qualify(:x, :y)).object_id.must_equal s2.object_id s2 = @db.schema(Sequel.qualify(:v, :x__y)) s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil, :information_schema_schema=>Sequel.identifier('v')}]] @db.schema(Sequel.qualify(:v, :x__y)).object_id.must_equal s2.object_id @db.schema(Sequel.qualify(:v__x, :y)).object_id.must_equal s2.object_id s2 = @db.schema(Sequel.qualify(:u__v, :x__y)) s2.must_equal [['y', {:schema=>'x', :ruby_default=>nil, :information_schema_schema=>Sequel.qualify('u', 'v')}]] @db.schema(Sequel.qualify(:u__v, :x__y)).object_id.must_equal s2.object_id @db.schema(Sequel.qualify(Sequel.qualify(:u, :v), Sequel.qualify(:x, :y))).object_id.must_equal s2.object_id end it "should correctly parse all supported data types" do sm = Module.new do def schema_parse_table(t, opts) [[:x, {:db_type=>t.to_s, :type=>schema_column_type(t.to_s)}]] end end @db.extend(sm) @db.schema(:tinyint).first.last[:type].must_equal :integer @db.schema(:int).first.last[:type].must_equal :integer @db.schema(:integer).first.last[:type].must_equal :integer @db.schema(:bigint).first.last[:type].must_equal :integer @db.schema(:smallint).first.last[:type].must_equal :integer @db.schema(:character).first.last[:type].must_equal :string @db.schema(:"character varying").first.last[:type].must_equal :string @db.schema(:varchar).first.last[:type].must_equal :string @db.schema(:"varchar(255)").first.last[:type].must_equal :string @db.schema(:"varchar(255)").first.last[:max_length].must_equal 255 @db.schema(:text).first.last[:type].must_equal :string 
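# Temporal, boolean, float, decimal, blob, and national character db_types follow; note below that a decimal db_type with zero scale, such as number(10,0), is parsed as :integer rather than :decimal.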
@db.schema(:date).first.last[:type].must_equal :date @db.schema(:datetime).first.last[:type].must_equal :datetime @db.schema(:smalldatetime).first.last[:type].must_equal :datetime @db.schema(:timestamp).first.last[:type].must_equal :datetime @db.schema(:"timestamp with time zone").first.last[:type].must_equal :datetime @db.schema(:"timestamp without time zone").first.last[:type].must_equal :datetime @db.schema(:"timestamp(6)").first.last[:type].must_equal :datetime @db.schema(:"timestamp(6) with time zone").first.last[:type].must_equal :datetime @db.schema(:time).first.last[:type].must_equal :time @db.schema(:"time with time zone").first.last[:type].must_equal :time @db.schema(:"time without time zone").first.last[:type].must_equal :time @db.schema(:bool).first.last[:type].must_equal :boolean @db.schema(:boolean).first.last[:type].must_equal :boolean @db.schema(:real).first.last[:type].must_equal :float @db.schema(:float).first.last[:type].must_equal :float @db.schema(:"float unsigned").first.last[:type].must_equal :float @db.schema(:double).first.last[:type].must_equal :float @db.schema(:"double(1,2)").first.last[:type].must_equal :float @db.schema(:"double(1,2) unsigned").first.last[:type].must_equal :float @db.schema(:"double precision").first.last[:type].must_equal :float @db.schema(:number).first.last[:type].must_equal :decimal @db.schema(:numeric).first.last[:type].must_equal :decimal @db.schema(:decimal).first.last[:type].must_equal :decimal @db.schema(:"number(10,0)").first.last[:type].must_equal :integer @db.schema(:"numeric(10, 10)").first.last[:type].must_equal :decimal @db.schema(:"decimal(10,1)").first.last[:type].must_equal :decimal @db.schema(:bytea).first.last[:type].must_equal :blob @db.schema(:blob).first.last[:type].must_equal :blob @db.schema(:image).first.last[:type].must_equal :blob @db.schema(:nchar).first.last[:type].must_equal :string @db.schema(:nvarchar).first.last[:type].must_equal :string @db.schema(:ntext).first.last[:type].must_equal :string @db.schema(:clob).first.last[:type].must_equal :string @db.schema(:ntext).first.last[:type].must_equal :string @db.schema(:smalldatetime).first.last[:type].must_equal :datetime @db.schema(:binary).first.last[:type].must_equal :blob @db.schema(:varbinary).first.last[:type].must_equal :blob @db.schema(:enum).first.last[:type].must_equal :enum @db = Sequel.mock(:host=>'postgres') @db.extend(sm) @db.schema(:interval).first.last[:type].must_equal :interval @db.schema(:citext).first.last[:type].must_equal :string @db = Sequel.mock(:host=>'mysql') @db.extend(sm) @db.schema(:set).first.last[:type].must_equal :set @db.schema(:mediumint).first.last[:type].must_equal :integer @db.schema(:mediumtext).first.last[:type].must_equal :string end end 
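# Note on the adapter switches above: the :host option to Sequel.mock selects
# adapter-specific type parsing, which is why citext maps to :string only on the
# PostgreSQL mock and set/mediumint only on the MySQL mock. A minimal usage
# sketch (assuming a Database that implements schema parsing):
#   DB.schema(:items) # => [[:id, {:db_type=>"integer", :type=>:integer, ...}], ...]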
�����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/core/spec_helper.rb��������������������������������������������������������������0000664�0000000�0000000�00000001417�14342141206�0017603�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������if ENV['COVERAGE'] require_relative "../sequel_coverage" SimpleCov.sequel_coverage(:filter=>%r{lib/sequel/(\w+\.rb|(dataset|database|model|connection_pool)/\w+\.rb|adapters/mock\.rb)\z}) end $:.unshift(File.join(File.dirname(File.expand_path(__FILE__)), "../../lib/")) require_relative "../../lib/sequel/core" ENV['MT_NO_PLUGINS'] = '1' # Work around stupid autoloading of plugins gem 'minitest' require 'minitest/global_expectations/autorun' require 'minitest/hooks/default' require_relative '../deprecation_helper' if ENV['SEQUEL_COLUMNS_INTROSPECTION'] Sequel.extension :columns_introspection Sequel::Database.extension :columns_introspection require_relative '../../lib/sequel/adapters/mock' Sequel::Mock::Dataset.send(:include, Sequel::ColumnsIntrospection) end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/core/version_spec.rb�������������������������������������������������������������0000664�0000000�0000000�00000000657�14342141206�0020016�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel.version" do it "should be in the form X.Y.Z with all being numbers" do Sequel.version.must_match(/\A\d+\.\d+\.\d+\z/) end it "MAJOR/MINOR/TINY/VERSION_NUMBER should be integers" do Sequel::MAJOR.must_be_kind_of(Integer) Sequel::MINOR.must_be_kind_of(Integer) Sequel::TINY.must_be_kind_of(Integer) Sequel::VERSION_NUMBER.must_be_kind_of(Integer) end end ���������������������������������������������������������������������������������sequel-5.63.0/spec/core_extensions_spec.rb����������������������������������������������������������0000664�0000000�0000000�00000075754�14342141206�0020622�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������if ENV['COVERAGE'] require_relative "sequel_coverage" SimpleCov.sequel_coverage(:filter=>%r{lib/sequel/extensions/core_extensions\.rb\z}) end $:.unshift(File.join(File.dirname(File.expand_path(__FILE__)), "../lib/")) 
require_relative '../lib/sequel' Regexp.send(:include, Sequel::SQL::StringMethods) String.send(:include, Sequel::SQL::StringMethods) Sequel.extension :core_extensions Sequel.extension :symbol_aref Sequel.extension :virtual_row_method_block ENV['MT_NO_PLUGINS'] = '1' # Work around stupid autoloading of plugins gem 'minitest' require 'minitest/global_expectations/autorun' require 'minitest/hooks/default' require_relative "visibility_checking_after_hook" if ENV['CHECK_METHOD_VISIBILITY'] require_relative "deprecation_helper.rb" describe "Sequel core extensions" do it "should have Sequel.core_extensions? be true if enabled" do Sequel.core_extensions?.must_equal true end end describe "Sequel.current" do it "should be Thread.current unless fiber_concurrency extension is used" do Sequel.current.must_equal Thread.current Sequel.extension :fiber_concurrency Sequel.current.must_equal Fiber.current end end describe "Core extensions" do before do db = Sequel.mock @d = db[:items].with_extend do def supports_regexp?; true end def l(*args, &block) literal(filter_expr(*args, &block)) end def lit(*args) literal(*args) end end end it "should support NOT via Symbol#~" do @d.l(~:x).must_equal 'NOT x' end with_symbol_splitting "should support NOT via Symbol#~ for splittable symbols" do @d.l(~:x__y).must_equal 'NOT x.y' end it "should support + - * / power via Symbol#+,-,*,/,**" do @d.l(:x + 1 > 100).must_equal '((x + 1) > 100)' @d.l((:x * :y) < 100.01).must_equal '((x * y) < 100.01)' @d.l((:x - :y/2) >= 100000000000000000000000000000000000).must_equal '((x - (y / 2)) >= 100000000000000000000000000000000000)' @d.l((((:x - :y)/(:x + :y))*:z) <= 100).must_equal '((((x - y) / (x + y)) * z) <= 100)' @d.l(~((((:x - :y)/(:x + :y))*:z) <= 100)).must_equal '((((x - y) / (x + y)) * z) > 100)' @d.l(~((((:x ** :y)/(:x + :y))*:z) <= 100)).must_equal '(((power(x, y) / (x + y)) * z) > 100)' end it "should support coercion for symbols" do @d.l(1 + :x > 2).must_equal '((1 + x) > 2)' end it "should support LIKE via Symbol#like" do @d.l(:x.like('a')).must_equal '(x LIKE \'a\' ESCAPE \'\\\')' @d.l(:x.like(/a/)).must_equal '(x ~ \'a\')' @d.l(:x.like('a', 'b')).must_equal '((x LIKE \'a\' ESCAPE \'\\\') OR (x LIKE \'b\' ESCAPE \'\\\'))' @d.l(:x.like(/a/, /b/i)).must_equal '((x ~ \'a\') OR (x ~* \'b\'))' @d.l(:x.like('a', /b/)).must_equal '((x LIKE \'a\' ESCAPE \'\\\') OR (x ~ \'b\'))' @d.l('a'.like(:x)).must_equal "('a' LIKE x ESCAPE '\\')" @d.l('a'.like(:x, 'b')).must_equal "(('a' LIKE x ESCAPE '\\') OR ('a' LIKE 'b' ESCAPE '\\'))" @d.l('a'.like(:x, /b/)).must_equal "(('a' LIKE x ESCAPE '\\') OR ('a' ~ 'b'))" @d.l('a'.like(:x, /b/i)).must_equal "(('a' LIKE x ESCAPE '\\') OR ('a' ~* 'b'))" @d.l(/a/.like(:x)).must_equal "('a' ~ x)" @d.l(/a/.like(:x, 'b')).must_equal "(('a' ~ x) OR ('a' ~ 'b'))" @d.l(/a/.like(:x, /b/)).must_equal "(('a' ~ x) OR ('a' ~ 'b'))" @d.l(/a/.like(:x, /b/i)).must_equal "(('a' ~ x) OR ('a' ~* 'b'))" @d.l(/a/i.like(:x)).must_equal "('a' ~* x)" @d.l(/a/i.like(:x, 'b')).must_equal "(('a' ~* x) OR ('a' ~* 'b'))" @d.l(/a/i.like(:x, /b/)).must_equal "(('a' ~* x) OR ('a' ~* 'b'))" @d.l(/a/i.like(:x, /b/i)).must_equal "(('a' ~* x) OR ('a' ~* 'b'))" end it "should support NOT LIKE via Symbol#like and Symbol#~" do @d.l(~:x.like('a')).must_equal '(x NOT LIKE \'a\' ESCAPE \'\\\')' @d.l(~:x.like(/a/)).must_equal '(x !~ \'a\')' @d.l(~:x.like('a', 'b')).must_equal '((x NOT LIKE \'a\' ESCAPE \'\\\') AND (x NOT LIKE \'b\' ESCAPE \'\\\'))' @d.l(~:x.like(/a/, /b/i)).must_equal '((x !~ \'a\') AND (x !~* \'b\'))' @d.l(~:x.like('a', 
/b/)).must_equal '((x NOT LIKE \'a\' ESCAPE \'\\\') AND (x !~ \'b\'))' @d.l(~'a'.like(:x)).must_equal "('a' NOT LIKE x ESCAPE '\\')" @d.l(~'a'.like(:x, 'b')).must_equal "(('a' NOT LIKE x ESCAPE '\\') AND ('a' NOT LIKE 'b' ESCAPE '\\'))" @d.l(~'a'.like(:x, /b/)).must_equal "(('a' NOT LIKE x ESCAPE '\\') AND ('a' !~ 'b'))" @d.l(~'a'.like(:x, /b/i)).must_equal "(('a' NOT LIKE x ESCAPE '\\') AND ('a' !~* 'b'))" @d.l(~/a/.like(:x)).must_equal "('a' !~ x)" @d.l(~/a/.like(:x, 'b')).must_equal "(('a' !~ x) AND ('a' !~ 'b'))" @d.l(~/a/.like(:x, /b/)).must_equal "(('a' !~ x) AND ('a' !~ 'b'))" @d.l(~/a/.like(:x, /b/i)).must_equal "(('a' !~ x) AND ('a' !~* 'b'))" @d.l(~/a/i.like(:x)).must_equal "('a' !~* x)" @d.l(~/a/i.like(:x, 'b')).must_equal "(('a' !~* x) AND ('a' !~* 'b'))" @d.l(~/a/i.like(:x, /b/)).must_equal "(('a' !~* x) AND ('a' !~* 'b'))" @d.l(~/a/i.like(:x, /b/i)).must_equal "(('a' !~* x) AND ('a' !~* 'b'))" end it "should support ILIKE via Symbol#ilike" do @d.l(:x.ilike('a')).must_equal '(UPPER(x) LIKE UPPER(\'a\') ESCAPE \'\\\')' @d.l(:x.ilike(/a/)).must_equal '(x ~* \'a\')' @d.l(:x.ilike('a', 'b')).must_equal '((UPPER(x) LIKE UPPER(\'a\') ESCAPE \'\\\') OR (UPPER(x) LIKE UPPER(\'b\') ESCAPE \'\\\'))' @d.l(:x.ilike(/a/, /b/i)).must_equal '((x ~* \'a\') OR (x ~* \'b\'))' @d.l(:x.ilike('a', /b/)).must_equal '((UPPER(x) LIKE UPPER(\'a\') ESCAPE \'\\\') OR (x ~* \'b\'))' @d.l('a'.ilike(:x)).must_equal "(UPPER('a') LIKE UPPER(x) ESCAPE '\\')" @d.l('a'.ilike(:x, 'b')).must_equal "((UPPER('a') LIKE UPPER(x) ESCAPE '\\') OR (UPPER('a') LIKE UPPER('b') ESCAPE '\\'))" @d.l('a'.ilike(:x, /b/)).must_equal "((UPPER('a') LIKE UPPER(x) ESCAPE '\\') OR ('a' ~* 'b'))" @d.l('a'.ilike(:x, /b/i)).must_equal "((UPPER('a') LIKE UPPER(x) ESCAPE '\\') OR ('a' ~* 'b'))" @d.l(/a/.ilike(:x)).must_equal "('a' ~* x)" @d.l(/a/.ilike(:x, 'b')).must_equal "(('a' ~* x) OR ('a' ~* 'b'))" @d.l(/a/.ilike(:x, /b/)).must_equal "(('a' ~* x) OR ('a' ~* 'b'))" @d.l(/a/.ilike(:x, /b/i)).must_equal "(('a' ~* x) OR ('a' ~* 'b'))" @d.l(/a/i.ilike(:x)).must_equal "('a' ~* x)" @d.l(/a/i.ilike(:x, 'b')).must_equal "(('a' ~* x) OR ('a' ~* 'b'))" @d.l(/a/i.ilike(:x, /b/)).must_equal "(('a' ~* x) OR ('a' ~* 'b'))" @d.l(/a/i.ilike(:x, /b/i)).must_equal "(('a' ~* x) OR ('a' ~* 'b'))" end it "should support NOT ILIKE via Symbol#ilike and Symbol#~" do @d.l(~:x.ilike('a')).must_equal '(UPPER(x) NOT LIKE UPPER(\'a\') ESCAPE \'\\\')' @d.l(~:x.ilike(/a/)).must_equal '(x !~* \'a\')' @d.l(~:x.ilike('a', 'b')).must_equal '((UPPER(x) NOT LIKE UPPER(\'a\') ESCAPE \'\\\') AND (UPPER(x) NOT LIKE UPPER(\'b\') ESCAPE \'\\\'))' @d.l(~:x.ilike(/a/, /b/i)).must_equal '((x !~* \'a\') AND (x !~* \'b\'))' @d.l(~:x.ilike('a', /b/)).must_equal '((UPPER(x) NOT LIKE UPPER(\'a\') ESCAPE \'\\\') AND (x !~* \'b\'))' @d.l(~'a'.ilike(:x)).must_equal "(UPPER('a') NOT LIKE UPPER(x) ESCAPE '\\')" @d.l(~'a'.ilike(:x, 'b')).must_equal "((UPPER('a') NOT LIKE UPPER(x) ESCAPE '\\') AND (UPPER('a') NOT LIKE UPPER('b') ESCAPE '\\'))" @d.l(~'a'.ilike(:x, /b/)).must_equal "((UPPER('a') NOT LIKE UPPER(x) ESCAPE '\\') AND ('a' !~* 'b'))" @d.l(~'a'.ilike(:x, /b/i)).must_equal "((UPPER('a') NOT LIKE UPPER(x) ESCAPE '\\') AND ('a' !~* 'b'))" @d.l(~/a/.ilike(:x)).must_equal "('a' !~* x)" @d.l(~/a/.ilike(:x, 'b')).must_equal "(('a' !~* x) AND ('a' !~* 'b'))" @d.l(~/a/.ilike(:x, /b/)).must_equal "(('a' !~* x) AND ('a' !~* 'b'))" @d.l(~/a/.ilike(:x, /b/i)).must_equal "(('a' !~* x) AND ('a' !~* 'b'))" @d.l(~/a/i.ilike(:x)).must_equal "('a' !~* x)" @d.l(~/a/i.ilike(:x, 'b')).must_equal "(('a' 
!~* x) AND ('a' !~* 'b'))" @d.l(~/a/i.ilike(:x, /b/)).must_equal "(('a' !~* x) AND ('a' !~* 'b'))" @d.l(~/a/i.ilike(:x, /b/i)).must_equal "(('a' !~* x) AND ('a' !~* 'b'))" end it "should support sql_expr on arrays with all two pairs" do @d.l([[:x, 100],[:y, 'a']].sql_expr).must_equal '((x = 100) AND (y = \'a\'))' @d.l([[:x, true], [:y, false]].sql_expr).must_equal '((x IS TRUE) AND (y IS FALSE))' @d.l([[:x, nil], [:y, [1,2,3]]].sql_expr).must_equal '((x IS NULL) AND (y IN (1, 2, 3)))' end it "should support sql_negate on arrays with all two pairs" do @d.l([[:x, 100],[:y, 'a']].sql_negate).must_equal '((x != 100) AND (y != \'a\'))' @d.l([[:x, true], [:y, false]].sql_negate).must_equal '((x IS NOT TRUE) AND (y IS NOT FALSE))' @d.l([[:x, nil], [:y, [1,2,3]]].sql_negate).must_equal '((x IS NOT NULL) AND (y NOT IN (1, 2, 3)))' end it "should support ~ on arrays with all two pairs" do @d.l(~[[:x, 100],[:y, 'a']]).must_equal '((x != 100) OR (y != \'a\'))' @d.l(~[[:x, true], [:y, false]]).must_equal '((x IS NOT TRUE) OR (y IS NOT FALSE))' @d.l(~[[:x, nil], [:y, [1,2,3]]]).must_equal '((x IS NOT NULL) OR (y NOT IN (1, 2, 3)))' end it "should support sql_or on arrays with all two pairs" do @d.l([[:x, 100],[:y, 'a']].sql_or).must_equal '((x = 100) OR (y = \'a\'))' @d.l([[:x, true], [:y, false]].sql_or).must_equal '((x IS TRUE) OR (y IS FALSE))' @d.l([[:x, nil], [:y, [1,2,3]]].sql_or).must_equal '((x IS NULL) OR (y IN (1, 2, 3)))' end it "should support Array#sql_string_join for concatenation of SQL strings" do @d.lit([:x].sql_string_join).must_equal '(x)' @d.lit([:x].sql_string_join(', ')).must_equal '(x)' @d.lit([:x, :y].sql_string_join).must_equal '(x || y)' @d.lit([:x, :y].sql_string_join(', ')).must_equal "(x || ', ' || y)" @d.lit([:x.sql_function(1), :y.sql_subscript(1)].sql_string_join).must_equal '(x(1) || y[1])' @d.lit([:x.sql_function(1), 'y.z'.lit].sql_string_join(', ')).must_equal "(x(1) || ', ' || y.z)" @d.lit([:x, 1, :y].sql_string_join).must_equal "(x || '1' || y)" @d.lit([:x, 1, :y].sql_string_join(', ')).must_equal "(x || ', ' || '1' || ', ' || y)" @d.lit([:x, 1, :y].sql_string_join(Sequel[:y][:z])).must_equal "(x || y.z || '1' || y.z || y)" @d.lit([:x, 1, :y].sql_string_join(1)).must_equal "(x || '1' || '1' || '1' || y)" @d.lit([:x, :y].sql_string_join('y.x || x.y'.lit)).must_equal "(x || y.x || x.y || y)" @d.lit([[:x, :y].sql_string_join, [:a, :b].sql_string_join].sql_string_join).must_equal "(x || y || a || b)" end it "should support sql_expr on hashes" do @d.l({:x => 100, :y => 'a'}.sql_expr)[1...-1].split(' AND ').sort.must_equal ['(x = 100)', '(y = \'a\')'] @d.l({:x => true, :y => false}.sql_expr)[1...-1].split(' AND ').sort.must_equal ['(x IS TRUE)', '(y IS FALSE)'] @d.l({:x => nil, :y => [1,2,3]}.sql_expr)[1...-1].split(' AND ').sort.must_equal ['(x IS NULL)', '(y IN (1, 2, 3))'] end it "should support sql_negate on hashes" do @d.l({:x => 100, :y => 'a'}.sql_negate)[1...-1].split(' AND ').sort.must_equal ['(x != 100)', '(y != \'a\')'] @d.l({:x => true, :y => false}.sql_negate)[1...-1].split(' AND ').sort.must_equal ['(x IS NOT TRUE)', '(y IS NOT FALSE)'] @d.l({:x => nil, :y => [1,2,3]}.sql_negate)[1...-1].split(' AND ').sort.must_equal ['(x IS NOT NULL)', '(y NOT IN (1, 2, 3))'] end it "should support ~ on hashes" do @d.l(~{:x => 100, :y => 'a'})[1...-1].split(' OR ').sort.must_equal ['(x != 100)', '(y != \'a\')'] @d.l(~{:x => true, :y => false})[1...-1].split(' OR ').sort.must_equal ['(x IS NOT TRUE)', '(y IS NOT FALSE)'] @d.l(~{:x => nil, :y => [1,2,3]})[1...-1].split(' OR 
').sort.must_equal ['(x IS NOT NULL)', '(y NOT IN (1, 2, 3))'] end it "should support sql_or on hashes" do @d.l({:x => 100, :y => 'a'}.sql_or)[1...-1].split(' OR ').sort.must_equal ['(x = 100)', '(y = \'a\')'] @d.l({:x => true, :y => false}.sql_or)[1...-1].split(' OR ').sort.must_equal ['(x IS TRUE)', '(y IS FALSE)'] @d.l({:x => nil, :y => [1,2,3]}.sql_or)[1...-1].split(' OR ').sort.must_equal ['(x IS NULL)', '(y IN (1, 2, 3))'] end it "should support Hash#& and Hash#|" do @d.l({:y => :z} & :x).must_equal '((y = z) AND x)' @d.l({:x => :a} & {:y => :z}).must_equal '((x = a) AND (y = z))' @d.l({:y => :z} | :x).must_equal '((y = z) OR x)' @d.l({:x => :a} | {:y => :z}).must_equal '((x = a) OR (y = z))' end end describe "Array#case and Hash#case" do before do @d = Sequel.mock.dataset end it "should return SQL CASE expression" do @d.literal({:x=>:y}.case(:z)).must_equal '(CASE WHEN x THEN y ELSE z END)' @d.literal({:x=>:y}.case(:z, :exp)).must_equal '(CASE exp WHEN x THEN y ELSE z END)' @d.literal({:x=>:y, :a=>:b}.case(:z)).must_equal '(CASE WHEN x THEN y WHEN a THEN b ELSE z END)' @d.literal([[:x, :y]].case(:z)).must_equal '(CASE WHEN x THEN y ELSE z END)' @d.literal([[:x, :y], [:a, :b]].case(:z)).must_equal '(CASE WHEN x THEN y WHEN a THEN b ELSE z END)' @d.literal([[:x, :y], [:a, :b]].case(:z, :exp)).must_equal '(CASE exp WHEN x THEN y WHEN a THEN b ELSE z END)' @d.literal([[:x, :y], [:a, :b]].case(:z, Sequel[:exp][:w])).must_equal '(CASE exp.w WHEN x THEN y WHEN a THEN b ELSE z END)' end it "should return SQL CASE expression with expression even if nil" do @d.literal({:x=>:y}.case(:z, nil)).must_equal '(CASE NULL WHEN x THEN y ELSE z END)' end it "should raise an error if an array that isn't all two pairs is used" do proc{[:b].case(:a)}.must_raise(Sequel::Error) proc{[:b, :c].case(:a)}.must_raise(Sequel::Error) proc{[[:b, :c], :d].case(:a)}.must_raise(Sequel::Error) end it "should raise an error if an empty array/hash is used" do proc{[].case(:a)}.must_raise(Sequel::Error) proc{{}.case(:a)}.must_raise(Sequel::Error) end end describe "Array#sql_value_list" do before do @d = Sequel.mock.dataset end it "should treat the array as an SQL value list instead of conditions when used as a placeholder value" do @d.filter(Sequel.lit("(a, b) IN ?", [[:x, 1], [:y, 2]])).sql.must_equal 'SELECT * WHERE ((a, b) IN ((x = 1) AND (y = 2)))' @d.filter(Sequel.lit("(a, b) IN ?", [[:x, 1], [:y, 2]].sql_value_list)).sql.must_equal 'SELECT * WHERE ((a, b) IN ((x, 1), (y, 2)))' end it "should make no difference when used as a hash value" do @d.filter([:a, :b]=>[[:x, 1], [:y, 2]]).sql.must_equal 'SELECT * WHERE ((a, b) IN ((x, 1), (y, 2)))' @d.filter([:a, :b]=>[[:x, 1], [:y, 2]].sql_value_list).sql.must_equal 'SELECT * WHERE ((a, b) IN ((x, 1), (y, 2)))' end end describe "String#lit" do before do @ds = Sequel.mock[:t] end it "should return a LiteralString object" do 'xyz'.lit.must_be_kind_of(Sequel::LiteralString) 'xyz'.lit.to_s.must_equal 'xyz' end it "should inhibit string literalization" do @ds.update_sql(:stamp => "NOW()".lit).must_equal "UPDATE t SET stamp = NOW()" end it "should return a PlaceholderLiteralString object if args are given" do a = 'DISTINCT ?'.lit(:a) a.must_be_kind_of(Sequel::SQL::PlaceholderLiteralString) @ds.literal(a).must_equal 'DISTINCT a' @ds.with_quote_identifiers(true).literal(a).must_equal 'DISTINCT "a"' end it "should handle named placeholders if given a single argument hash" do a = 'DISTINCT :b'.lit(:b=>:a) a.must_be_kind_of(Sequel::SQL::PlaceholderLiteralString) @ds.literal(a).must_equal
'DISTINCT a' @ds.with_quote_identifiers(true).literal(a).must_equal 'DISTINCT "a"' end it "should treat placeholder literal strings as generic expressions" do a = ':b'.lit(:b=>:a) @ds.literal(a + 1).must_equal "(a + 1)" @ds.literal(a & :b).must_equal "(a AND b)" @ds.literal(a.sql_string + :b).must_equal "(a || b)" end end describe "String#to_sequel_blob" do it "should return a Blob object" do 'xyz'.to_sequel_blob.must_be_kind_of(::Sequel::SQL::Blob) 'xyz'.to_sequel_blob.must_equal 'xyz' end it "should retain binary data" do "\1\2\3\4".to_sequel_blob.must_equal "\1\2\3\4" end end describe "String cast methods" do before do @ds = Sequel.mock.dataset end it "should support cast method" do @ds.literal('abc'.cast(:integer)).must_equal "CAST('abc' AS integer)" end it "should support cast_numeric and cast_string" do x = 'abc'.cast_numeric x.must_be_kind_of(Sequel::SQL::NumericExpression) @ds.literal(x).must_equal "CAST('abc' AS integer)" x = 'abc'.cast_numeric(:real) x.must_be_kind_of(Sequel::SQL::NumericExpression) @ds.literal(x).must_equal "CAST('abc' AS real)" x = 'abc'.cast_string x.must_be_kind_of(Sequel::SQL::StringExpression) @ds.literal(x).must_equal "CAST('abc' AS varchar(255))" x = 'abc'.cast_string(:varchar) x.must_be_kind_of(Sequel::SQL::StringExpression) @ds.literal(x).must_equal "CAST('abc' AS varchar(255))" end end describe "#desc" do before do @ds = Sequel.mock.dataset end it "should format a DESC clause for a column ref" do @ds.literal(:test.desc).must_equal 'test DESC' end with_symbol_splitting "should format a DESC clause for a column ref with a splitting symbol" do @ds.literal(:items__price.desc).must_equal 'items.price DESC' end it "should format a DESC clause for a function" do @ds.literal(:avg.sql_function(:test).desc).must_equal 'avg(test) DESC' end end describe "#asc" do before do @ds = Sequel.mock.dataset end it "should format an ASC clause for a column ref" do @ds.literal(:test.asc).must_equal 'test ASC' end with_symbol_splitting "should format an ASC clause for a column ref for a splittable symbol" do @ds.literal(:items__price.asc).must_equal 'items.price ASC' end it "should format an ASC clause for a function" do @ds.literal(:avg.sql_function(:test).asc).must_equal 'avg(test) ASC' end end describe "#as" do before do @ds = Sequel.mock.dataset end it "should format an AS clause for a column ref" do @ds.literal(:test.as(:t)).must_equal 'test AS t' end with_symbol_splitting "should format an AS clause for a column ref for splittable symbols" do @ds.literal(:items__price.as(:p)).must_equal 'items.price AS p' end it "should format an AS clause for a function" do @ds.literal(:avg.sql_function(:test).as(:avg)).must_equal 'avg(test) AS avg' end it "should format an AS clause for a literal value" do @ds.literal('abc'.as(:abc)).must_equal "'abc' AS abc" end end describe "Column references" do before do @ds = Sequel.mock.dataset.with_quote_identifiers(true).with_extend{def quoted_identifier_append(sql, c) sql << "`#{c}`" end} end it "should be quoted properly" do @ds.literal(:xyz).must_equal "`xyz`" @ds.literal(:xyz.as(:x)).must_equal "`xyz` AS `x`" end it "should be quoted properly in SQL functions" do @ds.literal(:avg.sql_function(:xyz)).must_equal "avg(`xyz`)" @ds.literal(:avg.sql_function(:xyz, 1)).must_equal "avg(`xyz`, 1)" @ds.literal(:avg.sql_function(:xyz).as(:a)).must_equal "avg(`xyz`) AS `a`" end it "should be quoted properly in ASC/DESC clauses" do @ds.literal(:xyz.asc).must_equal "`xyz` ASC" @ds.literal(:avg.sql_function(:xyz, 1).desc).must_equal "avg(`xyz`, 1) DESC" end it
"should be quoted properly in a cast function" do @ds.literal(:x.cast(:integer)).must_equal "CAST(`x` AS integer)" end with_symbol_splitting "should be quoted properly when using symbol splitting" do @ds.literal(:xyz__abc).must_equal "`xyz`.`abc`" @ds.literal(:xyz__abc.as(:x)).must_equal "`xyz`.`abc` AS `x`" @ds.literal(:xyz___x).must_equal "`xyz` AS `x`" @ds.literal(:xyz__abc___x).must_equal "`xyz`.`abc` AS `x`" @ds.literal(:x__y.cast('varchar(20)')).must_equal "CAST(`x`.`y` AS varchar(20))" end end describe "Blob" do it "#to_sequel_blob should return self" do blob = "x".to_sequel_blob blob.to_sequel_blob.object_id.must_equal blob.object_id end end describe "Symbol#*" do before do @ds = Sequel.mock.dataset end it "should format a qualified wildcard if no argument" do @ds.literal(:xyz.*).must_equal 'xyz.*' @ds.literal(:abc.*).must_equal 'abc.*' end it "should format a filter expression if an argument" do @ds.literal(:xyz.*(3)).must_equal '(xyz * 3)' @ds.literal(:abc.*(5)).must_equal '(abc * 5)' end with_symbol_splitting "should support qualified symbols if no argument" do @ds.literal(:xyz__abc.*).must_equal 'xyz.abc.*' end end describe "Symbol" do before do @ds = Sequel.mock.dataset.with_quote_identifiers(true) end it "#identifier should format an identifier" do @ds.literal(:xyz__abc.identifier).must_equal '"xyz__abc"' end it "#qualify should format a qualified column" do @ds.literal(:xyz.qualify(:abc)).must_equal '"abc"."xyz"' end it "#qualify should work on QualifiedIdentifiers" do @ds.literal(:xyz.qualify(:abc).qualify(:def)).must_equal '"def"."abc"."xyz"' end with_symbol_splitting "should be able to qualify an identifier" do @ds.literal(:xyz.identifier.qualify(:xyz__abc)).must_equal '"xyz"."abc"."xyz"' end it "should be able to specify a schema.table.column" do @ds.literal(:column.qualify(:table.qualify(:schema))).must_equal '"schema"."table"."column"' @ds.literal(:column.qualify(:table__name.identifier.qualify(:schema))).must_equal '"schema"."table__name"."column"' end it "should be able to specify order" do @oe = :xyz.desc @oe.class.must_equal Sequel::SQL::OrderedExpression @oe.descending.must_equal true @oe = :xyz.asc @oe.class.must_equal Sequel::SQL::OrderedExpression @oe.descending.must_equal false end it "should work correctly with objects" do o = Object.new def o.sql_literal(ds) "(foo)" end @ds.literal(:column.qualify(o)).must_equal '(foo)."column"' end end describe "Symbol" do before do @ds = Sequel.mock.dataset end it "should support sql_function method" do @ds.literal(:COUNT.sql_function('1')).must_equal "COUNT('1')" @ds.select(:COUNT.sql_function('1')).sql.must_equal "SELECT COUNT('1')" end it "should support cast method" do @ds.literal(:abc.cast(:integer)).must_equal "CAST(abc AS integer)" end it "should support sql array accesses via sql_subscript" do @ds.literal(:abc.sql_subscript(1)).must_equal "abc[1]" @ds.literal(:abc.sql_subscript(1)|2).must_equal "abc[1, 2]" @ds.literal(:abc.sql_subscript(1)[2]).must_equal "abc[1][2]" end with_symbol_splitting "should support sql array accesses via sql_subscript for splittable symbols" do @ds.literal(:abc__def.sql_subscript(1)).must_equal "abc.def[1]" end it "should support cast_numeric and cast_string" do x = :abc.cast_numeric x.must_be_kind_of(Sequel::SQL::NumericExpression) @ds.literal(x).must_equal "CAST(abc AS integer)" x = :abc.cast_numeric(:real) x.must_be_kind_of(Sequel::SQL::NumericExpression) @ds.literal(x).must_equal "CAST(abc AS real)" x = :abc.cast_string x.must_be_kind_of(Sequel::SQL::StringExpression) 
@ds.literal(x).must_equal "CAST(abc AS varchar(255))" x = :abc.cast_string(:varchar) x.must_be_kind_of(Sequel::SQL::StringExpression) @ds.literal(x).must_equal "CAST(abc AS varchar(255))" end it "should support boolean methods" do @ds.literal(~:x).must_equal "NOT x" @ds.literal(:x & :y).must_equal "(x AND y)" @ds.literal(:x | :y).must_equal "(x OR y)" end it "should support complex expression methods" do @ds.literal(:x.sql_boolean & 1).must_equal "(x AND 1)" @ds.literal(:x.sql_number & :y).must_equal "(x & y)" @ds.literal(:x.sql_string + :y).must_equal "(x || y)" end it "should allow database independent types when casting" do db = @ds.db def db.cast_type_literal(type) return :foo if type == Integer return :bar if type == String type end @ds.literal(:abc.cast(String)).must_equal "CAST(abc AS bar)" @ds.literal(:abc.cast(String)).must_equal "CAST(abc AS bar)" @ds.literal(:abc.cast_string).must_equal "CAST(abc AS bar)" @ds.literal(:abc.cast_string(Integer)).must_equal "CAST(abc AS foo)" @ds.literal(:abc.cast_numeric).must_equal "CAST(abc AS foo)" @ds.literal(:abc.cast_numeric(String)).must_equal "CAST(abc AS bar)" end it "should support SQL EXTRACT function via #extract" do @ds.literal(:abc.extract(:year)).must_equal "extract(year FROM abc)" end end describe "Postgres/SQLite extensions integration" do before do @db = Sequel.mock Sequel.extension(:pg_array, :pg_array_ops, :pg_hstore, :pg_hstore_ops, :pg_json, :pg_json_ops, :pg_range, :pg_range_ops, :pg_row, :pg_row_ops, :pg_inet_ops, :sqlite_json_ops) end it "Symbol#pg_array should return an ArrayOp" do @db.literal(:a.pg_array.unnest).must_equal "unnest(a)" end it "Symbol#pg_row should return a PGRowOp" do @db.literal(:a.pg_row[:a]).must_equal "(a).a" end it "Symbol#hstore should return an HStoreOp" do @db.literal(:a.hstore['a']).must_equal "(a -> 'a')" end it "Symbol#pg_inet should return an InetOp" do @db.literal(:a.pg_inet.contains(:b)).must_equal "(a >> b)" end it "Symbol#pg_json should return a JSONOp" do @db.literal(:a.pg_json[%w'a b']).must_equal "(a #> ARRAY['a','b'])" @db.literal(:a.pg_json.extract('a')).must_equal "json_extract_path(a, 'a')" end it "Symbol#pg_jsonb should return a JSONBOp" do @db.literal(:a.pg_jsonb[%w'a b']).must_equal "(a #> ARRAY['a','b'])" @db.literal(:a.pg_jsonb.extract('a')).must_equal "jsonb_extract_path(a, 'a')" end it "Symbol#pg_range should return a RangeOp" do @db.literal(:a.pg_range.lower).must_equal "lower(a)" end it "Array#pg_array should return a PGArray" do @db.literal([1].pg_array.op.unnest).must_equal "unnest(ARRAY[1])" @db.literal([1].pg_array(:int4).op.unnest).must_equal "unnest(ARRAY[1]::int4[])" end it "Array#pg_json should return a JSONArray" do @db.literal([1].pg_json).must_equal "'[1]'::json" end it "Array#pg_jsonb should return a JSONBArray" do @db.literal([1].pg_jsonb).must_equal "'[1]'::jsonb" end it "Array#pg_row should return an ArrayRow" do @db.literal([1].pg_row).must_equal "ROW(1)" end it "Hash#hstore should return an HStore" do @db.literal({'a'=>1}.hstore.op['a']).must_equal '(\'"a"=>"1"\'::hstore -> \'a\')' end it "Hash#pg_json should return a JSONHash" do @db.literal({'a'=>'b'}.pg_json).must_equal "'{\"a\":\"b\"}'::json" end it "Hash#pg_jsonb should return a JSONBHash" do @db.literal({'a'=>'b'}.pg_jsonb).must_equal "'{\"a\":\"b\"}'::jsonb" end it "Range#pg_range should return a PGRange" do @db.literal((1..2).pg_range).must_equal "'[1,2]'" @db.literal((1..2).pg_range(:int4range)).must_equal "int4range(1,2,'[]')" end it "Symbol#sqlite_json_op should return an SQLite::JSONOp" do
@db.literal(:a.sqlite_json_op[1]).must_equal "(a ->> 1)" @db.literal(:a.sqlite_json_op.minify).must_equal "json(a)" end end describe "is_distinct_from extension integration" do it "Symbol#is_distinct_from should return an IsDistinctFrom" do db = Sequel.connect("mock://postgres") db.extension :is_distinct_from db.literal(:a.is_distinct_from(:b)).must_equal '("a" IS DISTINCT FROM "b")' end end describe "symbol_aref extensions" do before do @db = Sequel.mock end it "Symbol#[] should create qualified identifier if given a symbol" do @db.literal(:x[:y]).must_equal "x.y" end it "Symbol#[] should create qualified identifier if given an identifier" do @db.literal(:x[Sequel[:y]]).must_equal "x.y" end it "Symbol#[] should create qualified identifier if given a qualified identifier" do @db.literal(:x[:y[:z]]).must_equal "x.y.z" end it "should not affect other arguments to Symbol#[]" do :x[0].must_equal "x" end end describe Sequel::SQL::VirtualRow do before do @d = Sequel.mock[:items].with_quote_identifiers(true).with_extend do def supports_window_functions?; true end def l(*args, &block) literal(filter_expr(*args, &block)) end end end it "should treat methods without blocks normally" do @d.l{column}.must_equal '"column"' @d.l{foo(a)}.must_equal 'foo("a")' end it "should treat methods with a block and no arguments as a function call with no arguments" do @d.l{version{}}.must_equal 'version()' end it "should treat methods with a block and a leading argument :* as a function call with the SQL wildcard" do @d.l{count(:*){}}.must_equal 'count(*)' end it "should treat methods with a block and a leading argument :distinct as a function call with DISTINCT and the additional method arguments" do @d.l{count(:distinct, column1){}}.must_equal 'count(DISTINCT "column1")' @d.l{count(:distinct, column1, column2){}}.must_equal 'count(DISTINCT "column1", "column2")' end it "should raise an error if an unsupported argument is used with a block" do proc{@d.where{count(:blah){}}}.must_raise(Sequel::Error) end it "should treat methods with a block and a leading argument :over as a window function call" do @d.l{rank(:over){}}.must_equal 'rank() OVER ()' end it "should support :partition options for window function calls" do @d.l{rank(:over, :partition=>column1){}}.must_equal 'rank() OVER (PARTITION BY "column1")' @d.l{rank(:over, :partition=>[column1, column2]){}}.must_equal 'rank() OVER (PARTITION BY "column1", "column2")' end it "should support :args options for window function calls" do @d.l{avg(:over, :args=>column1){}}.must_equal 'avg("column1") OVER ()' @d.l{avg(:over, :args=>[column1, column2]){}}.must_equal 'avg("column1", "column2") OVER ()' end it "should support :order option for window function calls" do @d.l{rank(:over, :order=>column1){}}.must_equal 'rank() OVER (ORDER BY "column1")' @d.l{rank(:over, :order=>[column1, column2]){}}.must_equal 'rank() OVER (ORDER BY "column1", "column2")' end it "should support :window option for window function calls" do @d.l{rank(:over, :window=>:win){}}.must_equal 'rank() OVER ("win")' end it "should support :*=>true option for window function calls" do @d.l{count(:over, :* =>true){}}.must_equal 'count(*) OVER ()' end it "should support :frame=>:all option for window function calls" do @d.l{rank(:over, :frame=>:all){}}.must_equal 'rank() OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING)' end it "should support :frame=>:rows option for window function calls" do @d.l{rank(:over, :frame=>:rows){}}.must_equal 'rank() OVER (ROWS BETWEEN UNBOUNDED PRECEDING AND 
CURRENT ROW)' end it "should support :frame=>'some string' option for window function calls" do @d.l{rank(:over, :frame=>'RANGE BETWEEN 3 PRECEDING AND CURRENT ROW'){}}.must_equal 'rank() OVER (RANGE BETWEEN 3 PRECEDING AND CURRENT ROW)' end it "should raise an error if an invalid :frame option is used" do proc{@d.l{rank(:over, :frame=>:blah){}}}.must_raise(Sequel::Error) end it "should support all these options together" do @d.l{count(:over, :* =>true, :partition=>a, :order=>b, :window=>:win, :frame=>:rows){}}.must_equal 'count(*) OVER ("win" PARTITION BY "a" ORDER BY "b" ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)' end it "should raise an error if window functions are not supported" do proc{@d.with_extend{def supports_window_functions?; false end}.l{count(:over, :* =>true, :partition=>a, :order=>b, :window=>:win, :frame=>:rows){}}}.must_raise(Sequel::Error) proc{Sequel.mock.dataset.filter{count(:over, :* =>true, :partition=>a, :order=>b, :window=>:win, :frame=>:rows){}}.sql}.must_raise(Sequel::Error) end end
sequel-5.63.0/spec/core_model_spec.rb
require_relative 'core_spec' require_relative 'model_spec'
sequel-5.63.0/spec/core_spec.rb
Dir['./spec/core/*_spec.rb'].each{|f| require f}
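# Illustrative sketch (commentary, not part of the original archive): the window
# function SQL exercised by the Sequel::SQL::VirtualRow specs above can also be
# produced with the core SQL::Function#over API; the :items dataset and the
# :dept/:price columns below are hypothetical.
require 'sequel'
db = Sequel.connect('mock://postgres') # mock PostgreSQL connection, as used throughout these specs
db[:items].select(Sequel.function(:rank).over(:partition=>:dept, :order=>:price)).sql
# => SELECT rank() OVER (PARTITION BY "dept" ORDER BY "price") FROM "items"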
sequel-5.63.0/spec/deprecation_helper.rb
Sequel::Deprecation.backtrace_filter = lambda{|line, lineno| lineno < 4 || line =~ /_spec\.rb/} class Minitest::HooksSpec def self.deprecated(a, &block) it("#{a} (deprecated)") do deprecated{instance_exec(&block)} end end def deprecated output = Sequel::Deprecation.output Sequel::Deprecation.output = nil yield ensure Sequel::Deprecation.output = output end def self.with_symbol_splitting(a, &block) it("#{a}, with symbol splitting enabled") do with_symbol_splitting{instance_exec(&block)} end end def with_symbol_splitting Sequel.split_symbols = true yield ensure Sequel.split_symbols = false end end
sequel-5.63.0/spec/extensions/
sequel-5.63.0/spec/extensions/accessed_columns_spec.rb
require_relative "spec_helper" describe "accessed_columns plugin" do before do @db = Sequel.mock(:fetch=>{:name=>'a', :b=>'c'}, :numrows=>1) @c = Class.new(Sequel::Model(@db[:test])) @c.columns :name, :b @c.plugin :accessed_columns @o = @c.new end it "should record columns accessed" do @o.accessed_columns.must_equal [] @o.name @o.accessed_columns.must_equal [:name] @o.name @o.accessed_columns.must_equal [:name] @o.b @o.accessed_columns.sort_by{|s| s.to_s}.must_equal [:b, :name] end it "should clear accessed columns when refreshing" do @o.name @o.refresh @o.accessed_columns.must_equal [] end it "should
clear accessed columns when saving" do @o.name @o.save @o.accessed_columns.must_equal [] end it "should work when duping and cloning instances" do o = @o.dup o.accessed_columns.must_be_empty @o.name o = @o.dup @o.accessed_columns.must_equal [:name] o.accessed_columns.must_equal [:name] @o.b @o.accessed_columns.sort_by{|s| s.to_s}.must_equal [:b, :name] o.accessed_columns.must_equal [:name] o2 = o.clone o2.refresh o.accessed_columns.must_equal [:name] o2.accessed_columns.must_equal [] end it "should not raise exceptions when object is frozen" do @o.freeze @o.name end end
sequel-5.63.0/spec/extensions/active_model_spec.rb
require_relative "spec_helper" begin require 'active_model' rescue LoadError warn "Skipping test of active_model plugin: can't load active_model" else describe "ActiveModel plugin" do before do class ::AMLintTest < Sequel::Model set_primary_key :id columns :id, :id2 def delete; end end module ::Blog class Post < Sequel::Model plugin :active_model end end @c = AMLintTest @c.plugin :active_model @c.freeze if @freeze_class @m = @model = @c.new @o = @c.load({}) end after do Object.send(:remove_const, :AMLintTest) Object.send(:remove_const, :Blog) end it ".to_model should return self, not a proxy object" do @m.object_id.must_equal @m.to_model.object_id end it "#to_key should return a key array, or nil" do @o.to_key.must_be_nil @o.id = 1 @o.to_key.must_equal [1] @o.id = nil @o.to_key.must_be_nil @c.set_primary_key [:id2, :id] @c.freeze @o.to_key.must_be_nil @o.id = 1 @o.id2 = 2 @o.to_key.must_equal [2, 1] @o.destroy @o.to_key.must_equal [2, 1] @o.id = nil @o.to_key.must_be_nil end it "#to_param should return a param string or nil" do @o.to_param.must_be_nil @o.id = 1 @o.to_param.must_equal '1' @c.set_primary_key [:id2, :id] @c.freeze @o.id2 = 2 @o.to_param.must_equal '2-1' def @o.to_param_joiner; '|' end @o.to_param.must_equal '2|1' @o.destroy @o.to_param.must_be_nil end it "#persisted? should return true if the object exists and has not been destroyed" do @m.persisted?.must_equal false @o.persisted?.must_equal true @m.destroy @o.destroy @m.persisted?.must_equal false @o.persisted?.must_equal false end it "#persisted? should return false if the object is created and the transaction is rolled back" do DB.transaction(:rollback=>:always){@m.save} @m.persisted?.must_equal false end it "#persisted? should return true if the object is created and the transaction is not rolled back" do DB.transaction{@m.save} @m.persisted?.must_equal true end it "#persisted?
should return true if the object is created without a transaction" do @m.save(:transaction=>false) @m.persisted?.must_equal true end it "#to_partial_path should return a path string" do @m.to_partial_path.must_equal 'am_lint_tests/am_lint_test' Blog::Post.new.to_partial_path.must_equal 'blog/posts/post' end describe "with unfrozen model class" do include ActiveModel::Lint::Tests end describe "with frozen model class" do before do @freeze_class = true end include ActiveModel::Lint::Tests end end end
sequel-5.63.0/spec/extensions/after_initialize_spec.rb
require_relative "spec_helper" describe "Sequel::Plugins::AfterInitialize" do before do @db = Sequel.mock(:host=>'mysql', :numrows=>1) @c = Class.new(Sequel::Model(@db[:test])) @c.class_eval do columns :id, :name plugin :after_initialize def after_initialize self.name *= 2 self.id *= 3 if id end end end it "should have after_initialize hook be called for new objects" do @c.new(:name=>'foo').values.must_equal(:name=>'foofoo') end it "should have after_initialize hook be called for objects loaded from the database" do @c.call(:id=>1, :name=>'foo').values.must_equal(:id=>3, :name=>'foofoo') end it "should not allow .call to be called without arguments" do proc{@c.call}.must_raise ArgumentError end end
sequel-5.63.0/spec/extensions/any_not_empty_spec.rb
require_relative "spec_helper" describe "any_not_empty extension" do before do @ds = Sequel.mock[:t].extension(:any_not_empty) end it "should use a limited query if no block is given" do @ds.with_fetch(:one=>1).any?.must_equal true @ds.db.sqls.must_equal ["SELECT 1 AS one FROM t LIMIT 1"] @ds.with_fetch([]).any?.must_equal false @ds.db.sqls.must_equal ["SELECT 1 AS one FROM t LIMIT 1"] end it "should use default behavior if block is given" do @ds.with_fetch(:one=>1).any?{|x| x[:one] == 1}.must_equal true @ds.db.sqls.must_equal ["SELECT * FROM t"] @ds.with_fetch(:one=>1).any?{|x| x[:one] != 1}.must_equal false @ds.db.sqls.must_equal ["SELECT * FROM t"] @ds.with_fetch([]).any?{|x| x[:one] == 1}.must_equal false
@ds.db.sqls.must_equal ["SELECT * FROM t"] end end
sequel-5.63.0/spec/extensions/arbitrary_servers_spec.rb
require_relative "spec_helper" describe "arbitrary servers" do before do @db = Sequel.mock(:servers=>{}) @db.extension :arbitrary_servers end it "should allow arbitrary server options using a hash" do @db.synchronize(:host=>'host1', :database=>'db1') do |c| c.opts[:host].must_equal 'host1' c.opts[:database].must_equal 'db1' end end it "should not cache connections to arbitrary servers" do x = nil @db.synchronize(:host=>'host1', :database=>'db1') do |c| x = c end @db.synchronize(:host=>'host1', :database=>'db1') do |c2| c2.wont_be_same_as(x) end end it "should yield same connection correctly when nesting" do @db.synchronize(:host=>'host1', :database=>'db1') do |c| @db.synchronize(:host=>'host1', :database=>'db1') do |c2| c2.must_be_same_as(c) end end end it "should disconnect when connection is finished" do x, x1 = nil, nil @db.define_singleton_method(:disconnect_connection){|c| x = c} @db.synchronize(:host=>'host1', :database=>'db1') do |c| x1 = c @db.synchronize(:host=>'host1', :database=>'db1') do |c2| c2.must_be_same_as(c) end x.must_be_same_as(nil) end x.must_be_same_as(x1) end it "should yield different connection correctly when nesting" do @db.synchronize(:host=>'host1', :database=>'db1') do |c| c.opts[:host].must_equal 'host1' @db.synchronize(:host=>'host2', :database=>'db1') do |c2| c2.opts[:host].must_equal 'host2' c2.wont_be_same_as(c) end end end it "should respect multithreaded access" do @db.synchronize(:host=>'host1', :database=>'db1') do |c| Thread.new do @db.synchronize(:host=>'host1', :database=>'db1') do |c2| _(c2).wont_be_same_as(c) end end.join end end it "should work correctly with server_block plugin" do @db.extension :server_block @db.with_server(:host=>'host1', :database=>'db1') do @db.synchronize do |c| c.opts[:host].must_equal 'host1' c.opts[:database].must_equal 'db1' @db.synchronize do |c2| c2.must_be_same_as(c) end end end end it "should respect multithreaded access with server block plugin" do @db.extension :server_block q, q1 = Queue.new, Queue.new t = nil @db[:t].all @db.with_server(:host=>'a') do @db[:t].all t = Thread.new do @db[:t].all @db.with_server(:host=>'c') do @db[:t].all @db.with_server(:host=>'d'){@db[:t].all} q.push nil q1.pop @db[:t].all end @db[:t].all end q.pop @db.with_server(:host=>'b'){@db[:t].all} @db[:t].all end @db[:t].all q1.push nil t.join @db.sqls.must_equal ['SELECT * FROM t', 'SELECT * FROM t -- {:host=>"a"}', 'SELECT * FROM t', 'SELECT * FROM t -- {:host=>"c"}', 'SELECT * FROM t -- {:host=>"d"}', 'SELECT * FROM t -- {:host=>"b"}', 'SELECT * FROM t -- {:host=>"a"}', 'SELECT * FROM t', 'SELECT * FROM t -- {:host=>"c"}', 'SELECT * FROM t'] end end
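# Illustrative sketch (commentary, not part of the original archive): the
# arbitrary_servers extension tested above is typically combined with the
# server_block extension, so a connection options hash can be used for a whole
# block of queries; the host/database values below are hypothetical.
require 'sequel'
db = Sequel.mock(:servers=>{})
db.extension :arbitrary_servers, :server_block
db.with_server(:host=>'replica1', :database=>'db1') do
  db[:t].all # issued against the ad hoc server described by the options hash
end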
sequel-5.63.0/spec/extensions/association_dependencies_spec.rb
require_relative "spec_helper" describe "AssociationDependencies plugin" do before do @mods = [] @c = Class.new(Sequel::Model) @c.plugin :association_dependencies @Artist = Class.new(@c).set_dataset(:artists) @Artist.dataset = @Artist.dataset.with_fetch(:id=>2, :name=>'Ar') @Album = Class.new(@c).set_dataset(:albums) @Album.dataset = @Album.dataset.with_fetch(:id=>1, :name=>'Al', :artist_id=>2) @Artist.columns :id, :name @Album.columns :id, :name, :artist_id @Artist.one_to_many :albums, :class=>@Album, :key=>:artist_id @Artist.one_to_one :first_album, :class=>@Album, :key=>:artist_id, :conditions=>{:position=>1} @Artist.many_to_many :other_artists, :class=>@Artist, :join_table=>:aoa, :left_key=>:l, :right_key=>:r @Album.many_to_one :artist, :class=>@Artist DB.reset end it "should allow destroying associated many_to_one object" do @Album.add_association_dependencies :artist=>:destroy @Album.load(:id=>1, :name=>'Al', :artist_id=>2).destroy DB.sqls.must_equal ['DELETE FROM albums WHERE id = 1', 'SELECT * FROM artists WHERE (artists.id = 2) LIMIT 1', 'DELETE FROM artists WHERE id = 2'] end it "should allow deleting associated many_to_one object" do @Album.add_association_dependencies :artist=>:delete @Album.load(:id=>1, :name=>'Al', :artist_id=>2).destroy DB.sqls.must_equal ['DELETE FROM albums WHERE id = 1', 'DELETE FROM artists WHERE (artists.id = 2)'] end it "should allow destroying associated one_to_one object" do @Artist.add_association_dependencies :first_album=>:destroy @Artist.load(:id=>2, :name=>'Ar').destroy DB.sqls.must_equal ['SELECT * FROM albums WHERE ((position = 1) AND (albums.artist_id = 2)) LIMIT 1', 'DELETE FROM albums WHERE id = 1', 'DELETE FROM artists WHERE id = 2'] end it "should allow deleting associated one_to_one object" do @Artist.add_association_dependencies :first_album=>:delete @Artist.load(:id=>2, :name=>'Ar').destroy DB.sqls.must_equal ['DELETE FROM albums WHERE ((position = 1) AND (albums.artist_id = 2))', 'DELETE FROM artists WHERE id = 2'] end it "should allow destroying associated one_to_many objects" do @Artist.add_association_dependencies :albums=>:destroy @Artist.load(:id=>2, :name=>'Ar').destroy DB.sqls.must_equal ['SELECT * FROM albums WHERE (albums.artist_id = 2)', 'DELETE FROM albums WHERE id = 1', 'DELETE FROM artists WHERE id = 2'] end it "should allow deleting associated one_to_many objects" do @Artist.add_association_dependencies :albums=>:delete @Artist.load(:id=>2, :name=>'Ar').destroy DB.sqls.must_equal ['DELETE FROM albums WHERE (albums.artist_id = 2)', 'DELETE FROM artists WHERE
id = 2'] end it "should allow nullifying associated one_to_one objects" do @Artist.add_association_dependencies :first_album=>:nullify @Artist.load(:id=>2, :name=>'Ar').destroy DB.sqls.must_equal ['UPDATE albums SET artist_id = NULL WHERE ((position = 1) AND (artist_id = 2))', 'DELETE FROM artists WHERE id = 2'] end it "should allow nullifying associated one_to_many objects" do @Artist.add_association_dependencies :albums=>:nullify @Artist.load(:id=>2, :name=>'Ar').destroy DB.sqls.must_equal ['UPDATE albums SET artist_id = NULL WHERE (artist_id = 2)', 'DELETE FROM artists WHERE id = 2'] end it "should allow nullifying associated many_to_many associations" do @Artist.add_association_dependencies :other_artists=>:nullify @Artist.load(:id=>2, :name=>'Ar').destroy DB.sqls.must_equal ['DELETE FROM aoa WHERE (l = 2)', 'DELETE FROM artists WHERE id = 2'] end it "should not allow modifications if class is frozen" do @Artist.add_association_dependencies :other_artists=>:nullify @Artist.freeze proc{@Artist.add_association_dependencies :albums=>:nullify}.must_raise RuntimeError, TypeError @Artist.association_dependencies.frozen?.must_equal true @Artist.association_dependencies[:before_nullify].frozen?.must_equal true end it "should raise an error if attempting to nullify a many_to_one association" do proc{@Album.add_association_dependencies :artist=>:nullify}.must_raise(Sequel::Error) end it "should raise an error if using an unrecognized dependence action" do proc{@Album.add_association_dependencies :artist=>:blah}.must_raise(Sequel::Error) end it "should raise an error if a nonexistent association is used" do proc{@Album.add_association_dependencies :blah=>:delete}.must_raise(Sequel::Error) end it "should raise an error if an invalid association type is used" do @Artist.plugin :many_through_many @Artist.many_through_many :other_albums, [[:id, :id, :id]] proc{@Artist.add_association_dependencies :other_albums=>:nullify}.must_raise(Sequel::Error) end it "should raise an error if using a many_to_many association type without nullify" do proc{@Artist.add_association_dependencies :other_artists=>:delete}.must_raise(Sequel::Error) end it "should allow specifying association dependencies in the plugin call" do @Album.plugin :association_dependencies, :artist=>:destroy @Album.load(:id=>1, :name=>'Al', :artist_id=>2).destroy DB.sqls.must_equal ['DELETE FROM albums WHERE id = 1', 'SELECT * FROM artists WHERE (artists.id = 2) LIMIT 1', 'DELETE FROM artists WHERE id = 2'] end it "should work with subclasses" do c = Class.new(@Album) c.add_association_dependencies :artist=>:destroy c.load(:id=>1, :name=>'Al', :artist_id=>2).destroy DB.sqls.must_equal ['DELETE FROM albums WHERE id = 1', 'SELECT * FROM artists WHERE (artists.id = 2) LIMIT 1', 'DELETE FROM artists WHERE id = 2'] @Album.load(:id=>1, :name=>'Al', :artist_id=>2).destroy DB.sqls.must_equal ['DELETE FROM albums WHERE id = 1'] @Album.add_association_dependencies :artist=>:destroy c2 = Class.new(@Album) c2.load(:id=>1, :name=>'Al', :artist_id=>2).destroy DB.sqls.must_equal ['DELETE FROM albums WHERE id = 1', 'SELECT * FROM artists WHERE (artists.id = 2) LIMIT 1', 'DELETE FROM artists WHERE id = 2'] end end
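# Illustrative sketch (commentary, not part of the original archive): typical
# model-level use of the plugin specified above. The Artist model and its
# albums association are hypothetical, and an Album model is assumed to be
# defined elsewhere.
require 'sequel'
DB = Sequel.mock # hypothetical stand-in for a real Database connection
class Artist < Sequel::Model(DB[:artists])
  plugin :association_dependencies
  one_to_many :albums # resolves to the Album model at association access time
  add_association_dependencies :albums=>:destroy
end
# Artist.first.destroy would now destroy the artist's albums before the artist row.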
sequel-5.63.0/spec/extensions/association_lazy_eager_option_spec.rb
require_relative "spec_helper" describe "association_lazy_eager_option plugin" do before do @c = Class.new(Sequel::Model) @db = Sequel.mock @c.set_dataset @db[:ts] @c.plugin :association_lazy_eager_option @c.columns :id, :t_id @c.many_to_one :t, :class=>@c, :key=>:t_id @c.one_to_many :ts, :class=>@c, :key=>:t_id @c.many_to_many :mtm_ts, :class=>@c, :join_table=>:ts, :left_primary_key=>:t_id, :left_key=>:id, :right_key=>:t_id @c.one_to_one :otoo_t, :class=>@c, :key=>:t_id @c.one_through_one :oto_t, :class=>@c, :join_table=>:ts, :left_primary_key=>:t_id, :left_key=>:id, :right_key=>:t_id @o1 = @c.load(:id=>1, :t_id=>2) @o2 = @c.load(:id=>2, :t_id=>3) @o3 = @c.load(:id=>3, :t_id=>4) @db.sqls end it "should support the :eager association method option when doing lazy association loads" do @db.fetch = [[{:id=>2, :t_id=>3}], [{:id=>3, :t_id=>4}]] o = @o1.t(:eager=>:t) @db.sqls.must_equal ["SELECT * FROM ts WHERE (ts.id = 2) LIMIT 1", "SELECT * FROM ts WHERE (ts.id IN (3))"] o.must_equal @o2 o.associations[:t].must_equal @o3 @db.fetch = [[{:id=>2, :t_id=>3}], [{:id=>3, :t_id=>4}]] o = @o3.ts(:eager=>[:t]) @db.sqls.must_equal ["SELECT * FROM ts WHERE (ts.t_id = 3)", "SELECT * FROM ts WHERE (ts.id IN (3))"] o.must_equal [@o2] o.first.associations[:t].must_equal @o3 @db.fetch = [[{:id=>2, :t_id=>3}], [{:id=>3, :t_id=>4}]] o = @o1.mtm_ts(:eager=>{:t=>{}}) @db.sqls.must_equal ["SELECT ts.* FROM ts INNER JOIN ts ON (ts.t_id = ts.id) WHERE (ts.id = 2)", "SELECT * FROM ts WHERE (ts.id IN (3))"] o.must_equal [@o2] o.first.associations[:t].must_equal @o3 @db.fetch = [[{:id=>2, :t_id=>3}], [{:id=>1, :t_id=>2}]] o = @o1.otoo_t(:eager=>:ts) @db.sqls.must_equal ["SELECT * FROM ts WHERE (ts.t_id = 1) LIMIT 1", "SELECT * FROM ts WHERE (ts.t_id IN (2))"] o.must_equal @o2 o.associations[:ts].must_equal [@o1] @db.fetch = [[{:id=>2, :t_id=>3}], [{:id=>3, :t_id=>4}]] o = @o1.oto_t(:eager=>:t) @db.sqls.must_equal ["SELECT ts.* FROM ts INNER JOIN ts ON (ts.t_id = ts.id) WHERE (ts.id = 2) LIMIT 1", "SELECT * FROM ts WHERE (ts.id IN (3))"] o.must_equal @o2 o.associations[:t].must_equal @o3 end it "should ignore the :eager and :eager_graph options and return cached result when association is already loaded" do @db.fetch = [{:id=>2, :t_id=>3}] @o1.t.must_equal @o2 @o1.ts.must_equal [@o2] @o1.mtm_ts.must_equal [@o2] @o1.otoo_t.must_equal @o2 @o1.oto_t.must_equal @o2 @db.sqls @o1.t(:eager=>:t).must_equal @o2 @o1.ts(:eager=>:t).must_equal [@o2] @o1.mtm_ts(:eager=>:t).must_equal [@o2] @o1.otoo_t(:eager=>:t).must_equal @o2
@o1.oto_t(:eager=>:t).must_equal @o2 @db.sqls.must_equal [] end it "should work normally if the :eager option is not passed to the association method" do @db.fetch = {:id=>2, :t_id=>3} o = @o1.mtm_ts{|ds| ds} @db.sqls.must_equal ["SELECT ts.* FROM ts INNER JOIN ts ON (ts.t_id = ts.id) WHERE (ts.id = 2)"] o.must_equal [@o2] o.first.associations.must_be_empty end end
sequel-5.63.0/spec/extensions/association_multi_add_remove_spec.rb
require_relative "spec_helper" describe "association_multi_add_remove plugin - one_to_many" do before do @c1 = Class.new(Sequel::Model(:attributes)) do unrestrict_primary_key columns :id, :node_id, :y, :z end @c2 = Class.new(Sequel::Model(:nodes)) do plugin :association_multi_add_remove unrestrict_primary_key attr_accessor :xxx def self.name; 'Node'; end def self.to_s; 'Node'; end columns :id, :x private def _refresh(ds); end end @dataset = @c2.dataset = @c2.dataset.with_fetch({}) @c1.dataset = @c1.dataset.with_fetch(proc { |sql| sql =~ /SELECT 1/ ?
{ a: 1 } : {} }) DB.reset end it "should define an add_*s method that works on existing records" do @c2.one_to_many :attributes, class: @c1 n = @c2.load(id: 1234) a1 = @c1.load(id: 2345) a2 = @c1.load(id: 3456) [a1, a2].must_equal n.add_attributes([a1, a2]) a1.values.must_equal(:node_id => 1234, id: 2345) a2.values.must_equal(:node_id => 1234, id: 3456) DB.sqls.must_equal [ 'BEGIN', 'UPDATE attributes SET node_id = 1234 WHERE (id = 2345)', 'UPDATE attributes SET node_id = 1234 WHERE (id = 3456)', 'COMMIT' ] end it "should not define add/remove methods with the same name as the ones defined by default " do @c2.one_to_many :sheep, class: @c1, :key=>:node_id n = @c2.load(id: 1234) a1 = @c1.load(id: 2345) a1.must_be_same_as n.add_sheep(a1) a1.values.must_equal(:node_id => 1234, id: 2345) DB.sqls.must_equal ['UPDATE attributes SET node_id = 1234 WHERE (id = 2345)'] a1.must_be_same_as n.remove_sheep(a1) a1.values.must_equal(:node_id => nil, id: 2345) DB.sqls.must_equal [ "SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (id = 2345)) LIMIT 1", 'UPDATE attributes SET node_id = NULL WHERE (id = 2345)', ] n.respond_to?(:sheep=).must_equal false end it "should support :multi_add_method" do @c2.one_to_many :attributes, class: @c1, :multi_add_method=>:add_multiple_attributes n = @c2.load(id: 1234) a1 = @c1.load(id: 2345) a2 = @c1.load(id: 3456) [a1, a2].must_equal n.add_multiple_attributes([a1, a2]) a1.values.must_equal(:node_id => 1234, id: 2345) a2.values.must_equal(:node_id => 1234, id: 3456) DB.sqls.must_equal [ 'BEGIN', 'UPDATE attributes SET node_id = 1234 WHERE (id = 2345)', 'UPDATE attributes SET node_id = 1234 WHERE (id = 3456)', 'COMMIT' ] end it "should define an add_*s method that works on new records" do @c2.one_to_many :attributes, :class => @c1 n = @c2.load(:id => 1234) a1 = @c1.new(:id => 234) a2 = @c1.new(:id => 345) @c1.dataset = @c1.dataset.with_fetch([ [{ :id=>234, :node_id=>1234 }], [{ :id=>345, :node_id=>1234 }] ]) [a1, a2].must_equal n.add_attributes([a1, a2]) DB.sqls.must_equal [ 'BEGIN', "INSERT INTO attributes (id, node_id) VALUES (234, 1234)", "SELECT * FROM attributes WHERE id = 234", "INSERT INTO attributes (id, node_id) VALUES (345, 1234)", "SELECT * FROM attributes WHERE id = 345", 'COMMIT' ] a1.values.must_equal(:node_id => 1234, :id => 234) a2.values.must_equal(:node_id => 1234, :id => 345) end it "should define a remove_*s method that works on existing records" do @c2.one_to_many :attributes, :class => @c1 n = @c2.load(:id => 1234) a1 = @c1.load(:id => 2345, :node_id => 1234) a2 = @c1.load(:id => 3456, :node_id => 1234) [a1, a2].must_equal n.remove_attributes([a1, a2]) a1.values.must_equal(:node_id => nil, :id => 2345) a2.values.must_equal(:node_id => nil, :id => 3456) DB.sqls.must_equal [ 'BEGIN', "SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (id = 2345)) LIMIT 1", 'UPDATE attributes SET node_id = NULL WHERE (id = 2345)', "SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (id = 3456)) LIMIT 1", 'UPDATE attributes SET node_id = NULL WHERE (id = 3456)', 'COMMIT' ] end it "should support :multi_remove_method" do @c2.one_to_many :attributes, :class => @c1, :multi_remove_method=>:remove_multiple_attributes n = @c2.load(:id => 1234) a1 = @c1.load(:id => 2345, :node_id => 1234) a2 = @c1.load(:id => 3456, :node_id => 1234) [a1, a2].must_equal n.remove_multiple_attributes([a1, a2]) a1.values.must_equal(:node_id => nil, :id => 2345) a2.values.must_equal(:node_id => nil, :id => 3456) DB.sqls.must_equal [ 
'BEGIN', "SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (id = 2345)) LIMIT 1", 'UPDATE attributes SET node_id = NULL WHERE (id = 2345)', "SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (id = 3456)) LIMIT 1", 'UPDATE attributes SET node_id = NULL WHERE (id = 3456)', 'COMMIT' ] end it "should have the remove_*s method raise an error if the passed objects are not already associated" do @c2.one_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) a1 = @c1.load(:id => 2345, :node_id => 1234) a2 = @c1.load(:id => 3456, :node_id => 1234) @c1.dataset = @c1.dataset.with_fetch([]) proc{n.remove_attributes([a1, a2])}.must_raise(Sequel::Error) DB.sqls.must_equal [ 'BEGIN', "SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (id = 2345)) LIMIT 1", 'ROLLBACK' ] end it "should accept hashes for the add_*s method and create a new records" do @c2.one_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) DB.reset @c1.dataset = @c1.dataset.with_fetch([ [{ :node_id => 1234, :id => 234 }], [{ :node_id => 1234, :id => 345 }] ]) n.add_attributes([{ :id => 234 }, { :id => 345 }]).must_equal [ @c1.load(:node_id => 1234, :id => 234), @c1.load(:node_id => 1234, :id => 345) ] DB.sqls.must_equal [ 'BEGIN', "INSERT INTO attributes (id, node_id) VALUES (234, 1234)", "SELECT * FROM attributes WHERE id = 234", "INSERT INTO attributes (id, node_id) VALUES (345, 1234)", "SELECT * FROM attributes WHERE id = 345", 'COMMIT' ] end it "should accept primary keys for the add_*s method" do @c2.one_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) @c1.dataset = @c1.dataset.with_fetch([ [{ :node_id => nil, :id => 234 }], [{ :node_id => nil, :id => 345 }] ]) n.add_attributes([234, 345]).must_equal [ @c1.load(:node_id => 1234, :id => 234), @c1.load(:node_id => 1234, :id => 345) ] DB.sqls.must_equal [ 'BEGIN', "SELECT * FROM attributes WHERE id = 234", "UPDATE attributes SET node_id = 1234 WHERE (id = 234)", "SELECT * FROM attributes WHERE id = 345", "UPDATE attributes SET node_id = 1234 WHERE (id = 345)", 'COMMIT' ] end it "should raise an error if the primary key passed to the add_*s method does not match an existing record" do @c2.one_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) @c1.dataset = @c1.dataset.with_fetch([]) proc{n.add_attributes([234, 345])}.must_raise(Sequel::NoMatchingRow) DB.sqls.must_equal [ 'BEGIN', "SELECT * FROM attributes WHERE id = 234", 'ROLLBACK' ] end it "should raise an error in the add_*s method if the passed associated objects are not of the correct type" do @c2.one_to_many :attributes, :class => @c1 proc{@c2.new(:id => 1234).add_attributes([@c2.new, @c2.new])}.must_raise(Sequel::Error) end it "should accept primary keys for the remove_*s method and remove existing records" do @c2.one_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) @c1.dataset = @c1.dataset.with_fetch([ [{ :id=>234, :node_id=>1234 }], [{ :id=>345, :node_id=>1234 }] ]) n.remove_attributes([234, 345]).must_equal [ @c1.load(:node_id => nil, :id => 234), @c1.load(:node_id => nil, :id => 345) ] DB.sqls.must_equal [ 'BEGIN', 'SELECT * FROM attributes WHERE ((attributes.node_id = 1234) AND (attributes.id = 234)) LIMIT 1', 'UPDATE attributes SET node_id = NULL WHERE (id = 234)', 'SELECT * FROM attributes WHERE ((attributes.node_id = 1234) AND (attributes.id = 345)) LIMIT 1', 'UPDATE attributes SET node_id = NULL WHERE (id = 345)', 'COMMIT' ] end it "should raise an error in the remove_*s method if the passed associated objects are 
not of the correct type" do @c2.one_to_many :attributes, :class => @c1 proc{@c2.new(:id => 1234).remove_attributes([@c2.new, @c2.new])}.must_raise(Sequel::Error) end it "should have add_*s method respect the :primary_key option" do @c2.one_to_many :attributes, :class => @c1, :primary_key=>:xxx n = @c2.new(:id => 1234, :xxx=>5) a1 = @c1.load(:id => 2345) a2 = @c1.load(:id => 3456) n.add_attributes([a1, a2]).must_equal [a1, a2] DB.sqls.must_equal [ 'BEGIN', 'UPDATE attributes SET node_id = 5 WHERE (id = 2345)', 'UPDATE attributes SET node_id = 5 WHERE (id = 3456)', 'COMMIT' ] end it "should have add_*s method not add the same objects to the cached association array if the objects are already in the array" do @c2.one_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) a1 = @c1.load(:id => 2345) a2 = @c1.load(:id => 3456) n.associations[:attributes] = [] [a1, a2].must_equal n.add_attributes([a1, a2]) [a1, a2].must_equal n.add_attributes([a1, a2]) a1.values.must_equal(:node_id => 1234, :id => 2345) a2.values.must_equal(:node_id => 1234, :id => 3456) n.attributes.must_equal [a1, a2] DB.sqls.must_equal [ 'BEGIN', 'UPDATE attributes SET node_id = 1234 WHERE (id = 2345)', 'UPDATE attributes SET node_id = 1234 WHERE (id = 3456)', 'COMMIT' ] * 2 end it "should have add_*s method respect composite keys" do @c2.one_to_many :attributes, :class => @c1, :key =>[:node_id, :y], :primary_key=>[:id, :x] n = @c2.load(:id => 1234, :x=>5) a1 = @c1.load(:id => 2345) a2 = @c1.load(:id => 3456) n.add_attributes([a1, a2]).must_equal [a1, a2] DB.sqls.must_equal [ 'BEGIN', "UPDATE attributes SET node_id = 1234, y = 5 WHERE (id = 2345)", "UPDATE attributes SET node_id = 1234, y = 5 WHERE (id = 3456)", 'COMMIT' ] end it "should have add_*s method accept composite keys" do @c1.dataset = @c1.dataset.with_fetch([ [{ :id=>2345, :node_id=>1234, :z=>8, :y=>5 }], [{ :id=>3456, :node_id=>1234, :z=>9, :y=>5 }] ]) @c1.set_primary_key [:id, :z] @c2.one_to_many :attributes, :class => @c1, :key =>[:node_id, :y], :primary_key=>[:id, :x] n = @c2.load(:id => 1234, :x=>5) a1 = @c1.load(:id => 2345, :z => 8, :node_id => 1234, :y=>5) a2 = @c1.load(:id => 3456, :z => 9, :node_id => 1234, :y=>5) n.add_attributes([[2345, 8], [3456, 9]]).must_equal [a1, a2] DB.sqls.must_equal [ 'BEGIN', "SELECT * FROM attributes WHERE ((id = 2345) AND (z = 8)) LIMIT 1", "UPDATE attributes SET node_id = 1234, y = 5 WHERE ((id = 2345) AND (z = 8))", "SELECT * FROM attributes WHERE ((id = 3456) AND (z = 9)) LIMIT 1", "UPDATE attributes SET node_id = 1234, y = 5 WHERE ((id = 3456) AND (z = 9))", 'COMMIT' ] end it "should have remove_*s method respect composite keys" do @c2.one_to_many :attributes, :class => @c1, :key =>[:node_id, :y], :primary_key=>[:id, :x] n = @c2.load(:id => 1234, :x=>5) a1 = @c1.load(:id => 2345, :node_id=>1234, :y=>5) a2 = @c1.load(:id => 3456, :node_id=>1234, :y=>5) n.remove_attributes([a1, a2]).must_equal [a1, a2] DB.sqls.must_equal [ 'BEGIN', "SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (attributes.y = 5) AND (id = 2345)) LIMIT 1", "UPDATE attributes SET node_id = NULL, y = NULL WHERE (id = 2345)", "SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (attributes.y = 5) AND (id = 3456)) LIMIT 1", "UPDATE attributes SET node_id = NULL, y = NULL WHERE (id = 3456)", 'COMMIT' ] end it "should accept a array of composite primary keys values for the remove_*s method and remove existing records" do @c1.dataset = @c1.dataset.with_fetch([ [{ :id=>234, :node_id=>123, :y=>5 }], [{ :id=>345, 
:node_id=>123, :y=>6 }] ]) @c1.set_primary_key [:id, :y] @c2.one_to_many :attributes, :class => @c1, :key=>:node_id, :primary_key=>:id n = @c2.new(:id => 123) n.remove_attributes([[234, 5], [345, 6]]).must_equal [ @c1.load(:node_id => nil, :y => 5, :id => 234), @c1.load(:node_id => nil, :y => 6, :id => 345) ] DB.sqls.must_equal [ 'BEGIN', "SELECT * FROM attributes WHERE ((attributes.node_id = 123) AND (attributes.id = 234) AND (attributes.y = 5)) LIMIT 1", "UPDATE attributes SET node_id = NULL WHERE ((id = 234) AND (y = 5))", "SELECT * FROM attributes WHERE ((attributes.node_id = 123) AND (attributes.id = 345) AND (attributes.y = 6)) LIMIT 1", "UPDATE attributes SET node_id = NULL WHERE ((id = 345) AND (y = 6))", 'COMMIT' ] end it "should raise an error in add_*s and remove_*s if the passed objects return false to save (are not valid)" do @c2.one_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) a1 = @c1.new(:id => 2345) a2 = @c1.new(:id => 3456) def a1.validate() errors.add(:id, 'foo') end def a2.validate() errors.add(:id, 'bar') end proc{n.add_attributes([a1, a2])}.must_raise(Sequel::ValidationFailed) proc{n.remove_attributes([a1, a2])}.must_raise(Sequel::ValidationFailed) end it "should not validate the associated objects in add_*s and remove_*s if the :validate=>false option is used" do @c2.one_to_many :attributes, :class => @c1, :validate=>false n = @c2.new(:id => 1234) a1 = @c1.new(:id => 2345) a2 = @c1.new(:id => 3456) def a1.validate() errors.add(:id, 'foo') end def a2.validate() errors.add(:id, 'bar') end n.add_attributes([a1, a2]).must_equal [a1, a2] n.remove_attributes([a1, a2]).must_equal [a1, a2] end it "should not raise exception in add_*s and remove_*s if the :raise_on_save_failure=>false option is used" do @c2.one_to_many :attributes, :class => @c1, :raise_on_save_failure=>false n = @c2.new(:id => 1234) a1 = @c1.new(:id => 2345) a2 = @c1.new(:id => 3456) def a1.validate() errors.add(:id, 'foo') end def a2.validate() errors.add(:id, 'bar') end n.associations[:attributes] = [] n.add_attributes([a1, a2]).must_equal [] n.associations[:attributes].must_equal [] n.remove_attributes([a1, a2]).must_equal [] n.associations[:attributes].must_equal [] end it "should add item to cache if it exists when calling add_*s" do @c2.one_to_many :attributes, :class => @c1 n = @c2.new(:id => 123) a1 = @c1.load(:id => 234) a2 = @c1.load(:id => 345) arr = [] n.associations[:attributes] = arr n.add_attributes([a1, a2]) arr.must_equal [a1, a2] end it "should set object to item's reciprocal cache when calling add_*s" do @c2.one_to_many :attributes, :class => @c1 @c1.many_to_one :node, :class => @c2 n = @c2.new(:id => 123) a1 = @c1.new(:id => 234) a2 = @c1.new(:id => 345) n.add_attributes([a1, a2]) a1.node.must_equal n a2.node.must_equal n end it "should remove item from cache if it exists when calling remove_*s" do @c2.one_to_many :attributes, :class => @c1 n = @c2.load(:id => 123) a1 = @c1.load(:id => 234) a2 = @c1.load(:id => 345) arr = [a1, a2] n.associations[:attributes] = arr n.remove_attributes([a1, a2]) arr.must_equal [] end it "should remove item's reciprocal cache when calling remove_*s" do @c2.one_to_many :attributes, :class => @c1 @c1.many_to_one :node, :class => @c2 n = @c2.new(:id => 123) a1 = @c1.new(:id => 234) a2 = @c1.new(:id => 345) a1.associations[:node] = n a2.associations[:node] = n a1.node.must_equal n a2.node.must_equal n n.remove_attributes([a1, a2]) a1.node.must_be_nil a2.node.must_be_nil end it "should not create the add_*s or remove_*s methods if :read_only option is
used" do @c2.one_to_many :attributes, :class => @c1, :read_only=>true im = @c2.instance_methods im.wont_include(:add_attributes) im.wont_include(:remove_attributes) end it "should not add associations methods directly to class" do @c2.one_to_many :attributes, :class => @c1 im = @c2.instance_methods im.must_include(:add_attributes) im.must_include(:remove_attributes) im2 = @c2.instance_methods(false) im2.wont_include(:add_attributes) im2.wont_include(:remove_attributes) end it "should call an _add_ method internally to add attributes" do @c2.one_to_many :attributes, :class => @c1 @c2.private_instance_methods.must_include(:_add_attribute) p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) def p._add_attribute(x) (@x ||= []) << x end def c1._node_id=; raise; end def c2._node_id=; raise; end p.add_attributes([c1, c2]) p.instance_variable_get(:@x).must_equal [c1, c2] end it "should allow additional arguments given to the add_*s method and pass them onwards to the _add_ method" do @c2.one_to_many :attributes, :class => @c1 p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) def p._add_attribute(x,*y) (@x ||= []) << x (@y ||= []) << y end def c1._node_id=; raise; end def c2._node_id=; raise; end p.add_attributes([c1, c2], :foo, :bar=>:baz) p.instance_variable_get(:@x).must_equal [c1, c2] p.instance_variable_get(:@y).must_equal [ [:foo,{:bar=>:baz}], [:foo,{:bar=>:baz}] ] end it "should call a _remove_ method internally to remove attributes" do @c2.one_to_many :attributes, :class => @c1 @c2.private_instance_methods.must_include(:_remove_attribute) p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) def p._remove_attribute(x) (@x ||= []) << x end def c1._node_id=; raise; end def c2._node_id=; raise; end p.remove_attributes([c1, c2]) p.instance_variable_get(:@x).must_equal [c1, c2] end it "should allow additional arguments given to the remove_*s method and pass them onwards to the _remove_ method" do @c2.one_to_many :attributes, :class => @c1, :reciprocal=>nil p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) def p._remove_attribute(x,*y) (@x ||= []) << x (@y ||= []) << y end def c1._node_id=; raise; end def c2._node_id=; raise; end p.remove_attributes([c1, c2], :foo, :bar=>:baz) p.instance_variable_get(:@x).must_equal [c1, c2] p.instance_variable_get(:@y).must_equal [ [:foo,{:bar=>:baz}], [:foo,{:bar=>:baz}] ] end it "should support (before|after)_(add|remove) callbacks for (add|remove)_*s methods" do h = [] @c2.one_to_many :attributes, :class => @c1, :before_add=>[proc{|x,y| h << x.pk; h << -y.pk}, :blah], :after_add=>proc{h << 3}, :before_remove=>:blah, :after_remove=>[:blahr] @c2.class_eval do self::Foo = h def blah(x) model::Foo << x.pk end def blahr(x) model::Foo << 6 end private def _add_attribute(v) model::Foo << 4 end def _remove_attribute(v) model::Foo << 5 end end p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) h.must_equal [] p.add_attributes([c1, c2]) h.must_equal [ 10, -123, 123, 4, 3, 10, -234, 234, 4, 3 ] p.remove_attributes([c1, c2]) h.must_equal [ 10, -123, 123, 4, 3, 10, -234, 234, 4, 3, 123, 5, 6, 234, 5, 6 ] end it "should raise error and not call internal add_*s or remove_*s method if before callback calls cancel_action if raise_on_save_failure is true" do p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) @c2.one_to_many :attributes, :class => @c1, :before_add=>:ba, :before_remove=>:br def p.ba(o); cancel_action; end def p._add_attribute; raise; end def 
p._remove_attribute; raise; end p.associations[:attributes] = [] proc{p.add_attributes([c1, c2])}.must_raise(Sequel::HookFailed) p.attributes.must_equal [] p.associations[:attributes] = [c1, c2] def p.br(o); cancel_action; end proc{p.remove_attributes([c1, c2])}.must_raise(Sequel::HookFailed) p.attributes.must_equal [c1, c2] end it "should return nil and not call internal add_*s or remove_*s method if before callback calls cancel_action if raise_on_save_failure is false" do p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) p.raise_on_save_failure = false @c2.one_to_many :attributes, :class => @c1, :before_add=>:ba, :before_remove=>:br def p.ba(o); cancel_action; end def p._add_attribute; raise; end def p._remove_attribute; raise; end p.associations[:attributes] = [] p.add_attributes([c1, c2]).must_equal [] p.attributes.must_equal [] p.associations[:attributes] = [c1, c2] def p.br(o); cancel_action; end p.remove_attributes([c1, c2]).must_equal [] p.attributes.must_equal [c1, c2] end it "should define a setter that works on existing records" do @c2.one_to_many :attributes, class: @c1 n = @c2.load(id: 1234) a1 = @c1.load(id: 2345, node_id: 1234) a2 = @c1.load(id: 3456, node_id: 1234) a3 = @c1.load(id: 4567) n.associations[:attributes] = [a1, a2] [a2, a3].must_equal(n.attributes = [a2, a3]) a1.values.must_equal(node_id: nil, id: 2345) a2.values.must_equal(node_id: 1234, id: 3456) a3.values.must_equal(node_id: 1234, id: 4567) DB.sqls.must_equal [ 'BEGIN', 'SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (id = 2345)) LIMIT 1', 'UPDATE attributes SET node_id = NULL WHERE (id = 2345)', 'UPDATE attributes SET node_id = 1234 WHERE (id = 4567)', 'COMMIT' ] end end describe "association_multi_add_remove plugin - many_to_many" do before do @c1 = Class.new(Sequel::Model(:attributes)) do unrestrict_primary_key attr_accessor :yyy def self.name; 'Attribute'; end def self.to_s; 'Attribute'; end columns :id, :y, :z end @c2 = Class.new(Sequel::Model(:nodes)) do unrestrict_primary_key plugin :association_multi_add_remove attr_accessor :xxx def self.name; 'Node'; end def self.to_s; 'Node'; end columns :id, :x end @dataset = @c2.dataset @c1.dataset = @c1.dataset.with_autoid(1) [@c1, @c2].each{|c| c.dataset = c.dataset.with_fetch({})} DB.reset end it "should define an add_*s method that works on existing records" do @c2.many_to_many :attributes, :class => @c1 n = @c2.load(:id => 1234) a1 = @c1.load(:id => 2345) a2 = @c1.load(:id => 3456) n.add_attributes([a1, a2]).must_equal [a1, a2] DB.sqls.must_equal [ 'BEGIN', "INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (1234, 2345)", "INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (1234, 3456)", 'COMMIT' ] end it "should define an add_*s method that works with a primary key" do @c2.many_to_many :attributes, :class => @c1 n = @c2.load(:id => 1234) a1 = @c1.load(:id => 2345) a2 = @c1.load(:id => 3456) @c1.dataset = @c1.dataset.with_fetch([[{ :id=>2345 }], [{ :id=>3456 }]]) n.add_attributes([2345, 3456]).must_equal [a1, a2] DB.sqls.must_equal [ 'BEGIN', "SELECT * FROM attributes WHERE id = 2345", "INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (1234, 2345)", "SELECT * FROM attributes WHERE id = 3456", "INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (1234, 3456)", 'COMMIT' ] end it "should allow passing hashes to the add_*s method which creates new records" do @c2.many_to_many :attributes, :class => @c1 n = @c2.load(:id => 1234) @c1.dataset = @c1.dataset.with_fetch([[{ :id=>1 }], [{ 
:id=>2 }]]) n.add_attributes([{ :id => 1 }, { :id => 2 }]).must_equal [ @c1.load(:id => 1), @c1.load(:id => 2) ] DB.sqls.must_equal [ 'BEGIN', 'INSERT INTO attributes (id) VALUES (1)', "SELECT * FROM attributes WHERE id = 1", "INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (1234, 1)", 'INSERT INTO attributes (id) VALUES (2)', "SELECT * FROM attributes WHERE id = 2", "INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (1234, 2)", 'COMMIT' ] end it "should define a remove_*s method that works on existing records" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) a1 = @c1.new(:id => 2345) a2 = @c1.new(:id => 3456) n.remove_attributes([a1, a2]).must_equal [a1, a2] DB.sqls.must_equal [ 'BEGIN', 'DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 2345))', 'DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 3456))', 'COMMIT' ] end it "should accept primary keys for the remove_*s method and remove existing records" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) @c1.dataset = @c1.dataset.with_fetch([[{ :id=>234 }], [{ :id=>345 }]]) n.remove_attributes([234, 345]).must_equal [ @c1.load(:id => 234), @c1.load(:id => 345) ] DB.sqls.must_equal [ 'BEGIN', "SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((attributes_nodes.node_id = 1234) AND (attributes.id = 234)) LIMIT 1", "DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 234))", "SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((attributes_nodes.node_id = 1234) AND (attributes.id = 345)) LIMIT 1", "DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 345))", 'COMMIT' ] end it "should have the add_*s method respect the :left_primary_key and :right_primary_key options" do @c2.many_to_many :attributes, :class => @c1, :left_primary_key=>:xxx, :right_primary_key=>:yyy n = @c2.load(:id => 1234).set(:xxx=>5) a1 = @c1.load(:id => 2345).set(:yyy=>8) a2 = @c1.load(:id => 3456).set(:yyy=>9) n.add_attributes([a1, a2]).must_equal [a1, a2] DB.sqls.must_equal [ 'BEGIN', "INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (5, 8)", "INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (5, 9)", 'COMMIT' ] end it "should have the add_*s method respect composite keys" do @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :z] @c1.dataset = @c1.dataset.with_fetch([ [{ :id=>2345, :z=>8 }], [{ :id=>3456, :z=>9 }] ]) @c1.set_primary_key [:id, :z] n = @c2.load(:id => 1234, :x=>5) a1 = @c1.load(:id => 2345, :z=>8) a2 = @c1.load(:id => 3456, :z=>9) n.add_attributes([[2345, 8], [3456, 9]]).must_equal [a1, a2] DB.sqls.must_equal [ 'BEGIN', "SELECT * FROM attributes WHERE ((id = 2345) AND (z = 8)) LIMIT 1", "INSERT INTO attributes_nodes (l1, l2, r1, r2) VALUES (1234, 5, 2345, 8)", "SELECT * FROM attributes WHERE ((id = 3456) AND (z = 9)) LIMIT 1", "INSERT INTO attributes_nodes (l1, l2, r1, r2) VALUES (1234, 5, 3456, 9)", 'COMMIT' ] end it "should have the remove_*s method respect the :left_primary_key and :right_primary_key options" do @c2.many_to_many :attributes, :class => @c1, :left_primary_key=>:xxx, :right_primary_key=>:yyy n = @c2.new(:id => 1234, :xxx=>5) a1 = @c1.new(:id => 2345, :yyy=>8) a2 = @c1.new(:id => 3456, :yyy=>9) n.remove_attributes([a1, a2]).must_equal 
[a1, a2] DB.sqls.must_equal [ 'BEGIN', 'DELETE FROM attributes_nodes WHERE ((node_id = 5) AND (attribute_id = 8))', 'DELETE FROM attributes_nodes WHERE ((node_id = 5) AND (attribute_id = 9))', 'COMMIT' ] end it "should have the remove_*s method respect composite keys" do @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :z] n = @c2.load(:id => 1234, :x=>5) a1 = @c1.load(:id => 2345, :z=>8) a2 = @c1.load(:id => 3456, :z=>9) [a1, a2].must_equal n.remove_attributes([a1, a2]) DB.sqls.must_equal [ 'BEGIN', "DELETE FROM attributes_nodes WHERE ((l1 = 1234) AND (l2 = 5) AND (r1 = 2345) AND (r2 = 8))", "DELETE FROM attributes_nodes WHERE ((l1 = 1234) AND (l2 = 5) AND (r1 = 3456) AND (r2 = 9))", 'COMMIT' ] end it "should accept an array of arrays of composite primary key values for the remove_*s method and remove existing records" do @c1.dataset = @c1.dataset.with_fetch([ [{ :id=>234, :y=>8 }], [{ :id=>345, :y=>9 }] ]) @c1.set_primary_key [:id, :y] @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) n.remove_attributes([[234, 8], [345, 9]]).must_equal [ @c1.load(:id => 234, :y=>8), @c1.load(:id => 345, :y=>9) ] DB.sqls.must_equal [ 'BEGIN', "SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((attributes_nodes.node_id = 1234) AND (attributes.id = 234) AND (attributes.y = 8)) LIMIT 1", "DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 234))", "SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((attributes_nodes.node_id = 1234) AND (attributes.id = 345) AND (attributes.y = 9)) LIMIT 1", "DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 345))", 'COMMIT' ] end it "should raise an error if trying to remove model objects that don't have valid primary keys" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new a1 = @c1.load(:id=>123) a2 = @c1.load(:id=>234) proc { n.remove_attributes([a1, a2]) }.must_raise(Sequel::Error) end it "should remove items from cache if they exist when calling remove_*s" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) a1 = @c1.load(:id => 345) a2 = @c1.load(:id => 456) arr = [a1, a2] n.associations[:attributes] = arr n.remove_attributes([a1, a2]) arr.must_equal [] end it "should remove items from reciprocal's if they exist when calling remove_*s" do @c2.many_to_many :attributes, :class => @c1 @c1.many_to_many :nodes, :class => @c2 n = @c2.new(:id => 1234) a1 = @c1.new(:id => 345) a2 = @c1.new(:id => 456) a1.associations[:nodes] = [n] a2.associations[:nodes] = [n] n.remove_attributes([a1, a2]) a1.nodes.must_equal [] a2.nodes.must_equal [] end it "should not create the add_*s or remove_*s methods if :read_only option is used" do @c2.many_to_many :attributes, :class => @c1, :read_only=>true im = @c2.instance_methods im.wont_include(:add_attributes) im.wont_include(:remove_attributes) end it "should not add associations methods directly to class" do @c2.many_to_many :attributes, :class => @c1 im = @c2.instance_methods im.must_include(:add_attributes) im.must_include(:remove_attributes) im2 = @c2.instance_methods(false) im2.wont_include(:add_attributes) im2.wont_include(:remove_attributes) end it "should call a _remove_*s method internally to remove attributes" do @c2.many_to_many :attributes, :class => @c1 
@c2.private_instance_methods.must_include(:_remove_attribute) p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) def p._remove_attribute(x) (@x ||= []) << x end p.remove_attributes([c1, c2]) p.instance_variable_get(:@x).must_equal [c1, c2] DB.sqls.must_equal ['BEGIN', 'COMMIT'] end it "should support a :remover option for defining the _remove_*s method" do @c2.many_to_many :attributes, :class => @c1, :remover=>proc { |x| (@x ||= []) << x } p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) p.remove_attributes([c1, c2]) p.instance_variable_get(:@x).must_equal [c1, c2] DB.sqls.must_equal ['BEGIN', 'COMMIT'] end it "should allow additional arguments given to the remove_*s method and pass them onwards to the _remove_ method" do @c2.many_to_many :attributes, :class => @c1 p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) def p._remove_attribute(x,*y) (@x ||= []) << x (@y ||= []) << y end p.remove_attributes([c1, c2], :foo, :bar=>:baz) p.instance_variable_get(:@x).must_equal [c1, c2] p.instance_variable_get(:@y).must_equal [ [:foo, { :bar=>:baz }], [:foo, { :bar=>:baz }] ] end it "should raise an error in the remove_*s method if the passed associated objects are not of the correct type" do @c2.many_to_many :attributes, :class => @c1 proc do @c2.new(:id => 1234).remove_attributes([@c2.new, @c2.new]) end .must_raise(Sequel::Error) end it "should support (before|after)_(add|remove) callbacks for (add|remove)_* methods" do h = [] @c2.many_to_many :attributes, :class => @c1, :before_add=>[proc{|x,y| h << x.pk; h << -y.pk}, :blah], :after_add=>proc{h << 3}, :before_remove=>:blah, :after_remove=>[:blahr] @c2.class_eval do self::Foo = h def blah(x) model::Foo << x.pk end def blahr(x) model::Foo << 6 end private def _add_attribute(v) model::Foo << 4 end def _remove_attribute(v) model::Foo << 5 end end p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) h.must_equal [] p.add_attributes([c1, c2]) h.must_equal [ 10, -123, 123, 4, 3, 10, -234, 234, 4, 3 ] p.remove_attributes([c1, c2]) h.must_equal [ 10, -123, 123, 4, 3, 10, -234, 234, 4, 3, 123, 5, 6, 234, 5, 6 ] end it "should raise error and not call internal add_*s or remove_*s method if before callback calls cancel_action if raise_on_save_failure is true" do p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) @c2.many_to_many :attributes, :class => @c1, :before_add=>:ba, :before_remove=>:br def p.ba(o) cancel_action end def p._add_attribute; raise; end def p._remove_attribute; raise; end p.associations[:attributes] = [] p.raise_on_save_failure = true proc{p.add_attributes([c1, c2])}.must_raise(Sequel::HookFailed) p.attributes.must_equal [] p.associations[:attributes] = [c1, c2] def p.br(o) cancel_action end proc { p.remove_attributes([c1, c2]) }.must_raise(Sequel::HookFailed) p.attributes.must_equal [c1, c2] end it "should return nil and not call internal add_*s or remove_*s method if before callback calls cancel_action if raise_on_save_failure is false" do p = @c2.load(:id=>10) c1 = @c1.load(:id=>123) c2 = @c1.load(:id=>234) p.raise_on_save_failure = false @c2.many_to_many :attributes, :class => @c1, :before_add=>:ba, :before_remove=>:br def p.ba(o) cancel_action end def p._add_attribute; raise; end def p._remove_attribute; raise; end p.associations[:attributes] = [] p.add_attributes([c1, c2]).must_equal [] p.attributes.must_equal [] p.associations[:attributes] = [c1, c2] def p.br(o) cancel_action end p.remove_attributes([c1, c2]).must_equal [] 
p.attributes.must_equal [c1, c2] end it "should define a setter that works on existing records" do @c2.many_to_many :attributes, class: @c1 n = @c2.load(id: 1234) a1 = @c1.load(id: 2345) a2 = @c1.load(id: 3456) a3 = @c1.load(id: 4567) n.associations[:attributes] = [a1, a2] [a2, a3].must_equal(n.attributes = [a2, a3]) DB.sqls.must_equal [ 'BEGIN', 'DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 2345))', 'INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (1234, 4567)', 'COMMIT' ] end end describe "association_multi_add_remove plugin - sharding" do before do @db = Sequel.mock(:servers=>{:a=>{}}, :numrows=>1) @c1 = Class.new(Sequel::Model(@db[:attributes])) do unrestrict_primary_key columns :id, :node_id, :y, :z end @c2 = Class.new(Sequel::Model(@db[:nodes])) do plugin :association_multi_add_remove unrestrict_primary_key attr_accessor :xxx def self.name; 'Node'; end def self.to_s; 'Node'; end columns :id, :x private def _refresh(ds); end end @dataset = @c2.dataset = @c2.dataset.with_fetch({}) @c1.dataset = @c1.dataset.with_fetch(proc { |sql| sql =~ /SELECT 1/ ? { a: 1 } : {} }) @db.sqls end it "should handle servers correctly" do @c2.one_to_many :attributes, class: @c1 n = @c2.load(id: 1234).set_server(:a) a1 = @c1.load(id: 2345).set_server(:a) a2 = @c1.load(id: 3456).set_server(:a) [a1, a2].must_equal n.add_attributes([a1, a2]) a1.values.must_equal(:node_id => 1234, id: 2345) a2.values.must_equal(:node_id => 1234, id: 3456) @db.sqls.must_equal [ 'BEGIN -- a', 'UPDATE attributes SET node_id = 1234 WHERE (id = 2345) -- a', 'UPDATE attributes SET node_id = 1234 WHERE (id = 3456) -- a', 'COMMIT -- a' ] end end
# ----- sequel-5.63.0/spec/extensions/association_pks_spec.rb -----
require_relative "spec_helper" describe "Sequel::Plugins::AssociationPks" do before do @db = Sequel.mock(:autoid=>1, :fetch=>proc do |sql| case sql when /SELECT \* FROM (?:artists|albums) WHERE \(id = (\d+)\) LIMIT 1/ {:id=>$1.to_i} when "SELECT id FROM albums WHERE (albums.artist_id = 1)" [{:id=>1}, {:id=>2}, {:id=>3}] when /SELECT tag_id FROM albums_tags WHERE \(album_id = (\d)\)/ a = [] a << {:tag_id=>1} if $1 == '1' a << {:tag_id=>2} if $1 != '3' a << {:tag_id=>3} if $1 == '2' a when /SELECT tags.id FROM tags INNER JOIN albums_tags ON \(albums_tags.tag_id = tags.id\) WHERE \(albums_tags.album_id = (\d)\)/ a = [] a << {:id=>1} if $1 == '1' a << {:id=>2} if $1 != '3' a << {:id=>3} if $1 == '2' a when "SELECT first, last FROM vocalists WHERE (vocalists.album_id = 1)" [{:first=>"F1", :last=>"L1"}, {:first=>"F2", :last=>"L2"}] when /SELECT first, last FROM albums_vocalists WHERE \(album_id = (\d)\)/ a = [] a << {:first=>"F1", :last=>"L1"} if $1 == '1' a << {:first=>"F2", :last=>"L2"} if $1 != '3' a << {:first=>"F3", :last=>"L3"} if $1 == '2' a when "SELECT id FROM instruments WHERE ((instruments.first = 'F1') AND (instruments.last = 'L1'))" [{:id=>1}, {:id=>2}] when /SELECT instrument_id
FROM vocalists_instruments WHERE \(\((?:first|last) = '?[FL1](\d)/ a = [] a << {:instrument_id=>1} if $1 == "1" a << {:instrument_id=>2} if $1 != "3" a << {:instrument_id=>3} if $1 == "2" a when "SELECT year, week FROM hits WHERE ((hits.first = 'F1') AND (hits.last = 'L1'))" [{:year=>1997, :week=>1}, {:year=>1997, :week=>2}] when /SELECT year, week FROM vocalists_hits WHERE \(\((?:first|last) = '?[FL1](\d)/, /SELECT hits.year, hits.week FROM hits INNER JOIN vocalists_hits ON \(\(vocalists_hits.(?:year|week) = hits.(?:year|week)\) AND \(vocalists_hits.(?:year|week) = hits.(?:year|week)\)\) WHERE \(\(vocalists_hits.(?:first|last) = '?[FL1](\d)/ a = [] a << {:year=>1997, :week=>1} if $1 == "1" a << {:year=>1997, :week=>2} if $1 != "3" a << {:year=>1997, :week=>3} if $1 == "2" a end end) @Artist = Class.new(Sequel::Model(@db[:artists])) @Artist.columns :id @Album = Class.new(Sequel::Model(@db[:albums])) @Album.columns :id, :artist_id @Tag = Class.new(Sequel::Model(@db[:tags])) @Tag.columns :id @Vocalist = Class.new(Sequel::Model(@db[:vocalists])) @Vocalist.columns :first, :last, :album_id @Vocalist.set_primary_key [:first, :last] @Instrument = Class.new(Sequel::Model(@db[:instruments])) @Instrument.columns :id, :first, :last @Hit = Class.new(Sequel::Model(@db[:hits])) @Hit.columns :year, :week, :first, :last @Hit.set_primary_key [:year, :week] @Artist.plugin :association_pks @Album.plugin :association_pks @Vocalist.plugin :association_pks @Artist.one_to_many :albums, :class=>@Album, :key=>:artist_id, :delay_pks=>false @Album.many_to_many :tags, :class=>@Tag, :join_table=>:albums_tags, :left_key=>:album_id, :delay_pks=>false @db.sqls end it "should return correct associated pks for one_to_many associations" do @Artist.load(:id=>1).album_pks.must_equal [1,2,3] @db.sqls.must_equal ["SELECT id FROM albums WHERE (albums.artist_id = 1)"] @Artist.load(:id=>2).album_pks.must_equal [] @db.sqls.must_equal ["SELECT id FROM albums WHERE (albums.artist_id = 2)"] end it "should return correct associated pks for many_to_many associations" do @Album.load(:id=>1).tag_pks.must_equal [1, 2] @db.sqls.must_equal ["SELECT tag_id FROM albums_tags WHERE (album_id = 1)"] @Album.load(:id=>2).tag_pks.must_equal [2, 3] @db.sqls.must_equal ["SELECT tag_id FROM albums_tags WHERE (album_id = 2)"] @Album.load(:id=>3).tag_pks.must_equal [] @db.sqls.must_equal ["SELECT tag_id FROM albums_tags WHERE (album_id = 3)"] end it "should not affect one_to_one or one_through_one associations" do @Artist.one_to_one :foo, :clone => :albums @Artist.instance_methods.wont_include :foo_pks_dataset @Album.one_through_one :bar, :clone => :tags @Album.instance_methods.wont_include :bar_pks_dataset end it "should not add setter methods for :read_only associations" do @Artist.one_to_many :foos, :clone => :albums, :read_only=>true @Artist.instance_methods.must_include :foo_pks_dataset @Artist.instance_methods.must_include :foo_pks @Artist.instance_methods.wont_include :foo_pks= @Album.many_to_many :bars, :clone => :tags, :read_only=>true @Album.instance_methods.must_include :bar_pks_dataset @Album.instance_methods.must_include :bar_pks @Album.instance_methods.wont_include :bar_pks= end it "should return correct associated pks for many_to_many associations using :association_pks_use_associated_table" do @Album.many_to_many :tags, :class=>@Tag, :join_table=>:albums_tags, :left_key=>:album_id, :delay_pks=>false, :association_pks_use_associated_table=>true @Album.load(:id=>1).tag_pks.must_equal [1, 2] @db.sqls.must_equal ["SELECT tags.id FROM tags 
INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (albums_tags.album_id = 1)"] @Album.load(:id=>2).tag_pks.must_equal [2, 3] @db.sqls.must_equal ["SELECT tags.id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (albums_tags.album_id = 2)"] @Album.load(:id=>3).tag_pks.must_equal [] @db.sqls.must_equal ["SELECT tags.id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (albums_tags.album_id = 3)"] end it "should return correct dataset for one_to_many associations" do ds = @Artist.load(:id=>1).album_pks_dataset ds.map(:id).must_equal [1,2,3] ds.sql.must_equal "SELECT id FROM albums WHERE (albums.artist_id = 1)" ds = @Artist.load(:id=>2).album_pks_dataset ds.map(:id).must_equal [] ds.sql.must_equal "SELECT id FROM albums WHERE (albums.artist_id = 2)" end it "should return correct dataset for many_to_many associations" do ds = @Album.load(:id=>1).tag_pks_dataset ds.map(:tag_id).must_equal [1, 2] ds.sql.must_equal "SELECT tag_id FROM albums_tags WHERE (album_id = 1)" ds = @Album.load(:id=>2).tag_pks_dataset ds.map(:tag_id).must_equal [2, 3] ds.sql.must_equal "SELECT tag_id FROM albums_tags WHERE (album_id = 2)" ds = @Album.load(:id=>3).tag_pks_dataset ds.map(:tag_id).must_equal [] ds.sql.must_equal "SELECT tag_id FROM albums_tags WHERE (album_id = 3)" end it "should return correct dataset for many_to_many associations using :association_pks_use_associated_table" do @Album.many_to_many :tags, :class=>@Tag, :join_table=>:albums_tags, :left_key=>:album_id, :delay_pks=>false, :association_pks_use_associated_table=>true ds = @Album.load(:id=>1).tag_pks_dataset ds.map(:id).must_equal [1, 2] ds.sql.must_equal "SELECT tags.id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (albums_tags.album_id = 1)" ds = @Album.load(:id=>2).tag_pks_dataset ds.map(:id).must_equal [2, 3] ds.sql.must_equal "SELECT tags.id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (albums_tags.album_id = 2)" ds = @Album.load(:id=>3).tag_pks_dataset ds.map(:id).must_equal [] ds.sql.must_equal "SELECT tags.id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (albums_tags.album_id = 3)" end it "should set associated pks correctly for a one_to_many association" do @Artist.load(:id=>1).album_pks = [1, 2] @db.sqls.must_equal ["UPDATE albums SET artist_id = 1 WHERE (id IN (1, 2))", "UPDATE albums SET artist_id = NULL WHERE ((albums.artist_id = 1) AND (id NOT IN (1, 2)))"] end it "should use associated class's primary key for a one_to_many association" do @Album.set_primary_key :foo @Artist.load(:id=>1).album_pks = [1, 2] @db.sqls.must_equal ["UPDATE albums SET artist_id = 1 WHERE (foo IN (1, 2))", "UPDATE albums SET artist_id = NULL WHERE ((albums.artist_id = 1) AND (foo NOT IN (1, 2)))"] end it "should set associated pks correctly for a many_to_many association" do @Album.load(:id=>2).tag_pks = [1, 3] @db.sqls.must_equal ["DELETE FROM albums_tags WHERE ((album_id = 2) AND (tag_id NOT IN (1, 3)))", 'SELECT tag_id FROM albums_tags WHERE (album_id = 2)', 'INSERT INTO albums_tags (album_id, tag_id) VALUES (2, 1)'] end it "should return correct right-side associated cpks for one_to_many associations" do @Album.one_to_many :vocalists, :class=>@Vocalist, :key=>:album_id @Album.load(:id=>1).vocalist_pks.must_equal [["F1", "L1"], ["F2", "L2"]] @Album.load(:id=>2).vocalist_pks.must_equal [] end it "should return correct right-side associated cpks for many_to_many associations" do @Album.many_to_many :vocalists, 
:class=>@Vocalist, :join_table=>:albums_vocalists, :left_key=>:album_id, :right_key=>[:first, :last] @Album.load(:id=>1).vocalist_pks.must_equal [["F1", "L1"], ["F2", "L2"]] @Album.load(:id=>2).vocalist_pks.must_equal [["F2", "L2"], ["F3", "L3"]] @Album.load(:id=>3).vocalist_pks.must_equal [] end it "should set associated right-side cpks correctly for a one_to_many association" do @Album.one_to_many :vocalists, :class=>@Vocalist, :key=>:album_id, :delay_pks=>false @Album.load(:id=>1).vocalist_pks = [["F1", "L1"], ["F2", "L2"]] @db.sqls.must_equal ["UPDATE vocalists SET album_id = 1 WHERE ((first, last) IN (('F1', 'L1'), ('F2', 'L2')))", "UPDATE vocalists SET album_id = NULL WHERE ((vocalists.album_id = 1) AND ((first, last) NOT IN (('F1', 'L1'), ('F2', 'L2'))))"] end it "should set associated right-side cpks correctly for a many_to_many association" do @Album.many_to_many :vocalists, :class=>@Vocalist, :join_table=>:albums_vocalists, :left_key=>:album_id, :right_key=>[:first, :last], :delay_pks=>false @Album.load(:id=>2).vocalist_pks = [["F1", "L1"], ["F2", "L2"]] sqls = @db.sqls sqls[0].must_equal "DELETE FROM albums_vocalists WHERE ((album_id = 2) AND ((first, last) NOT IN (('F1', 'L1'), ('F2', 'L2'))))" sqls[1].must_equal 'SELECT first, last FROM albums_vocalists WHERE (album_id = 2)' match = sqls[2].match(/INSERT INTO albums_vocalists \((.*)\) VALUES \((.*)\)/) Hash[match[1].split(', ').zip(match[2].split(', '))].must_equal("first"=>"'F1'", "last"=>"'L1'", "album_id"=>"2") sqls.length.must_equal 3 end it "should return correct associated pks for left-side cpks for one_to_many associations" do @Vocalist.one_to_many :instruments, :class=>@Instrument, :key=>[:first, :last] @Vocalist.load(:first=>'F1', :last=>'L1').instrument_pks.must_equal [1, 2] @Vocalist.load(:first=>'F2', :last=>'L2').instrument_pks.must_equal [] end it "should return correct associated pks for left-side cpks for many_to_many associations" do @Vocalist.many_to_many :instruments, :class=>@Instrument, :join_table=>:vocalists_instruments, :left_key=>[:first, :last] @Vocalist.load(:first=>'F1', :last=>'L1').instrument_pks.must_equal [1, 2] @Vocalist.load(:first=>'F2', :last=>'L2').instrument_pks.must_equal [2, 3] @Vocalist.load(:first=>'F3', :last=>'L3').instrument_pks.must_equal [] end it "should set associated pks correctly for left-side cpks for a one_to_many association" do @Vocalist.one_to_many :instruments, :class=>@Instrument, :key=>[:first, :last], :delay_pks=>false @Vocalist.load(:first=>'F1', :last=>'L1').instrument_pks = [1, 2] @db.sqls.must_equal ["UPDATE instruments SET first = 'F1', last = 'L1' WHERE (id IN (1, 2))", "UPDATE instruments SET first = NULL, last = NULL WHERE ((instruments.first = 'F1') AND (instruments.last = 'L1') AND (id NOT IN (1, 2)))"] end it "should set associated pks correctly for left-side cpks for a many_to_many association" do @Vocalist.many_to_many :instruments, :class=>@Instrument, :join_table=>:vocalists_instruments, :left_key=>[:first, :last], :delay_pks=>false @Vocalist.load(:first=>'F2', :last=>'L2').instrument_pks = [1, 2] sqls = @db.sqls sqls[0].must_equal "DELETE FROM vocalists_instruments WHERE ((first = 'F2') AND (last = 'L2') AND (instrument_id NOT IN (1, 2)))" sqls[1].must_equal "SELECT instrument_id FROM vocalists_instruments WHERE ((first = 'F2') AND (last = 'L2'))" match = sqls[2].match(/INSERT INTO vocalists_instruments \((.*)\) VALUES \((.*)\)/) Hash[match[1].split(', ').zip(match[2].split(', '))].must_equal("first"=>"'F2'", "last"=>"'L2'", "instrument_id"=>"1") 
sqls.length.must_equal 3 end it "should return correct right-side associated cpks for left-side cpks for one_to_many associations" do @Vocalist.one_to_many :hits, :class=>@Hit, :key=>[:first, :last] @Vocalist.load(:first=>'F1', :last=>'L1').hit_pks.must_equal [[1997, 1], [1997, 2]] @db.sqls.must_equal ["SELECT year, week FROM hits WHERE ((hits.first = 'F1') AND (hits.last = 'L1'))"] @Vocalist.load(:first=>'F2', :last=>'L2').hit_pks.must_equal [] @db.sqls.must_equal ["SELECT year, week FROM hits WHERE ((hits.first = 'F2') AND (hits.last = 'L2'))"] end it "should return correct right-side associated cpks for left-side cpks for many_to_many associations" do @Vocalist.many_to_many :hits, :class=>@Hit, :join_table=>:vocalists_hits, :left_key=>[:first, :last], :right_key=>[:year, :week] @Vocalist.load(:first=>'F1', :last=>'L1').hit_pks.must_equal [[1997, 1], [1997, 2]] @db.sqls.must_equal ["SELECT year, week FROM vocalists_hits WHERE ((first = 'F1') AND (last = 'L1'))"] @Vocalist.load(:first=>'F2', :last=>'L2').hit_pks.must_equal [[1997, 2], [1997, 3]] @db.sqls.must_equal ["SELECT year, week FROM vocalists_hits WHERE ((first = 'F2') AND (last = 'L2'))"] @Vocalist.load(:first=>'F3', :last=>'L3').hit_pks.must_equal [] @db.sqls.must_equal ["SELECT year, week FROM vocalists_hits WHERE ((first = 'F3') AND (last = 'L3'))"] end it "should return correct right-side associated cpks for left-side cpks for many_to_many associations when using :association_pks_use_associated_table" do @Vocalist.many_to_many :hits, :class=>@Hit, :join_table=>:vocalists_hits, :left_key=>[:first, :last], :right_key=>[:year, :week], :association_pks_use_associated_table=>true @Vocalist.load(:first=>'F1', :last=>'L1').hit_pks.must_equal [[1997, 1], [1997, 2]] @db.sqls.must_equal ["SELECT hits.year, hits.week FROM hits INNER JOIN vocalists_hits ON ((vocalists_hits.year = hits.year) AND (vocalists_hits.week = hits.week)) WHERE ((vocalists_hits.first = 'F1') AND (vocalists_hits.last = 'L1'))"] @Vocalist.load(:first=>'F2', :last=>'L2').hit_pks.must_equal [[1997, 2], [1997, 3]] @db.sqls.must_equal ["SELECT hits.year, hits.week FROM hits INNER JOIN vocalists_hits ON ((vocalists_hits.year = hits.year) AND (vocalists_hits.week = hits.week)) WHERE ((vocalists_hits.first = 'F2') AND (vocalists_hits.last = 'L2'))"] @Vocalist.load(:first=>'F3', :last=>'L3').hit_pks.must_equal [] @db.sqls.must_equal ["SELECT hits.year, hits.week FROM hits INNER JOIN vocalists_hits ON ((vocalists_hits.year = hits.year) AND (vocalists_hits.week = hits.week)) WHERE ((vocalists_hits.first = 'F3') AND (vocalists_hits.last = 'L3'))"] end it "should set associated right-side cpks correctly for left-side cpks for a one_to_many association" do @Vocalist.one_to_many :hits, :class=>@Hit, :key=>[:first, :last], :order=>:week, :delay_pks=>false @Vocalist.load(:first=>'F1', :last=>'L1').hit_pks = [[1997, 1], [1997, 2]] @db.sqls.must_equal ["UPDATE hits SET first = 'F1', last = 'L1' WHERE ((year, week) IN ((1997, 1), (1997, 2)))", "UPDATE hits SET first = NULL, last = NULL WHERE ((hits.first = 'F1') AND (hits.last = 'L1') AND ((year, week) NOT IN ((1997, 1), (1997, 2))))"] end it "should set associated right-side cpks correctly for left-side cpks for a many_to_many association" do @Vocalist.many_to_many :hits, :class=>@Hit, :join_table=>:vocalists_hits, :left_key=>[:first, :last], :right_key=>[:year, :week], :delay_pks=>false @Vocalist.load(:first=>'F2', :last=>'L2').hit_pks = [[1997, 1], [1997, 2]] sqls = @db.sqls sqls[0].must_equal "DELETE FROM vocalists_hits WHERE 
((first = 'F2') AND (last = 'L2') AND ((year, week) NOT IN ((1997, 1), (1997, 2))))" sqls[1].must_equal "SELECT year, week FROM vocalists_hits WHERE ((first = 'F2') AND (last = 'L2'))" match = sqls[2].match(/INSERT INTO vocalists_hits \((.*)\) VALUES \((.*)\)/) Hash[match[1].split(', ').zip(match[2].split(', '))].must_equal("first"=>"'F2'", "last"=>"'L2'", "year"=>"1997", "week"=>"1") sqls.length.must_equal 3 end it "should use transactions if the object is configured to use transactions" do artist = @Artist.load(:id=>1) artist.use_transactions = true artist.album_pks = [1, 2] @db.sqls.must_equal ["BEGIN", "UPDATE albums SET artist_id = 1 WHERE (id IN (1, 2))", "UPDATE albums SET artist_id = NULL WHERE ((albums.artist_id = 1) AND (id NOT IN (1, 2)))", "COMMIT"] album = @Album.load(:id=>2) album.use_transactions = true album.tag_pks = [1, 3] @db.sqls.must_equal ["BEGIN", "DELETE FROM albums_tags WHERE ((album_id = 2) AND (tag_id NOT IN (1, 3)))", 'SELECT tag_id FROM albums_tags WHERE (album_id = 2)', 'INSERT INTO albums_tags (album_id, tag_id) VALUES (2, 1)', "COMMIT"] end it "should automatically convert keys to numbers if the primary key is an integer for one_to_many associations" do @Album.db_schema[:id][:type] = :integer @Artist.load(:id=>1).album_pks = %w'1 2' @db.sqls.must_equal ["UPDATE albums SET artist_id = 1 WHERE (id IN (1, 2))", "UPDATE albums SET artist_id = NULL WHERE ((albums.artist_id = 1) AND (id NOT IN (1, 2)))"] end it "should not automatically convert keys if the primary key is not an integer for one_to_many associations" do @Album.db_schema[:id][:type] = :string @Artist.load(:id=>1).album_pks = %w'1 2' @db.sqls.must_equal ["UPDATE albums SET artist_id = 1 WHERE (id IN ('1', '2'))", "UPDATE albums SET artist_id = NULL WHERE ((albums.artist_id = 1) AND (id NOT IN ('1', '2')))"] end it "should automatically convert keys to numbers if the primary key is an integer for many_to_many associations" do @Tag.db_schema[:id][:type] = :integer @Album.load(:id=>2).tag_pks = %w'1 3' @db.sqls.must_equal ["DELETE FROM albums_tags WHERE ((album_id = 2) AND (tag_id NOT IN (1, 3)))", 'SELECT tag_id FROM albums_tags WHERE (album_id = 2)', 'INSERT INTO albums_tags (album_id, tag_id) VALUES (2, 1)'] end it "should not automatically convert keys to numbers if the primary key is an integer for many_to_many associations" do @Tag.db_schema[:id][:type] = :string @Album.load(:id=>2).tag_pks = %w'1 3' @db.sqls.must_equal [ "DELETE FROM albums_tags WHERE ((album_id = 2) AND (tag_id NOT IN ('1', '3')))", 'SELECT tag_id FROM albums_tags WHERE (album_id = 2)', 'BEGIN', "INSERT INTO albums_tags (album_id, tag_id) VALUES (2, '1')", "INSERT INTO albums_tags (album_id, tag_id) VALUES (2, '3')", 'COMMIT'] end it "should automatically convert keys to numbers for appropriate integer primary key for composite key associations" do @Hit.db_schema[:year][:type] = :integer @Hit.db_schema[:week][:type] = :integer @Vocalist.many_to_many :hits, :class=>@Hit, :join_table=>:vocalists_hits, :left_key=>[:first, :last], :right_key=>[:year, :week], :delay_pks=>false @Vocalist.load(:first=>'F2', :last=>'L2').hit_pks = [['1997', '1'], ['1997', '2']] sqls = @db.sqls sqls[0].must_equal "DELETE FROM vocalists_hits WHERE ((first = 'F2') AND (last = 'L2') AND ((year, week) NOT IN ((1997, 1), (1997, 2))))" sqls[1].must_equal "SELECT year, week FROM vocalists_hits WHERE ((first = 'F2') AND (last = 'L2'))" match = sqls[2].match(/INSERT INTO vocalists_hits \((.*)\) VALUES \((.*)\)/) Hash[match[1].split(', ').zip(match[2].split(', 
'))].must_equal("first"=>"'F2'", "last"=>"'L2'", "year"=>"1997", "week"=>"1") sqls.length.must_equal 3 @Vocalist.db_schema[:first][:type] = :integer @Vocalist.db_schema[:last][:type] = :integer @Album.one_to_many :vocalists, :class=>@Vocalist, :key=>:album_id, :delay_pks=>false @Album.load(:id=>1).vocalist_pks = [["11", "11"], ["12", "12"]] @db.sqls.must_equal ["UPDATE vocalists SET album_id = 1 WHERE ((first, last) IN ((11, 11), (12, 12)))", "UPDATE vocalists SET album_id = NULL WHERE ((vocalists.album_id = 1) AND ((first, last) NOT IN ((11, 11), (12, 12))))"] @Album.many_to_many :vocalists, :class=>@Vocalist, :join_table=>:albums_vocalists, :left_key=>:album_id, :right_key=>[:first, :last], :delay_pks=>false @Album.load(:id=>2).vocalist_pks = [["11", "11"], ["12", "12"]] sqls = @db.sqls sqls[0].must_equal "DELETE FROM albums_vocalists WHERE ((album_id = 2) AND ((first, last) NOT IN ((11, 11), (12, 12))))" sqls[1].must_equal 'SELECT first, last FROM albums_vocalists WHERE (album_id = 2)' match = sqls[3].match(/INSERT INTO albums_vocalists \((.*)\) VALUES \((.*)\)/) Hash[match[1].split(', ').zip(match[2].split(', '))].must_equal("first"=>"11", "last"=>"11", "album_id"=>"2") match = sqls[4].match(/INSERT INTO albums_vocalists \((.*)\) VALUES \((.*)\)/) Hash[match[1].split(', ').zip(match[2].split(', '))].must_equal("first"=>"12", "last"=>"12", "album_id"=>"2") sqls.length.must_equal 6 end it "should not automatically convert keys to numbers for mixed key types for composite key associations" do @Hit.db_schema[:year][:type] = :integer @Hit.db_schema[:week][:type] = :string @Vocalist.many_to_many :hits, :class=>@Hit, :join_table=>:vocalists_hits, :left_key=>[:first, :last], :right_key=>[:year, :week], :delay_pks=>false @Vocalist.load(:first=>'F2', :last=>'L2').hit_pks = [['1997', '1'], [1997, 2]] sqls = @db.sqls sqls[0].must_equal "DELETE FROM vocalists_hits WHERE ((first = 'F2') AND (last = 'L2') AND ((year, week) NOT IN (('1997', '1'), (1997, 2))))" sqls[1].must_equal "SELECT year, week FROM vocalists_hits WHERE ((first = 'F2') AND (last = 'L2'))" match = sqls[2].match(/INSERT INTO vocalists_hits \((.*)\) VALUES \((.*)\)/) Hash[match[1].split(', ').zip(match[2].split(', '))].must_equal("first"=>"'F2'", "last"=>"'L2'", "year"=>"'1997'", "week"=>"'1'") sqls.length.must_equal 3 @Vocalist.db_schema[:first][:type] = :integer @Vocalist.db_schema[:last][:type] = :string @Album.one_to_many :vocalists, :class=>@Vocalist, :key=>:album_id, :delay_pks=>false @Album.load(:id=>1).vocalist_pks = [["11", "11"], ["12", "12"]] @db.sqls.must_equal ["UPDATE vocalists SET album_id = 1 WHERE ((first, last) IN (('11', '11'), ('12', '12')))", "UPDATE vocalists SET album_id = NULL WHERE ((vocalists.album_id = 1) AND ((first, last) NOT IN (('11', '11'), ('12', '12'))))"] @Album.many_to_many :vocalists, :class=>@Vocalist, :join_table=>:albums_vocalists, :left_key=>:album_id, :right_key=>[:first, :last], :delay_pks=>false @Album.load(:id=>2).vocalist_pks = [["11", "11"], ["12", "12"]] sqls = @db.sqls sqls[0].must_equal "DELETE FROM albums_vocalists WHERE ((album_id = 2) AND ((first, last) NOT IN (('11', '11'), ('12', '12'))))" sqls[1].must_equal 'SELECT first, last FROM albums_vocalists WHERE (album_id = 2)' match = sqls[3].match(/INSERT INTO albums_vocalists \((.*)\) VALUES \((.*)\)/) Hash[match[1].split(', ').zip(match[2].split(', '))].must_equal("first"=>"'11'", "last"=>"'11'", "album_id"=>"2") match = sqls[4].match(/INSERT INTO albums_vocalists \((.*)\) VALUES \((.*)\)/) Hash[match[1].split(', 
').zip(match[2].split(', '))].must_equal("first"=>"'12'", "last"=>"'12'", "album_id"=>"2") sqls.length.must_equal 6 end it "should support saving without setting associated pks" do @Artist.new.save.must_be_instance_of @Artist end it "should handle delaying setting of association pks until after saving for existing objects, if :delay_pks=>:always association option is used" do @Artist.one_to_many :albums, :clone=>:albums, :delay_pks=>:always @Album.many_to_many :tags, :clone=>:tags, :delay_pks=>:always @Album.db_schema[:id][:type] = :integer ar = @Artist.new ar.album_pks.must_equal [] ar.album_pks = ["1","2","3"] ar.album_pks.must_equal [1,2,3] @db.sqls.must_equal [] ar.save @db.sqls.must_equal [ "INSERT INTO artists DEFAULT VALUES", "SELECT * FROM artists WHERE (id = 1) LIMIT 1", "UPDATE albums SET artist_id = 1 WHERE (id IN (1, 2, 3))", "UPDATE albums SET artist_id = NULL WHERE ((albums.artist_id = 1) AND (id NOT IN (1, 2, 3)))", ] al = @Album.new al.tag_pks.must_equal [] al.tag_pks = [1,2] al.tag_pks.must_equal [1, 2] @db.sqls.must_equal [] al.save @db.sqls.must_equal [ "INSERT INTO albums DEFAULT VALUES", "SELECT * FROM albums WHERE (id = 2) LIMIT 1", "DELETE FROM albums_tags WHERE ((album_id = 2) AND (tag_id NOT IN (1, 2)))", "SELECT tag_id FROM albums_tags WHERE (album_id = 2)", "INSERT INTO albums_tags (album_id, tag_id) VALUES (2, 1)" ] ar = @Artist.load(:id=>1) ar.album_pks.must_equal [1,2,3] @db.sqls ar.album_pks = ["2","4"] ar.album_pks.must_equal [2,4] @db.sqls.must_equal [] ar.save_changes @db.sqls.must_equal [ "UPDATE albums SET artist_id = 1 WHERE (id IN (2, 4))", "UPDATE albums SET artist_id = NULL WHERE ((albums.artist_id = 1) AND (id NOT IN (2, 4)))" ] ar.album_pks = [] @db.sqls.must_equal [] ar.save_changes @db.sqls.must_equal [ "UPDATE albums SET artist_id = NULL WHERE (artist_id = 1)" ] al = @Album.load(:id=>1) al.tag_pks.must_equal [1,2] @db.sqls al.tag_pks = [2,3] al.tag_pks.must_equal [2,3] @db.sqls.must_equal [] al.save_changes @db.sqls.must_equal [ "DELETE FROM albums_tags WHERE ((album_id = 1) AND (tag_id NOT IN (2, 3)))", "SELECT tag_id FROM albums_tags WHERE (album_id = 1)", "INSERT INTO albums_tags (album_id, tag_id) VALUES (1, 3)" ] al.tag_pks = [] @db.sqls.must_equal [] al.save_changes @db.sqls.must_equal [ "DELETE FROM albums_tags WHERE (album_id = 1)", ] end it "should clear delayed associated pks if refreshing, if :delay plugin option is used" do @Artist.one_to_many :albums, :clone=>:albums, :delay_pks=>:always @Album.many_to_many :tags, :clone=>:tags, :delay_pks=>:always ar = @Artist.load(:id=>1) ar.album_pks.must_equal [1,2,3] ar.album_pks = [2,4] ar.album_pks.must_equal [2,4] ar.refresh ar.album_pks.must_equal [1,2,3] end it "should cache getter calls if :cache_pks association option is used" do @Artist.one_to_many :albums, :class=>@Album, :key=>:artist_id, :cache_pks=>true artist = @Artist.load(:id=>1) artist.album_pks.must_equal [1,2,3] @db.sqls.must_equal ["SELECT id FROM albums WHERE (albums.artist_id = 1)"] artist.album_pks.must_equal [1,2,3] @db.sqls.must_equal [] artist.refresh @db.sqls.must_equal ["SELECT * FROM artists WHERE (id = 1) LIMIT 1"] artist.album_pks.must_equal [1,2,3] @db.sqls.must_equal ["SELECT id FROM albums WHERE (albums.artist_id = 1)"] end it "should cache getter calls if :cache_pks association option is used with :delay_pks=>false option" do @Artist.one_to_many :albums, :class=>@Album, :key=>:artist_id, :cache_pks=>true, :delay_pks=>false artist = @Artist.load(:id=>1) artist.album_pks.must_equal [1,2,3] @db.sqls.must_equal 
["SELECT id FROM albums WHERE (albums.artist_id = 1)"] artist.album_pks.must_equal [1,2,3] @db.sqls.must_equal [] artist.refresh @db.sqls.must_equal ["SELECT * FROM artists WHERE (id = 1) LIMIT 1"] artist.album_pks.must_equal [1,2,3] @db.sqls.must_equal ["SELECT id FROM albums WHERE (albums.artist_id = 1)"] end it "should ignore cache in getter calls if :refresh option is used" do @Artist.one_to_many :albums, :class=>@Album, :key=>:artist_id, :cache_pks=>true artist = @Artist.load(:id=>1) artist.album_pks.must_equal [1,2,3] @db.sqls.must_equal ["SELECT id FROM albums WHERE (albums.artist_id = 1)"] artist.album_pks(:refresh=>true).must_equal [1,2,3] @db.sqls.must_equal ["SELECT id FROM albums WHERE (albums.artist_id = 1)"] end it "should remove all values if nil given to setter and :association_pks_nil=>:remove" do @Artist.one_to_many :albums, :clone=>:albums, :association_pks_nil=>:remove ar = @Artist.load(:id=>1) ar.album_pks = nil @db.sqls.must_equal ["UPDATE albums SET artist_id = NULL WHERE (artist_id = 1)"] end it "should take no action if nil given to setter and :association_pks_nil=>:ignore" do @Artist.one_to_many :albums, :clone=>:albums, :association_pks_nil=>:ignore ar = @Artist.load(:id=>1) ar = @Artist.new ar.album_pks = nil @db.sqls.must_equal [] end it "should raise error if nil given to setter by default" do ar = @Artist.load(:id=>1) proc{ar.album_pks = nil}.must_raise Sequel::Error end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/association_proxies_spec.rb�������������������������������������������0000664�0000000�0000000�00000011526�14342141206�0023662�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel::Plugins::AssociationProxies" do before do class ::Tag < Sequel::Model end class ::Item < Sequel::Model plugin :association_proxies many_to_many :tags, :extend=>Module.new{def size; count end} end @i = Item.load(:id=>1) @t = @i.tags Item.db.reset end after do Object.send(:remove_const, :Tag) Object.send(:remove_const, :Item) end it "should send method calls to the associated object array if sent an array method" do @i.associations.has_key?(:tags).must_equal false @t.select{|x| false}.must_equal [] @i.associations.has_key?(:tags).must_equal true end if RUBY_VERSION < '2.6' deprecated "should issue deprecation warning when using filter on association proxy on ruby <2.6" do @i.associations.has_key?(:tags).must_equal false @t.filter{|x| false}.sql.must_equal "SELECT tags.* FROM tags INNER JOIN items_tags ON (items_tags.tag_id = tags.id) WHERE ((items_tags.item_id = 1) AND 'f')" @i.associations.has_key?(:tags).must_equal false end else it "should treat filter on association proxy as array method on ruby 2.6+" do @i.associations.has_key?(:tags).must_equal false @t.filter{|x| false}.must_equal [] @i.associations.has_key?(:tags).must_equal true end end it "should send method calls to the association dataset if sent a non-array method" do @i.associations.has_key?(:tags).must_equal false @t.where(:a=>1).sql.must_equal "SELECT tags.* FROM tags INNER JOIN items_tags ON 
(items_tags.tag_id = tags.id) WHERE ((items_tags.item_id = 1) AND (a = 1))" @i.associations.has_key?(:tags).must_equal false end it "should accept block to plugin to specify which methods to proxy to dataset" do Item.plugin :association_proxies do |opts| opts[:method] == :where || opts[:arguments].first.is_a?(Sequel::LiteralString) || opts[:block] end @i.associations.has_key?(:tags).must_equal false @t.where(:a=>1).sql.must_equal "SELECT tags.* FROM tags INNER JOIN items_tags ON (items_tags.tag_id = tags.id) WHERE ((items_tags.item_id = 1) AND (a = 1))" @t.where(Sequel.lit('a = 1')).sql.must_equal "SELECT tags.* FROM tags INNER JOIN items_tags ON (items_tags.tag_id = tags.id) WHERE ((items_tags.item_id = 1) AND (a = 1))" @t.where{{:a=>1}}.sql.must_equal "SELECT tags.* FROM tags INNER JOIN items_tags ON (items_tags.tag_id = tags.id) WHERE ((items_tags.item_id = 1) AND (a = 1))" @i.associations.has_key?(:tags).must_equal false Item.plugin :association_proxies do |opts| proxy_arg = opts[:proxy_argument] proxy_block = opts[:proxy_block] cached = opts[:instance].associations[opts[:reflection][:name]] is_size = opts[:method] == :size is_size && !cached && !proxy_arg[:reload] && !proxy_block end @t.size.must_equal 1 Item.db.sqls.must_equal ["SELECT count(*) AS count FROM tags INNER JOIN items_tags ON (items_tags.tag_id = tags.id) WHERE (items_tags.item_id = 1) LIMIT 1"] @i.tags{|ds| ds}.size.must_equal 1 Item.db.sqls.must_equal ["SELECT tags.* FROM tags INNER JOIN items_tags ON (items_tags.tag_id = tags.id) WHERE (items_tags.item_id = 1)"] @i.tags(:reload=>true).size.must_equal 1 Item.db.sqls.must_equal ["SELECT tags.* FROM tags INNER JOIN items_tags ON (items_tags.tag_id = tags.id) WHERE (items_tags.item_id = 1)"] @t.size.must_equal 1 Item.db.sqls.must_equal [] end it "should reload the cached association if sent an array method and the reload flag was given" do @t.select{|x| false}.must_equal [] Item.db.sqls.length.must_equal 1 @t.select{|x| false}.must_equal [] Item.db.sqls.length.must_equal 0 @i.tags(:reload=>true).select{|x| false}.must_equal [] Item.db.sqls.length.must_equal 1 @t.where(:a=>1).sql.must_equal "SELECT tags.* FROM tags INNER JOIN items_tags ON (items_tags.tag_id = tags.id) WHERE ((items_tags.item_id = 1) AND (a = 1))" Item.db.sqls.length.must_equal 0 end it "should not return a proxy object for associations that do not return an array" do Item.many_to_one :tag proc{@i.tag.where(:a=>1)}.must_raise(NoMethodError) Tag.one_to_one :item proc{Tag.load(:id=>1, :item_id=>2).item.where(:a=>1)}.must_raise(NoMethodError) end it "should work correctly in subclasses" do i = Class.new(Item).load(:id=>1) i.associations.has_key?(:tags).must_equal false i.tags.select{|x| false}.must_equal [] i.associations.has_key?(:tags).must_equal true i.tags.where(:a=>1).sql.must_equal "SELECT tags.* FROM tags INNER JOIN items_tags ON (items_tags.tag_id = tags.id) WHERE ((items_tags.item_id = 1) AND (a = 1))" end if RUBY_VERSION >= '2.7' it "should handle keywords when delegating" do Tag.send(:define_method, :to_int){1} s = String.new @t.pack('i', buffer: s) s.wont_be_empty end end end 
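# The block-based configuration of the association_proxies plugin (exercised
# in the specs above) receives a hash describing each intercepted method call;
# a truthy result sends the call to the association dataset, a falsy result
# sends it to the cached array of associated objects. The following is a
# minimal, illustrative sketch of that API, not part of the Sequel source or
# its spec suite, assuming an in-memory SQLite database and hypothetical
# items/tags tables:
require 'sequel'

DB = Sequel.sqlite
DB.create_table(:items){primary_key :id}
DB.create_table(:tags){primary_key :id}
DB.create_table(:items_tags) do
  foreign_key :item_id, :items
  foreign_key :tag_id, :tags
end

class Item < Sequel::Model
  # Proxy only :where calls to the association dataset; all other methods go
  # to the cached array of associated objects.
  plugin :association_proxies do |opts|
    opts[:method] == :where
  end
  many_to_many :tags
end

class Tag < Sequel::Model; end

item = Item.create
item.tags.where(:id=>1).sql # builds SQL against the dataset, no array load
item.tags.map(&:id)         # array method: loads and caches the association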
# ----- sequel-5.63.0/spec/extensions/async_thread_pool_plugin_spec.rb -----
require_relative 'spec_helper' describe "async_thread_pool plugin" do before do @db = Sequel.mock(:extensions=>'async_thread_pool', :fetch=>{:id=>1}, :keep_reference=>false, :num_async_threads=>1, :numrows=>1) @Album = Class.new(Sequel::Model) @Album.set_dataset(@db[:albums]) @Album.plugin :async_thread_pool @db.sqls end it 'should support creating async datasets via Model.async' do t = Thread.current t2 = nil v = @Album.all{|x| t2 = Thread.current} (Array === v).must_equal true v.first.must_be_kind_of @Album t2.must_equal t v = @Album.async.all{|x| t2 = Thread.current} (Array === v).must_equal false v.first.must_be_kind_of @Album t2.wont_be_nil t2.wont_equal t @db.sqls.must_equal ["SELECT * FROM albums", "SELECT * FROM albums"] end it 'should support async versions of destroy' do @Album.dataset.async.destroy.__value.must_equal 1 @db.sqls.must_equal ["SELECT * FROM albums", "DELETE FROM albums WHERE (id = 1)"] end it 'should support async versions of with_pk' do @Album.dataset.async.with_pk(1).__value.pk.must_equal 1 @db.sqls.must_equal ["SELECT * FROM albums WHERE (albums.id = 1) LIMIT 1"] end it 'should support async versions of with_pk!'
do @Album.dataset.async.with_pk!(1).__value.pk.must_equal 1 @db.sqls.must_equal ["SELECT * FROM albums WHERE (albums.id = 1) LIMIT 1"] end end
# ----- sequel-5.63.0/spec/extensions/async_thread_pool_spec.rb -----
require_relative 'spec_helper' {''=>false, ' with :preempt_async_thread Database option'=>true}.each do |desc, preempt_async_thread| describe "async_thread_pool extension#{desc}" do before do @db = Sequel.mock(:extensions=>'async_thread_pool', :fetch=>{:v=>1}, :keep_reference=>false, :num_async_threads=>1, :preempt_async_thread=>preempt_async_thread) end it 'should allow running queries in async threads' do t = Thread.current t2 = nil q = Queue.new q2 = Queue.new @db[:test].async.all{|x| t3 = Thread.current; q2.push(x); q.pop; t2 = t3; q2.push(nil)} t2.must_be_nil q2.pop.must_equal(:v=>1) q.push(nil) q2.pop t2.wont_be_nil t.wont_equal t2 end it 'should raise exceptions that occur in async threads when result is accessed' do v = @db[:test].with_fetch(RuntimeError).async.first proc{v.__value}.must_raise Sequel::DatabaseError end it 'should have proxy objects delegate all methods other than equal?, __id__, and __send__' do v = @db[:test].async.first v.class.must_equal Hash (!v).must_equal false (v == {:v=>1}).must_equal true (v != {:v=>1}).must_equal false v.instance_eval{__id__}.must_equal v.__value.instance_eval{__id__} v.instance_exec{__id__}.must_equal v.__value.instance_exec{__id__} v.__send__(:__id__).wont_equal v.__value.__send__(:__id__) v.respond_to?(:each).must_equal true v.__send__(:respond_to_missing?, :each).must_equal true v = @db[:test].async.with_fetch(:v=>false).get(:v) v.class.must_equal FalseClass (!v).must_equal true (v == false).must_equal true (v != false).must_equal false v.respond_to?(:each).must_equal false v.__send__(:respond_to_missing?, :each).must_equal false end it 'should work when loading async_thread_pool extension after already loaded' do @db.extension(:async_thread_pool) @db[:test].async.first.must_equal(:v=>1) end it 'should support sync methods on async datasets to not use an async thread or proxy object' do t = Thread.current t2 = nil v = @db[:test].async.sync.all{|x| t2 = Thread.current} (Array === v).must_equal true t2.must_equal t end it 'should support async loading with proxy objects on all dataset action and enumerable methods' do ds = @db[:test].async.with_autoid(1) ds.<<(:v=>1).__value.must_be_kind_of Sequel::Dataset ds.each{}.__value.must_be_kind_of Sequel::Dataset ds.fetch_rows('foo'){}.__value.must_be_nil ds.import([:v], [[1]]).__value ds.multi_insert([{:v=>1}]).__value ds.order(:v).paged_each{}.__value.must_be_kind_of Sequel::Dataset ds.where_each(:v){}.__value.must_be_kind_of Sequel::Dataset ds.truncate.__value.must_be_nil @db.sqls.must_equal [ "INSERT INTO test (v) VALUES (1)", "SELECT * FROM test", "foo", "INSERT INTO test (v) VALUES (1)", "INSERT INTO test (v) VALUES (1)", "BEGIN", "SELECT * FROM test ORDER BY v LIMIT 1000 OFFSET 0", "COMMIT", "SELECT * FROM test WHERE v", "TRUNCATE TABLE
      ds[:v].__value.must_equal(:v=>1)
      ds.all.__value.must_equal [{:v=>1}]
      ds.as_hash(:v, :v).__value.must_equal(1=>1)
      ds.avg(:v).__value.must_equal(1)
      ds.count.__value.must_equal(1)
      ds.columns.__value.must_equal []
      ds.columns!.__value.must_equal []
      ds.delete.__value.must_equal 0
      ds.empty?.__value.must_equal false
      ds.first.__value.must_equal(:v=>1)
      ds.first!.__value.must_equal(:v=>1)
      ds.get(:v).__value.must_equal 1
      ds.insert.__value.must_equal 2
      ds.order(:v).last.__value.must_equal(:v=>1)
      ds.max(:v).__value.must_equal 1
      ds.min(:v).__value.must_equal 1
      ds.select_hash(:v, :v).__value.must_equal(1=>1)
      ds.select_hash_groups(:v, :v).__value.must_equal(1=>[1])
      ds.select_map(:v).__value.must_equal([1])
      ds.select_order_map(:v).__value.must_equal([1])
      ds.single_record.__value.must_equal(:v=>1)
      ds.single_record!.__value.must_equal(:v=>1)
      ds.single_value.__value.must_equal 1
      ds.single_value!.__value.must_equal 1
      ds.sum(:v).__value.must_equal 1
      ds.to_hash(:v).__value.must_equal(1=>{:v=>1})
      ds.to_hash_groups(:v).__value.must_equal(1=>[{:v=>1}])
      ds.update(:v=>1).__value.must_equal 0
      ds.where_all(:v).__value.must_equal [{:v=>1}]
      ds.where_single_value(:v).__value.must_equal 1

      ds.all?.__value.must_equal true
      ds.any?.__value.must_equal true
      ds.drop(0).__value.must_equal [{:v=>1}]
      ds.entries.__value.must_equal [{:v=>1}]
      ds.grep_v(//).__value.must_equal [{:v=>1}] if RUBY_VERSION >= '2.3'
      ds.include?(:v=>1).__value.must_equal true
      ds.inject{}.__value.must_equal(:v=>1)
      ds.member?(:v=>1).__value.must_equal true
      ds.minmax.__value.must_equal([{:v=>1}, {:v=>1}])
      ds.none?.__value.must_equal false
      ds.one?.__value.must_equal true
      ds.reduce{}.__value.must_equal(:v=>1)
      ds.sort.__value.must_equal [{:v=>1}]
      ds.take(1).__value.must_equal [{:v=>1}]
      ds.tally.__value.must_equal({:v=>1}=>1) if RUBY_VERSION >= '2.7'
      ds.to_a.__value.must_equal [{:v=>1}]
      ds.to_h{|x| [x[:v], x]}.__value.must_equal(1=>{:v=>1}) if RUBY_VERSION >= '2.6'
      ds.uniq.__value.must_equal [{:v=>1}] if RUBY_VERSION >= '2.4'
      ds.zip.__value.must_equal [[{:v=>1}]]

      ds.collect{|x| x}.__value.must_equal [{:v=>1}]
      ds.collect_concat{|x| x}.__value.must_equal [{:v=>1}]
      ds.detect{true}.__value.must_equal(:v=>1)
      ds.drop_while{false}.__value.must_equal [{:v=>1}]
      ds.each_with_object(0){|x| x}.__value.must_equal 0
      ds.filter_map{|x| x}.__value.must_equal [{:v=>1}] if RUBY_VERSION >= '2.7'
      ds.find{true}.__value.must_equal(:v=>1)
      ds.find_all{true}.__value.must_equal [{:v=>1}]
      ds.find_index{true}.__value.must_equal 0
      ds.flat_map{|x| x}.__value.must_equal [{:v=>1}]
      ds.max_by{}.__value.must_equal(:v=>1)
      ds.min_by{}.__value.must_equal(:v=>1)
      ds.minmax_by{}.__value.must_equal [{:v=>1}, {:v=>1}]
      ds.partition{true}.__value.must_equal [[{:v=>1}], []]
      ds.reject{false}.__value.must_equal [{:v=>1}]
      ds.sort_by{}.__value.must_equal [{:v=>1}]
      ds.take_while{true}.__value.must_equal [{:v=>1}]
      @db.sqls

      if RUBY_VERSION >= '3.1'
        ds.each_cons(1){}.__value.must_be_kind_of Sequel::Dataset
        ds.each_slice(1){}.__value.must_be_kind_of Sequel::Dataset
      else
        ds.each_cons(1){}.__value.must_be_nil
        ds.each_slice(1){}.__value.must_be_nil
      end
      ds.each_entry{}.__value.must_be_kind_of Sequel::Dataset
      ds.each_with_index{}.__value.must_be_kind_of Sequel::Dataset
      ds.reverse_each{}.__value.must_be_kind_of Sequel::Dataset
      @db.sqls.must_equal [
        "SELECT * FROM test",
        "SELECT * FROM test",
        "SELECT * FROM test",
        "SELECT * FROM test",
        "SELECT * FROM test",
      ]

      (Enumerator === ds.collect).must_equal true
      (Enumerator === ds.collect_concat).must_equal true
      (Enumerator === ds.detect).must_equal true
      (Enumerator === ds.drop_while).must_equal true
      (Enumerator === ds.each_cons(1)).must_equal true
      (Enumerator === ds.each_entry).must_equal true
      (Enumerator === ds.each_slice(1)).must_equal true
      (Enumerator === ds.each_with_index).must_equal true
      (Enumerator === ds.each_with_object(1)).must_equal true
      (Enumerator === ds.filter_map).must_equal true if RUBY_VERSION >= '2.7'
      (Enumerator === ds.find).must_equal true
      (Enumerator === ds.find_all).must_equal true
      (Enumerator === ds.find_index).must_equal true
      (Enumerator === ds.flat_map).must_equal true
      (Enumerator === ds.max_by).must_equal true
      (Enumerator === ds.min_by).must_equal true
      (Enumerator === ds.minmax_by).must_equal true
      (Enumerator === ds.partition).must_equal true
      (Enumerator === ds.reject).must_equal true
      (Enumerator === ds.reverse_each).must_equal true
      (Enumerator === ds.sort_by).must_equal true
      (Enumerator === ds.take_while).must_equal true
      (Enumerator === ds.order(:v).paged_each).must_equal true

      ds.map(:v).__value.must_equal [1]
      ds.map{|x| x}.__value.must_equal [{:v=>1}]
      (Enumerator === ds.map).must_equal true
    end
  end
end

describe "async_thread_pool extension" do
  before do
    @db = Sequel.mock(:extensions=>'async_thread_pool', :fetch=>{:v=>1}, :keep_reference=>false, :num_async_threads=>1)
  end

  it 'should perform async work before returning value' do
    t = Thread.current
    t2 = nil
    v = @db[:test].async.all{|x| t2 = Thread.current}
    v.must_equal [{:v=>1}]
    t2.wont_be_nil
    t.wont_equal t2
    v.equal?(v.to_a).must_equal false
    (Array === v).must_equal false
    v.__value.equal?(v.to_a).must_equal true
    (Array === v.__value).must_equal true
    if RUBY_VERSION >= '2.2'
      v.itself.equal?(v.to_a).must_equal true
      (Array === v.itself).must_equal true
    end
  end

  it 'should not allow calling the __run_block multiple times' do
    v = Sequel::Database::AsyncThreadPool::Proxy.new{1}
    v.__send__(:__run_block)
    proc{v.__send__(:__run_block)}.must_raise Sequel::Error
  end

  it 'should not allow creating proxy objects without a block' do
    proc{Sequel::Database::AsyncThreadPool::Proxy.new}.must_raise Sequel::Error
  end
end

describe "async_thread_pool extension with :preempt_async_thread Database option" do
  before do
    @db = Sequel.mock(:extensions=>'async_thread_pool', :fetch=>{:v=>1}, :keep_reference=>false, :num_async_threads=>1, :preempt_async_thread=>true)
  end

  it 'should allow preempting async threads' do
    t = Thread.current
    t2 = nil
    t4 = nil
    q = Queue.new
    q2 = Queue.new
    @db[:test].async.all{|x| t3 = Thread.current; q2.push(x); q.pop; t2 = t3; q2.push(nil)}
    t2.must_be_nil
    q2.pop.must_equal(:v=>1)
    @db[:test].async.all{|x| t4 = Thread.current}.__value
    t4.must_equal t
    q.push(nil)
    q2.pop
    t2.wont_be_nil
    t.wont_equal t2
  end
end

describe "async_thread_pool extension" do
  it "should raise an error if trying to load the async_thread_pool extension into a single connection pool" do
    db = Sequel.mock(:keep_reference=>false, :single_threaded=>true)
    proc{db.extension(:async_thread_pool)}.must_raise Sequel::Error
  end

  it "should use :num_async_threads as size of async thread pool" do
    3.times do |i|
      Sequel.mock(:extensions=>'async_thread_pool', :num_async_threads=>i+1, :max_connections=>4).instance_variable_get(:@async_thread_pool).size.must_equal(i+1)
    end
  end

  it "should use :max_connections as size of async thread pool if :num_async_threads is not given" do
    3.times do |i|
      Sequel.mock(:extensions=>'async_thread_pool', :max_connections=>i+1).instance_variable_get(:@async_thread_pool).size.must_equal(i+1)
    end
  end

  it "should use 4 as size of async thread pool if :num_async_threads and :max_connections is not given" do
    Sequel.mock(:extensions=>'async_thread_pool').instance_variable_get(:@async_thread_pool).size.must_equal 4
  end

  it "should raise if the number of async threads is not positive" do
    proc{Sequel.mock(:extensions=>'async_thread_pool', :num_async_threads=>0)}.must_raise Sequel::Error
  end
end
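# A Database-level usage sketch for the extension specced above; against a
# real adapter, independent queries can overlap (table names and fetch value
# are illustrative assumptions):
require 'sequel'

db = Sequel.mock(:extensions=>'async_thread_pool', :num_async_threads=>2, :fetch=>{:v=>1})
fast = db[:a].async.all     # returns a proxy immediately
slow = db[:b].async.get(:v) # may run concurrently with the query above
fast.length                 # => 1; blocks until the async work completes
slow                        # proxy materializes on first method call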
sequel-5.63.0/spec/extensions/auto_literal_strings_spec.rb

require_relative "spec_helper"

describe "Dataset#where" do
  before do
    @dataset = Sequel.mock[:test].extension(:auto_literal_strings)
  end

  it "should work with a string with placeholders and arguments for those placeholders" do
    @dataset.where('price < ? AND id in ?', 100, [1, 2, 3]).select_sql.must_equal "SELECT * FROM test WHERE (price < 100 AND id in (1, 2, 3))"
  end

  it "should use default behavior for array of conditions" do
    @dataset.where([[:a, 1], [:b, 2]]).sql.must_equal 'SELECT * FROM test WHERE ((a = 1) AND (b = 2))'
  end

  it "should not modify passed array with placeholders" do
    a = ['price < ? AND id in ?', 100, 1, 2, 3]
    b = a.dup
    @dataset.where(a)
    b.must_equal a
  end

  it "should work with strings (custom SQL expressions)" do
    @dataset.where('(a = 1 AND b = 2)').select_sql.must_equal "SELECT * FROM test WHERE ((a = 1 AND b = 2))"
  end

  it "should work with a string with named placeholders and a hash of placeholder value arguments" do
    @dataset.where('price < :price AND id in :ids', :price=>100, :ids=>[1, 2, 3]).select_sql.must_equal "SELECT * FROM test WHERE (price < 100 AND id in (1, 2, 3))"
  end

  it "should not modify passed array with named placeholders" do
    a = ['price < :price AND id in :ids', {:price=>100}]
    b = a.dup
    @dataset.where(a)
    b.must_equal a
  end

  it "should not replace named placeholders that don't exist in the hash" do
    @dataset.where('price < :price AND id in :ids', :price=>100).select_sql.must_equal "SELECT * FROM test WHERE (price < 100 AND id in :ids)"
  end

  it "should raise an error for a mismatched number of placeholders" do
    proc{@dataset.where('price < ? AND id in ?', 100).select_sql}.must_raise(Sequel::Error)
    proc{@dataset.where('price < ? AND id in ?', 100, [1, 2, 3], 4).select_sql}.must_raise(Sequel::Error)
  end

  it "should handle partial names" do
    @dataset.where('price < :price AND id = :p', :p=>2, :price=>100).select_sql.must_equal "SELECT * FROM test WHERE (price < 100 AND id = 2)"
  end

  it "should handle ::cast syntax when no parameters are supplied" do
    @dataset.where('price::float = 10', {}).select_sql.must_equal "SELECT * FROM test WHERE (price::float = 10)"
    @dataset.where('price::float ? 10', {}).select_sql.must_equal "SELECT * FROM test WHERE (price::float ? 10)"
  end
  it "should affect select, delete and update statements when using strings" do
    @d2 = @dataset.where('region = ?', 'Asia')
    @d2.select_sql.must_equal "SELECT * FROM test WHERE (region = 'Asia')"
    @d2.delete_sql.must_equal "DELETE FROM test WHERE (region = 'Asia')"
    @d2.update_sql(:GDP => 0).must_equal "UPDATE test SET GDP = 0 WHERE (region = 'Asia')"

    @d3 = @dataset.where("a = 1")
    @d3.select_sql.must_equal "SELECT * FROM test WHERE (a = 1)"
    @d3.delete_sql.must_equal "DELETE FROM test WHERE (a = 1)"
    @d3.update_sql(:GDP => 0).must_equal "UPDATE test SET GDP = 0 WHERE (a = 1)"
  end

  it "should be composable using AND operator (for scoping) when using strings" do
    @d2 = @dataset.where('region = ?', 'Asia')
    @d2.where('GDP > ?', 1000).select_sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND (GDP > 1000))"
    @d2.where(:name => ['Japan', 'China']).select_sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND (name IN ('Japan', 'China')))"
    @d2.where('GDP > ?').select_sql.must_equal "SELECT * FROM test WHERE ((region = 'Asia') AND (GDP > ?))"

    @d3 = @dataset.where("a = 1")
    @d3.where('b = 2').select_sql.must_equal "SELECT * FROM test WHERE ((a = 1) AND (b = 2))"
    @d3.where(:c => 3).select_sql.must_equal "SELECT * FROM test WHERE ((a = 1) AND (c = 3))"
    @d3.where('d = ?', 4).select_sql.must_equal "SELECT * FROM test WHERE ((a = 1) AND (d = 4))"
  end

  it "should be composable using AND operator (for scoping) with block and string" do
    @dataset.where("a = 1").where{e < 5}.select_sql.must_equal "SELECT * FROM test WHERE ((a = 1) AND (e < 5))"
  end
end

describe "Dataset #first and #last" do
  before do
    @d = Sequel.mock(:fetch=>proc{|s| {:s=>s}})[:test].extension(:auto_literal_strings)
  end

  it "should combine block and standard argument filters if argument is not an Integer" do
    ds = @d.order(:name).freeze
    5.times do
      @d.first('y = 25'){z > 26}.must_equal(:s=>'SELECT * FROM test WHERE ((y = 25) AND (z > 26)) LIMIT 1')
      ds.last('y = 16'){z > 26}.must_equal(:s=>'SELECT * FROM test WHERE ((y = 16) AND (z > 26)) ORDER BY name DESC LIMIT 1')
      @d.first('y = ?', 25){z > 26}.must_equal(:s=>'SELECT * FROM test WHERE ((y = 25) AND (z > 26)) LIMIT 1')
      ds.last('y = ?', 16){z > 26}.must_equal(:s=>'SELECT * FROM test WHERE ((y = 16) AND (z > 26)) ORDER BY name DESC LIMIT 1')
    end
  end
end

describe "Dataset#exclude" do
  before do
    @dataset = Sequel.mock.dataset.from(:test).extension(:auto_literal_strings)
  end

  it "should parenthesize a single string condition correctly" do
    @dataset.exclude("region = 'Asia' AND name = 'Japan'").select_sql.must_equal "SELECT * FROM test WHERE NOT (region = 'Asia' AND name = 'Japan')"
  end
  it "should parenthesize an array condition correctly" do
    @dataset.exclude('region = ? AND name = ?', 'Asia', 'Japan').select_sql.must_equal "SELECT * FROM test WHERE NOT (region = 'Asia' AND name = 'Japan')"
  end
end

describe "Dataset#or" do
  before do
    @dataset = Sequel.mock.dataset.from(:test).extension(:auto_literal_strings)
    @d1 = @dataset.where(:x => 1)
  end

  it "should accept string filters" do
    @d1.or('y > ?', 2).sql.must_equal 'SELECT * FROM test WHERE ((x = 1) OR (y > 2))'
  end
end

describe "Dataset#having" do
  before do
    @dataset = Sequel.mock.dataset.from(:test).extension(:auto_literal_strings)
    @grouped = @dataset.group(:region).select(:region, Sequel.function(:sum, :population), Sequel.function(:avg, :gdp))
  end

  it "should handle string arguments" do
    @grouped.having('sum(population) > 10').select_sql.must_equal "SELECT region, sum(population), avg(gdp) FROM test GROUP BY region HAVING (sum(population) > 10)"
  end
end

describe "Dataset#join_table" do
  before do
    @d = Sequel.mock.dataset.from(:items).with_quote_identifiers(true).extension(:auto_literal_strings)
  end

  it "should support using a string as the join condition" do
    @d.join(:categories, "c.item_id = items.id", :table_alias=>:c).sql.must_equal 'SELECT * FROM "items" INNER JOIN "categories" AS "c" ON (c.item_id = items.id)'
  end
end

describe "Dataset prepared statements and bound variables " do
  before do
    @db = Sequel.mock
    @ds = @db[:items].with_extend{def insert_select_sql(*v) "#{insert_sql(*v)} RETURNING *" end}.extension(:auto_literal_strings)
  end

  it "should handle literal strings" do
    @ds.filter("num = ?", :$n).call(:select, :n=>1)
    @db.sqls.must_equal ['SELECT * FROM items WHERE (num = 1)']
  end

  it "should handle subselects with strings" do
    @ds.filter(:$b).filter(:num=>@ds.select(:num).filter("num = ?", :$n)).call(:select, :n=>1, :b=>0)
    @db.sqls.must_equal ['SELECT * FROM items WHERE (0 AND (num IN (SELECT num FROM items WHERE (num = 1))))']
  end
end

describe "Dataset#update_sql" do
  before do
    @ds = Sequel.mock.dataset.from(:items).extension(:auto_literal_strings)
  end

  it "should accept strings" do
    @ds.update_sql("a = b").must_equal "UPDATE items SET a = b"
  end

  it "should accept literal strings" do
    @ds.update_sql(Sequel.lit("a = b")).must_equal "UPDATE items SET a = b"
  end

  it "should accept hash" do
    @ds.update_sql(:c => 'd').must_equal "UPDATE items SET c = 'd'"
  end
end

describe "Dataset::PlaceholderLiteralizer" do
  before do
    @c = Sequel::Dataset::PlaceholderLiteralizer
    @db = Sequel.mock
    @ds = @db[:items].extension(:auto_literal_strings)
    @h = {:id=>1}
    @ds.db.fetch = @h
  end

  it "should handle calls with placeholders used as filter arguments" do
    loader = @c.loader(@ds){|pl, ds| ds.where(pl.arg)}
    loader.first(:id=>1).must_equal @h
    loader.first(Sequel.expr{a(b)}).must_equal @h
    loader.first("a = 1").must_equal @h
    @db.sqls.must_equal ["SELECT * FROM items WHERE (id = 1)", "SELECT * FROM items WHERE a(b)", "SELECT * FROM items WHERE (a = 1)"]
  end

  it "should handle calls with a placeholder used multiple times in different capacities" do
    loader = @c.loader(@ds){|pl, ds| a = pl.arg; ds.where(a).where(:b=>a)}
    loader.first("a = 1").must_equal @h
    loader.first(["a = ?", 2]).must_equal @h
    @db.sqls.must_equal ["SELECT * FROM items WHERE ((a = 1) AND (b = 'a = 1'))", "SELECT * FROM items WHERE ((a = 2) AND (b IN ('a = ?', 2)))"]
  end
end
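# A usage sketch for the dataset extension specced above: with it loaded,
# plain strings passed to filter methods are treated as literal SQL fragments
# (without it, Sequel requires Sequel.lit for that):
require 'sequel'

db = Sequel.mock
ds = db[:items].extension(:auto_literal_strings)
ds.where('price < ?', 100).sql
# => "SELECT * FROM items WHERE (price < 100)"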
sequel-5.63.0/spec/extensions/auto_restrict_eager_graph_spec.rb

require_relative "spec_helper"

describe "auto_restrict_eager_graph plugin" do
  before do
    @db = Sequel.mock
    @c = Class.new(Sequel::Model(@db[:items]))
    @c.plugin :auto_restrict_eager_graph
  end

  it "should restrict eager_graph for associations with blocks without :graph_* options" do
    @c.many_to_one :cs, :class=>@c do |ds| ds.where(:x) end
    proc{@c.eager_graph(:cs)}.must_raise Sequel::Error
  end

  it "should not restrict eager_graph for associations without blocks" do
    @c.many_to_one :cs, :class=>@c
    @c.eager_graph(:cs).sql.must_equal "SELECT * FROM items LEFT OUTER JOIN items AS cs ON (cs.id = items.cs_id)"
  end

  it "should not restrict eager_graph for associations with :graph_* options" do
    @c.many_to_one :cs, :class=>@c, :graph_conditions=>{:x=>true} do |ds| ds.where(:x) end
    @c.eager_graph(:cs).sql.must_equal "SELECT * FROM items LEFT OUTER JOIN items AS cs ON ((cs.id = items.cs_id) AND (cs.x IS TRUE))"
  end

  it "should not restrict eager_graph for associations with :allow_eager_graph option" do
    @c.many_to_one :cs, :class=>@c, :allow_eager_graph=>true do |ds| ds.where(:x) end
    @c.eager_graph(:cs).sql.must_equal "SELECT * FROM items LEFT OUTER JOIN items AS cs ON (cs.id = items.cs_id)"
  end
end
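# A usage sketch for the plugin specced above: associations defined with a
# block (and no :graph_* options) raise when passed to eager_graph, instead
# of silently joining without the block's filter (model setup is illustrative):
require 'sequel'

db = Sequel.mock
klass = Class.new(Sequel::Model(db[:items]))
klass.plugin :auto_restrict_eager_graph
klass.many_to_one :parent, :class=>klass do |ds| ds.where(:active) end
klass.eager(:parent)         # fine: the block still applies on eager loads
begin
  klass.eager_graph(:parent) # raises, as the block cannot be graphed
rescue Sequel::Error
end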
sequel-5.63.0/spec/extensions/auto_validations_constraint_validations_presence_message_spec.rb

require_relative "spec_helper"

describe "Sequel::Plugins::AutoValidations" do
  before do
    db = Sequel.mock(:fetch=>proc{|sql| sql =~ /a{51}/ ? {:v=>0} : {:v=>1}})
    def db.schema_parse_table(*) true; end
    def db.schema(t, *)
      t = t.first_source if t.is_a?(Sequel::Dataset)
      return [] if t != :test
      [[:id, {:primary_key=>true, :type=>:integer, :allow_null=>false}],
       [:name, {:primary_key=>false, :type=>:string, :allow_null=>false, :max_length=>50}],
       [:d, {:primary_key=>false, :type=>:date, :allow_null=>false}]]
    end
    def db.supports_index_parsing?() true end
    db.singleton_class.send(:alias_method, :supports_index_parsing?, :supports_index_parsing?)
    def db.indexes(t, *)
      raise if t.is_a?(Sequel::Dataset)
      return [] if t != :test
      {:a=>{:columns=>[:name, :num], :unique=>true}, :b=>{:columns=>[:num], :unique=>false}}
    end
    db.singleton_class.send(:alias_method, :indexes, :indexes)

    @c = Class.new(Sequel::Model(db[:test]))
    @c.send(:def_column_accessor, :id, :name, :num, :d, :nnd)
    @c.raise_on_typecast_failure = false
    @m = @c.new
    @c.db.sqls
    @c.db.fetch = {:table=>'test', :message=>'this is a bad column', :allow_nil=>true, :constraint_name=>nil, :validation_type=>'presence', :argument=>nil, :column=>'name'}
  end

  [true, false, nil].each do |before|
    [true, false].each do |set_dataset|
      it "should use constraint validations presence message if #{before ? 'constraint_validations' : 'auto_validations'} is loaded first#{' when using set_dataset' if set_dataset}" do
        if before
          @c.plugin :constraint_validations
          @c.plugin :auto_validations
        elsif !before.nil?
          @c.plugin :auto_validations
          @c.plugin :constraint_validations
        end
        @c.plugin :auto_validations_constraint_validations_presence_message

        if set_dataset
          @c.set_dataset :test
        end

        @c.db.sqls.must_equal ["SELECT * FROM sequel_constraint_validations"]
        @c.db.constraint_validations.must_equal("test"=>[{:allow_nil=>true, :constraint_name=>nil, :message=>'this is a bad column', :validation_type=>"presence", :column=>"name", :argument=>nil, :table=>"test"}])
        @c.constraint_validations.must_equal [[:validates_presence, :name, {:message=>'this is a bad column', :allow_nil=>false}]]
        @c.constraint_validation_reflections.must_equal(:name=>[[:presence, {:message=>'this is a bad column', :allow_nil=>true}]])

        @m.name = ''
        @m.valid?.must_equal false
        @m.errors.must_equal(:d=>["is not present"], :name=>["this is a bad column"])

        @m.name = nil
        @m.valid?.must_equal false
        @m.errors.must_equal(:d=>["is not present"], :name=>["this is a bad column"])
      end
    end
  end

  it "should not override auto_validations message if constraint_validations doesn't have a message" do
    @c.db.fetch = {:table=>'test', :message=>nil, :allow_nil=>true, :constraint_name=>nil, :validation_type=>'presence', :argument=>nil, :column=>'name'}
    @c.plugin :auto_validations_constraint_validations_presence_message
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not present"], :name=>["is not present"])
  end

  it "should not override auto_validations message if constraint_validations does not have a message and does not allow nil values" do
    @c.db.fetch = {:table=>'test', :message=>nil, :allow_nil=>false, :constraint_name=>nil, :validation_type=>'presence', :argument=>nil, :column=>'name'}
    @c.plugin :auto_validations_constraint_validations_presence_message
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not present"], :name=>["is not present", "is not present"])
  end

  it "should not override auto_validations message if constraint_validations does not allow nil values" do
    @c.db.fetch = {:table=>'test', :message=>'this is a bad column', :allow_nil=>false, :constraint_name=>nil, :validation_type=>'presence', :argument=>nil, :column=>'name'}
    @c.plugin :auto_validations_constraint_validations_presence_message
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not present"], :name=>["is not present", "this is a bad column"])
  end

  it "should not override auto_validations message if auto_validations plugin uses a not_null message" do
    @c.plugin :auto_validations, :not_null_opts=>{:message=>'default'}
    @c.plugin :auto_validations_constraint_validations_presence_message
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["default"], :name=>["default"])
  end

  it "should not override auto_validations message if auto_validations plugin uses an explicit_not_null message" do
    @c.db.singleton_class.send(:remove_method, :schema)
    def (@c.db).schema(t, *)
      t = t.first_source if t.is_a?(Sequel::Dataset)
      return [] if t != :test
      [[:id, {:primary_key=>true, :type=>:integer, :allow_null=>false}],
       [:name, {:primary_key=>false, :type=>:string, :allow_null=>false, :max_length=>50, :default=>'a'}],
       [:d, {:primary_key=>false, :type=>:date, :allow_null=>false, :default=>'2000-10-10'}]]
    end
    @c.set_dataset :test
    @c.plugin :auto_validations, :explicit_not_null_opts=>{:message=>'default'}
    @c.plugin :auto_validations_constraint_validations_presence_message
    @m.d = nil
    @m.name = nil
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["default"], :name=>["default"])
  end

  it "should handle case where there isn't a NOT NULL constraint on the column" do
    @c.db.singleton_class.send(:remove_method, :schema)
    def (@c.db).schema(t, *)
      t = t.first_source if t.is_a?(Sequel::Dataset)
      return [] if t != :test
      [[:id, {:primary_key=>true, :type=>:integer, :allow_null=>false}],
       [:name, {:primary_key=>false, :type=>:string, :allow_null=>true, :max_length=>50, :default=>'a'}],
       [:d, {:primary_key=>false, :type=>:date, :allow_null=>true, :default=>'2000-10-10'}]]
    end
    @c.set_dataset :test
    @c.plugin :auto_validations_constraint_validations_presence_message
    @m.valid?.must_equal true
  end
end
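# A sketch of how the three plugins specced above are typically combined in an
# application (assumes constraint validation metadata was created in a
# migration via create_constraint_validations_table and validate blocks):
#
#   Sequel::Model.plugin :auto_validations
#   Sequel::Model.plugin :constraint_validations
#   Sequel::Model.plugin :auto_validations_constraint_validations_presence_message
#
# With all three loaded, a NOT NULL column whose presence constraint carries a
# custom :message reports that message instead of the generic "is not present".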
sequel-5.63.0/spec/extensions/auto_validations_spec.rb

require_relative "spec_helper"

describe "Sequel::Plugins::AutoValidations" do
  before do
    db = Sequel.mock(:fetch=>proc{|sql| sql =~ /'a{51}'|'uniq'/ ? {:v=>0} : {:v=>1}})
    def db.schema_parse_table(*) true; end
    def db.schema(t, *)
      t = t.first_source if t.is_a?(Sequel::Dataset)
      return [] if t != :test
      [[:id, {:primary_key=>true, :type=>:integer, :allow_null=>false}],
       [:name, {:primary_key=>false, :type=>:string, :allow_null=>false, :max_length=>50}],
       [:num, {:primary_key=>false, :type=>:integer, :allow_null=>true, :min_value=>-100000, :max_value=>100000}],
       [:d, {:primary_key=>false, :type=>:date, :allow_null=>false}],
       [:nnd, {:primary_key=>false, :type=>:string, :allow_null=>false, :default=>'nnd'}]]
    end
    def db.supports_index_parsing?() true end
    db.singleton_class.send(:alias_method, :supports_index_parsing?, :supports_index_parsing?)
    def db.indexes(t, *)
      raise if t.is_a?(Sequel::Dataset)
      return [] if t != :test
      {:a=>{:columns=>[:name, :num], :unique=>true}, :b=>{:columns=>[:num], :unique=>false}}
    end
    db.singleton_class.send(:alias_method, :indexes, :indexes)

    @c = Class.new(Sequel::Model(db[:test]))
    @c.send(:def_column_accessor, :id, :name, :num, :d, :nnd)
    @c.raise_on_typecast_failure = false
    @c.plugin :auto_validations
    @m = @c.new
    db.sqls
  end

  it "should have automatically created validations" do
    @m.num = 100001
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not present"], :name=>["is not present"], :num=>["is greater than maximum allowed value"])

    @m.set(:num=>-100001, :name=>"")
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not present"], :num=>["is less than minimum allowed value"])

    @m.set(:d=>'/', :num=>'a', :name=>"a\0b")
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not a valid date"], :num=>["is not a valid integer"], :name=>["contains a null byte"])

    @m.set(:d=>Date.today, :num=>1, :name=>'')
    @m.valid?.must_equal false
    @m.errors.must_equal([:name, :num]=>["is already taken"])

    @m.set(:name=>'a'*51)
    @m.valid?.must_equal false
    @m.errors.must_equal(:name=>["is longer than 50 characters"])
  end

  it "should add errors to columns that already have errors by default" do
    def @m.validate
      errors.add(:name, 'no good')
      super
    end
    @m.d = Date.today
    @m.valid?.must_equal false
    @m.errors.must_equal(:name=>['no good', "is not present"])
  end

  it "should not add errors to columns that already have errors when using :skip_invalid plugin option" do
    @c.plugin :auto_validations, :skip_invalid=>true
    def @m.validate
      errors.add(:name, 'no good')
      super
    end
    @m.d = Date.today
    @m.valid?.must_equal false
    @m.errors.must_equal(:name=>['no good'])
  end

  it "should handle simple unique indexes correctly" do
    def (@c.db).indexes(t, *)
      raise if t.is_a?(Sequel::Dataset)
      return [] if t != :test
      {:a=>{:columns=>[:name], :unique=>true}}
    end
    @c.plugin :auto_validations
    @m.set(:name=>'foo', :d=>Date.today)
    @m.valid?.must_equal false
    @m.errors.must_equal(:name=>["is already taken"])
  end

  it "should validate using the underlying column values" do
    @c.send(:define_method, :name){super() * 2}
    @c.db.fetch = {:v=>0}
    @m.set(:d=>Date.today, :num=>1, :name=>'b'*26)
    @m.valid?.must_equal true
  end

  it "should handle databases that don't support index parsing" do
    def (@m.db).supports_index_parsing?() false end
    @m.model.send(:setup_auto_validations)
    @m.set(:d=>Date.today, :num=>1, :name=>'1')
    @m.valid?.must_equal true
  end

  it "should handle models that select from subqueries" do
    @c.set_dataset @c.dataset.from_self
    @c.send(:setup_auto_validations)
  end

  it "should support :not_null=>:presence option" do
    @c.plugin :auto_validations, :not_null=>:presence
    @m.set(:d=>Date.today, :num=>'')
    @m.valid?.must_equal false
    @m.errors.must_equal(:name=>["is not present"])
  end

  it "should automatically validate explicit nil values for columns with not nil defaults" do
    @m.set(:d=>Date.today, :name=>1, :nnd=>nil)
    @m.id = nil
    @m.valid?.must_equal false
    @m.errors.must_equal(:id=>["is not present"], :nnd=>["is not present"])
  end

  it "should allow skipping validations by type" do
    @c = Class.new(@c)
    @m = @c.new
    @m.skip_auto_validations(:not_null) do
      @m.valid?.must_equal true
      @m.nnd = nil
      @m.valid?.must_equal true
    end
    @m.set(:nnd => 'nnd')
    @c.skip_auto_validations(:not_null)
    @m.valid?.must_equal true
    @m.nnd = nil
    @m.valid?.must_equal true

    @m.set(:d=>'/', :num=>'a', :name=>'1')
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not a valid date"], :num=>["is not a valid integer"])
    @m.skip_auto_validations(:types, :unique) do
      @m.valid?.must_equal true
    end
    @m.skip_auto_validations(:types) do
      @m.valid?.must_equal false
      @m.errors.must_equal([:name, :num]=>["is already taken"])
    end
    @c.skip_auto_validations(:types)
    @m.valid?.must_equal false
    @m.errors.must_equal([:name, :num]=>["is already taken"])

    @m.skip_auto_validations(:unique) do
      @m.valid?.must_equal true
    end
    @c.skip_auto_validations(:unique)
    @m.valid?.must_equal true

    @m.set(:name=>'a'*51)
    @m.valid?.must_equal false
    @m.errors.must_equal(:name=>["is longer than 50 characters"])
    @m.skip_auto_validations(:max_length) do
      @m.valid?.must_equal true
    end
    @c.skip_auto_validations(:max_length)
    @m.valid?.must_equal true
  end

  it "should allow skipping all auto validations" do
    @c = Class.new(@c)
    @m = @c.new
    @m.skip_auto_validations(:all) do
      @m.valid?.must_equal true
      @m.set(:d=>'/', :num=>'a', :name=>'1')
      @m.valid?.must_equal true
      @m.set(:name=>'a'*51)
      @m.valid?.must_equal true
    end
    @m = @c.new
    @c.skip_auto_validations(:all)
    @m.valid?.must_equal true
    @m.set(:d=>'/', :num=>'a', :name=>'1')
    @m.valid?.must_equal true
    @m.set(:name=>'a'*51)
    @m.valid?.must_equal true
  end

  it "should skip min/max value validations when skipping type validations" do
    @m.set(:d=>Date.today, :num=>100001, :name=>'uniq')
    @m.valid?.must_equal false
    @m.skip_auto_validations(:types) do
      @m.valid?.must_equal true
    end
    @m.num = -100001
    @m.valid?.must_equal false
    @m.skip_auto_validations(:types) do
      @m.valid?.must_equal true
    end
  end

  it "should default to skipping all auto validations if no arguments given to instance method" do
    @c = Class.new(@c)
    @m = @c.new
    @m.skip_auto_validations do
      @m.valid?.must_equal true
      @m.set(:d=>'/', :num=>'a', :name=>'1')
      @m.valid?.must_equal true
      @m.set(:name=>'a'*51)
      @m.valid?.must_equal true
    end
  end

  it "should work correctly in subclasses" do
    @c = Class.new(@c)
    @m = @c.new
    @m.num = 100001
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not present"], :name=>["is not present"], :num=>["is greater than maximum allowed value"])

    @m.set(:num=>-100001, :name=>"")
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not present"], :num=>["is less than minimum allowed value"])

    @m.set(:d=>'/', :num=>'a', :name=>"a\0b")
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not a valid date"], :num=>["is not a valid integer"], :name=>["contains a null byte"])

    @m.set(:d=>Date.today, :num=>1, :name=>'')
    @m.valid?.must_equal false
    @m.errors.must_equal([:name, :num]=>["is already taken"])

    @m.set(:name=>'a'*51)
    @m.valid?.must_equal false
    @m.errors.must_equal(:name=>["is longer than 50 characters"])
  end

  it "should work correctly in STI subclasses" do
    @c.plugin(:single_table_inheritance, :num, :model_map=>{1=>@c}, :key_map=>proc{[1, 2]})
    sc = Class.new(@c)
    @m = sc.new
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not present"], :name=>["is not present"])

    @m.set(:d=>'/', :num=>'a', :name=>'1')
    @m.valid?.must_equal false
    @m.errors.must_equal(:d=>["is not a valid date"], :num=>["is not a valid integer"])

    @m.db.sqls
    @m.set(:d=>Date.today, :num=>1)
    @m.valid?.must_equal false
    @m.errors.must_equal([:name, :num]=>["is already taken"])
    @m.db.sqls.must_equal ["SELECT count(*) AS count FROM test WHERE ((name = '1') AND (num = 1)) LIMIT 1"]

    @m.set(:name=>'a'*51)
    @m.valid?.must_equal false
    @m.errors.must_equal(:name=>["is longer than 50 characters"])
  end

  it "should work correctly when changing the dataset" do
    @c.set_dataset(@c.db[:foo])
    @c.new.valid?.must_equal true
  end
  it "should support setting validator options" do
    sc = Class.new(@c)
    sc.plugin :auto_validations,
      :max_length_opts=>{:message=>'ml_message'},
      :max_value_opts=>{:message=>'mv_message'},
      :min_value_opts=>{:message=>'min_message'},
      :no_null_byte_opts=>{:message=>'nnb_message'},
      :schema_types_opts=>{:message=>'st_message'},
      :explicit_not_null_opts=>{:message=>'enn_message'},
      :unique_opts=>{:message=>'u_message'}
    @m = sc.new
    @m.set(:name=>'a'*51, :d => '/', :nnd => nil, :num=>1)
    @m.valid?.must_equal false
    @m.errors.must_equal(:name=>["ml_message"], :d=>["st_message"], :nnd=>["enn_message"])

    @m = sc.new
    @m.set(:name=>1, :num=>1, :d=>Date.today)
    @m.valid?.must_equal false
    @m.errors.must_equal([:name, :num]=>["u_message"])

    @m.set(:num=>100001, :name=>"a\0b")
    @m.valid?.must_equal false
    @m.errors.must_equal(:name=>["nnb_message"], :num=>["mv_message"])

    @m.num = -100001
    @m.valid?.must_equal false
    @m.errors.must_equal(:name=>["nnb_message"], :num=>["min_message"])
  end

  it "should store modifying auto validation information in mutable auto_validate_* attributes" do
    @c.auto_validate_not_null_columns.frozen?.must_equal false
    @c.auto_validate_explicit_not_null_columns.frozen?.must_equal false
    @c.auto_validate_max_length_columns.frozen?.must_equal false
    @c.auto_validate_unique_columns.frozen?.must_equal false
    @c.auto_validate_no_null_byte_columns.frozen?.must_equal false
    @c.auto_validate_max_value_columns.frozen?.must_equal false
    @c.auto_validate_min_value_columns.frozen?.must_equal false

    @c.auto_validate_not_null_columns.frozen?.must_equal false
    @c.auto_validate_explicit_not_null_columns.sort.must_equal [:id, :nnd]
    @c.auto_validate_max_length_columns.sort.must_equal [[:name, 50]]
    @c.auto_validate_unique_columns.sort.must_equal [[:name, :num]]
    @c.auto_validate_no_null_byte_columns.sort.must_equal [:name, :nnd]
    @c.auto_validate_max_value_columns.sort.must_equal [[:num, 100000]]
    @c.auto_validate_min_value_columns.sort.must_equal [[:num, -100000]]
  end

  it "should copy auto validation information when subclassing" do
    sc = Class.new(@c)
    @c.auto_validate_not_null_columns.clear
    @c.auto_validate_explicit_not_null_columns.clear
    @c.auto_validate_max_length_columns.clear
    @c.auto_validate_unique_columns.clear
    @c.auto_validate_no_null_byte_columns.clear
    @c.auto_validate_max_value_columns.clear
    @c.auto_validate_min_value_columns.clear
    @c.auto_validate_not_null_columns.clear

    sc.auto_validate_explicit_not_null_columns.sort.must_equal [:id, :nnd]
    sc.auto_validate_max_length_columns.sort.must_equal [[:name, 50]]
    sc.auto_validate_unique_columns.sort.must_equal [[:name, :num]]
    sc.auto_validate_no_null_byte_columns.sort.must_equal [:name, :nnd]
    sc.auto_validate_max_value_columns.sort.must_equal [[:num, 100000]]
    sc.auto_validate_min_value_columns.sort.must_equal [[:num, -100000]]
  end

  it "should not allow modifying auto validation information for frozen model classes" do
    @c.freeze
    @c.auto_validate_not_null_columns.frozen?.must_equal true
    @c.auto_validate_explicit_not_null_columns.frozen?.must_equal true
    @c.auto_validate_max_length_columns.frozen?.must_equal true
    @c.auto_validate_unique_columns.frozen?.must_equal true
    @c.auto_validate_no_null_byte_columns.frozen?.must_equal true
    @c.auto_validate_max_value_columns.frozen?.must_equal true
    @c.auto_validate_min_value_columns.frozen?.must_equal true
  end
end
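# A minimal usage sketch for the auto_validations plugin, mirroring the mock
# schema setup used in the specs (a real adapter derives the schema itself;
# the table and columns here are illustrative assumptions):
require 'sequel'

db = Sequel.mock
def db.supports_schema_parsing?() true end
def db.schema(*)
  [[:id, {:primary_key=>true, :type=>:integer, :allow_null=>false}],
   [:name, {:type=>:string, :allow_null=>false, :max_length=>50}]]
end
item = Class.new(Sequel::Model(db[:items]))
item.send(:def_column_accessor, :id, :name)
item.plugin :auto_validations
m = item.new
m.valid?        # => false: NOT NULL column without a value
m.errors[:name] # => ["is not present"]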
sequel-5.63.0/spec/extensions/blacklist_security_spec.rb

require_relative "spec_helper"

describe Sequel::Model, "#(set|update)_except" do
  before do
    @c = Class.new(Sequel::Model(:items))
    @c.class_eval do
      plugin :blacklist_security
      set_primary_key :id
      columns :x, :y, :z, :id
      set_restricted_columns :y
    end
    @c.strict_param_setting = false
    @o1 = @c.new
    DB.reset
  end

  it "should raise errors if not all hash fields can be set and strict_param_setting is true" do
    @c.strict_param_setting = true
    proc{@c.new.set_except({:x => 1, :y => 2, :z=>3, :id=>4}, :x, :y)}.must_raise(Sequel::MassAssignmentRestriction)
    proc{@c.new.set_except({:x => 1, :y => 2, :z=>3}, :x, :y)}.must_raise(Sequel::MassAssignmentRestriction)
    (o = @c.new).set_except({:z => 3}, :x, :y)
    o.values.must_equal(:z=>3)
  end

  it "#set_except should not set given attributes or the primary key" do
    @o1.set_except({:x => 1, :y => 2, :z=>3, :id=>4}, [:y, :z])
    @o1.values.must_equal(:x => 1)
    @o1.set_except({:x => 4, :y => 2, :z=>3, :id=>4}, :y, :z)
    @o1.values.must_equal(:x => 4)
  end

  it "#update_except should not update given attributes" do
    @o1.update_except({:x => 1, :y => 2, :z=>3, :id=>4}, [:y, :z])
    DB.sqls.must_equal ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE id = 10"]
    @c.new.update_except({:x => 1, :y => 2, :z=>3, :id=>4}, :y, :z)
    DB.sqls.must_equal ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE id = 10"]
  end
end

describe Sequel::Model, ".restricted_columns " do
  before do
    @c = Class.new(Sequel::Model(:blahblah))
    @c.class_eval do
      plugin :blacklist_security
      columns :x, :y, :z
    end
    @c.strict_param_setting = false
    @c.instance_variable_set(:@columns, [:x, :y, :z])
    DB.sqls
  end

  it "should set the restricted columns correctly" do
    @c.restricted_columns.must_be_nil
    @c.set_restricted_columns :x
    @c.restricted_columns.must_equal [:x]
    @c.set_restricted_columns :x, :y
    @c.restricted_columns.must_equal [:x, :y]
  end

  it "should not set restricted columns by default" do
    @c.set_restricted_columns :z
    i = @c.new(:x => 1, :y => 2, :z => 3)
    i.values.must_equal(:x => 1, :y => 2)
    i.set(:x => 4, :y => 5, :z => 6)
    i.values.must_equal(:x => 4, :y => 5)

    @c.dataset = @c.dataset.with_fetch(:x => 7)
    i = @c.new
    i.update(:x => 7, :z => 9)
    i.values.must_equal(:x => 7)
    DB.sqls.must_equal ["INSERT INTO blahblah (x) VALUES (7)", "SELECT * FROM blahblah WHERE id = 10"]
  end

  it "should not set restricted primary keys when restricting columns unless primary key setting is restricted" do
    @c.set_restricted_columns :z
    i = @c.new(:id => 10, :x => 1, :y => 2, :z => 3)
    i.values.must_equal(:x => 1, :y => 2)
    i.set(:id => 10, :x => 4, :y => 5, :z => 6)
    i.values.must_equal(:x => 4, :y => 5)
    i.set_except({:id => 10, :x => 4, :y => 5, :z => 6}, [:z])
    i.values.must_equal(:x => 4, :y => 5)

    @c.dataset = @c.dataset.with_fetch(:x => 7)
    i = @c.new
    i.update(:id => 10, :x => 7, :z => 9)
    i.values.must_equal(:x => 7)
    DB.sqls.must_equal ["INSERT INTO blahblah (x) VALUES (7)", "SELECT * FROM blahblah WHERE id = 10"]
  end

  it "should set unrestricted primary keys when restricting columns if primary key setting is unrestricted" do
    @c.unrestrict_primary_key
    @c.set_restricted_columns :z
    i = @c.new(:id => 10, :x => 1, :y => 2, :z => 3)
    i.values.must_equal(:id => 10, :x => 1, :y => 2)
    i.set(:id => 10, :x => 4, :y => 5, :z => 6)
    i.values.must_equal(:id => 10, :x => 4, :y => 5)
    i.set_except({:id => 10, :x => 4, :y => 5, :z => 6}, [:z])
    i.values.must_equal(:id => 10, :x => 4, :y => 5)

    @c.dataset = @c.dataset.with_fetch(:id => 10, :x => 7)
    i = @c.new
    i.update(:id => 10, :x => 7, :z => 9)
    i.values.must_equal(:id => 10, :x => 7)
    DB.sqls.must_equal ["INSERT INTO blahblah (id, x) VALUES (10, 7)", "SELECT * FROM blahblah WHERE id = 10"]
  end

  it "should have allowed take precedence over restricted" do
    @c.plugin :whitelist_security
    @c.set_allowed_columns :x, :y
    @c.set_restricted_columns :y, :z
    i = @c.new(:x => 1, :y => 2, :z => 3)
    i.values.must_equal(:x => 1, :y => 2)
    i.set(:x => 4, :y => 5, :z => 6)
    i.values.must_equal(:x => 4, :y => 5)

    @c.dataset = @c.dataset.with_fetch(:y => 7)
    i = @c.new
    i.update(:y => 7, :z => 9)
    i.values.must_equal(:y => 7)
    DB.sqls.must_equal ["INSERT INTO blahblah (y) VALUES (7)", "SELECT * FROM blahblah WHERE id = 10"]
  end

  it "should have allowed take precedence over restricted when whitelist_security plugin is added first" do
    @c = Class.new(Sequel::Model(:blahblah))
    @c.class_eval do
      plugin :whitelist_security
      plugin :blacklist_security
      set_primary_key :id
      columns :x, :y, :z, :id
      set_restricted_columns :y
      self.strict_param_setting = false
    end
    @o1 = @c.new
    DB.reset

    @c.set_allowed_columns :x, :y
    @c.set_restricted_columns :y, :z
    i = @c.new(:x => 1, :y => 2, :z => 3)
    i.values.must_equal(:x => 1, :y => 2)
    i.set(:x => 4, :y => 5, :z => 6)
    i.values.must_equal(:x => 4, :y => 5)

    @c.dataset = @c.dataset.with_fetch(:y => 7)
    i = @c.new
    i.update(:y => 7, :z => 9)
    i.values.must_equal(:y => 7)
    DB.sqls.must_equal ["INSERT INTO blahblah (y) VALUES (7)", "SELECT * FROM blahblah WHERE id = 10"]
  end

  it "should freeze restricted_columns when freezing class" do
    @c.set_restricted_columns :z
    @c.freeze
    @c.restricted_columns.frozen?.must_equal true
  end
end
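# A usage sketch for the blacklist_security plugin (model, columns, and params
# are illustrative): restricted columns are silently ignored by mass
# assignment when strict_param_setting is false, and raise when it is true.
require 'sequel'

db = Sequel.mock
post = Class.new(Sequel::Model(db[:posts]))
post.plugin :blacklist_security
post.columns :title, :approved
post.set_restricted_columns :approved
post.strict_param_setting = false
p1 = post.new(:title=>'T', :approved=>true) # :approved is skipped
p1.values                                   # => {:title=>'T'}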
sequel-5.63.0/spec/extensions/blank_spec.rb

require_relative "spec_helper"

Sequel.extension :blank

describe "Object#blank?" do
  it "it should be true if the object responds true to empty?" do
    [].blank?.must_equal true
    {}.blank?.must_equal true
    o = Object.new
    def o.empty?; true; end
    o.blank?.must_equal true
  end

  it "it should be false if the object doesn't respond true to empty?" do
    [2].blank?.must_equal false
    {1=>2}.blank?.must_equal false
    Object.new.blank?.must_equal false
  end
end

describe "Numeric#blank?" do
  it "it should always be false" do
    1.blank?.must_equal false
    0.blank?.must_equal false
    -1.blank?.must_equal false
    1.0.blank?.must_equal false
    0.0.blank?.must_equal false
    -1.0.blank?.must_equal false
    10000000000000000.blank?.must_equal false
    -10000000000000000.blank?.must_equal false
    10000000000000000.0.blank?.must_equal false
    -10000000000000000.0.blank?.must_equal false
  end
end

describe "NilClass#blank?" do
  it "it should always be true" do
    nil.blank?.must_equal true
  end
end

describe "TrueClass#blank?" do
  it "it should always be false" do
    true.blank?.must_equal false
  end
end

describe "FalseClass#blank?" do
  it "it should always be true" do
    false.blank?.must_equal true
  end
end

describe "String#blank?" do
  it "it should be true if the string is empty" do
    ''.blank?.must_equal true
  end

  it "it should be true if the string is composed of just whitespace" do
    ' '.blank?.must_equal true
    "\r\n\t".blank?.must_equal true
    (' '*4000).blank?.must_equal true
    ("\r\n\t"*4000).blank?.must_equal true
  end

  it "it should be false if the string has any non whitespace characters" do
    '1'.blank?.must_equal false
    ("\r\n\t"*4000 + 'a').blank?.must_equal false
    ("\r\na\t"*4000).blank?.must_equal false
  end
end
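# Quick usage sketch for the blank extension specced above:
require 'sequel'
Sequel.extension :blank

''.blank?     # => true
"  \t".blank? # => true
nil.blank?    # => true
0.blank?      # => false
'a'.blank?    # => false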
sequel-5.63.0/spec/extensions/boolean_readers_spec.rb

require_relative "spec_helper"

describe Sequel::Model, "BooleanReaders plugin" do
  before do
    @db = Sequel.mock
    def @db.supports_schema_parsing?() true end
    def @db.schema(*args)
      [[:id, {}], [:z, {:type=>:integer, :db_type=>'tinyint(1)'}], [:b, {:type=>:boolean, :db_type=>'boolean'}]]
    end

    @c = Class.new(Sequel::Model(@db[:items]))
    @p = proc do
      @columns = [:id, :b, :z]
      def columns; @columns; end
      alias columns columns
    end
    @c.instance_eval(&@p)
  end

  it "should create attribute? readers for all boolean attributes" do
    @c.plugin(:boolean_readers)
    o = @c.new
    o.b?.must_be_nil
    o.b = '1'
    o.b?.must_equal true
    o.b = '0'
    o.b?.must_equal false
    o.b = ''
    o.b?.must_be_nil
  end

  it "should not create attribute? readers for non-boolean attributes" do
    @c.plugin(:boolean_readers)
    proc{@c.new.z?}.must_raise(NoMethodError)
    proc{@c.new.id?}.must_raise(NoMethodError)
  end

  it "should accept a block to determine if an attribute is boolean" do
    @c.plugin(:boolean_readers){|c| db_schema[c][:db_type] == 'tinyint(1)'}
    proc{@c.new.b?}.must_raise(NoMethodError)
    o = @c.new
    o.z.must_be_nil
    o.z?.must_be_nil
    o.z = '1'
    o.z.must_equal 1
    o.z?.must_equal true
    o.z = '0'
    o.z.must_equal 0
    o.z?.must_equal false
    o.z = ''
    o.z.must_be_nil
    o.z?.must_be_nil
  end

  it "should create boolean readers when set_dataset is defined" do
    c = Class.new(Sequel::Model(@db))
    c.instance_eval(&@p)
    c.plugin(:boolean_readers)
    c.set_dataset(@db[:a])
    o = c.new
    o.b?.must_be_nil
    o.b = '1'
    o.b?.must_equal true
    o.b = '0'
    o.b?.must_equal false
    o.b = ''
    o.b?.must_be_nil
    proc{o.i?}.must_raise(NoMethodError)

    c = Class.new(Sequel::Model(@db))
    c.instance_eval(&@p)
    c.plugin(:boolean_readers){|x| db_schema[x][:db_type] == 'tinyint(1)'}
    c.set_dataset(@db[:a])
    o = c.new
    o.z.must_be_nil
    o.z?.must_be_nil
    o.z = '1'
    o.z.must_equal 1
    o.z?.must_equal true
    o.z = '0'
    o.z.must_equal 0
    o.z?.must_equal false
    o.z = ''
    o.z.must_be_nil
    o.z?.must_be_nil
    proc{o.b?}.must_raise(NoMethodError)
  end

  it "should handle cases where getting the columns raises an error" do
    def @c.columns; raise Sequel::Error end
    @c.plugin(:boolean_readers)
    proc{@c.new.b?}.must_raise(NoMethodError)
  end
end
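# Usage sketch for the boolean_readers plugin, mirroring the mock schema
# setup above (a real adapter supplies the schema; column names are
# illustrative assumptions):
require 'sequel'

db = Sequel.mock
def db.supports_schema_parsing?() true end
def db.schema(*)
  [[:id, {:type=>:integer}], [:admin, {:type=>:boolean, :db_type=>'boolean'}]]
end
user = Class.new(Sequel::Model(db[:users]))
user.plugin :boolean_readers
u = user.new
u.admin = true
u.admin? # => true; a predicate reader is added for each boolean column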
sequel-5.63.0/spec/extensions/boolean_subsets_spec.rb

require_relative "spec_helper"

describe "boolean_subsets plugin" do
  before do
    @db = Sequel.mock
    def @db.supports_schema_parsing?() true end
    def @db.schema(*args)
      [[:asdaf9898as, {}], [:active, {:type=>:boolean}]]
    end

    @c = Class.new(Sequel::Model(@db[:items]))
    @p = proc do
      @columns = [:asdaf9898as, :active]
      def columns; @columns; end
      singleton_class.send(:alias_method, :columns, :columns)
    end
    @c.instance_eval(&@p)
  end

  it "should create subsets only for boolean attributes" do
    @c.plugin(:boolean_subsets)
    @c.active.sql.must_equal "SELECT * FROM items WHERE (active IS TRUE)"
    @c.respond_to?(:asdaf9898as).must_equal false
  end

  it "should handle a block passed to the plugin" do
    @c.plugin(:boolean_subsets){|c| ["where_#{c}", c]}
    @c.where_active.sql.must_equal "SELECT * FROM items WHERE active"
    @c.respond_to?(:active).must_equal false
  end

  it "should create boolean subsets when set_dataset is called" do
    c = Class.new(Sequel::Model(@db))
    c.instance_eval(&@p)
    c.plugin(:boolean_subsets)
    c.respond_to?(:active).must_equal false
    c.set_dataset(@db[:items])
    c.active.sql.must_equal "SELECT * FROM items WHERE (active IS TRUE)"
    c.respond_to?(:asdaf9898as).must_equal false
  end

  it "should handle cases where getting the columns raises an error" do
    def @c.columns; raise Sequel::Error end
    @c.plugin(:boolean_subsets)
    @c.respond_to?(:active).must_equal false
  end
end
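# Usage sketch for the boolean_subsets plugin: each boolean column gets a
# matching subset/dataset method (schema again mocked, as in the specs):
require 'sequel'

db = Sequel.mock
def db.supports_schema_parsing?() true end
def db.schema(*)
  [[:id, {:type=>:integer}], [:active, {:type=>:boolean}]]
end
item = Class.new(Sequel::Model(db[:items]))
item.plugin :boolean_subsets
item.active.sql # => "SELECT * FROM items WHERE (active IS TRUE)"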
sequel-5.63.0/spec/extensions/caching_spec.rb

require_relative "spec_helper"

describe Sequel::Model, "caching" do
  before do
    @cache_class = Class.new(Hash) do
      attr_accessor :ttl
      def set(k, v, ttl); self[k] = v; @ttl = ttl; end
      def get(k); self[k]; end
    end
    cache = @cache_class.new
    @cache = cache

    @memcached_class = Class.new(Hash) do
      attr_accessor :ttl
      def set(k, v, ttl); self[k] = v; @ttl = ttl; end
      def get(k); if self[k] then return self[k]; else raise ArgumentError; end end
      def delete(k); if self[k] then super; else raise ArgumentError; end end
    end
    cache2 = @memcached_class.new
    @memcached = cache2

    @c = Class.new(Sequel::Model(:items))
    @c.class_eval do
      plugin :caching, cache
      def self.name; 'Item' end
      columns :name, :id
    end

    @c3 = Class.new(Sequel::Model(:items))
    @c3.class_eval do
      plugin :caching, cache2
      def self.name; 'Item' end
      columns :name, :id
    end

    @c4 = Class.new(Sequel::Model(:items))
    @c4.class_eval do
      plugin :caching, cache2, :ignore_exceptions => true
      def self.name; 'Item' end
      columns :name, :id
    end

    @c2 = Class.new(@c) do
      def self.name; 'SubItem' end
    end

    [@c, @c2, @c3, @c4].each do |c|
      c.dataset = c.dataset.with_fetch(:name => 'sharon'.dup, :id => 1).with_numrows(1)
    end
    @c.db.reset
  end

  it "should set the model's cache store" do
    @c.cache_store.wont_be_nil
    @c2.cache_store.wont_be_nil
  end

  it "should have a default ttl of 3600" do
    @c.cache_ttl.must_equal 3600
    @c2.cache_ttl.must_equal 3600
  end

  it "should take a ttl option" do
    c = Class.new(Sequel::Model(:items))
    c.plugin :caching, @cache, :ttl => 1234
    c.cache_ttl.must_equal 1234
    Class.new(c).cache_ttl.must_equal 1234
  end

  it "should allow overriding the ttl option via a plugin :caching call" do
    @c.plugin :caching, @cache, :ttl => 1234
    @c.cache_ttl.must_equal 1234
    Class.new(@c).cache_ttl.must_equal 1234
  end

  it "should offer a set_cache_ttl method for setting the ttl" do
    @c.cache_ttl.must_equal 3600
    @c.set_cache_ttl 1234
    @c.cache_ttl.must_equal 1234
    Class.new(@c).cache_ttl.must_equal 1234
  end

  it "should generate a cache key appropriate to the class via the Model#cache_key" do
    m = @c.new
    m.values[:id] = 1
    m.cache_key.must_equal "#{m.class}:1"
    m = @c2.new
    m.values[:id] = 1
    m.cache_key.must_equal "#{m.class}:1"

    # custom primary key
    @c.set_primary_key :ttt
    m = @c.new
    m.values[:ttt] = 333
    m.cache_key.must_equal "#{m.class}:333"
    c = Class.new(@c)
    m = c.new
    m.values[:ttt] = 333
    m.cache_key.must_equal "#{m.class}:333"

    # composite primary key
    @c.set_primary_key [:a, :b, :c]
    m = @c.new
    m.values[:a] = 123
    m.values[:c] = 456
    m.values[:b] = 789
    m.cache_key.must_equal "#{m.class}:123,789,456"
    c = Class.new(@c)
    m = c.new
    m.values[:a] = 123
    m.values[:c] = 456
    m.values[:b] = 789
    m.cache_key.must_equal "#{m.class}:123,789,456"
  end

  it "should generate a cache key via the Model.cache_key method" do
    @c.cache_key(1).must_equal "#{@c}:1"
    @c.cache_key([1, 2]).must_equal "#{@c}:1,2"
  end
  it "should raise error if attempting to generate cache_key and primary key value is null" do
    m = @c.new
    proc{m.cache_key}.must_raise(Sequel::Error)
    m.values[:id] = 1
    m.cache_key

    m = @c2.new
    proc{m.cache_key}.must_raise(Sequel::Error)
    m.values[:id] = 1
    m.cache_key
  end

  it "should not raise error if trying to save a new record" do
    @c.new(:name=>'blah').save
    @c.create(:name=>'blah')
    @c2.new(:name=>'blah').save
    @c2.create(:name=>'blah')
  end

  it "should set the cache when reading from the database" do
    @c.db.sqls.must_equal []
    @cache.must_be :empty?

    m = @c[1]
    @c.db.sqls.must_equal ['SELECT * FROM items WHERE id = 1']
    m.values.must_equal(:name=>"sharon", :id=>1)
    @cache[m.cache_key].must_equal m
    m2 = @c[1]
    @c.db.sqls.must_equal []
    m2.must_equal m
    m2.values.must_equal(:name=>"sharon", :id=>1)

    m = @c2[1]
    @c.db.sqls.must_equal ['SELECT * FROM items WHERE id = 1']
    m.values.must_equal(:name=>"sharon", :id=>1)
    @cache[m.cache_key].must_equal m
    m2 = @c2[1]
    @c.db.sqls.must_equal []
    m2.must_equal m
    m2.values.must_equal(:name=>"sharon", :id=>1)
  end

  it "should handle lookups by nil primary keys" do
    @c[nil].must_be_nil
    @c.db.sqls.must_equal []
  end

  it "should delete the cache when writing to the database" do
    m = @c[1]
    @cache[m.cache_key].must_equal m
    m.name = 'hey'
    m.save
    @cache.has_key?(m.cache_key).must_equal false
    @c.db.sqls.must_equal ["SELECT * FROM items WHERE id = 1", "UPDATE items SET name = 'hey' WHERE (id = 1)"]

    m = @c2[1]
    @cache[m.cache_key].must_equal m
    m.name = 'hey'
    m.save
    @cache.has_key?(m.cache_key).must_equal false
    @c.db.sqls.must_equal ["SELECT * FROM items WHERE id = 1", "UPDATE items SET name = 'hey' WHERE (id = 1)"]
  end

  it "should delete the cache when deleting the record" do
    m = @c[1]
    @cache[m.cache_key].must_equal m
    m.delete
    @cache.has_key?(m.cache_key).must_equal false
    @c.db.sqls.must_equal ["SELECT * FROM items WHERE id = 1", "DELETE FROM items WHERE id = 1"]

    m = @c2[1]
    @cache[m.cache_key].must_equal m
    m.delete
    @cache.has_key?(m.cache_key).must_equal false
    @c.db.sqls.must_equal ["SELECT * FROM items WHERE id = 1", "DELETE FROM items WHERE id = 1"]
  end

  it "should support #[] as a shortcut to #find with hash" do
    m = @c[:id => 3]
    @cache[m.cache_key].must_be_nil
    @c.db.sqls.must_equal ["SELECT * FROM items WHERE (id = 3) LIMIT 1"]
    m = @c[1]
    @cache[m.cache_key].must_equal m
    @c.db.sqls.must_equal ["SELECT * FROM items WHERE id = 1"]
    @c[:id => 4]
    @c.db.sqls.must_equal ["SELECT * FROM items WHERE (id = 4) LIMIT 1"]

    m = @c2[:id => 3]
    @cache[m.cache_key].must_be_nil
    @c.db.sqls.must_equal ["SELECT * FROM items WHERE (id = 3) LIMIT 1"]
    m = @c2[1]
    @cache[m.cache_key].must_equal m
    @c.db.sqls.must_equal ["SELECT * FROM items WHERE id = 1"]
    @c2[:id => 4]
    @c.db.sqls.must_equal ["SELECT * FROM items WHERE (id = 4) LIMIT 1"]
  end

  it "should support ignore_exception option" do
    c = Class.new(Sequel::Model(:items))
    c.plugin :caching, @cache, :ignore_exceptions => true
    Class.new(c).cache_ignore_exceptions.must_equal true
  end

  it "should raise an exception if cache_store is memcached and ignore_exception is not enabled" do
    proc{@c3[1]}.must_raise ArgumentError
    m = @c3.new.save
    proc{m.update({:name=>'blah'})}.must_raise ArgumentError
  end

  it "should rescue an exception if cache_store is memcached and ignore_exception is enabled" do
    @c4[1].values.must_equal(:name => 'sharon', :id => 1)
    @c4.dataset = @c4.dataset.with_fetch(:name => 'sharon', :id => 1, :x=>1)
    m = @c4.new.save
    m.update({:name=>'blah'})
    m.values.must_equal(:name => 'blah', :id => 1, :x => 1)
  end

  it "should support Model.cache_get_pk for getting a value from the cache by primary key" do
    @c.cache_get_pk(1).must_be_nil
    m = @c[1]
    @c.cache_get_pk(1).must_equal m
  end

  it "should support Model.cache_delete_pk for removing a value from the cache by primary key" do
    @c[1]
    @c.cache_get_pk(1).wont_equal nil
    @c.cache_delete_pk(1).must_be_nil
    @c.cache_get_pk(1).must_be_nil
  end

  it "should support overriding the cache key prefix" do
    c2 = Class.new(@c)
    def c2.cache_key_prefix; "ceetwo" end
    c3 = Class.new(c2)
    @c.cache_key(:id).wont_equal c2.cache_key(:id)
    c2.cache_key(:id).must_equal c3.cache_key(:id)

    @c[1]
    c2.cache_get_pk(1).must_be_nil
    m = c2[1]
    c2.cache_get_pk(1).values.must_equal @c[1].values
    c3.cache_get_pk(1).values.must_equal m.values
    m.name << m.name
    m.save
    c2[1].values.must_equal c3[1].values
  end
end
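# A usage sketch for the caching plugin with a memcached-style store. Dalli is
# one client whose get/set/delete API matches what the plugin expects (the
# server address and model are assumptions):
#
#   require 'dalli'
#   CACHE = Dalli::Client.new('localhost:11211')
#
#   class Album < Sequel::Model
#     plugin :caching, CACHE, :ttl=>1800, :ignore_exceptions=>true
#   end
#
#   Album[1]  # checks the cache first; caches the fetched row on a miss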
sequel-5.63.0/spec/extensions/caller_logging_spec.rb

require_relative "spec_helper"
require 'logger'

describe "caller_logging extension" do
  before do
    @db = Sequel.mock(:extensions=>[:caller_logging])
    @log_stream = StringIO.new
    @db.loggers << Logger.new(@log_stream)
    @ds = @db[:items]
  end

  exec_sql_line = __LINE__ + 2
  def exec_sql(sql)
    @db[sql].all
  end

  it "should log caller information, skipping internal Sequel code" do
    exec_sql("SELECT * FROM items")
    @log_stream.rewind
    lines = @log_stream.read.split("\n")
    lines.length.must_equal 1
    lines[0].must_match(/ \(source: #{__FILE__}:#{exec_sql_line}:in `exec_sql'\) SELECT \* FROM items\z/)
  end

  it "should allow formatting of caller information" do
    @db.caller_logging_formatter = lambda{|line| line.sub(/\A.+(caller_logging_spec\.rb:\d+).+\z/, '\1:')}
    exec_sql("SELECT * FROM items")
    @log_stream.rewind
    lines = @log_stream.read.split("\n")
    lines.length.must_equal 1
    lines[0].must_match(/ caller_logging_spec\.rb:#{exec_sql_line}: SELECT \* FROM items\z/)
  end

  it "should allow ignoring additional caller lines in application" do
    @db.caller_logging_ignore = /exec_sql/
    exec_sql("SELECT * FROM items"); line = __LINE__
    @log_stream.rewind
    lines = @log_stream.read.split("\n")
    lines.length.must_equal 1
    lines[0].must_match(/ \(source: #{__FILE__}:#{line}:in `block.+\) SELECT \* FROM items\z/)
  end

  it "should not log caller information if all caller lines are filtered" do
    @db.caller_logging_ignore = /./
    exec_sql("SELECT * FROM items")
    @log_stream.rewind
    lines = @log_stream.read.split("\n")
    lines.length.must_equal 1
    lines[0].must_match(/ SELECT \* FROM items\z/)
    lines[0].wont_match(/ source: #{__FILE__}/)
  end
end
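# Usage sketch for the caller_logging extension: each logged query gains the
# application file and line that triggered it (mock database used so the
# sketch runs without a server):
require 'sequel'
require 'logger'
require 'stringio'

db = Sequel.mock(:extensions=>[:caller_logging])
log = StringIO.new
db.loggers << Logger.new(log)
db[:items].all
log.string # log line ends with "(source: <file>:<line>:in ...) SELECT * FROM items"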
sequel-5.63.0/spec/extensions/class_table_inheritance_spec.rb

require_relative "spec_helper" describe "class_table_inheritance plugin" do before do @db = Sequel.mock(:numrows=>1, :autoid=>proc{|sql| 1}) def @db.supports_schema_parsing?() true end def @db.schema(table, opts={}) {:employees=>[[:id, {:primary_key=>true, :type=>:integer}], [:name, {:type=>:string, :allow_null=>false}], [:kind, {:type=>:string}]], :managers=>[[:id, {:type=>:integer}], [:num_staff, {:type=>:integer, :allow_null=>false}] ], :executives=>[[:id, {:type=>:integer}], [:num_managers, {:type=>:integer}]], :staff=>[[:id, {:type=>:integer}], [:manager_id, {:type=>:integer}]], }[table.is_a?(Sequel::Dataset) ? table.first_source_table : table] end @db.extend_datasets do def columns {[:employees]=>[:id, :name, :kind], [:managers]=>[:id, :num_staff], [:executives]=>[:id, :num_managers], [:staff]=>[:id, :manager_id], [:employees, :managers]=>[:id, :name, :kind, :num_staff], [:employees, :managers, :executives]=>[:id, :name, :kind, :num_staff, :num_managers], [:employees, :staff]=>[:id, :name, :kind, :manager_id], }[opts[:from] + (opts[:join] || []).map{|x| x.table}] end end base = Sequel::Model(@db) base.plugin :auto_validations if @use_auto_validations class ::Employee < base def self.columns dataset.columns || dataset.opts[:from].first.expression.columns end private def _save_refresh; @values[:id] = 1 end plugin :class_table_inheritance, :key=>:kind, :table_map=>{:Staff=>:staff} end class ::Manager < Employee one_to_many :staff_members, :class=>:Staff end class ::Executive < Manager end class ::Ceo < Executive end class ::Staff < Employee many_to_one :manager end class ::Intern < Employee end @ds = Employee.dataset @db.sqls end after do [:Intern, :Ceo, :Executive, :Manager, :Staff, :Employee].each{|s| Object.send(:remove_const, s)} end it "should freeze CTI information when freezing model class" do Employee.freeze Employee.cti_models.frozen?.must_equal true Employee.cti_tables.frozen?.must_equal true Employee.cti_instance_dataset.frozen?.must_equal true Employee.cti_table_columns.frozen?.must_equal true Employee.cti_table_map.frozen?.must_equal true end it "should not attempt to use prepared statements" do Manager.plugin :prepared_statements Manager.load(:id=>1, :kind=>'Manager', :num_staff=>2).save @db.sqls.must_equal ["UPDATE employees SET kind = 'Manager' WHERE (id = 1)", "UPDATE managers SET num_staff = 2 WHERE (id = 1)"] Employee.plugin :prepared_statements Employee.load(:id=>2, :kind=>'Employee').save @db.sqls.must_equal ["UPDATE employees SET kind = 'Employee' WHERE (id = 2)"] end it "#cti_models.first should be the model that loaded the plugin" do Executive.cti_models.first.must_equal Employee end it "should have simple_table = nil for all subclasses" do Manager.simple_table.must_be_nil
Executive.simple_table.must_be_nil Ceo.simple_table.must_be_nil Staff.simple_table.must_be_nil Intern.simple_table.must_be_nil end it "should have working row_proc if using set_dataset in subclass to remove columns" do Manager.set_dataset(Manager.dataset.select(*(Manager.columns - [:blah]))) Manager.dataset = Manager.dataset.with_fetch(:id=>1, :kind=>'Ceo') Manager[1].must_equal Ceo.load(:id=>1, :kind=>'Ceo') end it "should use a subquery in subclasses" do Employee.dataset.sql.must_equal 'SELECT * FROM employees' Manager.dataset.sql.must_equal 'SELECT * FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff FROM employees INNER JOIN managers ON (managers.id = employees.id)) AS employees' Executive.dataset.sql.must_equal 'SELECT * FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff, executives.num_managers FROM employees INNER JOIN managers ON (managers.id = employees.id) INNER JOIN executives ON (executives.id = managers.id)) AS employees' Ceo.dataset.sql.must_equal 'SELECT * FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff, executives.num_managers FROM employees INNER JOIN managers ON (managers.id = employees.id) INNER JOIN executives ON (executives.id = managers.id) WHERE (employees.kind IN (\'Ceo\'))) AS employees' Staff.dataset.sql.must_equal 'SELECT * FROM (SELECT employees.id, employees.name, employees.kind, staff.manager_id FROM employees INNER JOIN staff ON (staff.id = employees.id)) AS employees' Intern.dataset.sql.must_equal 'SELECT * FROM employees WHERE (employees.kind IN (\'Intern\'))' end it "should use an empty string for the class name for anonymous subclasses" do Class.new(Employee).dataset.sql.must_equal "SELECT * FROM employees WHERE (employees.kind IN (''))" Class.new(Manager).dataset.sql.must_equal "SELECT * FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff FROM employees INNER JOIN managers ON (managers.id = employees.id) WHERE (employees.kind IN (''))) AS employees" Class.new(Executive).dataset.sql.must_equal "SELECT * FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff, executives.num_managers FROM employees INNER JOIN managers ON (managers.id = employees.id) INNER JOIN executives ON (executives.id = managers.id) WHERE (employees.kind IN (''))) AS employees" Class.new(Ceo).dataset.sql.must_equal "SELECT * FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff, executives.num_managers FROM employees INNER JOIN managers ON (managers.id = employees.id) INNER JOIN executives ON (executives.id = managers.id) WHERE (employees.kind IN (''))) AS employees" Class.new(Staff).dataset.sql.must_equal "SELECT * FROM (SELECT employees.id, employees.name, employees.kind, staff.manager_id FROM employees INNER JOIN staff ON (staff.id = employees.id) WHERE (employees.kind IN (''))) AS employees" Class.new(Intern).dataset.sql.must_equal "SELECT * FROM (SELECT * FROM employees WHERE (employees.kind IN (''))) AS employees" end it "should return rows with the correct class based on the polymorphic_key value" do @ds.with_fetch([{:kind=>'Employee'}, {:kind=>'Manager'}, {:kind=>'Executive'}, {:kind=>'Ceo'}, {:kind=>'Staff'}, {:kind=>'Intern'}]).all.collect{|x| x.class}.must_equal [Employee, Manager, Executive, Ceo, Staff, Intern] end it "should return rows with the correct class based on the polymorphic_key value for subclasses" do Manager.dataset.with_fetch([{:kind=>'Manager'}, {:kind=>'Executive'}, {:kind=>'Ceo'}]).all.collect{|x|
x.class}.must_equal [Manager, Executive, Ceo] end it "should have refresh return all columns in subclass after loading from superclass" do Employee.dataset = Employee.dataset.with_fetch([{:id=>1, :name=>'A', :kind=>'Ceo'}]) Ceo.dataset = Ceo.dataset.with_fetch([{:id=>1, :name=>'A', :kind=>'Ceo', :num_staff=>3, :num_managers=>2}]) a = Employee.first a.class.must_equal Ceo a.values.must_equal(:id=>1, :name=>'A', :kind=>'Ceo') a.refresh.values.must_equal(:id=>1, :name=>'A', :kind=>'Ceo', :num_staff=>3, :num_managers=>2) @db.sqls.must_equal ["SELECT * FROM employees LIMIT 1", "SELECT * FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff, executives.num_managers FROM employees INNER JOIN managers ON (managers.id = employees.id) INNER JOIN executives ON (executives.id = managers.id) WHERE (employees.kind IN ('Ceo'))) AS employees WHERE (id = 1) LIMIT 1"] end describe "with auto_validations plugin" do before(:all) do @use_auto_validations = true end it "should work" do e = Employee.new e.valid?.must_equal false e.errors.must_equal(:name=>["is not present"]) e = Manager.new e.valid?.must_equal false e.errors.must_equal(:name=>["is not present"], :num_staff=>["is not present"]) e = Executive.new e.valid?.must_equal false e.errors.must_equal(:name=>["is not present"], :num_staff=>["is not present"]) end end it "should return rows with the current class if sti_key is nil" do Employee.plugin :class_table_inheritance Employee.dataset.with_fetch([{:kind=>'Employee'}, {:kind=>'Manager'}, {:kind=>'Executive'}, {:kind=>'Ceo'}, {:kind=>'Staff'}, {:kind=>'Intern'}]).all.map{|x| x.class}.must_equal [Employee, Employee, Employee, Employee, Employee, Employee] end it "should return rows with the current class if sti_key is nil in subclasses" do Employee.plugin :class_table_inheritance Object.send(:remove_const, :Executive) Object.send(:remove_const, :Manager) class ::Manager < Employee; end class ::Executive < Manager; end Manager.dataset.with_fetch([{:kind=>'Manager'}, {:kind=>'Executive'}]).all.map{|x| x.class}.must_equal [Manager, Manager] end it "should handle a model map with integer values" do Employee.plugin :class_table_inheritance, :key=>:kind, :model_map=>{0=>:Employee, 1=>:Manager, 2=>:Executive, 3=>:Ceo, 4=>:Intern} Object.send(:remove_const, :Intern) Object.send(:remove_const, :Ceo) Object.send(:remove_const, :Executive) Object.send(:remove_const, :Manager) class ::Intern < Employee; end class ::Manager < Employee; end class ::Executive < Manager; end class ::Ceo < Executive; end Employee.dataset = Employee.dataset.with_fetch([{:kind=>nil},{:kind=>0},{:kind=>1}, {:kind=>2}, {:kind=>3}, {:kind=>4}]) Employee.all.collect{|x| x.class}.must_equal [Employee, Employee, Manager, Executive, Ceo, Intern] Manager.dataset = Manager.dataset.with_fetch([{:kind=>nil},{:kind=>0},{:kind=>1}, {:kind=>2}, {:kind=>3}]) Manager.all.collect{|x| x.class}.must_equal [Manager, Employee, Manager, Executive, Ceo] end it "should fall back to the main class if the given class does not exist" do @ds.with_fetch([{:kind=>'Employee'}, {:kind=>'Manager'}, {:kind=>'Blah'}, {:kind=>'Staff'}]).all.map{|x| x.class}.must_equal [Employee, Manager, Employee, Staff] end it "should fall back to the main class if the given class does not exist in subclasses" do Manager.dataset.with_fetch([{:kind=>'Manager'}, {:kind=>'Executive'}, {:kind=>'Ceo'}, {:kind=>'Blah'}]).all.map{|x| x.class}.must_equal [Manager, Executive, Ceo, Manager] end it "should set the model class name for the key when creating new parent class
records" do Employee.create @db.sqls.must_equal ["INSERT INTO employees (kind) VALUES ('Employee')"] end it "should sets the model class name for the key when creating new class records for subclass without separate table" do Intern.create @db.sqls.must_equal ["INSERT INTO employees (kind) VALUES ('Intern')"] end it "should sets the model class name for the key when creating new subclass records" do Ceo.create @db.sqls.must_equal ["INSERT INTO employees (kind) VALUES ('Ceo')", "INSERT INTO managers (id) VALUES (1)", "INSERT INTO executives (id) VALUES (1)"] end it "should ignore existing sti_key value when creating new records" do Employee.create(:kind=>'Manager') @db.sqls.must_equal ["INSERT INTO employees (kind) VALUES ('Employee')"] end it "should ignore existing sti_key value in subclasses" do Manager.create(:kind=>'Executive') @db.sqls.must_equal ["INSERT INTO employees (kind) VALUES ('Manager')", "INSERT INTO managers (id) VALUES (1)"] end it "should handle validations on the type column field" do o = Employee.new def o.validate errors.add(:kind, 'not present') unless kind end o.valid?.must_equal true end it "should set the type column field even when not validating" do Employee.new.save(:validate=>false) @db.sqls.must_equal ["INSERT INTO employees (kind) VALUES ('Employee')"] end it "should handle type field matching current class when saving" do Employee.create(:kind=>'Employee') @db.sqls.must_equal ["INSERT INTO employees (kind) VALUES ('Employee')"] end it "should convert type field matching subclass using different table when saving" do Employee.create(:kind=>'Manager') @db.sqls.must_equal ["INSERT INTO employees (kind) VALUES ('Employee')"] end it "should keep type field matching subclass using same table when saving" do Employee.create(:kind=>'Intern') @db.sqls.must_equal ["INSERT INTO employees (kind) VALUES ('Intern')"] end it "should allow specifying a map of names to tables to override implicit mapping" do Manager.dataset.sql.must_equal 'SELECT * FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff FROM employees INNER JOIN managers ON (managers.id = employees.id)) AS employees' Staff.dataset.sql.must_equal 'SELECT * FROM (SELECT employees.id, employees.name, employees.kind, staff.manager_id FROM employees INNER JOIN staff ON (staff.id = employees.id)) AS employees' end it "should lazily load attributes for columns in subclass tables" do Manager.dataset = Manager.dataset.with_fetch(:id=>1, :name=>'J', :kind=>'Ceo', :num_staff=>2) m = Manager[1] @db.sqls.must_equal ['SELECT * FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff FROM employees INNER JOIN managers ON (managers.id = employees.id)) AS employees WHERE (id = 1) LIMIT 1'] @db.fetch = {:num_managers=>3} m.must_be_kind_of Ceo m.num_managers.must_equal 3 @db.sqls.must_equal ['SELECT employees.num_managers FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff, executives.num_managers FROM employees INNER JOIN managers ON (managers.id = employees.id) INNER JOIN executives ON (executives.id = managers.id)) AS employees WHERE (employees.id = 1) LIMIT 1'] m.values.must_equal(:id=>1, :name=>'J', :kind=>'Ceo', :num_staff=>2, :num_managers=>3) end it "should lazily load columns in middle classes correctly when loaded from parent class" do Employee.dataset = Employee.dataset.with_fetch(:id=>1, :kind=>'Ceo') @db.fetch = [[:num_staff=>2]] e = Employee[1] e.must_be_kind_of(Ceo) @db.sqls.must_equal ["SELECT * FROM employees WHERE (id = 1) LIMIT 1"] 
e.num_staff.must_equal 2 @db.sqls.must_equal ["SELECT employees.num_staff FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff FROM employees INNER JOIN managers ON (managers.id = employees.id)) AS employees WHERE (employees.id = 1) LIMIT 1"] end it "should eagerly load lazy columns in subclasses when loaded from parent class" do Employee.dataset = Employee.dataset.with_fetch(:id=>1, :kind=>'Ceo') @db.fetch = [[{:id=>1, :num_staff=>2}], [{:id=>1, :num_managers=>3}]] e = Employee.all.first e.must_be_kind_of(Ceo) @db.sqls.must_equal ["SELECT * FROM employees"] e.num_staff.must_equal 2 @db.sqls.must_equal ["SELECT employees.id, employees.num_staff FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff FROM employees INNER JOIN managers ON (managers.id = employees.id)) AS employees WHERE (employees.id IN (1))"] e.num_managers.must_equal 3 @db.sqls.must_equal ['SELECT employees.id, employees.num_managers FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff, executives.num_managers FROM employees INNER JOIN managers ON (managers.id = employees.id) INNER JOIN executives ON (executives.id = managers.id)) AS employees WHERE (employees.id IN (1))'] end it "should include schema for columns for tables for ancestor classes" do Employee.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string, :allow_null=>false}, :kind=>{:type=>:string}) Manager.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string, :allow_null=>false}, :kind=>{:type=>:string}, :num_staff=>{:type=>:integer, :allow_null=>false}) Executive.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string, :allow_null=>false}, :kind=>{:type=>:string}, :num_staff=>{:type=>:integer, :allow_null=>false}, :num_managers=>{:type=>:integer}) Staff.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string, :allow_null=>false}, :kind=>{:type=>:string}, :manager_id=>{:type=>:integer}) Intern.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string, :allow_null=>false}, :kind=>{:type=>:string}) end it "should use the correct primary key (which should have the same name in all subclasses)" do [Employee, Manager, Executive, Ceo, Staff, Intern].each{|c| c.primary_key.must_equal :id} end it "should have table_name return the table name of the most specific table" do Employee.table_name.must_equal :employees Manager.table_name.must_equal :employees Executive.table_name.must_equal :employees Ceo.table_name.must_equal :employees Staff.table_name.must_equal :employees Intern.table_name.must_equal :employees end it "should delete the correct rows from all tables when deleting" do Employee.load(:id=>1).delete @db.sqls.must_equal ["DELETE FROM employees WHERE (id = 1)"] Intern.load(:id=>1).delete @db.sqls.must_equal ["DELETE FROM employees WHERE (id = 1)"] Ceo.load(:id=>1).delete @db.sqls.must_equal ["DELETE FROM executives WHERE (id = 1)", "DELETE FROM managers WHERE (id = 1)", "DELETE FROM employees WHERE (id = 1)"] end it "should not allow deletion of frozen object" do [Ceo, Executive, Employee, Manager, Intern].each do |c| o = c.load(:id=>1) o.freeze proc{o.delete}.must_raise(Sequel::Error) @db.sqls.must_equal [] end end it "should insert the correct rows into all tables when inserting into parent class" do Employee.create(:name=>'E') @db.sqls.must_equal ["INSERT INTO employees (name, kind) VALUES ('E', 'Employee')"] end it "should
insert the correct rows into all tables when inserting into subclass without separate table" do Intern.create(:name=>'E') @db.sqls.must_equal ["INSERT INTO employees (name, kind) VALUES ('E', 'Intern')"] end it "should insert the correct rows into all tables when inserting" do Ceo.create(:num_managers=>3, :num_staff=>2, :name=>'E') @db.sqls.must_equal ["INSERT INTO employees (name, kind) VALUES ('E', 'Ceo')", "INSERT INTO managers (id, num_staff) VALUES (1, 2)", "INSERT INTO executives (id, num_managers) VALUES (1, 3)"] end it "should insert the correct rows into all tables when inserting when insert_select is supported" do [Executive, Manager, Employee].each do |klass| klass.instance_variable_set(:@cti_instance_dataset, klass.cti_instance_dataset.with_extend do def supports_insert_select?; true; end def insert_select(v) db.run(insert_sql(v) + " RETURNING *") v.merge(:id=>1) end end) end Ceo.create(:num_managers=>3, :num_staff=>2, :name=>'E') @db.sqls.must_equal ["INSERT INTO employees (name, kind) VALUES ('E', 'Ceo') RETURNING *", "INSERT INTO managers (id, num_staff) VALUES (1, 2) RETURNING *", "INSERT INTO executives (id, num_managers) VALUES (1, 3) RETURNING *"] end it "should insert the correct rows into all tables with a given primary key" do e = Ceo.new(:num_managers=>3, :num_staff=>2, :name=>'E') e.id = 2 e.save @db.sqls.must_equal ["INSERT INTO employees (id, name, kind) VALUES (2, 'E', 'Ceo')", "INSERT INTO managers (id, num_staff) VALUES (2, 2)", "INSERT INTO executives (id, num_managers) VALUES (2, 3)"] end it "should update the correct rows in all tables when updating parent class" do Employee.load(:id=>2).update(:name=>'E') @db.sqls.must_equal ["UPDATE employees SET name = 'E' WHERE (id = 2)"] end it "should update the correct rows in all tables when updating subclass without separate table" do Intern.load(:id=>2).update(:name=>'E') @db.sqls.must_equal ["UPDATE employees SET name = 'E' WHERE (id = 2)"] end it "should update the correct rows in all tables when updating" do Ceo.load(:id=>2).update(:num_managers=>3, :num_staff=>2, :name=>'E') @db.sqls.must_equal ["UPDATE employees SET name = 'E' WHERE (id = 2)", "UPDATE managers SET num_staff = 2 WHERE (id = 2)", "UPDATE executives SET num_managers = 3 WHERE (id = 2)"] end it "should update only tables with changes when updating" do obj = Ceo.load(:id=>2, :num_managers=>3, :num_staff=>2, :name=>'E') obj.update(:num_staff=>3) @db.sqls.must_equal ["UPDATE managers SET num_staff = 3 WHERE (id = 2)"] end it "should raise error if one of the updates does not update a single row" do @db.numrows = [1, 0] proc{Ceo.load(:id=>2).update(:num_managers=>3, :num_staff=>2, :name=>'E')}.must_raise Sequel::NoExistingObject @db.sqls.must_equal ["UPDATE employees SET name = 'E' WHERE (id = 2)", "UPDATE managers SET num_staff = 2 WHERE (id = 2)"] end it "should handle many_to_one relationships correctly" do Manager.dataset = Manager.dataset.with_fetch(:id=>3, :name=>'E', :kind=>'Ceo', :num_managers=>3) Staff.load(:manager_id=>3).manager.must_equal Ceo.load(:id=>3, :name=>'E', :kind=>'Ceo', :num_managers=>3) @db.sqls.must_equal ['SELECT * FROM (SELECT employees.id, employees.name, employees.kind, managers.num_staff FROM employees INNER JOIN managers ON (managers.id = employees.id)) AS employees WHERE (id = 3) LIMIT 1'] end it "should handle one_to_many relationships correctly" do Staff.dataset = Staff.dataset.with_fetch(:id=>1, :name=>'S', :kind=>'Staff', :manager_id=>3) Ceo.load(:id=>3).staff_members.must_equal [Staff.load(:id=>1, :name=>'S', 
:kind=>'Staff', :manager_id=>3)] @db.sqls.must_equal ['SELECT * FROM (SELECT employees.id, employees.name, employees.kind, staff.manager_id FROM employees INNER JOIN staff ON (staff.id = employees.id)) AS employees WHERE (employees.manager_id = 3)'] end end describe "class_table_inheritance plugin without sti_key with :alias option" do before do @db = Sequel.mock(:numrows=>1, :autoid=>proc{|sql| 1}) def @db.supports_schema_parsing?() true end def @db.schema(table, opts={}) {:employees=>[[:id, {:primary_key=>true, :type=>:integer}], [:name, {:type=>:string}]], :managers=>[[:id, {:type=>:integer}], [:num_staff, {:type=>:integer}]], :executives=>[[:id, {:type=>:integer}], [:num_managers, {:type=>:integer}]], :staff=>[[:id, {:type=>:integer}], [:manager_id, {:type=>:integer}]], }[table.is_a?(Sequel::Dataset) ? table.first_source_table : table] end @db.extend_datasets do def columns {[:employees]=>[:id, :name], [:managers]=>[:id, :num_staff], [:executives]=>[:id, :num_managers], [:staff]=>[:id, :manager_id], [:employees, :managers]=>[:id, :name, :num_staff], [:employees, :managers, :executives]=>[:id, :name, :num_staff, :num_managers], [:employees, :staff]=>[:id, :name, :manager_id], }[opts[:from] + (opts[:join] || []).map{|x| x.table}] end end class ::Employee < Sequel::Model(@db) private def _save_refresh; @values[:id] = 1 end def self.columns dataset.columns || dataset.opts[:from].first.expression.columns end plugin :class_table_inheritance, :table_map=>{:Staff=>:staff}, :alias=>:emps end class ::Manager < Employee one_to_many :staff_members, :class=>:Staff end class ::Executive < Manager end class ::Staff < Employee many_to_one :manager end @ds = Employee.dataset @db.sqls end after do Object.send(:remove_const, :Executive) Object.send(:remove_const, :Manager) Object.send(:remove_const, :Staff) Object.send(:remove_const, :Employee) end it "should have simple_table = nil for all subclasses" do Manager.simple_table.must_be_nil Executive.simple_table.must_be_nil Staff.simple_table.must_be_nil end it "should have working row_proc if using set_dataset in subclass to remove columns" do Manager.set_dataset(Manager.dataset.select(*(Manager.columns - [:blah]))) Manager.dataset = Manager.dataset.with_fetch(:id=>1) Manager[1].must_equal Manager.load(:id=>1) end it "should use a joined dataset in subclasses" do Employee.dataset.sql.must_equal 'SELECT * FROM employees' Manager.dataset.sql.must_equal 'SELECT * FROM (SELECT employees.id, employees.name, managers.num_staff FROM employees INNER JOIN managers ON (managers.id = employees.id)) AS emps' Executive.dataset.sql.must_equal 'SELECT * FROM (SELECT employees.id, employees.name, managers.num_staff, executives.num_managers FROM employees INNER JOIN managers ON (managers.id = employees.id) INNER JOIN executives ON (executives.id = managers.id)) AS emps' Staff.dataset.sql.must_equal 'SELECT * FROM (SELECT employees.id, employees.name, staff.manager_id FROM employees INNER JOIN staff ON (staff.id = employees.id)) AS emps' end it "should return rows with the current class if sti_key is nil" do Employee.plugin(:class_table_inheritance) Employee.dataset = Employee.dataset.with_fetch([{}]) Employee.first.class.must_equal Employee end it "should include schema for columns for tables for ancestor classes" do Employee.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string}) Manager.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string}, :num_staff=>{:type=>:integer}) 
Executive.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string}, :num_staff=>{:type=>:integer}, :num_managers=>{:type=>:integer}) Staff.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string}, :manager_id=>{:type=>:integer}) end it "should use the correct primary key (which should have the same name in all subclasses)" do [Employee, Manager, Executive, Staff].each{|c| c.primary_key.must_equal :id} end it "should have table_name return the table name of the most specific table" do Employee.table_name.must_equal :employees Manager.table_name.must_equal :emps Executive.table_name.must_equal :emps Staff.table_name.must_equal :emps end it "should delete the correct rows from all tables when deleting" do Executive.load(:id=>1).delete @db.sqls.must_equal ["DELETE FROM executives WHERE (id = 1)", "DELETE FROM managers WHERE (id = 1)", "DELETE FROM employees WHERE (id = 1)"] end it "should not allow deletion of frozen object" do o = Executive.load(:id=>1) o.freeze proc{o.delete}.must_raise(Sequel::Error) @db.sqls.must_equal [] end it "should insert the correct rows into all tables when inserting" do Executive.create(:num_managers=>3, :num_staff=>2, :name=>'E') @db.sqls.must_equal ["INSERT INTO employees (name) VALUES ('E')", "INSERT INTO managers (id, num_staff) VALUES (1, 2)", "INSERT INTO executives (id, num_managers) VALUES (1, 3)"] end it "should insert the correct rows into all tables with a given primary key" do e = Executive.new(:num_managers=>3, :num_staff=>2, :name=>'E') e.id = 2 e.save @db.sqls.must_equal ["INSERT INTO employees (id, name) VALUES (2, 'E')", "INSERT INTO managers (id, num_staff) VALUES (2, 2)", "INSERT INTO executives (id, num_managers) VALUES (2, 3)"] end it "should update the correct rows in all tables when updating" do Executive.load(:id=>2).update(:num_managers=>3, :num_staff=>2, :name=>'E') @db.sqls.must_equal ["UPDATE employees SET name = 'E' WHERE (id = 2)", "UPDATE managers SET num_staff = 2 WHERE (id = 2)", "UPDATE executives SET num_managers = 3 WHERE (id = 2)"] end it "should handle many_to_one relationships correctly" do Manager.dataset = Manager.dataset.with_fetch(:id=>3, :name=>'E', :num_staff=>3) Staff.load(:manager_id=>3).manager.must_equal Manager.load(:id=>3, :name=>'E', :num_staff=>3) @db.sqls.must_equal ['SELECT * FROM (SELECT employees.id, employees.name, managers.num_staff FROM employees INNER JOIN managers ON (managers.id = employees.id)) AS emps WHERE (id = 3) LIMIT 1'] end it "should handle one_to_many relationships correctly" do Staff.dataset = Staff.dataset.with_fetch(:id=>1, :name=>'S', :manager_id=>3) Executive.load(:id=>3).staff_members.must_equal [Staff.load(:id=>1, :name=>'S', :manager_id=>3)] @db.sqls.must_equal ['SELECT * FROM (SELECT employees.id, employees.name, staff.manager_id FROM employees INNER JOIN staff ON (staff.id = employees.id)) AS emps WHERE (emps.manager_id = 3)'] end end describe "class_table_inheritance plugin with duplicate columns" do it "should raise error if no columns are explicitly ignored" do @db = Sequel.mock(:autoid=>proc{|sql| 1}) def @db.supports_schema_parsing?() true end def @db.schema(table, opts={}) {:employees=>[[:id, {:primary_key=>true, :type=>:integer}], [:name, {:type=>:string}], [:kind, {:type=>:string}]], :managers=>[[:id, {:type=>:integer}], [:name, {:type=>:string}]], }[table.is_a?(Sequel::Dataset) ? 
table.first_source_table : table] end @db.extend_datasets do def columns {[:employees]=>[:id, :name, :kind], [:managers]=>[:id, :name], }[opts[:from] + (opts[:join] || []).map{|x| x.table}] end end class ::Employee < Sequel::Model(@db) private def _save_refresh; @values[:id] = 1 end def self.columns dataset.columns || dataset.opts[:from].first.expression.columns end plugin :class_table_inheritance end proc{class ::Manager < Employee; end}.must_raise Sequel::Error end describe "with certain sub-class columns ignored" do before do @db = Sequel.mock(:autoid=>proc{|sql| 1}) def @db.supports_schema_parsing?() true end def @db.schema(table, opts={}) {:employees=>[[:id, {:primary_key=>true, :type=>:integer}], [:name, {:type=>:string}], [:kind, {:type=>:string}], [:updated_at, {:type=>:datetime}]], :managers=>[[:id, {:type=>:integer}], [:num_staff, {:type=>:integer}], [:updated_at, {:type=>:datetime}], [:another_duplicate_column, {:type=>:integer}]], :executives=>[[:id, {:type=>:integer}], [:num_managers, {:type=>:integer}], [:updated_at, {:type=>:datetime}], [:another_duplicate_column, {:type=>:integer}]], }[table.is_a?(Sequel::Dataset) ? table.first_source_table : table] end @db.extend_datasets do def columns {[:employees]=>[:id, :name, :kind, :updated_at], [:managers]=>[:id, :num_staff, :updated_at, :another_duplicate_column], [:executives]=>[:id, :num_managers, :updated_at, :another_duplicate_column], [:employees, :managers]=>[:id, :name, :kind, :updated_at, :num_staff], }[opts[:from] + (opts[:join] || []).map{|x| x.table}] end end class ::Employee < Sequel::Model(@db) private def _save_refresh; @values[:id] = 1 end def self.columns dataset.columns || dataset.opts[:from].first.expression.columns end plugin :class_table_inheritance, :ignore_subclass_columns=>[:updated_at] end class ::Manager < Employee Manager.cti_ignore_subclass_columns.push(:another_duplicate_column) end class ::Executive < Manager; end end it "should not use the ignored column in a sub-class subquery" do Employee.dataset.sql.must_equal 'SELECT * FROM employees' Manager.dataset.sql.must_equal 'SELECT * FROM (SELECT employees.id, employees.name, employees.kind, employees.updated_at, managers.num_staff, managers.another_duplicate_column FROM employees INNER JOIN managers ON (managers.id = employees.id)) AS employees' Executive.dataset.sql.must_equal 'SELECT * FROM (SELECT employees.id, employees.name, employees.kind, employees.updated_at, managers.num_staff, managers.another_duplicate_column, executives.num_managers FROM employees INNER JOIN managers ON (managers.id = employees.id) INNER JOIN executives ON (executives.id = managers.id)) AS employees' end it "should include schema for columns for tables for ancestor classes" do Employee.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string}, :kind=>{:type=>:string}, :updated_at=>{:type=>:datetime}) Manager.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string}, :kind=>{:type=>:string}, :updated_at=>{:type=>:datetime}, :num_staff=>{:type=>:integer}, :another_duplicate_column=>{:type=>:integer}) Executive.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string}, :kind=>{:type=>:string}, :updated_at=>{:type=>:datetime}, :num_staff=>{:type=>:integer}, :another_duplicate_column=>{:type=>:integer}, :num_managers=>{:type=>:integer}) end after do Object.send(:remove_const, :Executive) end end after do Object.send(:remove_const, :Manager) Object.send(:remove_const, :Employee) end end 
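# For reference, the :ignore_subclass_columns behavior verified above is
# configured as in the following sketch. The table and column names are the
# hypothetical ones used by these specs, and the example is kept as a
# comment so that nothing executes as part of the suite:
#
#   class Employee < Sequel::Model
#     plugin :class_table_inheritance, :ignore_subclass_columns=>[:updated_at]
#   end
#   class Manager < Employee
#     # Further duplicate columns can be appended per subclass, before
#     # defining the next subclass in the hierarchy:
#     cti_ignore_subclass_columns.push(:another_duplicate_column)
#   end
#   class Executive < Manager; end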
describe "class_table_inheritance plugin with dataset defined with QualifiedIdentifier" do before do @db = Sequel.mock(:numrows=>1, :autoid=>proc{|sql| 1}) def @db.supports_schema_parsing?() true end def @db.schema(table, opts={}) {Sequel[:hr][:employees]=>[[:id, {:primary_key=>true, :type=>:integer}], [:name, {:type=>:string}], [:kind, {:type=>:string}]], Sequel[:hr][:managers]=>[[:id, {:type=>:integer}]], Sequel[:hr][:staff]=>[[:id, {:type=>:integer}], [:manager_id, {:type=>:integer}]], Sequel[:hr][:executives]=>[[:id, {:type=>:integer}], [:num_managers, {:type=>:integer}]], }[table.is_a?(Sequel::Dataset) ? table.first_source_table : table] end @db.singleton_class.send(:alias_method, :schema, :schema) @db.extend_datasets do def columns {[Sequel[:hr][:employees]]=>[:id, :name, :kind], [Sequel[:hr][:managers]]=>[:id], [Sequel[:hr][:staff]]=>[:id, :manager_id], [Sequel[:hr][:employees], Sequel[:hr][:managers]]=>[:id, :name, :kind], [Sequel[:hr][:employees], Sequel[:hr][:staff]]=>[:id, :name, :kind, :manager_id], [Sequel[:hr][:employees], Sequel[:hr][:managers], Sequel[:hr][:executives]]=>[:id, :name, :kind, :manager_id, :num_managers], }[opts[:from] + (opts[:join] || []).map{|x| x.table}] end end end after do [:Manager, :Staff, :Employee, :Executive].each{|s| Object.send(:remove_const, s) if Object.const_defined?(s)} end describe "with table_map used to qualify subclasses" do before do ::Employee = Class.new(Sequel::Model) ::Employee.db = @db ::Employee.set_dataset(Sequel[:hr][:employees]) class ::Employee private def _save_refresh; @values[:id] = 1 end def self.columns dataset.columns || dataset.opts[:from].first.expression.columns end plugin :class_table_inheritance, :table_map=>{:Manager=>Sequel[:hr][:managers],:Staff=>Sequel[:hr][:staff]} end class ::Manager < Employee one_to_many :staff_members, :class=>:Staff end class ::Staff < Employee many_to_one :manager end end it "should handle many_to_one relationships correctly" do Manager.dataset = Manager.dataset.with_fetch(:id=>3, :name=>'E') Staff.load(:manager_id=>3).manager.must_equal Manager.load(:id=>3, :name=>'E') @db.sqls.must_equal ['SELECT * FROM (SELECT hr.employees.id, hr.employees.name, hr.employees.kind FROM hr.employees INNER JOIN hr.managers ON (hr.managers.id = hr.employees.id)) AS employees WHERE (id = 3) LIMIT 1'] end it "should handle one_to_many relationships correctly" do Staff.dataset = Staff.dataset.with_fetch(:id=>1, :name=>'S', :manager_id=>3) Manager.load(:id=>3).staff_members.must_equal [Staff.load(:id=>1, :name=>'S', :manager_id=>3)] @db.sqls.must_equal ['SELECT * FROM (SELECT hr.employees.id, hr.employees.name, hr.employees.kind, hr.staff.manager_id FROM hr.employees INNER JOIN hr.staff ON (hr.staff.id = hr.employees.id)) AS employees WHERE (employees.manager_id = 3)'] end end describe "without table_map or qualify_tables set" do it "should use a non-qualified subquery in subclasses" do def @db.schema(table, opts={}) {Sequel[:hr][:employees]=>[[:id, {:primary_key=>true, :type=>:integer}], [:name, {:type=>:string}], [:kind, {:type=>:string}]], :managers=>[[:id, {:type=>:integer}]], }[table.is_a?(Sequel::Dataset) ? 
table.first_source_table : table] end @db.extend_datasets do def columns {[Sequel[:hr][:employees]]=>[:id, :name, :kind], [:managers]=>[:id], [Sequel[:hr][:employees], :managers]=>[:id, :name, :kind] }[opts[:from] + (opts[:join] || []).map{|x| x.table}] end end ::Employee = Class.new(Sequel::Model) ::Employee.db = @db ::Employee.set_dataset(Sequel[:hr][:employees]) class ::Employee private def _save_refresh; @values[:id] = 1 end def self.columns dataset.columns || dataset.opts[:from].first.expression.columns end plugin :class_table_inheritance end class ::Manager < ::Employee end Employee.dataset.sql.must_equal 'SELECT * FROM hr.employees' Manager.dataset.sql.must_equal 'SELECT * FROM (SELECT hr.employees.id, hr.employees.name, hr.employees.kind FROM hr.employees INNER JOIN managers ON (managers.id = hr.employees.id)) AS employees' end end describe "with qualify_tables option set" do it "should use a subquery with the same qualifier in subclasses" do ::Employee = Class.new(Sequel::Model) ::Employee.db = @db ::Employee.set_dataset(Sequel[:hr][:employees]) class ::Employee private def _save_refresh; @values[:id] = 1 end def self.columns dataset.columns || dataset.opts[:from].first.expression.columns end plugin :class_table_inheritance, :table_map=>{:Staff=>Sequel[:hr][:staff]}, qualify_tables: true end class ::Manager < ::Employee one_to_many :staff_members, :class=>:Staff end class ::Staff < ::Employee many_to_one :manager end class ::Executive < ::Manager end Employee.dataset.sql.must_equal 'SELECT * FROM hr.employees' Manager.dataset.sql.must_equal 'SELECT * FROM (SELECT hr.employees.id, hr.employees.name, hr.employees.kind FROM hr.employees INNER JOIN hr.managers ON (hr.managers.id = hr.employees.id)) AS employees' Staff.dataset.sql.must_equal 'SELECT * FROM (SELECT hr.employees.id, hr.employees.name, hr.employees.kind, hr.staff.manager_id FROM hr.employees INNER JOIN hr.staff ON (hr.staff.id = hr.employees.id)) AS employees' Executive.dataset.sql.must_equal 'SELECT * FROM (SELECT hr.employees.id, hr.employees.name, hr.employees.kind, hr.executives.num_managers FROM hr.employees INNER JOIN hr.managers ON (hr.managers.id = hr.employees.id) INNER JOIN hr.executives ON (hr.executives.id = hr.managers.id)) AS employees' end end end describe "class_table_inheritance plugin with schema_caching extension" do before do @db = Sequel.mock(:autoid=>proc{|sql| 1}) def @db.supports_schema_parsing?() true end def @db.schema(table, opts={}) {:employees=>[[:id, {:primary_key=>true, :type=>:integer}], [:name, {:type=>:string}], [:kind, {:type=>:string}]], :managers=>[[:id, {:type=>:integer}], [:num_staff, {:type=>:integer}] ], :executives=>[[:id, {:type=>:integer}], [:num_managers, {:type=>:integer}]], }[table.is_a?(Sequel::Dataset) ? 
table.first_source_table : table] end end after do [:Executive, :Manager, :Employee, :Staff].each{|s| Object.send(:remove_const, s) if Object.const_defined?(s)} end it "should not query for columns if the schema cache is present and a table_map is given" do class ::Employee < Sequel::Model(@db) plugin :class_table_inheritance, :table_map=>{:Staff=>:employees, :Manager=>:managers, :Executive=>:executives} end class ::Staff < Employee; end class ::Manager < Employee; end class ::Executive < Manager; end Employee.columns.must_equal [:id, :name, :kind] Staff.columns.must_equal [:id, :name, :kind] Manager.columns.must_equal [:id, :name, :kind, :num_staff] Executive.columns.must_equal [:id, :name, :kind, :num_staff, :num_managers] @db.sqls.must_equal [] end it "should not query for columns if the schema cache is present and no table_map is given" do class ::Employee < Sequel::Model(@db) plugin :class_table_inheritance end class ::Manager < Employee; end class ::Executive < Manager; end @db.sqls.must_equal [] end end
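The schema_caching interaction verified above matters mostly for boot time: when a schema cache is loaded before the model classes are defined, class_table_inheritance can resolve each table's columns without issuing queries. A minimal sketch of that ordering follows; Sequel.mock stands in for a real connection, and 'schema.cache' is a placeholder filename.

require 'sequel'

db = Sequel.mock # placeholder for Sequel.connect(...)
db.extension :schema_caching

# Load the cache from a previous boot, if one was written.
db.load_schema_cache('schema.cache') if File.exist?('schema.cache')

# ... define the Sequel::Model subclasses here, only after the cache
# is in place, so no column queries are needed ...

# At shutdown (or in a deploy step), persist the cache for the next boot.
db.dump_schema_cache('schema.cache')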
sequel-5.63.0/spec/extensions/column_conflicts_spec.rb

require_relative "spec_helper" describe "column_conflicts plugin" do before do @db = Sequel.mock @c = Class.new(Sequel::Model(@db[:test])) @c.columns :model, :use_transactions, :foo @c.plugin :column_conflicts @o = @c.load(:model=>1, :use_transactions=>2, :foo=>4) end it "should have mass assignment work correctly" do @o.set_fields({:use_transactions=>3}, [:use_transactions]) @o.get_column_value(:use_transactions).must_equal 3 end it "should handle both symbols and strings" do @o.get_column_value(:model).must_equal 1 @o.get_column_value("model").must_equal 1 @o.set_column_value(:use_transactions=, 3) @o.get_column_value(:use_transactions).must_equal 3 @o.set_column_value(:use_transactions=, 4) @o.get_column_value(:use_transactions).must_equal 4 end it "should work correctly if there are no conflicts" do @o.get_column_value(:foo).must_equal 4 @o.set_column_value(:model=, 2).must_equal 2 end it "should allow manual setting of conflicted columns" do @c.send(:define_method, :foo){raise} @c.get_column_conflict!(:foo) @o.get_column_value(:foo).must_equal 4 @c.send(:define_method, :model=){raise} @c.set_column_conflict!(:model) @o.set_column_value(:model=, 2).must_equal 2 @o.get_column_value(:model).must_equal 2 end it "should not erase existing column conflicts when loading the plugin" do @c.send(:define_method, :foo){raise} @c.send(:define_method, :model=){raise} @c.get_column_conflict!(:foo) @c.set_column_conflict!(:model) @c.plugin :column_conflicts @o.get_column_value(:foo).must_equal 4 @o.set_column_value(:model=, 2).must_equal 2 @o.get_column_value(:model).must_equal 2 end it "should work correctly in subclasses" do @o = Class.new(@c).load(:model=>1, :use_transactions=>2) @o.get_column_value(:model).must_equal 1 @o.get_column_value("model").must_equal 1 @o.set_column_value(:use_transactions=, 3) @o.get_column_value(:use_transactions).must_equal 3 @o.set_column_value(:use_transactions=, 4) @o.get_column_value(:use_transactions).must_equal 4 end it "should work correctly for dataset changes" do @c.dataset = @db[:test].with_extend{def columns; [:object_id] end} o = @c.load(:object_id=>3) o.get_column_value(:object_id).must_equal 3 o.object_id.wont_equal 3 end it "should work correctly if loaded before dataset is set" do @c = Class.new(Sequel::Model) @c.plugin :column_conflicts @c.columns :model, :use_transactions, :foo @c.dataset = @db[:test] @o = @c.load(:model=>1, :use_transactions=>2, :foo=>4) @o.set_fields({:use_transactions=>3}, [:use_transactions]) @o.get_column_value(:use_transactions).must_equal 3 end it "should freeze column conflict information when freezing model class" do @c.freeze @c.get_column_conflicts.frozen?.must_equal true @c.set_column_conflicts.frozen?.must_equal true end end

sequel-5.63.0/spec/extensions/column_encryption_spec.rb

require_relative "spec_helper" describe "column_encryption plugin" do def have_matching_search(ds, obj) ds.sql.gsub("\\_", "_").include?("'#{obj[:enc][0, 48]}%'") end before do @db = Sequel.mock(:numrows=>1) @model = Class.new(Sequel::Model) @model.set_dataset @db[:ce_test] @model.columns :id, :not_enc, :enc @model.plugin :column_encryption do |enc| enc.key 0, "0"*32 enc.column :enc end @obj = @model.new(:not_enc=>'123', :enc=>'Abc') @obj.valid?
@db.fetch = {:id=>1, :not_enc=>'123', :enc=>@obj[:enc]} @obj.save @db.sqls end it "should store columns encrypted" do @obj.not_enc.must_equal '123' @obj[:not_enc].must_equal '123' @obj.enc.must_equal 'Abc' @obj[:enc].start_with?('AAAA').must_equal true end it "should support searching encrypted columns" do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal false @obj.reencrypt have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'abc'), @obj).must_equal false end it "should support case insensitive searching encrypted columns" do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive end have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal false @obj.reencrypt have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'abc'), @obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'Abd'), @obj).must_equal false end it "should support searching columns encrypted with previous keys" do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end @obj.reencrypt @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true do |cenc| cenc.key 1, "1"*32 cenc.key 0, "0"*32 end end have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'abc'), @obj).must_equal false @obj[:enc].start_with?('AQAA').must_equal true @obj.reencrypt have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'abc'), @obj).must_equal false @obj.enc.must_equal 'Abc' @obj[:enc].start_with?('AQAB').must_equal true end it "should support case insensitive searching columns encrypted with previous keys" do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive end @obj.reencrypt @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive do |cenc| cenc.key 1, "1"*32 cenc.key 0, "0"*32 end end have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'abc'), @obj).must_equal true @obj[:enc].start_with?('AgAA').must_equal true @obj.reencrypt have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'abc'), @obj).must_equal true @obj.enc.must_equal 'Abc' @obj[:enc].start_with?('AgAB').must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'Abd'), @obj).must_equal false end it "should support searching columns encrypted with previous keys and different case sensitivity setting" do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end @obj.reencrypt obj2 = @model.new(:not_enc=>'234', :enc=>'Def') obj2.valid? 
def obj2.save(*) end @model.plugin :column_encryption do |enc| enc.key 1, "1"*32 enc.key 0, "0"*32 enc.column :enc, :searchable=>:case_insensitive end have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal false have_matching_search(@model.with_encrypted_value(:enc, 'Def'), obj2).must_equal false @model.plugin :column_encryption do |enc| enc.key 2, "2"*32 enc.key 1, "1"*32 enc.key 0, "0"*32 enc.column :enc, :searchable=>:case_insensitive, :search_both=>true end have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'abc'), @obj).must_equal false have_matching_search(@model.with_encrypted_value(:enc, 'Def'), obj2).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'def'), obj2).must_equal false @obj.reencrypt have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'abc'), @obj).must_equal true @obj[:enc].start_with?('AgAC').must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'Def'), obj2).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'def'), obj2).must_equal false obj2.reencrypt have_matching_search(@model.with_encrypted_value(:enc, 'Def'), obj2).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'def'), obj2).must_equal true obj2[:enc].start_with?('AgAC').must_equal true @model.plugin :column_encryption do |enc| enc.key 3, "3"*32 enc.key 2, "2"*32 enc.key 1, "1"*32 enc.key 0, "0"*32 enc.column :enc, :searchable=>true, :search_both=>true end have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'abc'), @obj).must_equal true @obj.reencrypt have_matching_search(@model.with_encrypted_value(:enc, 'Abc'), @obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'abc'), @obj).must_equal false @obj[:enc].start_with?('AQAD').must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'Abd'), @obj).must_equal false end it "should keep existing column encryption keys when reloading the plugin without keys" do @model.plugin(:column_encryption) do |enc| enc.column :enc end @obj.deserialized_values.clear @obj.enc.must_equal "Abc" end it "should clear existing column encryption keys when reloading the plugin with keys" do @model.plugin(:column_encryption) do |enc| enc.key 1, "1"*32 enc.column :enc end @obj.deserialized_values.clear proc{@obj.enc}.must_raise Sequel::Error end it "should not affect existing column encryption keys when reloading the plugin with keys" do @model.plugin(:column_encryption) do |enc| enc.key 1, "1"*32 enc.column :not_enc end @obj.deserialized_values.clear @obj.enc.must_equal "Abc" end it "should raise an error when trying to decrypt with missing key" do @model.plugin :column_encryption do |enc| enc.key 1, "1"*32 enc.column :enc, :searchable=>true end obj = @model.first proc{obj.enc}.must_raise Sequel::Error end it "should raise an error when trying to decrypt without any keys set" do @model = Class.new(Sequel::Model) @model.set_dataset @db[:ce_test] @model.columns :id, :not_enc, :enc proc do @model.plugin :column_encryption do |enc| enc.column :enc end end.must_raise Sequel::Error end it "should raise an error when trying to decrypt with invalid key" do @model.plugin :column_encryption do |enc| enc.key 0, "1"*32 enc.column :enc, :searchable=>true end obj = @model.first 
proc{obj.enc}.must_raise end it "should raise an error when trying to decrypt with invalid auth data" do @model.plugin :column_encryption do |enc| enc.key 0, "0"*32, :auth_data=>'Foo' enc.column :enc, :searchable=>true end obj = @model.first proc{obj.enc}.must_raise Sequel::Error obj = @model.new(:enc=>"Abc") obj.valid? obj.deserialized_values.clear obj.enc.must_equal "Abc" end it "should support a configurable amount of padding" do @model.plugin :column_encryption do |enc| enc.key 1, "0"*32, :padding=>110 enc.key 0, "0"*32 enc.column :enc end encrypt_len = @obj[:enc].bytesize @obj.reencrypt @obj[:enc].bytesize.must_be(:>, encrypt_len + 100) end it "should support not using padding" do @model.plugin :column_encryption do |enc| enc.key 1, "0"*32, :padding=>false enc.key 0, "0"*32 enc.column :enc end encrypt_len = @obj[:enc].bytesize @obj.reencrypt @obj[:enc].bytesize.must_be(:<, encrypt_len) end it "should support reencrypting rows that need reencryption" do obj = @model.new(:not_enc=>'234', :enc=>'Def') obj.valid? def obj.save(*); end need_reencrypt = lambda do sql = @model.needing_reencryption.sql [@obj, obj].reject{|o| sql.include?("< '#{o[:enc][0, 4]}'") && sql.include?("> '#{o[:enc][0, 4]}B'") }.length end need_reencrypt.call.must_equal 0 @model.plugin :column_encryption do |enc| enc.key 1, "1"*32 enc.key 0, "0"*32 enc.column :enc end need_reencrypt.call.must_equal 2 @obj.reencrypt need_reencrypt.call.must_equal 1 obj.reencrypt need_reencrypt.call.must_equal 0 @model.plugin :column_encryption do |enc| enc.key 1, "1"*32 enc.column :enc end need_reencrypt.call.must_equal 0 @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end need_reencrypt.call.must_equal 2 @obj.reencrypt need_reencrypt.call.must_equal 1 obj.reencrypt need_reencrypt.call.must_equal 0 @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive end need_reencrypt.call.must_equal 2 @obj.reencrypt need_reencrypt.call.must_equal 1 obj.reencrypt need_reencrypt.call.must_equal 0 end it "should not support searching encrypted columns not marked searchable" do proc{@model.with_encrypted_value(:enc, 'Abc')}.must_raise Sequel::Error end it "should not allow column encryption configuration with empty keys" do proc do @model.plugin :column_encryption do |enc| enc.column(:enc){} end end.must_raise Sequel::Error end it "should not allow column encryption configuration with invalid :searchable option" do proc do @model.plugin :column_encryption do |enc| enc.column(:enc, :searchable=>Object.new) end end.must_raise Sequel::Error end it "should require key ids are integers between 0 and 255" do proc do @model.plugin :column_encryption do |enc| enc.key Object.new, "1"*32 end end.must_raise Sequel::Error proc do @model.plugin :column_encryption do |enc| enc.key(-1, "1"*32) end end.must_raise Sequel::Error proc do @model.plugin :column_encryption do |enc| enc.key 256, "1"*32 end end.must_raise Sequel::Error end it "should require keys are strings with 32 bytes" do proc do @model.plugin :column_encryption do |enc| enc.key 0, Object.new end end.must_raise Sequel::Error proc do @model.plugin :column_encryption do |enc| enc.key 0, "1"*31 end end.must_raise Sequel::Error proc do @model.plugin :column_encryption do |enc| enc.key 0, "1"*33 end end.must_raise Sequel::Error end it "should require padding is integer between 1 and 120" do proc do @model.plugin :column_encryption do |enc| enc.key 1, "1"*32, :padding=>Object.new end end.must_raise Sequel::Error proc do @model.plugin 
:column_encryption do |enc| enc.key 1, "1"*32, :padding=>0 end end.must_raise Sequel::Error proc do @model.plugin :column_encryption do |enc| enc.key 1, "1"*32, :padding=>121 end end.must_raise Sequel::Error end it "should handle empty data" do @obj.enc = '' @obj.valid? @obj.enc.must_equal '' @obj[:enc].start_with?('AAAA').must_equal true end it "should check for errors during decryption" do @obj.deserialized_values.clear enc = @obj[:enc].dup @obj[:enc] = enc.dup.tap{|x| x[0] = '%'} proc{@obj.enc}.must_raise Sequel::Error # invalid base-64 @obj[:enc] = enc.dup.tap{|x| x[0,4] = Base64.urlsafe_encode64("\4\0\0")} proc{@obj.enc}.must_raise Sequel::Error # invalid flags @obj[:enc] = enc.dup.tap{|x| x[0,4] = Base64.urlsafe_encode64("\0\1\0")} proc{@obj.enc}.must_raise Sequel::Error # invalid reserved byte @obj[:enc] = enc.dup.tap{|x| x[0,4] = Base64.urlsafe_encode64("\0\0\1")} proc{@obj.enc}.must_raise Sequel::Error # invalid key id @obj[:enc] = enc.dup.tap{|x| x[0,4] = Base64.urlsafe_encode64("\1\0\0")} proc{@obj.enc}.must_raise Sequel::Error # invalid minimum size for searchable @obj[:enc] = enc.dup.tap{|x| x.slice!(60, 1000)} proc{@obj.enc}.must_raise Sequel::Error # invalid minimum size for nonsearchable @obj[:enc] = enc.dup.tap{|x| x[63..-3] = x[63..-3].reverse} proc{@obj.enc}.must_raise Sequel::Error # corrupt encrypted data end it "should work in subclasses" do sc = Class.new(@model) obj = sc.first obj.not_enc.must_equal '123' obj[:not_enc].must_equal '123' obj.enc.must_equal 'Abc' obj[:enc].start_with?('AAAA').must_equal true sc.plugin :column_encryption do |enc| enc.key 1, "1"*32 enc.column :not_enc end obj = sc.new obj.not_enc = "123" obj.not_enc.must_equal '123' obj.valid? obj[:not_enc].start_with?('AAAB').must_equal true obj = @model.first obj.not_enc.must_equal '123' obj[:not_enc].must_equal '123' end it "#reencrypt should save only if it modified a column" do @obj.reencrypt.must_be_nil @model.plugin :column_encryption do |enc| enc.column :not_enc end obj = @model.new(:not_enc=>'123', :enc=>'Abc') obj.valid? def obj.save(*) self; end obj.reencrypt.must_be_nil @model.plugin :column_encryption do |enc| enc.column :not_enc do |cenc| cenc.key 2, "2"*32 cenc.key 0, "0"*32 end end obj.reencrypt.must_be_same_as obj obj[:not_enc].start_with?('AAAC').must_equal true obj[:enc].start_with?('AAAA').must_equal true @model.plugin :column_encryption do |enc| enc.column :enc do |cenc| cenc.key 1, "1"*32 cenc.key 0, "0"*32 end end obj.reencrypt.must_be_same_as obj obj[:not_enc].start_with?('AAAC').must_equal true obj[:enc].start_with?('AAAB').must_equal true @model.plugin :column_encryption do |enc| enc.key 3, "3"*32 enc.key 2, "2"*32 enc.key 1, "1"*32 enc.key 0, "0"*32 enc.column :not_enc enc.column :enc end obj.reencrypt.must_be_same_as obj obj[:not_enc].start_with?('AAAD').must_equal true obj[:enc].start_with?('AAAD').must_equal true obj[:enc] = nil obj.reencrypt.must_be_nil end it "should support encrypted columns with a registered serialization format" do require 'json' @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true, :format=>:json end obj = @model.new(:not_enc=>'123', :enc=>{'a'=>1}) obj.id = 1 obj.valid? 
@db.fetch = obj.values @model[obj.id].enc['a'].must_equal 1 obj[:enc].start_with?('AQAA').must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>1), obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>2), obj).must_equal false @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true, :format=>:json do |cenc| cenc.key 1, "1"*32 cenc.key 0, "0"*32 end end @model[obj.id].enc['a'].must_equal 1 obj[:enc].start_with?('AQAA').must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>1), obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>2), obj).must_equal false db = @db obj.define_singleton_method(:save) do |*| valid? db.fetch = values end obj.reencrypt @model[obj.id].enc['a'].must_equal 1 obj[:enc].start_with?('AQAB').must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>1), obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>2), obj).must_equal false end it "should support encrypted columns with a custom serialization format" do require 'json' @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true, :format=>[:to_json.to_proc, JSON.method(:parse)] end obj = @model.new(:not_enc=>'123', :enc=>{'a'=>1}) obj.id = 1 obj.valid? @db.fetch = obj.values @model[obj.id].enc['a'].must_equal 1 obj[:enc].start_with?('AQAA').must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>1), obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>2), obj).must_equal false @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true, :format=>[:to_json.to_proc, JSON.method(:parse)] do |cenc| cenc.key 1, "1"*32 cenc.key 0, "0"*32 end end @model[obj.id].enc['a'].must_equal 1 obj[:enc].start_with?('AQAA').must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>1), obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>2), obj).must_equal false db = @db obj.define_singleton_method(:save) do |*| valid? 
db.fetch = values end obj.reencrypt @model[obj.id].enc['a'].must_equal 1 obj[:enc].start_with?('AQAB').must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>1), obj).must_equal true have_matching_search(@model.with_encrypted_value(:enc, 'a'=>2), obj).must_equal false end it "should raise an error if trying to use an unregistered serialization format" do proc do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true, :format=>:test_ce end end.must_raise Sequel::Error end end if RUBY_VERSION >= '2.3' && (begin; require 'sequel/plugins/column_encryption'; true; rescue LoadError; false end)
sequel-5.63.0/spec/extensions/column_select_spec.rb000066400000000000000000000123601434214120600224260ustar00rootroot00000000000000
require_relative "spec_helper" describe "Sequel::Plugins::ColumnSelect" do def set_cols(*cols) @cols.replace(cols) end before do cols = @cols = [] @db = Sequel.mock @db.extend_datasets(Module.new{define_method(:columns){cols}}) set_cols :id, :a, :b, :c @Album = Class.new(Sequel::Model(@db[:albums])) end it "should add an explicit column selection to an existing dataset without an explicit selection" do @Album.plugin :column_select @Album.dataset.sql.must_equal 'SELECT albums.id, albums.a, albums.b, albums.c FROM albums' @Album.dataset = :albs @Album.dataset.sql.must_equal 'SELECT albs.id, albs.a, albs.b, albs.c FROM albs' @Album.dataset = Sequel.identifier(:albs) @Album.dataset.sql.must_equal 'SELECT albs.id, albs.a, albs.b, albs.c FROM albs' end with_symbol_splitting "should handle splittable symbols" do @Album.dataset = :s__albums @Album.plugin :column_select @Album.dataset.sql.must_equal 'SELECT s.albums.id, s.albums.a, s.albums.b, s.albums.c FROM s.albums' @Album.dataset = :albums___a @Album.dataset.sql.must_equal 'SELECT a.id, a.a, a.b, a.c FROM albums AS a' @Album.dataset = :s__albums___a @Album.dataset.sql.must_equal 'SELECT a.id, a.a, a.b, a.c FROM s.albums AS a' end it "should handle qualified tables" do @Album.dataset = Sequel.qualify(:s2, :albums) @Album.plugin :column_select @Album.dataset.sql.must_equal 'SELECT s2.albums.id, s2.albums.a, s2.albums.b, s2.albums.c FROM s2.albums' end it "should handle aliases" do @Album.plugin :column_select @Album.dataset = Sequel.as(:albums, :b) @Album.dataset.sql.must_equal 'SELECT b.id, b.a, b.b, b.c FROM albums AS b' @Album.dataset = @Album.db[:albums].from_self @Album.dataset.sql.must_equal 'SELECT t1.id, t1.a, t1.b, t1.c FROM (SELECT * FROM albums) AS t1' @Album.dataset = Sequel.as(@Album.db[:albums], :b) @Album.dataset.sql.must_equal 'SELECT b.id, b.a, b.b, b.c FROM (SELECT * FROM albums) AS b' end it "should not add an explicit column selection on an existing dataset with an explicit selection" do @Album.dataset = @Album.dataset.select(:name) @Album.plugin :column_select @Album.dataset.sql.must_equal 'SELECT name FROM albums' @Album.dataset = @Album.dataset.select(:name, :artist) @Album.dataset.sql.must_equal 'SELECT name, artist FROM albums' end it "should work with implicit subqueries used for joined datasets" do @Album.dataset = @Album.db.from(:a1, :a2) @Album.plugin :column_select @Album.dataset.sql.must_equal "SELECT a1.id, a1.a, a1.b, a1.c FROM (SELECT * FROM a1, a2) AS a1" @Album.dataset = @Album.db.from(:a1).cross_join(:a2) @Album.dataset.sql.must_equal "SELECT a1.id, a1.a, a1.b, a1.c FROM (SELECT * FROM a1 CROSS JOIN a2) AS a1" end it "should add an explicit column selection on an existing dataset with a subquery" do @Album.dataset = @Album.db.from(:a1, :a2).from_self(:alias=>:foo) @Album.plugin :column_select @Album.dataset.sql.must_equal 'SELECT foo.id, foo.a, foo.b, foo.c FROM (SELECT * FROM a1, a2) AS foo' @Album.dataset = @Album.db.from(:a1).cross_join(:a2).from_self(:alias=>:foo) @Album.dataset.sql.must_equal 'SELECT foo.id, foo.a, foo.b, foo.c FROM (SELECT * FROM a1 CROSS JOIN a2) AS foo' end it "should use explicit column selection for many_to_many associations" do @Album.plugin :column_select @Album.many_to_many :albums, :class=>@Album, :left_key=>:l, :right_key=>:r, :join_table=>:j @Album.load(:id=>1).albums_dataset.sql.must_equal 'SELECT albums.id, albums.a, albums.b, albums.c FROM albums INNER JOIN j ON (j.r = albums.id) WHERE (j.l = 1)' end it "should not set an explicit column selection for many_to_many associations when overriding select" do @Album.plugin :column_select @Album.dataset = @Album.dataset.select(:a) @Album.many_to_many :albums, :class=>@Album, :left_key=>:l, :right_key=>:r, :join_table=>:j @Album.load(:id=>1).albums_dataset.sql.must_equal 'SELECT albums.* FROM albums INNER JOIN j ON (j.r = albums.id) WHERE (j.l = 1)' end it "should use the schema to get columns if available" do def @db.supports_schema_parsing?() true end def @db.schema(t, *) [[:t, {}], [:d, {}]] end @Album.plugin :column_select @Album.dataset.sql.must_equal 'SELECT albums.t, albums.d FROM albums' end it "should handle case where schema parsing does not produce results" do def @db.supports_schema_parsing?() true end def @db.schema_parse_table(t, *) [] end @Album.plugin :column_select @Album.dataset.sql.must_equal 'SELECT albums.id, albums.a, albums.b, albums.c FROM albums' end it "should handle case where schema parsing and columns do not produce results" do def @db.supports_schema_parsing?() true end def @db.schema_parse_table(t, *) [] end @db.extend_datasets{def columns; raise Sequel::DatabaseError; end} @Album.require_valid_table = false @Album.plugin :column_select @Album.dataset.sql.must_equal 'SELECT * FROM albums' end it "works correctly when loaded on model without a dataset" do c = Class.new(Sequel::Model) c.plugin :column_select sc = Class.new(c) sc.dataset = @db[:a] sc.dataset.sql.must_equal "SELECT a.id, a.a, a.b, a.c FROM a" end end
sequel-5.63.0/spec/extensions/columns_introspection_spec.rb000066400000000000000000000062501434214120600242330ustar00rootroot00000000000000
require_relative "spec_helper"
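# A quick usage sketch (not from the original source; DB stands in for any
# Sequel::Database): the columns_introspection extension tries to work out a
# dataset's columns from the selected expressions, skipping the LIMIT 0
# probe query Sequel would otherwise issue:
#
#   DB.extension :columns_introspection
#   DB[:albums].select(:id, :name).columns  # => [:id, :name], no query issued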
Sequel.extension :columns_introspection describe "columns_introspection extension" do before do @db = Sequel.mock.extension(:columns_introspection) @ds = @db[:a] @db.sqls end it "should not issue a database query if the columns are already loaded" do @ds.send(:columns=, [:x]) @ds.columns.must_equal [:x] @db.sqls.length.must_equal 0 end it "should handle plain symbols without a database query" do @ds.select(:x).columns.must_equal [:x] @db.sqls.length.must_equal 0 end with_symbol_splitting "should handle qualified symbols without a database query" do @ds.select(:t__x).columns.must_equal [:x] @db.sqls.length.must_equal 0 end with_symbol_splitting "should handle aliased symbols without a database query" do @ds.select(:x___a).columns.must_equal [:a] @db.sqls.length.must_equal 0 end with_symbol_splitting "should handle qualified and aliased symbols without a database query" do @ds.select(:t__x___a).columns.must_equal [:a] @db.sqls.length.must_equal 0 end it "should handle SQL::Identifiers " do @ds.select(Sequel.identifier(:x)).columns.must_equal [:x] @db.sqls.length.must_equal 0 end it "should handle SQL::QualifiedIdentifiers" do @ds.select(Sequel.qualify(:t, :x)).columns.must_equal [:x] @ds.select(Sequel.identifier(:x).qualify(:t)).columns.must_equal [:x] @db.sqls.length.must_equal 0 end it "should handle SQL::AliasedExpressions" do @ds.select(Sequel.as(:x, :a)).columns.must_equal [:a] @ds.select(Sequel.as(:x, Sequel.identifier(:a))).columns.must_equal [:a] @db.sqls.length.must_equal 0 end it "should handle LiteralStrings in FROM tables by issuing a query" do @ds.from(Sequel.lit('x')).columns.must_equal [] @db.sqls.must_equal ["SELECT * FROM x LIMIT 0"] end it "should handle selecting * from a single subselect with no joins without a database query if the subselect's columns can be handled" do @ds.select(:x).from_self.columns.must_equal [:x] @db.sqls.length.must_equal 0 @ds.select(:x).from_self.from_self.columns.must_equal [:x] @db.sqls.length.must_equal 0 end it "should handle selecting * from a single table with no joins without a database query if the database has cached schema columns for the table" do @db.instance_variable_set(:@schemas, "a"=>[[:x, {}]]) @ds.columns.must_equal [:x] @db.sqls.length.must_equal 0 end it "should issue a database query for multiple subselects or joins" do @ds.from(@ds.select(:x), @ds.select(:y)).columns @db.sqls.length.must_equal 1 @ds.select(:x).from_self.natural_join(:a).columns @db.sqls.length.must_equal 1 end it "should issue a database query when common table expressions are used" do @db.instance_variable_set(:@schemas, "a"=>[[:x, {}]]) @ds.with_extend{def supports_cte?(*) true end}.with(:a, @ds).columns @db.sqls.length.must_equal 1 end it "should issue a database query if the wildcard is selected" do @ds.columns @db.sqls.length.must_equal 1 end it "should issue a database query if an unsupported type is used" do @ds.select(1).columns @db.sqls.length.must_equal 1 end end 
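# As the specs above show, only simple selections (symbols, identifiers,
# qualified/aliased identifiers, and plain subselects) are introspected;
# anything else still falls back to a probe query. A hedged example, assuming
# DB is a Sequel::Database with this extension loaded:
#
#   DB[:a].select(1).columns  # unsupported selection type, issues a query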
sequel-5.63.0/spec/extensions/columns_updated_spec.rb000066400000000000000000000015071434214120600227610ustar00rootroot00000000000000
require_relative "spec_helper" describe "Sequel::Plugins::ColumnsUpdated" do before do @c = Class.new(Sequel::Model(DB[:items].with_autoid(13))) @c.columns :id, :x, :y @c.plugin :columns_updated end it "should make hash used for updating available in columns_updated until after hooks finish running" do res = nil @c.send(:define_method, :after_save){res = columns_updated} o = @c.new(:x => 1, :y => nil) o[:x] = 2 o.save res.must_be_nil o.after_save res.must_be_nil o = @c.load(:id => 23,:x => 1, :y => nil) o[:x] = 2 o.save res.must_equal(:x=>2, :y=>nil) o.after_save res.must_be_nil o = @c.load(:id => 23,:x => 2, :y => nil) o[:x] = 2 o[:y] = 22 o.save(:columns=>:x) res.must_equal(:x=>2) o.after_save res.must_be_nil end end
sequel-5.63.0/spec/extensions/composition_spec.rb000066400000000000000000000220231434214120600221320ustar00rootroot00000000000000
require_relative "spec_helper" describe "Composition plugin" do before do @c = Class.new(Sequel::Model(:items)) @c.plugin :composition @c.columns :id, :year, :month, :day @o = @c.load(:id=>1, :year=>1, :month=>2, :day=>3) DB.reset end it ".composition should add compositions" do @o.wont_respond_to(:date) @c.composition :date, :mapping=>[:year, :month, :day] @o.date.must_equal Date.new(1, 2, 3) end it "loading the plugin twice should not remove existing compositions" do @c.composition :date, :mapping=>[:year, :month, :day] @c.plugin :composition @c.compositions.keys.must_equal [:date] end it ".composition should raise an error if :composer and :decomposer options are not present and :mapping option is not provided" do proc{@c.composition :date}.must_raise(Sequel::Error) @c.composition :date, :composer=>proc{}, :decomposer=>proc{} @c.composition :date, :mapping=>[] end it "should handle validations of underlying columns" do @c.composition :date, :mapping=>[:year, :month, :day] o = @c.new def o.validate [:year, :month, :day].each{|c| errors.add(c, "not present") unless send(c)} end o.valid?.must_equal false o.date = Date.new(1, 2, 3) o.valid?.must_equal true end it "should have decomposer work with column_conflicts plugin" do @c.plugin :column_conflicts
@c.set_column_conflict! :year @c.composition :date, :mapping=>[:year, :month, :day] o = @c.new def o.validate [:year, :month, :day].each{|c| errors.add(c, "not present") unless send(c)} end o.valid?.must_equal false o.date = Date.new(1, 2, 3) o.valid?.must_equal true end it "should set column values even when not validating" do @c.composition :date, :mapping=>[:year, :month, :day] @c.load(id: 1).set(:date=>Date.new(4, 8, 12)).save(:validate=>false) DB.sqls.must_equal ['UPDATE items SET year = 4, month = 8, day = 12 WHERE (id = 1)'] end it ".compositions should return the reflection hash of compositions" do @c.compositions.must_equal({}) @c.composition :date, :mapping=>[:year, :month, :day] @c.compositions.keys.must_equal [:date] r = @c.compositions.values.first r[:mapping].must_equal [:year, :month, :day] r[:composer].must_be_kind_of Proc r[:decomposer].must_be_kind_of Proc end it "#compositions should be a hash of cached values of compositions" do @o.compositions.must_equal({}) @c.composition :date, :mapping=>[:year, :month, :day] @o.date @o.compositions.must_equal(:date=>Date.new(1, 2, 3)) end it "should work with custom :composer and :decomposer options" do @c.composition :date, :composer=>proc{Date.new(year+1, month+2, day+3)}, :decomposer=>proc{[:year, :month, :day].each{|s| self.send("#{s}=", date.send(s) * 2)}} @o.date.must_equal Date.new(2, 4, 6) @o.save DB.sqls.must_equal ['UPDATE items SET year = 4, month = 8, day = 12 WHERE (id = 1)'] end it "should work with custom :composer and :decomposer options when :mapping option provided" do @c.composition :date, :composer=>proc{Date.new(year+1, month+2, day+3)}, :decomposer=>proc{[:year, :month, :day].each{|s| self.send("#{s}=", date.send(s) * 2)}}, :mapping=>[:year, :month, :day] @o.date.must_equal Date.new(2, 4, 6) @o.save DB.sqls.must_equal ['UPDATE items SET year = 4, month = 8, day = 12 WHERE (id = 1)'] end it "should allow call super in composition getter and setter method definition in class" do @c.composition :date, :mapping=>[:year, :month, :day] @c.class_eval do def date super + 1 end def date=(v) super(v - 3) end end @o.date.must_equal Date.new(1, 2, 4) @o.compositions[:date].must_equal Date.new(1, 2, 3) @o.date = Date.new(1, 3, 5) @o.compositions[:date].must_equal Date.new(1, 3, 2) @o.date.must_equal Date.new(1, 3, 3) end it "should mark the object as modified whenever the composition is set" do @c.composition :date, :mapping=>[:year, :month, :day] @o.modified?.must_equal false @o.date = Date.new(3, 4, 5) @o.modified?.must_equal true end it "should only decompose existing compositions" do called = false @c.composition :date, :composer=>proc{}, :decomposer=>proc{called = true} called.must_equal false @o.save called.must_equal false @o.date = Date.new(1,2,3) called.must_equal false @o.save_changes called.must_equal true end it "should clear compositions cache when refreshing" do @c.composition :date, :composer=>proc{}, :decomposer=>proc{} @o.date = Date.new(3, 4, 5) @o.refresh @o.compositions.must_equal({}) end it "should handle case when no compositions are cached when refreshing" do @c.composition :date, :composer=>proc{}, :decomposer=>proc{} @o.refresh @o.compositions.must_equal({}) end it "should not clear compositions cache when refreshing after save" do @c.composition :date, :composer=>proc{}, :decomposer=>proc{} @c.create(:date=>Date.new(3, 4, 5)).compositions.must_equal(:date=>Date.new(3, 4, 5)) end it "should not clear compositions cache when saving with insert_select" do @c.dataset = @c.dataset.with_extend do def 
supports_insert_select?; true end def insert_select(*) {:id=>1} end end @c.composition :date, :composer=>proc{}, :decomposer=>proc{} @c.create(:date=>Date.new(3, 4, 5)).compositions.must_equal(:date=>Date.new(3, 4, 5)) end it "should instantiate compositions lazily" do @c.composition :date, :mapping=>[:year, :month, :day] @o.compositions.must_equal({}) @o.date @o.compositions.must_equal(:date=>Date.new(1,2,3)) end it "should cache value of composition" do times = 0 @c.composition :date, :composer=>proc{times+=1}, :decomposer=>proc{} times.must_equal 0 @o.date times.must_equal 1 @o.date times.must_equal 1 end it ":class option should take a string, symbol, or class" do @c.composition :date1, :class=>'Date', :mapping=>[:year, :month, :day] @c.composition :date2, :class=>:Date, :mapping=>[:year, :month, :day] @c.composition :date3, :class=>Date, :mapping=>[:year, :month, :day] @o.date1.must_equal Date.new(1, 2, 3) @o.date2.must_equal Date.new(1, 2, 3) @o.date3.must_equal Date.new(1, 2, 3) end it ":mapping option should work with a single array of symbols" do c = Class.new do def initialize(y, m) @y, @m = y, m end def year @y * 2 end def month @m * 3 end end @c.composition :date, :class=>c, :mapping=>[:year, :month] @o.date.year.must_equal 2 @o.date.month.must_equal 6 @o.date = c.new(3, 4) @o.save DB.sqls.must_equal ['UPDATE items SET year = 6, month = 12, day = 3 WHERE (id = 1)'] end it ":mapping option should work with an array of two pairs of symbols" do c = Class.new do def initialize(y, m) @y, @m = y, m end def y @y * 2 end def m @m * 3 end end @c.composition :date, :class=>c, :mapping=>[[:year, :y], [:month, :m]] @o.date.y.must_equal 2 @o.date.m.must_equal 6 @o.date = c.new(3, 4) @o.save DB.sqls.must_equal ['UPDATE items SET year = 6, month = 12, day = 3 WHERE (id = 1)'] end it ":mapping option :composer should return nil if all values are nil" do @c.composition :date, :mapping=>[:year, :month, :day] @c.new.date.must_be_nil end it ":mapping option :decomposer should set all related fields to nil if nil" do @c.composition :date, :mapping=>[:year, :month, :day] @o.date = nil @o.save DB.sqls.must_equal ['UPDATE items SET year = NULL, month = NULL, day = NULL WHERE (id = 1)'] end it "should work with frozen instances" do @c.composition :date, :mapping=>[:year, :month, :day] @o.freeze.must_be_same_as @o @o.date.must_equal Date.new(1, 2, 3) proc{@o.date = Date.today}.must_raise end it "should work with frozen instances that validate the composed value" do @c.composition :date, :mapping=>[:year, :month, :day] @c.send(:define_method, :validate){errors.add(:date, "something") if date} @o.freeze @o.date.must_equal Date.new(1, 2, 3) proc{@o.date = Date.today}.must_raise end it "should have #dup duplicate compositions" do @c.composition :date, :mapping=>[:year, :month, :day] @o.date.must_equal Date.new(1, 2, 3) @o.dup.compositions.must_equal @o.compositions @o.dup.compositions.wont_be_same_as(@o.compositions) end it "should work correctly with subclasses" do @c.composition :date, :mapping=>[:year, :month, :day] c = Class.new(@c) o = c.load(:id=>1, :year=>1, :month=>2, :day=>3) o.date.must_equal Date.new(1, 2, 3) o.save DB.sqls.must_equal ['UPDATE items SET year = 1, month = 2, day = 3 WHERE (id = 1)'] end it "should freeze composition metadata when freezing model class" do @c.composition :date, :mapping=>[:year, :month, :day] @c.freeze @c.compositions.frozen?.must_equal true @c.compositions[:date].frozen?.must_equal true end end
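# A minimal usage sketch of the composition plugin exercised above (Event and
# its columns are assumptions, not part of the original specs):
#
#   class Event < Sequel::Model
#     plugin :composition
#     composition :date, :mapping=>[:year, :month, :day]
#   end
#
#   e = Event.new
#   e.date = Date.new(2010, 1, 2)  # decomposed into year/month/day on save
#   e.date                         # composed lazily from the three columns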
sequel-5.63.0/spec/extensions/concurrent_eager_loading_spec.rb000066400000000000000000000062211434214120600246130ustar00rootroot00000000000000
require_relative 'spec_helper' describe "concurrent_eager_loading plugin" do before do @db = Sequel.mock(:extensions=>'async_thread_pool', :fetch=>{:id=>1}, :keep_reference=>false, :num_async_threads=>2, :numrows=>1) @C = Class.new(Sequel::Model) @C.columns :id @C.set_dataset(@db[:cs]) @C.plugin :concurrent_eager_loading @db.sqls end it 'should eager load concurrently if configured' do m2o_thread = nil o2m_thread = nil m2o_mutex = nil o2m_mutex = nil q1 = Queue.new q2 = Queue.new @C.many_to_one :c, :key=>:id, :class=>@C, :eager_loader=>(proc do |eo| m2o_thread = Thread.current if m2o_mutex = eo[:mutex] q2.push(nil) q1.pop end end) @C.one_to_many :cs, :key=>:id, :class=>@C, :eager_loader=>(proc do |eo| o2m_thread = Thread.current if o2m_mutex = eo[:mutex] q1.push(nil) q2.pop end end) @C.eager(:c, :cs).all m2o_thread.must_equal Thread.current o2m_thread.must_equal Thread.current m2o_mutex.must_be_nil o2m_mutex.must_be_nil @C.eager_load_serially.eager(:c, :cs).all m2o_thread.must_equal Thread.current o2m_thread.must_equal Thread.current m2o_mutex.must_be_nil o2m_mutex.must_be_nil @C.eager_load_concurrently.eager(:c, :cs).all m2o_thread.wont_equal Thread.current o2m_thread.wont_equal Thread.current o2m_thread.wont_equal m2o_thread m2o_mutex.wont_be_nil o2m_mutex.must_equal m2o_mutex @C.eager_load_concurrently.eager_load_serially.eager(:c, :cs).all m2o_thread.must_equal Thread.current o2m_thread.must_equal Thread.current m2o_mutex.must_be_nil o2m_mutex.must_be_nil @C.eager_load_serially.eager_load_concurrently.eager(:c, :cs).all m2o_thread.wont_equal Thread.current o2m_thread.wont_equal Thread.current o2m_thread.wont_equal m2o_thread m2o_mutex.wont_be_nil o2m_mutex.must_equal m2o_mutex @C.plugin :concurrent_eager_loading, :always=>true @C.eager(:c, :cs).all m2o_thread.wont_equal Thread.current o2m_thread.wont_equal Thread.current o2m_thread.wont_equal m2o_thread m2o_mutex.wont_be_nil o2m_mutex.must_equal m2o_mutex @C.eager_load_serially.eager(:c, :cs).all m2o_thread.must_equal Thread.current o2m_thread.must_equal Thread.current m2o_mutex.must_be_nil o2m_mutex.must_be_nil m2o_thread = nil @C.eager(:c).all m2o_thread.must_equal Thread.current m2o_mutex.must_be_nil o2m_thread = nil @C.eager(:cs).all o2m_thread.must_equal Thread.current o2m_mutex.must_be_nil vs = [] @C.eager(:c, :cs).
with_extend{define_method(:perform_eager_load) do |*a| vs << Struct.new(:wrapped).new(super(*a)) end}.all vs.map{|v| v.wrapped.__value} m2o_thread.wont_equal Thread.current o2m_thread.wont_equal Thread.current o2m_thread.wont_equal m2o_thread m2o_mutex.wont_be_nil o2m_mutex.must_equal m2o_mutex Class.new(@C).eager(:c, :cs).all m2o_thread.wont_equal Thread.current o2m_thread.wont_equal Thread.current o2m_thread.wont_equal m2o_thread m2o_mutex.wont_be_nil o2m_mutex.must_equal m2o_mutex end end
sequel-5.63.0/spec/extensions/connection_expiration_spec.rb000066400000000000000000000105761434214120600241760ustar00rootroot00000000000000
require_relative "spec_helper" connection_expiration_specs = Module.new do extend Minitest::Spec::DSL before do @db = db @m = Module.new do def disconnect_connection(conn) @sqls << 'disconnect' end end @db.extend @m @db.extension(:connection_expiration) @db.pool.connection_expiration_timeout = 2 end it "should still allow new connections" do @db.synchronize{|c| c}.must_be_kind_of(Sequel::Mock::Connection) end it "should not override connection_expiration_timeout when loading extension" do @db.extension(:connection_expiration) @db.pool.connection_expiration_timeout.must_equal 2 end it "should handle Database#disconnect calls while the connection is checked out" do @db.synchronize{|c| @db.disconnect} end it "should handle disconnected connections" do proc{@db.synchronize{|c| raise Sequel::DatabaseDisconnectError}}.must_raise Sequel::DatabaseDisconnectError @db.sqls.must_equal ['disconnect'] end it "should handle :connection_handling => :disconnect setting" do @db = Sequel.mock(@db.opts.merge(:connection_handling => :disconnect)) @db.extend @m @db.extension(:connection_expiration) @db.synchronize{} @db.sqls.must_equal ['disconnect'] end it "should only expire if older than timeout" do c1 = @db.synchronize{|c| c} @db.sqls.must_equal [] @db.synchronize{|c| c}.must_be_same_as(c1) @db.sqls.must_equal [] end it "should disconnect connection if expired" do c1 = @db.synchronize{|c| c} @db.sqls.must_equal [] simulate_sleep(c1) c2 = @db.synchronize{|c| c} @db.sqls.must_equal ['disconnect'] c2.wont_be_same_as(c1) end it "should disconnect only expired connections among multiple" do c1, c2 = multiple_connections # Expire c1 only.
simulate_sleep(c1) simulate_sleep(c2, 1) c3 = @db.synchronize{|c| c} @db.sqls.must_equal ['disconnect'] c3.wont_be_same_as(c1) c3.must_be_same_as(c2) end it "should disconnect connections repeatedly if they are expired" do c1, c2 = multiple_connections simulate_sleep(c1) simulate_sleep(c2) c3 = @db.synchronize{|c| c} @db.sqls.must_equal ['disconnect', 'disconnect'] c3.wont_be_same_as(c1) c3.wont_be_same_as(c2) end it "should not leak connection references to expiring connections" do c1 = @db.synchronize{|c| c} simulate_sleep(c1) c2 = @db.synchronize{|c| c} c2.wont_be_same_as(c1) @db.pool.instance_variable_get(:@connection_expiration_timestamps).must_include(c2) @db.pool.instance_variable_get(:@connection_expiration_timestamps).wont_include(c1) end it "should not leak connection references during disconnect" do multiple_connections @db.pool.instance_variable_get(:@connection_expiration_timestamps).size.must_equal 2 @db.disconnect @db.pool.instance_variable_get(:@connection_expiration_timestamps).size.must_equal 0 end it "should not vary expiration timestamps by default" do c1 = @db.synchronize{|c| c} @db.pool.instance_variable_get(:@connection_expiration_timestamps)[c1].last.must_equal 2 end it "should support #connection_expiration_random_delay to vary expiration timestamps" do @db.pool.connection_expiration_random_delay = 1 c1 = @db.synchronize{|c| c} @db.pool.instance_variable_get(:@connection_expiration_timestamps)[c1].last.wont_equal 2 end def multiple_connections q, q1 = Queue.new, Queue.new c1 = nil c2 = nil @db.synchronize do |c| Thread.new do @db.synchronize do |cc| c2 = cc end q1.pop q.push nil end q1.push nil q.pop c1 = c end [c1, c2] end # Set the timestamp back in time to simulate sleep / passage of time. def simulate_sleep(conn, sleep_time = 3) timestamps = @db.pool.instance_variable_get(:@connection_expiration_timestamps) timer, max = timestamps[conn] timestamps[conn] = [timer - sleep_time, max] @db.pool.instance_variable_set(:@connection_expiration_timestamps, timestamps) end end describe "Sequel::ConnectionExpiration with threaded pool" do def db Sequel.mock(:test=>false) end include connection_expiration_specs end describe "Sequel::ConnectionExpiration with sharded threaded pool" do def db Sequel.mock(:test=>false, :servers=>{}) end include connection_expiration_specs end
sequel-5.63.0/spec/extensions/connection_validator_spec.rb000066400000000000000000000102061434214120600237730ustar00rootroot00000000000000
require_relative "spec_helper" connection_validator_specs = Module.new do extend Minitest::Spec::DSL before do @db = db @m = Module.new do def disconnect_connection(conn) @sqls << 'disconnect' end def valid_connection?(conn) super conn.valid end def connect(server) conn = super conn.extend(Module.new do attr_accessor :valid end) conn.valid = true conn end end @db.extend @m @db.extension(:connection_validator) end it "should still allow new connections" do @db.synchronize{|c| c}.must_be_kind_of(Sequel::Mock::Connection) end it "should only
validate if connection idle longer than timeout" do c1 = @db.synchronize{|c| c} @db.sqls.must_equal [] @db.synchronize{|c| c}.must_be_same_as(c1) @db.sqls.must_equal [] @db.pool.connection_validation_timeout = -1 @db.synchronize{|c| c}.must_be_same_as(c1) @db.sqls.must_equal ['SELECT NULL'] @db.pool.connection_validation_timeout = 1 @db.synchronize{|c| c}.must_be_same_as(c1) @db.sqls.must_equal [] @db.synchronize{|c| c}.must_be_same_as(c1) @db.sqls.must_equal [] end it "should disconnect connection if not valid" do c1 = @db.synchronize{|c| c} @db.sqls.must_equal [] c1.valid = false @db.pool.connection_validation_timeout = -1 c2 = @db.synchronize{|c| c} @db.sqls.must_equal ['SELECT NULL', 'disconnect'] c2.wont_be_same_as(c1) end it "should handle Database#disconnect calls while the connection is checked out" do @db.synchronize{|c| @db.disconnect} end it "should handle disconnected connections" do proc{@db.synchronize{|c| raise Sequel::DatabaseDisconnectError}}.must_raise Sequel::DatabaseDisconnectError @db.sqls.must_equal ['disconnect'] end it "should handle :connection_handling => :disconnect setting" do @db = Sequel.mock(@db.opts.merge(:connection_handling => :disconnect)) @db.extend @m @db.extension(:connection_validator) @db.synchronize{} @db.sqls.must_equal ['disconnect'] end it "should disconnect multiple connections repeatedly if they are not valid" do q, q1 = Queue.new, Queue.new c1 = nil c2 = nil @db.pool.connection_validation_timeout = -1 @db.synchronize do |c| Thread.new do @db.synchronize do |cc| c2 = cc end q1.pop q.push nil end q1.push nil q.pop c1 = c end c1.valid = false c2.valid = false c3 = @db.synchronize{|c| c} @db.sqls.must_equal ['SELECT NULL', 'disconnect', 'SELECT NULL', 'disconnect'] c3.wont_be_same_as(c1) c3.wont_be_same_as(c2) end it "should not leak connection references during disconnect" do @db.synchronize{} @db.pool.instance_variable_get(:@connection_timestamps).size.must_equal 1 @db.disconnect @db.pool.instance_variable_get(:@connection_timestamps).size.must_equal 0 end it "should not leak connection references" do c1 = @db.synchronize do |c| @db.pool.instance_variable_get(:@connection_timestamps).must_equal({}) c end @db.pool.instance_variable_get(:@connection_timestamps).must_include(c1) c1.valid = false @db.pool.connection_validation_timeout = -1 c2 = @db.synchronize do |c| @db.pool.instance_variable_get(:@connection_timestamps).must_equal({}) c end c2.wont_be_same_as(c1) @db.pool.instance_variable_get(:@connection_timestamps).wont_include(c1) @db.pool.instance_variable_get(:@connection_timestamps).must_include(c2) end it "should handle case where determining validity requires a connection" do def @db.valid_connection?(c) synchronize{}; true end @db.pool.connection_validation_timeout = -1 c1 = @db.synchronize{|c| c} @db.synchronize{|c| c}.must_be_same_as(c1) end end describe "Sequel::ConnectionValidator with threaded pool" do def db Sequel.mock(:test=>false) end include connection_validator_specs end describe "Sequel::ConnectionValidator with sharded threaded pool" do def db Sequel.mock(:test=>false, :servers=>{}) end include connection_validator_specs end 
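# A usage sketch for a real application (DB is an assumption, not part of the
# original specs): load the extension and lower the validation timeout; -1
# forces a validity check on every checkout, as the specs above do:
#
#   DB.extension :connection_validator
#   DB.pool.connection_validation_timeout = -1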
sequel-5.63.0/spec/extensions/constant_sql_override_spec.rb000066400000000000000000000016231434214120600242010ustar00rootroot00000000000000
require_relative "spec_helper" describe "constant_sql_override extension" do before do @db = Sequel.mock.extension(:constant_sql_override) end it 'overrides configured constants' do @db.set_constant_sql(Sequel::CURRENT_TIMESTAMP, "CURRENT TIMESTAMP AT TIME ZONE 'UTC'") @db[:tbl].where(foo: Sequel::CURRENT_TIMESTAMP).first @db.sqls.must_equal ["SELECT * FROM tbl WHERE (foo = CURRENT TIMESTAMP AT TIME ZONE 'UTC') LIMIT 1"] end it 'does not change behavior for unconfigured constants' do @db[:tbl].where(foo: Sequel::CURRENT_TIMESTAMP).first @db.sqls.must_equal ["SELECT * FROM tbl WHERE (foo = CURRENT_TIMESTAMP) LIMIT 1"] end it 'freezes the constant_sqls hash when frozen' do @db.freeze @db.constant_sqls.frozen?.must_equal true proc{@db.set_constant_sql(Sequel::CURRENT_TIMESTAMP, "CURRENT TIMESTAMP AT TIME ZONE 'UTC'")}.must_raise RuntimeError end end
sequel-5.63.0/spec/extensions/constraint_validations_plugin_spec.rb000066400000000000000000000360751434214120600257360ustar00rootroot00000000000000
require_relative "spec_helper" describe "Sequel::Plugins::ConstraintValidations" do def model_class(opts={}) return @c if @c @c = Class.new(Sequel::Model(@db[:items])) @c.columns :name @db.sqls set_fetch(opts) @c.plugin :constraint_validations @c end def set_fetch(opts) @db.fetch = {:table=>'items', :message=>nil, :allow_nil=>nil, :constraint_name=>nil, :validation_type=>'presence', :argument=>nil, :column=>'name'}.merge(opts) end before do @db = Sequel.mock set_fetch({}) @ds = @db[:items] @ds.send(:columns=, [:name]) @ds2 = Sequel.mock[:items2] @ds2.send(:columns=, [:name]) end it "should load the validation_helpers plugin into the class" do model_class.new.must_respond_to(:validates_presence) end it "should parse constraint validations when loading plugin" do @c = model_class @db.sqls.must_equal ["SELECT * FROM sequel_constraint_validations"] @db.constraint_validations.must_equal("items"=>[{:allow_nil=>nil, :constraint_name=>nil, :message=>nil, :validation_type=>"presence", :column=>"name", :argument=>nil, :table=>"items"}]) @c.constraint_validations.must_equal [[:validates_presence, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:presence, {}]])
end it "should parse constraint validations with a custom constraint validations table" do c = Class.new(Sequel::Model(@db[:items])) @db.sqls c.plugin :constraint_validations, :constraint_validations_table=>:foo @db.sqls.must_equal ["SELECT * FROM foo"] @db.constraint_validations.must_equal("items"=>[{:allow_nil=>nil, :constraint_name=>nil, :message=>nil, :validation_type=>"presence", :column=>"name", :argument=>nil, :table=>"items"}]) c.constraint_validations.must_equal [[:validates_presence, :name]] c.constraint_validation_reflections.must_equal(:name=>[[:presence, {}]]) end it "should populate constraint_validations when subclassing" do c = Class.new(Sequel::Model(@db)) c.plugin :constraint_validations @db.sqls.must_equal ["SELECT * FROM sequel_constraint_validations"] sc = Class.new(c) sc.set_dataset @ds @db.sqls.must_equal [] sc.constraint_validations.must_equal [[:validates_presence, :name]] sc.constraint_validation_reflections.must_equal(:name=>[[:presence, {}]]) end it "should handle plugin being loaded in subclass when superclass uses a custom constraint validations table" do c = Class.new(Sequel::Model(@db)) c.plugin :constraint_validations, :constraint_validations_table=>:foo @db.sqls.must_equal ["SELECT * FROM foo"] sc = Class.new(c) sc.plugin :constraint_validations sc.constraint_validations_table.must_equal :foo sc.set_dataset @ds @db.sqls.must_equal [] sc.constraint_validations.must_equal [[:validates_presence, :name]] sc.constraint_validation_reflections.must_equal(:name=>[[:presence, {}]]) end it "should populate constraint_validations when changing the model's dataset" do c = Class.new(Sequel::Model(@db[:foo])) c.columns :name @db.sqls c.plugin :constraint_validations @db.sqls.must_equal ["SELECT * FROM sequel_constraint_validations"] sc = Class.new(c) sc.set_dataset @ds @db.sqls.must_equal [] sc.constraint_validations.must_equal [[:validates_presence, :name]] sc.constraint_validation_reflections.must_equal(:name=>[[:presence, {}]]) end it "should reparse constraint validations when changing the model's database" do c = Class.new(Sequel::Model(@ds2)) c.plugin :constraint_validations @ds2.db.sqls.must_equal ["SELECT * FROM sequel_constraint_validations"] sc = Class.new(c) sc.set_dataset @ds @db.sqls.must_equal ["SELECT * FROM sequel_constraint_validations"] sc.constraint_validations.must_equal [[:validates_presence, :name]] sc.constraint_validation_reflections.must_equal(:name=>[[:presence, {}]]) end it "should reparse constraint validations when changing the model's database with a custom constraint validations table" do c = Class.new(Sequel::Model(@ds2)) c.plugin :constraint_validations, :constraint_validations_table=>:foo @ds2.db.sqls.must_equal ["SELECT * FROM foo"] sc = Class.new(c) sc.set_dataset @ds @db.sqls.must_equal ["SELECT * FROM foo"] sc.constraint_validations.must_equal [[:validates_presence, :name]] sc.constraint_validation_reflections.must_equal(:name=>[[:presence, {}]]) end it "should correctly retrieve :message option from constraint validations table" do model_class(:message=>'foo').constraint_validations.must_equal [[:validates_presence, :name, {:message=>'foo'}]] @c.constraint_validation_reflections.must_equal(:name=>[[:presence, {:message=>'foo'}]]) end it "should correctly retrieve :allow_nil option from constraint validations table" do model_class(:allow_nil=>true).constraint_validations.must_equal [[:validates_presence, :name, {:allow_nil=>true}]] @c.constraint_validation_reflections.must_equal(:name=>[[:presence, {:allow_nil=>true}]]) end it 
"should handle presence validation" do model_class(:validation_type=>'presence').constraint_validations.must_equal [[:validates_presence, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:presence, {}]]) end it "should handle exact_length validation" do model_class(:validation_type=>'exact_length', :argument=>'5').constraint_validations.must_equal [[:validates_exact_length, 5, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:exact_length, {:argument=>5}]]) end it "should handle min_length validation" do model_class(:validation_type=>'min_length', :argument=>'5').constraint_validations.must_equal [[:validates_min_length, 5, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:min_length, {:argument=>5}]]) end it "should handle max_length validation" do model_class(:validation_type=>'max_length', :argument=>'5').constraint_validations.must_equal [[:validates_max_length, 5, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:max_length, {:argument=>5}]]) end it "should handle length_range validation" do model_class(:validation_type=>'length_range', :argument=>'3..5').constraint_validations.must_equal [[:validates_length_range, 3..5, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:length_range, {:argument=>3..5}]]) end it "should handle length_range validation with an exclusive end" do model_class(:validation_type=>'length_range', :argument=>'3...5').constraint_validations.must_equal [[:validates_length_range, 3...5, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:length_range, {:argument=>3...5}]]) end it "should handle format validation" do model_class(:validation_type=>'format', :argument=>'^foo.*').constraint_validations.must_equal [[:validates_format, /^foo.*/, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:format, {:argument=>/^foo.*/}]]) end it "should handle format validation with case insensitive format" do model_class(:validation_type=>'iformat', :argument=>'^foo.*').constraint_validations.must_equal [[:validates_format, /^foo.*/i, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:format, {:argument=>/^foo.*/i}]]) end it "should handle includes validation with array of strings" do model_class(:validation_type=>'includes_str_array', :argument=>'a,b,c').constraint_validations.must_equal [[:validates_includes, %w'a b c', :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:includes, {:argument=>%w'a b c'}]]) end it "should handle includes validation with array of integers" do model_class(:validation_type=>'includes_int_array', :argument=>'1,2,3').constraint_validations.must_equal [[:validates_includes, [1, 2, 3], :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:includes, {:argument=>[1, 2, 3]}]]) end it "should handle includes validation with inclusive range of integers" do model_class(:validation_type=>'includes_int_range', :argument=>'3..5').constraint_validations.must_equal [[:validates_includes, 3..5, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:includes, {:argument=>3..5}]]) end it "should handle includes validation with exclusive range of integers" do model_class(:validation_type=>'includes_int_range', :argument=>'3...5').constraint_validations.must_equal [[:validates_includes, 3...5, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:includes, {:argument=>3...5}]]) end it "should handle like validation" do model_class(:validation_type=>'like', 
:argument=>'foo').constraint_validations.must_equal [[:validates_format, /\Afoo\z/, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:format, {:argument=>/\Afoo\z/}]]) end it "should handle ilike validation" do model_class(:validation_type=>'ilike', :argument=>'foo').constraint_validations.must_equal [[:validates_format, /\Afoo\z/i, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:format, {:argument=>/\Afoo\z/i}]]) end it "should handle operator validation" do [[:str_lt, :<], [:str_lte, :<=], [:str_gt, :>], [:str_gte, :>=]].each do |vt, op| model_class(:validation_type=>vt.to_s, :argument=>'a').constraint_validations.must_equal [[:validates_operator, op, 'a', :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:operator, {:operator=>op, :argument=>'a'}]]) @c = @c.db.constraint_validations = nil end [[:int_lt, :<], [:int_lte, :<=], [:int_gt, :>], [:int_gte, :>=]].each do |vt, op| model_class(:validation_type=>vt.to_s, :argument=>'1').constraint_validations.must_equal [[:validates_operator, op, 1, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:operator, {:operator=>op, :argument=>1}]]) @c = @c.db.constraint_validations = nil end end it "should handle like validation with % metacharacter" do model_class(:validation_type=>'like', :argument=>'%foo%').constraint_validations.must_equal [[:validates_format, /\A.*foo.*\z/, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:format, {:argument=>/\A.*foo.*\z/}]]) end it "should handle like validation with %% metacharacter" do model_class(:validation_type=>'like', :argument=>'%%foo%%').constraint_validations.must_equal [[:validates_format, /\A%foo%\z/, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:format, {:argument=>/\A%foo%\z/}]]) end it "should handle like validation with _ metacharacter" do model_class(:validation_type=>'like', :argument=>'f_o').constraint_validations.must_equal [[:validates_format, /\Af.o\z/, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:format, {:argument=>/\Af.o\z/}]]) end it "should handle like validation with Regexp metacharacter" do model_class(:validation_type=>'like', :argument=>'\wfoo\d').constraint_validations.must_equal [[:validates_format, /\A\\wfoo\\d\z/, :name]] @c.constraint_validation_reflections.must_equal(:name=>[[:format, {:argument=>/\A\\wfoo\\d\z/}]]) end it "should handle unique validation" do model_class(:validation_type=>'unique').constraint_validations.must_equal [[:validates_unique, [:name]]] @c.constraint_validation_reflections.must_equal(:name=>[[:unique, {}]]) end it "should handle unique validation with multiple columns" do model_class(:validation_type=>'unique', :column=>'name,id').constraint_validations.must_equal [[:validates_unique, [:name, :id]]] @c.constraint_validation_reflections.must_equal([:name, :id]=>[[:unique, {}]]) end it "should handle :validation_options" do c = model_class(:validation_type=>'unique', :column=>'name') c.plugin :constraint_validations, :validation_options=>{:unique=>{:message=>'is bad'}} c.constraint_validations.must_equal [[:validates_unique, [:name], {:message=>'is bad'}]] c.constraint_validation_reflections.must_equal(:name=>[[:unique, {:message=>'is bad'}]]) c.dataset = c.dataset.with_fetch(:count=>1) o = c.new(:name=>'a') o.valid?.must_equal false o.errors.full_messages.must_equal ['name is bad'] end it "should handle :validation_options merging with constraint validation options" do c = model_class(:validation_type=>'unique', :column=>'name', 
:allow_nil=>true) c.plugin :constraint_validations, :validation_options=>{:unique=>{:message=>'is bad'}} c.constraint_validations.must_equal [[:validates_unique, [:name], {:message=>'is bad', :allow_nil=>true}]] c.constraint_validation_reflections.must_equal(:name=>[[:unique, {:message=>'is bad', :allow_nil=>true}]]) c.dataset = c.dataset.with_fetch(:count=>1) o = c.new(:name=>'a') o.valid?.must_equal false o.errors.full_messages.must_equal ['name is bad'] end it "should handle :validation_options merging with subclasses" do c = model_class(:validation_type=>'unique', :column=>'name') c.plugin :constraint_validations, :validation_options=>{:unique=>{:message=>'is bad', :allow_nil=>true}} sc = Class.new(c) sc.plugin :constraint_validations, :validation_options=>{:unique=>{:allow_missing=>true, :allow_nil=>false}} sc.constraint_validations.must_equal [[:validates_unique, [:name], {:message=>'is bad', :allow_missing=>true, :allow_nil=>false}]] sc.constraint_validation_reflections.must_equal(:name=>[[:unique, {:message=>'is bad', :allow_missing=>true, :allow_nil=>false}]]) sc.dataset = sc.dataset.with_fetch(:count=>1) o = sc.new(:name=>'a') o.valid?.must_equal false o.errors.full_messages.must_equal ['name is bad'] end it "should used parsed constraint validations when validating" do o = model_class.new o.valid?.must_equal false o.errors.full_messages.must_equal ['name is not present'] end it "should handle a table name specified as SQL::Identifier" do set_fetch(:table=>'sch__items') c = Class.new(Sequel::Model(@db[Sequel.identifier(:sch__items)])) c.plugin :constraint_validations c.constraint_validations.must_equal [[:validates_presence, :name]] c.constraint_validation_reflections.must_equal(:name=>[[:presence, {}]]) end it "should handle a table name specified as SQL::QualifiedIdentifier" do set_fetch(:table=>'sch.items') c = Class.new(Sequel::Model(@db[Sequel.qualify(:sch, :items)])) c.plugin :constraint_validations c.constraint_validations.must_equal [[:validates_presence, :name]] c.constraint_validation_reflections.must_equal(:name=>[[:presence, {}]]) end it "should freeze constraint validations data when freezing model class" do @c = model_class @c.freeze @c.constraint_validations.frozen?.must_equal true @c.constraint_validations.all?(&:frozen?).must_equal true @c.constraint_validation_reflections.frozen?.must_equal true @c.constraint_validation_reflections.values.all?(&:frozen?).must_equal true @c.constraint_validation_reflections.values.all?{|r| r.all?(&:frozen?)}.must_equal true @c.instance_variable_get(:@constraint_validation_options).frozen?.must_equal true @c.instance_variable_get(:@constraint_validation_options).values.all?(&:frozen?).must_equal true end end 
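# A brief usage sketch (Album is an assumption, not from the original source):
# the plugin reads the metadata table maintained by the constraint_validations
# Database extension (specced in the next file) and turns each row into a
# model validation:
#
#   Album.plugin :constraint_validations
#   Album.constraint_validations  # e.g. [[:validates_presence, :name]]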
sequel-5.63.0/spec/extensions/constraint_validations_spec.rb000066400000000000000000000623331434214120600243560ustar00rootroot00000000000000
require_relative "spec_helper" describe "constraint_validations extension" do def parse_insert(s) m = /\AINSERT INTO "?sequel_constraint_validations"? \("?(.*)"?\) VALUES \((.*)\)\z/.match(s) raise "Couldn't extract insertion from statement <#{s}>" unless m Hash[*m[1].split(/"?, "?/).map{|v| v.to_sym}.zip(m[2].split(/"?, "?/).map{|v| parse_insert_value(v)}).reject{|k, v| v.nil?}.flatten] end def parse_insert_value(s) case s when 'NULL' nil when /\A'(.*)'\z/ $1 else raise Sequel::Error, "unhandled insert value: #{s.inspect}" end end before do @db = Sequel.mock @db.extend(Module.new{attr_writer :schema; def schema(table, *) execute("parse schema for #{table}"); @schema; end}) def @db.table_exists?(_) true; end @db.singleton_class.send(:alias_method, :table_exists?, :table_exists?) @db.extension(:constraint_validations) end it "should allow creating the sequel_constraint_validations table" do @db.create_constraint_validations_table @db.sqls.must_equal ["CREATE TABLE sequel_constraint_validations (table varchar(255) NOT NULL, constraint_name varchar(255), validation_type varchar(255) NOT NULL, column varchar(255) NOT NULL, argument varchar(255), message varchar(255), allow_nil boolean)"] end it "should allow creating the sequel_constraint_validations table with a non-default table name" do @db.constraint_validations_table = :foo @db.create_constraint_validations_table @db.sqls.must_equal ["CREATE TABLE foo (table varchar(255) NOT NULL, constraint_name varchar(255), validation_type varchar(255) NOT NULL, column varchar(255) NOT NULL, argument varchar(255), message varchar(255), allow_nil boolean)"] end it "should allow dropping the sequel_constraint_validations table" do @db.drop_constraint_validations_table @db.sqls.must_equal ["DELETE FROM sequel_constraint_validations WHERE (table = 'sequel_constraint_validations')", "DROP TABLE sequel_constraint_validations"] end it "should allow dropping the sequel_constraint_validations table with a non-default table name" do @db.constraint_validations_table = :foo @db.drop_constraint_validations_table @db.sqls.must_equal ["DELETE FROM foo WHERE (table = 'foo')", "DROP TABLE foo"] end it "should allow dropping validations for a given table" do @db.drop_constraint_validations_for(:table=>:foo) @db.sqls.must_equal ["DELETE FROM sequel_constraint_validations WHERE (table = 'foo')"] end it "should drop validations for a given table when dropping the table" do @db.drop_table(:foo) @db.sqls.must_equal ["DELETE FROM sequel_constraint_validations WHERE (table = 'foo')", "DROP TABLE foo"] @db.drop_table(:foo, :if_exists
=> true) @db.sqls.must_equal ["DELETE FROM sequel_constraint_validations WHERE (table = 'foo')", "DROP TABLE IF EXISTS foo"] @db.drop_table?(:foo) @db.sqls.must_equal ["DELETE FROM sequel_constraint_validations WHERE (table = 'foo')", "DROP TABLE foo"] end it "should not drop validations for a given table if the constraint validations table does not exist" do def @db.table_exists?(_) false; end @db.drop_table(:foo) @db.sqls.must_equal ["DROP TABLE foo"] end it "should allow dropping validations for a given table and column" do @db.drop_constraint_validations_for(:table=>:foo, :column=>:bar) @db.sqls.must_equal ["DELETE FROM sequel_constraint_validations WHERE ((table = 'foo') AND (column = 'bar'))"] end it "should allow dropping validations for a given table and constraint" do @db.drop_constraint_validations_for(:table=>:foo, :constraint=>:bar) @db.sqls.must_equal ["DELETE FROM sequel_constraint_validations WHERE ((table = 'foo') AND (constraint_name = 'bar'))"] end it "should allow dropping validations for a non-default constraint_validations table" do @db.constraint_validations_table = :cv @db.drop_constraint_validations_for(:table=>:foo) @db.sqls.must_equal ["DELETE FROM cv WHERE (table = 'foo')"] end it "should raise an error without deleting if attempting to drop validations without table, column, or constraint" do proc{@db.drop_constraint_validations_for({})}.must_raise(Sequel::Error) @db.sqls.must_equal [] end it "should allow adding constraint validations via create_table validate" do @db.create_table(:foo){String :name; validate{presence :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo") sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (trim(name) != '')))"] end it "should allow adding constraint validations using generator directly" do gen = @db.create_table_generator gen.String :name gen.validate{presence :name} @db.create_table(:foo, :generator=>gen) sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo") sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (trim(name) != '')))"] end it "should allow adding constraint validations via alter_table validate" do @db.schema = [[:name, {:type=>:string}]] @db.alter_table(:foo){validate{presence :name}} sqls = @db.sqls parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo") sqls.must_equal ["parse schema for foo", "ALTER TABLE foo ADD CHECK ((name IS NOT NULL) AND (trim(name) != ''))"] end it "should allow altering the table without validation" do @db.schema = [[:name, {:type=>:string}]] @db.alter_table(:foo){rename_column :a, :b} @db.sqls.must_equal ["ALTER TABLE foo RENAME COLUMN a TO b"] end it "should handle :message option when adding validations" do @db.create_table(:foo){String :name; validate{presence :name, :message=>'not there'}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo", :message=>'not there') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (trim(name) != '')))"] end it "should handle :allow_nil option when adding validations" do @db.create_table(:foo){String :name; validate{presence :name, :allow_nil=>true}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo", :allow_nil=>'t') sqls.must_equal ["CREATE 
TABLE foo (name varchar(255), CHECK ((name IS NULL) OR (trim(name) != '')))"] end it "should handle :name option when adding validations" do @db.create_table(:foo){String :name; validate{presence :name, :name=>'cons'}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo", :constraint_name=>'cons') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CONSTRAINT cons CHECK ((name IS NOT NULL) AND (trim(name) != '')))"] end it "should handle multiple string columns when adding presence validations" do @db.create_table(:foo){String :name; String :bar; validate{presence [:name, :bar]}} sqls = @db.sqls parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo") parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"bar", :table=>"foo") sqls.must_equal ["BEGIN", "COMMIT", "CREATE TABLE foo (name varchar(255), bar varchar(255), CHECK ((name IS NOT NULL) AND (trim(name) != '') AND (bar IS NOT NULL) AND (trim(bar) != '')))"] end it "should handle multiple string columns when adding presence validations with :allow_nil" do @db.create_table(:foo){String :name; String :bar; validate{presence [:name, :bar], :allow_nil=>true}} sqls = @db.sqls parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo", :allow_nil=>'t') parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"bar", :table=>"foo", :allow_nil=>'t') sqls.must_equal ["BEGIN", "COMMIT", "CREATE TABLE foo (name varchar(255), bar varchar(255), CHECK (((name IS NULL) OR (trim(name) != '')) AND ((bar IS NULL) OR (trim(bar) != ''))))"] end it "should handle multiple string columns when adding presence validations" do @db.create_table(:foo){String :name; Integer :x; String :bar; validate{presence [:name, :x, :bar]}} sqls = @db.sqls parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo") parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"x", :table=>"foo") parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"bar", :table=>"foo") sqls.must_equal ["BEGIN", "COMMIT", "CREATE TABLE foo (name varchar(255), x integer, bar varchar(255), CHECK ((name IS NOT NULL) AND (trim(name) != '') AND (bar IS NOT NULL) AND (trim(bar) != '') AND (x IS NOT NULL)))"] end it "should handle multiple string columns when adding presence validations with :allow_nil" do @db.create_table(:foo){String :name; Integer :x; String :bar; validate{presence [:name, :x, :bar], :allow_nil=>true}} sqls = @db.sqls parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo", :allow_nil=>'t') parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"x", :table=>"foo", :allow_nil=>'t') parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"bar", :table=>"foo", :allow_nil=>'t') sqls.must_equal ["BEGIN", "COMMIT", "CREATE TABLE foo (name varchar(255), x integer, bar varchar(255), CHECK (((name IS NULL) OR (trim(name) != '')) AND ((bar IS NULL) OR (trim(bar) != ''))))"] end it "should handle presence validation on non-String columns" do @db.create_table(:foo){Integer :name; validate{presence :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo") sqls.must_equal ["CREATE TABLE foo (name integer, CHECK (name IS NOT NULL))"] @db.schema = [[:name, 
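# Note: the generated CHECK differs by column type -- String columns get both
# a NOT NULL check and a trim(name) != '' check, while non-String columns (as
# in this test) only need the NOT NULL check. A minimal usage sketch outside
# these specs (the :items table and :quantity column are hypothetical):
#
#   DB.extension :constraint_validations
#   DB.create_table(:items) do
#     Integer :quantity
#     validate{presence :quantity}  # => CHECK (quantity IS NOT NULL)
#   end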
{:type=>:integer}]] @db.alter_table(:foo){validate{presence :name}} sqls = @db.sqls parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo") sqls.must_equal ["parse schema for foo", "ALTER TABLE foo ADD CHECK (name IS NOT NULL)"] end it "should handle presence validation on Oracle with IS NOT NULL instead of != ''" do @db = Sequel.mock(:host=>'oracle') @db.extend_datasets do def quote_identifiers?; false end private def input_identifier(v) v.to_s end end @db.extension(:constraint_validations) @db.create_table(:foo){String :name; validate{presence :name}} sqls = @db.sqls s = sqls.slice!(0).upcase m = /\AINSERT INTO sequel_constraint_validations \((.*)\) SELECT (.*) FROM DUAL\z/i.match(s) Hash[*m[1].split(', ').map{|v| v.downcase.to_sym}.zip(m[2].split(', ').map{|v| parse_insert_value(v.downcase.gsub('null', 'NULL'))}).reject{|k, v| v.nil?}.flatten].must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo") sqls.must_equal ['CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (trim(name) IS NOT NULL)))'] end it "should assume column is not a String if it can't determine the type" do @db.create_table(:foo){Integer :name; validate{presence :bar}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"presence", :column=>"bar", :table=>"foo") sqls.must_equal ["CREATE TABLE foo (name integer, CHECK (bar IS NOT NULL))"] @db.schema = [[:name, {:type=>:integer}]] @db.alter_table(:foo){validate{presence :bar}} sqls = @db.sqls parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"presence", :column=>"bar", :table=>"foo") sqls.must_equal ["parse schema for foo", "ALTER TABLE foo ADD CHECK (bar IS NOT NULL)"] end it "should handle presence validation on non-String columns with :allow_nil option" do @db.create_table(:foo){Integer :name; validate{presence :name, :allow_nil=>true}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"foo", :allow_nil=>'t') sqls.must_equal ["CREATE TABLE foo (name integer)"] end it "should support :exact_length constraint validation" do @db.create_table(:foo){String :name; validate{exact_length 5, :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"exact_length", :column=>"name", :table=>"foo", :argument=>'5') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (char_length(name) = 5)))"] end it "should support :min_length constraint validation" do @db.create_table(:foo){String :name; validate{min_length 5, :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"min_length", :column=>"name", :table=>"foo", :argument=>'5') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (char_length(name) >= 5)))"] end it "should support :max_length constraint validation" do @db.create_table(:foo){String :name; validate{max_length 5, :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"max_length", :column=>"name", :table=>"foo", :argument=>'5') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (char_length(name) <= 5)))"] end it "should support :length_range constraint validation" do @db.create_table(:foo){String :name; validate{length_range 3..5, :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"length_range", :column=>"name", :table=>"foo", :argument=>'3..5') sqls.must_equal ["CREATE TABLE foo (name 
varchar(255), CHECK ((name IS NOT NULL) AND (char_length(name) >= 3) AND (char_length(name) <= 5)))"] @db.create_table(:foo){String :name; validate{length_range 3...5, :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"length_range", :column=>"name", :table=>"foo", :argument=>'3...5') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (char_length(name) >= 3) AND (char_length(name) < 5)))"] end it "should support :format constraint validation" do @db = Sequel.mock(:host=>'postgres') @db.extend_datasets{def quote_identifiers?; false end} @db.extension(:constraint_validations) @db.create_table(:foo){String :name; validate{format(/^foo.*/, :name)}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"format", :column=>"name", :table=>"foo", :argument=>'^foo.*') sqls.must_equal [%[CREATE TABLE foo (name text, CHECK ((name IS NOT NULL) AND (name ~ '^foo.*')))]] end it "should support :format constraint validation with case insensitive format" do @db = Sequel.mock(:host=>'postgres') @db.extend_datasets{def quote_identifiers?; false end} @db.extension(:constraint_validations) @db.create_table(:foo){String :name; validate{format(/^foo.*/i, :name)}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"iformat", :column=>"name", :table=>"foo", :argument=>'^foo.*') sqls.must_equal [%[CREATE TABLE foo (name text, CHECK ((name IS NOT NULL) AND (name ~* '^foo.*')))]] end it "should support :includes constraint validation with an array of strings" do @db.create_table(:foo){String :name; validate{includes %w'a b c', :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"includes_str_array", :column=>"name", :table=>"foo", :argument=>'a,b,c') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (name IN ('a', 'b', 'c'))))"] end it "should support :includes constraint validation with an array of integers" do @db.create_table(:foo){String :name; validate{includes [1, 2, 3], :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"includes_int_array", :column=>"name", :table=>"foo", :argument=>'1,2,3') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (name IN (1, 2, 3))))"] end it "should support :includes constraint validation with a inclusive range of integers" do @db.create_table(:foo){String :name; validate{includes 3..5, :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"includes_int_range", :column=>"name", :table=>"foo", :argument=>'3..5') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (name >= 3) AND (name <= 5)))"] end it "should support :includes constraint validation with a exclusive range of integers" do @db.create_table(:foo){String :name; validate{includes 3...5, :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"includes_int_range", :column=>"name", :table=>"foo", :argument=>'3...5') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (name >= 3) AND (name < 5)))"] end it "should support :like constraint validation" do @db.create_table(:foo){String :name; validate{like 'foo%', :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"like", :column=>"name", :table=>"foo", :argument=>'foo%') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (name LIKE 'foo%' ESCAPE '\\')))"] 
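# As with the other validation types above, the row inserted into
# sequel_constraint_validations is what allows the model-side
# constraint_validations plugin to recreate this check as a Ruby validation.
# A hedged sketch of defining one in a migration (hypothetical :users table):
#
#   DB.create_table(:users) do
#     String :handle
#     validate{like 'foo%', :handle}  # => CHECK (handle LIKE 'foo%' ESCAPE '\')
#   end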
end it "should support :ilike constraint validation" do @db.create_table(:foo){String :name; validate{ilike 'foo%', :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"ilike", :column=>"name", :table=>"foo", :argument=>'foo%') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (UPPER(name) LIKE UPPER('foo%') ESCAPE '\\')))"] end it "should support :operator :< constraint validation with string" do @db.create_table(:foo){String :name; validate{operator :<, 'a', :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"str_lt", :column=>"name", :table=>"foo", :argument=>'a') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (name < 'a')))"] end it "should support :operator :<= constraint validation with string" do @db.create_table(:foo){String :name; validate{operator :<=, 'a', :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"str_lte", :column=>"name", :table=>"foo", :argument=>'a') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (name <= 'a')))"] end it "should support :operator :> constraint validation with string" do @db.create_table(:foo){String :name; validate{operator :>, 'a', :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"str_gt", :column=>"name", :table=>"foo", :argument=>'a') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (name > 'a')))"] end it "should support :operator :>= constraint validation with string" do @db.create_table(:foo){String :name; validate{operator :>=, 'a', :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"str_gte", :column=>"name", :table=>"foo", :argument=>'a') sqls.must_equal ["CREATE TABLE foo (name varchar(255), CHECK ((name IS NOT NULL) AND (name >= 'a')))"] end it "should support :operator :< constraint validation with integer" do @db.create_table(:foo){Integer :name; validate{operator :<, 2, :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"int_lt", :column=>"name", :table=>"foo", :argument=>'2') sqls.must_equal ["CREATE TABLE foo (name integer, CHECK ((name IS NOT NULL) AND (name < 2)))"] end it "should support :operator :<= constraint validation with integer" do @db.create_table(:foo){Integer :name; validate{operator :<=, 2, :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"int_lte", :column=>"name", :table=>"foo", :argument=>'2') sqls.must_equal ["CREATE TABLE foo (name integer, CHECK ((name IS NOT NULL) AND (name <= 2)))"] end it "should support :operator :> constraint validation with integer" do @db.create_table(:foo){Integer :name; validate{operator :>, 2, :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"int_gt", :column=>"name", :table=>"foo", :argument=>'2') sqls.must_equal ["CREATE TABLE foo (name integer, CHECK ((name IS NOT NULL) AND (name > 2)))"] end it "should support :operator :>= constraint validation with integer" do @db.create_table(:foo){Integer :name; validate{operator :>=, 2, :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"int_gte", :column=>"name", :table=>"foo", :argument=>'2') sqls.must_equal ["CREATE TABLE foo (name integer, CHECK ((name IS NOT NULL) AND (name >= 2)))"] end it "should support :unique constraint validation" do @db.create_table(:foo){String :name; validate{unique :name}} sqls = @db.sqls 
parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"unique", :column=>"name", :table=>"foo") sqls.must_equal ["CREATE TABLE foo (name varchar(255), UNIQUE (name))"] end it "should support :unique constraint validation with multiple columns" do @db.create_table(:foo){String :name; Integer :id; validate{unique [:name, :id]}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"unique", :column=>"name,id", :table=>"foo") sqls.must_equal ["CREATE TABLE foo (name varchar(255), id integer, UNIQUE (name, id))"] end it "should support :unique constraint validation in alter_table" do @db.alter_table(:foo){validate{unique :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"unique", :column=>"name", :table=>"foo") sqls.must_equal ["ALTER TABLE foo ADD UNIQUE (name)"] end it "should drop constraints and validations when dropping a constraint validation" do @db.alter_table(:foo){String :name; validate{drop :bar}} @db.sqls.must_equal ["DELETE FROM sequel_constraint_validations WHERE ((table, constraint_name) IN (('foo', 'bar')))", "ALTER TABLE foo DROP CONSTRAINT bar"] end it "should drop constraints and validations before adding new ones" do @db.alter_table(:foo){String :name; validate{unique :name; drop :bar}} sqls = @db.sqls parse_insert(sqls.slice!(1)).must_equal(:validation_type=>"unique", :column=>"name", :table=>"foo") sqls.must_equal ["DELETE FROM sequel_constraint_validations WHERE ((table, constraint_name) IN (('foo', 'bar')))", "ALTER TABLE foo ADD UNIQUE (name)", "ALTER TABLE foo DROP CONSTRAINT bar"] end it "should raise an error if attempting to validate inclusion with a range of non-integers" do proc{@db.create_table(:foo){String :name; validate{includes 'a'..'z', :name}}}.must_raise(Sequel::Error) end it "should raise an error if attempting to validate inclusion with a range of non-integers or strings" do proc{@db.create_table(:foo){String :name; validate{includes [1.0, 2.0], :name}}}.must_raise(Sequel::Error) end it "should raise an error if attempting to validate inclusion with a unsupported object" do proc{@db.create_table(:foo){String :name; validate{includes 'a', :name}}}.must_raise(Sequel::Error) end it "should raise an error if attempting attempting to process an operator validation with an unsupported operator" do proc{@db.alter_table(:foo){String :name; validate{operator :===, 'a', :name}}}.must_raise(Sequel::Error) end it "should raise an error if attempting attempting to process an operator validation with an unsupported argument" do proc{@db.alter_table(:foo){String :name; validate{operator :>, [], :name}}}.must_raise(Sequel::Error) end it "should raise an error if attempting to drop a constraint validation in a create_table generator" do proc{@db.create_table(:foo){String :name; validate{drop :foo}}}.must_raise(Sequel::Error) end it "should raise an error if attempting to drop a constraint validation without a name" do proc{@db.alter_table(:foo){String :name; validate{drop nil}}}.must_raise(Sequel::Error) end it "should raise an error if attempting attempting to process a constraint validation with an unsupported type" do proc{@db.alter_table(:foo){String :name; validations << {:type=>:foo}}}.must_raise(Sequel::Error) end it "should allow adding constraint validations for tables specified as a SQL::Identifier" do @db.create_table(Sequel.identifier(:sch__foo)){String :name; validate{presence :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"presence", :column=>"name", 
:table=>"sch__foo") sqls.must_equal ["CREATE TABLE sch__foo (name varchar(255), CHECK ((name IS NOT NULL) AND (trim(name) != '')))"] end it "should allow adding constraint validations for tables specified as a SQL::QualifiedIdentifier" do @db.create_table(Sequel.qualify(:sch, :foo)){String :name; validate{presence :name}} sqls = @db.sqls parse_insert(sqls.slice!(0)).must_equal(:validation_type=>"presence", :column=>"name", :table=>"sch.foo") sqls.must_equal ["CREATE TABLE sch.foo (name varchar(255), CHECK ((name IS NOT NULL) AND (trim(name) != '')))"] end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/core_refinements_spec.rb����������������������������������������������0000664�0000000�0000000�00000051766�14342141206�0023136�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" if (RUBY_VERSION >= '2.0.0' && RUBY_ENGINE == 'ruby') || (RUBY_ENGINE == 'jruby' && (JRUBY_VERSION >= '9.3' || (JRUBY_VERSION.match(/\A9\.2\.(\d+)/) && $1.to_i >= 7))) Sequel.extension :core_refinements, :pg_array, :pg_hstore, :pg_row, :pg_range, :pg_multirange, :pg_row_ops, :pg_range_ops, :pg_array_ops, :pg_hstore_ops, :pg_json, :pg_json_ops, :sqlite_json_ops, :is_distinct_from using Sequel::CoreRefinements describe "Core refinements" do before do db = Sequel.mock @d = db[:items].with_extend do def supports_regexp?; true end def l(*args, &block) literal(filter_expr(*args, &block)) end def lit(*args) literal(*args) end end end it "should support NOT via Symbol#~" do @d.l(~:x).must_equal 'NOT x' end with_symbol_splitting "should support NOT via Symbol#~ for splittable symbols" do @d.l(~:x__y).must_equal 'NOT x.y' end it "should support + - * / power via Symbol#+,-,*,/,**" do @d.l(:x + 1 > 100).must_equal '((x + 1) > 100)' @d.l((:x * :y) < 100.01).must_equal '((x * y) < 100.01)' @d.l((:x - :y/2) >= 100000000000000000000000000000000000).must_equal '((x - (y / 2)) >= 100000000000000000000000000000000000)' @d.l((((:x - :y)/(:x + :y))*:z) <= 100).must_equal '((((x - y) / (x + y)) * z) <= 100)' @d.l(~((((:x - :y)/(:x + :y))*:z) <= 100)).must_equal '((((x - y) / (x + y)) * z) > 100)' @d.l(~((((:x ** :y)/(:x + :y))*:z) <= 100)).must_equal '(((power(x, y) / (x + y)) * z) > 100)' end it "should support LIKE via Symbol#like" do @d.l(:x.like('a')).must_equal '(x LIKE \'a\' ESCAPE \'\\\')' @d.l(:x.like(/a/)).must_equal '(x ~ \'a\')' @d.l(:x.like('a', 'b')).must_equal '((x LIKE \'a\' ESCAPE \'\\\') OR (x LIKE \'b\' ESCAPE \'\\\'))' @d.l(:x.like(/a/, /b/i)).must_equal '((x ~ \'a\') OR (x ~* \'b\'))' @d.l(:x.like('a', /b/)).must_equal '((x LIKE \'a\' ESCAPE \'\\\') OR (x ~ \'b\'))' end it "should support NOT LIKE via Symbol#like and Symbol#~" do @d.l(~:x.like('a')).must_equal '(x NOT LIKE \'a\' ESCAPE \'\\\')' @d.l(~:x.like(/a/)).must_equal '(x !~ \'a\')' @d.l(~:x.like('a', 'b')).must_equal '((x NOT LIKE \'a\' ESCAPE \'\\\') AND (x NOT LIKE \'b\' ESCAPE \'\\\'))' @d.l(~:x.like(/a/, /b/i)).must_equal '((x !~ \'a\') AND (x !~* 
\'b\'))' @d.l(~:x.like('a', /b/)).must_equal '((x NOT LIKE \'a\' ESCAPE \'\\\') AND (x !~ \'b\'))' end it "should support ILIKE via Symbol#ilike" do @d.l(:x.ilike('a')).must_equal '(UPPER(x) LIKE UPPER(\'a\') ESCAPE \'\\\')' @d.l(:x.ilike(/a/)).must_equal '(x ~* \'a\')' @d.l(:x.ilike('a', 'b')).must_equal '((UPPER(x) LIKE UPPER(\'a\') ESCAPE \'\\\') OR (UPPER(x) LIKE UPPER(\'b\') ESCAPE \'\\\'))' @d.l(:x.ilike(/a/, /b/i)).must_equal '((x ~* \'a\') OR (x ~* \'b\'))' @d.l(:x.ilike('a', /b/)).must_equal '((UPPER(x) LIKE UPPER(\'a\') ESCAPE \'\\\') OR (x ~* \'b\'))' end it "should support NOT ILIKE via Symbol#ilike and Symbol#~" do @d.l(~:x.ilike('a')).must_equal '(UPPER(x) NOT LIKE UPPER(\'a\') ESCAPE \'\\\')' @d.l(~:x.ilike(/a/)).must_equal '(x !~* \'a\')' @d.l(~:x.ilike('a', 'b')).must_equal '((UPPER(x) NOT LIKE UPPER(\'a\') ESCAPE \'\\\') AND (UPPER(x) NOT LIKE UPPER(\'b\') ESCAPE \'\\\'))' @d.l(~:x.ilike(/a/, /b/i)).must_equal '((x !~* \'a\') AND (x !~* \'b\'))' @d.l(~:x.ilike('a', /b/)).must_equal '((UPPER(x) NOT LIKE UPPER(\'a\') ESCAPE \'\\\') AND (x !~* \'b\'))' end it "should support sql_expr on arrays with all two pairs" do @d.l([[:x, 100],[:y, 'a']].sql_expr).must_equal '((x = 100) AND (y = \'a\'))' @d.l([[:x, true], [:y, false]].sql_expr).must_equal '((x IS TRUE) AND (y IS FALSE))' @d.l([[:x, nil], [:y, [1,2,3]]].sql_expr).must_equal '((x IS NULL) AND (y IN (1, 2, 3)))' end it "should support sql_negate on arrays with all two pairs" do @d.l([[:x, 100],[:y, 'a']].sql_negate).must_equal '((x != 100) AND (y != \'a\'))' @d.l([[:x, true], [:y, false]].sql_negate).must_equal '((x IS NOT TRUE) AND (y IS NOT FALSE))' @d.l([[:x, nil], [:y, [1,2,3]]].sql_negate).must_equal '((x IS NOT NULL) AND (y NOT IN (1, 2, 3)))' end it "should support ~ on arrays with all two pairs" do @d.l(~[[:x, 100],[:y, 'a']]).must_equal '((x != 100) OR (y != \'a\'))' @d.l(~[[:x, true], [:y, false]]).must_equal '((x IS NOT TRUE) OR (y IS NOT FALSE))' @d.l(~[[:x, nil], [:y, [1,2,3]]]).must_equal '((x IS NOT NULL) OR (y NOT IN (1, 2, 3)))' end it "should support sql_or on arrays with all two pairs" do @d.l([[:x, 100],[:y, 'a']].sql_or).must_equal '((x = 100) OR (y = \'a\'))' @d.l([[:x, true], [:y, false]].sql_or).must_equal '((x IS TRUE) OR (y IS FALSE))' @d.l([[:x, nil], [:y, [1,2,3]]].sql_or).must_equal '((x IS NULL) OR (y IN (1, 2, 3)))' end it "should support Array#sql_string_join for concatenation of SQL strings" do @d.lit([:x].sql_string_join).must_equal '(x)' @d.lit([:x].sql_string_join(', ')).must_equal '(x)' @d.lit([:x, :y].sql_string_join).must_equal '(x || y)' @d.lit([:x, :y].sql_string_join(', ')).must_equal "(x || ', ' || y)" @d.lit([:x.sql_function(1), :y.sql_subscript(1)].sql_string_join).must_equal '(x(1) || y[1])' @d.lit([:x.sql_function(1), 'y.z'.lit].sql_string_join(', ')).must_equal "(x(1) || ', ' || y.z)" @d.lit([:x, 1, :y].sql_string_join).must_equal "(x || '1' || y)" @d.lit([:x, 1, :y].sql_string_join(', ')).must_equal "(x || ', ' || '1' || ', ' || y)" @d.lit([:x, 1, :y].sql_string_join(Sequel[:y][:z])).must_equal "(x || y.z || '1' || y.z || y)" @d.lit([:x, 1, :y].sql_string_join(1)).must_equal "(x || '1' || '1' || '1' || y)" @d.lit([:x, :y].sql_string_join('y.x || x.y'.lit)).must_equal "(x || y.x || x.y || y)" @d.lit([[:x, :y].sql_string_join, [:a, :b].sql_string_join].sql_string_join).must_equal "(x || y || a || b)" end it "should support sql_expr on hashes" do @d.l({:x => 100, :y => 'a'}.sql_expr)[1...-1].split(' AND ').sort.must_equal ['(x = 100)', '(y = \'a\')'] @d.l({:x => true, :y => 
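# Hash#sql_negate ANDs the negated pairs; Hash#~ (tested below) ORs them
# instead -- mirroring the Array#sql_negate / Array#~ distinction above.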
false}.sql_expr)[1...-1].split(' AND ').sort.must_equal ['(x IS TRUE)', '(y IS FALSE)'] @d.l({:x => nil, :y => [1,2,3]}.sql_expr)[1...-1].split(' AND ').sort.must_equal ['(x IS NULL)', '(y IN (1, 2, 3))'] end it "should support sql_negate on hashes" do @d.l({:x => 100, :y => 'a'}.sql_negate)[1...-1].split(' AND ').sort.must_equal ['(x != 100)', '(y != \'a\')'] @d.l({:x => true, :y => false}.sql_negate)[1...-1].split(' AND ').sort.must_equal ['(x IS NOT TRUE)', '(y IS NOT FALSE)'] @d.l({:x => nil, :y => [1,2,3]}.sql_negate)[1...-1].split(' AND ').sort.must_equal ['(x IS NOT NULL)', '(y NOT IN (1, 2, 3))'] end it "should support ~ on hashes" do @d.l(~{:x => 100, :y => 'a'})[1...-1].split(' OR ').sort.must_equal ['(x != 100)', '(y != \'a\')'] @d.l(~{:x => true, :y => false})[1...-1].split(' OR ').sort.must_equal ['(x IS NOT TRUE)', '(y IS NOT FALSE)'] @d.l(~{:x => nil, :y => [1,2,3]})[1...-1].split(' OR ').sort.must_equal ['(x IS NOT NULL)', '(y NOT IN (1, 2, 3))'] end it "should support sql_or on hashes" do @d.l({:x => 100, :y => 'a'}.sql_or)[1...-1].split(' OR ').sort.must_equal ['(x = 100)', '(y = \'a\')'] @d.l({:x => true, :y => false}.sql_or)[1...-1].split(' OR ').sort.must_equal ['(x IS TRUE)', '(y IS FALSE)'] @d.l({:x => nil, :y => [1,2,3]}.sql_or)[1...-1].split(' OR ').sort.must_equal ['(x IS NULL)', '(y IN (1, 2, 3))'] end it "should Hash#& and Hash#|" do @d.l({:y => :z} & :x).must_equal '((y = z) AND x)' @d.l({:x => :a} & {:y => :z}).must_equal '((x = a) AND (y = z))' @d.l({:y => :z} | :x).must_equal '((y = z) OR x)' @d.l({:x => :a} | {:y => :z}).must_equal '((x = a) OR (y = z))' end end describe "Array#case and Hash#case" do before do @d = Sequel.mock.dataset end it "should return SQL CASE expression" do @d.literal({:x=>:y}.case(:z)).must_equal '(CASE WHEN x THEN y ELSE z END)' @d.literal({:x=>:y}.case(:z, :exp)).must_equal '(CASE exp WHEN x THEN y ELSE z END)' ['(CASE WHEN x THEN y WHEN a THEN b ELSE z END)', '(CASE WHEN a THEN b WHEN x THEN y ELSE z END)'].must_include(@d.literal({:x=>:y, :a=>:b}.case(:z))) @d.literal([[:x, :y]].case(:z)).must_equal '(CASE WHEN x THEN y ELSE z END)' @d.literal([[:x, :y], [:a, :b]].case(:z)).must_equal '(CASE WHEN x THEN y WHEN a THEN b ELSE z END)' @d.literal([[:x, :y], [:a, :b]].case(:z, :exp)).must_equal '(CASE exp WHEN x THEN y WHEN a THEN b ELSE z END)' @d.literal([[:x, :y], [:a, :b]].case(:z, Sequel[:exp][:w])).must_equal '(CASE exp.w WHEN x THEN y WHEN a THEN b ELSE z END)' end it "should return SQL CASE expression with expression even if nil" do @d.literal({:x=>:y}.case(:z, nil)).must_equal '(CASE NULL WHEN x THEN y ELSE z END)' end it "should raise an error if an array that isn't all two pairs is used" do proc{[:b].case(:a)}.must_raise(Sequel::Error) proc{[:b, :c].case(:a)}.must_raise(Sequel::Error) proc{[[:b, :c], :d].case(:a)}.must_raise(Sequel::Error) end it "should raise an error if an empty array/hash is used" do proc{[].case(:a)}.must_raise(Sequel::Error) proc{{}.case(:a)}.must_raise(Sequel::Error) end end describe "Array#sql_value_list and #sql_array" do before do @d = Sequel.mock.dataset end it "should treat the array as an SQL value list instead of conditions when used as a placeholder value" do @d.filter(Sequel.lit("(a, b) IN ?", [[:x, 1], [:y, 2]])).sql.must_equal 'SELECT * WHERE ((a, b) IN ((x = 1) AND (y = 2)))' @d.filter(Sequel.lit("(a, b) IN ?", [[:x, 1], [:y, 2]].sql_value_list)).sql.must_equal 'SELECT * WHERE ((a, b) IN ((x, 1), (y, 2)))' end it "should be no difference when used as a hash value" do @d.filter([:a, 
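# When the pairs appear as a hash key/value in a filter, Sequel already
# treats the right-hand array as a value list, so the explicit
# #sql_value_list call in the second assertion below is a no-op here.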
:b]=>[[:x, 1], [:y, 2]]).sql.must_equal 'SELECT * WHERE ((a, b) IN ((x, 1), (y, 2)))' @d.filter([:a, :b]=>[[:x, 1], [:y, 2]].sql_value_list).sql.must_equal 'SELECT * WHERE ((a, b) IN ((x, 1), (y, 2)))' end end describe "String#lit" do before do @ds = Sequel.mock[:t] end it "should return an LiteralString object" do 'xyz'.lit.must_be_kind_of(Sequel::LiteralString) 'xyz'.lit.to_s.must_equal 'xyz' end it "should inhibit string literalization" do @ds.update_sql(:stamp => "NOW()".lit).must_equal "UPDATE t SET stamp = NOW()" end it "should return a PlaceholderLiteralString object if args are given" do a = 'DISTINCT ?'.lit(:a) a.must_be_kind_of(Sequel::SQL::PlaceholderLiteralString) @ds.literal(a).must_equal 'DISTINCT a' @ds.with_quote_identifiers(true).literal(a).must_equal 'DISTINCT "a"' end it "should handle named placeholders if given a single argument hash" do a = 'DISTINCT :b'.lit(:b=>:a) a.must_be_kind_of(Sequel::SQL::PlaceholderLiteralString) @ds.literal(a).must_equal 'DISTINCT a' @ds.with_quote_identifiers(true).literal(a).must_equal 'DISTINCT "a"' end it "should treat placeholder literal strings as generic expressions" do a = ':b'.lit(:b=>:a) @ds.literal(a + 1).must_equal "(a + 1)" @ds.literal(a & :b).must_equal "(a AND b)" @ds.literal(a.sql_string + :b).must_equal "(a || b)" end end describe "String#to_sequel_blob" do it "should return a Blob object" do 'xyz'.to_sequel_blob.must_be_kind_of(::Sequel::SQL::Blob) 'xyz'.to_sequel_blob.must_equal 'xyz' end it "should retain binary data" do "\1\2\3\4".to_sequel_blob.must_equal "\1\2\3\4" end end describe "#desc" do before do @ds = Sequel.mock.dataset end it "should format a DESC clause for a column ref" do @ds.literal(:test.desc).must_equal 'test DESC' end with_symbol_splitting "should format a DESC clause for a column ref with splittable symbol" do @ds.literal(:items__price.desc).must_equal 'items.price DESC' end it "should format a DESC clause for a function" do @ds.literal(:avg.sql_function(:test).desc).must_equal 'avg(test) DESC' end end describe "#asc" do before do @ds = Sequel.mock.dataset end it "should format a ASC clause for a column ref" do @ds.literal(:test.asc).must_equal 'test ASC' end with_symbol_splitting "should format a ASC clause for a column ref with splittable symbol" do @ds.literal(:items__price.asc).must_equal 'items.price ASC' end it "should format a ASC clause for a function" do @ds.literal(:avg.sql_function(:test).asc).must_equal 'avg(test) ASC' end end describe "#as" do before do @ds = Sequel.mock.dataset end it "should format a AS clause for a column ref" do @ds.literal(:test.as(:t)).must_equal 'test AS t' end with_symbol_splitting "should format a AS clause for a column ref with splittable symbols" do @ds.literal(:items__price.as(:p)).must_equal 'items.price AS p' end it "should format a AS clause for a function" do @ds.literal(:avg.sql_function(:test).as(:avg)).must_equal 'avg(test) AS avg' end it "should format a AS clause for a literal value" do @ds.literal('abc'.as(:abc)).must_equal "'abc' AS abc" end end describe "Column references" do before do @ds = Sequel.mock.dataset.with_quote_identifiers(true).with_extend{def quoted_identifier_append(sql, c) sql << "`#{c}`" end} end it "should be quoted properly" do @ds.literal(:xyz).must_equal "`xyz`" @ds.literal(:xyz.as(:x)).must_equal "`xyz` AS `x`" end it "should be quoted properly in SQL functions" do @ds.literal(:avg.sql_function(:xyz)).must_equal "avg(`xyz`)" @ds.literal(:avg.sql_function(:xyz, 1)).must_equal "avg(`xyz`, 1)" 
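# The backticks come from this describe block's quoted_identifier_append
# override in the before block; real adapters (e.g. MySQL) customize
# identifier quoting the same way. With the default double-quote style:
#
#   ds = Sequel.mock.dataset.with_quote_identifiers(true)
#   ds.literal(:avg.sql_function(:xyz))  # => avg("xyz")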
@ds.literal(:avg.sql_function(:xyz).as(:a)).must_equal "avg(`xyz`) AS `a`" end it "should be quoted properly in ASC/DESC clauses" do @ds.literal(:xyz.asc).must_equal "`xyz` ASC" @ds.literal(:avg.sql_function(:xyz, 1).desc).must_equal "avg(`xyz`, 1) DESC" end it "should be quoted properly in a cast function" do @ds.literal(:x.cast(:integer)).must_equal "CAST(`x` AS integer)" end with_symbol_splitting "should be quoted properly when using splittable symbols" do @ds.literal(:xyz__abc).must_equal "`xyz`.`abc`" @ds.literal(:xyz__abc.as(:x)).must_equal "`xyz`.`abc` AS `x`" @ds.literal(:xyz___x).must_equal "`xyz` AS `x`" @ds.literal(:xyz__abc___x).must_equal "`xyz`.`abc` AS `x`" @ds.literal(:x__y.cast('varchar(20)')).must_equal "CAST(`x`.`y` AS varchar(20))" end end describe "Blob" do it "#to_sequel_blob should return self" do blob = "x".to_sequel_blob blob.to_sequel_blob.object_id.must_equal blob.object_id end end describe "Symbol#*" do before do @ds = Sequel.mock.dataset end it "should format a qualified wildcard if no argument" do @ds.literal(:xyz.*).must_equal 'xyz.*' @ds.literal(:abc.*).must_equal 'abc.*' end it "should format a filter expression if an argument" do @ds.literal(:xyz.*(3)).must_equal '(xyz * 3)' @ds.literal(:abc.*(5)).must_equal '(abc * 5)' end with_symbol_splitting "should support qualified symbols if no argument" do @ds.literal(:xyz__abc.*).must_equal 'xyz.abc.*' end end describe "Symbol" do before do @ds = Sequel.mock.dataset.with_quote_identifiers(true) end it "#identifier should format an identifier" do @ds.literal(:xyz__abc.identifier).must_equal '"xyz__abc"' end it "#qualify should format a qualified column" do @ds.literal(:xyz.qualify(:abc)).must_equal '"abc"."xyz"' end it "#qualify should work on QualifiedIdentifiers" do @ds.literal(:xyz.qualify(:abc).qualify(:def)).must_equal '"def"."abc"."xyz"' end it "should be able to qualify an identifier" do @ds.literal(:xyz.identifier.qualify(Sequel[:xyz][:abc])).must_equal '"xyz"."abc"."xyz"' end with_symbol_splitting "should be able to qualify an identifier with qualified symbol" do @ds.literal(:xyz.identifier.qualify(:xyz__abc)).must_equal '"xyz"."abc"."xyz"' end it "should be able to specify a schema.table.column" do @ds.literal(:column.qualify(:table.qualify(:schema))).must_equal '"schema"."table"."column"' @ds.literal(:column.qualify(:table__name.identifier.qualify(:schema))).must_equal '"schema"."table__name"."column"' end it "should be able to specify order" do @oe = :xyz.desc @oe.class.must_equal Sequel::SQL::OrderedExpression @oe.descending.must_equal true @oe = :xyz.asc @oe.class.must_equal Sequel::SQL::OrderedExpression @oe.descending.must_equal false end it "should work correctly with objects" do o = Object.new def o.sql_literal(ds) "(foo)" end @ds.literal(:column.qualify(o)).must_equal '(foo)."column"' end end describe "Symbol" do before do @ds = Sequel.mock.dataset end it "should support sql_function method" do @ds.literal(:COUNT.sql_function('1')).must_equal "COUNT('1')" @ds.select(:COUNT.sql_function('1')).sql.must_equal "SELECT COUNT('1')" end it "should support cast method" do @ds.literal(:abc.cast(:integer)).must_equal "CAST(abc AS integer)" end with_symbol_splitting "should support sql array accesses via sql_subscript for splittable symbols" do @ds.literal(:abc__def.sql_subscript(1)).must_equal "abc.def[1]" end it "should support sql array accesses via sql_subscript" do @ds.literal(:abc.sql_subscript(1)).must_equal "abc[1]" @ds.literal(:abc.sql_subscript(1)|2).must_equal "abc[1, 2]" 
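# Subscripts compose two ways: | extends the same subscript list (abc[1, 2]),
# while [] nests another subscript (abc[1][2]), as the next assertion shows.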
@ds.literal(:abc.sql_subscript(1)[2]).must_equal "abc[1][2]" end it "should support cast_numeric and cast_string" do x = :abc.cast_numeric x.must_be_kind_of(Sequel::SQL::NumericExpression) @ds.literal(x).must_equal "CAST(abc AS integer)" x = :abc.cast_numeric(:real) x.must_be_kind_of(Sequel::SQL::NumericExpression) @ds.literal(x).must_equal "CAST(abc AS real)" x = :abc.cast_string x.must_be_kind_of(Sequel::SQL::StringExpression) @ds.literal(x).must_equal "CAST(abc AS varchar(255))" x = :abc.cast_string(:varchar) x.must_be_kind_of(Sequel::SQL::StringExpression) @ds.literal(x).must_equal "CAST(abc AS varchar(255))" end it "should allow database independent types when casting" do db = @ds.db def db.cast_type_literal(type) return :foo if type == Integer return :bar if type == String type end @ds.literal(:abc.cast(String)).must_equal "CAST(abc AS bar)" @ds.literal(:abc.cast(String)).must_equal "CAST(abc AS bar)" @ds.literal(:abc.cast_string).must_equal "CAST(abc AS bar)" @ds.literal(:abc.cast_string(Integer)).must_equal "CAST(abc AS foo)" @ds.literal(:abc.cast_numeric).must_equal "CAST(abc AS foo)" @ds.literal(:abc.cast_numeric(String)).must_equal "CAST(abc AS bar)" end it "should support SQL EXTRACT function via #extract " do @ds.literal(:abc.extract(:year)).must_equal "extract(year FROM abc)" end end describe "is_distinct_from extension integration" do it "Symbol#is_distinct_from should return an IsDistinctFrom" do db = Sequel.connect("mock://postgres") db.extension :is_distinct_from db.literal(:a.is_distinct_from(:b)).must_equal '("a" IS DISTINCT FROM "b")' end end describe "Postgres/SQLite extensions integration" do before do @db = Sequel.mock end it "Symbol#pg_array should return an ArrayOp" do @db.literal(:a.pg_array.unnest).must_equal "unnest(a)" end it "Symbol#pg_row should return a PGRowOp" do @db.literal(:a.pg_row[:a]).must_equal "(a).a" end it "Symbol#hstore should return an HStoreOp" do @db.literal(:a.hstore['a']).must_equal "(a -> 'a')" end it "Symbol#pg_json should return an JSONOp" do @db.literal(:a.pg_json[%w'a b']).must_equal "(a #> ARRAY['a','b'])" @db.literal(:a.pg_json.extract('a')).must_equal "json_extract_path(a, 'a')" end it "Symbol#pg_jsonb should return an JSONBOp" do @db.literal(:a.pg_jsonb[%w'a b']).must_equal "(a #> ARRAY['a','b'])" @db.literal(:a.pg_jsonb.extract('a')).must_equal "jsonb_extract_path(a, 'a')" end it "Symbol#pg_range should return a RangeOp" do @db.literal(:a.pg_range.lower).must_equal "lower(a)" end it "Array#pg_array should return a PGArray" do @db.literal([1].pg_array.op.unnest).must_equal "unnest(ARRAY[1])" @db.literal([1].pg_array(:int4).op.unnest).must_equal "unnest(ARRAY[1]::int4[])" end it "Array#pg_json should return a JSONArray" do @db.literal([1].pg_json).must_equal "'[1]'::json" end it "Array#pg_jsonb should return a JSONBArray" do @db.literal([1].pg_jsonb).must_equal "'[1]'::jsonb" end it "Array#pg_row should return a ArrayRow" do @db.literal([1].pg_row).must_equal "ROW(1)" end it "Hash#hstore should return an HStore" do @db.literal({'a'=>1}.hstore.op['a']).must_equal '(\'"a"=>"1"\'::hstore -> \'a\')' end it "Hash#pg_json should return an JSONHash" do @db.literal({'a'=>'b'}.pg_json).must_equal "'{\"a\":\"b\"}'::json" end it "Hash#pg_jsonb should return an JSONBHash" do @db.literal({'a'=>'b'}.pg_jsonb).must_equal "'{\"a\":\"b\"}'::jsonb" end it "Range#pg_range should return an PGRange" do @db.literal((1..2).pg_range).must_equal "'[1,2]'" @db.literal((1..2).pg_range(:int4range)).must_equal "int4range(1,2,'[]')" end it 
"Symbol#sqlite_json_opt should return an SQLite::JSONOp" do @db.literal(:a.sqlite_json_op[1]).must_equal "(a ->> 1)" @db.literal(:a.sqlite_json_op.minify).must_equal "json(a)" end end end ����������sequel-5.63.0/spec/extensions/csv_serializer_spec.rb������������������������������������������������0000664�0000000�0000000�00000016101�14342141206�0022613�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" require 'csv' describe "Sequel::Plugins::CsvSerializer" do before do artist = @Artist = Class.new(Sequel::Model(:artists)) @Artist.class_eval do def self.name; 'Artist' end unrestrict_primary_key plugin :csv_serializer columns :id, :name def_column_accessor :id, :name @db_schema = {:id=>{:type=>:integer}} end @Album = Class.new(Sequel::Model(:albums)) @Album.class_eval do def self.name; 'Album' end unrestrict_primary_key attr_accessor :blah plugin :csv_serializer columns :id, :name, :artist_id def_column_accessor :id, :name, :artist_id @db_schema = {:id=>{:type=>:integer}, :artist_id=>{:type=>:integer}} many_to_one :artist, :class=>artist end @artist = @Artist.load(:id=>2, :name=>'YJM') @artist.associations[:albums] = [] @album = @Album.load(:id=>1, :name=>'RF') @album.artist = @artist @album.blah = 'Blah' end it "should round trip successfully" do @Artist.from_csv(@artist.to_csv).must_equal @artist @Album.from_csv(@album.to_csv).must_equal @album end it "should handle ruby objects in values" do @Artist.send(:define_method, :name=) do |v| super(Date.parse(v)) end a = @Artist.load(:name=>Date.today) opts = {:columns=>[:name]} @Artist.from_csv(a.to_csv(opts), opts).must_equal a end it "should handle the :only option" do @Artist.from_csv(@artist.to_csv(:only=>:name), :only=>:name).must_equal @Artist.load(:name=>@artist.name) @Album.from_csv(@album.to_csv(:only=>[:id, :name]), :only=>[:id, :name]).must_equal @Album.load(:id=>@album.id, :name=>@album.name) end it "should handle the :except option" do @Artist.from_csv(@artist.to_csv(:except=>:id), :except=>:id).must_equal @Artist.load(:name=>@artist.name) @Album.from_csv(@album.to_csv(:except=>[:id, :artist_id]), :except=>[:id, :artist_id]).must_equal @Album.load(:name=>@album.name) end it "should handle the :include option for arbitrary attributes" do @Album.from_csv(@album.to_csv(:include=>:blah), :include=>:blah).blah.must_equal @album.blah end it "should handle multiple inclusions using an array for the :include option" do a = @Album.from_csv(@album.to_csv(:include=>[:blah]), :include=>:blah) a.blah.must_equal @album.blah end it "#from_csv should set column values" do @artist.from_csv('AS', :only=>:name) @artist.name.must_equal 'AS' @artist.id.must_equal 2 @artist.from_csv('1', :only=>:id) @artist.name.must_equal 'AS' @artist.id.must_equal 1 end it ".array_from_csv should support :headers to specify headers" do @albums = @Album.array_from_csv("AS,2\nDF,3", :headers=>['name', 'artist_id']) @albums.map(&:name).must_equal %w'AS DF' @albums.map(&:artist_id).must_equal [2, 3] @albums = @Album.array_from_csv("2,AS\n3,DF", :headers=>[nil, 'name']) @albums.map(&:name).must_equal %w'AS DF' @albums.map(&:artist_id).must_equal [nil, nil] end it ".from_csv should support :headers to specify headers" do @album = 
@Album.from_csv('AS,2', :headers=>['name', 'artist_id']) @album.name.must_equal 'AS' @album.artist_id.must_equal 2 @album = @Album.from_csv('2,AS', :headers=>[nil, 'name']) @album.name.must_equal 'AS' @album.artist_id.must_be_nil end it "#from_csv should support :headers to specify headers" do @album.from_csv('AS,2', :headers=>['name']) @album.name.must_equal 'AS' @album.artist_id.must_equal 2 @album.from_csv('2,AS', :headers=>[nil, 'name']) @album.name.must_equal 'AS' @album.artist_id.must_equal 2 end it "should support a to_csv class and dataset method" do @Album.dataset = @Album.dataset.with_fetch(:id=>1, :name=>'RF', :artist_id=>2) @Artist.dataset = @Artist.dataset.with_fetch(:id=>2, :name=>'YJM') @Album.columns(:id, :name, :artist_id) @Album.db_schema.replace(:id=>{:type=>:integer}, :artist_id=>{:type=>:integer}) @Album.array_from_csv(@Album.to_csv).must_equal [@album] @Album.array_from_csv(@Album.dataset.to_csv(:only=>:name), :only=>:name).must_equal [@Album.load(:name=>@album.name)] end it "should have dataset to_csv method respect :array option" do a = @Album.new(:id=>1, :name=>'RF', :artist_id=>3) @Album.array_from_csv(@Album.to_csv(:array=>[a])).must_equal [a] end it "#to_csv should respect class options" do @Album = Class.new(Sequel::Model(:albums)) artist = @Artist @Album.class_eval do attr_accessor :blah plugin :csv_serializer, :except => :id, :write_headers=>true, :include=>:blah columns :id, :name, :artist_id many_to_one :artist, :class=>artist end @album = @Album.load(:id=>2, :name=>'JK') @album.artist = @artist @album.blah = 'Gak' @album.to_csv.must_equal "name,artist_id,blah\nJK,2,Gak\n" @album.to_csv(:write_headers=>false).must_equal "JK,2,Gak\n" @album.to_csv(:headers=>[:name]).must_equal "name\nJK\n" @album.to_csv(:headers=>[:name, :id]).must_equal "name,id\nJK,2\n" @album.to_csv(:only=>[:name]).must_equal "name,blah\nJK,Gak\n" @album.to_csv(:except=>nil).must_equal "id,name,artist_id,blah\n2,JK,2,Gak\n" @album.to_csv(:except=>[:blah]).must_equal "id,name,artist_id\n2,JK,2\n" end it "should store the default options in csv_serializer_opts" do @Album.csv_serializer_opts.must_equal({}) c = Class.new(@Album) @Album.csv_serializer_opts[:include] = :blah c.plugin :csv_serializer, :naked=>false c.csv_serializer_opts.must_equal(:naked=>false) @Album.csv_serializer_opts.must_equal(:include=>:blah) end it "should work correctly when subclassing" do @Artist2 = Class.new(@Artist) @Artist2.plugin :csv_serializer, :only=>:name @Artist3 = Class.new(@Artist2) @Artist3.from_csv(@Artist3.load(:id=>2, :name=>'YYY').to_csv).must_equal @Artist3.load(:name=>'YYY') @Artist2 = Class.new(@Artist) @Artist2.plugin :csv_serializer, :only=>[:name] @Artist3 = Class.new(@Artist2) @Artist3.from_csv(@Artist3.load(:id=>2, :name=>'YYY').to_csv).must_equal @Artist3.load(:name=>'YYY') end it "should raise an error if attempting to set a restricted column and :all_columns is not used" do @Artist.restrict_primary_key proc{@Artist.from_csv(@artist.to_csv)}.must_raise(Sequel::MassAssignmentRestriction) end it "should use a dataset's selected columns" do columns = [:id] ds = @Artist.select(*columns).limit(1) ds.send(:columns=, columns) ds.with_fetch(:id => 10).to_csv(:write_headers => true).must_equal "id\n10\n" end it "should pass all the examples from the documentation" do @album.to_csv(:write_headers=>true).must_equal "id,name,artist_id\n1,RF,2\n" @album.to_csv(:only=>:name).must_equal "RF\n" @album.to_csv(:except=>[:id, :artist_id]).must_equal "RF\n" end it "should freeze csv serializier opts when model 
class is frozen" do @Album.csv_serializer_opts[:only] = [:id] @Album.csv_serializer_opts[:foo] = :bar @Album.freeze @Album.csv_serializer_opts.frozen?.must_equal true @Album.csv_serializer_opts[:only].frozen?.must_equal true end end ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/current_datetime_timestamp_spec.rb������������������������������������0000664�0000000�0000000�00000002116�14342141206�0025211�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "current_datetime_timestamp extension" do before do @ds = Sequel.mock[:table].extension(:current_datetime_timestamp) end after do Sequel.datetime_class = Time end it "should have current_timestamp respect Sequel.datetime_class" do t = Sequel::Dataset.new(nil).current_datetime t.must_be_kind_of(Time) (Time.now - t < 0.1).must_equal true Sequel.datetime_class = DateTime t = Sequel::Dataset.new(nil).current_datetime t.must_be_kind_of(DateTime) (DateTime.now - t < (0.1/86400)).must_equal true end it "should have current_timestamp value be literalized as CURRENT_TIMESTAMP" do @ds.literal(@ds.current_datetime).must_equal 'CURRENT_TIMESTAMP' Sequel.datetime_class = DateTime @ds.literal(@ds.current_datetime).must_equal 'CURRENT_TIMESTAMP' end it "should have other Date/Time values literalized normally" do t = Time.at(1594239778).getutc @ds.literal(t).must_equal "'2020-07-08 20:22:58.000000'" @ds.literal(t.to_datetime).must_equal "'2020-07-08 20:22:58.000000'" end end ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/dataset_associations_spec.rb������������������������������������������0000664�0000000�0000000�00000066555�14342141206�0024015�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel::Plugins::DatasetAssociations" do before do @db = Sequel.mock(:host=>'postgres') @db.extend_datasets do def quote_identifiers?; false end def supports_window_functions?; true; end def supports_distinct_on?; true; end end @Base = Class.new(Sequel::Model) @Base.plugin :dataset_associations @Artist = Class.new(@Base) @Album = Class.new(@Base) @Tag = 
Class.new(@Base) @Track = Class.new(@Base) def @Artist.name; 'Artist' end def @Album.name; 'Album' end def @Tag.name; 'Tag' end def @Track.name; 'Track' end @Artist.dataset = @db[:artists] @Album.dataset = @db[:albums] @Tag.dataset = @db[:tags] @Track.dataset = @db[:tracks] @Artist.columns :id, :name @Album.columns :id, :name, :artist_id @Tag.columns :id, :name @Album.plugin :many_through_many @Artist.plugin :many_through_many @Track.plugin :many_through_many @Artist.plugin :pg_array_associations @Tag.plugin :pg_array_associations @Artist.one_to_many :albums, :class=>@Album, :dataset_associations_join=>true @Artist.one_to_one :first_album, :class=>@Album @Album.many_to_one :artist, :class=>@Artist @Album.many_to_many :tags, :class=>@Tag @Album.many_through_many :mthm_tags, [[:albums_tags, :album_id, :tag_id]], :class=>@Tag @Album.one_through_one :first_tag, :class=>@Tag, :right_key=>:tag_id @Tag.many_to_many :albums, :class=>@Album @Artist.pg_array_to_many :artist_tags, :class=>@Tag, :key=>:tag_ids @Tag.many_to_pg_array :artists, :class=>@Artist @Artist.many_through_many :tags, [[:albums, :artist_id, :id], [:albums_tags, :album_id, :tag_id]], :class=>@Tag @Artist.one_through_many :otag, [[:albums, :artist_id, :id], [:albums_tags, :album_id, :tag_id]], :class=>@Tag @Track.many_through_many :artist_tracks, [[:albums, :id, :artist_id], [:albums, :artist_id, :id]], :class=>@Track, :left_primary_key=>:album_id, :right_primary_key=>:album_id end it "should work for many_to_one associations" do ds = @Album.artists ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Artist ds.sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums.artist_id FROM albums))" end it "should work for one_to_many associations" do ds = @Artist.albums ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Album ds.sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists))" end it "should work for one_to_one associations" do ds = @Artist.first_albums ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Album ds.sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists))" end it "should work for many_to_many associations" do ds = @Album.tags ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Tag ds.sql.must_equal "SELECT tags.* FROM tags WHERE (tags.id IN (SELECT albums_tags.tag_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((albums_tags.album_id) IN (SELECT albums.id FROM albums))))" end it "should work for one_through_one associations" do ds = @Album.first_tags ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Tag ds.sql.must_equal "SELECT tags.* FROM tags WHERE (tags.id IN (SELECT albums_tags.tag_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((albums_tags.album_id) IN (SELECT albums.id FROM albums))))" end it "should work for many_through_many associations" do ds = @Artist.tags ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Tag ds.sql.must_equal "SELECT tags.* FROM tags WHERE (tags.id IN (SELECT albums_tags.tag_id FROM artists INNER JOIN albums ON (albums.artist_id = artists.id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags ON (tags.id = albums_tags.tag_id) WHERE (albums.artist_id IN (SELECT artists.id FROM artists))))" end it "should work for self referential many_through_many associations" do ds = @Track.artist_tracks ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Track ds.sql.must_equal 
"SELECT tracks.* FROM tracks WHERE (tracks.album_id IN (SELECT albums_0.id FROM tracks INNER JOIN albums ON (albums.id = tracks.album_id) INNER JOIN albums AS albums_0 ON (albums_0.artist_id = albums.artist_id) INNER JOIN tracks AS tracks_0 ON (tracks_0.album_id = albums_0.id) WHERE (albums.id IN (SELECT tracks.album_id FROM tracks))))" end it "should work for many_through_many associations with a single join table" do ds = @Album.mthm_tags ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Tag ds.sql.must_equal "SELECT tags.* FROM tags WHERE (tags.id IN (SELECT albums_tags.tag_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (albums_tags.album_id IN (SELECT albums.id FROM albums))))" end it "should work for one_through_many associations" do ds = @Artist.otags ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Tag ds.sql.must_equal "SELECT tags.* FROM tags WHERE (tags.id IN (SELECT albums_tags.tag_id FROM artists INNER JOIN albums ON (albums.artist_id = artists.id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags ON (tags.id = albums_tags.tag_id) WHERE (albums.artist_id IN (SELECT artists.id FROM artists))))" end it "should work for many_to_many associations with :dataset_association_join=>true" do @Album.many_to_many :tags, :clone=>:tags, :dataset_associations_join=>true, :select=>[Sequel.expr(:tags).*, Sequel[:albums_tags][:foo]] ds = @Album.tags ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Tag ds.sql.must_equal "SELECT tags.*, albums_tags.foo FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (tags.id IN (SELECT albums_tags.tag_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((albums_tags.album_id) IN (SELECT albums.id FROM albums))))" end it "should work for one_through_one associations with :dataset_association_join=>true" do @Album.one_through_one :first_tag, :clone=>:first_tag, :dataset_associations_join=>true, :select=>[Sequel.expr(:tags).*, Sequel[:albums_tags][:foo]] ds = @Album.first_tags ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Tag ds.sql.must_equal "SELECT tags.*, albums_tags.foo FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (tags.id IN (SELECT albums_tags.tag_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((albums_tags.album_id) IN (SELECT albums.id FROM albums))))" end it "should work for many_through_many associations with :dataset_association_join=>true" do @Artist.many_through_many :tags, :clone=>:tags, :dataset_associations_join=>true, :select=>[Sequel.expr(:tags).*, Sequel[:albums_tags][:foo]] ds = @Artist.tags ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Tag ds.sql.must_equal "SELECT tags.*, albums_tags.foo FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) WHERE (tags.id IN (SELECT albums_tags.tag_id FROM artists INNER JOIN albums ON (albums.artist_id = artists.id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags ON (tags.id = albums_tags.tag_id) WHERE (albums.artist_id IN (SELECT artists.id FROM artists))))" end it "should work for one_through_many associations with :dataset_association_join=>true" do @Artist.one_through_many :otag, :clone=>:otag, :dataset_associations_join=>true, :select=>[Sequel.expr(:tags).*, Sequel[:albums_tags][:foo]] ds = @Artist.otags ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Tag ds.sql.must_equal "SELECT tags.*, 
albums_tags.foo FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) WHERE (tags.id IN (SELECT albums_tags.tag_id FROM artists INNER JOIN albums ON (albums.artist_id = artists.id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags ON (tags.id = albums_tags.tag_id) WHERE (albums.artist_id IN (SELECT artists.id FROM artists))))" end it "should work for pg_array_to_many associations" do ds = @Artist.artist_tags ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Tag ds.sql.must_equal "SELECT * FROM tags WHERE (id IN (SELECT unnest(artists.tag_ids) FROM artists))" end it "should work for many_to_pg_array associations" do ds = @Tag.artists ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Artist ds.sql.must_equal "SELECT * FROM artists WHERE coalesce((tag_ids && (SELECT array_agg(tags.id) FROM tags)), false)" end it "should remove order for current dataset" do ds = @Artist.order(:name).albums ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Album ds.sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists))" end it "should not remove order for current dataset if the dataset is limited" do ds = @Artist.order(:name).limit(2).albums ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Album ds.sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists ORDER BY name LIMIT 2))" end it "should have an associated method that takes an association symbol" do ds = @Album.associated(:artist) ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Artist ds.sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums.artist_id FROM albums))" end it "should raise an Error if an invalid association is given to associated" do proc{@Album.associated(:foo)}.must_raise(Sequel::Error) end it "should raise an Error if an unrecognized association type is used" do @Album.association_reflection(:artist)[:type] = :foo proc{@Album.artists}.must_raise(Sequel::Error) end it "should work correctly when chaining" do ds = @Artist.albums.tags ds.must_be_kind_of(Sequel::Dataset) ds.model.must_equal @Tag ds.sql.must_equal "SELECT tags.* FROM tags WHERE (tags.id IN (SELECT albums_tags.tag_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((albums_tags.album_id) IN (SELECT albums.id FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists))))))" end it "should deal correctly with filters before the association method" do @Artist.filter(:id=>1).albums.sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists WHERE (id = 1)))" end it "should deal correctly with filters after the association method" do @Artist.albums.filter(:id=>1).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id IN (SELECT artists.id FROM artists)) AND (id = 1))" end it "should deal correctly with block on the association" do @Artist.one_to_many :albums, :clone=>:albums do |ds| ds.filter(:id=>1..100) end @Artist.albums.sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id IN (SELECT artists.id FROM artists)) AND (id >= 1) AND (id <= 100))" end it "should deal correctly with :conditions option on the association" do @Artist.one_to_many :albums, :clone=>:albums, :conditions=>{:id=>1..100} @Artist.albums.sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id IN (SELECT artists.id FROM artists)) AND (id >= 1) AND (id <= 100))" end it "should deal correctly 
with :distinct option on the association" do @Artist.one_to_many :albums, :clone=>:albums, :distinct=>true @Artist.albums.sql.must_equal "SELECT DISTINCT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists))" end it "should deal correctly with :eager option on the association" do @Artist.one_to_many :albums, :clone=>:albums, :eager=>:tags @Artist.albums.opts[:eager].must_equal(:tags=>nil) end it "should deal correctly with :eager_block option on the association, ignoring the association block" do @Artist.one_to_many :albums, :clone=>:albums, :eager_block=>proc{|ds| ds.filter(:id=>1..100)} do |ds| ds.filter(:id=>2..200) end @Artist.albums.sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id IN (SELECT artists.id FROM artists)) AND (id >= 1) AND (id <= 100))" end it "should deal correctly with :extend option on the association" do @Artist.one_to_many :albums, :clone=>:albums, :extend=>Module.new{def foo(x) filter(:id=>x) end} @Artist.albums.foo(1).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id IN (SELECT artists.id FROM artists)) AND (id = 1))" end it "should deal correctly with :order option on the association" do @Artist.one_to_many :albums, :clone=>:albums, :order=>:name @Artist.albums.sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists)) ORDER BY name" end it "should deal correctly with :select option on the association" do @Artist.one_to_many :albums, :clone=>:albums, :select=>[:id, :name] @Artist.albums.sql.must_equal "SELECT id, name FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists))" end it "should deal correctly with :order option for one_to_one associations" do @Artist.one_to_one :first_album, :clone=>:first_album, :order=>:name @Artist.first_albums.sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id IN (SELECT artists.id FROM artists)) AND (albums.id IN (SELECT DISTINCT ON (albums.artist_id) albums.id FROM albums ORDER BY albums.artist_id, name))) ORDER BY name' end it "should deal correctly with :limit option for one_to_many associations" do @Artist.one_to_many :albums, :clone=>:albums, :limit=>10, :order=>:name @Artist.albums.sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id IN (SELECT artists.id FROM artists)) AND (albums.id IN (SELECT id FROM (SELECT albums.id, row_number() OVER (PARTITION BY albums.artist_id ORDER BY name) AS x_sequel_row_number_x FROM albums) AS t1 WHERE (x_sequel_row_number_x <= 10)))) ORDER BY name' end it "should deal correctly with :order option for one_through_one associations" do @Album.one_through_one :first_tag, :clone=>:first_tag, :order=>Sequel[:tags][:name] @Album.first_tags.sql.must_equal 'SELECT tags.* FROM tags WHERE (tags.id IN (SELECT albums_tags.tag_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (((albums_tags.album_id) IN (SELECT albums.id FROM albums)) AND ((albums_tags.album_id, tags.id) IN (SELECT DISTINCT ON (albums_tags.album_id) albums_tags.album_id, tags.id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) ORDER BY albums_tags.album_id, tags.name))))) ORDER BY tags.name' end it "should deal correctly with :limit option for many_to_many associations" do @Album.many_to_many :tags, :clone=>:tags, :limit=>10, :order=>Sequel[:tags][:name] @Album.tags.sql.must_equal 'SELECT tags.* FROM tags WHERE (tags.id IN (SELECT albums_tags.tag_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (((albums_tags.album_id) IN (SELECT albums.id FROM 
albums)) AND ((albums_tags.album_id, tags.id) IN (SELECT b, c FROM (SELECT albums_tags.album_id AS b, tags.id AS c, row_number() OVER (PARTITION BY albums_tags.album_id ORDER BY tags.name) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id)) AS t1 WHERE (x_sequel_row_number_x <= 10)))))) ORDER BY tags.name' end it "should deal correctly with :order option for one_through_many associations" do @Artist.one_through_many :otag, :clone=>:otag, :order=>Sequel[:tags][:name] @Artist.otags.sql.must_equal 'SELECT tags.* FROM tags WHERE (tags.id IN (SELECT albums_tags.tag_id FROM artists INNER JOIN albums ON (albums.artist_id = artists.id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags ON (tags.id = albums_tags.tag_id) WHERE ((albums.artist_id IN (SELECT artists.id FROM artists)) AND ((albums.artist_id, tags.id) IN (SELECT DISTINCT ON (albums.artist_id) albums.artist_id, tags.id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) ORDER BY albums.artist_id, tags.name))))) ORDER BY tags.name' end it "should deal correctly with :limit option for many_through_many associations" do @Artist.many_through_many :tags, :clone=>:tags, :limit=>10, :order=>Sequel[:tags][:name] @Artist.tags.sql.must_equal 'SELECT tags.* FROM tags WHERE (tags.id IN (SELECT albums_tags.tag_id FROM artists INNER JOIN albums ON (albums.artist_id = artists.id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags ON (tags.id = albums_tags.tag_id) WHERE ((albums.artist_id IN (SELECT artists.id FROM artists)) AND ((albums.artist_id, tags.id) IN (SELECT b, c FROM (SELECT albums.artist_id AS b, tags.id AS c, row_number() OVER (PARTITION BY albums.artist_id ORDER BY tags.name) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id)) AS t1 WHERE (x_sequel_row_number_x <= 10)))))) ORDER BY tags.name' end end describe "Sequel::Plugins::DatasetAssociations with composite keys" do before do @db = Sequel.mock @db.extend_datasets do def supports_window_functions?; true; end def supports_distinct_on?; true; end end @Base = Class.new(Sequel::Model) @Base.plugin :dataset_associations @Artist = Class.new(@Base) @Album = Class.new(@Base) @Tag = Class.new(@Base) def @Artist.name; 'Artist' end def @Album.name; 'Album' end def @Tag.name; 'Tag' end @Artist.dataset = @db[:artists] @Album.dataset = @db[:albums] @Tag.dataset = @db[:tags] @Artist.set_primary_key([:id1, :id2]) @Album.set_primary_key([:id1, :id2]) @Tag.set_primary_key([:id1, :id2]) @Artist.columns :id1, :id2, :name @Album.columns :id1, :id2, :name, :artist_id1, :artist_id2 @Tag.columns :id1, :id2, :name @Artist.plugin :many_through_many @Artist.one_to_many :albums, :class=>@Album, :key=>[:artist_id1, :artist_id2] @Artist.one_to_one :first_album, :class=>@Album, :key=>[:artist_id1, :artist_id2] @Album.many_to_one :artist, :class=>@Artist, :key=>[:artist_id1, :artist_id2] @Album.many_to_many :tags, :class=>@Tag, :left_key=>[:album_id1, :album_id2], :right_key=>[:tag_id1, :tag_id2] @Album.one_through_one :first_tag, :class=>@Tag, :left_key=>[:album_id1, :album_id2], :right_key=>[:tag_id1, :tag_id2] @Tag.many_to_many :albums, :class=>@Album, :right_key=>[:album_id1, :album_id2], :left_key=>[:tag_id1, :tag_id2] @Artist.many_through_many :tags, [[:albums, [:artist_id1, :artist_id2], [:id1, :id2]], [:albums_tags, [:album_id1, :album_id2], [:tag_id1, 
:tag_id2]]], :class=>@Tag @Artist.one_through_many :otag, [[:albums, [:artist_id1, :artist_id2], [:id1, :id2]], [:albums_tags, [:album_id1, :album_id2], [:tag_id1, :tag_id2]]], :class=>@Tag end it "should work for many_to_one associations" do @Album.artists.sql.must_equal "SELECT * FROM artists WHERE ((artists.id1, artists.id2) IN (SELECT albums.artist_id1, albums.artist_id2 FROM albums))" end it "should work for one_to_many associations" do @Artist.albums.sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists))" end it "should work for one_to_one associations" do @Artist.first_albums.sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists))" end it "should work for many_to_many associations" do @Album.tags.sql.must_equal "SELECT tags.* FROM tags WHERE ((tags.id1, tags.id2) IN (SELECT albums_tags.tag_id1, albums_tags.tag_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.id1) AND (albums_tags.tag_id2 = tags.id2)) WHERE ((albums_tags.album_id1, albums_tags.album_id2) IN (SELECT albums.id1, albums.id2 FROM albums))))" end it "should work for one_through_one associations" do @Album.first_tags.sql.must_equal "SELECT tags.* FROM tags WHERE ((tags.id1, tags.id2) IN (SELECT albums_tags.tag_id1, albums_tags.tag_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.id1) AND (albums_tags.tag_id2 = tags.id2)) WHERE ((albums_tags.album_id1, albums_tags.album_id2) IN (SELECT albums.id1, albums.id2 FROM albums))))" end it "should work for many_through_many associations" do @Artist.tags.sql.must_equal "SELECT tags.* FROM tags WHERE ((tags.id1, tags.id2) IN (SELECT albums_tags.tag_id1, albums_tags.tag_id2 FROM artists INNER JOIN albums ON ((albums.artist_id1 = artists.id1) AND (albums.artist_id2 = artists.id2)) INNER JOIN albums_tags ON ((albums_tags.album_id1 = albums.id1) AND (albums_tags.album_id2 = albums.id2)) INNER JOIN tags ON ((tags.id1 = albums_tags.tag_id1) AND (tags.id2 = albums_tags.tag_id2)) WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists))))" end it "should work for one_through_many associations" do @Artist.otags.sql.must_equal "SELECT tags.* FROM tags WHERE ((tags.id1, tags.id2) IN (SELECT albums_tags.tag_id1, albums_tags.tag_id2 FROM artists INNER JOIN albums ON ((albums.artist_id1 = artists.id1) AND (albums.artist_id2 = artists.id2)) INNER JOIN albums_tags ON ((albums_tags.album_id1 = albums.id1) AND (albums_tags.album_id2 = albums.id2)) INNER JOIN tags ON ((tags.id1 = albums_tags.tag_id1) AND (tags.id2 = albums_tags.tag_id2)) WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists))))" end it "should work correctly when chaining" do @Artist.albums.tags.sql.must_equal "SELECT tags.* FROM tags WHERE ((tags.id1, tags.id2) IN (SELECT albums_tags.tag_id1, albums_tags.tag_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.id1) AND (albums_tags.tag_id2 = tags.id2)) WHERE ((albums_tags.album_id1, albums_tags.album_id2) IN (SELECT albums.id1, albums.id2 FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists))))))" end it "should deal correctly with :order option for one_to_one associations" do @Artist.one_to_one :first_album, :clone=>:first_album, :order=>:name @Artist.first_albums.sql.must_equal 'SELECT * FROM albums WHERE (((albums.artist_id1, 
albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists)) AND ((albums.id1, albums.id2) IN (SELECT DISTINCT ON (albums.artist_id1, albums.artist_id2) albums.id1, albums.id2 FROM albums ORDER BY albums.artist_id1, albums.artist_id2, name))) ORDER BY name' end it "should deal correctly with :limit option for one_to_many associations" do @Artist.one_to_many :albums, :clone=>:albums, :limit=>10, :order=>:name @Artist.albums.sql.must_equal 'SELECT * FROM albums WHERE (((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists)) AND ((albums.id1, albums.id2) IN (SELECT id1, id2 FROM (SELECT albums.id1, albums.id2, row_number() OVER (PARTITION BY albums.artist_id1, albums.artist_id2 ORDER BY name) AS x_sequel_row_number_x FROM albums) AS t1 WHERE (x_sequel_row_number_x <= 10)))) ORDER BY name' end it "should deal correctly with :order option for one_through_one associations" do @Album.one_through_one :first_tag, :clone=>:first_tag, :order=>Sequel[:tags][:name] @Album.first_tags.sql.must_equal 'SELECT tags.* FROM tags WHERE ((tags.id1, tags.id2) IN (SELECT albums_tags.tag_id1, albums_tags.tag_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.id1) AND (albums_tags.tag_id2 = tags.id2)) WHERE (((albums_tags.album_id1, albums_tags.album_id2) IN (SELECT albums.id1, albums.id2 FROM albums)) AND ((albums_tags.album_id1, albums_tags.album_id2, tags.id1, tags.id2) IN (SELECT DISTINCT ON (albums_tags.album_id1, albums_tags.album_id2) albums_tags.album_id1, albums_tags.album_id2, tags.id1, tags.id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.id1) AND (albums_tags.tag_id2 = tags.id2)) ORDER BY albums_tags.album_id1, albums_tags.album_id2, tags.name))))) ORDER BY tags.name' end it "should deal correctly with :limit option for many_to_many associations" do @Album.many_to_many :tags, :clone=>:tags, :limit=>10, :order=>Sequel[:tags][:name] @Album.tags.sql.must_equal 'SELECT tags.* FROM tags WHERE ((tags.id1, tags.id2) IN (SELECT albums_tags.tag_id1, albums_tags.tag_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.id1) AND (albums_tags.tag_id2 = tags.id2)) WHERE (((albums_tags.album_id1, albums_tags.album_id2) IN (SELECT albums.id1, albums.id2 FROM albums)) AND ((albums_tags.album_id1, albums_tags.album_id2, tags.id1, tags.id2) IN (SELECT b, c, d, e FROM (SELECT albums_tags.album_id1 AS b, albums_tags.album_id2 AS c, tags.id1 AS d, tags.id2 AS e, row_number() OVER (PARTITION BY albums_tags.album_id1, albums_tags.album_id2 ORDER BY tags.name) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.id1) AND (albums_tags.tag_id2 = tags.id2))) AS t1 WHERE (x_sequel_row_number_x <= 10)))))) ORDER BY tags.name' end it "should deal correctly with :order option for one_through_many associations" do @Artist.one_through_many :otag, :clone=>:otag, :order=>Sequel[:tags][:name] @Artist.otags.sql.must_equal 'SELECT tags.* FROM tags WHERE ((tags.id1, tags.id2) IN (SELECT albums_tags.tag_id1, albums_tags.tag_id2 FROM artists INNER JOIN albums ON ((albums.artist_id1 = artists.id1) AND (albums.artist_id2 = artists.id2)) INNER JOIN albums_tags ON ((albums_tags.album_id1 = albums.id1) AND (albums_tags.album_id2 = albums.id2)) INNER JOIN tags ON ((tags.id1 = albums_tags.tag_id1) AND (tags.id2 = albums_tags.tag_id2)) WHERE (((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists)) AND ((albums.artist_id1, albums.artist_id2, tags.id1, tags.id2) IN (SELECT DISTINCT ON 
(albums.artist_id1, albums.artist_id2) albums.artist_id1, albums.artist_id2, tags.id1, tags.id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.id1) AND (albums_tags.tag_id2 = tags.id2)) INNER JOIN albums ON ((albums.id1 = albums_tags.album_id1) AND (albums.id2 = albums_tags.album_id2)) ORDER BY albums.artist_id1, albums.artist_id2, tags.name))))) ORDER BY tags.name' end it "should deal correctly with :limit option for many_through_many associations" do @Artist.many_through_many :tags, :clone=>:tags, :limit=>10, :order=>Sequel[:tags][:name] @Artist.tags.sql.must_equal 'SELECT tags.* FROM tags WHERE ((tags.id1, tags.id2) IN (SELECT albums_tags.tag_id1, albums_tags.tag_id2 FROM artists INNER JOIN albums ON ((albums.artist_id1 = artists.id1) AND (albums.artist_id2 = artists.id2)) INNER JOIN albums_tags ON ((albums_tags.album_id1 = albums.id1) AND (albums_tags.album_id2 = albums.id2)) INNER JOIN tags ON ((tags.id1 = albums_tags.tag_id1) AND (tags.id2 = albums_tags.tag_id2)) WHERE (((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists)) AND ((albums.artist_id1, albums.artist_id2, tags.id1, tags.id2) IN (SELECT b, c, d, e FROM (SELECT albums.artist_id1 AS b, albums.artist_id2 AS c, tags.id1 AS d, tags.id2 AS e, row_number() OVER (PARTITION BY albums.artist_id1, albums.artist_id2 ORDER BY tags.name) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.id1) AND (albums_tags.tag_id2 = tags.id2)) INNER JOIN albums ON ((albums.id1 = albums_tags.album_id1) AND (albums.id2 = albums_tags.album_id2))) AS t1 WHERE (x_sequel_row_number_x <= 10)))))) ORDER BY tags.name' end end
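# A minimal usage sketch of the dataset_associations plugin exercised by the
# specs above. The Artist/Album model names and the copies_sold column are
# illustrative assumptions, not part of the specs:
#
#   Artist.plugin :dataset_associations
#   Artist.one_to_many :albums
#
#   # Dataset-level association methods return datasets, not model instances,
#   # so they can be filtered and chained before any query is run:
#   Artist.where{id < 100}.albums.where{copies_sold > 10}
#   # SELECT * FROM albums WHERE ((albums.artist_id IN
#   #   (SELECT artists.id FROM artists WHERE (id < 100))) AND (copies_sold > 10))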

sequel-5.63.0/spec/extensions/dataset_source_alias_spec.rb

require_relative "spec_helper" describe "dataset_source_alias extension" do before do @db = Sequel.mock @db.extension(:dataset_source_alias) end it "should automatically alias datasets to their first source in #from" do @db[@db[:a]].sql.must_equal 'SELECT * FROM (SELECT * FROM a) AS a' @db[:a, @db[:b]].sql.must_equal 'SELECT * FROM a, (SELECT * FROM b) AS b' end it "should handle virtual row blocks in #from" do @db.dataset.from{|_| @db[:a]}.sql.must_equal 'SELECT * FROM (SELECT * FROM a) AS a' @db.dataset.from(:a){|_| @db[:b]}.sql.must_equal 'SELECT * FROM a, (SELECT * FROM b) AS b' end it "should automatically alias datasets to their first source in #join" do @db[:a].cross_join(@db[:b]).sql.must_equal 'SELECT * FROM a CROSS JOIN (SELECT * FROM b) AS b' end it "should handle :table_alias option when joining" do @db[:a].cross_join(@db[:b], :table_alias=>:c).sql.must_equal 'SELECT * FROM a CROSS JOIN (SELECT * FROM b) AS c' end it "should handle aliasing issues automatically" do @db[:a, @db[:a]].sql.must_equal 'SELECT * FROM a, (SELECT * FROM a) AS a_0' @db.dataset.from(:a, @db[:a]){|_| @db[:a]}.sql.must_equal 'SELECT * FROM a, (SELECT * FROM a) AS a_0, (SELECT * FROM a) AS a_1' @db.dataset.from(:a, @db[:a]){|_| @db[:a]}.cross_join(@db[:a]).sql.must_equal 'SELECT * FROM a, (SELECT * FROM a) AS a_0, (SELECT * FROM a) AS a_1 CROSS JOIN (SELECT * FROM a) AS a_2' end it "should handle from_self" do @db[:a].from_self.sql.must_equal 'SELECT * FROM (SELECT * FROM a) AS a' @db[:a].from_self.from_self.sql.must_equal 'SELECT * FROM (SELECT * FROM (SELECT * FROM a) AS a) AS a' end it "should handle datasets without sources" do @db[@db.select(1)].sql.must_equal 'SELECT * FROM (SELECT 1) AS t1' @db[:t, @db.select(1)].sql.must_equal 'SELECT * FROM t, (SELECT 1) AS t1' @db[:a].cross_join(@db.select(1)).sql.must_equal 'SELECT * FROM a CROSS JOIN (SELECT 1) AS t1' end it "should handle datasets selecting from functions" do @db.dataset.from{|o| @db[o.f(:a)]}.sql.must_equal 'SELECT * FROM (SELECT * FROM f(a)) AS t1' end it "should handle datasets with literal SQL" do @db.from(@db['SELECT c FROM d']).sql.must_equal 'SELECT * FROM (SELECT c FROM d) AS t1' end end

sequel-5.63.0/spec/extensions/date_arithmetic_spec.rb

require_relative "spec_helper" asd = begin require 'active_support' require 'active_support/duration' true rescue LoadError warn "Skipping some tests of date_arithmetic extension: can't load active_support/duration" false end Sequel.extension :date_arithmetic describe "date_arithmetic extension" do dbf = lambda do |db_type| db = Sequel.connect("mock://#{db_type}") db.extension :date_arithmetic db end before do @h0 = {:days=>0} @h1 = {:days=>1, :years=>nil, :hours=>0} @h2 = {:years=>1, :months=>1, :weeks=>1, :days=>1, :hours=>1, :minutes=>1, :seconds=>1} end it "should have Sequel.date_add with an interval hash return an appropriate Sequel::SQL::DateAdd expression" do da = Sequel.date_add(:a, :days=>1) da.must_be_kind_of(Sequel::SQL::DateAdd) da.expr.must_equal :a da.interval.must_equal(:days=>1) Sequel.date_add(:a, :years=>1, :months=>2, :days=>3, :hours=>1, :minutes=>1, :seconds=>1).interval.must_equal(:years=>1, :months=>2, :days=>3, :hours=>1, :minutes=>1, :seconds=>1) end it "should have Sequel.date_sub with an interval hash return an appropriate Sequel::SQL::DateAdd expression" do da = Sequel.date_sub(:a, :days=>1) da.must_be_kind_of(Sequel::SQL::DateAdd) da.expr.must_equal :a da.interval.must_equal(:days=>-1) Sequel.date_sub(:a, :years=>1, :months=>2, :days=>3, :hours=>1, :minutes=>1, :seconds=>1).interval.must_equal(:years=>-1, :months=>-2, :days=>-3, :hours=>-1, :minutes=>-1, :seconds=>-1) end it "should have Sequel.date_* with an interval hash handle nil values" do Sequel.date_sub(:a, :days=>1, :hours=>nil).interval.must_equal(:days=>-1) end it "should raise an error if given invalid keys in an interval hash" do lambda{Sequel.date_add(:a, :days=>1, :foo=>1)}.must_raise(Sequel::Error) end it "should raise an error if given string values in an interval hash" do
lambda{Sequel.date_add(:a, :days=>'1')}.must_raise(Sequel::InvalidValue) end if asd it "should have Sequel.date_add with an ActiveSupport::Duration return an appropriate Sequel::SQL::DateAdd expression" do da = Sequel.date_add(:a, ActiveSupport::Duration.new(1, [[:days, 1]])) da.must_be_kind_of(Sequel::SQL::DateAdd) da.expr.must_equal :a da.interval.must_equal(:days=>1) Sequel.date_add(:a, ActiveSupport::Duration.new(1, [[:years, 1], [:months, 1], [:days, 1], [:minutes, 61], [:seconds, 1]])).interval.must_equal(:years=>1, :months=>1, :days=>1, :minutes=>61, :seconds=>1) Sequel.date_add(:a, ActiveSupport::Duration.new(1, [[:weeks, 1]])).interval.must_equal(:days=>7) end it "should have Sequel.date_sub with an ActiveSupport::Duration return an appropriate Sequel::SQL::DateAdd expression" do da = Sequel.date_sub(:a, ActiveSupport::Duration.new(1, [[:days, 1]])) da.must_be_kind_of(Sequel::SQL::DateAdd) da.expr.must_equal :a da.interval.must_equal(:days=>-1) Sequel.date_sub(:a, ActiveSupport::Duration.new(1, [[:years, 1], [:months, 1], [:days, 1], [:minutes, 61], [:seconds, 1]])).interval.must_equal(:years=>-1, :months=>-1, :days=>-1, :minutes=>-61, :seconds=>-1) Sequel.date_sub(:a, ActiveSupport::Duration.new(1, [[:weeks, 1]])).interval.must_equal(:days=>-7) end end it "should use existing method" do db = Sequel.mock db.extend_datasets do def date_add_sql_append(sql, da) interval = String.new each_valid_interval_unit(da.interval, Sequel::SQL::DateAdd::DatasetMethods::DEF_DURATION_UNITS) do |value, sql_unit| interval << "#{value} #{sql_unit} " end literal_append(sql, Sequel.function(:da, da.expr, interval)) end end db.extension :date_arithmetic db.literal(Sequel.date_add(:a, @h0)).must_equal "da(a, '')" db.literal(Sequel.date_add(:a, @h1)).must_equal "da(a, '1 days ')" db.literal(Sequel.date_add(:a, @h2)).must_equal "da(a, '1 years 1 months 8 days 1 hours 1 minutes 1 seconds ')" end it "should correctly literalize on Postgres" do db = dbf.call(:postgres).dataset.with_quote_identifiers(false) db.literal(Sequel.date_add(:a, @h0)).must_equal "CAST(a AS timestamp)" db.literal(Sequel.date_add(:a, @h1)).must_equal "(CAST(a AS timestamp) + make_interval(days := 1))" db.literal(Sequel.date_add(:a, @h2)).must_equal "(CAST(a AS timestamp) + make_interval(years := 1, months := 1, days := 8, hours := 1, mins := 1, secs := 1))" db.literal(Sequel.date_sub(:a, @h0)).must_equal "CAST(a AS timestamp)" db.literal(Sequel.date_sub(:a, @h1)).must_equal "(CAST(a AS timestamp) + make_interval(days := -1))" db.literal(Sequel.date_sub(:a, @h2)).must_equal "(CAST(a AS timestamp) + make_interval(years := -1, months := -1, days := -8, hours := -1, mins := -1, secs := -1))" db.literal(Sequel.date_add(:a, @h0, :cast=>:timestamptz)).must_equal "CAST(a AS timestamptz)" db.literal(Sequel.date_add(:a, @h1, :cast=>:timestamptz)).must_equal "(CAST(a AS timestamptz) + make_interval(days := 1))" db.literal(Sequel.date_add(:a, @h2, :cast=>:timestamptz)).must_equal "(CAST(a AS timestamptz) + make_interval(years := 1, months := 1, days := 8, hours := 1, mins := 1, secs := 1))" db.literal(Sequel.date_add(:a, :days=>Sequel[:a]+1, :weeks=>Sequel[:b])).must_equal "(CAST(a AS timestamp) + make_interval(days := (0 + a + 1 + (b * 7))))" db.literal(Sequel.date_sub(:a, :days=>Sequel[:a]+1, :weeks=>Sequel[:b])).must_equal "(CAST(a AS timestamp) + make_interval(days := (0 + ((a + 1) * -1) + (b * -1 * 7))))" def (db.db).server_version(*); 90300 end db.literal(Sequel.date_add(:a, @h0)).must_equal "CAST(a AS timestamp)" 
db.literal(Sequel.date_add(:a, @h1)).must_equal "(CAST(a AS timestamp) + CAST('1 days ' AS interval))" db.literal(Sequel.date_add(:a, @h2)).must_equal "(CAST(a AS timestamp) + CAST('1 years 1 months 8 days 1 hours 1 minutes 1 seconds ' AS interval))" db.literal(Sequel.date_add(:a, @h0, :cast=>:timestamptz)).must_equal "CAST(a AS timestamptz)" db.literal(Sequel.date_add(:a, @h1, :cast=>:timestamptz)).must_equal "(CAST(a AS timestamptz) + CAST('1 days ' AS interval))" db.literal(Sequel.date_add(:a, @h2, :cast=>:timestamptz)).must_equal "(CAST(a AS timestamptz) + CAST('1 years 1 months 8 days 1 hours 1 minutes 1 seconds ' AS interval))" end it "should correctly literalize on SQLite" do db = dbf.call(:sqlite).dataset.with_quote_identifiers(false) db.literal(Sequel.date_add(:a, @h0)).must_equal "datetime(a)" db.literal(Sequel.date_add(:a, @h1)).must_equal "datetime(a, '1 days')" db.literal(Sequel.date_add(:a, @h2)).must_equal "datetime(a, '1 years', '1 months', '8 days', '1 hours', '1 minutes', '1 seconds')" end it "should correctly literalize on MySQL" do db = dbf.call(:mysql).dataset.with_quote_identifiers(false) db.literal(Sequel.date_add(:a, @h0)).must_equal "CAST(a AS DATETIME)" db.literal(Sequel.date_add(:a, @h1)).must_equal "DATE_ADD(a, INTERVAL 1 DAY)" db.literal(Sequel.date_add(:a, @h2)).must_equal "DATE_ADD(DATE_ADD(DATE_ADD(DATE_ADD(DATE_ADD(DATE_ADD(a, INTERVAL 1 YEAR), INTERVAL 1 MONTH), INTERVAL 8 DAY), INTERVAL 1 HOUR), INTERVAL 1 MINUTE), INTERVAL 1 SECOND)" db.literal(Sequel.date_add(:a, @h0, :cast=>:timestamp)).must_equal "CAST(a AS timestamp)" end it "should correctly literalize on HSQLDB" do db = Sequel.mock def db.database_type; :hsqldb end db.extension :date_arithmetic db.literal(Sequel.date_add(:a, @h0)).must_equal "CAST(CAST(a AS timestamp) AS timestamp)" db.literal(Sequel.date_add(:a, @h1)).must_equal "DATE_ADD(CAST(a AS timestamp), INTERVAL 1 DAY)" db.literal(Sequel.date_add(:a, @h2)).must_equal "DATE_ADD(DATE_ADD(DATE_ADD(DATE_ADD(DATE_ADD(DATE_ADD(CAST(a AS timestamp), INTERVAL 1 YEAR), INTERVAL 1 MONTH), INTERVAL 8 DAY), INTERVAL 1 HOUR), INTERVAL 1 MINUTE), INTERVAL 1 SECOND)" db.literal(Sequel.date_add(:a, @h0, :cast=>:datetime)).must_equal "CAST(CAST(a AS datetime) AS datetime)" db.literal(Sequel.date_add(:a, @h2, :cast=>:datetime)).must_equal "DATE_ADD(DATE_ADD(DATE_ADD(DATE_ADD(DATE_ADD(DATE_ADD(CAST(a AS datetime), INTERVAL 1 YEAR), INTERVAL 1 MONTH), INTERVAL 8 DAY), INTERVAL 1 HOUR), INTERVAL 1 MINUTE), INTERVAL 1 SECOND)" end it "should correctly literalize on MSSQL" do db = dbf.call(:mssql).dataset.with_quote_identifiers(false) db.literal(Sequel.date_add(:A, @h0)).must_equal "CAST(A AS datetime)" db.literal(Sequel.date_add(:A, @h1)).must_equal "DATEADD(day, 1, A)" db.literal(Sequel.date_add(:A, @h2)).must_equal "DATEADD(second, 1, DATEADD(minute, 1, DATEADD(hour, 1, DATEADD(day, 8, DATEADD(month, 1, DATEADD(year, 1, A))))))" db.literal(Sequel.date_add(:A, @h0, :cast=>:timestamp)).must_equal "CAST(A AS timestamp)" end it "should correctly literalize on H2" do db = Sequel.mock def db.database_type; :h2 end db.extension :date_arithmetic db.literal(Sequel.date_add(:a, @h0)).must_equal "CAST(a AS timestamp)" db.literal(Sequel.date_add(:a, @h1)).must_equal "DATEADD('day', 1, a)" db.literal(Sequel.date_add(:a, @h2)).must_equal "DATEADD('second', 1, DATEADD('minute', 1, DATEADD('hour', 1, DATEADD('day', 8, DATEADD('month', 1, DATEADD('year', 1, a))))))" db.literal(Sequel.date_add(:a, @h0, :cast=>:datetime)).must_equal "CAST(a AS datetime)" end it "should correctly 
literalize on access" do db = dbf.call(:access).dataset.with_quote_identifiers(false) db.literal(Sequel.date_add(:a, @h0)).must_equal "CDate(a)" db.literal(Sequel.date_add(:a, @h1)).must_equal "DATEADD('d', 1, a)" db.literal(Sequel.date_add(:a, @h2)).must_equal "DATEADD('s', 1, DATEADD('n', 1, DATEADD('h', 1, DATEADD('d', 8, DATEADD('m', 1, DATEADD('yyyy', 1, a))))))" end it "should correctly literalize on Derby" do db = Sequel.mock def db.database_type; :derby end db.extension :date_arithmetic db.literal(Sequel.date_add(:a, @h0)).must_equal "CAST(a AS timestamp)" db.literal(Sequel.date_add(:a, @h1)).must_equal "{fn timestampadd(SQL_TSI_DAY, 1, timestamp(a))}" db.literal(Sequel.date_add(:a, @h2)).must_equal "{fn timestampadd(SQL_TSI_SECOND, 1, timestamp({fn timestampadd(SQL_TSI_MINUTE, 1, timestamp({fn timestampadd(SQL_TSI_HOUR, 1, timestamp({fn timestampadd(SQL_TSI_DAY, 8, timestamp({fn timestampadd(SQL_TSI_MONTH, 1, timestamp({fn timestampadd(SQL_TSI_YEAR, 1, timestamp(a))}))}))}))}))}))}" db.literal(Sequel.date_add(Date.civil(2012, 11, 12), @h1)).must_equal "{fn timestampadd(SQL_TSI_DAY, 1, timestamp((CAST('2012-11-12' AS varchar(255)) || ' 00:00:00')))}" db.literal(Sequel.date_add(:a, @h0, :cast=>:datetime)).must_equal "CAST(a AS datetime)" end it "should correctly literalize on Oracle" do db = dbf.call(:oracle).dataset.with_quote_identifiers(false) db.literal(Sequel.date_add(:A, @h0)).must_equal "CAST(A AS timestamp)" db.literal(Sequel.date_add(:A, @h1)).must_equal "(A + INTERVAL '1' DAY)" db.literal(Sequel.date_add(:A, @h2)).must_equal "(A + INTERVAL '1' YEAR + INTERVAL '1' MONTH + INTERVAL '8' DAY + INTERVAL '1' HOUR + INTERVAL '1' MINUTE + INTERVAL '1' SECOND)" db.literal(Sequel.date_add(:A, @h0, :cast=>:datetime)).must_equal "CAST(A AS datetime)" end it "should correctly literalize on DB2" do db = dbf.call(:db2) db.literal(Sequel.date_add(:A, @h0)).must_equal "CAST(A AS timestamp)" db.literal(Sequel.date_add(:A, @h1)).must_equal "(CAST(A AS timestamp) + 1 days)" db.literal(Sequel.date_add(:A, @h0)).must_equal "CAST(A AS timestamp)" db.literal(Sequel.date_add(:A, @h1, :cast=>:datetime)).must_equal "(CAST(A AS datetime) + 1 days)" db.literal(Sequel.date_add(:A, @h2, :cast=>:datetime)).must_equal "(CAST(A AS datetime) + 1 years + 1 months + 8 days + 1 hours + 1 minutes + 1 seconds)" end it "should raise error if literalizing on an unsupported database" do db = Sequel.mock db.extension :date_arithmetic lambda{db.literal(Sequel.date_add(:a, @h0))}.must_raise(Sequel::Error) end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/date_parse_input_handler_spec.rb��������������������������������������0000664�0000000�0000000�00000003626�14342141206�0024622�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "datetime_parse_to_time extension" do after do # Can't undo the adding of the module to Sequel, so removing the # method in Sequel is the only way to restore behavior. 

sequel-5.63.0/spec/extensions/date_parse_input_handler_spec.rb

require_relative "spec_helper" describe "date_parse_input_handler extension" do after do
# Can't undo the adding of the module to Sequel, so removing the
# method in Sequel is the only way to restore behavior. This
# won't break anything, since it will fallback to the implementation
# in Sequel::DateParseInputHandler
Sequel.singleton_class.send(:remove_method, :handle_date_parse_input) end it "should be called by Sequel.string_to_*" do Sequel.database_to_application_timestamp("2020-11-12 10:20:30").must_equal Time.local(2020, 11, 12, 10, 20, 30) Sequel.extension :date_parse_input_handler Sequel.date_parse_input_handler do |string| raise Sequel::InvalidValue if string.bytesize > 128 "2020-" + string end small = "11-12 10:20:30" + " " * 100 Sequel.string_to_date(small).must_equal Date.new(2020, 11, 12) Sequel.string_to_datetime(small).must_equal Time.local(2020, 11, 12, 10, 20, 30) Sequel.string_to_time(small).strftime("%H %M %S").must_equal "10 20 30" Sequel.send(:_date_parse, small).must_equal(:hour=>10, :min=>20, :sec=>30, :year=>2020, :mon=>11, :mday=>12) large = "11-12 10:20:30" + " " * 128 proc{Sequel.string_to_date(large)}.must_raise Sequel::InvalidValue proc{Sequel.string_to_datetime(large)}.must_raise Sequel::InvalidValue proc{Sequel.string_to_time(large)}.must_raise Sequel::InvalidValue proc{Sequel.send(:_date_parse, large)}.must_raise Sequel::InvalidValue Sequel.date_parse_input_handler do |string| string end small = "2020-11-12 10:20:30" Sequel.string_to_date(small).must_equal Date.new(2020, 11, 12) Sequel.string_to_datetime(small).must_equal Time.local(2020, 11, 12, 10, 20, 30) Sequel.string_to_time(small).strftime("%H %M %S").must_equal "10 20 30" Sequel.send(:_date_parse, small).must_equal(:hour=>10, :min=>20, :sec=>30, :year=>2020, :mon=>11, :mday=>12) end end

sequel-5.63.0/spec/extensions/datetime_parse_to_time_spec.rb

require_relative "spec_helper" describe "datetime_parse_to_time extension" do before(:all) do Sequel.extension :datetime_parse_to_time end after(:all) do
# Can't undo the adding of the module to Sequel, so removing the
# method in the module is the only way to fix it.
Sequel::DateTimeParseToTime.send(:remove_method, :convert_input_timestamp) end before do @db = Sequel::Database.new @dataset = @db.dataset.with_extend do def supports_timestamp_timezones?; true end def supports_timestamp_usecs?; false end end @utc_time = Time.utc(2010, 1, 2, 3, 4, 5) @local_time = Time.local(2010, 1, 2, 3, 4, 5) @offset = sprintf("%+03i%02i", *(@local_time.utc_offset/60).divmod(60)) @dt_offset = @local_time.utc_offset/Rational(86400, 1) @utc_datetime = DateTime.new(2010, 1, 2, 3, 4, 5) @local_datetime = DateTime.new(2010, 1, 2, 3, 4, 5, @dt_offset) end after do Sequel.default_timezone = nil Sequel.datetime_class = Time end it "should handle conversions during invalid localtimes" do
# This only checks a couple of times that may be invalid.
# You can run with TZ=Europe/Berlin or TZ=US/Pacific Sequel.database_timezone = :utc Sequel.database_to_application_timestamp("2017-03-26 02:30:00").getutc.hour.must_equal 2 Sequel.database_to_application_timestamp("2017-03-12 02:30:00").getutc.hour.must_equal 2 Sequel.application_timezone = :utc Sequel.database_to_application_timestamp("2017-03-26 02:30:00").getutc.hour.must_equal 2 Sequel.database_to_application_timestamp("2017-03-12 02:30:00").getutc.hour.must_equal 2 end it "should handle an database timezone of :utc when literalizing values" do Sequel.database_timezone = :utc @dataset.literal(Time.utc(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05+0000'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05+0000'" end it "should handle an database timezone of :local when literalizing values" do Sequel.database_timezone = :local @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05#{@offset}'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, 5, @dt_offset)).must_equal "'2010-01-02 03:04:05#{@offset}'" end it "should have Database#timezone override Sequel.database_timezone" do Sequel.database_timezone = :local @db.timezone = :utc @dataset.literal(Time.utc(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05+0000'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05+0000'" Sequel.database_timezone = :utc @db.timezone = :local @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5)).must_equal "'2010-01-02 03:04:05#{@offset}'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, 5, @dt_offset)).must_equal "'2010-01-02 03:04:05#{@offset}'" end it "should handle converting database timestamps into application timestamps" do Sequel.database_timezone = :utc Sequel.application_timezone = :local t = Time.now.utc Sequel.database_to_application_timestamp(t).to_s.must_equal t.getlocal.to_s Sequel.database_to_application_timestamp(t.to_s).to_s.must_equal t.getlocal.to_s Sequel.database_to_application_timestamp(t.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal t.getlocal.to_s Sequel.datetime_class = DateTime dt = DateTime.now dt2 = dt.new_offset(0) Sequel.database_to_application_timestamp(dt2).to_s.must_equal dt.to_s Sequel.database_to_application_timestamp(dt2.to_s).to_s.must_equal dt.to_s Sequel.database_to_application_timestamp(dt2.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal dt.to_s Sequel.datetime_class = Time Sequel.database_timezone = :local Sequel.application_timezone = :utc Sequel.database_to_application_timestamp(t.getlocal).to_s.must_equal t.to_s Sequel.database_to_application_timestamp(t.getlocal.to_s).to_s.must_equal t.to_s Sequel.database_to_application_timestamp(t.getlocal.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal t.to_s Sequel.datetime_class = DateTime Sequel.database_to_application_timestamp(dt).to_s.must_equal dt2.to_s Sequel.database_to_application_timestamp(dt.to_s).to_s.must_equal dt2.to_s Sequel.database_to_application_timestamp(dt.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal dt2.to_s end it "should handle typecasting timestamp columns" do Sequel.typecast_timezone = :utc Sequel.application_timezone = :local t = Time.now.utc @db.typecast_value(:datetime, t).to_s.must_equal t.getlocal.to_s @db.typecast_value(:datetime, t.to_s).to_s.must_equal t.getlocal.to_s @db.typecast_value(:datetime, t.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal t.getlocal.to_s Sequel.datetime_class = DateTime dt = DateTime.now dt2 = dt.new_offset(0) @db.typecast_value(:datetime, dt2).to_s.must_equal dt.to_s 
@db.typecast_value(:datetime, dt2.to_s).to_s.must_equal dt.to_s @db.typecast_value(:datetime, dt2.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal dt.to_s Sequel.datetime_class = Time Sequel.typecast_timezone = :local Sequel.application_timezone = :utc @db.typecast_value(:datetime, t.getlocal).to_s.must_equal t.to_s @db.typecast_value(:datetime, t.getlocal.to_s).to_s.must_equal t.to_s @db.typecast_value(:datetime, t.getlocal.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal t.to_s Sequel.datetime_class = DateTime @db.typecast_value(:datetime, dt).to_s.must_equal dt2.to_s @db.typecast_value(:datetime, dt.to_s).to_s.must_equal dt2.to_s @db.typecast_value(:datetime, dt.strftime('%Y-%m-%d %H:%M:%S')).to_s.must_equal dt2.to_s end it "should handle converting database timestamp columns from an array of values" do Sequel.database_timezone = :utc Sequel.application_timezone = :local t = Time.now.utc Sequel.database_to_application_timestamp([t.year, t.mon, t.day, t.hour, t.min, t.sec]).to_s.must_equal t.getlocal.to_s Sequel.datetime_class = DateTime dt = DateTime.now dt2 = dt.new_offset(0) Sequel.database_to_application_timestamp([dt2.year, dt2.mon, dt2.day, dt2.hour, dt2.min, dt2.sec]).to_s.must_equal dt.to_s Sequel.datetime_class = Time Sequel.database_timezone = :local Sequel.application_timezone = :utc t = t.getlocal Sequel.database_to_application_timestamp([t.year, t.mon, t.day, t.hour, t.min, t.sec]).to_s.must_equal t.getutc.to_s Sequel.datetime_class = DateTime Sequel.database_to_application_timestamp([dt.year, dt.mon, dt.day, dt.hour, dt.min, dt.sec]).to_s.must_equal dt2.to_s end it "should raise an InvalidValue error when an error occurs while converting a timestamp" do proc{Sequel.database_to_application_timestamp([0, 0, 0, 0, 0, 0])}.must_raise(Sequel::InvalidValue) end it "should raise an error when attempting to typecast to a timestamp from an unsupported type" do proc{Sequel.database_to_application_timestamp(Object.new)}.must_raise(Sequel::InvalidValue) end it "should raise an InvalidValue error when the DateTime class is used and when a bad application timezone is used when attempting to convert timestamps" do Sequel.application_timezone = :blah Sequel.datetime_class = DateTime proc{Sequel.database_to_application_timestamp('2009-06-01 10:20:30')}.must_raise(Sequel::InvalidValue) end it "should raise an InvalidValue error when the DateTime class is used and when a bad database timezone is used when attempting to convert timestamps" do Sequel.database_timezone = :blah Sequel.datetime_class = DateTime proc{Sequel.database_to_application_timestamp('2009-06-01 10:20:30')}.must_raise(Sequel::InvalidValue) end it "should have Sequel.default_timezone= should set all other timezones" do Sequel.database_timezone.must_be_nil Sequel.application_timezone.must_be_nil Sequel.typecast_timezone.must_be_nil Sequel.default_timezone = :utc Sequel.database_timezone.must_equal :utc Sequel.application_timezone.must_equal :utc Sequel.typecast_timezone.must_equal :utc end it "should work date_parse_input_handler extension" do Sequel.database_to_application_timestamp("2020-11-12 10:20:30").must_equal Time.local(2020, 11, 12, 10, 20, 30) begin Sequel.extension :date_parse_input_handler Sequel.database_timezone = :utc Sequel.date_parse_input_handler do |string| raise Sequel::InvalidValue if string.bytesize > 128 "2020-" + string end Sequel.database_to_application_timestamp("11-12 10:20:30").must_equal Time.utc(2020, 11, 12, 10, 20, 30) proc{Sequel.database_to_application_timestamp("11-12 10:20:30" + " " * 
128)}.must_raise Sequel::InvalidValue ensure Sequel.singleton_class.send(:remove_method, :handle_date_parse_input) end end end
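# A brief sketch of what the datetime_parse_to_time extension changes, based
# on the behavior the specs above check (the exact timestamps are taken from
# the "invalid localtimes" spec):
#
#   Sequel.extension :datetime_parse_to_time
#   Sequel.database_timezone = :utc
#   Sequel.database_to_application_timestamp("2017-03-26 02:30:00")
#   # => a Time whose UTC hour is 2, even when that wall-clock time does not
#   #    exist in the local zone (e.g. during a DST transition)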
block and more than one argument" do proc{@c.def_dataset_method(:a, :b){}}.must_raise Sequel::Error end end ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/defaults_setter_spec.rb�����������������������������������������������0000664�0000000�0000000�00000011621�14342141206�0022766�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel::Plugins::DefaultsSetter" do before do @db = db = Sequel.mock def db.supports_schema_parsing?() true end def db.schema(*) [] end db.singleton_class.send(:alias_method, :schema, :schema) @c = c = Class.new(Sequel::Model(db[:foo])) @c.instance_variable_set(:@db_schema, {:a=>{}}) @c.plugin :defaults_setter @c.columns :a @pr = proc do |x| db.define_singleton_method(:schema){|*| [[:id, {:primary_key=>true}], [:a, {:ruby_default => x, :primary_key=>false}]]} db.singleton_class.send(:alias_method, :schema, :schema) c.dataset = c.dataset; c end end after do Sequel.datetime_class = Time end it "should set default value upon initialization" do @pr.call(2).new.a.must_equal 2 end it "should not mark the column as modified" do @pr.call(2).new.changed_columns.must_equal [] end it "should not set a default of nil" do @pr.call(nil).new.class.default_values.must_equal({}) end it "should set a default of false" do @pr.call(false).new.a.must_equal false end it "should handle Sequel::CURRENT_DATE default by using the current Date" do @pr.call(Sequel::CURRENT_DATE).new.a.must_equal Date.today end it "should handle Sequel::CURRENT_TIMESTAMP default by using the current Time" do t = @pr.call(Sequel::CURRENT_TIMESTAMP).new.a t.must_be_kind_of(Time) (t - Time.now).must_be :<, 1 end it "should handle :callable_default values in schema in preference to :ruby_default" do @db.define_singleton_method(:schema) do |*| [[:id, {:primary_key=>true}], [:a, {:ruby_default => Time.now, :callable_default=>lambda{Date.today}, :primary_key=>false}]] end @c.dataset = @c.dataset @c.new.a.must_equal Date.today end it "should handle Sequel::CURRENT_TIMESTAMP default by using the current DateTime if Sequel.datetime_class is DateTime" do Sequel.datetime_class = DateTime t = @pr.call(Sequel::CURRENT_TIMESTAMP).new.a t.must_be_kind_of(DateTime) (t - DateTime.now).must_be :<, 1/86400.0 end it "should work correctly with the current_datetime_timestamp extension" do @db.autoid = 1 @db.fetch = {:id=>1} @c.dataset = @c.dataset.extension(:current_datetime_timestamp) c = @pr.call(Sequel::CURRENT_TIMESTAMP) @db.sqls o = c.new o.a = o.a o.save @db.sqls.must_equal ["INSERT INTO foo (a) VALUES (CURRENT_TIMESTAMP)", "SELECT * FROM foo WHERE id = 1"] end it "should cache default values if :cache plugin option is used" do @c.plugin :defaults_setter, :cache => true @c.default_values[:a] = 'a' o = @c.new o.a.must_equal 

sequel-5.63.0/spec/extensions/defaults_setter_spec.rb

require_relative "spec_helper" describe "Sequel::Plugins::DefaultsSetter" do before do @db = db = Sequel.mock def db.supports_schema_parsing?() true end def db.schema(*) [] end db.singleton_class.send(:alias_method, :schema, :schema) @c = c = Class.new(Sequel::Model(db[:foo])) @c.instance_variable_set(:@db_schema, {:a=>{}}) @c.plugin :defaults_setter @c.columns :a @pr = proc do |x| db.define_singleton_method(:schema){|*| [[:id, {:primary_key=>true}], [:a, {:ruby_default => x, :primary_key=>false}]]} db.singleton_class.send(:alias_method, :schema, :schema) c.dataset = c.dataset; c end end after do Sequel.datetime_class = Time end it "should set default value upon initialization" do @pr.call(2).new.a.must_equal 2 end it "should not mark the column as modified" do @pr.call(2).new.changed_columns.must_equal [] end it "should not set a default of nil" do @pr.call(nil).new.class.default_values.must_equal({}) end it "should set a default of false" do @pr.call(false).new.a.must_equal false end it "should handle Sequel::CURRENT_DATE default by using the current Date" do @pr.call(Sequel::CURRENT_DATE).new.a.must_equal Date.today end it "should handle Sequel::CURRENT_TIMESTAMP default by using the current Time" do t = @pr.call(Sequel::CURRENT_TIMESTAMP).new.a t.must_be_kind_of(Time) (t - Time.now).must_be :<, 1 end it "should handle :callable_default values in schema in preference to :ruby_default" do @db.define_singleton_method(:schema) do |*| [[:id, {:primary_key=>true}], [:a, {:ruby_default => Time.now, :callable_default=>lambda{Date.today}, :primary_key=>false}]] end @c.dataset = @c.dataset @c.new.a.must_equal Date.today end it "should handle Sequel::CURRENT_TIMESTAMP default by using the current DateTime if Sequel.datetime_class is DateTime" do Sequel.datetime_class = DateTime t = @pr.call(Sequel::CURRENT_TIMESTAMP).new.a t.must_be_kind_of(DateTime) (t - DateTime.now).must_be :<, 1/86400.0 end it "should work correctly with the current_datetime_timestamp extension" do @db.autoid = 1 @db.fetch = {:id=>1} @c.dataset = @c.dataset.extension(:current_datetime_timestamp) c = @pr.call(Sequel::CURRENT_TIMESTAMP) @db.sqls o = c.new o.a = o.a o.save @db.sqls.must_equal ["INSERT INTO foo (a) VALUES (CURRENT_TIMESTAMP)", "SELECT * FROM foo WHERE id = 1"] end it "should cache default values if :cache plugin option is used" do @c.plugin :defaults_setter, :cache => true @c.default_values[:a] = 'a' o = @c.new o.a.must_equal 'a' o.values[:a].must_equal 'a' o.a.must_be_same_as(o.a) end it "should not cache default values if :cache plugin option is used and there are no default values" do @c.plugin :defaults_setter, :cache => true o = @c.new o.a.must_be_nil o.values.must_be_empty o.a.must_be_nil o.a.must_be_same_as(o.a) end it "should not override a given value" do @pr.call(2) @c.new('a'=>3).a.must_equal 3 @c.new('a'=>nil).a.must_be_nil end it "should work correctly when subclassing" do Class.new(@pr.call(2)).new.a.must_equal 2 end it "should contain the default values in default_values" do @pr.call(2).default_values.must_equal(:a=>2) @c.default_values.clear @pr.call(nil).default_values.must_equal({}) end it "should allow modifications of default values" do @pr.call(2) @c.default_values[:a] = 3 @c.new.a.must_equal 3 end it "should allow proc default values" do @pr.call(2) @c.default_values[:a] = proc{3} @c.new.a.must_equal 3 end it "should have procs that set default values set them to nil" do @pr.call(2) @c.default_values[:a] = proc{nil} @c.new.a.must_be_nil end it "should work in subclasses" do @pr.call(2) @c.default_values[:a] = proc{1} c = Class.new(@c) @c.new.a.must_equal 1 c.new.a.must_equal 1 c.default_values[:a] = proc{2} @c.new.a.must_equal 1 c.new.a.must_equal 2 end it "should set default value upon initialization when plugin loaded without dataset" do db = @db @c = c = Class.new(Sequel::Model) @c.plugin :defaults_setter @c.instance_variable_set(:@db_schema, {:a=>{}}) @c.dataset = @db[:foo] @c.columns :a proc{|x| db.define_singleton_method(:schema){|*| [[:id, {:primary_key=>true}], [:a, {:ruby_default => x, :primary_key=>false}]]}; c.dataset = c.dataset; c}.call(2).new.a.must_equal 2 end it "should work correctly on a model without a dataset" do @pr.call(2) c = Class.new(Sequel::Model(@db[:bar])) c.plugin :defaults_setter c.default_values.must_equal(:a=>2) end it "should freeze default values when freezing model class" do c = Class.new(Sequel::Model(@db[:bar])) c.plugin :defaults_setter c.freeze c.default_values.frozen?.must_equal true end end
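# A minimal sketch of the defaults_setter plugin behavior specced above,
# assuming an Item model whose status column has a 'new' database default:
#
#   Item.plugin :defaults_setter        # or plugin :defaults_setter, :cache=>true
#   Item.new.status                     # => 'new' (from the parsed schema default)
#   Item.default_values[:status] = proc{'pending'}  # defaults can be overridden per model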

sequel-5.63.0/spec/extensions/delay_add_association_spec.rb

require_relative "spec_helper" describe "Sequel::Plugins::DelayAddAssociation" do before do @db = Sequel.mock(:autoid=>1, :numrows=>1, :fetch=>{:id=>1, :name=>'a', :c_id=>nil}) @c = Class.new(Sequel::Model(@db[:cs])) @c.send(:define_method, :save){|*| super(:changed=>true)} @c.plugin :delay_add_association @c.columns :id, :name, :c_id @c.one_to_many :cs, :class=>@c, :key=>:c_id @db.sqls end it "should delay adding of the association until after creation" do @o = @c.new(:name=>'a') @o.add_c(@c.load(:id=>2, :name=>'b')) @db.sqls.must_equal [] @o.save @db.sqls.must_equal ["INSERT INTO cs (name) VALUES ('a')", "SELECT * FROM cs WHERE (id = 1) LIMIT 1", "UPDATE cs SET c_id = 1 WHERE (id = 2)"] end it "should immediately reflect changes in cached association" do @o = @c.new(:name=>'a') o = @c.load(:id=>2, :name=>'b') @o.add_c(o) @o.cs.must_equal [o] @db.sqls.must_equal [] end it "should not affect adding associations to existing rows" do @o = @c.load(:id=>1, :name=>'a') @o.add_c(@c.load(:id=>2, :name=>'b')) @db.sqls.must_equal ["UPDATE cs SET c_id = 1 WHERE (id = 2)"] end it "should raise an error when saving if the associated object is invalid" do @c.send(:define_method, :validate){|*| errors.add(:name, 'is b') if name == 'b'} @o = @c.new(:name=>'a') @o.add_c(@c.load(:id=>2, :name=>'b')) proc{@o.save}.must_raise Sequel::ValidationFailed end it "should return nil when saving if the associated object is invalid when raise_on_save_failure is false" do @c.raise_on_save_failure = false @c.send(:define_method, :validate){|*| errors.add(:name, 'is b') if name == 'b'} @o = @c.new(:name=>'a') @o.add_c(@c.load(:id=>2, :name=>'b')) @o.save.must_be_nil @o.errors[:cs].must_equal ["name is b"] @o.cs.first.errors[:name].must_equal ['is b'] end it "should work when passing in hashes" do @o = @c.new(:name=>'a') @o.add_c(:name=>'b') @db.sqls.must_equal [] @o.save @db.sqls.must_equal [ "INSERT INTO cs (name) VALUES ('a')", "SELECT * FROM cs WHERE (id = 1) LIMIT 1", "INSERT INTO cs (name, c_id) VALUES ('b', 1)", "SELECT * FROM cs WHERE (id = 2) LIMIT 1"] end it "should work when passing in primary keys" do @db.fetch = [[{:id=>2, :name=>'b', :c_id=>nil}], [{:id=>1, :name=>'a', :c_id=>nil}]] @o = @c.new(:name=>'a') @o.add_c(2) @db.sqls.must_equal ["SELECT * FROM cs WHERE (id = 2) LIMIT 1"] @o.save @db.sqls.must_equal ["INSERT INTO cs (name) VALUES ('a')", "SELECT * FROM cs WHERE (id = 1) LIMIT 1", "UPDATE cs SET c_id = 1 WHERE (id = 2)"] end end
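# A compact sketch of the delay_add_association plugin shown above, with an
# assumed Artist/Album pair of models:
#
#   Album.plugin :delay_add_association
#   artist = Artist.new(:name=>'YJM')
#   artist.add_album(:name=>'RF')  # no INSERT yet; the artist has no primary key
#   artist.save                    # saves the artist, then saves the delayed album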

sequel-5.63.0/spec/extensions/dirty_spec.rb

require_relative "spec_helper" describe "Sequel::Plugins::Dirty" do before do @db = Sequel.mock(:fetch=>{:initial=>'i'.dup, :initial_changed=>'ic'.dup}, :numrows=>1) @c = Class.new(Sequel::Model(@db[:c])) @c.plugin :dirty @c.columns :initial, :initial_changed, :missing, :missing_changed end dirty_plugin_specs = Module.new do extend Minitest::Spec::DSL it "initial_value should be the current value if value has not changed" do @o.initial_value(:initial).must_equal 'i' @o.initial_value(:missing).must_be_nil end it "initial_value should be the initial value if value has changed" do @o.initial_value(:initial_changed).must_equal 'ic' @o.initial_value(:missing_changed).must_be_nil end it "initial_value should handle case where initial value is reassigned later" do @o.initial_changed = 'ic' @o.initial_value(:initial_changed).must_equal 'ic' @o.missing_changed = nil @o.initial_value(:missing_changed).must_be_nil end it "changed_columns should handle case where initial value is reassigned later" do @o.changed_columns.must_equal [:initial_changed, :missing_changed] @o.initial_changed = 'ic' @o.changed_columns.must_equal [:missing_changed] @o.missing_changed = nil @o.changed_columns.must_equal [:missing_changed] end it "column_change should give initial and current values if there has been a change made" do @o.column_change(:initial_changed).must_equal ['ic', 'ic2'] @o.column_change(:missing_changed).must_equal [nil, 'mc2'] end it "column_change should be nil if no change has been made" do @o.column_change(:initial).must_be_nil @o.column_change(:missing).must_be_nil end it "column_changed? should return whether the column has changed" do @o.column_changed?(:initial).must_equal false @o.column_changed?(:initial_changed).must_equal true @o.column_changed?(:missing).must_equal false @o.column_changed?(:missing_changed).must_equal true end it "column_changed? should handle case where initial value is reassigned later" do @o.initial_changed = 'ic' @o.column_changed?(:initial_changed).must_equal false @o.missing_changed = nil @o.column_changed?(:missing_changed).must_equal false end it "column_changes should give initial and current values" do @o.column_changes.must_equal(:initial_changed=>['ic', 'ic2'], :missing_changed=>[nil, 'mc2']) end it "reset_column should reset the column to its initial value" do @o.reset_column(:initial) @o.initial.must_equal 'i' @o.reset_column(:initial_changed) @o.initial_changed.must_equal 'ic' @o.reset_column(:missing) @o.missing.must_be_nil @o.reset_column(:missing_changed) @o.missing_changed.must_be_nil end it "reset_column should remove missing values from the values" do @o.reset_column(:missing) @o.values.has_key?(:missing).must_equal false @o.reset_column(:missing_changed) @o.values.has_key?(:missing_changed).must_equal false end it "refresh should clear the cached initial values" do @o.refresh @o.column_changes.must_equal({}) end it "will_change_column should be used to signal in-place modification to column" do @o.will_change_column(:initial) @o.initial << 'b' @o.column_change(:initial).must_equal ['i', 'ib'] @o.will_change_column(:initial_changed) @o.initial_changed << 'b' @o.column_change(:initial_changed).must_equal ['ic', 'ic2b'] @o.will_change_column(:missing) @o.values[:missing] = 'b' @o.column_change(:missing).must_equal [nil, 'b'] @o.will_change_column(:missing_changed) @o.missing_changed << 'b' @o.column_change(:missing_changed).must_equal [nil, 'mc2b'] end it "will_change_column should handle different types of existing objects" do [nil, true, false, Class.new{undef_method :clone}.new, Class.new{def clone; raise TypeError; end}.new].each do |v| o = @c.new(:initial=>v) o.will_change_column(:initial) o.initial = 'a' o.column_change(:initial).must_equal [v, 'a'] end end it "should work when freezing objects" do @o.freeze @o.initial_value(:initial).must_equal 'i' proc{@o.initial = 'b'}.must_raise end it "should have #dup duplicate structures" do was_new = @o.new?
@o.update(:missing=>'m2') @o.dup.initial_values.must_equal @o.initial_values @o.dup.initial_values.wont_be_same_as(@o.initial_values) @o.dup.instance_variable_get(:@missing_initial_values).must_equal @o.instance_variable_get(:@missing_initial_values) @o.dup.instance_variable_get(:@missing_initial_values).wont_be_same_as(@o.instance_variable_get(:@missing_initial_values)) if was_new @o.previous_changes.must_be_nil @o.dup.previous_changes.must_be_nil else @o.dup.previous_changes.must_equal @o.previous_changes end @o.dup.previous_changes.wont_be_same_as(@o.previous_changes) if @o.previous_changes end end describe "with new instance" do before do @o = @c.new(:initial=>'i'.dup, :initial_changed=>'ic'.dup) @o.initial_changed = 'ic2'.dup @o.missing_changed = 'mc2'.dup end include dirty_plugin_specs it "save should clear the cached initial values" do @o.save @o.column_changes.must_equal({}) end it "save_changes should clear the cached initial values" do @c.dataset = @c.dataset.with_extend do def supports_insert_select?; true end def insert_select(*) {:id=>1} end end @o.save @o.column_changes.must_equal({}) end it "should work with the typecast_on_load plugin" do @c.instance_variable_set(:@db_schema, :initial=>{:type=>:integer}) @c.plugin :typecast_on_load, :initial @o = @c.call(:initial=>'1') @o.column_changes.must_equal({}) @o.save @o.previous_changes.must_equal({}) end it "should have column_changes work with the typecast_on_load in after hooks" do @c.instance_variable_set(:@db_schema, :initial=>{:type=>:integer}) @c.plugin :typecast_on_load, :initial @o = @c.new @o.initial = 1 @o.column_changes.must_equal({:initial=>[nil, 1]}) column_changes_in_after_save = nil @o.define_singleton_method(:after_save) do column_changes_in_after_save = column_changes super() end @db.fetch = {:initial=>1} @o.save column_changes_in_after_save.must_equal({:initial=>[nil, 1]}) @o.initial = 2 @o.column_changes.must_equal({:initial=>[1, 2]}) @o.save column_changes_in_after_save.must_equal({:initial=>[1, 2]}) @o.previous_changes.must_equal({:initial=>[1, 2]}) end end describe "with existing instance" do before do @o = @c[1] @o.initial_changed = 'ic2'.dup @o.missing_changed = 'mc2'.dup end include dirty_plugin_specs it "previous_changes should be the previous changes after saving" do @o.save @o.previous_changes.must_equal(:initial_changed=>['ic', 'ic2'], :missing_changed=>[nil, 'mc2']) end it "column_previously_was should show the previous value of the column" do @o.save @o.column_previously_was(:initial_changed).must_equal 'ic' @o.column_previously_was(:missing_changed).must_be_nil end it "column_previously_was should be nil if there were no previous changes" do @o.save @o.column_previously_was(:initial).must_be_nil @o.column_previously_was(:missing).must_be_nil end it "column_previously_changed? should include whether the column previously changed" do @o.save @o.column_previously_changed?(:initial_changed).must_equal true @o.column_previously_changed?(:missing_changed).must_equal true @o.column_previously_changed?(:initial).must_equal false @o.column_previously_changed?(:missing).must_equal false end it "column_previously_changed? 
should accept :from and :to options" do @o.save @o.column_previously_changed?(:initial_changed, :from=>'ic').must_equal true @o.column_previously_changed?(:initial_changed, :from=>'ic2').must_equal false @o.column_previously_changed?(:missing_changed, :from=>nil).must_equal true @o.column_previously_changed?(:missing_changed, :from=>'mc2').must_equal false @o.column_previously_changed?(:initial, :from=>nil).must_equal false @o.column_previously_changed?(:missing, :from=>nil).must_equal false @o.column_previously_changed?(:initial_changed, :to=>'ic').must_equal false @o.column_previously_changed?(:initial_changed, :to=>'ic2').must_equal true @o.column_previously_changed?(:missing_changed, :to=>nil).must_equal false @o.column_previously_changed?(:missing_changed, :to=>'mc2').must_equal true @o.column_previously_changed?(:initial, :to=>nil).must_equal false @o.column_previously_changed?(:missing, :to=>nil).must_equal false @o.column_previously_changed?(:initial_changed, :from=>'ic', :to=>'ic2').must_equal true @o.column_previously_changed?(:initial_changed, :from=>'ic2', :to=>'ic2').must_equal false @o.column_previously_changed?(:initial_changed, :from=>'ic', :to=>'ic').must_equal false @o.column_previously_changed?(:initial_changed, :from=>'ic2', :to=>'ic').must_equal false @o.column_previously_changed?(:missing_changed, :from=>nil, :to=>'mc2').must_equal true @o.column_previously_changed?(:missing_changed, :from=>'mc2', :to=>'mc2').must_equal false @o.column_previously_changed?(:missing_changed, :from=>'mc', :to=>'mc').must_equal false @o.column_previously_changed?(:missing_changed, :from=>'mc2', :to=>nil).must_equal false @o.column_previously_changed?(:initial, :from=>'', :to=>'').must_equal false @o.column_previously_changed?(:missing, :from=>'', :to=>'').must_equal false end it "should work when freezing objects after saving" do @o.initial = 'a' @o.save @o.freeze @o.previous_changes[:initial].must_equal ['i', 'a'] proc{@o.initial = 'b'}.must_raise end end end �������������������������������������sequel-5.63.0/spec/extensions/duplicate_columns_handler_spec.rb�������������������������������������0000664�0000000�0000000�00000006407�14342141206�0025006�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" duplicate_columns_handler_specs = Module.new do extend Minitest::Spec::DSL it "should take action depending on :on_duplicate_columns if 2 or more columns have the same name" do check(nil, @cols) @warned.must_be_nil check(:ignore, @cols) @warned.must_be_nil check(:warn, @cols) @warned.must_include("One or more duplicate columns present in #{@cols.inspect}") proc{check(:raise, @cols)}.must_raise(Sequel::DuplicateColumnError) cols = nil check(proc{|cs| cols = cs; nil}, @cols) @warned.must_be_nil cols.must_equal @cols cols = nil check(proc{|cs| cols = cs; :ignore}, @cols) @warned.must_be_nil cols.must_equal @cols cols = nil proc{check(proc{|cs| cols = cs; :raise}, @cols)}.must_raise(Sequel::DuplicateColumnError) cols.must_equal @cols cols = nil check(proc{|cs| cols = cs; :warn}, @cols) @warned.must_include("One or more duplicate columns present in #{@cols.inspect}") cols.must_equal @cols check(:raise, nil) @warned.must_be_nil end it "should not raise error or warning 
if no columns have the same name" do [nil, :ignore, :raise, :warn, proc{|cs| :raise}].each do |handler| check(handler, @cols.uniq) @warned.must_be_nil end end end describe "Sequel::DuplicateColumnsHandler Database configuration" do before do @db = Sequel.mock @db.extension(:duplicate_columns_handler) @cols = [:id, :name, :id] @warned = nil set_warned = @set_warned = proc{|m| @warned = m} @ds = @db[:things].with_extend{define_method(:warn){|message| set_warned.call(message)}} end def check(handler, cols) @db.opts[:on_duplicate_columns] = handler @set_warned.call(nil) @ds.send(:columns=, cols) end include duplicate_columns_handler_specs end describe "Sequel::DuplicateColumnsHandler Dataset configuration" do before do @cols = [:id, :name, :id] @warned = nil set_warned = @set_warned = proc{|m| @warned = m} @ds = Sequel.mock[:things].extension(:duplicate_columns_handler).with_extend{define_method(:warn){|message| set_warned.call(message)}} end def check(handler, cols) @set_warned.call(nil) @ds.on_duplicate_columns(handler).send(:columns=, cols) end include duplicate_columns_handler_specs it "should use handlers passed as blocks to on_duplicate_columns" do proc{@ds.on_duplicate_columns{:raise}.send(:columns=, @cols)}.must_raise(Sequel::DuplicateColumnError) end it "should raise an error if not providing either an argument or block to on_duplicate_columns" do proc{@ds.on_duplicate_columns}.must_raise(Sequel::Error) end it "should raise an error if providing both an argument and block to on_duplicate_columns" do proc{@ds.on_duplicate_columns(:raise){:raise}}.must_raise(Sequel::Error) end it "should warn by default if there is no database or dataset handler" do @ds.send(:columns=, @cols) @warned.must_include("One or more duplicate columns present in #{@cols.inspect}") end it "should fall back to the database setting if there is no dataset-level handler" do @ds.db.opts[:on_duplicate_columns] = :raise proc{@ds.send(:columns=, @cols)}.must_raise(Sequel::DuplicateColumnError) check(:ignore, @cols) @warned.must_be_nil end end ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/eager_each_spec.rb����������������������������������������������������0000664�0000000�0000000�00000007517�14342141206�0021645�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel::Plugins::EagerEach" do before do @c = Class.new(Sequel::Model(:items)) @c.columns :id, :parent_id @c.plugin :eager_each @c.one_to_many :children, :class=>@c, :key=>:parent_id @c.db.sqls end it "should make #each on an eager dataset do eager loading" do a = [] ds = @c.eager(:children).with_fetch([{:id=>1, :parent_id=>nil}, {:id=>2, :parent_id=>nil}]) @c.dataset = @c.dataset.with_fetch([{:id=>3, :parent_id=>1}, {:id=>4, :parent_id=>1}, {:id=>5, :parent_id=>2}, {:id=>6, :parent_id=>2}]) ds.each{|c| a << c} a.must_equal [@c.load(:id=>1, :parent_id=>nil), @c.load(:id=>2, :parent_id=>nil)] a.map{|c| c.associations[:children]}.must_equal [[@c.load(:id=>3, :parent_id=>1), @c.load(:id=>4,
:parent_id=>1)], [@c.load(:id=>5, :parent_id=>2), @c.load(:id=>6, :parent_id=>2)]] sqls = @c.db.sqls sqls.shift.must_equal 'SELECT * FROM items' ['SELECT * FROM items WHERE (items.parent_id IN (1, 2))', 'SELECT * FROM items WHERE (items.parent_id IN (2, 1))'].must_include(sqls.pop) end it "should make #each on an eager_graph dataset do eager loading" do a = [] ds = @c.eager_graph(:children).with_fetch([{:id=>1, :parent_id=>nil, :children_id=>3, :children_parent_id=>1}, {:id=>1, :parent_id=>nil, :children_id=>4, :children_parent_id=>1}, {:id=>2, :parent_id=>nil, :children_id=>5, :children_parent_id=>2}, {:id=>2, :parent_id=>nil, :children_id=>6, :children_parent_id=>2}]) ds.each{|c| a << c} a.must_equal [@c.load(:id=>1, :parent_id=>nil), @c.load(:id=>2, :parent_id=>nil)] a.map{|c| c.associations[:children]}.must_equal [[@c.load(:id=>3, :parent_id=>1), @c.load(:id=>4, :parent_id=>1)], [@c.load(:id=>5, :parent_id=>2), @c.load(:id=>6, :parent_id=>2)]] @c.db.sqls.must_equal ['SELECT items.id, items.parent_id, children.id AS children_id, children.parent_id AS children_parent_id FROM items LEFT OUTER JOIN items AS children ON (children.parent_id = items.id)'] end it "should make #first on an eager dataset do eager loading" do ds = @c.eager(:children).with_fetch([{:id=>1, :parent_id=>nil}]) @c.dataset = @c.dataset.with_fetch([{:id=>3, :parent_id=>1}, {:id=>4, :parent_id=>1}]) a = ds.first a.values.must_equal(:id=>1, :parent_id=>nil) a.associations[:children].must_equal [@c.load(:id=>3, :parent_id=>1), @c.load(:id=>4, :parent_id=>1)] @c.db.sqls.must_equal ['SELECT * FROM items LIMIT 1','SELECT * FROM items WHERE (items.parent_id IN (1))'] end it "should make #first on an eager_graph dataset do eager loading" do a = @c.eager_graph(:children).with_fetch([[{:id=>1, :parent_id=>nil, :children_id=>3, :children_parent_id=>1}], [{:id=>1, :parent_id=>nil, :children_id=>3, :children_parent_id=>1}, {:id=>1, :parent_id=>nil, :children_id=>4, :children_parent_id=>1}]]).first a.values.must_equal(:id=>1, :parent_id=>nil) a.associations[:children].must_equal [@c.load(:id=>3, :parent_id=>1), @c.load(:id=>4, :parent_id=>1)] @c.db.sqls.must_equal ['SELECT items.id, items.parent_id, children.id AS children_id, children.parent_id AS children_parent_id FROM items LEFT OUTER JOIN items AS children ON (children.parent_id = items.id) LIMIT 1', 'SELECT items.id, items.parent_id, children.id AS children_id, children.parent_id AS children_parent_id FROM items LEFT OUTER JOIN items AS children ON (children.parent_id = items.id) WHERE (items.id = 1)'] end it "should make #first on a non-eager dataset work correctly" do @c.dataset.with_fetch([{:id=>1, :parent_id=>nil}]).first.must_equal @c.load(:id=>1, :parent_id=>nil) end it "should get columns normally" do @c.dataset.columns!.must_equal [:id, :parent_id] end it "should not attempt to eager load when getting the columns" do @c.eager(:children).with_extend{def all; raise; end}.columns!.must_equal [:id, :parent_id] end end
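# Illustrative usage sketch, not part of the original test suite; the model
# and association names below are hypothetical and used only for example:
#
#   class Album < Sequel::Model
#     plugin :eager_each
#     one_to_many :tracks
#   end
#
#   # With the plugin loaded, #each on an eager dataset performs the eager
#   # load up front, so the association is already cached for each row:
#   Album.eager(:tracks).each do |album|
#     album.tracks # no additional query per album
#   end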
���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/eager_graph_eager_spec.rb���������������������������������������������0000664�0000000�0000000�00000016014�14342141206�0023201�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "eager_graph_eager plugin" do before do @c = Class.new(Sequel::Model(:items)) @c.columns :id, :parent_id @c.plugin :eager_graph_eager @c.one_to_many :children, :class=>@c, :key=>:parent_id @c.many_to_one :parent, :class=>@c @c.db.sqls end it "should support Dataset#eager_graph_eager for eager loading dependencies of eager_graph associations for one_to_many associations" do a = @c.eager_graph(:children). with_fetch([{:id=>1, :parent_id=>nil, :children_id=>3, :children_parent_id=>1}, {:id=>2, :parent_id=>nil}]). eager_graph_eager([:children], :children=>proc{|ds| ds.with_fetch(:id=>4, :parent_id=>3)}). all @c.db.sqls.must_equal ["SELECT items.id, items.parent_id, children.id AS children_id, children.parent_id AS children_parent_id FROM items LEFT OUTER JOIN items AS children ON (children.parent_id = items.id)", "SELECT * FROM items WHERE (items.parent_id IN (3))"] a.must_equal [@c.load(:id=>1, :parent_id=>nil), @c.load(:id=>2, :parent_id=>nil)] a.map(&:children).must_equal [[@c.load(:id=>3, :parent_id=>1)], []] a.first.children.first.children.must_equal [@c.load(:id=>4, :parent_id=>3)] @c.db.sqls.must_equal [] end it "should support Dataset#eager_graph_eager for eager loading dependencies of eager_graph associations for many_to_one associations" do a = @c.eager_graph(:parent). with_fetch([{:id=>4, :parent_id=>3, :parent_id_0=>3, :parent_parent_id=>1}, {:id=>2, :parent_id=>nil}]). eager_graph_eager([:parent], :parent=>proc{|ds| ds.with_fetch(:id=>1, :parent_id=>nil)}). all @c.db.sqls.must_equal ["SELECT items.id, items.parent_id, parent.id AS parent_id_0, parent.parent_id AS parent_parent_id FROM items LEFT OUTER JOIN items AS parent ON (parent.id = items.parent_id)", "SELECT * FROM items WHERE (items.id IN (1))"] a.must_equal [@c.load(:id=>4, :parent_id=>3), @c.load(:id=>2, :parent_id=>nil)] a.map(&:parent).must_equal [@c.load(:id=>3, :parent_id=>1), nil] a.first.parent.parent.must_equal @c.load(:id=>1, :parent_id=>nil) @c.db.sqls.must_equal [] end it "should support multiple entries in dependency chain" do a = @c.eager_graph(:children=>:children). with_fetch([{:id=>1, :parent_id=>nil, :children_id=>3, :children_parent_id=>1, :children_0_id=>4, :children_0_parent_id=>3}, {:id=>2, :parent_id=>nil}]). eager_graph_eager([:children, :children], :children=>proc{|ds| ds.with_fetch(:id=>5, :parent_id=>4)}). 
all @c.db.sqls.must_equal ["SELECT items.id, items.parent_id, children.id AS children_id, children.parent_id AS children_parent_id, children_0.id AS children_0_id, children_0.parent_id AS children_0_parent_id FROM items LEFT OUTER JOIN items AS children ON (children.parent_id = items.id) LEFT OUTER JOIN items AS children_0 ON (children_0.parent_id = children.id)", "SELECT * FROM items WHERE (items.parent_id IN (4))"] a.must_equal [@c.load(:id=>1, :parent_id=>nil), @c.load(:id=>2, :parent_id=>nil)] a.map(&:children).must_equal [[@c.load(:id=>3, :parent_id=>1)], []] a.first.children.first.children.must_equal [@c.load(:id=>4, :parent_id=>3)] a.first.children.first.children.first.children.must_equal [@c.load(:id=>5, :parent_id=>4)] @c.db.sqls.must_equal [] end it "should support multiple dependency chains" do a = @c.eager_graph(:children, :parent). with_fetch([{:id=>4, :parent_id=>3, :children_id=>5, :children_parent_id=>4, :parent_id_0=>3, :parent_parent_id=>1}, {:id=>2, :parent_id=>nil}]). eager_graph_eager([:children], :children=>proc{|ds| ds.with_fetch(:id=>6, :parent_id=>5)}). eager_graph_eager([:parent], :parent=>proc{|ds| ds.with_fetch(:id=>1, :parent_id=>nil)}). all @c.db.sqls.must_equal ["SELECT items.id, items.parent_id, children.id AS children_id, children.parent_id AS children_parent_id, parent.id AS parent_id_0, parent.parent_id AS parent_parent_id FROM items LEFT OUTER JOIN items AS children ON (children.parent_id = items.id) LEFT OUTER JOIN items AS parent ON (parent.id = items.parent_id)", "SELECT * FROM items WHERE (items.parent_id IN (5))", "SELECT * FROM items WHERE (items.id IN (1))"] a.must_equal [@c.load(:id=>4, :parent_id=>3), @c.load(:id=>2, :parent_id=>nil)] a.map(&:children).must_equal [[@c.load(:id=>5, :parent_id=>4)], []] a.map(&:parent).must_equal [@c.load(:id=>3, :parent_id=>1), nil] a.first.children.first.children.must_equal [@c.load(:id=>6, :parent_id=>5)] a.first.parent.parent.must_equal @c.load(:id=>1, :parent_id=>nil) @c.db.sqls.must_equal [] end it "should raise for invalid dependency chains" do proc{@c.dataset.eager_graph_eager([], :children)}.must_raise Sequel::Error proc{@c.dataset.eager_graph_eager(:children, :children)}.must_raise Sequel::Error proc{@c.dataset.eager_graph_eager(['foo'], :children)}.must_raise Sequel::Error proc{@c.dataset.eager_graph_eager([:foo], :children)}.must_raise Sequel::Error end it "should handle cases where not all associated objects are unique" do a = @c.eager_graph(:parent=>:children). with_fetch([ {:id=>4, :parent_id=>3, :parent_id_0=>3, :parent_parent_id=>1, :children_id=>4, :children_parent_id=>3}, {:id=>5, :parent_id=>3, :parent_id_0=>3, :parent_parent_id=>1, :children_id=>4, :children_parent_id=>3}, {:id=>4, :parent_id=>3, :parent_id_0=>3, :parent_parent_id=>1, :children_id=>5, :children_parent_id=>3}, {:id=>5, :parent_id=>3, :parent_id_0=>3, :parent_parent_id=>1, :children_id=>5, :children_parent_id=>3} ]). eager_graph_eager([:parent], :parent=>proc{|ds| ds.with_fetch(:id=>1, :parent_id=>nil)}). 
all @c.db.sqls.must_equal ["SELECT items.id, items.parent_id, parent.id AS parent_id_0, parent.parent_id AS parent_parent_id, children.id AS children_id, children.parent_id AS children_parent_id FROM items LEFT OUTER JOIN items AS parent ON (parent.id = items.parent_id) LEFT OUTER JOIN items AS children ON (children.parent_id = parent.id)", "SELECT * FROM items WHERE (items.id IN (1))"] a.must_equal [@c.load(:id=>4, :parent_id=>3), @c.load(:id=>5, :parent_id=>3)] a.map(&:parent).must_equal [@c.load(:id=>3, :parent_id=>1), @c.load(:id=>3, :parent_id=>1)] a.map(&:parent).map(&:children).must_equal [a, a] a.map(&:parent).map(&:parent).must_equal [@c.load(:id=>1, :parent_id=>nil), @c.load(:id=>1, :parent_id=>nil)] @c.db.sqls.must_equal [] end it "should not affect eager_graph usage without eager_graph_eager" do a = @c.eager_graph(:children). with_fetch([{:id=>1, :parent_id=>nil, :children_id=>3, :children_parent_id=>1}, {:id=>2, :parent_id=>nil}]). all @c.db.sqls.must_equal ["SELECT items.id, items.parent_id, children.id AS children_id, children.parent_id AS children_parent_id FROM items LEFT OUTER JOIN items AS children ON (children.parent_id = items.id)"] a.must_equal [@c.load(:id=>1, :parent_id=>nil), @c.load(:id=>2, :parent_id=>nil)] a.map(&:children).must_equal [[@c.load(:id=>3, :parent_id=>1)], []] @c.db.sqls.must_equal [] end end ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/empty_array_consider_nulls_spec.rb������������������������������������0000664�0000000�0000000�00000002467�14342141206�0025240�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "filter_having extension" do before do @dataset = Sequel.mock[:test].extension(:empty_array_consider_nulls) end it "should handle all types of IN/NOT IN queries with empty arrays" do @dataset.filter(:id => []).sql.must_equal "SELECT * FROM test WHERE (id != id)" @dataset.filter([:id1, :id2] => []).sql.must_equal "SELECT * FROM test WHERE ((id1 != id1) AND (id2 != id2))" @dataset.exclude(:id => []).sql.must_equal "SELECT * FROM test WHERE (id = id)" @dataset.exclude([:id1, :id2] => []).sql.must_equal "SELECT * FROM test WHERE ((id1 = id1) AND (id2 = id2))" end it "should handle IN/NOT IN queries with multiple columns and an empty dataset where the database doesn't support it" do db = Sequel.mock d1 = db[:test].select(:id1, :id2).filter(:region=>'Asia').columns(:id1, :id2) @dataset = @dataset.with_extend{def supports_multiple_column_in?; false end} @dataset.filter([:id1, :id2] => d1).sql.must_equal "SELECT * FROM test WHERE ((id1 != id1) AND (id2 != id2))" db.sqls.must_equal ["SELECT id1, id2 FROM test WHERE (region = 'Asia')"] @dataset.exclude([:id1, :id2] => d1).sql.must_equal "SELECT * FROM test WHERE ((id1 = id1) AND (id2 = 
id2))" db.sqls.must_equal ["SELECT id1, id2 FROM test WHERE (region = 'Asia')"] end end ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/empty_failure_backtraces_spec.rb��������������������������������������0000664�0000000�0000000�00000003027�14342141206�0024621�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "empty_failure_backtraces plugin" do before do @c = Class.new(Sequel::Model(:items)) do plugin :empty_failure_backtraces columns :x set_primary_key :x unrestrict_primary_key def before_create super cancel_action 'bc' if x == 2 end def before_destroy super cancel_action 'bd' if x == 2 end def validate super errors.add(:x, "3") if x == 3 end end DB.reset end it "should work normally if no exceptions are thrown/raised" do o = @c.create(:x=>1) o.must_be_kind_of @c o.valid?.must_equal true o.destroy.must_equal o end it "should work normally when not rescuing exceptions internally when calling save" do @c.new.set(:x => 2).save(:raise_on_failure=>false).must_be_nil @c.raise_on_save_failure = false @c.create(:x => 2).must_be_nil @c.load(:x => 2).destroy(:raise_on_failure=>false).must_be_nil end it "should work normally when not rescuing exceptions internally when calling valid?" do @c.send(:define_method, :before_validation){cancel_action "bv"} @c.new(:x => 2).valid?.must_equal false end it "should raise exceptions with empty backtraces" do begin @c.create(:x => 2) rescue Sequel::HookFailed => e e.backtrace.must_be_empty 1 end.must_equal 1 begin @c.create(:x => 3) rescue Sequel::ValidationFailed => e e.backtrace.must_be_empty 1 end.must_equal 1 end end ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/enum_spec.rb����������������������������������������������������������0000664�0000000�0000000�00000013340�14342141206�0020535�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel enum plugin" do before do @Album = Class.new(Sequel::Model(Sequel.mock[:albums])) @Album.columns :id, :status_id @Album.plugin :enum @Album.enum :status_id, :good=>3, :bad=>5 @album = @Album.load(:status_id=>3) end it "should add enum_value! and enum_value? 
methods for setting/checking the enum values" do @album.good?.must_equal true @album.bad?.must_equal false @album.bad!.must_be_nil @album.good?.must_equal false @album.bad?.must_equal true @album.good!.must_be_nil @album.good?.must_equal true @album.bad?.must_equal false end it "should have column method convert to enum value if possible" do @album.status_id.must_equal :good @album.bad! @album.status_id.must_equal :bad @album[:status_id] = 3 @album.status_id.must_equal :good end it "should have the column method pass non-enum values through" do @album[:status_id] = 4 @album.status_id.must_equal 4 end it "should have column= handle enum values" do @album.status_id = :bad @album[:status_id].must_equal 5 @album.good?.must_equal false @album.bad?.must_equal true @album.status_id = :good @album[:status_id].must_equal 3 @album.good?.must_equal true @album.bad?.must_equal false end it "should have column= handle non-enum values" do @album.status_id = 5 @album[:status_id].must_equal 5 @album.good?.must_equal false @album.bad?.must_equal true end it "should setup dataset methods for each value" do ds = @Album.where(:id=>1) ds.good.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id = 3))" ds.not_good.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id != 3))" ds.bad.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id = 5))" ds.not_bad.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id != 5))" end end describe "Sequel enum plugin" do before do @Album = Class.new(Sequel::Model(Sequel.mock[:albums])) @Album.columns :id, :status_id @Album.plugin :enum @album = @Album.load(:status_id=>3) end it "should allow overriding methods in class and calling super" do @Album.enum :status_id, {:good=>3, :bad=>5}, :override_accessors=>false bad = nil @Album.class_eval do define_method(:bad?) do bad.nil? ? super() : bad end end @album.bad?.must_equal false bad = true @album.bad?.must_equal true bad = false @album.bad?.must_equal false bad = nil @album.bad! @album.bad?.must_equal true end it "should not override accessor methods for each value if :override_accessors option is false" do @Album.enum :status_id, {:good=>3, :bad=>5}, :override_accessors=>false @album.status_id.must_equal 3 @album.status_id = :bad @album.status_id.must_equal :bad @album.bad! @album.status_id.must_equal 5 end it "should not setup dataset methods for each value if :dataset_methods option is false" do @Album.enum :status_id, {:good=>3, :bad=>5}, :dataset_methods=>false ds = @Album.where(:id=>1) ds.wont_respond_to(:good) ds.wont_respond_to(:not_good) ds.wont_respond_to(:bad) ds.wont_respond_to(:not_bad) end it "should handle :prefix=>true option" do @Album.enum :status_id, {:good=>3, :bad=>5}, :prefix=>true @album.status_id_good?.must_equal true @album.status_id_bad! @album.status_id_bad?.must_equal true ds = @Album.where(:id=>1) ds.status_id_good.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id = 3))" ds.status_id_not_good.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id != 3))" end it "should handle :prefix=>string option" do @Album.enum :status_id, {:good=>3, :bad=>5}, :prefix=>'status' @album.status_good?.must_equal true @album.status_bad! 
@album.status_bad?.must_equal true ds = @Album.where(:id=>1) ds.status_good.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id = 3))" ds.status_not_good.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id != 3))" end it "should handle :suffix=>true option" do @Album.enum :status_id, {:good=>3, :bad=>5}, :suffix=>true @album.good_status_id?.must_equal true @album.bad_status_id! @album.bad_status_id?.must_equal true ds = @Album.where(:id=>1) ds.good_status_id.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id = 3))" ds.not_good_status_id.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id != 3))" end it "should handle :suffix=>string option" do @Album.enum :status_id, {:good=>3, :bad=>5}, :suffix=>'status' @album.good_status?.must_equal true @album.bad_status! @album.bad_status?.must_equal true ds = @Album.where(:id=>1) ds.good_status.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id = 3))" ds.not_good_status.sql.must_equal "SELECT * FROM albums WHERE ((id = 1) AND (status_id != 3))" end it "should support multiple enums per class" do @Album.enum :id, {:odd=>1, :even=>2} @Album.enum :status_id, {:good=>3, :bad=>5} @album = @Album.load(:id=>1, :status_id=>3) @album.odd?.must_equal true @album.even?.must_equal false @album.good?.must_equal true @album.bad?.must_equal false end it "raises Error for column that isn't a symbol" do proc{@Album.enum 'status_id', :good=>3, :bad=>5}.must_raise Sequel::Error end it "raises Error for non-hash values" do proc{@Album.enum :status_id, [:good, :bad]}.must_raise Sequel::Error end it "raises Error for values hash with non-symbol keys" do proc{@Album.enum :status_id, 'good'=>3, :bad=>5}.must_raise Sequel::Error end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/error_splitter_spec.rb������������������������������������������������0000664�0000000�0000000�00000000646�14342141206�0022655�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel::Plugins::ErrorSplitter" do before do @c = Class.new(Sequel::Model) @c.plugin :error_splitter @m = @c.new def @m.validate errors.add([:a, :b], 'is bad') end end it "should split errors for multiple columns and assign them to each column" do @m.valid?.must_equal false @m.errors.must_equal(:a=>['is bad'], :b=>['is bad']) end end
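# Illustrative usage sketch, not part of the original test suite; the class
# and column names below are hypothetical:
#
#   class Artist < Sequel::Model
#     plugin :error_splitter
#     def validate
#       super
#       # A single error added for two columns at once...
#       errors.add([:first_name, :last_name], 'is bad')
#     end
#   end
#
#   a = Artist.new
#   a.valid?              # => false
#   # ...is split into a separate entry for each column:
#   a.errors[:first_name] # => ['is bad']
#   a.errors[:last_name]  # => ['is bad']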
������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/error_sql_spec.rb�����������������������������������������������������0000664�0000000�0000000�00000002506�14342141206�0021603�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "error_sql extension" do before do @db = Sequel.mock(:fetch=>proc{|sql| @db.synchronize{|c| @db.log_connection_yield(sql, c){raise StandardError}}}).extension(:error_sql) end it "should have Sequel::DatabaseError#sql give the SQL causing the error" do @db["SELECT"].all rescue (e = $!) e.sql.must_equal "SELECT" end it "should include connection information in SQL information if logging connection info" do @db.log_connection_info = true @db["SELECT"].all rescue (e = $!) e.sql.must_match(/\A\(conn: -?\d+\) SELECT\z/) end it "should include arguments in SQL information if given" do @db["SELECT"].with_fetch(proc{|sql| @db.synchronize{|c| @db.log_connection_yield(sql, c, [1, 2]){raise StandardError}}}).all rescue (e = $!) e.sql.must_equal "SELECT; [1, 2]" end it "should have Sequel::DatabaseError#sql give the SQL causing the error when using a logger" do l = Object.new def l.method_missing(*) end @db.loggers = [l] @db["SELECT"].all rescue (e = $!) e.sql.must_equal "SELECT" end it "should have Sequel::DatabaseError#sql be nil if there is no wrapped exception" do @db["SELECT"].with_fetch(proc{|sql| @db.log_connection_yield(sql, nil){raise Sequel::DatabaseError}}).all rescue (e = $!) 
e.sql.must_be_nil end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/escaped_like_spec.rb��������������������������������������������������0000664�0000000�0000000�00000004717�14342141206�0022211�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "escaped_like extension" do before do Sequel.extension(:escaped_like) @ds = Sequel.mock[:t] @c = Sequel[:c] end it "escaped_like should support creating case sensitive pattern matches" do @ds.where(@c.escaped_like('?', 'a')).sql.must_equal "SELECT * FROM t WHERE (c LIKE 'a' ESCAPE '\\')" @ds.where(@c.escaped_like('?%', 'a')).sql.must_equal "SELECT * FROM t WHERE (c LIKE 'a%' ESCAPE '\\')" @ds.where(@c.escaped_like('?', 'a%')).sql.must_equal "SELECT * FROM t WHERE (c LIKE 'a\\%' ESCAPE '\\')" @ds.where(@c.escaped_like('?', ['a%'])).sql.must_equal "SELECT * FROM t WHERE (c LIKE 'a\\%' ESCAPE '\\')" @ds.where(@c.escaped_like('??', ['a', '%'])).sql.must_equal "SELECT * FROM t WHERE (c LIKE 'a\\%' ESCAPE '\\')" end it "escaped_ilike should support creating case insensitive pattern matches" do @ds.where(@c.escaped_ilike('?', 'a')).sql.must_equal "SELECT * FROM t WHERE (UPPER(c) LIKE UPPER('a') ESCAPE '\\')" @ds.where(@c.escaped_ilike('?%', 'a')).sql.must_equal "SELECT * FROM t WHERE (UPPER(c) LIKE UPPER('a%') ESCAPE '\\')" @ds.where(@c.escaped_ilike('?', 'a%')).sql.must_equal "SELECT * FROM t WHERE (UPPER(c) LIKE UPPER('a\\%') ESCAPE '\\')" @ds.where(@c.escaped_ilike('?', ['a%'])).sql.must_equal "SELECT * FROM t WHERE (UPPER(c) LIKE UPPER('a\\%') ESCAPE '\\')" @ds.where(@c.escaped_ilike('??', ['a', '%'])).sql.must_equal "SELECT * FROM t WHERE (UPPER(c) LIKE UPPER('a\\%') ESCAPE '\\')" end it "should raise an Error for a mismatched number of placeholders" do proc{@ds.where(@c.escaped_like('?', [])).sql}.must_raise Sequel::Error proc{@ds.where(@c.escaped_like('??', ['a'])).sql}.must_raise Sequel::Error proc{@ds.where(@c.escaped_ilike('', ['a'])).sql}.must_raise Sequel::Error proc{@ds.where(@c.escaped_ilike('?', ['a', 'a'])).sql}.must_raise Sequel::Error end it "escaped_like and escaped_ilike should return expressions" do @ds.select(@c.escaped_like('?', 'a').as(:b)).sql.must_equal "SELECT (c LIKE 'a' ESCAPE '\\') AS b FROM t" @ds.select(@c.escaped_like('?', 'a').cast(String)).sql.must_equal "SELECT CAST((c LIKE 'a' ESCAPE '\\') AS varchar(255)) FROM t" @ds.order(@c.escaped_like('?', 'a').desc).sql.must_equal "SELECT * FROM t ORDER BY (c LIKE 'a' ESCAPE '\\') DESC" @ds.where(@c.escaped_like('?', 'a') | @c.escaped_like('?', 'b')).sql.must_equal "SELECT * FROM t WHERE ((c LIKE 'a' ESCAPE '\\') OR (c LIKE 'b' ESCAPE '\\'))" end end 
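# Illustrative usage sketch, not part of the original test suite; DB, the
# :items table, the :name column, and user_input are hypothetical:
#
#   Sequel.extension :escaped_like
#
#   # Placeholder values have LIKE metacharacters escaped, so user input
#   # matches literally instead of acting as a wildcard pattern:
#   user_input = 'a%b'
#   DB[:items].where(Sequel[:name].escaped_like('%?%', user_input))
#   # WHERE (name LIKE '%a\%b%' ESCAPE '\')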
�������������������������������������������������sequel-5.63.0/spec/extensions/eval_inspect_spec.rb��������������������������������������������������0000664�0000000�0000000�00000007455�14342141206�0022257�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" Sequel.extension :eval_inspect describe "eval_inspect extension" do before do @ds = Sequel.mock.dataset.with_extend do def supports_window_functions?; true end def literal_blob_append(sql, s) sql << "X'#{s}'" end end end it "should make eval(obj.inspect) == obj for all Sequel::SQL::Expression subclasses" do [ # Objects with components where eval(inspect) == self Sequel::SQL::AliasedExpression.new(:b, :a), Sequel::SQL::AliasedExpression.new(:b, :a, [:c, :d]), Sequel::SQL::CaseExpression.new({:b=>:a}, :c), Sequel::SQL::CaseExpression.new({:b=>:a}, :c, :d), Sequel::SQL::Cast.new(:a, :b), Sequel::SQL::ColumnAll.new(:a), Sequel::SQL::ComplexExpression.new(:'=', :b, :a), Sequel::SQL::Constant.new(:a), Sequel::CURRENT_DATE, Sequel::CURRENT_TIMESTAMP, Sequel::CURRENT_TIME, Sequel::SQLTRUE, Sequel::SQLFALSE, Sequel::NULL, Sequel::NOTNULL, Sequel::SQL::Function.new(:a, :b, :c), Sequel::SQL::Identifier.new(:a), Sequel::SQL::JoinClause.new(:inner, :b), Sequel::SQL::JoinOnClause.new({:d=>:a}, :inner, :b), Sequel::SQL::JoinUsingClause.new([:a], :inner, :b), Sequel::SQL::JoinClause.new(:inner, Sequel.as(:b, :c, [:d, :e])), Sequel::SQL::JoinOnClause.new({:d=>:a}, :inner, Sequel.as(:b, :c, [:d, :e])), Sequel::SQL::JoinUsingClause.new([:a], :inner, Sequel.as(:b, :c, [:d, :e])), Sequel::SQL::PlaceholderLiteralString.new('? 
= ?', [:a, :b]), Sequel::SQL::PlaceholderLiteralString.new(':a = :b', [{:a=>:b, :b=>42}]), Sequel::SQL::OrderedExpression.new(:a), Sequel::SQL::OrderedExpression.new(:a, false), Sequel::SQL::OrderedExpression.new(:a, false, :nulls=>:first), Sequel::SQL::OrderedExpression.new(:a, false, :nulls=>:last), Sequel::SQL::QualifiedIdentifier.new(:b, :a), Sequel::SQL::Subscript.new(:a, [1, 2]), Sequel::SQL::Window.new(:order=>:a, :partition=>:b), Sequel::SQL::Function.new(:a, :b, :c).over(:order=>:a, :partition=>:b), Sequel::SQL::Wrapper.new(:a), # Objects with components where eval(inspect) != self Sequel::SQL::AliasedExpression.new(Sequel::SQL::Blob.new('s'), :a), Sequel::SQL::AliasedExpression.new(Sequel::LiteralString.new('s'), :a), Sequel::SQL::PlaceholderLiteralString.new('(a, b) IN ?', [Sequel::SQL::ValueList.new([[1, 2]])]), Sequel::SQL::CaseExpression.new({{:d=>Sequel::LiteralString.new('e')}=>:a}, :c, :d), Sequel::SQL::AliasedExpression.new(Date.new(2011, 10, 11), :a), Sequel::SQL::AliasedExpression.new(Sequel::SQLTime.create(10, 20, 30, 500000.125), :a), Sequel::SQL::AliasedExpression.new(DateTime.new(2011, 9, 11, 10, 20, 30), :a), Sequel::SQL::AliasedExpression.new(DateTime.new(2011, 9, 11, 10, 20, 30, 0.25), :a), Sequel::SQL::AliasedExpression.new(DateTime.new(2011, 9, 11, 10, 20, 30, -0.25), :a), Sequel::SQL::AliasedExpression.new(Time.local(2011, 9, 11, 10, 20, 30), :a), Sequel::SQL::AliasedExpression.new(Time.local(2011, 9, 11, 10, 20, 30, 500000.125), :a), Sequel::SQL::AliasedExpression.new(Time.utc(2011, 9, 11, 10, 20, 30), :a), Sequel::SQL::AliasedExpression.new(Time.utc(2011, 9, 11, 10, 20, 30, 500000.125), :a), Sequel::SQL::AliasedExpression.new(BigDecimal('1.000000000000000000000000000000000000000000000001'), :a), Sequel::SQL::AliasedExpression.new(Sequel::CURRENT_DATE, :a), Sequel::SQL::AliasedExpression.new(Sequel::CURRENT_TIMESTAMP, :a), ].each do |o| v = eval(o.inspect) v.must_equal o @ds.literal(v).must_equal @ds.literal(o) ds = @ds @ds.db.create_table(:test) do v = eval(o.inspect) v.must_equal o ds.literal(v).must_equal ds.literal(o) end end end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/exclude_or_null_spec.rb�����������������������������������������������0000664�0000000�0000000�00000001163�14342141206�0022754�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "exclude_or_null extension" do before do @ds = Sequel.mock[:t].extension(:exclude_or_null) end it "#exclude_or_null should add WHERE condition where a is false or NULL" do @ds.exclude_or_null(:a).sql.must_equal "SELECT * FROM t WHERE NOT coalesce(a, 'f')" end it "#exclude_or_null_having should add HAVING condition where a is false or NULL" do @ds.exclude_or_null_having(:a).sql.must_equal "SELECT * FROM t HAVING NOT coalesce(a, 'f')" end it "should not affect normal exclude" do @ds.exclude(:a).sql.must_equal "SELECT * FROM t WHERE NOT a" end end
�������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/finder_spec.rb��������������������������������������������������������0000664�0000000�0000000�00000023737�14342141206�0021053�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe Sequel::Model, ".finder" do before do @h = {:id=>1} @db = Sequel.mock(:fetch=>@h) @c = Class.new(Sequel::Model(@db[:items])) @c.instance_eval do def foo(a, b) where(:bar=>a).order(b) end def foo_bar(a, b=1) where(:bar=>a).order(b) end end @c.plugin :finder @o = @c.load(@h) @db.sqls end it "should create a method that calls the method given and returns the first instance" do @c.finder :foo @c.first_foo(1, 2).must_equal @o @c.first_foo(3, 4).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 1) ORDER BY 2 LIMIT 1", "SELECT * FROM items WHERE (bar = 3) ORDER BY 4 LIMIT 1"] end it "should create a method that calls the method given and returns the first instance when method has negative arity and only required arguments are given" do @c.finder :foo_bar @c.first_foo_bar(2).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 2) ORDER BY 1 LIMIT 1"] end it "should not allow calling a finder and providing values for the optional arguments" do @c.finder :foo_bar proc{@c.first_foo_bar(3, 4)}.must_raise Sequel::Error @db.sqls.must_equal [] end it "should work correctly when subclassing" do @c.finder(:foo) @sc = Class.new(@c) @sc.set_dataset :foos @db.sqls @sc.first_foo(1, 2).must_equal @sc.load(@h) @sc.first_foo(3, 4).must_equal @sc.load(@h) @db.sqls.must_equal ["SELECT * FROM foos WHERE (bar = 1) ORDER BY 2 LIMIT 1", "SELECT * FROM foos WHERE (bar = 3) ORDER BY 4 LIMIT 1"] end it "should work correctly when dataset is modified" do @c.finder(:foo) @c.first_foo(1, 2).must_equal @o @c.set_dataset :foos @c.first_foo(3, 4).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 1) ORDER BY 2 LIMIT 1", "SELECT * FROM foos LIMIT 0", "SELECT * FROM foos WHERE (bar = 3) ORDER BY 4 LIMIT 1"] end it "should create a method based on the given block if no method symbol provided" do @c.finder(:name=>:first_foo){|pl, ds| ds.where(pl.arg).limit(1)} @c.first_foo(:id=>1).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (id = 1) LIMIT 1"] end it "should raise an error if both a block and method symbol given" do proc{@c.finder(:foo, :name=>:first_foo){|pl, ds| ds.where(pl.arg)}}.must_raise(Sequel::Error) end it "should raise an error if two option hashes are provided" do proc{@c.finder({:name2=>:foo}, :name=>:first_foo){|pl, ds| ds.where(pl.arg)}}.must_raise(Sequel::Error) end it "should raise an error if block is provide without a name" do proc{@c.finder{|pl, ds| ds.where(pl.arg)}}.must_raise(Sequel::Error) end it "should raise an error if invalid :type option is used" do proc{@c.finder(:name=>:foo_foo, :type=>:foo){|pl, ds| 
ds.where(pl.arg)}}.must_raise(Sequel::Error) end it "should support :type option" do @c.finder :foo, :type=>:all @c.finder :foo, :type=>:each @c.finder :foo, :type=>:get a = [] @c.all_foo(1, 2){|r| a << r}.must_equal [@o] a.must_equal [@o] a = [] @c.each_foo(3, 4){|r| a << r} a.must_equal [@o] @c.get_foo(5, 6).must_equal 1 @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 1) ORDER BY 2", "SELECT * FROM items WHERE (bar = 3) ORDER BY 4", "SELECT * FROM items WHERE (bar = 5) ORDER BY 6 LIMIT 1"] end it "should support :name option" do @c.finder :foo, :name=>:find_foo @c.find_foo(1, 2).must_equal @o @c.find_foo(3, 4).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 1) ORDER BY 2 LIMIT 1", "SELECT * FROM items WHERE (bar = 3) ORDER BY 4 LIMIT 1"] end it "should support :arity option" do def @c.foobar(*b) ds = dataset b.each_with_index do |a, i| ds = ds.where(i=>a) end ds end @c.finder :foobar, :arity=>1, :name=>:find_foobar_1 @c.finder :foobar, :arity=>2, :name=>:find_foobar_2 @c.find_foobar_1(:a) @c.find_foobar_2(:a, :b) @db.sqls.must_equal ["SELECT * FROM items WHERE (0 = a) LIMIT 1", "SELECT * FROM items WHERE ((0 = a) AND (1 = b)) LIMIT 1"] end it "should support :mod option" do m = Module.new @c.finder :foo, :mod=>m proc{@c.first_foo}.must_raise NoMethodError @c.extend m @c.first_foo(1, 2).must_equal @o @c.first_foo(3, 4).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 1) ORDER BY 2 LIMIT 1", "SELECT * FROM items WHERE (bar = 3) ORDER BY 4 LIMIT 1"] end it "should raise error when calling with the wrong arity" do @c.finder :foo proc{@c.first_foo(1)}.must_raise Sequel::Error proc{@c.first_foo(1,2,3)}.must_raise Sequel::Error end end describe Sequel::Model, ".prepared_finder" do before do @h = {:id=>1} @db = Sequel.mock(:fetch=>@h) @db.extend_datasets do def select_sql sql = super sql << ' -- prepared' if is_a?(Sequel::Dataset::PreparedStatementMethods) && !opts[:sql] sql end end @c = Class.new(Sequel::Model(@db[:items])) @c.instance_eval do def foo(a, b) where(:bar=>a).order(b) end end @c.plugin :finder @o = @c.load(@h) @db.sqls end it "should create a method that calls the method given and returns the first instance" do @c.prepared_finder :foo @c.first_foo(1, 2).must_equal @o @c.first_foo(3, 4).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 1) ORDER BY 2 LIMIT 1 -- prepared", "SELECT * FROM items WHERE (bar = 3) ORDER BY 4 LIMIT 1 -- prepared"] end it "should work correctly when subclassing" do @c.prepared_finder(:foo) @sc = Class.new(@c) @sc.set_dataset :foos @db.sqls @sc.first_foo(1, 2).must_equal @sc.load(@h) @sc.first_foo(3, 4).must_equal @sc.load(@h) @db.sqls.must_equal ["SELECT * FROM foos WHERE (bar = 1) ORDER BY 2 LIMIT 1 -- prepared", "SELECT * FROM foos WHERE (bar = 3) ORDER BY 4 LIMIT 1 -- prepared"] end it "should work correctly when dataset is modified" do @c.prepared_finder(:foo) @c.first_foo(1, 2).must_equal @o @c.set_dataset :foos @c.first_foo(3, 4).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 1) ORDER BY 2 LIMIT 1 -- prepared", "SELECT * FROM foos LIMIT 0", "SELECT * FROM foos WHERE (bar = 3) ORDER BY 4 LIMIT 1 -- prepared"] end it "should create a method based on the given block if no method symbol provided" do @c.prepared_finder(:name=>:first_foo){|a1| where(:id=>a1).limit(1)} @c.first_foo(1).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (id = 1) LIMIT 1 -- prepared"] end it "should raise an error if both a block and method symbol given" do 
proc{@c.prepared_finder(:foo, :name=>:first_foo){|pl, ds| ds.where(pl.arg)}}.must_raise(Sequel::Error) end it "should raise an error if two option hashes are provided" do proc{@c.prepared_finder({:name2=>:foo}, :name=>:first_foo){|pl, ds| ds.where(pl.arg)}}.must_raise(Sequel::Error) end it "should support :type option" do @c.prepared_finder :foo, :type=>:all @c.prepared_finder :foo, :type=>:each a = [] @c.all_foo(1, 2){|r| a << r}.must_equal [@o] a.must_equal [@o] a = [] @c.each_foo(3, 4){|r| a << r} a.must_equal [@o] @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 1) ORDER BY 2 -- prepared", "SELECT * FROM items WHERE (bar = 3) ORDER BY 4 -- prepared"] end it "should support :name option" do @c.prepared_finder :foo, :name=>:find_foo @c.find_foo(1, 2).must_equal @o @c.find_foo(3, 4).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 1) ORDER BY 2 LIMIT 1 -- prepared", "SELECT * FROM items WHERE (bar = 3) ORDER BY 4 LIMIT 1 -- prepared"] end it "should support :arity option" do def @c.foobar(*b) ds = dataset b.each_with_index do |a, i| ds = ds.where(i=>a) end ds end @c.prepared_finder :foobar, :arity=>1, :name=>:find_foobar_1 @c.prepared_finder :foobar, :arity=>2, :name=>:find_foobar_2 @c.find_foobar_1(:a) @c.find_foobar_2(:a, :b) @db.sqls.must_equal ["SELECT * FROM items WHERE (0 = a) LIMIT 1 -- prepared", "SELECT * FROM items WHERE ((0 = a) AND (1 = b)) LIMIT 1 -- prepared"] end it "should support :mod option" do m = Module.new @c.prepared_finder :foo, :mod=>m proc{@c.first_foo}.must_raise NoMethodError @c.extend m @c.first_foo(1, 2).must_equal @o @c.first_foo(3, 4).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 1) ORDER BY 2 LIMIT 1 -- prepared", "SELECT * FROM items WHERE (bar = 3) ORDER BY 4 LIMIT 1 -- prepared"] end it "should handle models with names" do def @c.name; 'foobar' end @c.prepared_finder :foo @c.first_foo(1, 2).must_equal @o @db.sqls.must_equal ["SELECT * FROM items WHERE (bar = 1) ORDER BY 2 LIMIT 1 -- prepared"] end end describe "Sequel::Model.freeze" do it "should freeze the model class and not allow any changes" do model = Class.new(Sequel::Model(:items)) model.plugin :finder model.finder(:name=>:f_by_name){|pl, ds| ds.where(:name=>pl.arg).limit(1)} model.freeze model.f_by_name('a').must_equal model.call(:id=>1, :x=>1) proc{model.finder(:name=>:first_by_name){|pl, ds| ds.where(:name=>pl.arg).limit(1)}}.must_raise RuntimeError, TypeError end it "should freeze a model class without a dataset without breaking" do model = Class.new(Sequel::Model) model.plugin :finder model.freeze proc{model.finder(:name=>:first_by_name){|pl, ds| ds.where(:name=>pl.arg).limit(1)}}.must_raise RuntimeError, TypeError end it "should allow subclasses of frozen model classes to work correctly" do model = Class.new(Sequel::Model(:items)) model.plugin :finder model.freeze model = Class.new(model) model.dataset = :items2 model.dataset_module{} model.plugin Module.new model.finder(:name=>:first_by_name){|pl, ds| ds.where(:name=>pl.arg).limit(1)} model.first_by_name('a').values.must_equal(:id=>1, :x=>1) end end 
sequel-5.63.0/spec/extensions/forbid_lazy_load_spec.rb

require_relative "spec_helper"

describe "forbid_lazy_load plugin" do
  before do
    @c = Class.new(Sequel::Model)
    @c.set_dataset Sequel::Model.db[:ts].with_fetch({:id=>2, :t_id=>3})
    @c.plugin :forbid_lazy_load
    @c.columns :id, :t_id
    @c.many_to_one :t, :class=>@c, :key=>:t_id
    @c.one_to_many :ts, :class=>@c, :key=>:t_id
    @c.many_to_many :mtm_ts, :class=>@c, :join_table=>:ts, :left_key=>:id, :right_key=>:t_id
    @c.one_to_one :otoo_t, :class=>@c, :key=>:t_id
    @c.one_through_one :oto_t, :class=>@c, :join_table=>:ts, :left_key=>:id, :right_key=>:t_id
    @o1 = @c.load(:id=>1, :t_id=>2)
    @o2 = @c.load(:id=>2, :t_id=>3)
  end

  it "should not forbid lazy load if not set at instance level" do
    @o1.t.must_equal @o2
    @o1.ts.must_equal [@o2]
    @o1.mtm_ts.must_equal [@o2]
    @o1.otoo_t.must_equal @o2
    @o1.oto_t.must_equal @o2
  end

  it "should forbid lazy load when using :forbid_lazy_load true association method option" do
    proc{@o1.t(:forbid_lazy_load=>true)}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.ts(:forbid_lazy_load=>true)}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.mtm_ts(:forbid_lazy_load=>true)}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.otoo_t(:forbid_lazy_load=>true)}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.oto_t(:forbid_lazy_load=>true)}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
  end

  it "should forbid lazy load if set at instance level" do
    @o1.forbid_lazy_load.must_be_same_as @o1
    proc{@o1.t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.mtm_ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.otoo_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.oto_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
  end

  it "should allow lazy load for instance if set at instance level" do
    o = @c.all.first
    o.allow_lazy_load.must_be_same_as o
    o.t.must_equal @o2
    o.ts.must_equal [@o2]
    o.mtm_ts.must_equal [@o2]
    o.otoo_t.must_equal @o2
    o.oto_t.must_equal @o2
  end

  it "should forbid lazy load if retrieved by dataset via Dataset#all" do
    o = @c.all.first
    proc{o.t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{o.ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{o.mtm_ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{o.otoo_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{o.oto_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
  end

  it "should forbid lazy load if retrieved by dataset via Dataset#each" do
    o = @c.each{|x| break x}
    proc{o.t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{o.ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{o.mtm_ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{o.otoo_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{o.oto_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
  end

  it "should forbid lazy load if retrieved by dataset via Dataset#where_each" do
    5.times do
      o = @c.where_each(:id=>1){|x| break x}
      proc{o.t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.mtm_ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.otoo_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.oto_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    end
  end

  it "should forbid lazy load if retrieved by dataset via Dataset#first with integer argument" do
    5.times do
      o = @c.first(2)[0]
      proc{o.t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.mtm_ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.otoo_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.oto_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error

      o = @c.first(2){id > 0}[0]
      proc{o.t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.mtm_ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.otoo_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.oto_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    end
  end

  it "should not forbid lazy load if retrieved by dataset via Dataset#first with no arguments" do
    5.times do
      o = @c.first
      o.t.must_equal @o2
      o.ts.must_equal [@o2]
      o.mtm_ts.must_equal [@o2]
      o.otoo_t.must_equal @o2
      o.oto_t.must_equal @o2
    end
  end

  it "should not forbid lazy load if retrieved by dataset via Dataset#first with hash argument" do
    5.times do
      o = @c.first(id: 2)
      o.t.must_equal @o2
      o.ts.must_equal [@o2]
      o.mtm_ts.must_equal [@o2]
      o.otoo_t.must_equal @o2
      o.oto_t.must_equal @o2
    end
  end

  it "should not forbid lazy load if retrieved by dataset via Dataset#first with block" do
    5.times do
      o = @c.first{id > 1}
      o.t.must_equal @o2
      o.ts.must_equal [@o2]
      o.mtm_ts.must_equal [@o2]
      o.otoo_t.must_equal @o2
      o.oto_t.must_equal @o2
    end
  end

  it "should not forbid lazy load if retrieved by dataset via Dataset#with_pk" do
    5.times do
      o = @c.dataset.with_pk(1)
      o.t.must_equal @o2
      o.ts.must_equal [@o2]
      o.mtm_ts.must_equal [@o2]
      o.otoo_t.must_equal @o2
      o.oto_t.must_equal @o2
    end
  end

  it "should not forbid lazy load for associated objects returned by singular associations" do
    [@o1.t, @o1.otoo_t, @o1.oto_t].each do |o|
      o.associations.clear
      o.t.must_equal @o2
      o.ts.must_equal [@o2]
      o.mtm_ts.must_equal [@o2]
      o.otoo_t.must_equal @o2
      o.oto_t.must_equal @o2
    end
  end

  it "should forbid lazy load for associated objects returned by plural associations" do
    [@o1.ts, @o1.mtm_ts].each do |os|
      o = os.first
      o.associations.clear
      proc{o.t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.mtm_ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.otoo_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
      proc{o.oto_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    end
  end

  it "should allow association access if cached even if forbidding lazy loading" do
    @o1.t.must_equal @o2
    @o1.ts.must_equal [@o2]
    @o1.mtm_ts.must_equal [@o2]
    @o1.otoo_t.must_equal @o2
    @o1.oto_t.must_equal @o2
    @o1.forbid_lazy_load
    @o1.t.must_equal @o2
    @o1.ts.must_equal [@o2]
    @o1.mtm_ts.must_equal [@o2]
    @o1.otoo_t.must_equal @o2
    @o1.oto_t.must_equal @o2
  end

  it "should forbid lazy load for associations if forbid_lazy_load true association option is used" do
    @c.many_to_one :t, :class=>@c, :key=>:t_id, :forbid_lazy_load=>true
    @c.one_to_many :ts, :class=>@c, :key=>:t_id, :forbid_lazy_load=>true
    @c.many_to_many :mtm_ts, :class=>@c, :join_table=>:ts, :left_key=>:id, :right_key=>:t_id, :forbid_lazy_load=>true
    @c.one_to_one :otoo_t, :class=>@c, :key=>:t_id, :forbid_lazy_load=>true
    @c.one_through_one :oto_t, :class=>@c, :join_table=>:ts, :left_key=>:id, :right_key=>:t_id, :forbid_lazy_load=>true
    proc{@o1.t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.mtm_ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.otoo_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.oto_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
  end

  it "should allow lazy load for associations even if instances have it forbidden if forbid_lazy_load false association option is used" do
    @c.many_to_one :t, :class=>@c, :key=>:t_id, :forbid_lazy_load=>false
    @c.one_to_many :ts, :class=>@c, :key=>:t_id, :forbid_lazy_load=>false
    @c.many_to_many :mtm_ts, :class=>@c, :join_table=>:ts, :left_key=>:id, :right_key=>:t_id, :forbid_lazy_load=>false
    @c.one_to_one :otoo_t, :class=>@c, :key=>:t_id, :forbid_lazy_load=>false
    @c.one_through_one :oto_t, :class=>@c, :join_table=>:ts, :left_key=>:id, :right_key=>:t_id, :forbid_lazy_load=>false
    o = @c.all.first
    o.t.must_equal @o2
    o.ts.must_equal [@o2]
    o.mtm_ts.must_equal [@o2]
    o.otoo_t.must_equal @o2
    o.oto_t.must_equal @o2
  end

  it "should forbid lazy load after finalizing associations if not using static_cache in associated class" do
    @c.finalize_associations
    @o1.forbid_lazy_load
    proc{@o1.t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.mtm_ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.otoo_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.oto_t}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
  end

  it "should set forbid_lazy_load false association option if using static_cache in associated class and using allow_lazy_load_for_static_cache_associations" do
    @c.plugin :static_cache
    @c.allow_lazy_load_for_static_cache_associations
    @o1.forbid_lazy_load
    @o1.t.must_equal @o2
    @o1.ts.must_equal [@o2]
    @o1.mtm_ts.must_equal [@o2]
    @o1.otoo_t.must_equal @o2
    @o1.oto_t.must_equal @o2
  end

  it "should automatically set forbid_lazy_load false association option when finalizing associations if using static_cache in associated class" do
    @c.plugin :static_cache
    @c.finalize_associations
    @o1.forbid_lazy_load
    @o1.t.must_equal @o2
    @o1.ts.must_equal [@o2]
    @o1.mtm_ts.must_equal [@o2]
    @o1.otoo_t.must_equal @o2
    @o1.oto_t.must_equal @o2
  end

  it "should allow lazy load when forbidden when using :reload association method option" do
    @o1.forbid_lazy_load
    @o1.t(:reload=>true).must_equal @o2
    @o1.ts(:reload=>true).must_equal [@o2]
    @o1.mtm_ts(:reload=>true).must_equal [@o2]
    @o1.otoo_t(:reload=>true).must_equal @o2
    @o1.oto_t(:reload=>true).must_equal @o2
  end

  it "should work correctly if loading an associated object for a class that does not use the forbid_lazy_load plugin" do
    c = Class.new(Sequel::Model)
    c.set_dataset Sequel::Model.db[:ts].with_fetch({:id=>2, :t_id=>3})
    @c.one_to_one :otoo_t, :class=>c, :key=>:t_id
    @o1.otoo_t.must_equal c.load(@o2.values)
  end

  it "should not allow lazy load for associations to static cache models not using forbid_lazy_load plugin" do
    c = Class.new(Sequel::Model)
    c.set_dataset Sequel::Model.db[:ts].with_fetch({:id=>2, :t_id=>3})
    @c.one_to_many :ts, :class=>c, :key=>:t_id
    @c.plugin :static_cache
    @c.finalize_associations
    @o1.forbid_lazy_load
    @o1.t.must_equal @o2
    proc{@o1.ts}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    @o1.mtm_ts.must_equal [@o2]
    @o1.otoo_t.must_equal @o2
    @o1.oto_t.must_equal @o2
  end

  it "should allow lazy load when forbidden when using :forbid_lazy_load false association option" do
    @o1.forbid_lazy_load
    @o1.t(:forbid_lazy_load=>false).must_equal @o2
    @o1.ts(:forbid_lazy_load=>false).must_equal [@o2]
    @o1.mtm_ts(:forbid_lazy_load=>false).must_equal [@o2]
    @o1.otoo_t(:forbid_lazy_load=>false).must_equal @o2
    @o1.oto_t(:forbid_lazy_load=>false).must_equal @o2
  end

  it "should forbid lazy load when using :forbid_lazy_load true association method option even when using :reload association method option" do
    proc{@o1.t(:forbid_lazy_load=>true, :reload=>true)}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.ts(:forbid_lazy_load=>true, :reload=>true)}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.mtm_ts(:forbid_lazy_load=>true, :reload=>true)}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.otoo_t(:forbid_lazy_load=>true, :reload=>true)}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
    proc{@o1.oto_t(:forbid_lazy_load=>true, :reload=>true)}.must_raise Sequel::Plugins::ForbidLazyLoad::Error
  end

  it "should not affect naked datasets" do
    @c.naked.all.must_equal [{:id=>2, :t_id=>3}]
    @c.naked.where_each(:id=>1){|x| break x}.must_equal(:id=>2, :t_id=>3)
  end

  it "should handle datasets without row_procs" do
    ds = @c.naked
    ds.all.first.must_equal(:id=>2, :t_id=>3)
    ds.each{|x| break x}.must_equal(:id=>2, :t_id=>3)
    ds.where_each(:id=>1){|x| break x}.must_equal(:id=>2, :t_id=>3)
    ds.first(2)[0].must_equal(:id=>2, :t_id=>3)
    ds.first(2){id > 0}[0].must_equal(:id=>2, :t_id=>3)
    ds.first.must_equal(:id=>2, :t_id=>3)
    ds.first{id > 1}.must_equal(:id=>2, :t_id=>3)
    ds.first(:id=>2).must_equal(:id=>2, :t_id=>3)
    ds.with_pk(1).must_equal(:id=>2, :t_id=>3)
  end

  it "should handle datasets with row_procs different from the model" do
    ds = @c.dataset.with_row_proc(proc{|x| x})
    ds.all.first.must_equal(:id=>2, :t_id=>3)
    ds.each{|x| break x}.must_equal(:id=>2, :t_id=>3)
    ds.where_each(:id=>1){|x| break x}.must_equal(:id=>2, :t_id=>3)
    ds.first(2)[0].must_equal(:id=>2, :t_id=>3)
    ds.first(2){id > 0}[0].must_equal(:id=>2, :t_id=>3)
    ds.first.must_equal(:id=>2, :t_id=>3)
    ds.first{id > 1}.must_equal(:id=>2, :t_id=>3)
    ds.first(:id=>2).must_equal(:id=>2, :t_id=>3)
    ds.with_pk(1).must_equal(:id=>2, :t_id=>3)
  end
end
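# Minimal usage sketch of the plugin specced above (not part of the archive;
# the Album/artist model and association names are hypothetical):
#
#   Album.plugin :forbid_lazy_load
#   album = Album.all.first          # retrieved via a multi-object method
#   album.artist                     # raises Sequel::Plugins::ForbidLazyLoad::Error
#   Album.eager(:artist).all.first.artist  # fine: association was eager loaded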
sequel-5.63.0/spec/extensions/force_encoding_spec.rb

require_relative "spec_helper"

describe "force_encoding plugin" do
  before do
    @c = Class.new(Sequel::Model)
    @c.columns :id, :x
    @c.plugin :force_encoding, 'UTF-8'
    @e1 = Encoding.find('UTF-8')
  end

  it "should force encoding to given encoding on load" do
    s = 'blah'.dup
    s.force_encoding('US-ASCII')
    o = @c.load(:id=>1, :x=>s)
    o.x.must_equal 'blah'
    o.x.encoding.must_equal @e1
  end

  it "should force encoding to given encoding when setting column values" do
    s = 'blah'.dup
    s.force_encoding('US-ASCII')
    o = @c.new(:x=>s)
    o.x.must_equal 'blah'
    o.x.encoding.must_equal @e1
  end

  it "should not force encoding of blobs to given encoding on load" do
    s = Sequel.blob('blah'.dup.force_encoding('BINARY'))
    o = @c.load(:id=>1, :x=>s)
    o.x.must_equal 'blah'
    o.x.encoding.must_equal Encoding.find('BINARY')
  end

  it "should not force encoding of blobs to given encoding when setting column values" do
    s = Sequel.blob('blah'.dup.force_encoding('BINARY'))
    o = @c.new(:x=>s)
    o.x.must_equal 'blah'
    o.x.encoding.must_equal Encoding.find('BINARY')
  end

  it "should work correctly when given a frozen string" do
    s = 'blah'.dup
    s.force_encoding('US-ASCII')
    s.freeze
    o = @c.new(:x=>s)
    o.x.must_equal 'blah'
    o.x.encoding.must_equal @e1
  end

  it "should have a forced_encoding class accessor" do
    s = 'blah'.dup
    s.force_encoding('US-ASCII')
    @c.forced_encoding = 'Windows-1258'
    o = @c.load(:id=>1, :x=>s)
    o.x.must_equal 'blah'
    o.x.encoding.must_equal Encoding.find('Windows-1258')
  end

  it "should not force encoding if forced_encoding is nil" do
    s = 'blah'.dup
    s.force_encoding('US-ASCII')
    @c.forced_encoding = nil
    o = @c.load(:id=>1, :x=>s)
    o.x.must_equal 'blah'
    o.x.encoding.must_equal Encoding.find('US-ASCII')
  end

  it "should work correctly when subclassing" do
    c = Class.new(@c)
    s = 'blah'.dup
    s.force_encoding('US-ASCII')
    o = c.load(:id=>1, :x=>s)
    o.x.must_equal 'blah'
    o.x.encoding.must_equal @e1

    c.plugin :force_encoding, 'UTF-16LE'
    s = String.new
    s.force_encoding('US-ASCII')
    o = c.load(:id=>1, :x=>s)
    o.x.must_equal ''
    o.x.encoding.must_equal Encoding.find('UTF-16LE')

    @c.plugin :force_encoding, 'UTF-32LE'
    s = String.new
    s.force_encoding('US-ASCII')
    o = @c.load(:id=>1, :x=>s)
    o.x.must_equal ''
    o.x.encoding.must_equal Encoding.find('UTF-32LE')

    s = String.new
    s.force_encoding('US-ASCII')
    o = c.load(:id=>1, :x=>s)
    o.x.must_equal ''
    o.x.encoding.must_equal Encoding.find('UTF-16LE')
  end

  it "should work when saving new model instances" do
    o = @c.new
    @c.dataset = DB[:a].with_extend do
      def first
        s = 'blah'.dup
        s.force_encoding('US-ASCII')
        {:id=>1, :x=>s}
      end
    end
    @c.instance_variable_set(:@fast_pk_lookup_sql, nil)
    o.save
    o.x.must_equal 'blah'
    o.x.encoding.must_equal @e1
  end

  it "should work when refreshing model instances" do
    o = @c.load(:id=>1, :x=>'as'.dup)
    @c.dataset = DB[:a].with_extend do
      def first
        s = 'blah'.dup
        s.force_encoding('US-ASCII')
        {:id=>1, :x=>s}
      end
    end
    @c.instance_variable_set(:@fast_pk_lookup_sql, nil)
    o.refresh
    o.x.must_equal 'blah'
    o.x.encoding.must_equal @e1
  end
end
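# Minimal usage sketch of the plugin specced above (hypothetical model name):
#
#   Album.plugin :force_encoding, 'UTF-8'
#   album = Album.new(:name=>string_with_unknown_encoding)
#   album.name.encoding  # => #<Encoding:UTF-8>; blob columns are left alone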
sequel-5.63.0/spec/extensions/freeze_datasets_spec.rb

require_relative "spec_helper"

describe "freeze_datasets extension" do
  before do
    @db = Sequel.mock.extension(:freeze_datasets)
  end

  it "should freeze datasets by default" do
    @db.dataset.frozen?.must_equal true
    @db.fetch('SQL').frozen?.must_equal true
    @db.from(:table).frozen?.must_equal true
    @db[:table].frozen?.must_equal true
  end

  it "should have dataset#dup return frozen dataset" do
    @db.dataset.dup.frozen?.must_equal true
  end

  it "should cache Database#from calls with single symbol tables" do
    @db.from(:foo).must_be_same_as @db.from(:foo)
    @db.from(Sequel[:foo]).wont_be_same_as @db.from(Sequel[:foo])
  end

  it "should clear Database#from cache when modifying the schema" do
    ds = @db.from(:foo)
    ds.columns(:foo, :bar)
    @db[:foo].columns.must_equal [:foo, :bar]
    @db.create_table!(:foo){Integer :x}
    @db[:foo].columns.wont_equal [:foo, :bar]
  end
end
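# Minimal usage sketch of the extension specced above (hypothetical connection):
#
#   DB = Sequel.mock.extension(:freeze_datasets)
#   DB[:albums].frozen?                      # => true
#   DB.from(:albums).must_be_same_as DB.from(:albums)  # symbol tables are cached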
sequel-5.63.0/spec/extensions/graph_each_spec.rb

require_relative "spec_helper"

describe Sequel::Dataset, " graphing" do
  before do
    @db = Sequel.mock(:columns=>proc do |sql|
      case sql
      when /points/
        [:id, :x, :y]
      when /lines/
        [:id, :x, :y, :graph_id]
      else
        [:id, :name, :x, :y, :lines_x]
      end
    end).extension(:graph_each)
    @ds1 = @db.from(:points)
    @ds2 = @db.from(:lines)
    @ds3 = @db.from(:graphs)
    [@ds1, @ds2, @ds3].each{|ds| ds.columns}
    @db.sqls
  end

  it "#graph_each should handle graph using currently selected columns as the basis for the selected columns in a new graph" do
    @ds1.select(:id).graph(@ds2, :x=>:id).with_fetch(:id=>1, :lines_id=>2, :x=>3, :y=>4, :graph_id=>5).all.must_equal [{:points=>{:id=>1}, :lines=>{:id=>2, :x=>3, :y=>4, :graph_id=>5}}]
    @ds1.select(:id, :x).graph(@ds2, :x=>:id).with_fetch(:id=>1, :x=>-1, :lines_id=>2, :lines_x=>3, :y=>4, :graph_id=>5).all.must_equal [{:points=>{:id=>1, :x=>-1}, :lines=>{:id=>2, :x=>3, :y=>4, :graph_id=>5}}]
    @ds1.select(Sequel.identifier(:id), Sequel.qualify(:points, :x)).graph(@ds2, :x=>:id).with_fetch(:id=>1, :x=>-1, :lines_id=>2, :lines_x=>3, :y=>4, :graph_id=>5).all.must_equal [{:points=>{:id=>1, :x=>-1}, :lines=>{:id=>2, :x=>3, :y=>4, :graph_id=>5}}]
    @ds1.select(Sequel.identifier(:id).qualify(:points), Sequel.identifier(:x).as(:y)).graph(@ds2, :x=>:id).with_fetch(:id=>1, :y=>-1, :lines_id=>2, :x=>3, :lines_y=>4, :graph_id=>5).all.must_equal [{:points=>{:id=>1, :y=>-1}, :lines=>{:id=>2, :x=>3, :y=>4, :graph_id=>5}}]
    @ds1.select(:id, Sequel.identifier(:x).qualify(Sequel.identifier(:points)).as(Sequel.identifier(:y))).graph(@ds2, :x=>:id).with_fetch(:id=>1, :y=>-1, :lines_id=>2, :x=>3, :lines_y=>4, :graph_id=>5).all.must_equal [{:points=>{:id=>1, :y=>-1}, :lines=>{:id=>2, :x=>3, :y=>4, :graph_id=>5}}]
  end

  it "#graph_each should split the result set into component tables" do
    @db.fetch = [[{:id=>1,:x=>2,:y=>3,:lines_id=>4,:lines_x=>5,:lines_y=>6,:graph_id=>7}],
      [{:id=>1,:x=>2,:y=>3,:lines_id=>4,:lines_x=>5,:lines_y=>6,:graph_id=>7, :graphs_id=>8, :name=>9, :graphs_x=>10, :graphs_y=>11, :graphs_lines_x=>12}],
      [{:id=>1,:x=>2,:y=>3,:lines_id=>4,:lines_x=>5,:lines_y=>6,:graph_id=>7, :graph_id_0=>8, :graph_x=>9, :graph_y=>10, :graph_graph_id=>11}]]
    @ds1.graph(@ds2, :x=>:id).all.must_equal [{:points=>{:id=>1, :x=>2, :y=>3}, :lines=>{:id=>4, :x=>5, :y=>6, :graph_id=>7}}]
    @ds1.graph(@ds2, :x=>:id).graph(@ds3, :id=>:graph_id).all.must_equal [{:points=>{:id=>1, :x=>2, :y=>3}, :lines=>{:id=>4, :x=>5, :y=>6, :graph_id=>7}, :graphs=>{:id=>8, :name=>9, :x=>10, :y=>11, :lines_x=>12}}]
    @ds1.graph(@ds2, :x=>:id).graph(@ds2, {:y=>Sequel[:points][:id]}, :table_alias=>:graph).all.must_equal [{:points=>{:id=>1, :x=>2, :y=>3}, :lines=>{:id=>4, :x=>5, :y=>6, :graph_id=>7}, :graph=>{:id=>8, :x=>9, :y=>10, :graph_id=>11}}]
  end

  it "#graph_each should split the result set into component tables when using first" do
    @db.fetch = [[{:id=>1,:x=>2,:y=>3,:lines_id=>4,:lines_x=>5,:lines_y=>6,:graph_id=>7}],
      [{:id=>1,:x=>2,:y=>3,:lines_id=>4,:lines_x=>5,:lines_y=>6,:graph_id=>7, :graphs_id=>8, :name=>9, :graphs_x=>10, :graphs_y=>11, :graphs_lines_x=>12}],
      [{:id=>1,:x=>2,:y=>3,:lines_id=>4,:lines_x=>5,:lines_y=>6,:graph_id=>7, :graph_id_0=>8, :graph_x=>9, :graph_y=>10, :graph_graph_id=>11}]]
    @ds1.graph(@ds2, :x=>:id).first.must_equal(:points=>{:id=>1, :x=>2, :y=>3}, :lines=>{:id=>4, :x=>5, :y=>6, :graph_id=>7})
    @ds1.graph(@ds2, :x=>:id).graph(@ds3, :id=>:graph_id).first.must_equal(:points=>{:id=>1, :x=>2, :y=>3}, :lines=>{:id=>4, :x=>5, :y=>6, :graph_id=>7}, :graphs=>{:id=>8, :name=>9, :x=>10, :y=>11, :lines_x=>12})
    @ds1.graph(@ds2, :x=>:id).graph(@ds2, {:y=>Sequel[:points][:id]}, :table_alias=>:graph).first.must_equal(:points=>{:id=>1, :x=>2, :y=>3}, :lines=>{:id=>4, :x=>5, :y=>6, :graph_id=>7}, :graph=>{:id=>8, :x=>9, :y=>10, :graph_id=>11})
  end

  it "#graph_each should give a nil value instead of a hash when all values for a table are nil" do
    @db.fetch = [[{:id=>1,:x=>2,:y=>3,:lines_id=>nil,:lines_x=>nil,:lines_y=>nil,:graph_id=>nil}],
      [{:id=>1,:x=>2,:y=>3,:lines_id=>4,:lines_x=>5,:lines_y=>6,:graph_id=>7, :graphs_id=>nil, :name=>nil, :graphs_x=>nil, :graphs_y=>nil, :graphs_lines_x=>nil},
       {:id=>2,:x=>4,:y=>5,:lines_id=>nil,:lines_x=>nil,:lines_y=>nil,:graph_id=>nil, :graphs_id=>nil, :name=>nil, :graphs_x=>nil, :graphs_y=>nil, :graphs_lines_x=>nil},
       {:id=>3,:x=>5,:y=>6,:lines_id=>4,:lines_x=>5,:lines_y=>6,:graph_id=>7, :graphs_id=>7, :name=>8, :graphs_x=>9, :graphs_y=>10, :graphs_lines_x=>11},
       {:id=>3,:x=>5,:y=>6,:lines_id=>7,:lines_x=>5,:lines_y=>8,:graph_id=>9, :graphs_id=>9, :name=>10, :graphs_x=>10, :graphs_y=>11, :graphs_lines_x=>12}]]
    @ds1.graph(@ds2, :x=>:id).all.must_equal [{:points=>{:id=>1, :x=>2, :y=>3}, :lines=>nil}]
    @ds1.graph(@ds2, :x=>:id).graph(@ds3, :id=>:graph_id).all.must_equal [
      {:points=>{:id=>1, :x=>2, :y=>3}, :lines=>{:id=>4, :x=>5, :y=>6, :graph_id=>7}, :graphs=>nil},
      {:points=>{:id=>2, :x=>4, :y=>5}, :lines=>nil, :graphs=>nil},
      {:points=>{:id=>3, :x=>5, :y=>6}, :lines=>{:id=>4, :x=>5, :y=>6, :graph_id=>7}, :graphs=>{:id=>7, :name=>8, :x=>9, :y=>10, :lines_x=>11}},
      {:points=>{:id=>3, :x=>5, :y=>6}, :lines=>{:id=>7, :x=>5, :y=>8, :graph_id=>9}, :graphs=>{:id=>9, :name=>10, :x=>10, :y=>11, :lines_x=>12}}]
  end

  it "#graph_each should not give a nil value instead of a hash when any value for a table is false" do
    @db.fetch = {:id=>1,:x=>2,:y=>3,:lines_id=>nil,:lines_x=>false,:lines_y=>nil,:graph_id=>nil}
    @ds1.graph(@ds2, :x=>:id).all.must_equal [{:points=>{:id=>1, :x=>2, :y=>3}, :lines=>{:id=>nil, :x=>false, :y=>nil, :graph_id=>nil}}]
  end

  it "#graph_each should not included tables graphed with the :select => false option in the result set" do
    @db.fetch = {:id=>1,:x=>2,:y=>3,:graphs_id=>8, :name=>9, :graphs_x=>10, :graphs_y=>11, :lines_x=>12}
    @ds1.graph(:lines, {:x=>:id}, :select=>false).graph(:graphs, :id=>:graph_id).all.must_equal [{:points=>{:id=>1, :x=>2, :y=>3}, :graphs=>{:id=>8, :name=>9, :x=>10, :y=>11, :lines_x=>12}}]
  end

  it "#graph_each should only include the columns selected with #set_graph_aliases and #add_graph_aliases, if called" do
    @db.fetch = [[{:x=>2,:y=>3}], [{:x=>2}], [{:x=>2, :q=>18}]]
    @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x=>[:points, :x], :y=>[:lines, :y]).all.must_equal [{:points=>{:x=>2}, :lines=>{:y=>3}}]
    ds = @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x=>[:points, :x])
    ds.all.must_equal [{:points=>{:x=>2}, :lines=>nil}]
    ds = ds.add_graph_aliases(:q=>[:points, :r, 18])
    ds.all.must_equal [{:points=>{:x=>2, :r=>18}, :lines=>nil}]
  end

  it "#graph_each should correctly map values when #set_graph_aliases is used with a third argument for each entry" do
    @db.fetch = [{:x=>2,:y=>3}]
    @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x=>[:points, :z1, 2], :y=>[:lines, :z2, Sequel.function(:random)]).all.must_equal [{:points=>{:z1=>2}, :lines=>{:z2=>3}}]
  end

  it "#graph_each should correctly map values when #set_graph_aliases is used with a single argument for each entry" do
    @db.fetch = [{:x=>2,:y=>3}]
    @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x=>[:points], :y=>[:lines]).all.must_equal [{:points=>{:x=>2}, :lines=>{:y=>3}}]
  end

  it "#graph_each should correctly map values when #set_graph_aliases is used with a symbol for each entry" do
    @db.fetch = [{:x=>2,:y=>3}]
    @ds1.graph(:lines, :x=>:id).set_graph_aliases(:x=>:points, :y=>:lines).all.must_equal [{:points=>{:x=>2}, :lines=>{:y=>3}}]
  end

  it "#graph_each should run the row_proc for graphed datasets" do
    @db.fetch = {:id=>1,:x=>2,:y=>3,:lines_id=>4,:lines_x=>5,:lines_y=>6,:graph_id=>7}
    @ds1.with_row_proc(proc{|h| h.keys.each{|k| h[k] *= 2}; h}).graph(@ds2.with_row_proc(proc{|h| h.keys.each{|k| h[k] *= 3}; h}), :x=>:id).all.must_equal [{:points=>{:id=>2, :x=>4, :y=>6}, :lines=>{:id=>12, :x=>15, :y=>18, :graph_id=>21}}]
  end

  it "#with_sql_each should work normally if the dataset is not graphed" do
    @db.fetch = {:x=>1}
    @db.dataset.with_sql_each('SELECT 1 AS x'){|r| r.must_equal(:x=>1)}
    @db.sqls.must_equal ['SELECT 1 AS x']
  end
end
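# Minimal usage sketch of the extension specced above (hypothetical tables):
#
#   DB.extension :graph_each
#   DB[:albums].graph(:artists, :id=>:artist_id).all
#   # => [{:albums=>{...}, :artists=>{...}}, ...]  -- each row split per table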
sequel-5.63.0/spec/extensions/hook_class_methods_spec.rb

require_relative "spec_helper"

model_class = proc do |klass, &block|
  c = Class.new(klass)
  c.plugin :hook_class_methods
  c.class_eval(&block) if block
  c
end

describe Sequel::Model, "hook_class_methods plugin" do
  before do
    DB.reset
  end

  it "should freeze hooks when freezing model class" do
    c = model_class.call Sequel::Model do
      before_save{adds << 'hi'}
    end
    c.freeze
    hooks = c.instance_variable_get(:@hooks)
    hooks.frozen?.must_equal true
    hooks.values.all?(&:frozen?).must_equal true
  end

  deprecated ".hook_blocks method should yield each hook block" do
    c = model_class.call Sequel::Model
    a = []
    c.hook_blocks(:before_save){|b| a << b}
    a.must_equal []

    pr = proc{adds << 'hi'}
    c.before_save(&pr)
    a = []
    c.hook_blocks(:before_save){|b| a << b}
    a.must_equal [pr]

    c.before_save(&pr)
    a = []
    c.hook_blocks(:before_save){|b| a << b}
    a.must_equal [pr, pr]

    a = []
    c.hook_blocks(:after_save){|b| a << b}
    a.must_equal []
  end

  it "should be definable using a block" do
    adds = []
    c = model_class.call Sequel::Model do
      before_save{adds << 'hi'}
    end
    c.new.before_save
    adds.must_equal ['hi']
  end

  it "should be definable using a method name" do
    adds = []
    c = model_class.call Sequel::Model do
      define_method(:bye){adds << 'bye'}
      before_save :bye
    end
    c.new.before_save
    adds.must_equal ['bye']
  end

  it "should be additive" do
    adds = []
    c = model_class.call Sequel::Model do
      after_save{adds << 'hyiyie'}
      after_save{adds << 'byiyie'}
    end
    c.new.after_save
    adds.must_equal ['hyiyie', 'byiyie']
  end

  it "before hooks should run in reverse order" do
    adds = []
    c = model_class.call Sequel::Model do
      before_save{adds << 'hyiyie'}
      before_save{adds << 'byiyie'}
    end
    c.new.before_save
    adds.must_equal ['byiyie', 'hyiyie']
  end

  it "should not be additive if the method or tag already exists" do
    adds = []
    c = model_class.call Sequel::Model do
      define_method(:bye){adds << 'bye'}
      before_save :bye
      before_save :bye
    end
    c.new.before_save
    adds.must_equal ['bye']

    adds = []
    d = model_class.call Sequel::Model do
      before_save(:bye){adds << 'hyiyie'}
      before_save(:bye){adds << 'byiyie'}
    end
    d.new.before_save
    adds.must_equal ['byiyie']

    adds = []
    e = model_class.call Sequel::Model do
      define_method(:bye){adds << 'bye'}
      before_save :bye
      before_save(:bye){adds << 'byiyie'}
    end
    e.new.before_save
    adds.must_equal ['byiyie']

    adds = []
    e = model_class.call Sequel::Model do
      define_method(:bye){adds << 'bye'}
      before_save(:bye){adds << 'byiyie'}
      before_save :bye
    end
    e.new.before_save
    adds.must_equal ['bye']
  end

  it "should be inheritable" do
    adds = []
    a = model_class.call Sequel::Model do
      after_save{adds << '123'}
    end
    b = Class.new(a)
    b.class_eval do
      after_save{adds << '456'}
      after_save{adds << '789'}
    end
    b.new.after_save
    adds.must_equal ['123', '456', '789']
  end

  it "should be overridable in descendant classes" do
    adds = []
    a = model_class.call Sequel::Model do
      before_save{adds << '123'}
    end
    b = Class.new(a)
    b.class_eval do
      define_method(:before_save){adds << '456'}
    end
    a.new.before_save
    adds.must_equal ['123']
    adds = []
    b.new.before_save
    adds.must_equal ['456']
  end

  it "should stop processing if a before hook calls cancel_action" do
    flag = true
    adds = []
    a = model_class.call Sequel::Model(:items) do
      before_save{adds << 'cruel'; cancel_action if flag == false}
      before_save{adds << 'blah'; cancel_action if flag == false}
    end
    a.raise_on_save_failure = false
    a.new.save
    adds.must_equal ['blah', 'cruel']

    # chain should not break on nil
    adds = []
    flag = nil
    a.new.save
    adds.must_equal ['blah', 'cruel']

    adds = []
    flag = false
    a.new.save
    adds.must_equal ['blah']

    b = Class.new(a)
    b.class_eval do
      before_save{adds << 'mau'}
    end
    adds = []
    b.new.save
    adds.must_equal ['mau', 'blah']
  end
end

describe "Model#before_create && Model#after_create" do
  before do
    DB.reset
    @c = model_class.call Sequel::Model(:items) do
      columns :x
      no_primary_key
      after_create {DB << "BLAH after"}
    end
  end

  it "should be called around new record creation" do
    @c.before_create {DB << "BLAH before"}
    @c.create(:x => 2)
    DB.sqls.must_equal ['BLAH before', 'INSERT INTO items (x) VALUES (2)', 'BLAH after']
  end

  it ".create should cancel the save and raise an error if before_create calls cancel_action and raise_on_save_failure is true" do
    @c.before_create{cancel_action}
    proc{@c.create(:x => 2)}.must_raise(Sequel::HookFailed)
    DB.sqls.must_equal []
  end

  it ".create should cancel the save and return nil if before_create calls cancel_action and raise_on_save_failure is false" do
    @c.before_create{cancel_action}
    @c.raise_on_save_failure = false
    @c.create(:x => 2).must_be_nil
    DB.sqls.must_equal []
  end
end

describe "Model#before_update && Model#after_update" do
  before do
    DB.reset
    @c = model_class.call(Sequel::Model(:items)) do
      after_update {DB << "BLAH after"}
    end
  end

  it "should be called around record update" do
    @c.before_update {DB << "BLAH before"}
    m = @c.load(:id => 2233, :x=>123)
    m.save
    DB.sqls.must_equal ['BLAH before', 'UPDATE items SET x = 123 WHERE (id = 2233)', 'BLAH after']
  end

  it "#save should cancel the save and raise an error if before_update calls cancel_action and raise_on_save_failure is true" do
    @c.before_update{cancel_action}
    proc{@c.load(:id => 2233).save}.must_raise(Sequel::HookFailed)
    DB.sqls.must_equal []
  end

  it "#save should cancel the save and return nil if before_update calls cancel_action and raise_on_save_failure is false" do
    @c.before_update{cancel_action}
    @c.raise_on_save_failure = false
    @c.load(:id => 2233).save.must_be_nil
    DB.sqls.must_equal []
  end
end

describe "Model#before_save && Model#after_save" do
  before do
    DB.reset
    @c = model_class.call(Sequel::Model(:items)) do
      columns :x
      after_save {DB << "BLAH after"}
    end
  end

  it "should be called around record update" do
    @c.before_save {DB << "BLAH before"}
    m = @c.load(:id => 2233, :x=>123)
    m.save
    DB.sqls.must_equal ['BLAH before', 'UPDATE items SET x = 123 WHERE (id = 2233)', 'BLAH after']
  end

  it "should be called around record creation" do
    @c.before_save {DB << "BLAH before"}
    @c.no_primary_key
    @c.create(:x => 2)
    DB.sqls.must_equal ['BLAH before', 'INSERT INTO items (x) VALUES (2)', 'BLAH after']
  end

  it "#save should cancel the save and raise an error if before_save calls cancel_action and raise_on_save_failure is true" do
    @c.before_save{cancel_action}
    proc{@c.load(:id => 2233).save}.must_raise(Sequel::HookFailed)
    DB.sqls.must_equal []
  end

  it "#save should cancel the save and return nil if before_save calls cancel_action and raise_on_save_failure is false" do
    @c.before_save{cancel_action}
    @c.raise_on_save_failure = false
    @c.load(:id => 2233).save.must_be_nil
    DB.sqls.must_equal []
  end

  it "should raise if calling without block or method" do
    proc{@c.before_save}.must_raise Sequel::Error
    proc{@c.after_save}.must_raise Sequel::Error
  end
end

describe "Model#before_destroy && Model#after_destroy" do
  before do
    DB.reset
    @c = model_class.call(Sequel::Model(:items)) do
      after_destroy {DB << "BLAH after"}
    end
  end

  it "should be called around record destruction" do
    @c.before_destroy {DB << "BLAH before"}
    m = @c.load(:id => 2233)
    m.destroy
    DB.sqls.must_equal ['BLAH before', "DELETE FROM items WHERE id = 2233", 'BLAH after']
  end

  it "#destroy should cancel the destroy and raise an error if before_destroy calls cancel_action and raise_on_save_failure is true" do
    @c.before_destroy{cancel_action}
    proc{@c.load(:id => 2233).destroy}.must_raise(Sequel::HookFailed)
    DB.sqls.must_equal []
  end

  it "#destroy should cancel the destroy and return nil if before_destroy calls cancel_action and raise_on_save_failure is false" do
    @c.before_destroy{cancel_action}
    @c.raise_on_save_failure = false
    @c.load(:id => 2233).destroy.must_be_nil
    DB.sqls.must_equal []
  end
end

describe "Model#before_validation && Model#after_validation" do
  before do
    DB.reset
    @c = model_class.call(Sequel::Model(:items)) do
      plugin :validation_class_methods
      after_validation{DB << "BLAH after"}

      def self.validate(o)
        o.errors.add(:id, 'not valid') unless o[:id] == 2233
      end
      columns :id
    end
  end

  it "should be called around validation" do
    @c.before_validation{DB << "BLAH before"}
    m = @c.load(:id => 2233)
    m.must_be :valid?
    DB.sqls.must_equal ['BLAH before', 'BLAH after']

    DB.sqls.clear
    m = @c.load(:id => 22)
    m.wont_be :valid?
    DB.sqls.must_equal ['BLAH before', 'BLAH after']
  end

  it "should be called when calling save" do
    @c.before_validation{DB << "BLAH before"}
    m = @c.load(:id => 2233, :x=>123)
    m.save.must_equal m
    DB.sqls.must_equal ['BLAH before', 'BLAH after', 'UPDATE items SET x = 123 WHERE (id = 2233)']

    DB.sqls.clear
    m = @c.load(:id => 22)
    m.raise_on_save_failure = false
    m.save.must_be_nil
    DB.sqls.must_equal ['BLAH before', 'BLAH after']
  end

  it "#save should cancel the save and raise an error if before_validation calls cancel_action and raise_on_save_failure is true" do
    @c.before_validation{cancel_action}
    proc{@c.load(:id => 2233).save}.must_raise(Sequel::HookFailed)
    DB.sqls.must_equal []
  end

  it "#save should cancel the save and return nil if before_validation calls cancel_action and raise_on_save_failure is false" do
    @c.before_validation{cancel_action}
    @c.raise_on_save_failure = false
    @c.load(:id => 2233).save.must_be_nil
    DB.sqls.must_equal []
  end
end

describe "Model.has_hooks?" do
  before do
    @c = model_class.call(Sequel::Model(:items))
  end

  it "should return false if no hooks are defined" do
    @c.has_hooks?(:before_save).must_equal false
  end

  it "should return true if hooks are defined" do
    @c.before_save {'blah'}
    @c.has_hooks?(:before_save).must_equal true
  end

  it "should return false in subclasses if there are no hooks to inherit" do
    @d = Class.new(@c)
    @d.has_hooks?(:before_save).must_equal false
  end
end
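# Minimal usage sketch of the plugin specced above (hypothetical model; the
# audit_log call is illustrative, not a real API):
#
#   Album.plugin :hook_class_methods
#   Album.before_save{cancel_action if name.nil?}
#   Album.after_create{audit_log(self)}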
sequel-5.63.0/spec/extensions/identifier_mangling_spec.rb

require_relative "spec_helper"

describe "identifier_mangling extension" do
  it "should respect the :quote_identifiers option" do
    db = Sequel::Database.new(:quote_identifiers=>false).extension(:identifier_mangling)
    db.quote_identifiers?.must_equal false
    db = Sequel::Database.new(:quote_identifiers=>true).extension(:identifier_mangling)
    db.quote_identifiers?.must_equal true
  end

  it "should respect the :quote_identifiers setting" do
    db = Sequel::Database.new.extension(:identifier_mangling)
    db.quote_identifiers?.must_equal true
    db.quote_identifiers = false
    db.quote_identifiers?.must_equal false
  end

  it "should upcase on input and downcase on output by default" do
    db = Sequel::Database.new.extension(:identifier_mangling)
    db.send(:identifier_input_method_default).must_equal :upcase
    db.send(:identifier_output_method_default).must_equal :downcase
  end

  it "should respect the :identifier_input_method option" do
    db = Sequel::Database.new.extension(:identifier_mangling)
    db.identifier_input_method.must_equal :upcase
    db.identifier_input_method = nil
    db.identifier_input_method.must_be_nil
    db = Sequel::Database.new(:identifier_input_method=>nil).extension(:identifier_mangling)
    db.identifier_input_method.must_be_nil
    db.identifier_input_method = :downcase
    db.identifier_input_method.must_equal :downcase
    db = Sequel::Database.new(:identifier_input_method=>:upcase).extension(:identifier_mangling)
    db.identifier_input_method.must_equal :upcase
    db.identifier_input_method = nil
    db.identifier_input_method.must_be_nil
  end

  it "should respect the :identifier_output_method option" do
    db = Sequel::Database.new.extension(:identifier_mangling)
    db.identifier_output_method.must_equal :downcase
    db.identifier_output_method = nil
    db.identifier_output_method.must_be_nil
    db = Sequel::Database.new(:identifier_output_method=>nil).extension(:identifier_mangling)
    db.identifier_output_method.must_be_nil
    db.identifier_output_method = :downcase
    db.identifier_output_method.must_equal :downcase
    db = Sequel::Database.new(:identifier_output_method=>:upcase).extension(:identifier_mangling)
    db.identifier_output_method.must_equal :upcase
    db.identifier_output_method = nil
    db.identifier_output_method.must_be_nil
  end

  it "should respect the identifier_input_method_default method if Sequel.identifier_input_method is not called" do
    class Sequel::Database
      @identifier_input_method = nil
    end
    x = Class.new(Sequel::Database){private; def dataset_class_default; Sequel::Dataset end; def identifier_input_method_default; :downcase end}
    x.new.extension(:identifier_mangling).identifier_input_method.must_equal :downcase
    y = Class.new(Sequel::Database){private; def dataset_class_default; Sequel::Dataset end; def identifier_input_method_default; :camelize end}
    y.new.extension(:identifier_mangling).identifier_input_method.must_equal :camelize
  end

  it "should respect the identifier_output_method_default method if Sequel.identifier_output_method is not called" do
    class Sequel::Database
      @identifier_output_method = nil
    end
    x = Class.new(Sequel::Database){private; def dataset_class_default; Sequel::Dataset end; def identifier_output_method_default; :upcase end}
    x.new.extension(:identifier_mangling).identifier_output_method.must_equal :upcase
    y = Class.new(Sequel::Database){private; def dataset_class_default; Sequel::Dataset end; def identifier_output_method_default; :underscore end}
    y.new.extension(:identifier_mangling).identifier_output_method.must_equal :underscore
  end
end

describe "Database#input_identifier_meth" do
  it "should be the input_identifer method of a default dataset for this database" do
    db = Sequel::Database.new.extension(:identifier_mangling)
    db.identifier_input_method = nil
    db.send(:input_identifier_meth).call(:a).must_equal 'a'
    db.identifier_input_method = :upcase
    db.send(:input_identifier_meth).call(:a).must_equal 'A'
  end
end

describe "Database#output_identifier_meth" do
  it "should be the output_identifer method of a default dataset for this database" do
    db = Sequel::Database.new.extension(:identifier_mangling)
    db.identifier_output_method = nil
    db.send(:output_identifier_meth).call('A').must_equal :A
    db.identifier_output_method = :downcase
    db.send(:output_identifier_meth).call('A').must_equal :a
  end
end

describe "Database#metadata_dataset" do
  it "should be a dataset with the default settings for identifier_mangling" do
    ds = Sequel::Database.new.extension(:identifier_mangling).send(:metadata_dataset)
    ds.literal(:a).must_equal "\"A\""
    ds.send(:output_identifier, 'A').must_equal :a
  end
end

describe "Dataset" do
  before do
    @dataset = Sequel.mock.extension(:identifier_mangling).dataset
  end

  it "should get quote_identifiers default from database" do
    db = Sequel::Database.new(:quote_identifiers=>true).extension(:identifier_mangling)
    db[:a].quote_identifiers?.must_equal true
    db = Sequel::Database.new(:quote_identifiers=>false).extension(:identifier_mangling)
    db[:a].quote_identifiers?.must_equal false
  end

  it "should get identifier_input_method default from database" do
    db = Sequel::Database.new(:identifier_input_method=>:upcase).extension(:identifier_mangling)
    db[:a].identifier_input_method.must_equal :upcase
    db = Sequel::Database.new(:identifier_input_method=>:downcase).extension(:identifier_mangling)
    db[:a].identifier_input_method.must_equal :downcase
  end

  it "should get identifier_output_method default from database" do
    db = Sequel::Database.new(:identifier_output_method=>:upcase).extension(:identifier_mangling)
    db[:a].identifier_output_method.must_equal :upcase
    db = Sequel::Database.new(:identifier_output_method=>:downcase).extension(:identifier_mangling)
    db[:a].identifier_output_method.must_equal :downcase
  end

  it "should have with_quote_identifiers method which returns cloned dataset with changed literalization of identifiers" do
    @dataset.with_quote_identifiers(true).literal(:a).must_equal '"a"'
    @dataset.with_quote_identifiers(false).literal(:a).must_equal 'a'
    ds = @dataset.freeze.with_quote_identifiers(false)
    ds.literal(:a).must_equal 'a'
    ds.frozen?.must_equal true
  end

  it "should have with_identifier_input_method method which returns cloned dataset with changed literalization of identifiers" do
    @dataset.with_identifier_input_method(:upcase).literal(:a).must_equal 'A'
    @dataset.with_identifier_input_method(:downcase).literal(:A).must_equal 'a'
    @dataset.with_identifier_input_method(:reverse).literal(:at_b).must_equal 'b_ta'
    ds = @dataset.freeze.with_identifier_input_method(:reverse)
    ds.frozen?.must_equal true
    ds.literal(:at_b).must_equal 'b_ta'
  end

  it "should have with_identifier_output_method method which returns cloned dataset with changed identifiers returned from the database" do
    @dataset.send(:output_identifier, "at_b_C").must_equal :at_b_C
    @dataset.with_identifier_output_method(:upcase).send(:output_identifier, "at_b_C").must_equal :AT_B_C
    @dataset.with_identifier_output_method(:downcase).send(:output_identifier, "at_b_C").must_equal :at_b_c
    @dataset.with_identifier_output_method(:reverse).send(:output_identifier, "at_b_C").must_equal :C_b_ta
    ds = @dataset.freeze.with_identifier_output_method(:reverse)
    ds.send(:output_identifier, "at_b_C").must_equal :C_b_ta
    ds.frozen?.must_equal true
  end

  it "should have output_identifier handle empty identifiers" do
    @dataset.send(:output_identifier, "").must_equal :untitled
    @dataset.with_identifier_output_method(:upcase).send(:output_identifier, "").must_equal :UNTITLED
    @dataset.with_identifier_output_method(:downcase).send(:output_identifier, "").must_equal :untitled
    @dataset.with_identifier_output_method(:reverse).send(:output_identifier, "").must_equal :deltitnu
  end
end

describe "identifier_mangling extension" do
  it "should be able to load dialects based on the database name" do
    Sequel.mock(:host=>'access').select(Date.new(2011, 12, 13)).sql.must_equal 'SELECT #2011-12-13#'
    Sequel.mock(:host=>'db2').select(1).sql.must_equal 'SELECT 1 FROM "SYSIBM"."SYSDUMMY1"'
    Sequel.mock(:host=>'mssql')[:a].full_text_search(:b, 'c').sql.must_equal "SELECT * FROM [A] WHERE (CONTAINS ([B], 'c'))"
    Sequel.mock(:host=>'mysql')[:a].full_text_search(:b, 'c').sql.must_equal "SELECT * FROM `a` WHERE (MATCH (`b`) AGAINST ('c'))"
    Sequel.mock(:host=>'oracle')[:a].limit(1).sql.must_equal 'SELECT * FROM (SELECT * FROM "A") "T1" WHERE (ROWNUM <= 1)'
    Sequel.mock(:host=>'postgres')[:a].full_text_search(:b, 'c').sql.must_equal "SELECT * FROM \"a\" WHERE (to_tsvector(CAST('simple' AS regconfig), (COALESCE(\"b\", ''))) @@ to_tsquery(CAST('simple' AS regconfig), 'c'))"
    Sequel.mock(:host=>'sqlanywhere').from(:a).offset(1).sql.must_equal 'SELECT TOP 2147483647 START AT (1 + 1) * FROM "A"'
    Sequel.mock(:host=>'sqlite')[Sequel[:a].as(:b)].sql.must_equal "SELECT * FROM `a` AS 'b'"
  end
end

describe Sequel::Model, ".[] optimization" do
  before do
    @db = Sequel.mock(:quote_identifiers=>true).extension(:identifier_mangling)
    def @db.schema(*) [[:id, {:primary_key=>true}]] end
    def @db.supports_schema_parsing?() true end
    @c = Class.new(Sequel::Model(@db))
    @ds = @db.dataset.with_quote_identifiers(true)
  end

  it "should have simple_pk and simple_table respect dataset's identifier input methods" do
    ds = @db.from(:ab).with_identifier_input_method(:reverse)
    @c.set_dataset ds
    @c.simple_table.must_equal '"ba"'
    @c.set_primary_key :cd
    @c.simple_pk.must_equal '"dc"'

    @c.set_dataset ds.from(Sequel[:ef][:gh])
    @c.simple_table.must_equal '"fe"."hg"'
  end

  with_symbol_splitting "should have simple_pk and simple_table respect dataset's identifier input methods when using splittable symbols" do
    ds = @db.from(:ab).with_identifier_input_method(:reverse)
    @c.set_dataset ds.from(:ef__gh)
    @c.simple_table.must_equal '"fe"."hg"'
  end
end
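# Minimal usage sketch of the extension specced above:
#
#   DB.extension :identifier_mangling
#   DB.identifier_input_method = :upcase     # how Ruby identifiers are sent to the database
#   DB.identifier_output_method = :downcase  # how returned identifiers are folded for Ruby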
sequel-5.63.0/spec/extensions/implicit_subquery_spec.rb

require_relative "spec_helper"

describe "Sequel::Dataset::ImplicitSubquery" do
  it "should implicitly use a subquery for most dataset query methods" do
    db = Sequel.mock
    db.extend_datasets{def supports_cte?; true end}
    ds = db["SELECT * FROM table"].extension(:implicit_subquery)
    ds.columns(:id, :a)
    ods = db[:c]
    ods.columns(:id, :b)

    ds.cross_join(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 CROSS JOIN c"
    ds.distinct.sql.must_equal "SELECT DISTINCT * FROM (SELECT * FROM table) AS t1"
    ds.except(ods).sql.must_equal "SELECT * FROM (SELECT * FROM (SELECT * FROM table) AS t1 EXCEPT SELECT * FROM c) AS t1"
    ds.exclude(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 WHERE NOT c"
    ds.exclude_having(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 HAVING NOT c"
    ds.filter(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 WHERE c"
    ds.for_update.sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 FOR UPDATE"
    ds.full_join(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 FULL JOIN c"
    ds.full_outer_join(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 FULL OUTER JOIN c"
    ds.graph(ods).sql.must_equal "SELECT t1.id, t1.a, c.id AS c_id, c.b FROM (SELECT * FROM table) AS t1 LEFT OUTER JOIN c"
    ds.grep(:c, 'a').sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 WHERE ((c LIKE 'a' ESCAPE '\\'))"
    ds.group(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 GROUP BY c"
    ds.group_append(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 GROUP BY c"
    ds.group_and_count(:c).sql.must_equal "SELECT c, count(*) AS count FROM (SELECT * FROM table) AS t1 GROUP BY c"
    ds.group_by(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 GROUP BY c"
    ds.having(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 HAVING c"
    ds.inner_join(:c, [:d]).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 INNER JOIN c USING (d)"
    ds.intersect(ods).sql.must_equal "SELECT * FROM (SELECT * FROM (SELECT * FROM table) AS t1 INTERSECT SELECT * FROM c) AS t1"
    ds.invert.sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 WHERE 'f'"
    ds.join(:c, [:d]).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 INNER JOIN c USING (d)"
    ds.join_table(:inner, :c, [:d]).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 INNER JOIN c USING (d)"
    ds.left_join(:c, [:d]).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 LEFT JOIN c USING (d)"
    ds.left_outer_join(:c, [:d]).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 LEFT OUTER JOIN c USING (d)"
    ds.limit(1).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 LIMIT 1"
    ds.lock_style('FOR UPDATE').sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 FOR UPDATE"
    ds.natural_full_join(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 NATURAL FULL JOIN c"
    ds.natural_join(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 NATURAL JOIN c"
    ds.natural_left_join(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 NATURAL LEFT JOIN c"
    ds.natural_right_join(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 NATURAL RIGHT JOIN c"
    ds.offset(1).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 OFFSET 1"
    ds.order(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 ORDER BY c"
    ds.order_append(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 ORDER BY c"
    ds.order_by(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 ORDER BY c"
    ds.order_more(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 ORDER BY c"
    ds.order_prepend(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 ORDER BY c"
    ds.right_join(:c, [:d]).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 RIGHT JOIN c USING (d)"
    ds.right_outer_join(:c, [:d]).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 RIGHT OUTER JOIN c USING (d)"
    ds.select(:c).sql.must_equal "SELECT c FROM (SELECT * FROM table) AS t1"
    ds.select_append(:c).sql.must_equal "SELECT *, c FROM (SELECT * FROM table) AS t1"
    ds.select_group(:c).sql.must_equal "SELECT c FROM (SELECT * FROM table) AS t1 GROUP BY c"
    ds.select_more(:c).sql.must_equal "SELECT *, c FROM (SELECT * FROM table) AS t1"
    ds.union(ods).sql.must_equal "SELECT * FROM (SELECT * FROM (SELECT * FROM table) AS t1 UNION SELECT * FROM c) AS t1"
    ds.where(:c).sql.must_equal "SELECT * FROM (SELECT * FROM table) AS t1 WHERE c"
    ds.with(:d, ods).sql.must_equal "WITH d AS (SELECT * FROM c) SELECT * FROM (SELECT * FROM table) AS t1"
    ds.with_recursive(:d, ods, ods).sql.must_equal "WITH d AS (SELECT * FROM c UNION ALL SELECT * FROM c) SELECT * FROM (SELECT * FROM table) AS t1"
  end
end
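# Minimal usage sketch of the extension specced above (hypothetical table name):
#
#   ds = DB["SELECT * FROM albums"].extension(:implicit_subquery)
#   ds.where(:id=>1).sql
#   # => "SELECT * FROM (SELECT * FROM albums) AS t1 WHERE (id = 1)"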
sequel-5.63.0/spec/extensions/index_caching_spec.rb

require_relative "spec_helper"

describe "index_caching extension" do
  before do
    @db = Sequel.connect('mock://postgres').extension(:index_caching)
    @indexes = {'"table"'=>{:table_idx_unique=>{:columns=>[:first_col, :second_col], :unique=>true, :deferrable=>nil}}}
    @filename = "spec/files/test_indexes_#$$.dump"
    @db.instance_variable_set(:@indexes, @indexes)
  end

  after do
    File.delete(@filename) if File.exist?(@filename)
  end

  it "#indexes should return cached index information" do
    @db.indexes(:table).must_equal @indexes['"table"']
    @db.indexes(:table, {}).must_equal @indexes['"table"']
  end

  it "#indexes should skip cached information if given options" do
    @db.indexes(:table, :schema=>:b).must_equal({})
  end

  it "Database should remove cached indexes when schema is changed" do
    @db.create_table(:table){Integer :a}
    @db.indexes(:table).must_equal({})
  end

  it "Database#freeze should allow cached information to work" do
    @db.freeze.indexes(:table).must_equal @indexes['"table"']
  end

  it "Database#freeze should allow removing index information" do
    @db.freeze
    @db.create_table(:table){Integer :a}
    @db.indexes(:table).must_equal({})
  end

  it "Database#dump_index_cache should dump the index cache to the given file" do
    File.exist?(@filename).must_equal false
    @db.dump_index_cache(@filename)
    File.exist?(@filename).must_equal true
    File.size(@filename).must_be :>, 0
  end

  it "Database#load_index_cache should load the index cache from the given file dumped by #dump_index_cache" do
    @db.dump_index_cache(@filename)
    db = Sequel::Database.new.extension(:index_caching)
    db.load_index_cache(@filename)
    db.extension(:index_caching)
    @db.instance_variable_get(:@indexes).must_equal @indexes
  end

  it "Database#dump_index_cache? should dump the index cache to the given file unless the file exists" do
    @db.dump_index_cache?(@filename)
    File.size(@filename).wont_equal 0
    File.open(@filename, 'wb'){|f|}
    File.size(@filename).must_equal 0
    @db.dump_index_cache?(@filename)
    File.size(@filename).must_equal 0
  end

  it "Database#load_index_cache? should load the index cache from the given file if it exists" do
    db = Sequel::Database.new.extension(:index_caching)
    File.exist?(@filename).must_equal false
    db.load_index_cache?(@filename)
    db.instance_variable_get(:@indexes).must_equal({})

    @db.dump_index_cache(@filename)
    db.load_index_cache?(@filename)
    @db.instance_variable_get(:@indexes).must_equal @indexes
  end
end
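# Minimal usage sketch of the extension specced above; caching index metadata
# to a file (for example from a deploy-time task) avoids re-querying it on boot:
#
#   DB.extension :index_caching
#   DB.dump_index_cache('index_cache.dump')   # write the cache once
#   DB.load_index_cache?('index_cache.dump')  # on startup, load it if present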
sequel-5.63.0/spec/extensions/inflector_spec.rb

require_relative "spec_helper"

Sequel.extension :inflector

describe String do
  it "#camelize and #camelcase should transform the word to CamelCase" do
    "egg_and_hams".camelize.must_equal "EggAndHams"
    "egg_and_hams".camelize(false).must_equal "eggAndHams"
    "post".camelize.must_equal "Post"
    "post".camelcase.must_equal "Post"
    "foo/bar".camelize.must_equal "Foo::Bar"
    "foo/".camelize.must_equal "Foo::"
    "foo//bar".camelize.must_equal "Foo::/bar"
    "foo///bar".camelize.must_equal "Foo::/::Bar"
  end

  it "#constantize should eval the string to get a constant" do
    "String".constantize.must_equal String
    "String::Inflections".constantize.must_equal String::Inflections
    proc{"BKSDDF".constantize}.must_raise NameError
    proc{"++A++".constantize}.must_raise NameError
  end

  it "#dasherize should transform underscores to dashes" do
    "egg_and_hams".dasherize.must_equal "egg-and-hams"
    "post".dasherize.must_equal "post"
  end

  it "#demodulize should remove any preceding modules" do
    "String::Inflections::Blah".demodulize.must_equal "Blah"
    "String::Inflections".demodulize.must_equal "Inflections"
    "String".demodulize.must_equal "String"
  end

  it "#humanize should remove _id, transform underscore to spaces, and capitalize" do
    "egg_and_hams".humanize.must_equal "Egg and hams"
    "post".humanize.must_equal "Post"
    "post_id".humanize.must_equal "Post"
  end

  it "#titleize and #titlecase should underscore, humanize, and capitalize all words" do
    "egg-and: hams".titleize.must_equal "Egg And: Hams"
    "post".titleize.must_equal "Post"
    "post".titlecase.must_equal "Post"
  end

  it "#underscore should add underscores between CamelCased words, change :: to / and - to _, and downcase" do
    "EggAndHams".underscore.must_equal "egg_and_hams"
    "EGGAndHams".underscore.must_equal "egg_and_hams"
    "Egg::And::Hams".underscore.must_equal "egg/and/hams"
    "post".underscore.must_equal "post"
    "post-id".underscore.must_equal "post_id"
  end

  it "#pluralize should transform words from singular to plural" do
    "post".pluralize.must_equal "posts"
    "octopus".pluralize.must_equal "octopuses"
    "the blue mailman".pluralize.must_equal "the blue mailmen"
    "CamelOctopus".pluralize.must_equal "CamelOctopuses"
  end

  it "#singularize should transform words from plural to singular" do
    "posts".singularize.must_equal "post"
    "octopuses".singularize.must_equal "octopus"
    "the blue mailmen".singularize.must_equal "the blue mailman"
    "CamelOctopuses".singularize.must_equal "CamelOctopus"
  end

  it "#tableize should transform class names to table names" do
    "RawScaledScorer".tableize.must_equal "raw_scaled_scorers"
    "egg_and_ham".tableize.must_equal "egg_and_hams"
    "fancyCategory".tableize.must_equal "fancy_categories"
  end

  it "#classify should transform table names to class names" do
    "egg_and_hams".classify.must_equal "EggAndHam"
    "post".classify.must_equal "Post"
  end

  it "#foreign_key should create a foreign key name from a class name" do
    "Message".foreign_key.must_equal "message_id"
    "Message".foreign_key(false).must_equal "messageid"
    "Admin::Post".foreign_key.must_equal "post_id"
  end
end

describe String::Inflections do
  before do
    @plurals, @singulars, @uncountables = String.inflections.plurals.dup, String.inflections.singulars.dup, String.inflections.uncountables.dup
  end

  after do
    String.inflections.plurals.replace(@plurals)
    String.inflections.singulars.replace(@singulars)
    String.inflections.uncountables.replace(@uncountables)
  end

  it "should be possible to clear the list of singulars, plurals, and uncountables" do
    String.inflections.clear(:plurals)
    String.inflections.plurals.must_equal []
    String.inflections.plural('blah', 'blahs')
    String.inflections.clear
    String.inflections.plurals.must_equal []
    String.inflections.singulars.must_equal []
    String.inflections.uncountables.must_equal []
  end

  it "should be able to specify new inflection rules" do
    String.inflections do |i|
      i.plural(/xx$/i, 'xxx')
      i.singular(/ttt$/i, 'tt')
      i.irregular('yy', 'yyy')
      i.uncountable(%w'zz')
    end
    'roxx'.pluralize.must_equal 'roxxx'
    'rottt'.singularize.must_equal 'rott'
    'yy'.pluralize.must_equal 'yyy'
    'yyy'.singularize.must_equal 'yy'
    'zz'.pluralize.must_equal 'zz'
    'zz'.singularize.must_equal 'zz'
  end

  it "should be yielded and returned by String.inflections" do
    String.inflections{|i| i.must_equal String::Inflections}.must_equal String::Inflections
  end
end

describe 'Default inflections' do
  it "should support the default inflection rules" do
    {
      :test=>:tests,
      :ax=>:axes,
      :testis=>:testes,
      :octopus=>:octopuses,
      :virus=>:viruses,
      :alias=>:aliases,
      :status=>:statuses,
      :bus=>:buses,
      :buffalo=>:buffaloes,
      :tomato=>:tomatoes,
      :datum=>:data,
      :bacterium=>:bacteria,
      :analysis=>:analyses,
      :basis=>:bases,
      :diagnosis=>:diagnoses,
      :parenthesis=>:parentheses,
      :prognosis=>:prognoses,
      :synopsis=>:synopses,
      :thesis=>:theses,
      :wife=>:wives,
      :giraffe=>:giraffes,
      :self=>:selves,
      :dwarf=>:dwarves,
      :hive=>:hives,
      :fly=>:flies,
      :buy=>:buys,
      :soliloquy=>:soliloquies,
      :day=>:days,
      :attorney=>:attorneys,
      :boy=>:boys,
      :hoax=>:hoaxes,
      :lunch=>:lunches,
      :princess=>:princesses,
      :matrix=>:matrices,
      :vertex=>:vertices,
      :index=>:indices,
      :mouse=>:mice,
      :louse=>:lice,
      :quiz=>:quizzes,
      :motive=>:motives,
      :movie=>:movies,
      :series=>:series,
      :crisis=>:crises,
      :person=>:people,
      :man=>:men,
      :woman=>:women,
      :child=>:children,
      :sex=>:sexes,
      :move=>:moves
    }.each do |k, v|
      k.to_s.pluralize.must_equal v.to_s
      v.to_s.singularize.must_equal k.to_s
    end
    [:equipment, :information, :rice, :money, :species, :series, :fish, :sheep, :news].each do |a|
      a.to_s.pluralize.must_equal a.to_s.singularize
    end
  end
end
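# Minimal usage sketch of the extension specced above:
#
#   Sequel.extension :inflector
#   "post".pluralize                            # => "posts"
#   String.inflections{|i| i.uncountable(%w'gear')}  # register a custom rule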
sequel-5.63.0/spec/extensions/input_transformer_spec.rb

require_relative "spec_helper"

describe "Sequel::Plugins::InputTransformer" do
  before do
    @c = Class.new(Sequel::Model)
    @c.columns :name, :b
    @c.plugin(:input_transformer, :reverser){|v| v.is_a?(String) ? v.reverse : v}
    @o = @c.new
  end

  it "should apply transformation to input" do
    @o.name = ' name '
    @o.name.must_equal ' eman '
    @o.name = [1, 2, 3]
    @o.name.must_equal [1, 2, 3]
  end

  it "should have working .input_transformer_order" do
    @c.input_transformer_order.must_equal [:reverser]
    @c.plugin(:input_transformer, :reverser2){|v| v.is_a?(String) ? v.reverse : v}
    @c.input_transformer_order.must_equal [:reverser2, :reverser]
  end

  it "should not apply any transformers by default" do
    c = Class.new(Sequel::Model)
    c.columns :name, :b
    c.plugin :input_transformer
    c.new(:name => ' name ').name.must_equal ' name '
  end

  it "should allow skipping of columns using .skip_input_transformer" do
    @c.skip_input_transformer :reverser, :name
    v = ' name '
    @o.name = v
    @o.name.must_be_same_as(v)
  end

  it "should work correctly in subclasses" do
    o = Class.new(@c).new
    o.name = ' name '
    o.name.must_equal ' eman '
  end

  it "should raise an error if adding input filter without name" do
    proc{@c.add_input_transformer(nil){}}.must_raise(Sequel::Error)
    proc{@c.plugin(:input_transformer){}}.must_raise(Sequel::Error)
  end

  it "should raise an error if adding input filter without block" do
    proc{@c.add_input_transformer(:foo)}.must_raise(Sequel::Error)
    proc{@c.plugin(:input_transformer, :foo)}.must_raise(Sequel::Error)
  end

  it "should apply multiple input transformers in reverse order of their call" do
    @c.add_input_transformer(:add_bar){|v| v << 'bar'}
    @c.add_input_transformer(:add_foo){|v| v << 'foo'}
    @o.name = ' name '.dup
    @o.name.must_equal 'raboof eman '
  end

  it "should freeze input transformers when freezing model class" do
    @c.skip_input_transformer :reverser, :name
    @c.freeze
    @c.input_transformers.frozen?.must_equal true
    skip = @c.instance_variable_get(:@skip_input_transformer_columns)
    skip.frozen?.must_equal true
    skip.values.all?(&:frozen?).must_equal true
  end
end
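# Minimal usage sketch of the plugin specced above (hypothetical model name):
#
#   Album.plugin(:input_transformer, :stripper){|v| v.is_a?(String) ? v.strip : v}
#   Album.new(:name=>'  A  ').name  # => "A"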
sequel-5.63.0/spec/extensions/insert_conflict_spec.rb

require_relative "spec_helper"

describe "insert_conflict plugin" do
  def model_class(adapter)
    db = Sequel.mock(:host=>adapter, :fetch=>{:id=>1, :s=>2}, :autoid=>1)
    db.extend_datasets{def quote_identifiers?; false end}
    model = Class.new(Sequel::Model)
    model.dataset = db[:t]
    model.columns :id, :s, :o
    model.plugin :insert_conflict
    db.sqls
    model
  end

  def model_class_plugin_first(adapter)
    model = Class.new(Sequel::Model)
    model.plugin :insert_conflict
    model = Class.new(model)
    db = Sequel.mock(:host=>adapter, :fetch=>{:id=>1, :s=>2}, :autoid=>1)
    db.extend_datasets{def quote_identifiers?; false end}
    model.dataset = db[:t]
    model.columns :id, :s, :o
    db.sqls
    model
  end

  it "should use INSERT ON CONFLICT when inserting on PostgreSQL" do
    model = model_class(:postgres)
    model.new(:s=>'A', :o=>1).insert_conflict.save
    model.db.sqls.must_equal ["INSERT INTO t (s, o) VALUES ('A', 1) ON CONFLICT DO NOTHING RETURNING *"]

    model.new(:s=>'A', :o=>1).insert_conflict(:target=>:s, :update => {:o => Sequel[:excluded][:o]}).save
    model.db.sqls.must_equal ["INSERT INTO t (s, o) VALUES ('A', 1) ON CONFLICT (s) DO UPDATE SET o = excluded.o RETURNING *"]
  end

  it "should use INSERT ON CONFLICT when inserting on SQLITE" do
    model = model_class(:sqlite)
    model.new(:s=>'A', :o=>1).insert_conflict.save
    model.db.sqls.must_equal ["INSERT INTO t (s, o) VALUES ('A', 1) ON CONFLICT DO NOTHING",
      "SELECT * FROM t WHERE (id = 1) LIMIT 1"]

    model.new(:s=>'A', :o=>1).insert_conflict(:target=>:s, :update => {:o => Sequel[:excluded][:o]}).save
    model.db.sqls.must_equal ["INSERT INTO t (s, o) VALUES ('A', 1) ON CONFLICT (s) DO UPDATE SET o = excluded.o",
      "SELECT * FROM t WHERE (id = 2) LIMIT 1"]
  end

  it "should raise Error if calling insert_conflict on a model instance that isn't new" do
    m = model_class(:postgres).load(:s=>'A', :o=>1)
    proc{m.insert_conflict}.must_raise Sequel::Error
  end

  it "should raise if loading plugin into a model class with a dataset that doesn't support insert_conflict" do
    model = Class.new(Sequel::Model)
    model.dataset = Sequel.mock[:t]
    proc{model.plugin :insert_conflict}.must_raise Sequel::Error
  end

  it "should work if loading into a model class without a dataset on PostgreSQL" do
    model = model_class_plugin_first(:postgres)
    model.new(:s=>'A', :o=>1).insert_conflict.save
    model.db.sqls.must_equal ["INSERT INTO t (s, o) VALUES ('A', 1) ON CONFLICT DO NOTHING RETURNING *"]

    model.new(:s=>'A', :o=>1).insert_conflict(:target=>:s, :update => {:o => Sequel[:excluded][:o]}).save
    model.db.sqls.must_equal ["INSERT INTO t (s, o) VALUES ('A', 1) ON CONFLICT (s) DO UPDATE SET o = excluded.o RETURNING *"]
  end

  it "should work if loading into a model class without a dataset on SQLITE" do
    model = model_class_plugin_first(:sqlite)
    model.new(:s=>'A', :o=>1).insert_conflict.save
    model.db.sqls.must_equal ["INSERT INTO t (s, o) VALUES ('A', 1) ON CONFLICT DO NOTHING",
      "SELECT * FROM t WHERE (id = 1) LIMIT 1"]

    model.new(:s=>'A', :o=>1).insert_conflict(:target=>:s, :update => {:o => Sequel[:excluded][:o]}).save
    model.db.sqls.must_equal ["INSERT INTO t (s, o) VALUES ('A', 1) ON CONFLICT (s) DO UPDATE SET o = excluded.o",
      "SELECT * FROM t WHERE (id = 2) LIMIT 1"]
  end

  it "should work if the prepared_statements plugin is loaded before" do
    db = Sequel.mock(:host=>'sqlite', :fetch=>{:id=>1, :s=>2}, :autoid=>1, :numrows=>1)
    db.extend_datasets{def quote_identifiers?; false end}
    model = Class.new(Sequel::Model)
    model.dataset = db[:t]
    model.columns :id, :s
    model.plugin :prepared_statements
    model.plugin :insert_conflict
    db.sqls

    model.create(:s=>'a').update(:s=>'b')
    db.sqls.must_equal ["INSERT INTO t (s) VALUES ('a')",
      "SELECT * FROM t WHERE (id = 1) LIMIT 1",
      "UPDATE t SET s = 'b' WHERE (id = 1)"]
  end

  it "should work if the prepared_statements plugin is loaded after" do
    db = Sequel.mock(:host=>'postgres', :fetch=>{:id=>1, :s=>2}, :autoid=>1, :numrows=>1)
    db.extend_datasets{def quote_identifiers?; false end}
    model = Class.new(Sequel::Model)
    model.dataset = db[:t]
    model.columns :id, :s
    model.plugin :insert_conflict
    model.plugin :prepared_statements
    db.sqls

    model.create(:s=>'a').update(:s=>'b')
    db.sqls.must_equal ["INSERT INTO t (s) VALUES ('a') RETURNING *",
      "UPDATE t SET s = 'b' WHERE (id = 1)"]
  end
end

sequel-5.63.0/spec/extensions/insert_returning_select_spec.rb

require_relative "spec_helper"

describe "Sequel::Plugins::InsertReturningSelect" do
  before do
    @db = Sequel.mock(:fetch=>{:id=>1, :x=>2}, :autoid=>1)
    @db.extend_datasets do
      def supports_returning?(_) true end
      def insert_select(*v)
        with_sql_first("#{insert_sql(*v)}#{" RETURNING #{opts[:returning].map{|x| literal(x)}.join(', ')}" if opts[:returning]}")
      end
    end
    @Album = Class.new(Sequel::Model(@db[:albums].select(:id, :x)))
    @Album.columns :id, :x
    @db.sqls
  end

  it "should work if loaded into a model without a dataset that also uses the lazy_attributes or dataset_associations plugins" do
    c = Sequel::Model(@db)
    c.plugin :insert_returning_select
    c.columns :id, :x
    c.plugin :lazy_attributes
    c.plugin :dataset_associations
    c.set_dataset @db[:albums].select(:id, :x)
    c.plugin :lazy_attributes, :x
    c.many_to_one :c, :key=>:id, :class=>c
    @db.sqls
    c.dataset.sql.must_equal 'SELECT id FROM albums'
    c.create(:x=>2)
    @db.sqls.must_equal ['INSERT INTO albums (x) VALUES (2) RETURNING id']
    c.load(:id=>2).x
    @db.sqls.must_equal ['SELECT albums.x FROM albums WHERE (id = 2) LIMIT 1']
    c.dataset.cs.sql.must_equal "SELECT id FROM albums WHERE (albums.id IN (SELECT albums.id FROM albums))"
  end

  it "should add a returning clause when inserting using selected columns" do
    @Album.plugin :insert_returning_select
    @Album.create(:x=>2).must_equal @Album.load(:id=>1, :x=>2)
    @db.sqls.must_equal ['INSERT INTO albums (x) VALUES (2) RETURNING id, x']
  end

  it "should not add a returning clause if no columns are selected" do
    @Album.plugin :insert_returning_select
    @Album.dataset = @Album.dataset.select_all
    @db.sqls.clear
    @Album.create(:x=>2).must_equal @Album.load(:id=>1, :x=>2)
    @db.sqls.must_equal ['INSERT INTO albums (x) VALUES (2)']
  end

  it "should not add a returning clause if selection does not consist of just columns" do
    @Album.dataset = @Album.dataset.select_append(Sequel.as(1, :b))
    @Album.plugin :insert_returning_select
    @db.sqls.clear
    @Album.create(:x=>2).must_equal @Album.load(:id=>1, :x=>2)
    @db.sqls.must_equal ['INSERT INTO albums (x) VALUES (2)', 'SELECT id, x, 1 AS b FROM albums WHERE (id = 1) LIMIT 1']
  end

  it "should not add a returning clause if database doesn't support it" do
    @db.extend_datasets{def supports_returning?(_) false end}
    @Album.plugin :insert_returning_select
    @Album.create(:x=>2).must_equal @Album.load(:id=>1, :x=>2)
    @db.sqls.must_equal ['INSERT INTO albums (x) VALUES (2)', 'SELECT id, x FROM albums WHERE (id = 1) LIMIT 1']
  end

  it "should work correctly with subclasses" do
    c = Class.new(Sequel::Model)
    c.plugin :insert_returning_select
    b = Class.new(c)
    b.columns :id, :x
    b.dataset = @db[:albums].select(:id, :x)
    @db.sqls.clear
    b.create(:x=>2).must_equal b.load(:id=>1, :x=>2)
    @db.sqls.must_equal ['INSERT INTO albums (x) VALUES (2) RETURNING id, x']
  end

  it "should freeze instance_insert_dataset when freezing model class" do
    @Album.plugin :insert_returning_select
    @Album.freeze
    @Album.instance_insert_dataset.frozen?.must_equal true
  end
end

sequel-5.63.0/spec/extensions/instance_filters_spec.rb

require_relative "spec_helper"

describe "instance_filters plugin" do
  before do
    @c = Class.new(Sequel::Model(:people))
    @c.columns :id, :name, :num
    @c.plugin :instance_filters
    @p = @c.load(:id=>1, :name=>'John', :num=>1)
    DB.sqls
  end

  it "should raise an error when updating a stale record" do
    @p.update(:name=>'Bob')
    DB.sqls.must_equal ["UPDATE people SET name = 'Bob' WHERE (id = 1)"]
    @p.instance_filter(:name=>'Jim')
    @p.instance_variable_set(:@this, @p.this.with_numrows(0))
    proc{@p.update(:name=>'Joe')}.must_raise(Sequel::Plugins::InstanceFilters::Error)
    DB.sqls.must_equal ["UPDATE people SET name = 'Joe' WHERE ((id = 1) AND (name = 'Jim'))"]
  end

  it "should raise an error when destroying a stale record" do
    @p.destroy
    DB.sqls.must_equal ["DELETE FROM people WHERE id = 1"]
    @p.instance_filter(:name=>'Jim')
    @p.instance_variable_set(:@this, @p.this.with_numrows(0))
    proc{@p.destroy}.must_raise(Sequel::Plugins::InstanceFilters::Error)
    DB.sqls.must_equal ["DELETE FROM people WHERE ((id = 1) AND (name = 'Jim'))"]
  end

  it "should work when using the prepared_statements plugin" do
    @c.plugin :prepared_statements
    @p.update(:name=>'Bob')
    DB.sqls.must_equal ["UPDATE people SET name = 'Bob' WHERE (id = 1)"]
    @p.instance_filter(:name=>'Jim')
    @p.instance_variable_set(:@this, @p.this.with_numrows(0))
    proc{@p.update(:name=>'Joe')}.must_raise(Sequel::Plugins::InstanceFilters::Error)
    DB.sqls.must_equal ["UPDATE people SET name = 'Joe' WHERE ((id = 1) AND (name = 'Jim'))"]

    @p = @c.load(:id=>1, :name=>'John', :num=>1)
    @p.instance_variable_set(:@this, @p.this.with_numrows(1))
    @c.instance_variable_set(:@fast_instance_delete_sql, nil)
    @p.destroy
    DB.sqls.must_equal ["DELETE FROM people WHERE (id = 1)"]
    @p.instance_filter(:name=>'Jim')
    @p.instance_variable_set(:@this, @p.this.with_numrows(0))
    proc{@p.destroy}.must_raise(Sequel::Plugins::InstanceFilters::Error)
    DB.sqls.must_equal ["DELETE FROM people WHERE ((id = 1) AND (name = 'Jim'))"]
    @c.create.must_be_kind_of(@c)
  end

  it "should work when using the prepared_statements plugin when loading the prepared_statements plugin first" do
    @c = Class.new(Sequel::Model(:people))
    @c.columns :id, :name, :num
    @c.plugin :prepared_statements
    @c.plugin :instance_filters
    @p = @c.load(:id=>1, :name=>'John', :num=>1)
    DB.sqls

    @p.update(:name=>'Bob')
    DB.sqls.must_equal ["UPDATE people SET name = 'Bob' WHERE (id = 1)"]
    @p.instance_filter(:name=>'Jim')
    @p.instance_variable_set(:@this, @p.this.with_numrows(0))
    proc{@p.update(:name=>'Joe')}.must_raise(Sequel::Plugins::InstanceFilters::Error)
    DB.sqls.must_equal ["UPDATE people SET name = 'Joe' WHERE ((id = 1) AND (name = 'Jim'))"]

    @p = @c.load(:id=>1, :name=>'John', :num=>1)
    @p.instance_variable_set(:@this, @p.this.with_numrows(1))
    @c.instance_variable_set(:@fast_instance_delete_sql, nil)
    @p.destroy
    DB.sqls.must_equal ["DELETE FROM people WHERE (id = 1)"]
    @p.instance_filter(:name=>'Jim')
    @p.instance_variable_set(:@this, @p.this.with_numrows(0))
    proc{@p.destroy}.must_raise(Sequel::Plugins::InstanceFilters::Error)
    DB.sqls.must_equal ["DELETE FROM people WHERE ((id = 1) AND (name = 'Jim'))"]
    @c.create.must_be_kind_of(@c)
  end

  it "should apply all instance filters" do
    @p.instance_filter(:name=>'Jim')
    @p.instance_filter{num > 2}
    @p.update(:name=>'Bob')
    DB.sqls.must_equal ["UPDATE people SET name = 'Bob' WHERE ((id = 1) AND (name = 'Jim') AND (num > 2))"]
  end

  it "should drop instance filters after updating" do
    @p.instance_filter(:name=>'Joe')
    @p.update(:name=>'Joe')
    DB.sqls.must_equal ["UPDATE people SET name = 'Joe' WHERE ((id = 1) AND (name = 'Joe'))"]
    @p.update(:name=>'Bob')
    DB.sqls.must_equal ["UPDATE people SET name = 'Bob' WHERE (id = 1)"]
  end

  it "shouldn't allow instance filters on frozen objects" do
    @p.instance_filter(:name=>'Joe')
    @p.freeze
    proc{@p.instance_filter(:name=>'Jim')}.must_raise
  end

  it "should have dup duplicate internal structures" do
    @p.instance_filter(:name=>'Joe')
    @p.dup.send(:instance_filters).must_equal @p.send(:instance_filters)
    @p.dup.send(:instance_filters).wont_be_same_as(@p.send(:instance_filters))
  end
end
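# A minimal sketch of the optimistic-locking style usage exercised above
# (illustrative only; assumes a `people` table with `id` and `name` columns):
#
#   class Person < Sequel::Model
#     plugin :instance_filters
#   end
#   person = Person.first
#   person.instance_filter(:name => 'John')  # extra WHERE condition for UPDATE/DELETE
#   person.update(:name => 'Jim')            # Sequel::Plugins::InstanceFilters::Error if no row matched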
sequel-5.63.0/spec/extensions/instance_hooks_spec.rb

require_relative "spec_helper"

describe "InstanceHooks plugin" do
  def r(x=nil)
    @r << x
    yield if block_given?
    x
  end

  before do
    @c = Class.new(Sequel::Model(:items))
    @c.plugin :instance_hooks
    @c.raise_on_save_failure = false
    @o = @c.new
    @x = @c.load({:id=>1})
    @r = []
  end

  it "should support before_create_hook and after_create_hook" do
    @o.after_create_hook{r 1}
    @o.before_create_hook{r 2}
    @o.after_create_hook{r 3}
    @o.before_create_hook{r 4}
    @o.save.wont_equal nil
    @r.must_equal [4, 2, 1, 3]
  end

  it "should cancel the save if before_create_hook block calls cancel_action" do
    @o.after_create_hook{r 1}
    @o.before_create_hook{r{@o.cancel_action}}
    @o.before_create_hook{r 4}
    @o.save.must_be_nil
    @r.must_equal [4, nil]
    @r.clear
    @o.save.must_be_nil
    @r.must_equal [4, nil]
  end

  it "should support before_update_hook and after_update_hook" do
    @x.after_update_hook{r 1}
    @x.before_update_hook{r 2}
    @x.after_update_hook{r 3}
    @x.before_update_hook{r 4}
    @x.save.wont_equal nil
    @r.must_equal [4, 2, 1, 3]
    @x.save.wont_equal nil
    @r.must_equal [4, 2, 1, 3]
  end

  it "should cancel the save if before_update_hook block calls cancel_action" do
    @x.after_update_hook{r 1}
    @x.before_update_hook{r{@x.cancel_action}}
    @x.before_update_hook{r 4}
    @x.save.must_be_nil
    @r.must_equal [4, nil]
    @r.clear
    @x.save.must_be_nil
    @r.must_equal [4, nil]
  end

  it "should support before_save_hook and after_save_hook" do
    @o.after_save_hook{r 1}
    @o.before_save_hook{r 2}
    @o.after_save_hook{r 3}
    @o.before_save_hook{r 4}
    @o.save.wont_equal nil
    @r.must_equal [4, 2, 1, 3]
    @r.clear

    @x.after_save_hook{r 1}
    @x.before_save_hook{r 2}
    @x.after_save_hook{r 3}
    @x.before_save_hook{r 4}
    @x.save.wont_equal nil
    @r.must_equal [4, 2, 1, 3]
    @x.save.wont_equal nil
    @r.must_equal [4, 2, 1, 3]
  end

  it "should cancel the save if before_save_hook block calls cancel_action" do
    @x.after_save_hook{r 1}
    @x.before_save_hook{r{@x.cancel_action}}
    @x.before_save_hook{r 4}
    @x.save.must_be_nil
    @r.must_equal [4, nil]
    @r.clear

    @x.after_save_hook{r 1}
    @x.before_save_hook{r{@x.cancel_action}}
    @x.before_save_hook{r 4}
    @x.save.must_be_nil
    @r.must_equal [4, nil]
    @r.clear
    @x.save.must_be_nil
    @r.must_equal [4, nil]
  end

  it "should support before_destroy_hook and after_destroy_hook" do
    @x.after_destroy_hook{r 1}
    @x.before_destroy_hook{r 2}
    @x.after_destroy_hook{r 3}
    @x.before_destroy_hook{r 4}
    @x.destroy.wont_equal nil
    @r.must_equal [4, 2, 1, 3]
  end

  it "should cancel the destroy if before_destroy_hook block calls cancel_action" do
    @x.after_destroy_hook{r 1}
    @x.before_destroy_hook{r{@x.cancel_action}}
    @x.before_destroy_hook{r 4}
    @x.destroy.must_be_nil
    @r.must_equal [4, nil]
  end

  it "should support before_validation_hook and after_validation_hook" do
    @o.after_validation_hook{r 1}
    @o.before_validation_hook{r 2}
    @o.after_validation_hook{r 3}
    @o.before_validation_hook{r 4}
    @o.valid?.must_equal true
    @r.must_equal [4, 2, 1, 3]
  end

  it "should cancel the save if before_validation_hook block calls cancel_action" do
    @o.after_validation_hook{r 1}
    @o.before_validation_hook{r{@o.cancel_action}}
    @o.before_validation_hook{r 4}
    @o.valid?.must_equal false
    @r.must_equal [4, nil]
    @r.clear
    @o.valid?.must_equal false
    @r.must_equal [4, nil]
  end

  it "should clear only related hooks on successful create" do
    @o.after_destroy_hook{r 1}
    @o.before_destroy_hook{r 2}
    @o.after_update_hook{r 3}
    @o.before_update_hook{r 4}
    @o.before_save_hook{r 5}
    @o.after_save_hook{r 6}
    @o.before_create_hook{r 7}
    @o.after_create_hook{r 8}
    @o.save.wont_equal nil
    @r.must_equal [5, 7, 8, 6]
    @o.instance_variable_set(:@new, false)
    @o.save.wont_equal nil
    @r.must_equal [5, 7, 8, 6, 4, 3]
    @o.save.wont_equal nil
    @r.must_equal [5, 7, 8, 6, 4, 3]
    @o.destroy
    @r.must_equal [5, 7, 8, 6, 4, 3, 2, 1]
  end

  it "should clear only related hooks on successful update" do
    @x.after_destroy_hook{r 1}
    @x.before_destroy_hook{r 2}
    @x.before_update_hook{r 3}
    @x.after_update_hook{r 4}
    @x.before_save_hook{r 5}
    @x.after_save_hook{r 6}
    @x.save.wont_equal nil
    @r.must_equal [5, 3, 4, 6]
    @x.save.wont_equal nil
    @r.must_equal [5, 3, 4, 6]
    @x.destroy
    @r.must_equal [5, 3, 4, 6, 2, 1]
  end

  it "should clear only related hooks on successful destroy" do
    @x.after_destroy_hook{r 1}
    @x.before_destroy_hook{r 2}
    @x.before_update_hook{r 3}
    @x.before_save_hook{r 4}
    @x.destroy
    @r.must_equal [2, 1]
    @x.save.wont_equal nil
    @r.must_equal [2, 1, 4, 3]
  end

  it "should not clear validations hooks on successful save" do
    @x.after_validation_hook{@x.errors.add(:id, 'a') if @x.id == 1; r 1}
    @x.before_validation_hook{r 2}
    @x.save.must_be_nil
    @r.must_equal [2, 1]
    @x.save.must_be_nil
    @r.must_equal [2, 1, 2, 1]
    @x.id = 2
    @x.save.must_equal @x
    @r.must_equal [2, 1, 2, 1, 2, 1]
    @x.save.must_equal @x
    @r.must_equal [2, 1, 2, 1, 2, 1]
  end

  it "should not allow addition of instance hooks to frozen instances" do
    @x.after_destroy_hook{r 1}
    @x.before_destroy_hook{r 2}
    @x.before_update_hook{r 3}
    @x.before_save_hook{r 4}
    @x.freeze
    proc{@x.after_destroy_hook{r 1}}.must_raise(Sequel::Error)
    proc{@x.before_destroy_hook{r 2}}.must_raise(Sequel::Error)
    proc{@x.before_update_hook{r 3}}.must_raise(Sequel::Error)
    proc{@x.before_save_hook{r 4}}.must_raise(Sequel::Error)
  end
end

describe "InstanceHooks plugin with transactions" do
  before do
    @db = Sequel.mock(:numrows=>1)
    @c = Class.new(Sequel::Model(@db[:items])) do
      attr_accessor :rb
      def after_save
        super
        db.execute('as')
        raise Sequel::Rollback if rb
      end
      def after_destroy
        super
        db.execute('ad')
        raise Sequel::Rollback if rb
      end
    end
    @c.use_transactions = true
    @c.plugin :instance_hooks
    @o = @c.load({:id=>1})
    @or = @c.load({:id=>1})
    @or.rb = true
    @r = []
    @db.sqls
  end

  it "should have *_hook methods return self" do
    @o.before_destroy_hook{r 1}.must_be_same_as(@o)
    @o.before_validation_hook{r 1}.must_be_same_as(@o)
    @o.before_save_hook{r 1}.must_be_same_as(@o)
    @o.before_update_hook{r 1}.must_be_same_as(@o)
    @o.before_create_hook{r 1}.must_be_same_as(@o)
    @o.after_destroy_hook{r 1}.must_be_same_as(@o)
    @o.after_validation_hook{r 1}.must_be_same_as(@o)
    @o.after_save_hook{r 1}.must_be_same_as(@o)
    @o.after_update_hook{r 1}.must_be_same_as(@o)
    @o.after_create_hook{r 1}.must_be_same_as(@o)
  end
end
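# A minimal sketch of per-instance hooks as exercised above (illustrative only;
# `Item` is an assumed model with the plugin loaded, and `notify_admin` is a
# hypothetical helper):
#
#   item = Item.new
#   item.before_save_hook{item.name = item.name.strip}
#   item.after_save_hook{notify_admin(item)}
#   item.save # runs both blocks for this instance only, then discards them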
sequel-5.63.0/spec/extensions/instance_specific_default_spec.rb

require_relative "spec_helper"

describe "instance_specific_default plugin" do
  before do
    @db = Sequel.mock
    @c = Class.new(Sequel::Model(@db[:test]))
    def @c.name; 'C' end
    @c.columns :id, :name
    @db.sqls
  end

  it "should support setting a true value" do
    @c.plugin :instance_specific_default, true
    @c.many_to_one :c, :class=>@c do |ds| ds end
    @c.association_reflection(:c)[:instance_specific].must_equal true
  end

  it "should support setting a false value" do
    @c.plugin :instance_specific_default, false
    @c.many_to_one :c, :class=>@c do |ds| ds end
    @c.association_reflection(:c)[:instance_specific].must_equal false
  end

  it "should support setting a :default value" do
    @c.plugin :instance_specific_default, :default
    @c.many_to_one :c, :class=>@c do |ds| ds end
    @c.association_reflection(:c)[:instance_specific].must_equal true
  end

  it "should support setting a :warn value" do
    warn_args = nil
    @c.define_singleton_method(:warn){|*args| warn_args = args}
    @c.plugin :instance_specific_default, :warn
    @c.many_to_one :c, :class=>@c do |ds| ds end
    @c.association_reflection(:c)[:instance_specific].must_equal true
    warn_args[0].must_match(/possibly instance-specific association without :instance_specific option/)
    warn_args[1].must_equal(:uplevel=>3)
  end

  it "should support setting a :raise value" do
    @c.plugin :instance_specific_default, :raise
    proc{@c.many_to_one :c, :class=>@c do |ds| ds end}.must_raise Sequel::Error
  end

  it "should raise if an invalid option is given" do
    @c.plugin :instance_specific_default, Object.new
    proc{@c.many_to_one :c, :class=>@c do |ds| ds end}.must_raise Sequel::Error
  end

  it "should work correctly in subclasses" do
    @c.plugin :instance_specific_default, false
    c = Class.new(@c)
    c.many_to_one :c, :class=>@c do |ds| ds end
    c.association_reflection(:c)[:instance_specific].must_equal false
  end

  it "should be ignored for associations with a :dataset option" do
    @c.plugin :instance_specific_default, false
    @c.many_to_one :c, :class=>@c, :dataset=>proc{|r| r.associated_class.where(:id=>id)}
    @c.association_reflection(:c)[:instance_specific].must_equal true
  end

  it "should be considered when cloning an association with a block" do
    @c.plugin :instance_specific_default, false
    @c.many_to_one :c, :class=>@c do |ds| ds end
    @c.many_to_one :c, :clone=>:c
    @c.association_reflection(:c)[:instance_specific].must_equal false
  end
end
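# A minimal sketch of configuring the plugin as exercised above (illustrative only):
#
#   Sequel::Model.plugin :instance_specific_default, false
#   # Block-based associations now default to :instance_specific=>false, unless
#   # they use the :dataset option or an explicit :instance_specific option;
#   # :warn or :raise can be used instead to audit existing associations.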
sequel-5.63.0/spec/extensions/integer64_spec.rb

require_relative "spec_helper"

describe "integer64 extension" do
  before do
    @db = Sequel.mock.extension(:integer64)
  end

  it "should use bigint as default integer type" do
    @db.create_table(:t){Integer :a; column :b, Integer}
    @db.sqls.must_equal ['CREATE TABLE t (a bigint, b bigint)']
  end

  it "should use bigint as default type for primary_key and foreign_key" do
    @db.create_table(:t){primary_key :id; foreign_key :t_id, :t}
    @db.sqls.must_equal ['CREATE TABLE t (id bigint PRIMARY KEY AUTOINCREMENT, t_id bigint REFERENCES t)']
  end

  it "should use bigint when casting" do
    @db.get(Sequel.cast('a', Integer))
    @db.sqls.must_equal ["SELECT CAST('a' AS bigint) AS v LIMIT 1"]
  end
end

sequel-5.63.0/spec/extensions/inverted_subsets_spec.rb

require_relative "spec_helper"

describe "Sequel::Plugins::InvertedSubsets" do
  it "should add an inverted subset method which inverts the condition" do
    c = Class.new(Sequel::Model(:a))
    c.plugin :inverted_subsets
    c.dataset_module{subset(:published, :published => true)}
    c.not_published.sql.must_equal 'SELECT * FROM a WHERE (published IS NOT TRUE)'
  end

  it "should support a configuration block to customise the inverted method name" do
    c = Class.new(Sequel::Model(:a))
    c.plugin(:inverted_subsets){|name| "exclude_#{name}"}
    c.dataset_module{where(:published, :published => true)}
    c.exclude_published.sql.must_equal 'SELECT * FROM a WHERE (published IS NOT TRUE)'
  end

  it "should chain to existing dataset" do
    c = Class.new(Sequel::Model(:a))
    c.plugin :inverted_subsets
    c.dataset_module{where(:published, :published => true)}
    c.where(1=>0).not_published.sql.must_equal 'SELECT * FROM a WHERE ((1 = 0) AND (published IS NOT TRUE))'
  end

  it "should work in subclasses" do
    c = Class.new(Sequel::Model)
    c.plugin(:inverted_subsets){|name| "exclude_#{name}"}
    c = Class.new(c)
    c.dataset = :a
    c.dataset_module{subset(:published, :published => true)}
    c.exclude_published.sql.must_equal 'SELECT * FROM a WHERE (published IS NOT TRUE)'
  end
end
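# A minimal sketch of the inverted_subsets behavior exercised above
# (illustrative only; `Post` is a hypothetical model):
#
#   class Post < Sequel::Model
#     plugin :inverted_subsets
#     dataset_module{where(:published, :published => true)}
#   end
#   Post.published.sql     # => "SELECT * FROM posts WHERE (published IS TRUE)"
#   Post.not_published.sql # => "SELECT * FROM posts WHERE (published IS NOT TRUE)"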
sequel-5.63.0/spec/extensions/is_distinct_from_spec.rb

require_relative "spec_helper"

describe "is_distinct_from extension" do
  dbf = lambda do |db_type|
    db = Sequel.connect("mock://#{db_type}")
    db.extension :is_distinct_from
    db
  end

  it "should support Sequel.is_distinct_from" do
    dbf[:postgres].literal(Sequel.is_distinct_from(:a, :b)).must_equal '("a" IS DISTINCT FROM "b")'
  end

  it "should support is_distinct_from on Sequel expressions" do
    dbf[:postgres].literal(Sequel[:a].is_distinct_from(:b)).must_equal '("a" IS DISTINCT FROM "b")'
  end

  it "should support is_distinct_from on literal strings" do
    dbf[:postgres].literal(Sequel.lit('a').is_distinct_from(:b)).must_equal '(a IS DISTINCT FROM "b")'
  end

  it "should use IS DISTINCT FROM syntax on PostgreSQL and H2" do
    dbf[:postgres].literal(Sequel.is_distinct_from(:a, :b)).must_equal '("a" IS DISTINCT FROM "b")'

    db = dbf[:h2]
    def db.database_type; :h2; end
    db.literal(Sequel.is_distinct_from(:a, :b)).must_equal '(a IS DISTINCT FROM b)'
  end

  it "should use IS DISTINCT FROM syntax on SQLite 3.39+" do
    db = dbf[:sqlite]
    def db.sqlite_version; 33900; end
    db.literal(Sequel.is_distinct_from(:a, :b)).must_equal '(`a` IS DISTINCT FROM `b`)'

    db = dbf[:sqlite]
    def db.sqlite_version; 33800; end
    db.literal(Sequel.is_distinct_from(:a, :b)).must_equal "((CASE WHEN ((`a` = `b`) OR ((`a` IS NULL) AND (`b` IS NULL))) THEN 0 ELSE 1 END) = 1)"
  end

  it "should handle given nil values on derby" do
    db = dbf[:derby]
    def db.database_type; :derby; end
    db.literal(Sequel.is_distinct_from(:a, :b)).must_equal "((CASE WHEN ((a = b) OR ((a IS NULL) AND (b IS NULL))) THEN 0 ELSE 1 END) = 1)"
    db.literal(Sequel.is_distinct_from(nil, :b)).must_equal "(b IS NOT NULL)"
    db.literal(Sequel.is_distinct_from(:a, nil)).must_equal "(a IS NOT NULL)"
    db.literal(Sequel.is_distinct_from(nil, nil)).must_equal "'f'" # FALSE or (1 = 0) when using jdbc/derby adapter
  end

  it "should emulate IS DISTINCT FROM behavior on other databases" do
    dbf[:foo].literal(Sequel.is_distinct_from(:a, :b)).must_equal "((CASE WHEN ((a = b) OR ((a IS NULL) AND (b IS NULL))) THEN 0 ELSE 1 END) = 1)"
  end

  it "should respect existing supports_is_distinct_from? dataset method" do
    db = Sequel.mock
    db.extend_datasets do
      private
      def supports_is_distinct_from?; true; end
    end
    db.extension :is_distinct_from
    db.literal(Sequel.is_distinct_from(:a, :b)).must_equal "(a IS DISTINCT FROM b)"
  end
end

sequel-5.63.0/spec/extensions/json_serializer_spec.rb

require_relative "spec_helper"

describe "Sequel::Plugins::JsonSerializer" do
  before do
    class ::Artist < Sequel::Model
      unrestrict_primary_key
      plugin :json_serializer, :naked=>true
      columns :id, :name
      def_column_accessor :id, :name
      @db_schema = {:id=>{:type=>:integer}}
      one_to_many :albums
    end
    class ::Album < Sequel::Model
      unrestrict_primary_key
      attr_accessor :blah
      plugin :json_serializer, :naked=>true
      columns :id, :name, :artist_id
      def_column_accessor :id, :name, :artist_id
      many_to_one :artist
    end
    @artist = Artist.load(:id=>2, :name=>'YJM')
    @artist.associations[:albums] = []
    @album = Album.load(:id=>1, :name=>'RF')
    @album.artist = @artist
    @album.blah = 'Blah'
  end
  after do
    Object.send(:remove_const, :Artist)
    Object.send(:remove_const, :Album)
  end

  it "should round trip successfully" do
    Artist.from_json(@artist.to_json).must_equal @artist
    Album.from_json(@album.to_json).must_equal @album
  end

  it "should support to_json_data for getting a JSON data structure" do
    @artist.to_json_data.must_equal("id"=>2, "name"=>"YJM")
    @album.to_json_data.must_equal("id"=>1, "name"=>"RF", "artist_id"=>2)
  end

  it "should handle ruby objects in values" do
    class ::Artist
      def name=(v)
        super(Date.parse(v))
      end
    end
    Artist.from_json(Artist.load(:name=>Date.today).to_json).must_equal Artist.load(:name=>Date.today)
  end

  it "should support setting json_serializer_opts on models" do
    @artist.json_serializer_opts(:only=>:name)
    Sequel.parse_json([@artist].to_json).must_equal [{'name'=>@artist.name}]
    @artist.json_serializer_opts(:include=>{:albums=>{:only=>:name}})
    Sequel.parse_json([@artist].to_json).must_equal [{'name'=>@artist.name, 'albums'=>[{'name'=>@album.name}]}]
  end

  it "should handle the :only option" do
    Artist.from_json(@artist.to_json(:only=>:name)).must_equal Artist.load(:name=>@artist.name)
    Album.from_json(@album.to_json(:only=>[:id, :name])).must_equal Album.load(:id=>@album.id, :name=>@album.name)
  end

  it "should handle the :except option" do
    Artist.from_json(@artist.to_json(:except=>:id)).must_equal Artist.load(:name=>@artist.name)
    Album.from_json(@album.to_json(:except=>[:id, :artist_id])).must_equal Album.load(:name=>@album.name)
  end

  it "should handle the :include option for associations" do
    Artist.from_json(@artist.to_json(:include=>:albums), :associations=>:albums).albums.must_equal [@album]
    Album.from_json(@album.to_json(:include=>:artist), :associations=>:artist).artist.must_equal @artist
  end

  it "should have from_json handle associations not present" do
    Artist.from_json(@artist.to_json, :associations=>:albums).associations.must_be_empty
    Album.from_json(@album.to_json, :associations=>:artist).associations.must_be_empty
  end

  it "should have #to_json support blocks for transformations" do
    values = {}
    @artist.values.each{|k,v|
      values[k.to_s] = v}
    Sequel.parse_json(@artist.to_json{|h| {'data'=>h}}).must_equal({'data'=>values})
  end

  it "should raise an error if attempting to parse json when providing array to non-array association or vice-versa" do
    proc{Artist.from_json('{"albums":{"id":1,"name":"RF","artist_id":2},"id":2,"name":"YJM"}', :associations=>:albums)}.must_raise(Sequel::Error)
    proc{Album.from_json('{"artist":[{"id":2,"name":"YJM"}],"id":1,"name":"RF","artist_id":2}', :associations=>:artist)}.must_raise(Sequel::Error)
  end

  it "should raise an error if attempting to parse an array containing non-hashes" do
    proc{Artist.from_json('[{"id":2,"name":"YJM"}, 2]')}.must_raise(Sequel::Error)
  end

  it "should raise an error if attempting to parse invalid JSON" do
    begin
      Sequel.instance_eval do
        alias pj parse_json
        def parse_json(v)
          v
        end
        alias parse_json parse_json
      end
      proc{Album.from_json('1')}.must_raise(Sequel::Error)
    ensure
      Sequel.instance_eval do
        alias parse_json pj
      end
    end
  end

  it "should handle case where Sequel.parse_json already returns an instance" do
    begin
      Sequel.instance_eval do
        alias pj parse_json
        def parse_json(v)
          Album.load(:id=>3)
        end
        alias parse_json parse_json
      end
      ::Album.from_json('1').must_equal Album.load(:id=>3)
    ensure
      Sequel.instance_eval do
        alias parse_json pj
      end
    end
  end

  it "should handle the :include option for arbitrary attributes" do
    Album.from_json(@album.to_json(:include=>:blah)).blah.must_equal @album.blah
  end

  it "should handle multiple inclusions using an array for the :include option" do
    a = Album.from_json(@album.to_json(:include=>[:blah, :artist]), :associations=>:artist)
    a.blah.must_equal @album.blah
    a.artist.must_equal @artist
  end

  it "should handle cascading using a hash for the :include option" do
    Artist.from_json(@artist.to_json(:include=>{:albums=>{:include=>:artist}}), :associations=>{:albums=>{:associations=>:artist}}).albums.map{|a| a.artist}.must_equal [@artist]
    Album.from_json(@album.to_json(:include=>{:artist=>{:include=>:albums}}), :associations=>{:artist=>{:associations=>:albums}}).artist.albums.must_equal [@album]

    Artist.from_json(@artist.to_json(:include=>{:albums=>{:only=>:name}}), :associations=>[:albums]).albums.must_equal [Album.load(:name=>@album.name)]
    Album.from_json(@album.to_json(:include=>{:artist=>{:except=>:name}}), :associations=>[:artist]).artist.must_equal Artist.load(:id=>@artist.id)

    Artist.from_json(@artist.to_json(:include=>{:albums=>{:include=>{:artist=>{:include=>:albums}}}}), :associations=>{:albums=>{:associations=>{:artist=>{:associations=>:albums}}}}).albums.map{|a| a.artist.albums}.must_equal [[@album]]
    Album.from_json(@album.to_json(:include=>{:artist=>{:include=>{:albums=>{:only=>:name}}}}), :associations=>{:artist=>{:associations=>:albums}}).artist.albums.must_equal [Album.load(:name=>@album.name)]
  end

  it "should handle usage of association_proxies when cascading using the :include option" do
    Artist.plugin :association_proxies
    Artist.one_to_many :albums, :clone=>:albums
    Artist.from_json(@artist.to_json(:include=>{:albums=>{:include=>:artist}}), :associations=>{:albums=>{:associations=>:artist}}).albums.map{|a| a.artist}.must_equal [@artist]
  end

  it "should handle the :include option cascading with an empty hash" do
    Album.from_json(@album.to_json(:include=>{:artist=>{}}), :associations=>:artist).artist.must_equal @artist
    Album.from_json(@album.to_json(:include=>{:blah=>{}})).blah.must_equal @album.blah
  end

  it "should accept a :naked option to not include the JSON.create_id, so parsing yields a plain hash" do
    Sequel.parse_json(@album.to_json(:naked=>true)).must_equal @album.values.inject({}){|h, (k, v)| h[k.to_s] = v; h}
  end

  it "should support #from_json to set column values" do
    @artist.from_json('{"name": "AS"}')
    @artist.name.must_equal 'AS'
    @artist.id.must_equal 2
  end

  it "should support #from_json to support specific :fields" do
    @album.from_json('{"name": "AS", "artist_id": 3}', :fields=>['name'])
    @album.name.must_equal 'AS'
    @album.artist_id.must_equal 2
  end

  it "should support #from_json to support :missing=>:skip option" do
    @album.from_json('{"artist_id": 3}', :fields=>['name'], :missing=>:skip)
    @album.name.must_equal 'RF'
    @album.artist_id.must_equal 2
  end

  it "should support #from_json to support :missing=>:raise option" do
    proc{@album.from_json('{"artist_id": 3}', :fields=>['name'], :missing=>:raise)}.must_raise(Sequel::Error)
  end

  it "should have #from_json raise an error if parsed json isn't a hash" do
    proc{@artist.from_json('[]')}.must_raise(Sequel::Error)
  end

  it "should raise an exception for json keys that aren't associations, columns, or setter methods" do
    Album.send(:undef_method, :blah=)
    proc{Album.from_json(@album.to_json(:include=>:blah))}.must_raise(Sequel::MassAssignmentRestriction)
  end

  it "should support a to_json class and dataset method" do
    Album.dataset = Album.dataset.with_fetch(:id=>1, :name=>'RF', :artist_id=>2)
    Artist.dataset = Artist.dataset.with_fetch(:id=>2, :name=>'YJM')
    Album.array_from_json(Album.to_json).must_equal [@album]
    Album.array_from_json(Album.to_json(:include=>:artist), :associations=>:artist).map{|x| x.artist}.must_equal [@artist]
    Album.array_from_json(Album.dataset.to_json(:only=>:name)).must_equal [Album.load(:name=>@album.name)]
  end

  it "should support setting json_serializer_opts for datasets" do
    Album.dataset = Album.dataset.with_fetch(:id=>1, :name=>'RF', :artist_id=>2)
    Album.array_from_json(Album.dataset.json_serializer_opts(:only=>:name).to_json).must_equal [Album.load(:name=>@album.name)]
    Album.plugin :json_serializer, :only=>:id
    Album.array_from_json(Album.dataset.json_serializer_opts(:only=>:name).to_json).must_equal [Album.load(:name=>@album.name)]
    Album.array_from_json(Album.dataset.json_serializer_opts(:only=>:name).to_json(:only=>:artist_id)).must_equal [Album.load(:artist_id=>2)]
    json = Album.dataset.json_serializer_opts(:only=>:name, :root=>true).to_json(:only=>:artist_id)
    Album.array_from_json(JSON.parse(json)['albums'].to_json).must_equal [Album.load(:artist_id=>2)]
  end

  it "should have dataset to_json method work with eager_graph datasets" do
    ds = Album.dataset.eager_graph(:artist).with_fetch(:id=>1, :name=>'RF', :artist_id=>2, :artist_id_0=>2, :artist_name=>'YM')
    Sequel.parse_json(ds.to_json(:only=>:name, :include=>{:artist=>{:only=>:name}})).must_equal [{"name"=>"RF", "artist"=>{"name"=>"YM"}}]
  end

  it "should have dataset to_json method work with naked datasets" do
    ds = Album.dataset.naked.with_fetch(:id=>1, :name=>'RF', :artist_id=>2)
    Sequel.parse_json(ds.to_json).must_equal [@album.values.inject({}){|h, (k, v)| h[k.to_s] = v; h}]
  end

  it "should have class and dataset to_json method accept blocks for transformations" do
    Album.dataset = Album.dataset.with_fetch(:id=>1, :name=>'RF', :artist_id=>2)
    Sequel.parse_json(Album.to_json{|h| {'data'=>h}}).must_equal('data'=>[@album.values.inject({}){|h, (k, v)| h[k.to_s] = v; h}])
    Sequel.parse_json(Album.dataset.to_json{|h| {'data'=>h}}).must_equal('data'=>[@album.values.inject({}){|h, (k, v)| h[k.to_s] = v; h}])
  end

  it "should have class and dataset to_json method support :instance_block option for instance transformations" do
    Album.dataset = Album.dataset.with_fetch(:id=>1, :name=>'RF', :artist_id=>2)
    Sequel.parse_json(Album.to_json(:instance_block=>lambda{|h| {'data'=>h}})).must_equal [{'data'=>@album.values.inject({}){|h, (k, v)| h[k.to_s] = v; h}}]
    Sequel.parse_json(Album.dataset.to_json(:instance_block=>lambda{|h| {'data'=>h}})).must_equal [{'data'=>@album.values.inject({}){|h, (k, v)| h[k.to_s] = v; h}}]
  end

  it "should have dataset to_json method respect :array option for the array to use" do
    a = Album.new(:name=>'RF', :artist_id=>3)
    Album.array_from_json(Album.to_json(:array=>[a])).must_equal [a]

    a.associations[:artist] = artist = Artist.load(:id=>3, :name=>'YJM')
    Album.array_from_json(Album.to_json(:array=>[a], :include=>:artist), :associations=>:artist).first.artist.must_equal artist

    artist.associations[:albums] = [a]
    x = Artist.array_from_json(Artist.to_json(:array=>[artist], :include=>:albums), :associations=>[:albums])
    x.must_equal [artist]
    x.first.albums.must_equal [a]
  end

  it "should propagate class default options to instance to_json output" do
    class ::Album2 < Sequel::Model
      attr_accessor :blah
      plugin :json_serializer, :naked => true, :except => :id
      columns :id, :name, :artist_id
      many_to_one :artist
    end
    @album2 = Album2.load(:id=>2, :name=>'JK')
    @album2.artist = @artist
    @album2.blah = 'Gak'

    JSON.parse(@album2.to_json).must_equal @album2.values.reject{|k,v| k.to_s == 'id'}.inject({}){|h, (k, v)| h[k.to_s] = v; h}
    JSON.parse(@album2.to_json(:only => :name)).must_equal @album2.values.reject{|k,v| k.to_s != 'name'}.inject({}){|h, (k, v)| h[k.to_s] = v; h}
    JSON.parse(@album2.to_json(:except => :artist_id)).must_equal @album2.values.reject{|k,v| k.to_s == 'artist_id'}.inject({}){|h, (k, v)| h[k.to_s] = v; h}
  end

  it "should handle the :root option to qualify single records" do
    @album.to_json(:root=>true, :except => [:name, :artist_id]).to_s.must_equal '{"album":{"id":1}}'
    @album.to_json(:root=>true, :only => :name).to_s.must_equal '{"album":{"name":"RF"}}'
  end

  it "should handle the :root option to qualify single records of namespaced models" do
    module ::Namespace
      class Album < Sequel::Model
        plugin :json_serializer, :naked=>true
      end
    end
    Namespace::Album.new({}).to_json(:root=>true).to_s.must_equal '{"album":{}}'
    Namespace::Album.dataset = Namespace::Album.dataset.with_fetch([{}])
    Namespace::Album.dataset.to_json(:root=>:collection).to_s.must_equal '{"albums":[{}]}'
    Namespace::Album.dataset.to_json(:root=>:both).to_s.must_equal '{"albums":[{"album":{}}]}'
    Object.send(:remove_const, :Namespace)
  end

  it "should handle the :root option with a string to qualify single records using the string as the key" do
    @album.to_json(:root=>"foo", :except => [:name, :artist_id]).to_s.must_equal '{"foo":{"id":1}}'
    @album.to_json(:root=>"bar", :only => :name).to_s.must_equal '{"bar":{"name":"RF"}}'
  end

  it "should handle the :root=>:both option to qualify a dataset of records" do
    Album.dataset.with_fetch([{:id=>1, :name=>'RF'}, {:id=>1, :name=>'RF'}]).to_json(:root=>:both, :only => :id).to_s.must_equal '{"albums":[{"album":{"id":1}},{"album":{"id":1}}]}'
  end

  it "should handle the :root=>:collection option to qualify just the collection" do
    ds = Album.dataset.with_fetch([{:id=>1, :name=>'RF'}, {:id=>1, :name=>'RF'}])
    ds.to_json(:root=>:collection, :only => :id).to_s.must_equal '{"albums":[{"id":1},{"id":1}]}'
    ds.to_json(:root=>true, :only => :id).to_s.must_equal '{"albums":[{"id":1},{"id":1}]}'
  end

  it "should handle the :root=>:instance option to qualify just the instances" do
    Album.dataset.with_fetch([{:id=>1, :name=>'RF'}, {:id=>1, :name=>'RF'}]).to_json(:root=>:instance, :only => :id).to_s.must_equal '[{"album":{"id":1}},{"album":{"id":1}}]'
  end

  it "should handle the :root=>string option to qualify just the collection using the string as the key" do
    ds = Album.dataset.with_fetch([{:id=>1, :name=>'RF'}, {:id=>1, :name=>'RF'}])
    ds.to_json(:root=>"foos", :only => :id).to_s.must_equal '{"foos":[{"id":1},{"id":1}]}'
    ds.to_json(:root=>"bars", :only => :id).to_s.must_equal '{"bars":[{"id":1},{"id":1}]}'
  end

  it "should use an alias for an included association to qualify an association" do
    JSON.parse(@album.to_json(:include=>Sequel.as(:artist, :singer)).to_s).must_equal JSON.parse('{"id":1,"name":"RF","artist_id":2,"singer":{"id":2,"name":"YJM"}}')
    JSON.parse(@album.to_json(:include=>{Sequel.as(:artist, :singer)=>{:only=>:name}}).to_s).must_equal JSON.parse('{"id":1,"name":"RF","artist_id":2,"singer":{"name":"YJM"}}')
  end

  it "should store the default options in json_serializer_opts" do
    Album.json_serializer_opts.must_equal(:naked=>true)
    c = Class.new(Album)
    a = [:artist]
    c.plugin :json_serializer, :naked=>false, :include=>a
    c.json_serializer_opts.must_equal(:naked=>false, :include=>a)
    c.json_serializer_opts[:include].must_be_same_as a
    c = Class.new(c)
    c.json_serializer_opts.must_equal(:naked=>false, :include=>a)
    c.json_serializer_opts[:include].wont_be_same_as a
  end

  it "should work correctly when subclassing" do
    class ::Artist2 < Artist
      plugin :json_serializer, :only=>:name
    end
    Artist2.from_json(Artist2.load(:id=>2, :name=>'YYY').to_json).must_equal Artist2.load(:name=>'YYY')
    class ::Artist3 < Artist2
      plugin :json_serializer, :naked=>:true
    end
    Sequel.parse_json(Artist3.load(:id=>2, :name=>'YYY').to_json).must_equal("name"=>'YYY')
    Object.send(:remove_const, :Artist2)
    Object.send(:remove_const, :Artist3)
  end

  it "should raise an error if attempting to set a restricted column and :all_columns is not used" do
    Artist.restrict_primary_key
    proc{Artist.from_json(@artist.to_json)}.must_raise(Sequel::MassAssignmentRestriction)
  end

  it "should raise an error if an unsupported association is passed in the :associations option" do
    Artist.association_reflections.delete(:albums)
    proc{Artist.from_json(@artist.to_json(:include=>:albums), :associations=>:albums)}.must_raise(Sequel::Error)
  end

  it "should raise an error if using from_json and JSON parsing returns an array" do
    proc{Artist.from_json([@artist].to_json)}.must_raise(Sequel::Error)
  end

  it "should raise an error if using array_from_json and JSON parsing does not return an array" do
    proc{Artist.array_from_json(@artist.to_json)}.must_raise(Sequel::Error)
  end

  it "should raise an error if using array_from_json and JSON parsing returns an array containing a non-hash" do
    proc{Artist.array_from_json('[[]]')}.must_raise(Sequel::Error)
  end

  it "should handle case in array_from_json where JSON parsing yields array of model instances" do
    begin
      Sequel.singleton_class.class_eval do
        alias parse_json_old parse_json
        define_method(:parse_json) do |json|
          case json
          when '[artists]'
            [Artist.load(:id=>1)]
          when 'artist'
            {:id=>1, 'albums'=>[Album.load(:id=>2)]}
          when 'album'
            {:id=>2, 'artist'=>Artist.load(:id=>1)}
          else
            raise
          end
        end
        alias parse_json parse_json
      end
      Artist.array_from_json('[artists]').must_equal [Artist.load(:id=>1)]

      artist = Artist.new.from_json('artist', :associations=>:albums)
      artist.must_equal Artist.load(:id=>1)
      artist.associations[:albums].must_equal [Album.load(:id=>2)]

      album = Album.new.from_json('album', :associations=>:artist)
      album.must_equal Album.load(:id=>2)
      album.associations[:artist].must_equal Artist.load(:id=>1)
    ensure
      Sequel.singleton_class.class_eval do
        alias parse_json parse_json_old
        remove_method(:parse_json_old)
      end
    end
  end

  it "should raise an error if using an unsupported :associations option" do
    proc{Artist.from_json(@artist.to_json, :associations=>'')}.must_raise(Sequel::Error)
  end

  it "should freeze json serializer opts when model class is frozen" do
    Album.json_serializer_opts[:only] = [:id]
    Album.freeze
    Album.json_serializer_opts.frozen?.must_equal true
    Album.json_serializer_opts[:only].frozen?.must_equal true
  end
end

sequel-5.63.0/spec/extensions/lazy_attributes_spec.rb

require_relative "spec_helper"
require 'yaml'

describe "Sequel::Plugins::LazyAttributes" do
  before do
    @db = Sequel.mock
    def @db.supports_schema_parsing?() true end
    def @db.schema(*a) [[:id, {:type=>:integer}], [:name,{:type=>:string}]] end
    class ::LazyAttributesModel < Sequel::Model(@db[:la])
      plugin :lazy_attributes
      set_columns([:id, :name])
      def self.columns; [:id, :name] end
      singleton_class.send(:alias_method, :columns, :columns)
      lazy_attributes :name
      def self.columns; [:id] end
      set_dataset dataset.with_fetch(proc do |sql|
        if sql !~ /WHERE/
          if sql =~ /name/
            [{:id=>1, :name=>'1'}, {:id=>2, :name=>'2'}]
          else
            [{:id=>1}, {:id=>2}]
          end
        else
          if sql =~ /id IN \(([\d, ]+)\)/
            $1.split(', ')
          elsif sql =~ /id = (\d)/
            [$1]
          end.map do |x|
            if sql =~ /SELECT (la.)?name FROM/
              {:name=>x.to_s}
            else
              {:id=>x.to_i, :name=>x.to_s}
            end
          end
        end
      end)
    end
    @c = ::LazyAttributesModel
    @ds = LazyAttributesModel.dataset
    @db.sqls
  end
  after do
    Object.send(:remove_const, :LazyAttributesModel)
  end

  it "should allow adding additional lazy attributes via plugin :lazy_attributes" do
    @c.set_dataset(@ds.select(:id, :blah))
    @c.dataset.sql.must_equal 'SELECT id, blah FROM la'
    @c.plugin :lazy_attributes, :blah
    @c.dataset.sql.must_equal 'SELECT id FROM la'
  end

  it "should allow adding additional lazy attributes via lazy_attributes" do
    @c.set_dataset(@ds.select(:id, :blah))
    @c.dataset.sql.must_equal 'SELECT id, blah FROM la'
    @c.lazy_attributes :blah
    @c.dataset.sql.must_equal 'SELECT id FROM la'
  end

  it "should handle lazy attributes that are qualified in the selection" do
    @c.set_dataset(@ds.select(Sequel[:la][:id], Sequel[:la][:blah]))
    @c.dataset.sql.must_equal 'SELECT la.id, la.blah FROM la'
    @c.plugin :lazy_attributes, :blah
    @c.dataset.sql.must_equal 'SELECT la.id FROM la'
  end

  with_symbol_splitting "should handle lazy attributes that are qualified in the selection using symbol splitting" do
    @c.set_dataset(@ds.select(:la__id,
:la__blah)) @c.dataset.sql.must_equal 'SELECT la.id, la.blah FROM la' @c.plugin :lazy_attributes, :blah @c.dataset.sql.must_equal 'SELECT la.id FROM la' end it "should remove the attributes given from the SELECT columns of the model's dataset" do @ds.sql.must_equal 'SELECT la.id FROM la' end it "should still typecast correctly in lazy loaded column setters" do m = @c.new m.name = 1 m.name.must_equal '1' end it "should raise error if the model has no primary key" do m = @c.first @c.no_primary_key proc{m.name}.must_raise(Sequel::Error) end it "should lazily load the attribute for a single model object" do m = @c.first m.values.must_equal(:id=>1) m.name.must_equal '1' m.values.must_equal(:id=>1, :name=>'1') @db.sqls.must_equal ['SELECT la.id FROM la LIMIT 1', 'SELECT la.name FROM la WHERE (id = 1) LIMIT 1'] end it "should lazily load the attribute for a frozen model object" do m = @c.first m.freeze m.name.must_equal '1' @db.sqls.must_equal ['SELECT la.id FROM la LIMIT 1', 'SELECT la.name FROM la WHERE (id = 1) LIMIT 1'] m.name.must_equal '1' @db.sqls.must_equal ['SELECT la.name FROM la WHERE (id = 1) LIMIT 1'] end it "should not lazily load the attribute for a single model object if the value already exists" do m = @c.first m.values.must_equal(:id=>1) m[:name] = '1' m.name.must_equal '1' m.values.must_equal(:id=>1, :name=>'1') @db.sqls.must_equal ['SELECT la.id FROM la LIMIT 1'] end it "should not lazily load the attribute for a single model object if it is a new record" do m = @c.new m.values.must_equal({}) m.name.must_be_nil @db.sqls.must_equal [] end it "should eagerly load the attribute for all model objects reteived with it" do ms = @c.all ms.map{|m| m.values}.must_equal [{:id=>1}, {:id=>2}] ms.map{|m| m.name}.must_equal %w'1 2' ms.map{|m| m.values}.must_equal [{:id=>1, :name=>'1'}, {:id=>2, :name=>'2'}] @db.sqls.must_equal ['SELECT la.id FROM la', 'SELECT la.id, la.name FROM la WHERE (la.id IN (1, 2))'] end it "should not have eager loading modify values of rows if it returns unexpected values" do @c.dataset = @c.dataset.with_fetch([{:id=>1}, {:id=>2}, {:id=>3}]) @db.sqls ms = @c.all @db.sqls.must_equal ['SELECT la.id FROM la'] ms.map{|m| m.values}.must_equal [{:id=>1}, {:id=>2}, {:id=>3}] ms[2].name = 'foo' @c.dataset = @c.dataset.with_fetch([{:id=>1, :name=>'b'}, {:id=>2, :name=>'ba'}, {:id=>3, :name=>'bar'}, {:id=>4, :name=>'bar2'}]) @db.sqls ms.map{|m| m.name}.must_equal %w'b ba foo' ms.map{|m| m.values}.must_equal [{:id=>1, :name=>'b'}, {:id=>2, :name=>'ba'}, {:id=>3, :name=>'foo'}] @db.sqls.must_equal ['SELECT la.id, la.name FROM la WHERE (la.id IN (1, 2))'] end it "should raise Error if trying to load a lazy attribute for a model without a primary key" do @c.no_primary_key m = @c.first @db.sqls.must_equal ["SELECT la.id FROM la LIMIT 1"] m.values.must_equal(:id=>1) proc{m.name}.must_raise Sequel::Error ms = @c.all @db.sqls.must_equal [ "SELECT la.id FROM la"] proc{ms[0].name}.must_raise Sequel::Error @db.sqls.must_equal [] end it "should not eagerly load the attribute if model instance is frozen, and deal with other frozen instances if not frozen" do ms = @c.all ms.first.freeze ms.map{|m| m.name}.must_equal %w'1 2' @db.sqls.must_equal ['SELECT la.id FROM la', 'SELECT la.name FROM la WHERE (id = 1) LIMIT 1', 'SELECT la.id, la.name FROM la WHERE (la.id IN (2))'] end it "should add the accessors to a module included in the class, so they can be easily overridden" do @c.class_eval do def name "#{super}-blah" end end ms = @c.all ms.map{|m| m.values}.must_equal [{:id=>1}, {:id=>2}] 
ms.map{|m| m.name}.must_equal %w'1-blah 2-blah' ms.map{|m| m.values}.must_equal [{:id=>1, :name=>'1'}, {:id=>2, :name=>'2'}] sqls = @db.sqls ['SELECT la.id, la.name FROM la WHERE (la.id IN (1, 2))', 'SELECT la.id, la.name FROM la WHERE (la.id IN (2, 1))'].must_include(sqls.pop) sqls.must_equal ['SELECT la.id FROM la'] end it "should work with the serialization plugin" do @c.plugin :serialization, :yaml, :name @ds = @c.dataset = @ds.with_fetch([[{:id=>1}, {:id=>2}], [{:id=>1, :name=>"--- 3\n"}, {:id=>2, :name=>"--- 6\n"}], [{:id=>1}], [{:name=>"--- 3\n"}]]) ms = @ds.all ms.map{|m| m.values}.must_equal [{:id=>1}, {:id=>2}] ms.map{|m| m.name}.must_equal [3,6] ms.map{|m| m.values}.must_equal [{:id=>1, :name=>"--- 3\n"}, {:id=>2, :name=>"--- 6\n"}] ms.map{|m| m.deserialized_values}.must_equal [{:name=>3}, {:name=>6}] ms.map{|m| m.name}.must_equal [3,6] sqls = @db.sqls ['SELECT la.id, la.name FROM la WHERE (la.id IN (1, 2))', 'SELECT la.id, la.name FROM la WHERE (la.id IN (2, 1))'].must_include(sqls.pop) sqls.must_equal ['SELECT la.id FROM la'] m = @ds.first m.values.must_equal(:id=>1) m.name.must_equal 3 m.values.must_equal(:id=>1, :name=>"--- 3\n") m.deserialized_values.must_equal(:name=>3) m.name.must_equal 3 @db.sqls.must_equal ["SELECT la.id FROM la LIMIT 1", "SELECT la.name FROM la WHERE (id = 1) LIMIT 1"] end it "should not allow additional lazy attributes after freezing" do @c.plugin :lazy_attributes, :blah @c.freeze proc{@c.lazy_attributes :name}.must_raise RuntimeError, TypeError end it "should not affect db_schema" do db_schema = @c.db_schema @c.lazy_attributes :name @c.db_schema.must_be_same_as(db_schema) end it "should not affect when selecting from a subquery" do @c.dataset = @db[:la].from_self db_schema = @c.db_schema @c.lazy_attributes :name @c.db_schema.must_be_same_as(db_schema) end end describe "Sequel::Plugins::LazyAttributes with composite keys" do before do @db = Sequel.mock def @db.supports_schema_parsing?() true end def @db.schema(*a) [[:id, {:type=>:integer}], [:id2, {:type=>:integer}], [:name,{:type=>:string}]] end class ::LazyAttributesModel < Sequel::Model(@db[:la]) plugin :lazy_attributes set_columns([:id, :id2, :name]) def self.columns; [:id, :id2, :name] end lazy_attributes :name singleton_class.send(:alias_method, :columns, :columns) def self.columns; [:id, :id2] end set_primary_key [:id, :id2] set_dataset dataset.with_fetch(proc do |sql| if sql !~ /WHERE/ if sql =~ /name/ [{:id=>1, :id2=>2, :name=>'1'}, {:id=>1, :id2=>3, :name=>'2'}] else [{:id=>1, :id2=>2}, {:id=>1, :id2=>3}] end else case sql when /\((?:la.)?id, (?:la.)?id2\) IN \(((?:\(\d, \d\)(?:, )?)+)/ $1.gsub(/\D/, '|').split('|').delete_if(&:empty?).each_slice(2) when /id = (\d)\) AND \(id2 = (\d)/ [[$1, $2]] when /id = (\d), id2 = (\d)/ [[$1, $2]] end.map do |x, y| if sql =~ /SELECT (la.)?name FROM/ {:name=>"#{x}-#{y}"} else {:id=>x.to_i, :id2=>y.to_i, :name=>"#{x}-#{y}"} end end end end) end @c = ::LazyAttributesModel @ds = LazyAttributesModel.dataset @db.sqls end after do Object.send(:remove_const, :LazyAttributesModel) end it "should lazily load the attribute for a single model object" do m = @c.first m.values.must_equal(:id=>1, :id2=>2) m.name.must_equal '1-2' m.values.must_equal(:id=>1, :id2=>2, :name=>'1-2') @db.sqls.must_equal ["SELECT la.id, la.id2 FROM la LIMIT 1", "SELECT la.name FROM la WHERE ((id = 1) AND (id2 = 2)) LIMIT 1"] end it "should eagerly load the attribute for all model objects reteived with it" do ms = @c.all ms.map{|m| m.values}.must_equal [{:id=>1, :id2=>2}, {:id=>1, :id2=>3}] 
@db.sqls.must_equal ["SELECT la.id, la.id2 FROM la"] ms.map{|m| m.name}.must_equal %w'1-2 1-3' ms.map{|m| m.values}.must_equal [{:id=>1, :id2=>2, :name=>'1-2'}, {:id=>1, :id2=>3, :name=>'1-3'}] @db.sqls.must_equal ["SELECT la.id, la.id2, la.name FROM la WHERE ((la.id, la.id2) IN ((1, 2), (1, 3)))"] end end ������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/list_spec.rb����������������������������������������������������������0000664�0000000�0000000�00000032050�14342141206�0020543�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "List plugin" do def klass(opts={}) @db = DB c = Class.new(Sequel::Model(@db[:items])) c.class_eval do columns :id, :position, :scope_id, :pos plugin :list, opts self.use_transactions = false end c end before do @c = klass @o = @c.load(:id=>7, :position=>3) @sc = klass(:scope=>:scope_id) @so = @sc.load(:id=>7, :position=>3, :scope_id=>5) @tc = klass(:top=>0) @to = @tc.load(:id=>7, :position=>3) @db.reset end it "should default to using :position as the position field" do @c.position_field.must_equal :position @c.new.list_dataset.sql.must_equal 'SELECT * FROM items ORDER BY position' end it "should accept a :field option to modify the position field" do klass(:field=>:pos).position_field.must_equal :pos end it "should accept a :scope option with a symbol for a single scope column" do @sc.new(:scope_id=>4).list_dataset.sql.must_equal 'SELECT * FROM items WHERE (scope_id = 4) ORDER BY scope_id, position' end it "should accept a :scope option with an array of symbols for multiple scope columns" do ['SELECT * FROM items WHERE ((scope_id = 4) AND (pos = 3)) ORDER BY scope_id, pos, position', 'SELECT * FROM items WHERE ((pos = 3) AND (scope_id = 4)) ORDER BY scope_id, pos, position']. 
must_include(klass(:scope=>[:scope_id, :pos]).new(:scope_id=>4, :pos=>3).list_dataset.sql) end it "should accept a :scope option with a proc for a custom list scope" do klass(:scope=>proc{|o| o.model.dataset.filter(:active).filter(:scope_id=>o.scope_id)}).new(:scope_id=>4).list_dataset.sql.must_equal 'SELECT * FROM items WHERE (active AND (scope_id = 4)) ORDER BY position' end it "should default top of the list to 1" do @c.top_of_list.must_equal 1 end it "should accept a :top option to set top of the list" do @tc.top_of_list.must_equal 0 end it "should modify the order when using the plugin" do c = Class.new(Sequel::Model(:items)) c.dataset.sql.must_equal 'SELECT * FROM items' c.plugin :list c.dataset.sql.must_equal 'SELECT * FROM items ORDER BY position' end it "should be able to access the position field as a class attribute" do @c.position_field.must_equal :position klass(:field=>:pos).position_field.must_equal :pos end it "should be able to access the scope proc as a class attribute" do @c.scope_proc.must_be_nil @sc.scope_proc[@sc.new(:scope_id=>4)].sql.must_equal 'SELECT * FROM items WHERE (scope_id = 4) ORDER BY scope_id, position' end it "should work correctly in subclasses" do c = Class.new(klass(:scope=>:scope_id)) c.position_field.must_equal :position c.scope_proc[c.new(:scope_id=>4)].sql.must_equal 'SELECT * FROM items WHERE (scope_id = 4) ORDER BY scope_id, position' end it "should have at_position return the model object at the given position" do @c.dataset = @c.dataset.with_fetch(:id=>1, :position=>1) @o.at_position(10).must_equal @c.load(:id=>1, :position=>1) @sc.dataset = @sc.dataset.with_fetch(:id=>2, :position=>2, :scope_id=>5) @so.at_position(20).must_equal @sc.load(:id=>2, :position=>2, :scope_id=>5) @db.sqls.must_equal ["SELECT * FROM items WHERE (position = 10) ORDER BY position LIMIT 1", "SELECT * FROM items WHERE ((scope_id = 5) AND (position = 20)) ORDER BY scope_id, position LIMIT 1"] end it "should have position field set to max+1 when creating if not already set" do @c.dataset = @c.dataset.with_autoid(1).with_fetch([[{:pos=>nil}], [{:id=>1, :position=>1}], [{:pos=>1}], [{:id=>2, :position=>2}]]) @c.create.values.must_equal(:id=>1, :position=>1) @c.create.values.must_equal(:id=>2, :position=>2) @db.sqls.must_equal ["SELECT max(position) AS max FROM items LIMIT 1", "INSERT INTO items (position) VALUES (1)", "SELECT * FROM items WHERE (id = 1) ORDER BY position LIMIT 1", "SELECT max(position) AS max FROM items LIMIT 1", "INSERT INTO items (position) VALUES (2)", "SELECT * FROM items WHERE (id = 2) ORDER BY position LIMIT 1"] end it "should have position field set to max+1 in scope when creating if not already set" do @sc.dataset = @sc.dataset.with_autoid(1).with_fetch([[{:pos=>nil}], [{:id=>1, :scope_id=>1, :position=>1}], [{:pos=>1}], [{:id=>2, :scope_id=>1, :position=>2}], [{:pos=>nil}], [{:id=>3, :scope_id=>2, :position=>1}]]) @sc.create(:scope_id=>1).values.must_equal(:id=>1, :scope_id=>1, :position=>1) @sc.create(:scope_id=>1).values.must_equal(:id=>2, :scope_id=>1, :position=>2) @sc.create(:scope_id=>2).values.must_equal(:id=>3, :scope_id=>2, :position=>1) @db.sqls.must_equal ["SELECT max(position) AS max FROM items WHERE (scope_id = 1) LIMIT 1", 'INSERT INTO items (scope_id, position) VALUES (1, 1)', "SELECT * FROM items WHERE (id = 1) ORDER BY scope_id, position LIMIT 1", "SELECT max(position) AS max FROM items WHERE (scope_id = 1) LIMIT 1", 'INSERT INTO items (scope_id, position) VALUES (1, 2)', "SELECT * FROM items WHERE (id = 2) ORDER BY scope_id, 
position LIMIT 1", "SELECT max(position) AS max FROM items WHERE (scope_id = 2) LIMIT 1", 'INSERT INTO items (scope_id, position) VALUES (2, 1)', "SELECT * FROM items WHERE (id = 3) ORDER BY scope_id, position LIMIT 1"] end it "should update positions automatically on deletion" do @o.destroy @db.sqls.must_equal ["DELETE FROM items WHERE (id = 7)", "UPDATE items SET position = (position - 1) WHERE (position > 3)"] end it "should have last_position return the last position in the list" do @c.dataset = @c.dataset.with_fetch(:max=>10) @o.last_position.must_equal 10 @sc.dataset = @sc.dataset.with_fetch(:max=>20) @so.last_position.must_equal 20 @db.sqls.must_equal ["SELECT max(position) AS max FROM items LIMIT 1", "SELECT max(position) AS max FROM items WHERE (scope_id = 5) LIMIT 1"] end it "should have list_dataset return the model's dataset for non scoped lists" do @o.list_dataset.sql.must_equal 'SELECT * FROM items ORDER BY position' end it "should have list dataset return a scoped dataset for scoped lists" do @so.list_dataset.sql.must_equal 'SELECT * FROM items WHERE (scope_id = 5) ORDER BY scope_id, position' end it "should have move_down without an argument move down a single position" do @c.dataset = @c.dataset.with_fetch(:max=>10) @o.move_down.must_equal @o @o.position.must_equal 4 @db.sqls.must_equal ["SELECT max(position) AS max FROM items LIMIT 1", "UPDATE items SET position = (position - 1) WHERE ((position >= 4) AND (position <= 4))", "UPDATE items SET position = 4 WHERE (id = 7)"] end it "should have move_down with an argument move down the given number of positions" do @c.dataset = @c.dataset.with_fetch(:max=>10) @o.move_down(3).must_equal @o @o.position.must_equal 6 @db.sqls.must_equal ["SELECT max(position) AS max FROM items LIMIT 1", "UPDATE items SET position = (position - 1) WHERE ((position >= 4) AND (position <= 6))", "UPDATE items SET position = 6 WHERE (id = 7)"] end it "should have move_down with a negative argument move up the given number of positions" do @o.move_down(-1).must_equal @o @o.position.must_equal 2 @db.sqls.must_equal ["UPDATE items SET position = (position + 1) WHERE ((position >= 2) AND (position < 3))", "UPDATE items SET position = 2 WHERE (id = 7)"] end it "should have move_to handle out of range targets" do @o.move_to(0) @o.position.must_equal 1 @c.dataset = @c.dataset.with_fetch(:max=>10) @o.move_to(11) @o.position.must_equal 10 end it "should have move_to use a transaction if the instance is configured to use transactions" do @o.use_transactions = true @o.move_to(2) @db.sqls.must_equal ["BEGIN", "UPDATE items SET position = (position + 1) WHERE ((position >= 2) AND (position < 3))", "UPDATE items SET position = 2 WHERE (id = 7)", "COMMIT"] end it "should have move_to do nothing if the target position is the same as the current position" do @o.use_transactions = true @o.move_to(@o.position).must_equal @o @o.position.must_equal 3 @db.sqls.must_equal [] end it "should have move to shift entries correctly between current and target if moving up" do @o.move_to(2) @db.sqls.first.must_equal "UPDATE items SET position = (position + 1) WHERE ((position >= 2) AND (position < 3))" end it "should have move to shift entries correctly between current and target if moving down" do @c.dataset = @c.dataset.with_fetch(:max=>10) @o.move_to(4) @db.sqls[1].must_equal "UPDATE items SET position = (position - 1) WHERE ((position >= 4) AND (position <= 4))" end it "should have move_to_bottom move the item to the last position" do @c.dataset = @c.dataset.with_fetch(:max=>10) 
@o.move_to_bottom @db.sqls.must_equal ["SELECT max(position) AS max FROM items LIMIT 1", "UPDATE items SET position = (position - 1) WHERE ((position >= 4) AND (position <= 10))", "UPDATE items SET position = 10 WHERE (id = 7)"] end it "should have move_to_top move the item to the first position" do @o.move_to_top @db.sqls.must_equal ["UPDATE items SET position = (position + 1) WHERE ((position >= 1) AND (position < 3))", "UPDATE items SET position = 1 WHERE (id = 7)"] end it "should have move_to_top use position 0 when :top_of_list is 0" do @to.move_to_top @db.sqls.must_equal ["UPDATE items SET position = (position + 1) WHERE ((position >= 0) AND (position < 3))", "UPDATE items SET position = 0 WHERE (id = 7)"] end it "should have move_up without an argument move up a single position" do @o.move_up.must_equal @o @o.position.must_equal 2 @db.sqls.must_equal ["UPDATE items SET position = (position + 1) WHERE ((position >= 2) AND (position < 3))", "UPDATE items SET position = 2 WHERE (id = 7)"] end it "should have move_up with an argument move up the given number of positions" do @o.move_up(2).must_equal @o @o.position.must_equal 1 @db.sqls.must_equal ["UPDATE items SET position = (position + 1) WHERE ((position >= 1) AND (position < 3))", "UPDATE items SET position = 1 WHERE (id = 7)"] end it "should have move_up with a negative argument move down the given number of positions" do @c.dataset = @c.dataset.with_fetch(:max=>10) @o.move_up(-1).must_equal @o @o.position.must_equal 4 @db.sqls.must_equal ["SELECT max(position) AS max FROM items LIMIT 1", "UPDATE items SET position = (position - 1) WHERE ((position >= 4) AND (position <= 4))", "UPDATE items SET position = 4 WHERE (id = 7)"] end it "should have next return the next entry in the list if not given an argument" do @c.dataset = @c.dataset.with_fetch(:id=>9, :position=>4) @o.next.must_equal @c.load(:id=>9, :position=>4) @db.sqls.must_equal ["SELECT * FROM items WHERE (position = 4) ORDER BY position LIMIT 1"] end it "should have next return the entry the given number of positions below the instance if given an argument" do @c.dataset = @c.dataset.with_fetch(:id=>9, :position=>5) @o.next(2).must_equal @c.load(:id=>9, :position=>5) @db.sqls.must_equal ["SELECT * FROM items WHERE (position = 5) ORDER BY position LIMIT 1"] end it "should have next return receiver if given zero" do @o.next(0).must_be_same_as @o @db.sqls.must_equal [] end it "should have next return a previous entry if given a negative argument" do @c.dataset = @c.dataset.with_fetch(:id=>9, :position=>2) @o.next(-1).must_equal @c.load(:id=>9, :position=>2) @db.sqls.must_equal ["SELECT * FROM items WHERE (position = 2) ORDER BY position LIMIT 1"] end it "should have position_value return the value of the position field" do @o.position_value.must_equal 3 end it "should have prev return the previous entry in the list if not given an argument" do @c.dataset = @c.dataset.with_fetch(:id=>9, :position=>2) @o.prev.must_equal @c.load(:id=>9, :position=>2) @db.sqls.must_equal ["SELECT * FROM items WHERE (position = 2) ORDER BY position LIMIT 1"] end it "should have prev return the entry the given number of positions above the instance if given an argument" do @c.dataset = @c.dataset.with_fetch(:id=>9, :position=>1) @o.prev(2).must_equal @c.load(:id=>9, :position=>1) @db.sqls.must_equal ["SELECT * FROM items WHERE (position = 1) ORDER BY position LIMIT 1"] end it "should have prev return a following entry if given a negative argument" do @c.dataset = @c.dataset.with_fetch(:id=>9, 
:position=>4) @o.prev(-1).must_equal @c.load(:id=>9, :position=>4) @db.sqls.must_equal ["SELECT * FROM items WHERE (position = 4) ORDER BY position LIMIT 1"] end it "should work correctly with validation on position" do @c.class_eval do def validate super errors.add(:position, "not set") unless position end end @c.create @db.sqls.must_equal ["SELECT max(position) AS max FROM items LIMIT 1", "INSERT INTO items (position) VALUES (2)", "SELECT * FROM items WHERE (id = 10) ORDER BY position LIMIT 1"] end end

sequel-5.63.0/spec/extensions/looser_typecasting_spec.rb

require_relative "spec_helper" describe "LooserTypecasting Extension" do before do @db = Sequel.mock def @db.supports_schema_parsing?() true end def @db.schema(*args) [[:id, {}], [:z, {:type=>:float}], [:b, {:type=>:integer}], [:d, {:type=>:decimal}], [:s, {:type=>:string}]] end @c = Class.new(Sequel::Model(@db[:items])) @db.extension(:looser_typecasting) @c.instance_eval do @columns = [:id, :b, :z, :d, :s] def columns; @columns; end end end it "should not raise errors for invalid strings in integer columns" do @c.new(:b=>'a').b.must_equal 0 @c.new(:b=>'a').b.must_be_kind_of(Integer) end it "should not raise errors for invalid strings in float columns" do @c.new(:z=>'a').z.must_equal 0.0 @c.new(:z=>'a').z.must_be_kind_of(Float) end it "should not raise errors for hash or array input to string columns" do @c.new(:s=>'a').s.must_equal 'a' @c.new(:s=>[]).s.must_be_kind_of(String) @c.new(:s=>{}).s.must_be_kind_of(String) end it "should not raise errors for invalid strings in decimal columns" do @c.new(:d=>'a').d.must_equal 0.0 @c.new(:d=>'a').d.must_be_kind_of(BigDecimal) end it "should not affect conversions of other types in decimal columns" do @c.new(:d=>1).d.must_equal 1 @c.new(:d=>1).d.must_be_kind_of(BigDecimal) end end

sequel-5.63.0/spec/extensions/many_through_many_spec.rb

require_relative "spec_helper" describe Sequel::Model, "many_through_many" do before do class ::Artist < Sequel::Model attr_accessor :yyy
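# Illustrative sketch of the association form under test. Each element of the
# through array is [join_table, left_key, right_key], read from the current
# model toward the associated model (table/column names as used in these specs):
#
#   Artist.plugin :many_through_many
#   Artist.many_through_many :tags, [[:albums_artists, :artist_id, :album_id],
#                                    [:albums, :id, :id],
#                                    [:albums_tags, :album_id, :tag_id]]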
columns :id plugin :many_through_many end class ::Tag < Sequel::Model columns :id, :h1, :h2 end @c1 = Artist @c2 = Tag @dataset = @c2.dataset = @c2.dataset.with_fetch(:id=>1) DB.reset end after do Object.send(:remove_const, :Artist) Object.send(:remove_const, :Tag) end it "should raise an error if current class does not have a primary key, and :left_primary_key is not specified" do @c1.no_primary_key proc{@c1.many_through_many :tags, :through=>[[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]]}.must_raise(Sequel::Error) DB.sqls.must_equal [] end it "should raise an error if associated class does not have a primary key, and :right_primary_key is not specified" do @c2.no_primary_key @c1.many_through_many :tags, :through=>[[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] n = @c1.load(:id => 1234) proc{n.tags}.must_raise(Sequel::Error) DB.sqls.must_equal [] end it "should populate :key_hash and :id_map option correctly for custom eager loaders" do khs = [] pr = proc{|h| khs << [h[:key_hash], h[:id_map]]} @c1.many_through_many :tags, :through=>[[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager_loader=>pr @c1.eager(:tags).all khs.must_equal [[{:id=>{1=>[Artist.load(:x=>1, :id=>1)]}}, {1=>[Artist.load(:x=>1, :id=>1)]}]] khs.clear @c1.many_through_many :tags, :through=>[[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :left_primary_key=>:id, :left_primary_key_column=>:i, :eager_loader=>pr, :eager_loader_key=>:id @c1.eager(:tags).all khs.must_equal [[{:id=>{1=>[Artist.load(:x=>1, :id=>1)]}}, {1=>[Artist.load(:x=>1, :id=>1)]}]] end it "should support using a custom :left_primary_key option when eager loading many_to_many associations" do @c1.send(:define_method, :id3){id*3} @c1.dataset = @c1.dataset.with_fetch(:id=>1) @c2.dataset = @c2.dataset.with_fetch(:id=>4, :x_foreign_key_x=>3) @c1.many_through_many :tags, :through=>[[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :left_primary_key=>:id3 a = @c1.eager(:tags).all a.must_equal [@c1.load(:id => 1)] DB.sqls.must_equal ['SELECT * FROM artists', "SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (3))"] a.first.tags.must_equal [@c2.load(:id=>4)] DB.sqls.must_equal [] end it "should handle a :predicate_key option to change the SQL used in the lookup" do @c1.dataset = @c1.dataset.with_fetch(:id=>1) @c2.dataset = @c2.dataset.with_fetch(:id=>4, :x_foreign_key_x=>1) @c1.many_through_many :tags, :through=>[[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :predicate_key=>(Sequel[:albums_artists][:artist_id] / 3) a = @c1.eager(:tags).all a.must_equal [@c1.load(:id => 1)] DB.sqls.must_equal ['SELECT * FROM artists', "SELECT tags.*, (albums_artists.artist_id / 3) AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((albums_artists.artist_id / 3) IN (1))"] a.first.tags.must_equal [@c2.load(:id=>4)] end it "should handle schema qualified tables" do @c1.many_through_many 
:tags, :through=>[[Sequel[:myschema][:albums_artists], :artist_id, :album_id], [Sequel[:myschema][:albums], :id, :id], [Sequel[:myschema][:albums_tags], :album_id, :tag_id]] @c1.load(:id=>1).tags_dataset.sql.must_equal "SELECT tags.* FROM tags INNER JOIN myschema.albums_tags ON (myschema.albums_tags.tag_id = tags.id) INNER JOIN myschema.albums ON (myschema.albums.id = myschema.albums_tags.album_id) INNER JOIN myschema.albums_artists ON (myschema.albums_artists.album_id = myschema.albums.id) WHERE (myschema.albums_artists.artist_id = 1)" @c1.dataset = @c1.dataset.with_fetch(:id=>1) @c2.dataset = @c2.dataset.with_fetch(:id=>4, :x_foreign_key_x=>1) a = @c1.eager(:tags).all a.must_equal [@c1.load(:id => 1)] DB.sqls.must_equal ['SELECT * FROM artists', "SELECT tags.*, myschema.albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN myschema.albums_tags ON (myschema.albums_tags.tag_id = tags.id) INNER JOIN myschema.albums ON (myschema.albums.id = myschema.albums_tags.album_id) INNER JOIN myschema.albums_artists ON (myschema.albums_artists.album_id = myschema.albums.id) WHERE (myschema.albums_artists.artist_id IN (1))"] Tag.dataset.columns(:id, :h1, :h2) @c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id, tags.h1, tags.h2 FROM artists LEFT OUTER JOIN myschema.albums_artists AS albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN myschema.albums AS albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN myschema.albums_tags AS albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id)' end with_symbol_splitting "should handle schema qualified table symbols" do @c1.many_through_many :tags, :through=>[[:myschema__albums_artists, :artist_id, :album_id], [:myschema__albums, :id, :id], [:myschema__albums_tags, :album_id, :tag_id]] @c1.load(:id=>1).tags_dataset.sql.must_equal "SELECT tags.* FROM tags INNER JOIN myschema.albums_tags ON (myschema.albums_tags.tag_id = tags.id) INNER JOIN myschema.albums ON (myschema.albums.id = myschema.albums_tags.album_id) INNER JOIN myschema.albums_artists ON (myschema.albums_artists.album_id = myschema.albums.id) WHERE (myschema.albums_artists.artist_id = 1)" @c1.dataset = @c1.dataset.with_fetch(:id=>1) @c2.dataset = @c2.dataset.with_fetch(:id=>4, :x_foreign_key_x=>1) a = @c1.eager(:tags).all a.must_equal [@c1.load(:id => 1)] DB.sqls.must_equal ['SELECT * FROM artists', "SELECT tags.*, myschema.albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN myschema.albums_tags ON (myschema.albums_tags.tag_id = tags.id) INNER JOIN myschema.albums ON (myschema.albums.id = myschema.albums_tags.album_id) INNER JOIN myschema.albums_artists ON (myschema.albums_artists.album_id = myschema.albums.id) WHERE (myschema.albums_artists.artist_id IN (1))"] Tag.dataset.columns(:id, :h1, :h2) @c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id, tags.h1, tags.h2 FROM artists LEFT OUTER JOIN myschema.albums_artists AS albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN myschema.albums AS albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN myschema.albums_tags AS albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id)' end it "should default to associating to other models in the same scope" do begin class ::AssociationModuleTest class Artist < Sequel::Model plugin :many_through_many many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], 
[:albums_tags, :album_id, :tag_id]] end class Tag < Sequel::Model end end ::AssociationModuleTest::Artist.association_reflection(:tags).associated_class.must_equal ::AssociationModuleTest::Tag ensure Object.send(:remove_const, :AssociationModuleTest) end end it "should raise an error if an invalid form of through is used" do proc{@c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id]]}.must_raise(Sequel::Error) proc{@c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], {:table=>:album_tags, :left=>:album_id}]}.must_raise(Sequel::Error) proc{@c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], :album_tags]}.must_raise(Sequel::Error) end it "should allow only two arguments with the :through option" do @c1.many_through_many :tags, :through=>[[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] n = @c1.load(:id => 1234) n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234)' n.tags.must_equal [@c2.load(:id=>1)] end it "should be clonable" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.many_through_many :other_tags, :clone=>:tags n = @c1.load(:id => 1234) n.other_tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234)' n.tags.must_equal [@c2.load(:id=>1)] end it "should use join tables given" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] n = @c1.load(:id => 1234) n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234)' n.tags.must_equal [@c2.load(:id=>1)] end it "should handle multiple aliasing of tables" do begin class ::Album < Sequel::Model end @c1.many_through_many :albums, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_artists, :album_id, :artist_id], [:artists, :id, :id], [:albums_artists, :artist_id, :album_id]] n = @c1.load(:id => 1234) n.albums_dataset.sql.must_equal 'SELECT albums.* FROM albums INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) INNER JOIN artists ON (artists.id = albums_artists.artist_id) INNER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) INNER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_0.album_id) INNER JOIN albums_artists AS albums_artists_1 ON (albums_artists_1.album_id = albums_0.id) WHERE (albums_artists_1.artist_id = 1234)' n.albums.must_equal [Album.load(:id=>1, :x=>1)] ensure Object.send(:remove_const, :Album) end end it "should use explicit class if given" do @c1.many_through_many :albums_tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag n = @c1.load(:id => 1234)
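# Sketch of the :class option asserted here: the association name does not
# have to match the associated class name when :class is given explicitly.
#
#   Artist.many_through_many :albums_tags, through_array, :class=>Tag
#   # where through_array is the same [[join_table, left_key, right_key], ...]
#   # structure used throughout these specs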
n.albums_tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234)' n.albums_tags.must_equal [@c2.load(:id=>1)] end it "should accept :left_primary_key and :right_primary_key option for primary keys to use in current and associated table" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :right_primary_key=>:tag_id, :left_primary_key=>:yyy n = @c1.load(:id => 1234) n.yyy = 85 n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.tag_id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 85)' n.tags.must_equal [@c2.load(:id=>1)] end it "should handle composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] n = @c1.load(:id => 1234) n.yyy = 85 n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((albums_artists.b1 = 1234) AND (albums_artists.b2 = 85))' n.tags.must_equal [@c2.load(:id=>1)] end it "should allowing filtering by many_through_many associations" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.filter(:tags=>@c2.load(:id=>1234)).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id = 1234) AND (albums_artists.artist_id IS NOT NULL))))' end it "should allowing filtering by many_through_many associations with a single through table" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id]] @c1.filter(:tags=>@c2.load(:id=>1234)).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM albums_artists WHERE ((albums_artists.album_id = 1234) AND (albums_artists.artist_id IS NOT NULL))))' end it "should allowing filtering by many_through_many associations with aliased tables" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums_artists, :id, :id], [:albums_artists, :album_id, :tag_id]] @c1.filter(:tags=>@c2.load(:id=>1234)).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.id = albums_artists.album_id) INNER JOIN albums_artists AS albums_artists_1 ON (albums_artists_1.album_id = albums_artists_0.id) WHERE ((albums_artists_1.tag_id = 1234) AND (albums_artists.artist_id IS NOT NULL))))' end it "should allowing filtering by many_through_many associations with composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, 
[:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.filter(:tags=>@c2.load(:h1=>1234, :h2=>85)).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE ((albums_tags.g1 = 1234) AND (albums_tags.g2 = 85) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL))))' end it "should allowing filtering by many_through_many associations with :conditions" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.filter(:tags=>@c2.load(:id=>1234)).sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id = 1234))))" end it "should allowing filtering by many_through_many associations with :conditions with a single through table" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id]], :conditions=>{:name=>'A'} @c1.filter(:tags=>@c2.load(:id=>1234)).sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_artists ON (albums_artists.album_id = tags.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id = 1234))))" end it "should allowing filtering by many_through_many associations with :conditions and composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.filter(:tags=>@c2.load(:id=>1, :h1=>1234, :h2=>85)).sql.must_equal "SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id = 1))))" end it "should allowing filtering by many_through_many associations with :limit" do @c2.dataset = @c2.dataset.with_extend{def supports_window_functions?; true end} @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :limit=>10 @c1.filter(:tags=>@c2.load(:id=>1234)).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((albums_artists.artist_id IS NOT NULL) AND ((albums_artists.artist_id, tags.id) IN (SELECT b, c FROM (SELECT albums_artists.artist_id AS b, tags.id AS c, row_number() OVER (PARTITION BY albums_artists.artist_id) AS x_sequel_row_number_x FROM tags 
INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id)) AS t1 WHERE (x_sequel_row_number_x <= 10))) AND (tags.id = 1234))))' end it "should allowing filtering by many_through_many associations with :limit and composite keys" do @c2.dataset = @c2.dataset.with_extend{def supports_window_functions?; true end} @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :limit=>10 @c1.filter(:tags=>@c2.load(:id=>1, :h1=>1234, :h2=>85)).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND ((albums_artists.b1, albums_artists.b2, tags.id) IN (SELECT b, c, d FROM (SELECT albums_artists.b1 AS b, albums_artists.b2 AS c, tags.id AS d, row_number() OVER (PARTITION BY albums_artists.b1, albums_artists.b2) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2))) AS t1 WHERE (x_sequel_row_number_x <= 10))) AND (tags.id = 1))))' end it "should allowing filtering by many_through_many associations with :limit and :conditions" do @c2.dataset = @c2.dataset.with_extend{def supports_window_functions?; true end} @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'}, :limit=>10 @c1.filter(:tags=>@c2.load(:id=>1234)).sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND ((albums_artists.artist_id, tags.id) IN (SELECT b, c FROM (SELECT albums_artists.artist_id AS b, tags.id AS c, row_number() OVER (PARTITION BY albums_artists.artist_id) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (name = 'A')) AS t1 WHERE (x_sequel_row_number_x <= 10))) AND (tags.id = 1234))))" end it "should allowing filtering by many_through_many associations with :limit and :conditions and composite keys" do @c2.dataset = @c2.dataset.with_extend{def supports_window_functions?; true end} @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'}, :limit=>10 @c1.filter(:tags=>@c2.load(:id=>1, :h1=>1234, :h2=>85)).sql.must_equal "SELECT * FROM artists WHERE ((artists.id, 
artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND ((albums_artists.b1, albums_artists.b2, tags.id) IN (SELECT b, c, d FROM (SELECT albums_artists.b1 AS b, albums_artists.b2 AS c, tags.id AS d, row_number() OVER (PARTITION BY albums_artists.b1, albums_artists.b2) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE (name = 'A')) AS t1 WHERE (x_sequel_row_number_x <= 10))) AND (tags.id = 1))))" end it "should allowing excluding by many_through_many associations" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.exclude(:tags=>@c2.load(:id=>1234)).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id = 1234) AND (albums_artists.artist_id IS NOT NULL)))) OR (artists.id IS NULL))' end it "should allowing excluding by many_through_many associations with composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.exclude(:tags=>@c2.load(:h1=>1234, :h2=>85)).sql.must_equal 'SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE ((albums_tags.g1 = 1234) AND (albums_tags.g2 = 85) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL)))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))' end it "should allowing excluding by many_through_many associations with :conditions" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.exclude(:tags=>@c2.load(:id=>1234)).sql.must_equal "SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id = 1234)))) OR (artists.id IS NULL))" end it "should allowing excluding by many_through_many associations with :conditions and composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.exclude(:tags=>@c2.load(:id=>1, 
:h1=>1234, :h2=>85)).sql.must_equal "SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id = 1)))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))" end it "should allowing filtering by multiple many_through_many associations" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.filter(:tags=>[@c2.load(:id=>1234), @c2.load(:id=>2345)]).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id IN (1234, 2345)) AND (albums_artists.artist_id IS NOT NULL))))' end it "should allowing filtering by multiple many_through_many associations with composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.filter(:tags=>[@c2.load(:h1=>1234, :h2=>85), @c2.load(:h1=>2345, :h2=>95)]).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE (((albums_tags.g1, albums_tags.g2) IN ((1234, 85), (2345, 95))) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL))))' end it "should allowing filtering by multiple many_through_many associations with :conditions" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.filter(:tags=>[@c2.load(:id=>1234), @c2.load(:id=>2345)]).sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id IN (1234, 2345)))))" end it "should allowing filtering by multiple many_through_many associations with :conditions and composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.filter(:tags=>[@c2.load(:id=>1, :h1=>1234, :h2=>85), @c2.load(:id=>2, :h1=>2345, :h2=>95)]).sql.must_equal "SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) 
AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id IN (1, 2)))))" end it "should allowing excluding by multiple many_through_many associations" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.exclude(:tags=>[@c2.load(:id=>1234), @c2.load(:id=>2345)]).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id IN (1234, 2345)) AND (albums_artists.artist_id IS NOT NULL)))) OR (artists.id IS NULL))' end it "should allowing excluding by multiple many_through_many associations with composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.exclude(:tags=>[@c2.load(:h1=>1234, :h2=>85), @c2.load(:h1=>2345, :h2=>95)]).sql.must_equal 'SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE (((albums_tags.g1, albums_tags.g2) IN ((1234, 85), (2345, 95))) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL)))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))' end it "should allowing excluding by multiple many_through_many associations with :conditions" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.exclude(:tags=>[@c2.load(:id=>1234), @c2.load(:id=>2345)]).sql.must_equal "SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id IN (1234, 2345))))) OR (artists.id IS NULL))" end it "should allowing excluding by multiple many_through_many associations with :conditions and composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.exclude(:tags=>[@c2.load(:id=>1, :h1=>1234, :h2=>85), @c2.load(:id=>2, :h1=>2345, :h2=>95)]).sql.must_equal "SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id IN (1, 2))))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))" end it "should allowing filtering/excluding many_through_many associations with NULL values" do 
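# A new (unsaved) Tag has a NULL primary key, so no row can match it: the
# inclusion filter reduces to constant false ('f') and the exclusion to
# constant true ('t'), as asserted below.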
@c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.filter(:tags=>@c2.new).sql.must_equal 'SELECT * FROM artists WHERE \'f\'' @c1.exclude(:tags=>@c2.new).sql.must_equal 'SELECT * FROM artists WHERE \'t\'' end it "should allowing filtering by many_through_many association datasets" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.filter(:tags=>@c2.filter(:x=>1)).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id IN (SELECT tags.id FROM tags WHERE ((x = 1) AND (tags.id IS NOT NULL)))) AND (albums_artists.artist_id IS NOT NULL))))' end it "should allowing filtering by many_through_many association datasets with composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.filter(:tags=>@c2.filter(:x=>1)).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE (((albums_tags.g1, albums_tags.g2) IN (SELECT tags.h1, tags.h2 FROM tags WHERE ((x = 1) AND (tags.h1 IS NOT NULL) AND (tags.h2 IS NOT NULL)))) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL))))' end it "should allowing filtering by many_through_many association datasets with :conditions" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.filter(:tags=>@c2.filter(:x=>1)).sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1))))))" end it "should allowing filtering by many_through_many association datasets with :conditions and composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.filter(:tags=>@c2.filter(:x=>1)).sql.must_equal "SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1))))))" end it "should allowing excluding by many_through_many association datasets" do @c1.many_through_many 
:tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.exclude(:tags=>@c2.filter(:x=>1)).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id IN (SELECT tags.id FROM tags WHERE ((x = 1) AND (tags.id IS NOT NULL)))) AND (albums_artists.artist_id IS NOT NULL)))) OR (artists.id IS NULL))' end it "should allowing excluding by many_through_many association datasets with composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.exclude(:tags=>@c2.filter(:x=>1)).sql.must_equal 'SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE (((albums_tags.g1, albums_tags.g2) IN (SELECT tags.h1, tags.h2 FROM tags WHERE ((x = 1) AND (tags.h1 IS NOT NULL) AND (tags.h2 IS NOT NULL)))) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL)))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))' end it "should allowing excluding by many_through_many association datasets with :conditions" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.exclude(:tags=>@c2.filter(:x=>1)).sql.must_equal "SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1)))))) OR (artists.id IS NULL))" end it "should allowing excluding by many_through_many association datasets with :conditions and composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.exclude(:tags=>@c2.filter(:x=>1)).sql.must_equal "SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1)))))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))" end it "should support a :conditions option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:a=>32} n = @c1.load(:id => 1234) n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON 
(albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((a = 32) AND (albums_artists.artist_id = 1234))' n.tags.must_equal [@c2.load(:id=>1)] @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>Sequel.lit('a = ?', 42) n = @c1.load(:id => 1234) n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((a = 42) AND (albums_artists.artist_id = 1234))' n.tags.must_equal [@c2.load(:id=>1)] end it "should support an :order option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :order=>:blah n = @c1.load(:id => 1234) n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) ORDER BY blah' n.tags.must_equal [@c2.load(:id=>1)] end it "should support an array for the :order option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :order=>[:blah1, :blah2] n = @c1.load(:id => 1234) n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) ORDER BY blah1, blah2' n.tags.must_equal [@c2.load(:id=>1)] end it "should support a select option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :select=>:blah n = @c1.load(:id => 1234) n.tags_dataset.sql.must_equal 'SELECT blah FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234)' n.tags.must_equal [@c2.load(:id=>1)] end it "should support an array for the select option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :select=>[Sequel::SQL::ColumnAll.new(:tags), Sequel[:albums][:name]] n = @c1.load(:id => 1234) n.tags_dataset.sql.must_equal 'SELECT tags.*, albums.name FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234)' n.tags.must_equal [@c2.load(:id=>1)] end it "should accept a block" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] do |ds| ds.filter(:yyy=>@yyy) end n = @c1.load(:id => 1234) n.yyy = 85 n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((albums_artists.artist_id = 1234) AND (yyy = 85))' 
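# Sketch of the association block form shown above: the block receives the
# association dataset and returns the dataset to use, and is evaluated in the
# context of the model instance (hence @yyy above resolving to 85):
#
#   many_through_many :tags, through_array do |ds|
#     ds.filter(:yyy=>@yyy)
#   end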
n.tags.must_equal [@c2.load(:id=>1)] end it "should allow the :order option while accepting a block" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :order=>:blah do |ds| ds.filter(:yyy=>@yyy) end n = @c1.load(:id => 1234) n.yyy = 85 n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((albums_artists.artist_id = 1234) AND (yyy = 85)) ORDER BY blah' n.tags.must_equal [@c2.load(:id=>1)] end it "should support a :dataset option that is used instead of the default" do @c1.many_through_many :tags, [[:a, :b, :c]], :dataset=>proc{Tag.join(:albums_tags, [:tag_id]).join(:albums, [:album_id]).join(:albums_artists, [:album_id]).filter(Sequel[:albums_artists][:artist_id]=>id)} n = @c1.load(:id => 1234) n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags USING (tag_id) INNER JOIN albums USING (album_id) INNER JOIN albums_artists USING (album_id) WHERE (albums_artists.artist_id = 1234)' n.tags.must_equal [@c2.load(:id=>1)] end it "should support a :limit option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :limit=>10 n = @c1.load(:id => 1234) n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 10' n.tags.must_equal [@c2.load(:id=>1)] @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :limit=>[10, 10] n = @c1.load(:id => 1234) n.tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 10 OFFSET 10' n.tags.must_equal [@c2.load(:id=>1)] end it "should have the :eager option affect the _dataset method" do @c2.many_to_many :fans @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager=>:fans @c1.load(:id => 1234).tags_dataset.opts[:eager].must_equal(:fans=>nil) end it "should provide an array with all members of the association" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.load(:id => 1234).tags.must_equal [@c2.load(:id=>1)] DB.sqls.must_equal ['SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234)'] end it "should populate cache when accessed" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] n = @c1.load(:id => 1234) n.associations[:tags].must_be_nil DB.sqls.must_equal [] n.tags.must_equal [@c2.load(:id=>1)] DB.sqls.must_equal ['SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER 
JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234)'] n.associations[:tags].must_equal n.tags DB.sqls.length.must_equal 0 end it "should use cache if available" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] n = @c1.load(:id => 1234) n.associations[:tags] = [] n.tags.must_equal [] DB.sqls.must_equal [] end it "should not use cache if asked to reload" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] n = @c1.load(:id => 1234) n.associations[:tags] = [] DB.sqls.must_equal [] n.tags(:reload=>true).must_equal [@c2.load(:id=>1)] DB.sqls.must_equal ['SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234)'] n.associations[:tags].must_equal n.tags DB.sqls.length.must_equal 0 end it "should not add association methods directly to the class" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] im = @c1.instance_methods im.must_include(:tags) im.must_include(:tags_dataset) im2 = @c1.instance_methods(false) im2.wont_include(:tags) im2.wont_include(:tags_dataset) end it "should support after_load association callback" do h = [] @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :after_load=>:al @c1.class_eval do self::Foo = h def al(v) v.each{|x| model::Foo << x.pk * 20} end end @c2.dataset = @c2.dataset.with_fetch([{:id=>20}, {:id=>30}]) p = @c1.load(:id=>10, :parent_id=>20) p.tags h.must_equal [400, 600] p.tags.collect{|a| a.pk}.must_equal [20, 30] end it "should support a :uniq option that removes duplicates from the association" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :uniq=>true @c2.dataset = @c2.dataset.with_fetch([{:id=>20}, {:id=>30}, {:id=>20}, {:id=>30}]) @c1.load(:id=>10).tags.must_equal [@c2.load(:id=>20), @c2.load(:id=>30)] end end describe 'Sequel::Plugins::ManyThroughMany::ManyThroughManyAssociationReflection' do before do class ::Artist < Sequel::Model plugin :many_through_many many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] end class ::Tag < Sequel::Model end DB.reset @ar = Artist.association_reflection(:tags) end after do Object.send(:remove_const, :Artist) Object.send(:remove_const, :Tag) end it "#edges should be an array of joins to make when eager graphing" do @ar.edges.must_equal [{:conditions=>[], :left=>:id, :right=>:artist_id, :table=>:albums_artists, :join_type=>:left_outer, :block=>nil}, {:conditions=>[], :left=>:album_id, :right=>:id, :table=>:albums, :join_type=>:left_outer, :block=>nil}, {:conditions=>[], :left=>:id, :right=>:album_id, :table=>:albums_tags, :join_type=>:left_outer, :block=>nil}] end it "#edges should handle composite keys" do Artist.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] Artist.association_reflection(:tags).edges.must_equal [{:conditions=>[], :left=>[:id, :yyy],
:right=>[:b1, :b2], :table=>:albums_artists, :join_type=>:left_outer, :block=>nil}, {:conditions=>[], :left=>[:c1, :c2], :right=>[:d1, :d2], :table=>:albums, :join_type=>:left_outer, :block=>nil}, {:conditions=>[], :left=>[:e1, :e2], :right=>[:f1, :f2], :table=>:albums_tags, :join_type=>:left_outer, :block=>nil}] end it "#reverse_edges should be an array of joins to make when lazy loading or eager loading" do @ar.reverse_edges.must_equal [{:alias=>:albums_tags, :left=>:tag_id, :right=>:id, :table=>:albums_tags}, {:alias=>:albums, :left=>:id, :right=>:album_id, :table=>:albums}] end it "#reverse_edges should handle composite keys" do Artist.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] Artist.association_reflection(:tags).reverse_edges.must_equal [{:alias=>:albums_tags, :left=>[:g1, :g2], :right=>[:h1, :h2], :table=>:albums_tags}, {:alias=>:albums, :left=>[:e1, :e2], :right=>[:f1, :f2], :table=>:albums}] end it "#reciprocal should be nil" do @ar.reciprocal.must_be_nil end end describe "many_through_many eager loading methods" do before do class ::Artist < Sequel::Model plugin :many_through_many many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] many_through_many :other_tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>:Tag many_through_many :albums, [[:albums_artists, :artist_id, :album_id]] many_through_many :artists, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_artists, :album_id, :artist_id]] end class ::Tag < Sequel::Model plugin :many_through_many many_through_many :tracks, [[:albums_tags, :tag_id, :album_id], [:albums, :id, :id]], :right_primary_key=>:album_id end class ::Album < Sequel::Model end class ::Track < Sequel::Model end Artist.dataset = Artist.dataset.with_fetch(proc do |sql| h = {:id => 1} if sql =~ /FROM artists LEFT OUTER JOIN albums_artists/ h[:tags_id] = 2 h[:albums_0_id] = 3 if sql =~ /LEFT OUTER JOIN albums AS albums_0/ h[:tracks_id] = 4 if sql =~ /LEFT OUTER JOIN tracks/ h[:other_tags_id] = 9 if sql =~ /other_tags\.id AS other_tags_id/ h[:artists_0_id] = 10 if sql =~ /artists_0\.id AS artists_0_id/ end h end) Artist.dataset.columns(:id) Tag.dataset = Tag.dataset.with_fetch(proc do |sql| h = {:id => 2} if sql =~ /albums_artists.artist_id IN \(([18])\)/ h[:x_foreign_key_x] = $1.to_i elsif sql =~ /\(\(albums_artists.b1, albums_artists.b2\) IN \(\(1, 8\)\)\)/ h.merge!(:x_foreign_key_0_x=>1, :x_foreign_key_1_x=>8) end h[:tag_id] = h.delete(:id) if sql =~ /albums_artists.artist_id IN \(8\)/ h end) Album.dataset = Album.dataset.with_fetch(proc do |sql| h = {:id => 3} h[:x_foreign_key_x] = 1 if sql =~ /albums_artists.artist_id IN \(1\)/ h end) Track.dataset = Track.dataset.with_fetch(proc do |sql| h = {:id => 4} h[:x_foreign_key_x] = 2 if sql =~ /albums_tags.tag_id IN \(2\)/ h end) @c1 = Artist DB.reset end after do [:Artist, :Tag, :Album, :Track].each{|x| Object.send(:remove_const, x)} end it "should eagerly load a single many_through_many association" do a = @c1.eager(:tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN 
albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a.first.tags.must_equal [Tag.load(:id=>2)] DB.sqls.length.must_equal 0 end it "should eagerly load multiple associations in a single call" do a = @c1.eager(:tags, :albums).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))', 'SELECT albums.*, albums_artists.artist_id AS x_foreign_key_x FROM albums INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a = a.first a.tags.must_equal [Tag.load(:id=>2)] a.albums.must_equal [Album.load(:id=>3)] DB.sqls.length.must_equal 0 end it "should eagerly load multiple associations in separate calls" do a = @c1.eager(:tags).eager(:albums).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))', 'SELECT albums.*, albums_artists.artist_id AS x_foreign_key_x FROM albums INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a = a.first a.tags.must_equal [Tag.load(:id=>2)] a.albums.must_equal [Album.load(:id=>3)] DB.sqls.length.must_equal 0 end it "should allow cascading of eager loading for associations of associated models" do a = @c1.eager(:tags=>:tracks).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))', 'SELECT tracks.*, albums_tags.tag_id AS x_foreign_key_x FROM tracks INNER JOIN albums ON (albums.id = tracks.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE (albums_tags.tag_id IN (2))'] a = a.first a.tags.must_equal [Tag.load(:id=>2)] a.tags.first.tracks.must_equal [Track.load(:id=>4)] DB.sqls.length.must_equal 0 end it "should cascade eagerly loading when the :eager association option is used" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager=>:tracks a = @c1.eager(:tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))', 'SELECT tracks.*, albums_tags.tag_id AS x_foreign_key_x FROM tracks INNER JOIN albums ON (albums.id = tracks.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE (albums_tags.tag_id IN (2))'] a = a.first a.tags.must_equal [Tag.load(:id=>2)] a.tags.first.tracks.must_equal [Track.load(:id=>4)] DB.sqls.length.must_equal 0 end it "should respect
:eager when lazily loading an association" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager=>:tracks a = @c1.load(:id=>1) a.tags.must_equal [Tag.load(:id=>2)] DB.sqls.must_equal ['SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1)', 'SELECT tracks.*, albums_tags.tag_id AS x_foreign_key_x FROM tracks INNER JOIN albums ON (albums.id = tracks.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE (albums_tags.tag_id IN (2))'] a.tags.first.tracks.must_equal [Track.load(:id=>4)] DB.sqls.length.must_equal 0 end it "should raise error if attempting to eagerly load an association using :eager_graph option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager_graph=>:tracks proc{@c1.eager(:tags).all}.must_raise(Sequel::Error) end it "should respect :eager_graph when lazily loading an association" do Tag.dataset = Tag.dataset.with_fetch(:id=>2, :tracks_id=>4).with_extend{def columns; [:id] end} @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager_graph=>:tracks a = @c1.load(:id=>1) a.tags DB.sqls.must_equal [ 'SELECT tags.id, tracks.id AS tracks_id FROM (SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1)) AS tags LEFT OUTER JOIN albums_tags AS albums_tags_0 ON (albums_tags_0.tag_id = tags.id) LEFT OUTER JOIN albums ON (albums.id = albums_tags_0.album_id) LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id)'] a.tags.must_equal [Tag.load(:id=>2)] a.tags.first.tracks.must_equal [Track.load(:id=>4)] DB.sqls.length.must_equal 0 end it "should respect :conditions when eagerly loading" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:a=>32} a = @c1.eager(:tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((a = 32) AND (albums_artists.artist_id IN (1)))'] a.first.tags.must_equal [Tag.load(:id=>2)] DB.sqls.length.must_equal 0 end it "should respect :order when eagerly loading" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :order=>:blah a = @c1.eager(:tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1)) ORDER BY blah'] a.first.tags.must_equal [Tag.load(:id=>2)] DB.sqls.length.must_equal 0 end it "should use the association's block when eager loading by default" do 
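# This test and the :eager_block test after it pin down precedence for batch
# loading via .eager: by default the association's block is also applied to
# the single eager query, while an :eager_block option, when present,
# replaces the block for eager loads only. A hedged sketch (the :visible
# column is illustrative and not part of this suite):
#
#   Artist.many_through_many :tags, [[:albums_artists, :artist_id, :album_id],
#       [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]],
#     :eager_block=>proc{|ds| ds.where(:visible=>true)}
#   Artist.eager(:tags).all # one batched query, filtered by (visible IS TRUE)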
@c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] do |ds| ds.filter(:a) end a = @c1.eager(:tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (a AND (albums_artists.artist_id IN (1)))'] a.first.tags.must_equal [Tag.load(:id=>2)] DB.sqls.length.must_equal 0 end it "should use the :eager_block option when eager loading if given" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager_block=>proc{|ds| ds.filter(:b)} do |ds| ds.filter(:a) end a = @c1.eager(:tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (b AND (albums_artists.artist_id IN (1)))'] a.first.tags.must_equal [Tag.load(:id=>2)] DB.sqls.length.must_equal 0 end it "should respect the :limit option on a many_through_many association" do @c1.many_through_many :first_two_tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>2 Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_x=>1, :id=>5},{:x_foreign_key_x=>1, :id=>6}]) a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (1 = albums_artists.artist_id) LIMIT 2) AS t1'] a.first.first_two_tags.must_equal [Tag.load(:id=>5), Tag.load(:id=>6)] DB.sqls.length.must_equal 0 @c1.many_through_many :first_two_tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[1,1] Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_x=>1, :id=>6}]) a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (1 = albums_artists.artist_id) LIMIT 1 OFFSET 1) AS t1'] a.first.first_two_tags.must_equal [Tag.load(:id=>6)] DB.sqls.length.must_equal 0 @c1.many_through_many :first_two_tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[nil,1] Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_x=>1, :id=>6}, {:x_foreign_key_x=>1, :id=>7}]) a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON 
(albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (1 = albums_artists.artist_id) OFFSET 1) AS t1'] a.first.first_two_tags.must_equal [Tag.load(:id=>6), Tag.load(:id=>7)] DB.sqls.length.must_equal 0 end it "should respect the :limit option on a many_through_many association using a :ruby strategy" do @c1.many_through_many :first_two_tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>2, :eager_limit_strategy=>:ruby Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_x=>1, :id=>5},{:x_foreign_key_x=>1, :id=>6}, {:x_foreign_key_x=>1, :id=>7}]) a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a.first.first_two_tags.must_equal [Tag.load(:id=>5), Tag.load(:id=>6)] DB.sqls.length.must_equal 0 @c1.many_through_many :first_two_tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[1,1], :eager_limit_strategy=>:ruby a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a.first.first_two_tags.must_equal [Tag.load(:id=>6)] DB.sqls.length.must_equal 0 @c1.many_through_many :first_two_tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[nil,1], :eager_limit_strategy=>:ruby a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a.first.first_two_tags.must_equal [Tag.load(:id=>6), Tag.load(:id=>7)] DB.sqls.length.must_equal 0 end it "should respect the :limit option on a many_through_many association using a :window_function strategy" do @c1.many_through_many :first_two_tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>2, :order=>:name, :eager_limit_strategy=>:window_function Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_x=>1, :id=>5},{:x_foreign_key_x=>1, :id=>6}]).with_extend{def supports_window_functions?; true end} a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x, row_number() OVER (PARTITION BY albums_artists.artist_id ORDER BY name) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE 
(albums_artists.artist_id IN (1))) AS t1 WHERE (x_sequel_row_number_x <= 2)'] a.first.first_two_tags.must_equal [Tag.load(:id=>5), Tag.load(:id=>6)] DB.sqls.length.must_equal 0 @c1.many_through_many :first_two_tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[2,1], :order=>:name, :eager_limit_strategy=>:window_function a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x, row_number() OVER (PARTITION BY albums_artists.artist_id ORDER BY name) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))) AS t1 WHERE ((x_sequel_row_number_x >= 2) AND (x_sequel_row_number_x < 4))'] a.first.first_two_tags.must_equal [Tag.load(:id=>5), Tag.load(:id=>6)] DB.sqls.length.must_equal 0 @c1.many_through_many :first_two_tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[nil,1], :order=>:name, :eager_limit_strategy=>:window_function a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x, row_number() OVER (PARTITION BY albums_artists.artist_id ORDER BY name) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))) AS t1 WHERE (x_sequel_row_number_x >= 2)'] a.first.first_two_tags.must_equal [Tag.load(:id=>5), Tag.load(:id=>6)] DB.sqls.length.must_equal 0 end it "should respect the :limit option on a many_through_many association with composite primary keys on the main table" do @c1.dataset = @c1.dataset.with_fetch([{:id1=>1, :id2=>2}]) Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_0_x=>1, :x_foreign_key_1_x=>2, :id=>5}, {:x_foreign_key_0_x=>1, :x_foreign_key_1_x=>2, :id=>6}]).with_extend{def supports_window_functions?; true end} @c1.set_primary_key([:id1, :id2]) @c1.columns :id1, :id2 @c1.many_through_many :first_two_tags, [[:albums_artists, [:artist_id1, :artist_id2], :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>2, :order=>:name a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id1=>1, :id2=>2)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id1 AS x_foreign_key_0_x, albums_artists.artist_id2 AS x_foreign_key_1_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((1 = albums_artists.artist_id1) AND (2 = albums_artists.artist_id2)) ORDER BY name LIMIT 2) AS t1'] a.first.first_two_tags.must_equal [Tag.load(:id=>5), Tag.load(:id=>6)] DB.sqls.length.must_equal 0 @c1.many_through_many :first_two_tags, [[:albums_artists, [:artist_id1, :artist_id2], :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[2,1] a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id1=>1, :id2=>2)] DB.sqls.must_equal ['SELECT * FROM 
artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id1 AS x_foreign_key_0_x, albums_artists.artist_id2 AS x_foreign_key_1_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((1 = albums_artists.artist_id1) AND (2 = albums_artists.artist_id2)) LIMIT 2 OFFSET 1) AS t1'] a.first.first_two_tags.must_equal [Tag.load(:id=>5), Tag.load(:id=>6)] DB.sqls.length.must_equal 0 end it "should respect the :limit option on a many_through_many association with composite primary keys on the main table using a :window_function strategy" do @c1.dataset = @c1.dataset.with_fetch([{:id1=>1, :id2=>2}]) Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_0_x=>1, :x_foreign_key_1_x=>2, :id=>5}, {:x_foreign_key_0_x=>1, :x_foreign_key_1_x=>2, :id=>6}]).with_extend{def supports_window_functions?; true end} @c1.set_primary_key([:id1, :id2]) @c1.columns :id1, :id2 @c1.many_through_many :first_two_tags, [[:albums_artists, [:artist_id1, :artist_id2], :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>2, :order=>:name, :eager_limit_strategy=>:window_function a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id1=>1, :id2=>2)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id1 AS x_foreign_key_0_x, albums_artists.artist_id2 AS x_foreign_key_1_x, row_number() OVER (PARTITION BY albums_artists.artist_id1, albums_artists.artist_id2 ORDER BY name) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((albums_artists.artist_id1, albums_artists.artist_id2) IN ((1, 2)))) AS t1 WHERE (x_sequel_row_number_x <= 2)'] a.first.first_two_tags.must_equal [Tag.load(:id=>5), Tag.load(:id=>6)] DB.sqls.length.must_equal 0 @c1.many_through_many :first_two_tags, [[:albums_artists, [:artist_id1, :artist_id2], :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[2,1], :order=>:name, :eager_limit_strategy=>:window_function a = @c1.eager(:first_two_tags).all a.must_equal [@c1.load(:id1=>1, :id2=>2)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id1 AS x_foreign_key_0_x, albums_artists.artist_id2 AS x_foreign_key_1_x, row_number() OVER (PARTITION BY albums_artists.artist_id1, albums_artists.artist_id2 ORDER BY name) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((albums_artists.artist_id1, albums_artists.artist_id2) IN ((1, 2)))) AS t1 WHERE ((x_sequel_row_number_x >= 2) AND (x_sequel_row_number_x < 4))'] a.first.first_two_tags.must_equal [Tag.load(:id=>5), Tag.load(:id=>6)] DB.sqls.length.must_equal 0 end it "should raise an error when attempting to eagerly load an association with the :allow_eager option set to false" do @c1.eager(:tags).all @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :allow_eager=>false proc{@c1.eager(:tags).all}.must_raise(Sequel::Error) end it "should respect the association's :select option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, 
:album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :select=>Sequel[:tags][:name] a = @c1.eager(:tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.name, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a.first.tags.must_equal [Tag.load(:id=>2)] DB.sqls.length.must_equal 0 end it "should respect many_through_many association's :left_primary_key and :right_primary_key options" do @c1.send(:define_method, :yyy){values[:yyy]} @c1.dataset = @c1.dataset.with_fetch(:id=>1, :yyy=>8).with_extend{def columns; [:id, :yyy] end} @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :left_primary_key=>:yyy, :right_primary_key=>:tag_id a = @c1.eager(:tags).all a.must_equal [@c1.load(:id=>1, :yyy=>8)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.tag_id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (8))'] a.first.tags.must_equal [Tag.load(:tag_id=>2)] DB.sqls.length.must_equal 0 end it "should handle composite keys" do @c1.send(:define_method, :yyy){values[:yyy]} @c1.dataset = @c1.dataset.with_fetch(:id=>1, :yyy=>8).with_extend{def columns; [:id, :yyy] end} @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] a = @c1.eager(:tags).all a.must_equal [@c1.load(:id=>1, :yyy=>8)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.b1 AS x_foreign_key_0_x, albums_artists.b2 AS x_foreign_key_1_x FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((albums_artists.b1, albums_artists.b2) IN ((1, 8)))'] a.first.tags.must_equal [Tag.load(:id=>2)] DB.sqls.length.must_equal 0 end it "should respect :after_load callbacks on associations when eager loading" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :after_load=>lambda{|o, as| o[:id] *= 2; as.each{|a| a[:id] *= 3}} a = @c1.eager(:tags).all a.must_equal [@c1.load(:id=>2)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a.first.tags.must_equal [Tag.load(:id=>6)] DB.sqls.length.must_equal 0 end it "should raise an error if called without a symbol or hash" do proc{@c1.eager_graph(Object.new)}.must_raise(Sequel::Error) end it "should support association_join" do @c1.association_join(:tags).sql.must_equal "SELECT * FROM artists INNER JOIN albums_artists ON (albums_artists.artist_id = artists.id) INNER 
JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags ON (tags.id = albums_tags.tag_id)" end it "should support custom selects when using association_join" do @c1.select{a(b)}.association_join(:tags).sql.must_equal "SELECT a(b) FROM artists INNER JOIN albums_artists ON (albums_artists.artist_id = artists.id) INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags ON (tags.id = albums_tags.tag_id)" end it "should eagerly graph a single many_through_many association" do a = @c1.eager_graph(:tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id)'] a.first.tags.must_equal [Tag.load(:id=>2)] DB.sqls.length.must_equal 0 end it "should eagerly graph a single many_through_many association using the :window_function strategy" do Tag.dataset = Tag.dataset.with_extend do def supports_window_functions?; true end def columns; literal(opts[:select]) =~ /x_foreign_key_x/ ? [:id, :x_foreign_key_x] : [:id] end end @c1.many_through_many :tags, :clone=>:tags, :limit=>2 a = @c1.eager_graph_with_options(:tags, :limit_strategy=>true).with_fetch(:id=>1, :tags_id=>2).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN (SELECT id, x_foreign_key_x FROM (SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x, row_number() OVER (PARTITION BY albums_artists.artist_id) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id)) AS t1 WHERE (x_sequel_row_number_x <= 2)) AS tags ON (tags.x_foreign_key_x = artists.id)'] a.first.tags.must_equal [Tag.load(:id=>2)] DB.sqls.length.must_equal 0 end it "should eagerly graph multiple associations in a single call" do a = @c1.eager_graph(:tags, :albums).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id, albums_0.id AS albums_0_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_0.album_id)'] a = a.first a.tags.must_equal [Tag.load(:id=>2)] a.albums.must_equal [Album.load(:id=>3)] DB.sqls.length.must_equal 0 end it "should eagerly graph multiple associations in separate calls" do a = @c1.eager_graph(:tags).eager_graph(:albums).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id, albums_0.id AS albums_0_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 
ON (albums_artists_0.artist_id = artists.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_0.album_id)'] a = a.first a.tags.must_equal [Tag.load(:id=>2)] a.albums.must_equal [Album.load(:id=>3)] DB.sqls.length.must_equal 0 end it "should allow cascading of eager graphing for associations of associated models" do a = @c1.eager_graph(:tags=>:tracks).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id, tracks.id AS tracks_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) LEFT OUTER JOIN albums_tags AS albums_tags_0 ON (albums_tags_0.tag_id = tags.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_tags_0.album_id) LEFT OUTER JOIN tracks ON (tracks.album_id = albums_0.id)'] a = a.first a.tags.must_equal [Tag.load(:id=>2)] a.tags.first.tracks.must_equal [Track.load(:id=>4)] DB.sqls.length.must_equal 0 end it "eager graphing should eliminate duplicates caused by cartesian products" do a = @c1.eager_graph(:tags).with_fetch([{:id=>1, :tags_id=>2}, {:id=>1, :tags_id=>3}, {:id=>1, :tags_id=>2}, {:id=>1, :tags_id=>3}]).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id)'] a.first.tags.must_equal [Tag.load(:id=>2), Tag.load(:id=>3)] DB.sqls.length.must_equal 0 end it "should eager graph multiple associations from the same table" do a = @c1.eager_graph(:tags, :other_tags).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id, other_tags.id AS other_tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_0.album_id) LEFT OUTER JOIN albums_tags AS albums_tags_0 ON (albums_tags_0.album_id = albums_0.id) LEFT OUTER JOIN tags AS other_tags ON (other_tags.id = albums_tags_0.tag_id)'] a = a.first a.tags.must_equal [Tag.load(:id=>2)] a.other_tags.must_equal [Tag.load(:id=>9)] DB.sqls.length.must_equal 0 end it "should eager graph a self_referential association" do a = @c1.eager_graph(:tags, :artists).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id, artists_0.id AS artists_0_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_0.album_id) LEFT OUTER JOIN albums_artists AS albums_artists_1 ON (albums_artists_1.album_id = albums_0.id) LEFT OUTER JOIN artists AS 
artists_0 ON (artists_0.id = albums_artists_1.artist_id)'] a = a.first a.tags.must_equal [Tag.load(:id=>2)] a.artists.must_equal [@c1.load(:id=>10)] DB.sqls.length.must_equal 0 end it "eager graphing should give you a plain hash when called without .all" do @c1.eager_graph(:tags, :artists).first.must_equal(:albums_0_id=>3, :artists_0_id=>10, :id=>1, :tags_id=>2) end it "should be able to use eager and eager_graph together" do a = @c1.eager_graph(:tags).eager(:albums).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id)', 'SELECT albums.*, albums_artists.artist_id AS x_foreign_key_x FROM albums INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a = a.first a.tags.must_equal [Tag.load(:id=>2)] a.albums.must_equal [Album.load(:id=>3)] DB.sqls.length.must_equal 0 end it "should handle no associated records when eagerly graphing a single many_through_many association" do a = @c1.eager_graph(:tags).with_fetch(:id=>1, :tags_id=>nil).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id)'] a.first.tags.must_equal [] DB.sqls.length.must_equal 0 end it "should handle no associated records when eagerly graphing multiple many_through_many associations" do a = @c1.eager_graph(:tags, :albums).with_fetch([{:id=>1, :tags_id=>nil, :albums_0_id=>3}, {:id=>1, :tags_id=>2, :albums_0_id=>nil}, {:id=>1, :tags_id=>5, :albums_0_id=>6}, {:id=>7, :tags_id=>nil, :albums_0_id=>nil}]).all a.must_equal [@c1.load(:id=>1), @c1.load(:id=>7)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id, albums_0.id AS albums_0_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_0.album_id)'] a.first.tags.must_equal [Tag.load(:id=>2), Tag.load(:id=>5)] a.first.albums.must_equal [Album.load(:id=>3), Album.load(:id=>6)] a.last.tags.must_equal [] a.last.albums.must_equal [] DB.sqls.length.must_equal 0 end it "should handle missing associated records when cascading eager graphing for associations of associated models" do a = @c1.eager_graph(:tags=>:tracks).with_fetch([{:id=>1, :tags_id=>2, :tracks_id=>4}, {:id=>1, :tags_id=>3, :tracks_id=>nil}, {:id=>2, :tags_id=>nil, :tracks_id=>nil}]).all a.must_equal [@c1.load(:id=>1), @c1.load(:id=>2)] DB.sqls.must_equal ['SELECT artists.id, tags.id AS tags_id, tracks.id AS tracks_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = 
albums_tags.tag_id) LEFT OUTER JOIN albums_tags AS albums_tags_0 ON (albums_tags_0.tag_id = tags.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_tags_0.album_id) LEFT OUTER JOIN tracks ON (tracks.album_id = albums_0.id)'] a.last.tags.must_equal [] a = a.first a.tags.must_equal [Tag.load(:id=>2), Tag.load(:id=>3)] a.tags.first.tracks.must_equal [Track.load(:id=>4)] a.tags.last.tracks.must_equal [] DB.sqls.length.must_equal 0 end it "eager graphing should respect :left_primary_key and :right_primary_key options" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :left_primary_key=>:yyy, :right_primary_key=>:tag_id @c1.dataset = @c1.dataset.with_extend{def columns; [:id, :yyy] end} Tag.dataset = Tag.dataset.with_extend{def columns; [:id, :tag_id] end} a = @c1.eager_graph(:tags).with_fetch(:id=>1, :yyy=>8, :tags_id=>2, :tag_id=>4).all a.must_equal [@c1.load(:id=>1, :yyy=>8)] DB.sqls.must_equal ['SELECT artists.id, artists.yyy, tags.id AS tags_id, tags.tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.yyy) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.tag_id = albums_tags.tag_id)'] a.first.tags.must_equal [Tag.load(:id=>2, :tag_id=>4)] DB.sqls.length.must_equal 0 end it "eager graphing should respect composite keys" do @c1.many_through_many :tags, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:id, :tag_id], :left_primary_key=>[:id, :yyy] @c1.dataset = @c1.dataset.with_extend{def columns; [:id, :yyy] end} Tag.dataset = Tag.dataset.with_extend{def columns; [:id, :tag_id] end} a = @c1.eager_graph(:tags).with_fetch(:id=>1, :yyy=>8, :tags_id=>2, :tag_id=>4).all a.must_equal [@c1.load(:id=>1, :yyy=>8)] DB.sqls.must_equal ['SELECT artists.id, artists.yyy, tags.id AS tags_id, tags.tag_id FROM artists LEFT OUTER JOIN albums_artists ON ((albums_artists.b1 = artists.id) AND (albums_artists.b2 = artists.yyy)) LEFT OUTER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) LEFT OUTER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) LEFT OUTER JOIN tags ON ((tags.id = albums_tags.g1) AND (tags.tag_id = albums_tags.g2))'] a.first.tags.must_equal [Tag.load(:id=>2, :tag_id=>4)] DB.sqls.length.must_equal 0 end it "should respect the association's :graph_select option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :graph_select=>:b a = @c1.eager_graph(:tags).with_fetch(:id=>1, :b=>2).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tags.b FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id)'] a.first.tags.must_equal [Tag.load(:b=>2)] DB.sqls.length.must_equal 0 end it "should respect the association's :graph_join_type option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :graph_join_type=>:inner @c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists INNER JOIN 
albums_artists ON (albums_artists.artist_id = artists.id) INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags ON (tags.id = albums_tags.tag_id)' end it "should respect the association's :join_type option on through" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id, :join_type=>:natural}, [:albums_tags, :album_id, :tag_id]], :graph_join_type=>:inner @c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists INNER JOIN albums_artists ON (albums_artists.artist_id = artists.id) NATURAL JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags ON (tags.id = albums_tags.tag_id)' end it "should respect the association's :conditions option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :conditions=>{:a=>32} @c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON ((tags.id = albums_tags.tag_id) AND (tags.a = 32))' end it "should respect the association's :graph_conditions option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :graph_conditions=>{:a=>42} @c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON ((tags.id = albums_tags.tag_id) AND (tags.a = 42))' @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :graph_conditions=>{:a=>42}, :conditions=>{:a=>32} @c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON ((tags.id = albums_tags.tag_id) AND (tags.a = 42))' end it "should respect the association's :conditions option on through" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id, :conditions=>{:a=>42}}, [:albums_tags, :album_id, :tag_id]] @c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON ((albums.id = albums_artists.album_id) AND (albums.a = 42)) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id)' end it "should respect the association's :graph_block option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :graph_block=>proc{|ja,lja,js| {Sequel.qualify(ja, :active)=>true}} 
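# The :graph_block proc is passed through to the join made by eager_graph:
# ja is the alias of the table being joined (tags here), lja is the alias of
# the previously joined table, and js is the array of joins made so far.
# Its return value is ANDed into the JOIN's ON clause, which is why the
# expected SQL below carries the extra qualified (tags.active IS TRUE)
# condition. A hedged variant (:deleted_at is illustrative):
#
#   :graph_block=>proc{|ja, lja, js| {Sequel.qualify(ja, :deleted_at)=>nil}}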
@c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON ((tags.id = albums_tags.tag_id) AND (tags.active IS TRUE))' end it "should respect the association's :block option on through" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id, :block=>proc{|ja,lja,js| {Sequel.qualify(ja, :active)=>true}}}, [:albums_tags, :album_id, :tag_id]] @c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON ((albums.id = albums_artists.album_id) AND (albums.active IS TRUE)) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id)' end it "should respect the association's :graph_only_conditions option" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :graph_only_conditions=>{:a=>32} @c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.a = 32)' end it "should respect the association's :only_conditions option on through" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id, :only_conditions=>{:a=>42}}, [:albums_tags, :album_id, :tag_id]] @c1.eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.a = 42) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id)' end it "should create unique table aliases for all associations" do @c1.eager_graph(:artists=>{:artists=>:artists}).sql.must_equal "SELECT artists.id, artists_0.id AS artists_0_id, artists_1.id AS artists_1_id, artists_2.id AS artists_2_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.album_id = albums.id) LEFT OUTER JOIN artists AS artists_0 ON (artists_0.id = albums_artists_0.artist_id) LEFT OUTER JOIN albums_artists AS albums_artists_1 ON (albums_artists_1.artist_id = artists_0.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_1.album_id) LEFT OUTER JOIN albums_artists AS albums_artists_2 ON (albums_artists_2.album_id = albums_0.id) LEFT OUTER JOIN artists AS artists_1 ON (artists_1.id = albums_artists_2.artist_id) LEFT OUTER JOIN albums_artists AS albums_artists_3 ON (albums_artists_3.artist_id = artists_1.id) LEFT OUTER JOIN albums AS albums_1 ON (albums_1.id = albums_artists_3.album_id) LEFT OUTER JOIN albums_artists AS albums_artists_4 ON (albums_artists_4.album_id = albums_1.id) LEFT OUTER JOIN artists AS artists_2 ON (artists_2.id = albums_artists_4.artist_id)" end it "should respect the association's 
:order" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :order=>[:blah1, :blah2] @c1.order(Sequel[:artists][:blah2], Sequel[:artists][:blah3]).eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) ORDER BY artists.blah2, artists.blah3, tags.blah1, tags.blah2' end with_symbol_splitting "should not qualify qualified symbols" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :order=>[Sequel.identifier(:blah__id), Sequel.identifier(:blah__id).desc, Sequel.desc(:blah__id), :blah__id, :album_id, Sequel.desc(:album_id), 1, Sequel.lit('RANDOM()'), Sequel.qualify(:b, :a)] @c1.order(:artists__blah2, :artists__blah3).eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) ORDER BY artists.blah2, artists.blah3, tags.blah__id, tags.blah__id DESC, blah.id DESC, blah.id, tags.album_id, tags.album_id DESC, 1, RANDOM(), b.a' end it "should only qualify symbols, identifiers, or ordered versions in association's :order" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :order=>[Sequel.identifier(:blah__id), Sequel.identifier(:blah__id).desc, Sequel.desc(Sequel[:blah][:id]), Sequel[:blah][:id], :album_id, Sequel.desc(:album_id), 1, Sequel.lit('RANDOM()'), Sequel.qualify(:b, :a)] @c1.order(Sequel[:artists][:blah2], Sequel[:artists][:blah3]).eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) ORDER BY artists.blah2, artists.blah3, tags.blah__id, tags.blah__id DESC, blah.id DESC, blah.id, tags.album_id, tags.album_id DESC, 1, RANDOM(), b.a' end it "should not respect the association's :order if :order_eager_graph is false" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :order=>[:blah1, :blah2], :order_eager_graph=>false @c1.order(Sequel[:artists][:blah2], Sequel[:artists][:blah3]).eager_graph(:tags).sql.must_equal 'SELECT artists.id, tags.id AS tags_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) ORDER BY artists.blah2, artists.blah3' end it "should add the associations :order for multiple associations" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, 
:right=>:id}, [:albums_tags, :album_id, :tag_id]], :order=>[:blah1, :blah2] @c1.many_through_many :albums, [[:albums_artists, :artist_id, :album_id]], :order=>[:blah3, :blah4] @c1.eager_graph(:tags, :albums).sql.must_equal 'SELECT artists.id, tags.id AS tags_id, albums_0.id AS albums_0_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_0.album_id) ORDER BY tags.blah1, tags.blah2, albums_0.blah3, albums_0.blah4' end it "should add the association's :order for cascading associations" do @c1.many_through_many :tags, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :order=>[:blah1, :blah2] Tag.many_through_many :tracks, [[:albums_tags, :tag_id, :album_id], [:albums, :id, :id]], :right_primary_key=>:album_id, :order=>[:blah3, :blah4] @c1.eager_graph(:tags=>:tracks).sql.must_equal 'SELECT artists.id, tags.id AS tags_id, tracks.id AS tracks_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) LEFT OUTER JOIN albums_tags AS albums_tags_0 ON (albums_tags_0.tag_id = tags.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_tags_0.album_id) LEFT OUTER JOIN tracks ON (tracks.album_id = albums_0.id) ORDER BY tags.blah1, tags.blah2, tracks.blah3, tracks.blah4' end it "should use the correct qualifier when graphing multiple tables with extra conditions" do @c1.many_through_many :tags, [{:table=>:albums_artists, :left=>:artist_id, :right=>:album_id, :conditions=>{:a=>:b}}, {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]] @c1.many_through_many :albums, [{:table=>:albums_artists, :left=>:artist_id, :right=>:album_id, :conditions=>{:c=>:d}}] @c1.eager_graph(:tags, :albums).sql.must_equal 'SELECT artists.id, tags.id AS tags_id, albums_0.id AS albums_0_id FROM artists LEFT OUTER JOIN albums_artists ON ((albums_artists.artist_id = artists.id) AND (albums_artists.a = artists.b)) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags ON (tags.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON ((albums_artists_0.artist_id = artists.id) AND (albums_artists_0.c = artists.d)) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_0.album_id)' end end describe "many_through_many associations with non-column expression keys" do before do @db = Sequel.mock(:fetch=>{:id=>1, :object_ids=>[2]}) @Foo = Class.new(Sequel::Model(@db[:foos])) @Foo.columns :id, :object_ids @Foo.plugin :many_through_many m = Module.new{def obj_id; object_ids[0]; end} @Foo.include m @Foo.many_through_many :foos, [ [:f, Sequel.subscript(:l, 0), Sequel.subscript(:r, 0)], [:f, Sequel.subscript(:l, 1), Sequel.subscript(:r, 1)] ], :class=>@Foo, :left_primary_key=>:obj_id, :left_primary_key_column=>Sequel.subscript(:object_ids, 0), :right_primary_key=>Sequel.subscript(:object_ids, 0), 
:right_primary_key_method=>:obj_id @foo = @Foo.load(:id=>1, :object_ids=>[2]) @db.sqls end it "should have working regular association methods" do @Foo.first.foos.must_equal [@foo] @db.sqls.must_equal ["SELECT * FROM foos LIMIT 1", "SELECT foos.* FROM foos INNER JOIN f ON (f.r[1] = foos.object_ids[0]) INNER JOIN f AS f_0 ON (f_0.r[0] = f.l[1]) WHERE (f_0.l[0] = 2)"] end it "should have working eager loading methods" do @db.fetch = [[{:id=>1, :object_ids=>[2]}], [{:id=>1, :object_ids=>[2], :x_foreign_key_x=>2}]] @Foo.eager(:foos).all.map{|o| [o, o.foos]}.must_equal [[@foo, [@foo]]] @db.sqls.must_equal ["SELECT * FROM foos", "SELECT foos.*, f_0.l[0] AS x_foreign_key_x FROM foos INNER JOIN f ON (f.r[1] = foos.object_ids[0]) INNER JOIN f AS f_0 ON (f_0.r[0] = f.l[1]) WHERE (f_0.l[0] IN (2))"] end it "should have working eager graphing methods" do @db.fetch = {:id=>1, :object_ids=>[2], :foos_0_id=>1, :foos_0_object_ids=>[2]} @Foo.eager_graph(:foos).all.map{|o| [o, o.foos]}.must_equal [[@foo, [@foo]]] @db.sqls.must_equal ["SELECT foos.id, foos.object_ids, foos_0.id AS foos_0_id, foos_0.object_ids AS foos_0_object_ids FROM foos LEFT OUTER JOIN f ON (f.l[0] = foos.object_ids[0]) LEFT OUTER JOIN f AS f_0 ON (f_0.l[1] = f.r[0]) LEFT OUTER JOIN foos AS foos_0 ON (foos_0.object_ids[0] = f_0.r[1])"] end it "should have working filter by associations with model instances" do @Foo.first(:foos=>@foo).must_equal @foo @db.sqls.must_equal ["SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT f.l[0] FROM f INNER JOIN f AS f_0 ON (f_0.l[1] = f.r[0]) WHERE ((f_0.r[1] = 2) AND (f.l[0] IS NOT NULL)))) LIMIT 1"] end it "should have working filter by associations with model datasets" do @Foo.first(:foos=>@Foo.where(:id=>@foo.id)).must_equal @foo @db.sqls.must_equal ["SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT f.l[0] FROM f INNER JOIN f AS f_0 ON (f_0.l[1] = f.r[0]) WHERE ((f_0.r[1] IN (SELECT foos.object_ids[0] FROM foos WHERE ((id = 1) AND (foos.object_ids[0] IS NOT NULL)))) AND (f.l[0] IS NOT NULL)))) LIMIT 1"] end end describe Sequel::Model, "one_through_many" do before do class ::Artist < Sequel::Model attr_accessor :yyy columns :id plugin :many_through_many end class ::Tag < Sequel::Model columns :id, :h1, :h2 end @c1 = Artist @c2 = Tag @dataset = @c2.dataset = @c2.dataset.with_fetch(:id=>1) DB.reset end after do Object.send(:remove_const, :Artist) Object.send(:remove_const, :Tag) end
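# Editor's note: one_through_many is the singular variant of many_through_many:
# it walks the same chain of join tables, but the association returns a single
# object and the generated dataset uses LIMIT 1. A rough usage sketch, assuming
# the hypothetical albums_artists/albums/albums_tags schema used throughout
# these specs:
#
#   Artist.one_through_many :tag,
#     [[:albums_artists, :artist_id, :album_id],
#      [:albums, :id, :id],
#      [:albums_tags, :album_id, :tag_id]]
#   Artist.load(:id=>1234).tag # single Tag instance (or nil)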
it "should support using a custom :left_primary_key option when eager loading one_through_many associations" do @c1.send(:define_method, :id3){id*3} @c1.dataset = @c1.dataset.with_fetch(:id=>1) @c2.dataset = @c2.dataset.with_fetch(:id=>4, :x_foreign_key_x=>3) @c1.one_through_many :tag, :through=>[[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :left_primary_key=>:id3 a = @c1.eager(:tag).all a.must_equal [@c1.load(:id => 1)] DB.sqls.must_equal ['SELECT * FROM artists', "SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (3))"] a.first.tag.must_equal @c2.load(:id=>4) DB.sqls.must_equal [] end it "should handle a :predicate_key option to change the SQL used in the lookup" do @c1.dataset = @c1.dataset.with_fetch(:id=>1) @c2.dataset = @c2.dataset.with_fetch(:id=>4, :x_foreign_key_x=>1) @c1.one_through_many :tag, :through=>[[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :predicate_key=>(Sequel[:albums_artists][:artist_id] / 3) a = @c1.eager(:tag).all a.must_equal [@c1.load(:id => 1)] DB.sqls.must_equal ['SELECT * FROM artists', "SELECT tags.*, (albums_artists.artist_id / 3) AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((albums_artists.artist_id / 3) IN (1))"] a.first.tag.must_equal @c2.load(:id=>4) end it "should raise an error if an invalid form of through is used" do proc{@c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id]]}.must_raise(Sequel::Error) proc{@c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], {:table=>:album_tags, :left=>:album_id}]}.must_raise(Sequel::Error) proc{@c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], :album_tags]}.must_raise(Sequel::Error) end it "should allow only two arguments with the :through option" do @c1.one_through_many :tag, :through=>[[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] n = @c1.load(:id => 1234) n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should be clonable" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.many_through_many :tags, :clone=>:tag @c1.one_through_many :tag, :clone=>:tags n = @c1.load(:id => 1234) n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should use join tables given" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] n = @c1.load(:id => 1234) n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should handle multiple aliasing of tables" do begin class ::Album < Sequel::Model end @c1.one_through_many :album, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_artists, :album_id, :artist_id], [:artists, :id, :id], [:albums_artists, :artist_id, :album_id]] n = @c1.load(:id => 1234) n.album_dataset.sql.must_equal 'SELECT albums.* FROM albums INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) INNER JOIN artists ON (artists.id = albums_artists.artist_id) INNER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) INNER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_0.album_id) INNER JOIN albums_artists AS albums_artists_1 ON (albums_artists_1.album_id = albums_0.id) WHERE (albums_artists_1.artist_id =
1234) LIMIT 1' n.album.must_equal Album.load(:id=>1, :x=>1) ensure Object.send(:remove_const, :Album) end end it "should use explicit class if given" do @c1.one_through_many :album_tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag n = @c1.load(:id => 1234) n.album_tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 1' n.album_tag.must_equal @c2.load(:id=>1) end it "should accept :left_primary_key and :right_primary_key options for primary keys to use in current and associated table" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :right_primary_key=>:tag_id, :left_primary_key=>:yyy n = @c1.load(:id => 1234) n.yyy = 85 n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.tag_id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 85) LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should handle composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] n = @c1.load(:id => 1234) n.yyy = 85 n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((albums_artists.b1 = 1234) AND (albums_artists.b2 = 85)) LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end
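# The following tests cover filtering datasets by one_through_many association
# values. Sequel implements this by rewriting the association condition into an
# IN subquery that follows the join chain back to the left primary key, roughly
# (a sketch of the pattern the expected SQL below follows):
#
#   Artist.where(tag: some_tag)
#   # SELECT * FROM artists WHERE (artists.id IN
#   #   (SELECT albums_artists.artist_id FROM albums_artists ...))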
it "should allow filtering by one_through_many associations" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.filter(:tag=>@c2.load(:id=>1234)).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id = 1234) AND (albums_artists.artist_id IS NOT NULL))))' end it "should allow filtering by one_through_many associations with a single through table" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id]] @c1.filter(:tag=>@c2.load(:id=>1234)).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM albums_artists WHERE ((albums_artists.album_id = 1234) AND (albums_artists.artist_id IS NOT NULL))))' end it "should allow filtering by one_through_many associations with aliased tables" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums_artists, :id, :id], [:albums_artists, :album_id, :tag_id]] @c1.filter(:tag=>@c2.load(:id=>1234)).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.id = albums_artists.album_id) INNER JOIN albums_artists AS albums_artists_1 ON (albums_artists_1.album_id =
albums_artists_0.id) WHERE ((albums_artists_1.tag_id = 1234) AND (albums_artists.artist_id IS NOT NULL))))' end it "should allow filtering by one_through_many associations with composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.filter(:tag=>@c2.load(:h1=>1234, :h2=>85)).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE ((albums_tags.g1 = 1234) AND (albums_tags.g2 = 85) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL))))' end it "should allow filtering by one_through_many associations with :conditions" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.filter(:tag=>@c2.load(:id=>1234)).sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id = 1234))))" end it "should allow filtering by one_through_many associations with :conditions with a single through table" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id]], :conditions=>{:name=>'A'} @c1.filter(:tag=>@c2.load(:id=>1234)).sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_artists ON (albums_artists.album_id = tags.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id = 1234))))" end it "should allow filtering by one_through_many associations with :conditions and composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.filter(:tag=>@c2.load(:id=>1, :h1=>1234, :h2=>85)).sql.must_equal "SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id = 1))))" end it "should allow filtering by one_through_many associations with :order" do @c2.dataset = @c2.dataset.with_extend{def supports_distinct_on?; true end} @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :order=>:name @c1.filter(:tag=>@c2.load(:id=>1234)).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN
albums_artists ON (albums_artists.album_id = albums.id) WHERE ((albums_artists.artist_id IS NOT NULL) AND ((albums_artists.artist_id, tags.id) IN (SELECT DISTINCT ON (albums_artists.artist_id) albums_artists.artist_id, tags.id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) ORDER BY albums_artists.artist_id, name)) AND (tags.id = 1234))))' end it "should allow filtering by one_through_many associations with :order and composite keys" do @c2.dataset = @c2.dataset.with_extend{def supports_distinct_on?; true end} @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :order=>:name @c1.filter(:tag=>@c2.load(:id=>1, :h1=>1234, :h2=>85)).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND ((albums_artists.b1, albums_artists.b2, tags.id) IN (SELECT DISTINCT ON (albums_artists.b1, albums_artists.b2) albums_artists.b1, albums_artists.b2, tags.id FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) ORDER BY albums_artists.b1, albums_artists.b2, name)) AND (tags.id = 1))))' end it "should allow filtering by one_through_many associations with :order and :conditions" do @c2.dataset = @c2.dataset.with_extend{def supports_distinct_on?; true end} @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'}, :order=>:name @c1.filter(:tag=>@c2.load(:id=>1234)).sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND ((albums_artists.artist_id, tags.id) IN (SELECT DISTINCT ON (albums_artists.artist_id) albums_artists.artist_id, tags.id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (name = 'A') ORDER BY albums_artists.artist_id, name)) AND (tags.id = 1234))))" end it "should allow filtering by one_through_many associations with :order and :conditions and composite keys" do @c2.dataset = @c2.dataset.with_extend{def supports_distinct_on?; true end} @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'}, :order=>:name @c1.filter(:tag=>@c2.load(:id=>1, :h1=>1234,
:h2=>85)).sql.must_equal "SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND ((albums_artists.b1, albums_artists.b2, tags.id) IN (SELECT DISTINCT ON (albums_artists.b1, albums_artists.b2) albums_artists.b1, albums_artists.b2, tags.id FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE (name = 'A') ORDER BY albums_artists.b1, albums_artists.b2, name)) AND (tags.id = 1))))" end it "should allow excluding by one_through_many associations" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.exclude(:tag=>@c2.load(:id=>1234)).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id = 1234) AND (albums_artists.artist_id IS NOT NULL)))) OR (artists.id IS NULL))' end it "should allow excluding by one_through_many associations with composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.exclude(:tag=>@c2.load(:h1=>1234, :h2=>85)).sql.must_equal 'SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE ((albums_tags.g1 = 1234) AND (albums_tags.g2 = 85) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL)))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))' end it "should allow excluding by one_through_many associations with :conditions" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.exclude(:tag=>@c2.load(:id=>1234)).sql.must_equal "SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id = 1234)))) OR (artists.id IS NULL))" end
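# Exclusion is the inverse of the filtering above: the left key must be NOT IN
# the subquery, and since a row with a NULL left key can never match the
# subquery, an IS NULL check is ORed in for each left primary key column, as
# the expected SQL in the surrounding tests shows.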
it "should allow excluding by one_through_many associations with :conditions and composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.exclude(:tag=>@c2.load(:id=>1, :h1=>1234, :h2=>85)).sql.must_equal "SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id = 1)))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))" end it "should allow filtering by multiple one_through_many associations" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.filter(:tag=>[@c2.load(:id=>1234), @c2.load(:id=>2345)]).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id IN (1234, 2345)) AND (albums_artists.artist_id IS NOT NULL))))' end it "should allow filtering by multiple one_through_many associations with composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.filter(:tag=>[@c2.load(:h1=>1234, :h2=>85), @c2.load(:h1=>2345, :h2=>95)]).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE (((albums_tags.g1, albums_tags.g2) IN ((1234, 85), (2345, 95))) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL))))' end it "should allow filtering by multiple one_through_many associations with :conditions" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.filter(:tag=>[@c2.load(:id=>1234), @c2.load(:id=>2345)]).sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id IN (1234, 2345)))))" end it "should allow filtering by multiple one_through_many associations with :conditions and composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.filter(:tag=>[@c2.load(:id=>1, :h1=>1234, :h2=>85), @c2.load(:id=>2, :h1=>2345, :h2=>95)]).sql.must_equal "SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 =
albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id IN (1, 2)))))" end it "should allow excluding by multiple one_through_many associations" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.exclude(:tag=>[@c2.load(:id=>1234), @c2.load(:id=>2345)]).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id IN (1234, 2345)) AND (albums_artists.artist_id IS NOT NULL)))) OR (artists.id IS NULL))' end it "should allow excluding by multiple one_through_many associations with composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.exclude(:tag=>[@c2.load(:h1=>1234, :h2=>85), @c2.load(:h1=>2345, :h2=>95)]).sql.must_equal 'SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE (((albums_tags.g1, albums_tags.g2) IN ((1234, 85), (2345, 95))) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL)))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))' end it "should allow excluding by multiple one_through_many associations with :conditions" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.exclude(:tag=>[@c2.load(:id=>1234), @c2.load(:id=>2345)]).sql.must_equal "SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id IN (1234, 2345))))) OR (artists.id IS NULL))" end it "should allow excluding by multiple one_through_many associations with :conditions and composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.exclude(:tag=>[@c2.load(:id=>1, :h1=>1234, :h2=>85), @c2.load(:id=>2, :h1=>2345, :h2=>95)]).sql.must_equal "SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id IN (1, 2))))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))" end
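# When the associated object has no values for the right primary keys (e.g. an
# unsaved Tag.new), nothing can match, so instead of building a subquery the
# condition short-circuits to a constant false ('f'), or a constant true ('t')
# for exclusion, as the next test demonstrates.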
it "should allow filtering/excluding one_through_many associations with NULL values" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.filter(:tag=>@c2.new).sql.must_equal 'SELECT * FROM artists WHERE \'f\'' @c1.exclude(:tag=>@c2.new).sql.must_equal 'SELECT * FROM artists WHERE \'t\'' end it "should allow filtering by one_through_many association datasets" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.filter(:tag=>@c2.filter(:x=>1)).sql.must_equal 'SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id IN (SELECT tags.id FROM tags WHERE ((x = 1) AND (tags.id IS NOT NULL)))) AND (albums_artists.artist_id IS NOT NULL))))' end it "should allow filtering by one_through_many association datasets with composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.filter(:tag=>@c2.filter(:x=>1)).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE (((albums_tags.g1, albums_tags.g2) IN (SELECT tags.h1, tags.h2 FROM tags WHERE ((x = 1) AND (tags.h1 IS NOT NULL) AND (tags.h2 IS NOT NULL)))) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL))))' end it "should allow filtering by one_through_many association datasets with :conditions" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.filter(:tag=>@c2.filter(:x=>1)).sql.must_equal "SELECT * FROM artists WHERE (artists.id IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1))))))" end it "should allow filtering by one_through_many association datasets with :conditions and composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.filter(:tag=>@c2.filter(:x=>1)).sql.must_equal "SELECT * FROM artists WHERE ((artists.id, artists.yyy) IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1))))))" end it "should allow excluding by one_through_many association datasets" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id],
[:albums_tags, :album_id, :tag_id]] @c1.exclude(:tag=>@c2.filter(:x=>1)).sql.must_equal 'SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM albums_artists INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE ((albums_tags.tag_id IN (SELECT tags.id FROM tags WHERE ((x = 1) AND (tags.id IS NOT NULL)))) AND (albums_artists.artist_id IS NOT NULL)))) OR (artists.id IS NULL))' end it "should allow excluding by one_through_many association datasets with composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] @c1.exclude(:tag=>@c2.filter(:x=>1)).sql.must_equal 'SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM albums_artists INNER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) INNER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) WHERE (((albums_tags.g1, albums_tags.g2) IN (SELECT tags.h1, tags.h2 FROM tags WHERE ((x = 1) AND (tags.h1 IS NOT NULL) AND (tags.h2 IS NOT NULL)))) AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL)))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))' end it "should allow excluding by one_through_many association datasets with :conditions" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:name=>'A'} @c1.exclude(:tag=>@c2.filter(:x=>1)).sql.must_equal "SELECT * FROM artists WHERE ((artists.id NOT IN (SELECT albums_artists.artist_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((name = 'A') AND (albums_artists.artist_id IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1)))))) OR (artists.id IS NULL))" end it "should allow excluding by one_through_many association datasets with :conditions and composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy], :conditions=>{:name=>'A'} @c1.exclude(:tag=>@c2.filter(:x=>1)).sql.must_equal "SELECT * FROM artists WHERE (((artists.id, artists.yyy) NOT IN (SELECT albums_artists.b1, albums_artists.b2 FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((name = 'A') AND (albums_artists.b1 IS NOT NULL) AND (albums_artists.b2 IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1)))))) OR (artists.id IS NULL) OR (artists.yyy IS NULL))" end it "should support a :conditions option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:a=>32} n = @c1.load(:id => 1234) n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN
albums_artists ON (albums_artists.album_id = albums.id) WHERE ((a = 32) AND (albums_artists.artist_id = 1234)) LIMIT 1' n.tag.must_equal @c2.load(:id=>1) @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>Sequel.lit('a = ?', 42) n = @c1.load(:id => 1234) n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((a = 42) AND (albums_artists.artist_id = 1234)) LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should support an :order option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :order=>:blah n = @c1.load(:id => 1234) n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) ORDER BY blah LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should support an array for the :order option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :order=>[:blah1, :blah2] n = @c1.load(:id => 1234) n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) ORDER BY blah1, blah2 LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should support a select option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :select=>:blah n = @c1.load(:id => 1234) n.tag_dataset.sql.must_equal 'SELECT blah FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should support an array for the select option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :select=>[Sequel::SQL::ColumnAll.new(:tags), Sequel[:albums][:name]] n = @c1.load(:id => 1234) n.tag_dataset.sql.must_equal 'SELECT tags.*, albums.name FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should accept a block" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] do |ds| ds.filter(:yyy=>@yyy) end n = @c1.load(:id => 1234) n.yyy = 85 n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((albums_artists.artist_id = 1234) AND (yyy = 85)) LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should allow the :order option while accepting 
a block" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :order=>:blah do |ds| ds.filter(:yyy=>@yyy) end n = @c1.load(:id => 1234) n.yyy = 85 n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((albums_artists.artist_id = 1234) AND (yyy = 85)) ORDER BY blah LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should support a :dataset option that is used instead of the default" do @c1.one_through_many :tag, [[:a, :b, :c]], :dataset=>proc{Tag.join(:albums_tags, [:tag_id]).join(:albums, [:album_id]).join(:albums_artists, [:album_id]).filter(Sequel[:albums_artists][:artist_id]=>id)} n = @c1.load(:id => 1234) n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags USING (tag_id) INNER JOIN albums USING (album_id) INNER JOIN albums_artists USING (album_id) WHERE (albums_artists.artist_id = 1234) LIMIT 1' n.tag.must_equal @c2.load(:id=>1) end it "should support a :limit option to specify an offset" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :limit=>[nil, 10] n = @c1.load(:id => 1234) n.tag_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 1 OFFSET 10' n.tag.must_equal @c2.load(:id=>1) end it "should have the :eager option affect the _dataset method" do @c2.many_to_many :fans @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager=>:fans @c1.load(:id => 1234).tag_dataset.opts[:eager].must_equal(:fans=>nil) end it "should return the associated object" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] @c1.load(:id => 1234).tag.must_equal @c2.load(:id=>1) DB.sqls.must_equal ['SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 1'] end it "should populate cache when accessed" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] n = @c1.load(:id => 1234) n.associations[:tag].must_be_nil DB.sqls.must_equal [] n.tag.must_equal @c2.load(:id=>1) DB.sqls.must_equal ['SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 1'] n.associations[:tag].must_equal n.tag DB.sqls.length.must_equal 0 end it "should use cache if available" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] n = @c1.load(:id => 1234) n.associations[:tag] = nil n.tag.must_be_nil DB.sqls.must_equal [] end it "should not use cache if asked to reload" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], 
[:albums_tags, :album_id, :tag_id]] n = @c1.load(:id => 1234) n.associations[:tag] = nil DB.sqls.must_equal [] n.tag(:reload=>true).must_equal @c2.load(:id=>1) DB.sqls.must_equal ['SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1234) LIMIT 1'] n.associations[:tag].must_equal n.tag DB.sqls.length.must_equal 0 end it "should not add associations methods directly to class" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] im = @c1.instance_methods im.must_include(:tag) im.must_include(:tag_dataset) im2 = @c1.instance_methods(false) im2.wont_include(:tag) im2.wont_include(:tag_dataset) end it "should support after_load association callback" do h = [] @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :after_load=>:al @c1.class_eval do self::Foo = h def al(v) model::Foo << v.pk * 20 end end @c2.dataset = @c2.dataset.with_fetch(:id=>20) p = @c1.load(:id=>10, :parent_id=>20) p.tag h.must_equal [400] p.tag.pk.must_equal 20 end end describe "one_through_many eager loading methods" do before do class ::Artist < Sequel::Model plugin :many_through_many one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] one_through_many :other_tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>:Tag one_through_many :album, [[:albums_artists, :artist_id, :album_id]] one_through_many :artist, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_artists, :album_id, :artist_id]] end class ::Tag < Sequel::Model plugin :many_through_many one_through_many :track, [[:albums_tags, :tag_id, :album_id], [:albums, :id, :id]], :right_primary_key=>:album_id end class ::Album < Sequel::Model end class ::Track < Sequel::Model end Artist.dataset = Artist.dataset.with_fetch(proc do |sql| h = {:id => 1} if sql =~ /FROM artists LEFT OUTER JOIN albums_artists/ h[:tag_id] = 2 h[:album_id] = 3 if sql =~ /LEFT OUTER JOIN albums AS album/ h[:track_id] = 4 if sql =~ /LEFT OUTER JOIN tracks AS track/ h[:other_tag_id] = 9 if sql =~ /other_tag\.id AS other_tag_id/ h[:artist_id] = 10 if sql =~ /artists_0\.id AS artist_id/ end h end) Artist.dataset.columns(:id) Tag.dataset = Tag.dataset.with_fetch(proc do |sql| h = {:id => 2} if sql =~ /albums_artists.artist_id IN \(([18])\)/ h[:x_foreign_key_x] = $1.to_i elsif sql =~ /\(\(albums_artists.b1, albums_artists.b2\) IN \(\(1, 8\)\)\)/ h.merge!(:x_foreign_key_0_x=>1, :x_foreign_key_1_x=>8) end h[:tag_id] = h.delete(:id) if sql =~ /albums_artists.artist_id IN \(8\)/ h end) Album.dataset = Album.dataset.with_fetch(proc do |sql| h = {:id => 3} h[:x_foreign_key_x] = 1 if sql =~ /albums_artists.artist_id IN \(1\)/ h end) Track.dataset = Track.dataset.with_fetch(proc do |sql| h = {:id => 4} h[:x_foreign_key_x] = 2 if sql =~ /albums_tags.tag_id IN \(2\)/ h end) @c1 = Artist DB.reset end after do [:Artist, :Tag, :Album, :Track].each{|x| Object.send(:remove_const, x)} end it "should eagerly load a single one_through_many association" do a = @c1.eager(:tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN 
albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a.first.tag.must_equal Tag.load(:id=>2) DB.sqls.length.must_equal 0 end it "should eagerly load multiple associations in a single call" do a = @c1.eager(:tag, :album).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))', 'SELECT albums.*, albums_artists.artist_id AS x_foreign_key_x FROM albums INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a = a.first a.tag.must_equal Tag.load(:id=>2) a.album.must_equal Album.load(:id=>3) DB.sqls.length.must_equal 0 end it "should eagerly load multiple associations in separate calls" do a = @c1.eager(:tag).eager(:album).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))', 'SELECT albums.*, albums_artists.artist_id AS x_foreign_key_x FROM albums INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a = a.first a.tag.must_equal Tag.load(:id=>2) a.album.must_equal Album.load(:id=>3) DB.sqls.length.must_equal 0 end it "should allow cascading of eager loading for associations of associated models" do a = @c1.eager(:tag=>:track).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))', 'SELECT tracks.*, albums_tags.tag_id AS x_foreign_key_x FROM tracks INNER JOIN albums ON (albums.id = tracks.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE (albums_tags.tag_id IN (2))'] a = a.first a.tag.must_equal Tag.load(:id=>2) a.tag.track.must_equal Track.load(:id=>4) DB.sqls.length.must_equal 0 end
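# The :eager association option declares dependent associations on the
# association itself, so the cascade happens whenever it is loaded, eagerly or
# (as a later test shows) lazily. A minimal sketch, reusing the tables above:
#
#   Artist.one_through_many :tag,
#     [[:albums_artists, :artist_id, :album_id],
#      [:albums, :id, :id],
#      [:albums_tags, :album_id, :tag_id]], :eager=>:track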
it "should cascade eager loading when the :eager association option is used" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager=>:track a = @c1.eager(:tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))', 'SELECT tracks.*, albums_tags.tag_id AS x_foreign_key_x FROM tracks INNER JOIN albums ON (albums.id = tracks.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE (albums_tags.tag_id IN (2))'] a = a.first a.tag.must_equal Tag.load(:id=>2) a.tag.track.must_equal Track.load(:id=>4) DB.sqls.length.must_equal 0 end it "should respect :eager when lazily loading an association" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager=>:track a = @c1.load(:id=>1) a.tag.must_equal Tag.load(:id=>2) DB.sqls.must_equal ['SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1) LIMIT 1', 'SELECT tracks.*, albums_tags.tag_id AS x_foreign_key_x FROM tracks INNER JOIN albums ON (albums.id = tracks.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) WHERE (albums_tags.tag_id IN (2))'] a.tag.track.must_equal Track.load(:id=>4) DB.sqls.length.must_equal 0 end it "should raise an error if attempting to eagerly load an association using :eager_graph option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager_graph=>:track proc{@c1.eager(:tag).all}.must_raise(Sequel::Error) end it "should respect :eager_graph when lazily loading an association" do Tag.dataset = Tag.dataset.with_fetch(:id=>2, :track_id=>4).with_extend{def columns; [:id] end} @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager_graph=>:track a = @c1.load(:id=>1) a.tag DB.sqls.must_equal [ 'SELECT tags.id, track.id AS track_id FROM (SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id = 1) LIMIT 1) AS tags LEFT OUTER JOIN albums_tags AS albums_tags_0 ON (albums_tags_0.tag_id = tags.id) LEFT OUTER JOIN albums ON (albums.id = albums_tags_0.album_id) LEFT OUTER JOIN tracks AS track ON (track.album_id = albums.id)'] a.tag.must_equal Tag.load(:id=>2) a.tag.track.must_equal Track.load(:id=>4) DB.sqls.length.must_equal 0 end it "should respect :conditions when eagerly loading" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :conditions=>{:a=>32} a = @c1.eager(:tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((a = 32) AND (albums_artists.artist_id IN (1)))'] a.first.tag.must_equal Tag.load(:id=>2) DB.sqls.length.must_equal 0 end it "should respect :order when eagerly loading" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :order=>:blah, :eager_limit_strategy=>:ruby a = @c1.eager(:tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1)) ORDER BY blah'] a.first.tag.must_equal Tag.load(:id=>2) DB.sqls.length.must_equal 0 end it
"should use the association's block when eager loading by default" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]] do |ds| ds.filter(:a) end a = @c1.eager(:tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (a AND (albums_artists.artist_id IN (1)))'] a.first.tag.must_equal Tag.load(:id=>2) DB.sqls.length.must_equal 0 end it "should use the :eager_block option when eager loading if given" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :eager_block=>proc{|ds| ds.filter(:b)} do |ds| ds.filter(:a) end a = @c1.eager(:tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (b AND (albums_artists.artist_id IN (1)))'] a.first.tag.must_equal Tag.load(:id=>2) DB.sqls.length.must_equal 0 end it "should respect the :limit option on a one_through_many association" do @c1.one_through_many :second_tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[nil,1] Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_x=>1, :id=>6}]) a = @c1.eager(:second_tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (1 = albums_artists.artist_id) LIMIT 1 OFFSET 1) AS t1'] a.first.second_tag.must_equal Tag.load(:id=>6) DB.sqls.length.must_equal 0 end it "should respect the :limit option on a one_through_many association using the :ruby strategy" do @c1.one_through_many :second_tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[nil,1], :eager_limit_strategy=>:ruby Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_x=>1, :id=>5}, {:x_foreign_key_x=>1, :id=>6}]) a = @c1.eager(:second_tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a.first.second_tag.must_equal Tag.load(:id=>6) DB.sqls.length.must_equal 0 end it "should eagerly load a single one_through_many association using the :distinct_on strategy" do @c1.one_through_many :second_tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :order=>:name, :eager_limit_strategy=>:distinct_on Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_x=>1, :id=>5}]).with_extend{def supports_distinct_on?; true end} a = 
@c1.eager(:second_tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', "SELECT DISTINCT ON (albums_artists.artist_id) tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1)) ORDER BY albums_artists.artist_id, name"] a.first.second_tag.must_equal Tag.load(:id=>5) DB.sqls.length.must_equal 0 end it "should eagerly load a single one_through_many association using the :window_function strategy" do @c1.one_through_many :second_tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[nil,1], :order=>:name, :eager_limit_strategy=>:window_function Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_x=>1, :id=>5}]).with_extend{def supports_window_functions?; true end} a = @c1.eager(:second_tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x, row_number() OVER (PARTITION BY albums_artists.artist_id ORDER BY name) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))) AS t1 WHERE (x_sequel_row_number_x = 2)'] a.first.second_tag.must_equal Tag.load(:id=>5) DB.sqls.length.must_equal 0 end it "should respect the :limit option on a one_through_many association with composite primary keys on the main table" do @c1.set_primary_key([:id1, :id2]) @c1.columns :id1, :id2 @c1.one_through_many :second_tag, [[:albums_artists, [:artist_id1, :artist_id2], :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[nil,1], :order=>:name Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_0_x=>1, :x_foreign_key_1_x=>2, :id=>5}]).with_extend{def supports_window_functions?; true end} a = @c1.eager(:second_tag).with_fetch(:id1=>1, :id2=>2).all a.must_equal [@c1.load(:id1=>1, :id2=>2)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id1 AS x_foreign_key_0_x, albums_artists.artist_id2 AS x_foreign_key_1_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((1 = albums_artists.artist_id1) AND (2 = albums_artists.artist_id2)) ORDER BY name LIMIT 1 OFFSET 1) AS t1'] a.first.second_tag.must_equal Tag.load(:id=>5) DB.sqls.length.must_equal 0 end it "should respect the :limit option on a one_through_many association with composite primary keys on the main table using a :window_function strategy" do @c1.set_primary_key([:id1, :id2]) @c1.columns :id1, :id2 @c1.one_through_many :second_tag, [[:albums_artists, [:artist_id1, :artist_id2], :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :class=>Tag, :limit=>[nil,1], :order=>:name, :eager_limit_strategy=>:window_function Tag.dataset = Tag.dataset.with_fetch([{:x_foreign_key_0_x=>1, :x_foreign_key_1_x=>2, :id=>5}]).with_extend{def supports_window_functions?; true end} a = @c1.eager(:second_tag).with_fetch(:id1=>1, :id2=>2).all a.must_equal [@c1.load(:id1=>1, :id2=>2)] DB.sqls.must_equal ['SELECT * FROM 
artists', 'SELECT * FROM (SELECT tags.*, albums_artists.artist_id1 AS x_foreign_key_0_x, albums_artists.artist_id2 AS x_foreign_key_1_x, row_number() OVER (PARTITION BY albums_artists.artist_id1, albums_artists.artist_id2 ORDER BY name) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE ((albums_artists.artist_id1, albums_artists.artist_id2) IN ((1, 2)))) AS t1 WHERE (x_sequel_row_number_x = 2)'] a.first.second_tag.must_equal Tag.load(:id=>5) DB.sqls.length.must_equal 0 end it "should raise an error when attempting to eagerly load an association with the :allow_eager option set to false" do @c1.eager(:tag).all @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :allow_eager=>false proc{@c1.eager(:tag).all}.must_raise(Sequel::Error) end it "should respect the association's :select option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :select=>Sequel[:tags][:name] a = @c1.eager(:tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.name, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a.first.tag.must_equal Tag.load(:id=>2) DB.sqls.length.must_equal 0 end it "should respect one_through_many association's :left_primary_key and :right_primary_key options" do @c1.send(:define_method, :yyy){values[:yyy]} @c1.dataset = @c1.dataset.with_fetch(:id=>1, :yyy=>8).with_extend{def columns; [:id, :yyy] end} @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :left_primary_key=>:yyy, :right_primary_key=>:tag_id a = @c1.eager(:tag).all a.must_equal [@c1.load(:id=>1, :yyy=>8)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.tag_id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (8))'] a.first.tag.must_equal Tag.load(:tag_id=>2) DB.sqls.length.must_equal 0 end it "should handle composite keys" do @c1.send(:define_method, :yyy){values[:yyy]} @c1.dataset = @c1.dataset.with_fetch(:id=>1, :yyy=>8).with_extend{def columns; [:id, :yyy] end} @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:h1, :h2], :left_primary_key=>[:id, :yyy] a = @c1.eager(:tag).all a.must_equal [@c1.load(:id=>1, :yyy=>8)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.b1 AS x_foreign_key_0_x, albums_artists.b2 AS x_foreign_key_1_x FROM tags INNER JOIN albums_tags ON ((albums_tags.g1 = tags.h1) AND (albums_tags.g2 = tags.h2)) INNER JOIN albums ON ((albums.e1 = albums_tags.f1) AND (albums.e2 = albums_tags.f2)) INNER JOIN albums_artists ON ((albums_artists.c1 = albums.d1) AND (albums_artists.c2 = albums.d2)) WHERE ((albums_artists.b1, albums_artists.b2) IN ((1, 8)))'] a.first.tag.must_equal Tag.load(:id=>2) 
DB.sqls.length.must_equal 0 end it "should respect :after_load callbacks on associations when eager loading" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :after_load=>lambda{|o, a| o[:id] *= 2; a[:id] *= 3} a = @c1.eager(:tag).all a.must_equal [@c1.load(:id=>2)] DB.sqls.must_equal ['SELECT * FROM artists', 'SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a.first.tag.must_equal Tag.load(:id=>6) DB.sqls.length.must_equal 0 end it "should support association_join" do @c1.association_join(:tag).sql.must_equal "SELECT * FROM artists INNER JOIN albums_artists ON (albums_artists.artist_id = artists.id) INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags AS tag ON (tag.id = albums_tags.tag_id)" end it "should eagerly graph a single one_through_many association" do a = @c1.eager_graph(:tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id)'] a.first.tag.must_equal Tag.load(:id=>2) DB.sqls.length.must_equal 0 end it "should eagerly graph a single one_through_many association using the :distinct_on strategy" do Tag.dataset = Tag.dataset.with_extend{def supports_distinct_on?; true end} a = @c1.eager_graph_with_options(:tag, :limit_strategy=>true).with_fetch(:id=>1, :tag_id=>2).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN (SELECT DISTINCT ON (albums_artists.artist_id) tags.*, albums_artists.artist_id AS x_foreign_key_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) ORDER BY albums_artists.artist_id) AS tag ON (tag.x_foreign_key_x = artists.id)'] a.first.tag.must_equal Tag.load(:id=>2) DB.sqls.length.must_equal 0 end it "should eagerly graph a single one_through_many association using the :window_function strategy" do Tag.dataset = Tag.dataset.with_extend do def supports_window_functions?; true end def columns; literal(opts[:select]) =~ /x_foreign_key_x/ ? 
[:id, :x_foreign_key_x] : [:id] end end a = @c1.eager_graph_with_options(:tag, :limit_strategy=>true).with_fetch(:id=>1, :tag_id=>2).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN (SELECT id, x_foreign_key_x FROM (SELECT tags.*, albums_artists.artist_id AS x_foreign_key_x, row_number() OVER (PARTITION BY albums_artists.artist_id) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) INNER JOIN albums ON (albums.id = albums_tags.album_id) INNER JOIN albums_artists ON (albums_artists.album_id = albums.id)) AS t1 WHERE (x_sequel_row_number_x = 1)) AS tag ON (tag.x_foreign_key_x = artists.id)'] a.first.tag.must_equal Tag.load(:id=>2) DB.sqls.length.must_equal 0 end it "should eagerly graph multiple associations in a single call" do a = @c1.eager_graph(:tag, :album).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id, album.id AS album_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) LEFT OUTER JOIN albums AS album ON (album.id = albums_artists_0.album_id)'] a = a.first a.tag.must_equal Tag.load(:id=>2) a.album.must_equal Album.load(:id=>3) DB.sqls.length.must_equal 0 end it "should eagerly graph multiple associations in separate calls" do a = @c1.eager_graph(:tag).eager_graph(:album).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id, album.id AS album_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) LEFT OUTER JOIN albums AS album ON (album.id = albums_artists_0.album_id)'] a = a.first a.tag.must_equal Tag.load(:id=>2) a.album.must_equal Album.load(:id=>3) DB.sqls.length.must_equal 0 end it "should allow cascading of eager graphing for associations of associated models" do a = @c1.eager_graph(:tag=>:track).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id, track.id AS track_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id) LEFT OUTER JOIN albums_tags AS albums_tags_0 ON (albums_tags_0.tag_id = tag.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_tags_0.album_id) LEFT OUTER JOIN tracks AS track ON (track.album_id = albums_0.id)'] a = a.first a.tag.must_equal Tag.load(:id=>2) a.tag.track.must_equal Track.load(:id=>4) DB.sqls.length.must_equal 0 end it "should eager graph multiple associations from the same table" do a = @c1.eager_graph(:tag, :other_tag).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id, other_tag.id AS other_tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN 
albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_0.album_id) LEFT OUTER JOIN albums_tags AS albums_tags_0 ON (albums_tags_0.album_id = albums_0.id) LEFT OUTER JOIN tags AS other_tag ON (other_tag.id = albums_tags_0.tag_id)'] a = a.first a.tag.must_equal Tag.load(:id=>2) a.other_tag.must_equal Tag.load(:id=>9) DB.sqls.length.must_equal 0 end it "should eager graph a self_referential association" do a = @c1.eager_graph(:tag, :artist).with_fetch(:id=>1, :tag_id=>2, :artist_id=>10).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id, artist.id AS artist_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_0.album_id) LEFT OUTER JOIN albums_artists AS albums_artists_1 ON (albums_artists_1.album_id = albums_0.id) LEFT OUTER JOIN artists AS artist ON (artist.id = albums_artists_1.artist_id)'] a = a.first a.tag.must_equal Tag.load(:id=>2) a.artist.must_equal @c1.load(:id=>10) DB.sqls.length.must_equal 0 end it "should be able to use eager and eager_graph together" do a = @c1.eager_graph(:tag).eager(:album).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id)', 'SELECT albums.*, albums_artists.artist_id AS x_foreign_key_x FROM albums INNER JOIN albums_artists ON (albums_artists.album_id = albums.id) WHERE (albums_artists.artist_id IN (1))'] a = a.first a.tag.must_equal Tag.load(:id=>2) a.album.must_equal Album.load(:id=>3) DB.sqls.length.must_equal 0 end it "should handle no associated records when eagerly graphing a single one_through_many association" do a = @c1.eager_graph(:tag).with_fetch(:id=>1, :tag_id=>nil).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id)'] a.first.tag.must_be_nil DB.sqls.length.must_equal 0 end it "should handle no associated records when eagerly graphing multiple one_through_many associations" do a = @c1.eager_graph(:tag, :album).with_fetch([{:id=>1, :tag_id=>5, :album_id=>6}, {:id=>7, :tag_id=>nil, :albums_0_id=>nil}]).all a.must_equal [@c1.load(:id=>1), @c1.load(:id=>7)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id, album.id AS album_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON 
(albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.artist_id = artists.id) LEFT OUTER JOIN albums AS album ON (album.id = albums_artists_0.album_id)'] a.first.tag.must_equal Tag.load(:id=>5) a.first.album.must_equal Album.load(:id=>6) a.last.tag.must_be_nil a.last.album.must_be_nil DB.sqls.length.must_equal 0 end it "should handle missing associated records when cascading eager graphing for associations of associated models" do a = @c1.eager_graph(:tag=>:track).with_fetch([{:id=>1, :tag_id=>2, :track_id=>nil}, {:id=>2, :tag_id=>nil, :tracks_id=>nil}]).all a.must_equal [@c1.load(:id=>1), @c1.load(:id=>2)] DB.sqls.must_equal ['SELECT artists.id, tag.id AS tag_id, track.id AS track_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id) LEFT OUTER JOIN albums_tags AS albums_tags_0 ON (albums_tags_0.tag_id = tag.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_tags_0.album_id) LEFT OUTER JOIN tracks AS track ON (track.album_id = albums_0.id)'] a.last.tag.must_be_nil a = a.first a.tag.must_equal Tag.load(:id=>2) a.tag.track.must_be_nil DB.sqls.length.must_equal 0 end it "eager graphing should respect :left_primary_key and :right_primary_key options" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :left_primary_key=>:yyy, :right_primary_key=>:tag_id @c1.dataset = @c1.dataset.with_extend{def columns; [:id, :yyy] end} Tag.dataset = Tag.dataset.with_extend{def columns; [:id, :tag_id] end} a = @c1.eager_graph(:tag).with_fetch(:id=>1, :yyy=>8, :tag_id=>2, :tag_tag_id=>4).all a.must_equal [@c1.load(:id=>1, :yyy=>8)] DB.sqls.must_equal ['SELECT artists.id, artists.yyy, tag.id AS tag_id, tag.tag_id AS tag_tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.yyy) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.tag_id = albums_tags.tag_id)'] a.first.tag.must_equal Tag.load(:id=>2, :tag_id=>4) DB.sqls.length.must_equal 0 end it "eager graphing should respect composite keys" do @c1.one_through_many :tag, [[:albums_artists, [:b1, :b2], [:c1, :c2]], [:albums, [:d1, :d2], [:e1, :e2]], [:albums_tags, [:f1, :f2], [:g1, :g2]]], :right_primary_key=>[:id, :tag_id], :left_primary_key=>[:id, :yyy] @c1.dataset = @c1.dataset.with_extend{def columns; [:id, :yyy] end} Tag.dataset = Tag.dataset.with_extend{def columns; [:id, :tag_id] end} a = @c1.eager_graph(:tag).with_fetch(:id=>1, :yyy=>8, :tag_id=>2, :tag_tag_id=>4).all a.must_equal [@c1.load(:id=>1, :yyy=>8)] DB.sqls.must_equal ['SELECT artists.id, artists.yyy, tag.id AS tag_id, tag.tag_id AS tag_tag_id FROM artists LEFT OUTER JOIN albums_artists ON ((albums_artists.b1 = artists.id) AND (albums_artists.b2 = artists.yyy)) LEFT OUTER JOIN albums ON ((albums.d1 = albums_artists.c1) AND (albums.d2 = albums_artists.c2)) LEFT OUTER JOIN albums_tags ON ((albums_tags.f1 = albums.e1) AND (albums_tags.f2 = albums.e2)) LEFT OUTER JOIN tags AS tag ON ((tag.id = albums_tags.g1) AND (tag.tag_id = albums_tags.g2))'] a.first.tag.must_equal Tag.load(:id=>2, :tag_id=>4) DB.sqls.length.must_equal 0 end 
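# The specs that follow exercise the per-association graphing options
# (:graph_select, :graph_join_type, :graph_conditions, :graph_block,
# :graph_only_conditions) and their per-edge equivalents in the through
# list (:join_type, :conditions, :block, :only_conditions). As a rough
# sketch of how these options combine in a definition (hypothetical model
# setup for illustration only, not used by any spec below):
#
#   Artist.one_through_many :tag,
#     [[:albums_artists, :artist_id, :album_id],
#      {:table=>:albums, :left=>:id, :right=>:id, :join_type=>:inner},
#      [:albums_tags, :album_id, :tag_id]],
#     :graph_select=>:name,               # only select tags.name when graphing
#     :graph_join_type=>:inner,           # join type for the final tags join
#     :graph_conditions=>{:active=>true}  # extra conditions on the tags join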
it "should respect the association's :graph_select option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :graph_select=>:b a = @c1.eager_graph(:tag).with_fetch(:id=>1, :b=>2).all a.must_equal [@c1.load(:id=>1)] DB.sqls.must_equal ['SELECT artists.id, tag.b FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id)'] a.first.tag.must_equal Tag.load(:b=>2) DB.sqls.length.must_equal 0 end it "should respect the association's :graph_join_type option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_tags, :album_id, :tag_id]], :graph_join_type=>:inner @c1.eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists INNER JOIN albums_artists ON (albums_artists.artist_id = artists.id) INNER JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags AS tag ON (tag.id = albums_tags.tag_id)' end it "should respect the association's :join_type option on through" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id, :join_type=>:natural}, [:albums_tags, :album_id, :tag_id]], :graph_join_type=>:inner @c1.eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists INNER JOIN albums_artists ON (albums_artists.artist_id = artists.id) NATURAL JOIN albums ON (albums.id = albums_artists.album_id) INNER JOIN albums_tags ON (albums_tags.album_id = albums.id) INNER JOIN tags AS tag ON (tag.id = albums_tags.tag_id)' end it "should respect the association's :conditions option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :conditions=>{:a=>32} @c1.eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON ((tag.id = albums_tags.tag_id) AND (tag.a = 32))' end it "should respect the association's :graph_conditions option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :graph_conditions=>{:a=>42} @c1.eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON ((tag.id = albums_tags.tag_id) AND (tag.a = 42))' @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :graph_conditions=>{:a=>42}, :conditions=>{:a=>32} @c1.eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON 
(albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON ((tag.id = albums_tags.tag_id) AND (tag.a = 42))' end it "should respect the association's :conditions option on through" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id, :conditions=>{:a=>42}}, [:albums_tags, :album_id, :tag_id]] @c1.eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON ((albums.id = albums_artists.album_id) AND (albums.a = 42)) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id)' end it "should respect the association's :graph_block option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :graph_block=>proc{|ja,lja,js| {Sequel.qualify(ja, :active)=>true}} @c1.eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON ((tag.id = albums_tags.tag_id) AND (tag.active IS TRUE))' end it "should respect the association's :block option on through" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id, :block=>proc{|ja,lja,js| {Sequel.qualify(ja, :active)=>true}}}, [:albums_tags, :album_id, :tag_id]] @c1.eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON ((albums.id = albums_artists.album_id) AND (albums.active IS TRUE)) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id)' end it "should respect the association's :graph_only_conditions option" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :graph_only_conditions=>{:a=>32} @c1.eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.a = 32)' end it "should respect the association's :only_conditions option on through" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id, :only_conditions=>{:a=>42}}, [:albums_tags, :album_id, :tag_id]] @c1.eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.a = 42) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id)' end it "should create unique table aliases for all associations" do @c1.eager_graph(:artist=>{:artist=>:artist}).sql.must_equal "SELECT artists.id, artist.id AS artist_id, artist_0.id AS artist_0_id, artist_1.id AS artist_1_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = 
artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_artists AS albums_artists_0 ON (albums_artists_0.album_id = albums.id) LEFT OUTER JOIN artists AS artist ON (artist.id = albums_artists_0.artist_id) LEFT OUTER JOIN albums_artists AS albums_artists_1 ON (albums_artists_1.artist_id = artist.id) LEFT OUTER JOIN albums AS albums_0 ON (albums_0.id = albums_artists_1.album_id) LEFT OUTER JOIN albums_artists AS albums_artists_2 ON (albums_artists_2.album_id = albums_0.id) LEFT OUTER JOIN artists AS artist_0 ON (artist_0.id = albums_artists_2.artist_id) LEFT OUTER JOIN albums_artists AS albums_artists_3 ON (albums_artists_3.artist_id = artist_0.id) LEFT OUTER JOIN albums AS albums_1 ON (albums_1.id = albums_artists_3.album_id) LEFT OUTER JOIN albums_artists AS albums_artists_4 ON (albums_artists_4.album_id = albums_1.id) LEFT OUTER JOIN artists AS artist_1 ON (artist_1.id = albums_artists_4.artist_id)" end it "should respect the association's :order" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :order=>[:blah1, :blah2] @c1.order(Sequel[:artists][:blah2], Sequel[:artists][:blah3]).eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id) ORDER BY artists.blah2, artists.blah3, tag.blah1, tag.blah2' end it "should only qualify unqualified symbols, identifiers, or ordered versions in association's :order" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :order=>[Sequel.identifier(:blah__id), Sequel.identifier(:blah__id).desc, Sequel.desc(Sequel[:blah][:id]), Sequel[:blah][:id], :album_id, Sequel.desc(:album_id), 1, Sequel.lit('RANDOM()'), Sequel.qualify(:b, :a)] @c1.order(Sequel[:artists][:blah2], Sequel[:artists][:blah3]).eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id) ORDER BY artists.blah2, artists.blah3, tag.blah__id, tag.blah__id DESC, blah.id DESC, blah.id, tag.album_id, tag.album_id DESC, 1, RANDOM(), b.a' end with_symbol_splitting "should not qualify qualified symbols in association's :order" do @c1.one_through_many :tag, [[:albums_artists, :artist_id, :album_id], {:table=>:albums, :left=>:id, :right=>:id}, [:albums_tags, :album_id, :tag_id]], :order=>[Sequel.identifier(:blah__id), Sequel.identifier(:blah__id).desc, Sequel.desc(:blah__id), :blah__id, :album_id, Sequel.desc(:album_id), 1, Sequel.lit('RANDOM()'), Sequel.qualify(:b, :a)] @c1.order(:artists__blah2, :artists__blah3).eager_graph(:tag).sql.must_equal 'SELECT artists.id, tag.id AS tag_id FROM artists LEFT OUTER JOIN albums_artists ON (albums_artists.artist_id = artists.id) LEFT OUTER JOIN albums ON (albums.id = albums_artists.album_id) LEFT OUTER JOIN albums_tags ON (albums_tags.album_id = albums.id) LEFT OUTER JOIN tags AS tag ON (tag.id = albums_tags.tag_id) ORDER BY artists.blah2, artists.blah3, 
tag.blah__id, tag.blah__id DESC, blah.id DESC, blah.id, tag.album_id, tag.album_id DESC, 1, RANDOM(), b.a' end end describe "Sequel::Model.finalize_associations" do before do class ::Item < Sequel::Model plugin :many_through_many many_through_many :items, [[:foos, :item1_id, :foo1_id], [:bars, :foo2_id, :item2_id]] one_through_many :item, [[:foos, :item1_id, :foo1_id], [:bars, :foo2_id, :item2_id]] finalize_associations end end after do Object.send(:remove_const, :Item) end it "should finalize one_through_many associations" do r = Item.association_reflection(:item) r[:class].must_equal Item r[:_dataset].sql.must_equal "SELECT items.* FROM items INNER JOIN bars ON (bars.item2_id = items.id) INNER JOIN foos ON (foos.foo1_id = bars.foo2_id) LIMIT 1" r[:associated_eager_dataset].sql.must_equal "SELECT items.* FROM items INNER JOIN bars ON (bars.item2_id = items.id) INNER JOIN foos ON (foos.foo1_id = bars.foo2_id)" r[:filter_by_associations_conditions_dataset].sql.must_equal "SELECT foos.item1_id FROM items INNER JOIN bars ON (bars.item2_id = items.id) INNER JOIN foos ON (foos.foo1_id = bars.foo2_id) WHERE (foos.item1_id IS NOT NULL)" r[:placeholder_loader].wont_be_nil r[:predicate_key].must_equal Sequel.qualify(:foos, :item1_id) r[:associated_key_table].must_equal :foos r[:edges].must_equal [{:table=>:foos, :left=>:id, :right=>:item1_id, :conditions=>[], :join_type=>:left_outer, :block=>nil}, {:table=>:bars, :left=>:foo1_id, :right=>:foo2_id, :conditions=>[], :join_type=>:left_outer, :block=>nil}] r[:final_edge].must_equal(:table=>:items, :left=>:item2_id, :right=>:id, :conditions=>nil, :join_type=>nil, :block=>nil) r[:final_reverse_edge].must_equal(:table=>:foos, :left=>:foo1_id, :right=>:foo2_id, :alias=>:foos) r[:reverse_edges].must_equal [{:table=>:bars, :left=>:item2_id, :right=>:id, :alias=>:bars}] end it "should finalize many_through_many associations" do r = Item.association_reflection(:items) r[:class].must_equal Item r[:_dataset].sql.must_equal "SELECT items.* FROM items INNER JOIN bars ON (bars.item2_id = items.id) INNER JOIN foos ON (foos.foo1_id = bars.foo2_id)" r[:associated_eager_dataset].sql.must_equal "SELECT items.* FROM items INNER JOIN bars ON (bars.item2_id = items.id) INNER JOIN foos ON (foos.foo1_id = bars.foo2_id)" r[:filter_by_associations_conditions_dataset].sql.must_equal "SELECT foos.item1_id FROM items INNER JOIN bars ON (bars.item2_id = items.id) INNER JOIN foos ON (foos.foo1_id = bars.foo2_id) WHERE (foos.item1_id IS NOT NULL)" r[:placeholder_loader].wont_be_nil r[:predicate_key].must_equal Sequel.qualify(:foos, :item1_id) r[:associated_key_table].must_equal :foos r[:edges].must_equal [{:table=>:foos, :left=>:id, :right=>:item1_id, :conditions=>[], :join_type=>:left_outer, :block=>nil}, {:table=>:bars, :left=>:foo1_id, :right=>:foo2_id, :conditions=>[], :join_type=>:left_outer, :block=>nil}] r[:final_edge].must_equal(:table=>:items, :left=>:item2_id, :right=>:id, :conditions=>nil, :join_type=>nil, :block=>nil) r[:final_reverse_edge].must_equal(:table=>:foos, :left=>:foo1_id, :right=>:foo2_id, :alias=>:foos) r[:reverse_edges].must_equal [{:table=>:bars, :left=>:item2_id, :right=>:id, :alias=>:bars}] end end describe "many_through_many/one_through_many associations with :db option" do before do @db1, @db2, @db3, @db4 = @dbs = 4.times.map{Sequel.mock(:fetch=>{:id => 1, :x => 1}, :numrows=>1, :autoid=>proc{|sql| 10})} @c1 = Class.new(Sequel::Model(@db1[:attributes])) do unrestrict_primary_key attr_accessor :yyy def self.name; 'Attribute'; end def self.to_s; 
'Attribute'; end columns :id, :y, :z end @c2 = Class.new(Sequel::Model(@db2[:nodes])) do plugin :many_through_many unrestrict_primary_key attr_accessor :xxx def self.name; 'Node'; end def self.to_s; 'Node'; end columns :id, :x end @db3.fetch = {:foo_id=>444} @db4.fetch = {:attribute_id=>555} @db1.fetch = {:id=>555} @through = [{:table=>:foo_nodes, :left=>:node_id, :right=>:foo_id, :db=>@db3}, {:table=>:bar_attributes, :left=>:bar_id, :right=>:attribute_id, :db=>@db4}] @composite_through = [@through[0].merge(:left=>[:l1, :l2], :right=>[:ml1, :ml2]), @through[1].merge(:left=>[:mr1, :mr2], :right=>[:r1, :r2])] sqls end def sqls @dbs.map(&:sqls) end it "should support dataset method" do @c2.many_through_many :attributes, @through, :class => @c1 @c2.new(:id => 1234).attributes_dataset.sql.must_equal "SELECT attributes.* FROM attributes WHERE (id IN (555))" sqls.must_equal [[], [], ["SELECT foo_id FROM foo_nodes WHERE (node_id = 1234)"], ["SELECT attribute_id FROM bar_attributes WHERE (bar_id IN (444))"]] end it "should support association method" do @c2.many_through_many :attributes, @through, :class => @c1 @c2.new(:id => 1234).attributes.must_equal [@c1.load(:id=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555))"], [], ["SELECT foo_id FROM foo_nodes WHERE (node_id = 1234)"], ["SELECT attribute_id FROM bar_attributes WHERE (bar_id IN (444))"]] @c2.one_through_many :attribute, @through, :class => @c1 @db1.fetch = [{:id=>555, :x=>1}, {:id=>555, :x=>2}] @c2.new(:id => 1234).attribute.must_equal @c1.load(:id=>555, :x=>1) sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555)) LIMIT 1"], [], ["SELECT foo_id FROM foo_nodes WHERE (node_id = 1234)"], ["SELECT attribute_id FROM bar_attributes WHERE (bar_id IN (444))"]] end it "should support an existing selection on the dataset" do @c1.dataset = @c1.dataset.select(Sequel.qualify(:attributes, :id), Sequel.qualify(:attributes, :b)) @db1.sqls @c2.many_through_many :attributes, @through, :class => @c1 @db1.fetch = {:id=>555, :b=>10} @c2.new(:id => 1234).attributes.must_equal [@c1.load(:id=>555, :b=>10)] sqls.must_equal [["SELECT attributes.id, attributes.b FROM attributes WHERE (id IN (555))"], [], ["SELECT foo_id FROM foo_nodes WHERE (node_id = 1234)"], ["SELECT attribute_id FROM bar_attributes WHERE (bar_id IN (444))"]] end it "should support a conditions option" do @c2.many_through_many :attributes, @through, :class => @c1, :conditions => {:a=>32} @c2.new(:id => 1234).attributes.must_equal [@c1.load(:id=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE ((a = 32) AND (id IN (555)))"], [], ["SELECT foo_id FROM foo_nodes WHERE (node_id = 1234)"], ["SELECT attribute_id FROM bar_attributes WHERE (bar_id IN (444))"]] end it "should support an order option" do @c2.many_through_many :attributes, @through, :class => @c1, :order=>:blah @c2.new(:id => 1234).attributes.must_equal [@c1.load(:id=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555)) ORDER BY blah"], [], ["SELECT foo_id FROM foo_nodes WHERE (node_id = 1234)"], ["SELECT attribute_id FROM bar_attributes WHERE (bar_id IN (444))"]] end it "should support a conditions option in join table" do @through[0][:conditions] = {:b=>3} @through[1][:conditions] = {:c=>4} @c2.many_through_many :attributes, @through, :class => @c1 @c2.new(:id => 1234).attributes.must_equal [@c1.load(:id=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555))"], [], ["SELECT foo_id FROM foo_nodes WHERE ((node_id = 
1234) AND (b = 3))"], ["SELECT attribute_id FROM bar_attributes WHERE ((bar_id IN (444)) AND (c = 4))"]] end it "should support :left_primary_key and :right_primary_key options" do @c2.many_through_many :attributes, @through, :class => @c1, :left_primary_key=>:xxx, :right_primary_key=>:yyy @db3.fetch = {:foo_id=>7} @db4.fetch = {:attribute_id=>555} @db1.fetch = {:id=>14, :yyy=>555} @c2.new(:id => 1234, :xxx=>5).attributes.must_equal [@c1.load(:id=>14, :yyy=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (yyy IN (555))"], [], ["SELECT foo_id FROM foo_nodes WHERE (node_id = 5)"], ["SELECT attribute_id FROM bar_attributes WHERE (bar_id IN (7))"]] end it "should support composite keys" do @c2.many_through_many :attributes, @composite_through, :class => @c1, :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] @db3.fetch = {:ml1=>17, :ml2=>18} @db4.fetch = {:r1=>14, :r2=>555} @db1.fetch = {:id=>14, :y=>555} @c2.new(:id => 1234, :x=>5).attributes.must_equal [@c1.load(:id=>14, :y=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE ((id, y) IN ((14, 555)))"], [], ["SELECT ml1, ml2 FROM foo_nodes WHERE ((l1, l2) IN ((1234, 5)))"], ["SELECT r1, r2 FROM bar_attributes WHERE ((mr1, mr2) IN ((17, 18)))"]] end it "should handle case where join table query does not produce any rows" do @c2.many_through_many :attributes, @composite_through, :class => @c1, :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] @db3.fetch = [] @db1.fetch = [] @c2.load(:id => 1234, :x=>5).attributes.must_equal [] sqls.must_equal [[], [], ["SELECT ml1, ml2 FROM foo_nodes WHERE ((l1, l2) IN ((1234, 5)))"], []] end it "should handle case where join table query returns a NULL value" do @db1.fetch = [] @c2.many_through_many :attributes, @through, :class => @c1 @db4.fetch = {:attribute_id=>nil} @c2.new(:id => 1234).attributes.must_equal [] sqls.must_equal [[], [], ["SELECT foo_id FROM foo_nodes WHERE (node_id = 1234)"], ["SELECT attribute_id FROM bar_attributes WHERE (bar_id IN (444))"]] @db3.fetch = {:foo_id=>nil} @c2.new(:id => 1234).attributes.must_equal [] sqls.must_equal [[], [], ["SELECT foo_id FROM foo_nodes WHERE (node_id = 1234)"], []] @c2.many_through_many :attributes, @composite_through, :class => @c1, :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] @db3.fetch = {:ml1=>17, :ml2=>18} @db4.fetch = [{:r1=>14, :r2=>nil}, {:r1=>nil, :r2=>555}, {:r1=>nil, :r2=>nil}] @c2.load(:id => 1234, :x=>5).attributes.must_equal [] sqls.must_equal [[], [], ["SELECT ml1, ml2 FROM foo_nodes WHERE ((l1, l2) IN ((1234, 5)))"], ["SELECT r1, r2 FROM bar_attributes WHERE ((mr1, mr2) IN ((17, 18)))"]] @db3.fetch = [{:ml1=>17, :ml2=>nil}, {:ml1=>17, :ml2=>nil}, {:ml1=>nil, :ml2=>nil}] @c2.load(:id => 1234, :x=>5).attributes.must_equal [] sqls.must_equal [[], [], ["SELECT ml1, ml2 FROM foo_nodes WHERE ((l1, l2) IN ((1234, 5)))"], []] end it "should support a select option" do @c2.many_through_many :attributes, @through, :class => @c1, :select => :blah @db1.fetch = {:blah=>19} @c2.load(:id => 1234, :x=>5).attributes.must_equal [@c1.load(:blah=>19)] sqls.must_equal [["SELECT blah FROM attributes WHERE (id IN (555))"], [], ["SELECT foo_id FROM foo_nodes WHERE (node_id = 1234)"], ["SELECT attribute_id FROM bar_attributes WHERE (bar_id IN (444))"]] end it "should accept a block" do @c2.many_through_many :attributes, @through, :class => @c1 do |ds| ds.filter(:xxx => @xxx) end n = @c2.new(:id => 1234) n.xxx = 444 n.attributes.must_equal [@c1.load(:id=>555)] sqls.must_equal [["SELECT attributes.* 
FROM attributes WHERE ((id IN (555)) AND (xxx = 444))"], [], ["SELECT foo_id FROM foo_nodes WHERE (node_id = 1234)"], ["SELECT attribute_id FROM bar_attributes WHERE (bar_id IN (444))"]] end it "should handle an aliased join table" do @through[0][:table] = Sequel[:foo_nodes].as(:fn) @through[1][:table] = Sequel[:bar_attributes].as(:ba) @c2.many_through_many :attributes, @through, :class => @c1 n = @c2.load(:id => 1234) n.attributes.must_equal [@c1.load(:id=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555))"], [], ["SELECT foo_id FROM foo_nodes AS fn WHERE (node_id = 1234)"], ["SELECT attribute_id FROM bar_attributes AS ba WHERE (bar_id IN (444))"]] end it "should support eager loading" do @db2.fetch = [{:id=>1234}, {:id=>33}] @db3.fetch = {:foo_id=>444, :node_id=>1234} @db4.fetch = {:attribute_id=>555, :bar_id=>444} @c2.many_through_many :attributes, @through, :class => @c1 a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attributes].must_equal [@c1.load(:id=>555)] a[1].associations[:attributes].must_equal [] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555))"], ["SELECT * FROM nodes"], ["SELECT foo_id, node_id FROM foo_nodes WHERE (node_id IN (1234, 33))"], ["SELECT attribute_id, bar_id FROM bar_attributes WHERE (bar_id IN (444))"]] @c2.one_through_many :attribute, :clone=>:attributes @db1.fetch = [{:id=>555, :x=>1}, {:id=>555, :x=>2}] a = @c2.eager(:attribute).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attribute].must_equal @c1.load(:id=>555, :x=>1) a[1].associations[:attribute].must_be_nil sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555))"], ["SELECT * FROM nodes"], ["SELECT foo_id, node_id FROM foo_nodes WHERE (node_id IN (1234, 33))"], ["SELECT attribute_id, bar_id FROM bar_attributes WHERE (bar_id IN (444))"]] end it "should support eager loading with a conditions option" do @db2.fetch = [{:id=>1234}, {:id=>33}] @db3.fetch = {:foo_id=>444, :node_id=>1234} @db4.fetch = {:attribute_id=>555, :bar_id=>444} @through[0][:conditions] = {:x=>7} @through[1][:conditions] = {:y=>8} @c2.many_through_many :attributes, @through, :class => @c1, :conditions=>{:z=>9} a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attributes].must_equal [@c1.load(:id=>555)] a[1].associations[:attributes].must_equal [] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE ((z = 9) AND (id IN (555)))"], ["SELECT * FROM nodes"], ["SELECT foo_id, node_id FROM foo_nodes WHERE ((x = 7) AND (node_id IN (1234, 33)))"], ["SELECT attribute_id, bar_id FROM bar_attributes WHERE ((y = 8) AND (bar_id IN (444)))"]] @c2.one_through_many :attribute, :clone=>:attributes @db1.fetch = [{:id=>555, :x=>1}, {:id=>555, :x=>2}] a = @c2.eager(:attribute).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attribute].must_equal @c1.load(:id=>555, :x=>1) a[1].associations[:attribute].must_be_nil sqls.must_equal [["SELECT attributes.* FROM attributes WHERE ((z = 9) AND (id IN (555)))"], ["SELECT * FROM nodes"], ["SELECT foo_id, node_id FROM foo_nodes WHERE ((x = 7) AND (node_id IN (1234, 33)))"], ["SELECT attribute_id, bar_id FROM bar_attributes WHERE ((y = 8) AND (bar_id IN (444)))"]] end it "should skip loading associated table when the join table has no results" do @db2.fetch = [{:id=>1234}, {:id=>33}] @db3.fetch = {:foo_id=>444, :node_id=>1234} @db4.fetch = {} @c2.many_through_many 
:attributes, @through, :class => @c1 a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attributes].must_equal [] a[1].associations[:attributes].must_equal [] sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT foo_id, node_id FROM foo_nodes WHERE (node_id IN (1234, 33))"], ["SELECT attribute_id, bar_id FROM bar_attributes WHERE (bar_id IN (444))"]] @c2.one_through_many :attribute, :clone=>:attributes a = @c2.eager(:attribute).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attribute].must_be_nil a[1].associations[:attribute].must_be_nil sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT foo_id, node_id FROM foo_nodes WHERE (node_id IN (1234, 33))"], ["SELECT attribute_id, bar_id FROM bar_attributes WHERE (bar_id IN (444))"]] @db3.fetch = {} a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attributes].must_equal [] a[1].associations[:attributes].must_equal [] sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT foo_id, node_id FROM foo_nodes WHERE (node_id IN (1234, 33))"], []] end it "should support eager loading when the join table includes NULL values" do @db2.fetch = [{:id=>1234}, {:id=>33}] @db3.fetch = {:foo_id=>444, :node_id=>1234} @db4.fetch = {:attribute_id=>nil, :bar_id=>444} @c2.many_through_many :attributes, @through, :class => @c1 a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attributes].must_equal [] a[1].associations[:attributes].must_equal [] sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT foo_id, node_id FROM foo_nodes WHERE (node_id IN (1234, 33))"], ["SELECT attribute_id, bar_id FROM bar_attributes WHERE (bar_id IN (444))"]] @c2.one_through_many :attribute, :clone=>:attributes a = @c2.eager(:attribute).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attribute].must_be_nil a[1].associations[:attribute].must_be_nil sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT foo_id, node_id FROM foo_nodes WHERE (node_id IN (1234, 33))"], ["SELECT attribute_id, bar_id FROM bar_attributes WHERE (bar_id IN (444))"]] @db3.fetch = {:foo_id=>nil, :node_id=>1234} a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attributes].must_equal [] a[1].associations[:attributes].must_equal [] sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT foo_id, node_id FROM foo_nodes WHERE (node_id IN (1234, 33))"], []] end it "should support eager loading when using composite keys" do @db1.fetch = {:id=>14, :y=>555} @db2.fetch = [{:id=>1234, :x=>333}, {:id=>33, :x=>4}] @db3.fetch = {:ml1=>17, :ml2=>18, :l1=>1234, :l2=>333} @db4.fetch = {:r1=>14, :r2=>555, :mr1=>17, :mr2=>18} @c2.many_through_many :attributes, @composite_through, :class => @c1, :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234, :x=>333), @c2.load(:id=>33, :x=>4)] a[0].associations[:attributes].must_equal [@c1.load(:id=>14, :y=>555)] a[1].associations[:attributes].must_equal [] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE ((id, y) IN ((14, 555)))"], ["SELECT * FROM nodes"], ["SELECT ml1, ml2, l1, l2 FROM foo_nodes WHERE ((l1, l2) IN ((1234, 333), (33, 4)))"], ["SELECT r1, r2, mr1, mr2 FROM bar_attributes WHERE ((mr1, mr2) IN ((17, 18)))"]] @c2.one_through_many :attribute, :clone=>:attributes @db1.fetch = [{:id=>14, :y=>555, :z=>2}, {:id=>14, :y=>555, :z=>3}] a 
= @c2.eager(:attribute).all a.must_equal [@c2.load(:id=>1234, :x=>333), @c2.load(:id=>33, :x=>4)] a[0].associations[:attribute].must_equal @c1.load(:id=>14, :y=>555, :z=>2) a[1].associations[:attribute].must_be_nil sqls.must_equal [["SELECT attributes.* FROM attributes WHERE ((id, y) IN ((14, 555)))"], ["SELECT * FROM nodes"], ["SELECT ml1, ml2, l1, l2 FROM foo_nodes WHERE ((l1, l2) IN ((1234, 333), (33, 4)))"], ["SELECT r1, r2, mr1, mr2 FROM bar_attributes WHERE ((mr1, mr2) IN ((17, 18)))"]] end it "should support eager loading when using composite keys when the join table includes NULL values" do @db1.fetch = {:id=>14, :y=>555} @db2.fetch = [{:id=>1234, :x=>333}, {:id=>33, :x=>4}] @db3.fetch = {:ml1=>17, :ml2=>18, :l1=>1234, :l2=>333} @db4.fetch = [{:r1=>nil, :r2=>555, :mr1=>17, :mr2=>18}, {:r1=>14, :r2=>nil, :mr1=>17, :mr2=>18}, {:r1=>nil, :r2=>nil, :mr1=>17, :mr2=>18}] @c2.many_through_many :attributes, @composite_through, :class => @c1, :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234, :x=>333), @c2.load(:id=>33, :x=>4)] a[0].associations[:attributes].must_equal [] a[1].associations[:attributes].must_equal [] sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT ml1, ml2, l1, l2 FROM foo_nodes WHERE ((l1, l2) IN ((1234, 333), (33, 4)))"], ["SELECT r1, r2, mr1, mr2 FROM bar_attributes WHERE ((mr1, mr2) IN ((17, 18)))"]] @c2.one_through_many :attribute, :clone=>:attributes a = @c2.eager(:attribute).all a.must_equal [@c2.load(:id=>1234, :x=>333), @c2.load(:id=>33, :x=>4)] a[0].associations[:attribute].must_be_nil a[1].associations[:attribute].must_be_nil sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT ml1, ml2, l1, l2 FROM foo_nodes WHERE ((l1, l2) IN ((1234, 333), (33, 4)))"], ["SELECT r1, r2, mr1, mr2 FROM bar_attributes WHERE ((mr1, mr2) IN ((17, 18)))"]] @db3.fetch = [{:ml1=>nil, :ml2=>18, :l1=>1234, :l2=>333}, {:ml1=>17, :ml2=>nil, :l1=>1234, :l2=>333}, {:ml1=>nil, :ml2=>nil, :l1=>1234, :l2=>333}] @c2.many_through_many :attributes, @composite_through, :class => @c1, :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234, :x=>333), @c2.load(:id=>33, :x=>4)] a[0].associations[:attributes].must_equal [] a[1].associations[:attributes].must_equal [] sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT ml1, ml2, l1, l2 FROM foo_nodes WHERE ((l1, l2) IN ((1234, 333), (33, 4)))"], []] end end 
sequel-5.63.0/spec/extensions/migration_spec.rb000066400000000000000000000125360143421412060021570 0ustar00rootroot00000000000000require_relative "spec_helper" Sequel.extension :migration describe "Migration.descendants" do before do Sequel::Migration.descendants.clear end it "should include Migration subclasses" do @class = Class.new(Sequel::Migration) Sequel::Migration.descendants.must_equal [@class] end it "should include Migration 
subclasses in order of creation" do
    @c1 = Class.new(Sequel::Migration)
    @c2 = Class.new(Sequel::Migration)
    @c3 = Class.new(Sequel::Migration)
    Sequel::Migration.descendants.must_equal [@c1, @c2, @c3]
  end

  it "should include SimpleMigration instances created by migration DSL" do
    i1 = Sequel.migration{}
    i2 = Sequel.migration{}
    i3 = Sequel.migration{}
    Sequel::Migration.descendants.must_equal [i1, i2, i3]
  end
end

describe "Migration.apply" do
  before do
    @c = Class.new do
      define_method(:one) {|x| [1111, x]}
      define_method(:two) {|x| [2222, x]}
    end
    @db = @c.new
  end

  it "should raise for an invalid direction" do
    proc {Sequel::Migration.apply(@db, :hahaha)}.must_raise(ArgumentError)
  end

  it "should apply the up and down directions correctly" do
    m = Class.new(Sequel::Migration) do
      define_method(:up) {one(3333)}
      define_method(:down) {two(4444)}
    end
    m.apply(@db, :up).must_equal [1111, 3333]
    m.apply(@db, :down).must_equal [2222, 4444]
  end

  it "should have default up and down actions that do nothing" do
    m = Class.new(Sequel::Migration)
    m.apply(@db, :up).must_be_nil
    m.apply(@db, :down).must_be_nil
  end

  it "should respond to the methods the database responds to" do
    m = Sequel::Migration.new(Sequel.mock)
    m.respond_to?(:foo).must_equal false
    m.respond_to?(:execute).must_equal true
  end

  if RUBY_VERSION >= '2.7'
    it "should handle keywords when delegating" do
      eval 'def @db.foo(name: (raise)) name end'
      Sequel::Migration.new(@db).foo(:name=>1).must_equal 1
    end
  end
end

describe "SimpleMigration#apply" do
  before do
    @c = Class.new do
      define_method(:one) {|x| [1111, x]}
      define_method(:two) {|x| [2222, x]}
    end
    @db = @c.new
  end

  it "should raise for an invalid direction" do
    proc {Sequel.migration{}.apply(@db, :hahaha)}.must_raise(ArgumentError)
  end

  it "should apply the up and down directions correctly" do
    m = Sequel.migration do
      up{one(3333)}
      down{two(4444)}
    end
    m.apply(@db, :up).must_equal [1111, 3333]
    m.apply(@db, :down).must_equal [2222, 4444]
  end

  it "should have default up and down actions that do nothing" do
    m = Sequel.migration{}
    m.apply(@db, :up).must_be_nil
    m.apply(@db, :down).must_be_nil
  end
end

describe "Reversible Migrations with Sequel.migration{change{}}" do
  before do
    @c = Class.new do
      self::AT = Class.new do
        attr_reader :actions
        def initialize(&block)
          @actions = []
          instance_eval(&block)
        end
        def method_missing(*args)
          @actions << args
        end
        self
      end
      attr_reader :actions
      def initialize
        @actions = []
      end
      def method_missing(*args)
        @actions << args
      end
      def alter_table(*args, &block)
        @actions << [:alter_table, self.class::AT.new(&block).actions]
      end
    end
    @db = @c.new
    @p = proc do
      create_table(:a, :foo=>:bar){Integer :a}
      add_column :a, :b, String
      add_index :a, :b
      rename_column :a, :b, :c
      rename_table :a, :b
      alter_table(:b) do
        add_column :d, String
        add_constraint :blah, 'd IS NOT NULL'
        add_constraint({:name=>:merp}, 'a > 1')
        add_foreign_key :e, :b
        add_foreign_key [:e], :b, :name=>'e_fk'
        add_foreign_key [:e, :a], :b
        add_primary_key :f, :b
        add_index :e, :name=>'e_n'
        add_full_text_index :e, :name=>'e_ft'
        add_spatial_index :e, :name=>'e_s'
        rename_column :e, :g
      end
      create_view(:c, 'SELECT * FROM b', :foo=>:bar)
      create_join_table(:cat_id=>:cats, :dog_id=>:dogs)
    end
  end

  it "should apply up with normal actions in normal order" do
    p = @p
    Sequel.migration{change(&p)}.apply(@db, :up)
    @db.actions.must_equal [
      [:create_table, :a, {:foo=>:bar}],
      [:add_column, :a, :b, String],
      [:add_index, :a, :b],
      [:rename_column, :a, :b, :c],
      [:rename_table, :a, :b],
      [:alter_table, [
        [:add_column, :d, String],
        [:add_constraint, :blah, "d IS NOT NULL"],
        [:add_constraint, {:name=>:merp}, "a > 1"],
        [:add_foreign_key, :e, :b],
        [:add_foreign_key, [:e], :b, {:name=>"e_fk"}],
        [:add_foreign_key, [:e, :a], :b],
        [:add_primary_key, :f, :b],
        [:add_index, :e, {:name=>"e_n"}],
        [:add_full_text_index, :e, {:name=>"e_ft"}],
        [:add_spatial_index, :e, {:name=>"e_s"}],
        [:rename_column, :e, :g]]
      ],
      [:create_view, :c, "SELECT * FROM b", {:foo=>:bar}],
      [:create_join_table, {:cat_id=>:cats, :dog_id=>:dogs}]]
  end

  it "should execute down with reversing actions in reverse order" do
    p = @p
    Sequel.migration{change(&p)}.apply(@db, :down)
    @db.actions.must_equal [
      [:drop_join_table, {:cat_id=>:cats, :dog_id=>:dogs}],
      [:drop_view, :c, {:foo=>:bar}],
      [:alter_table, [
        [:rename_column, :g, :e],
        [:drop_index, :e, {:name=>"e_s"}],
        [:drop_index, :e, {:name=>"e_ft"}],
        [:drop_index, :e, {:name=>"e_n"}],
        [:drop_column, :f],
        [:drop_foreign_key, [:e, :a]],
        [:drop_foreign_key, [:e], {:name=>"e_fk"}],
        [:drop_foreign_key, :e],
        [:drop_constraint, :merp],
        [:drop_constraint, :blah],
        [:drop_column, :d]]
      ],
      [:rename_table, :b, :a],
      [:rename_column, :a, :c, :b],
      [:drop_index, :a, :b],
      [:drop_column, :a, :b],
      [:drop_table, :a, {:foo=>:bar}]]
  end

  it "should reverse add_foreign_key with :type option" do
    Sequel.migration{change{alter_table(:t){add_foreign_key :b, :c, :type=>:f}}}.apply(@db, :down)
    actions = @db.actions
    actions.must_equal [[:alter_table, [[:drop_foreign_key, :b, {:type=>:f}]]]]
    @db.sqls
    db = Sequel.mock
    args = nil
    db.define_singleton_method(:foreign_key_list){|*a| args = a; [{:name=>:fbc, :columns=>[:b]}]}
    db.alter_table(:t){send(*actions[0][1][0])}
    db.sqls.must_equal ["ALTER TABLE t DROP CONSTRAINT fbc", "ALTER TABLE t DROP COLUMN b"]
    args.must_equal [:t]
  end

  it "should reverse add_foreign_key with :foreign_key_constraint_name option" do
    Sequel.migration{change{alter_table(:t){add_foreign_key :b, :c, :foreign_key_constraint_name=>:f}}}.apply(@db, :down)
    actions = @db.actions
    actions.must_equal [[:alter_table, [[:drop_foreign_key, :b, {:foreign_key_constraint_name=>:f}]]]]
    @db.sqls
    db = Sequel.mock
    db.alter_table(:t){send(*actions[0][1][0])}
    db.sqls.must_equal ["ALTER TABLE t DROP CONSTRAINT f", "ALTER TABLE t DROP COLUMN b"]
  end

  it "should raise in the down direction if migration uses unsupported method" do
    m = Sequel.migration{change{run 'SQL'}}
    m.apply(@db, :up)
    proc{m.apply(@db, :down)}.must_raise(Sequel::Error)
  end

  it "should raise in the down direction if migration uses add_primary_key with an array" do
    m = Sequel.migration{change{alter_table(:a){add_primary_key [:b]}}}
    m.apply(@db, :up)
    proc{m.apply(@db, :down)}.must_raise(Sequel::Error)
  end

  it "should raise in the down direction if migration uses add_foreign_key with an array" do
    m = Sequel.migration{change{alter_table(:a){add_foreign_key [:b]}}}
    m.apply(@db, :up)
    proc{m.apply(@db, :down)}.must_raise(Sequel::Error)
  end

  it "should raise in the down direction with the name of the source file if migration is irreversible" do
    m = Sequel.migration{change{alter_table(:a){add_foreign_key [:b]}}}
    m.apply(@db, :up)
    error = proc{m.apply(@db, :down)}.must_raise(Sequel::Error)
    error.message.must_match(/irreversible migration method used in .*spec\/extensions\/migration_spec.rb/)
  end
end

describe "Sequel::Migrator.migrator_class" do
  it "should return IntegerMigrator if not using timestamp migrations" do
    Sequel::Migrator.migrator_class("spec/files/integer_migrations").must_equal Sequel::IntegerMigrator
  end

  it "should return TimestampMigrator if using timestamp migrations" do
    Sequel::Migrator.migrator_class('spec/files/timestamped_migrations').must_equal Sequel::TimestampMigrator
  end

  it "should return self if run on a subclass" do
    Sequel::IntegerMigrator.migrator_class("spec/files/timestamped_migrations").must_equal Sequel::IntegerMigrator
    Sequel::TimestampMigrator.migrator_class("spec/files/integer_migrations").must_equal Sequel::TimestampMigrator
  end

  it "should raise an error if the migration folder does not exist" do
    proc{Sequel::Migrator.apply(@db, "spec/files/nonexistant_migration_path")}.must_raise(Sequel::Migrator::Error)
  end
end
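# The IntegerMigrator specs below run against a mock database that records
# schema changes instead of executing them: created tables named sm<version>
# are parsed back into integer versions (see #creates and #drop_table), and
# the extended dataset keeps the schema_info version row in memory.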
version FROM schema_info LIMIT 1" Sequel::Migrator.run(@db, "spec/files/missing_integer_migrations", :allow_missing_migration_files => true, :target=>0) @db.sqls.last.must_equal "UPDATE schema_info SET version = 0" end it "should raise an error if there is a duplicate integer migration version" do proc{Sequel::Migrator.apply(@db, "spec/files/duplicate_integer_migrations")}.must_raise(Sequel::Migrator::Error) end it "should raise an error if there is an empty migration file" do proc{Sequel::Migrator.apply(@db, "spec/files/empty_migration")}.must_raise(Sequel::Migrator::Error) end it "should raise an error if there is a migration file with multiple migrations" do proc{Sequel::Migrator.apply(@db, "spec/files/double_migration")}.must_raise(Sequel::Migrator::Error) end it "should raise an error if the most recent migration can't be detected" do # Have to specify a target version, otherwise an earlier check (inability # to detect the target) would raise an error, falsely matching the check. proc{Sequel::Migrator.apply(@db, "spec/files/empty_migration_folder", 2)}.must_raise(Sequel::Migrator::Error) end it "should add a column name if it doesn't already exist in the schema_info table" do @db.create_table(:schema_info){Integer :v} def @db.alter_table(*); end Sequel::Migrator.apply(@db, @dirname) end it "should automatically create the schema_info table with the version column" do @db.table_exists?(:schema_info).must_equal false Sequel::Migrator.run(@db, @dirname, :target=>0) @db.table_exists?(:schema_info).must_equal true @db.dataset.columns.must_equal [:version] end it "should allow specifying the table and columns" do @db.table_exists?(:si).must_equal false Sequel::Migrator.run(@db, @dirname, :target=>0, :table=>:si, :column=>:sic) @db.table_exists?(:si).must_equal true @db.dataset.columns.must_equal [:sic] end it "should allow specifying a qualified table" do @db.table_exists?(:si).must_equal false Sequel::Migrator.run(@db, @dirname, :target=>0, :table=>Sequel[:sch]["si"], :column=>:sic) @db.table_exists?(Sequel[:sch]["si"]).must_equal true @db.dataset.columns.must_equal [:sic] end it "should raise error if there is more than 1 row in the migrator table" do @db.table_exists?(:si).must_equal false proc{Sequel::Migrator.run(@db, @dirname, :target=>0, :table=>:count2)}.must_raise(Sequel::Migrator::Error) end it "should support :relative option for running relative migrations" do Sequel::Migrator.run(@db, @dirname, :relative=>2).must_equal 2 @db.creates.must_equal [1111, 2222] @db.version.must_equal 2 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? $1.to_i : nil}.compact.must_equal [1, 2] Sequel::Migrator.run(@db, @dirname, :relative=>-1).must_equal 1 @db.drops.must_equal [2222] @db.version.must_equal 1 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? $1.to_i : nil}.compact.must_equal [1] Sequel::Migrator.run(@db, @dirname, :relative=>2).must_equal 3 @db.creates.must_equal [1111, 2222, 2222, 3333] @db.version.must_equal 3 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? $1.to_i : nil}.compact.must_equal [2, 3] Sequel::Migrator.run(@db, @dirname, :relative=>-3).must_equal 0 @db.drops.must_equal [2222, 3333, 2222, 1111] @db.version.must_equal 0 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? $1.to_i : nil}.compact.must_equal [2, 1, 0] end it "should handle :relative option beyond the upper and lower limit" do Sequel::Migrator.run(@db, @dirname, :relative=>100).must_equal 3 @db.creates.must_equal [1111, 2222, 3333] @db.version.must_equal 3 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? 
$1.to_i : nil}.compact.must_equal [1, 2, 3] Sequel::Migrator.run(@db, @dirname, :relative=>-200).must_equal 0 @db.drops.must_equal [3333, 2222, 1111] @db.version.must_equal 0 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? $1.to_i : nil}.compact.must_equal [2, 1, 0] end it "should correctly handle migration target versions beyond the upper and lower limits" do Sequel::Migrator.run(@db, @dirname, :target=>100).must_equal 3 @db.creates.must_equal [1111, 2222, 3333] @db.version.must_equal 3 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? $1.to_i : nil}.compact.must_equal [1, 2, 3] Sequel::Migrator.run(@db, @dirname, :target=>-100).must_equal 0 @db.drops.must_equal [3333, 2222, 1111] @db.version.must_equal 0 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? $1.to_i : nil}.compact.must_equal [2, 1, 0] end it "should apply migrations correctly in the up direction if no target is given" do Sequel::Migrator.apply(@db, @dirname) @db.creates.must_equal [1111, 2222, 3333] @db.version.must_equal 3 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? $1.to_i : nil}.compact.must_equal [1, 2, 3] end it "should be able to tell whether there are outstanding migrations" do Sequel::Migrator.is_current?(@db, @dirname).must_equal false Sequel::Migrator.apply(@db, @dirname) Sequel::Migrator.is_current?(@db, @dirname).must_equal true end it "should have #check_current raise an exception if the migrator is not current" do proc{Sequel::Migrator.check_current(@db, @dirname)}.must_raise(Sequel::Migrator::NotCurrentError) Sequel::Migrator.apply(@db, @dirname) Sequel::Migrator.check_current(@db, @dirname) end it "should apply migrations correctly in the up direction with target" do Sequel::Migrator.apply(@db, @dirname, 2) @db.creates.must_equal [1111, 2222] @db.version.must_equal 2 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? $1.to_i : nil}.compact.must_equal [1, 2] end it "should apply migrations correctly in the up direction with target and existing" do Sequel::Migrator.apply(@db, @dirname, 2, 1) @db.creates.must_equal [2222] @db.version.must_equal 2 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? $1.to_i : nil}.compact.must_equal [2] end it "should apply migrations correctly in the down direction with target" do @db.create_table(:schema_info){Integer :version, :default=>0} @db[:schema_info].insert(:version=>3) @db.version.must_equal 3 Sequel::Migrator.apply(@db, @dirname, 0) @db.drops.must_equal [3333, 2222, 1111] @db.version.must_equal 0 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? $1.to_i : nil}.compact.must_equal [2, 1, 0] end it "should apply migrations correctly in the down direction with target and existing" do Sequel::Migrator.apply(@db, @dirname, 1, 2) @db.drops.must_equal [2222] @db.version.must_equal 1 @db.sqls.map{|x| x =~ /\AUPDATE.*(\d+)/ ? 
$1.to_i : nil}.compact.must_equal [1] end it "should return the target version" do Sequel::Migrator.apply(@db, @dirname, 3, 2).must_equal 3 Sequel::Migrator.apply(@db, @dirname, 0).must_equal 0 Sequel::Migrator.apply(@db, @dirname).must_equal 3 end it "should use IntegerMigrator if IntegerMigrator.apply called, even for timestamped migration directory" do proc{Sequel::IntegerMigrator.apply(@db, "spec/files/timestamped_migrations")}.must_raise(Sequel::Migrator::Error) end it "should not use transactions by default" do Sequel::Migrator.apply(@db, "spec/files/transaction_unspecified_migrations") @db.sqls.must_equal ["CREATE TABLE schema_info (version integer DEFAULT 0 NOT NULL)", "SELECT 1 AS one FROM schema_info LIMIT 1", "INSERT INTO schema_info (version) VALUES (0)", "SELECT version FROM schema_info LIMIT 1", "CREATE TABLE sm11111 (smc1 integer)", "UPDATE schema_info SET version = 1", "CREATE TABLE sm (smc1 integer)", "UPDATE schema_info SET version = 2"] end it "should use transactions by default if the database supports transactional ddl" do def @db.supports_transactional_ddl?; true end Sequel::Migrator.apply(@db, "spec/files/transaction_unspecified_migrations") @db.sqls.must_equal ["CREATE TABLE schema_info (version integer DEFAULT 0 NOT NULL)", "SELECT 1 AS one FROM schema_info LIMIT 1", "INSERT INTO schema_info (version) VALUES (0)", "SELECT version FROM schema_info LIMIT 1", "BEGIN", "CREATE TABLE sm11111 (smc1 integer)", "UPDATE schema_info SET version = 1", "COMMIT", "BEGIN", "CREATE TABLE sm (smc1 integer)", "UPDATE schema_info SET version = 2", "COMMIT"] end it "should respect transaction use on a per migration basis" do def @db.supports_transactional_ddl?; true end Sequel::Migrator.apply(@db, "spec/files/transaction_specified_migrations") @db.sqls.must_equal ["CREATE TABLE schema_info (version integer DEFAULT 0 NOT NULL)", "SELECT 1 AS one FROM schema_info LIMIT 1", "INSERT INTO schema_info (version) VALUES (0)", "SELECT version FROM schema_info LIMIT 1", "BEGIN", "CREATE TABLE sm11111 (smc1 integer)", "UPDATE schema_info SET version = 1", "COMMIT", "CREATE TABLE sm (smc1 integer)", "UPDATE schema_info SET version = 2"] end it "should force transactions if enabled in the migrator" do Sequel::Migrator.run(@db, "spec/files/transaction_specified_migrations", :use_transactions=>true) @db.sqls.must_equal ["CREATE TABLE schema_info (version integer DEFAULT 0 NOT NULL)", "SELECT 1 AS one FROM schema_info LIMIT 1", "INSERT INTO schema_info (version) VALUES (0)", "SELECT version FROM schema_info LIMIT 1", "BEGIN", "CREATE TABLE sm11111 (smc1 integer)", "UPDATE schema_info SET version = 1", "COMMIT", "BEGIN", "CREATE TABLE sm (smc1 integer)", "UPDATE schema_info SET version = 2", "COMMIT"] end it "should not use transactions if disabled in the migrator" do Sequel::Migrator.run(@db, "spec/files/transaction_unspecified_migrations", :use_transactions=>false) @db.sqls.must_equal ["CREATE TABLE schema_info (version integer DEFAULT 0 NOT NULL)", "SELECT 1 AS one FROM schema_info LIMIT 1", "INSERT INTO schema_info (version) VALUES (0)", "SELECT version FROM schema_info LIMIT 1", "CREATE TABLE sm11111 (smc1 integer)", "UPDATE schema_info SET version = 1", "CREATE TABLE sm (smc1 integer)", "UPDATE schema_info SET version = 2"] end end describe "Sequel::TimestampMigrator" do before do @dsc = dsc = Class.new(Sequel::Mock::Dataset) do def files db.files end def columns super case opts[:from].first when :schema_info, 'schema_info' [:version] when :schema_migrations, 'schema_migrations' [:filename] 
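# The TimestampMigrator specs use a mock dataset/database pair that emulates
# the migrator's bookkeeping: inserts and deletes against schema_migrations
# (or a custom table) maintain an in-memory list of applied filenames, while
# schema_info reads and writes track the integer version used when converting
# from the IntegerMigrator.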
describe "Sequel::TimestampMigrator" do
  before do
    @dsc = dsc = Class.new(Sequel::Mock::Dataset) do
      def files
        db.files
      end
      def columns
        super
        case opts[:from].first
        when :schema_info, 'schema_info'
          [:version]
        when :schema_migrations, 'schema_migrations'
          [:filename]
        when :sm, 'sm'
          [:fn]
        end
      end
      def fetch_rows(sql)
        super
        case opts[:from].first
        when :schema_info, 'schema_info'
          yield({:version=>db.sequel_migration_version})
        when :schema_migrations, 'schema_migrations'
          files.sort.each{|f| yield(:filename=>f)}
        when :sm, 'sm'
          files.sort.each{|f| yield(:fn=>f)}
        end
      end
      def insert(h={})
        super
        case opts[:from].first
        when :schema_info, 'schema_info'
          db.sequel_migration_version = h.values.first
        when :schema_migrations, :sm, 'schema_migrations', 'sm'
          files << h.values.first
        end
      end
      def update(h={})
        super
        case opts[:from].first
        when :schema_info, 'schema_info'
          db.sequel_migration_version = h.values.first
        end
      end
      def delete
        super
        case opts[:from].first
        when :schema_migrations, :sm, 'schema_migrations', 'sm'
          files.delete(opts[:where].args.last)
        end
      end
    end
    dbc = Class.new(Sequel::Mock::Database) do
      def files
        @files ||= []
      end
      def tables
        @tables ||= {}
      end
      def sequel_migration_version
        @sequel_migration_version ||= 0
      end
      attr_writer :sequel_migration_version
      def create_table(name, *args, &block)
        super
        tables[name.to_sym] = true
      end
      define_method(:drop_table){|*names| super(*names); names.each{|n| tables.delete(n.to_sym)}}
      define_method(:table_exists?){|name| super(name); tables.has_key?(name.is_a?(String) ? name.to_sym : name)}
    end
    @db = dbc.new
    @db.dataset_class = dsc
    @m = Sequel::Migrator
  end

  after do
    Object.send(:remove_const, "CreateSessions") if Object.const_defined?("CreateSessions")
    Object.send(:remove_const, "CreateArtists") if Object.const_defined?("CreateArtists")
    Object.send(:remove_const, "CreateAlbums") if Object.const_defined?("CreateAlbums")
  end

  it "should raise an error if there is an empty migration file" do
    proc{Sequel::TimestampMigrator.apply(@db, "spec/files/empty_migration")}.must_raise(Sequel::Migrator::Error)
  end

  it "should raise an error if there is a migration file with multiple migrations" do
    proc{Sequel::TimestampMigrator.apply(@db, "spec/files/double_migration")}.must_raise(Sequel::Migrator::Error)
  end

  it "should handle migrating up or down all the way" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    [:schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb 1273253853_3_create_users.rb'
    @m.apply(@db, @dir, 0)
    [:sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal []
  end

  it "should handle migrating up or down to specific timestamps" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir, 1273253851)
    [:schema_migrations, :sm1111, :sm2222].each{|n| @db.table_exists?(n).must_equal true}
    @db.table_exists?(:sm3333).must_equal false
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb'
    @m.apply(@db, @dir, 1273253849)
    [:sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db.table_exists?(:sm1111).must_equal true
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb'
  end

  it "should work correctly when multithreaded" do
    range = 0..4
    dbs = range.map do
      db = @db.class.new
      db.dataset_class = @db.dataset_class
      db
    end
    q1, q2 = Queue.new, Queue.new
    @dir = 'spec/files/timestamped_migrations'
    threads = dbs.map do |db|
      Thread.new do
        q1.pop
        @m.apply(db, @dir)
        [:schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| _(db.table_exists?(n)).must_equal true}
        _(db[:schema_migrations].select_order_map(:filename)).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb 1273253853_3_create_users.rb'
        q2.push db
      end
    end
    range.each{q1.push nil}
    (dbs - range.map{q2.pop}).must_be :empty?
    threads.each(&:join)
  end

  it "should not be current when there are migrations to apply" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    @m.is_current?(@db, @dir).must_equal true
    @dir = 'spec/files/interleaved_timestamped_migrations'
    @m.is_current?(@db, @dir).must_equal false
  end

  it "should raise an exception if the migrator is not current" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    @m.check_current(@db, @dir)
    @dir = 'spec/files/interleaved_timestamped_migrations'
    proc{@m.check_current(@db, @dir)}.must_raise(Sequel::Migrator::NotCurrentError)
  end

  it "should apply all missing files when migrating up" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    @dir = 'spec/files/interleaved_timestamped_migrations'
    @m.apply(@db, @dir)
    [:schema_migrations, :sm1111, :sm1122, :sm2222, :sm2233, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253850_create_artists.rb 1273253851_create_nodes.rb 1273253852_create_albums.rb 1273253853_3_create_users.rb'
  end

  it "should not apply down action to migrations where up action hasn't been applied" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    @dir = 'spec/files/interleaved_timestamped_migrations'
    @m.apply(@db, @dir, 0)
    [:sm1111, :sm1122, :sm2222, :sm2233, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal []
  end

  it "should handle updating to a specific timestamp when interleaving migrations" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    @dir = 'spec/files/interleaved_timestamped_migrations'
    @m.apply(@db, @dir, 1273253851)
    [:schema_migrations, :sm1111, :sm1122, :sm2222].each{|n| @db.table_exists?(n).must_equal true}
    [:sm2233, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253850_create_artists.rb 1273253851_create_nodes.rb'
  end

  it "should correctly update schema_migrations table when an error occurs when migrating up or down" do
    @dir = 'spec/files/bad_timestamped_migrations'
    proc{@m.apply(@db, @dir)}.must_raise NoMethodError
    [:schema_migrations, :sm1111, :sm2222].each{|n| @db.table_exists?(n).must_equal true}
    @db.table_exists?(:sm3333).must_equal false
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb'
    proc{@m.apply(@db, @dir, 0)}.must_raise NoMethodError
    [:sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db.table_exists?(:sm1111).must_equal true
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb'
  end
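  # Duplicate timestamp prefixes are allowed: files are ordered by their full
  # filename, so both 1273253853_* migrations below are applied, and migrating
  # to that shared timestamp keeps both applied.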
  it "should handle multiple migrations with the same timestamp correctly" do
    @dir = 'spec/files/duplicate_timestamped_migrations'
    @m.apply(@db, @dir)
    [:schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253853_create_nodes.rb 1273253853_create_users.rb'
    @m.apply(@db, @dir, 1273253853)
    [:sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253853_create_nodes.rb 1273253853_create_users.rb'
    @m.apply(@db, @dir, 1273253849)
    [:sm1111].each{|n| @db.table_exists?(n).must_equal true}
    [:sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb'
    @m.apply(@db, @dir, 1273253848)
    [:sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal []
  end

  it "should convert schema_info table to schema_migrations table" do
    @dir = 'spec/files/integer_migrations'
    @m.apply(@db, @dir)
    [:schema_info, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}
    @dir = 'spec/files/convert_to_timestamp_migrations'
    @m.apply(@db, @dir)
    [:schema_info, :sm1111, :sm2222, :sm3333, :schema_migrations, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'001_create_sessions.rb 002_create_nodes.rb 003_3_create_users.rb 1273253850_create_artists.rb 1273253852_create_albums.rb'
    @m.apply(@db, @dir, 4)
    [:schema_info, :schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    [:sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'001_create_sessions.rb 002_create_nodes.rb 003_3_create_users.rb'
    @m.apply(@db, @dir, 0)
    [:schema_info, :schema_migrations].each{|n| @db.table_exists?(n).must_equal true}
    [:sm1111, :sm2222, :sm3333, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal []
  end

  it "should handle unapplied migrations when migrating schema_info table to schema_migrations table" do
    @dir = 'spec/files/integer_migrations'
    @m.apply(@db, @dir, 2)
    [:schema_info, :sm1111, :sm2222].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :sm3333, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}
    @dir = 'spec/files/convert_to_timestamp_migrations'
    @m.apply(@db, @dir, 1273253850)
    [:schema_info, :sm1111, :sm2222, :sm3333, :schema_migrations, :sm1122].each{|n| @db.table_exists?(n).must_equal true}
    [:sm2233].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'001_create_sessions.rb 002_create_nodes.rb 003_3_create_users.rb 1273253850_create_artists.rb'
  end

  it "should handle unapplied migrations when migrating schema_info table to schema_migrations table and target is less than last integer migration version" do
    @dir = 'spec/files/integer_migrations'
    @m.apply(@db, @dir, 1)
    [:schema_info, :sm1111].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :sm2222, :sm3333, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}
    @dir = 'spec/files/convert_to_timestamp_migrations'
    @m.apply(@db, @dir, 2)
    [:schema_info, :sm1111, :sm2222, :schema_migrations].each{|n| @db.table_exists?(n).must_equal true}
    [:sm3333, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'001_create_sessions.rb 002_create_nodes.rb'
    @m.apply(@db, @dir)
    [:schema_info, :sm1111, :sm2222, :schema_migrations, :sm3333, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'001_create_sessions.rb 002_create_nodes.rb 003_3_create_users.rb 1273253850_create_artists.rb 1273253852_create_albums.rb'
  end

  it "should raise error for applied migrations not in file system" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    [:schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb 1273253853_3_create_users.rb'
    @dir = 'spec/files/missing_timestamped_migrations'
    proc{@m.apply(@db, @dir, 0)}.must_raise(Sequel::Migrator::Error)
    [:schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb 1273253853_3_create_users.rb'
  end

  it "should not raise error for applied migrations not in file system if :allow_missing_migration_files is true" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    [:schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb 1273253853_3_create_users.rb'
    @dir = 'spec/files/missing_timestamped_migrations'
    @m.run(@db, @dir, :allow_missing_migration_files => true)
    [:schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb 1273253853_3_create_users.rb'
  end

  it "should raise error for missing column name in existing schema_migrations table" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    proc{@m.run(@db, @dir, :column=>:fn)}.must_raise(Sequel::Migrator::Error)
  end

  it "should handle migration filenames in a case insensitive manner" do
    @dir = 'spec/files/uppercase_timestamped_migrations'
    @m.apply(@db, @dir)
    [:schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb 1273253853_3_create_users.rb'
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir, 0)
    [:sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal []
  end
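  # Like the IntegerMigrator, the TimestampMigrator allows overriding the
  # bookkeeping table and its filename column via the :table and :column
  # options, as the next test shows.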
  it "should support :table and :column options" do
    @dir = 'spec/files/timestamped_migrations'
    @m.run(@db, @dir, :table=>:sm, :column=>:fn)
    [:sm, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:sm].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb 1273253853_3_create_users.rb'
    @m.run(@db, @dir, :target=>0, :table=>:sm, :column=>:fn)
    [:sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:sm].select_order_map(:fn).must_equal []
  end

  it "should return nil" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir, 1273253850).must_be_nil
    @m.apply(@db, @dir, 0).must_be_nil
    @m.apply(@db, @dir).must_be_nil
  end

  it "should use TimestampMigrator if TimestampMigrator.apply is called even for integer migrations directory" do
    Sequel::TimestampMigrator.apply(@db, "spec/files/integer_migrations")
    @db.sqls.must_equal ["SELECT NULL AS nil FROM schema_migrations LIMIT 1", "CREATE TABLE schema_migrations (filename varchar(255) PRIMARY KEY)", "SELECT NULL AS nil FROM schema_info LIMIT 1", "SELECT filename FROM schema_migrations ORDER BY filename", "CREATE TABLE sm1111 (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('001_create_sessions.rb')", "CREATE TABLE sm2222 (smc2 integer)", "INSERT INTO schema_migrations (filename) VALUES ('002_create_nodes.rb')", "CREATE TABLE sm3333 (smc3 integer)", "INSERT INTO schema_migrations (filename) VALUES ('003_3_create_users.rb')"]
  end

  it "should not use transactions by default" do
    Sequel::TimestampMigrator.apply(@db, "spec/files/transaction_unspecified_migrations")
    @db.sqls.must_equal ["SELECT NULL AS nil FROM schema_migrations LIMIT 1", "CREATE TABLE schema_migrations (filename varchar(255) PRIMARY KEY)", "SELECT NULL AS nil FROM schema_info LIMIT 1", "SELECT filename FROM schema_migrations ORDER BY filename", "CREATE TABLE sm11111 (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('001_create_alt_basic.rb')", "CREATE TABLE sm (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('002_create_basic.rb')"]
  end

  it "should use transactions by default if database supports transactional ddl" do
    def @db.supports_transactional_ddl?; true end
    Sequel::TimestampMigrator.apply(@db, "spec/files/transaction_unspecified_migrations")
    @db.sqls.must_equal ["SELECT NULL AS nil FROM schema_migrations LIMIT 1", "CREATE TABLE schema_migrations (filename varchar(255) PRIMARY KEY)", "SELECT NULL AS nil FROM schema_info LIMIT 1", "SELECT filename FROM schema_migrations ORDER BY filename", "BEGIN", "CREATE TABLE sm11111 (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('001_create_alt_basic.rb')", "COMMIT", "BEGIN", "CREATE TABLE sm (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('002_create_basic.rb')", "COMMIT"]
  end

  it "should support transaction use on a per migration basis" do
    Sequel::TimestampMigrator.apply(@db, "spec/files/transaction_specified_migrations")
    @db.sqls.must_equal ["SELECT NULL AS nil FROM schema_migrations LIMIT 1", "CREATE TABLE schema_migrations (filename varchar(255) PRIMARY KEY)", "SELECT NULL AS nil FROM schema_info LIMIT 1", "SELECT filename FROM schema_migrations ORDER BY filename", "BEGIN", "CREATE TABLE sm11111 (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('001_create_alt_basic.rb')", "COMMIT", "CREATE TABLE sm (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('002_create_basic.rb')"]
  end

  it "should force transactions if enabled by the migrator" do
    Sequel::TimestampMigrator.run(@db, "spec/files/transaction_specified_migrations", :use_transactions=>true)
    @db.sqls.must_equal ["SELECT NULL AS nil FROM schema_migrations LIMIT 1", "CREATE TABLE schema_migrations (filename varchar(255) PRIMARY KEY)", "SELECT NULL AS nil FROM schema_info LIMIT 1", "SELECT filename FROM schema_migrations ORDER BY filename", "BEGIN", "CREATE TABLE sm11111 (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('001_create_alt_basic.rb')", "COMMIT", "BEGIN", "CREATE TABLE sm (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('002_create_basic.rb')", "COMMIT"]
  end

  it "should not use transactions if disabled in the migrator" do
    Sequel::TimestampMigrator.run(@db, "spec/files/transaction_unspecified_migrations", :use_transactions=>false)
    @db.sqls.must_equal ["SELECT NULL AS nil FROM schema_migrations LIMIT 1", "CREATE TABLE schema_migrations (filename varchar(255) PRIMARY KEY)", "SELECT NULL AS nil FROM schema_info LIMIT 1", "SELECT filename FROM schema_migrations ORDER BY filename", "CREATE TABLE sm11111 (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('001_create_alt_basic.rb')", "CREATE TABLE sm (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('002_create_basic.rb')"]
  end

  it "should use shorter primary key field on MySQL if creating schema migrations table fails" do
    def @db.database_type; :mysql end
    def @db.execute_ddl(sql, *)
      super
      raise Sequel::DatabaseError, "Specified key was too long; max key length is 767 bytes" if sql =~ /varchar\(255\)/
    end
    Sequel::TimestampMigrator.run(@db, "spec/files/transaction_unspecified_migrations", :use_transactions=>false)
    @db.sqls.must_equal ["SELECT NULL AS nil FROM schema_migrations LIMIT 1", "CREATE TABLE schema_migrations (filename varchar(255) PRIMARY KEY)", "CREATE TABLE schema_migrations (filename varchar(190) PRIMARY KEY)", "SELECT NULL AS nil FROM schema_info LIMIT 1", "SELECT filename FROM schema_migrations ORDER BY filename", "CREATE TABLE sm11111 (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('001_create_alt_basic.rb')", "CREATE TABLE sm (smc1 integer)", "INSERT INTO schema_migrations (filename) VALUES ('002_create_basic.rb')"]
  end

  it "should not use shorter primary key field on other databases if creating schema migrations table fails" do
    def @db.execute_ddl(sql, *)
      super
      raise Sequel::DatabaseError, "Specified key was too long; max key length is 767 bytes" if sql =~ /varchar\(255\)/
    end
    proc{Sequel::TimestampMigrator.run(@db, "spec/files/transaction_unspecified_migrations", :use_transactions=>false)}.must_raise Sequel::DatabaseError
    @db.sqls.must_equal ["SELECT NULL AS nil FROM schema_migrations LIMIT 1", "CREATE TABLE schema_migrations (filename varchar(255) PRIMARY KEY)"]
  end
end

sequel-5.63.0/spec/extensions/modification_detection_spec.rb

require_relative "spec_helper"
require 'yaml'

describe "modification_detection plugin" do
  before do
    @ds = Sequel.mock(:fetch=>{:id=>1, :a=>'a'.dup, :b=>1, :c=>['a'.dup], :d=>{'b'=>'c'.dup}}, :numrows=>1, :autoid=>1)[:items]
    @c = Class.new(Sequel::Model(@ds))
    @c.plugin :modification_detection
    @c.columns :a, :b, :c, :d
    @o = @c.first
    @ds.db.sqls
  end

  it "should detect setting new column values on new objects" do
    @o = @c.new
    @o.changed_columns.must_equal []
    @o.a = 'c'
    @o.changed_columns.must_equal [:a]
  end

  it "should only detect columns that have been changed" do
    @o.changed_columns.must_equal []
    @o.a << 'b'
    @o.changed_columns.must_equal [:a]
    @o.a.replace('a')
    @o.changed_columns.must_equal []
    @o.values[:b] = 2
    @o.changed_columns.must_equal [:b]
    @o.values[:b] = 1
    @o.changed_columns.must_equal []
    @o.c[0] << 'b'
    @o.d['b'] << 'b'
    @o.changed_columns.sort_by{|c| c.to_s}.must_equal [:c, :d]
    @o.c[0] = 'a'
    @o.changed_columns.must_equal [:d]
    @o.d['b'] = 'c'
    @o.changed_columns.must_equal []
  end

  it "should detect columns that have been changed on frozen objects" do
    @o.freeze
    @o.a << 'b'
    @o.changed_columns.must_equal [:a]
  end

  it "should not list a column twice" do
    @o.a = 'b'.dup
    @o.a << 'a'
    @o.changed_columns.must_equal [:a]
  end

  it "should report correct changed_columns after updating" do
    @o.a << 'a'
    @o.save_changes
    @o.changed_columns.must_equal []
    @o.values[:b] = 2
    @o.save_changes
    @o.changed_columns.must_equal []
    @o.c[0] << 'b'
    @o.save_changes
    @o.changed_columns.must_equal []
    @o.d['b'] << 'a'
    @o.save_changes
    @o.changed_columns.must_equal []
    @ds.db.sqls.must_equal ["UPDATE items SET a = 'aa' WHERE (id = 1)", "UPDATE items SET b = 2 WHERE (id = 1)", "UPDATE items SET c = ('ab') WHERE (id = 1)", "UPDATE items SET d = ('b' = 'ca') WHERE (id = 1)"]
  end

  it "should report correct changed_columns after creating new object" do
    o = @c.create
    o.changed_columns.must_equal []
    o.a << 'a'
    o.changed_columns.must_equal [:a]
    @ds.db.sqls.must_equal ["INSERT INTO items DEFAULT VALUES", "SELECT * FROM items WHERE (id = 1) LIMIT 1"]
  end

  it "should report correct changed_columns after refreshing existing object" do
    @o.a << 'a'
    @o.changed_columns.must_equal [:a]
    @o.refresh
    @o.changed_columns.must_equal []
    @o.a << 'a'
    @o.changed_columns.must_equal [:a]
  end
end

sequel-5.63.0/spec/extensions/mssql_optimistic_locking_spec.rb

require_relative "spec_helper"

describe "MSSQL optimistic locking plugin" do
  before do
    @db = Sequel.mock(:host=>'mssql')
    @ds = @db[:items].with_quote_identifiers(false).with_extend{private; def input_identifier(v); v.to_s end}
    @c = Class.new(Sequel::Model(@ds))
    @c.columns :id, :name, :timestamp
    @c.plugin :mssql_optimistic_locking
    @o = @c.load(:id=>1, :name=>'a', :timestamp=>'1234')
    @db.sqls
  end

  it "should not include the lock column when updating" do
    @db.fetch = [[{:timestamp=>'2345'}]]
    @o.save
    @db.sqls.must_equal ["UPDATE TOP (1) items SET name = 'a' OUTPUT inserted.timestamp WHERE ((id = 1) AND (timestamp = 0x31323334))"]
  end

  it "should include the primary key column when updating if it has changed" do
    @db.fetch = [[{:timestamp=>'2345'}]]
    @o.id = 2
    @o.save
    @db.sqls.must_equal ["UPDATE TOP (1) items SET id = 2, name = 'a' OUTPUT inserted.timestamp WHERE ((id = 2) AND (timestamp = 0x31323334))"]
  end

  it "should automatically update lock column using new value from database" do
    @db.fetch = [[{:timestamp=>'2345'}]]
    @o.save
    @o.timestamp.must_equal '2345'
  end
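  # A stale update matches zero rows, so no new lock value comes back from the
  # OUTPUT clause and Sequel raises NoExistingObject, leaving the in-memory
  # lock value unchanged.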
["UPDATE TOP (1) items SET name = 'a' OUTPUT inserted.timestamp WHERE ((id = 1) AND (timestamp = 0x32333435))"] end it "should raise error when destroying stale object" do @db.numrows = 0 @o.timestamp = '2345' proc{@o.destroy}.must_raise(Sequel::NoExistingObject) @db.sqls.must_equal ["DELETE TOP (1) FROM items WHERE ((id = 1) AND (timestamp = 0x32333435))"] end it "should allow refresh after failed save" do @db.fetch = [] @o.timestamp = '2345' proc{@o.save}.must_raise(Sequel::NoExistingObject) @db.fetch = {:id=>1, :name=>'a', :timestamp=>'2345'} @o.refresh @db.sqls @o.save @db.sqls.must_equal ["UPDATE TOP (1) items SET name = 'a' OUTPUT inserted.timestamp WHERE ((id = 1) AND (timestamp = 0x32333435))"] end it "should allow changing the lock column via model.lock_column=" do @c = Class.new(Sequel::Model(@ds)) @c.columns :id, :name, :lv @c.plugin :mssql_optimistic_locking @c.lock_column = :lv @o = @c.load(:id=>1, :name=>'a', :lv=>'1234') @db.sqls @db.fetch = [] proc{@o.save}.must_raise(Sequel::NoExistingObject) @o.lv.must_equal '1234' @db.sqls.must_equal ["UPDATE TOP (1) items SET name = 'a' OUTPUT inserted.lv WHERE ((id = 1) AND (lv = 0x31323334))"] @o = @c.load(:id=>1, :name=>'a', :lv=>'1234') @db.fetch = {:lv=>'2345'} @o.save @o.lv.must_equal '2345' end it "should allow changing the lock column via plugin option" do @c = Class.new(Sequel::Model(@ds)) @c.columns :id, :name, :lv @c.plugin :mssql_optimistic_locking, :lock_column=>:lv @o = @c.load(:id=>1, :name=>'a', :lv=>'1234') @db.sqls @db.fetch = [] proc{@o.save}.must_raise(Sequel::NoExistingObject) @o.lv.must_equal '1234' @db.sqls.must_equal ["UPDATE TOP (1) items SET name = 'a' OUTPUT inserted.lv WHERE ((id = 1) AND (lv = 0x31323334))"] @o = @c.load(:id=>1, :name=>'a', :lv=>'1234') @db.fetch = {:lv=>'2345'} @o.save @o.lv.must_equal '2345' end it "should work when subclassing" do c = Class.new(@c) o = c.load(:id=>1, :name=>'a', :timestamp=>'1234') @db.fetch = [[{:timestamp=>'2345'}]] o.save @db.sqls.must_equal ["UPDATE TOP (1) items SET name = 'a' OUTPUT inserted.timestamp WHERE ((id = 1) AND (timestamp = 0x31323334))"] end end ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/named_timezones_spec.rb�����������������������������������������������0000664�0000000�0000000�00000024137�14342141206�0022760�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" begin require 'tzinfo' rescue LoadError warn "Skipping test of named_timezones extension: can't load tzinfo" else Sequel.extension :thread_local_timezones Sequel.extension :named_timezones Sequel.datetime_class = Time describe "Sequel named_timezones extension with DateTime class" do before do @tz_in = TZInfo::Timezone.get('America/Los_Angeles') @tz_out = TZInfo::Timezone.get('America/New_York') @db = Sequel.mock @dt = DateTime.civil(2009,6,1,10,20,30,0) Sequel.application_timezone = 
describe "Sequel named_timezones extension with DateTime class" do
  before do
    @tz_in = TZInfo::Timezone.get('America/Los_Angeles')
    @tz_out = TZInfo::Timezone.get('America/New_York')
    @db = Sequel.mock
    @dt = DateTime.civil(2009,6,1,10,20,30,0)
    Sequel.application_timezone = 'America/Los_Angeles'
    Sequel.database_timezone = 'America/New_York'
    Sequel.datetime_class = DateTime
  end

  after do
    Sequel.tzinfo_disambiguator = nil
    Sequel.default_timezone = nil
    Sequel.datetime_class = Time
  end

  it "should convert string arguments to *_timezone= to TZInfo::Timezone instances" do
    Sequel.application_timezone.must_equal @tz_in
    Sequel.database_timezone.must_equal @tz_out
  end

  it "should convert string arguments for Database#timezone= to TZInfo::Timezone instances for database-specific timezones" do
    @db.extension :named_timezones
    @db.timezone = 'America/Los_Angeles'
    @db.timezone.must_equal @tz_in
  end

  it "should accept TZInfo::Timezone instances in *_timezone=" do
    Sequel.application_timezone = @tz_in
    Sequel.database_timezone = @tz_out
    Sequel.application_timezone.must_equal @tz_in
    Sequel.database_timezone.must_equal @tz_out
  end

  it "should convert datetimes going into the database to named database_timezone" do
    ds = @db[:a].with_extend do
      def supports_timestamp_timezones?; true; end
      def supports_timestamp_usecs?; false; end
    end
    ds.insert([@dt, DateTime.civil(2009,6,1,3,20,30,-7/24.0), DateTime.civil(2009,6,1,6,20,30,-1/6.0)])
    @db.sqls.must_equal ["INSERT INTO a VALUES ('2009-06-01 06:20:30-0400', '2009-06-01 06:20:30-0400', '2009-06-01 06:20:30-0400')"]
  end

  it "should convert datetimes with fractional seconds going into the database to named database_timezone" do
    ds = @db[:a].with_extend do
      def supports_timestamp_timezones?; true; end
    end
    @dt += Rational(555555, 1000000*86400)
    ds.insert([@dt, DateTime.civil(2009,6,1,3,20,30.555555,-7/24.0), DateTime.civil(2009,6,1,6,20,30.555555,-1/6.0)])
    @db.sqls.must_equal ["INSERT INTO a VALUES ('2009-06-01 06:20:30.555555-0400', '2009-06-01 06:20:30.555555-0400', '2009-06-01 06:20:30.555555-0400')"]
  end unless defined?(JRUBY_VERSION) && JRUBY_VERSION < "9.2"

  it "should convert datetimes coming out of the database from database_timezone to application_timezone" do
    dt = Sequel.database_to_application_timestamp('2009-06-01 06:20:30-0400')
    dt.must_be_instance_of DateTime
    dt.must_equal @dt
    dt.offset.must_equal(-7/24.0)
    dt = Sequel.database_to_application_timestamp('2009-06-01 10:20:30+0000')
    dt.must_be_instance_of DateTime
    dt.must_equal @dt
    dt.offset.must_equal(-7/24.0)
  end

  it "should raise an error for ambiguous timezones by default" do
    proc{Sequel.database_to_application_timestamp('2004-10-31T01:30:00')}.must_raise(Sequel::InvalidValue)
  end

  it "should support tzinfo_disambiguator= to handle ambiguous timezones automatically" do
    Sequel.tzinfo_disambiguator = proc{|datetime, periods| periods.first}
    dt = Sequel.database_to_application_timestamp('2004-10-31T01:30:00')
    dt.must_equal DateTime.parse('2004-10-30T22:30:00-07:00')
    dt.offset.must_equal(-7/24.0)
  end

  it "should assume datetimes coming out of the database that don't have an offset as coming from database_timezone" do
    dt = Sequel.database_to_application_timestamp('2009-06-01 06:20:30')
    dt.must_be_instance_of DateTime
    dt.must_equal @dt
    dt.offset.must_equal(-7/24.0)
    dt = Sequel.database_to_application_timestamp('2009-06-01 10:20:30')
    dt.must_be_instance_of DateTime
    dt.must_equal(@dt + 1/6.0)
    dt.offset.must_equal(-7/24.0)
  end
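  # The queues below coordinate the two threads so that each thread-local
  # application timezone is set before either thread reads its own back.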
  it "should work with the thread_local_timezones extension" do
    q, q1, q2 = Queue.new, Queue.new, Queue.new
    tz1, tz2 = nil, nil
    t1 = Thread.new do
      Sequel.thread_application_timezone = 'America/New_York'
      q2.push nil
      q.pop
      tz1 = Sequel.application_timezone
    end
    t2 = Thread.new do
      Sequel.thread_application_timezone = 'America/Los_Angeles'
      q2.push nil
      q1.pop
      tz2 = Sequel.application_timezone
    end
    q2.pop
    q2.pop
    q.push nil
    q1.push nil
    t1.join
    t2.join
    tz1.must_equal @tz_out
    tz2.must_equal @tz_in
  end
end

describe "Sequel named_timezones extension with Time class" do
  before do
    @tz_in = TZInfo::Timezone.get('America/Los_Angeles')
    @tz_out = TZInfo::Timezone.get('America/New_York')
    @db = Sequel.mock
    Sequel.application_timezone = 'America/Los_Angeles'
    Sequel.database_timezone = 'America/New_York'
  end

  after do
    Sequel.tzinfo_disambiguator = nil
    Sequel.default_timezone = nil
    Sequel.datetime_class = Time
  end

  it "should convert string arguments to *_timezone= to TZInfo::Timezone instances" do
    Sequel.application_timezone.must_equal @tz_in
    Sequel.database_timezone.must_equal @tz_out
  end

  it "should convert string arguments for Database#timezone= to TZInfo::Timezone instances for database-specific timezones" do
    @db.extension :named_timezones
    @db.timezone = 'America/Los_Angeles'
    @db.timezone.must_equal @tz_in
  end

  it "should accept TZInfo::Timezone instances in *_timezone=" do
    Sequel.application_timezone = @tz_in
    Sequel.database_timezone = @tz_out
    Sequel.application_timezone.must_equal @tz_in
    Sequel.database_timezone.must_equal @tz_out
  end

  it "should convert times going into the database to named database_timezone" do
    ds = @db[:a].with_extend do
      def supports_timestamp_timezones?; true; end
      def supports_timestamp_usecs?; false; end
    end
    ds.insert([Time.new(2009,6,1,3,20,30, RUBY_VERSION >= '2.6' ? @tz_in : -25200), Time.new(2009,6,1,3,20,30,-25200), Time.new(2009,6,1,6,20,30,-14400)])
    @db.sqls.must_equal ["INSERT INTO a VALUES ('2009-06-01 06:20:30-0400', '2009-06-01 06:20:30-0400', '2009-06-01 06:20:30-0400')"]
  end

  it "should convert times with fractional seconds going into the database to named database_timezone" do
    ds = @db[:a].with_extend do
      def supports_timestamp_timezones?; true; end
    end
    ds.insert([Time.new(2009,6,1,3,20,30.5555554, RUBY_VERSION >= '2.6' ? @tz_in : -25200), Time.new(2009,6,1,3,20,30.5555554,-25200), Time.new(2009,6,1,6,20,30.5555554,-14400)])
    @db.sqls.must_equal ["INSERT INTO a VALUES ('2009-06-01 06:20:30.555555-0400', '2009-06-01 06:20:30.555555-0400', '2009-06-01 06:20:30.555555-0400')"]
  end

  it "should convert times coming out of the database from database_timezone to application_timezone" do
    dt = Sequel.database_to_application_timestamp('2009-06-01 06:20:30-0400')
    dt.must_be_instance_of Time
    dt.must_equal Time.new(2009,6,1,3,20,30,-25200)
    dt.utc_offset.must_equal(-25200)
    dt = Sequel.database_to_application_timestamp('2009-06-01 10:20:30+0000')
    dt.must_be_instance_of Time
    dt.must_equal Time.new(2009,6,1,3,20,30,-25200)
    dt.utc_offset.must_equal(-25200)
  end

  it "should convert times with fractional seconds coming out of the database from database_timezone to application_timezone" do
    dt = Sequel.database_to_application_timestamp('2009-06-01 06:20:30.555555-0400')
    dt.must_be_instance_of Time
    dt.to_i.must_equal Time.new(2009,6,1,3,20,30,-25200).to_i
    dt.nsec.must_equal 555555000
    dt.utc_offset.must_equal(-25200)
    dt = Sequel.database_to_application_timestamp('2009-06-01 10:20:30.555555+0000')
    dt.must_be_instance_of Time
    dt.to_i.must_equal Time.new(2009,6,1,3,20,30,-25200).to_i
    dt.nsec.must_equal 555555000
    dt.utc_offset.must_equal(-25200)
  end

  it "should raise an error for ambiguous timezones by default" do
    proc{Sequel.database_to_application_timestamp('2004-10-31T01:30:00')}.must_raise(Sequel::InvalidValue)
  end

  it "should support tzinfo_disambiguator= to handle ambiguous timezones automatically" do
    Sequel.tzinfo_disambiguator = proc{|datetime, periods| periods.first}
    Sequel.database_to_application_timestamp('2004-10-31T01:30:00').must_equal Time.new(2004, 10, 30, 22, 30, 0, -25200)
    dt = Sequel.database_to_application_timestamp('2004-10-31T01:30:00')
    dt.must_equal Time.new(2004, 10, 30, 22, 30, 0, -25200)
    dt.utc_offset.must_equal(-25200)
  end

  it "should support tzinfo_disambiguator= to handle ambiguous timezones automatically when using fractional seconds" do
    Sequel.tzinfo_disambiguator = proc{|datetime, periods| periods.first}
    dt = Sequel.database_to_application_timestamp('2004-10-31T01:30:00.555555')
    dt.to_i.must_equal Time.new(2004, 10, 30, 22, 30, 0, -25200).to_i
    dt.nsec.must_equal 555555000
    dt = Sequel.database_to_application_timestamp('2004-10-31T01:30:00.555555')
    dt.to_i.must_equal Time.new(2004, 10, 30, 22, 30, 0, -25200).to_i
    dt.nsec.must_equal 555555000
    dt.utc_offset.must_equal(-25200)
  end

  it "should assume datetimes coming out of the database that don't have an offset as coming from database_timezone" do
    dt = Sequel.database_to_application_timestamp('2009-06-01 06:20:30')
    dt.must_be_instance_of Time
    dt.must_equal Time.new(2009,6,1,3,20,30, -25200)
    dt.utc_offset.must_equal(-25200)
    dt = Sequel.database_to_application_timestamp('2009-06-01 10:20:30')
    dt.must_be_instance_of Time
    dt.must_equal Time.new(2009,6,1,7,20,30, -25200)
    dt.utc_offset.must_equal(-25200)
  end

  it "should work with the thread_local_timezones extension" do
    q, q1, q2 = Queue.new, Queue.new, Queue.new
    tz1, tz2 = nil, nil
    t1 = Thread.new do
      Sequel.thread_application_timezone = 'America/New_York'
      q2.push nil
      q.pop
      tz1 = Sequel.application_timezone
    end
    t2 = Thread.new do
      Sequel.thread_application_timezone = 'America/Los_Angeles'
      q2.push nil
      q1.pop
      tz2 = Sequel.application_timezone
    end
    q2.pop
    q2.pop
    q.push nil
    q1.push nil
    t1.join
    t2.join
    tz1.must_equal @tz_out
    tz2.must_equal @tz_in
  end
end
end

sequel-5.63.0/spec/extensions/nested_attributes_spec.rb

require_relative "spec_helper"

describe "NestedAttributes plugin" do
  def check_sqls(should, is)
    if should.is_a?(Array)
      should.must_include(is)
    else
      is.must_equal should
    end
  end

  def check_sql_array(*shoulds)
    sqls = @db.sqls
    sqls.length.must_equal shoulds.length
    shoulds.zip(sqls){|s, i| check_sqls(s, i)}
  end

  before do
    @db = Sequel.mock(:autoid=>1, :numrows=>1)
    @c = Class.new(Sequel::Model(@db))
    @c.plugin :nested_attributes
    @Artist = Class.new(@c).set_dataset(:artists)
    @Album = Class.new(@c).set_dataset(:albums)
    @Tag = Class.new(@c).set_dataset(:tags)
    @Concert = Class.new(@c).set_dataset(:concerts)
    @Artist.plugin :skip_create_refresh
    @Album.plugin :skip_create_refresh
    @Tag.plugin :skip_create_refresh
    @Concert.plugin :skip_create_refresh
    @Artist.columns :id, :name
    @Album.columns :id, :name, :artist_id
    @Tag.columns :id, :name
    @Concert.columns :tour, :date, :artist_id, :playlist
    @Concert.set_primary_key([:tour, :date])
    @Concert.unrestrict_primary_key
    @Artist.one_to_many :albums, :class=>@Album, :key=>:artist_id
    @Artist.one_to_many :concerts, :class=>@Concert, :key=>:artist_id
    @Artist.one_to_one :first_album, :class=>@Album, :key=>:artist_id
    @Artist.one_to_one :first_concert, :class=>@Concert, :key=>:artist_id
    @Concert.one_to_many :albums, :class=>@Album, :key=>:artist_id, :primary_key=>:artist_id
    @Album.many_to_one :artist, :class=>@Artist, :reciprocal=>:albums
    @Album.many_to_many :tags, :class=>@Tag, :left_key=>:album_id, :right_key=>:tag_id, :join_table=>:at
    @Tag.many_to_many :albums, :class=>@Album, :left_key=>:tag_id, :right_key=>:album_id, :join_table=>:at
    @Artist.nested_attributes :albums, :first_album, :destroy=>true, :remove=>true
    @Artist.nested_attributes :concerts, :destroy=>true, :remove=>true
    @Album.nested_attributes :artist, :tags, :destroy=>true, :remove=>true
    @Artist.nested_attributes :first_concert
    @Concert.nested_attributes :albums
    @db.sqls
  end

  it "should not modify options hash when loading plugin" do
    h = {}
    @Concert.nested_attributes :albums, h
    h.must_equal({})
  end

  it "should support creating new many_to_one objects" do
    a = @Album.new({:name=>'Al', :artist_attributes=>{:name=>'Ar'}})
    @db.sqls.must_equal []
    a.save
    check_sql_array("INSERT INTO artists (name) VALUES ('Ar')", ["INSERT INTO albums (name, artist_id) VALUES ('Al', 1)", "INSERT INTO albums (artist_id, name) VALUES (1, 'Al')"])
  end

  it "should support creating new one_to_one objects" do
    a = @Artist.new(:name=>'Ar')
    a.id = 1
    a.first_album_attributes = {:name=>'Al'}
    @db.sqls.must_equal []
    a.save
    check_sql_array(["INSERT INTO artists (name, id) VALUES ('Ar', 1)", "INSERT INTO artists (id, name) VALUES (1, 'Ar')"], "UPDATE albums SET artist_id = NULL WHERE (artist_id = 1)", ["INSERT INTO albums (artist_id, name) VALUES (1, 'Al')", "INSERT INTO albums (name, artist_id) VALUES ('Al', 1)"])
  end

  it "should support creating new one_to_many objects" do
    a = @Artist.new({:name=>'Ar', :albums_attributes=>[{:name=>'Al'}]})
    @db.sqls.must_equal []
    a.save
    check_sql_array("INSERT INTO artists (name) VALUES ('Ar')", ["INSERT INTO albums (artist_id, name) VALUES (1, 'Al')", "INSERT INTO albums (name, artist_id) VALUES ('Al', 1)"])
  end

  it "should support creating new one_to_many and one_to_one objects with presence validations on the foreign key" do
    @Album.class_eval do
      plugin :validation_helpers
      def validate
        validates_integer :artist_id
        super
      end
    end
    a = @Artist.new({:name=>'Ar', :albums_attributes=>[{:name=>'Al'}]})
    @db.sqls.must_equal []
    a.save
    check_sql_array("INSERT INTO artists (name) VALUES ('Ar')", ["INSERT INTO albums (artist_id, name) VALUES (1, 'Al')", "INSERT INTO albums (name, artist_id) VALUES ('Al', 1)"])
    a = @Artist.new(:name=>'Ar')
    a.id = 1
    a.first_album_attributes = {:name=>'Al'}
    @db.sqls.must_equal []
    a.save
    check_sql_array(["INSERT INTO artists (name, id) VALUES ('Ar', 1)", "INSERT INTO artists (id, name) VALUES (1, 'Ar')"], "UPDATE albums SET artist_id = NULL WHERE (artist_id = 1)", ["INSERT INTO albums (artist_id, name) VALUES (1, 'Al')", "INSERT INTO albums (name, artist_id) VALUES ('Al', 1)"])
  end

  it "should support creating new one_to_many and one_to_one objects with composite keys with presence validations on the foreign key" do
    insert = nil
    @Album.class_eval do
      plugin :validation_helpers
      def validate
        validates_integer :artist_id
        super
      end
    end
    @Concert.class_eval do
      def before_create
        # Have to define the CPK somehow.
        self.tour = 'To'
        self.date = '2004-04-05'
        super
      end
      def after_create
        super
        self.artist_id = 3
      end
      private
      define_method :_insert do
        insert = values.dup
      end
    end
    c = @Concert.new(:playlist=>'Pl')
    @db.sqls.must_equal []
    c.albums_attributes = [{:name=>'Al'}]
    c.save
    insert.must_equal(:tour=>'To', :date=>'2004-04-05', :playlist=>'Pl')
    check_sql_array(["INSERT INTO albums (name, artist_id) VALUES ('Al', 3)", "INSERT INTO albums (artist_id, name) VALUES (3, 'Al')"])
    @Concert.class_eval do
      plugin :validation_helpers
      def validate
        validates_integer :artist_id
        super
      end
    end
    a = @Artist.new(:name=>'Ar')
    a.id = 1
    a.first_concert_attributes = {:playlist=>'Pl'}
    @db.sqls.must_equal []
    a.save
    check_sql_array(["INSERT INTO artists (name, id) VALUES ('Ar', 1)", "INSERT INTO artists (id, name) VALUES (1, 'Ar')"], "UPDATE concerts SET artist_id = NULL WHERE (artist_id = 1)")
    insert.must_equal(:tour=>'To', :date=>'2004-04-05', :artist_id=>1, :playlist=>'Pl')
  end

  it "should not remove existing values from object when validating" do
    @Artist.one_to_one :first_album, :class=>@Album, :key=>:id
    @Artist.nested_attributes :first_album
    @db.fetch = {:id=>1}
    a = @Artist.load(:id=>1)
    a.set(:first_album_attributes=>{:id=>1, :name=>'Ar'})
    a.first_album.values.must_equal(:id=>1, :name=>'Ar')
    @db.sqls.must_equal ["SELECT * FROM albums WHERE (albums.id = 1) LIMIT 1"]
    a.save_changes
    check_sql_array("UPDATE albums SET name = 'Ar' WHERE (id = 1)")
  end

  it "should support creating new many_to_many objects" do
    a = @Album.new({:name=>'Al', :tags_attributes=>[{:name=>'T'}]})
    @db.sqls.must_equal []
    a.save
    check_sql_array("INSERT INTO albums (name) VALUES ('Al')", "INSERT INTO tags (name) VALUES ('T')", ["INSERT INTO at (album_id, tag_id) VALUES (1, 2)", "INSERT INTO at (tag_id, album_id) VALUES (2, 1)"])
  end
"should add new objects to the cached association array as soon as the *_attributes= method is called" do a = @Artist.new({:name=>'Ar', :first_album_attributes=>{:name=>'B'}, :albums_attributes=>[{:name=>'Al', :tags_attributes=>[{:name=>'T'}]}]}) a.albums.must_equal [@Album.new(:name=>'Al')] a.albums.first.artist.must_equal a a.albums.first.tags.must_equal [@Tag.new(:name=>'T')] a.first_album.must_equal @Album.new(:name=>'B') a.first_album.artist.must_equal a end it "should support creating new objects with composite primary keys" do insert = nil @Concert.class_eval do def before_create # Have to define the CPK somehow. self.tour = 'To' self.date = '2004-04-05' super end private define_method :_insert do insert = values.dup end end a = @Artist.new({:name=>'Ar', :concerts_attributes=>[{:playlist=>'Pl'}]}) @db.sqls.must_equal [] a.save @db.sqls.must_equal ["INSERT INTO artists (name) VALUES ('Ar')"] insert.must_equal(:tour=>'To', :date=>'2004-04-05', :artist_id=>1, :playlist=>'Pl') end it "should support creating new objects with specific primary keys if :unmatched_pk => :create is set" do @Artist.nested_attributes :albums, :unmatched_pk=>:create insert = nil @Album.class_eval do unrestrict_primary_key private define_method :_insert do insert = values.dup end end a = @Artist.new({:name=>'Ar', :albums_attributes=>[{:id=>7, :name=>'Al'}]}) @db.sqls.must_equal [] a.save @db.sqls.must_equal ["INSERT INTO artists (name) VALUES ('Ar')"] insert.must_equal(:artist_id=>1, :name=>'Al', :id=>7) end it "should support creating new objects with specific composite primary keys if :unmatched_pk => :create is set" do insert = nil @Artist.nested_attributes :concerts, :unmatched_pk=>:create @Concert.class_eval do private define_method :_insert do insert = values.dup end end a = @Artist.new({:name=>'Ar', :concerts_attributes=>[{:tour=>'To', :date=>'2004-04-05', :playlist=>'Pl'}]}) @db.sqls.must_equal [] a.save @db.sqls.must_equal ["INSERT INTO artists (name) VALUES ('Ar')"] insert.must_equal(:tour=>'To', :date=>'2004-04-05', :artist_id=>1, :playlist=>'Pl') end it "should support updating many_to_one objects" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') al.associations[:artist] = ar al.set(:artist_attributes=>{:id=>'20', :name=>'Ar2'}) @db.sqls.must_equal [] al.save @db.sqls.must_equal ["UPDATE albums SET name = 'Al' WHERE (id = 10)", "UPDATE artists SET name = 'Ar2' WHERE (id = 20)"] end it "should support updating one_to_one objects" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:first_album] = al ar.set(:first_album_attributes=>{:id=>10, :name=>'Al2'}) @db.sqls.must_equal [] ar.save @db.sqls.must_equal ["UPDATE artists SET name = 'Ar' WHERE (id = 20)", "UPDATE albums SET name = 'Al2' WHERE (id = 10)"] end it "should support updating one_to_many objects" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:albums] = [al] ar.set(:albums_attributes=>[{:id=>10, :name=>'Al2'}]) @db.sqls.must_equal [] ar.save @db.sqls.must_equal ["UPDATE artists SET name = 'Ar' WHERE (id = 20)", "UPDATE albums SET name = 'Al2' WHERE (id = 10)"] end it "should support updating one_to_many objects with _delete/_remove flags set to false" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:albums] = [al] ar.set(:albums_attributes=>[{:id=>10, :name=>'Al2', :_delete => 'f', :_remove => '0'}]) @db.sqls.must_equal [] ar.save @db.sqls.must_equal ["UPDATE 
artists SET name = 'Ar' WHERE (id = 20)", "UPDATE albums SET name = 'Al2' WHERE (id = 10)"] end it "should support updating many_to_many objects" do a = @Album.load(:id=>10, :name=>'Al') t = @Tag.load(:id=>20, :name=>'T') a.associations[:tags] = [t] a.set(:tags_attributes=>[{:id=>20, :name=>'T2'}]) @db.sqls.must_equal [] a.save @db.sqls.must_equal ["UPDATE albums SET name = 'Al' WHERE (id = 10)", "UPDATE tags SET name = 'T2' WHERE (id = 20)"] end it "should support updating many_to_many objects with _delete/_remove flags set to false" do a = @Album.load(:id=>10, :name=>'Al') t = @Tag.load(:id=>20, :name=>'T') a.associations[:tags] = [t] a.set(:tags_attributes=>[{:id=>20, :name=>'T2', '_delete' => false, '_remove' => 'F'}]) @db.sqls.must_equal [] a.save @db.sqls.must_equal ["UPDATE albums SET name = 'Al' WHERE (id = 10)", "UPDATE tags SET name = 'T2' WHERE (id = 20)"] end it "should support updating objects with composite primary keys" do ar = @Artist.load(:id=>10, :name=>'Ar') co = @Concert.load(:tour=>'To', :date=>'2004-04-05', :playlist=>'Pl') ar.associations[:concerts] = [co] ar.set(:concerts_attributes=>[{:tour=>'To', :date=>'2004-04-05', :playlist=>'Pl2'}]) @db.sqls.must_equal [] ar.save check_sql_array("UPDATE artists SET name = 'Ar' WHERE (id = 10)", ["UPDATE concerts SET playlist = 'Pl2' WHERE ((tour = 'To') AND (date = '2004-04-05'))", "UPDATE concerts SET playlist = 'Pl2' WHERE ((date = '2004-04-05') AND (tour = 'To'))"]) end it "should support removing many_to_one objects" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') al.associations[:artist] = ar al.set(:artist_attributes=>{:id=>'20', :_remove=>'1'}) @db.sqls.must_equal [] al.save check_sql_array(["UPDATE albums SET artist_id = NULL, name = 'Al' WHERE (id = 10)", "UPDATE albums SET name = 'Al', artist_id = NULL WHERE (id = 10)"]) end it "should support removing one_to_one objects" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:first_album] = al ar.set(:first_album_attributes=>{:id=>10, :_remove=>'t'}) @db.sqls.must_equal [] ar.save @db.sqls.must_equal ["UPDATE albums SET artist_id = NULL WHERE (artist_id = 20)", "UPDATE artists SET name = 'Ar' WHERE (id = 20)"] end it "should support removing one_to_many objects" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:albums] = [al] ar.set(:albums_attributes=>[{:id=>10, :_remove=>'t'}]) ar.associations[:albums].must_equal [] @db.sqls.must_equal [] @Album.dataset = @Album.dataset.with_fetch(:id=>1) ar.save check_sql_array("SELECT 1 AS one FROM albums WHERE ((albums.artist_id = 20) AND (id = 10)) LIMIT 1", ["UPDATE albums SET artist_id = NULL, name = 'Al' WHERE (id = 10)", "UPDATE albums SET name = 'Al', artist_id = NULL WHERE (id = 10)"], "UPDATE artists SET name = 'Ar' WHERE (id = 20)") end it "should support removing many_to_many objects" do a = @Album.load(:id=>10, :name=>'Al') t = @Tag.load(:id=>20, :name=>'T') a.associations[:tags] = [t] a.set(:tags_attributes=>[{:id=>20, :_remove=>true}]) a.associations[:tags].must_equal [] @db.sqls.must_equal [] a.save @db.sqls.must_equal ["DELETE FROM at WHERE ((album_id = 10) AND (tag_id = 20))", "UPDATE albums SET name = 'Al' WHERE (id = 10)"] end it "should support removing objects with composite primary keys" do ar = @Artist.load(:id=>10, :name=>'Ar') co = @Concert.load(:tour=>'To', :date=>'2004-04-05', :playlist=>'Pl') ar.associations[:concerts] = [co] ar.set(:concerts_attributes=>[{:tour=>'To', 
:date=>'2004-04-05', :_remove=>'t'}]) @db.sqls.must_equal [] @Concert.dataset = @Concert.dataset.with_fetch(:id=>1) ar.save check_sql_array(["SELECT 1 AS one FROM concerts WHERE ((concerts.artist_id = 10) AND (tour = 'To') AND (date = '2004-04-05')) LIMIT 1", "SELECT 1 AS one FROM concerts WHERE ((concerts.artist_id = 10) AND (date = '2004-04-05') AND (tour = 'To')) LIMIT 1"], ["UPDATE concerts SET artist_id = NULL, playlist = 'Pl' WHERE ((tour = 'To') AND (date = '2004-04-05'))", "UPDATE concerts SET playlist = 'Pl', artist_id = NULL WHERE ((tour = 'To') AND (date = '2004-04-05'))", "UPDATE concerts SET artist_id = NULL, playlist = 'Pl' WHERE ((date = '2004-04-05') AND (tour = 'To'))", "UPDATE concerts SET playlist = 'Pl', artist_id = NULL WHERE ((date = '2004-04-05') AND (tour = 'To'))"], "UPDATE artists SET name = 'Ar' WHERE (id = 10)") end it "should support destroying many_to_one objects" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') al.associations[:artist] = ar al.set(:artist_attributes=>{:id=>'20', :_delete=>'1'}) @db.sqls.must_equal [] al.save check_sql_array(["UPDATE albums SET artist_id = NULL, name = 'Al' WHERE (id = 10)", "UPDATE albums SET name = 'Al', artist_id = NULL WHERE (id = 10)"], "DELETE FROM artists WHERE (id = 20)") end it "should support destroying one_to_one objects" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:first_album] = al ar.set(:first_album_attributes=>{:id=>10, :_delete=>'t'}) @db.sqls.must_equal [] ar.save @db.sqls.must_equal ["UPDATE artists SET name = 'Ar' WHERE (id = 20)", "DELETE FROM albums WHERE (id = 10)"] end it "should support destroying one_to_many objects" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:albums] = [al] ar.set(:albums_attributes=>[{:id=>10, :_delete=>'t'}]) @db.sqls.must_equal [] ar.save @db.sqls.must_equal ["UPDATE artists SET name = 'Ar' WHERE (id = 20)", "DELETE FROM albums WHERE (id = 10)"] end it "should support destroying many_to_many objects" do a = @Album.load(:id=>10, :name=>'Al') t = @Tag.load(:id=>20, :name=>'T') a.associations[:tags] = [t] a.set(:tags_attributes=>[{:id=>20, :_delete=>true}]) @db.sqls.must_equal [] a.save @db.sqls.must_equal ["DELETE FROM at WHERE ((album_id = 10) AND (tag_id = 20))", "UPDATE albums SET name = 'Al' WHERE (id = 10)", "DELETE FROM tags WHERE (id = 20)"] end it "should support destroying objects with composite primary keys" do ar = @Artist.load(:id=>10, :name=>'Ar') co = @Concert.load(:tour=>'To', :date=>'2004-04-05', :playlist=>'Pl') ar.associations[:concerts] = [co] ar.set(:concerts_attributes=>[{:tour=>'To', :date=>'2004-04-05', :_delete=>'t'}]) @db.sqls.must_equal [] ar.save check_sql_array("UPDATE artists SET name = 'Ar' WHERE (id = 10)", ["DELETE FROM concerts WHERE ((tour = 'To') AND (date = '2004-04-05'))", "DELETE FROM concerts WHERE ((date = '2004-04-05') AND (tour = 'To'))"]) end it "should support both string and symbol keys in nested attribute hashes" do a = @Album.load(:id=>10, :name=>'Al') t = @Tag.load(:id=>20, :name=>'T') a.associations[:tags] = [t] a.set('tags_attributes'=>[{'id'=>20, '_delete'=>true}]) @db.sqls.must_equal [] a.save @db.sqls.must_equal ["DELETE FROM at WHERE ((album_id = 10) AND (tag_id = 20))", "UPDATE albums SET name = 'Al' WHERE (id = 10)", "DELETE FROM tags WHERE (id = 20)"] end it "should support using a hash instead of an array for to_many nested attributes" do a = @Album.load(:id=>10, :name=>'Al') t = 
@Tag.load(:id=>20, :name=>'T') a.associations[:tags] = [t] a.set('tags_attributes'=>{'1'=>{'id'=>20, '_delete'=>true}}) @db.sqls.must_equal [] a.save @db.sqls.must_equal ["DELETE FROM at WHERE ((album_id = 10) AND (tag_id = 20))", "UPDATE albums SET name = 'Al' WHERE (id = 10)", "DELETE FROM tags WHERE (id = 20)"] end it "should only allow destroying associated objects if :destroy option is used in the nested_attributes call" do a = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') a.associations[:artist] = ar @Album.nested_attributes :artist proc{a.set(:artist_attributes=>{:id=>'20', :_delete=>'1'})}.must_raise(Sequel::MassAssignmentRestriction) @Album.nested_attributes :artist, :destroy=>true a.set(:artist_attributes=>{:id=>'20', :_delete=>'1'}) end it "should only allow removing associated objects if :remove option is used in the nested_attributes call" do a = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') a.associations[:artist] = ar @Album.nested_attributes :artist proc{a.set(:artist_attributes=>{:id=>'20', :_remove=>'1'})}.must_raise(Sequel::MassAssignmentRestriction) @Album.nested_attributes :artist, :remove=>true a.set(:artist_attributes=>{:id=>'20', :_remove=>'1'}) end it "should raise an Error if a primary key is given in a nested attribute hash, but no matching associated object exists" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:albums] = [al] proc{ar.set(:albums_attributes=>[{:id=>30, :_delete=>'t'}])}.must_raise(Sequel::Error) ar.set(:albums_attributes=>[{:id=>10, :_delete=>'t'}]) end it "should not raise an Error if an unmatched primary key is given, if the :unmatched_pk=>:ignore option is used" do @Artist.nested_attributes :albums, :unmatched_pk=>:ignore al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:albums] = [al] ar.set(:albums_attributes=>[{:id=>30, :_delete=>'t'}]) @db.sqls.must_equal [] ar.save @db.sqls.must_equal ["UPDATE artists SET name = 'Ar' WHERE (id = 20)"] end it "should raise an Error if a composite primary key is given in a nested attribute hash, but no matching associated object exists" do ar = @Artist.load(:id=>10, :name=>'Ar') co = @Concert.load(:tour=>'To', :date=>'2004-04-05', :playlist=>'Pl') ar.associations[:concerts] = [co] proc{ar.set(:concerts_attributes=>[{:tour=>'To', :date=>'2004-04-04', :_delete=>'t'}])}.must_raise(Sequel::Error) ar.set(:concerts_attributes=>[{:tour=>'To', :date=>'2004-04-05', :_delete=>'t'}]) end it "should not raise an Error if an unmatched composite primary key is given, if the :unmatched_pk=>:ignore option is used" do @Artist.nested_attributes :concerts, :unmatched_pk=>:ignore ar = @Artist.load(:id=>10, :name=>'Ar') co = @Concert.load(:tour=>'To', :date=>'2004-04-05', :playlist=>'Pl') ar.associations[:concerts] = [co] ar.set(:concerts_attributes=>[{:tour=>'To', :date=>'2004-04-06', :_delete=>'t'}]) @db.sqls.must_equal [] ar.save @db.sqls.must_equal ["UPDATE artists SET name = 'Ar' WHERE (id = 10)"] end it "should raise a NoExistingObject error if object to be updated no longer exists, if the :require_modification=>true option is used" do @Artist.nested_attributes :albums, :require_modification=>true, :destroy=>true al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:albums] = [al] ar.set(:albums_attributes=>[{:id=>10, :name=>'L'}]) @db.sqls.must_equal [] @db.numrows = [1, 0] proc{ar.save}.must_raise Sequel::NoExistingObject 
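# Assumed reading of the mock above: @db.numrows = [1, 0] makes the artists UPDATE match one row and the albums UPDATE match zero rows, which is what triggers NoExistingObject.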
@db.sqls.must_equal ["UPDATE artists SET name = 'Ar' WHERE (id = 20)", "UPDATE albums SET name = 'L' WHERE (id = 10)"] end it "should not raise an Error if object to be updated no longer exists, if the :require_modification=>false option is used" do @Artist.nested_attributes :albums, :require_modification=>false, :destroy=>true al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:albums] = [al] ar.set(:albums_attributes=>[{:id=>10, :name=>'L'}]) @db.sqls.must_equal [] @db.numrows = [1, 0] ar.save @db.sqls.must_equal ["UPDATE artists SET name = 'Ar' WHERE (id = 20)", "UPDATE albums SET name = 'L' WHERE (id = 10)"] end it "should raise a NoExistingObject error if object to be deleted no longer exists, if the :require_modification=>true option is used" do @Artist.nested_attributes :albums, :require_modification=>true, :destroy=>true al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:albums] = [al] ar.set(:albums_attributes=>[{:id=>10, :_delete=>'t'}]) @db.sqls.must_equal [] @db.numrows = [1, 0] proc{ar.save}.must_raise Sequel::NoExistingObject @db.sqls.must_equal ["UPDATE artists SET name = 'Ar' WHERE (id = 20)", "DELETE FROM albums WHERE (id = 10)"] end it "should not raise an Error if object to be deleted no longer exists, if the :require_modification=>false option is used" do @Artist.nested_attributes :albums, :require_modification=>false, :destroy=>true al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') ar.associations[:albums] = [al] ar.set(:albums_attributes=>[{:id=>10, :_delete=>'t'}]) @db.sqls.must_equal [] @db.numrows = [1, 0] ar.save @db.sqls.must_equal ["UPDATE artists SET name = 'Ar' WHERE (id = 20)", "DELETE FROM albums WHERE (id = 10)"] end it "should not attempt to validate nested attributes twice for one_to_many associations when creating them" do @Artist.nested_attributes :albums validated = [] @Album.class_eval do define_method(:validate) do super() validated << self end end a = @Artist.new(:name=>'Ar', :albums_attributes=>[{:name=>'Al'}]) @db.sqls.must_equal [] validated.length.must_equal 0 a.save validated.length.must_equal 1 check_sql_array("INSERT INTO artists (name) VALUES ('Ar')", ["INSERT INTO albums (artist_id, name) VALUES (1, 'Al')", "INSERT INTO albums (name, artist_id) VALUES ('Al', 1)"]) end it "should not attempt to validate nested attributes twice for one_to_one associations when creating them" do @Artist.nested_attributes :first_album validated = [] @Album.class_eval do define_method(:validate) do super() validated << self end end a = @Artist.new(:name=>'Ar', :first_album_attributes=>{:name=>'Al'}) @db.sqls.must_equal [] validated.length.must_equal 0 a.save validated.length.must_equal 1 check_sql_array("INSERT INTO artists (name) VALUES ('Ar')", "UPDATE albums SET artist_id = NULL WHERE (artist_id = 1)", "INSERT INTO albums (name, artist_id) VALUES ('Al', 1)") end it "should not clear reciprocal association before saving new one_to_one associated object" do @Artist.one_to_one :first_album, :clone=>:first_album, :reciprocal=>:artist @Artist.nested_attributes :first_album assoc = [] @Album.class_eval do define_method(:after_save) do super() assoc << associations[:artist] end end a = @Artist.new(:name=>'Ar', :first_album_attributes=>{:name=>'Al'}) @db.sqls.must_equal [] assoc.must_be_empty a.save assoc.length.must_equal 1 assoc.first.must_be_kind_of(@Artist) check_sql_array("INSERT INTO artists (name) VALUES ('Ar')", "UPDATE albums SET artist_id = 
NULL WHERE (artist_id = 1)", "INSERT INTO albums (name, artist_id) VALUES ('Al', 1)") end it "should not save if nested attribute is not valid and should include nested attribute validation errors in the main object's validation errors" do @Artist.class_eval do def validate super errors.add(:name, 'cannot be Ar') if name == 'Ar' end end a = @Album.new(:name=>'Al', :artist_attributes=>{:name=>'Ar'}) @db.sqls.must_equal [] proc{a.save}.must_raise(Sequel::ValidationFailed) a.errors.full_messages.must_equal ['artist name cannot be Ar'] @db.sqls.must_equal [] # Should preserve attributes a.artist.name.must_equal 'Ar' end it "should not attempt to validate nested attributes if the :validate=>false association option is used" do @Album.many_to_one :artist, :class=>@Artist, :validate=>false, :reciprocal=>nil @Album.nested_attributes :artist, :tags, :destroy=>true, :remove=>true @Artist.class_eval do def validate super errors.add(:name, 'cannot be Ar') if name == 'Ar' end end a = @Album.new(:name=>'Al', :artist_attributes=>{:name=>'Ar'}) @db.sqls.must_equal [] a.save check_sql_array("INSERT INTO artists (name) VALUES ('Ar')", ["INSERT INTO albums (artist_id, name) VALUES (1, 'Al')", "INSERT INTO albums (name, artist_id) VALUES ('Al', 1)"]) end it "should not attempt to validate nested attributes if the :validate=>false option is passed to save" do @Artist.class_eval do def validate super errors.add(:name, 'cannot be Ar') if name == 'Ar' end end a = @Album.new(:name=>'Al', :artist_attributes=>{:name=>'Ar'}) @db.sqls.must_equal [] a.save(:validate=>false) check_sql_array("INSERT INTO artists (name) VALUES ('Ar')", ["INSERT INTO albums (artist_id, name) VALUES (1, 'Al')", "INSERT INTO albums (name, artist_id) VALUES ('Al', 1)"]) end it "should not accept nested attributes unless explicitly specified" do @Artist.many_to_many :tags, :class=>@Tag, :left_key=>:album_id, :right_key=>:tag_id, :join_table=>:at proc{@Artist.create({:name=>'Ar', :tags_attributes=>[{:name=>'T'}]})}.must_raise(Sequel::MassAssignmentRestriction) @db.sqls.must_equal [] end it "should save when save_changes or update is called if nested attribute associated objects changed but there are no changes to the main object" do al = @Album.load(:id=>10, :name=>'Al') ar = @Artist.load(:id=>20, :name=>'Ar') al.associations[:artist] = ar @db.sqls.must_equal [] al.update(:artist_attributes=>{:id=>'20', :name=>'Ar2'}) @db.sqls.must_equal ["UPDATE artists SET name = 'Ar2' WHERE (id = 20)"] end it "should have a :limit option limiting the amount of entries" do @Album.nested_attributes :tags, :limit=>2 arr = [{:name=>'T'}] proc{@Album.new({:name=>'Al', :tags_attributes=>arr*3})}.must_raise(Sequel::Error) a = @Album.new({:name=>'Al', :tags_attributes=>arr*2}) @db.sqls.must_equal [] a.save check_sql_array("INSERT INTO albums (name) VALUES ('Al')", "INSERT INTO tags (name) VALUES ('T')", ["INSERT INTO at (album_id, tag_id) VALUES (1, 2)", "INSERT INTO at (tag_id, album_id) VALUES (2, 1)"], "INSERT INTO tags (name) VALUES ('T')", ["INSERT INTO at (album_id, tag_id) VALUES (1, 4)", "INSERT INTO at (tag_id, album_id) VALUES (4, 1)"]) end it "should accept a :reject_nil option " do @Album.nested_attributes(:tags, :reject_nil=>true) a = @Album.new(:name=>'Al', :tags_attributes=>nil) @db.sqls.must_equal [] a.save @db.sqls.must_equal ["INSERT INTO albums (name) VALUES ('Al')"] end it "should accept a block that each hash gets passed to determine if it should be processed" do @Album.nested_attributes(:tags){|h| h[:name].empty?} a = @Album.new({:name=>'Al', 
:tags_attributes=>[{:name=>'T'}, {:name=>''}, {:name=>'T2'}]}) @db.sqls.must_equal [] a.save check_sql_array("INSERT INTO albums (name) VALUES ('Al')", "INSERT INTO tags (name) VALUES ('T')", ["INSERT INTO at (album_id, tag_id) VALUES (1, 2)", "INSERT INTO at (tag_id, album_id) VALUES (2, 1)"], "INSERT INTO tags (name) VALUES ('T2')", ["INSERT INTO at (album_id, tag_id) VALUES (1, 4)", "INSERT INTO at (tag_id, album_id) VALUES (4, 1)"]) end it "should accept a :reject_if option that each hash gets passed to determine if it should be processed" do @Album.nested_attributes(:tags, :reject_if=>proc{|h| h[:name].empty?}) a = @Album.new({:name=>'Al', :tags_attributes=>[{:name=>'T'}, {:name=>''}, {:name=>'T2'}]}) @db.sqls.must_equal [] a.save check_sql_array("INSERT INTO albums (name) VALUES ('Al')", "INSERT INTO tags (name) VALUES ('T')", ["INSERT INTO at (album_id, tag_id) VALUES (1, 2)", "INSERT INTO at (tag_id, album_id) VALUES (2, 1)"], "INSERT INTO tags (name) VALUES ('T2')", ["INSERT INTO at (album_id, tag_id) VALUES (1, 4)", "INSERT INTO at (tag_id, album_id) VALUES (4, 1)"]) end it "should accept a :transform block that returns a changed attributes hash" do @Album.nested_attributes :tags, :transform=>proc{|parent, hash| hash[:name] << parent.name; hash } a = @Album.new(:name => 'Al') a.set(:tags_attributes=>[{:name=>'T'.dup}, {:name=>'T2'.dup}]) @db.sqls.must_equal [] a.save check_sql_array("INSERT INTO albums (name) VALUES ('Al')", "INSERT INTO tags (name) VALUES ('TAl')", ["INSERT INTO at (album_id, tag_id) VALUES (1, 2)", "INSERT INTO at (tag_id, album_id) VALUES (2, 1)"], "INSERT INTO tags (name) VALUES ('T2Al')", ["INSERT INTO at (album_id, tag_id) VALUES (1, 4)", "INSERT INTO at (tag_id, album_id) VALUES (4, 1)"]) end it "should return objects created/modified in the internal methods" do @Album.nested_attributes :tags, :remove=>true, :unmatched_pk=>:ignore objs = [] @Album.class_eval do private define_method(:nested_attributes_create){|*a| objs << [super(*a), :create]} define_method(:nested_attributes_remove){|*a| objs << [super(*a), :remove]} define_method(:nested_attributes_update){|*a| objs << [super(*a), :update]} end a = @Album.new(:name=>'Al') a.associations[:tags] = [@Tag.load(:id=>6, :name=>'A'), @Tag.load(:id=>7, :name=>'A2')] a.tags_attributes = [{:id=>6, :name=>'T'}, {:id=>7, :name=>'T2', :_remove=>true}, {:name=>'T3'}, {:id=>8, :name=>'T4'}, {:id=>9, :name=>'T5', :_remove=>true}] objs.must_equal [[@Tag.load(:id=>6, :name=>'T'), :update], [@Tag.load(:id=>7, :name=>'A2'), :remove], [@Tag.new(:name=>'T3'), :create]] end it "should raise an error if updating modifies the associated objects keys" do @Artist.columns :id, :name, :artist_id @Album.columns :id, :name, :artist_id @Tag.columns :id, :name, :tag_id @Artist.one_to_many :albums, :class=>@Album, :key=>:artist_id, :primary_key=>:artist_id @Album.many_to_one :artist, :class=>@Artist, :primary_key=>:artist_id @Album.many_to_many :tags, :class=>@Tag, :left_key=>:album_id, :right_key=>:tag_id, :join_table=>:at, :right_primary_key=>:tag_id @Artist.nested_attributes :albums, :destroy=>true, :remove=>true @Album.nested_attributes :artist, :tags, :destroy=>true, :remove=>true al = @Album.load(:id=>10, :name=>'Al', :artist_id=>25) ar = @Artist.load(:id=>20, :name=>'Ar', :artist_id=>25) t = @Tag.load(:id=>30, :name=>'T', :tag_id=>15) al.associations[:artist] = ar al.associations[:tags] = [t] ar.associations[:albums] = [al] proc{ar.set(:albums_attributes=>[{:id=>10, :name=>'Al2', :artist_id=>'3'}])}.must_raise(Sequel::Error) 
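# Each of the following raises because the nested hash tries to change the column the association is keyed on (artist_id or tag_id), which nested_attributes does not permit.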
proc{al.set(:artist_attributes=>{:id=>20, :name=>'Ar2', :artist_id=>'3'})}.must_raise(Sequel::Error) proc{al.set(:tags_attributes=>[{:id=>30, :name=>'T2', :tag_id=>'3'}])}.must_raise(Sequel::Error) end it "should accept a :fields option and only allow modification of those fields" do @Tag.columns :id, :name, :number @Album.nested_attributes :tags, :destroy=>true, :remove=>true, :fields=>[:name] al = @Album.load(:id=>10, :name=>'Al') t = @Tag.load(:id=>30, :name=>'T', :number=>10) al.associations[:tags] = [t] al.set(:tags_attributes=>[{:id=>30, :name=>'T2'}, {:name=>'T3'}]) @db.sqls.must_equal [] al.save check_sql_array("UPDATE albums SET name = 'Al' WHERE (id = 10)", "UPDATE tags SET name = 'T2' WHERE (id = 30)", "INSERT INTO tags (name) VALUES ('T3')", ["INSERT INTO at (album_id, tag_id) VALUES (10, 1)", "INSERT INTO at (tag_id, album_id) VALUES (1, 10)"]) al.set(:tags_attributes=>[{:id=>30, :name=>'T3', :number=>3}]) al.tags.first.name.must_equal 'T3' al.tags.first.number.must_equal 10 al.set(:tags_attributes=>[{:name=>'T4', :number=>3}]) al.tags.last.name.must_equal 'T4' al.tags.last.number.must_be_nil end it "should accept a proc for the :fields option that accepts the associated object and returns an array of fields" do @Tag.columns :id, :name, :number @Album.nested_attributes :tags, :destroy=>true, :remove=>true, :fields=>proc{|object| object.is_a?(@Tag) ? [:name] : []} al = @Album.load(:id=>10, :name=>'Al') t = @Tag.load(:id=>30, :name=>'T', :number=>10) al.associations[:tags] = [t] al.set(:tags_attributes=>[{:id=>30, :name=>'T2'}, {:name=>'T3'}]) @db.sqls.must_equal [] al.save check_sql_array("UPDATE albums SET name = 'Al' WHERE (id = 10)", "UPDATE tags SET name = 'T2' WHERE (id = 30)", "INSERT INTO tags (name) VALUES ('T3')", ["INSERT INTO at (album_id, tag_id) VALUES (10, 1)", "INSERT INTO at (tag_id, album_id) VALUES (1, 10)"]) al.set_nested_attributes(:tags, [{:id=>30, :name=>'T3', :number=>3}], :fields=>[:name]) al.tags.first.name.must_equal 'T3' al.tags.first.number.must_equal 10 al.set_nested_attributes(:tags, [{:name=>'T4', :number=>3}], :fields=>[:name]) al.tags.last.name.must_equal 'T4' al.tags.last.number.must_be_nil end it "should allow per-call options via the set_nested_attributes method" do @Tag.columns :id, :name, :number @Album.nested_attributes :tags al = @Album.load(:id=>10, :name=>'Al') t = @Tag.load(:id=>30, :name=>'T', :number=>10) al.associations[:tags] = [t] al.set_nested_attributes(:tags, [{:id=>30, :name=>'T2'}, {:name=>'T3'}], :fields=>[:name]) @db.sqls.must_equal [] al.save check_sql_array("UPDATE albums SET name = 'Al' WHERE (id = 10)", "UPDATE tags SET name = 'T2' WHERE (id = 30)", "INSERT INTO tags (name) VALUES ('T3')", ["INSERT INTO at (album_id, tag_id) VALUES (10, 1)", "INSERT INTO at (tag_id, album_id) VALUES (1, 10)"]) al.set_nested_attributes(:tags, [{:id=>30, :name=>'T3', :number=>3}], :fields=>[:name]) al.tags.first.name.must_equal 'T3' al.tags.first.number.must_equal 10 al.set_nested_attributes(:tags, [{:name=>'T4', :number=>3}], :fields=>[:name]) al.tags.last.name.must_equal 'T4' al.tags.last.number.must_be_nil end it "should have set_nested_attributes method raise error if called with a bad association" do proc{@Album.load(:id=>10, :name=>'Al').set_nested_attributes(:tags2, [{:id=>30, :name=>'T2', :number=>3}], :fields=>[:name])}.must_raise(Sequel::Error) end it "should have set_nested_attributes method raise error if there is no existing object with the given primary key" do @Tag.columns :id, :name, :number proc{@Album.load(:id=>10, 
:name=>'Al').set_nested_attributes(:tags, [{:id=>30, :name=>'T2', :number=>3}], :fields=>[:name])}.must_raise(Sequel::Error) end it "should have set_nested_attributes method raise error if called with an association that doesn't support nested attributes" do @Album.many_to_many :tags, :class=>@Tag, :left_key=>:album_id, :right_key=>:tag_id, :join_table=>:at proc{@Album.load(:id=>10, :name=>'Al').set_nested_attributes(:tags, [{:id=>30, :name=>'T2', :number=>3}], :fields=>[:name])}.must_raise(Sequel::Error) end it "should not allow adding nested attributes after freezing" do @Artist.freeze proc{@Artist.nested_attributes :albums}.must_raise RuntimeError end it "should not allow adding nested attributes after freezing if none were present before" do @c = Class.new(Sequel::Model(@db)) @c.plugin :nested_attributes @Artist = Class.new(@c).set_dataset(:artists) @Artist.freeze proc{@Artist.nested_attributes :albums}.must_raise RuntimeError end end
sequel-5.63.0/spec/extensions/null_dataset_spec.rb
require_relative "spec_helper" describe "null_dataset extension" do before do @db = Sequel::mock(:fetch=>{:id=>1}, :autoid=>1, :numrows=>1, :columns=>[:id]).extension(:null_dataset) @ds = @db[:table].nullify @i = 0 @pr = proc{|*a| @i += 1} end after do @db.sqls.must_equal [] unless @skip_check end it "should make each be a noop" do @ds.each(&@pr).must_be_same_as(@ds) @i.must_equal 0 end it "should make fetch_rows be a noop" do @ds.fetch_rows("SELECT 1", &@pr).must_be_nil @i.must_equal 0 end it "nullify should be a cached dataset" do ds = @db[:table] ds.nullify.object_id.must_equal(ds.nullify.object_id) end it "should make insert be a noop" do @ds.insert(1).must_be_nil end it "should make update be a noop" do @ds.update(:a=>1).must_equal 0 end it "should make delete be a noop" do @ds.delete.must_equal 0 end it "should make truncate be a noop" do @ds.truncate.must_be_nil end it "should make execute_* be a noop" do @ds.send(:execute_ddl,'FOO').must_be_nil @ds.send(:execute_insert,'FOO').must_be_nil @ds.send(:execute_dui,'FOO').must_be_nil @ds.send(:execute,'FOO').must_be_nil end it "should have working columns" do @skip_check = true 2.times do @ds.columns.must_equal [:id] end @db.sqls.must_equal ['SELECT * FROM table LIMIT 0'] end it "should have count return 0" do @ds.count.must_equal 0 end it "should have empty return true" do @ds.empty?.must_equal true end it "should make import a noop" do @ds.import([:id], [[1], [2], [3]]).must_be_nil end it "should have nullify method return a modified receiver" do @skip_check = true ds = @db[:table] ds.nullify.wont_be_same_as(ds) ds.each(&@pr) @db.sqls.must_equal ['SELECT * FROM table'] @i.must_equal 1 end it "should work with method chaining" do @ds.where(:a=>1).select(:b).each(&@pr) @i.must_equal 0 end end
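# Usage sketch (not part of the gem source): a minimal, assumed standalone
# script showing the null_dataset behavior the spec above exercises, using a
# mock database as the spec does. Dataset#nullify returns a dataset that
# never sends queries to the database.
require 'sequel'

db = Sequel.mock(:fetch=>{:id=>1}, :columns=>[:id]).extension(:null_dataset)
ds = db[:table].nullify

ds.each{|row| raise "never reached"} # each is a noop; the block never runs
ds.insert(:id=>2)                    # => nil, no INSERT is issued
ds.update(:id=>3)                    # => 0
ds.delete                            # => 0
ds.count                             # => 0, without running a query
ds.empty?                            # => true
db.sqls                              # => [] (no SQL was ever sent)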
sequel-5.63.0/spec/extensions/optimistic_locking_spec.rb
require_relative "spec_helper" describe "optimistic_locking plugin" do before do @c = Class.new(Sequel::Model(:people)) do end h = {1=>{:id=>1, :name=>'John', :lock_version=>2}} lv = @lv = "lock_version".dup @c.dataset = @c.dataset.with_numrows(proc do |sql| case sql when /UPDATE people SET (name|#{lv}) = ('Jim'|'Bob'|\d+), (?:name|#{lv}) = ('Jim'|'Bob'|\d+) WHERE \(\(id = (\d+)\) AND \(#{lv} = (\d+)\)\)/ name, nlv = $1 == 'name' ? [$2, $3] : [$3, $2] m = h[$4.to_i] if m && m[:lock_version] == $5.to_i m.merge!(:name=>name.gsub("'", ''), :lock_version=>nlv.to_i) 1 else 0 end when /UPDATE people SET #{lv} = (\d+) WHERE \(\(id = (\d+)\) AND \(#{lv} = (\d+)\)\)/ m = h[$2.to_i] if m && m[:lock_version] == $3.to_i m.merge!(:lock_version=>$1.to_i) 1 else 0 end when /DELETE FROM people WHERE \(\(id = (\d+)\) AND \(#{lv} = (\d+)\)\)/ m = h[$1.to_i] if m && m[lv.to_sym] == $2.to_i h.delete($1.to_i) 1 else 0 end else puts sql end end).with_fetch(proc do |sql| m = h[1].dup v = m.delete(:lock_version) m[lv.to_sym] = v m end) @c.columns :id, :name, :lock_version @c.plugin :optimistic_locking end it "should raise an error when updating a stale record" do p1 = @c[1] p2 = @c[1] p1.update(:name=>'Jim') proc{p2.update(:name=>'Bob')}.must_raise(Sequel::Plugins::OptimisticLocking::Error) end it "should raise an error when destroying a stale record" do p1 = @c[1] p2 = @c[1] p1.update(:name=>'Jim') proc{p2.destroy}.must_raise(Sequel::Plugins::OptimisticLocking::Error) end it "should not raise an error when updating the same record twice" do p1 = @c[1] p1.update(:name=>'Jim') p1.update(:name=>'Bob') end it "should allow changing the lock column via model.lock_column=" do @lv.replace('lv') @c.columns :id, :name, :lv @c.lock_column = :lv p1 = @c[1] p2 = @c[1] p1.update(:name=>'Jim') proc{p2.update(:name=>'Bob')}.must_raise(Sequel::Plugins::OptimisticLocking::Error) end it "should allow changing the lock column via plugin option" do @lv.replace('lv') @c.columns :id, :name, :lv @c.plugin :optimistic_locking, :lock_column=>:lv p1 = @c[1] p2 = @c[1] p1.update(:name=>'Jim') proc{p2.destroy}.must_raise(Sequel::Plugins::OptimisticLocking::Error) end it "should work when subclassing" do c = Class.new(@c) p1 = c[1] p2 = c[1] p1.update(:name=>'Jim') proc{p2.update(:name=>'Bob')}.must_raise(Sequel::Plugins::OptimisticLocking::Error) end it "should work correctly if attempting to refresh and save again after a failed save" do p1 = @c[1] p2 = @c[1] p1.update(:name=>'Jim') begin p2.update(:name=>'Bob') rescue Sequel::Plugins::OptimisticLocking::Error p2.refresh @c.db.sqls p2.update(:name=>'Bob') end @c.db.sqls.must_equal ["UPDATE people SET name = 'Bob', lock_version = 4 WHERE ((id = 1) AND (lock_version = 3))"] end it "should increment the lock column when #modified! even if no columns are changed" do p1 = @c[1] p1.modified!
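# modified! marks the instance dirty, so save_changes below issues an UPDATE that bumps only the lock column.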
lv = p1.lock_version p1.save_changes p1.lock_version.must_equal lv + 1 end it "should not increment the lock column when the update fails" do @c.dataset = @c.dataset.with_extend{def update(_) raise end} p1 = @c[1] p1.modified! lv = p1.lock_version proc{p1.save_changes}.must_raise(RuntimeError) p1.lock_version.must_equal lv end end
sequel-5.63.0/spec/extensions/pagination_spec.rb
require_relative "spec_helper" describe "A paginated dataset" do before do count = @count = [153] @d = Sequel.mock.dataset.extension(:pagination).with_extend{define_method(:count){count.first}} @paginated = @d.paginate(1, 20) end it "should raise an error if the dataset already has a limit" do proc{@d.limit(10).paginate(1,10)}.must_raise(Sequel::Error) proc{@paginated.paginate(2,20)}.must_raise(Sequel::Error) proc{@d.limit(10).each_page(10){|ds|}}.must_raise(Sequel::Error) proc{@d.limit(10).each_page(10)}.must_raise(Sequel::Error) end it "should set the limit and offset options correctly" do @paginated.opts[:limit].must_equal 20 @paginated.opts[:offset].must_equal 0 end it "should set the page count correctly" do @paginated.page_count.must_equal 8 @d.paginate(1, 50).page_count.must_equal 4 @count[0] = 0 @d.paginate(1, 50).page_count.must_equal 1 end it "should set the current page number correctly" do @paginated.current_page.must_equal 1 @d.paginate(3, 50).current_page.must_equal 3 end it "should return the next page number or nil if we're on the last" do @paginated.next_page.must_equal 2 @d.paginate(4, 50).next_page.must_be_nil end it "should return the previous page number or nil if we're on the first" do @paginated.prev_page.must_be_nil @d.paginate(4, 50).prev_page.must_equal 3 end it "should return the page range" do @paginated.page_range.must_equal(1..8) @d.paginate(4, 50).page_range.must_equal(1..4) end it "should return the record range for the current page" do @paginated.current_page_record_range.must_equal(1..20) @d.paginate(4, 50).current_page_record_range.must_equal(151..153) @d.paginate(5, 50).current_page_record_range.must_equal(0..0) end it "should return the record count for the current page" do @paginated.current_page_record_count.must_equal 20 @d.paginate(3, 50).current_page_record_count.must_equal 50 @d.paginate(4, 50).current_page_record_count.must_equal 3 @d.paginate(5, 50).current_page_record_count.must_equal 0 end it "should know if current page is last page" do @paginated.last_page?.must_equal false @d.paginate(2, 20).last_page?.must_equal false @d.paginate(5, 30).last_page?.must_equal false @d.paginate(6, 30).last_page?.must_equal true @count[0] = 0 @d.paginate(1, 30).last_page?.must_equal true @d.paginate(2, 30).last_page?.must_equal false end it "should know if current page is first
page" do @paginated.first_page?.must_equal true @d.paginate(1, 20).first_page?.must_equal true @d.paginate(2, 20).first_page?.must_equal false end it "should work with fixed sql" do ds = @d.clone(:sql => 'select * from blah') @count[0] = 150 ds.paginate(2, 50).sql.must_equal 'SELECT * FROM (select * from blah) AS t1 LIMIT 50 OFFSET 50' end end describe "Dataset#each_page" do before do @d = Sequel.mock[:items].extension(:pagination).with_extend{def count; 153 end} end it "should raise an error if the dataset already has a limit" do proc{@d.limit(10).each_page(10){}}.must_raise(Sequel::Error) end it "should iterate over each page in the resultset as a paginated dataset" do a = [] @d.each_page(50) {|p| a << p} a.map {|p| p.sql}.must_equal [ 'SELECT * FROM items LIMIT 50 OFFSET 0', 'SELECT * FROM items LIMIT 50 OFFSET 50', 'SELECT * FROM items LIMIT 50 OFFSET 100', 'SELECT * FROM items LIMIT 50 OFFSET 150', ] end it "should return an enumerator if no block is given" do enum = @d.each_page(50) enum.map {|p| p.sql}.must_equal [ 'SELECT * FROM items LIMIT 50 OFFSET 0', 'SELECT * FROM items LIMIT 50 OFFSET 50', 'SELECT * FROM items LIMIT 50 OFFSET 100', 'SELECT * FROM items LIMIT 50 OFFSET 150', ] end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/pg_array_associations_spec.rb�����������������������������������������0000664�0000000�0000000�00000135103�14342141206�0024156�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe Sequel::Model, "pg_array_associations" do before do @db = Sequel.mock(:host=>'postgres', :numrows=>1) @db.extend_datasets{def quote_identifiers?; false end} class ::Artist < Sequel::Model(@db) attr_accessor :yyy columns :id, :tag_ids plugin :pg_array_associations pg_array_to_many :tags pg_array_to_many :a_tags, :clone=>:tags, :conditions=>{:name=>'A'}, :key=>:tag_ids end class ::Tag < Sequel::Model(@db) columns :id plugin :pg_array_associations many_to_pg_array :artists many_to_pg_array :a_artists, :clone=>:artists, :conditions=>{:name=>'A'} def id3 id*3 end end @c1 = Artist @c2 = Tag @c1.dataset = @c1.dataset.with_fetch(:id=>1, :tag_ids=>Sequel.pg_array([1,2,3])) @c2.dataset = @c2.dataset.with_fetch(:id=>2) @o1 = @c1.first @o2 = @c2.first @n1 = @c1.new @n2 = @c2.new @db.sqls end after do Object.send(:remove_const, :Artist) Object.send(:remove_const, :Tag) end it "should respect :adder=>nil option to not create a add_* method" do c = Artist c.pg_array_to_many :cs, :adder=>nil c.method_defined?(:add_c).must_equal false c.method_defined?(:remove_c).must_equal true c.method_defined?(:remove_all_cs).must_equal true c = Tag c.many_to_pg_array :cs, :adder=>nil c.method_defined?(:add_c).must_equal false c.method_defined?(:remove_c).must_equal true c.method_defined?(:remove_all_cs).must_equal true end it "should respect :remover=>nil option to not create a remove_* method" do c = Artist c.pg_array_to_many :cs, :remover=>nil c.method_defined?(:add_c).must_equal true c.method_defined?(:remove_c).must_equal false c.method_defined?(:remove_all_cs).must_equal true c = Tag c.many_to_pg_array :cs, 
:remover=>nil c.method_defined?(:add_c).must_equal true c.method_defined?(:remove_c).must_equal false c.method_defined?(:remove_all_cs).must_equal true end it "should respect :clearer=>nil option to not create a remove_all_* method" do c = Artist c.pg_array_to_many :cs, :clearer=>nil c.method_defined?(:add_c).must_equal true c.method_defined?(:remove_c).must_equal true c.method_defined?(:remove_all_cs).must_equal false c = Tag c.many_to_pg_array :cs, :clearer=>nil c.method_defined?(:add_c).must_equal true c.method_defined?(:remove_c).must_equal true c.method_defined?(:remove_all_cs).must_equal false end it "should populate :key_hash and :id_map option correctly for custom eager loaders" do khs = [] pr = proc{|h| khs << [h[:key_hash], h[:id_map]]} @c1.pg_array_to_many :tags, :clone=>:tags, :eager_loader=>pr @c2.many_to_pg_array :artists, :clone=>:artists, :eager_loader=>pr @c1.eager(:tags).all @c2.eager(:artists).all khs.must_equal [[{}, nil], [{:id=>{2=>[Tag.load(:id=>2)]}}, {2=>[Tag.load(:id=>2)]}]] end it "should not issue queries if the object cannot have associated objects" do @n1.tags.must_equal [] @c1.load(:tag_ids=>[]).tags.must_equal [] @n2.artists.must_equal [] @db.sqls.must_equal [] end it "should use correct SQL when loading associations lazily" do @o1.tags.must_equal [@o2] @o2.artists.must_equal [@o1] @db.sqls.must_equal ["SELECT * FROM tags WHERE (tags.id IN (1, 2, 3))", "SELECT * FROM artists WHERE (artists.tag_ids @> ARRAY[2]::integer[])"] end it "should accept :primary_key option for primary keys to use in current and associated table" do @c1.pg_array_to_many :tags, :clone=>:tags, :primary_key=>Sequel./(:id, 3) @c2.many_to_pg_array :artists, :clone=>:artists, :primary_key=>:id3 @o1.tags_dataset.sql.must_equal "SELECT * FROM tags WHERE ((tags.id / 3) IN (1, 2, 3))" @o2.artists_dataset.sql.must_equal "SELECT * FROM artists WHERE (artists.tag_ids @> ARRAY[6]::integer[])" end it "should allowing filtering by associations" do @c1.filter(:tags=>@o2).sql.must_equal "SELECT * FROM artists WHERE (artists.tag_ids @> ARRAY[2]::integer[])" @c2.filter(:artists=>@o1).sql.must_equal "SELECT * FROM tags WHERE (tags.id IN (1, 2, 3))" end it "should allowing filtering by associations with :conditions" do @c1.filter(:a_tags=>@o2).sql.must_equal "SELECT * FROM artists WHERE coalesce((artists.tag_ids && (SELECT array_agg(tags.id) FROM tags WHERE ((name = 'A') AND (tags.id IS NOT NULL) AND (tags.id = 2)))), false)" @c2.filter(:a_artists=>@o1).sql.must_equal "SELECT * FROM tags WHERE (tags.id IN (SELECT _smtopgaa_key_ FROM artists CROSS JOIN unnest(artists.tag_ids) AS _smtopgaa_(_smtopgaa_key_) WHERE ((name = 'A') AND (artists.tag_ids IS NOT NULL) AND (artists.id = 1))))" end it "should allowing excluding by associations" do @c1.exclude(:tags=>@o2).sql.must_equal "SELECT * FROM artists WHERE (NOT (artists.tag_ids @> ARRAY[2]::integer[]) OR (artists.tag_ids IS NULL))" @c2.exclude(:artists=>@o1).sql.must_equal "SELECT * FROM tags WHERE ((tags.id NOT IN (1, 2, 3)) OR (tags.id IS NULL))" end it "should allowing excluding by associations with :conditions" do @c1.exclude(:a_tags=>@o2).sql.must_equal "SELECT * FROM artists WHERE (NOT coalesce((artists.tag_ids && (SELECT array_agg(tags.id) FROM tags WHERE ((name = 'A') AND (tags.id IS NOT NULL) AND (tags.id = 2)))), false) OR (artists.tag_ids IS NULL))" @c2.exclude(:a_artists=>@o1).sql.must_equal "SELECT * FROM tags WHERE ((tags.id NOT IN (SELECT _smtopgaa_key_ FROM artists CROSS JOIN unnest(artists.tag_ids) AS _smtopgaa_(_smtopgaa_key_) WHERE ((name = 
'A') AND (artists.tag_ids IS NOT NULL) AND (artists.id = 1)))) OR (tags.id IS NULL))" end it "should allowing filtering by multiple associations" do @c1.filter(:tags=>[@c2.load(:id=>1), @c2.load(:id=>2)]).sql.must_equal "SELECT * FROM artists WHERE (artists.tag_ids && ARRAY[1,2]::integer[])" @c2.filter(:artists=>[@c1.load(:tag_ids=>Sequel.pg_array([3, 4])), @c1.load(:tag_ids=>Sequel.pg_array([4, 5]))]).sql.must_equal "SELECT * FROM tags WHERE (tags.id IN (3, 4, 5))" end it "should allowing filtering by multiple associations with :conditions" do @c1.filter(:a_tags=>[@c2.load(:id=>1), @c2.load(:id=>2)]).sql.must_equal "SELECT * FROM artists WHERE coalesce((artists.tag_ids && (SELECT array_agg(tags.id) FROM tags WHERE ((name = 'A') AND (tags.id IS NOT NULL) AND (tags.id IN (1, 2))))), false)" @c2.filter(:a_artists=>[@c1.load(:id=>7, :tag_ids=>Sequel.pg_array([3, 4])), @c1.load(:id=>8, :tag_ids=>Sequel.pg_array([4, 5]))]).sql.must_equal "SELECT * FROM tags WHERE (tags.id IN (SELECT _smtopgaa_key_ FROM artists CROSS JOIN unnest(artists.tag_ids) AS _smtopgaa_(_smtopgaa_key_) WHERE ((name = 'A') AND (artists.tag_ids IS NOT NULL) AND (artists.id IN (7, 8)))))" end it "should allowing excluding by multiple associations" do @c1.exclude(:tags=>[@c2.load(:id=>1), @c2.load(:id=>2)]).sql.must_equal "SELECT * FROM artists WHERE (NOT (artists.tag_ids && ARRAY[1,2]::integer[]) OR (artists.tag_ids IS NULL))" @c2.exclude(:artists=>[@c1.load(:tag_ids=>Sequel.pg_array([3, 4])), @c1.load(:tag_ids=>Sequel.pg_array([4, 5]))]).sql.must_equal "SELECT * FROM tags WHERE ((tags.id NOT IN (3, 4, 5)) OR (tags.id IS NULL))" end it "should allowing excluding by multiple associations with :conditions" do @c1.exclude(:a_tags=>[@c2.load(:id=>1), @c2.load(:id=>2)]).sql.must_equal "SELECT * FROM artists WHERE (NOT coalesce((artists.tag_ids && (SELECT array_agg(tags.id) FROM tags WHERE ((name = 'A') AND (tags.id IS NOT NULL) AND (tags.id IN (1, 2))))), false) OR (artists.tag_ids IS NULL))" @c2.exclude(:a_artists=>[@c1.load(:id=>7, :tag_ids=>Sequel.pg_array([3, 4])), @c1.load(:id=>8, :tag_ids=>Sequel.pg_array([4, 5]))]).sql.must_equal "SELECT * FROM tags WHERE ((tags.id NOT IN (SELECT _smtopgaa_key_ FROM artists CROSS JOIN unnest(artists.tag_ids) AS _smtopgaa_(_smtopgaa_key_) WHERE ((name = 'A') AND (artists.tag_ids IS NOT NULL) AND (artists.id IN (7, 8))))) OR (tags.id IS NULL))" end it "should allowing filtering/excluding associations with NULL or empty values" do @c1.filter(:tags=>@c2.new).sql.must_equal 'SELECT * FROM artists WHERE false' @c1.exclude(:tags=>@c2.new).sql.must_equal 'SELECT * FROM artists WHERE true' @c2.filter(:artists=>@c1.new).sql.must_equal 'SELECT * FROM tags WHERE false' @c2.exclude(:artists=>@c1.new).sql.must_equal 'SELECT * FROM tags WHERE true' @c2.filter(:artists=>@c1.load(:tag_ids=>[])).sql.must_equal 'SELECT * FROM tags WHERE false' @c2.exclude(:artists=>@c1.load(:tag_ids=>[])).sql.must_equal 'SELECT * FROM tags WHERE true' @c1.filter(:tags=>[@c2.new]).sql.must_equal 'SELECT * FROM artists WHERE false' @c1.exclude(:tags=>[@c2.new]).sql.must_equal 'SELECT * FROM artists WHERE true' @c2.filter(:artists=>[@c1.load(:tag_ids=>[])]).sql.must_equal 'SELECT * FROM tags WHERE false' @c2.exclude(:artists=>[@c1.load(:tag_ids=>[])]).sql.must_equal 'SELECT * FROM tags WHERE true' @c1.filter(:tags=>[@c2.new, @c2.load(:id=>2)]).sql.must_equal "SELECT * FROM artists WHERE (artists.tag_ids && ARRAY[2]::integer[])" @c2.filter(:artists=>[@c1.load(:tag_ids=>Sequel.pg_array([3, 4])), @c1.new]).sql.must_equal "SELECT * 
FROM tags WHERE (tags.id IN (3, 4))" end it "should allowing filtering by association datasets" do @c1.filter(:tags=>@c2.where(:id=>1)).sql.must_equal "SELECT * FROM artists WHERE coalesce((artists.tag_ids && (SELECT array_agg(tags.id) FROM tags WHERE (id = 1))), false)" @c2.filter(:artists=>@c1.where(:id=>1)).sql.must_equal "SELECT * FROM tags WHERE (EXISTS (SELECT 1 FROM (SELECT artists.tag_ids AS key FROM artists WHERE (id = 1)) AS t1 WHERE (tags.id = any(key))))" end it "should allowing filtering by association datasets with :conditions" do @c1.filter(:a_tags=>@c2.where(:id=>1)).sql.must_equal "SELECT * FROM artists WHERE coalesce((artists.tag_ids && (SELECT array_agg(tags.id) FROM tags WHERE ((name = 'A') AND (tags.id IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (id = 1)))))), false)" @c2.filter(:a_artists=>@c1.where(:id=>1)).sql.must_equal "SELECT * FROM tags WHERE (tags.id IN (SELECT _smtopgaa_key_ FROM artists CROSS JOIN unnest(artists.tag_ids) AS _smtopgaa_(_smtopgaa_key_) WHERE ((name = 'A') AND (artists.tag_ids IS NOT NULL) AND (artists.id IN (SELECT artists.id FROM artists WHERE (id = 1))))))" end it "should allowing excluding by association datasets" do @c1.exclude(:tags=>@c2.where(:id=>1)).sql.must_equal "SELECT * FROM artists WHERE (NOT coalesce((artists.tag_ids && (SELECT array_agg(tags.id) FROM tags WHERE (id = 1))), false) OR (artists.tag_ids IS NULL))" @c2.exclude(:artists=>@c1.where(:id=>1)).sql.must_equal "SELECT * FROM tags WHERE (NOT (EXISTS (SELECT 1 FROM (SELECT artists.tag_ids AS key FROM artists WHERE (id = 1)) AS t1 WHERE (tags.id = any(key)))) OR (tags.id IS NULL))" end it "should allowing excluding by association datasets with :conditions" do @c1.exclude(:a_tags=>@c2.where(:id=>1)).sql.must_equal "SELECT * FROM artists WHERE (NOT coalesce((artists.tag_ids && (SELECT array_agg(tags.id) FROM tags WHERE ((name = 'A') AND (tags.id IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (id = 1)))))), false) OR (artists.tag_ids IS NULL))" @c2.exclude(:a_artists=>@c1.where(:id=>1)).sql.must_equal "SELECT * FROM tags WHERE ((tags.id NOT IN (SELECT _smtopgaa_key_ FROM artists CROSS JOIN unnest(artists.tag_ids) AS _smtopgaa_(_smtopgaa_key_) WHERE ((name = 'A') AND (artists.tag_ids IS NOT NULL) AND (artists.id IN (SELECT artists.id FROM artists WHERE (id = 1)))))) OR (tags.id IS NULL))" end it "filter by associations should respect key options" do @c1.class_eval{def tag3_ids; tag_ids.map{|x| x*3} end} @c1.pg_array_to_many :tags, :clone=>:tags, :primary_key=>Sequel.*(:id, 3), :primary_key_method=>:id3, :key=>:tag3_ids, :key_column=>Sequel.pg_array(:tag_ids)[1..2] @c2.many_to_pg_array :artists, :clone=>:artists, :primary_key=>Sequel.*(:id, 3), :primary_key_method=>:id3, :key=>:tag3_ids, :key_column=>Sequel.pg_array(:tag_ids)[1..2] @c1.filter(:tags=>@o2).sql.must_equal "SELECT * FROM artists WHERE ((artists.tag_ids)[1:2] @> ARRAY[6]::integer[])" @c2.filter(:artists=>@o1).sql.must_equal "SELECT * FROM tags WHERE ((tags.id * 3) IN (3, 6, 9))" @c1.filter(:tags=>@c2.where(:id=>1)).sql.must_equal "SELECT * FROM artists WHERE coalesce(((artists.tag_ids)[1:2] && (SELECT array_agg((tags.id * 3)) FROM tags WHERE (id = 1))), false)" @c2.filter(:artists=>@c1.where(:id=>1)).sql.must_equal "SELECT * FROM tags WHERE (EXISTS (SELECT 1 FROM (SELECT (artists.tag_ids)[1:2] AS key FROM artists WHERE (id = 1)) AS t1 WHERE ((tags.id * 3) = any(key))))" end it "should raise an error if associated model does not have a primary key, and :primary_key is not specified" do 
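# With the primary keys stripped below, lazily loading tags and defining the many_to_pg_array association must both fail, since there is no key to match against.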
@c1.no_primary_key @c2.no_primary_key @c1.pg_array_to_many :tags, :clone=>:tags proc{@o1.tags}.must_raise(Sequel::Error) proc{@c2.many_to_pg_array :artists, :clone=>:artists}.must_raise(Sequel::Error) @db.sqls.must_equal [] end it "should support a :key option" do @c1.pg_array_to_many :tags, :clone=>:tags, :key=>:tag2_ids @c2.many_to_pg_array :artists, :clone=>:artists, :key=>:tag2_ids @c1.class_eval{def tag2_ids; tag_ids.map{|x| x * 2} end} @o1.tags_dataset.sql.must_equal "SELECT * FROM tags WHERE (tags.id IN (2, 4, 6))" @o2.artists_dataset.sql.must_equal "SELECT * FROM artists WHERE (artists.tag2_ids @> ARRAY[2]::integer[])" end it "should support a :key_column option" do @c2.many_to_pg_array :artists, :clone=>:artists, :key_column=>Sequel.pg_array(:tag_ids)[1..2], :key=>:tag2_ids @o2.artists_dataset.sql.must_equal "SELECT * FROM artists WHERE ((artists.tag_ids)[1:2] @> ARRAY[2]::integer[])" end it "should support a :primary_key option" do @c1.pg_array_to_many :tags, :clone=>:tags, :primary_key=>:id2 @c2.many_to_pg_array :artists, :clone=>:artists, :primary_key=>:id2 @o1.tags_dataset.sql.must_equal "SELECT * FROM tags WHERE (tags.id2 IN (1, 2, 3))" @c2.class_eval{def id2; id*2 end} @o2.artists_dataset.sql.must_equal "SELECT * FROM artists WHERE (artists.tag_ids @> ARRAY[4]::integer[])" end it "should support a :conditions option" do @c1.pg_array_to_many :tags, :clone=>:tags, :conditions=>{:a=>1} @c2.many_to_pg_array :artists, :clone=>:artists, :conditions=>{:a=>1} @o1.tags_dataset.sql.must_equal "SELECT * FROM tags WHERE ((a = 1) AND (tags.id IN (1, 2, 3)))" @o2.artists_dataset.sql.must_equal "SELECT * FROM artists WHERE ((a = 1) AND (artists.tag_ids @> ARRAY[2]::integer[]))" end it "should support an :order option" do @c1.pg_array_to_many :tags, :clone=>:tags, :order=>[:a, :b] @c2.many_to_pg_array :artists, :clone=>:artists, :order=>[:a, :b] @o1.tags_dataset.sql.must_equal "SELECT * FROM tags WHERE (tags.id IN (1, 2, 3)) ORDER BY a, b" @o2.artists_dataset.sql.must_equal "SELECT * FROM artists WHERE (artists.tag_ids @> ARRAY[2]::integer[]) ORDER BY a, b" end it "should support a select option" do @c1.pg_array_to_many :tags, :clone=>:tags, :select=>[:a, :b] @c2.many_to_pg_array :artists, :clone=>:artists, :select=>[:a, :b] @c1.load(:tag_ids=>Sequel.pg_array([1,2,3])).tags_dataset.sql.must_equal "SELECT a, b FROM tags WHERE (tags.id IN (1, 2, 3))" @c2.load(:id=>1).artists_dataset.sql.must_equal "SELECT a, b FROM artists WHERE (artists.tag_ids @> ARRAY[1]::integer[])" end it "should accept a block" do @c1.pg_array_to_many :tags, :clone=>:tags do |ds| ds.filter(:yyy=>@yyy) end @c2.many_to_pg_array :artists, :clone=>:artists do |ds| ds.filter(:a=>1) end @c1.new(:yyy=>6, :tag_ids=>Sequel.pg_array([1,2,3])).tags_dataset.sql.must_equal "SELECT * FROM tags WHERE ((tags.id IN (1, 2, 3)) AND (yyy = 6))" @o2.artists_dataset.sql.must_equal "SELECT * FROM artists WHERE ((artists.tag_ids @> ARRAY[2]::integer[]) AND (a = 1))" end it "should support a :dataset option that is used instead of the default" do @c1.pg_array_to_many :tags, :clone=>:tags, :dataset=>proc{Tag.where(:id=>tag_ids.map{|x| x*2})} @c2.many_to_pg_array :artists, :clone=>:artists, :dataset=>proc{Artist.where(Sequel.pg_array(Sequel.pg_array(:tag_ids)[1..2]).contains([id]))} @o1.tags_dataset.sql.must_equal "SELECT * FROM tags WHERE (id IN (2, 4, 6))" @o2.artists_dataset.sql.must_equal "SELECT * FROM artists WHERE ((tag_ids)[1:2] @> ARRAY[2])" end it "should support a :limit option" do @c1.pg_array_to_many :tags, :clone=>:tags, :limit=>[2, 
3] @c2.many_to_pg_array :artists, :clone=>:artists, :limit=>[3, 2] @o1.tags_dataset.sql.must_equal "SELECT * FROM tags WHERE (tags.id IN (1, 2, 3)) LIMIT 2 OFFSET 3" @o2.artists_dataset.sql.must_equal "SELECT * FROM artists WHERE (artists.tag_ids @> ARRAY[2]::integer[]) LIMIT 3 OFFSET 2" end it "should support a :uniq option that removes duplicates from the association" do @c1.pg_array_to_many :tags, :clone=>:tags, :uniq=>true @c2.many_to_pg_array :artists, :clone=>:artists, :uniq=>true @c1.dataset = @c1.dataset.with_fetch([{:id=>20}, {:id=>30}, {:id=>20}, {:id=>30}]) @c2.dataset = @c1.dataset.with_fetch([{:id=>20}, {:id=>30}, {:id=>20}, {:id=>30}]) @o1.tags.must_equal [@c2.load(:id=>20), @c2.load(:id=>30)] @o2.artists.must_equal [@c1.load(:id=>20), @c1.load(:id=>30)] end it "reflection associated_object_keys should return correct values" do @c1.association_reflection(:tags).associated_object_keys.must_equal [:id] @c2.association_reflection(:artists).associated_object_keys.must_equal [:tag_ids] end it "reflection remove_before_destroy? should return correct values" do @c1.association_reflection(:tags).remove_before_destroy?.must_equal true @c2.association_reflection(:artists).remove_before_destroy?.must_equal false end it "reflection reciprocal should be correct" do @c1.association_reflection(:tags).reciprocal.must_equal :artists @c2.association_reflection(:artists).reciprocal.must_equal :tags end it "should eagerly load correctly when key values are missing or do not match" do @c1.dataset = @c1.dataset.with_fetch([{:id=>1, :tag_ids=>Sequel.pg_array([1,2,3])}, {:id=>4, :tag_ids=>nil}]) @c2.dataset = @c2.dataset.with_fetch([{:id=>2}, {:id=>4}]) @db.sqls a = @c1.eager(:tags).all a.must_equal [@o1, @c1.load(:id=>4, :tag_ids=>nil)] @db.sqls.must_equal ["SELECT * FROM artists", 'SELECT * FROM tags WHERE (tags.id IN (1, 2, 3))'] a.first.tags.must_equal [@o2] @db.sqls.must_equal [] @c2.dataset = @c2.dataset.with_fetch([{:id=>2}]) @c1.dataset = @c1.dataset.with_fetch([{:id=>3, :tag_ids=>nil}, {:id=>2, :tag_ids=>Sequel.pg_array([1,3])}, {:id=>1, :tag_ids=>Sequel.pg_array([1,2,3])}]) @db.sqls a = @c2.eager(:artists).all a.must_equal [@o2] @db.sqls.must_equal ['SELECT * FROM tags', "SELECT * FROM artists WHERE (artists.tag_ids && ARRAY[2]::integer[])"] a.first.artists.must_equal [@o1] @db.sqls.must_equal [] end it "should eagerly load correctly" do a = @c1.eager(:tags).all a.must_equal [@o1] @db.sqls.must_equal ["SELECT * FROM artists", 'SELECT * FROM tags WHERE (tags.id IN (1, 2, 3))'] a.first.tags.must_equal [@o2] @db.sqls.must_equal [] a = @c2.eager(:artists).all a.must_equal [@o2] @db.sqls.must_equal ['SELECT * FROM tags', "SELECT * FROM artists WHERE (artists.tag_ids && ARRAY[2]::integer[])"] a.first.artists.must_equal [@o1] @db.sqls.must_equal [] end it "should support using custom key options when eager loading associations" do @c1.class_eval{def tag3_ids; tag_ids.map{|x| x*3} end} @c1.pg_array_to_many :tags, :clone=>:tags, :primary_key=>Sequel.*(:id, 3), :primary_key_method=>:id3, :key=>:tag3_ids @c2.many_to_pg_array :artists, :clone=>:artists, :primary_key=>:id3, :key=>:tag3_ids, :key_column=>Sequel.pg_array(:tag_ids)[1..2] a = @c1.eager(:tags).all a.must_equal [@o1] @db.sqls.must_equal ["SELECT * FROM artists", 'SELECT * FROM tags WHERE ((tags.id * 3) IN (3, 6, 9))'] a.first.tags.must_equal [@o2] @db.sqls.must_equal [] a = @c2.eager(:artists).all a.must_equal [@o2] @db.sqls.must_equal ["SELECT * FROM tags", "SELECT * FROM artists WHERE ((artists.tag_ids)[1:2] && ARRAY[6]::integer[])"] 
a.first.artists.must_equal [@o1] @db.sqls.must_equal [] end it "should allow cascading of eager loading for associations of associated models" do a = @c1.eager(:tags=>:artists).all a.must_equal [@o1] @db.sqls.must_equal ["SELECT * FROM artists", 'SELECT * FROM tags WHERE (tags.id IN (1, 2, 3))', "SELECT * FROM artists WHERE (artists.tag_ids && ARRAY[2]::integer[])"] a.first.tags.must_equal [@o2] a.first.tags.first.artists.must_equal [@o1] @db.sqls.must_equal [] end it "should respect :eager when lazily loading an association" do @c1.pg_array_to_many :tags2, :clone=>:tags, :eager=>:artists, :key=>:tag_ids @c2.many_to_pg_array :artists2, :clone=>:artists, :eager=>:tags @o1.tags2.must_equal [@o2] @db.sqls.must_equal ["SELECT * FROM tags WHERE (tags.id IN (1, 2, 3))", "SELECT * FROM artists WHERE (artists.tag_ids && ARRAY[2]::integer[])"] @o1.tags2.first.artists.must_equal [@o1] @db.sqls.must_equal [] @o2.artists2.must_equal [@o1] @db.sqls.must_equal ["SELECT * FROM artists WHERE (artists.tag_ids @> ARRAY[2]::integer[])", 'SELECT * FROM tags WHERE (tags.id IN (1, 2, 3))'] @o2.artists2.first.tags.must_equal [@o2] @db.sqls.must_equal [] end it "should cascade eagerly loading when the :eager_graph association option is used" do @c1.pg_array_to_many :tags2, :clone=>:tags, :eager_graph=>:artists, :key=>:tag_ids @c2.many_to_pg_array :artists2, :clone=>:artists, :eager_graph=>:tags @c2.dataset = @c2.dataset.with_fetch(:id=>2, :artists_id=>1, :tag_ids=>Sequel.pg_array([1,2,3])) @c1.dataset = @c1.dataset.with_fetch(:id=>1, :tags_id=>2, :tag_ids=>Sequel.pg_array([1,2,3])) @db.sqls @o1.tags2.must_equal [@o2] @db.sqls.must_equal ['SELECT tags.id, artists.id AS artists_id, artists.tag_ids FROM tags LEFT OUTER JOIN artists ON (artists.tag_ids @> ARRAY[tags.id]) WHERE (tags.id IN (1, 2, 3))'] @o1.tags2.first.artists.must_equal [@o1] @db.sqls.must_equal [] @o2.artists2.must_equal [@o1] @db.sqls.must_equal ["SELECT artists.id, artists.tag_ids, tags.id AS tags_id FROM artists LEFT OUTER JOIN tags ON (artists.tag_ids @> ARRAY[tags.id]) WHERE (artists.tag_ids @> ARRAY[2]::integer[])"] @o2.artists2.first.tags.must_equal [@o2] @db.sqls.must_equal [] @c2.dataset = @c2.dataset.with_fetch(:id=>2, :artists_id=>1, :tag_ids=>Sequel.pg_array([1,2,3])) @c1.dataset = @c1.dataset.with_fetch(:id=>1, :tag_ids=>Sequel.pg_array([1,2,3])) @db.sqls a = @c1.eager(:tags2).all @db.sqls.must_equal ["SELECT * FROM artists", 'SELECT tags.id, artists.id AS artists_id, artists.tag_ids FROM tags LEFT OUTER JOIN artists ON (artists.tag_ids @> ARRAY[tags.id]) WHERE (tags.id IN (1, 2, 3))'] a.must_equal [@o1] a.first.tags2.must_equal [@o2] a.first.tags2.first.artists.must_equal [@o1] @db.sqls.must_equal [] @c2.dataset = @c2.dataset.with_fetch(:id=>2) @c1.dataset = @c1.dataset.with_fetch(:id=>1, :tags_id=>2, :tag_ids=>Sequel.pg_array([1,2,3])) @db.sqls a = @c2.eager(:artists2).all @db.sqls.must_equal ["SELECT * FROM tags", "SELECT artists.id, artists.tag_ids, tags.id AS tags_id FROM artists LEFT OUTER JOIN tags ON (artists.tag_ids @> ARRAY[tags.id]) WHERE (artists.tag_ids && ARRAY[2]::integer[])"] a.must_equal [@o2] a.first.artists2.must_equal [@o1] a.first.artists2.first.tags.must_equal [@o2] @db.sqls.must_equal [] end it "should respect the :limit option when eager loading" do @c2.dataset = @c2.dataset.with_fetch([{:id=>1},{:id=>2}, {:id=>3}]) @db.sqls @c1.pg_array_to_many :tags, :clone=>:tags, :limit=>2 a = @c1.eager(:tags).all a.must_equal [@o1] @db.sqls.must_equal ["SELECT * FROM artists", 'SELECT * FROM tags WHERE (tags.id IN (1, 2, 
3))'] a.first.tags.must_equal [@c2.load(:id=>1), @c2.load(:id=>2)] @db.sqls.must_equal [] @c1.pg_array_to_many :tags, :clone=>:tags, :limit=>[1, 1] a = @c1.eager(:tags).all a.must_equal [@o1] @db.sqls.must_equal ["SELECT * FROM artists", 'SELECT * FROM tags WHERE (tags.id IN (1, 2, 3))'] a.first.tags.must_equal [@c2.load(:id=>2)] @db.sqls.must_equal [] @c1.pg_array_to_many :tags, :clone=>:tags, :limit=>[nil, 1] a = @c1.eager(:tags).all a.must_equal [@o1] @db.sqls.must_equal ["SELECT * FROM artists", 'SELECT * FROM tags WHERE (tags.id IN (1, 2, 3))'] a.first.tags.must_equal [@c2.load(:id=>2), @c2.load(:id=>3)] @db.sqls.length.must_equal 0 @c2.dataset = @c2.dataset.with_fetch(:id=>2) @c1.dataset = @c1.dataset.with_fetch([{:id=>5, :tag_ids=>Sequel.pg_array([1,2,3])},{:id=>6, :tag_ids=>Sequel.pg_array([2,3])}, {:id=>7, :tag_ids=>Sequel.pg_array([1,2])}]) @db.sqls @c2.many_to_pg_array :artists, :clone=>:artists, :limit=>2 a = @c2.eager(:artists).all a.must_equal [@o2] @db.sqls.must_equal ['SELECT * FROM tags', "SELECT * FROM artists WHERE (artists.tag_ids && ARRAY[2]::integer[])"] a.first.artists.must_equal [@c1.load(:id=>5, :tag_ids=>Sequel.pg_array([1,2,3])), @c1.load(:id=>6, :tag_ids=>Sequel.pg_array([2,3]))] @db.sqls.must_equal [] @c2.many_to_pg_array :artists, :clone=>:artists, :limit=>[1, 1] a = @c2.eager(:artists).all a.must_equal [@o2] @db.sqls.must_equal ['SELECT * FROM tags', "SELECT * FROM artists WHERE (artists.tag_ids && ARRAY[2]::integer[])"] a.first.artists.must_equal [@c1.load(:id=>6, :tag_ids=>Sequel.pg_array([2,3]))] @db.sqls.must_equal [] @c2.many_to_pg_array :artists, :clone=>:artists, :limit=>[nil, 1] a = @c2.eager(:artists).all a.must_equal [@o2] @db.sqls.must_equal ['SELECT * FROM tags', "SELECT * FROM artists WHERE (artists.tag_ids && ARRAY[2]::integer[])"] a.first.artists.must_equal [@c1.load(:id=>6, :tag_ids=>Sequel.pg_array([2,3])), @c1.load(:id=>7, :tag_ids=>Sequel.pg_array([1,2]))] @db.sqls.must_equal [] end it "should support association_join" do @c1.association_join(:tags).sql.must_equal "SELECT * FROM artists INNER JOIN tags ON (artists.tag_ids @> ARRAY[tags.id])" @c2.association_join(:artists).sql.must_equal "SELECT * FROM tags INNER JOIN artists ON (artists.tag_ids @> ARRAY[tags.id])" end it "should support custom selects when using association_join" do @c1.select{a(b)}.association_join(:tags).sql.must_equal "SELECT a(b) FROM artists INNER JOIN tags ON (artists.tag_ids @> ARRAY[tags.id])" @c2.select{a(b)}.association_join(:artists).sql.must_equal "SELECT a(b) FROM tags INNER JOIN artists ON (artists.tag_ids @> ARRAY[tags.id])" end it "should eagerly graph associations" do @c2.dataset = @c2.dataset.with_fetch(:id=>2, :artists_id=>1, :tag_ids=>Sequel.pg_array([1,2,3])) @c1.dataset = @c1.dataset.with_fetch(:id=>1, :tags_id=>2, :tag_ids=>Sequel.pg_array([1,2,3])) @db.sqls a = @c1.eager_graph(:tags).all @db.sqls.must_equal ["SELECT artists.id, artists.tag_ids, tags.id AS tags_id FROM artists LEFT OUTER JOIN tags ON (artists.tag_ids @> ARRAY[tags.id])"] a.must_equal [@o1] a.first.tags.must_equal [@o2] @db.sqls.must_equal [] a = @c2.eager_graph(:artists).all @db.sqls.must_equal ["SELECT tags.id, artists.id AS artists_id, artists.tag_ids FROM tags LEFT OUTER JOIN artists ON (artists.tag_ids @> ARRAY[tags.id])"] a.must_equal [@o2] a.first.artists.must_equal [@o1] @db.sqls.must_equal [] end it "should eagerly graph associations with limits" do @c1.pg_array_to_many :tags, :limit=>1 a = @c1.eager_graph(:tags).with_fetch([{:id=>1, :tags_id=>2, 
:tag_ids=>Sequel.pg_array([1,2,3])}, {:id=>1, :tags_id=>3, :tag_ids=>Sequel.pg_array([1,2,3])}]).all @db.sqls.must_equal ["SELECT artists.id, artists.tag_ids, tags.id AS tags_id FROM artists LEFT OUTER JOIN tags ON (artists.tag_ids @> ARRAY[tags.id])"] a.must_equal [@o1] a.first.tags.must_equal [@o2] @db.sqls.must_equal [] @c2.many_to_pg_array :artists, :limit=>1 a = @c2.eager_graph(:artists).with_fetch([{:id=>2, :artists_id=>1, :tag_ids=>Sequel.pg_array([1,2,3])}, {:id=>2, :artists_id=>2, :tag_ids=>Sequel.pg_array([1,2,3])}]).all @db.sqls.must_equal ["SELECT tags.id, artists.id AS artists_id, artists.tag_ids FROM tags LEFT OUTER JOIN artists ON (artists.tag_ids @> ARRAY[tags.id])"] a.must_equal [@o2] a.first.artists.must_equal [@o1] @db.sqls.must_equal [] end it "should allow cascading of eager graphing for associations of associated models" do @c2.dataset = @c2.dataset.with_fetch(:id=>2, :artists_id=>1, :tag_ids=>Sequel.pg_array([1,2,3]), :tags_0_id=>2) @c1.dataset = @c1.dataset.with_fetch(:id=>1, :tags_id=>2, :tag_ids=>Sequel.pg_array([1,2,3]), :artists_0_id=>1, :artists_0_tag_ids=>Sequel.pg_array([1,2,3])) @db.sqls a = @c1.eager_graph(:tags=>:artists).all @db.sqls.must_equal ["SELECT artists.id, artists.tag_ids, tags.id AS tags_id, artists_0.id AS artists_0_id, artists_0.tag_ids AS artists_0_tag_ids FROM artists LEFT OUTER JOIN tags ON (artists.tag_ids @> ARRAY[tags.id]) LEFT OUTER JOIN artists AS artists_0 ON (artists_0.tag_ids @> ARRAY[tags.id])"] a.must_equal [@o1] a.first.tags.must_equal [@o2] a.first.tags.first.artists.must_equal [@o1] @db.sqls.must_equal [] a = @c2.eager_graph(:artists=>:tags).all @db.sqls.must_equal ["SELECT tags.id, artists.id AS artists_id, artists.tag_ids, tags_0.id AS tags_0_id FROM tags LEFT OUTER JOIN artists ON (artists.tag_ids @> ARRAY[tags.id]) LEFT OUTER JOIN tags AS tags_0 ON (artists.tag_ids @> ARRAY[tags_0.id])"] a.must_equal [@o2] a.first.artists.must_equal [@o1] a.first.artists.first.tags.must_equal [@o2] @db.sqls.must_equal [] end it "eager graphing should respect key options" do @c1.class_eval{def tag3_ids; tag_ids.map{|x| x*3} end} @c1.pg_array_to_many :tags, :clone=>:tags, :primary_key=>Sequel.*(:id, 3), :primary_key_method=>:id3, :key=>:tag3_ids, :key_column=>Sequel.pg_array(:tag_ids)[1..2] @c2.many_to_pg_array :artists, :clone=>:artists, :primary_key=>:id3, :key=>:tag3_ids, :key_column=>Sequel.pg_array(:tag_ids)[1..2] @c2.dataset = @c2.dataset.with_fetch(:id=>2, :artists_id=>1, :tag_ids=>Sequel.pg_array([1,2,3]), :tags_0_id=>2) @c1.dataset = @c1.dataset.with_fetch(:id=>1, :tags_id=>2, :tag_ids=>Sequel.pg_array([1,2,3]), :artists_0_id=>1, :artists_0_tag_ids=>Sequel.pg_array([1,2,3])) @db.sqls a = @c1.eager_graph(:tags).all a.must_equal [@o1] @db.sqls.must_equal ["SELECT artists.id, artists.tag_ids, tags.id AS tags_id FROM artists LEFT OUTER JOIN tags ON ((artists.tag_ids)[1:2] @> ARRAY[(tags.id * 3)])"] a.first.tags.must_equal [@o2] @db.sqls.must_equal [] a = @c2.eager_graph(:artists).all a.must_equal [@o2] @db.sqls.must_equal ["SELECT tags.id, artists.id AS artists_id, artists.tag_ids FROM tags LEFT OUTER JOIN artists ON ((artists.tag_ids)[1:2] @> ARRAY[tags.id3])"] a.first.artists.must_equal [@o1] @db.sqls.must_equal [] end it "should respect the association's :graph_select option" do @c1.pg_array_to_many :tags, :clone=>:tags, :graph_select=>:id2 @c2.many_to_pg_array :artists, :clone=>:artists, :graph_select=>:id @c2.dataset = @c2.dataset.with_fetch(:id=>2, :artists_id=>1) @c1.dataset = @c1.dataset.with_fetch(:id=>1, :id2=>2, 
:tag_ids=>Sequel.pg_array([1,2,3])) @db.sqls a = @c1.eager_graph(:tags).all @db.sqls.must_equal ["SELECT artists.id, artists.tag_ids, tags.id2 FROM artists LEFT OUTER JOIN tags ON (artists.tag_ids @> ARRAY[tags.id])"] a.must_equal [@o1] a.first.tags.must_equal [@c2.load(:id2=>2)] @db.sqls.must_equal [] a = @c2.eager_graph(:artists).all @db.sqls.must_equal ["SELECT tags.id, artists.id AS artists_id FROM tags LEFT OUTER JOIN artists ON (artists.tag_ids @> ARRAY[tags.id])"] a.must_equal [@o2] a.first.artists.must_equal [@c1.load(:id=>1)] @db.sqls.must_equal [] end it "should respect the association's :graph_join_type option" do @c1.pg_array_to_many :tags, :clone=>:tags, :graph_join_type=>:inner @c2.many_to_pg_array :artists, :clone=>:artists, :graph_join_type=>:inner @c1.eager_graph(:tags).sql.must_equal "SELECT artists.id, artists.tag_ids, tags.id AS tags_id FROM artists INNER JOIN tags ON (artists.tag_ids @> ARRAY[tags.id])" @c2.eager_graph(:artists).sql.must_equal "SELECT tags.id, artists.id AS artists_id, artists.tag_ids FROM tags INNER JOIN artists ON (artists.tag_ids @> ARRAY[tags.id])" end it "should respect the association's :conditions option" do @c1.pg_array_to_many :tags, :clone=>:tags, :conditions=>{:a=>1} @c2.many_to_pg_array :artists, :clone=>:artists, :conditions=>{:a=>1} @c1.eager_graph(:tags).sql.must_equal "SELECT artists.id, artists.tag_ids, tags.id AS tags_id FROM artists LEFT OUTER JOIN tags ON ((tags.a = 1) AND (artists.tag_ids @> ARRAY[tags.id]))" @c2.eager_graph(:artists).sql.must_equal "SELECT tags.id, artists.id AS artists_id, artists.tag_ids FROM tags LEFT OUTER JOIN artists ON ((artists.a = 1) AND (artists.tag_ids @> ARRAY[tags.id]))" end it "should respect the association's :graph_conditions option" do @c1.pg_array_to_many :tags, :clone=>:tags, :graph_conditions=>{:a=>1} @c2.many_to_pg_array :artists, :clone=>:artists, :graph_conditions=>{:a=>1} @c1.eager_graph(:tags).sql.must_equal "SELECT artists.id, artists.tag_ids, tags.id AS tags_id FROM artists LEFT OUTER JOIN tags ON ((tags.a = 1) AND (artists.tag_ids @> ARRAY[tags.id]))" @c2.eager_graph(:artists).sql.must_equal "SELECT tags.id, artists.id AS artists_id, artists.tag_ids FROM tags LEFT OUTER JOIN artists ON ((artists.a = 1) AND (artists.tag_ids @> ARRAY[tags.id]))" end it "should respect the association's :graph_block option" do @c1.pg_array_to_many :tags, :clone=>:tags, :graph_block=>proc{|ja,lja,js| {Sequel.qualify(ja, :a)=>1}} @c2.many_to_pg_array :artists, :clone=>:artists, :graph_block=>proc{|ja,lja,js| {Sequel.qualify(ja, :a)=>1}} @c1.eager_graph(:tags).sql.must_equal "SELECT artists.id, artists.tag_ids, tags.id AS tags_id FROM artists LEFT OUTER JOIN tags ON ((tags.a = 1) AND (artists.tag_ids @> ARRAY[tags.id]))" @c2.eager_graph(:artists).sql.must_equal "SELECT tags.id, artists.id AS artists_id, artists.tag_ids FROM tags LEFT OUTER JOIN artists ON ((artists.a = 1) AND (artists.tag_ids @> ARRAY[tags.id]))" end it "should respect the association's :graph_only_conditions option" do @c1.pg_array_to_many :tags, :clone=>:tags, :graph_only_conditions=>{:a=>1} @c2.many_to_pg_array :artists, :clone=>:artists, :graph_only_conditions=>{:a=>1} @c1.eager_graph(:tags).sql.must_equal "SELECT artists.id, artists.tag_ids, tags.id AS tags_id FROM artists LEFT OUTER JOIN tags ON (tags.a = 1)" @c2.eager_graph(:artists).sql.must_equal "SELECT tags.id, artists.id AS artists_id, artists.tag_ids FROM tags LEFT OUTER JOIN artists ON (artists.a = 1)" end it "should respect the association's :graph_only_conditions with 
:graph_block option" do @c1.pg_array_to_many :tags, :clone=>:tags, :graph_only_conditions=>{:a=>1}, :graph_block=>proc{|ja,lja,js| {Sequel.qualify(lja, :b)=>1}} @c2.many_to_pg_array :artists, :clone=>:artists, :graph_only_conditions=>{:a=>1}, :graph_block=>proc{|ja,lja,js| {Sequel.qualify(lja, :b)=>1}} @c1.eager_graph(:tags).sql.must_equal "SELECT artists.id, artists.tag_ids, tags.id AS tags_id FROM artists LEFT OUTER JOIN tags ON ((tags.a = 1) AND (artists.b = 1))" @c2.eager_graph(:artists).sql.must_equal "SELECT tags.id, artists.id AS artists_id, artists.tag_ids FROM tags LEFT OUTER JOIN artists ON ((artists.a = 1) AND (tags.b = 1))" end it "should define an add_ method for adding associated objects" do @o1.add_tag(@c2.load(:id=>4)) @o1.tag_ids.must_equal [1,2,3,4] @db.sqls.must_equal [] @o1.save_changes @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[1,2,3,4] WHERE (id = 1)"] @o2.add_artist(@c1.load(:id=>1, :tag_ids=>Sequel.pg_array([4]))) @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[4,2] WHERE (id = 1)"] end it "should define a remove_ method for removing associated objects" do @o1.remove_tag(@o2) @o1.tag_ids.must_equal [1,3] @db.sqls.must_equal [] @o1.save_changes @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[1,3] WHERE (id = 1)"] @o2.remove_artist(@c1.load(:id=>1, :tag_ids=>Sequel.pg_array([1,2,3,4]))) @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[1,3,4] WHERE (id = 1)"] end it "should define a remove_all_ method for removing all associated objects" do @o1.remove_all_tags @o1.tag_ids.must_equal [] @db.sqls.must_equal [] @o1.save_changes @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[] WHERE (id = 1)"] @o2.remove_all_artists @db.sqls.must_equal ["UPDATE artists SET tag_ids = array_remove(tag_ids, CAST(2 AS integer)) WHERE (tag_ids @> ARRAY[2]::integer[])"] end it "should define a remove_all_ method for removing all associated objects respecting database type" do @c2.many_to_pg_array :artists, :clone=>:artists, :array_type=>:bigint @o1.remove_all_tags @o1.tag_ids.must_equal [] @db.sqls.must_equal [] @o1.save_changes @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[] WHERE (id = 1)"] @o2.remove_all_artists @db.sqls.must_equal ["UPDATE artists SET tag_ids = array_remove(tag_ids, CAST(2 AS bigint)) WHERE (tag_ids @> ARRAY[2]::bigint[])"] end it "should allow calling add_ and remove_ methods on new objects for pg_array_to_many associations" do a = Artist.new a.add_tag(@c2.load(:id=>4)) a.tag_ids.must_equal [4] a.remove_tag(@c2.load(:id=>4)) a.tag_ids.must_equal [] a.add_tag(@c2.load(:id=>4)) a.tag_ids.must_equal [4] a.remove_all_tags a.tag_ids.must_equal [] end it "should have pg_array_to_many association modification methods save if :save_after_modify option is used" do @c1.pg_array_to_many :tags, :clone=>:tags, :save_after_modify=>true @o1.add_tag(@c2.load(:id=>4)) @o1.tag_ids.must_equal [1,2,3,4] @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[1,2,3,4] WHERE (id = 1)"] @o1.remove_tag(@o2) @o1.tag_ids.must_equal [1,3,4] @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[1,3,4] WHERE (id = 1)"] @o1.remove_all_tags @o1.tag_ids.must_equal [] @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[] WHERE (id = 1)"] end it "should have association modification methods deal with nil values" do v = @c1.load(:id=>1) v.add_tag(@c2.load(:id=>4)) v.tag_ids.must_equal [4] @db.sqls.must_equal [] v.save_changes @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[4]::integer[] WHERE (id = 1)"] 
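# When the owner's tag_ids value is nil, add_ builds a fresh typed array
# (hence the ::integer[] cast in the UPDATE above), while the remove_ and
# remove_all_ calls that follow leave the nil untouched and issue no SQL.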
@o2.add_artist(@c1.load(:id=>1)) @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[2]::integer[] WHERE (id = 1)"] v = @c1.load(:id=>1) v.remove_tag(@c2.load(:id=>4)) v.tag_ids.must_be_nil @db.sqls.must_equal [] v.save_changes @db.sqls.must_equal [] @o2.remove_artist(@c1.load(:id=>1)) @db.sqls.must_equal [] v = @c1.load(:id=>1) v.remove_all_tags v.tag_ids.must_be_nil @db.sqls.must_equal [] v.save_changes @db.sqls.must_equal [] end it "should have association modification methods deal with empty array values" do v = @c1.load(:id=>1, :tag_ids=>Sequel.pg_array([])) v.add_tag(@c2.load(:id=>4)) v.tag_ids.must_equal [4] @db.sqls.must_equal [] v.save_changes @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[4] WHERE (id = 1)"] @o2.add_artist(@c1.load(:id=>1, :tag_ids=>Sequel.pg_array([]))) @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[2] WHERE (id = 1)"] v = @c1.load(:id=>1, :tag_ids=>Sequel.pg_array([])) v.remove_tag(@c2.load(:id=>4)) v.tag_ids.must_equal [] @db.sqls.must_equal [] v.save_changes @db.sqls.must_equal [] @o2.remove_artist(@c1.load(:id=>1, :tag_ids=>Sequel.pg_array([]))) @db.sqls.must_equal [] v = @c1.load(:id=>1, :tag_ids=>Sequel.pg_array([])) v.remove_all_tags v.tag_ids.must_equal [] @db.sqls.must_equal [] v.save_changes @db.sqls.must_equal [] end it "should respect the :array_type option when manually creating arrays" do @c1.pg_array_to_many :tags, :clone=>:tags, :array_type=>:int8 @c2.many_to_pg_array :artists, :clone=>:artists, :array_type=>:int8 v = @c1.load(:id=>1) v.add_tag(@c2.load(:id=>4)) v.tag_ids.must_equal [4] @db.sqls.must_equal [] v.save_changes @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[4]::int8[] WHERE (id = 1)"] @o2.add_artist(@c1.load(:id=>1)) @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[2]::int8[] WHERE (id = 1)"] end it "should respect the :array_type option in the associations dataset" do @c2.many_to_pg_array :artists, :clone=>:artists, :array_type=>:int8 @c2.load(:id=>1).artists_dataset.sql.must_equal 'SELECT * FROM artists WHERE (artists.tag_ids @> ARRAY[1]::int8[])' end it "should respect the :array_type option when eager loading" do @c2.many_to_pg_array :artists, :clone=>:artists, :array_type=>:int8 @c2.eager(:artists).all @db.sqls.must_equal ["SELECT * FROM tags", "SELECT * FROM artists WHERE (artists.tag_ids && ARRAY[2]::int8[])"] end it "should respect the :array_type option when filtering by associations" do @c1.pg_array_to_many :tags, :clone=>:tags, :array_type=>:int8 @c1.where(:tags=>@c2.load(:id=>1)).sql.must_equal 'SELECT * FROM artists WHERE (artists.tag_ids @> ARRAY[1]::int8[])' @c1.where(:tags=>[@c2.load(:id=>1), @c2.load(:id=>2)]).sql.must_equal 'SELECT * FROM artists WHERE (artists.tag_ids && ARRAY[1,2]::int8[])' end it "should automatically determine the array type by looking at the schema" do @c1.db_schema[:tag_ids][:db_type] = 'int8[]' @c2.many_to_pg_array :artists, :clone=>:artists @c1.pg_array_to_many :tags, :clone=>:tags, :save_after_modify=>true @c2.load(:id=>1).artists_dataset.sql.must_equal 'SELECT * FROM artists WHERE (artists.tag_ids @> ARRAY[1]::int8[])' @c1.load(:id=>1).add_tag(@c2.load(:id=>1)) @db.sqls.must_equal ["UPDATE artists SET tag_ids = ARRAY[1]::int8[] WHERE (id = 1)"] end it "should not validate the current/associated object in add_ and remove_ if the :validate=>false option is used" do @c1.pg_array_to_many :tags, :clone=>:tags, :validate=>false, :save_after_modify=>true @c2.many_to_pg_array :artists, :clone=>:artists, :validate=>false a = @c1.load(:id=>1) t =
@c2.load(:id=>2) def a.validate() errors.add(:id, 'foo') end a.associations[:tags] = [] a.add_tag(t).must_equal t a.tags.must_equal [t] a.remove_tag(t).must_equal t a.tags.must_equal [] t.associations[:artists] = [] t.add_artist(a).must_equal a t.artists.must_equal [a] t.remove_artist(a).must_equal a t.artists.must_equal [] end it "should not raise exception in add_ and remove_ if the :raise_on_save_failure=>false option is used" do @c1.pg_array_to_many :tags, :clone=>:tags, :raise_on_save_failure=>false, :save_after_modify=>true @c2.many_to_pg_array :artists, :clone=>:artists, :raise_on_save_failure=>false a = @c1.load(:id=>1) t = @c2.load(:id=>2) def a.validate() errors.add(:id, 'foo') end a.associations[:tags] = [] a.add_tag(t).must_be_nil a.tags.must_equal [] a.associations[:tags] = [t] a.remove_tag(t).must_be_nil a.tags.must_equal [t] t.associations[:artists] = [] t.add_artist(a).must_be_nil t.artists.must_equal [] t.associations[:artists] = [a] t.remove_artist(a).must_be_nil t.artists.must_equal [a] end end describe "Sequel::Model.finalize_associations" do before do @db = Sequel.mock(:host=>'postgres', :numrows=>1) @db.extend_datasets do def quote_identifiers?; false end end class ::Foo < Sequel::Model(@db) plugin :pg_array_associations many_to_pg_array :items end class ::Item < Sequel::Model(@db) plugin :pg_array_associations pg_array_to_many :foos end [Foo, Item].each(&:finalize_associations) @db.sqls end after do Object.send(:remove_const, :Item) Object.send(:remove_const, :Foo) end it "should finalize pg_array_to_many associations" do r = Item.association_reflection(:foos) r[:class].must_equal Foo r[:_dataset].sql.must_equal "SELECT * FROM foos" r[:associated_eager_dataset].sql.must_equal "SELECT * FROM foos" r.fetch(:_eager_limit_strategy).must_be_nil r[:filter_by_associations_conditions_dataset].sql.must_equal "SELECT array_agg(foos.id) FROM foos WHERE (foos.id IS NOT NULL)" r[:predicate_key].must_equal Sequel.qualify(:foos, :id) r[:predicate_keys].must_equal [Sequel.qualify(:foos, :id)] r[:reciprocal].must_equal :items r[:array_type].must_equal :integer r[:primary_key].must_equal :id r[:primary_key_method].must_equal :id end it "should finalize many_to_pg_array associations" do r = Foo.association_reflection(:items) r[:class].must_equal Item r[:_dataset].sql.must_equal "SELECT * FROM items" r[:associated_eager_dataset].sql.must_equal "SELECT * FROM items" r.fetch(:_eager_limit_strategy).must_be_nil r[:filter_by_associations_conditions_dataset].sql.must_equal "SELECT _smtopgaa_key_ FROM items CROSS JOIN unnest(items.foo_ids) AS _smtopgaa_(_smtopgaa_key_) WHERE (items.foo_ids IS NOT NULL)" r[:predicate_key].must_equal Sequel.qualify(:items, :foo_ids) r[:predicate_keys].must_equal [Sequel.qualify(:items, :foo_ids)] r[:reciprocal].must_equal :foos r[:array_type].must_equal :integer end end describe Sequel::Model, "pg_array_associations with :read_only" do before do @db = Sequel.mock(:host=>'postgres', :numrows=>1) @db.extend_datasets{def quote_identifiers?; false end} class ::Artist < Sequel::Model(@db) attr_accessor :yyy columns :id, :tag_ids plugin :pg_array_associations pg_array_to_many :tags, :read_only=>true end class ::Tag < Sequel::Model(@db) columns :id plugin :pg_array_associations many_to_pg_array :artists, :read_only=>true def id3 id*3 end end end after do Object.send(:remove_const, :Artist) Object.send(:remove_const, :Tag) end it "should not define an add_ method for adding associated objects" do Artist.new.respond_to?(:add_tag).must_equal false 
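# With :read_only=>true, the association is still queryable but none of the
# mutation methods (add_/remove_/remove_all_) are defined, as this and the
# following assertions verify.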
Tag.new.respond_to?(:add_artist).must_equal false end it "should not define a remove_ method for removing associated objects" do Artist.new.respond_to?(:remove_tag).must_equal false Tag.new.respond_to?(:remove_artist).must_equal false end it "should not define a remove_all_ method for removing all associated objects" do Artist.new.respond_to?(:remove_all_tags).must_equal false Tag.new.respond_to?(:remove_all_artists).must_equal false end end
sequel-5.63.0/spec/extensions/pg_array_ops_spec.rb
require_relative "spec_helper" Sequel.extension :pg_array, :pg_array_ops, :pg_hstore, :pg_hstore_ops describe "Sequel::Postgres::ArrayOp" do before do @db = Sequel.connect('mock://postgres') @db.extend_datasets{def quote_identifiers?; false end} @a = Sequel.pg_array_op(:a) end it "should support the standard mathematical operators" do @db.literal(@a < @a).must_equal "(a < a)" @db.literal(@a <= @a).must_equal "(a <= a)" @db.literal(@a > @a).must_equal "(a > a)" @db.literal(@a >= @a).must_equal "(a >= a)" end it "#[] should support subscript access" do @db.literal(@a[1]).must_equal "(a)[1]" @db.literal(@a[1][2]).must_equal "(a)[1][2]" end it "#[] with a range should return an ArrayOp" do @db.literal(@a[1..2].any).must_equal "ANY((a)[1:2])" end it "#any should use the ANY method" do @db.literal(1=>@a.any).must_equal "(1 = ANY(a))" end it "#all should use the ALL method" do @db.literal(1=>@a.all).must_equal "(1 = ALL(a))" end it "#contains should use the @> operator" do @db.literal(@a.contains(:b)).must_equal "(a @> b)" end it "#contained_by should use the <@ operator" do @db.literal(@a.contained_by(:b)).must_equal "(a <@ b)" end it "#overlaps should use the && operator" do @db.literal(@a.overlaps(:b)).must_equal "(a && b)" end it "#push/concat should use the || operator in append mode" do @db.literal(@a.push(:b)).must_equal "(a || b)" @db.literal(@a.concat(:b)).must_equal "(a || b)" end it "#remove should remove the element from the array" do @db.literal(@a.remove(1)).must_equal "array_remove(a, 1)" @db.literal(@a.remove(1)[2]).must_equal "(array_remove(a, 1))[2]" end it "#replace should replace the element in the array with another" do @db.literal(@a.replace(1, 2)).must_equal "array_replace(a, 1, 2)" @db.literal(@a.replace(1, 2)[3]).must_equal "(array_replace(a, 1, 2))[3]" end it "#unshift should use the || operator in prepend mode" do @db.literal(@a.unshift(:b)).must_equal "(b || a)" end it "#cardinality should use the cardinality function" do @db.literal(@a.cardinality).must_equal "cardinality(a)" end it "#dims should use the array_dims function" do @db.literal(@a.dims).must_equal "array_dims(a)" end it "#length should use the array_length function" do @db.literal(@a.length).must_equal "array_length(a, 1)" @db.literal(@a.length(2)).must_equal "array_length(a, 2)" end it "#lower should use the array_lower function" do @db.literal(@a.lower).must_equal "array_lower(a, 1)" @db.literal(@a.lower(2)).must_equal "array_lower(a, 2)" end it "#to_string/join should use the array_to_string function" do @db.literal(@a.to_string).must_equal "array_to_string(a, '')" @db.literal(@a.join).must_equal "array_to_string(a, '')" @db.literal(@a.join(':')).must_equal "array_to_string(a, ':')" @db.literal(@a.join(':', '*')).must_equal "array_to_string(a, ':', '*')" end it "#hstore should convert the item to an hstore using the hstore function" do @db.literal(@a.hstore).must_equal "hstore(a)" @db.literal(@a.hstore['a']).must_equal "(hstore(a) -> 'a')" @db.literal(@a.hstore(:b)).must_equal "hstore(a, b)" @db.literal(@a.hstore(:b)['a']).must_equal "(hstore(a, b) -> 'a')" @db.literal(@a.hstore(%w'1')).must_equal "hstore(a, ARRAY['1'])" @db.literal(@a.hstore(%w'1')['a']).must_equal "(hstore(a, ARRAY['1']) -> 'a')" end it "#unnest should use the unnest function" do @db.literal(@a.unnest).must_equal "unnest(a)" @db.literal(@a.unnest(:b, :c)).must_equal "unnest(a, b, c)" @db.literal(@a.unnest([1])).must_equal "unnest(a, ARRAY[1])" end it "#pg_array should return self" do @a.pg_array.must_be_same_as(@a) end it "Sequel.pg_array_op should return arg for ArrayOp" do Sequel.pg_array_op(@a).must_be_same_as(@a) end it "should be able to turn expressions into array ops using pg_array" do @db.literal(Sequel.qualify(:b, :a).pg_array.push(3)).must_equal "(b.a || 3)" @db.literal(Sequel.function(:a, :b).pg_array.push(3)).must_equal "(a(b) || 3)" end it "should be able to turn literal strings into array ops using pg_array" do @db.literal(Sequel.lit('a').pg_array.unnest).must_equal "unnest(a)" end it "should be able to turn symbols into array ops using Sequel.pg_array_op" do @db.literal(Sequel.pg_array_op(:a).unnest).must_equal "unnest(a)" end it "should be able to turn symbols into array ops using Sequel.pg_array" do @db.literal(Sequel.pg_array(:a).unnest).must_equal "unnest(a)" end it "should allow transforming PGArray instances into ArrayOp instances" do @db.literal(Sequel.pg_array([1,2]).op.push(3)).must_equal "(ARRAY[1,2] || 3)" end it "should wrap array arguments in PGArrays" do @db.literal(@a.contains([1, 2])).must_equal "(a @> ARRAY[1,2])" @db.literal(@a.contained_by([1, 2])).must_equal "(a <@ ARRAY[1,2])" @db.literal(@a.overlaps([1, 2])).must_equal "(a && ARRAY[1,2])" @db.literal(@a.push([1, 2])).must_equal "(a || ARRAY[1,2])" @db.literal(@a.concat([1, 2])).must_equal "(a || ARRAY[1,2])" @db.literal(@a.unshift([1, 2])).must_equal "(ARRAY[1,2] || a)" end end
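# A minimal usage sketch, not part of the assertions above: it shows how the
# pg_array_ops DSL renders to SQL using the same mock postgres adapter and
# quoting setup the specs rely on. The PG_ARRAY_OPS_DEMO environment guard is
# purely hypothetical, added only so the demo stays inert during spec runs.
if ENV['PG_ARRAY_OPS_DEMO']
  demo_db = Sequel.connect('mock://postgres')
  demo_db.extend_datasets{def quote_identifiers?; false end}
  col = Sequel.pg_array_op(:a)
  puts demo_db.literal(col.contains([1, 2]))  # (a @> ARRAY[1,2])
  puts demo_db.literal(col.overlaps(:b))      # (a && b)
  puts demo_db.literal(col[1..2].any)         # ANY((a)[1:2])
  puts demo_db.literal(col.join(':'))         # array_to_string(a, ':')
end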
sequel-5.63.0/spec/extensions/pg_array_spec.rb
require_relative "spec_helper" describe "pg_array extension" do before(:all) do Sequel.extension :pg_array end before do @db = Sequel.connect('mock://postgres') @db.extend_datasets(Module.new do def supports_timestamp_timezones?; false end def supports_timestamp_usecs?; false; end def quote_identifiers?; false end end) @db.extension(:pg_array) @m = Sequel::Postgres @converter = @db.conversion_procs @db.sqls end it "should parse single dimensional text arrays" do c = @converter[1009] c.call("{a}").to_a.first.must_be_kind_of(String) c.call("{}").to_a.must_equal [] c.call('{""}').to_a.must_equal [""] c.call('{"",""}').to_a.must_equal ["",""] c.call('{"","",""}').to_a.must_equal ["","",""] c.call("{a}").to_a.must_equal ['a'] c.call('{"a b"}').to_a.must_equal ['a b'] c.call('{a,b}').to_a.must_equal ['a', 'b'] end it "should preserve encoding when parsing text arrays" do c = @converter[1009] c.call("{a,\u00E4}".encode('ISO-8859-1')).map(&:encoding).must_equal [Encoding::ISO_8859_1, Encoding::ISO_8859_1] end it "should parse multi-dimensional text arrays" do c = @converter[1009] c.call("{{}}").to_a.must_equal [[]] c.call("{{a},{b}}").to_a.must_equal [['a'], ['b']] c.call('{{"a b"},{c}}').to_a.must_equal [['a b'], ['c']] c.call('{{{a},{b}},{{c},{d}}}').to_a.must_equal [[['a'], ['b']], [['c'], ['d']]] c.call('{{{a,e},{b,f}},{{c,g},{d,h}}}').to_a.must_equal [[['a', 'e'], ['b', 'f']], [['c', 'g'], ['d', 'h']]] end it "should parse text arrays with embedded delimiters" do c = @converter[1009] c.call('{{"{},","\\",\\,\\\\\\"\\""}}').to_a.must_equal [['{},', '",,\\""']] end it "should parse single dimensional integer arrays" do c = @converter[1007] c.call("{1}").to_a.first.must_be_kind_of(Integer) c.call("{}").to_a.must_equal [] c.call("{1}").to_a.must_equal [1] c.call('{2,3}').to_a.must_equal [2, 3] c.call('{3,4,5}').to_a.must_equal [3, 4, 5] end it "should parse multiple dimensional integer arrays" do c = @converter[1007] c.call("{{}}").to_a.must_equal [[]] c.call("{{1}}").to_a.must_equal [[1]] c.call('{{2},{3}}').to_a.must_equal [[2], [3]] c.call('{{{1,2},{3,4}},{{5,6},{7,8}}}').to_a.must_equal [[[1, 2], [3, 4]], [[5, 6], [7, 8]]] end it "should parse single dimensional float arrays" do c = @converter[1022] c.call("{}").to_a.must_equal [] c.call("{1.5}").to_a.must_equal [1.5] c.call('{2.5,3.5}').to_a.must_equal [2.5, 3.5] c.call('{3.5,4.5,5.5}').to_a.must_equal [3.5, 4.5, 5.5] end it "should parse multiple dimensional float arrays" do c = @converter[1022] c.call("{{}}").to_a.must_equal [[]] c.call("{{1.5}}").to_a.must_equal [[1.5]] c.call('{{2.5},{3.5}}').to_a.must_equal [[2.5], [3.5]]
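# The converter OIDs exercised throughout these parsing tests are the
# built-in PostgreSQL array type OIDs: 1009 (text[]), 1007 (int4[]),
# 1022 (float8[]), and 1231 (numeric[]).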
c.call('{{{1.5,2.5},{3.5,4.5}},{{5.5,6.5},{7.5,8.5}}}').to_a.must_equal [[[1.5, 2.5], [3.5, 4.5]], [[5.5, 6.5], [7.5, 8.5]]] end it "should parse integers in float arrays as floats" do c = @converter[1022] c.call("{1}").to_a.first.must_be_kind_of(Float) c.call("{1}").to_a.must_equal [1.0] c.call('{{{1,2},{3,4}},{{5,6},{7,8}}}').to_a.must_equal [[[1.0, 2.0], [3.0, 4.0]], [[5.0, 6.0], [7.0, 8.0]]] end it "should parse single dimensional decimal arrays" do c = @converter[1231] c.call("{}").to_a.must_equal [] c.call("{1.5}").to_a.must_equal [BigDecimal('1.5')] c.call('{2.5,3.5}').to_a.must_equal [BigDecimal('2.5'), BigDecimal('3.5')] c.call('{3.5,4.5,5.5}').to_a.must_equal [BigDecimal('3.5'), BigDecimal('4.5'), BigDecimal('5.5')] end it "should parse multiple dimensional decimal arrays" do c = @converter[1231] c.call("{{}}").to_a.must_equal [[]] c.call("{{1.5}}").to_a.must_equal [[BigDecimal('1.5')]] c.call('{{2.5},{3.5}}').to_a.must_equal [[BigDecimal('2.5')], [BigDecimal('3.5')]] c.call('{{{1.5,2.5},{3.5,4.5}},{{5.5,6.5},{7.5,8.5}}}').to_a.must_equal [[[BigDecimal('1.5'), BigDecimal('2.5')], [BigDecimal('3.5'), BigDecimal('4.5')]], [[BigDecimal('5.5'), BigDecimal('6.5')], [BigDecimal('7.5'), BigDecimal('8.5')]]] end it "should parse decimal values with arbitrary precision" do c = @converter[1231] c.call("{1.000000000000000000005}").to_a.must_equal [BigDecimal('1.000000000000000000005')] c.call("{{1.000000000000000000005,2.000000000000000000005},{3.000000000000000000005,4.000000000000000000005}}").to_a.must_equal [[BigDecimal('1.000000000000000000005'), BigDecimal('2.000000000000000000005')], [BigDecimal('3.000000000000000000005'), BigDecimal('4.000000000000000000005')]] end it "should parse integers in decimal arrays as BigDecimals" do c = @converter[1231] c.call("{1}").to_a.first.must_be_kind_of(BigDecimal) c.call("{1}").to_a.must_equal [BigDecimal('1')] c.call('{{{1,2},{3,4}},{{5,6},{7,8}}}').to_a.must_equal [[[BigDecimal('1'), BigDecimal('2')], [BigDecimal('3'), BigDecimal('4')]], [[BigDecimal('5'), BigDecimal('6')], [BigDecimal('7'), BigDecimal('8')]]] end it "should parse arrays with NULL values" do @converter.values_at(1007, 1009, 1022, 1231).each do |c| c.call("{NULL}").must_equal [nil] c.call("{NULL,NULL}").must_equal [nil,nil] c.call("{{NULL,NULL},{NULL,NULL}}").must_equal [[nil,nil],[nil,nil]] end end it 'should parse arrays with "NULL" values' do c = @converter[1009] c.call('{NULL,"NULL",NULL}').to_a.must_equal [nil, "NULL", nil] c.call('{NULLA,"NULL",NULL}').to_a.must_equal ["NULLA", "NULL", nil] end it "should raise errors for certain recognized invalid arrays" do c = @converter[1009] proc{c.call('')}.must_raise(Sequel::Error) proc{c.call('}')}.must_raise(Sequel::Error) proc{c.call('{{}')}.must_raise(Sequel::Error) proc{c.call('{}}')}.must_raise(Sequel::Error) proc{c.call('{a""}')}.must_raise(Sequel::Error) proc{c.call('{a{}}')}.must_raise(Sequel::Error) proc{c.call('{""a}')}.must_raise(Sequel::Error) end it "should literalize arrays without types correctly" do @db.literal(@m::PGArray.new([])).must_equal 'ARRAY[]' @db.literal(@m::PGArray.new([1])).must_equal 'ARRAY[1]' @db.literal(@m::PGArray.new([nil])).must_equal 'ARRAY[NULL]' @db.literal(@m::PGArray.new([nil, 1])).must_equal 'ARRAY[NULL,1]' @db.literal(@m::PGArray.new([1.0, 2.5])).must_equal 'ARRAY[1.0,2.5]' @db.literal(@m::PGArray.new([BigDecimal('1'), BigDecimal('2.000000000000000000005')])).must_equal 'ARRAY[1.0,2.000000000000000000005]' @db.literal(@m::PGArray.new([nil, "NULL"])).must_equal "ARRAY[NULL,'NULL']"
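# Untyped PGArray instances literalize as bare ARRAY[...] constructors; the
# typed examples below append an explicit ::type[] cast, which is also what
# lets an empty array be emitted unambiguously as '{}'::type[].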
@db.literal(@m::PGArray.new([nil, "{},[]'\""])).must_equal "ARRAY[NULL,'{},[]''\"']" end it "should literalize multidimensional arrays correctly" do @db.literal(@m::PGArray.new([[]])).must_equal 'ARRAY[[]]' @db.literal(@m::PGArray.new([[1, 2]])).must_equal 'ARRAY[[1,2]]' @db.literal(@m::PGArray.new([[3], [5]])).must_equal 'ARRAY[[3],[5]]' @db.literal(@m::PGArray.new([[[1.0]], [[2.5]]])).must_equal 'ARRAY[[[1.0]],[[2.5]]]' @db.literal(@m::PGArray.new([[[["NULL"]]]])).must_equal "ARRAY[[[['NULL']]]]" @db.literal(@m::PGArray.new([["a", "b"], ["{},[]'\"", nil]])).must_equal "ARRAY[['a','b'],['{},[]''\"',NULL]]" end it "should literalize with types correctly" do @db.literal(@m::PGArray.new([], :int4)).must_equal "'{}'::int4[]" @db.literal(@m::PGArray.new([1], :int4)).must_equal 'ARRAY[1]::int4[]' @db.literal(@m::PGArray.new([nil], :text)).must_equal 'ARRAY[NULL]::text[]' @db.literal(@m::PGArray.new([nil, 1], :int8)).must_equal 'ARRAY[NULL,1]::int8[]' @db.literal(@m::PGArray.new([1.0, 2.5], :real)).must_equal 'ARRAY[1.0,2.5]::real[]' @db.literal(@m::PGArray.new([BigDecimal('1'), BigDecimal('2.000000000000000000005')], :decimal)).must_equal 'ARRAY[1.0,2.000000000000000000005]::decimal[]' @db.literal(@m::PGArray.new([nil, "NULL"], :varchar)).must_equal "ARRAY[NULL,'NULL']::varchar[]" @db.literal(@m::PGArray.new([nil, "{},[]'\""], :"varchar(255)")).must_equal "ARRAY[NULL,'{},[]''\"']::varchar(255)[]" end it "should have Sequel.pg_array method for easy PGArray creation" do @db.literal(Sequel.pg_array([1])).must_equal 'ARRAY[1]' @db.literal(Sequel.pg_array([1, 2], :int4)).must_equal 'ARRAY[1,2]::int4[]' @db.literal(Sequel.pg_array([[[1], [2]], [[3], [4]]], :real)).must_equal 'ARRAY[[[1],[2]],[[3],[4]]]::real[]' end it "should have Sequel.pg_array return existing PGArrays as-is" do a = Sequel.pg_array([1]) Sequel.pg_array(a).object_id.must_equal(a.object_id) end it "should have Sequel.pg_array create a new PGArrays if type of existing does not match" do a = Sequel.pg_array([1], :int4) b = Sequel.pg_array(a, :int8) a.must_equal b a.wont_be_same_as(b) a.array_type.must_equal :int4 b.array_type.must_equal :int8 end it "should support using arrays as bound variables" do @db.bound_variable_arg(1, nil).must_equal 1 @db.bound_variable_arg(Sequel.pg_array([1,2]), nil).must_equal '{1,2}' @db.bound_variable_arg([1,2], nil).must_equal '{1,2}' @db.bound_variable_arg([[1,2]], nil).must_equal '{{1,2}}' @db.bound_variable_arg([1.0,2.0], nil).must_equal '{1.0,2.0}' @db.bound_variable_arg([Sequel.lit('a'), Sequel.blob("a\0'\"")], nil).must_equal '{a,"a\\\\000\\\\047\\""}' @db.bound_variable_arg(["\\ \"", 'NULL', nil], nil).must_equal '{"\\\\ \\"","NULL",NULL}' end it "should parse array types from the schema correctly" do @db.fetch = [{:name=>'id', :db_type=>'integer'}, {:name=>'i', :db_type=>'integer[]'}, {:name=>'f', :db_type=>'real[]'}, {:name=>'d', :db_type=>'numeric[]'}, {:name=>'t', :db_type=>'text[]'}] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:integer, :integer_array, :real_array, :decimal_array, :string_array] end it "should set :callable_default schema entries if default value is recognized" do @db.fetch = [{:name=>'id', :db_type=>'integer', :default=>'1'}, {:name=>'t', :db_type=>'text[]', :default=>"'{}'::text[]"}] s = @db.schema(:items) s[0][1][:callable_default].must_be_nil v = s[1][1][:callable_default].call Sequel::Postgres::PGArray.===(v).must_equal true @db.literal(v).must_equal "'{}'::text[]" v << 'a' @db.literal(v).must_equal "ARRAY['a']::text[]" end it "should support typecasting of the 
various array types" do { :integer=>{:class=>Integer, :convert=>['1', 1, '1']}, :float=>{:db_type=>'double precision', :class=>Float, :convert=>['1.1', 1.1, '1.1']}, :decimal=>{:db_type=>'numeric', :class=>BigDecimal, :convert=>['1.00000000000000000000000001', BigDecimal('1.00000000000000000000000001'), '1.00000000000000000000000001']}, :string=>{:db_type=>'text', :class=>String, :convert=>[1, '1', "'1'"]}, :bigint=>{:class=>Integer, :convert=>['1', 1, '1']}, :boolean=>{:class=>TrueClass, :convert=>['t', true, 'true']}, :blob=>{:db_type=>'bytea', :class=>Sequel::SQL::Blob, :convert=>['1', '1', "'1'"]}, :date=>{:class=>Date, :convert=>['2011-10-12', Date.new(2011, 10, 12), "'2011-10-12'"]}, :time=>{:db_type=>'time without time zone', :class=>Sequel::SQLTime, :convert=>['01:02:03', Sequel::SQLTime.create(1, 2, 3), "'01:02:03'"]}, :datetime=>{:db_type=>'timestamp without time zone', :class=>Time, :convert=>['2011-10-12 01:02:03', Time.local(2011, 10, 12, 1, 2, 3), "'2011-10-12 01:02:03'"]}, :time_timezone=>{:db_type=>'time with time zone', :class=>Sequel::SQLTime, :convert=>['01:02:03', Sequel::SQLTime.create(1, 2, 3), "'01:02:03'"]}, :datetime_timezone=>{:db_type=>'timestamp with time zone', :class=>Time, :convert=>['2011-10-12 01:02:03', Time.local(2011, 10, 12, 1, 2, 3), "'2011-10-12 01:02:03'"]}, }.each do |type, h| meth = :"#{type}_array" db_type = h[:db_type]||type klass = h[:class] array_in, value, output = h[:convert] [[array_in]].each do |input| v = @db.typecast_value(meth, input) v.must_equal [value] v.first.must_be_kind_of(klass) v.array_type.wont_equal nil @db.typecast_value(meth, Sequel.pg_array([value])).must_equal v @db.typecast_value(meth, v).object_id.must_equal(v.object_id) end [[[array_in]]].each do |input| v = @db.typecast_value(meth, input) v.must_equal [[value]] v.first.first.must_be_kind_of(klass) v.array_type.wont_equal nil @db.typecast_value(meth, Sequel.pg_array([[value]])).must_equal v @db.typecast_value(meth, v).object_id.must_equal(v.object_id) end @db.literal(@db.typecast_value(meth, [array_in])).must_equal "ARRAY[#{output}]::#{db_type}[]" @db.literal(@db.typecast_value(meth, [])).must_equal "'{}'::#{db_type}[]" end proc{@db.typecast_value(:integer_array, {})}.must_raise(Sequel::InvalidValue) end it "should support SQL::AliasMethods" do @db.select(Sequel.pg_array([1], :integer).as(:col1)).sql.must_equal 'SELECT ARRAY[1]::integer[] AS col1' end it "should support registering custom array types" do @db.register_array_type('foo') @db.typecast_value(:foo_array, []).class.must_equal(Sequel::Postgres::PGArray) @db.fetch = [{:name=>'id', :db_type=>'foo[]'}] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:foo_array] end it "should support registering custom types with :type_symbol option" do @db.register_array_type('foo', :type_symbol=>:bar) @db.typecast_value(:bar_array, []).class.must_equal(Sequel::Postgres::PGArray) @db.fetch = [{:name=>'id', :db_type=>'foo[]'}] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:bar_array] end it "should support using a block as a custom conversion proc given as block" do @db.register_array_type('foo', :oid=>1234){|s| (s*2).to_i} @db.conversion_procs[1234].call('{1}').must_equal [11] end it "should support using a block as a custom conversion proc given as :converter option" do @db.register_array_type('foo', :oid=>1234, :converter=>proc{|s| (s*2).to_i}) @db.conversion_procs[1234].call('{1}').must_equal [11] end it "should support using an existing scalar conversion proc via the :scalar_oid option" do
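# A sketch of what :scalar_oid does: it reuses the conversion proc already
# registered for the given scalar type OID (16 is the built-in boolean OID)
# to convert each array element, so '{t}' parses to [true] below.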
@db.register_array_type('foo', :oid=>1234, :scalar_oid=>16) @db.conversion_procs[1234].call('{t}').must_equal [true] end it "should not raise an error if using :scalar_oid option with a nonexistent scalar conversion proc" do @db.register_array_type('foo', :oid=>1234, :scalar_oid=>0) @db.conversion_procs[1234].call('{t}').must_equal ["t"] end it "should raise an error if using :converter option and a block argument" do proc{@db.register_array_type('foo', :converter=>proc{}){}}.must_raise(Sequel::Error) end it "should raise an error if using :scalar_oid option and a block argument" do proc{@db.register_array_type('foo', :scalar_oid=>16){}}.must_raise(Sequel::Error) end it "should support registering custom types with :oid option" do @db.register_array_type('foo', :oid=>1) @db.conversion_procs[1].call('{1}').class.must_equal(Sequel::Postgres::PGArray) end it "should support registering converters with blocks" do @db.register_array_type('foo', :oid=>4){|s| s.to_i * 2} @db.conversion_procs[4].call('{{1,2},{3,4}}').must_equal [[2, 4], [6, 8]] end it "should support registering custom types with :array_type option" do @db.register_array_type('foo', :oid=>3, :array_type=>:blah) @db.literal(@db.conversion_procs[3].call('{}')).must_equal "'{}'::blah[]" end it "should not support registering custom array types on a per-Database basis for frozen databases" do @db.freeze proc{@db.register_array_type('banana', :oid=>7865){|s| s}}.must_raise RuntimeError, TypeError end it "should support registering custom array types on a per-Database basis" do @db.register_array_type('banana', :oid=>7865){|s| s} @db.typecast_value(:banana_array, []).class.must_equal(Sequel::Postgres::PGArray) @db.fetch = [{:name=>'id', :db_type=>'banana[]'}] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:banana_array] @db.conversion_procs.must_include(7865) @db.respond_to?(:typecast_value_banana_array, true).must_equal true db = Sequel.connect('mock://postgres', :quote_identifiers=>false) db.extend_datasets(Module.new{def supports_timestamp_timezones?; false; end; def supports_timestamp_usecs?; false; end}) db.extension(:pg_array) db.fetch = [{:name=>'id', :db_type=>'banana[]'}] db.schema(:items).map{|e| e[1][:type]}.must_equal [nil] db.conversion_procs.wont_include(7865) db.respond_to?(:typecast_value_banana_array, true).must_equal false end it "should automatically look up the array and scalar oids when registering per-Database types" do @db.fetch = [[{:oid=>21, :typarray=>7866}], [{:name=>'id', :db_type=>'banana[]'}]] @db.register_array_type('banana', :scalar_typecast=>:integer) @db.sqls.must_equal ["SELECT typarray, oid FROM pg_type WHERE (typname = 'banana') LIMIT 1"] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:banana_array] @db.conversion_procs[7866].call("{1,2}").must_equal [1,2] @db.typecast_value(:banana_array, %w'1 2').must_equal [1,2] end it "should not automatically look up oids if given both scalar and array oids" do @db.register_array_type('banana', :oid=>7866, :scalar_oid=>21, :scalar_typecast=>:integer) @db.sqls.must_equal [] @db.conversion_procs[7866].call("{1,2}").must_equal [1,2] @db.typecast_value(:banana_array, %w'1 2').must_equal [1,2] end it "should not automatically look up oids if given array oid and block" do @db.register_array_type('banana', :oid=>7866, :scalar_typecast=>:integer){|s| s.to_i} @db.sqls.must_equal [] @db.conversion_procs[7866].call("{1,2}").must_equal [1,2] @db.typecast_value(:banana_array, %w'1 2').must_equal [1,2] end it "should set appropriate timestamp conversion procs" do
@db.conversion_procs[1185].call('{"2011-10-20 11:12:13"}').must_equal [Time.local(2011, 10, 20, 11, 12, 13)] @db.conversion_procs[1115].call('{"2011-10-20 11:12:13"}').must_equal [Time.local(2011, 10, 20, 11, 12, 13)] end ['foo', :foo].each do |arg| it "should set appropriate timestamp conversion procs when adding conversion procs with a #{arg.class}" do @db.fetch = [[{:oid=>2222}], [{:oid=>2222, :typarray=>2223}]] @db.add_named_conversion_proc(arg){|v| v*2} procs = @db.conversion_procs procs[1185].call('{"2011-10-20 11:12:13"}').must_equal [Time.local(2011, 10, 20, 11, 12, 13)] procs[1115].call('{"2011-10-20 11:12:13"}').must_equal [Time.local(2011, 10, 20, 11, 12, 13)] procs[2222].call('1').must_equal '11' procs[2223].call('{"2"}').must_equal ['22'] end end it "should return correct results for Database#schema_type_class" do @db.register_array_type('banana', :oid=>7866, :scalar_typecast=>:integer){|s| s.to_i} @db.schema_type_class(:banana_array).must_equal Sequel::Postgres::PGArray @db.schema_type_class(:integer).must_equal Integer end it "should convert ruby arrays to pg arrays as :default option values" do @db.create_table('a'){column :b, 'c[]', :default=>[]; Integer :d} @db.sqls.must_equal ['CREATE TABLE a (b c[] DEFAULT (ARRAY[]::c[]), d integer)'] end end
sequel-5.63.0/spec/extensions/pg_auto_constraint_validations_spec.rb
require_relative "spec_helper" describe "pg_auto_constraint_validations plugin" do def create_model(ds) @ds = ds @ds.send(:columns=, [:id, :i]) @db.fetch = @metadata_results.dup c = Sequel::Model(@ds) c.plugin :pg_auto_constraint_validations c end before do info = @info = {:schema=>'public', :table=>'items'} @db = Sequel.mock(:host=>'postgres') def @db.schema(*) [[:i, {}], [:id, {}]] end @set_error = lambda{|ec, ei| @db.fetch = @db.autoid = @db.numrows = ec; info.merge!(ei)} @db.define_singleton_method(:error_info){|e| info} @metadata_results = [ [{:constraint=>'items_i_check', :column=>'i', :definition=>'CHECK i'}, {:constraint=>'items_i_id_check', :column=>'i', :definition=>'CHECK i + id < 20'}, {:constraint=>'items_i_id_check', :column=>'id', :definition=>'CHECK i + id < 20'}, {:constraint=>'items_i_foo_check', :column=>nil, :definition=>'CHECK foo() < 20'}], [{:name=>'items_i_uidx', :unique=>true, :column=>'i', :deferrable=>false}, {:name=>'items_i2_idx', :unique=>false, :column=>'i', :deferrable=>false}], [{:name=>'items_i_fk', :column=>'i', :on_update=>'a', :on_delete=>'a', :table=>'items2', :refcolumn=>'id', :schema=>'public'}], [{:name=>'items2_i_fk', :column=>'id', :on_update=>'a', :on_delete=>'a', :table=>'items2', :refcolumn=>'i', :schema=>'public'}], [{:nspname=>'public', :relname=>'items'}] ] @c = create_model(@db[:items]) end it "should handle check constraint failures as validation errors when creating" do o = @c.new(:i=>12) @set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_check'] proc{o.save}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['is invalid']) end it "should handle check constraint failures as validation errors when updating" do o = @c.load(:i=>3)
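# @set_error (defined in the before block) makes the next mock query raise
# the given exception class and merges the given hash into the
# Database#error_info result that the plugin introspects.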
@set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_check'] proc{o.update(:i=>12)}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['is invalid']) end it "should handle unique constraint failures as validation errors when creating" do o = @c.new(:i=>2) @set_error[Sequel::UniqueConstraintViolation, :constraint=>'items_i_uidx'] proc{o.save}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['is already taken']) end it "should handle unique constraint failures as validation errors when updating" do o = @c.load(:id=>5, :i=>3) @set_error[Sequel::UniqueConstraintViolation, :constraint=>'items_i_uidx'] proc{o.update(:i=>2)}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['is already taken']) end it "should handle not null constraint failures as validation errors when creating" do o = @c.new(:i=>5) @set_error[Sequel::NotNullConstraintViolation, :column=>'i'] proc{o.save}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['is not present']) end it "should handle not null constraint failures as validation errors when updating" do o = @c.load(:i=>3) @set_error[Sequel::NotNullConstraintViolation, :column=>'i'] proc{o.update(:i=>nil)}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['is not present']) end it "should handle foreign key constraint failures as validation errors when creating" do o = @c.new(:i=>3) @set_error[Sequel::ForeignKeyConstraintViolation, :constraint=>'items_i_fk', :message_primary=>'insert or'] proc{o.save}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['is invalid']) end it "should handle foreign key constraint failures as validation errors when updating" do o = @c.load(:i=>1) @set_error[Sequel::ForeignKeyConstraintViolation, :constraint=>'items_i_fk', :message_primary=>'insert or'] proc{o.update(:i=>3)}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['is invalid']) end it "should handle foreign key constraint failures in other tables as validation errors when updating" do o = @c.load(:i=>1) @set_error[Sequel::ForeignKeyConstraintViolation, :constraint=>'items2_i_fk', :message_primary=>'update or', :schema=>'public', :table=>'items2'] proc{o.update(:i=>3)}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['cannot be changed currently']) end it "should handle symbol, string, and identifier table names" do [@db[:items], @db.from('items'), @db.from{items}, @db.from{public[:items]}].each do |ds| c = create_model(ds) @set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_check'] o = c.new(:i=>3) proc{o.save}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['is invalid']) end end it "should skip handling of other table types such as subqueries and functions" do [@db.from{foo(:bar)}, @db[:a, :b]].each do |ds| @db.fetch = @metadata_results.dup @c.dataset = ds o = @c.new(:i=>3) @set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_check'] proc{o.save}.must_raise Sequel::CheckConstraintViolation end end it "should skip handling if the error_info method is not supported" do @db.singleton_class.send(:remove_method, :error_info) c = create_model(@db[:items]) @set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_check'] o = c.new(:i=>3) proc{o.save}.must_raise Sequel::CheckConstraintViolation end it "should not handle constraint failures if they can't be converted" do o = @c.new(:i=>12) @set_error[Sequel::NotNullConstraintViolation, {}] proc{o.save}.must_raise Sequel::NotNullConstraintViolation 
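# Without usable error_info metadata (no matching :constraint or :column),
# the plugin cannot map the failure onto a model column, so the original
# database exception propagates unchanged, as each case below shows.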
@set_error[Sequel::CheckConstraintViolation, {}] proc{o.save}.must_raise Sequel::CheckConstraintViolation @set_error[Sequel::UniqueConstraintViolation, {}] proc{o.save}.must_raise Sequel::UniqueConstraintViolation @set_error[Sequel::ForeignKeyConstraintViolation, {}] proc{o.save}.must_raise Sequel::ForeignKeyConstraintViolation @set_error[Sequel::ForeignKeyConstraintViolation, :constraint=>'items_i_fk', :message_primary=>'foo'] proc{o.save}.must_raise Sequel::ForeignKeyConstraintViolation @set_error[Sequel::ForeignKeyConstraintViolation, :constraint=>'items_i_fk', :message_primary=>'update or'] proc{o.save}.must_raise Sequel::ForeignKeyConstraintViolation @set_error[Sequel::ForeignKeyConstraintViolation, :constraint=>'items_x_fk', :message_primary=>'insert or'] proc{o.save}.must_raise Sequel::ForeignKeyConstraintViolation end it "should reraise original exception if there is an error" do o = @c.new(:i=>12) def o.add_pg_constraint_validation_error; end @set_error[Sequel::NotNullConstraintViolation, :column=>'i'] proc{o.save}.must_raise Sequel::NotNullConstraintViolation end it "should not handle constraint failures if schema or table do not match" do o = @c.new(:i=>12) @set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_check', :schema=>'x'] proc{o.save}.must_raise Sequel::CheckConstraintViolation @set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_check', :schema=>'public', :table=>'x'] proc{o.save}.must_raise Sequel::CheckConstraintViolation end it "should handle constraint failures when disabling insert returning" do c = create_model(@db[:items].disable_insert_returning) o = c.new(:i=>12) o.id = 1 @set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_check'] proc{o.save}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['is invalid']) end it "should handle multi-column constraint failures as validation errors" do o = @c.new(:i=>12) @set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_id_check'] proc{o.save}.must_raise Sequel::ValidationFailed o.errors.must_equal([:i, :id]=>['is invalid']) end it "should handle multi-column constraint failures as validation errors when using the error_splitter plugin" do @c.plugin :error_splitter o = @c.new(:i=>12) @set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_id_check'] proc{o.save}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['is invalid'], :id=>['is invalid']) end it "should handle overridden constraint failures as validation errors when updating" do o = @c.load(:i=>3) @c.pg_auto_constraint_validation_override(:items_i_ocheck, :i, "foo bar") @set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_ocheck'] proc{o.update(:i=>12)}.must_raise Sequel::ValidationFailed o.errors.must_equal(:i=>['foo bar']) end it "should handle dumping cached metadata and loading metadata from cache" do cache_file = "spec/files/pgacv-spec-#{$$}.cache" begin @ds = @db[:items] @ds.send(:columns=, [:id, :i]) @db.fetch = @metadata_results.dup c = Sequel::Model(@ds) def c.name; 'Foo' end @db.sqls c.plugin :pg_auto_constraint_validations, :cache_file=>cache_file @db.sqls.length.must_equal 5 o = c.new(:i=>12) @set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_id_check'] proc{o.save}.must_raise Sequel::ValidationFailed c.dump_pg_auto_constraint_validations_cache @db.fetch = [] c = Sequel::Model(@ds) def c.name; 'Foo' end @db.sqls c.plugin :pg_auto_constraint_validations, :cache_file=>cache_file @db.sqls.must_be_empty o = c.new(:i=>12) 
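# The first plugin load above issued the five constraint metadata queries;
# after dump_pg_auto_constraint_validations_cache runs below, the second
# load reads the cache file instead and @db.sqls stays empty.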
@set_error[Sequel::CheckConstraintViolation, :constraint=>'items_i_id_check'] proc{o.save}.must_raise Sequel::ValidationFailed ensure File.delete(cache_file) if File.file?(cache_file) end end
it "should raise error if attempting to dump cached metadata when not using caching" do proc{@c.dump_pg_auto_constraint_validations_cache}.must_raise Sequel::Error end
it "should allow loading into models without a dataset" do c = Class.new(Sequel::Model) c.plugin :pg_auto_constraint_validations c.pg_auto_constraint_validations.must_be_nil end end
sequel-5.63.0/spec/extensions/pg_auto_parameterize_spec.rb000066400000000000000000000464671434214120600240070ustar00rootroot00000000000000
require File.join(File.dirname(File.expand_path(__FILE__)), "spec_helper")
describe "pg_auto_parameterize extension" do before do @db = Sequel.connect('mock://postgres') @db.synchronize{|c| def c.escape_bytea(v) v*2 end} @db.extend_datasets{def use_cursor(*) self end} @db.extend(Module.new do def copy_table(*a) run(copy_table_sql(*a)) end private def copy_table_sql(ds, *) "COPY TABLE #{ds.is_a?(Sequel::Dataset) ? ds.sql : ds}" end end) @db.extension :pg_auto_parameterize end
it "should parameterize select, insert, update, delete, and merge statements" do @db.fetch = {:a=>1} @db.numrows = 1 @db.autoid = 1 @db[:table].all.must_equal [{:a=>1}] @db.sqls.must_equal ['SELECT * FROM "table"'] @db[:table].filter(:a=>1).all.must_equal [{:a=>1}] @db.sqls.must_equal ['SELECT * FROM "table" WHERE ("a" = $1::int4) -- args: [1]'] @db[:table].filter(:a=>1).update(:b=>'a').must_equal 1 @db.sqls.must_equal ['UPDATE "table" SET "b" = $1 WHERE ("a" = $2::int4) -- args: ["a", 1]'] @db[:table].filter(:a=>1).delete.must_equal 1 @db.sqls.must_equal ['DELETE FROM "table" WHERE ("a" = $1::int4) -- args: [1]'] @db[:table].insert(:a=>1).must_equal 1 @db.sqls.must_equal ['INSERT INTO "table" ("a") VALUES ($1::int4) RETURNING "id" -- args: [1]'] @db[:table]. merge_using(:m2, :i1=>:i2). merge_do_nothing_when_not_matched{b > 50}. merge_insert(:i1=>Sequel[:i2], :a=>Sequel[:b]+11). merge_do_nothing_when_matched{a > 50}. merge_delete{a > 30}. merge_update(:i1=>Sequel[:i1]+:i2+10, :a=>Sequel[:a]+:b+20).
merge sqls = @db.sqls sqls.must_equal ['MERGE INTO "table" USING "m2" ON ("i1" = "i2") WHEN NOT MATCHED AND ("b" > $1::int4) THEN DO NOTHING WHEN NOT MATCHED THEN INSERT ("i1", "a") VALUES ("i2", ("b" + $2::int4)) WHEN MATCHED AND ("a" > $1::int4) THEN DO NOTHING WHEN MATCHED AND ("a" > $3::int4) THEN DELETE WHEN MATCHED THEN UPDATE SET "i1" = ("i1" + "i2" + $4::int4), "a" = ("a" + "b" + $5::int4)'] sqls[0].args.must_equal [50, 11, 30, 10, 20] end it "should parameterize insert of multiple rows" do args = (1...40).to_a @db[:table].import([:a], args) sqls = @db.sqls sqls.size.must_equal 1 sqls[0].must_equal 'INSERT INTO "table" ("a") VALUES ' + args.map{|i| "($#{i}::int4)"}.join(', ') + " -- args: #{args.inspect}" end it "should default to splitting inserts of multiple rows to 40 at a time" do args = (1...81).to_a @db[:table].import([:a], args) sqls = @db.sqls sqls.size.must_equal 2 sqls[0].must_equal 'INSERT INTO "table" ("a") VALUES ' + args[0...40].map{|i| "($#{i}::int4)"}.join(', ') + " -- args: #{args[0...40].inspect}" sqls[1].must_equal 'INSERT INTO "table" ("a") VALUES ' + args[0...40].map{|i| "($#{i}::int4)"}.join(', ') + " -- args: #{args[40...80].inspect}" end it "should automatically parameterize queries strings, blobs, numerics, dates, and times" do ds = @db[:table] pr = proc do |sql, *args| arg = args[0] parg = args[1] || arg s = ds.filter(:a=>arg).sql s.must_equal sql if parg == :nil s.args.must_be_nil else s.args.must_equal [parg] end end pr.call('SELECT * FROM "table" WHERE ("a" = $1::int4)', 1) pr.call('SELECT * FROM "table" WHERE ("a" = $1::int8)', 18446744073709551616) pr.call('SELECT * FROM "table" WHERE ("a" = $1::numeric)', 1.1) pr.call('SELECT * FROM "table" WHERE ("a" = $1::double precision)', (1.0/0.0)) pr.call('SELECT * FROM "table" WHERE ("a" = $1::numeric)', BigDecimal('1.01')) pr.call('SELECT * FROM "table" WHERE ("a" = $1)', "a") pr.call('SELECT * FROM "table" WHERE ("a" = $1::bytea)', Sequel.blob("a\0b")) pr.call('SELECT * FROM "table" WHERE ("a" = $1::time)', Sequel::SQLTime.create(1, 2, 3, 500000)) pr.call('SELECT * FROM "table" WHERE ("a" = $1::date)', Date.today) pr.call('SELECT * FROM "table" WHERE ("a" = $1::timestamp)', DateTime.new(2012, 1, 2, 3, 4, 5)) pr.call('SELECT * FROM "table" WHERE ("a" = $1::timestamp)', Time.utc(2012, 1, 2, 3, 4, 5)) pr.call('SELECT * FROM "table" WHERE ("a" = 1)', Sequel.lit('1'), :nil) pr.call('SELECT * FROM "table" WHERE ("a" = "b")', :b, :nil) end it "should automatically parameterize and not typecast Sequel::SQL::Cast values" do ds = @db[:table] pr = proc do |*args| arg = args[0] parg = args[1] || arg s = ds.filter(:a=>Sequel.cast(arg, :foo)).sql s.must_equal 'SELECT * FROM "table" WHERE ("a" = CAST($1 AS foo))' if parg == :nil s.args.must_be_nil else s.args.must_equal [parg] end end pr.call(1) pr.call(18446744073709551616) pr.call(1.1) pr.call(BigDecimal('1.01')) pr.call("a") pr.call(Sequel.blob("a\0b")) pr.call(Sequel::SQLTime.create(1, 2, 3, 500000)) pr.call(Date.today) pr.call(DateTime.new(2012, 1, 2, 3, 4, 5)) pr.call(Time.utc(2012, 1, 2, 3, 4, 5)) sql = ds.where(:a=>Sequel.cast(Sequel.lit('1'), :foo)).sql sql.must_equal 'SELECT * FROM "table" WHERE ("a" = CAST(1 AS foo))' sql.args.must_be_nil end it "should parameterize model pk lookup and delete queries" do m = Sequel::Model(@db[:table].with_fetch(:id=>1).with_numrows(1)) @db.sqls m[1].must_equal m.load(:id=>1) @db.sqls.must_equal ['SELECT * FROM "table" WHERE ("id" = $1::int4) LIMIT 1 -- args: [1]'] o = m.load(:id=>1) o.delete.must_be_same_as o 
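# Both the lookup above and this delete are sent as parameterized statements.
# Conceptually, the adapter passes the SQL string and its args separately,
# roughly like the following pg driver call (an illustrative sketch of the
# extended query protocol, not code used by this spec):
#
#   conn.exec_params('DELETE FROM "table" WHERE ("id" = $1::int4)', [1])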
@db.sqls.must_equal ['DELETE FROM "table" WHERE ("id" = $1::int4) -- args: [1]'] end it "should use same parameters when using select_group" do sql = @db[:a].select_group{foo(1).as(:f)}.sql sql.must_equal 'SELECT foo($1::int4) AS "f" FROM "a" GROUP BY foo($1::int4)' sql.args.must_equal [1] end it "should use same parameters when same objects" do expr = Sequel.function(:foo, 1) sql = @db[:a].select(expr.as(:f)).group(expr).sql sql.must_equal 'SELECT foo($1::int4) AS "f" FROM "a" GROUP BY foo($1::int4)' sql.args.must_equal [1] expr = Sequel.function(:foo, 'a') sql = @db[:a].select(expr.as(:f)).group(expr).sql sql.must_equal 'SELECT foo($1) AS "f" FROM "a" GROUP BY foo($1)' sql.args.must_equal ['a'] end it "should use different parameters for different but equal objects" do sql = @db[:a].select{foo("a").as("f")}.group{foo("a")}.sql sql.must_equal 'SELECT foo($1) AS "f" FROM "a" GROUP BY foo($2)' sql.args.must_equal ['a', 'a'] end it "should parameterize ORDER BY if possible" do sql = @db[:a].order{foo(1)}.sql sql.must_equal 'SELECT * FROM "a" ORDER BY foo($1::int4)' sql.args.must_equal [1] end it "should not parameterize ORDER BY if it contains integers or ordered integers" do sql = @db[:a].order(1).sql sql.must_equal 'SELECT * FROM "a" ORDER BY 1' sql.args.must_be_nil sql = @db[:a].reverse(1).sql sql.must_equal 'SELECT * FROM "a" ORDER BY 1 DESC' sql.args.must_be_nil end it "should not parameterize LIMIT or OFFSET" do sql = @db[:a].limit(1).sql sql.must_equal 'SELECT * FROM "a" LIMIT 1' sql.args.must_be_nil sql = @db[:a].offset(1).sql sql.must_equal 'SELECT * FROM "a" OFFSET 1' sql.args.must_be_nil sql = @db[:a].limit(1, 1).sql sql.must_equal 'SELECT * FROM "a" LIMIT 1 OFFSET 1' sql.args.must_be_nil end it "should not parameterize in CTE CYCLE clauses" do ds = @db[:x] sql = @db[:t].with_recursive(:t, ds.filter(:id=>1), ds.join(:t, :id=>:parent_id).select_all(:i1), :cycle=>{:columns=>[:id, :parent_id], :path_column=>:pc, :cycle_column=>:cc, :cycle_value=>1, :noncycle_value=>0}).sql sql.must_equal 'WITH RECURSIVE "t" AS (SELECT * FROM "x" WHERE ("id" = $1::int4) UNION ALL (SELECT "i1".* FROM "x" INNER JOIN "t" ON ("t"."id" = "x"."parent_id"))) CYCLE "id", "parent_id" SET "cc" TO 1 DEFAULT 0 USING "pc" SELECT * FROM "t"' sql.args.must_equal [1] sql = @db[:t].with_recursive(:t, ds.filter(:parent_id=>nil), ds.join(:t, :id=>:parent_id).select_all(:i1), :search=>{:by=>:id}).sql sql.must_equal 'WITH RECURSIVE "t" AS (SELECT * FROM "x" WHERE ("parent_id" IS NULL) UNION ALL (SELECT "i1".* FROM "x" INNER JOIN "t" ON ("t"."id" = "x"."parent_id"))) SEARCH DEPTH FIRST BY "id" SET "ordercol" SELECT * FROM "t"' sql.args.must_be_nil end it "should parameterize datasets with static SQL using placeholders" do sql = @db.fetch("SELECT a FROM b WHERE c = ?", 2).sql sql.must_equal 'SELECT a FROM b WHERE c = $1::int4' sql.args.must_equal [2] end it "should parameterize datasets with static SQL using placeholders in subqueries" do sql = @db[:t].from(@db.fetch("SELECT a FROM b WHERE c = ?", 2)).sql sql.must_equal 'SELECT * FROM (SELECT a FROM b WHERE c = $1::int4) AS "t1"' sql.args.must_equal [2] end it "should automatically parameterize when using with_sql" do sql = @db[:table].filter(:a=>1, :b=>2).with_sql(:update_sql, :b=>3).sql sql.must_equal 'UPDATE "table" SET "b" = $1::int4 WHERE (("a" = $2::int4) AND ("b" = $3::int4))' sql.args.must_equal [3, 1, 2] end it "should automatically parameterize when using with_sql in subquery" do sql = @db.from(@db[:table].filter(:a=>1, :b=>2).with_sql(:delete_sql)).sql 
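# The placeholders from the wrapped DELETE are carried into the outer
# SELECT's parameter list, as the assertions below verify.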
sql.must_equal 'SELECT * FROM (DELETE FROM "table" WHERE (("a" = $1::int4) AND ("b" = $2::int4))) AS "t1"' sql.args.must_equal [1, 2] end
it "should parameterize datasets with static SQL using placeholders in a subquery" do sql = @db.from(@db.fetch("SELECT a FROM b WHERE c = ?", 2)).sql sql.must_equal 'SELECT * FROM (SELECT a FROM b WHERE c = $1::int4) AS "t1"' sql.args.must_equal [2] end
it "should automatically switch column IN (int, ...) to column = ANY($) with parameter" do sql = @db[:table].where(:a=>[1,2,3]).sql sql.must_equal 'SELECT * FROM "table" WHERE ("a" = ANY(CAST($1 AS int8[])))' sql.args.must_equal ['{1,2,3}'] sql = @db[:table].where(:a=>[1,nil,3]).sql sql.must_equal 'SELECT * FROM "table" WHERE ("a" = ANY(CAST($1 AS int8[])))' sql.args.must_equal ['{1,NULL,3}'] end
it "should automatically switch column NOT IN (int, ...) to column != ALL($) with parameter" do sql = @db[:table].exclude(:a=>[1,2,3]).sql sql.must_equal 'SELECT * FROM "table" WHERE ("a" != ALL(CAST($1 AS int8[])))' sql.args.must_equal ['{1,2,3}'] sql = @db[:table].exclude(:a=>[1,nil,3]).sql sql.must_equal 'SELECT * FROM "table" WHERE ("a" != ALL(CAST($1 AS int8[])))' sql.args.must_equal ['{1,NULL,3}'] end
it "should not convert IN/NOT IN expressions that don't use integers" do sql = @db[:table].where([:a, :b]=>%w[1 2]).sql sql.must_equal 'SELECT * FROM "table" WHERE (("a", "b") IN ($1, $2))' sql.args.must_equal %w[1 2] sql = @db[:table].exclude([:a, :b]=>%w[1 2]).sql sql.must_equal 'SELECT * FROM "table" WHERE (("a", "b") NOT IN ($1, $2))' sql.args.must_equal %w[1 2] end
it "should not convert multiple column IN expressions" do sql = @db[:table].where([:a, :b]=>[[1,2]]).sql sql.must_equal 'SELECT * FROM "table" WHERE (("a", "b") IN (($1::int4, $2::int4)))' sql.args.must_equal [1, 2] sql = @db[:table].exclude([:a, :b]=>[[1,2]]).sql sql.must_equal 'SELECT * FROM "table" WHERE (("a", "b") NOT IN (($1::int4, $2::int4)))' sql.args.must_equal [1, 2] end
it "should not convert single value expressions" do sql = @db[:table].where(:a=>[1]).sql sql.must_equal 'SELECT * FROM "table" WHERE ("a" IN ($1::int4))' sql.args.must_equal [1] sql = @db[:table].exclude(:a=>[1]).sql sql.must_equal 'SELECT * FROM "table" WHERE ("a" NOT IN ($1::int4))' sql.args.must_equal [1] end
it "should automatically parameterize pg_array with types correctly" do @db.extension :pg_array v = Sequel.pg_array([1], :int4) sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1::int4[])' sql.args.must_equal [v] v = Sequel.pg_array([1, nil], :int4) sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1::int4[])' sql.args.must_equal [v] end
it "should not automatically parameterize pg_array with internal expressions" do @db.extension :pg_array v = Sequel.pg_array([Sequel.function(:foo)], :int4) sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES (ARRAY[foo()]::int4[])' sql.args.must_be_nil end
it "should not automatically parameterize pg_array without type" do @db.extension :pg_array v = Sequel.pg_array([1]) sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES (ARRAY[$1::int4])' sql.args.must_equal [1] end
it "should automatically parameterize pg_hstore values" do @db.fetch = {:oid=>9999, :typname=>'hstore'} @db.extension :pg_hstore v = Sequel.hstore('a'=>'b') sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1::hstore)' sql.args.must_equal [v] end
it "should automatically parameterize pg_inet values" do @db.extension :pg_inet v =
IPAddr.new('127.0.0.1') sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1::inet)' sql.args.must_equal [v] end it "should automatically parameterize pg_inet values when loading pg_interval extension after" do @db.extension :pg_inet begin @db.extension :pg_interval rescue LoadError skip("cannot load pg_interval extension") else v = IPAddr.new('127.0.0.1') sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1::inet)' sql.args.must_equal [v] end end it "should automatically parameterize pg_json values" do @db.extension :pg_json v = Sequel.pg_json({}) sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1::json)' sql.args.must_equal [v] v = Sequel.pg_jsonb({}) sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1::jsonb)' sql.args.must_equal [v] end it "should automatically parameterize pg_multirange values" do @db.extension :pg_multirange v = Sequel.pg_multirange([1..2, 5..6], :int4multirange) sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1::int4multirange)' sql.args.length.must_equal 1 sql.args.must_equal [v] end it "should automatically parameterize pg_range values" do @db.extension :pg_range v = Sequel.pg_range(1..2, :int4range) sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1::int4range)' sql.args.must_equal [v] v = Sequel.pg_range(1..2) sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1)' sql.args.must_equal ['[1,2]'] end it "should automatically parameterize pg_row values if parts are automatically parameterizable" do @db.extension :pg_row aclass = Sequel::Postgres::PGRow::ArrayRow.subclass(:arow) hclass = Sequel::Postgres::PGRow::HashRow.subclass(:hrow, [:a, :b]) v = aclass.new([1, nil, 3]) sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1::"arow")' sql.args.must_equal [v] v = hclass.new(:a=>1, :b=>nil) sql = @db[:table].insert_sql(v) sql.must_equal 'INSERT INTO "table" VALUES ($1::"hrow")' sql.args.must_equal [v] sql = @db[:table].insert_sql(aclass.new([1, Sequel.function(:foo), 3])) sql.must_equal 'INSERT INTO "table" VALUES (ROW($1::int4, foo(), $2::int4)::"arow")' sql.args.must_equal [1, 3] sql = @db[:table].insert_sql(hclass.new(:a=>1, :b=>Sequel.function(:foo))) sql.must_equal 'INSERT INTO "table" VALUES (ROW($1::int4, foo())::"hrow")' sql.args.must_equal [1] end it "should show args with string when inspecting SQL if there are args" do @db[:table].sql.inspect.must_equal '"SELECT * FROM \\"table\\""' @db[:table].filter(:a=>1).sql.inspect.must_equal '"SELECT * FROM \\"table\\" WHERE (\\"a\\" = $1::int4); [1]"' end it "should keep args when adding to the SQL string" do (@db[:table].sql + ' -- foo').inspect.must_equal '"SELECT * FROM \\"table\\" -- foo"' (@db[:table].filter(:a=>1).sql + ' -- foo').inspect.must_equal '"SELECT * FROM \\"table\\" WHERE (\\"a\\" = $1::int4) -- foo; [1]"' end it "should freeze args when freezing" do sql = @db[:table].sql sql.freeze.must_be_same_as sql sql.args.must_be_nil sql = @db[:table].filter(:a=>1).sql sql.freeze.must_be_same_as sql sql.args.frozen?.must_equal true end it "should not support placeholder literalizers unless auto parameterization is disabled" do @db[:table].supports_placeholder_literalizer?.must_be_nil @db[:table].no_auto_parameterize.supports_placeholder_literalizer?.must_equal true end it "should not automatically parameterize if no_auto_parameterize is used" do ds = @db[:table].no_auto_parameterize 
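# With no_auto_parameterize, values are literalized inline into the SQL string
# instead of being collected as bound parameters, so each statement below
# contains literal values and carries no args.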
ds.filter(:a=>1).sql.must_equal 'SELECT * FROM "table" WHERE ("a" = 1)' ds.filter(:a=>1).delete_sql.must_equal 'DELETE FROM "table" WHERE ("a" = 1)' ds.filter(:a=>1).update_sql(:a=>2).must_equal 'UPDATE "table" SET "a" = 2 WHERE ("a" = 1)' ds.insert_sql(:a=>1).must_equal 'INSERT INTO "table" ("a") VALUES (1)' @db.sqls ds.import([:a], [1]) sqls = @db.sqls sqls.size.must_equal 1 sqls[0].must_equal 'INSERT INTO "table" ("a") VALUES (1)' end
it "should have no_auto_parameterize return self if automatic parameterization is already disabled" do ds = @db[:table].no_auto_parameterize ds.no_auto_parameterize.must_be_same_as ds end
it "should not auto parameterize objects wrapped with Sequel.skip_auto_param" do @db[:table].filter(:a=>Sequel.skip_pg_auto_param(1)).sql.must_equal 'SELECT * FROM "table" WHERE ("a" = 1)' @db[:table].no_auto_parameterize.filter(:a=>Sequel.skip_pg_auto_param(1)).sql.must_equal 'SELECT * FROM "table" WHERE ("a" = 1)' end
it "should not automatically parameterize prepared statements" do @db[:table].filter(:a=>1, :b=>:$b).prepare(:select, :foo).sql.must_equal 'SELECT * FROM "table" WHERE (("a" = 1) AND ("b" = $b))' end
it "should not parameterize datasets with static SQL not using placeholders" do @db.fetch("SELECT a FROM b WHERE c = 2").sql.must_equal 'SELECT a FROM b WHERE c = 2' end
it "should not parameterize datasets with static SQL using placeholders in a subselect if no_auto_parameterize is used" do @db.from(@db.fetch("SELECT a FROM b WHERE c = ?", 2)).no_auto_parameterize.sql.must_equal 'SELECT * FROM (SELECT a FROM b WHERE c = 2) AS "t1"' end
it "should not auto parameterize when using cursors" do @db[:table].filter(:a=>1).use_cursor.opts[:no_auto_parameterize].must_equal true end
it "should not attempt to parameterize create_view" do @db.create_view :foo, @db[:table].filter(:a=>1) @db.sqls.must_equal ['CREATE VIEW "foo" AS SELECT * FROM "table" WHERE ("a" = 1)'] end
it "should not attempt to parameterize create_table(:as=>ds)" do @db.create_table(:foo, :as=>@db[:table].filter(:a=>1)) @db.sqls.must_equal ['CREATE TABLE "foo" AS SELECT * FROM "table" WHERE ("a" = 1)'] end
it "should not attempt to parameterize copy table" do @db.copy_table(@db[:table].where(:a=>1)) @db.sqls.must_equal ['COPY TABLE SELECT * FROM "table" WHERE ("a" = 1)'] @db.copy_table(:table) @db.sqls.must_equal ['COPY TABLE table'] end
it "should raise when trying to load the extension into an unsupported database" do proc{Sequel.mock.extension :pg_auto_parameterize}.must_raise Sequel::Error end end
sequel-5.63.0/spec/extensions/pg_enum_spec.rb000066400000000000000000000156641434214120600212320ustar00rootroot00000000000000
require_relative "spec_helper" Sequel.extension :migration
describe "pg_enum extension" do mod = Module.new do private def schema_parse_table(*) [[:a, {:oid=>1}], [:b, {:oid=>1234}]] end def _metadata_dataset super.with_fetch([[{:v=>1, :enumlabel=>'a'}, {:v=>1, :enumlabel=>'b'}, {:v=>1, :enumlabel=>'c'}],
[{:typname=>'enum1', :v=>212389}]]) end end
before do @db = Sequel.connect('mock://postgres') @db.extend_datasets{def quote_identifiers?; false end} @db.extend(mod) @db.extension(:pg_array, :pg_enum) @db.sqls end
it "should parse enum labels respecting the sort order" do @db.send(:parse_enum_labels) @db.sqls.must_equal ["SELECT CAST(enumtypid AS integer) AS v, enumlabel FROM pg_enum ORDER BY enumtypid, enumsortorder", "SELECT typname, CAST(typarray AS integer) AS v FROM pg_type WHERE ((1 = 0) AND (typarray != 0))"] end
it "should parse enum labels without the sort order on PostgreSQL < 9.1" do def @db.server_version(_=nil); 90000; end @db.send(:parse_enum_labels) @db.sqls.must_equal ["SELECT CAST(enumtypid AS integer) AS v, enumlabel FROM pg_enum ORDER BY enumtypid", "SELECT typname, CAST(typarray AS integer) AS v FROM pg_type WHERE ((1 = 0) AND (typarray != 0))"] end
it "should add enum values to parsed schema columns" do @db.schema(:foo).must_equal [[:a, {:oid=>1, :ruby_default=>nil, :type=>:enum, :enum_values=>["a", "b", "c"]}], [:b, {:oid=>1234, :ruby_default=>nil}]] end
it "should typecast objects to string" do @db.typecast_value(:enum, :a).must_equal 'a' end
it "should add array parsers for enum values" do @db.conversion_procs[212389].call('{a,b,c}').must_equal %w'a b c' end
it "should not add array parser if there is already a conversion proc" do @db = Sequel.connect('mock://postgres') @db.extend_datasets{def quote_identifiers?; false end} @db.extend(mod) pr = proc{} @db.conversion_procs[212389] = pr @db.extension(:pg_array, :pg_enum) @db.conversion_procs[212389].must_equal pr @db.sqls.must_equal ["SELECT CAST(enumtypid AS integer) AS v, enumlabel FROM pg_enum ORDER BY enumtypid, enumsortorder", "SELECT typname, CAST(typarray AS integer) AS v FROM pg_type WHERE ((oid IN (1)) AND (typarray != 0))"] end
it "should not add array parsers for enum values if pg_array extension is not used" do @db = Sequel.connect('mock://postgres') @db.extend_datasets{def quote_identifiers?; false end} @db.extend(mod) @db.extension(:pg_enum) @db.conversion_procs[212389].must_be_nil @db.sqls.must_equal ["SELECT CAST(enumtypid AS integer) AS v, enumlabel FROM pg_enum ORDER BY enumtypid, enumsortorder"] end
it "should support #create_enum method for adding a new enum" do @db.create_enum(:foo, [:a, :b, :c]) @db.sqls.first.must_equal "CREATE TYPE foo AS ENUM ('a', 'b', 'c')" @db.create_enum(Sequel[:sch][:foo], %w'a b c') @db.sqls.first.must_equal "CREATE TYPE sch.foo AS ENUM ('a', 'b', 'c')" end
with_symbol_splitting "should support #create_enum method for adding a new enum with qualified symbol" do @db.create_enum(:sch__foo, %w'a b c') @db.sqls.first.must_equal "CREATE TYPE sch.foo AS ENUM ('a', 'b', 'c')" end
it "should support #rename_enum method for renaming an enum" do @db.rename_enum(:foo, :bar) @db.sqls.first.must_equal "ALTER TYPE foo RENAME TO bar" @db.rename_enum(Sequel[:sch][:foo], Sequel[:sch][:bar]) @db.sqls.first.must_equal "ALTER TYPE sch.foo RENAME TO sch.bar" end
it "should support #rename_enum_value method for renaming an enum value" do @db.rename_enum_value(:foo, :b, :x) @db.sqls.first.must_equal "ALTER TYPE foo RENAME VALUE 'b' TO 'x'" end
it "should support #drop_enum method for dropping an enum" do @db.drop_enum(:foo) @db.sqls.first.must_equal "DROP TYPE foo" @db.drop_enum(Sequel[:sch][:foo], :if_exists=>true) @db.sqls.first.must_equal "DROP TYPE IF EXISTS sch.foo" @db.drop_enum('foo', :cascade=>true) @db.sqls.first.must_equal "DROP TYPE foo CASCADE" end
with_symbol_splitting "should
support #drop_enum method for dropping an enum with a splittable symbol" do @db.drop_enum(:sch__foo, :if_exists=>true) @db.sqls.first.must_equal "DROP TYPE IF EXISTS sch.foo" end
it "should support #add_enum_value method for adding value to an existing enum" do @db.add_enum_value(:foo, :a) @db.sqls.first.must_equal "ALTER TYPE foo ADD VALUE 'a'" end
it "should support :before option for #add_enum_value method for adding value before an existing enum value" do @db.add_enum_value('foo', :a, :before=>:b) @db.sqls.first.must_equal "ALTER TYPE foo ADD VALUE 'a' BEFORE 'b'" end
it "should support :after option for #add_enum_value method for adding value after an existing enum value" do @db.add_enum_value(Sequel[:sch][:foo], :a, :after=>:b) @db.sqls.first.must_equal "ALTER TYPE sch.foo ADD VALUE 'a' AFTER 'b'" end
with_symbol_splitting "should support :after option for #add_enum_value method for adding value after an existing enum value with splittable symbol" do @db.add_enum_value(:sch__foo, :a, :after=>:b) @db.sqls.first.must_equal "ALTER TYPE sch.foo ADD VALUE 'a' AFTER 'b'" end
it "should support :if_not_exists option for #add_enum_value method for not adding the value if it exists" do @db.add_enum_value(:foo, :a, :if_not_exists=>true) @db.sqls.first.must_equal "ALTER TYPE foo ADD VALUE IF NOT EXISTS 'a'" end
it "should reverse a create_enum directive in a migration" do m = Sequel.migration{change{create_enum(:type_name, %w'value1 value2 value3')}} m.apply(@db, :up) @db.sqls.must_equal ["CREATE TYPE type_name AS ENUM ('value1', 'value2', 'value3')", "SELECT CAST(enumtypid AS integer) AS v, enumlabel FROM pg_enum ORDER BY enumtypid, enumsortorder", "SELECT typname, CAST(typarray AS integer) AS v FROM pg_type WHERE ((1 = 0) AND (typarray != 0))"] m.apply(@db, :down) @db.sqls.must_equal ["DROP TYPE type_name", "SELECT CAST(enumtypid AS integer) AS v, enumlabel FROM pg_enum ORDER BY enumtypid, enumsortorder", "SELECT typname, CAST(typarray AS integer) AS v FROM pg_type WHERE ((1 = 0) AND (typarray != 0))"] end
it "should reverse a rename_enum directive in a migration" do m = Sequel.migration{change{rename_enum(:old_type_name, :new_type_name)}} m.apply(@db, :up) @db.sqls.must_equal ["ALTER TYPE old_type_name RENAME TO new_type_name", "SELECT CAST(enumtypid AS integer) AS v, enumlabel FROM pg_enum ORDER BY enumtypid, enumsortorder", "SELECT typname, CAST(typarray AS integer) AS v FROM pg_type WHERE ((1 = 0) AND (typarray != 0))"] m.apply(@db, :down) @db.sqls.must_equal ["ALTER TYPE new_type_name RENAME TO old_type_name", "SELECT CAST(enumtypid AS integer) AS v, enumlabel FROM pg_enum ORDER BY enumtypid, enumsortorder", "SELECT typname, CAST(typarray AS integer) AS v FROM pg_type WHERE ((1 = 0) AND (typarray != 0))"] end end
sequel-5.63.0/spec/extensions/pg_extended_date_support_spec.rb000066400000000000000000000132121434214120600246460ustar00rootroot00000000000000
require_relative "spec_helper"
describe "pg_extended_date_support extension" do before do @db = Sequel.mock(:host=>'postgres', :fetch=>{:v=>1}) @db.extend(Module.new{def bound_variable_arg(v, _) v
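# Pass-through stub for the mock adapter: returning the value unchanged lets
# the bound-variable tests below observe exactly the formatting added by the
# extension for BC/AD dates and times.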
end}) @db.extension(:pg_extended_date_support) @db.extend_datasets{def quote_identifiers?; false end} end
after do Sequel.datetime_class = Time Sequel.default_timezone = nil end
it "should convert infinite timestamps and dates as configured" do cp = @db.conversion_procs d = lambda{|v| cp[1082].call(v)} t = lambda{|v| cp[1114].call(v)} pi = 'infinity' ni = '-infinity' today = Date.today now = Time.now d.(today.to_s).must_equal today t.(now.strftime("%Y-%m-%d %H:%M:%S.%N")).must_equal now proc{@db.typecast_value(:date, pi)}.must_raise Sequel::InvalidValue proc{@db.typecast_value(:datetime, pi)}.must_raise Sequel::InvalidValue [:nil, 'nil'].each do |v| @db.convert_infinite_timestamps = v d.(pi).must_be_nil t.(pi).must_be_nil d.(ni).must_be_nil t.(ni).must_be_nil @db.typecast_value(:date, pi).must_equal pi @db.typecast_value(:datetime, pi).must_equal pi @db.typecast_value(:date, ni).must_equal ni @db.typecast_value(:datetime, ni).must_equal ni end d.(today.to_s).must_equal today t.(now.strftime("%Y-%m-%d %H:%M:%S.%N")).must_equal now @db.typecast_value(:date, today.to_s).must_equal today @db.typecast_value(:datetime, now.strftime("%Y-%m-%d %H:%M:%S.%N")).must_equal now [:string, 'string'].each do |v| @db.convert_infinite_timestamps = v d.(pi).must_equal pi t.(pi).must_equal pi d.(ni).must_equal ni t.(ni).must_equal ni end [:date, 'date'].each do |v| @db.convert_infinite_timestamps = v d.(pi).must_equal Date::Infinity.new t.(pi).must_equal Date::Infinity.new d.(ni).must_equal(-Date::Infinity.new) t.(ni).must_equal(-Date::Infinity.new) end [:float, 'float', 't', true].each do |v| @db.convert_infinite_timestamps = v d.(pi).must_equal 1.0/0.0 t.(pi).must_equal 1.0/0.0 d.(ni).must_equal(-1.0/0.0) t.(ni).must_equal(-1.0/0.0) end ['f', false].each do |v| @db.convert_infinite_timestamps = v proc{d.(pi)}.must_raise ArgumentError, Sequel::InvalidValue proc{t.(pi)}.must_raise ArgumentError, Sequel::InvalidValue proc{d.(ni)}.must_raise ArgumentError, Sequel::InvalidValue proc{t.(ni)}.must_raise ArgumentError, Sequel::InvalidValue end end
it "should handle parsing BC dates" do @db.conversion_procs[1082].call("1092-10-20 BC").must_equal Date.new(-1091, 10, 20) end
it "should handle parsing BC timestamps as Time values" do @db.conversion_procs[1114].call("1200-02-15 14:13:20-00:00 BC").must_equal Time.at(-100000000000).utc @db.conversion_procs[1114].call("1200-02-15 14:13:20-00:00:00 BC").must_equal Time.at(-100000000000).utc Sequel.default_timezone = :utc @db.conversion_procs[1114].call("1200-02-15 14:13:20 BC").must_equal Time.at(-100000000000).utc Sequel.default_timezone = nil end
it "should handle parsing BC timestamps as DateTime values" do Sequel.datetime_class = DateTime @db.conversion_procs[1114].call("1200-02-15 14:13:20-00:00 BC").must_equal DateTime.new(-1199, 2, 15, 14, 13, 20) @db.conversion_procs[1114].call("1200-02-15 14:13:20-00:00:00 BC").must_equal DateTime.new(-1199, 2, 15, 14, 13, 20) Sequel.default_timezone = :utc @db.conversion_procs[1114].call("1200-02-15 14:13:20 BC").must_equal DateTime.new(-1199, 2, 15, 14, 13, 20) end
it "should handle parsing AD timestamps with offset seconds" do @db.conversion_procs[1114].call("1200-02-15 14:13:20-00:00:00").must_equal Time.utc(1200, 2, 15, 14, 13, 20) Sequel.datetime_class = DateTime @db.conversion_procs[1114].call("1200-02-15 14:13:20-00:00:00").must_equal DateTime.new(1200, 2, 15, 14, 13, 20) end
it "should format Date::Infinity values" do @db.literal(Date::Infinity.new).must_equal "'infinity'" @db.literal(-Date::Infinity.new).must_equal "'-infinity'" end
it "should raise errors for literalizing random Objects" do proc{@db.literal(Object.new)}.must_raise Sequel::Error end
it "should format BC and AD dates" do @db.literal(Date.new(-1091, 10, 20)).must_equal "'1092-10-20 BC'" @db.literal(Date.new(1092, 10, 20)).must_equal "'1092-10-20'" end
it "should format BC and AD datetimes" do @db.literal(DateTime.new(-1199, 2, 15, 14, 13, 20)).must_equal "'1200-02-15 14:13:20.000000000+0000 BC'" @db.literal(DateTime.new(1200, 2, 15, 14, 13, 20)).must_equal "'1200-02-15 14:13:20.000000+0000'" end
it "should format BC and AD times" do @db.literal(Time.at(-100000000000).utc).must_equal "'1200-02-15 14:13:20.000000000+0000 BC'" @db.literal(Time.at(100000000000).utc).must_equal "'5138-11-16 09:46:40.000000+0000'" end
it "should format BC and AD dates and times in bound variables" do @db.bound_variable_arg(Date.new(-1091, 10, 20), nil).must_equal "'1092-10-20 BC'" @db.bound_variable_arg(Date.new(1092, 10, 20), nil).must_equal "'1092-10-20'" @db.bound_variable_arg(DateTime.new(-1091, 10, 20), nil).must_equal "'1092-10-20 00:00:00.000000000+0000 BC'" @db.bound_variable_arg(DateTime.new(1092, 10, 20), nil).must_equal "'1092-10-20 00:00:00.000000+0000'" @db.bound_variable_arg(Time.at(-100000000000).utc, nil).must_equal "'1200-02-15 14:13:20.000000000+0000 BC'" @db.bound_variable_arg(Time.at(-100000000000).utc, nil).must_equal "'1200-02-15 14:13:20.000000000+0000 BC'" @db.bound_variable_arg(1, nil).must_equal 1 @db.bound_variable_arg(1, nil).must_equal 1 end end
sequel-5.63.0/spec/extensions/pg_extended_integer_support_spec.rb000066400000000000000000000026321434214120600253720ustar00rootroot00000000000000
require_relative "spec_helper"
describe "pg_extended_integer_support extension" do before do @db = Sequel.mock(:host=>'postgres').extension(:pg_extended_integer_support) end
it "should literalize integers out of range using single quotes by default" do @db.literal(2**63).must_equal "'9223372036854775808'" end
it "should literalize integers out of range without quotes when configured with :raw strategy" do @db.dataset.integer_outside_bigint_range_strategy(:raw).literal(2**63).must_equal "9223372036854775808" end
it "should raise for integers out of range when configured with :raise strategy" do ds = @db.dataset.integer_outside_bigint_range_strategy(:raise) proc{ds.literal(2**63)}.must_raise Sequel::InvalidValue end
it "should quote integers out of range when configured with :quote strategy" do @db.dataset.integer_outside_bigint_range_strategy(:quote).literal(2**63).must_equal "'9223372036854775808'" end
it "should respect :integer_outside_bigint_range_strategy Database option for strategy" do @db.opts[:integer_outside_bigint_range_strategy] = :raw @db.literal(2**63).must_equal "9223372036854775808"
@db.opts[:integer_outside_bigint_range_strategy] = :quote @db.literal(2**63).must_equal "'9223372036854775808'" @db.opts[:integer_outside_bigint_range_strategy] = :raise proc{@db.literal(2**63)}.must_raise Sequel::InvalidValue end end
sequel-5.63.0/spec/extensions/pg_hstore_ops_spec.rb000066400000000000000000000175601434214120600224500ustar00rootroot00000000000000
require_relative "spec_helper" Sequel.extension :pg_array, :pg_array_ops, :pg_hstore, :pg_hstore_ops
describe "Sequel::Postgres::HStoreOp" do before do @db = Sequel.connect('mock://postgres') @db.extend_datasets{def quote_identifiers?; false end} @ds = @db.dataset @h = Sequel.hstore_op(:h) end
it "#- should use the - operator" do @ds.literal(@h - :a).must_equal "(h - a)" end
it "#- should cast String argument to text when using - operator" do @ds.literal(@h - 'a').must_equal "(h - CAST('a' AS text))" end
it "#- should not cast LiteralString argument to text when using - operator" do @ds.literal(@h - Sequel.lit('a')).must_equal "(h - a)" end
it "#- should handle arrays" do @ds.literal(@h - %w'a').must_equal "(h - ARRAY['a'])" end
it "#- should handle hashes" do @ds.literal(@h - {'a'=>'b'}).must_equal "(h - '\"a\"=>\"b\"'::hstore)" end
it "#- should return an HStoreOp" do @ds.literal((@h - :a)['a']).must_equal "((h - a) -> 'a')" end
it "#[] should use the -> operator for older PostgreSQL versions" do def @db.server_version(*); 130000; end @ds.literal(@h['a']).must_equal "(h -> 'a')" end
it "#[] should use subscripts for identifiers" do @ds.literal(@h['a']).must_equal "h['a']" @ds.literal(Sequel.hstore_op(Sequel[:h])['a']).must_equal "h['a']" @ds.literal(Sequel.hstore_op(Sequel[:h][:i])['a']).must_equal "h.i['a']" @ds.literal(Sequel.hstore_op(Sequel.lit('h'))['a']).must_equal "(h -> 'a')" @ds.literal(@h[%w'a']).must_equal "(h -> ARRAY['a'])" @ds.select(Sequel.hstore_op(Sequel[:h])['a']).qualify(:t).sql.must_equal "SELECT t.h['a']" end
it "#[] should handle arrays" do @ds.literal(@h[%w'a']).must_equal "(h -> ARRAY['a'])" end
it "#[] should return a PGArrayOp if given an array" do @ds.literal(@h[%w'a'][0]).must_equal "((h -> ARRAY['a']))[0]" end
it "#[] should not return a PGArrayOp if given an array but pg_array_op is not supported" do begin module Sequel::Postgres::HStoreOp::Sequel SQL = ::Sequel::SQL end @ds.literal(@h[%w'a']).wont_be_kind_of(Sequel::Postgres::ArrayOp) ensure Sequel::Postgres::HStoreOp.send(:remove_const, :Sequel) end end
it "#[] should return a PGArrayOp if given a PGArray" do @ds.literal(@h[Sequel.pg_array(%w'a')][0]).must_equal "((h -> ARRAY['a']))[0]" end
it "#[] should return a PGArrayOp if given a PGArrayOp" do @ds.literal(@h[Sequel.pg_array_op(:a)][0]).must_equal "((h -> a))[0]" end
it "#[] should return a string expression" do @ds.literal(@h['a'] + 'b').must_equal "(h['a'] || 'b')" end
it "#concat and #merge should use the || operator" do @ds.literal(@h.concat(:h1)).must_equal "(h || h1)" @ds.literal(@h.merge(:h1)).must_equal "(h || h1)" end
it "#concat and #merge should handle hashes" do @ds.literal(@h.concat('a'=>'b')).must_equal "(h || '\"a\"=>\"b\"'::hstore)" @ds.literal(@h.merge('a'=>'b')).must_equal "(h || '\"a\"=>\"b\"'::hstore)" end
it "#concat should return an HStoreOp" do @ds.literal(@h.concat(:h1)['a']).must_equal "((h || h1) -> 'a')" end
it "#contain_all should use the ?& operator" do @ds.literal(@h.contain_all(:h1)).must_equal "(h ?& h1)" end
it "#contain_all should handle arrays" do @ds.literal(@h.contain_all(%w'h1')).must_equal "(h ?& ARRAY['h1'])" end
it "#contain_any should use the ?| operator" do @ds.literal(@h.contain_any(:h1)).must_equal "(h ?| h1)" end
it "#contain_any should handle arrays" do @ds.literal(@h.contain_any(%w'h1')).must_equal "(h ?| ARRAY['h1'])" end
it "#contains should use the @> operator" do @ds.literal(@h.contains(:h1)).must_equal "(h @> h1)" end
it "#contains should handle hashes" do @ds.literal(@h.contains('a'=>'b')).must_equal "(h @> '\"a\"=>\"b\"'::hstore)" end
it "#contained_by should use the <@ operator" do @ds.literal(@h.contained_by(:h1)).must_equal "(h <@ h1)" end
it "#contained_by should handle hashes" do @ds.literal(@h.contained_by('a'=>'b')).must_equal "(h <@ '\"a\"=>\"b\"'::hstore)" end
it "#defined should use the defined function" do @ds.literal(@h.defined('a')).must_equal "defined(h, 'a')" end
it "#delete should use the delete function" do @ds.literal(@h.delete('a')).must_equal "delete(h, 'a')" end
it "#delete should handle arrays" do @ds.literal(@h.delete(%w'a')).must_equal "delete(h, ARRAY['a'])" end
it "#delete should handle hashes" do @ds.literal(@h.delete('a'=>'b')).must_equal "delete(h, '\"a\"=>\"b\"'::hstore)" end
it "#delete should return an HStoreOp" do @ds.literal(@h.delete('a')['a']).must_equal "(delete(h, 'a') -> 'a')" end
it "#each should use the each function" do @ds.literal(@h.each).must_equal "each(h)" end
it "#has_key? and aliases should use the ? operator" do @ds.literal(@h.has_key?('a')).must_equal "(h ? 'a')" @ds.literal(@h.key?('a')).must_equal "(h ? 'a')" @ds.literal(@h.member?('a')).must_equal "(h ? 'a')" @ds.literal(@h.include?('a')).must_equal "(h ? 'a')" @ds.literal(@h.exist?('a')).must_equal "(h ?
'a')" end
it "#hstore should return the receiver" do @h.hstore.must_be_same_as(@h) end
it "#keys and #akeys should use the akeys function" do @ds.literal(@h.keys).must_equal "akeys(h)" @ds.literal(@h.akeys).must_equal "akeys(h)" end
it "#keys and #akeys should return PGArrayOps" do @ds.literal(@h.keys[0]).must_equal "(akeys(h))[0]" @ds.literal(@h.akeys[0]).must_equal "(akeys(h))[0]" end
it "#populate should use the populate_record function" do @ds.literal(@h.populate(:a)).must_equal "populate_record(a, h)" end
it "#record_set should use the #= operator" do @ds.literal(@h.record_set(:a)).must_equal "(a #= h)" end
it "#skeys should use the skeys function" do @ds.literal(@h.skeys).must_equal "skeys(h)" end
it "#slice should should use the slice function" do @ds.literal(@h.slice(:a)).must_equal "slice(h, a)" end
it "#slice should handle arrays" do @ds.literal(@h.slice(%w'a')).must_equal "slice(h, ARRAY['a'])" end
it "#slice should return an HStoreOp" do @ds.literal(@h.slice(:a)['a']).must_equal "(slice(h, a) -> 'a')" end
it "#svals should use the svals function" do @ds.literal(@h.svals).must_equal "svals(h)" end
it "#to_array should use the hstore_to_array function" do @ds.literal(@h.to_array).must_equal "hstore_to_array(h)" end
it "#to_array should return a PGArrayOp" do @ds.literal(@h.to_array[0]).must_equal "(hstore_to_array(h))[0]" end
it "#to_matrix should use the hstore_to_matrix function" do @ds.literal(@h.to_matrix).must_equal "hstore_to_matrix(h)" end
it "#to_matrix should return a PGArrayOp" do @ds.literal(@h.to_matrix[0]).must_equal "(hstore_to_matrix(h))[0]" end
it "#values and #avals should use the avals function" do @ds.literal(@h.values).must_equal "avals(h)" @ds.literal(@h.avals).must_equal "avals(h)" end
it "#values and #avals should return PGArrayOps" do @ds.literal(@h.values[0]).must_equal "(avals(h))[0]" @ds.literal(@h.avals[0]).must_equal "(avals(h))[0]" end
it "should have Sequel.hstore_op return HStoreOp instances as-is" do Sequel.hstore_op(@h).must_be_same_as(@h) end
it "should have Sequel.hstore return HStoreOp instances" do Sequel.hstore(:h).must_equal @h end
it "should be able to turn expressions into hstore ops using hstore" do @ds.literal(Sequel.qualify(:b, :a).hstore['a']).must_equal "b.a['a']" @ds.literal(Sequel.function(:a, :b).hstore['a']).must_equal "(a(b) -> 'a')" end
it "should be able to turn literal strings into hstore ops using hstore" do @ds.literal(Sequel.lit('a').hstore['a']).must_equal "(a -> 'a')" end
it "should allow transforming HStore instances into HStoreOp instances" do @ds.literal(Sequel.hstore('a'=>'b').op['a']).must_equal "('\"a\"=>\"b\"'::hstore -> 'a')" end end
sequel-5.63.0/spec/extensions/pg_hstore_spec.rb000066400000000000000000000202301434214120600215630ustar00rootroot00000000000000
require_relative "spec_helper"
describe "pg_hstore extension" do before do Sequel.extension :pg_array, :pg_hstore @db = Sequel.connect('mock://postgres') @db.extend_datasets{def quote_identifiers?; false end} @m = Sequel::Postgres @c =
@m::HStore @db.fetch = {:oid=>9999, :typname=>'hstore'} @db.extension :pg_hstore end it "should parse hstore strings correctly" do @c.parse('').to_hash.must_equal({}) @c.parse('"a"=>"b"').to_hash.must_equal('a'=>'b') @c.parse('"a"=>"b", "c"=>NULL').to_hash.must_equal('a'=>'b', 'c'=>nil) @c.parse('"a"=>"b", "c"=>"NULL"').to_hash.must_equal('a'=>'b', 'c'=>'NULL') @c.parse('"a"=>"b", "c"=>"\\\\ \\"\'=>"').to_hash.must_equal('a'=>'b', 'c'=>'\ "\'=>') end it "should cache parse results" do r = @c::Parser.new('') o = r.parse o.must_equal({}) r.parse.must_be_same_as(o) end it "should literalize HStores to strings correctly" do @db.literal(Sequel.hstore({})).must_equal '\'\'::hstore' @db.literal(Sequel.hstore("a"=>"b")).must_equal '\'"a"=>"b"\'::hstore' @db.literal(Sequel.hstore("c"=>nil)).must_equal '\'"c"=>NULL\'::hstore' @db.literal(Sequel.hstore("c"=>'NULL')).must_equal '\'"c"=>"NULL"\'::hstore' @db.literal(Sequel.hstore('c'=>'\ "\'=>')).must_equal '\'"c"=>"\\\\ \\"\'\'=>"\'::hstore' @db.literal(Sequel.hstore("a"=>"b","c"=>"d")).must_equal '\'"a"=>"b","c"=>"d"\'::hstore' end it "should register conversion proc correctly" do @db.conversion_procs[9999].call('"a"=>"b"').must_equal('a'=>'b') end it "should have Sequel.hstore method for creating HStore instances" do Sequel.hstore({}).class.must_equal(@c) end it "should have Sequel.hstore return HStores as-is" do a = Sequel.hstore({}) Sequel.hstore(a).object_id.must_equal(a.object_id) end it "should HStore#to_hash method for getting underlying hash" do Sequel.hstore({}).to_hash.must_be_kind_of(Hash) end it "should convert keys and values to strings on creation" do Sequel.hstore(1=>2).to_hash.must_equal("1"=>"2") end it "should convert keys and values to strings on assignment" do v = Sequel.hstore({}) v[1] = 2 v.to_hash.must_equal("1"=>"2") v.store(:'1', 3) v.to_hash.must_equal("1"=>"3") end it "should not convert nil values to strings on creation" do Sequel.hstore(:foo=>nil).to_hash.must_equal("foo"=>nil) end it "should not convert nil values to strings on assignment" do v = Sequel.hstore({}) v[:foo] = nil v.to_hash.must_equal("foo"=>nil) end it "should convert lookups by key to string" do Sequel.hstore('foo'=>'bar')[:foo].must_equal 'bar' Sequel.hstore('1'=>'bar')[1].must_equal 'bar' Sequel.hstore('foo'=>'bar').fetch(:foo).must_equal 'bar' Sequel.hstore('foo'=>'bar').fetch(:foo2, 2).must_equal 2 k = nil Sequel.hstore('foo2'=>'bar').fetch(:foo){|key| k = key }.must_equal 'foo' k.must_equal 'foo' Sequel.hstore('foo'=>'bar').has_key?(:foo).must_equal true Sequel.hstore('foo'=>'bar').has_key?(:bar).must_equal false Sequel.hstore('foo'=>'bar').key?(:foo).must_equal true Sequel.hstore('foo'=>'bar').key?(:bar).must_equal false Sequel.hstore('foo'=>'bar').member?(:foo).must_equal true Sequel.hstore('foo'=>'bar').member?(:bar).must_equal false Sequel.hstore('foo'=>'bar').include?(:foo).must_equal true Sequel.hstore('foo'=>'bar').include?(:bar).must_equal false Sequel.hstore('foo'=>'bar', '1'=>'2').values_at(:foo3, :foo, :foo2, 1).must_equal [nil, 'bar', nil, '2'] Sequel.hstore('foo'=>'bar').assoc(:foo).must_equal ['foo', 'bar'] Sequel.hstore('foo'=>'bar').assoc(:foo2).must_be_nil end it "should convert has_value?/value? lookups to string" do Sequel.hstore('foo'=>'bar').has_value?(:bar).must_equal true Sequel.hstore('foo'=>'bar').has_value?(:foo).must_equal false Sequel.hstore('foo'=>'bar').value?(:bar).must_equal true Sequel.hstore('foo'=>'bar').value?(:foo).must_equal false end it "should handle nil values in has_value?/value? 
lookups" do Sequel.hstore('foo'=>'').has_value?('').must_equal true Sequel.hstore('foo'=>'').has_value?(nil).must_equal false Sequel.hstore('foo'=>nil).has_value?(nil).must_equal true end it "should have underlying hash convert lookups by key to string" do Sequel.hstore('foo'=>'bar').to_hash[:foo].must_equal 'bar' Sequel.hstore('1'=>'bar').to_hash[1].must_equal 'bar' end it "should convert key lookups to string" do Sequel.hstore('foo'=>'bar').key(:bar).must_equal 'foo' Sequel.hstore('foo'=>'bar').key(:bar2).must_be_nil end it "should handle nil values in key lookups" do Sequel.hstore('foo'=>'').key('').must_equal 'foo' Sequel.hstore('foo'=>'').key(nil).must_be_nil Sequel.hstore('foo'=>nil).key(nil).must_equal 'foo' end it "should convert rassoc lookups to string" do Sequel.hstore('foo'=>'bar').rassoc(:bar).must_equal ['foo', 'bar'] Sequel.hstore('foo'=>'bar').rassoc(:bar2).must_be_nil end it "should handle nil values in rassoc lookups" do Sequel.hstore('foo'=>'').rassoc('').must_equal ['foo', ''] Sequel.hstore('foo'=>'').rassoc(nil).must_be_nil Sequel.hstore('foo'=>nil).rassoc(nil).must_equal ['foo', nil] end it "should have delete convert key to string" do v = Sequel.hstore('foo'=>'bar') v.delete(:foo).must_equal 'bar' v.to_hash.must_equal({}) end it "should handle #replace with hashes that do not use strings" do v = Sequel.hstore('foo'=>'bar') v.replace(:bar=>1) v.class.must_equal(@c) v.must_equal('bar'=>'1') v.to_hash[:bar].must_equal '1' end it "should handle #merge with hashes that do not use strings" do v = Sequel.hstore('foo'=>'bar').merge(:bar=>1) v.class.must_equal(@c) v.must_equal('foo'=>'bar', 'bar'=>'1') end it "should handle #merge/#update with hashes that do not use strings" do v = Sequel.hstore('foo'=>'bar') v.merge!(:bar=>1) v.class.must_equal(@c) v.must_equal('foo'=>'bar', 'bar'=>'1') v = Sequel.hstore('foo'=>'bar') v.update(:bar=>1) v.class.must_equal(@c) v.must_equal('foo'=>'bar', 'bar'=>'1') end it "should support using hstores as bound variables" do @db.bound_variable_arg(1, nil).must_equal 1 @db.bound_variable_arg({'1'=>'2'}, nil).must_equal '"1"=>"2"' @db.bound_variable_arg(Sequel.hstore('1'=>'2'), nil).must_equal '"1"=>"2"' @db.bound_variable_arg(Sequel.hstore('1'=>nil), nil).must_equal '"1"=>NULL' @db.bound_variable_arg(Sequel.hstore('1'=>"NULL"), nil).must_equal '"1"=>"NULL"' @db.bound_variable_arg(Sequel.hstore('1'=>"'\\ \"=>"), nil).must_equal '"1"=>"\'\\\\ \\"=>"' @db.bound_variable_arg(Sequel.hstore("a"=>"b","c"=>"d"), nil).must_equal '"a"=>"b","c"=>"d"' end it "should parse hstore type from the schema correctly" do @db.fetch = [{:name=>'id', :db_type=>'integer'}, {:name=>'i', :db_type=>'hstore'}] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:integer, :hstore] end it "should set :callable_default schema entries if default value is recognized" do @db.fetch = [{:name=>'id', :db_type=>'integer', :default=>'1'}, {:name=>'t', :db_type=>'hstore', :default=>"''::hstore"}] s = @db.schema(:items) s[0][1][:callable_default].must_be_nil v = s[1][1][:callable_default].call Sequel::Postgres::HStore.===(v).must_equal true @db.literal(v).must_equal "''::hstore" v['a'] = 'b' @db.literal(v).must_equal "'\"a\"=>\"b\"'::hstore" end it "should support typecasting for the hstore type" do h = Sequel.hstore(1=>2) @db.typecast_value(:hstore, h).object_id.must_equal(h.object_id) @db.typecast_value(:hstore, {}).class.must_equal(@c) @db.typecast_value(:hstore, {}).must_equal Sequel.hstore({}) @db.typecast_value(:hstore, {'a'=>'b'}).must_equal Sequel.hstore("a"=>"b") 
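# Only hash-like input is accepted when typecasting to hstore; the next
# assertion shows that a non-hash value such as an Array raises
# Sequel::InvalidValue.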
proc{@db.typecast_value(:hstore, [])}.must_raise(Sequel::InvalidValue) end
it "should be serializable" do v = Sequel.hstore('foo'=>'bar') dump = Marshal.dump(v) Marshal.load(dump).must_equal v end
it "should return correct results for Database#schema_type_class" do @db.schema_type_class(:hstore).must_equal Sequel::Postgres::HStore @db.schema_type_class(:integer).must_equal Integer end end
sequel-5.63.0/spec/extensions/pg_inet_ops_spec.rb000066400000000000000000000105201434214120600220740ustar00rootroot00000000000000
require_relative "spec_helper" Sequel.extension :pg_inet_ops
describe "Sequel::Postgres::InetOp" do before do db = Sequel.connect('mock://postgres') db.extend_datasets{def quote_identifiers?; false end} db.extension :pg_inet @ds = db.dataset @h = Sequel.pg_inet_op(:h) end
it "#pg_inet should return self" do @h.pg_inet.must_be_same_as(@h) end
it "Sequel.pg_inet_op should return argument if already an InetOp" do Sequel.pg_inet_op(@h).must_be_same_as(@h) end
it "#pg_inet should return a InetOp for literal strings, and expressions" do @ds.literal(Sequel.function(:b, :h).pg_inet.abbrev).must_equal "abbrev(b(h))" @ds.literal(Sequel.lit('h').pg_inet.abbrev).must_equal "abbrev(h)" end
it "should define methods for all of the PostgreSQL inet operators" do @ds.literal(@h + @h).must_equal "(h + h)" @ds.literal(@h - @h).must_equal "(h - h)" @ds.literal(@h << @h).must_equal "(h << h)" @ds.literal(@h >> @h).must_equal "(h >> h)" @ds.literal(@h & @h).must_equal "(h & h)" @ds.literal(@h | @h).must_equal "(h | h)" @ds.literal(~@h).must_equal "~h" @ds.literal(@h.contained_by(@h)).must_equal "(h << h)" @ds.literal(@h.contained_by_or_equals(@h)).must_equal "(h <<= h)" @ds.literal(@h.contains(@h)).must_equal "(h >> h)" @ds.literal(@h.contains_or_equals(@h)).must_equal "(h >>= h)" @ds.literal(@h.contains_or_contained_by(@h)).must_equal "(h && h)" end
it "should define methods for all of the PostgreSQL inet functions" do @ds.literal(@h.abbrev).must_equal "abbrev(h)" @ds.literal(@h.broadcast).must_equal "broadcast(h)" @ds.literal(@h.family).must_equal "family(h)" @ds.literal(@h.host).must_equal "host(h)" @ds.literal(@h.hostmask).must_equal "hostmask(h)" @ds.literal(@h.masklen).must_equal "masklen(h)" @ds.literal(@h.netmask).must_equal "netmask(h)" @ds.literal(@h.network).must_equal "network(h)" @ds.literal(@h.set_masklen(16)).must_equal "set_masklen(h, 16)" @ds.literal(@h.text).must_equal "text(h)" end
it "should have operators that return booleans return boolean expressions" do @ds.literal((@h << @h) & :b).must_equal "((h << h) AND b)" @ds.literal((@h >> @h) & :b).must_equal "((h >> h) AND b)" @ds.literal(@h.contained_by(@h) & :b).must_equal "((h << h) AND b)" @ds.literal(@h.contained_by_or_equals(@h) & :b).must_equal "((h <<= h) AND b)" @ds.literal(@h.contains(@h) &
:b).must_equal "((h >> h) AND b)" @ds.literal(@h.contains_or_equals(@h) & :b).must_equal "((h >>= h) AND b)" @ds.literal(@h.contains_or_contained_by(@h) & :b).must_equal "((h && h) AND b)" end
it "should have operators that return inet return InetOp" do @ds.literal((@h & @h).contains(:b)).must_equal "((h & h) >> b)" @ds.literal((@h | @h).contains(:b)).must_equal "((h | h) >> b)" @ds.literal((@h + @h).contains(:b)).must_equal "((h + h) >> b)" @ds.literal((@h - 3).contains(:b)).must_equal "((h - 3) >> b)" @ds.literal((~@h).contains(:b)).must_equal "(~h >> b)" end
it "should have - operator with inet op return numeric expression" do @ds.literal((@h - @h) / :b).must_equal "((h - h) / b)" end
it "should have function methods returning int return numeric expressions" do @ds.literal(@h.family / 2).must_equal "(family(h) / 2)" @ds.literal(@h.masklen / 2).must_equal "(masklen(h) / 2)" end
it "should have function methods returning text return string expressions" do @ds.literal(@h.abbrev + :a).must_equal "(abbrev(h) || a)" @ds.literal(@h.host + :a).must_equal "(host(h) || a)" @ds.literal(@h.text + :a).must_equal "(text(h) || a)" end
it "should have function methods returning inet return InetOp" do @ds.literal(@h.broadcast.contains(:a)).must_equal "(broadcast(h) >> a)" @ds.literal(@h.hostmask.contains(:a)).must_equal "(hostmask(h) >> a)" @ds.literal(@h.netmask.contains(:a)).must_equal "(netmask(h) >> a)" @ds.literal(@h.network.contains(:a)).must_equal "(network(h) >> a)" @ds.literal(@h.set_masklen(16).contains(:a)).must_equal "(set_masklen(h, 16) >> a)" end
it "should wrap string and IPAddr instances in a cast to inet" do @ds.literal(Sequel.pg_inet_op('1.2.3.4').contains(:a)).must_equal "(CAST('1.2.3.4' AS inet) >> a)" @ds.literal(Sequel.pg_inet_op(IPAddr.new('1.2.3.4')).contains(:a)).must_equal "(CAST('1.2.3.4/32' AS inet) >> a)" end end
sequel-5.63.0/spec/extensions/pg_inet_spec.rb000066400000000000000000000062541434214120600212240ustar00rootroot00000000000000
require_relative "spec_helper"
describe "pg_inet extension" do ipv6_broken = (IPAddr.new('::1'); false) rescue true
before do @db = Sequel.connect('mock://postgres') @db.extend_datasets{def quote_identifiers?; false end} @db.extension(:pg_array, :pg_inet) end
it "should literalize IPAddr v4 instances to strings correctly" do @db.literal(IPAddr.new('127.0.0.1')).must_equal "'127.0.0.1/32'" @db.literal(IPAddr.new('127.0.0.0/8')).must_equal "'127.0.0.0/8'" end
it "should literalize IPAddr v6 instances to strings correctly" do @db.literal(IPAddr.new('2001:4f8:3:ba::/64')).must_equal "'2001:4f8:3:ba::/64'" @db.literal(IPAddr.new('2001:4f8:3:ba:2e0:81ff:fe22:d1f1')).must_equal "'2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'" end unless ipv6_broken
it "should set up conversion procs correctly" do cp = @db.conversion_procs cp[869].call("127.0.0.1").must_equal IPAddr.new('127.0.0.1') cp[650].call("127.0.0.1").must_equal IPAddr.new('127.0.0.1') end
it "should set up conversion procs for arrays correctly" do cp =
sequel-5.63.0/spec/extensions/pg_inet_spec.rb

require_relative "spec_helper"

describe "pg_inet extension" do
  ipv6_broken = (IPAddr.new('::1'); false) rescue true

  before do
    @db = Sequel.connect('mock://postgres')
    @db.extend_datasets{def quote_identifiers?; false end}
    @db.extension(:pg_array, :pg_inet)
  end

  it "should literalize IPAddr v4 instances to strings correctly" do
    @db.literal(IPAddr.new('127.0.0.1')).must_equal "'127.0.0.1/32'"
    @db.literal(IPAddr.new('127.0.0.0/8')).must_equal "'127.0.0.0/8'"
  end

  it "should literalize IPAddr v6 instances to strings correctly" do
    @db.literal(IPAddr.new('2001:4f8:3:ba::/64')).must_equal "'2001:4f8:3:ba::/64'"
    @db.literal(IPAddr.new('2001:4f8:3:ba:2e0:81ff:fe22:d1f1')).must_equal "'2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'"
  end unless ipv6_broken

  it "should set up conversion procs correctly" do
    cp = @db.conversion_procs
    cp[869].call("127.0.0.1").must_equal IPAddr.new('127.0.0.1')
    cp[650].call("127.0.0.1").must_equal IPAddr.new('127.0.0.1')
  end

  it "should set up conversion procs for arrays correctly" do
    cp = @db.conversion_procs
    cp[1041].call("{127.0.0.1}").must_equal [IPAddr.new('127.0.0.1')]
    cp[651].call("{127.0.0.1}").must_equal [IPAddr.new('127.0.0.1')]
    cp[1040].call("{127.0.0.1}").must_equal ['127.0.0.1']
  end

  it "should not affect literalization of custom objects" do
    o = Object.new
    def o.sql_literal(ds) 'v' end
    @db.literal(o).must_equal 'v'
  end

  it "should support using IPAddr as bound variables" do
    @db.bound_variable_arg(1, nil).must_equal 1
    @db.bound_variable_arg(IPAddr.new('127.0.0.1'), nil).must_equal '127.0.0.1/32'
  end

  it "should support using IPAddr instances in array types in bound variables" do
    @db.bound_variable_arg(Sequel.pg_array([IPAddr.new('127.0.0.1')]), nil).must_equal '{"127.0.0.1/32"}'
  end

  it "should parse inet/cidr type from the schema correctly" do
    @db.fetch = [{:name=>'id', :db_type=>'integer'}, {:name=>'i', :db_type=>'inet'}, {:name=>'c', :db_type=>'cidr'}]
    @db.schema(:items).map{|e| e[1][:type]}.must_equal [:integer, :ipaddr, :ipaddr]
  end

  it "should set :ruby_default schema entries if default value is recognized" do
    @db.fetch = [{:name=>'id', :db_type=>'integer', :default=>'1'}, {:name=>'t', :db_type=>'inet', :default=>"'127.0.0.1'::inet"}]
    s = @db.schema(:items)
    s[1][1][:ruby_default].must_equal IPAddr.new('127.0.0.1')
  end

  it "should support typecasting for the ipaddr type" do
    ip = IPAddr.new('127.0.0.1')
    @db.typecast_value(:ipaddr, ip).must_be_same_as(ip)
    @db.typecast_value(:ipaddr, ip.to_s).must_equal ip
    proc{@db.typecast_value(:ipaddr, '')}.must_raise(Sequel::InvalidValue)
    proc{@db.typecast_value(:ipaddr, 1)}.must_raise(Sequel::InvalidValue)
    proc{@db.typecast_value(:ipaddr, '::ffff%'+'a'*100)}.must_raise(Sequel::InvalidValue)
    @db.check_string_typecast_bytesize = false
    if RUBY_VERSION >= '3.1'
      @db.typecast_value(:ipaddr, '::ffff%'+'a'*100).must_equal IPAddr.new('::ffff%'+'a'*100)
    end
  end

  it "should return correct results for Database#schema_type_class" do
    @db.schema_type_class(:ipaddr).must_equal IPAddr
    @db.schema_type_class(:integer).must_equal Integer
  end
end
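# Editor's note: a hedged standalone sketch (not part of the original suite)
# of the IPAddr round trip the pg_inet spec above asserts, using only APIs
# the spec itself exercises.
require 'sequel'
require 'ipaddr'

db = Sequel.connect('mock://postgres')
db.extension :pg_inet

db.literal(IPAddr.new('10.0.0.0/8'))          # => "'10.0.0.0/8'"
db.typecast_value(:ipaddr, '127.0.0.1')       # => IPAddr instance for 127.0.0.1
db.conversion_procs[869].call('127.0.0.1')    # oid 869 (inet) => IPAddr instance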
sequel-5.63.0/spec/extensions/pg_interval_spec.rb

require_relative "spec_helper"

begin
  require 'active_support'
  require 'active_support/duration'
  begin
    require 'active_support/gem_version'
  rescue LoadError
  end
  begin
    require 'active_support/version'
  rescue LoadError
  end
rescue LoadError
  warn "Skipping test of pg_interval plugin: can't load active_support/duration"
else
describe "pg_interval extension" do
  before do
    @db = Sequel.connect('mock://postgres')
    @db.extend_datasets{def quote_identifiers?; false end}
    @db.extension(:pg_array, :pg_interval)
  end

  it "should literalize ActiveSupport::Duration instances to strings correctly" do
    @db.literal(ActiveSupport::Duration.new(0, [])).must_equal "'0'::interval"
    @db.literal(ActiveSupport::Duration.new(0, [[:seconds, 0]])).must_equal "'0'::interval"
    @db.literal(ActiveSupport::Duration.new(0, [[:seconds, 10], [:minutes, 20], [:days, 3], [:months, 4], [:years, 6]])).must_equal "'6 years 4 months 3 days 20 minutes 10 seconds '::interval"
    @db.literal(ActiveSupport::Duration.new(0, [[:seconds, 10], [:minutes, 20], [:hours, 8], [:days, 3], [:weeks, 2], [:months, 4], [:years, 6]])).must_equal "'6 years 4 months 2 weeks 3 days 8 hours 20 minutes 10 seconds '::interval"
    @db.literal(ActiveSupport::Duration.new(0, [[:seconds, -10.000001], [:minutes, -20], [:days, -3], [:months, -4], [:years, -6]])).must_equal "'-6 years -4 months -3 days -20 minutes -10.000001 seconds '::interval"
  end

  it "should literalize ActiveSupport::Duration instances with repeated parts correctly" do
    if defined?(ActiveSupport::VERSION::STRING) && ActiveSupport::VERSION::STRING >= '5.1' && ActiveSupport::VERSION::STRING < '6.1'
      @db.literal(ActiveSupport::Duration.new(0, [[:seconds, 2], [:seconds, 1]])).must_equal "'1 seconds '::interval"
      @db.literal(ActiveSupport::Duration.new(0, [[:seconds, 2], [:seconds, 1], [:days, 1], [:days, 4]])).must_equal "'4 days 1 seconds '::interval"
    else
      @db.literal(ActiveSupport::Duration.new(0, [[:seconds, 2], [:seconds, 1]])).must_equal "'3 seconds '::interval"
      @db.literal(ActiveSupport::Duration.new(0, [[:seconds, 2], [:seconds, 1], [:days, 1], [:days, 4]])).must_equal "'5 days 3 seconds '::interval"
    end
  end

  it "should set up conversion procs correctly" do
    cp = @db.conversion_procs
    cp[1186].call("1 sec").must_equal ActiveSupport::Duration.new(1, [[:seconds, 1]])
  end

  it "should set up conversion procs for arrays correctly" do
    cp = @db.conversion_procs
    cp[1187].call("{1 sec}").must_equal [ActiveSupport::Duration.new(1, [[:seconds, 1]])]
  end

  it "should set up the conversion proc without an array conversion proc if pg_array is loaded after pg_interval" do
    @db = Sequel.connect('mock://postgres')
    @db.extension(:pg_interval, :pg_array)
    cp = @db.conversion_procs
    cp[1186].call("1 sec").must_equal ActiveSupport::Duration.new(1, [[:seconds, 1]])
    cp[1187].must_be_nil
  end

  it "should not affect literalization of custom objects" do
    o = Object.new
    def o.sql_literal(ds) 'v' end
    @db.literal(o).must_equal 'v'
  end

  it "should support using ActiveSupport::Duration instances as bound variables" do
    @db.bound_variable_arg(1, nil).must_equal 1
    @db.bound_variable_arg(ActiveSupport::Duration.new(0, [[:seconds, 0]]), nil).must_equal '0'
    @db.bound_variable_arg(ActiveSupport::Duration.new(0, [[:seconds, -10.000001], [:minutes, -20], [:days, -3], [:months, -4], [:years, -6]]), nil).must_equal '-6 years -4 months -3 days -20 minutes -10.000001 seconds '
  end

  it "should support using ActiveSupport::Duration instances in array types in bound variables" do
    @db.bound_variable_arg(Sequel.pg_array([ActiveSupport::Duration.new(0, [[:seconds, 0]])]), nil).must_equal '{"0"}'
    @db.bound_variable_arg(Sequel.pg_array([ActiveSupport::Duration.new(0, [[:seconds, -10.000001], [:minutes, -20], [:days, -3], [:months, -4], [:years, -6]])]), nil).must_equal '{"-6 years -4 months -3 days -20 minutes -10.000001 seconds "}'
  end

  it "should parse interval type from the schema correctly" do
    @db.fetch = [{:name=>'id', :db_type=>'integer'}, {:name=>'i', :db_type=>'interval'}]
    @db.schema(:items).map{|e| e[1][:type]}.must_equal [:integer, :interval]
  end

  it "should set :ruby_default schema entries if default value is recognized" do
    @db.fetch = [{:name=>'id', :db_type=>'integer', :default=>'1'}, {:name=>'t', :db_type=>'interval', :default=>"'3 days'::interval"}]
    s = @db.schema(:items)
    s[1][1][:ruby_default].must_equal ActiveSupport::Duration.new(3*86400, :days=>3)
  end

  it "should automatically parameterize pg_interval values" do
    @db.extension :pg_auto_parameterize
    v = ActiveSupport::Duration.new(3*86400, :days=>3)
    sql = @db[:table].insert_sql(v)
    sql.must_equal 'INSERT INTO table VALUES ($1::interval)'
    sql.args.length.must_equal 1
    sql.args[0].must_equal v
  end

  it "should automatically parameterize pg_interval values when loading pg_inet after" do
    @db.extension :pg_auto_parameterize, :pg_inet
    v = ActiveSupport::Duration.new(3*86400, :days=>3)
    sql = @db[:table].insert_sql(v)
    sql.must_equal 'INSERT INTO table VALUES ($1::interval)'
    sql.args.length.must_equal 1
    sql.args[0].must_equal v
  end

  it "should support typecasting for the interval type" do
    m = Sequel::Postgres::IntervalDatabaseMethods::Parser
    seconds = m::SECONDS_PER_YEAR + 2*m::SECONDS_PER_MONTH + 3*86400*7 + 4*86400 + 5*3600 + 6*60 + 7
    parts = {:years => 1, :months => 2, :days => 25, :seconds => 18367}
    if !defined?(ActiveSupport::VERSION::STRING) || ActiveSupport::VERSION::STRING < '5.1'
      parts = parts.to_a
    end
    d = ActiveSupport::Duration.new(seconds, parts)
    @db.typecast_value(:interval, d).object_id.must_equal d.object_id
    @db.typecast_value(:interval, "1 year 2 mons 25 days 05:06:07").is_a?(ActiveSupport::Duration).must_equal true
    @db.typecast_value(:interval, "1 year 2 mons 25 days 05:06:07").must_equal d
    @db.typecast_value(:interval, "1 year 2 mons 25 days -05:06:07").is_a?(ActiveSupport::Duration).must_equal true
    @db.typecast_value(:interval, "1 year 2 mons 25 days -05:06:07").must_equal(d-10*3600-12*60-14)
    @db.typecast_value(:interval, "1 year 2 mons 25 days 05:06:07").parts.sort_by{|k,v| k.to_s}.must_equal d.parts.sort_by{|k,v| k.to_s}
    @db.typecast_value(:interval, "1 year 2 mons 25 days 05:06:07.0").parts.sort_by{|k,v| k.to_s}.must_equal d.parts.sort_by{|k,v| k.to_s}
    @db.typecast_value(:interval, "1 year 2 mons 25 days 5 hours 6 mins").is_a?(ActiveSupport::Duration).must_equal true
    @db.typecast_value(:interval, "1 year 2 mons 25 days 5 hours 6 mins").must_equal(d-7)
    @db.typecast_value(:interval, "1 year 2 mons 25 days 5 hours 6 mins 7 secs").is_a?(ActiveSupport::Duration).must_equal true
    @db.typecast_value(:interval, "1 year 2 mons 25 days 5 hours 6 mins 7 secs").must_equal d
    @db.typecast_value(:interval, "1 year 2 mons 25 days 5 hours 6 mins 7 secs").parts.sort_by{|k,v| k.to_s}.must_equal d.parts.sort_by{|k,v| k.to_s}
    @db.typecast_value(:interval, "1 year 2 mons 25 days 5 hours 6 mins 7.0 secs").parts.sort_by{|k,v| k.to_s}.must_equal d.parts.sort_by{|k,v| k.to_s}
    d2 = ActiveSupport::Duration.new(1, [[:seconds, 1]])
    @db.typecast_value(:interval, 1).is_a?(ActiveSupport::Duration).must_equal true
    @db.typecast_value(:interval, 1).must_equal d2
    @db.typecast_value(:interval, 1).parts.sort_by{|k,v| k.to_s}.must_equal d2.parts.sort_by{|k,v| k.to_s}
    proc{@db.typecast_value(:interval, 'foo')}.must_raise(Sequel::InvalidValue)
    proc{@db.typecast_value(:interval, Object.new)}.must_raise(Sequel::InvalidValue)
    proc{@db.typecast_value(:interval, '1'*1000+' secs')}.must_raise(Sequel::InvalidValue)
    @db.check_string_typecast_bytesize = false
    @db.typecast_value(:interval, '1'*1000+' secs').must_be_kind_of ActiveSupport::Duration
  end

  it "should return correct results for Database#schema_type_class" do
    @db.schema_type_class(:interval).must_equal ActiveSupport::Duration
    @db.schema_type_class(:integer).must_equal Integer
  end
end
end
sequel-5.63.0/spec/extensions/pg_json_ops_spec.rb

require_relative "spec_helper"

Sequel.extension :pg_array, :pg_array_ops, :pg_json, :pg_json_ops

describe "Sequel::Postgres::JSONOp" do
  before do
    @db = Sequel.connect('mock://postgres')
    @db.extend_datasets{def quote_identifiers?; false end}
    @j = Sequel.pg_json_op(:j)
    @jb = Sequel.pg_jsonb_op(:j)
    @l = proc{|o| @db.literal(o)}
  end

  it "should have #[] get the element" do
    @l[@j[1]].must_equal "(j -> 1)"
    @l[@j['a']].must_equal "(j -> 'a')"
  end

  it "should have #[] use the -> operator for JSONB identifiers on older PostgreSQL versions" do
    def @db.server_version(*); 130000; end
    @l[@jb[1]].must_equal "(j -> 1)"
  end

  it "should have #[] use subscript form for JSONB identifiers on PostgreSQL 14+" do
    @l[@jb[1]].must_equal "j[1]"
    @l[@jb['a'][1]].must_equal "j['a'][1]"
    @l[Sequel.pg_jsonb_op(Sequel[:j])[1]].must_equal "j[1]"
    @l[Sequel.pg_jsonb_op(Sequel[:s][:j])['a'][1]].must_equal "s.j['a'][1]"
    @l[@jb[[1, 2]]].must_equal "(j #> ARRAY[1,2])"
    @l[Sequel.pg_jsonb_op(Sequel.lit('j'))['a'][1]].must_equal "((j -> 'a') -> 1)"
    @db.select(Sequel.pg_jsonb_op(Sequel[:h])['a']).qualify(:t).sql.must_equal "SELECT t.h['a']"
  end

  it "should have #[] accept an array" do
    @l[@j[%w'a b']].must_equal "(j #> ARRAY['a','b'])"
    @l[@j[Sequel.pg_array(%w'a b')]].must_equal "(j #> ARRAY['a','b'])"
    @l[@j[Sequel.pg_array(:a)]].must_equal "(j #> a)"
  end

  it "should have #[] return an object of the same class" do
    @l[@j[1].to_recordset].must_equal "json_to_recordset((j -> 1))"
    @l[@j[%w'a b'][2]].must_equal "((j #> ARRAY['a','b']) -> 2)"
    @l[@jb[1].to_recordset].must_equal "jsonb_to_recordset(j[1])"
    @l[@jb[%w'a b'][2]].must_equal "((j #> ARRAY['a','b']) -> 2)"
  end

  it "should have #get be an alias to #[]" do
    @l[@j.get(1)].must_equal "(j -> 1)"
    @l[@j.get(%w'a b')].must_equal "(j #> ARRAY['a','b'])"
  end

  it "should have #get_text get the element as text" do
    @l[@j.get_text(1)].must_equal "(j ->> 1)"
    @l[@j.get_text('a')].must_equal "(j ->> 'a')"
  end

  it "should have #get_text accept an array" do
    @l[@j.get_text(%w'a b')].must_equal "(j #>> ARRAY['a','b'])"
    @l[@j.get_text(Sequel.pg_array(%w'a b'))].must_equal "(j #>> ARRAY['a','b'])"
    @l[@j.get_text(Sequel.pg_array(:a))].must_equal "(j #>> a)"
  end

  it "should have #get_text return an SQL::StringExpression" do
    @l[@j.get_text(1) + 'a'].must_equal "((j ->> 1) || 'a')"
    @l[@j.get_text(%w'a b') + 'a'].must_equal "((j #>> ARRAY['a','b']) || 'a')"
  end

  it "should have #array_length use the json_array_length function" do
    @l[@j.array_length].must_equal "json_array_length(j)"
    @l[@jb.array_length].must_equal "jsonb_array_length(j)"
  end

  it "should have #array_length return a numeric expression" do
    @l[@j.array_length & 1].must_equal "(json_array_length(j) & 1)"
    @l[@jb.array_length & 1].must_equal "(jsonb_array_length(j) & 1)"
  end

  it "should have #each use the json_each function" do
    @l[@j.each].must_equal "json_each(j)"
    @l[@jb.each].must_equal "jsonb_each(j)"
  end

  it "should have #each_text use the json_each_text function" do
    @l[@j.each_text].must_equal "json_each_text(j)"
    @l[@jb.each_text].must_equal "jsonb_each_text(j)"
  end

  it "should have #extract use the json_extract_path function" do
    @l[@j.extract('a')].must_equal "json_extract_path(j, 'a')"
    @l[@j.extract('a', 'b')].must_equal "json_extract_path(j, 'a', 'b')"
    @l[@jb.extract('a')].must_equal "jsonb_extract_path(j, 'a')"
    @l[@jb.extract('a', 'b')].must_equal "jsonb_extract_path(j, 'a', 'b')"
  end

  it "should have #extract return a JSONOp" do
    @l[@j.extract('a')[1]].must_equal "(json_extract_path(j, 'a') -> 1)"
    @l[@jb.extract('a')[1]].must_equal "(jsonb_extract_path(j, 'a') -> 1)"
  end

  it "should have #extract_text use the json_extract_path_text function" do
    @l[@j.extract_text('a')].must_equal "json_extract_path_text(j, 'a')"
    @l[@j.extract_text('a', 'b')].must_equal "json_extract_path_text(j, 'a', 'b')"
    @l[@jb.extract_text('a')].must_equal "jsonb_extract_path_text(j, 'a')"
    @l[@jb.extract_text('a', 'b')].must_equal "jsonb_extract_path_text(j, 'a', 'b')"
  end

  it "should have #extract_text return an SQL::StringExpression" do
    @l[@j.extract_text('a') + 'a'].must_equal "(json_extract_path_text(j, 'a') || 'a')"
    @l[@jb.extract_text('a') + 'a'].must_equal "(jsonb_extract_path_text(j, 'a') || 'a')"
  end

  it "should have #keys use the json_object_keys function" do
    @l[@j.keys].must_equal "json_object_keys(j)"
    @l[@jb.keys].must_equal "jsonb_object_keys(j)"
  end

  it "should have #array_elements use the json_array_elements function" do
    @l[@j.array_elements].must_equal "json_array_elements(j)"
    @l[@jb.array_elements].must_equal "jsonb_array_elements(j)"
  end

  it "should have #array_elements_text use the json_array_elements_text function" do
    @l[@j.array_elements_text].must_equal "json_array_elements_text(j)"
    @l[@jb.array_elements_text].must_equal "jsonb_array_elements_text(j)"
  end

  it "should have #strip_nulls use the json_strip_nulls function" do
    @l[@j.strip_nulls].must_equal "json_strip_nulls(j)"
    @l[@jb.strip_nulls].must_equal "jsonb_strip_nulls(j)"
  end

  it "should have #typeof use the json_typeof function" do
    @l[@j.typeof].must_equal "json_typeof(j)"
    @l[@jb.typeof].must_equal "jsonb_typeof(j)"
  end

  it "should have #to_record use the json_to_record function" do
    @l[@j.to_record].must_equal "json_to_record(j)"
    @l[@jb.to_record].must_equal "jsonb_to_record(j)"
  end

  it "should have #to_recordset use the json_to_recordset function" do
    @l[@j.to_recordset].must_equal "json_to_recordset(j)"
    @l[@jb.to_recordset].must_equal "jsonb_to_recordset(j)"
  end

  it "should have #populate use the json_populate_record function" do
    @l[@j.populate(:a)].must_equal "json_populate_record(a, j)"
    @l[@jb.populate(:a)].must_equal "jsonb_populate_record(a, j)"
  end

  it "should have #populate_set use the json_populate_recordset function" do
    @l[@j.populate_set(:a)].must_equal "json_populate_recordset(a, j)"
    @l[@jb.populate_set(:a)].must_equal "jsonb_populate_recordset(a, j)"
  end

  it "#contain_all should use the ?& operator" do
    @l[@jb.contain_all(:h1)].must_equal "(j ?& h1)"
  end

  it "#contain_all should handle arrays" do
    @l[@jb.contain_all(%w'h1')].must_equal "(j ?& ARRAY['h1'])"
  end

  it "#contain_any should use the ?| operator" do
    @l[@jb.contain_any(:h1)].must_equal "(j ?| h1)"
  end

  it "#contain_any should handle arrays" do
    @l[@jb.contain_any(%w'h1')].must_equal "(j ?| ARRAY['h1'])"
  end

  it "#contains should use the @> operator" do
    @l[@jb.contains(:h1)].must_equal "(j @> h1)"
  end

  it "#contains should handle hashes" do
    @l[@jb.contains('a'=>'b')].must_equal "(j @> '{\"a\":\"b\"}'::jsonb)"
  end

  it "#contains should handle arrays" do
    @l[@jb.contains([1, 2])].must_equal "(j @> '[1,2]'::jsonb)"
  end

  it "#contained_by should use the <@ operator" do
    @l[@jb.contained_by(:h1)].must_equal "(j <@ h1)"
  end

  it "#contained_by should handle hashes" do
    @l[@jb.contained_by('a'=>'b')].must_equal "(j <@ '{\"a\":\"b\"}'::jsonb)"
  end

  it "#contained_by should handle arrays" do
    @l[@jb.contained_by([1, 2])].must_equal "(j <@ '[1,2]'::jsonb)"
  end

  it "#concat should use the || operator" do
    @l[@jb.concat(:h1)].must_equal "(j || h1)"
  end

  it "#concat should handle hashes" do
    @l[@jb.concat('a'=>'b')].must_equal "(j || '{\"a\":\"b\"}'::jsonb)"
  end

  it "#concat should handle arrays" do
    @l[@jb.concat([1, 2])].must_equal "(j || '[1,2]'::jsonb)"
  end

  it "#insert should use the jsonb_insert function" do
    @l[@jb.insert(:a, :h)].must_equal "jsonb_insert(j, a, h, false)"
    @l[@jb.insert(:a, :h, true)].must_equal "jsonb_insert(j, a, h, true)"
  end

  it "#insert should handle hashes" do
    @l[@jb.insert(:a, 'a'=>'b')].must_equal "jsonb_insert(j, a, '{\"a\":\"b\"}'::jsonb, false)"
  end

  it "#insert should handle arrays" do
    @l[@jb.insert(%w'a b', [1, 2])].must_equal "jsonb_insert(j, ARRAY['a','b'], '[1,2]'::jsonb, false)"
  end

  it "#set should use the jsonb_set function" do
    @l[@jb.set(:a, :h)].must_equal "jsonb_set(j, a, h, true)"
    @l[@jb.set(:a, :h, false)].must_equal "jsonb_set(j, a, h, false)"
  end

  it "#set should handle hashes" do
    @l[@jb.set(:a, 'a'=>'b')].must_equal "jsonb_set(j, a, '{\"a\":\"b\"}'::jsonb, true)"
  end

  it "#set should handle arrays" do
    @l[@jb.set(%w'a b', [1, 2])].must_equal "jsonb_set(j, ARRAY['a','b'], '[1,2]'::jsonb, true)"
  end

  it "#set_lax should use the jsonb_set_lax function" do
    @l[@jb.set_lax(:a, :h)].must_equal "jsonb_set_lax(j, a, h, true, 'use_json_null')"
    @l[@jb.set_lax(:a, :h, false)].must_equal "jsonb_set_lax(j, a, h, false, 'use_json_null')"
    @l[@jb.set_lax(:a, :h, false, 'delete_key')].must_equal "jsonb_set_lax(j, a, h, false, 'delete_key')"
  end

  it "#set_lax should handle hashes" do
    @l[@jb.set_lax(:a, 'a'=>'b')].must_equal "jsonb_set_lax(j, a, '{\"a\":\"b\"}'::jsonb, true, 'use_json_null')"
  end

  it "#set_lax should handle arrays" do
    @l[@jb.set_lax(%w'a b', [1, 2])].must_equal "jsonb_set_lax(j, ARRAY['a','b'], '[1,2]'::jsonb, true, 'use_json_null')"
  end

  it "#pretty should use the jsonb_pretty function" do
    @l[@jb.pretty].must_equal "jsonb_pretty(j)"
  end

  it "#- should use the - operator" do
    @l[@jb - 1].must_equal "(j - 1)"
  end

  it "#delete_path should use the #- operator" do
    @l[@jb.delete_path(:a)].must_equal "(j #- a)"
  end

  it "#delete_path should handle arrays" do
    @l[@jb.delete_path(['a'])].must_equal "(j #- ARRAY['a'])"
  end

  it "#has_key? and aliases should use the ? operator" do
    @l[@jb.has_key?('a')].must_equal "(j ? 'a')"
    @l[@jb.include?('a')].must_equal "(j ? 'a')"
  end

  it "#pg_json should return self" do
    @j.pg_json.must_be_same_as(@j)
    @jb.pg_jsonb.must_be_same_as(@jb)
  end

  it "Sequel.pg_json_op should return arg for JSONOp" do
    Sequel.pg_json_op(@j).must_be_same_as(@j)
    Sequel.pg_jsonb_op(@jb).must_be_same_as(@jb)
  end

  it "should be able to turn expressions into json ops using pg_json" do
    @db.literal(Sequel.qualify(:b, :a).pg_json[1]).must_equal "(b.a -> 1)"
    @db.literal(Sequel.function(:a, :b).pg_json[1]).must_equal "(a(b) -> 1)"
    @db.literal(Sequel.qualify(:b, :a).pg_jsonb[1]).must_equal "b.a[1]"
    @db.literal(Sequel.function(:a, :b).pg_jsonb[1]).must_equal "(a(b) -> 1)"
  end

  it "should be able to turn literal strings into json ops using pg_json" do
    @db.literal(Sequel.lit('a').pg_json[1]).must_equal "(a -> 1)"
    @db.literal(Sequel.lit('a').pg_jsonb[1]).must_equal "(a -> 1)"
  end

  it "should be able to turn symbols into json ops using Sequel.pg_json_op" do
    @db.literal(Sequel.pg_json_op(:a)[1]).must_equal "(a -> 1)"
    @db.literal(Sequel.pg_jsonb_op(:a)[1]).must_equal "a[1]"
  end

  it "should be able to turn symbols into json ops using Sequel.pg_json" do
    @db.literal(Sequel.pg_json(:a)[1]).must_equal "(a -> 1)"
    @db.literal(Sequel.pg_jsonb(:a)[1]).must_equal "a[1]"
    @db.literal(Sequel.pg_jsonb(:a).contains('a'=>1)).must_equal "(a @> '{\"a\":1}'::jsonb)"
  end

  it "should allow transforming JSONArray instances into JSONOp instances" do
    @db.literal(Sequel.pg_json([1,2]).op[1]).must_equal "('[1,2]'::json -> 1)"
  end

  it "should allow transforming JSONHash instances into JSONOp instances" do
    @db.literal(Sequel.pg_json('a'=>1).op['a']).must_equal "('{\"a\":1}'::json -> 'a')"
  end

  it "should allow transforming JSONBArray instances into JSONBOp instances" do
    @db.literal(Sequel.pg_jsonb([1,2]).op[1]).must_equal "('[1,2]'::jsonb -> 1)"
  end

  it "should allow transforming JSONBHash instances into JSONBOp instances" do
    @db.literal(Sequel.pg_jsonb('a'=>1).op['a']).must_equal "('{\"a\":1}'::jsonb -> 'a')"
  end

  it "#path_exists should use the @? operator" do
    @l[@jb.path_exists('$')].must_equal "(j @? '$')"
  end

  it "#path_exists result should be a boolean expression" do
    @jb.path_exists('$').must_be_kind_of Sequel::SQL::BooleanExpression
  end

  it "#path_match should use the @@ operator" do
    @l[@jb.path_match('$')].must_equal "(j @@ '$')"
  end

  it "#path_match result should be a boolean expression" do
    @jb.path_match('$').must_be_kind_of Sequel::SQL::BooleanExpression
  end

  it "#path_exists! should use the jsonb_path_exists function" do
    @l[@jb.path_exists!('$')].must_equal "jsonb_path_exists(j, '$')"
    @l[@jb.path_exists!('$', '{"x":2}')].must_equal "jsonb_path_exists(j, '$', '{\"x\":2}')"
    @l[@jb.path_exists!('$', x: 2)].must_equal "jsonb_path_exists(j, '$', '{\"x\":2}')"
    @l[@jb.path_exists!('$', {x: 2}, true)].must_equal "jsonb_path_exists(j, '$', '{\"x\":2}', true)"
  end

  it "#path_exists! result should be a boolean expression" do
    @jb.path_exists!('$').must_be_kind_of Sequel::SQL::BooleanExpression
  end

  it "#path_match! should use the jsonb_path_match function" do
    @l[@jb.path_match!('$')].must_equal "jsonb_path_match(j, '$')"
    @l[@jb.path_match!('$', '{"x":2}')].must_equal "jsonb_path_match(j, '$', '{\"x\":2}')"
    @l[@jb.path_match!('$', x: 2)].must_equal "jsonb_path_match(j, '$', '{\"x\":2}')"
    @l[@jb.path_match!('$', {x: 2}, true)].must_equal "jsonb_path_match(j, '$', '{\"x\":2}', true)"
  end

  it "#path_match! result should be a boolean expression" do
    @jb.path_match!('$').must_be_kind_of Sequel::SQL::BooleanExpression
  end

  it "#path_query should use the jsonb_path_query function" do
    @l[@jb.path_query('$')].must_equal "jsonb_path_query(j, '$')"
    @l[@jb.path_query('$', '{"x":2}')].must_equal "jsonb_path_query(j, '$', '{\"x\":2}')"
    @l[@jb.path_query('$', x: 2)].must_equal "jsonb_path_query(j, '$', '{\"x\":2}')"
    @l[@jb.path_query('$', {x: 2}, true)].must_equal "jsonb_path_query(j, '$', '{\"x\":2}', true)"
  end

  it "#path_query_array should use the jsonb_path_query_array function" do
    @l[@jb.path_query_array('$')].must_equal "jsonb_path_query_array(j, '$')"
    @l[@jb.path_query_array('$', '{"x":2}')].must_equal "jsonb_path_query_array(j, '$', '{\"x\":2}')"
    @l[@jb.path_query_array('$', x: 2)].must_equal "jsonb_path_query_array(j, '$', '{\"x\":2}')"
    @l[@jb.path_query_array('$', {x: 2}, true)].must_equal "jsonb_path_query_array(j, '$', '{\"x\":2}', true)"
  end

  it "#path_query_array result should be a JSONBOp" do
    @l[@jb.path_query_array('$').path_query_array('$')].must_equal "jsonb_path_query_array(jsonb_path_query_array(j, '$'), '$')"
  end

  it "#path_query_first should use the jsonb_path_query_first function" do
    @l[@jb.path_query_first('$')].must_equal "jsonb_path_query_first(j, '$')"
    @l[@jb.path_query_first('$', '{"x":2}')].must_equal "jsonb_path_query_first(j, '$', '{\"x\":2}')"
    @l[@jb.path_query_first('$', x: 2)].must_equal "jsonb_path_query_first(j, '$', '{\"x\":2}')"
    @l[@jb.path_query_first('$', {x: 2}, true)].must_equal "jsonb_path_query_first(j, '$', '{\"x\":2}', true)"
  end

  it "#path_query_first result should be a JSONBOp" do
    @l[@jb.path_query_first('$').path_query_first('$')].must_equal "jsonb_path_query_first(jsonb_path_query_first(j, '$'), '$')"
  end

  it "#path_exists_tz! should use the jsonb_path_exists_tz function" do
    @l[@jb.path_exists_tz!('$')].must_equal "jsonb_path_exists_tz(j, '$')"
    @l[@jb.path_exists_tz!('$', '{"x":2}')].must_equal "jsonb_path_exists_tz(j, '$', '{\"x\":2}')"
    @l[@jb.path_exists_tz!('$', x: 2)].must_equal "jsonb_path_exists_tz(j, '$', '{\"x\":2}')"
    @l[@jb.path_exists_tz!('$', {x: 2}, true)].must_equal "jsonb_path_exists_tz(j, '$', '{\"x\":2}', true)"
  end

  it "#path_exists_tz! result should be a boolean expression" do
    @jb.path_exists_tz!('$').must_be_kind_of Sequel::SQL::BooleanExpression
  end

  it "#path_match_tz! should use the jsonb_path_match_tz function" do
    @l[@jb.path_match_tz!('$')].must_equal "jsonb_path_match_tz(j, '$')"
    @l[@jb.path_match_tz!('$', '{"x":2}')].must_equal "jsonb_path_match_tz(j, '$', '{\"x\":2}')"
    @l[@jb.path_match_tz!('$', x: 2)].must_equal "jsonb_path_match_tz(j, '$', '{\"x\":2}')"
    @l[@jb.path_match_tz!('$', {x: 2}, true)].must_equal "jsonb_path_match_tz(j, '$', '{\"x\":2}', true)"
  end

  it "#path_match_tz! result should be a boolean expression" do
    @jb.path_match_tz!('$').must_be_kind_of Sequel::SQL::BooleanExpression
  end

  it "#path_query_tz should use the jsonb_path_query_tz function" do
    @l[@jb.path_query_tz('$')].must_equal "jsonb_path_query_tz(j, '$')"
    @l[@jb.path_query_tz('$', '{"x":2}')].must_equal "jsonb_path_query_tz(j, '$', '{\"x\":2}')"
    @l[@jb.path_query_tz('$', x: 2)].must_equal "jsonb_path_query_tz(j, '$', '{\"x\":2}')"
    @l[@jb.path_query_tz('$', {x: 2}, true)].must_equal "jsonb_path_query_tz(j, '$', '{\"x\":2}', true)"
  end

  it "#path_query_array_tz should use the jsonb_path_query_array_tz function" do
    @l[@jb.path_query_array_tz('$')].must_equal "jsonb_path_query_array_tz(j, '$')"
    @l[@jb.path_query_array_tz('$', '{"x":2}')].must_equal "jsonb_path_query_array_tz(j, '$', '{\"x\":2}')"
    @l[@jb.path_query_array_tz('$', x: 2)].must_equal "jsonb_path_query_array_tz(j, '$', '{\"x\":2}')"
    @l[@jb.path_query_array_tz('$', {x: 2}, true)].must_equal "jsonb_path_query_array_tz(j, '$', '{\"x\":2}', true)"
  end

  it "#path_query_array_tz result should be a JSONBOp" do
    @l[@jb.path_query_array_tz('$').path_query_array_tz('$')].must_equal "jsonb_path_query_array_tz(jsonb_path_query_array_tz(j, '$'), '$')"
  end

  it "#path_query_first_tz should use the jsonb_path_query_first_tz function" do
    @l[@jb.path_query_first_tz('$')].must_equal "jsonb_path_query_first_tz(j, '$')"
    @l[@jb.path_query_first_tz('$', '{"x":2}')].must_equal "jsonb_path_query_first_tz(j, '$', '{\"x\":2}')"
    @l[@jb.path_query_first_tz('$', x: 2)].must_equal "jsonb_path_query_first_tz(j, '$', '{\"x\":2}')"
    @l[@jb.path_query_first_tz('$', {x: 2}, true)].must_equal "jsonb_path_query_first_tz(j, '$', '{\"x\":2}', true)"
  end

  it "#path_query_first_tz result should be a JSONBOp" do
    @l[@jb.path_query_first_tz('$').path_query_first_tz('$')].must_equal "jsonb_path_query_first_tz(jsonb_path_query_first_tz(j, '$'), '$')"
  end
end
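# Editor's note: a minimal sketch (not part of the original suite) of the
# jsonb operator DSL exercised above, with identifier quoting disabled the
# same way the spec does it so the literals match those asserted there.
# :metadata is a hypothetical column name.
require 'sequel'
Sequel.extension :pg_json_ops

db = Sequel.connect('mock://postgres')
db.extend_datasets{def quote_identifiers?; false end}
db.extension :pg_json

j = Sequel.pg_jsonb_op(:metadata)
db.literal(j.contains('a' => 'b'))  # => "(metadata @> '{\"a\":\"b\"}'::jsonb)"
db.literal(j['a'])                  # => "metadata['a']" (subscript form on PostgreSQL 14+)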
sequel-5.63.0/spec/extensions/pg_json_spec.rb

require_relative "spec_helper"

Sequel.extension :pg_array, :pg_json

describe "pg_json extension" do
  integer_class = RUBY_VERSION >= '2.4' ? Integer : Fixnum

  before(:all) do
    m = Sequel::Postgres
    @m = m::JSONDatabaseMethods
    @hc = m::JSONHash
    @ac = m::JSONArray
    @bhc = m::JSONBHash
    @bac = m::JSONBArray
  end

  before do
    @db = Sequel.connect('mock://postgres')
    @db.extend_datasets{def quote_identifiers?; false end}
    @db.extension(:pg_array, :pg_json)
  end

  it "should only add array conversion procs if pg_array is loaded" do
    @db = Sequel.connect('mock://postgres')
    @db.extension(:pg_json, :pg_array)
    cp = @db.conversion_procs
    cp[114].call("{}").must_equal @hc.new({})
    cp[3802].call("{}").must_equal @bhc.new({})
    cp[199].must_be_nil
    cp[3807].must_be_nil
  end

  it "should set up conversion procs correctly" do
    cp = @db.conversion_procs
    cp[114].call("{}").must_equal @hc.new({})
    cp[3802].call("{}").must_equal @bhc.new({})
  end

  it "should set up conversion procs for arrays correctly" do
    cp = @db.conversion_procs
    cp[199].call("{[]}").must_equal [@ac.new([])]
    cp[3807].call("{[]}").must_equal [@bac.new([])]
  end

  deprecated "should parse json strings correctly" do
    @m.parse_json('[]').class.must_equal(@ac)
    @m.parse_json('[]').to_a.must_equal []
    @m.parse_json('[1]').to_a.must_equal [1]
    @m.parse_json('[1, 2]').to_a.must_equal [1, 2]
    @m.parse_json('[1, [2], {"a": "b"}]').to_a.must_equal [1, [2], {'a'=>'b'}]
    @m.parse_json('{}').class.must_equal(@hc)
    @m.parse_json('{}').to_hash.must_equal({})
    @m.parse_json('{"a": "b"}').to_hash.must_equal('a'=>'b')
    @m.parse_json('{"a": "b", "c": [1, 2, 3]}').to_hash.must_equal('a'=>'b', 'c'=>[1, 2, 3])
    @m.parse_json('{"a": "b", "c": {"d": "e"}}').to_hash.must_equal('a'=>'b', 'c'=>{'d'=>'e'})
    proc{@m.parse_json("a")}.must_raise Sequel::InvalidValue

    begin
      Sequel.instance_eval do
        alias pj parse_json
        def parse_json(v)
          {'1'=>1, "'a'"=>'a', 'true'=>true, 'false'=>false, 'null'=>nil, 'o'=>Object.new, '[one]'=>[1]}.fetch(v){pj(v)}
        end
        alias parse_json parse_json
      end
      proc{@m.parse_json('o')}.must_raise(Sequel::InvalidValue)
    ensure
      Sequel.instance_eval do
        alias parse_json pj
      end
    end
  end

  deprecated "should parse json and non-json plain strings, integers, and floats correctly in db_parse_json" do
    @m.db_parse_json('{"a": "b", "c": {"d": "e"}}').to_hash.must_equal('a'=>'b', 'c'=>{'d'=>'e'})
    @m.db_parse_json('[1, [2], {"a": "b"}]').to_a.must_equal [1, [2], {'a'=>'b'}]
    @m.db_parse_json('1').must_equal 1
    @m.db_parse_json('"b"').must_equal 'b'
    @m.db_parse_json('1.1').must_equal 1.1
    proc{@m.db_parse_json("a")}.must_raise Sequel::InvalidValue
  end

  deprecated "should parse jsonb and non-jsonb plain strings, integers, and floats correctly in db_parse_jsonb" do
    @m.db_parse_jsonb('{"a": "b", "c": {"d": "e"}}').to_hash.must_equal('a'=>'b', 'c'=>{'d'=>'e'})
    @m.db_parse_jsonb('[1, [2], {"a": "b"}]').to_a.must_equal [1, [2], {'a'=>'b'}]
    @m.db_parse_jsonb('1').must_equal 1
    @m.db_parse_jsonb('"b"').must_equal 'b'
    @m.db_parse_jsonb('1.1').must_equal 1.1
    proc{@m.db_parse_jsonb("a")}.must_raise Sequel::InvalidValue
  end

  it "should parse json and non-json plain strings, integers, and floats correctly in conversion_proc" do
    cp = @db.conversion_procs[114]
    cp.call('{"a": "b", "c": {"d": "e"}}').to_hash.must_equal('a'=>'b', 'c'=>{'d'=>'e'})
    cp.call('[1, [2], {"a": "b"}]').to_a.must_equal [1, [2], {'a'=>'b'}]
    cp.call('1').must_equal 1
    cp.call('"b"').must_equal 'b'
    cp.call('1.1').must_equal 1.1
  end

  it "should parse jsonb and non-jsonb plain strings, integers, and floats correctly in conversion_proc" do
    cp = @db.conversion_procs[3802]
    cp.call('{"a": "b", "c": {"d": "e"}}').to_hash.must_equal('a'=>'b', 'c'=>{'d'=>'e'})
    cp.call('[1, [2], {"a": "b"}]').to_a.must_equal [1, [2], {'a'=>'b'}]
    cp.call('1').must_equal 1
    cp.call('"b"').must_equal 'b'
    cp.call('1.1').must_equal 1.1
  end

  it "should raise an error when attempting to parse invalid json" do
    [114, 3802].each do |oid|
      cp = @db.conversion_procs[oid]
      proc{cp.call('a')}.must_raise(Sequel::InvalidValue)

      begin
        Sequel.singleton_class.class_eval do
          alias pj parse_json
          def parse_json(v)
            {'1'=>1, "'a'"=>'a', 'true'=>true, 'false'=>false, 'null'=>nil, 'o'=>Object.new, '[one]'=>[1]}.fetch(v){pj(v)}
          end
          alias parse_json parse_json
        end
        cp.call('1').must_equal 1
        cp.call("'a'").must_equal 'a'
        cp.call('true').must_equal true
        cp.call('false').must_equal false
        cp.call('null').must_be_nil
        proc{cp.call('o')}.must_raise(Sequel::InvalidValue)
        cp.call('one').must_equal 1
      ensure
        Sequel.singleton_class.class_eval do
          alias parse_json pj
          remove_method(:pj)
        end
      end
    end
  end

  it "should handle case where JSON parsing in conversion procs raises Sequel::InvalidValue for non-String" do
    begin
      Sequel.singleton_class.class_eval do
        alias parse_json_old parse_json
        define_method(:parse_json) do |json|
          raise Sequel::InvalidValue
        end
        alias parse_json parse_json
      end
      cp = @db.conversion_procs
      proc{cp[114].call(1)}.must_raise Sequel::InvalidValue
      proc{cp[3802].call(1)}.must_raise Sequel::InvalidValue
    ensure
      Sequel.singleton_class.class_eval do
        alias parse_json parse_json_old
        remove_method(:parse_json_old)
      end
    end
  end

  deprecated "should handle case where deprecated JSON parsing methods raise Sequel::InvalidValue for non-String" do
    begin
      Sequel.singleton_class.class_eval do
        alias parse_json_old parse_json
        define_method(:parse_json) do |json|
          raise Sequel::InvalidValue
        end
        alias parse_json parse_json
      end
      proc{@m.db_parse_json(1)}.must_raise Sequel::InvalidValue
      proc{@m.db_parse_jsonb(1)}.must_raise Sequel::InvalidValue
    ensure
      Sequel.singleton_class.class_eval do
        alias parse_json parse_json_old
        remove_method(:parse_json_old)
      end
    end
  end

  it "should literalize JSONHash and JSONArray to strings correctly" do
    @db.literal(Sequel.pg_json([])).must_equal "'[]'::json"
    @db.literal(Sequel.pg_json([1, [2], {'a'=>'b'}])).must_equal "'[1,[2],{\"a\":\"b\"}]'::json"
    @db.literal(Sequel.pg_json({})).must_equal "'{}'::json"
    @db.literal(Sequel.pg_json('a'=>'b')).must_equal "'{\"a\":\"b\"}'::json"
  end

  it "should literalize JSONBHash and JSONBArray to strings correctly" do
    @db.literal(Sequel.pg_jsonb([])).must_equal "'[]'::jsonb"
    @db.literal(Sequel.pg_jsonb([1, [2], {'a'=>'b'}])).must_equal "'[1,[2],{\"a\":\"b\"}]'::jsonb"
    @db.literal(Sequel.pg_jsonb({})).must_equal "'{}'::jsonb"
    @db.literal(Sequel.pg_jsonb('a'=>'b')).must_equal "'{\"a\":\"b\"}'::jsonb"
  end

  it "should have Sequel.pg_json return JSONHash and JSONArray as is" do
    a = Sequel.pg_json({})
    Sequel.pg_json(a).object_id.must_equal(a.object_id)
    a = Sequel.pg_json([])
    Sequel.pg_json(a).object_id.must_equal(a.object_id)
  end

  it "should have Sequel.pg_json convert jsonb values" do
    a = {}
    v = Sequel.pg_json(Sequel.pg_jsonb(a))
    v.to_hash.must_be_same_as(a)
    v.class.must_equal(@hc)

    a = []
    v = Sequel.pg_json(Sequel.pg_jsonb(a))
    v.to_a.must_be_same_as(a)
    v.class.must_equal(@ac)
  end

  it "should have Sequel.pg_jsonb return JSONBHash and JSONBArray as is" do
    a = Sequel.pg_jsonb({})
    Sequel.pg_jsonb(a).object_id.must_equal(a.object_id)
    a = Sequel.pg_jsonb([])
    Sequel.pg_jsonb(a).object_id.must_equal(a.object_id)
  end

  it "should have Sequel.pg_jsonb convert json values" do
    a = {}
    v = Sequel.pg_jsonb(Sequel.pg_json(a))
    v.to_hash.must_be_same_as(a)
    v.class.must_equal(@bhc)

    a = []
    v = Sequel.pg_jsonb(Sequel.pg_json(a))
    v.to_a.must_be_same_as(a)
    v.class.must_equal(@bac)
  end

  it "should have JSONHashBase#to_hash method for getting underlying hash" do
    Sequel.pg_json({}).to_hash.must_be_kind_of(Hash)
    Sequel.pg_jsonb({}).to_hash.must_be_kind_of(Hash)
  end

  it "should allow aliasing json objects" do
    @db.literal(Sequel.pg_json({}).as(:a)).must_equal "'{}'::json AS a"
    @db.literal(Sequel.pg_json([]).as(:a)).must_equal "'[]'::json AS a"
    @db.literal(Sequel.pg_jsonb({}).as(:a)).must_equal "'{}'::jsonb AS a"
    @db.literal(Sequel.pg_jsonb([]).as(:a)).must_equal "'[]'::jsonb AS a"
  end

  it "should allow casting json objects" do
    @db.literal(Sequel.pg_json({}).cast(String)).must_equal "CAST('{}'::json AS text)"
    @db.literal(Sequel.pg_json([]).cast(String)).must_equal "CAST('[]'::json AS text)"
    @db.literal(Sequel.pg_jsonb({}).cast(String)).must_equal "CAST('{}'::jsonb AS text)"
    @db.literal(Sequel.pg_jsonb([]).cast(String)).must_equal "CAST('[]'::jsonb AS text)"
  end

  it "should have JSONArrayBase#to_a method for getting underlying array" do
    Sequel.pg_json([]).to_a.must_be_kind_of(Array)
    Sequel.pg_jsonb([]).to_a.must_be_kind_of(Array)
  end

  it "should support using JSONHashBase and JSONArrayBase as bound variables" do
    @db.bound_variable_arg(1, nil).must_equal 1
    @db.bound_variable_arg(Sequel.pg_json([1]), nil).must_equal '[1]'
    @db.bound_variable_arg(Sequel.pg_json('a'=>'b'), nil).must_equal '{"a":"b"}'
    @db.bound_variable_arg(Sequel.pg_jsonb([1]), nil).must_equal '[1]'
    @db.bound_variable_arg(Sequel.pg_jsonb('a'=>'b'), nil).must_equal '{"a":"b"}'
  end

  it "should support using json[] and jsonb[] types in bound variables" do
    @db.bound_variable_arg(Sequel.pg_array([Sequel.pg_json([{"a"=>1}]), Sequel.pg_json("b"=>[1, 2])]), nil).must_equal '{"[{\\"a\\":1}]","{\\"b\\":[1,2]}"}'
    @db.bound_variable_arg(Sequel.pg_array([Sequel.pg_jsonb([{"a"=>1}]), Sequel.pg_jsonb("b"=>[1, 2])]), nil).must_equal '{"[{\\"a\\":1}]","{\\"b\\":[1,2]}"}'
  end

  it "should support using wrapped JSON and JSONB primitives as bound variables" do
    @db.bound_variable_arg(Sequel.pg_json_wrap(1), nil).must_equal '1'
    @db.bound_variable_arg(Sequel.pg_json_wrap(2.5), nil).must_equal '2.5'
    @db.bound_variable_arg(Sequel.pg_json_wrap('a'), nil).must_equal '"a"'
    @db.bound_variable_arg(Sequel.pg_json_wrap(true), nil).must_equal 'true'
    @db.bound_variable_arg(Sequel.pg_json_wrap(false), nil).must_equal 'false'
    @db.bound_variable_arg(Sequel.pg_json_wrap(nil), nil).must_equal 'null'
  end

  it "should support using json[] and jsonb[] types in bound variables with ruby primitives" do
    @db.bound_variable_arg(Sequel.pg_array([1, 2.5, 'a', true, false, nil].map{|v| Sequel.pg_json_wrap(v)}), nil).must_equal '{"1","2.5","\"a\"","true","false","null"}'
  end

  it "Sequel.pg_json_wrap should wrap Ruby primitives in JSON wrappers" do
    Sequel.pg_json_wrap({}).class.must_equal Sequel::Postgres::JSONHash
    Sequel.pg_json_wrap({}).must_equal({})
    Sequel.pg_json_wrap([]).class.must_equal Sequel::Postgres::JSONArray
    Sequel.pg_json_wrap([]).must_equal []
    Sequel.pg_json_wrap('a').class.must_equal Sequel::Postgres::JSONString
    Sequel.pg_json_wrap('a').must_equal 'a'
    Sequel.pg_json_wrap(1).class.must_equal Sequel::Postgres::JSONInteger
    Sequel.pg_json_wrap(1).must_equal 1
    Sequel.pg_json_wrap(2.5).class.must_equal Sequel::Postgres::JSONFloat
    Sequel.pg_json_wrap(2.5).must_equal 2.5
    Sequel.pg_json_wrap(true).class.must_equal Sequel::Postgres::JSONTrue
    Sequel.pg_json_wrap(true).must_equal true
    Sequel.pg_json_wrap(false).class.must_equal Sequel::Postgres::JSONFalse
    Sequel.pg_json_wrap(false).must_equal false
    Sequel.pg_json_wrap(nil).class.must_equal Sequel::Postgres::JSONNull
    Sequel.pg_json_wrap(nil).must_be_nil

    c = Class.new(Hash).new
    Sequel.pg_json_wrap(c).class.must_equal Sequel::Postgres::JSONHash
    Sequel.pg_json_wrap(c).must_equal(c)
    c = Class.new(Array).new
    Sequel.pg_json_wrap(c).class.must_equal Sequel::Postgres::JSONArray
    Sequel.pg_json_wrap(c).must_equal c
    c = Class.new(String).new('a')
    Sequel.pg_json_wrap(c).class.must_equal Sequel::Postgres::JSONString
    Sequel.pg_json_wrap(c).must_equal c
  end

  it "Sequel.pg_json_wrap should fail when passed an unsupported object" do
    proc{Sequel.pg_json_wrap(Object.new)}.must_raise Sequel::Error
  end

  it "Sequel.pg_jsonb_wrap should wrap Ruby primitives in JSONB wrappers" do
    Sequel.pg_jsonb_wrap({}).class.must_equal Sequel::Postgres::JSONBHash
    Sequel.pg_jsonb_wrap({}).must_equal({})
    Sequel.pg_jsonb_wrap([]).class.must_equal Sequel::Postgres::JSONBArray
    Sequel.pg_jsonb_wrap([]).must_equal []
    Sequel.pg_jsonb_wrap('a').class.must_equal Sequel::Postgres::JSONBString
    Sequel.pg_jsonb_wrap('a').must_equal 'a'
    Sequel.pg_jsonb_wrap(1).class.must_equal Sequel::Postgres::JSONBInteger
    Sequel.pg_jsonb_wrap(1).must_equal 1
    Sequel.pg_jsonb_wrap(2.5).class.must_equal Sequel::Postgres::JSONBFloat
    Sequel.pg_jsonb_wrap(2.5).must_equal 2.5
    Sequel.pg_jsonb_wrap(true).class.must_equal Sequel::Postgres::JSONBTrue
    Sequel.pg_jsonb_wrap(true).must_equal true
    Sequel.pg_jsonb_wrap(false).class.must_equal Sequel::Postgres::JSONBFalse
    Sequel.pg_jsonb_wrap(false).must_equal false
    Sequel.pg_jsonb_wrap(nil).class.must_equal Sequel::Postgres::JSONBNull
    Sequel.pg_jsonb_wrap(nil).must_be_nil
  end

  it "Sequel.pg_jsonb_wrap should fail when passed an unsupported object" do
    proc{Sequel.pg_jsonb_wrap(Object.new)}.must_raise Sequel::Error
  end

  it "should not wrap JSON primitives in json and jsonb conversion_proc when not setting wrap_json_primitives" do
    [114, 3802].each do |oid|
      cp = @db.conversion_procs[oid]
      cp.call('1').class.must_equal(integer_class)
      cp.call('1').must_equal 1
      cp.call('2.5').class.must_equal Float
      cp.call('2.5').must_equal 2.5
      cp.call('"a"').class.must_equal String
      cp.call('"a"').must_equal 'a'
      cp.call('true').class.must_equal TrueClass
      cp.call('true').must_equal true
      cp.call('false').class.must_equal FalseClass
      cp.call('false').must_equal false
      cp.call('null').class.must_equal NilClass
      cp.call('null').must_be_nil
    end
  end

  it "should wrap JSON primitives in json conversion_proc when setting wrap_json_primitives" do
    cp = @db.conversion_procs[114]
    @db.wrap_json_primitives = true
    cp.call('1').class.must_equal Sequel::Postgres::JSONInteger
    cp.call('1').must_equal 1
    cp.call('2.5').class.must_equal Sequel::Postgres::JSONFloat
    cp.call('2.5').must_equal 2.5
    cp.call('"a"').class.must_equal Sequel::Postgres::JSONString
    cp.call('"a"').must_equal "a"
    cp.call('true').class.must_equal Sequel::Postgres::JSONTrue
    cp.call('true').must_equal true
    cp.call('false').class.must_equal Sequel::Postgres::JSONFalse
    cp.call('false').must_equal false
    cp.call('null').class.must_equal Sequel::Postgres::JSONNull
    cp.call('null').must_be_nil
  end

  it "should wrap JSON primitives in jsonb conversion_proc when setting wrap_json_primitives" do
    cp = @db.conversion_procs[3802]
    @db.wrap_json_primitives = true
    cp.call('1').class.must_equal Sequel::Postgres::JSONBInteger
    cp.call('1').must_equal 1
    cp.call('2.5').class.must_equal Sequel::Postgres::JSONBFloat
    cp.call('2.5').must_equal 2.5
    cp.call('"a"').class.must_equal Sequel::Postgres::JSONBString
    cp.call('"a"').must_equal "a"
    cp.call('true').class.must_equal Sequel::Postgres::JSONBTrue
    cp.call('true').must_equal true
    cp.call('false').class.must_equal Sequel::Postgres::JSONBFalse
    cp.call('false').must_equal false
    cp.call('null').class.must_equal Sequel::Postgres::JSONBNull
    cp.call('null').must_be_nil
  end

  it "should parse json type from the schema correctly" do
    @db.fetch = [{:name=>'id', :db_type=>'integer'}, {:name=>'i', :db_type=>'json'}]
    @db.schema(:items).map{|e| e[1][:type]}.must_equal [:integer, :json]
  end

  it "should parse jsonb type from the schema correctly" do
    @db.fetch = [{:name=>'id', :db_type=>'integer'}, {:name=>'i', :db_type=>'jsonb'}]
    @db.schema(:items).map{|e| e[1][:type]}.must_equal [:integer, :jsonb]
  end

  it "should set :callable_default schema entries if default value is recognized" do
    @db.fetch = [{:name=>'id', :db_type=>'integer', :default=>'1'}, {:name=>'jh', :db_type=>'json', :default=>"'{}'::json"}, {:name=>'ja', :db_type=>'json', :default=>"'[]'::json"}, {:name=>'jbh', :db_type=>'jsonb', :default=>"'{}'::jsonb"}, {:name=>'jba', :db_type=>'jsonb', :default=>"'[]'::jsonb"}]
    s = @db.schema(:items)
    s[0][1][:callable_default].must_be_nil
    v = s[1][1][:callable_default].call
    Sequel::Postgres::JSONHash.===(v).must_equal true
    @db.literal(v).must_equal "'{}'::json"
    v['a'] = 'b'
    @db.literal(v).must_equal "'{\"a\":\"b\"}'::json"

    v = s[2][1][:callable_default].call
    Sequel::Postgres::JSONArray.===(v).must_equal true
    @db.literal(v).must_equal "'[]'::json"
    v << 1
    @db.literal(v).must_equal "'[1]'::json"

    v = s[3][1][:callable_default].call
    Sequel::Postgres::JSONBHash.===(v).must_equal true
    @db.literal(v).must_equal "'{}'::jsonb"
    v['a'] = 'b'
    @db.literal(v).must_equal "'{\"a\":\"b\"}'::jsonb"

    v = s[4][1][:callable_default].call
    Sequel::Postgres::JSONBArray.===(v).must_equal true
    @db.literal(v).must_equal "'[]'::jsonb"
    v << 1
    @db.literal(v).must_equal "'[1]'::jsonb"
  end

  it "should support typecasting for the json type" do
    h = Sequel.pg_json(1=>2)
    a = Sequel.pg_json([1])
    @db.typecast_value(:json, h).object_id.must_equal(h.object_id)
    @db.typecast_value(:json, h.to_hash).must_equal h
    @db.typecast_value(:json, h.to_hash).class.must_equal(@hc)
    @db.typecast_value(:json, Sequel.pg_jsonb(h)).must_equal h
    @db.typecast_value(:json, Sequel.pg_jsonb(h)).class.must_equal(@hc)
    @db.typecast_value(:json, a).object_id.must_equal(a.object_id)
    @db.typecast_value(:json, a.to_a).must_equal a
    @db.typecast_value(:json, a.to_a).class.must_equal(@ac)
    @db.typecast_value(:json, Sequel.pg_jsonb(a)).must_equal a
    @db.typecast_value(:json, Sequel.pg_jsonb(a)).class.must_equal(@ac)
    @db.typecast_value(:json, '[]').must_equal Sequel.pg_json([])
    @db.typecast_value(:json, '[]').class.must_equal(@ac)
    @db.typecast_value(:json, '{"a": "b"}').must_equal Sequel.pg_json("a"=>"b")
    @db.typecast_value(:json, '{"a": "b"}').class.must_equal(@hc)
    @db.typecast_value(:json, 1).class.must_equal Sequel::Postgres::JSONInteger
    @db.typecast_value(:json, 1).must_equal 1
    @db.typecast_value(:json, 2.5).class.must_equal Sequel::Postgres::JSONFloat
    @db.typecast_value(:json, 2.5).must_equal 2.5
    @db.typecast_value(:json, true).class.must_equal Sequel::Postgres::JSONTrue
    @db.typecast_value(:json, true).must_equal true
    @db.typecast_value(:json, false).class.must_equal Sequel::Postgres::JSONFalse
    @db.typecast_value(:json, false).must_equal false
    @db.typecast_value(:json, nil).class.must_equal NilClass
    @db.typecast_value(:json, nil).must_be_nil
    proc{@db.typecast_value(:json, 'a')}.must_raise(Sequel::InvalidValue)
    proc{@db.typecast_value(:json, Object.new)}.must_raise(Sequel::InvalidValue)
    @db.typecast_json_strings = true
    @db.typecast_value(:json, '[]').class.must_equal(Sequel::Postgres::JSONString)
    @db.typecast_value(:json, '[]').must_equal '[]'
  end

  it "should support typecasting for the jsonb type" do
    h = Sequel.pg_jsonb(1=>2)
    a = Sequel.pg_jsonb([1])
    @db.typecast_value(:jsonb, h).object_id.must_equal(h.object_id)
    @db.typecast_value(:jsonb, h.to_hash).must_equal h
    @db.typecast_value(:jsonb, h.to_hash).class.must_equal(@bhc)
    @db.typecast_value(:jsonb, Sequel.pg_json(h)).must_equal h
    @db.typecast_value(:jsonb, Sequel.pg_json(h)).class.must_equal(@bhc)
    @db.typecast_value(:jsonb, a).object_id.must_equal(a.object_id)
    @db.typecast_value(:jsonb, a.to_a).must_equal a
    @db.typecast_value(:jsonb, a.to_a).class.must_equal(@bac)
    @db.typecast_value(:jsonb, Sequel.pg_json(a)).must_equal a
    @db.typecast_value(:jsonb, Sequel.pg_json(a)).class.must_equal(@bac)
    @db.typecast_value(:jsonb, '[]').must_equal Sequel.pg_jsonb([])
    @db.typecast_value(:jsonb, '[]').class.must_equal(@bac)
    @db.typecast_value(:jsonb, '{"a": "b"}').must_equal Sequel.pg_jsonb("a"=>"b")
    @db.typecast_value(:jsonb, '{"a": "b"}').class.must_equal(@bhc)
    @db.typecast_value(:jsonb, 1).class.must_equal Sequel::Postgres::JSONBInteger
    @db.typecast_value(:jsonb, 1).must_equal 1
    @db.typecast_value(:jsonb, 2.5).class.must_equal Sequel::Postgres::JSONBFloat
    @db.typecast_value(:jsonb, 2.5).must_equal 2.5
    @db.typecast_value(:jsonb, true).class.must_equal Sequel::Postgres::JSONBTrue
    @db.typecast_value(:jsonb, true).must_equal true
    @db.typecast_value(:jsonb, false).class.must_equal Sequel::Postgres::JSONBFalse
    @db.typecast_value(:jsonb, false).must_equal false
    @db.typecast_value(:jsonb, nil).class.must_equal NilClass
    @db.typecast_value(:jsonb, nil).must_be_nil
    proc{@db.typecast_value(:jsonb, 'a')}.must_raise(Sequel::InvalidValue)
    proc{@db.typecast_value(:jsonb, Object.new)}.must_raise(Sequel::InvalidValue)
    @db.typecast_json_strings = true
    @db.typecast_value(:jsonb, '[]').class.must_equal(Sequel::Postgres::JSONBString)
    @db.typecast_value(:jsonb, '[]').must_equal '[]'
  end

  it "should return correct results for Database#schema_type_class" do
    @db.schema_type_class(:json).must_equal [Sequel::Postgres::JSONObject]
    @db.schema_type_class(:jsonb).must_equal [Sequel::Postgres::JSONBObject]
    @db.schema_type_class(:integer).must_equal Integer
  end
end
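# Editor's note: a hedged sketch (not part of the original suite) of the
# wrapping behavior covered above; the output comments mirror expectations
# asserted in the spec.
require 'sequel'

db = Sequel.connect('mock://postgres')
db.extension :pg_json

db.literal(Sequel.pg_jsonb('a' => 'b'))   # => "'{\"a\":\"b\"}'::jsonb"

# By default the jsonb conversion proc (oid 3802) returns plain Ruby
# primitives; with wrap_json_primitives they come back in typed wrappers.
cp = db.conversion_procs[3802]
cp.call('1').class                        # => Integer
db.wrap_json_primitives = true
cp.call('1').class                        # => Sequel::Postgres::JSONBInteger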
sequel-5.63.0/spec/extensions/pg_loose_count_spec.rb

require_relative "spec_helper"

describe "pg_loose_count extension" do
  before do
    @db = Sequel.mock(:host=>'postgres', :fetch=>{:v=>1}).extension(:pg_loose_count)
    @db.extend_datasets{def quote_identifiers?; false end}
  end

  it "should add loose_count method getting fast count for entire table using table statistics" do
    @db.loose_count(:a).must_equal 1
    @db.sqls.must_equal ["SELECT CAST(reltuples AS integer) AS v FROM pg_class WHERE (oid = CAST(CAST('a' AS regclass) AS oid)) LIMIT 1"]
  end

  it "should support schema qualified tables" do
    @db.loose_count(Sequel[:a][:b]).must_equal 1
    @db.sqls.must_equal ["SELECT CAST(reltuples AS integer) AS v FROM pg_class WHERE (oid = CAST(CAST('a.b' AS regclass) AS oid)) LIMIT 1"]
  end

  with_symbol_splitting "should support schema qualified table symbols" do
    @db.loose_count(:a__b).must_equal 1
    @db.sqls.must_equal ["SELECT CAST(reltuples AS integer) AS v FROM pg_class WHERE (oid = CAST(CAST('a.b' AS regclass) AS oid)) LIMIT 1"]
  end
end
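# Editor's note: a minimal sketch (not part of the original suite) showing
# loose_count in use, mirroring the mock setup above. On a real PostgreSQL
# database the result is an estimate taken from pg_class.reltuples, not an
# exact row count.
require 'sequel'

db = Sequel.mock(:host=>'postgres', :fetch=>{:v=>100}).extension(:pg_loose_count)
db.loose_count(:items)  # => 100 (estimated row count for the hypothetical items table)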
sequel-5.63.0/spec/extensions/pg_multirange_spec.rb

require_relative "spec_helper"

describe "pg_multirange extension" do
  before(:all) do
    Sequel.extension :pg_array, :pg_range, :pg_multirange
  end

  before do
    @db = Sequel.connect('mock://postgres')
    def @db.server_version(*) 140000 end
    @R = Sequel::Postgres::PGRange
    @MR = Sequel::Postgres::PGMultiRange
    @db.extend_datasets do
      def supports_timestamp_timezones?; false end
      def supports_timestamp_usecs?; false end
      def quote_identifiers?; false end
    end
    @db.extension(:pg_array, :pg_multirange)
  end

  it "should raise if loaded into a database that doesn't support multiranges" do
    @db = Sequel.connect('mock://postgres')
    def @db.server_version(*) 130000 end
    proc{@db.extension(:pg_multirange)}.must_raise Sequel::Error
  end

  it "should set up conversion procs correctly" do
    cp = @db.conversion_procs
    cp[4451].call("{[1,2],(3,4)}").must_equal @MR.new([
      @R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'int4range'),
      @R.new(3,4, :exclude_begin=>true, :exclude_end=>true, :db_type=>'int4range'),
    ], 'int4multirange')
    cp[4532].call("{[1,2]}").must_equal @MR.new([@R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'numrange')], 'nummultirange')
    cp[4533].call("{[2011-01-02 10:20:30,2011-02-03 10:20:30)}").must_equal @MR.new([@R.new(Time.local(2011, 1, 2, 10, 20, 30),Time.local(2011, 2, 3, 10, 20, 30), :exclude_begin=>false, :exclude_end=>true, :db_type=>'tsrange')], 'tsmultirange')
    cp[4534].call("{[2011-01-02 10:20:30,2011-02-03 10:20:30)}").must_equal @MR.new([@R.new(Time.local(2011, 1, 2, 10, 20, 30),Time.local(2011, 2, 3, 10, 20, 30), :exclude_begin=>false, :exclude_end=>true, :db_type=>'tstzrange')], 'tstzmultirange')
    cp[4535].call("{[2011-01-02,2011-02-03)}").must_equal @MR.new([@R.new(Date.new(2011, 1, 2),Date.new(2011, 2, 3), :exclude_begin=>false, :exclude_end=>true, :db_type=>'daterange')], 'datemultirange')
    cp[4536].call("{[1,2]}").must_equal @MR.new([@R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'int8range')], 'int8multirange')
  end

  it "should set up conversion procs for arrays correctly" do
    cp = @db.conversion_procs
    cp[6150].call("{\"{[1,2],(3,4)}\"}").must_equal [@MR.new([
      @R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'int4range'),
      @R.new(3,4, :exclude_begin=>true, :exclude_end=>true, :db_type=>'int4range'),
    ], 'int4multirange')]
    cp[6151].call("{\"{[1,2]}\"}").must_equal [@MR.new([@R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'numrange')], 'nummultirange')]
    cp[6152].call("{\"{[2011-01-02 10:20:30,2011-02-03 10:20:30)}\"}").must_equal [@MR.new([@R.new(Time.local(2011, 1, 2, 10, 20, 30),Time.local(2011, 2, 3, 10, 20, 30), :exclude_begin=>false, :exclude_end=>true, :db_type=>'tsrange')], 'tsmultirange')]
    cp[6153].call("{\"{[2011-01-02 10:20:30,2011-02-03 10:20:30)}\"}").must_equal [@MR.new([@R.new(Time.local(2011, 1, 2, 10, 20, 30),Time.local(2011, 2, 3, 10, 20, 30), :exclude_begin=>false, :exclude_end=>true, :db_type=>'tstzrange')], 'tstzmultirange')]
    cp[6155].call("{\"{[2011-01-02,2011-02-03)}\"}").must_equal [@MR.new([@R.new(Date.new(2011, 1, 2),Date.new(2011, 2, 3), :exclude_begin=>false, :exclude_end=>true, :db_type=>'daterange')], 'datemultirange')]
    cp[6157].call("{\"{[1,2]}\"}").must_equal [@MR.new([@R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'int8range')], 'int8multirange')]
  end

  it "should parse empty multiranges" do
    @db.conversion_procs[4451].call("{}").must_equal @MR.new([], 'int4multirange')
  end

  it "should literalize PGMultiRange of Range instances to strings correctly" do
    @db.literal(@MR.new([], 'xmultirange')).must_equal "xmultirange()"
    @db.literal(@MR.new([Date.new(2011, 1, 2)...Date.new(2011, 3, 2), Date.new(2012, 1, 2)...Date.new(2012, 3, 2)], 'datemultirange')).must_equal "datemultirange(daterange('2011-01-02','2011-03-02','[)'), daterange('2012-01-02','2012-03-02','[)'))"
    @db.literal(@MR.new([Date.new(2011, 1, 2)...Date.new(2011, 3, 2)], 'datemultirange')).must_equal "datemultirange(daterange('2011-01-02','2011-03-02','[)'))"
    @db.literal(@MR.new([Time.local(2011, 1, 2, 10, 20, 30)...Time.local(2011, 2, 3, 10, 20, 30)], 'tsmultirange')).must_equal "tsmultirange(tsrange('2011-01-02 10:20:30','2011-02-03 10:20:30','[)'))"
    @db.literal(@MR.new([DateTime.new(2011, 1, 2, 10, 20, 30)...DateTime.new(2011, 2, 3, 10, 20, 30)], 'tsmultirange')).must_equal "tsmultirange(tsrange('2011-01-02 10:20:30','2011-02-03 10:20:30','[)'))"
    @db.literal(@MR.new([DateTime.new(2011, 1, 2, 10, 20, 30)...DateTime.new(2011, 2, 3, 10, 20, 30)], 'tstzmultirange')).must_equal "tstzmultirange(tstzrange('2011-01-02 10:20:30','2011-02-03 10:20:30','[)'))"
    @db.literal(@MR.new([1..2], 'int8multirange')).must_equal "int8multirange(int8range(1,2,'[]'))"
    @db.literal(@MR.new([1.0..2.0], 'nummultirange')).must_equal "nummultirange(numrange(1.0,2.0,'[]'))"
    @db.literal(@MR.new([BigDecimal('1.0')..BigDecimal('2.0')], 'nummultirange')).must_equal "nummultirange(numrange(1.0,2.0,'[]'))"
  end

  it "should literalize PGMultiRange of PGRange instances to strings correctly" do
    @db.literal(@MR.new([@R.new(1, 2, :db_type=>'int8range')], 'int8multirange')).must_equal "int8multirange(int8range(1,2,'[]'))"
    @db.literal(@MR.new([@R.new(1, 2, :exclude_begin=>true, :db_type=>'int8range')], 'int8multirange')).must_equal "int8multirange(int8range(1,2,'(]'))"
    @db.literal(@MR.new([@R.new(1, 2, :exclude_end=>true, :db_type=>'int8range')], 'int8multirange')).must_equal "int8multirange(int8range(1,2,'[)'))"
    @db.literal(@MR.new([@R.new(nil, nil, :empty=>true)], 'nummultirange')).must_equal "nummultirange('empty')"
    @db.literal(@MR.new([@R.new(nil, nil, :empty=>true, :db_type=>'int8range')], 'int8multirange')).must_equal "int8multirange('empty'::int8range)"
    @db.literal(@MR.new([@R.new("", 2)], 'nummultirange')).must_equal "nummultirange('[\"\",2]')"
  end

  it "should not affect literalization of custom objects" do
    o = Object.new
    def o.sql_literal(ds) 'v' end
    @db.literal(o).must_equal 'v'
  end

  it "should support using PGMultiRange of Range instances as bound variables" do
    @db.bound_variable_arg(@MR.new([], 'int4multirange'), nil).must_equal "{}"
    @db.bound_variable_arg(@MR.new([1..2], 'int4multirange'), nil).must_equal "{[1,2]}"
    @db.bound_variable_arg(@MR.new([1..2, 3...4], 'int4multirange'), nil).must_equal "{[1,2], [3,4)}"
  end

  it "should support using PGMultiRange of PGRange instances as bound variables" do
    @db.bound_variable_arg(@MR.new([@R.new(1, 2)], 'int8multirange'), nil).must_equal "{[1,2]}"
    @db.bound_variable_arg(@MR.new([@R.new(1, 2), @R.new(3, 4, :exclude_begin=>true, :exclude_end=>true)], 'int8multirange'), nil).must_equal "{[1,2], (3,4)}"
  end

  it "should support using arrays of PGMultiRanges as bound variables" do
    @db.bound_variable_arg([@MR.new([1..2], 'int4multirange'), @MR.new([@R.new(2, 3, :exclude_end=>true)], 'int4multirange')], nil).must_equal '{"{[1,2]}","{[2,3)}"}'
  end

  it "should parse multirange types from the schema correctly" do
    @db.fetch = [{:name=>'id', :db_type=>'integer'}, {:name=>'i4', :db_type=>'int4multirange'}, {:name=>'i8', :db_type=>'int8multirange'}, {:name=>'n', :db_type=>'nummultirange'}, {:name=>'d', :db_type=>'datemultirange'}, {:name=>'ts', :db_type=>'tsmultirange'}, {:name=>'tz', :db_type=>'tstzmultirange'}]
    @db.schema(:items).map{|e| e[1][:type]}.must_equal [:integer, :int4multirange, :int8multirange, :nummultirange, :datemultirange, :tsmultirange, :tstzmultirange]
  end

  it "should parse arrays of multirange types from the schema correctly" do
    @db.fetch = [{:name=>'id', :db_type=>'integer'}, {:name=>'i4', :db_type=>'int4multirange[]'}, {:name=>'i8', :db_type=>'int8multirange[]'}, {:name=>'n', :db_type=>'nummultirange[]'}, {:name=>'d', :db_type=>'datemultirange[]'}, {:name=>'ts', :db_type=>'tsmultirange[]'}, {:name=>'tz', :db_type=>'tstzmultirange[]'}]
    @db.schema(:items).map{|e| e[1][:type]}.must_equal [:integer, :int4multirange_array, :int8multirange_array, :nummultirange_array, :datemultirange_array, :tsmultirange_array, :tstzmultirange_array]
  end

  it "should set :ruby_default schema entries if default value is recognized using a database query" do
    v = @MR.new([@R.new(1, 3, :exclude_end=>true, :db_type=>'int8range')], 'int8multirange')
    @db.fetch = [[{:name=>'id', :db_type=>'integer', :default=>'1'}, {:name=>'t', :db_type=>'int8multirange', :default=>"int8multirange(int8range(1,3,'[)'))"}],
      [{:v=>v}]
    ]
    s = @db.schema(:items)
    s[1][1][:ruby_default].must_equal v
    @db.sqls.last.must_equal "SELECT int8multirange(int8range(1,3,'[)')) LIMIT 1"
  end

  it "should work correctly in hashes" do
    h = Hash.new(1)
    h[@MR.new([@R.new(1, 2)], 'int8multirange')] = 2
    h[@MR.new([@R.new(nil, nil, :empty => true)], 'int8multirange')] = 3
    h[@MR.new([@R.new(1, 2)], 'int8multirange')].must_equal 2
    h[@MR.new([@R.new(1, 3)], 'int8multirange')].must_equal 1
    h[@MR.new([@R.new(2, 2)], 'int8multirange')].must_equal 1
    h[@MR.new([@R.new(1, 2, :exclude_begin => true)], 'int8multirange')].must_equal 1
    h[@MR.new([@R.new(1, 2, :exclude_end =>
true)], 'int8multirange')].must_equal 1 h[@MR.new([@R.new(1, 2, :db_type => :int)], 'int8multirange')].must_equal 1 h[@MR.new([@R.new(nil, nil, :empty => true)], 'int8multirange')].must_equal 3 h[@MR.new([@R.new(nil, nil, :empty => true, :db_type => :int)], 'int8multirange')].must_equal 1 end describe "database typecasting" do before do @o = @MR.new([@R.new(1, 2, :db_type=>'int4range')], 'int4multirange') @o2 = @MR.new([@R.new(1, 2, :db_type=>'int8range')], 'int8multirange') @eo = @MR.new([@R.new(nil, nil, :empty=>true, :db_type=>'int4range')], 'int4multirange') @eo2 = @MR.new([@R.new(nil, nil, :empty=>true, :db_type=>'int8range')], 'int8multirange') end it "should handle multiple multirange types" do %w'int4 int8 num date ts tstz'.each do |i| @db.typecast_value(:"#{i}multirange", @MR.new([@R.new(1, 2, :db_type=>"#{i}range")], "#{i}multirange")).must_equal @MR.new([@R.new(1, 2, :db_type=>"#{i}range")], "#{i}multirange") end end it "should handle arrays of multiple multirange types" do %w'int4 int8 num date ts tstz'.each do |i| @db.typecast_value(:"#{i}multirange_array", [@MR.new([@R.new(1, 2, :db_type=>"#{i}range")], "#{i}multirange")]).class.must_equal(Sequel::Postgres::PGArray) @db.typecast_value(:"#{i}multirange_array", [@MR.new([@R.new(1, 2, :db_type=>"#{i}range")], "#{i}multirange")]).must_equal [@MR.new([@R.new(1, 2, :db_type=>"#{i}range")], "#{i}multirange")] end end it "should return PGMultiRange value as is if they have the same db_type" do @db.typecast_value(:int4multirange, @o).must_equal @o end it "should return new PGMultiRange value if they have a different db_type" do @db.typecast_value(:int8multirange, @o).must_equal @o2 end it "should return new PGMultiRange value if they have a different dbtype and value is empty" do @db.typecast_value(:int8multirange, @eo).must_equal @eo2 end it "should return new PGMultiRange value if given an Array" do @db.typecast_value(:int4multirange, [1..2]).must_equal @o @db.typecast_value(:int4multirange, [1..2]).wont_equal @o2 @db.typecast_value(:int8multirange, [1..2]).must_equal @o2 end it "should parse a string argument as the PostgreSQL output format" do @db.typecast_value(:int4multirange, ['[1,2]']).must_equal @o end it "should raise errors for unparsable formats" do proc{@db.typecast_value(:int8multirange, ['foo'])}.must_raise(Sequel::InvalidValue) end it "should raise errors for unhandled values" do proc{@db.typecast_value(:int4multirange, 1)}.must_raise(Sequel::InvalidValue) end end it "should support registering custom range types" do @db.register_multirange_type('foomultirange', :range_oid=>3904) @db.typecast_value(:foomultirange, [1..2]).class.must_equal @MR @db.fetch = [{:name=>'id', :db_type=>'foomultirange'}] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:foomultirange] end it "should support using a block as a custom conversion proc given as block" do @db.register_multirange_type('foo2multirange', :oid=>1234) do |s| beg, en = s[1...-1].split(',').map{|x| (x*2).to_i} beg..en end @db.conversion_procs[1234].call('{[1,2]}').must_be :==, [11..22] end it "should support using a block as a custom conversion proc given as :converter option" do @db.register_multirange_type('foo2multirange', :oid=>1234, :converter=>proc do |s| beg, en = s[1...-1].split(',').map{|x| (x*2).to_i} beg..en end) @db.conversion_procs[1234].call('{[1,2]}').must_be :==, [11..22] end it "should support using an existing scaler conversion proc via the :range_oid option" do @db.register_multirange_type('foo4multirange', :oid=>1234, :range_oid=>3904) v = 
@db.conversion_procs[1234].call('{[1,2]}') v.must_equal @MR.new([@R.new(1, 2, :db_type=>'int4range')], 'foo4multirange') v.db_type.must_equal 'foo4multirange' end it "should raise an error if using :range_oid option with unexisting scalar conversion proc" do proc{@db.register_multirange_type('fooimultirange', :range_oid=>0)}.must_raise(Sequel::Error) end it "should raise an error if using :converter option and a block argument" do proc{@db.register_multirange_type('fooimultirange', :converter=>proc{}){}}.must_raise(Sequel::Error) end it "should raise an error if using :range_oid option and a block argument" do proc{@db.register_multirange_type('fooimultirange', :range_oid=>16){}}.must_raise(Sequel::Error) end it "should raise an error if using :converter option and a :range_oid option" do proc{@db.register_multirange_type('fooimultirange', :range_oid=>16, :converter=>proc{})}.must_raise(Sequel::Error) end it "should raise an error if using :oid option without a converter" do proc{@db.register_multirange_type('fooimultirange', :oid=>16)}.must_raise(Sequel::Error) end it "should not support registering custom multirange types on a per-Database basis for frozen databases" do @db.freeze proc{@db.register_multirange_type('banana', :oid=>7865){|s| s}}.must_raise RuntimeError, TypeError end it "should support registering custom multirange types on a per-Database basis" do @db.register_multirange_type('banana', :oid=>7865){|s| s} @db.conversion_procs[7865].call('{}').must_equal @MR.new([], 'banana') @db.fetch = [{:name=>'id', :db_type=>'banana'}] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:banana] @db.conversion_procs.must_include(7865) @db.respond_to?(:typecast_value_banana, true).must_equal true db = Sequel.connect('mock://postgres', :quote_identifiers=>false) def db.server_version(*) 140000 end db.extend_datasets(Module.new{def supports_timestamp_timezones?; false; end; def supports_timestamp_usecs?; false; end}) db.extension(:pg_multirange) db.fetch = [{:name=>'id', :db_type=>'banana'}] db.schema(:items).map{|e| e[1][:type]}.must_equal [nil] db.conversion_procs.wont_include(7865) db.respond_to?(:typecast_value_banana, true).must_equal false end it "should automatically look up the multirange and subtype oids when registering per-Database types" do @db.fetch = [[{:rngtypid=>3904, :rngmultitypid=>7866}], [{:name=>'id', :db_type=>'banana'}]] @db.register_multirange_type('banana') @db.sqls.must_equal ["SELECT rngmultitypid, rngtypid FROM pg_range INNER JOIN pg_type ON (pg_type.oid = pg_range.rngmultitypid) WHERE (typname = 'banana') LIMIT 1"] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:banana] @db.conversion_procs[7866].call("{[1,3)}").must_be :==, [1...3] end it "should not automatically look up oids if given both :oid and :range_oid" do @db.register_multirange_type('banana', :oid=>7866, :range_oid=>3904) @db.sqls.must_equal [] @db.conversion_procs[7866].call("{[1,3]}").must_equal @MR.new([@R.new(1, 3, :db_type=>'int4range')], 'banana') end it "should not automatically look up oids if given multirange oid and block" do @db.register_multirange_type('banana', :oid=>7866){|s| Range.new(*s[1...-1].split(',').map(&:to_i))} @db.sqls.must_equal [] @db.conversion_procs[7866].call("{[1,3]}").must_be :==, [1..3] end it "should return correct results for Database#schema_type_class" do @db.schema_type_class(:int4multirange).must_equal Sequel::Postgres::PGMultiRange @db.schema_type_class(:integer).must_equal Integer end describe "parser" do before do @converter = :to_s.to_proc end it "should 
raise if input doesn't start with {" do proc{@MR::Parser.new('', @converter).parse}.must_raise Sequel::Error proc{@MR::Parser.new('a', @converter).parse}.must_raise Sequel::Error end it "should raise if there is data after parsing has finished" do proc{@MR::Parser.new('{}}', @converter).parse}.must_raise Sequel::Error proc{@MR::Parser.new('{[1,2]}a', @converter).parse}.must_raise Sequel::Error proc{@MR::Parser.new('{[1,2],(3,4)})', @converter).parse}.must_raise Sequel::Error end it "should raise if invalid separator is used" do proc{@MR::Parser.new('{[1,2]a}', @converter).parse}.must_raise Sequel::Error end it "should raise if incomplete multirange is parsed" do proc{@MR::Parser.new('{[1', @converter).parse}.must_raise Sequel::Error end end describe "a PGMultiRange instance" do before do @r1 = @MR.new([], 'int4multirange') @r2 = @MR.new([@R.new(3, nil, :exclude_begin=>true, :db_type=>'int4range')], 'int4multirange') @r3 = @MR.new([@R.new(nil, 4, :exclude_end=>true, :db_type=>'int8range'), @R.new(14, nil, :db_type=>'int8range')], 'int8multirange') end it "should have #db_type return the multirange's database type" do @r1.db_type.must_equal 'int4multirange' @r2.db_type.must_equal 'int4multirange' @r3.db_type.must_equal 'int8multirange' end it "should be able to be created by Sequel.pg_multirange" do Sequel.pg_multirange([], 'int4multirange').must_equal @r1 end it "should have Sequel.pg_range return a PGRange as is" do Sequel.pg_multirange(@r1, 'int4multirange').to_a.must_be_same_as @r1.to_a end it "should have Sequel.pg_multirange return a new PGMultiRange if the database type differs" do v = Sequel.pg_multirange(@r2, 'int8multirange') v.must_equal @MR.new([@R.new(3, nil, :exclude_begin=>true, :db_type=>'int4range')], 'int8multirange') v.db_type.must_equal 'int8multirange' end it "should have cover? and === match if any member in the multiranges matches" do @r1.cover?(1).must_equal false @r2.cover?(1).must_equal false @r3.cover?(1).must_equal true @r1.cover?(3).must_equal false @r2.cover?(3).must_equal false @r3.cover?(3).must_equal true @r1.cover?(4).must_equal false @r2.cover?(4).must_equal true @r3.cover?(4).must_equal false @r1.cover?(5).must_equal false @r2.cover?(5).must_equal true @r3.cover?(5).must_equal false @r1.cover?(14).must_equal false @r2.cover?(14).must_equal true @r3.cover?(14).must_equal true @r1.===(14).must_equal false @r2.===(14).must_equal true @r3.===(14).must_equal true end it "should only consider PGMultiRanges equal if they have the same db_type" do (@MR.new([], 'int4range') == @MR.new([], 'int4range')).must_equal true (@MR.new([], 'int4range') == @MR.new([], 'int8range')).must_equal false (@MR.new([], 'int4range') == []).must_equal true (@MR.new([], 'int4range').eql? @MR.new([], 'int4range')).must_equal true (@MR.new([], 'int4range').eql? @MR.new([], 'int8range')).must_equal false (@MR.new([], 'int4range').eql? 
      []).must_equal true
    end
  end
end

sequel-5.63.0/spec/extensions/pg_range_ops_spec.rb

require_relative "spec_helper"

Sequel.extension :pg_array, :pg_range, :pg_multirange, :pg_range_ops

describe "Sequel::Postgres::RangeOp" do
  before do
    db = Sequel.connect('mock://postgres')
    db.extend_datasets{def quote_identifiers?; false end}
    @ds = db.dataset
    @h = Sequel.pg_range_op(:h)
  end

  it "#pg_range should return self" do
    @h.pg_range.must_be_same_as(@h)
  end

  it "Sequel.pg_range_op should return argument if already a RangeOp" do
    Sequel.pg_range_op(@h).must_be_same_as(@h)
  end

  it "Sequel.pg_range should return a new RangeOp if not given a range" do
    @ds.literal(Sequel.pg_range(:h).lower).must_equal "lower(h)"
  end

  it "Sequel.pg_multirange should return a new RangeOp if not given a multirange or array" do
    @ds.literal(Sequel.pg_multirange(:h, 'x').lower).must_equal "lower(h)"
  end

  it "#pg_range should return a RangeOp for literal strings, and expressions" do
    @ds.literal(Sequel.function(:b, :h).pg_range.lower).must_equal "lower(b(h))"
    @ds.literal(Sequel.lit('h').pg_range.lower).must_equal "lower(h)"
  end

  it "PGRange#op should return a RangeOp" do
    @ds.literal(Sequel.pg_range(1..2, :numrange).op.lower).must_equal "lower(numrange(1,2,'[]'))"
  end

  it "PGMultiRange#op should return a RangeOp" do
    @ds.literal(Sequel.pg_multirange([Sequel.pg_range(1..2, :numrange)], :nummultirange).op.lower).must_equal "lower(nummultirange(numrange(1,2,'[]')))"
  end

  it "should define methods for all of the PostgreSQL range operators" do
    @ds.literal(@h.contains(@h)).must_equal "(h @> h)"
    @ds.literal(@h.contained_by(@h)).must_equal "(h <@ h)"
    @ds.literal(@h.overlaps(@h)).must_equal "(h && h)"
    @ds.literal(@h.left_of(@h)).must_equal "(h << h)"
    @ds.literal(@h.right_of(@h)).must_equal "(h >> h)"
    @ds.literal(@h.ends_before(@h)).must_equal "(h &< h)"
    @ds.literal(@h.starts_after(@h)).must_equal "(h &> h)"
    @ds.literal(@h.adjacent_to(@h)).must_equal "(h -|- h)"
  end

  it "should define methods for all of the PostgreSQL range functions" do
    @ds.literal(@h.lower).must_equal "lower(h)"
    @ds.literal(@h.upper).must_equal "upper(h)"
    @ds.literal(@h.isempty).must_equal "isempty(h)"
    @ds.literal(@h.lower_inc).must_equal "lower_inc(h)"
    @ds.literal(@h.upper_inc).must_equal "upper_inc(h)"
    @ds.literal(@h.lower_inf).must_equal "lower_inf(h)"
    @ds.literal(@h.upper_inf).must_equal "upper_inf(h)"
    @ds.literal(@h.unnest).must_equal "unnest(h)"
  end

  it "+ - * operators should be defined and return a RangeOp" do
    @ds.literal((@h + @h).lower).must_equal "lower((h + h))"
    @ds.literal((@h * @h).lower).must_equal "lower((h * h))"
    @ds.literal((@h - @h).lower).must_equal "lower((h - h))"
  end

  it "range_merge and multirange should be defined and return a RangeOp" do
    @ds.literal(@h.range_merge.lower).must_equal "lower(range_merge(h))"
    @ds.literal(@h.multirange.lower).must_equal "lower(multirange(h))"
  end
end
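# --- Added illustration (not part of the original spec file) ---
# A minimal usage sketch for the range operator API exercised above,
# kept entirely in comments so the spec archive stays side-effect free.
# The dataset name (:reservations) and the columns (:during, :blackout)
# are hypothetical; the mock adapter is used only to generate SQL,
# mirroring the conventions of the specs themselves.
#
#   db = Sequel.connect('mock://postgres')
#   db.extension :pg_range, :pg_range_ops
#   during = Sequel.pg_range_op(:during)
#   # Rows whose range column contains a given date range (@> operator):
#   db[:reservations].where(during.contains(Sequel.pg_range(Date.new(2022, 1, 1)..Date.new(2022, 1, 31), :daterange))).sql
#   # Rows whose range column overlaps another range column (&& operator):
#   db[:reservations].where(during.overlaps(Sequel.pg_range_op(:blackout))).sql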
sequel-5.63.0/spec/extensions/pg_range_spec.rb

require_relative "spec_helper"

describe "pg_range extension" do
  before(:all) do
    Sequel.extension :pg_array, :pg_range
  end
  before do
    @db = Sequel.connect('mock://postgres')
    @R = Sequel::Postgres::PGRange
    @db.extend_datasets do
      def supports_timestamp_timezones?; false end
      def supports_timestamp_usecs?; false end
      def quote_identifiers?; false end
    end
    @db.extension(:pg_array, :pg_range)
  end

  endless_range_support = RUBY_VERSION >= '2.6'
  startless_range_support = RUBY_VERSION >= '2.7'

  it "should set up conversion procs correctly" do
    cp = @db.conversion_procs
    cp[3904].call("[1,2]").must_equal @R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'int4range')
    cp[3906].call("[1,2]").must_equal @R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'numrange')
    cp[3908].call("[2011-01-02 10:20:30,2011-02-03 10:20:30)").must_equal @R.new(Time.local(2011, 1, 2, 10, 20, 30),Time.local(2011, 2, 3, 10, 20, 30), :exclude_begin=>false, :exclude_end=>true, :db_type=>'tsrange')
    cp[3910].call("[2011-01-02 10:20:30,2011-02-03 10:20:30)").must_equal @R.new(Time.local(2011, 1, 2, 10, 20, 30),Time.local(2011, 2, 3, 10, 20, 30), :exclude_begin=>false, :exclude_end=>true, :db_type=>'tstzrange')
    cp[3912].call("[2011-01-02,2011-02-03)").must_equal @R.new(Date.new(2011, 1, 2),Date.new(2011, 2, 3), :exclude_begin=>false, :exclude_end=>true, :db_type=>'daterange')
    cp[3926].call("[1,2]").must_equal @R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'int8range')
  end

  it "should set up conversion procs for arrays correctly" do
    cp = @db.conversion_procs
    cp[3905].call("{\"[1,2]\"}").must_equal [@R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'int4range')]
    cp[3907].call("{\"[1,2]\"}").must_equal [@R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'numrange')]
    cp[3909].call("{\"[2011-01-02 10:20:30,2011-02-03 10:20:30)\"}").must_equal [@R.new(Time.local(2011, 1, 2, 10, 20, 30),Time.local(2011, 2, 3, 10, 20, 30), :exclude_begin=>false, :exclude_end=>true, :db_type=>'tsrange')]
    cp[3911].call("{\"[2011-01-02 10:20:30,2011-02-03 10:20:30)\"}").must_equal [@R.new(Time.local(2011, 1, 2, 10, 20, 30),Time.local(2011, 2, 3, 10, 20, 30), :exclude_begin=>false, :exclude_end=>true, :db_type=>'tstzrange')]
    cp[3913].call("{\"[2011-01-02,2011-02-03)\"}").must_equal [@R.new(Date.new(2011, 1, 2),Date.new(2011, 2, 3), :exclude_begin=>false, :exclude_end=>true, :db_type=>'daterange')]
    cp[3927].call("{\"[1,2]\"}").must_equal [@R.new(1,2, :exclude_begin=>false, :exclude_end=>false, :db_type=>'int8range')]
  end

  it "should literalize Range instances to strings correctly" do
    @db.literal(Date.new(2011, 1, 2)...Date.new(2011, 3, 2)).must_equal "'[2011-01-02,2011-03-02)'"
    @db.literal(Time.local(2011, 1, 2, 10, 20, 30)...Time.local(2011, 2, 3, 10, 20, 30)).must_equal "'[2011-01-02 10:20:30,2011-02-03 10:20:30)'"
    @db.literal(DateTime.new(2011, 1, 2, 10, 20,
30)...DateTime.new(2011, 2, 3, 10, 20, 30)).must_equal "'[2011-01-02 10:20:30,2011-02-03 10:20:30)'" @db.literal(DateTime.new(2011, 1, 2, 10, 20, 30)...DateTime.new(2011, 2, 3, 10, 20, 30)).must_equal "'[2011-01-02 10:20:30,2011-02-03 10:20:30)'" @db.literal(1..2).must_equal "'[1,2]'" @db.literal(1.0..2.0).must_equal "'[1.0,2.0]'" @db.literal(BigDecimal('1.0')..BigDecimal('2.0')).must_equal "'[1.0,2.0]'" @db.literal(Sequel.lit('a')..Sequel.lit('z')).must_equal "'[a,z]'" @db.literal(''..'()[]",\\2').must_equal "'[\"\",\\(\\)\\[\\]\\\"\\,\\\\2]'" end it "should literalize endless Range instances to strings correctly" do @db.literal(eval('(1..)')).must_equal "'[1,]'" @db.literal(eval('(1...)')).must_equal "'[1,)'" end if endless_range_support it "should literalize startless Range instances to strings correctly" do @db.literal(eval('(..1)')).must_equal "'[,1]'" @db.literal(eval('(...1)')).must_equal "'[,1)'" end if startless_range_support it "should literalize startless, endless Range instances to strings correctly" do @db.literal(eval('nil..nil')).must_equal "'[,]'" @db.literal(eval('nil...nil')).must_equal "'[,)'" end if startless_range_support it "should literalize PGRange instances to strings correctly" do @db.literal(@R.new(1, 2)).must_equal "'[1,2]'" @db.literal(@R.new(true, false)).must_equal "'[true,false]'" @db.literal(@R.new(1, 2, :exclude_begin=>true)).must_equal "'(1,2]'" @db.literal(@R.new(1, 2, :exclude_end=>true)).must_equal "'[1,2)'" @db.literal(@R.new(nil, 2)).must_equal "'[,2]'" @db.literal(@R.new(1, nil)).must_equal "'[1,]'" @db.literal(@R.new(1, 2, :db_type=>'int8range')).must_equal "int8range(1,2,'[]')" @db.literal(@R.new(1, 2, :exclude_begin=>true, :db_type=>'int8range')).must_equal "int8range(1,2,'(]')" @db.literal(@R.new(1, 2, :exclude_end=>true, :db_type=>'int8range')).must_equal "int8range(1,2,'[)')" @db.literal(@R.new(nil, nil, :empty=>true)).must_equal "'empty'" @db.literal(@R.new(nil, nil, :empty=>true, :db_type=>'int8range')).must_equal "'empty'::int8range" @db.literal(@R.new("", 2)).must_equal "'[\"\",2]'" end it "should not affect literalization of custom objects" do o = Object.new def o.sql_literal(ds) 'v' end @db.literal(o).must_equal 'v' end it "should support using Range instances as bound variables" do @db.bound_variable_arg(1..2, nil).must_equal "[1,2]" end it "should support using endless Range instances as bound variables" do @db.bound_variable_arg(eval('(1..)'), nil).must_equal "[1,]" @db.bound_variable_arg(eval('(1...)'), nil).must_equal "[1,)" end if endless_range_support it "should support using startless Range instances as bound variables" do @db.bound_variable_arg(eval('(..1)'), nil).must_equal "[,1]" @db.bound_variable_arg(eval('(...1)'), nil).must_equal "[,1)" end if startless_range_support it "should support using startless, endless Range instances as bound variables" do @db.bound_variable_arg(eval('nil..nil'), nil).must_equal "[,]" @db.bound_variable_arg(eval('nil...nil'), nil).must_equal "[,)" end if startless_range_support it "should support using PGRange instances as bound variables" do @db.bound_variable_arg(@R.new(1, 2), nil).must_equal "[1,2]" end it "should support using arrays of Range instances as bound variables" do @db.bound_variable_arg([1..2,2...3], nil).must_equal '{"[1,2]","[2,3)"}' end it "should support using arrays of endless Range instances as bound variables" do @db.bound_variable_arg([eval('(1..)'), eval('(2..)')], nil).must_equal '{"[1,]","[2,]"}' end if endless_range_support it "should support using arrays of PGRange 
instances as bound variables" do @db.bound_variable_arg([@R.new(1, 2),@R.new(2, 3)], nil).must_equal '{"[1,2]","[2,3]"}' end it "should parse range types from the schema correctly" do @db.fetch = [{:name=>'id', :db_type=>'integer'}, {:name=>'i4', :db_type=>'int4range'}, {:name=>'i8', :db_type=>'int8range'}, {:name=>'n', :db_type=>'numrange'}, {:name=>'d', :db_type=>'daterange'}, {:name=>'ts', :db_type=>'tsrange'}, {:name=>'tz', :db_type=>'tstzrange'}] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:integer, :int4range, :int8range, :numrange, :daterange, :tsrange, :tstzrange] end it "should parse arrays of range types from the schema correctly" do @db.fetch = [{:name=>'id', :db_type=>'integer'}, {:name=>'i4', :db_type=>'int4range[]'}, {:name=>'i8', :db_type=>'int8range[]'}, {:name=>'n', :db_type=>'numrange[]'}, {:name=>'d', :db_type=>'daterange[]'}, {:name=>'ts', :db_type=>'tsrange[]'}, {:name=>'tz', :db_type=>'tstzrange[]'}] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:integer, :int4range_array, :int8range_array, :numrange_array, :daterange_array, :tsrange_array, :tstzrange_array] end it "should set :ruby_default schema entries if default value is recognized" do @db.fetch = [{:name=>'id', :db_type=>'integer', :default=>'1'}, {:oid=>3904, :name=>'t', :db_type=>'int4range', :default=>"'[1,5)'::int4range"}, {:oid=>113904, :name=>'t', :db_type=>'int4range', :default=>"'[1,5)'::int4range"}] s = @db.schema(:items) s[1][1][:ruby_default].must_equal Sequel::Postgres::PGRange.new(1, 5, :exclude_end=>true, :db_type=>'int4range') s[2][1][:ruby_default].must_be_nil end it "should work correctly in hashes" do h = Hash.new(1) h[@R.new(1, 2)] = 2 h[@R.new(nil, nil, :empty => true)] = 3 h[@R.new(1, 2)].must_equal 2 h[@R.new(1, 3)].must_equal 1 h[@R.new(2, 2)].must_equal 1 h[@R.new(1, 2, :exclude_begin => true)].must_equal 1 h[@R.new(1, 2, :exclude_end => true)].must_equal 1 h[@R.new(1, 2, :db_type => :int)].must_equal 1 h[@R.new(nil, nil, :empty => true)].must_equal 3 h[@R.new(nil, nil, :empty => true, :db_type => :int)].must_equal 1 end describe "database typecasting" do before do @o = @R.new(1, 2, :db_type=>'int4range') @o2 = @R.new(1, 2, :db_type=>'int8range') @eo = @R.new(nil, nil, :empty=>true, :db_type=>'int4range') @eo2 = @R.new(nil, nil, :empty=>true, :db_type=>'int8range') end it "should handle multiple range types" do %w'int4 int8 num date ts tstz'.each do |i| @db.typecast_value(:"#{i}range", @R.new(1, 2, :db_type=>"#{i}range")).must_equal @R.new(1, 2, :db_type=>"#{i}range") end end it "should handle arrays of multiple range types" do %w'int4 int8 num date ts tstz'.each do |i| @db.typecast_value(:"#{i}range_array", [@R.new(1, 2, :db_type=>"#{i}range")]).class.must_equal(Sequel::Postgres::PGArray) @db.typecast_value(:"#{i}range_array", [@R.new(1, 2, :db_type=>"#{i}range")]).must_equal [@R.new(1, 2, :db_type=>"#{i}range")] end end it "should return PGRange value as is if they have the same subtype" do @db.typecast_value(:int4range, @o).must_be_same_as(@o) end it "should return new PGRange value if they have a different subtype" do @db.typecast_value(:int8range, @o).wont_be_same_as(@o) @db.typecast_value(:int8range, @o).must_equal @o2 end it "should return new PGRange value if they have a different subtype and value is empty" do @db.typecast_value(:int8range, @eo).must_equal @eo2 end it "should return new PGRange value if given a Range" do @db.typecast_value(:int4range, 1..2).must_equal @o @db.typecast_value(:int4range, 1..2).wont_equal @o2 @db.typecast_value(:int8range, 
1..2).must_equal @o2 end it "should parse a string argument as the PostgreSQL output format" do @db.typecast_value(:int4range, '[1,2]').must_equal @o end it "should raise errors for too long string input if configured" do proc{@db.typecast_value(:int4range, '[1,'+'1'*100+']')}.must_raise(Sequel::InvalidValue) @db.check_string_typecast_bytesize = false @db.typecast_value(:int4range, '[1,'+'1'*100+']').must_equal @R.new(1, Integer('1'*100), :db_type=>'int4range') end it "should raise errors for unparsable formats" do proc{@db.typecast_value(:int8range, 'foo')}.must_raise(Sequel::InvalidValue) end it "should raise errors for unhandled values" do proc{@db.typecast_value(:int4range, 1)}.must_raise(Sequel::InvalidValue) end end it "should support registering custom range types" do @db.register_range_type('foorange') @db.typecast_value(:foorange, 1..2).must_be_kind_of(@R) @db.fetch = [{:name=>'id', :db_type=>'foorange'}] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:foorange] end it "should support using a block as a custom conversion proc given as block" do @db.register_range_type('foo2range'){|s| (s*2).to_i} @db.typecast_value(:foo2range, '[1,2]').must_be :==, (11..22) end it "should support using a block as a custom conversion proc given as :converter option" do @db.register_range_type('foo3range', :converter=>proc{|s| (s*2).to_i}) @db.typecast_value(:foo3range, '[1,2]').must_be :==, (11..22) end it "should support using an existing scaler conversion proc via the :subtype_oid option" do @db.register_range_type('foo4range', :subtype_oid=>16) @db.typecast_value(:foo4range, '[t,f]').must_equal @R.new(true, false, :db_type=>'foo4range') end it "should raise an error if using :subtype_oid option with unexisting scalar conversion proc" do proc{@db.register_range_type('fooirange', :subtype_oid=>0)}.must_raise(Sequel::Error) end it "should raise an error if using :converter option and a block argument" do proc{@db.register_range_type('fooirange', :converter=>proc{}){}}.must_raise(Sequel::Error) end it "should raise an error if using :subtype_oid option and a block argument" do proc{@db.register_range_type('fooirange', :subtype_oid=>16){}}.must_raise(Sequel::Error) end it "should raise an error if using :converter option and a :subtype_oid option " do proc{@db.register_range_type('fooirange', :converter=>proc{}, :subtype_oid=>16)}.must_raise(Sequel::Error) end it "should support registering custom types with :oid option" do @db.register_range_type('foo5range', :oid=>331) @db.conversion_procs[331].call('[1,3)').must_be_kind_of(@R) end it "should not support registering custom range types on a per-Database basis for frozen databases" do @db.freeze proc{@db.register_range_type('banana', :oid=>7865){|s| s}}.must_raise RuntimeError, TypeError end it "should support registering custom range types on a per-Database basis" do @db.register_range_type('banana', :oid=>7865){|s| s} @db.typecast_value(:banana, '[1,2]').class.must_equal(Sequel::Postgres::PGRange) @db.fetch = [{:name=>'id', :db_type=>'banana'}] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:banana] @db.conversion_procs.must_include(7865) @db.respond_to?(:typecast_value_banana, true).must_equal true db = Sequel.connect('mock://postgres', :quote_identifiers=>false) db.extend_datasets(Module.new{def supports_timestamp_timezones?; false; end; def supports_timestamp_usecs?; false; end}) db.extension(:pg_range) db.fetch = [{:name=>'id', :db_type=>'banana'}] db.schema(:items).map{|e| e[1][:type]}.must_equal [nil] 
db.conversion_procs.wont_include(7865) db.respond_to?(:typecast_value_banana, true).must_equal false end it "should automatically look up the range and subtype oids when registering per-Database types" do @db.fetch = [[{:rngsubtype=>21, :rngtypid=>7866}], [{:name=>'id', :db_type=>'banana'}]] @db.register_range_type('banana', :subtype_typecast=>:integer) @db.sqls.must_equal ["SELECT rngtypid, rngsubtype FROM pg_range INNER JOIN pg_type ON (pg_type.oid = pg_range.rngtypid) WHERE (typname = 'banana') LIMIT 1"] @db.schema(:items).map{|e| e[1][:type]}.must_equal [:banana] @db.conversion_procs[7866].call("[1,3)").must_be :==, (1...3) @db.typecast_value(:banana, '[1,2]').must_be :==, (1..2) end it "should not automatically look up oids if given both subtype and range oids" do @db.register_range_type('banana', :oid=>7866, :subtype_oid=>21) @db.sqls.must_equal [] @db.conversion_procs[7866].call("[1,3)").must_be :==, (1...3) @db.typecast_value(:banana, '[1,2]').must_be :==, (1..2) end it "should not automatically look up oids if given range oid and block" do @db.register_range_type('banana', :oid=>7866){|s| s.to_i} @db.sqls.must_equal [] @db.conversion_procs[7866].call("[1,3)").must_be :==, (1...3) @db.typecast_value(:banana, '[1,2]').must_be :==, (1..2) end it "should return correct results for Database#schema_type_class" do @db.schema_type_class(:int4range).must_equal Sequel::Postgres::PGRange @db.schema_type_class(:integer).must_equal Integer end describe "parser" do before do @p = @R::Parser.new('int4range', proc(&:to_i)) @sp = @R::Parser.new(nil) end it "should have db_type method to return the database type string" do @p.db_type.must_equal 'int4range' end it "should have converter method which returns a callable used for conversion" do @p.converter.call('1').must_equal 1 end it "should have call parse input string argument into PGRange instance" do @p.call('[1,2]').must_equal @R.new(1, 2, :db_type=>'int4range') end it "should handle empty ranges" do @p.call('empty').must_equal @R.new(nil, nil, :empty=>true, :db_type=>'int4range') end it "should handle exclusive beginnings and endings" do @p.call('(1,3]').must_equal @R.new(1, 3, :exclude_begin=>true, :db_type=>'int4range') @p.call('[1,3)').must_equal @R.new(1, 3, :exclude_end=>true, :db_type=>'int4range') @p.call('(1,3)').must_equal @R.new(1, 3, :exclude_begin=>true, :exclude_end=>true, :db_type=>'int4range') end it "should handle unbounded beginnings and endings" do @p.call('[,2]').must_equal @R.new(nil, 2, :db_type=>'int4range') @p.call('[1,]').must_equal @R.new(1, nil, :db_type=>'int4range') @p.call('[,]').must_equal @R.new(nil, nil, :db_type=>'int4range') end it "should unescape quoted beginnings and endings" do @sp.call('["\\\\ \\"","\\" \\\\"]').must_equal @R.new("\\ \"", "\" \\") end it "should treat empty quoted string not as unbounded" do @sp.call('["","z"]').must_equal @R.new("", "z") @sp.call('["a",""]').must_equal @R.new("a", "") @sp.call('["",""]').must_equal @R.new("", "") end end describe "a PGRange instance" do before do @r1 = @R.new(1, 2) @r2 = @R.new(3, nil, :exclude_begin=>true, :db_type=>'int4range') @r3 = @R.new(nil, 4, :exclude_end=>true, :db_type=>'int8range') end it "should have #begin return the beginning of the range" do @r1.begin.must_equal 1 @r2.begin.must_equal 3 @r3.begin.must_be_nil end it "should have #end return the end of the range" do @r1.end.must_equal 2 @r2.end.must_be_nil @r3.end.must_equal 4 end it "should have #db_type return the range's database type" do @r1.db_type.must_be_nil @r2.db_type.must_equal 
'int4range' @r3.db_type.must_equal 'int8range' end it "should be able to be created by Sequel.pg_range" do Sequel.pg_range(1..2).must_equal @r1 end it "should have Sequel.pg_range be able to take a database type" do Sequel.pg_range(1..2, :int4range).must_equal @R.new(1, 2, :db_type=>:int4range) end it "should have Sequel.pg_range return a PGRange as is" do a = Sequel.pg_range(1..2) Sequel.pg_range(a).must_be_same_as(a) end it "should have Sequel.pg_range return a new PGRange if the database type differs" do a = Sequel.pg_range(1..2, :int4range) b = Sequel.pg_range(a, :int8range) a.to_range.must_equal b.to_range a.wont_be_same_as(b) a.db_type.must_equal :int4range b.db_type.must_equal :int8range end it "should have #initialize raise if requesting an empty range with beginning or ending" do proc{@R.new(1, nil, :empty=>true)}.must_raise(Sequel::Error) proc{@R.new(nil, 2, :empty=>true)}.must_raise(Sequel::Error) proc{@R.new(nil, nil, :empty=>true, :exclude_begin=>true)}.must_raise(Sequel::Error) proc{@R.new(nil, nil, :empty=>true, :exclude_end=>true)}.must_raise(Sequel::Error) end it "should quack like a range" do @r1.cover?(1.5).must_equal true @r1.cover?(2.5).must_equal false @r1.first(1).must_equal [1] @r1.last(1).must_equal [2] @r1.to_a.must_equal [1, 2] @r1.first.must_equal 1 @r1.last.must_equal 2 a = [] @r1.step{|x| a << x} a.must_equal [1, 2] end it "should have cover? handle empty, unbounded, and exclusive beginning ranges" do @R.empty.cover?(1).must_equal false r = @R.new(1, nil) r.cover?(0).must_equal false r.cover?(1).must_equal true r.cover?(2).must_equal true r.cover?(3).must_equal true r = @R.new(nil, 2) r.cover?(0).must_equal true r.cover?(1).must_equal true r.cover?(2).must_equal true r.cover?(3).must_equal false r = @R.new(1, 2, :exclude_begin=>true) r.cover?(0).must_equal false r.cover?(1).must_equal false r.cover?(2).must_equal true r.cover?(3).must_equal false r = @R.new(1, 2, :exclude_end=>true) r.cover?(0).must_equal false r.cover?(1).must_equal true r.cover?(2).must_equal false r.cover?(3).must_equal false end it "should only consider PGRanges equal if they have the same db_type" do @R.new(1, 2, :db_type=>'int4range').must_equal @R.new(1, 2, :db_type=>'int4range') @R.new(1, 2, :db_type=>'int8range').wont_equal @R.new(1, 2, :db_type=>'int4range') end it "should only consider empty PGRanges equal with other empty PGRanges" do @R.new(nil, nil, :empty=>true).must_equal @R.new(nil, nil, :empty=>true) @R.new(nil, nil, :empty=>true).wont_equal @R.new(nil, nil) @R.new(nil, nil).wont_equal @R.new(nil, nil, :empty=>true) end it "should only consider PGRanges equal if they have the same bounds" do @R.new(1, 2).must_equal @R.new(1, 2) @R.new(1, 2).wont_equal @R.new(1, 3) end it "should only consider PGRanges equal if they have the same bound exclusions" do @R.new(1, 2, :exclude_begin=>true).must_equal @R.new(1, 2, :exclude_begin=>true) @R.new(1, 2, :exclude_end=>true).must_equal @R.new(1, 2, :exclude_end=>true) @R.new(1, 2, :exclude_begin=>true).wont_equal @R.new(1, 2, :exclude_end=>true) @R.new(1, 2, :exclude_end=>true).wont_equal @R.new(1, 2, :exclude_begin=>true) end it "should consider PGRanges equal with a Range they represent" do @R.new(1, 2).must_be :==, (1..2) @R.new(1, 2, :exclude_end=>true).must_be :==, (1...2) @R.new(1, 3).wont_be :==, (1..2) @R.new(1, 2, :exclude_end=>true).wont_be :==, (1..2) end it "should not consider a PGRange equal with a Range if it can't be expressed as a range" do @R.new(nil, nil).wont_be :==, (1..2) if startless_range_support @R.new(nil, nil, 
:exclude_begin=>true).wont_be :==, eval('nil..nil') end end it "should consider PGRanges equal with a endless Range they represent" do @R.new(1, nil).must_be :==, eval('(1..)') @R.new(1, nil, :exclude_end=>true).must_be :==, eval('(1...)') @R.new(1, nil).wont_be :==, eval('(1...)') @R.new(1, nil, :exclude_end=>true).wont_be :==, eval('(1..)') @R.new(1, nil).wont_be :==, eval('(2..)') @R.new(1, nil, :exclude_end=>true).wont_be :==, eval('(2...)') end if endless_range_support it "should consider PGRanges equal with a startless Range they represent" do @R.new(nil, 1).must_be :==, eval('(..1)') @R.new(nil, 1, :exclude_end=>true).must_be :==, eval('(...1)') @R.new(nil, 1).wont_be :==, eval('(...1)') @R.new(nil, 1, :exclude_end=>true).wont_be :==, eval('(..1)') @R.new(nil, 1).wont_be :==, eval('(..2)') @R.new(nil, 1, :exclude_end=>true).wont_be :==, eval('(...2)') end if startless_range_support it "should consider PGRanges equal with a startless, endless Range they represent" do @R.new(nil, nil).must_be :==, eval('nil..nil') @R.new(nil, nil, :exclude_end=>true).must_be :==, eval('nil...nil') @R.new(nil, nil).wont_be :==, eval('nil...nil') @R.new(nil, nil, :exclude_end=>true).wont_be :==, eval('nil..nil') @R.new(nil, nil).wont_be :==, eval('nil..1') @R.new(nil, nil).wont_be :==, eval('1..nil') @R.new(1, nil).wont_be :==, eval('nil..nil') end if startless_range_support it "should not consider a PGRange equal to other objects" do @R.new(nil, nil).wont_equal 1 end it "should have #=== be true if given an equal PGRange" do @R.new(1, 2).must_be :===, @R.new(1, 2) @R.new(1, 2).wont_be :===, @R.new(1, 3) end it "should have #=== be true if it would be true for the Range represented by the PGRange" do @R.new(1, 2).must_be :===, 1.5 @R.new(1, 2).wont_be :===, 2.5 end it "should have #=== be false if the PGRange cannot be represented by a Range" do @R.new(1, 2, :exclude_begin=>true).wont_be :===, 1.5 end it "should have #empty? indicate whether the range is empty" do @R.empty.must_be :empty? @R.new(1, 2).wont_be :empty? end it "should have #exclude_begin? 
and #exclude_end indicate whether the beginning or ending of the range is excluded" do @r1.exclude_begin?.must_equal false @r1.exclude_end?.must_equal false @r2.exclude_begin?.must_equal true @r2.exclude_end?.must_equal false @r3.exclude_begin?.must_equal false @r3.exclude_end?.must_equal true end it "should have #to_range raise an exception if the PGRange cannot be represented by a Range" do proc{@R.new(0, 1, :exclude_begin=>true).to_range}.must_raise(Sequel::Error) proc{@R.empty.to_range}.must_raise(Sequel::Error) end it "should have #to_range return the represented range" do @r1.to_range.must_be :==, (1..2) end it "should have #to_range return the represented range for endless ranges" do @R.new(1, nil).to_range.must_be :==, eval('1..') end if endless_range_support it "should have #to_range raise an exception for endless ranges" do proc{@R.new(1, nil).to_range}.must_raise(Sequel::Error) end unless endless_range_support it "should have #to_range return the represented range for startless ranges" do @R.new(nil, 1).to_range.must_be :==, eval('..1') end if startless_range_support it "should have #to_range raise an exception for startless ranges" do proc{@R.new(nil, 1).to_range}.must_raise(Sequel::Error) end unless startless_range_support it "should have #to_range return the represented range for startless, endless ranges" do @R.new(nil, nil).to_range.must_be :==, eval('nil..nil') end if startless_range_support it "should have #to_range raise an exception for startless, endless ranges" do proc{@R.new(nil, nil).to_range}.must_raise(Sequel::Error) end unless startless_range_support it "should have #to_range cache the returned value" do @r1.to_range.must_be_same_as(@r1.to_range) end it "should have #unbounded_begin? and #unbounded_end indicate whether the beginning or ending of the range is unbounded" do @r1.unbounded_begin?.must_equal false @r1.unbounded_end?.must_equal false @r2.unbounded_begin?.must_equal false @r2.unbounded_end?.must_equal true @r3.unbounded_begin?.must_equal true @r3.unbounded_end?.must_equal false end it "should have #valid_ruby_range? return true if the PGRange can be represented as a Range" do @r1.valid_ruby_range?.must_equal true @R.new(1, 2, :exclude_end=>true).valid_ruby_range?.must_equal true end it "should have #valid_ruby_range? 
return false if the PGRange cannot be represented as a Range" do
      @R.new(0, 1, :exclude_begin=>true).valid_ruby_range?.must_equal false
      @R.empty.valid_ruby_range?.must_equal false
    end

    it "should have #valid_ruby_range? return #{endless_range_support} for endless ranges" do
      @R.new(1, nil).valid_ruby_range?.must_equal(endless_range_support)
    end

    it "should have #valid_ruby_range? return #{startless_range_support} for startless ranges" do
      @R.new(nil, 1).valid_ruby_range?.must_equal(startless_range_support)
    end

    it "should have #valid_ruby_range? return #{startless_range_support} for startless, endless ranges" do
      @R.new(nil, nil).valid_ruby_range?.must_equal(startless_range_support)
    end
  end
end

sequel-5.63.0/spec/extensions/pg_row_ops_spec.rb

require_relative "spec_helper"

Sequel.extension :pg_array, :pg_array_ops, :pg_row, :pg_row_ops

describe "Sequel::Postgres::PGRowOp" do
  before do
    @db = Sequel.connect('mock://postgres')
    @db.extend_datasets{def quote_identifiers?; false end}
    @a = Sequel.pg_row_op(:a)
  end

  it "#[] should access members of the composite type" do
    @db.literal(@a[:b]).must_equal "(a).b"
  end

  it "#[] should be chainable" do
    @db.literal(@a[:b][:c]).must_equal "((a).b).c"
  end

  it "#[] should support array access if not given an identifier" do
    @db.literal(@a[:b][1]).must_equal "((a).b)[1]"
  end

  it "#[] should be chainable with array access" do
    @db.literal(@a[1][:b]).must_equal "((a)[1]).b"
  end

  it "#splat should return a splatted argument inside parentheses" do
    @db.literal(@a.splat).must_equal "(a.*)"
  end

  it "#splat(type) should return a splatted argument cast to given type" do
    @db.literal(@a.splat(:b)).must_equal "(a.*)::b"
  end

  it "#splat should not work on an already accessed composite type" do
    proc{@a[:a].splat(:b)}.must_raise(Sequel::Error)
  end

  it "#* should reference all members of the composite type as separate columns if given no arguments" do
    @db.literal(@a.*).must_equal "(a).*"
    @db.literal(@a[:b].*).must_equal "((a).b).*"
  end

  it "#* should use a multiplication operation if any arguments are given" do
    @db.literal(@a.*(1)).must_equal "(a * 1)"
    @db.literal(@a[:b].*(1)).must_equal "((a).b * 1)"
  end

  it "#pg_row should be callable on literal strings" do
    @db.literal(Sequel.lit('a').pg_row[:b]).must_equal "(a).b"
  end

  it "#pg_row should be callable on Sequel expressions" do
    @db.literal(Sequel.function(:a).pg_row[:b]).must_equal "(a()).b"
  end

  it "Sequel.pg_row should work as well if the pg_row extension is loaded" do
    @db.literal(Sequel.pg_row(Sequel.function(:a))[:b]).must_equal "(a()).b"
  end

  it "Sequel.pg_row(array).op should work" do
    @db.literal(Sequel.pg_row([1, 2, 3]).op[:f1]).must_equal "(ROW(1, 2, 3)).f1"
  end

  it "Sequel::Postgres::PGRow::HashRow#op should work" do
    @db.literal(Sequel::Postgres::PGRow::HashRow.subclass(:rowx, [:a]).new(:a=>1).op[:a]).must_equal "(ROW(1)::rowx).a"
  end
end
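# --- Added illustration (not part of the original spec file) ---
# A minimal usage sketch for the composite-type accessors exercised above,
# kept in comments so the spec archive stays side-effect free. The
# :company table and its composite :address column are hypothetical; the
# mock adapter only generates SQL, as in the specs.
#
#   db = Sequel.connect('mock://postgres')
#   db.extension :pg_row, :pg_row_ops
#   address = Sequel.pg_row_op(:address)
#   # Select a single member of the composite column, e.g. (address).city:
#   db[:company].select(address[:city]).sql
#   # Expand all members of the composite column as separate columns:
#   db[:company].select(address.*).sql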
sequel-5.63.0/spec/extensions/pg_row_plugin_spec.rb

require_relative "spec_helper"

describe "Sequel::Plugins::PgRow" do
  before do
    @db = Sequel.connect('mock://postgres')
    @db.extend_datasets{def quote_identifiers?; false end}
    @db.extension(:pg_array)
    @c = Class.new(Sequel::Model(@db[:address]))
    @c.columns :street, :city
    @c.db_schema[:street][:type] = :string
    @c.db_schema[:city][:type] = :string
    @db.fetch = [[{:oid=>1098, :typrelid=>2, :typarray=>3}], [{:attname=>'street', :atttypid=>1324}, {:attname=>'city', :atttypid=>1324}]]
    @c.plugin :pg_row
    @c2 = Class.new(Sequel::Model(@db[:company]))
    @c2.columns :address
    @c2.db_schema[:address].merge!(:type=>:pg_row_address)
  end

  it "should have schema_type_class include Sequel::Model" do
    @c2.new.send(:schema_type_class, :address).must_equal @c
    @db.conversion_procs[1098].call('(123 Foo St,Bar City)').must_equal @c.load(:street=>'123 Foo St', :city=>'Bar City')
  end

  it "should set up a parser for the type that creates a model class" do
    @db.conversion_procs[1098].call('(123 Foo St,Bar City)').must_equal @c.load(:street=>'123 Foo St', :city=>'Bar City')
  end

  it "should set up type casting for the type" do
    @c2.new(:address=>{'street'=>123, 'city'=>:Bar}).address.must_equal @c.load(:street=>'123', :city=>'Bar')
  end

  it "should return model instances as is when typecasting to rows" do
    o = @c.load(:street=>'123', :city=>'Bar')
    @c2.new(:address=>o).address.must_be_same_as(o)
  end

  it "should handle literalizing model instances" do
    @db.literal(@c.load(:street=>'123 Foo St', :city=>'Bar City')).must_equal "ROW('123 Foo St', 'Bar City')::address"
  end

  it "should handle literalizing model instances when model table is aliased" do
    @c.dataset = @c.dataset.from(Sequel.as(:address, :a))
    @db.literal(@c.load(:street=>'123 Foo St', :city=>'Bar City')).must_equal "ROW('123 Foo St', 'Bar City')::address"
  end

  it "should handle model instances in bound variables" do
    @db.bound_variable_arg(1, nil).must_equal 1
    @db.bound_variable_arg(@c.load(:street=>'123 Foo St', :city=>'Bar City'), nil).must_equal '("123 Foo St","Bar City")'
  end

  it "should handle model instances in arrays of bound variables" do
    @db.bound_variable_arg(1, nil).must_equal 1
    @db.bound_variable_arg(Sequel.pg_array([@c.load(:street=>'123 Foo St', :city=>'Bar City')]), nil).must_equal '{"(\\"123 Foo St\\",\\"Bar City\\")"}'
  end

  it "should allow inserting just this model value" do
    @c2.dataset.insert_sql(@c.load(:street=>'123', :city=>'Bar')).must_equal "INSERT INTO company VALUES (ROW('123', 'Bar')::address)"
  end

  it "should work when loaded into models without a dataset" do
    @db = Sequel.connect('mock://postgres')
    @db.extend_datasets{def quote_identifiers?; false end}
    @db.extension(:pg_array)
    @c = Class.new(Sequel::Model(@db))
    @c.plugin :pg_row
    @c.set_dataset(@db[:address])
    @c.columns :street, :city
    @c.db_schema[:street][:type] = :string
    @c.db_schema[:city][:type] = :string
    @db.fetch = [[{:oid=>1098, :typrelid=>2, :typarray=>3}], [{:attname=>'street', :atttypid=>1324}, {:attname=>'city', :atttypid=>1324}]]
    @c.register_row_type
    @c2 = Class.new(Sequel::Model(@db[:company]))
    @c2.columns :address
    @c2.db_schema[:address].merge!(:type=>:pg_row_address)
    @c2.new.send(:schema_type_class, :address).must_equal @c
    @db.conversion_procs[1098].call('(123 Foo St,Bar City)').must_equal @c.load(:street=>'123 Foo St', :city=>'Bar City')
  end
end

sequel-5.63.0/spec/extensions/pg_row_spec.rb

require_relative "spec_helper"

describe "pg_row extension" do
  before do
    @db = Sequel.connect('mock://postgres')
    @db.extend_datasets{def quote_identifiers?; false end}
    @db.extension(:pg_array, :pg_row)
    @m = Sequel::Postgres::PGRow
    @db.sqls
  end

  it "should parse record objects as arrays" do
    a = @db.conversion_procs[2249].call("(a,b,c)")
    a.class.must_equal(@m::ArrayRow)
    a.to_a.must_be_kind_of(Array)
    a[0].must_equal 'a'
    a.must_equal %w'a b c'
    a.db_type.must_be_nil
    @db.literal(a).must_equal "ROW('a', 'b', 'c')"
  end

  it "should parse arrays of record objects as arrays of arrays" do
    as = @db.conversion_procs[2287].call('{"(a,b,c)","(d,e,f)"}')
    as.must_equal [%w'a b c', %w'd e f']
    as.each do |a|
      a.class.must_equal(@m::ArrayRow)
      a.to_a.must_be_kind_of(Array)
      a.db_type.must_be_nil
    end
    @db.literal(as).must_equal "ARRAY[ROW('a', 'b', 'c'),ROW('d', 'e', 'f')]::record[]"
  end

  it "should not parse arrays of record objects as arrays of arrays if pg_array extension not loaded" do
    @db = Sequel.connect('mock://postgres')
    @db.extend_datasets{def quote_identifiers?; false end}
    @db.extension(:pg_row, :pg_array)
    @db.conversion_procs[2287].must_be_nil
    a = @db.conversion_procs[2249].call("(a,b,c)")
    a.class.must_equal(@m::ArrayRow)
    a.to_a.must_be_kind_of(Array)
    a[0].must_equal 'a'
    a.must_equal %w'a b c'
    a.db_type.must_be_nil
    @db.literal(a).must_equal "ROW('a', 'b', 'c')"
  end

  it "should be able to register custom parsing of row types as array-like objects" do
    klass = @m::ArrayRow.subclass(:foo)
    parser = @m::Parser.new(:converter=>klass)
    a = parser.call("(a,b,c)")
    a.class.must_equal(klass)
    a.to_a.must_be_kind_of(Array)
    a[0].must_equal 'a'
    a.must_equal %w'a b c'
    a.db_type.must_equal :foo
    @db.literal(a).must_equal "ROW('a', 'b', 'c')::foo"
  end

  it "should be able to register custom parsing of row types as hash-like objects" do
    klass = @m::HashRow.subclass(:foo, [:a, :b, :c])
    parser = @m::Parser.new(:converter=>klass, :columns=>[:a, :b, :c])
    a = parser.call("(a,b,c)")
    a.class.must_equal(klass)
    a.to_hash.must_be_kind_of(Hash)
    a[:a].must_equal 'a'
    a.must_equal(:a=>'a', :b=>'b', :c=>'c')
    a.db_type.must_equal :foo
    a.columns.must_equal [:a, :b, :c]
    @db.literal(a).must_equal "ROW('a', 'b', 'c')::foo"
  end

  it "should be able to register custom parsing of row types as hash-like objects without a database type" do
    klass = @m::HashRow.subclass(nil, [:a, :b, :c])
    parser =
@m::Parser.new(:converter=>klass, :columns=>[:a, :b, :c]) a = parser.call("(a,b,c)") a.class.must_equal(klass) a.to_hash.must_be_kind_of(Hash) a[:a].must_equal 'a' a.must_equal(:a=>'a', :b=>'b', :c=>'c') a.db_type.must_be_nil a.columns.must_equal [:a, :b, :c] @db.literal(a).must_equal "ROW('a', 'b', 'c')" end it "should raise an error if attempting to literalize a HashRow without column information" do h = @m::HashRow.call(:a=>'a', :b=>'b', :c=>'c') proc{@db.literal(h)}.must_raise(Sequel::Error) end it "should be able to manually override db_type per ArrayRow instance" do a = @m::ArrayRow.call(%w'a b c') a.db_type = :foo @db.literal(a).must_equal "ROW('a', 'b', 'c')::foo" end it "should be able to manually override db_type and columns per HashRow instance" do h = @m::HashRow.call(:a=>'a', :c=>'c', :b=>'b') h.db_type = :foo h.columns = [:a, :b, :c] @db.literal(h).must_equal "ROW('a', 'b', 'c')::foo" end it "should correctly split an empty row" do @m::Splitter.new("()").parse.must_equal [nil] end it "should correctly split a row with a single value" do @m::Splitter.new("(1)").parse.must_equal %w'1' end it "should correctly split a row with multiple values" do @m::Splitter.new("(1,2)").parse.must_equal %w'1 2' end it "should correctly NULL values when splitting" do @m::Splitter.new("(1,)").parse.must_equal ['1', nil] end it "should correctly empty string values when splitting" do @m::Splitter.new('(1,"")').parse.must_equal ['1', ''] end it "should handle quoted values when splitting" do @m::Splitter.new('("1","2")').parse.must_equal %w'1 2' end it "should handle escaped backslashes in quoted values when splitting" do @m::Splitter.new('("\\\\1","2\\\\")').parse.must_equal ['\\1', '2\\'] end it "should handle doubled quotes in quoted values when splitting" do @m::Splitter.new('("""1","2""")').parse.must_equal ['"1', '2"'] end it "should correctly convert types when parsing into an array" do @m::Parser.new(:column_converters=>[proc{|s| s*2}, proc{|s| s*3}, proc{|s| s*4}]).call("(a,b,c)").must_equal %w'aa bbb cccc' end it "should correctly convert types into hashes if columns are known" do @m::Parser.new(:columns=>[:a, :b, :c]).call("(a,b,c)").must_equal(:a=>'a', :b=>'b', :c=>'c') end it "should correctly handle type conversion when converting into hashes" do @m::Parser.new(:column_converters=>[proc{|s| s*2}, proc{|s| s*3}, proc{|s| s*4}], :columns=>[:a, :b, :c]).call("(a,b,c)").must_equal(:a=>'aa', :b=>'bbb', :c=>'cccc') end it "should correctly wrap arrays when converting" do @m::Parser.new(:converter=>proc{|s| [:foo, s]}).call("(a,b,c)").must_equal [:foo, %w'a b c'] end it "should correctly wrap hashes when converting" do @m::Parser.new(:converter=>proc{|s| [:foo, s]}, :columns=>[:a, :b, :c]).call("(a,b,c)").must_equal [:foo, {:a=>'a', :b=>'b', :c=>'c'}] end it "should have parser store reflection information" do p = @m::Parser.new(:oid=>1, :column_oids=>[2], :columns=>[:a], :converter=>Array, :typecaster=>Hash, :column_converters=>[Array]) p.oid.must_equal 1 p.column_oids.must_equal [2] p.columns.must_equal [:a] p.converter.must_equal Array p.typecaster.must_equal Hash p.column_converters.must_equal [Array] end it "should handle ArrayRows and HashRows in bound variables" do @db.bound_variable_arg(1, nil).must_equal 1 @db.bound_variable_arg(@m::ArrayRow.call(["1", "abc\\'\","]), nil).must_equal '("1","abc\\\\\'\\",")' @db.bound_variable_arg(@m::HashRow.subclass(nil, [:a, :b]).call(:a=>"1", :b=>"abc\\'\","), nil).must_equal '("1","abc\\\\\'\\",")' end it "should handle ArrayRows and HashRows in 
arrays in bound variables" do @db.bound_variable_arg(1, nil).must_equal 1 @db.bound_variable_arg([@m::ArrayRow.call(["1", "abc\\'\","])], nil).must_equal '{"(\\"1\\",\\"abc\\\\\\\\\'\\\\\\",\\")"}' @db.bound_variable_arg([@m::HashRow.subclass(nil, [:a, :b]).call(:a=>"1", :b=>"abc\\'\",")], nil).must_equal '{"(\\"1\\",\\"abc\\\\\\\\\'\\\\\\",\\")"}' end it "should handle nils in bound variables" do @db.bound_variable_arg(@m::ArrayRow.call([nil, nil]), nil).must_equal '(,)' @db.bound_variable_arg(@m::HashRow.subclass(nil, [:a, :b]).call(:a=>nil, :b=>nil), nil).must_equal '(,)' @db.bound_variable_arg([@m::ArrayRow.call([nil, nil])], nil).must_equal '{"(,)"}' @db.bound_variable_arg([@m::HashRow.subclass(nil, [:a, :b]).call(:a=>nil, :b=>nil)], nil).must_equal '{"(,)"}' end it "should allow registering row type parsers by introspecting system tables" do @db.conversion_procs[4] = p4 = proc{|s| s.to_i} @db.conversion_procs[5] = p5 = proc{|s| s * 2} @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] @db.register_row_type(:foo) @db.sqls.must_equal ["SELECT pg_type.oid, typrelid, typarray FROM pg_type WHERE ((typtype = 'c') AND (typname = 'foo')) LIMIT 1", "SELECT attname, (CASE pg_type.typbasetype WHEN 0 THEN atttypid ELSE pg_type.typbasetype END) AS atttypid FROM pg_attribute INNER JOIN pg_type ON (pg_type.oid = pg_attribute.atttypid) WHERE ((attrelid = 2) AND (attnum > 0) AND NOT attisdropped) ORDER BY attnum"] p1 = @db.conversion_procs[1] p1.columns.must_equal [:bar, :baz] p1.column_oids.must_equal [4, 5] p1.column_converters.must_equal [p4, p5] p1.oid.must_equal 1 @db.send(:schema_column_type, 'foo').must_equal :pg_row_foo @db.send(:schema_column_type, 'integer').must_equal :integer c = p1.converter c.superclass.must_equal @m::HashRow c.columns.must_equal [:bar, :baz] c.db_type.must_equal :foo p1.typecaster.must_equal c p1.call('(1,b)').must_equal(:bar=>1, :baz=>'bb') @db.typecast_value(:pg_row_foo, %w'1 b').class.must_be :<, @m::HashRow @db.typecast_value(:pg_row_foo, %w'1 b').must_equal(:bar=>'1', :baz=>'b') @db.typecast_value(:pg_row_foo, :bar=>'1', :baz=>'b').must_equal(:bar=>'1', :baz=>'b') @db.literal(p1.call('(1,b)')).must_equal "ROW(1, 'bb')::foo" end it "should allow registering row type parsers for schema qualify types" do @db.conversion_procs[4] = p4 = proc{|s| s.to_i} @db.conversion_procs[5] = p5 = proc{|s| s * 2} @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] @db.register_row_type(Sequel[:foo][:bar]) @db.sqls.must_equal ["SELECT pg_type.oid, typrelid, typarray FROM pg_type INNER JOIN pg_namespace ON ((pg_namespace.oid = pg_type.typnamespace) AND (pg_namespace.nspname = 'foo')) WHERE ((typtype = 'c') AND (typname = 'bar')) LIMIT 1", "SELECT attname, (CASE pg_type.typbasetype WHEN 0 THEN atttypid ELSE pg_type.typbasetype END) AS atttypid FROM pg_attribute INNER JOIN pg_type ON (pg_type.oid = pg_attribute.atttypid) WHERE ((attrelid = 2) AND (attnum > 0) AND NOT attisdropped) ORDER BY attnum"] p1 = @db.conversion_procs[1] p1.columns.must_equal [:bar, :baz] p1.column_oids.must_equal [4, 5] p1.column_converters.must_equal [p4, p5] p1.oid.must_equal 1 c = p1.converter c.superclass.must_equal @m::HashRow c.columns.must_equal [:bar, :baz] c.db_type.must_equal Sequel[:foo][:bar] p1.typecaster.must_equal c p1.call('(1,b)').must_equal(:bar=>1, :baz=>'bb') @db.typecast_value(:pg_row_foo__bar, %w'1 b').must_equal(:bar=>'1', :baz=>'b') 
@db.typecast_value(:pg_row_foo__bar, :bar=>'1', :baz=>'b').must_equal(:bar=>'1', :baz=>'b') @db.literal(p1.call('(1,b)')).must_equal "ROW(1, 'bb')::foo.bar" end with_symbol_splitting "should allow registering row type parsers for schema qualified type symbols" do @db.conversion_procs[4] = p4 = proc{|s| s.to_i} @db.conversion_procs[5] = p5 = proc{|s| s * 2} @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] @db.register_row_type(:foo__bar) @db.sqls.must_equal ["SELECT pg_type.oid, typrelid, typarray FROM pg_type INNER JOIN pg_namespace ON ((pg_namespace.oid = pg_type.typnamespace) AND (pg_namespace.nspname = 'foo')) WHERE ((typtype = 'c') AND (typname = 'bar')) LIMIT 1", "SELECT attname, (CASE pg_type.typbasetype WHEN 0 THEN atttypid ELSE pg_type.typbasetype END) AS atttypid FROM pg_attribute INNER JOIN pg_type ON (pg_type.oid = pg_attribute.atttypid) WHERE ((attrelid = 2) AND (attnum > 0) AND NOT attisdropped) ORDER BY attnum"] p1 = @db.conversion_procs[1] p1.columns.must_equal [:bar, :baz] p1.column_oids.must_equal [4, 5] p1.column_converters.must_equal [p4, p5] p1.oid.must_equal 1 c = p1.converter c.superclass.must_equal @m::HashRow c.columns.must_equal [:bar, :baz] c.db_type.must_equal :foo__bar p1.typecaster.must_equal c p1.call('(1,b)').must_equal(:bar=>1, :baz=>'bb') @db.typecast_value(:pg_row_foo__bar, %w'1 b').must_equal(:bar=>'1', :baz=>'b') @db.typecast_value(:pg_row_foo__bar, :bar=>'1', :baz=>'b').must_equal(:bar=>'1', :baz=>'b') @db.literal(p1.call('(1,b)')).must_equal "ROW(1, 'bb')::foo.bar" end it "should not allow registering on a frozen database" do @db.conversion_procs[4] = proc{|s| s.to_i} @db.conversion_procs[5] = proc{|s| s * 2} @db.fetch = [[], [{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] c = proc{|h| [h]} @db.freeze proc{@db.register_row_type(:foo, :converter=>c)}.must_raise RuntimeError, TypeError end it "should allow registering with a custom converter" do @db.conversion_procs[4] = proc{|s| s.to_i} @db.conversion_procs[5] = proc{|s| s * 2} @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] c = proc{|h| [h]} @db.register_row_type(:foo, :converter=>c) o = @db.conversion_procs[1].call('(1,b)') o.must_equal [{:bar=>1, :baz=>'bb'}] o.first.must_be_kind_of(Hash) end it "should allow registering with a custom typecaster" do @db.conversion_procs[4] = proc{|s| s.to_i} @db.conversion_procs[5] = proc{|s| s * 2} @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] @db.register_row_type(:foo, :typecaster=>proc{|h| {:bar=>(h[:bar]||0).to_i, :baz=>(h[:baz] || 'a')*2}}) @db.typecast_value(:pg_row_foo, %w'1 b').must_be_kind_of(Hash) @db.typecast_value(:pg_row_foo, %w'1 b').must_equal(:bar=>1, :baz=>'bb') @db.typecast_value(:pg_row_foo, :bar=>'1', :baz=>'b').must_equal(:bar=>1, :baz=>'bb') @db.typecast_value(:pg_row_foo, 'bar'=>'1', 'baz'=>'b').must_equal(:bar=>0, :baz=>'aa') @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] @db.register_row_type(:foo, :typecaster=>proc{|h| {:bar=>(h[:bar] || h['bar'] || 0).to_i, :baz=>(h[:baz] || h['baz'] || 'a')*2}}) @db.typecast_value(:pg_row_foo, %w'1 b').must_equal(:bar=>1, :baz=>'bb') @db.typecast_value(:pg_row_foo, :bar=>'1', :baz=>'b').must_equal(:bar=>1, :baz=>'bb') @db.typecast_value(:pg_row_foo,
'bar'=>'1', 'baz'=>'b').must_equal(:bar=>1, :baz=>'bb') end it "should handle nil values when converting columns" do @db.conversion_procs[5] = proc{|s| s * 2} @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}]] called = false @db.conversion_procs[4] = proc{|s| called = true; s} @db.register_row_type(:foo) @db.conversion_procs[1].call('()').must_equal(:bar=>nil) called.must_equal false end it "should register array type for row type if type has an array oid" do @db.conversion_procs[4] = proc{|s| s.to_i} @db.conversion_procs[5] = proc{|s| s * 2} @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] @db.register_row_type(:foo, :typecaster=>proc{|h| {:bar=>(h[:bar]||0).to_i, :baz=>(h[:baz] || 'a')*2}}) p3 = @db.conversion_procs[3] p3.call('{"(1,b)"}').must_equal [{:bar=>1, :baz=>'bb'}] @db.literal(p3.call('{"(1,b)"}')).must_equal "ARRAY[ROW(1, 'bb')::foo]::foo[]" @db.typecast_value(:foo_array, [{:bar=>'1', :baz=>'b'}]).must_equal [{:bar=>1, :baz=>'bb'}] end it "should not register array type for row type if type has an array oid and pg_array extension not loaded" do @db = Sequel.connect('mock://postgres') @db.extend_datasets{def quote_identifiers?; false end} @db.extension(:pg_row) @db.conversion_procs[4] = proc{|s| s.to_i} @db.conversion_procs[5] = proc{|s| s * 2} @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] @db.register_row_type(:foo, :typecaster=>proc{|h| {:bar=>(h[:bar]||0).to_i, :baz=>(h[:baz] || 'a')*2}}) @db.conversion_procs[1].call("(1,b)").must_equal(:bar=>1, :baz=>'bb') @db.conversion_procs[3].must_be_nil end it "should allow creating unregistered row types via Database#row_type" do @db.literal(@db.row_type(:foo, [1, 2])).must_equal 'ROW(1, 2)::foo' end it "should allow typecasting of registered row types via Database#row_type" do @db.conversion_procs[4] = proc{|s| s.to_i} @db.conversion_procs[5] = proc{|s| s * 2} @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] @db.register_row_type(:foo, :typecaster=>proc{|h| @m::HashRow.subclass(:foo, [:bar, :baz]).new({:bar=>(h[:bar]||0).to_i, :baz=>(h[:baz] || 'a')*2})}) @db.literal(@db.row_type(:foo, ['1', 'b'])).must_equal "ROW(1, 'bb')::foo" @db.literal(@db.row_type(:foo, {:bar=>'1', :baz=>'b'})).must_equal "ROW(1, 'bb')::foo" end it "should allow parsing when typecasting registered row types via Database#row_type" do @db.conversion_procs[4] = proc{|s| s.to_i} @db.conversion_procs[5] = proc{|s| s * 2} @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] @db.register_row_type(:foo, :typecaster=>proc{|h| @m::HashRow.subclass(:foo, [:bar, :baz]).new(:bar=>(h[:bar]||0).to_i, :baz=>(h[:baz] || 'a')*2)}) @db.literal(@db.row_type(:foo, ['1', 'b'])).must_equal "ROW(1, 'bb')::foo" end it "should raise an error if attempt to use Database#row_type with an unregistered type and hash" do proc{@db.literal(@db.row_type(:foo, {:bar=>'1', :baz=>'b'}))}.must_raise(Sequel::InvalidValue) end it "should raise an error if attempt to use Database#row_type with an unhandled type" do proc{@db.literal(@db.row_type(:foo, 1))}.must_raise(Sequel::InvalidValue) end it "should return ArrayRow and HashRow values as-is" do h = @m::HashRow.call(:a=>1) a = @m::ArrayRow.call([1]) @db.row_type(:foo, h).object_id.must_equal(h.object_id) @db.row_type(:foo,
a).object_id.must_equal(a.object_id) end it "should have Sequel.pg_row return a plain ArrayRow" do @db.literal(Sequel.pg_row([1, 2, 3])).must_equal 'ROW(1, 2, 3)' end it "should raise an error if attempting to typecast a hash for a parser without columns" do proc{@m::Parser.new.typecast(:a=>1)}.must_raise(Sequel::Error) end it "should raise an error if attempting to typecast an unhandled value for a parser" do proc{@m::Parser.new.typecast(1)}.must_raise(Sequel::Error) end it "should handle typecasting for a parser without a typecaster" do @m::Parser.new.typecast([1]).must_equal [1] end it "should raise an error if no columns are returned when registering a custom row type" do @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}]] proc{@db.register_row_type(:foo)}.must_raise(Sequel::Error) end it "should raise an error when registering a custom row type if the type is not found" do @db.fetch = [] proc{@db.register_row_type(:foo)}.must_raise(Sequel::Error) end it "should return correct results for Database#schema_type_class" do @db.conversion_procs[4] = proc{|s| s.to_i} @db.conversion_procs[5] = proc{|s| s * 2} @db.fetch = [[{:oid=>1, :typrelid=>2, :typarray=>3}], [{:attname=>'bar', :atttypid=>4}, {:attname=>'baz', :atttypid=>5}]] @db.register_row_type(:foo, :typecaster=>proc{|h| {:bar=>(h[:bar]||0).to_i, :baz=>(h[:baz] || 'a')*2}}) @db.schema_type_class(:pg_row_foo).must_equal [Sequel::Postgres::PGRow::HashRow, Sequel::Postgres::PGRow::ArrayRow] @db.schema_type_class(:integer).must_equal Integer end end

# ===== sequel-5.63.0/spec/extensions/pg_static_cache_updater_spec.rb =====

require_relative "spec_helper" describe "pg_static_cache_updater extension" do before do @db = Sequel.connect('mock://postgres') @db.extend_datasets{def quote_identifiers?; false end} def @db.listen(chan, opts={}) execute("LISTEN #{chan}") yield(*opts[:yield]) end @db.extension(:pg_static_cache_updater) @model = Class.new(Sequel::Model(@db[:table])) @model.plugin :static_cache @db.sqls end it "#create_static_cache_update_function should create a function in the database" do @db.create_static_cache_update_function @db.sqls.first.gsub(/\s+/, ' ').must_equal " CREATE FUNCTION sequel_static_cache_update() RETURNS trigger LANGUAGE plpgsql AS 'BEGIN PERFORM pg_notify(''sequel_static_cache_update'', TG_RELID::text); RETURN NULL; END ' " end it "#create_static_cache_update_function should support :channel_name and :function_name options" do @db.create_static_cache_update_function(:channel_name=>'foo', :function_name=>'bar') @db.sqls.first.gsub(/\s+/, ' ').must_equal " CREATE FUNCTION bar() RETURNS trigger LANGUAGE plpgsql AS 'BEGIN PERFORM pg_notify(''foo'', TG_RELID::text); RETURN NULL; END ' " end it "#create_static_cache_update_trigger should create a trigger for the database table" do @db.create_static_cache_update_trigger(:tab) @db.sqls.first.gsub(/\s+/, ' ').must_equal "CREATE TRIGGER sequel_static_cache_update AFTER INSERT OR UPDATE OR DELETE ON tab EXECUTE PROCEDURE
sequel_static_cache_update()" end it "#create_static_cache_update_trigger should support :trigger_name and :function_name options" do @db.create_static_cache_update_trigger(:tab, :trigger_name=>'foo', :function_name=>'bar') @db.sqls.first.gsub(/\s+/, ' ').must_equal "CREATE TRIGGER foo AFTER INSERT OR UPDATE OR DELETE ON tab EXECUTE PROCEDURE bar()" end it "#default_static_cache_update_name should return the default name for function, trigger, and channel" do @db.default_static_cache_update_name.must_equal :sequel_static_cache_update end it "#listen_for_static_cache_updates should listen for changes to model tables and reload model classes" do @db.fetch = {:v=>1234} @db.listen_for_static_cache_updates([@model], :yield=>[nil, nil, 1234]).join @db.sqls.must_equal ["SELECT CAST(CAST('table' AS regclass) AS oid) AS v LIMIT 1", "LISTEN sequel_static_cache_update", "SELECT * FROM table"] end it "#listen_for_static_cache_updates should not reload model classes if oid doesn't match" do @db.fetch = {:v=>1234} @db.listen_for_static_cache_updates([@model], :yield=>[nil, nil, 12345]).join @db.sqls.must_equal ["SELECT CAST(CAST('table' AS regclass) AS oid) AS v LIMIT 1", "LISTEN sequel_static_cache_update"] end it "#listen_for_static_cache_updates should support a single model argument" do @db.fetch = {:v=>1234} @db.listen_for_static_cache_updates(@model, :yield=>[nil, nil, 1234]).join @db.sqls.must_equal ["SELECT CAST(CAST('table' AS regclass) AS oid) AS v LIMIT 1", "LISTEN sequel_static_cache_update", "SELECT * FROM table"] end it "#listen_for_static_cache_updates should support the :channel_name option" do @db.fetch = {:v=>1234} @db.listen_for_static_cache_updates([@model], :yield=>[nil, nil, 12345], :channel_name=>:foo).join @db.sqls.must_equal ["SELECT CAST(CAST('table' AS regclass) AS oid) AS v LIMIT 1", "LISTEN foo"] end it "#listen_for_static_cache_updates should raise an error if given an empty array" do @db.fetch = {:v=>1234} proc{@db.listen_for_static_cache_updates([])}.must_raise(Sequel::Error) end it "#listen_for_static_cache_updates should raise an error if one of the models is not using the static cache plugin" do @db.fetch = {:v=>1234} proc{@db.listen_for_static_cache_updates(Class.new(Sequel::Model(@db[:table])))}.must_raise(Sequel::Error) end it "#listen_for_static_cache_updates should raise an error if the database doesn't respond to listen" do @db = Sequel.mock(:host=>'postgres') @db.extension(:pg_static_cache_updater) @db.fetch = {:v=>1234} proc{@db.listen_for_static_cache_updates(Class.new(Sequel::Model(@db[:table])))}.must_raise(Sequel::Error) end it "#listen_for_static_cache_updates should handle a :before_thread_exit option" do a = [] @db.listen_for_static_cache_updates([@model], :yield=>[nil, nil, 12345], :before_thread_exit=>proc{a << 1}).join a.must_equal [1] end it "#listen_for_static_cache_updates should call :before_thread_exit option even if listen raises an exception" do a = [] @db.listen_for_static_cache_updates([@model], :yield=>[nil, nil, 12345], :after_listen=>proc{raise ArgumentError}, :before_thread_exit=>proc{a << 1}).join a.must_equal [1] end end 
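# Usage sketch (illustrative only, not part of the spec suite): the methods
# exercised above are typically wired together in an application roughly as
# follows; the DB handle, table name, and model name here are hypothetical.
#
#   DB.extension :pg_static_cache_updater
#   DB.create_static_cache_update_function           # once per database
#   DB.create_static_cache_update_trigger(:statuses) # once per table
#
#   class Status < Sequel::Model
#     plugin :static_cache
#   end
#
#   # In the running application, reload the static cache when NOTIFY fires:
#   DB.listen_for_static_cache_updates(Status)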
# ===== sequel-5.63.0/spec/extensions/pg_timestamptz_spec.rb =====

require_relative "spec_helper" describe "pg_timestamptz extension" do before do @db = Sequel.mock(:host=>'postgres').extension :pg_timestamptz end it "should use timestamptz as default timestamp type" do @db.create_table(:t){Time :t; DateTime :tz; Time :ot, :only_time=>true} @db.sqls.must_equal ['CREATE TABLE "t" ("t" timestamptz, "tz" timestamptz, "ot" time)'] end it "should use timestamptz when casting" do @db.get(Sequel.cast('a', Time)) @db.sqls.must_equal ["SELECT CAST('a' AS timestamptz) AS \"v\" LIMIT 1"] end end

# ===== sequel-5.63.0/spec/extensions/prepared_statements_safe_spec.rb =====

require_relative "spec_helper" describe "prepared_statements_safe plugin" do before do @db = Sequel.mock(:fetch=>{:id=>1, :name=>'foo', :i=>2}, :autoid=>proc{|sql| 1}, :numrows=>1, :servers=>{:read_only=>{}}) @c = Class.new(Sequel::Model(@db[:people])) @c.columns :id, :name, :i @c.instance_variable_set(:@db_schema, {:i=>{}, :name=>{}, :id=>{:primary_key=>true}}) @c.plugin :prepared_statements_safe @p = @c.load(:id=>1, :name=>'foo', :i=>2) @db.sqls end it "should load the prepared_statements plugin" do @c.plugins.must_include(Sequel::Plugins::PreparedStatements) end it "should set default values correctly" do @c.prepared_statements_column_defaults.must_equal(:name=>nil, :i=>nil) @c.instance_variable_set(:@db_schema, {:i=>{:default=>'f(x)'}, :name=>{:ruby_default=>'foo'}, :id=>{:primary_key=>true}, :bar=>{:ruby_default=>Sequel::CURRENT_TIMESTAMP}}) Class.new(@c).prepared_statements_column_defaults.must_equal(:name=>'foo') end it "should set default values when creating" do @c.create @db.sqls.must_equal ['INSERT INTO people (i, name) VALUES (NULL, NULL)', "SELECT * FROM people WHERE (id = 1) LIMIT 1"] @c.create(:name=>'foo') @db.sqls.must_equal ["INSERT INTO people (i, name) VALUES
(NULL, 'foo')", "SELECT * FROM people WHERE (id = 1) LIMIT 1"] @c.create(:name=>'foo', :i=>2) @db.sqls.must_equal ["INSERT INTO people (i, name) VALUES (2, 'foo')", "SELECT * FROM people WHERE (id = 1) LIMIT 1"] end it "should use database default values" do @c.instance_variable_set(:@db_schema, {:i=>{:ruby_default=>2}, :name=>{:ruby_default=>'foo'}, :id=>{:primary_key=>true}}) c = Class.new(@c) c.create @db.sqls.must_equal ["INSERT INTO people (i, name) VALUES (2, 'foo')", "SELECT * FROM people WHERE (id = 1) LIMIT 1"] end it "should not set defaults for unparseable dataset default values" do @c.instance_variable_set(:@db_schema, {:i=>{:default=>'f(x)'}, :name=>{:ruby_default=>'foo'}, :id=>{:primary_key=>true}}) c = Class.new(@c) c.create @db.sqls.must_equal ["INSERT INTO people (name) VALUES ('foo')", "SELECT * FROM people WHERE (id = 1) LIMIT 1"] end it "should save all fields when updating" do @p.update(:i=>3) @db.sqls.must_equal ["UPDATE people SET name = 'foo', i = 3 WHERE (id = 1)"] end it "should have save_changes return nil without saving if the object has not been modified" do @p.save_changes.must_be_nil @db.sqls.must_equal [] end it "should work with abstract classes" do c = Class.new(Sequel::Model) c.plugin :prepared_statements_safe c1 = Class.new(c) def c1.get_db_schema; @db_schema = {:i=>{:default=>'f(x)'}, :name=>{:ruby_default=>'foo'}, :id=>{:primary_key=>true}} end c1.singleton_class.send(:private, :get_db_schema) c1.set_dataset(:people) c1.prepared_statements_column_defaults.must_equal(:name=>'foo') Class.new(c1).prepared_statements_column_defaults.must_equal(:name=>'foo') end it "should freeze prepared statement column defaults when freezing model class" do @c.freeze @c.prepared_statements_column_defaults.frozen?.must_equal true end it "should handle freezing a class without a dataset" do c = Class.new(Sequel::Model) c.plugin :prepared_statements_safe c.freeze c.prepared_statements_column_defaults.must_be_nil end end ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/prepared_statements_spec.rb�������������������������������������������0000664�0000000�0000000�00000017402�14342141206�0023645�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "prepared_statements plugin" do before do @db = Sequel.mock(:fetch=>{:id=>1, :name=>'foo', :i=>2}, :autoid=>proc{|sql| 1}, :numrows=>1, :servers=>{:read_only=>{}}) @c = Class.new(Sequel::Model(@db[:people])) @c.columns :id, :name, :i @c.set_primary_key :id @columns = "id, name, i" @c.plugin :prepared_statements @p = @c.load(:id=>1, :name=>'foo', :i=>2) @ds = @c.dataset @db.sqls end it "should work with implicit subqueries used for joined datasets" do @c.dataset = @c.dataset.from(:people, :people2) @db.sqls @c[1].must_equal @p @db.sqls.must_equal ["SELECT * FROM (SELECT * FROM people, people2) AS people WHERE (id = 1) LIMIT 1 -- read_only"] end it "should correctly lookup by primary key for dataset using subquery" do @c.dataset = @c.dataset.from(:people, :people2).from_self(:alias=>:people) @db.sqls @c[1].must_equal @p 
@db.sqls.must_equal ["SELECT * FROM (SELECT * FROM people, people2) AS people WHERE (id = 1) LIMIT 1 -- read_only"] end it "should use prepared statements for pk lookups only if default is not optimized" do @c.send(:use_prepared_statements_for_pk_lookup?).must_equal false @c.set_primary_key [:id, :name] @c.send(:use_prepared_statements_for_pk_lookup?).must_equal true @c.set_primary_key :id @c.dataset = @c.dataset.from(:people, :people2) @c.send(:use_prepared_statements_for_pk_lookup?).must_equal true @c.dataset = @db[:people].select(:id, :name, :i) @c.send(:use_prepared_statements_for_pk_lookup?).must_equal true end it "should raise Error for unsupported prepared statement types" do proc{@p.send(:use_prepared_statements_for?, :foo)}.must_raise Sequel::Error end it "should not use prepared statements for insert_select on SQLite" do @p.send(:use_prepared_statements_for?, :insert_select).must_equal true @c.dataset = Sequel.connect('mock://sqlite')[:people] @p.send(:use_prepared_statements_for?, :insert_select).must_equal false end prepared_statements_spec = Module.new do extend Minitest::Spec::DSL it "should correctly update instance" do @p.update(:name=>'bar').must_equal @c.load(:id=>1, :name=>'bar', :i => 2) @db.sqls.must_equal ["UPDATE people SET name = 'bar' WHERE (id = 1)"] end it "should correctly create instance if dataset supports insert_select" do @c.dataset_module do def supports_insert_select? true end def supports_returning?(type) true end def insert_select(h) cache_set(:_fetch, :id=>1, :name=>'foo', :i => 2) server(:default).with_sql_first(insert_select_sql(h)) end def insert_select_sql(*v) insert_sql(*v) << " RETURNING #{(opts[:returning] && !opts[:returning].empty?) ? opts[:returning].map{|c| literal(c)}.join(', ') : '*'}" end end @c.create(:name=>'foo').must_equal @c.load(:id=>1, :name=>'foo', :i => 2) @db.sqls.must_equal ["INSERT INTO people (name) VALUES ('foo') RETURNING #{@columns}"] c = Class.new(@c) c.dataset = c.dataset.returning(:id, :name) c.columns :id, :name, :i @db.sqls c.create(:name=>'foo').must_equal c.load(:id=>1, :name=>'foo', :i => 2) @db.sqls.must_equal ["INSERT INTO people (name) VALUES ('foo') RETURNING id, name"] end end describe "when #use_prepared_statements_for? returns true" do before do @c.class_eval do def self.use_prepared_statements_for_pk_lookup?; true end singleton_class.send(:private, :use_prepared_statements_for_pk_lookup?) 
private def use_prepared_statements_for?(type) true end end end include prepared_statements_spec it "should correctly create instance" do @c.create(:name=>'foo').must_equal @c.load(:id=>1, :name=>'foo', :i => 2) @db.sqls.must_equal ["INSERT INTO people (name) VALUES ('foo')", "SELECT * FROM people WHERE id = 1"] end it "should correctly update instance when specifying server" do @p.set_server(:read_only).update(:name=>'bar').must_equal @c.load(:id=>1, :name=>'bar', :i => 2) @db.sqls.must_equal ["UPDATE people SET name = 'bar' WHERE (id = 1) -- read_only"] end it "should correctly create instance when specifying server" do @c.new(:name=>'foo').set_server(:read_only).save.must_equal @c.load(:id=>1, :name=>'foo', :i => 2) @db.sqls.must_equal ["INSERT INTO people (name) VALUES ('foo') -- read_only", "SELECT * FROM people WHERE id = 1 -- read_only"] end it "should correctly create instance if dataset supports insert_select when specifying server" do @c.dataset_module do def supports_insert_select? true end def supports_returning?(type) true end def insert_select(h) cache_set(:_fetch, :id=>1, :name=>'foo', :i => 2) server(:default).with_sql_first(insert_select_sql(h)) end def insert_select_sql(*v) insert_sql(*v) << " RETURNING #{(opts[:returning] && !opts[:returning].empty?) ? opts[:returning].map{|c| literal(c)}.join(', ') : '*'}" end end @c.new(:name=>'foo').set_server(:read_only).save.must_equal @c.load(:id=>1, :name=>'foo', :i => 2) @db.sqls.must_equal ["INSERT INTO people (name) VALUES ('foo') RETURNING #{@columns} -- read_only"] end it "should work correctly when subclassing" do c = Class.new(@c) @db.sqls c.load(:id=>1, :name=>'foo', :i=>2).save @db.sqls.must_equal ["UPDATE people SET name = 'foo', i = 2 WHERE (id = 1)"] end it "should correctly handle without schema type when placeholder type specifiers are required" do @c.dataset = @ds.with_extend{def requires_placeholder_type_specifiers?; true end} @p.save @db.sqls.must_equal ["UPDATE people SET name = 'foo', i = 2 WHERE (id = 1)"] end it "should correctly handle with schema type when placeholder type specifiers are required" do @c.dataset = @ds.with_extend do def requires_placeholder_type_specifiers?; true end private def prepared_statement_modules [Module.new do def literal_symbol_append(sql, v) if @opts[:bind_vars] && /\A\$(.*)\z/ =~ v literal_append(sql, prepared_arg($1.split('__')[0].to_sym)) else super end end end] end end @c.db_schema[:id][:type] = :integer @p.save @db.sqls.must_equal ["UPDATE people SET name = 'foo', i = 2 WHERE (id = 1)"] end end describe "when #use_prepared_statements_for? returns false" do before do @columns = "*" @c.class_eval do def self.use_prepared_statements_for_pk_lookup?; false end singleton_class.send(:private, :use_prepared_statements_for_pk_lookup?)
private def use_prepared_statements_for?(type) false end end end include prepared_statements_spec it "should correctly create instance" do @c.create(:name=>'foo').must_equal @c.load(:id=>1, :name=>'foo', :i => 2) @db.sqls.must_equal ["INSERT INTO people (name) VALUES ('foo')", "SELECT #{@columns} FROM people WHERE id = 1"] end it "should correctly lookup by primary key" do @c[1].must_equal @p @db.sqls.must_equal ["SELECT * FROM people WHERE id = 1 -- read_only"] end it "should correctly delete instance" do @p.destroy.must_equal @p @db.sqls.must_equal ["DELETE FROM people WHERE id = 1"] end end end

# ===== sequel-5.63.0/spec/extensions/pretty_table_spec.rb =====

require_relative "spec_helper" require 'stringio' Sequel.extension :pretty_table describe "Dataset#print" do before do @output = StringIO.new @orig_stdout = $stdout $stdout = @output @dataset = Sequel.mock(:fetch=>[{:a=>1, :b=>2}, {:a=>3, :b=>4}, {:a=>5, :b=>6}])[:items].extension(:pretty_table) end after do $stdout = @orig_stdout end it "should print out a table with the values" do @dataset.print(:a, :b) @output.rewind @output.read.must_equal \ "+-+-+\n|a|b|\n+-+-+\n|1|2|\n|3|4|\n|5|6|\n+-+-+\n" end it "should default to the dataset's columns" do @dataset.columns(:a, :b) @dataset.print @output.rewind @output.read.must_equal \ "+-+-+\n|a|b|\n+-+-+\n|1|2|\n|3|4|\n|5|6|\n+-+-+\n" end end describe "PrettyTable" do before do @data1 = [ {:x => 3, :y => 4} ] @data2 = [ {:a => 23, :b => 45}, {:a => 45, :b => 2377} ] @data3 = [ {:aaa => 1}, {:bb => 2}, {:c => 3.1} ] @output = StringIO.new @orig_stdout = $stdout $stdout = @output end after do $stdout = @orig_stdout end it "should infer the columns if not given" do Sequel::PrettyTable.print(@data1) @output.rewind @output.read.must_equal(<<OUTPUT) +-+-+ |x|y| +-+-+ |3|4| +-+-+ OUTPUT end it "should have #string return the string without printing" do Sequel::PrettyTable.string(@data1).must_equal((<<OUTPUT).chomp) +-+-+ |x|y| +-+-+ |3|4| +-+-+ OUTPUT @output.rewind @output.read.must_equal '' end it "should calculate the maximum width of each column correctly" do Sequel::PrettyTable.print(@data2, [:a, :b]) @output.rewind @output.read.must_equal(<<OUTPUT) +--+----+ |a |b | +--+----+ |23| 45| |45|2377| +--+----+ OUTPUT end it "should also take header width into account" do Sequel::PrettyTable.print(@data3, [:aaa, :bb, :c]) @output.rewind @output.read.must_equal(<<OUTPUT) +---+--+---+ |aaa|bb|c | +---+--+---+ | 1| | | | | 2| | | | |3.1| +---+--+---+ OUTPUT end it "should print only the specified columns" do Sequel::PrettyTable.print(@data2, [:a]) @output.rewind @output.read.must_equal(<<OUTPUT) +--+ |a | +--+ |23| |45| +--+ OUTPUT end end
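# Usage sketch (illustrative only): outside of the specs, the extension is
# loaded per dataset and prints to $stdout, or Sequel::PrettyTable can be
# used directly to get the table as a String; the DB handle is hypothetical.
#
#   Sequel.extension :pretty_table
#   ds = DB[:items].extension(:pretty_table)
#   ds.print(:a, :b)                              # prints an ASCII table
#   Sequel::PrettyTable.string([{:x=>3, :y=>4}])  # returns the table text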
# ===== sequel-5.63.0/spec/extensions/primary_key_lookup_check_values_spec.rb =====

require_relative "spec_helper" describe "primary_key_lookup_check_values plugin" do int_pk_schema = {:id=>{:primary_key=>true, :type=>:integer, :min_value=>0, :max_value=>10}.freeze}.freeze def model(schema) fetch = {} schema.keys.each_with_index{|k,i| fetch[k] = i+1} db = Sequel.mock(:fetch=>fetch) schema = schema.to_a db.define_singleton_method(:schema){|*| schema} def db.supports_schema_parsing?; true end c = Class.new(Sequel::Model(db[:a])) c.plugin :primary_key_lookup_check_values db.sqls c end it "should be loadable on a model class without a dataset" do db = model(int_pk_schema).db c = Class.new(Sequel::Model(db)) c.plugin :primary_key_lookup_check_values c.dataset = :a c.db.sqls c['1'].must_equal c.load(:id=>1) db.sqls.must_equal ["SELECT * FROM a WHERE id = 1"] end it "should work in subclasses" do c = Class.new(model(int_pk_schema)) c.db.sqls c['1'].must_equal c.load(:id=>1) c.db.sqls.must_equal ["SELECT * FROM a WHERE id = 1"] c.dataset = Sequel.mock(:fetch=>{:id=>1})[:a] c.db.sqls c['1'].must_equal c.load(:id=>1) c.db.sqls.must_equal ["SELECT * FROM a WHERE id = '1'"] end it "should handle lookup for single primary key if database type is not known" do c = model(:id=>{:primary_key=>true}) c['1'].must_equal c.load(:id=>1) c.db.sqls.must_equal ["SELECT * FROM a WHERE id = '1'"] end it "should handle lookup for single primary key when value can be type checked but not value checked" do c = model(:id=>{:primary_key=>true, :type=>:integer}) c['1'].must_equal c.load(:id=>1) c.db.sqls.must_equal ["SELECT * FROM a WHERE id = 1"] end it "should handle lookup for composite primary key if no columns can be type checked" do c = model(:id1=>{:primary_key=>true}, :id2=>{:primary_key=>true}) c[['1', '2']].must_equal c.load(:id1=>1, :id2=>2) c.db.sqls.must_equal ["SELECT * FROM a WHERE ((id1 = '1') AND (id2 = '2')) LIMIT 1"] end it "should handle lookup for composite primary key where only a subset of columns can be type checked" do c = model(:id1=>{:primary_key=>true, :type=>:integer}, :id2=>{:primary_key=>true}) c[['a', '2']].must_be_nil c.db.sqls.must_equal [] c[['1', '2']].must_equal c.load(:id1=>1, :id2=>2) c.db.sqls.must_equal ["SELECT * FROM a WHERE ((id1 = 1) AND (id2 = '2')) LIMIT 1"] end it "should handle lookup for composite primary key where only a subset of columns can be type checked and value checked" do c = model(:id1=>{:primary_key=>true, :type=>:integer, :min_value=>0, :max_value=>10}, :id2=>{:primary_key=>true}) c[['a', '2']].must_be_nil c.db.sqls.must_equal [] c[['12', '2']].must_be_nil c.db.sqls.must_equal [] c[['1', '2']].must_equal c.load(:id1=>1, :id2=>2) c.db.sqls.must_equal ["SELECT * FROM a WHERE ((id1 = 1) AND (id2 = '2')) LIMIT 1"] end it "should handle lookup for composite primary key where only
a subset of columns can be value checked" do c = model(:id1=>{:primary_key=>true, :type=>:integer, :min_value=>0, :max_value=>10}, :id2=>{:primary_key=>true, :type=>:integer}) c[['a', '2']].must_be_nil c.db.sqls.must_equal [] c[['12', '2']].must_be_nil c.db.sqls.must_equal [] c[['1', '2']].must_equal c.load(:id1=>1, :id2=>2) c.db.sqls.must_equal ["SELECT * FROM a WHERE ((id1 = 1) AND (id2 = 2)) LIMIT 1"] end describe "with single pk with type and value" do before do @c = model(int_pk_schema) @c.db.sqls end it "should work when setting the dataset for an existing class" do @c.dataset = :b @c.db.sqls @c['1'].must_equal @c.load(:id=>1) @c.db.sqls.must_equal ["SELECT * FROM b WHERE id = 1"] end it "should skip query when nil is given" do @c[nil].must_be_nil @c.db.sqls.must_equal [] end it "should skip query for single primary key with array value" do @c[[1]].must_be_nil @c.db.sqls.must_equal [] end it "should skip query for single primary key when typecasting fails" do @c['a'].must_be_nil @c.db.sqls.must_equal [] end it "should skip query for single primary key when value check fails" do @c[12].must_be_nil @c[-1].must_be_nil @c.db.sqls.must_equal [] end it "should handle lookup for single primary key when value can be type and value checked" do @c['1'] @c.db.sqls.must_equal ["SELECT * FROM a WHERE id = 1"] end it "should handle lookup when given value is a symbol" do @c[:b] @c.db.sqls.must_equal ["SELECT * FROM a WHERE id = b"] end it "should handle lookup when given value is a literal string" do @c[Sequel.lit('b')] @c.db.sqls.must_equal ["SELECT * FROM a WHERE id = b"] end it "should handle lookup when given value is an SQL expression" do @c[Sequel.identifier('b')] @c.db.sqls.must_equal ["SELECT * FROM a WHERE id = b"] end it "should affect Model.with_pk lookups" do @c.with_pk(nil).must_be_nil @c.db.sqls.must_equal [] @c.with_pk('1').must_equal @c.load(:id=>1) @c.db.sqls.must_equal ["SELECT * FROM a WHERE id = 1"] end it "should affect Model.with_pk! lookups" do proc{@c.with_pk!(nil)}.must_raise Sequel::NoMatchingRow @c.db.sqls.must_equal [] @c.with_pk!('1').must_equal @c.load(:id=>1) @c.db.sqls.must_equal ["SELECT * FROM a WHERE id = 1"] end it "should affect Model Dataset#[] lookups with integers" do @c.dataset[12].must_be_nil @c.db.sqls.must_equal [] @c.dataset[1].must_equal @c.load(:id=>1) @c.db.sqls.must_equal ["SELECT * FROM a WHERE (a.id = 1) LIMIT 1"] end it "should affect Model Dataset#with_pk lookups" do @c.dataset.with_pk(nil).must_be_nil @c.db.sqls.must_equal [] @c.dataset.with_pk('1').must_equal @c.load(:id=>1) @c.db.sqls.must_equal ["SELECT * FROM a WHERE (a.id = 1) LIMIT 1"] end it "should affect Model Dataset#with_pk!
lookups" do proc{@c.dataset.with_pk!(nil)}.must_raise Sequel::NoMatchingRow @c.db.sqls.must_equal [] @c.dataset.with_pk!('1').must_equal @c.load(:id=>1) @c.db.sqls.must_equal ["SELECT * FROM a WHERE (a.id = 1) LIMIT 1"] end end describe "with composite pk with type and value for all columns" do comp_pk_schema = { :id1=>{:primary_key=>true, :type=>:integer, :min_value=>0, :max_value=>10}.freeze, :id2=>{:primary_key=>true, :type=>:integer, :min_value=>12, :max_value=>20}.freeze }.freeze before do @ds = model(comp_pk_schema).dataset end it "should skip query for composite primary key with non-array value" do @ds.with_pk(1).must_be_nil @ds.db.sqls.must_equal [] end it "should skip query for composite primary key with incorrect array value size" do @ds.with_pk([1]).must_be_nil @ds.with_pk([1, 2, 3]).must_be_nil @ds.db.sqls.must_equal [] end it "should skip query for composite primary key with nil value" do @ds.with_pk([nil, nil]).must_be_nil @ds.with_pk([nil, 14]).must_be_nil @ds.with_pk([1, nil]).must_be_nil @ds.db.sqls.must_equal [] end it "should skip query for composite primary key where typecasting fails for any value" do @ds.with_pk(['a', 'a']).must_be_nil @ds.with_pk(['a', 14]).must_be_nil @ds.with_pk([1, 'a']).must_be_nil @ds.db.sqls.must_equal [] end it "should skip query for composite primary key where where value check fails for any value" do @ds.with_pk([-1, 5]).must_be_nil @ds.with_pk([5, 5]).must_be_nil @ds.with_pk([5, 25]).must_be_nil @ds.with_pk([-1, 14]).must_be_nil @ds.with_pk([11, 14]).must_be_nil @ds.db.sqls.must_equal [] end it "should handle lookup for composite primary key when all values can be type and value checked" do @ds.with_pk(['5', '15']).must_equal @ds.model.load(:id1=>1, :id2=>2) @ds.db.sqls.must_equal ["SELECT * FROM a WHERE ((a.id1 = 5) AND (a.id2 = 15)) LIMIT 1"] end end end ��������������������������sequel-5.63.0/spec/extensions/query_spec.rb���������������������������������������������������������0000664�0000000�0000000�00000004753�14342141206�0020746�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Database#query" do before do @db = Sequel.mock.extension(:query) end it "should delegate to Dataset#query if block is provided" do @d = @db.query {select :x; from :y} @d.must_be_kind_of(Sequel::Dataset) @d.sql.must_equal "SELECT x FROM y" end end describe "Dataset#query" do before do @d = Sequel.mock.dataset.extension(:query) end it "should allow cloning without arguments" do q = @d.query {clone} q.class.must_equal @d.class q.sql.must_equal "SELECT *" end it "should support #from" do q = @d.query {from :xxx} q.class.must_equal @d.class q.sql.must_equal "SELECT * FROM xxx" end it "should support #select" do q = @d.query do select :a, Sequel[:b].as(:mongo) from :yyy end q.class.must_equal @d.class q.sql.must_equal "SELECT a, b AS mongo FROM yyy" end it "should support #where" do q = @d.query do from :zzz where{x + 2 > Sequel.expr(:y) + 3} end q.class.must_equal @d.class q.sql.must_equal "SELECT * FROM zzz WHERE ((x + 2) > (y + 3))" q = @d.from(:zzz).query do where{(x > 1) & (Sequel.expr(:y) > 2)} end q.class.must_equal @d.class q.sql.must_equal "SELECT * FROM zzz WHERE ((x > 1) AND (y > 2))" q = @d.from(:zzz).query do 
where :x => 33 end q.class.must_equal @d.class q.sql.must_equal "SELECT * FROM zzz WHERE (x = 33)" end it "should support #group_by and #having" do q = @d.query do from :abc group_by :id having{x >= 2} end q.class.must_equal @d.class q.sql.must_equal "SELECT * FROM abc GROUP BY id HAVING (x >= 2)" end it "should support #order, #order_by" do q = @d.query do from :xyz order_by :stamp end q.class.must_equal @d.class q.sql.must_equal "SELECT * FROM xyz ORDER BY stamp" end it "should support blocks that end in nil" do condition = false q = @d.query do from :xyz order_by :stamp if condition end q.sql.must_equal "SELECT * FROM xyz" end it "should raise on non-chainable method calls" do proc {@d.query {row_proc}}.must_raise(Sequel::Error) proc {@d.query {all}}.must_raise(Sequel::Error) end if RUBY_VERSION >= '2.7' it "should handle keywords when delegating" do eval '@d = @d.with_extend{def foo(name: (raise)) clone(:name=>name) end}' @d.query do foo(name: '1') end.opts[:name].must_equal '1' end end end

# ===== sequel-5.63.0/spec/extensions/rcte_tree_spec.rb =====

require_relative "spec_helper" describe Sequel::Model, "rcte_tree" do before do @db = Sequel.mock @db.extend_datasets do def supports_cte?(*) true end end @c = Class.new(Sequel::Model(@db[:nodes])) @c.class_eval do def self.name; 'Node'; end columns :id, :name, :parent_id, :i, :pi end @ds = @c.dataset @o = @c.load(:id=>2, :parent_id=>1, :name=>'AA', :i=>3, :pi=>4) @db.sqls end it "should define the correct associations" do @c.plugin :rcte_tree @c.associations.sort_by{|x| x.to_s}.must_equal [:ancestors, :children, :descendants, :parent] end it "should define the correct associations when giving options" do @c.plugin :rcte_tree, :ancestors=>{:name=>:as}, :children=>{:name=>:cs}, :descendants=>{:name=>:ds}, :parent=>{:name=>:p} @c.associations.sort_by{|x| x.to_s}.must_equal [:as, :cs, :ds, :p] end it "should use the correct SQL for lazy associations" do @c.plugin :rcte_tree @o.parent_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.id = 1) LIMIT 1' @o.children_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.parent_id = 2)' @o.ancestors_dataset.sql.must_equal 'WITH t AS (SELECT * FROM nodes WHERE (id = 1) UNION ALL SELECT nodes.* FROM nodes INNER JOIN t ON (t.parent_id = nodes.id)) SELECT * FROM t AS nodes' @o.descendants_dataset.sql.must_equal 'WITH t AS (SELECT * FROM nodes WHERE (parent_id = 2) UNION ALL SELECT nodes.* FROM nodes INNER JOIN t ON (t.id = nodes.parent_id)) SELECT * FROM t AS nodes' end it "should use the correct SQL for lazy associations when recursive CTEs require column aliases" do @c.dataset = @c.dataset.with_extend{def recursive_cte_requires_column_aliases?; true end} @c.plugin :rcte_tree @o.parent_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.id = 1) LIMIT 1' @o.children_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.parent_id = 2)' @o.ancestors_dataset.sql.must_equal 'WITH t(id, name, parent_id, i, pi) AS (SELECT id, name, parent_id, i, pi FROM nodes WHERE (id = 1) UNION ALL SELECT nodes.id, nodes.name, nodes.parent_id,
nodes.i, nodes.pi FROM nodes INNER JOIN t ON (t.parent_id = nodes.id)) SELECT * FROM t AS nodes' @o.descendants_dataset.sql.must_equal 'WITH t(id, name, parent_id, i, pi) AS (SELECT id, name, parent_id, i, pi FROM nodes WHERE (parent_id = 2) UNION ALL SELECT nodes.id, nodes.name, nodes.parent_id, nodes.i, nodes.pi FROM nodes INNER JOIN t ON (t.id = nodes.parent_id)) SELECT * FROM t AS nodes' end it "should use the correct SQL for eager loading when recursive CTEs require column aliases" do @c.dataset = @c.dataset.with_extend{def recursive_cte_requires_column_aliases?; true end} @c.plugin :rcte_tree @c.dataset = @c.dataset.with_fetch([[{:id=>1, :name=>'A', :parent_id=>3}]]) @c.eager(:ancestors).all @db.sqls.must_equal ["SELECT * FROM nodes", "WITH t(x_root_x, id, name, parent_id, i, pi) AS (SELECT id AS x_root_x, nodes.id, nodes.name, nodes.parent_id, nodes.i, nodes.pi FROM nodes WHERE (id IN (3)) UNION ALL SELECT t.x_root_x, nodes.id, nodes.name, nodes.parent_id, nodes.i, nodes.pi FROM nodes INNER JOIN t ON (t.parent_id = nodes.id)) SELECT * FROM t AS nodes"] @c.dataset = @c.dataset.with_fetch([[{:id=>1, :name=>'A', :parent_id=>3}]]) @c.eager(:descendants).all @db.sqls.must_equal ["SELECT * FROM nodes", "WITH t(x_root_x, id, name, parent_id, i, pi) AS (SELECT parent_id AS x_root_x, nodes.id, nodes.name, nodes.parent_id, nodes.i, nodes.pi FROM nodes WHERE (parent_id IN (1)) UNION ALL SELECT t.x_root_x, nodes.id, nodes.name, nodes.parent_id, nodes.i, nodes.pi FROM nodes INNER JOIN t ON (t.id = nodes.parent_id)) SELECT * FROM t AS nodes"] end it "should use the correct SQL for lazy associations when giving options" do @c.plugin :rcte_tree, :primary_key=>:i, :key=>:pi, :cte_name=>:cte, :order=>:name, :ancestors=>{:name=>:as}, :children=>{:name=>:cs}, :descendants=>{:name=>:ds}, :parent=>{:name=>:p} @o.p_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.i = 4) ORDER BY name LIMIT 1' @o.cs_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.pi = 3) ORDER BY name' @o.as_dataset.sql.must_equal 'WITH cte AS (SELECT * FROM nodes WHERE (i = 4) UNION ALL SELECT nodes.* FROM nodes INNER JOIN cte ON (cte.pi = nodes.i)) SELECT * FROM cte AS nodes ORDER BY name' @o.ds_dataset.sql.must_equal 'WITH cte AS (SELECT * FROM nodes WHERE (pi = 3) UNION ALL SELECT nodes.* FROM nodes INNER JOIN cte ON (cte.i = nodes.pi)) SELECT * FROM cte AS nodes ORDER BY name' end it "should use the correct SQL for lazy associations with :conditions option" do @c.plugin :rcte_tree, :conditions => {:i => 1} @o.parent_dataset.sql.must_equal 'SELECT * FROM nodes WHERE ((i = 1) AND (nodes.id = 1)) LIMIT 1' @o.children_dataset.sql.must_equal 'SELECT * FROM nodes WHERE ((i = 1) AND (nodes.parent_id = 2))' @o.ancestors_dataset.sql.must_equal 'WITH t AS (SELECT * FROM nodes WHERE ((id = 1) AND (i = 1)) UNION ALL SELECT nodes.* FROM nodes INNER JOIN t ON (t.parent_id = nodes.id) WHERE (i = 1)) SELECT * FROM t AS nodes WHERE (i = 1)' @o.descendants_dataset.sql.must_equal 'WITH t AS (SELECT * FROM nodes WHERE ((parent_id = 2) AND (i = 1)) UNION ALL SELECT nodes.* FROM nodes INNER JOIN t ON (t.id = nodes.parent_id) WHERE (i = 1)) SELECT * FROM t AS nodes WHERE (i = 1)' end it "should use the correct SQL for lazy associations with :conditions option as an array that is a conditions specifier" do @c.plugin :rcte_tree, :conditions => [[:i, 1]] @o.parent_dataset.sql.must_equal 'SELECT * FROM nodes WHERE ((i = 1) AND (nodes.id = 1)) LIMIT 1' @o.children_dataset.sql.must_equal 'SELECT * FROM nodes WHERE ((i = 1) AND (nodes.parent_id = 
2))' @o.ancestors_dataset.sql.must_equal 'WITH t AS (SELECT * FROM nodes WHERE ((id = 1) AND (i = 1)) UNION ALL SELECT nodes.* FROM nodes INNER JOIN t ON (t.parent_id = nodes.id) WHERE (i = 1)) SELECT * FROM t AS nodes WHERE (i = 1)' @o.descendants_dataset.sql.must_equal 'WITH t AS (SELECT * FROM nodes WHERE ((parent_id = 2) AND (i = 1)) UNION ALL SELECT nodes.* FROM nodes INNER JOIN t ON (t.id = nodes.parent_id) WHERE (i = 1)) SELECT * FROM t AS nodes WHERE (i = 1)' end it "should use the correct SQL for lazy associations with :conditions option as an array that is not a conditions specifier" do @c.plugin :rcte_tree, :conditions => [{:i => 1}] @o.parent_dataset.sql.must_equal 'SELECT * FROM nodes WHERE ((i = 1) AND (nodes.id = 1)) LIMIT 1' @o.children_dataset.sql.must_equal 'SELECT * FROM nodes WHERE ((i = 1) AND (nodes.parent_id = 2))' @o.ancestors_dataset.sql.must_equal 'WITH t AS (SELECT * FROM nodes WHERE ((id = 1) AND (i = 1)) UNION ALL SELECT nodes.* FROM nodes INNER JOIN t ON (t.parent_id = nodes.id) WHERE (i = 1)) SELECT * FROM t AS nodes WHERE (i = 1)' @o.descendants_dataset.sql.must_equal 'WITH t AS (SELECT * FROM nodes WHERE ((parent_id = 2) AND (i = 1)) UNION ALL SELECT nodes.* FROM nodes INNER JOIN t ON (t.id = nodes.parent_id) WHERE (i = 1)) SELECT * FROM t AS nodes WHERE (i = 1)' end it "should add all parent associations when lazily loading ancestors" do @c.plugin :rcte_tree @c.dataset = @c.dataset.with_fetch([[{:id=>1, :name=>'A', :parent_id=>3}, {:id=>4, :name=>'B', :parent_id=>nil}, {:id=>3, :name=>'?', :parent_id=>4}]]) @o.ancestors.must_equal [@c.load(:id=>1, :name=>'A', :parent_id=>3), @c.load(:id=>4, :name=>'B', :parent_id=>nil), @c.load(:id=>3, :name=>'?', :parent_id=>4)] @o.associations[:parent].must_equal @c.load(:id=>1, :name=>'A', :parent_id=>3) @o.associations[:parent].associations[:parent].must_equal @c.load(:id=>3, :name=>'?', :parent_id=>4) @o.associations[:parent].associations[:parent].associations[:parent].must_equal @c.load(:id=>4, :name=>'B', :parent_id=>nil) @o.associations[:parent].associations[:parent].associations[:parent].associations.fetch(:parent, 1).must_be_nil end it "should handle case where ancestors is called on value without a parent" do @c.plugin :rcte_tree @c.dataset = @c.dataset.with_fetch([]) @o.parent_id = nil @o.ancestors.must_equal [] @o.associations[:parent].must_be_nil end it "should add all parent associations when lazily loading ancestors and giving options" do @c.plugin :rcte_tree, :primary_key=>:i, :key=>:pi, :ancestors=>{:name=>:as}, :parent=>{:name=>:p} @c.dataset = @c.dataset.with_fetch([[{:i=>4, :name=>'A', :pi=>5}, {:i=>6, :name=>'B', :pi=>nil}, {:i=>5, :name=>'?', :pi=>6}]]) @o.as.must_equal [@c.load(:i=>4, :name=>'A', :pi=>5), @c.load(:i=>6, :name=>'B', :pi=>nil), @c.load(:i=>5, :name=>'?', :pi=>6)] @o.associations[:p].must_equal @c.load(:i=>4, :name=>'A', :pi=>5) @o.associations[:p].associations[:p].must_equal @c.load(:i=>5, :name=>'?', :pi=>6) @o.associations[:p].associations[:p].associations[:p].must_equal @c.load(:i=>6, :name=>'B', :pi=>nil) @o.associations[:p].associations[:p].associations[:p].associations.fetch(:p, 1).must_be_nil end it "should add all parent and children associations when lazily loading descendants" do @c.plugin :rcte_tree @c.dataset = @c.dataset.with_fetch([[{:id=>3, :name=>'??', :parent_id=>1}, {:id=>1, :name=>'A', :parent_id=>2}, {:id=>4, :name=>'B', :parent_id=>2}, {:id=>5, :name=>'?', :parent_id=>3}]]) @o.descendants.must_equal [@c.load(:id=>3, :name=>'??', :parent_id=>1), @c.load(:id=>1, 
:name=>'A', :parent_id=>2), @c.load(:id=>4, :name=>'B', :parent_id=>2), @c.load(:id=>5, :name=>'?', :parent_id=>3)] @o.associations[:children].must_equal [@c.load(:id=>1, :name=>'A', :parent_id=>2), @c.load(:id=>4, :name=>'B', :parent_id=>2)] @o.associations[:children].map{|c1| c1.associations[:children]}.must_equal [[@c.load(:id=>3, :name=>'??', :parent_id=>1)], []] @o.associations[:children].map{|c1| c1.associations[:parent]}.must_equal [@o, @o] @o.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children]}}.must_equal [[[@c.load(:id=>5, :name=>'?', :parent_id=>3)]], []] @o.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:parent]}}.must_equal [[@o.children.first], []] @o.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children].map{|c3| c3.associations[:children]}}}.must_equal [[[[]]], []] @o.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children].map{|c3| c3.associations[:parent]}}}.must_equal [[[@o.children.first.children.first]], []] end it "should add all parent and children associations when lazily loading descendants when descendant value has no matching ancestor" do @c.plugin :rcte_tree @c.dataset = @c.dataset.with_fetch([[{:id=>3, :name=>'??', :parent_id=>1}, {:id=>1, :name=>'A', :parent_id=>2}, {:id=>nil, :name=>'B', :parent_id=>nil}, {:id=>5, :name=>'?', :parent_id=>3}]]) @o.descendants.must_equal [@c.load(:id=>3, :name=>'??', :parent_id=>1), @c.load(:id=>1, :name=>'A', :parent_id=>2), @c.load(:id=>nil, :name=>'B', :parent_id=>nil), @c.load(:id=>5, :name=>'?', :parent_id=>3)] @o.associations[:children].must_equal [@c.load(:id=>1, :name=>'A', :parent_id=>2)] @o.associations[:children].map{|c1| c1.associations[:children]}.must_equal [[@c.load(:id=>3, :name=>'??', :parent_id=>1)]] @o.associations[:children].map{|c1| c1.associations[:parent]}.must_equal [@o] @o.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children]}}.must_equal [[[@c.load(:id=>5, :name=>'?', :parent_id=>3)]]] @o.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:parent]}}.must_equal [[@o.children.first]] @o.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children].map{|c3| c3.associations[:children]}}}.must_equal [[[[]]]] @o.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children].map{|c3| c3.associations[:parent]}}}.must_equal [[[@o.children.first.children.first]]] end it "should add all children associations when lazily loading descendants and giving options" do @c.plugin :rcte_tree, :primary_key=>:i, :key=>:pi, :children=>{:name=>:cs}, :descendants=>{:name=>:ds} @c.dataset = @c.dataset.with_fetch([[{:i=>7, :name=>'??', :pi=>5}, {:i=>5, :name=>'A', :pi=>3}, {:i=>6, :name=>'B', :pi=>3}, {:i=>8, :name=>'?', :pi=>7}]]) @o.ds.must_equal [@c.load(:i=>7, :name=>'??', :pi=>5), @c.load(:i=>5, :name=>'A', :pi=>3), @c.load(:i=>6, :name=>'B', :pi=>3), @c.load(:i=>8, :name=>'?', :pi=>7)] @o.associations[:cs].must_equal [@c.load(:i=>5, :name=>'A', :pi=>3), @c.load(:i=>6, :name=>'B', :pi=>3)] @o.associations[:cs].map{|c1| c1.associations[:cs]}.must_equal [[@c.load(:i=>7, :name=>'??', :pi=>5)], []] @o.associations[:cs].map{|c1| c1.associations[:cs].map{|c2| c2.associations[:cs]}}.must_equal [[[@c.load(:i=>8, :name=>'?', :pi=>7)]], []] @o.associations[:cs].map{|c1| c1.associations[:cs].map{|c2| c2.associations[:cs].map{|c3| 
c3.associations[:cs]}}}.must_equal [[[[]]], []] end it "should eagerly load ancestors" do @c.plugin :rcte_tree @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :parent_id=>1, :name=>'AA'}, {:id=>6, :parent_id=>2, :name=>'C'}, {:id=>7, :parent_id=>1, :name=>'D'}, {:id=>9, :parent_id=>nil, :name=>'E'}], [{:id=>2, :name=>'AA', :parent_id=>1, :x_root_x=>2}, {:id=>1, :name=>'00', :parent_id=>8, :x_root_x=>1}, {:id=>1, :name=>'00', :parent_id=>8, :x_root_x=>2}, {:id=>8, :name=>'?', :parent_id=>nil, :x_root_x=>2}, {:id=>8, :name=>'?', :parent_id=>nil, :x_root_x=>1}]]) os = @ds.eager(:ancestors).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH t AS (SELECT id AS x_root_x, nodes.* FROM nodes WHERE (id IN (1, 2)) UNION ALL SELECT t.x_root_x, nodes.* FROM nodes INNER JOIN t ON (t.parent_id = nodes.id)) SELECT * FROM t AS nodes'] os.must_equal [@c.load(:id=>2, :parent_id=>1, :name=>'AA'), @c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>7, :parent_id=>1, :name=>'D'), @c.load(:id=>9, :parent_id=>nil, :name=>'E')] os.map{|o| o.ancestors}.must_equal [[@c.load(:id=>1, :name=>'00', :parent_id=>8), @c.load(:id=>8, :name=>'?', :parent_id=>nil)], [@c.load(:id=>2, :name=>'AA', :parent_id=>1), @c.load(:id=>1, :name=>'00', :parent_id=>8), @c.load(:id=>8, :name=>'?', :parent_id=>nil)], [@c.load(:id=>1, :name=>'00', :parent_id=>8), @c.load(:id=>8, :name=>'?', :parent_id=>nil)], []] os.map{|o| o.parent}.must_equal [@c.load(:id=>1, :name=>'00', :parent_id=>8), @c.load(:id=>2, :name=>'AA', :parent_id=>1), @c.load(:id=>1, :name=>'00', :parent_id=>8), nil] os.map{|o| o.parent.parent if o.parent}.must_equal [@c.load(:id=>8, :name=>'?', :parent_id=>nil), @c.load(:id=>1, :name=>'00', :parent_id=>8), @c.load(:id=>8, :name=>'?', :parent_id=>nil), nil] os.map{|o| o.parent.parent.parent if o.parent and o.parent.parent}.must_equal [nil, @c.load(:id=>8, :name=>'?', :parent_id=>nil), nil, nil] os.map{|o| o.parent.parent.parent.parent if o.parent and o.parent.parent and o.parent.parent.parent}.must_equal [nil, nil, nil, nil] @db.sqls.must_equal [] end it "should eagerly load ancestors on oracle when root column is a BigDecimal value" do def (@c.dataset.db).database_type; :oracle; end @c.dataset = @c.dataset.with_extend{def recursive_cte_requires_column_aliases?; true end} @c.plugin :rcte_tree, :ancestors=>{:read_only=>true}, :descendants=>{:read_only=>true} @ds = @c.dataset = @c.dataset.with_fetch([[ {:id=>2, :parent_id=>1, :name=>'AA'}, {:id=>6, :parent_id=>2, :name=>'C'}, {:id=>7, :parent_id=>1, :name=>'D'}, {:id=>9, :parent_id=>nil, :name=>'E'}], [{:id=>2, :name=>'AA', :parent_id=>1, :x_root_x=>BigDecimal('2')}, {:id=>1, :name=>'00', :parent_id=>8, :x_root_x=>BigDecimal('1')}, {:id=>1, :name=>'00', :parent_id=>8, :x_root_x=>BigDecimal('2')}, {:id=>8, :name=>'?', :parent_id=>nil, :x_root_x=>BigDecimal('2')}, {:id=>8, :name=>'?', :parent_id=>nil, :x_root_x=>BigDecimal('1')}]]) os = @ds.eager(:ancestors).all @db.sqls.must_equal ["SELECT * FROM nodes", "WITH t(x_root_x, id, name, parent_id, i, pi) AS (SELECT id AS x_root_x, nodes.id, nodes.name, nodes.parent_id, nodes.i, nodes.pi FROM nodes WHERE (id IN (1, 2)) UNION ALL SELECT t.x_root_x, nodes.id, nodes.name, nodes.parent_id, nodes.i, nodes.pi FROM nodes INNER JOIN t ON (t.parent_id = nodes.id)) SELECT * FROM t AS nodes"] os.must_equal [@c.load(:id=>2, :parent_id=>1, :name=>'AA'), @c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>7, :parent_id=>1, :name=>'D'), @c.load(:id=>9, :parent_id=>nil, :name=>'E')] os.map{|o| o.ancestors}.must_equal 
[[@c.load(:id=>1, :name=>'00', :parent_id=>8), @c.load(:id=>8, :name=>'?', :parent_id=>nil)], [@c.load(:id=>2, :name=>'AA', :parent_id=>1), @c.load(:id=>1, :name=>'00', :parent_id=>8), @c.load(:id=>8, :name=>'?', :parent_id=>nil)], [@c.load(:id=>1, :name=>'00', :parent_id=>8), @c.load(:id=>8, :name=>'?', :parent_id=>nil)], []] os.map{|o| o.parent}.must_equal [@c.load(:id=>1, :name=>'00', :parent_id=>8), @c.load(:id=>2, :name=>'AA', :parent_id=>1), @c.load(:id=>1, :name=>'00', :parent_id=>8), nil] os.map{|o| o.parent.parent if o.parent}.must_equal [@c.load(:id=>8, :name=>'?', :parent_id=>nil), @c.load(:id=>1, :name=>'00', :parent_id=>8), @c.load(:id=>8, :name=>'?', :parent_id=>nil), nil] os.map{|o| o.parent.parent.parent if o.parent and o.parent.parent}.must_equal [nil, @c.load(:id=>8, :name=>'?', :parent_id=>nil), nil, nil] os.map{|o| o.parent.parent.parent.parent if o.parent and o.parent.parent and o.parent.parent.parent}.must_equal [nil, nil, nil, nil] @db.sqls.must_equal [] end it "should eagerly load ancestors when giving options" do @c.plugin :rcte_tree, :primary_key=>:i, :key=>:pi, :key_alias=>:kal, :cte_name=>:cte, :ancestors=>{:name=>:as, :select=>[:i, :name, :pi]}, :parent=>{:name=>:p} @ds = @c.dataset = @c.dataset.with_fetch([[{:i=>2, :pi=>1, :name=>'AA'}, {:i=>6, :pi=>2, :name=>'C'}, {:i=>7, :pi=>1, :name=>'D'}, {:i=>9, :pi=>nil, :name=>'E'}], [{:i=>2, :name=>'AA', :pi=>1, :kal=>2}, {:i=>1, :name=>'00', :pi=>8, :kal=>1}, {:i=>1, :name=>'00', :pi=>8, :kal=>2}, {:i=>8, :name=>'?', :pi=>nil, :kal=>2}, {:i=>8, :name=>'?', :pi=>nil, :kal=>1}]]) os = @ds.eager(:as).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH cte AS (SELECT i AS kal, nodes.* FROM nodes WHERE (i IN (1, 2)) UNION ALL SELECT cte.kal, nodes.* FROM nodes INNER JOIN cte ON (cte.pi = nodes.i)) SELECT i, name, pi, kal FROM cte AS nodes'] os.must_equal [@c.load(:i=>2, :pi=>1, :name=>'AA'), @c.load(:i=>6, :pi=>2, :name=>'C'), @c.load(:i=>7, :pi=>1, :name=>'D'), @c.load(:i=>9, :pi=>nil, :name=>'E')] os.map{|o| o.as}.must_equal [[@c.load(:i=>1, :name=>'00', :pi=>8), @c.load(:i=>8, :name=>'?', :pi=>nil)], [@c.load(:i=>2, :name=>'AA', :pi=>1), @c.load(:i=>1, :name=>'00', :pi=>8), @c.load(:i=>8, :name=>'?', :pi=>nil)], [@c.load(:i=>1, :name=>'00', :pi=>8), @c.load(:i=>8, :name=>'?', :pi=>nil)], []] os.map{|o| o.p}.must_equal [@c.load(:i=>1, :name=>'00', :pi=>8), @c.load(:i=>2, :name=>'AA', :pi=>1), @c.load(:i=>1, :name=>'00', :pi=>8), nil] os.map{|o| o.p.p if o.p}.must_equal [@c.load(:i=>8, :name=>'?', :pi=>nil), @c.load(:i=>1, :name=>'00', :pi=>8), @c.load(:i=>8, :name=>'?', :pi=>nil), nil] os.map{|o| o.p.p.p if o.p and o.p.p}.must_equal [nil, @c.load(:i=>8, :name=>'?', :pi=>nil), nil, nil] os.map{|o| o.p.p.p.p if o.p and o.p.p and o.p.p.p}.must_equal [nil, nil, nil, nil] end it "should eagerly load ancestors respecting association option :conditions" do @c.plugin :rcte_tree, :conditions => {:i => 1} @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :parent_id=>1, :name=>'AA'}, {:id=>6, :parent_id=>2, :name=>'C'}, {:id=>7, :parent_id=>1, :name=>'D'}, {:id=>9, :parent_id=>nil, :name=>'E'}], [{:id=>2, :name=>'AA', :parent_id=>1, :x_root_x=>2}, {:id=>1, :name=>'00', :parent_id=>8, :x_root_x=>1}, {:id=>1, :name=>'00', :parent_id=>8, :x_root_x=>2}, {:id=>8, :name=>'?', :parent_id=>nil, :x_root_x=>2}, {:id=>8, :name=>'?', :parent_id=>nil, :x_root_x=>1}]]) @ds.eager(:ancestors).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH t AS (SELECT id AS x_root_x, nodes.* FROM nodes WHERE ((id IN (1, 2)) AND (i = 1)) UNION ALL 
SELECT t.x_root_x, nodes.* FROM nodes INNER JOIN t ON (t.parent_id = nodes.id) WHERE (i = 1)) SELECT * FROM t AS nodes WHERE (i = 1)'] end it "should eagerly load ancestors respecting association option :conditions as an array that is not a conditions specifier" do @c.plugin :rcte_tree, :conditions => [{:i => 1}] @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :parent_id=>1, :name=>'AA'}, {:id=>6, :parent_id=>2, :name=>'C'}, {:id=>7, :parent_id=>1, :name=>'D'}, {:id=>9, :parent_id=>nil, :name=>'E'}], [{:id=>2, :name=>'AA', :parent_id=>1, :x_root_x=>2}, {:id=>1, :name=>'00', :parent_id=>8, :x_root_x=>1}, {:id=>1, :name=>'00', :parent_id=>8, :x_root_x=>2}, {:id=>8, :name=>'?', :parent_id=>nil, :x_root_x=>2}, {:id=>8, :name=>'?', :parent_id=>nil, :x_root_x=>1}]]) @ds.eager(:ancestors).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH t AS (SELECT id AS x_root_x, nodes.* FROM nodes WHERE ((id IN (1, 2)) AND (i = 1)) UNION ALL SELECT t.x_root_x, nodes.* FROM nodes INNER JOIN t ON (t.parent_id = nodes.id) WHERE (i = 1)) SELECT * FROM t AS nodes WHERE (i = 1)'] end it "should eagerly load descendants" do @c.plugin :rcte_tree @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :parent_id=>1, :name=>'AA'}, {:id=>6, :parent_id=>2, :name=>'C'}, {:id=>7, :parent_id=>1, :name=>'D'}], [{:id=>6, :parent_id=>2, :name=>'C', :x_root_x=>2}, {:id=>9, :parent_id=>2, :name=>'E', :x_root_x=>2}, {:id=>3, :name=>'00', :parent_id=>6, :x_root_x=>6}, {:id=>3, :name=>'00', :parent_id=>6, :x_root_x=>2}, {:id=>4, :name=>'?', :parent_id=>7, :x_root_x=>7}, {:id=>5, :name=>'?', :parent_id=>4, :x_root_x=>7}]]) os = @ds.eager(:descendants).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH t AS (SELECT parent_id AS x_root_x, nodes.* FROM nodes WHERE (parent_id IN (2, 6, 7)) UNION ALL SELECT t.x_root_x, nodes.* FROM nodes INNER JOIN t ON (t.id = nodes.parent_id)) SELECT * FROM t AS nodes'] os.must_equal [@c.load(:id=>2, :parent_id=>1, :name=>'AA'), @c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>7, :parent_id=>1, :name=>'D')] os.map{|o| o.descendants}.must_equal [[@c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>9, :parent_id=>2, :name=>'E'), @c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>4, :name=>'?', :parent_id=>7), @c.load(:id=>5, :name=>'?', :parent_id=>4)]] os.map{|o| o.children}.must_equal [[@c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>9, :parent_id=>2, :name=>'E')], [@c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>4, :name=>'?', :parent_id=>7)]] os.map{|o1| o1.children.map{|o2| o2.children}}.must_equal [[[@c.load(:id=>3, :name=>'00', :parent_id=>6)], []], [[]], [[@c.load(:id=>5, :name=>'?', :parent_id=>4)]]] os.map{|o1| o1.children.map{|o2| o2.parent}}.must_equal [[os[0], os[0]], [os[1]], [os[2]]] os.map{|o1| o1.children.map{|o2| o2.children.map{|o3| o3.children}}}.must_equal [[[[]], []], [[]], [[[]]]] os.map{|o1| o1.children.map{|o2| o2.children.map{|o3| o3.parent}}}.must_equal [[[os[0].children[0]], []], [[]], [[os[2].children[0]]]] @db.sqls.must_equal [] end it "should eagerly load descendants when eagerly loaded descendant doesn't match initial object" do @c.plugin :rcte_tree @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :parent_id=>1, :name=>'AA'}, {:id=>6, :parent_id=>2, :name=>'C'}, {:id=>7, :parent_id=>1, :name=>'D'}], [{:id=>6, :parent_id=>2, :name=>'C', :x_root_x=>2}, {:id=>9, :parent_id=>2, :name=>'E', :x_root_x=>2}, {:id=>3, :name=>'00', :parent_id=>6, :x_root_x=>6}, {:id=>3, :name=>'00', 
:parent_id=>6, :x_root_x=>2}, {:id=>4, :name=>'?', :parent_id=>7, :x_root_x=>7}, {:id=>5, :name=>'?', :parent_id=>4, :x_root_x=>17}]]) os = @ds.eager(:descendants).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH t AS (SELECT parent_id AS x_root_x, nodes.* FROM nodes WHERE (parent_id IN (2, 6, 7)) UNION ALL SELECT t.x_root_x, nodes.* FROM nodes INNER JOIN t ON (t.id = nodes.parent_id)) SELECT * FROM t AS nodes'] os.must_equal [@c.load(:id=>2, :parent_id=>1, :name=>'AA'), @c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>7, :parent_id=>1, :name=>'D')] os.map{|o| o.descendants}.must_equal [[@c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>9, :parent_id=>2, :name=>'E'), @c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>4, :name=>'?', :parent_id=>7)]] os.map{|o| o.children}.must_equal [[@c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>9, :parent_id=>2, :name=>'E')], [@c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>4, :name=>'?', :parent_id=>7)]] os.map{|o1| o1.children.map{|o2| o2.children}}.must_equal [[[@c.load(:id=>3, :name=>'00', :parent_id=>6)], []], [[]], [[@c.load(:id=>5, :name=>'?', :parent_id=>4)]]] os.map{|o1| o1.children.map{|o2| o2.parent}}.must_equal [[os[0], os[0]], [os[1]], [os[2]]] os.map{|o1| o1.children.map{|o2| o2.children.map{|o3| o3.children}}}.must_equal [[[[]], []], [[]], [[[]]]] os.map{|o1| o1.children.map{|o2| o2.children.map{|o3| o3.parent}}}.must_equal [[[os[0].children[0]], []], [[]], [[os[2].children[0]]]] @db.sqls.must_equal [] end it "should eagerly load descendants when giving options" do @c.plugin :rcte_tree, :primary_key=>:i, :key=>:pi, :key_alias=>:kal, :cte_name=>:cte, :children=>{:name=>:cs}, :descendants=>{:name=>:ds, :select=>[:i, :name, :pi]} @ds = @c.dataset = @c.dataset.with_fetch([[{:i=>2, :pi=>1, :name=>'AA'}, {:i=>6, :pi=>2, :name=>'C'}, {:i=>7, :pi=>1, :name=>'D'}], [{:i=>6, :pi=>2, :name=>'C', :kal=>2}, {:i=>9, :pi=>2, :name=>'E', :kal=>2}, {:i=>3, :name=>'00', :pi=>6, :kal=>6}, {:i=>3, :name=>'00', :pi=>6, :kal=>2}, {:i=>4, :name=>'?', :pi=>7, :kal=>7}, {:i=>5, :name=>'?', :pi=>4, :kal=>7}]]) os = @ds.eager(:ds).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH cte AS (SELECT pi AS kal, nodes.* FROM nodes WHERE (pi IN (2, 6, 7)) UNION ALL SELECT cte.kal, nodes.* FROM nodes INNER JOIN cte ON (cte.i = nodes.pi)) SELECT i, name, pi, kal FROM cte AS nodes'] os.must_equal [@c.load(:i=>2, :pi=>1, :name=>'AA'), @c.load(:i=>6, :pi=>2, :name=>'C'), @c.load(:i=>7, :pi=>1, :name=>'D')] os.map{|o| o.ds}.must_equal [[@c.load(:i=>6, :pi=>2, :name=>'C'), @c.load(:i=>9, :pi=>2, :name=>'E'), @c.load(:i=>3, :name=>'00', :pi=>6)], [@c.load(:i=>3, :name=>'00', :pi=>6)], [@c.load(:i=>4, :name=>'?', :pi=>7), @c.load(:i=>5, :name=>'?', :pi=>4)]] os.map{|o| o.cs}.must_equal [[@c.load(:i=>6, :pi=>2, :name=>'C'), @c.load(:i=>9, :pi=>2, :name=>'E')], [@c.load(:i=>3, :name=>'00', :pi=>6)], [@c.load(:i=>4, :name=>'?', :pi=>7)]] os.map{|o1| o1.cs.map{|o2| o2.cs}}.must_equal [[[@c.load(:i=>3, :name=>'00', :pi=>6)], []], [[]], [[@c.load(:i=>5, :name=>'?', :pi=>4)]]] os.map{|o1| o1.cs.map{|o2| o2.cs.map{|o3| o3.cs}}}.must_equal [[[[]], []], [[]], [[[]]]] @db.sqls.must_equal [] end it "should eagerly load descendants to a given level" do @c.plugin :rcte_tree @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :parent_id=>1, :name=>'AA'}, {:id=>6, :parent_id=>2, :name=>'C'}, {:id=>7, :parent_id=>1, :name=>'D'}], [{:id=>6, :parent_id=>2, :name=>'C', :x_root_x=>2, :x_level_x=>0}, 
{:id=>9, :parent_id=>2, :name=>'E', :x_root_x=>2, :x_level_x=>0}, {:id=>3, :name=>'00', :parent_id=>6, :x_root_x=>6, :x_level_x=>0}, {:id=>3, :name=>'00', :parent_id=>6, :x_root_x=>2, :x_level_x=>1}, {:id=>4, :name=>'?', :parent_id=>7, :x_root_x=>7, :x_level_x=>0}, {:id=>5, :name=>'?', :parent_id=>4, :x_root_x=>7, :x_level_x=>1}]]) os = @ds.eager(:descendants=>2).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH t AS (SELECT parent_id AS x_root_x, nodes.*, CAST(0 AS integer) AS x_level_x FROM nodes WHERE (parent_id IN (2, 6, 7)) UNION ALL SELECT t.x_root_x, nodes.*, (t.x_level_x + 1) AS x_level_x FROM nodes INNER JOIN t ON (t.id = nodes.parent_id) WHERE (t.x_level_x < 1)) SELECT * FROM t AS nodes'] os.must_equal [@c.load(:id=>2, :parent_id=>1, :name=>'AA'), @c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>7, :parent_id=>1, :name=>'D')] os.map{|o| o.descendants}.must_equal [[@c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>9, :parent_id=>2, :name=>'E'), @c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>4, :name=>'?', :parent_id=>7), @c.load(:id=>5, :name=>'?', :parent_id=>4)]] os.map{|o| o.associations[:children]}.must_equal [[@c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>9, :parent_id=>2, :name=>'E')], [@c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>4, :name=>'?', :parent_id=>7)]] os.map{|o1| o1.associations[:children].map{|o2| o2.associations[:children]}}.must_equal [[[@c.load(:id=>3, :name=>'00', :parent_id=>6)], []], [[]], [[@c.load(:id=>5, :name=>'?', :parent_id=>4)]]] os.map{|o1| o1.associations[:children].map{|o2| o2.associations[:children].map{|o3| o3.associations[:children]}}}.must_equal [[[[]], []], [[]], [[nil]]] @db.sqls.must_equal [] end it "should eagerly load descendants to a given level when CTEs require column aliases" do @c.dataset = @c.dataset.with_extend{def recursive_cte_requires_column_aliases?; true end} @c.plugin :rcte_tree @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :parent_id=>1, :name=>'AA'}, {:id=>6, :parent_id=>2, :name=>'C'}, {:id=>7, :parent_id=>1, :name=>'D'}], [{:id=>6, :parent_id=>2, :name=>'C', :x_root_x=>2, :x_level_x=>0}, {:id=>9, :parent_id=>2, :name=>'E', :x_root_x=>2, :x_level_x=>0}, {:id=>3, :name=>'00', :parent_id=>6, :x_root_x=>6, :x_level_x=>0}, {:id=>3, :name=>'00', :parent_id=>6, :x_root_x=>2, :x_level_x=>1}, {:id=>4, :name=>'?', :parent_id=>7, :x_root_x=>7, :x_level_x=>0}, {:id=>5, :name=>'?', :parent_id=>4, :x_root_x=>7, :x_level_x=>1}]]) os = @ds.eager(:descendants=>2).all @db.sqls.must_equal ["SELECT * FROM nodes", "WITH t(x_root_x, id, name, parent_id, i, pi, x_level_x) AS (SELECT parent_id AS x_root_x, nodes.id, nodes.name, nodes.parent_id, nodes.i, nodes.pi, CAST(0 AS integer) AS x_level_x FROM nodes WHERE (parent_id IN (2, 6, 7)) UNION ALL SELECT t.x_root_x, nodes.id, nodes.name, nodes.parent_id, nodes.i, nodes.pi, (t.x_level_x + 1) AS x_level_x FROM nodes INNER JOIN t ON (t.id = nodes.parent_id) WHERE (t.x_level_x < 1)) SELECT * FROM t AS nodes"] os.must_equal [@c.load(:id=>2, :parent_id=>1, :name=>'AA'), @c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>7, :parent_id=>1, :name=>'D')] os.map{|o| o.descendants}.must_equal [[@c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>9, :parent_id=>2, :name=>'E'), @c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>4, :name=>'?', :parent_id=>7), @c.load(:id=>5, :name=>'?', :parent_id=>4)]] os.map{|o| 
o.associations[:children]}.must_equal [[@c.load(:id=>6, :parent_id=>2, :name=>'C'), @c.load(:id=>9, :parent_id=>2, :name=>'E')], [@c.load(:id=>3, :name=>'00', :parent_id=>6)], [@c.load(:id=>4, :name=>'?', :parent_id=>7)]] os.map{|o1| o1.associations[:children].map{|o2| o2.associations[:children]}}.must_equal [[[@c.load(:id=>3, :name=>'00', :parent_id=>6)], []], [[]], [[@c.load(:id=>5, :name=>'?', :parent_id=>4)]]] os.map{|o1| o1.associations[:children].map{|o2| o2.associations[:children].map{|o3| o3.associations[:children]}}}.must_equal [[[[]], []], [[]], [[nil]]] @db.sqls.must_equal [] end it "should eagerly load descendants to a given level when giving options" do @c.plugin :rcte_tree, :primary_key=>:i, :key=>:pi, :key_alias=>:kal, :level_alias=>:lal, :cte_name=>:cte, :children=>{:name=>:cs}, :descendants=>{:name=>:ds} @ds = @c.dataset = @c.dataset.with_fetch([[{:i=>2, :pi=>1, :name=>'AA'}, {:i=>6, :pi=>2, :name=>'C'}, {:i=>7, :pi=>1, :name=>'D'}], [{:i=>6, :pi=>2, :name=>'C', :kal=>2, :lal=>0}, {:i=>9, :pi=>2, :name=>'E', :kal=>2, :lal=>0}, {:i=>3, :name=>'00', :pi=>6, :kal=>6, :lal=>0}, {:i=>3, :name=>'00', :pi=>6, :kal=>2, :lal=>1}, {:i=>4, :name=>'?', :pi=>7, :kal=>7, :lal=>0}, {:i=>5, :name=>'?', :pi=>4, :kal=>7, :lal=>1}]]) os = @ds.eager(:ds=>2).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH cte AS (SELECT pi AS kal, nodes.*, CAST(0 AS integer) AS lal FROM nodes WHERE (pi IN (2, 6, 7)) UNION ALL SELECT cte.kal, nodes.*, (cte.lal + 1) AS lal FROM nodes INNER JOIN cte ON (cte.i = nodes.pi) WHERE (cte.lal < 1)) SELECT * FROM cte AS nodes'] os.must_equal [@c.load(:i=>2, :pi=>1, :name=>'AA'), @c.load(:i=>6, :pi=>2, :name=>'C'), @c.load(:i=>7, :pi=>1, :name=>'D')] os.map{|o| o.ds}.must_equal [[@c.load(:i=>6, :pi=>2, :name=>'C'), @c.load(:i=>9, :pi=>2, :name=>'E'), @c.load(:i=>3, :name=>'00', :pi=>6)], [@c.load(:i=>3, :name=>'00', :pi=>6)], [@c.load(:i=>4, :name=>'?', :pi=>7), @c.load(:i=>5, :name=>'?', :pi=>4)]] os.map{|o| o.associations[:cs]}.must_equal [[@c.load(:i=>6, :pi=>2, :name=>'C'), @c.load(:i=>9, :pi=>2, :name=>'E')], [@c.load(:i=>3, :name=>'00', :pi=>6)], [@c.load(:i=>4, :name=>'?', :pi=>7)]] os.map{|o1| o1.associations[:cs].map{|o2| o2.associations[:cs]}}.must_equal [[[@c.load(:i=>3, :name=>'00', :pi=>6)], []], [[]], [[@c.load(:i=>5, :name=>'?', :pi=>4)]]] os.map{|o1| o1.associations[:cs].map{|o2| o2.associations[:cs].map{|o3| o3.associations[:cs]}}}.must_equal [[[[]], []], [[]], [[nil]]] @db.sqls.must_equal [] end it "should eagerly load descendants respecting association option :conditions" do @c.plugin :rcte_tree, :conditions => {:i => 1} @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :parent_id=>1, :name=>'AA'}, {:id=>6, :parent_id=>2, :name=>'C'}, {:id=>7, :parent_id=>1, :name=>'D'}], [{:id=>6, :parent_id=>2, :name=>'C', :x_root_x=>2}, {:id=>9, :parent_id=>2, :name=>'E', :x_root_x=>2}, {:id=>3, :name=>'00', :parent_id=>6, :x_root_x=>6}, {:id=>3, :name=>'00', :parent_id=>6, :x_root_x=>2}, {:id=>4, :name=>'?', :parent_id=>7, :x_root_x=>7}, {:id=>5, :name=>'?', :parent_id=>4, :x_root_x=>7}]]) @ds.eager(:descendants).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH t AS (SELECT parent_id AS x_root_x, nodes.* FROM nodes WHERE ((parent_id IN (2, 6, 7)) AND (i = 1)) UNION ALL SELECT t.x_root_x, nodes.* FROM nodes INNER JOIN t ON (t.id = nodes.parent_id) WHERE (i = 1)) SELECT * FROM t AS nodes WHERE (i = 1)'] end it "should eagerly load descendants respecting association option :conditions as an array that is not a conditions specifier" do @c.plugin :rcte_tree, 
:conditions => [{:i => 1}] @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :parent_id=>1, :name=>'AA'}, {:id=>6, :parent_id=>2, :name=>'C'}, {:id=>7, :parent_id=>1, :name=>'D'}], [{:id=>6, :parent_id=>2, :name=>'C', :x_root_x=>2}, {:id=>9, :parent_id=>2, :name=>'E', :x_root_x=>2}, {:id=>3, :name=>'00', :parent_id=>6, :x_root_x=>6}, {:id=>3, :name=>'00', :parent_id=>6, :x_root_x=>2}, {:id=>4, :name=>'?', :parent_id=>7, :x_root_x=>7}, {:id=>5, :name=>'?', :parent_id=>4, :x_root_x=>7}]]) @ds.eager(:descendants).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH t AS (SELECT parent_id AS x_root_x, nodes.* FROM nodes WHERE ((parent_id IN (2, 6, 7)) AND (i = 1)) UNION ALL SELECT t.x_root_x, nodes.* FROM nodes INNER JOIN t ON (t.id = nodes.parent_id) WHERE (i = 1)) SELECT * FROM t AS nodes WHERE (i = 1)'] end it "should disallow eager graphing of ancestors and descendants" do @c.plugin :rcte_tree proc{@c.eager_graph(:ancestors)}.must_raise Sequel::Error proc{@c.eager_graph(:descendants)}.must_raise Sequel::Error end end describe Sequel::Model, "rcte_tree with composite keys" do before do @db = Sequel.mock @db.extend_datasets do def supports_cte?(*) true end end @c = Class.new(Sequel::Model(@db[:nodes])) @c.class_eval do def self.name; 'Node'; end columns :id, :id2, :name, :parent_id, :parent_id2, :i, :pi set_primary_key [:id, :id2] end @ds = @c.dataset @o = @c.load(:id=>2, :id2=>5, :parent_id=>1, :parent_id2=>6, :name=>'AA', :i=>3, :pi=>4) @db.sqls end it "should use the correct SQL for lazy associations" do @c.plugin :rcte_tree, :key=>[:parent_id, :parent_id2] @o.parent_dataset.sql.must_equal 'SELECT * FROM nodes WHERE ((nodes.id = 1) AND (nodes.id2 = 6)) LIMIT 1' @o.children_dataset.sql.must_equal 'SELECT * FROM nodes WHERE ((nodes.parent_id = 2) AND (nodes.parent_id2 = 5))' @o.ancestors_dataset.sql.must_equal 'WITH t AS (SELECT * FROM nodes WHERE ((id = 1) AND (id2 = 6)) UNION ALL SELECT nodes.* FROM nodes INNER JOIN t ON ((t.parent_id = nodes.id) AND (t.parent_id2 = nodes.id2))) SELECT * FROM t AS nodes' @o.descendants_dataset.sql.must_equal 'WITH t AS (SELECT * FROM nodes WHERE ((parent_id = 2) AND (parent_id2 = 5)) UNION ALL SELECT nodes.* FROM nodes INNER JOIN t ON ((t.id = nodes.parent_id) AND (t.id2 = nodes.parent_id2))) SELECT * FROM t AS nodes' end it "should use the correct SQL for lazy associations when recursive CTEs require column aliases" do @c.dataset = @c.dataset.with_extend{def recursive_cte_requires_column_aliases?; true end} @c.plugin :rcte_tree, :key=>[:parent_id, :parent_id2] @o.ancestors_dataset.sql.must_equal 'WITH t(id, id2, name, parent_id, parent_id2, i, pi) AS (SELECT id, id2, name, parent_id, parent_id2, i, pi FROM nodes WHERE ((id = 1) AND (id2 = 6)) UNION ALL SELECT nodes.id, nodes.id2, nodes.name, nodes.parent_id, nodes.parent_id2, nodes.i, nodes.pi FROM nodes INNER JOIN t ON ((t.parent_id = nodes.id) AND (t.parent_id2 = nodes.id2))) SELECT * FROM t AS nodes' @o.descendants_dataset.sql.must_equal 'WITH t(id, id2, name, parent_id, parent_id2, i, pi) AS (SELECT id, id2, name, parent_id, parent_id2, i, pi FROM nodes WHERE ((parent_id = 2) AND (parent_id2 = 5)) UNION ALL SELECT nodes.id, nodes.id2, nodes.name, nodes.parent_id, nodes.parent_id2, nodes.i, nodes.pi FROM nodes INNER JOIN t ON ((t.id = nodes.parent_id) AND (t.id2 = nodes.parent_id2))) SELECT * FROM t AS nodes' end it "should use the correct SQL for eager loading when recursive CTEs require column aliases" do @c.dataset = @c.dataset.with_extend{def recursive_cte_requires_column_aliases?; true end} 
@c.plugin :rcte_tree, :key=>[:parent_id, :parent_id2] @c.dataset = @c.dataset.with_fetch([[{:id=>1, :id2=>2, :name=>'A', :parent_id=>3, :parent_id2=>4}]]) @c.eager(:ancestors).all @db.sqls.must_equal ["SELECT * FROM nodes", "WITH t(x_root_x_0, x_root_x_1, id, id2, name, parent_id, parent_id2, i, pi) AS (SELECT id AS x_root_x_0, id2 AS x_root_x_1, nodes.id, nodes.id2, nodes.name, nodes.parent_id, nodes.parent_id2, nodes.i, nodes.pi FROM nodes WHERE ((id, id2) IN ((3, 4))) UNION ALL SELECT t.x_root_x_0, t.x_root_x_1, nodes.id, nodes.id2, nodes.name, nodes.parent_id, nodes.parent_id2, nodes.i, nodes.pi FROM nodes INNER JOIN t ON ((t.parent_id = nodes.id) AND (t.parent_id2 = nodes.id2))) SELECT * FROM t AS nodes"] @c.dataset = @c.dataset.with_fetch([[{:id=>1, :id2=>2, :name=>'A', :parent_id=>3, :parent_id2=>4}]]) @c.eager(:descendants).all @db.sqls.must_equal ["SELECT * FROM nodes", "WITH t(x_root_x_0, x_root_x_1, id, id2, name, parent_id, parent_id2, i, pi) AS (SELECT parent_id AS x_root_x_0, parent_id2 AS x_root_x_1, nodes.id, nodes.id2, nodes.name, nodes.parent_id, nodes.parent_id2, nodes.i, nodes.pi FROM nodes WHERE ((parent_id, parent_id2) IN ((1, 2))) UNION ALL SELECT t.x_root_x_0, t.x_root_x_1, nodes.id, nodes.id2, nodes.name, nodes.parent_id, nodes.parent_id2, nodes.i, nodes.pi FROM nodes INNER JOIN t ON ((t.id = nodes.parent_id) AND (t.id2 = nodes.parent_id2))) SELECT * FROM t AS nodes"] end it "should add all parent associations when lazily loading ancestors" do @c.plugin :rcte_tree, :key=>[:parent_id, :parent_id2] @c.dataset = @c.dataset.with_fetch([[{:id=>1, :id2=>6, :name=>'A', :parent_id=>3, :parent_id2=>5}, {:id=>4, :id2=>8, :name=>'B', :parent_id=>nil, :parent_id2=>nil}, {:id=>3, :id2=>5, :name=>'?', :parent_id=>4, :parent_id2=>8}]]) @o.ancestors.must_equal [@c.load(:id=>1, :id2=>6, :name=>'A', :parent_id=>3, :parent_id2=>5), @c.load(:id=>4, :id2=>8, :name=>'B', :parent_id=>nil, :parent_id2=>nil), @c.load(:id=>3, :id2=>5, :name=>'?', :parent_id=>4, :parent_id2=>8)] @o.associations[:parent].must_equal @c.load(:id=>1, :id2=>6, :name=>'A', :parent_id=>3, :parent_id2=>5) @o.associations[:parent].associations[:parent].must_equal @c.load(:id=>3, :id2=>5, :name=>'?', :parent_id=>4, :parent_id2=>8) @o.associations[:parent].associations[:parent].associations[:parent].must_equal @c.load(:id=>4, :id2=>8, :name=>'B', :parent_id=>nil, :parent_id2=>nil) @o.associations[:parent].associations[:parent].associations[:parent].associations.fetch(:parent, 1).must_be_nil end it "should add all children associations when lazily loading descendants" do @c.plugin :rcte_tree, :key=>[:parent_id, :parent_id2] @c.dataset = @c.dataset.with_fetch([[{:id=>3, :id2=>4, :name=>'??', :parent_id=>1, :parent_id2=>2}, {:id=>1, :id2=>2, :name=>'A', :parent_id=>2, :parent_id2=>5}, {:id=>4, :id2=>5, :name=>'B', :parent_id=>2, :parent_id2=>5}, {:id=>5, :id2=>7, :name=>'?', :parent_id=>3, :parent_id2=>4}]]) @o.descendants.must_equal [@c.load(:id=>3, :id2=>4, :name=>'??', :parent_id=>1, :parent_id2=>2), @c.load(:id=>1, :id2=>2, :name=>'A', :parent_id=>2, :parent_id2=>5), @c.load(:id=>4, :id2=>5, :name=>'B', :parent_id=>2, :parent_id2=>5), @c.load(:id=>5, :id2=>7, :name=>'?', :parent_id=>3, :parent_id2=>4)] @o.associations[:children].must_equal [@c.load(:id=>1, :id2=>2, :name=>'A', :parent_id=>2, :parent_id2=>5), @c.load(:id=>4, :id2=>5, :name=>'B', :parent_id=>2, :parent_id2=>5)] @o.associations[:children].map{|c1| c1.associations[:children]}.must_equal [[@c.load(:id=>3, :id2=>4, :name=>'??', :parent_id=>1, 
:parent_id2=>2)], []] @o.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children]}}.must_equal [[[@c.load(:id=>5, :id2=>7, :name=>'?', :parent_id=>3, :parent_id2=>4)]], []] @o.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children].map{|c3| c3.associations[:children]}}}.must_equal [[[[]]], []] end it "should eagerly load ancestors" do @c.plugin :rcte_tree, :key=>[:parent_id, :parent_id2] @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :id2=>3, :parent_id=>1, :parent_id2=>2, :name=>'AA'}, {:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, :name=>'C'}, {:id=>7, :id2=>8, :parent_id=>1, :parent_id2=>2, :name=>'D'}, {:id=>9, :id2=>10, :parent_id=>nil, :parent_id2=>nil, :name=>'E'}], [{:id=>2, :id2=>3, :name=>'AA', :parent_id=>1, :parent_id2=>2, :x_root_x_0=>2, :x_root_x_1=>3}, {:id=>1, :id2=>2, :name=>'00', :parent_id=>8, :parent_id2=>9, :x_root_x_0=>1, :x_root_x_1=>2}, {:id=>1, :id2=>2, :name=>'00', :parent_id=>8, :parent_id2=>9, :x_root_x_0=>2, :x_root_x_1=>3}, {:id=>8, :id2=>9, :name=>'?', :parent_id=>nil, :parent_id2=>nil, :x_root_x_0=>2, :x_root_x_1=>3}, {:id=>8, :id2=>9, :name=>'?', :parent_id=>nil, :parent_id2=>nil, :x_root_x_0=>1, :x_root_x_1=>2}]]) os = @ds.eager(:ancestors).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH t AS (SELECT id AS x_root_x_0, id2 AS x_root_x_1, nodes.* FROM nodes WHERE ((id, id2) IN ((1, 2), (2, 3))) UNION ALL SELECT t.x_root_x_0, t.x_root_x_1, nodes.* FROM nodes INNER JOIN t ON ((t.parent_id = nodes.id) AND (t.parent_id2 = nodes.id2))) SELECT * FROM t AS nodes'] os.must_equal [@c.load(:id=>2, :id2=>3, :parent_id=>1, :parent_id2=>2, :name=>'AA'), @c.load(:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, :name=>'C'), @c.load(:id=>7, :id2=>8, :parent_id=>1, :parent_id2=>2, :name=>'D'), @c.load(:id=>9, :id2=>10, :parent_id=>nil, :parent_id2=>nil, :name=>'E')] os.map{|o| o.ancestors}.must_equal [[@c.load(:id=>1, :id2=>2, :name=>'00', :parent_id=>8, :parent_id2=>9), @c.load(:id=>8, :id2=>9, :name=>'?', :parent_id=>nil, :parent_id2=>nil)], [@c.load(:id=>2, :id2=>3, :name=>'AA', :parent_id=>1, :parent_id2=>2), @c.load(:id=>1, :id2=>2, :name=>'00', :parent_id=>8, :parent_id2=>9), @c.load(:id=>8, :id2=>9, :name=>'?', :parent_id=>nil, :parent_id2=>nil)], [@c.load(:id=>1, :id2=>2, :name=>'00', :parent_id=>8, :parent_id2=>9), @c.load(:id=>8, :id2=>9, :name=>'?', :parent_id=>nil, :parent_id2=>nil)], []] os.map{|o| o.parent}.must_equal [@c.load(:id=>1, :id2=>2, :name=>'00', :parent_id=>8, :parent_id2=>9), @c.load(:id=>2, :id2=>3, :name=>'AA', :parent_id=>1, :parent_id2=>2), @c.load(:id=>1, :id2=>2, :name=>'00', :parent_id=>8, :parent_id2=>9), nil] os.map{|o| o.parent.parent if o.parent}.must_equal [@c.load(:id=>8, :id2=>9, :name=>'?', :parent_id=>nil, :parent_id2=>nil), @c.load(:id=>1, :id2=>2, :name=>'00', :parent_id=>8, :parent_id2=>9), @c.load(:id=>8, :id2=>9, :name=>'?', :parent_id=>nil, :parent_id2=>nil), nil] os.map{|o| o.parent.parent.parent if o.parent and o.parent.parent}.must_equal [nil, @c.load(:id=>8, :id2=>9, :name=>'?', :parent_id=>nil, :parent_id2=>nil), nil, nil] os.map{|o| o.parent.parent.parent.parent if o.parent and o.parent.parent and o.parent.parent.parent}.must_equal [nil, nil, nil, nil] @db.sqls.must_equal [] end it "should eagerly load descendants" do @c.plugin :rcte_tree, :key=>[:parent_id, :parent_id2] @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :id2=>3, :parent_id=>1, :parent_id2=>2, :name=>'AA'}, {:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, 
:name=>'C'}, {:id=>7, :id2=>8, :parent_id=>1, :parent_id2=>2, :name=>'D'}], [{:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, :name=>'C', :x_root_x_0=>2, :x_root_x_1=>3}, {:id=>9, :id2=>10, :parent_id=>2, :parent_id2=>3, :name=>'E', :x_root_x_0=>2, :x_root_x_1=>3}, {:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7, :x_root_x_0=>6, :x_root_x_1=>7}, {:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7, :x_root_x_0=>2, :x_root_x_1=>3}, {:id=>4, :id2=>5, :name=>'?', :parent_id=>7, :parent_id2=>8, :x_root_x_0=>7, :x_root_x_1=>8}, {:id=>5, :id2=>6, :name=>'?', :parent_id=>4, :parent_id2=>5, :x_root_x_0=>7, :x_root_x_1=>8}]]) os = @ds.eager(:descendants).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH t AS (SELECT parent_id AS x_root_x_0, parent_id2 AS x_root_x_1, nodes.* FROM nodes WHERE ((parent_id, parent_id2) IN ((2, 3), (6, 7), (7, 8))) UNION ALL SELECT t.x_root_x_0, t.x_root_x_1, nodes.* FROM nodes INNER JOIN t ON ((t.id = nodes.parent_id) AND (t.id2 = nodes.parent_id2))) SELECT * FROM t AS nodes'] os.must_equal [@c.load(:id=>2, :id2=>3, :parent_id=>1, :parent_id2=>2, :name=>'AA'), @c.load(:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, :name=>'C'), @c.load(:id=>7, :id2=>8, :parent_id=>1, :parent_id2=>2, :name=>'D')] os.map{|o| o.descendants}.must_equal [[@c.load(:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, :name=>'C'), @c.load(:id=>9, :id2=>10, :parent_id=>2, :parent_id2=>3, :name=>'E'), @c.load(:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7)], [@c.load(:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7)], [@c.load(:id=>4, :id2=>5, :name=>'?', :parent_id=>7, :parent_id2=>8), @c.load(:id=>5, :id2=>6, :name=>'?', :parent_id=>4, :parent_id2=>5)]] os.map{|o| o.children}.must_equal [[@c.load(:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, :name=>'C'), @c.load(:id=>9, :id2=>10, :parent_id=>2, :parent_id2=>3, :name=>'E')], [@c.load(:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7)], [@c.load(:id=>4, :id2=>5, :name=>'?', :parent_id=>7, :parent_id2=>8)]] os.map{|o1| o1.children.map{|o2| o2.children}}.must_equal [[[@c.load(:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7)], []], [[]], [[@c.load(:id=>5, :id2=>6, :name=>'?', :parent_id=>4, :parent_id2=>5)]]] os.map{|o1| o1.children.map{|o2| o2.children.map{|o3| o3.children}}}.must_equal [[[[]], []], [[]], [[[]]]] @db.sqls.must_equal [] end it "should eagerly load descendants to a given level" do @c.plugin :rcte_tree, :key=>[:parent_id, :parent_id2] @ds = @c.dataset = @c.dataset.with_fetch([[{:id=>2, :id2=>3, :parent_id=>1, :parent_id2=>2, :name=>'AA'}, {:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, :name=>'C'}, {:id=>7, :id2=>8, :parent_id=>1, :parent_id2=>2, :name=>'D'}], [{:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, :name=>'C', :x_root_x_0=>2, :x_root_x_1=>3, :x_level_x=>0}, {:id=>9, :id2=>10, :parent_id=>2, :parent_id2=>3, :name=>'E', :x_root_x_0=>2, :x_root_x_1=>3, :x_level_x=>0}, {:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7, :x_root_x_0=>6, :x_root_x_1=>7, :x_level_x=>0}, {:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7, :x_root_x_0=>2, :x_root_x_1=>3, :x_level_x=>1}, {:id=>4, :id2=>5, :name=>'?', :parent_id=>7, :parent_id2=>8, :x_root_x_0=>7, :x_root_x_1=>8, :x_level_x=>0}, {:id=>5, :id2=>6, :name=>'?', :parent_id=>4, :parent_id2=>5, :x_root_x_0=>7, :x_root_x_1=>8, :x_level_x=>1}]]) os = @ds.eager(:descendants=>2).all @db.sqls.must_equal ["SELECT * FROM nodes", 'WITH t AS (SELECT parent_id AS x_root_x_0, parent_id2 AS x_root_x_1, nodes.*, 
CAST(0 AS integer) AS x_level_x FROM nodes WHERE ((parent_id, parent_id2) IN ((2, 3), (6, 7), (7, 8))) UNION ALL SELECT t.x_root_x_0, t.x_root_x_1, nodes.*, (t.x_level_x + 1) AS x_level_x FROM nodes INNER JOIN t ON ((t.id = nodes.parent_id) AND (t.id2 = nodes.parent_id2)) WHERE (t.x_level_x < 1)) SELECT * FROM t AS nodes'] os.must_equal [@c.load(:id=>2, :id2=>3, :parent_id=>1, :parent_id2=>2, :name=>'AA'), @c.load(:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, :name=>'C'), @c.load(:id=>7, :id2=>8, :parent_id=>1, :parent_id2=>2, :name=>'D')] os.map{|o| o.descendants}.must_equal [[@c.load(:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, :name=>'C'), @c.load(:id=>9, :id2=>10, :parent_id=>2, :parent_id2=>3, :name=>'E'), @c.load(:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7)], [@c.load(:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7)], [@c.load(:id=>4, :id2=>5, :name=>'?', :parent_id=>7, :parent_id2=>8), @c.load(:id=>5, :id2=>6, :name=>'?', :parent_id=>4, :parent_id2=>5)]] os.map{|o| o.associations[:children]}.must_equal [[@c.load(:id=>6, :id2=>7, :parent_id=>2, :parent_id2=>3, :name=>'C'), @c.load(:id=>9, :id2=>10, :parent_id=>2, :parent_id2=>3, :name=>'E')], [@c.load(:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7)], [@c.load(:id=>4, :id2=>5, :name=>'?', :parent_id=>7, :parent_id2=>8)]] os.map{|o1| o1.associations[:children].map{|o2| o2.associations[:children]}}.must_equal [[[@c.load(:id=>3, :id2=>4, :name=>'00', :parent_id=>6, :parent_id2=>7)], []], [[]], [[@c.load(:id=>5, :id2=>6, :name=>'?', :parent_id=>4, :parent_id2=>5)]]] os.map{|o1| o1.associations[:children].map{|o2| o2.associations[:children].map{|o3| o3.associations[:children]}}}.must_equal [[[[]], []], [[]], [[nil]]] @db.sqls.must_equal [] end end
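# A minimal usage sketch of the rcte_tree plugin exercised by the specs above
# (a hedged illustration, not part of the spec file; it assumes a conventional
# nodes table with id/parent_id columns, and uses Sequel.mock plus a stubbed
# supports_cte? so the snippet runs without a real database):
#
#   require 'sequel'
#   DB = Sequel.mock(:columns=>[:id, :name, :parent_id])
#   DB.extend_datasets{def supports_cte?(*) true end}
#   class Node < Sequel::Model(DB[:nodes])
#     plugin :rcte_tree  # options such as :key, :primary_key, :cte_name are supported
#   end
#   node = Node.load(:id=>2, :parent_id=>1)
#   node.ancestors_dataset.sql    # recursive WITH query selecting all ancestors
#   node.descendants_dataset.sql  # recursive WITH query selecting all descendants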
sequel-5.63.0/spec/extensions/require_valid_schema_spec.rb

require_relative "spec_helper" describe "Sequel::Model.require_valid_schema" do before do @db = Sequel.mock @db.columns = proc{|sql| [:id]} def @db.supports_schema_parsing?; true end def @db.schema(t, *) t.first_source == :foos ? (raise Sequel::Error) : [[:id, {}]] end @c = Class.new(Sequel::Model) @c.db = @db @c.plugin :require_valid_schema end after do if Object.const_defined?(:Bar) Object.send(:remove_const, :Bar) end if Object.const_defined?(:Foo) Object.send(:remove_const, :Foo) end end it "should raise an exception when creating a model with invalid schema" do proc{class ::Foo < @c; end}.must_raise Sequel::Error end it "should raise an exception when setting the dataset to a table with invalid schema" do proc{@c.set_dataset(:foos)}.must_raise Sequel::Error end it "should raise an exception when setting the dataset to a dataset with invalid schema" do proc{@c.set_dataset(@db[:foos])}.must_raise Sequel::Error end it "should not raise an exception when setting the dataset to a dataset using multiple tables" do @c.set_dataset(@db.from(:foos, :bars)) @c.columns.must_equal [:id] end it "should not raise an exception when creating a model with a valid implicit table" do class ::Bar < @c; end Bar.columns.must_equal [:id] end it "should not raise an exception when setting the dataset to a table with valid schema" do @c.set_dataset(:bars) @c.columns.must_equal [:id] end it "should warn when setting the dataset with invalid schema, when using :warn" do @c.plugin :require_valid_schema, :warn message = nil @c.define_singleton_method(:warn){|msg| message = msg} class ::Foo < @c; end message.must_equal "Not able to parse schema for model: Foo, table: foos" end it "should not raise an exception when creating a model with invalid schema if require_valid_schema is false" do @c.plugin :require_valid_schema, false @c.set_dataset(:foos) @c.columns.must_equal [:id] end end
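# A minimal usage sketch of the require_valid_schema plugin exercised above
# (a hedged illustration, not part of the spec file; the plugin argument
# controls what happens when a model table's schema cannot be parsed):
#
#   Sequel::Model.plugin :require_valid_schema         # raise Sequel::Error
#   Sequel::Model.plugin :require_valid_schema, :warn  # only warn
#   Sequel::Model.plugin :require_valid_schema, false  # disable the check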
sequel-5.63.0/spec/extensions/round_timestamps_spec.rb

require_relative "spec_helper" describe "Sequel::Dataset::RoundTimestamps" do before do @dataset = Sequel.mock.dataset.extension(:round_timestamps) end it "should round times properly for databases supporting microsecond precision" do @dataset.literal(Sequel::SQLTime.create(1, 2, 3, 499999.5)).must_equal "'01:02:03.500000'" @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5.4999995)).must_equal "'2010-01-02 03:04:05.500000'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(54999995, 10000000))).must_equal "'2010-01-02 03:04:05.500000'" @dataset.literal(Sequel::SQLTime.create(1, 2, 3, 499999.4)).must_equal "'01:02:03.499999'" @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5.4999994)).must_equal "'2010-01-02 03:04:05.499999'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(54999994, 10000000))).must_equal "'2010-01-02 03:04:05.499999'" end it "should round times properly for databases supporting millisecond precision" do @dataset = @dataset.with_extend{def timestamp_precision; 3 end} @dataset.literal(Sequel::SQLTime.create(1, 2, 3, 499500)).must_equal "'01:02:03.500'" @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5.4995)).must_equal "'2010-01-02 03:04:05.500'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(54995, 10000))).must_equal "'2010-01-02 03:04:05.500'" @dataset.literal(Sequel::SQLTime.create(1, 2, 3, 499499)).must_equal "'01:02:03.499'" @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5.4994)).must_equal "'2010-01-02 03:04:05.499'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(54994, 10000))).must_equal "'2010-01-02 03:04:05.499'" end it "should round times properly for databases supporting second precision" do @dataset = @dataset.with_extend{def supports_timestamp_usecs?; false end} @dataset.literal(Sequel::SQLTime.create(1, 2, 3, 500000)).must_equal "'01:02:04'" @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5.5)).must_equal "'2010-01-02 03:04:06'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(55, 10))).must_equal "'2010-01-02 03:04:06'" @dataset.literal(Sequel::SQLTime.create(1, 2, 3, 499999)).must_equal "'01:02:03'" @dataset.literal(Time.local(2010, 1, 2, 3, 4, 5.4999999)).must_equal "'2010-01-02 03:04:05'" @dataset.literal(DateTime.new(2010, 1, 2, 3, 4, Rational(54999999, 10000000))).must_equal "'2010-01-02 03:04:05'" end end
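# A minimal usage sketch of the round_timestamps dataset extension exercised
# above (a hedged illustration, not part of the spec file): fractional seconds
# are rounded, rather than truncated, to the precision the dataset supports
# when a time value is literalized. With the mock adapter's default
# microsecond precision:
#
#   ds = Sequel.mock.dataset.extension(:round_timestamps)
#   ds.literal(Time.local(2010, 1, 2, 3, 4, 5.4999995))
#   # => "'2010-01-02 03:04:05.500000'"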
sequel-5.63.0/spec/extensions/run_transaction_hooks_spec.rb

require_relative "spec_helper" describe "run_transaction_hooks extension" do before do @db = Sequel.mock.extension(:run_transaction_hooks) end it "should support #run_after_{commit,rollback} hooks to run the hooks early" do @db.transaction do @db.sqls.must_equal ["BEGIN"] @db.run_after_commit_hooks @db.run_after_rollback_hooks @db.after_commit{@db.run "C"} @db.after_commit{@db.run "C2"} @db.after_rollback{@db.run "R"} @db.after_rollback{@db.run "R2"} @db.sqls.must_equal [] @db.run_after_commit_hooks @db.sqls.must_equal ["C", "C2"] @db.run_after_rollback_hooks @db.sqls.must_equal ["R", "R2"] end @db.sqls.must_equal ["COMMIT"] @db.transaction(:rollback=>:always) do @db.after_commit{@db.run "C"} @db.after_commit{@db.run "C2"} @db.after_rollback{@db.run "R"} @db.after_rollback{@db.run "R2"} @db.sqls.must_equal ["BEGIN"] @db.run_after_commit_hooks @db.sqls.must_equal ["C", "C2"] @db.run_after_rollback_hooks @db.sqls.must_equal ["R", "R2"] end @db.sqls.must_equal ["ROLLBACK"] end it "should support #run_after_{commit,rollback} hooks to run the hooks early when savepoints are not supported" do def @db.supports_savepoints?; false end @db.transaction do @db.sqls.must_equal ["BEGIN"] @db.run_after_commit_hooks @db.run_after_rollback_hooks @db.after_commit{@db.run "C"} @db.after_commit{@db.run "C2"} @db.after_rollback{@db.run "R"} @db.after_rollback{@db.run "R2"} @db.sqls.must_equal [] @db.run_after_commit_hooks @db.sqls.must_equal ["C", "C2"] @db.run_after_rollback_hooks @db.sqls.must_equal ["R", "R2"] end @db.sqls.must_equal ["COMMIT"] @db.transaction(:rollback=>:always) do @db.after_commit{@db.run "C"} @db.after_commit{@db.run "C2"} @db.after_rollback{@db.run "R"} @db.after_rollback{@db.run "R2"} @db.sqls.must_equal ["BEGIN"] @db.run_after_commit_hooks @db.sqls.must_equal ["C", "C2"] @db.run_after_rollback_hooks @db.sqls.must_equal ["R", "R2"] end @db.sqls.must_equal ["ROLLBACK"] end it "should not run the same hooks on transaction completion when using #run_after_{commit,rollback} hooks" do @db.transaction do @db.after_commit{@db.run "C"} @db.after_commit{@db.run "C2"} @db.after_rollback{@db.run "R"} @db.after_rollback{@db.run "R2"} @db.sqls.must_equal ["BEGIN"] @db.run_after_commit_hooks @db.run_after_rollback_hooks @db.sqls end @db.sqls.must_equal ["COMMIT"] @db.transaction(:rollback=>:always) do @db.after_commit{@db.run "C"} @db.after_commit{@db.run "C2"} @db.after_rollback{@db.run "R"} @db.after_rollback{@db.run "R2"} @db.sqls.must_equal ["BEGIN"] @db.run_after_commit_hooks @db.run_after_rollback_hooks @db.sqls end @db.sqls.must_equal ["ROLLBACK"] end it "should handle savepoint hooks in #run_after_{commit,rollback} hooks" do @db.transaction do @db.after_commit{@db.run "C"} @db.after_rollback{@db.run "R"} @db.transaction(:savepoint=>:true) do @db.after_commit(:savepoint=>true){@db.run "SC"} @db.after_rollback(:savepoint=>true){@db.run "SR"} @db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1"] @db.run_after_commit_hooks @db.sqls.must_equal ["C", "SC"] @db.run_after_rollback_hooks @db.sqls.must_equal ["R", "SR"] end end @db.sqls.must_equal ["RELEASE SAVEPOINT autopoint_1", "COMMIT"] @db.transaction(:rollback=>:always) do @db.after_commit{@db.run "C"} @db.after_rollback{@db.run "R"} @db.transaction(:savepoint=>:true) do @db.after_commit(:savepoint=>true){@db.run "SC"} @db.after_rollback(:savepoint=>true){@db.run "SR"} end @db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "RELEASE SAVEPOINT autopoint_1"] @db.run_after_commit_hooks @db.sqls.must_equal ["C", "SC"] @db.run_after_rollback_hooks @db.sqls.must_equal ["R", "SR"] end @db.sqls.must_equal ["ROLLBACK"] @db.transaction(:rollback=>:always) do @db.after_commit{@db.run "C"} @db.after_rollback{@db.run "R"} @db.transaction(:savepoint=>:true, :rollback=>:always) do @db.after_commit(:savepoint=>true){@db.run "SC"} @db.after_rollback(:savepoint=>true){@db.run "SR"} end @db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "ROLLBACK TO SAVEPOINT autopoint_1", "SR"] @db.run_after_commit_hooks @db.sqls.must_equal ["C"] @db.run_after_rollback_hooks @db.sqls.must_equal ["R"] end @db.sqls.must_equal ["ROLLBACK"] end it "should raise Error if trying to run transaction hooks outside of a transaction" do proc{@db.run_after_commit_hooks}.must_raise Sequel::Error proc{@db.run_after_rollback_hooks}.must_raise Sequel::Error end end
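# A minimal usage sketch of the run_transaction_hooks extension exercised
# above (a hedged illustration, not part of the spec file): inside an open
# transaction, registered hooks can be forced to run early, and are then not
# run again when the transaction finishes:
#
#   DB = Sequel.mock.extension(:run_transaction_hooks)
#   DB.transaction do
#     DB.after_commit{puts "committed"}
#     DB.run_after_commit_hooks  # prints "committed" immediately
#   end                          # the COMMIT does not run the hook again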
sequel-5.63.0/spec/extensions/s_spec.rb

require_relative "spec_helper" Sequel.extension :s describe "s extension as mixin" do include Sequel::S before do @db = Sequel.mock end it "S should be callable with different arguments" do @db.literal(S(:s) + 1).must_equal "(s + 1)" @db.literal(S('s') + '1').must_equal "('s' || '1')" @db.literal(~S([[:s, 1], [:z, 2]])).must_equal "((s != 1) OR (z != 2))" end it "S should be callable with blocks" do @db.literal(S{x + 1}).must_equal "(x + 1)" end it "S should raise an error if called with multiple objects" do proc{S(:x, 1)}.must_raise ArgumentError end it "S should raise an error if called with objects and block" do proc{S(:x){}}.must_raise Sequel::Error end end if (RUBY_VERSION >= '2.0.0' && RUBY_ENGINE == 'ruby') || (RUBY_ENGINE == 'jruby' && (JRUBY_VERSION >= '9.3' || (JRUBY_VERSION.match(/\A9\.2\.(\d+)/) && $1.to_i >= 7))) using Sequel::S describe "s extension as refinement" do before do @db = Sequel.mock end it "S should be callable with different arguments" do @db.literal(S(:s) + 1).must_equal "(s + 1)" @db.literal(S('s') + '1').must_equal "('s' || '1')" @db.literal(~S([[:s, 1], [:z, 2]])).must_equal "((s != 1) OR (z != 2))" end it "S should be callable with blocks" do @db.literal(S{x + 1}).must_equal "(x + 1)" end it "S should raise an error if called with multiple objects" do proc{S(:x, 1)}.must_raise ArgumentError end it "S should raise an error if called with objects and block" do proc{S(:x){}}.must_raise Sequel::Error end end end
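# A minimal usage sketch of the s extension exercised above (a hedged
# illustration, not part of the spec file): S() wraps its argument, or a
# virtual-row block, in a Sequel expression object, and Sequel::S can be
# used either as a mixin or (on supported Rubies) as a refinement:
#
#   Sequel.extension :s
#   include Sequel::S
#   DB = Sequel.mock
#   DB.literal(S(:s) + 1)  # => "(s + 1)"
#   DB.literal(S{x + 1})   # => "(x + 1)"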
sequel-5.63.0/spec/extensions/schema_caching_spec.rb

require_relative "spec_helper" describe "schema_caching extension" do before do @db = Sequel.mock.extension(:schema_caching) @schemas = {'"table"'=>[[:column, {:db_type=>"integer", :default=>"nextval('table_id_seq'::regclass)", :allow_null=>false, :primary_key=>true, :type=>:integer, :ruby_default=>nil}]]} @filename = "spec/files/test_schema_#$$.dump" @db.instance_variable_set(:@schemas, @schemas) end after do File.delete(@filename) if File.exist?(@filename) end it "Database#dump_schema_cache should dump cached schema to the given file" do File.exist?(@filename).must_equal false @db.dump_schema_cache(@filename) File.exist?(@filename).must_equal true File.size(@filename).must_be :>, 0 end it "Database#dump_schema_cache/load_schema_cache should work with :callable_default values set in schema_post_process" do @schemas['"table"'][0][1][:callable_default] = lambda{1} @schemas['"table"'][0][1][:default] = 'call_1' @db.dump_schema_cache(@filename) db = Sequel.mock(:host=>'postgres').extension(:schema_caching) def db.schema_post_process(_) super.each{|_, c| c[:callable_default] = lambda{1} if c[:default] == 'call_1'} end db.singleton_class.send(:private, :schema_post_process) db.load_schema_cache(@filename) db.schema(:table)[0][1][:callable_default].call.must_equal 1 end it "Database#load_schema_cache should load cached schema from the given file dumped by #dump_schema_cache" do @db.dump_schema_cache(@filename) db = Sequel::Database.new.extension(:schema_caching) db.load_schema_cache(@filename) @db.instance_variable_get(:@schemas).must_equal @schemas end it "Database#load_schema_cache should have frozen string values in the schema caches" do @db.dump_schema_cache(@filename) db = Sequel.mock(:host=>'postgres').extension(:schema_caching) db.load_schema_cache(@filename) h = db.schema(:table)[0][1] h[:db_type].must_equal 'integer' h[:db_type].frozen?.must_equal true h[:default].must_equal "nextval('table_id_seq'::regclass)" h[:default].frozen?.must_equal true end it "Database#dump_schema_cache? should dump cached schema to the given file unless the file exists" do @db.dump_schema_cache?(@filename) File.size(@filename).wont_equal 0 File.open(@filename, 'wb'){|f|} File.size(@filename).must_equal 0 @db.dump_schema_cache?(@filename) File.size(@filename).must_equal 0 end it "Database#load_schema_cache? should not load cached schema if the file does not exist" do db = Sequel::Database.new.extension(:schema_caching) File.exist?(@filename).must_equal false db.load_schema_cache?(@filename) db.instance_variable_get(:@schemas).must_equal({}) end it "Database#load_schema_cache? should load cached schema from the given file if it exists" do db = Sequel::Database.new.extension(:schema_caching) File.exist?(@filename).must_equal false db.load_schema_cache?(@filename) db.instance_variable_get(:@schemas).must_equal({}) @db.dump_schema_cache(@filename) db.load_schema_cache?(@filename) @db.instance_variable_get(:@schemas).must_equal @schemas end end
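# A minimal usage sketch of the schema_caching extension exercised above
# (a hedged illustration, not part of the spec file): schema metadata parsed
# by one process can be dumped to a file and reloaded later, skipping schema
# parsing on startup; the ? variants act only when the file is absent/present:
#
#   DB = Sequel.mock.extension(:schema_caching)
#   DB.dump_schema_cache?('schema.cache')  # dump unless the file already exists
#   DB.load_schema_cache?('schema.cache')  # load only if the file exists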
sequel-5.63.0/spec/extensions/schema_dumper_spec.rb

require_relative "spec_helper" describe "Sequel::Schema::CreateTableGenerator dump methods" do before do @d = Sequel::Database.new.extension(:schema_dumper) @g = Sequel::Schema::CreateTableGenerator end it "should allow the same table information to be converted to a string for evaling inside of another instance with the same result" do g = @g.new(@d) do Integer :a varchar :b column :dt, DateTime column :vc, :varchar primary_key :c foreign_key :d, :a foreign_key :e foreign_key [:d, :e], :name=>:cfk constraint :blah, "a=1" check :a=>1 unique [:e] index :a index [:c, :e] index [:b, :c], :type=>:hash index [:d], :unique=>true spatial_index :a full_text_index [:b, :c] end g2 = @g.new(@d) do instance_eval(g.dump_columns, __FILE__, __LINE__) instance_eval(g.dump_constraints, __FILE__, __LINE__) instance_eval(g.dump_indexes, __FILE__, __LINE__) end g.columns.must_equal g2.columns g.constraints.must_equal g2.constraints g.indexes.must_equal g2.indexes end it "should respect :keep_order option to primary_key" do g = @g.new(@d) do Integer :a primary_key :c, :keep_order=>true end g2 = @g.new(@d) do instance_eval(g.dump_columns, __FILE__, __LINE__) end g.columns.must_equal g2.columns end it "should respect :keep_order option to primary_key with primary key type" do g = @g.new(@d) do Integer :a primary_key :c, :keep_order=>true, :type=>:Bignum end g2 = @g.new(@d) do instance_eval(g.dump_columns, __FILE__, __LINE__) end g.columns.must_equal g2.columns end it "should allow dumping indexes as separate add_index and drop_index methods" do g = @g.new(@d) do index :a index [:c, :e], :name=>:blah index [:b, :c], :unique=>true end g.dump_indexes(:add_index=>:t).must_equal((<<END_CODE).strip) add_index :t, [:a] add_index :t, [:c, :e], :name=>:blah add_index :t, [:b, :c], :unique=>true END_CODE g.dump_indexes(:drop_index=>:t).must_equal((<<END_CODE).strip) drop_index :t, [:b, :c], :unique=>true drop_index :t, [:c, :e], :name=>:blah drop_index :t, [:a] END_CODE end it "should raise an error if you try to dump a Generator that uses a constraint with a proc" do proc{@g.new(@d){check{a>1}}.dump_constraints}.must_raise(Sequel::Error) end end describe "Sequel::Database dump methods" do before do @d = Sequel::Database.new.extension(:schema_dumper) def @d.tables(o) o[:schema] ? [o[:schema]] : [:t1, :t2] end @d.singleton_class.send(:alias_method, :tables, :tables) def @d.schema(t, *o) v = case t when :t1, 't__t1', Sequel.identifier(:t__t1) [[:c1, {:db_type=>'integer', :primary_key=>true, :auto_increment=>true, :allow_null=>false}], [:c2, {:db_type=>'varchar(20)', :allow_null=>true}]] when :t2 [[:c1, {:db_type=>'integer', :primary_key=>true, :allow_null=>false}], [:c2, {:db_type=>'numeric', :primary_key=>true, :allow_null=>false}]] when :t3 [[:c2, {:db_type=>'varchar(20)', :allow_null=>true}], [:c1, {:db_type=>'integer', :primary_key=>true, :auto_increment=>true, :allow_null=>false}]] when :t5 [[:c1, {:db_type=>'blahblah', :allow_null=>true}]] when :t6 [[:c1, {:db_type=>'integer', :allow_null=>false, :generated=>true, :default=>'1', :ruby_default=>1}]] when :t7 [[:c1, {:db_type=>'integer', :allow_null=>true, :generated=>true, :default=>'(a + b)', :ruby_default=>nil}]] end if o.first.is_a?(Hash) && o.first[:schema] v.last.last[:db_type] = o.first[:schema] end v end @d.singleton_class.send(:alias_method, :schema, :schema) end it "should support dumping table with :schema option" do @d.dump_table_schema(:t1, :schema=>'varchar(15)').must_equal "create_table(:t1) do\n primary_key :c1\n String :c2, :size=>15\nend" end it "should support dumping table schemas as create_table method calls" do @d.dump_table_schema(:t1).must_equal "create_table(:t1) do\n primary_key :c1\n String :c2, :size=>20\nend" end it "should support dumping table schemas when given a string" do @d.dump_table_schema('t__t1').must_equal "create_table(\"t__t1\") do\n primary_key :c1\n String :c2, :size=>20\nend" end it "should support dumping table schemas when given an identifier" do @d.dump_table_schema(Sequel.identifier(:t__t1)).must_equal "create_table(Sequel::SQL::Identifier.new(:t__t1)) do\n primary_key :c1\n String :c2, :size=>20\nend" end it "should dump non-Integer primary key columns with explicit :type" do def @d.schema(*s) [[:c1, {:db_type=>'bigint', :primary_key=>true, :allow_null=>true, :auto_increment=>true}]] end @d.dump_table_schema(:t6).must_equal "create_table(:t6) do\n primary_key :c1, :type=>:Bignum\nend" end
it "should dump non-Integer primary key columns with explicit :type when using :same_db=>true" do def @d.schema(*s) [[:c1, {:db_type=>'bigint', :primary_key=>true, :allow_null=>true, :auto_increment=>true}]] end @d.dump_table_schema(:t6, :same_db=>true).must_equal "create_table(:t6) do\n primary_key :c1, :type=>:Bignum\nend" end it "should dump auto incrementing primary keys with :keep_order option if they are not first" do @d.dump_table_schema(:t3).must_equal "create_table(:t3) do\n String :c2, :size=>20\n primary_key :c1, :keep_order=>true\nend" end it "should handle foreign keys" do def @d.schema(*s) [[:c1, {:db_type=>'integer', :allow_null=>true}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(*s) [{:columns=>[:c1], :table=>:t2, :key=>[:c2]}] end @d.dump_table_schema(:t6).must_equal "create_table(:t6) do\n foreign_key :c1, :t2, :key=>[:c2]\nend" end it "should handle primary keys that are also foreign keys" do def @d.schema(*s) [[:c1, {:db_type=>'integer', :primary_key=>true, :allow_null=>true, :auto_increment=>true}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(*s) [{:columns=>[:c1], :table=>:t2, :key=>[:c2]}] end @d.dump_table_schema(:t6).must_equal((<<OUTPUT).chomp) create_table(:t6) do primary_key :c1, :table=>:t2, :key=>[:c2] end OUTPUT end it "should handle foreign key options" do def @d.schema(*s) [[:c1, {:db_type=>'integer', :allow_null=>true}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(*s) [{:columns=>[:c1], :table=>:t2, :key=>[:c2], :on_delete=>:restrict, :on_update=>:set_null, :deferrable=>true}] end @d.dump_table_schema(:t6).must_equal((<<OUTPUT).chomp) create_table(:t6) do foreign_key :c1, :t2, :key=>[:c2], :on_delete=>:restrict, :on_update=>:set_null, :deferrable=>true end OUTPUT end it "should handle foreign key options in the primary key" do def @d.schema(*s) [[:c1, {:db_type=>'integer', :primary_key=>true, :allow_null=>true, :auto_increment=>true}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(*s) [{:columns=>[:c1], :table=>:t2, :key=>[:c2], :on_delete=>:restrict, :on_update=>:set_null, :deferrable=>true}] end @d.dump_table_schema(:t6).must_equal((<<OUTPUT).chomp) create_table(:t6) do primary_key :c1, :table=>:t2, :key=>[:c2], :on_delete=>:restrict, :on_update=>:set_null, :deferrable=>true end OUTPUT end it "should omit foreign key options that are the same as defaults" do def @d.schema(*s) [[:c1, {:db_type=>'integer', :allow_null=>true}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(*s) [{:columns=>[:c1], :table=>:t2, :key=>[:c2], :on_delete=>:no_action, :on_update=>:no_action, :deferrable=>false}] end @d.dump_table_schema(:t6).must_equal((<<OUTPUT).chomp) create_table(:t6) do foreign_key :c1, :t2, :key=>[:c2] end OUTPUT end it "should omit foreign key options that are the same as defaults in the primary key" do def @d.schema(*s) [[:c1, {:db_type=>'integer', :primary_key=>true, :allow_null=>true, :auto_increment=>true}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(*s) [{:columns=>[:c1], :table=>:t2, :key=>[:c2], :on_delete=>:no_action, :on_update=>:no_action, :deferrable=>false}] end @d.dump_table_schema(:t6).must_equal((<<OUTPUT).chomp) create_table(:t6) do primary_key :c1, :table=>:t2, :key=>[:c2] end OUTPUT end it "should dump primary key columns with explicit type equal to the database type when :same_db option is passed" do def @d.schema(*s) [[:c1, {:db_type=>'somedbspecifictype', 
:primary_key=>true, :allow_null=>false}]] end @d.dump_table_schema(:t7, :same_db => true).must_equal "create_table(:t7) do\n column :c1, \"somedbspecifictype\", :null=>false\n \n primary_key [:c1]\nend" end it "should use a composite primary_key calls if there is a composite primary key" do @d.dump_table_schema(:t2).must_equal "create_table(:t2) do\n Integer :c1, :null=>false\n BigDecimal :c2, :null=>false\n \n primary_key [:c1, :c2]\nend" end it "should use a composite foreign_key calls if there is a composite foreign key" do def @d.schema(*s) [[:c1, {:db_type=>'integer'}], [:c2, {:db_type=>'integer'}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(*s) [{:columns=>[:c1, :c2], :table=>:t2, :key=>[:c3, :c4]}] end @d.dump_table_schema(:t1).must_equal "create_table(:t1) do\n Integer :c1\n Integer :c2\n \n foreign_key [:c1, :c2], :t2, :key=>[:c3, :c4]\nend" end it "should use a composite foreign_key calls with options" do def @d.schema(*s) [[:c1, {:db_type=>'integer'}], [:c2, {:db_type=>'integer'}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(*s) [{:columns=>[:c1, :c2], :table=>:t2, :key=>[:c3, :c4], :on_delete=>:no_action, :on_update=>:no_action, :deferrable=>true}] end @d.dump_table_schema(:t1).must_equal "create_table(:t1) do\n Integer :c1\n Integer :c2\n \n foreign_key [:c1, :c2], :t2, :key=>[:c3, :c4], :deferrable=>true\nend" end it "should include index information if available" do def @d.supports_index_parsing?; true end def @d.indexes(t) {:i1=>{:columns=>[:c1], :unique=>false}, :t1_c2_c1_index=>{:columns=>[:c2, :c1], :unique=>true, :deferrable=>true}} end @d.dump_table_schema(:t1).must_equal "create_table(:t1, :ignore_index_errors=>true) do\n primary_key :c1\n String :c2, :size=>20\n \n index [:c1], :name=>:i1\n index [:c2, :c1], :unique=>true, :deferrable=>true\nend" end it "should support dumping the whole database as a migration with a :schema option" do @d.dump_schema_migration(:schema=>'t__t1').must_equal <<-END_MIG Sequel.migration do change do create_table("t__t1") do primary_key :c1 String :c2 end end end END_MIG end it "should support dumping the whole database as a migration" do @d.dump_schema_migration.must_equal <<-END_MIG Sequel.migration do change do create_table(:t1) do primary_key :c1 String :c2, :size=>20 end create_table(:t2) do Integer :c1, :null=>false BigDecimal :c2, :null=>false primary_key [:c1, :c2] end end end END_MIG end it "should sort table names when dumping a migration" do def @d.tables(o) [:t2, :t1] end @d.dump_schema_migration.must_equal <<-END_MIG Sequel.migration do change do create_table(:t1) do primary_key :c1 String :c2, :size=>20 end create_table(:t2) do Integer :c1, :null=>false BigDecimal :c2, :null=>false primary_key [:c1, :c2] end end end END_MIG end it "should sort table names topologically when dumping a migration with foreign keys" do def @d.tables(o) [:t1, :t2] end def @d.schema(t, *o) t == :t1 ? [[:c2, {:db_type=>'integer'}]] : [[:c1, {:db_type=>'integer', :primary_key=>true, :auto_increment=>true}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(t) t == :t1 ? 
[{:columns=>[:c2], :table=>:t2, :key=>[:c1]}] : [] end @d.dump_schema_migration.must_equal <<-END_MIG Sequel.migration do change do create_table(:t2) do primary_key :c1 end create_table(:t1) do foreign_key :c2, :t2, :key=>[:c1] end end end END_MIG end it "should handle circular dependencies when dumping a migration with foreign keys" do def @d.tables(o) [:t1, :t2] end def @d.schema(t, *o) t == :t1 ? [[:c2, {:db_type=>'integer'}]] : [[:c1, {:db_type=>'integer'}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(t) t == :t1 ? [{:columns=>[:c2], :table=>:t2, :key=>[:c1]}] : [{:columns=>[:c1], :table=>:t1, :key=>[:c2]}] end @d.dump_schema_migration.must_equal <<-END_MIG Sequel.migration do change do create_table(:t1) do Integer :c2 end create_table(:t2) do foreign_key :c1, :t1, :key=>[:c2] end alter_table(:t1) do add_foreign_key [:c2], :t2, :key=>[:c1] end end end END_MIG end it "should sort topologically even if the database raises an error when trying to parse foreign keys for a non-existent table" do def @d.tables(o) [:t1, :t2] end def @d.schema(t, *o) t == :t1 ? [[:c2, {:db_type=>'integer'}]] : [[:c1, {:db_type=>'integer', :primary_key=>true, :auto_increment=>true}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(t) raise Sequel::DatabaseError unless [:t1, :t2].include?(t) t == :t1 ? [{:columns=>[:c2], :table=>:t2, :key=>[:c1]}] : [] end @d.dump_schema_migration.must_equal <<-END_MIG Sequel.migration do change do create_table(:t2) do primary_key :c1 end create_table(:t1) do foreign_key :c2, :t2, :key=>[:c1] end end end END_MIG end it "should honor the :same_db option to not convert types" do @d.dump_table_schema(:t1, :same_db=>true).must_equal "create_table(:t1) do\n primary_key :c1\n column :c2, \"varchar(20)\"\nend" @d.dump_schema_migration(:same_db=>true).must_equal <<-END_MIG Sequel.migration do change do create_table(:t1) do primary_key :c1 column :c2, "varchar(20)" end create_table(:t2) do column :c1, "integer", :null=>false column :c2, "numeric", :null=>false primary_key [:c1, :c2] end end end END_MIG end it "should honor the :index_names => false option to not include names of indexes" do def @d.supports_index_parsing?; true end def @d.indexes(t) {:i1=>{:columns=>[:c1], :unique=>false}, :t1_c2_c1_index=>{:columns=>[:c2, :c1], :unique=>true}} end @d.dump_table_schema(:t1, :index_names=>false).must_equal "create_table(:t1, :ignore_index_errors=>true) do\n primary_key :c1\n String :c2, :size=>20\n \n index [:c1]\n index [:c2, :c1], :unique=>true\nend" @d.dump_schema_migration(:index_names=>false).must_equal <<-END_MIG Sequel.migration do change do create_table(:t1, :ignore_index_errors=>true) do primary_key :c1 String :c2, :size=>20 index [:c1] index [:c2, :c1], :unique=>true end create_table(:t2, :ignore_index_errors=>true) do Integer :c1, :null=>false BigDecimal :c2, :null=>false primary_key [:c1, :c2] index [:c1] index [:c2, :c1], :unique=>true end end end END_MIG end it "should make :index_names => :namespace option a noop if there is a global index namespace" do def @d.supports_index_parsing?; true end def @d.indexes(t) {:i1=>{:columns=>[:c1], :unique=>false}, :t1_c2_c1_index=>{:columns=>[:c2, :c1], :unique=>false}} end @d.dump_table_schema(:t1, :index_names=>:namespace).must_equal "create_table(:t1, :ignore_index_errors=>true) do\n primary_key :c1\n String :c2, :size=>20\n \n index [:c1], :name=>:i1\n index [:c2, :c1]\nend" @d.dump_schema_migration(:index_names=>:namespace).must_equal <<-END_MIG Sequel.migration do change do 
create_table(:t1, :ignore_index_errors=>true) do primary_key :c1 String :c2, :size=>20 index [:c1], :name=>:i1 index [:c2, :c1] end create_table(:t2, :ignore_index_errors=>true) do Integer :c1, :null=>false BigDecimal :c2, :null=>false primary_key [:c1, :c2] index [:c1], :name=>:i1 index [:c2, :c1], :name=>:t1_c2_c1_index end end end END_MIG end it "should honor the :index_names => :namespace option to include names of indexes with prepended table name if there is no global index namespace" do def @d.global_index_namespace?; false end def @d.supports_index_parsing?; true end def @d.indexes(t) {:i1=>{:columns=>[:c1], :unique=>false}, :t1_c2_c1_index=>{:columns=>[:c2, :c1], :unique=>false}} end @d.dump_table_schema(:t1, :index_names=>:namespace).must_equal "create_table(:t1, :ignore_index_errors=>true) do\n primary_key :c1\n String :c2, :size=>20\n \n index [:c1], :name=>:t1_i1\n index [:c2, :c1]\nend" @d.dump_schema_migration(:index_names=>:namespace).must_equal <<-END_MIG Sequel.migration do change do create_table(:t1, :ignore_index_errors=>true) do primary_key :c1 String :c2, :size=>20 index [:c1], :name=>:t1_i1 index [:c2, :c1] end create_table(:t2, :ignore_index_errors=>true) do Integer :c1, :null=>false BigDecimal :c2, :null=>false primary_key [:c1, :c2] index [:c1], :name=>:t2_i1 index [:c2, :c1], :name=>:t2_t1_c2_c1_index end end end END_MIG end it "should honor the :indexes => false option to not include indexes" do def @d.supports_index_parsing?; true end def @d.indexes(t) {:i1=>{:columns=>[:c1], :unique=>false}, :t1_c2_c1_index=>{:columns=>[:c2, :c1], :unique=>true}} end @d.dump_table_schema(:t1, :indexes=>false).must_equal "create_table(:t1) do\n primary_key :c1\n String :c2, :size=>20\nend" @d.dump_schema_migration(:indexes=>false).must_equal <<-END_MIG Sequel.migration do change do create_table(:t1) do primary_key :c1 String :c2, :size=>20 end create_table(:t2) do Integer :c1, :null=>false BigDecimal :c2, :null=>false primary_key [:c1, :c2] end end end END_MIG end it "should have :indexes => false option disable foreign keys as well when dumping a whole migration" do def @d.foreign_key_list(t) t == :t1 ? [{:columns=>[:c2], :table=>:t2, :key=>[:c1]}] : [] end @d.dump_schema_migration(:indexes=>false).wont_match(/foreign_key/) end it "should have :foreign_keys option override :indexes => false disabling of foreign keys" do def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(t) t == :t1 ? 
[{:columns=>[:c2], :table=>:t2, :key=>[:c1]}] : [] end @d.dump_schema_migration(:indexes=>false, :foreign_keys=>true).must_equal(<<OUTPUT) Sequel.migration do change do create_table(:t2) do Integer :c1, :null=>false BigDecimal :c2, :null=>false primary_key [:c1, :c2] end create_table(:t1) do primary_key :c1 foreign_key :c2, :t2, :type=>String, :size=>20, :key=>[:c1] end end end OUTPUT end it "should support dumping just indexes as a migration" do def @d.tables(o) [:t1] end def @d.supports_index_parsing?; true end def @d.indexes(t) {:i1=>{:columns=>[:c1], :unique=>false}, :t1_c2_c1_index=>{:columns=>[:c2, :c1], :unique=>true}} end @d.dump_indexes_migration.must_equal <<-END_MIG Sequel.migration do change do add_index :t1, [:c1], :ignore_errors=>true, :name=>:i1 add_index :t1, [:c2, :c1], :ignore_errors=>true, :unique=>true end end END_MIG end it "should honor the :index_names => false option to not include names of indexes when dumping just indexes as a migration" do def @d.tables(o) [:t1] end def @d.supports_index_parsing?; true end def @d.indexes(t) {:i1=>{:columns=>[:c1], :unique=>false}, :t1_c2_c1_index=>{:columns=>[:c2, :c1], :unique=>true}} end @d.dump_indexes_migration(:index_names=>false).must_equal <<-END_MIG Sequel.migration do change do add_index :t1, [:c1], :ignore_errors=>true add_index :t1, [:c2, :c1], :ignore_errors=>true, :unique=>true end end END_MIG end it "should honor the :index_names => :namespace option be a noop if there is a global index namespace" do def @d.tables(o) [:t1, :t2] end def @d.supports_index_parsing?; true end def @d.indexes(t) {:i1=>{:columns=>[:c1], :unique=>false}, :t1_c2_c1_index=>{:columns=>[:c2, :c1], :unique=>false}} end @d.dump_indexes_migration(:index_names=>:namespace).must_equal <<-END_MIG Sequel.migration do change do add_index :t1, [:c1], :ignore_errors=>true, :name=>:i1 add_index :t1, [:c2, :c1], :ignore_errors=>true add_index :t2, [:c1], :ignore_errors=>true, :name=>:i1 add_index :t2, [:c2, :c1], :ignore_errors=>true, :name=>:t1_c2_c1_index end end END_MIG end it "should honor the :index_names => :namespace option to include names of indexes with prepended table name when dumping just indexes as a migration if there is no global index namespace" do def @d.global_index_namespace?; false end def @d.tables(o) [:t1, :t2] end def @d.supports_index_parsing?; true end def @d.indexes(t) {:i1=>{:columns=>[:c1], :unique=>false}, :t1_c2_c1_index=>{:columns=>[:c2, :c1], :unique=>false}} end @d.dump_indexes_migration(:index_names=>:namespace).must_equal <<-END_MIG Sequel.migration do change do add_index :t1, [:c1], :ignore_errors=>true, :name=>:t1_i1 add_index :t1, [:c2, :c1], :ignore_errors=>true add_index :t2, [:c1], :ignore_errors=>true, :name=>:t2_i1 add_index :t2, [:c2, :c1], :ignore_errors=>true, :name=>:t2_t1_c2_c1_index end end END_MIG end it "should handle missing index parsing support when dumping index migration" do def @d.tables(o) [:t1] end @d.dump_indexes_migration.must_equal <<-END_MIG Sequel.migration do change do end end END_MIG end it "should handle missing foreign key parsing support when dumping foreign key migration" do def @d.tables(o) [:t1] end @d.dump_foreign_key_migration.must_equal <<-END_MIG Sequel.migration do change do end end END_MIG end it "should support dumping just foreign_keys as a migration" do def @d.tables(o) [:t1, :t2, :t3] end def @d.schema(t, *o) t == :t1 ? 
[[:c2, {:db_type=>'integer'}]] : [[:c1, {:db_type=>'integer'}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(t, *a) case t when :t1 [{:columns=>[:c2], :table=>:t2, :key=>[:c1]}] when :t2 [{:columns=>[:c1, :c3], :table=>:t1, :key=>[:c2, :c4]}] else [] end end @d.dump_foreign_key_migration.must_equal <<-END_MIG Sequel.migration do change do alter_table(:t1) do add_foreign_key [:c2], :t2, :key=>[:c1] end alter_table(:t2) do add_foreign_key [:c1, :c3], :t1, :key=>[:c2, :c4] end end end END_MIG end it "should handle not null values and defaults" do def @d.schema(*s) [[:c1, {:db_type=>'date', :default=>"'now()'", :allow_null=>true}], [:c2, {:db_type=>'datetime', :allow_null=>false}]] end @d.dump_table_schema(:t3).must_equal "create_table(:t3) do\n Date :c1\n DateTime :c2, :null=>false\nend" end it "should handle converting common defaults" do def @d.schema(t, *os) s = [[:c1, {:db_type=>'boolean', :default=>"false", :type=>:boolean, :allow_null=>true}], [:c2, {:db_type=>'varchar', :default=>"'blah'", :type=>:string, :allow_null=>true}], [:c3, {:db_type=>'integer', :default=>"-1", :type=>:integer, :allow_null=>true}], [:c4, {:db_type=>'float', :default=>"1.0", :type=>:float, :allow_null=>true}], [:c5, {:db_type=>'decimal', :default=>"100.50", :type=>:decimal, :allow_null=>true}], [:c6, {:db_type=>'blob', :default=>"'blah'", :type=>:blob, :allow_null=>true}], [:c7, {:db_type=>'date', :default=>"'2008-10-29'", :type=>:date, :allow_null=>true}], [:c8, {:db_type=>'datetime', :default=>"'2008-10-29 10:20:30'", :type=>:datetime, :allow_null=>true}], [:c9, {:db_type=>'time', :default=>"'10:20:30'", :type=>:time, :allow_null=>true}], [:c10, {:db_type=>'foo', :default=>"'6 weeks'", :type=>nil, :allow_null=>true}], [:c11, {:db_type=>'date', :default=>"CURRENT_DATE", :type=>:date, :allow_null=>true}], [:c12, {:db_type=>'timestamp', :default=>"now()", :type=>:datetime, :allow_null=>true}]] s.each{|_, c| c[:ruby_default] = column_schema_to_ruby_default(c[:default], c[:type])} s end e = RUBY_VERSION >= '2.4' ? 
'e' : 'E' @d.dump_table_schema(:t4).gsub(/[+-]\d\d\d\d"\)/, '")').gsub(/\.0+/, '.0').must_equal "create_table(:t4) do\n TrueClass :c1, :default=>false\n String :c2, :default=>\"blah\"\n Integer :c3, :default=>-1\n Float :c4, :default=>1.0\n BigDecimal :c5, :default=>Kernel::BigDecimal(\"0.1005#{e}3\")\n File :c6, :default=>Sequel::SQL::Blob.new(\"blah\")\n Date :c7, :default=>Date.new(2008, 10, 29)\n DateTime :c8, :default=>Time.parse(\"2008-10-29T10:20:30.0\")\n Time :c9, :default=>Sequel::SQLTime.parse(\"10:20:30.0\"), :only_time=>true\n String :c10\n Date :c11, :default=>Sequel::CURRENT_DATE\n DateTime :c12, :default=>Sequel::CURRENT_TIMESTAMP\nend" @d.dump_table_schema(:t4, :same_db=>true).gsub(/[+-]\d\d\d\d"\)/, '")').gsub(/\.0+/, '.0').must_equal "create_table(:t4) do\n column :c1, \"boolean\", :default=>false\n column :c2, \"varchar\", :default=>\"blah\"\n column :c3, \"integer\", :default=>-1\n column :c4, \"float\", :default=>1.0\n column :c5, \"decimal\", :default=>Kernel::BigDecimal(\"0.1005#{e}3\")\n column :c6, \"blob\", :default=>Sequel::SQL::Blob.new(\"blah\")\n column :c7, \"date\", :default=>Date.new(2008, 10, 29)\n column :c8, \"datetime\", :default=>Time.parse(\"2008-10-29T10:20:30.0\")\n column :c9, \"time\", :default=>Sequel::SQLTime.parse(\"10:20:30.0\")\n column :c10, \"foo\", :default=>Sequel::LiteralString.new(\"'6 weeks'\")\n column :c11, \"date\", :default=>Sequel::CURRENT_DATE\n column :c12, \"timestamp\", :default=>Sequel::CURRENT_TIMESTAMP\nend" end it "should not use a literal string as a fallback if using MySQL with the :same_db option" do def @d.database_type; :mysql end def @d.supports_index_parsing?; false end def @d.supports_foreign_key_parsing?; false end def @d.schema(t, *os) s = [[:c10, {:db_type=>'foo', :default=>"'6 weeks'", :type=>nil, :allow_null=>true}]] s.each{|_, c| c[:ruby_default] = column_schema_to_ruby_default(c[:default], c[:type])} s end @d.dump_table_schema(:t5, :same_db=>true).must_equal "create_table(:t5) do\n column :c10, \"foo\"\nend" end it "should convert unknown database types to strings" do @d.dump_table_schema(:t5).must_equal "create_table(:t5) do\n String :c1\nend" end it "should not include defaults for generated columns even if parsed" do @d.dump_table_schema(:t6).must_equal "create_table(:t6) do\n Integer :c1, :null=>false\nend" @d.dump_table_schema(:t7).must_equal "create_table(:t7) do\n Integer :c1\nend" end it "should not include defaults for generated columns even if parsed when using :same_db option" do @d.dump_table_schema(:t6, :same_db=>true).must_equal "create_table(:t6) do\n column :c1, \"integer\", :null=>false\nend" @d.dump_table_schema(:t7, :same_db=>true).must_equal "create_table(:t7) do\n column :c1, \"integer\"\nend" end it "should create generated column when using :same_db option on PostgreSQL" do def @d.database_type; :postgres end @d.dump_table_schema(:t6, :same_db=>true).must_equal "create_table(:t6) do\n column :c1, \"integer\", :generated_always_as=>Sequel::LiteralString.new(\"1\"), :null=>false\nend" @d.dump_table_schema(:t7, :same_db=>true).must_equal "create_table(:t7) do\n column :c1, \"integer\", :generated_always_as=>Sequel::LiteralString.new(\"(a + b)\")\nend" end it "should convert many database types to ruby types" do def @d.schema(t, *o) types = %w"mediumint smallint int integer mediumint(6) smallint(7) int(8) integer(9) tinyint tinyint(2) bigint bigint(20) real float double boolean tinytext mediumtext longtext text clob date datetime timestamp time char character varchar varchar(255) 
varchar(30) bpchar string money decimal decimal(10,2) numeric numeric(15,3) number bytea tinyblob mediumblob longblob blob varbinary varbinary(10) binary binary(20) year" + ["double precision", "timestamp with time zone", "timestamp without time zone", "time with time zone", "time without time zone", "character varying(20)"] + %w"nvarchar ntext smalldatetime smallmoney binary varbinary nchar" + ["timestamp(6) without time zone", "timestamp(6) with time zone", 'mediumint(10) unsigned', 'int(9) unsigned', 'int(10) unsigned', "int(12) unsigned", 'bigint unsigned', 'tinyint(3) unsigned', 'identity', 'int identity'] + %w"integer(10) bit bool" + ["decimal(7, 2) unsigned", "real unsigned"] i = 0 types.map{|x| [:"c#{i+=1}", {:db_type=>x, :allow_null=>true}]} end @d.dump_table_schema(:x).must_equal((<<END_MIG).chomp) create_table(:x) do Integer :c1 Integer :c2 Integer :c3 Integer :c4 Integer :c5 Integer :c6 Integer :c7 Integer :c8 Integer :c9 Integer :c10 Bignum :c11 Bignum :c12 Float :c13 Float :c14 Float :c15 TrueClass :c16 String :c17, :text=>true String :c18, :text=>true String :c19, :text=>true String :c20, :text=>true String :c21, :text=>true Date :c22 DateTime :c23 DateTime :c24 Time :c25, :only_time=>true String :c26, :fixed=>true String :c27, :fixed=>true String :c28 String :c29, :size=>255 String :c30, :size=>30 String :c31 String :c32 BigDecimal :c33, :size=>[19, 2] BigDecimal :c34 BigDecimal :c35, :size=>[10, 2] BigDecimal :c36 BigDecimal :c37, :size=>[15, 3] BigDecimal :c38 File :c39 File :c40 File :c41 File :c42 File :c43 File :c44 File :c45, :size=>10 File :c46 File :c47, :size=>20 Integer :c48 Float :c49 DateTime :c50 DateTime :c51 Time :c52, :only_time=>true Time :c53, :only_time=>true String :c54, :size=>20 String :c55 String :c56, :text=>true DateTime :c57 BigDecimal :c58, :size=>[19, 2] File :c59 File :c60 String :c61, :fixed=>true DateTime :c62, :size=>6 DateTime :c63, :size=>6 Integer :c64 Integer :c65 Bignum :c66 Bignum :c67 Bignum :c68 Integer :c69 Integer :c70 Integer :c71 Integer :c72 TrueClass :c73 TrueClass :c74 BigDecimal :c75, :size=>[7, 2] Float :c76 check Sequel::SQL::BooleanExpression.new(:>=, Sequel::SQL::Identifier.new(:c64), 0) check Sequel::SQL::BooleanExpression.new(:>=, Sequel::SQL::Identifier.new(:c65), 0) check Sequel::SQL::BooleanExpression.new(:>=, Sequel::SQL::Identifier.new(:c66), 0) check Sequel::SQL::BooleanExpression.new(:>=, Sequel::SQL::Identifier.new(:c67), 0) check Sequel::SQL::BooleanExpression.new(:>=, Sequel::SQL::Identifier.new(:c68), 0) check Sequel::SQL::BooleanExpression.new(:>=, Sequel::SQL::Identifier.new(:c69), 0) check Sequel::SQL::BooleanExpression.new(:>=, Sequel::SQL::Identifier.new(:c75), 0) check Sequel::SQL::BooleanExpression.new(:>=, Sequel::SQL::Identifier.new(:c76), 0) end END_MIG end it "should convert mysql types to ruby types" do def @d.schema(t, *o) i = 0 ['float unsigned', 'double(15,2)', 'double(7,1) unsigned', 'tinyint', 'char(3)'].map{|x| [:"c#{i+=1}", {:db_type=>x, :allow_null=>true, :type=>:boolean, :size=>10}]} end @d.dump_table_schema(:x).must_equal((<<END_MIG).chomp) create_table(:x) do Float :c1 Float :c2 Float :c3 TrueClass :c4 String :c5, :size=>3, :fixed=>true check Sequel::SQL::BooleanExpression.new(:>=, Sequel::SQL::Identifier.new(:c1), 0) check Sequel::SQL::BooleanExpression.new(:>=, Sequel::SQL::Identifier.new(:c3), 0) end END_MIG end it "should convert oracle special types to ruby types" do def @d.database_type; :oracle end def @d.schema(t, *o) i = 0 ['number not null', 'date not null', 'varchar2(4 byte) 
not null'].map{|x| [:"c#{i+=1}", {:db_type=>x, :allow_null=>false}]} end @d.dump_table_schema(:x).must_equal((<<END_MIG).chomp) create_table(:x) do BigDecimal :c1, :null=>false Date :c2, :null=>false String :c3, :null=>false end END_MIG end it "should force specify :null option for MySQL timestamp columns when using :same_db" do def @d.database_type; :mysql end def @d.schema(*s) [[:c1, {:db_type=>'timestamp', :primary_key=>true, :allow_null=>true}]] end @d.dump_table_schema(:t3, :same_db=>true).must_equal "create_table(:t3) do\n column :c1, \"timestamp\", :null=>true\n \n primary_key [:c1]\nend" @d.singleton_class.send(:alias_method, :schema, :schema) def @d.schema(*s) [[:c1, {:db_type=>'timestamp', :primary_key=>true, :allow_null=>false}]] end @d.dump_table_schema(:t3, :same_db=>true).must_equal "create_table(:t3) do\n column :c1, \"timestamp\", :null=>false\n \n primary_key [:c1]\nend" end it "should use separate primary_key call with non autoincrementable types" do def @d.schema(*s) [[:c1, {:db_type=>'varchar(8)', :primary_key=>true, :auto_increment=>false}]] end @d.dump_table_schema(:t3).must_equal "create_table(:t3) do\n String :c1, :size=>8\n \n primary_key [:c1]\nend" @d.dump_table_schema(:t3, :same_db=>true).must_equal "create_table(:t3) do\n column :c1, \"varchar(8)\"\n \n primary_key [:c1]\nend" end it "should use explicit type for non integer foreign_key types" do def @d.schema(*s) [[:c1, {:db_type=>'date', :primary_key=>true, :auto_increment=>false}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(t, *a) [{:columns=>[:c1], :table=>:t3, :key=>[:c1]}] if t == :t4 end ["create_table(:t4) do\n foreign_key :c1, :t3, :type=>Date, :key=>[:c1]\n \n primary_key [:c1]\nend", "create_table(:t4) do\n foreign_key :c1, :t3, :key=>[:c1], :type=>Date\n \n primary_key [:c1]\nend"].must_include(@d.dump_table_schema(:t4)) ["create_table(:t4) do\n foreign_key :c1, :t3, :type=>\"date\", :key=>[:c1]\n \n primary_key [:c1]\nend", "create_table(:t4) do\n foreign_key :c1, :t3, :key=>[:c1], :type=>\"date\"\n \n primary_key [:c1]\nend"].must_include(@d.dump_table_schema(:t4, :same_db=>true)) end it "should correctly handle autoincrementing primary keys that are also foreign keys" do def @d.schema(*s) [[:c1, {:db_type=>'integer', :primary_key=>true, :auto_increment=>true}]] end def @d.supports_foreign_key_parsing?; true end def @d.foreign_key_list(t, *a) [{:columns=>[:c1], :table=>:t3, :key=>[:c1]}] if t == :t4 end ["create_table(:t4) do\n primary_key :c1, :table=>:t3, :key=>[:c1]\nend", "create_table(:t4) do\n primary_key :c1, :key=>[:c1], :table=>:t3\nend"].must_include(@d.dump_table_schema(:t4)) end it "should handle dumping on PostgreSQL using qualified tables" do @d = Sequel.connect('mock://postgres').extension(:schema_dumper) def @d.schema(*s) [[:c1, {:db_type=>'timestamp', :primary_key=>true, :allow_null=>true}]] end @d.dump_table_schema(Sequel.qualify(:foo, :bar), :same_db=>true).must_equal "create_table(Sequel::SQL::QualifiedIdentifier.new(:foo, :bar)) do\n column :c1, \"timestamp\"\n \n primary_key [:c1]\nend" end end
sequel-5.63.0/spec/extensions/select_remove_spec.rb
require_relative "spec_helper" describe "Dataset#select_remove" do before do @d = Sequel.mock.from(:test).extension(:select_remove) @d.columns :a, :b, :c end it "should remove columns from the selected columns" do @d.sql.must_equal 'SELECT * FROM test' @d.select_remove(:a).sql.must_equal 'SELECT b, c FROM test' @d.select_remove(:b).sql.must_equal 'SELECT a, c FROM test' @d.select_remove(:c).sql.must_equal 'SELECT a, b FROM test' end it "should work correctly if there are already columns selected" do d = @d.select(:a, :b, :c) d.columns :a, :b, :c d.select_remove(:c).sql.must_equal 'SELECT a, b FROM test' end it "should have no effect if the columns given are not currently selected" do @d.select_remove(:d).sql.must_equal 'SELECT a, b, c FROM test' end it "should handle expressions where Sequel can't determine the alias by itself" do d = @d.select(:a, Sequel.function(:b), Sequel.as(:c, :b)) d.columns :a, :"b()", :b d.select_remove(:"b()").sql.must_equal 'SELECT a, c AS b FROM test' end it "should remove expressions if given exact expressions" do d = @d.select(:a, Sequel.function(:b), Sequel.as(:c, :b)) d.columns :a, :"b()", :b d.select_remove(Sequel.function(:b)).sql.must_equal 'SELECT a, c AS b FROM test' d.select_remove(Sequel.as(:c, :b)).sql.must_equal 'SELECT a, b() FROM test' end end
sequel-5.63.0/spec/extensions/sequel_4_dataset_methods_spec.rb
require_relative "spec_helper" describe "Dataset#and" do before do @dataset = Sequel.mock.dataset.from(:test).extension(:sequel_4_dataset_methods) @d1 = @dataset.where(:x => 1) end it "should add a WHERE filter if none exists" do @dataset.and(:a => 1).sql.must_equal 'SELECT * FROM test WHERE (a = 1)' end it "should add an expression to the where clause" do @d1.and(:y => 2).sql.must_equal 'SELECT * FROM test WHERE ((x = 1) AND (y = 2))' end it "should accept placeholder literal string filters" do @d1.and(Sequel.lit('y > ?', 2)).sql.must_equal 'SELECT * FROM test WHERE ((x = 1) AND (y > 2))' end it "should accept expression filters" do @d1.and(Sequel.expr(:yy) > 3).sql.must_equal 'SELECT * FROM test WHERE ((x = 1) AND (yy >
3))' end it "should accept string filters with placeholders" do @d1.extension(:auto_literal_strings).and('y > ?', 2).sql.must_equal 'SELECT * FROM test WHERE ((x = 1) AND (y > 2))' end it "should accept blocks passed to filter" do @d1.and{yy > 3}.sql.must_equal 'SELECT * FROM test WHERE ((x = 1) AND (yy > 3))' end it "should correctly add parens to give predictable results" do @d1.or(:y => 2).and(:z => 3).sql.must_equal 'SELECT * FROM test WHERE (((x = 1) OR (y = 2)) AND (z = 3))' @d1.and(:y => 2).or(:z => 3).sql.must_equal 'SELECT * FROM test WHERE (((x = 1) AND (y = 2)) OR (z = 3))' end end describe "Dataset#exclude_where" do before do @dataset = Sequel.mock.dataset.from(:test).extension(:sequel_4_dataset_methods) end it "should correctly negate the expression and add it to the where clause" do @dataset.exclude_where(:region=>'Asia').sql.must_equal "SELECT * FROM test WHERE (region != 'Asia')" @dataset.exclude_where(:region=>'Asia').exclude_where(:region=>'NA').sql.must_equal "SELECT * FROM test WHERE ((region != 'Asia') AND (region != 'NA'))" end it "should affect the where clause even if having clause is already used" do @dataset.group_and_count(:name).having{count > 2}.exclude_where(:region=>'Asia').sql. must_equal "SELECT name, count(*) AS count FROM test WHERE (region != 'Asia') GROUP BY name HAVING (count > 2)" end end describe "Dataset#interval" do before do @db = Sequel.mock(:fetch=>{:v => 1234}).extension(:sequel_4_dataset_methods) @ds = @db[:test].freeze end it "should generate the correct SQL statement" do 5.times do @ds.interval(:stamp) @db.sqls.must_equal ["SELECT (max(stamp) - min(stamp)) AS interval FROM test LIMIT 1"] end @ds.filter(Sequel.expr(:price) > 100).interval(:stamp) @db.sqls.must_equal ["SELECT (max(stamp) - min(stamp)) AS interval FROM test WHERE (price > 100) LIMIT 1"] end it "should use a subselect for the same conditions as count" do ds = @ds.order(:stamp).limit(5) 5.times do ds.interval(:stamp).must_equal 1234 @db.sqls.must_equal ['SELECT (max(stamp) - min(stamp)) AS interval FROM (SELECT * FROM test ORDER BY stamp LIMIT 5) AS t1 LIMIT 1'] end end it "should accept virtual row blocks" do 5.times do @ds.interval{a(b)} @db.sqls.must_equal ["SELECT (max(a(b)) - min(a(b))) AS interval FROM test LIMIT 1"] end end end describe "Dataset#range" do before do @db = Sequel.mock(:fetch=>{:v1 => 1, :v2 => 10}).extension(:sequel_4_dataset_methods) @ds = @db[:test].freeze end it "should generate a correct SQL statement" do 5.times do @ds.range(:stamp) @db.sqls.must_equal ["SELECT min(stamp) AS v1, max(stamp) AS v2 FROM test LIMIT 1"] end @ds.filter(Sequel.expr(:price) > 100).range(:stamp) @db.sqls.must_equal ["SELECT min(stamp) AS v1, max(stamp) AS v2 FROM test WHERE (price > 100) LIMIT 1"] end it "should return nil if no row matches" do @db.fetch = [] 5.times do @ds.range(:tryme).must_be_nil end end it "should return a range object" do 5.times do @ds.range(:tryme).must_equal(1..10) end end it "should use a subselect for the same conditions as count" do @ds.order(:stamp).limit(5).range(:stamp).must_equal(1..10) @db.sqls.must_equal ['SELECT min(stamp) AS v1, max(stamp) AS v2 FROM (SELECT * FROM test ORDER BY stamp LIMIT 5) AS t1 LIMIT 1'] end it "should accept virtual row blocks" do 5.times do @ds.range{a(b)} @db.sqls.must_equal ["SELECT min(a(b)) AS v1, max(a(b)) AS v2 FROM test LIMIT 1"] end end end 
sequel-5.63.0/spec/extensions/serialization_modification_detection_spec.rb
require_relative "spec_helper" require 'yaml' describe "serialization_modification_detection plugin" do before do @c = Class.new(Sequel::Model(:items)) @c.class_eval do columns :id, :h plugin :serialization, :yaml, :h plugin :serialization_modification_detection end @o1 = @c.new(:h=>{}) @o2 = @c.load(:id=>1, :h=>"--- {}\n\n") @o3 = @c.new @o4 = @c.load(:id=>1, :h=>nil) DB.reset end it "should not detect columns that haven't been changed" do @o1.changed_columns.must_equal [] @o1.h.must_equal({}) @o1.h[1] = 2 @o1.h.clear @o1.changed_columns.must_equal [] @o2.changed_columns.must_equal [] @o2.h.must_equal({}) @o2.h[1] = 2 @o2.h.clear @o2.changed_columns.must_equal [] end it "should detect columns that have been changed" do @o1.changed_columns.must_equal [] @o1.h.must_equal({}) @o1.h[1] = 2 @o1.changed_columns.must_equal [:h] @o2.changed_columns.must_equal [] @o2.h.must_equal({}) @o2.h[1] = 2 @o2.changed_columns.must_equal [:h] @o3.changed_columns.must_equal [] @o3.h.must_be_nil @o3.h = {} @o3.changed_columns.must_equal [:h] @o4.changed_columns.must_equal [] @o4.h.must_be_nil @o4.h = {} @o4.changed_columns.must_equal [:h] end it "should report correct changed_columns after saving" do @o1.h[1] = 2 @o1.save @o1.changed_columns.must_equal [] @o2.h[1] = 2 @o2.save_changes @o2.changed_columns.must_equal [] @o3.h = {1=>2} @o3.save @o3.changed_columns.must_equal [] @o4.h = {1=>2} @o4.save @o4.changed_columns.must_equal [] end it "should work with frozen objects" do @o1.changed_columns.must_equal [] @o1.h.must_equal({}) @o1.freeze.must_be_same_as @o1 @o1.h[1] = 2 @o1.changed_columns.must_equal [:h] end it "should work with frozen objects when checking changed_columns during validation" do @c.send(:define_method, :validate){changed_columns} @o1.h[1] = 2 @o1.freeze @o1.changed_columns.must_equal [:h] end it "should work with duplicating objects" do @o2.changed_columns.must_equal [] o = @o2.dup @o2.h.must_equal({}) @o2.h[1] = 2 @o2.changed_columns.must_equal [:h] o.changed_columns.must_equal [] end it "should work with duplicating objects after modifying them" do @o2.changed_columns.must_equal [] @o2.h.must_equal({}) @o2.h[1] = 2 @o2.changed_columns.must_equal [:h] o = @o2.dup o.changed_columns.must_equal [:h] end end
sequel-5.63.0/spec/extensions/serialization_spec.rb
require_relative "spec_helper" require 'yaml' require 'json' describe "Serialization plugin" do before do @c = Class.new(Sequel::Model(:items)) do no_primary_key columns :id, :abc, :def, :ghi end DB.reset end it "should allow setting additional serializable attributes via plugin :serialization call" do @c.plugin :serialization, :yaml, :abc @c.create(:abc => 1, :def=> 2) DB.sqls.map{|s| s.sub("1\n...", '1')}.must_equal ["INSERT INTO items (def, abc) VALUES (2, '--- 1\n')"] @c.plugin :serialization, :marshal, :def @c.create(:abc => 1, :def=> 1) DB.sqls.map{|s| s.sub("1\n...", '1')}.must_equal ["INSERT INTO items (abc, def) VALUES ('--- 1\n', 'BAhpBg==\n')"] @c.plugin :serialization, :json, :ghi @c.create(:ghi => [123]) DB.sqls.must_equal ["INSERT INTO items (ghi) VALUES ('[123]')"] end it "should handle validations of underlying column" do @c.plugin :serialization, :yaml, :abc o = @c.new def o.validate errors.add(:abc, "not present") unless self[:abc] end o.valid?.must_equal false o.abc = {} o.valid?.must_equal true end it "should offer serialize_attributes class method for configuring serialization" do @c.plugin :serialization @c.serialize_attributes :yaml, :abc o = @c.new def o.validate errors.add(:abc, "not present") unless self[:abc] end o.valid?.must_equal false o.abc = {} o.valid?.must_equal true end it "should set column values even when not validating" do @c.set_primary_key :id @c.plugin :serialization, :yaml, :abc @c.load({:id=>1}).set(:abc=>{}).save(:validate=>false) DB.sqls.last.gsub("\n", '').must_equal "UPDATE items SET abc = '--- {}' WHERE (id = 1)" end it "should allow serializing attributes to yaml" do @c.plugin :serialization, :yaml, :abc @c.create(:abc => 1) @c.create(:abc => "hello") DB.sqls.map{|s| s.sub("...\n", '')}.must_equal ["INSERT INTO items (abc) VALUES ('--- 1\n')", "INSERT INTO items (abc) VALUES ('--- hello\n')"] end it "should allow serializing attributes to marshal" do @c.plugin :serialization, :marshal, :abc @c.create(:abc => 1) @c.create(:abc => "hello") x = [Marshal.dump("hello")].pack('m') DB.sqls.must_equal [ \ "INSERT INTO items (abc) VALUES ('BAhpBg==\n')", \ "INSERT INTO items (abc) VALUES ('#{x}')", \ ] end it "should allow serializing attributes to json" do @c.plugin :serialization, :json, :ghi @c.create(:ghi => [1]) @c.create(:ghi => ["hello"]) x = ["hello"].to_json DB.sqls.must_equal [ \ "INSERT INTO items (ghi) VALUES ('[1]')", \ "INSERT INTO items (ghi) VALUES ('#{x}')", \ ] end it "should not attempt to serialize nil attributes" do @c.plugin :serialization,
:marshal, :abc @c.create(:abc => nil) DB.sqls.must_equal ["INSERT INTO items (abc) VALUES (NULL)"] end it "should allow serializing attributes using arbitrary callable" do @c.plugin :serialization, [proc{|s| s.reverse}, proc{}], :abc @c.create(:abc => "hello") DB.sqls.must_equal ["INSERT INTO items (abc) VALUES ('olleh')"] end it "should raise an error if specifying serializer as an unregistered symbol" do proc{@c.plugin :serialization, :foo, :abc}.must_raise(Sequel::Error) end it "should translate values to and from yaml serialization format using accessor methods" do @c.set_primary_key :id @c.plugin :serialization, :yaml, :abc, :def @c.dataset = @c.dataset.with_fetch(:id => 1, :abc => "--- 1\n", :def => "--- hello\n") o = @c.first o.id.must_equal 1 o.abc.must_equal 1 o.abc.must_equal 1 o.def.must_equal "hello" o.def.must_equal "hello" o.update(:abc => 23) @c.create(:abc => [1, 2, 3]) DB.sqls.must_equal ["SELECT * FROM items LIMIT 1", "UPDATE items SET abc = '#{23.to_yaml}' WHERE (id = 1)", "INSERT INTO items (abc) VALUES ('#{[1, 2, 3].to_yaml}')", "SELECT * FROM items WHERE id = 10"] end it "should translate values to and from marshal serialization format using accessor methods" do @c.set_primary_key :id @c.plugin :serialization, :marshal, :abc, :def @c.dataset = @c.dataset.with_fetch([:id => 1, :abc =>[Marshal.dump(1)].pack('m'), :def =>[Marshal.dump('hello')].pack('m')]) o = @c.first o.id.must_equal 1 o.abc.must_equal 1 o.abc.must_equal 1 o.def.must_equal "hello" o.def.must_equal "hello" o.update(:abc => 23) @c.create(:abc => [1, 2, 3]) DB.sqls.must_equal ["SELECT * FROM items LIMIT 1", "UPDATE items SET abc = '#{[Marshal.dump(23)].pack('m')}' WHERE (id = 1)", "INSERT INTO items (abc) VALUES ('#{[Marshal.dump([1, 2, 3])].pack('m')}')", "SELECT * FROM items WHERE id = 10"] end it "should handle old non-base-64 encoded marshal serialization format" do @c.set_primary_key :id @c.plugin :serialization, :marshal, :abc, :def @c.dataset = @c.dataset.with_fetch([:id => 1, :abc =>Marshal.dump(1), :def =>Marshal.dump('hello')]) o = @c.first o.abc.must_equal 1 o.def.must_equal "hello" end it "should raise exception for bad marshal data" do @c.set_primary_key :id @c.plugin :serialization, :marshal, :abc, :def @c.dataset = @c.dataset.with_fetch([:id => 1, :abc =>'foo', :def =>'bar']) o = @c.first proc{o.abc}.must_raise TypeError, ArgumentError proc{o.def}.must_raise TypeError, ArgumentError end it "should translate values to and from json serialization format using accessor methods" do @c.set_primary_key :id @c.plugin :serialization, :json, :abc, :def @c.dataset = @c.dataset.with_fetch(:id => 1, :abc => [1].to_json, :def => ["hello"].to_json) o = @c.first o.id.must_equal 1 o.abc.must_equal [1] o.abc.must_equal [1] o.def.must_equal ["hello"] o.def.must_equal ["hello"] o.update(:abc => [23]) @c.create(:abc => [1,2,3]) DB.sqls.must_equal ["SELECT * FROM items LIMIT 1", "UPDATE items SET abc = '#{[23].to_json}' WHERE (id = 1)", "INSERT INTO items (abc) VALUES ('#{[1,2,3].to_json}')", "SELECT * FROM items WHERE id = 10"] end it "should translate values to and from arbitrary callables using accessor methods" do @c.set_primary_key :id @c.plugin :serialization, [proc{|s| s.reverse}, proc{|s| s.reverse}], :abc, :def @c.dataset = @c.dataset.with_fetch(:id => 1, :abc => 'cba', :def => 'olleh') o = @c.first o.id.must_equal 1 o.abc.must_equal 'abc' o.abc.must_equal 'abc' o.def.must_equal "hello" o.def.must_equal "hello" o.update(:abc => 'foo') @c.create(:abc => 'bar') DB.sqls.must_equal ["SELECT * FROM items
LIMIT 1", "UPDATE items SET abc = 'oof' WHERE (id = 1)", "INSERT INTO items (abc) VALUES ('rab')", "SELECT * FROM items WHERE id = 10"] end it "should handle registration of custom serializer/deserializer pairs" do @c.set_primary_key :id require_relative '../../lib/sequel/plugins/serialization' Sequel::Plugins::Serialization.register_format(:reverse, proc{|s| s.reverse}, proc{|s| s.reverse}) @c.plugin :serialization, :reverse, :abc, :def @c.dataset = @c.dataset.with_fetch(:id => 1, :abc => 'cba', :def => 'olleh') o = @c.first o.id.must_equal 1 o.abc.must_equal 'abc' o.abc.must_equal 'abc' o.def.must_equal "hello" o.def.must_equal "hello" o.update(:abc => 'foo') @c.create(:abc => 'bar') DB.sqls.must_equal ["SELECT * FROM items LIMIT 1", "UPDATE items SET abc = 'oof' WHERE (id = 1)", "INSERT INTO items (abc) VALUES ('rab')", "SELECT * FROM items WHERE id = 10"] end it "should copy serialization formats and columns to subclasses" do @c.set_primary_key :id @c.plugin :serialization, :yaml, :abc, :def @c.dataset = @c.dataset.with_fetch(:id => 1, :abc => "--- 1\n", :def => "--- hello\n") o = Class.new(@c).first o.id.must_equal 1 o.abc.must_equal 1 o.abc.must_equal 1 o.def.must_equal "hello" o.def.must_equal "hello" o.update(:abc => 23) Class.new(@c).create(:abc => [1, 2, 3]) DB.sqls.must_equal ["SELECT * FROM items LIMIT 1", "UPDATE items SET abc = '#{23.to_yaml}' WHERE (id = 1)", "INSERT INTO items (abc) VALUES ('#{[1, 2, 3].to_yaml}')", "SELECT * FROM items WHERE id = 10"] end it "should clear the deserialized columns when refreshing" do @c.set_primary_key :id @c.plugin :serialization, :yaml, :abc, :def o = @c.load(:id => 1, :abc => "--- 1\n", :def => "--- hello\n") o.abc = 23 o.deserialized_values.length.must_equal 1 o.abc.must_equal 23 o.refresh o.deserialized_values.length.must_equal 0 end it "should handle case where there are no deserialized columns when refreshing" do @c.set_primary_key :id @c.plugin :serialization, :yaml, :abc, :def o = @c.load(:id => 1, :abc => "--- 1\n", :def => "--- hello\n") o.refresh o.deserialized_values.length.must_equal 0 end it "should not clear the deserialized columns when refreshing after saving a new object" do @c.set_primary_key :id @c.plugin :serialization, :yaml, :abc, :def o = @c.new(:abc => "--- 1\n", :def => "--- hello\n") o.deserialized_values.length.must_equal 2 o.save o.deserialized_values.length.must_equal 2 end it "should not clear the deserialized columns when refreshing after saving a new object with insert_select" do @c.set_primary_key :id @c.plugin :serialization, :yaml, :abc, :def @c.dataset = @c.dataset.with_extend do def supports_insert_select?; true end def insert_select(*) {:id=>1} end end o = @c.new(:abc => "--- 1\n", :def => "--- hello\n") o.deserialized_values.length.must_equal 2 o.save o.deserialized_values.length.must_equal 2 end it "should raise an error if calling internal serialization methods with bad columns" do @c.set_primary_key :id @c.plugin :serialization o = @c.load(:id => 1, :abc => "--- 1\n", :def => "--- hello\n") lambda{o.send(:serialize_value, :abc, 1)}.must_raise(Sequel::Error) lambda{o.send(:deserialize_value, :abc, "--- hello\n")}.must_raise(Sequel::Error) end it "should add the accessors to a module included in the class, so they can be easily overridden" do @c.class_eval do def abc "#{super}-blah" end end @c.plugin :serialization, :yaml, :abc o = @c.load(:abc => "--- 1\n") o.abc.must_equal "1-blah" end it "should call super to get the deserialized value from a previous accessor" do m = Module.new do def abc "--- 
#{@values[:abc]*3}\n" end end @c.send(:include, m) @c.plugin :serialization, :yaml, :abc o = @c.load(:abc => 3) o.abc.must_equal 9 end it "should work correctly with frozen instances" do @c.set_primary_key :id @c.plugin :serialization, :yaml, :abc, :def @c.dataset = @c.dataset.with_fetch(:id => 1, :abc => "--- 1\n", :def => "--- hello\n") o = @c.first o.freeze.must_be_same_as o o.abc.must_equal 1 o.abc.must_equal 1 o.def.must_equal "hello" o.def.must_equal "hello" proc{o.abc = 2}.must_raise proc{o.def = 'h'}.must_raise end it "should work correctly with frozen instances with validations on serialized column" do @c.set_primary_key :id @c.plugin :serialization, :yaml, :abc, :def @c.dataset = @c.dataset.with_fetch(:id => 1, :abc => "--- 1\n", :def => "--- hello\n") @c.send(:define_method, :validate){errors.add(:abc, "is 1") if abc == 1} o = @c.first o.freeze o.abc.must_equal 1 o.abc.must_equal 1 o.def.must_equal "hello" o.def.must_equal "hello" proc{o.abc = 2}.must_raise proc{o.def = 'h'}.must_raise end it "should have dup duplicate internal structures" do @c.plugin :serialization, :yaml, :abc, :def o = @c.new o.dup.deserialized_values.must_equal o.deserialized_values o.dup.deserialized_values.wont_be_same_as(o.deserialized_values) end it "should have changed_columns include serialized columns if those columns have changed" do @c.plugin :serialization, :yaml, :abc, :def @c.dataset = @c.dataset.with_fetch(:id => 1, :abc => "--- 1\n", :def => "--- hello\n") o = @c.first o.changed_columns.must_equal [] o.abc = 1 o.changed_columns.must_equal [] o.abc = 1 o.changed_columns.must_equal [] o.abc = 2 o.changed_columns.must_equal [:abc] o.def = 'hello' o.changed_columns.must_equal [:abc] o.def = 'hello' o.changed_columns.must_equal [:abc] o.def = 'hello2' o.changed_columns.must_equal [:abc, :def] end it "should update column_changes if the dirty plugin is used" do @c.plugin :serialization, :yaml, :abc, :def @c.plugin :dirty @c.dataset = @c.dataset.with_fetch(:id => 1, :abc => "--- 1\n", :def => "--- hello\n") o = @c.first o.column_changes.must_equal({}) o.abc = 1 o.column_changes.must_equal({}) o.abc = 1 o.column_changes.must_equal({}) o.abc = 2 o.column_changes.must_equal(:abc=>[1, 2]) o.def = 'hello' o.column_changes.must_equal(:abc=>[1, 2]) o.def = 'hello' o.column_changes.must_equal(:abc=>[1, 2]) o.def = 'hello2' o.column_changes.must_equal(:abc=>[1, 2], :def=>["hello", "hello2"]) end it "should freeze serialization metadata when freezing model class" do @c.plugin :serialization, :yaml, :abc, :def @c.freeze @c.serialization_map.frozen?.must_equal true @c.deserialization_map.frozen?.must_equal true end it "should handle freezing model class without defining any serializers" do @c.plugin :serialization @c.freeze @c.serialization_map.frozen?.must_equal true @c.deserialization_map.frozen?.must_equal true end it "should raise error when calling serialize_attributes without any columns" do @c.plugin :serialization proc{@c.serialize_attributes :yaml}.must_raise Sequel::Error end end 
sequel-5.63.0/spec/extensions/server_block_spec.rb
require_relative "spec_helper" with_server_specs = Module.new do extend Minitest::Spec::DSL it "should set the default server to use in the block" do @db.with_server(:a){@db[:t].all} @db.sqls.must_equal ["SELECT * FROM t -- a"] @db.with_server(:b){@db[:t].all} @db.sqls.must_equal ["SELECT * FROM t -- b"] end it "should set the default server and read-only server to use in the block when given two servers" do @db.with_server(:a, :b){@db[:t].all} @db.sqls.must_equal ["SELECT * FROM t -- b"] @db.with_server(:a, :b){@db[:t].insert} @db.sqls.must_equal ["INSERT INTO t DEFAULT VALUES -- a"] end it "should have no effect after the block" do @db.with_server(:a){@db[:t].all} @db.sqls.must_equal ["SELECT * FROM t -- a"] @db[:t].all @db.sqls.must_equal ["SELECT * FROM t"] end it "should not override specific server inside the block" do @db.with_server(:a){@db[:t].server(:b).all} @db.sqls.must_equal ["SELECT * FROM t -- b"] end it "should work correctly when blocks are nested" do @db[:t].all @db.with_server(:a) do @db[:t].all @db.with_server(:b){@db[:t].all} @db[:t].all end @db[:t].all @db.sqls.must_equal ["SELECT * FROM t", "SELECT * FROM t -- a", "SELECT * FROM t -- b", "SELECT * FROM t -- a", "SELECT * FROM t"] end it "should work correctly for inserts/updates/deletes" do @db.with_server(:a) do @db[:t].insert @db[:t].update(:a=>1) @db[:t].delete end @db.sqls.must_equal ["INSERT INTO t DEFAULT VALUES -- a", "UPDATE t SET a = 1 -- a", "DELETE FROM t -- a"] end end describe "Database#with_server single threaded" do before do @db = Sequel.mock(:single_threaded=>true, :servers=>{:a=>{}, :b=>{}}) @db.extension :server_block end include with_server_specs end describe "Database#with_server multi threaded" do before do @db = Sequel.mock(:servers=>{:a=>{}, :b=>{}, :c=>{}, :d=>{}}) @db.extension :server_block end include with_server_specs it "should respect multithreaded access" do q, q1 = Queue.new, Queue.new t = nil @db[:t].all @db.with_server(:a) do @db[:t].all t = Thread.new do @db[:t].all @db.with_server(:c) do @db[:t].all @db.with_server(:d){@db[:t].all} q.push nil q1.pop @db[:t].all end @db[:t].all end q.pop @db.with_server(:b){@db[:t].all} @db[:t].all end @db[:t].all q1.push nil t.join @db.sqls.must_equal ["SELECT * FROM t", "SELECT * FROM t -- a", "SELECT * FROM t", "SELECT * FROM t -- c", "SELECT * FROM t -- d", "SELECT * FROM t -- b", "SELECT * FROM t -- a", "SELECT * FROM t", "SELECT * FROM t -- c", "SELECT * FROM t"] end end describe "Database#with_server with invalid servers" do def sqls(server) @db.with_server(server) do @db[:t].all @db[:t].insert @db[:t].update(:a=>1) @db[:t].delete end @db.sqls end it "when single threaded and no servers_hash" do @db = Sequel.mock(:single_threaded=>true, :servers=>{:a=>{}}).extension(:server_block) sqls(:a).must_equal ["SELECT * FROM t -- a", "INSERT INTO t DEFAULT VALUES -- a", "UPDATE t SET a = 1 -- a", "DELETE FROM t
-- a"] sqls(:c).must_equal ["SELECT * FROM t", "INSERT INTO t DEFAULT VALUES", "UPDATE t SET a = 1", "DELETE FROM t"] end it "when multi-threaded and no servers_hash" do @db = Sequel.mock(:servers=>{:a=>{}}).extension(:server_block) sqls(:a).must_equal ["SELECT * FROM t -- a", "INSERT INTO t DEFAULT VALUES -- a", "UPDATE t SET a = 1 -- a", "DELETE FROM t -- a"] sqls(:c).must_equal ["SELECT * FROM t", "INSERT INTO t DEFAULT VALUES", "UPDATE t SET a = 1", "DELETE FROM t"] end it "when single threaded and servers_hash" do @db = Sequel.mock(:single_threaded=>true, :servers=>{:a=>{}, :b=>{}}, :servers_hash=>Hash.new{|_,k| raise}.merge!(:c=>:b)).extension(:server_block) sqls(:a).must_equal ["SELECT * FROM t -- a", "INSERT INTO t DEFAULT VALUES -- a", "UPDATE t SET a = 1 -- a", "DELETE FROM t -- a"] sqls(:c).must_equal ["SELECT * FROM t -- b", "INSERT INTO t DEFAULT VALUES -- b", "UPDATE t SET a = 1 -- b", "DELETE FROM t -- b"] proc{sqls(:d)}.must_raise(RuntimeError) end it "when multi-threaded and servers_hash" do @db = Sequel.mock(:servers=>{:a=>{}, :b=>{}}, :servers_hash=>Hash.new{|_,k| raise}.merge!(:c=>:b)).extension(:server_block) sqls(:a).must_equal ["SELECT * FROM t -- a", "INSERT INTO t DEFAULT VALUES -- a", "UPDATE t SET a = 1 -- a", "DELETE FROM t -- a"] sqls(:c).must_equal ["SELECT * FROM t -- b", "INSERT INTO t DEFAULT VALUES -- b", "UPDATE t SET a = 1 -- b", "DELETE FROM t -- b"] proc{sqls(:d)}.must_raise(RuntimeError) end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/server_logging_spec.rb������������������������������������������������0000664�0000000�0000000�00000003556�14342141206�0022615�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "server_logging extension" do before do @o = Object.new def @o.logs; @logs || []; end def @o.log; logs.length.must_equal 1; logs.first.length.must_equal 1; logs.shift.first; end def @o.to_ary; [self]; end def @o.method_missing(m, *args); (@logs ||= []) << args; end @db = Sequel::mock(:test=>false, :servers=>{:read_only=>{}, :b=>{}}, :logger=>@o).extension(:server_logging) end it "should include shard when logging" do @db[:a].all @o.log.must_include "server: read_only) SELECT * FROM a" @db[:a].insert @o.log.must_include "server: default) INSERT INTO a DEFAULT VALUES" @db[:a].server(:b).all @o.log.must_include "server: b) SELECT * FROM a" end it "should not include shard when not logging connection info" do @db.log_connection_info = false @db[:a].all log = @o.log log.wont_include "server: read_only) SELECT * FROM a" log.must_include "SELECT * FROM a" end it "should not turn on logging connction info if it was turned off" do @db.log_connection_info = false @db.extension :server_logging @db[:a].all log = @o.log log.wont_include "server: read_only) SELECT * FROM a" log.must_include "SELECT * FROM a" end it "should remove mapping when disconnecting" do c = @db.synchronize{|c1| c1} @db.disconnect 
@db.send(:log_connection_execute, c, "SELECT * FROM a") @o.log.must_include "server: ) SELECT * FROM a" end it "should not automatically enable logging connection info if explicitly disabled when extension is loaded" do @db = Sequel::mock(:test=>false, :servers=>{:read_only=>{}, :b=>{}}, :logger=>@o) @db.log_connection_info = false @db.extension(:server_logging) @db[:a].all log = @o.log log.wont_include "server: read_only) SELECT * FROM a" log.must_include "SELECT * FROM a" end end
sequel-5.63.0/spec/extensions/sharding_spec.rb
require_relative "spec_helper" describe "sharding plugin" do before do @db = Sequel.mock(:numrows=>1, :autoid=>proc{1}, :servers=>{:s1=>{}, :s2=>{}, :s3=>{}, :s4=>{}}) @Artist = Class.new(Sequel::Model(@db[:artists].with_fetch(:id=>2, :name=>'YJM'))) @Artist.class_eval do columns :id, :name plugin :sharding end @Album = Class.new(Sequel::Model(@db[:albums].with_fetch(:id=>1, :name=>'RF', :artist_id=>2))) @Album.class_eval do columns :id, :artist_id, :name plugin :sharding end @Tag = Class.new(Sequel::Model(@db[:tags].with_fetch(:id=>3, :name=>'M'))) @Tag.class_eval do columns :id, :name plugin :sharding end @Artist.one_to_many :albums, :class=>@Album, :key=>:artist_id @Album.many_to_one :artist, :class=>@Artist @Album.many_to_many :tags, :class=>@Tag, :left_key=>:album_id, :right_key=>:tag_id, :join_table=>:albums_tags @db.sqls end it "should allow you to instantiate a new object for a specified shard" do @Album.new_using_server(:s1, :name=>'RF').save @db.sqls.must_equal ["INSERT INTO albums (name) VALUES ('RF') -- s1", "SELECT * FROM albums WHERE (id = 1) LIMIT 1 -- s1"] @Album.new_using_server(:s2){|o| o.name = 'MO'}.save @db.sqls.must_equal ["INSERT INTO albums (name) VALUES ('MO') -- s2", "SELECT * FROM albums WHERE (id = 1) LIMIT 1 -- s2"] end it "should allow you to create and save a new object for a specified shard" do @Album.create_using_server(:s1, :name=>'RF') @db.sqls.must_equal ["INSERT INTO albums (name) VALUES ('RF') -- s1", "SELECT * FROM albums WHERE (id = 1) LIMIT 1 -- s1"] @Album.create_using_server(:s2){|o| o.name = 'MO'} @db.sqls.must_equal ["INSERT INTO albums (name) VALUES ('MO') -- s2", "SELECT * FROM albums WHERE (id = 1) LIMIT 1 -- s2"] end it "should have objects retrieved from a specific shard update that shard" do @Album.server(:s1).first.update(:name=>'MO') @db.sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s1", "UPDATE albums SET name = 'MO' WHERE (id = 1) -- s1"] end it "should have objects retrieved from a specific shard delete from that shard" do @Album.server(:s1).first.delete @db.sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s1", "DELETE FROM albums WHERE (id = 1) -- s1"] end it "should have objects retrieved from a specific shard reload from that shard" do @Album.server(:s1).first.reload @db.sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s1", "SELECT * FROM albums WHERE (id = 1) LIMIT 1 -- s1"] end it "should use current
dataset's shard when eager loading if eagerly loaded dataset doesn't have its own shard" do albums = @Album.server(:s1).eager(:artist).all @db.sqls.must_equal ["SELECT * FROM albums -- s1", "SELECT * FROM artists WHERE (artists.id IN (2)) -- s1"] albums.length.must_equal 1 albums.first.artist.save @db.sqls.must_equal ["UPDATE artists SET name = 'YJM' WHERE (id = 2) -- s1"] end it "should use current dataset's shard when eager loading with eager block if eagerly loaded dataset doesn't have its own shard" do albums = @Album.server(:s1).eager(:artist=>proc{|ds| ds.where(:x)}).all @db.sqls.must_equal ["SELECT * FROM albums -- s1", "SELECT * FROM artists WHERE ((artists.id IN (2)) AND x) -- s1"] albums.length.must_equal 1 albums.first.artist.save @db.sqls.must_equal ["UPDATE artists SET name = 'YJM' WHERE (id = 2) -- s1"] end it "should not use current dataset's shard when eager loading if eagerly loaded dataset has its own shard" do @Artist.dataset = @Artist.dataset.server(:s2) albums = @Album.server(:s1).eager(:artist).all @db.sqls.must_equal ["SELECT * FROM albums -- s1", "SELECT * FROM artists WHERE (artists.id IN (2)) -- s2"] albums.length.must_equal 1 albums.first.artist.save @db.sqls.must_equal ["UPDATE artists SET name = 'YJM' WHERE (id = 2) -- s2"] end it "should not use a shard when eager loading if the dataset doesn't have a shard associated with it" do albums = @Album.eager(:artist).all @db.sqls.must_equal ["SELECT * FROM albums", "SELECT * FROM artists WHERE (artists.id IN (2))"] albums.length.must_equal 1 albums.first.artist.save @db.sqls.must_equal ["UPDATE artists SET name = 'YJM' WHERE (id = 2)"] end it "should use current dataset's shard when eager graphing if eagerly graphed dataset doesn't have its own shard" do albums = @Album.server(:s1).eager_graph(:artist).with_fetch(:id=>1, :artist_id=>2, :name=>'RF', :artist_id_0=>2, :artist_name=>'YJM').all @db.sqls.must_equal ["SELECT albums.id, albums.artist_id, albums.name, artist.id AS artist_id_0, artist.name AS artist_name FROM albums LEFT OUTER JOIN artists AS artist ON (artist.id = albums.artist_id) -- s1"] albums.length.must_equal 1 albums.first.artist.save @db.sqls.must_equal ["UPDATE artists SET name = 'YJM' WHERE (id = 2) -- s1"] end it "should not use current dataset's shard when eager graphing if eagerly graphed dataset has its own shard" do @Artist.dataset = @Artist.dataset.server(:s2) albums = @Album.server(:s1).eager_graph(:artist).with_fetch(:id=>1, :artist_id=>2, :name=>'RF', :artist_id_0=>2, :artist_name=>'YJM').all @db.sqls.must_equal ["SELECT albums.id, albums.artist_id, albums.name, artist.id AS artist_id_0, artist.name AS artist_name FROM albums LEFT OUTER JOIN artists AS artist ON (artist.id = albums.artist_id) -- s1"] albums.length.must_equal 1 albums.first.artist.save @db.sqls.must_equal ["UPDATE artists SET name = 'YJM' WHERE (id = 2) -- s2"] end it "should use eagerly graphed dataset shard for eagerly graphed objects even if current dataset does not have a shard" do @Artist.dataset = @Artist.dataset.server(:s2) albums = @Album.eager_graph(:artist).with_fetch(:id=>1, :artist_id=>2, :name=>'RF', :artist_id_0=>2, :artist_name=>'YJM').all @db.sqls.must_equal ["SELECT albums.id, albums.artist_id, albums.name, artist.id AS artist_id_0, artist.name AS artist_name FROM albums LEFT OUTER JOIN artists AS artist ON (artist.id = albums.artist_id)"] albums.length.must_equal 1 albums.first.artist.save @db.sqls.must_equal ["UPDATE artists SET name = 'YJM' WHERE (id = 2) -- s2"] end it "should have objects retrieved
from a specific shard use associated objects from that shard, with modifications to the associated objects using that shard" do album = @Album.server(:s1).first @db.sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s1"] album.artist.update(:name=>'AS') @db.sqls.must_equal ["SELECT * FROM artists WHERE (artists.id = 2) LIMIT 1 -- s1", "UPDATE artists SET name = 'AS' WHERE (id = 2) -- s1"] album.tags.map{|a| a.update(:name=>'SR')} @db.sqls.must_equal ["SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (albums_tags.album_id = 1) -- s1", "UPDATE tags SET name = 'SR' WHERE (id = 3) -- s1"] @Artist.server(:s2).first.albums.map{|a| a.update(:name=>'MO')} @db.sqls.must_equal ["SELECT * FROM artists LIMIT 1 -- s2", "SELECT * FROM albums WHERE (albums.artist_id = 2) -- s2", "UPDATE albums SET name = 'MO' WHERE (id = 1) -- s2"] end it "should have objects retrieved from a specific shard add associated objects to that shard" do album = @Album.server(:s1).first artist = @Artist.server(:s2).first @db.sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s1", "SELECT * FROM artists LIMIT 1 -- s2"] artist.add_album(:name=>'MO') sqls = @db.sqls ["INSERT INTO albums (artist_id, name) VALUES (2, 'MO') -- s2", "INSERT INTO albums (name, artist_id) VALUES ('MO', 2) -- s2"].must_include(sqls.shift) sqls.must_equal ["SELECT * FROM albums WHERE (id = 1) LIMIT 1 -- s2"] album.add_tag(:name=>'SR') sqls = @db.sqls ["INSERT INTO albums_tags (album_id, tag_id) VALUES (1, 3) -- s1", "INSERT INTO albums_tags (tag_id, album_id) VALUES (3, 1) -- s1"].must_include(sqls.pop) sqls.must_equal ["INSERT INTO tags (name) VALUES ('SR') -- s1", "SELECT * FROM tags WHERE (id = 1) LIMIT 1 -- s1", ] end it "should have objects retrieved from a specific shard add associated objects when associated object doesn't use sharding plugin" do @Album = Class.new(Sequel::Model(@db[:albums].with_fetch(:id=>1, :name=>'RF', :artist_id=>2))) @Album.columns :id, :artist_id, :name @Artist.one_to_many :albums, :class=>@Album, :key=>:artist_id @db.sqls @Album.server(:s1).first artist = @Artist.server(:s2).first @db.sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s1", "SELECT * FROM artists LIMIT 1 -- s2"] artist.add_album(:name=>'MO') sqls = @db.sqls sqls.must_equal ["INSERT INTO albums (name, artist_id) VALUES ('MO', 2)", "SELECT * FROM albums WHERE (id = 1) LIMIT 1"] end it "should have objects retrieved from a specific shard remove associated objects from that shard" do album = @Album.server(:s1).first artist = @Artist.server(:s2).first @db.sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s1", "SELECT * FROM artists LIMIT 1 -- s2"] artist.remove_album(1) sqls = @db.sqls ["UPDATE albums SET artist_id = NULL, name = 'RF' WHERE (id = 1) -- s2", "UPDATE albums SET name = 'RF', artist_id = NULL WHERE (id = 1) -- s2"].must_include(sqls.pop) sqls.must_equal ["SELECT * FROM albums WHERE ((albums.artist_id = 2) AND (albums.id = 1)) LIMIT 1 -- s2"] album.remove_tag(3) @db.sqls.must_equal ["SELECT tags.* FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((albums_tags.album_id = 1) AND (tags.id = 3)) LIMIT 1 -- s1", "DELETE FROM albums_tags WHERE ((album_id = 1) AND (tag_id = 3)) -- s1"] end it "should have objects retrieved from a specific shard remove all associated objects from that shard" do album = @Album.server(:s1).first artist = @Artist.server(:s2).first @db.sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s1", "SELECT * FROM artists LIMIT 1 -- s2"] artist.remove_all_albums @db.sqls.must_equal 
["UPDATE albums SET artist_id = NULL WHERE (artist_id = 2) -- s2"] album.remove_all_tags @db.sqls.must_equal ["DELETE FROM albums_tags WHERE (album_id = 1) -- s1"] end it "should not override a server already set on an associated object" do @Album.server(:s1).first artist = @Artist.server(:s2).first @db.sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s1", "SELECT * FROM artists LIMIT 1 -- s2"] artist.add_album(@Album.load(:id=>4, :name=>'MO').set_server(:s3)) ["UPDATE albums SET artist_id = 2, name = 'MO' WHERE (id = 4) -- s3", "UPDATE albums SET name = 'MO', artist_id = 2 WHERE (id = 4) -- s3"].must_include(@db.sqls.pop) artist.remove_album(@Album.load(:id=>5, :name=>'T', :artist_id=>2).set_server(:s4)) # Should select from current object's shard to check existing association, but update associated object's shard sqls = @db.sqls ["UPDATE albums SET artist_id = NULL, name = 'T' WHERE (id = 5) -- s4", "UPDATE albums SET name = 'T', artist_id = NULL WHERE (id = 5) -- s4"].must_include(sqls.pop) sqls.must_equal ["SELECT 1 AS one FROM albums WHERE ((albums.artist_id = 2) AND (id = 5)) LIMIT 1 -- s2"] end it "should be able to set a shard to use for any object using set_server" do @Album.server(:s1).first.set_server(:s2).reload @db.sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s1", "SELECT * FROM albums WHERE (id = 1) LIMIT 1 -- s2"] end it "should use transactions on the correct shard" do @Album.use_transactions = true @Album.server(:s2).first.save sqls = @db.sqls ["UPDATE albums SET artist_id = 2, name = 'RF' WHERE (id = 1) -- s2", "UPDATE albums SET name = 'RF', artist_id = 2 WHERE (id = 1) -- s2"].must_include(sqls.slice!(2)) sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s2", "BEGIN -- s2", "COMMIT -- s2"] end it "should use override current shard when saving with given :server option" do @Album.use_transactions = true @Album.server(:s2).first.save(:server=>:s1) sqls = @db.sqls ["UPDATE albums SET artist_id = 2, name = 'RF' WHERE (id = 1) -- s1", "UPDATE albums SET name = 'RF', artist_id = 2 WHERE (id = 1) -- s1"].must_include(sqls.slice!(2)) sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s2", "BEGIN -- s1", "COMMIT -- s1"] end it "should have objects retrieved from a specific shard using with_server from server_block extension" do album = @db.extension(:server_block).with_server(:s1) do @Album.first end album.update(:name=>'MO') @db.sqls.must_equal ["SELECT * FROM albums LIMIT 1 -- s1", "UPDATE albums SET name = 'MO' WHERE (id = 1) -- s1"] end end ����������������������sequel-5.63.0/spec/extensions/shared_caching_spec.rb������������������������������������������������0000664�0000000�0000000�00000012217�14342141206�0022515�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Shared caching behavior" do before do @db = Sequel.mock class ::LookupModel < ::Sequel::Model(@db) columns :id, :caching_model_id, :caching_model_id2 many_to_one :caching_model many_to_one :caching_model2, :key=>[:caching_model_id, :caching_model_id2], :class=>:CachingModel end @c = LookupModel class ::CachingModel < Sequel::Model(@db) columns :id, :id2 end @cc = CachingModel end after do Object.send(:remove_const, :CachingModel) Object.send(:remove_const, :LookupModel) 
end many_to_one_cpk_specs = Module.new do extend Minitest::Spec::DSL it "should use a simple primary key lookup when retrieving many_to_one associated records with a composite key" do @db.sqls.must_equal [] @c.load(:id=>3, :caching_model_id=>1, :caching_model_id2=>2).caching_model2.must_be_same_as(@cm12) @c.load(:id=>3, :caching_model_id=>2, :caching_model_id2=>1).caching_model2.must_be_same_as(@cm21) @db.sqls.must_equal [] @db.fetch = [] @c.load(:id=>4, :caching_model_id=>2, :caching_model_id2=>2).caching_model2.must_be_nil end end many_to_one_pk_specs = Module.new do extend Minitest::Spec::DSL it "should use a simple primary key lookup when retrieving many_to_one associated records" do @cc.set_primary_key([:id, :id2]) @db.sqls.must_equal [] @c.load(:id=>3, :caching_model_id=>1).caching_model.must_be_same_as(@cm1) @c.load(:id=>4, :caching_model_id=>2).caching_model.must_be_same_as(@cm2) @db.sqls.must_equal [] @db.fetch = [] @c.load(:id=>4, :caching_model_id=>3).caching_model.must_be_nil end it "should not use a simple primary key lookup if the association has a nil :key option" do @c.many_to_one :caching_model, :key=>nil, :dataset=>proc{CachingModel.filter(:caching_model_id=>caching_model_id)} @c.load(:id=>3, :caching_model_id=>1).caching_model @db.sqls.wont_equal [] end it "should not use a simple primary key lookup if the :many_to_one_pk_lookup association option is false" do @c.many_to_one :caching_model, :many_to_one_pk_lookup=>false @c.load(:id=>3, :caching_model_id=>1).caching_model @db.sqls.wont_equal [] end it "should not use a simple primary key lookup if the association's :primary_key option doesn't match the primary key of the associated class" do @c.many_to_one :caching_model, :primary_key=>:id2 @c.load(:id=>3, :caching_model_id=>1).caching_model @db.sqls.wont_equal [] end it "should not use a simple primary key lookup if the association has :conditions" do @c.many_to_one :caching_model, :conditions=>{:a=>1} @c.load(:id=>3, :caching_model_id=>1).caching_model @db.sqls.wont_equal [] end it "should not use a simple primary key lookup if the association has :select" do @c.many_to_one :caching_model, :select=>[:a, :b] @c.load(:id=>3, :caching_model_id=>1).caching_model @db.sqls.wont_equal [] end it "should not use a simple primary key lookup if the association has a block" do @c.many_to_one(:caching_model){|ds| ds.where{a > 1}} @c.load(:id=>3, :caching_model_id=>1).caching_model @db.sqls.wont_equal [] end it "should not use a simple primary key lookup if the association has a non-default :dataset option" do cc = @cc @c.many_to_one :caching_model, :dataset=>proc{cc.where(:id=>caching_model_id)} @c.load(:id=>3, :caching_model_id=>1).caching_model @db.sqls.wont_equal [] end it "should use a simple primary key lookup if explicitly set" do @c.many_to_one :caching_model, :select=>[:a, :b], :many_to_one_pk_lookup=>true @c.load(:id=>3, :caching_model_id=>1).caching_model @db.sqls.must_equal [] end end describe "With caching plugin" do before do @cache_class = Class.new(Hash) do attr_accessor :ttl def set(k, v, ttl); self[k] = v; @ttl = ttl; end def get(k); self[k]; end end cache = @cache_class.new @cache = cache @cc.plugin :caching, @cache @db.fetch = {:id=>1} @cm1 = @cc[1] @cm2 = @cc[2] @cm12 = @cc[1, 2] @cm21 = @cc[2, 1] @db.sqls end include many_to_one_cpk_specs include many_to_one_pk_specs end describe "With static_cache plugin with single key" do before do @db.fetch = [{:id=>1}, {:id=>2}] @cc.plugin :static_cache @cm1 = @cc[1] @cm2 = @cc[2] @db.sqls end include many_to_one_pk_specs it "should not issue
regular query if primary key lookup returns no rows" do def @cc.primary_key_lookup(pk); end @cc.singleton_class.send(:private, :primary_key_lookup) @c.many_to_one :caching_model @c.load(:id=>3, :caching_model_id=>1).caching_model @db.sqls.must_equal [] end end describe "With static_cache plugin with composite key" do before do @cc.set_primary_key([:id, :id2]) @db.fetch = [{:id=>1, :id2=>2}, {:id=>2, :id2=>1}] @cc.plugin :static_cache @cm12 = @cc[[1, 2]] @cm21 = @cc[[2, 1]] @db.sqls end include many_to_one_cpk_specs end end ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/single_table_inheritance_spec.rb��������������������������������������0000664�0000000�0000000�00000040152�14342141206�0024573�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe Sequel::Model, "single table inheritance plugin" do before do class ::StiTest < Sequel::Model columns :id, :kind, :blah plugin :single_table_inheritance, :kind end class ::StiTestSub1 < StiTest end class ::StiTestSub2 < StiTest end @ds = StiTest.dataset DB.reset end after do Object.send(:remove_const, :StiTestSub1) Object.send(:remove_const, :StiTestSub2) Object.send(:remove_const, :StiTest) end describe ".sti_load" do it "should load instances of the correct type" do StiTest.sti_load(:id => 3).must_be_instance_of StiTest StiTest.sti_load(:id => 3, :kind => 'StiTestSub1').must_be_instance_of StiTestSub1 StiTest.sti_load(:id => 3, :kind => 'StiTestSub2').must_be_instance_of StiTestSub2 end end describe ".sti_class_from_sti_key" do it "should load the correct subclass based on the key" do StiTest.sti_class_from_sti_key('StiTest').must_equal StiTest StiTest.sti_class_from_sti_key('StiTestSub1').must_equal StiTestSub1 StiTest.sti_class_from_sti_key('StiTestSub2').must_equal StiTestSub2 end end it "should freeze sti metadata when freezing model class" do StiTest.freeze StiTest.sti_dataset.frozen?.must_equal true StiTestSub1.freeze StiTestSub1.sti_key_array.frozen?.must_equal true proc{class ::StiTestSub1Sub1 < StiTestSub1; end}.must_raise RuntimeError, TypeError end it "should have simple_table = nil" do StiTest.simple_table.must_equal "sti_tests" StiTestSub1.simple_table.must_be_nil end it "should not attempt to use prepared statements" do StiTestSub1.plugin :prepared_statements StiTestSub1.load(:id=>1, :kind=>'StiTestSub1').save DB.sqls.must_equal ["UPDATE sti_tests SET kind = 'StiTestSub1' WHERE ((sti_tests.kind IN ('StiTestSub1')) AND (id = 1))"] StiTest.plugin :prepared_statements StiTest.load(:id=>2, :kind=>'StiTest').save DB.sqls.must_equal ["UPDATE sti_tests SET kind = 'StiTest' WHERE (id = 2)"] end it "should allow changing the inheritance column via a plugin :single_table_inheritance call" do StiTest.plugin :single_table_inheritance, :blah Object.send(:remove_const, :StiTestSub1) Object.send(:remove_const, :StiTestSub2) class ::StiTestSub1 < StiTest; end class 
::StiTestSub2 < StiTest; end StiTest.dataset = StiTest.dataset.with_fetch([{:blah=>'StiTest'}, {:blah=>'StiTestSub1'}, {:blah=>'StiTestSub2'}]) StiTest.all.collect{|x| x.class}.must_equal [StiTest, StiTestSub1, StiTestSub2] StiTest.dataset.sql.must_equal "SELECT * FROM sti_tests" StiTestSub1.dataset.sql.must_equal "SELECT * FROM sti_tests WHERE (sti_tests.blah IN ('StiTestSub1'))" StiTestSub2.dataset.sql.must_equal "SELECT * FROM sti_tests WHERE (sti_tests.blah IN ('StiTestSub2'))" end it "should return rows with the correct class based on the polymorphic_key value" do StiTest.dataset = StiTest.dataset.with_fetch([{:kind=>'StiTest'}, {:kind=>'StiTestSub1'}, {:kind=>'StiTestSub2'}]) StiTest.all.collect{|x| x.class}.must_equal [StiTest, StiTestSub1, StiTestSub2] end it "should return rows with the correct class based on the polymorphic_key value when retrieving by primary key" do StiTest.dataset = StiTest.dataset.with_fetch([{:kind=>'StiTestSub1'}]) StiTest[1].class.must_equal StiTestSub1 end it "should return rows with the correct class for subclasses based on the polymorphic_key value" do class ::StiTestSub1Sub < StiTestSub1; end StiTestSub1.dataset = StiTestSub1.dataset.with_fetch([{:kind=>'StiTestSub1'}, {:kind=>'StiTestSub1Sub'}]) StiTestSub1.all.collect{|x| x.class}.must_equal [StiTestSub1, StiTestSub1Sub] end it "should fall back to the main class if the given class does not exist" do StiTest.dataset = StiTest.dataset.with_fetch(:kind=>'StiTestSub3') StiTest.all.collect{|x| x.class}.must_equal [StiTest] end it "should inherit dataset_modules correctly in subclass" do StiTest.dataset_module{def foo; 1; end} Object.send(:remove_const, :StiTestSub1) Object.send(:remove_const, :StiTestSub2) class ::StiTestSub1 < StiTest; end StiTestSub1.dataset_module{def bar; 2; end} class ::StiTestSub2 < StiTestSub1; end StiTestSub2.dataset_module{def baz; 3; end} StiTest.dataset.foo.must_equal 1 proc{StiTest.dataset.bar}.must_raise NoMethodError proc{StiTest.dataset.baz}.must_raise NoMethodError StiTestSub1.dataset.foo.must_equal 1 StiTestSub1.dataset.bar.must_equal 2 proc{StiTestSub1.dataset.baz}.must_raise NoMethodError StiTestSub2.dataset.foo.must_equal 1 StiTestSub2.dataset.bar.must_equal 2 StiTestSub2.dataset.baz.must_equal 3 end it "should fall back to the main class if the sti_key field is empty or nil without calling constantize" do called = false StiTest.define_singleton_method(:constantize){|_| called = true} StiTest.singleton_class.send(:private, :constantize) StiTest.plugin :single_table_inheritance, :kind StiTest.dataset = StiTest.dataset.with_fetch([{:kind=>''}, {:kind=>nil}]) StiTest.all.collect{|x| x.class}.must_equal [StiTest, StiTest] called.must_equal false end it "should set the model class name when saving" do StiTest.new.save StiTestSub1.new.save StiTestSub2.new.save DB.sqls.must_equal ["INSERT INTO sti_tests (kind) VALUES ('StiTest')", "SELECT * FROM sti_tests WHERE id = 10", "INSERT INTO sti_tests (kind) VALUES ('StiTestSub1')", "SELECT * FROM sti_tests WHERE ((sti_tests.kind IN ('StiTestSub1')) AND (id = 10)) LIMIT 1", "INSERT INTO sti_tests (kind) VALUES ('StiTestSub2')", "SELECT * FROM sti_tests WHERE ((sti_tests.kind IN ('StiTestSub2')) AND (id = 10)) LIMIT 1"] end it "should destroy the model correctly" do StiTest.load(:id=>1).destroy StiTestSub1.load(:id=>1).destroy StiTestSub2.load(:id=>1).destroy DB.sqls.must_equal ["DELETE FROM sti_tests WHERE id = 1", "DELETE FROM sti_tests WHERE ((sti_tests.kind IN ('StiTestSub1')) AND (id = 1))", "DELETE FROM sti_tests WHERE
((sti_tests.kind IN ('StiTestSub2')) AND (id = 1))"] end it "should handle validations on the type column field" do o = StiTestSub1.new def o.validate errors.add(:kind, 'not present') unless kind end o.valid?.must_equal true end it "should set type column field even if validations are skipped" do StiTestSub1.new.save(:validate=>false) DB.sqls.must_equal ["INSERT INTO sti_tests (kind) VALUES ('StiTestSub1')", "SELECT * FROM sti_tests WHERE ((sti_tests.kind IN ('StiTestSub1')) AND (id = 10)) LIMIT 1"] end it "should override an existing value in the class name field" do StiTest.create(:kind=>'StiTestSub1') DB.sqls.must_equal ["INSERT INTO sti_tests (kind) VALUES ('StiTestSub1')", "SELECT * FROM sti_tests WHERE id = 10"] end it "should handle type column with the same name as existing method names" do StiTest.plugin :single_table_inheritance, :type StiTest.columns :id, :type StiTest.create DB.sqls.must_equal ["INSERT INTO sti_tests (type) VALUES ('StiTest')", "SELECT * FROM sti_tests WHERE id = 10"] end it "should add a filter to model datasets inside subclasses hook to only retrieve objects with the matching key" do StiTest.dataset.sql.must_equal "SELECT * FROM sti_tests" StiTestSub1.dataset.sql.must_equal "SELECT * FROM sti_tests WHERE (sti_tests.kind IN ('StiTestSub1'))" StiTestSub2.dataset.sql.must_equal "SELECT * FROM sti_tests WHERE (sti_tests.kind IN ('StiTestSub2'))" end it "should add a correct filter for multiple levels of subclasses" do class ::StiTestSub1A < StiTestSub1; end StiTestSub1.dataset.sql.must_equal "SELECT * FROM sti_tests WHERE (sti_tests.kind IN ('StiTestSub1', 'StiTestSub1A'))" StiTestSub1A.dataset.sql.must_equal "SELECT * FROM sti_tests WHERE (sti_tests.kind IN ('StiTestSub1A'))" class ::StiTestSub2A < StiTestSub2; end StiTestSub2.dataset.sql.must_equal "SELECT * FROM sti_tests WHERE (sti_tests.kind IN ('StiTestSub2', 'StiTestSub2A'))" StiTestSub2A.dataset.sql.must_equal "SELECT * FROM sti_tests WHERE (sti_tests.kind IN ('StiTestSub2A'))" class ::StiTestSub1B < StiTestSub1A; end StiTestSub1.dataset.sql.must_equal "SELECT * FROM sti_tests WHERE (sti_tests.kind IN ('StiTestSub1', 'StiTestSub1A', 'StiTestSub1B'))" StiTestSub1A.dataset.sql.must_equal "SELECT * FROM sti_tests WHERE (sti_tests.kind IN ('StiTestSub1A', 'StiTestSub1B'))" StiTestSub1B.dataset.sql.must_equal "SELECT * FROM sti_tests WHERE (sti_tests.kind IN ('StiTestSub1B'))" end it "should work correctly with the :caching plugin" do cache_class = Class.new(Hash) do attr_accessor :ttl def set(k, v, ttl); self[k] = v; @ttl = ttl; end def get(k); self[k]; end end cache = cache_class.new StiTest.plugin :caching, cache def StiTest.cache_key_prefix; "stitest" end c2 = Class.new StiTest c2.cache_key(:id).must_equal StiTest.cache_key(:id) obj2 = c2.new obj2.values[:x] = 2 obj2.save c2[obj2.id] c2.cache_get_pk(obj2.id).values.must_equal StiTest.cache_get_pk(obj2.id).values obj2.save c2.cache_get_pk(obj2.id).must_be_nil StiTest.cache_get_pk(obj2.id).must_be_nil end describe "with custom options" do before do class ::StiTest2 < Sequel::Model columns :id, :kind private def _save_refresh; end end end after do Object.send(:remove_const, :StiTest2) Object.send(:remove_const, :StiTest3) if defined?(StiTest3) Object.send(:remove_const, :StiTest4) if defined?(StiTest4) end it "should freeze sti key and model map if given as hashes when freezing model class" do StiTest2.plugin :single_table_inheritance, :kind, :model_map=>{0=>StiTest2, 1=>:StiTest3, 2=>'StiTest4'}, :key_map=>{StiTest2=>4, 'StiTest3'=>5, 'StiTest4'=>6}
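# In the plugin call above, :model_map maps sti key column values to classes
# (given as a Class, Symbol, or String name), while :key_map maps classes or
# class names back to the value stored in the key column; the custom-option
# specs below exercise both directions.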
StiTest2.freeze StiTest2.sti_key_map.frozen?.must_equal true StiTest2.sti_model_map.frozen?.must_equal true end it "should have working row_proc if using set_dataset in subclass to remove columns" do StiTest2.plugin :single_table_inheritance, :kind class ::StiTest3 < ::StiTest2 set_dataset(dataset.select(*(columns - [:blah]))) end class ::StiTest4 < ::StiTest3; end StiTest3.dataset = StiTest3.dataset.with_fetch(:id=>1, :kind=>'StiTest4') StiTest3[1].must_equal StiTest4.load(:id=>1, :kind=>'StiTest4') end it "should work with custom procs with strings" do StiTest2.plugin :single_table_inheritance, :kind, :model_map=>proc{|v| v == 1 ? 'StiTest3' : 'StiTest4'}, :key_map=>proc{|klass| klass.name == 'StiTest3' ? 1 : 2} class ::StiTest3 < ::StiTest2; end class ::StiTest4 < ::StiTest2; end StiTest2.dataset.row_proc.call(:kind=>0).must_be_instance_of(StiTest4) StiTest2.dataset.row_proc.call(:kind=>1).must_be_instance_of(StiTest3) StiTest2.dataset.row_proc.call(:kind=>2).must_be_instance_of(StiTest4) StiTest2.create.kind.must_equal 2 StiTest3.create.kind.must_equal 1 StiTest4.create.kind.must_equal 2 end it "should work with custom procs with symbols" do StiTest2.plugin :single_table_inheritance, :kind, :model_map=>proc{|v| v == 1 ? :StiTest3 : :StiTest4}, :key_map=>proc{|klass| klass.name == 'StiTest3' ? 1 : 2} class ::StiTest3 < ::StiTest2; end class ::StiTest4 < ::StiTest2; end StiTest2.dataset.row_proc.call(:kind=>0).must_be_instance_of(StiTest4) StiTest2.dataset.row_proc.call(:kind=>1).must_be_instance_of(StiTest3) StiTest2.dataset.row_proc.call(:kind=>2).must_be_instance_of(StiTest4) StiTest2.create.kind.must_equal 2 StiTest3.create.kind.must_equal 1 StiTest4.create.kind.must_equal 2 end it "should work with custom hashes" do StiTest2.plugin :single_table_inheritance, :kind, :model_map=>{0=>StiTest2, 1=>:StiTest3, 2=>'StiTest4'}, :key_map=>{'StiTest2'=>7, StiTest2=>4, 'StiTest3'=>5, 'StiTest4'=>6} class ::StiTest3 < ::StiTest2; end class ::StiTest4 < ::StiTest2; end StiTest2.dataset.row_proc.call(:kind=>0).must_be_instance_of(StiTest2) StiTest2.dataset.row_proc.call(:kind=>1).must_be_instance_of(StiTest3) StiTest2.dataset.row_proc.call(:kind=>2).must_be_instance_of(StiTest4) StiTest3.sti_model_map.must_equal StiTest2.sti_model_map StiTest2.create.kind.must_equal 4 StiTest3.create.kind.must_equal 5 StiTest4.create.kind.must_equal 6 class ::StiTest5 < ::StiTest4; end StiTest5.create.kind.must_be_nil end it "should infer key_map from model_map if provided as a hash" do StiTest2.plugin :single_table_inheritance, :kind, :model_map=>{0=>StiTest2, 1=>'StiTest3', 2=>:StiTest4} class ::StiTest3 < ::StiTest2; end class ::StiTest4 < ::StiTest2; end StiTest2.dataset.row_proc.call(:kind=>0).must_be_instance_of(StiTest2) StiTest2.dataset.row_proc.call(:kind=>1).must_be_instance_of(StiTest3) StiTest2.dataset.row_proc.call(:kind=>2).must_be_instance_of(StiTest4) StiTest2.create.kind.must_equal 0 StiTest3.create.kind.must_equal 1 StiTest4.create.kind.must_equal 2 end it "should raise exceptions if a bad model value is used" do StiTest2.plugin :single_table_inheritance, :kind, :model_map=>{0=>1,1=>1.5, 2=>Date.today} class ::StiTest3 < ::StiTest2; end class ::StiTest4 < ::StiTest2; end proc{StiTest2.dataset.row_proc.call(:kind=>0)}.must_raise(Sequel::Error) proc{StiTest2.dataset.row_proc.call(:kind=>1)}.must_raise(Sequel::Error) proc{StiTest2.dataset.row_proc.call(:kind=>2)}.must_raise(Sequel::Error) end it "should work with non-bijective mappings" do StiTest2.plugin :single_table_inheritance, :kind, 
:model_map=>{0=>'StiTest3', 1=>'StiTest3', 2=>'StiTest4'} class ::StiTest3 < ::StiTest2; end class ::StiTest4 < ::StiTest2; end StiTest2.dataset.row_proc.call(:kind=>0).must_be_instance_of(StiTest3) StiTest2.dataset.row_proc.call(:kind=>1).must_be_instance_of(StiTest3) StiTest2.dataset.row_proc.call(:kind=>2).must_be_instance_of(StiTest4) [0,1].must_include(StiTest3.create.kind) StiTest4.create.kind.must_equal 2 end it "should work with non-bijective mappings and key map procs" do StiTest2.plugin :single_table_inheritance, :kind, :key_map=>proc{|model| model.to_s == 'StiTest4' ? 2 : [0,1] } class ::StiTest3 < ::StiTest2; end class ::StiTest4 < ::StiTest2; end StiTest2.dataset.sql.must_equal "SELECT * FROM sti_test2s" StiTest3.dataset.sql.must_equal "SELECT * FROM sti_test2s WHERE (sti_test2s.kind IN (0, 1))" StiTest4.dataset.sql.must_equal "SELECT * FROM sti_test2s WHERE (sti_test2s.kind IN (2))" end it "should create correct sql with non-bijective mappings" do StiTest2.plugin :single_table_inheritance, :kind, :model_map=>{0=>'StiTest3', 1=>'StiTest3', 2=>'StiTest4'} class ::StiTest3 < ::StiTest2; end class ::StiTest4 < ::StiTest2; end StiTest2.dataset.sql.must_equal "SELECT * FROM sti_test2s" ["SELECT * FROM sti_test2s WHERE (sti_test2s.kind IN (0, 1))", "SELECT * FROM sti_test2s WHERE (sti_test2s.kind IN (1, 0))"].must_include(StiTest3.dataset.sql) end it "should destroy the model correctly" do StiTest2.plugin :single_table_inheritance, :kind, :model_map=>{'sti3'=>'StiTest3', 'sti3b'=>'StiTest3', 'sti4'=>'StiTest4'} class ::StiTest3 < ::StiTest2; end class ::StiTest4 < ::StiTest2; end StiTest2.load(:id=>1).destroy StiTest3.load(:id=>1).destroy sqls = DB.sqls sqls.shift.must_equal "DELETE FROM sti_test2s WHERE id = 1" ["DELETE FROM sti_test2s WHERE ((sti_test2s.kind IN ('sti3', 'sti3b')) AND (id = 1))", "DELETE FROM sti_test2s WHERE ((sti_test2s.kind IN ('sti3b', 'sti3')) AND (id = 1))"].must_include(sqls.pop) sqls.must_equal [] end it "should honor a :key_chooser" do StiTest2.plugin :single_table_inheritance, :kind, :key_chooser => proc{|inst| inst.model.to_s.downcase } class ::StiTest3 < ::StiTest2; end class ::StiTest4 < ::StiTest2; end StiTest3.create.kind.must_equal 'stitest3' StiTest4.create.kind.must_equal 'stitest4' end end end ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/singular_table_names_spec.rb������������������������������������������0000664�0000000�0000000�00000001042�14342141206�0023743�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "singular_table_names plugin" do before do @c = Class.new(Sequel::Model) @c.plugin :singular_table_names end after do Object.send(:remove_const, :Foo) end it "should use the singular form of model name for table name" do class ::Foo < @c; end Foo.table_name.must_equal :foo end it "should handle namespaced models using 
singular form of last component of model name" do module ::Foo; end class Foo::Bar < @c; end Foo::Bar.table_name.must_equal :bar end end
sequel-5.63.0/spec/extensions/skip_create_refresh_spec.rb
require_relative "spec_helper" describe "Sequel::Plugins::SkipCreateRefresh" do it "should skip the refresh after saving a new object" do c = Class.new(Sequel::Model(:a)) c.columns :id, :x c.dataset = c.dataset.with_autoid(2) c.db.reset c.create(:x=>1) c.db.sqls.must_equal ['INSERT INTO a (x) VALUES (1)', 'SELECT * FROM a WHERE id = 2'] c.dataset = c.dataset.with_autoid(2) c.plugin :skip_create_refresh c.db.reset c.create(:x=>3).values.must_equal(:id=>2, :x=>3) c.db.sqls.must_equal ['INSERT INTO a (x) VALUES (3)'] end end
sequel-5.63.0/spec/extensions/skip_saving_columns_spec.rb
require_relative "spec_helper" describe "Skip Saving Generated Columns" do before do @db = Sequel.mock @db.numrows = 1 @db.autoid = 1 def @db.schema(*) { :id=>{:type=>:integer}, :user_id=>{:type=>:integer}, :name=>{:type=>:string}, :search=>{:type=>:string, :generated=>true} } end @db.singleton_class.send(:alias_method, :schema, :schema) @c = Class.new(Sequel::Model(@db[:t])) def @c.db_schema; @db.schema; end @c.columns :id, :user_id, :name, :search @c.plugin :skip_saving_columns @o = @c.load(id: 2, user_id: 1, name: 'a', search: 's') @db.sqls end it "should not include generated columns by default when saving" do @o.save @db.sqls.must_equal ["UPDATE t SET user_id = 1, name = 'a' WHERE (id = 2)"] end it "should not include generated columns by default when saving if loaded into class without dataset" do @db = Sequel.mock @db.numrows = 1 @db.autoid = 1 def @db.schema(*) { :id=>{:type=>:integer}, :user_id=>{:type=>:integer},
:name=>{:type=>:string}, :search=>{:type=>:string, :generated=>true} } end @c = Class.new(Sequel::Model) @c.plugin :skip_saving_columns def @c.db_schema; @db.schema; end @c.dataset = @db[:t] @c.columns :id, :user_id, :name, :search @o = @c.load(id: 2, user_id: 1, name: 'a', search: 's') @db.sqls @o.save @db.sqls.must_equal ["UPDATE t SET user_id = 1, name = 'a' WHERE (id = 2)"] end it "should allow overriding which columns to skip" do @c.skip_saving_columns = @c.skip_saving_columns + [:name] @o.save @db.sqls.must_equal ["UPDATE t SET user_id = 1 WHERE (id = 2)"] @c.skip_saving_columns = [:name] @o.save @db.sqls.must_equal ["UPDATE t SET user_id = 1, search = 's' WHERE (id = 2)"] end it "should reset columns to skip from generated columns when resetting dataset if not previously overridden" do def @db.schema(*) { :id=>{:type=>:integer}, :user_id=>{:type=>:integer}, :name=>{:type=>:string, :generated=>true}, :search=>{:type=>:string} } end @c.dataset = @db[:x] @db.sqls @o.save @db.sqls.must_equal ["UPDATE x SET user_id = 1, search = 's' WHERE (id = 2)"] @c.skip_saving_columns = [:search] @c.dataset = @db[:y] @db.sqls @o.save @db.sqls.must_equal ["UPDATE y SET user_id = 1, name = 'a' WHERE (id = 2)"] end it "should freeze generated columns when freezing class" do @c.freeze proc{@c.skip_saving_columns << :name}.must_raise(RuntimeError) end it "should not include skipped columns when updating, even if they have been modified" do @o.update(user_id: 3, search: 'sd') @db.sqls.must_equal ["UPDATE t SET user_id = 3 WHERE (id = 2)"] end it "should include skipped columns when specified explicitly as columns to save" do @o.save(:columns=>[:user_id, :search]) @db.sqls.must_equal ["UPDATE t SET user_id = 1, search = 's' WHERE (id = 2)"] end it "should not include skipped columns when inserting, even if they are present" do @db.fetch = {id: 1, user_id: 2, name: 'a', search: 's2'} o = @c.new o.values.merge!(id: 1, user_id: 2, name: 'a', search: 's') o.save @db.sqls.must_equal ["INSERT INTO t (id, user_id, name) VALUES (1, 2, 'a')", "SELECT * FROM t WHERE (id = 1) LIMIT 1"] o.values.must_equal(id: 1, user_id: 2, name: 'a', search: 's2') end it "should work correctly in subclasses" do @c.skip_saving_columns = @c.skip_saving_columns + [:name] @sc = Class.new(@c) @c.skip_saving_columns = [:search] @so = @sc.load(id: 2, user_id: 1, name: 'a', search: 's') @o.save @db.sqls.must_equal ["UPDATE t SET user_id = 1, name = 'a' WHERE (id = 2)"] @so.save @db.sqls.must_equal ["UPDATE t SET user_id = 1 WHERE (id = 2)"] @sc.skip_saving_columns = [:name] @o.save @db.sqls.must_equal ["UPDATE t SET user_id = 1, name = 'a' WHERE (id = 2)"] @so.save @db.sqls.must_equal ["UPDATE t SET user_id = 1, search = 's' WHERE (id = 2)"] end end 
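# A minimal usage sketch of the plugin specced above, assuming a
# hypothetical Post model whose table has a database-generated :search
# column:
#
#   class Post < Sequel::Model
#     plugin :skip_saving_columns
#     # Columns marked :generated in the schema are skipped by default;
#     # the list can also be set explicitly:
#     # self.skip_saving_columns = [:search]
#   end
#
#   # INSERT and UPDATE statements then omit :search:
#   # Post.create(title: 't') # INSERT INTO posts (title) VALUES ('t')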
��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/spec_helper.rb��������������������������������������������������������0000664�0000000�0000000�00000004411�14342141206�0021047�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������if ENV['COVERAGE'] require_relative "../sequel_coverage" SimpleCov.sequel_coverage(:filter=>%r{lib/sequel/(extensions|plugins)/\w+\.rb\z}) end ENV['MT_NO_PLUGINS'] = '1' # Work around stupid autoloading of plugins gem 'minitest' require 'minitest/global_expectations/autorun' require 'minitest/hooks/default' $:.unshift(File.join(File.dirname(File.expand_path(__FILE__)), "../../lib/")) require_relative "../../lib/sequel" require_relative "../visibility_checking_after_hook" if ENV['CHECK_METHOD_VISIBILITY'] require_relative '../deprecation_helper' if ENV['SEQUEL_TZINFO_VERSION'] # Allow forcing specific TZInfo versions, useful when testing gem 'tzinfo', ENV['SEQUEL_TZINFO_VERSION'] end begin # Attempt to load ActiveSupport blank extension and inflector first, so Sequel # can override them. require 'active_support' require 'active_support/core_ext/object/blank' require 'active_support/inflector' require 'active_support/core_ext/string/inflections' rescue LoadError nil end if (RUBY_VERSION >= '2.0.0' && RUBY_ENGINE == 'ruby') || (RUBY_ENGINE == 'jruby' && (JRUBY_VERSION >= '9.3' || (JRUBY_VERSION.match(/\A9\.2\.(\d+)/) && $1.to_i >= 7))) Sequel.extension :core_refinements end class << Sequel::Model attr_writer :db_schema alias orig_columns columns def columns(*cols) return super if cols.empty? 
define_method(:columns){cols} alias_method(:columns, :columns) @dataset.send(:columns=, cols) if @dataset def_column_accessor(*cols) @columns = cols @db_schema = {} cols.each{|c| @db_schema[c] = {}} end end Sequel::DB = nil Sequel::Model.use_transactions = false Sequel::Model.cache_anonymous_models = false db = Sequel.mock(:fetch=>{:id => 1, :x => 1}, :numrows=>1, :autoid=>proc{|sql| 10}) def db.schema(*) [[:id, {:primary_key=>true}]] end def db.reset() sqls end def db.supports_schema_parsing?() true end Sequel::Model.db = DB = db Sequel::DATABASES.clear if ENV['SEQUEL_COLUMNS_INTROSPECTION'] Sequel.extension :columns_introspection Sequel::Database.extension :columns_introspection Sequel::Mock::Dataset.send(:include, Sequel::ColumnsIntrospection) end if ENV['SEQUEL_NO_CACHE_ASSOCIATIONS'] Sequel::Model.cache_associations = false end Sequel::Model.plugin :throw_failures if ENV['SEQUEL_MODEL_THROW_FAILURES'] �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/split_array_nil_spec.rb�����������������������������������������������0000664�0000000�0000000�00000001614�14342141206�0022765�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "split_array_nil extension" do before do @ds = Sequel.mock[:table].extension(:split_array_nil) end it "should split IN with nil in array into separate OR IS NULL clause" do @ds.filter(:a=>[1, nil]).sql.must_equal "SELECT * FROM table WHERE ((a IN (1)) OR (a IS NULL))" end it "should split NOT IN with nil in array into separate AND IS NOT NULL clause" do @ds.exclude(:a=>[1, nil]).sql.must_equal "SELECT * FROM table WHERE ((a NOT IN (1)) AND (a IS NOT NULL))" end it "should not affect other IN/NOT in clauses" do @ds.filter(:a=>[1, 2]).sql.must_equal "SELECT * FROM table WHERE (a IN (1, 2))" @ds.exclude(:a=>[1, 2]).sql.must_equal "SELECT * FROM table WHERE (a NOT IN (1, 2))" end it "should not affect other types of filters clauses" do @ds.filter(:a=>1).sql.must_equal "SELECT * FROM table WHERE (a = 1)" end end ��������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/split_values_spec.rb��������������������������������������������������0000664�0000000�0000000�00000002653�14342141206�0022310�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel::Plugins::SplitValues" do before do @c = Class.new(Sequel::Model(:a)) @c.columns :id, :x @c.plugin :split_values end it "stores non-columns in a separate hash" do @c.dataset = @c.dataset.with_fetch(:id=>1, :x=>2, :y=>3) o = @c.first @c.db.reset o.must_equal @c.load(:id=>1, :x=>2) o[:id].must_equal 1 o[:x].must_equal 2 o[:y].must_equal 3 
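# :y is not a declared column, so it stays readable via #[] while being
# excluded from #values and from the UPDATE statement asserted below.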
{@c.load(:id=>1, :x=>2)=>4}[o].must_equal 4 o.values.must_equal(:id=>1, :x=>2) o.save @c.db.sqls.must_equal ["UPDATE a SET x = 2 WHERE (id = 1)"] end it "handles false values" do @c.dataset = @c.dataset.with_fetch(:id=>1, :x=>false, :y=>3) o = @c.first @c.db.reset o.must_equal @c.load(:id=>1, :x=>false) o[:id].must_equal 1 o[:x].must_equal false o[:y].must_equal 3 {@c.load(:id=>1, :x=>false)=>4}[o].must_equal 4 o.values.must_equal(:id=>1, :x=>false) o.save @c.db.sqls.must_equal ["UPDATE a SET x = 'f' WHERE (id = 1)"] end it "handles nil values" do @c.dataset = @c.dataset.with_fetch(:id=>1, :x=>nil, :y=>3) o = @c.first @c.db.reset o.must_equal @c.load(:id=>1, :x=>nil) o[:id].must_equal 1 o[:x].must_be_nil o[:y].must_equal 3 {@c.load(:id=>1, :x=>nil)=>4}[o].must_equal 4 o.values.must_equal(:id=>1, :x=>nil) o.save @c.db.sqls.must_equal ["UPDATE a SET x = NULL WHERE (id = 1)"] end end �������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/sql_comments_plugin_spec.rb�������������������������������������������0000664�0000000�0000000�00000007352�14342141206�0023661�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "sql_comments plugin " do before do @db = Sequel.mock(:fetch=>{:id=>1, :name=>'a'}).extension(:sql_comments) @c = Class.new(Sequel::Model(@db[:t])) @c.columns :id, :name def @c.to_s; 'C' end @c.many_to_one :c, :class=>@c, :key=>:id @c.one_to_many :cs, :class=>@c, :key=>:id @o = @c.new(:name=>'a'){|o| o.id = 1} @c.plugin :sql_comments @ds = @c.dataset @db.sqls end it "should include SQL comments for default class methods that issue queries" do @c.with_pk!(1) @db.sqls.must_equal ["SELECT * FROM t WHERE (id = 1) LIMIT 1 -- model:C,method_type:class,method:with_pk\n"] end it "should include SQL comments for default instance methods that issue queries" do @o.update(:name=>'b') @db.sqls.must_equal ["INSERT INTO t (name, id) VALUES ('b', 1) -- model:C,method_type:instance,method:update\n", "SELECT * FROM t WHERE (id = 1) LIMIT 1 -- model:C,method_type:instance,method:update\n"] end it "should include SQL comments for default dataset methods that issue queries" do @c.all @db.sqls.must_equal ["SELECT * FROM t -- model:C,method_type:dataset,method:all\n"] end it "should add comments for instance methods if :model is not already one of the comments" do @db.with_comments(:foo=>'bar'){@o.update(:name=>'b')} @db.sqls.must_equal ["INSERT INTO t (name, id) VALUES ('b', 1) -- foo:bar,model:C,method_type:instance,method:update\n", "SELECT * FROM t WHERE (id = 1) LIMIT 1 -- foo:bar,model:C,method_type:instance,method:update\n"] end it "should add comments for dataset methods if :model is not already one of the comments" do @db.with_comments(:foo=>'bar'){@c.all} @db.sqls.must_equal ["SELECT * FROM t -- foo:bar,model:C,method_type:dataset,method:all\n"] end it "should include SQL comments for association load queries" do @o.c @db.sqls.must_equal ["SELECT * FROM t WHERE (id = 1) LIMIT 1 -- model:C,method_type:association_load,association:c\n"] end it "should include SQL comments for association load queries even after finalizing associations " do @c.finalize_associations @c.freeze @o.cs @db.sqls.must_equal 
["SELECT * FROM t WHERE (t.id = 1) -- model:C,method_type:association_load,association:cs\n"] end it "should include SQL comments for eager association loads issue queries" do @c.eager(:c).all @db.sqls.must_equal ["SELECT * FROM t -- model:C,method_type:dataset,method:all\n", "SELECT * FROM t WHERE (t.id IN (1)) -- model:C,method_type:association_eager_load,association:c\n"] end it "should support adding comments for custom class methods" do @c.extend(Module.new{def c; all; end; def d; all; end}) @c.sql_comments_class_methods :c, :d @c.c @db.sqls.must_equal ["SELECT * FROM t -- model:C,method_type:class,method:c\n"] @c.d @db.sqls.must_equal ["SELECT * FROM t -- model:C,method_type:class,method:d\n"] end it "should support adding comments for custom instance methods" do @c.send(:include, Module.new{def c; model.all; end; def d; model.all; end}) @c.sql_comments_instance_methods :c, :d @o.c @db.sqls.must_equal ["SELECT * FROM t -- model:C,method_type:instance,method:c\n"] @o.d @db.sqls.must_equal ["SELECT * FROM t -- model:C,method_type:instance,method:d\n"] end it "should support adding comments for custom dataset methods" do @c.dataset_module(Module.new{def c; all; end; def d; all; end}) @c.sql_comments_dataset_methods :c @c.dataset.c @db.sqls.must_equal ["SELECT * FROM t -- model:C,method_type:dataset,method:c\n"] @c.sql_comments_dataset_methods :d @c.dataset.d @db.sqls.must_equal ["SELECT * FROM t -- model:C,method_type:dataset,method:d\n"] end end ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/sql_comments_spec.rb��������������������������������������������������0000664�0000000�0000000�00000010776�14342141206�0022307�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" shared_specs = Module.new do extend Minitest::Spec::DSL it "should not add a comment if one is not set for the dataset" do @ds.select_sql.must_equal 'SELECT * FROM t' @ds.insert_sql(:a=>1).must_equal 'INSERT INTO t (a) VALUES (1)' @ds.delete_sql.must_equal 'DELETE FROM t' @ds.update_sql(:a=>1).must_equal 'UPDATE t SET a = 1' end it "should add a comment if one is set for the dataset" do ds = @ds.comment("Some\nComment\r\n Here") ds.select_sql.must_equal "SELECT * FROM t -- Some Comment Here\n" ds.insert_sql(:a=>1).must_equal "INSERT INTO t (a) VALUES (1) -- Some Comment Here\n" ds.delete_sql.must_equal "DELETE FROM t -- Some Comment Here\n" ds.update_sql(:a=>1).must_equal "UPDATE t SET a = 1 -- Some Comment Here\n" end it "should not add a comment multiple times" do ds = @ds.comment("Some\nComment\r\n Here") ds.sql.must_equal "SELECT * FROM t -- Some Comment Here\n" ds.sql.must_equal "SELECT * FROM t -- Some Comment Here\n" end it "should not add a comment multiple times" do ds = @ds.comment("Some\nComment\r\n Here") 5.times do ds.first(:x=>1) ds.db.sqls.must_equal ["SELECT * FROM t WHERE (x = 1) LIMIT 1 -- Some Comment Here\n"] end end it "should handle comments used in nested datasets" do ds = @ds.comment("Some\nComment\r\n Here") 
ds.where(:id=>ds).select_sql.must_equal "SELECT * FROM t WHERE (id IN (SELECT * FROM t -- Some Comment Here\n)) -- Some Comment Here\n" end it "should allow overriding comments" do @ds.comment("Foo").comment("Some\nComment\r\n Here").select_sql.must_equal "SELECT * FROM t -- Some Comment Here\n" end it "should allow disabling comments by overriding with nil" do @ds.comment("Foo").comment(nil).select_sql.must_equal "SELECT * FROM t" end it "should handle frozen SQL strings" do @ds = Sequel.mock[:t].with_extend{def select_sql; super.freeze; end}.extension(:sql_comments) ds = @ds.comment("Some\nComment\r\n Here") ds.select_sql.must_equal "SELECT * FROM t -- Some Comment Here\n" end end describe "sql_comments dataset extension" do before do @ds = Sequel.mock[:t].extension(:sql_comments) end include shared_specs end describe "sql_comments database extension" do before do @db = Sequel.mock.extension(:sql_comments) @ds = @db[:t] end include shared_specs it "should support setting comments for all queries executed inside a with_comments block" do @db.with_comments(:foo=>'bar', :baz=>'quux') do @ds.select_sql.must_equal "SELECT * FROM t -- foo:bar,baz:quux\n" end end it "should work if loading the extension multiple times" do @db.with_comments(:foo=>'bar', :baz=>'quux') do @db.extension :sql_comments @ds.select_sql.must_equal "SELECT * FROM t -- foo:bar,baz:quux\n" end end it "should support nesting with_comments blocks" do @db.with_comments(:foo=>'bar') do @db.with_comments(:baz=>'quux') do @ds.select_sql.must_equal "SELECT * FROM t -- foo:bar,baz:quux\n" end end end it "should support nesting with_comments blocks multiple times" do @db.with_comments(:foo=>'bar') do @db.with_comments(:baz=>'quux') do @ds.select_sql.must_equal "SELECT * FROM t -- foo:bar,baz:quux\n" end @db.with_comments(:x=>'y') do @ds.select_sql.must_equal "SELECT * FROM t -- foo:bar,x:y\n" end end end it "should support overriding values in nested blocks" do @db.with_comments(:foo=>'bar', :baz=>'q') do @db.with_comments(:baz=>'quux') do @ds.select_sql.must_equal "SELECT * FROM t -- foo:bar,baz:quux\n" end end end it "should support removing values in nested blocks using nil" do @db.with_comments(:foo=>'bar', :bat=>'q') do @db.with_comments(:baz=>'quux', :bat=>nil) do @ds.select_sql.must_equal "SELECT * FROM t -- foo:bar,baz:quux\n" end end end it "should support combining with dataset-specific comments" do @db.with_comments(:foo=>'bar', :baz=>'quux') do @ds.comment('specific').select_sql.must_equal "SELECT * FROM t -- foo:bar,baz:quux -- specific \n" end end it "should only use block level comments for main dataset, not for nested datasets" do @db.with_comments(:foo=>'bar', :baz=>'quux') do ds = @ds.comment("Some\nComment\r\n Here") ds.where(:id=>ds).select_sql.must_equal "SELECT * FROM t WHERE (id IN (SELECT * FROM t -- Some Comment Here\n)) -- foo:bar,baz:quux -- Some Comment Here \n" end end end
sequel-5.63.0/spec/extensions/sql_expr_spec.rb
require_relative "spec_helper" Sequel.extension :sql_expr describe "Sequel sql_expr extension" do before do @ds = Sequel.mock.dataset
end it "Object#sql_expr should wrap the object in a GenericComplexExpression" do o = Object.new def o.sql_literal(ds) 'foo' end s = o.sql_expr @ds.literal(s).must_equal "foo" @ds.literal(s+1).must_equal "(foo + 1)" @ds.literal(s & true).must_equal "(foo AND 't')" @ds.literal(s < 1).must_equal "(foo < 1)" @ds.literal(s.sql_subscript(1)).must_equal "(foo)[1]" @ds.literal(s.like('a')).must_equal "(foo LIKE 'a' ESCAPE '\\')" @ds.literal(s.as(:a)).must_equal "foo AS a" @ds.literal(s.cast(Integer)).must_equal "CAST(foo AS integer)" @ds.literal(s.desc).must_equal "foo DESC" @ds.literal(s.sql_string + '1').must_equal "(foo || '1')" end it "Numeric#sql_expr should wrap the object in a NumericExpression" do [1, 2.0, 2^70, BigDecimal('1.0')].each do |o| @ds.literal(o.sql_expr).must_equal @ds.literal(o) @ds.literal(o.sql_expr + 1).must_equal "(#{@ds.literal(o)} + 1)" end end it "String#sql_expr should wrap the object in a StringExpression" do @ds.literal("".sql_expr).must_equal "''" @ds.literal("".sql_expr + :a).must_equal "('' || a)" end it "NilClass, TrueClass, and FalseClass#sql_expr should wrap the object in a BooleanExpression" do [nil, true, false].each do |o| @ds.literal(o.sql_expr).must_equal @ds.literal(o) @ds.literal(o.sql_expr & :a).must_equal "(#{@ds.literal(o)} AND a)" end end it "Proc#sql_expr should should treat the object as a virtual row block" do @ds.literal(proc{a}.sql_expr).must_equal "a" @ds.literal(proc{a(b)}.sql_expr).must_equal "a(b)" end it "Proc#sql_expr should should wrap the object in a GenericComplexExpression if the object is not already an expression" do @ds.literal(proc{1}.sql_expr).must_equal "1" @ds.literal(proc{1}.sql_expr + 2).must_equal "(1 + 2)" end it "Proc#sql_expr should should convert a hash or array of two element arrays to a BooleanExpression" do @ds.literal(proc{{a=>b}}.sql_expr).must_equal "(a = b)" @ds.literal(proc{[[a, b]]}.sql_expr & :a).must_equal "((a = b) AND a)" end end �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/sql_log_normalizer_spec.rb��������������������������������������������0000664�0000000�0000000�00000006662�14342141206�0023504�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "sql_log_normalizer extension" do def db(opts={}) return @db if @db @sql = String.new def @sql.info(sql) replace(sql.sub(/\A\(.*?\) /, '')) end if opts[:logger] == false loggers = [] else loggers = @sql end @db = Sequel.mock(:loggers=>loggers) case opts[:type] when :backslash @db.extend_datasets do private def literal_string_append(sql, v) sql << "'" << v.gsub(/(\\|')/){"\\#{$1}"} << "'" end end when :n_standard @db.extend_datasets do private def literal_string_append(sql, v) sql << "N'" << v.gsub("'", "''") << "'" end end when :bad @db.extend_datasets do private def literal_string_append(sql, v) sql << "X'" << v.gsub("'", "''") << "'" end end end @db.extension(:sql_log_normalizer) @db end it "should normalize literal strings and 
numbers for standard escaping" do db[:ts].first(:a=>1, :b=>2.3, :c=>'d', :d=>"e\\f\\'g'") db.sqls.last.must_equal "SELECT * FROM ts WHERE ((a = 1) AND (b = 2.3) AND (c = 'd') AND (d = 'e\\f\\''g''')) LIMIT 1" @sql.must_equal "SELECT * FROM ts WHERE ((a = ?) AND (b = ?) AND (c = ?) AND (d = ?)) LIMIT ?" end it "should normalize literal strings and numbers for backslash escaping" do db(:type=>:backslash)[:ts].first(:a=>1, :b=>2.3, :c=>'d', :d=>"e\\f\\'g'") db.sqls.last.must_equal "SELECT * FROM ts WHERE ((a = 1) AND (b = 2.3) AND (c = 'd') AND (d = 'e\\\\f\\\\\\'g\\'')) LIMIT 1" @sql.must_equal "SELECT * FROM ts WHERE ((a = ?) AND (b = ?) AND (c = ?) AND (d = ?)) LIMIT ?" end it "should normalize literal strings and numbers for N' escaping" do db(:type=>:n_standard)[:ts].first(:a=>1, :b=>2.3, :c=>'d', :d=>"e\\f\\'g'") db.sqls.last.must_equal "SELECT * FROM ts WHERE ((a = 1) AND (b = 2.3) AND (c = N'd') AND (d = N'e\\f\\''g''')) LIMIT 1" @sql.must_equal "SELECT * FROM ts WHERE ((a = ?) AND (b = ?) AND (c = ?) AND (d = ?)) LIMIT ?" end it "should normalize literal strings and numbers for N' escaping when using non N' string" do db(:type=>:n_standard)[:ts].first(:a=>1, :b=>2.3, :c=>Sequel.lit("'d'"), :d=>"e\\f\\'g'") db.sqls.last.must_equal "SELECT * FROM ts WHERE ((a = 1) AND (b = 2.3) AND (c = 'd') AND (d = N'e\\f\\''g''')) LIMIT 1" @sql.must_equal "SELECT * FROM ts WHERE ((a = ?) AND (b = ?) AND (c = ?) AND (d = ?)) LIMIT ?" end it "should raise an error if you attempt to load it into a database that doesn't literalize strings in an expected way" do proc{db(:type=>:bad)}.must_raise Sequel::Error end it "should not affect cases where no logger is used" do db(:logger=>false)[:ts].first(:a=>1, :b=>2.3, :c=>'d', :d=>"e\\f\\'g'") db.sqls.last.must_equal "SELECT * FROM ts WHERE ((a = 1) AND (b = 2.3) AND (c = 'd') AND (d = 'e\\f\\''g''')) LIMIT 1" end it "should handle case where identifier contains apostrophe (will not remove all strings in this case)" do db[:"'ts"].first(:a=>1, :b=>2.3, :c=>'d', :d=>"e\\f\\'g'") db.sqls.last.must_equal "SELECT * FROM 'ts WHERE ((a = 1) AND (b = 2.3) AND (c = 'd') AND (d = 'e\\f\\''g''')) LIMIT 1" @sql.must_equal "SELECT * FROM ?d?e\\f\\?g''')) LIMIT ?" 
end it "should not include bound variables when logging" do db.log_connection_yield("X", nil, :a=>1, :b=>2.3, :c=>'d', :d=>"e\\f\\'g'"){} @sql.must_equal "X" end end ������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/sqlite_json_ops_spec.rb�����������������������������������������������0000664�0000000�0000000�00000011633�14342141206�0023007�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" Sequel.extension :sqlite_json_ops describe "Sequel::SQLite::JSONOp" do before do @db = Sequel.connect('mock://sqlite') @db.extend_datasets{def quote_identifiers?; false end} @j = Sequel.sqlite_json_op(:j) @l = proc{|o| @db.literal(o)} end it "#[]/#get should use the ->> operator" do @l[@j[1]].must_equal "(j ->> 1)" @l[@j.get('a')].must_equal "(j ->> 'a')" end it "#[]/#get should return a JSONOp" do @l[@j[1][2]].must_equal "((j ->> 1) ->> 2)" @l[@j.get('a').get('b')].must_equal "((j ->> 'a') ->> 'b')" end it "#array_length should use the json_array_length function" do @l[@j.array_length].must_equal "json_array_length(j)" @l[@j.array_length("$[1]")].must_equal "json_array_length(j, '$[1]')" end it "#array_length should return a numeric expression" do @l[@j.array_length + 1].must_equal "(json_array_length(j) + 1)" end it "#each should use the json_each function" do @l[@j.each].must_equal "json_each(j)" @l[@j.each("$[1]")].must_equal "json_each(j, '$[1]')" end it "#extract should use the json_extract function" do @l[@j.extract].must_equal "json_extract(j)" @l[@j.extract("$[1]")].must_equal "json_extract(j, '$[1]')" end it "#get_json should use the -> operator" do @l[@j.get_json(1)].must_equal "(j -> 1)" end it "#get_json should return a JSONOp" do @l[@j.get_json('a').get_json('b')].must_equal "((j -> 'a') -> 'b')" end it "#insert should use the json_insert function" do @l[@j.insert('$.a', 1)].must_equal "json_insert(j, '$.a', 1)" @l[@j.insert('$.a', 1, '$.b', 2)].must_equal "json_insert(j, '$.a', 1, '$.b', 2)" end it "#insert should return JSONOp" do @l[@j.insert('$.a', 1).insert('$.b', 2)].must_equal "json_insert(json_insert(j, '$.a', 1), '$.b', 2)" end it "#json/#minify should use the json function" do @l[@j.json].must_equal "json(j)" @l[@j.minify].must_equal "json(j)" end it "#json/#minify should return JSONOp" do @l[@j.json.minify].must_equal "json(json(j))" @l[@j.minify.json].must_equal "json(json(j))" end it "#patch should use the json_patch function" do @l[@j.patch('{"a": 1}')].must_equal "json_patch(j, '{\"a\": 1}')" end it "#patch should return JSONOp" do @l[@j.patch('{"a": 1}').patch('{"b": 2}')].must_equal "json_patch(json_patch(j, '{\"a\": 1}'), '{\"b\": 2}')" end it "#remove should use the json_remove function" do @l[@j.remove('$.a')].must_equal "json_remove(j, '$.a')" @l[@j.remove('$.a', '$[1]')].must_equal "json_remove(j, '$.a', '$[1]')" end it "#remove should return JSONOp" do @l[@j.remove('$.a').remove('$[1]')].must_equal "json_remove(json_remove(j, '$.a'), '$[1]')" end it "#replace should use the json_replace function" do @l[@j.replace('$.a', 1)].must_equal "json_replace(j, '$.a', 1)" @l[@j.replace('$.a', 1, '$.b', 2)].must_equal "json_replace(j, '$.a', 1, '$.b', 2)" end it "#replace should return 
JSONOp" do @l[@j.replace('$.a', 1).replace('$.b', 2)].must_equal "json_replace(json_replace(j, '$.a', 1), '$.b', 2)" end it "#set should use the json_set function" do @l[@j.set('$.a', 1)].must_equal "json_set(j, '$.a', 1)" @l[@j.set('$.a', 1, '$.b', 2)].must_equal "json_set(j, '$.a', 1, '$.b', 2)" end it "#set should return JSONOp" do @l[@j.set('$.a', 1).set('$.b', 2)].must_equal "json_set(json_set(j, '$.a', 1), '$.b', 2)" end it "#tree should use the json_tree function" do @l[@j.tree].must_equal "json_tree(j)" @l[@j.tree("$[1]")].must_equal "json_tree(j, '$[1]')" end it "#type/#typeof should use the json_type function" do @l[@j.type].must_equal "json_type(j)" @l[@j.typeof].must_equal "json_type(j)" @l[@j.type("$[1]")].must_equal "json_type(j, '$[1]')" @l[@j.typeof("$[1]")].must_equal "json_type(j, '$[1]')" end it "#type/#typeof should return a string expression" do @l[@j.type + '1'].must_equal "(json_type(j) || '1')" @l[@j.typeof('$.a') + '1'].must_equal "(json_type(j, '$.a') || '1')" end it "#valid should use the json_valid function" do @l[@j.valid].must_equal "json_valid(j)" end it "#valid should return a boolean expression" do @l[@j.valid & 1].must_equal "(json_valid(j) AND 1)" end it "Sequel.sqlite_json_op should wrap object in a JSONOp" do @l[Sequel.sqlite_json_op(:j).valid].must_equal "json_valid(j)" @l[Sequel.sqlite_json_op(Sequel.join([:j, :k])).valid].must_equal "json_valid((j || k))" end it "Sequel.sqlite_json_op should return a JSONOp as-is" do v = Sequel.sqlite_json_op(:j) Sequel.sqlite_json_op(v).must_be_same_as v end it "SQL::GenericExpression#sqlite_json_op should wrap receiver in JSON op" do @l[Sequel.function(:j, :k).sqlite_json_op.valid].must_equal "json_valid(j(k))" end it "SQL::LiteralString#sqlite_json_op should wrap receiver in JSON op" do @l[Sequel.lit('j || k').sqlite_json_op.valid].must_equal "json_valid(j || k)" end end
sequel-5.63.0/spec/extensions/static_cache_cache_spec.rb
require_relative "spec_helper" describe "static_cache_cache plugin" do before do @db = Sequel.mock @db.fetch = [{:id=>1, :name=>'A'}, {:id=>2, :name=>'B'}] @c = Class.new(Sequel::Model(@db[:t])) def @c.name; 'Foo' end @c.columns :id, :name @file = "spec/files/static_cache_cache-spec-#{$$}.cache" end after do File.delete(@file) if File.file?(@file) end it "should allow dumping and loading static cache rows from a cache file" do @c.plugin :static_cache_cache, @file @db.sqls @c.plugin :static_cache @db.sqls.must_equal ['SELECT * FROM t'] @c.all.must_equal [@c.load(:id=>1, :name=>'A'), @c.load(:id=>2, :name=>'B')] @c.dump_static_cache_cache @db.fetch = [] c = Class.new(Sequel::Model(@db[:t])) def c.name; 'Foo' end c.columns :id, :name c.plugin :static_cache_cache, @file @db.sqls c.plugin :static_cache @db.sqls.must_be_empty c.all.must_equal [c.load(:id=>1, :name=>'A'), c.load(:id=>2, :name=>'B')] end end
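# Usage sketch (illustrative, not part of the Sequel test suite): the
# static_cache_cache workflow verified above. The database, model, and
# cache file path are hypothetical; the plugin must be loaded before
# static_cache, and dump_static_cache_cache (re)writes the cache file.
require 'sequel'

DB = Sequel.sqlite # assumes the sqlite3 gem; any Sequel::Database should work
DB.create_table(:statuses){primary_key :id; String :name}
DB[:statuses].import([:name], [['draft'], ['published']])

class Status < Sequel::Model
  plugin :static_cache_cache, 'tmp/static_cache.cache' # hypothetical path
  plugin :static_cache # reads the cache file if present, else queries
end

Status.dump_static_cache_cache # e.g. from a Rake task, after data changes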
sequel-5.63.0/spec/extensions/static_cache_spec.rb
require_relative "spec_helper" describe "Sequel::Plugins::StaticCache" do before do @db = Sequel.mock @db.fetch = [{:id=>1}, {:id=>2}] @db.numrows = 1 @c = Class.new(Sequel::Model(@db[:t])) @c.columns :id, :name end it "should not attempt to validate objects" do @c.send(:define_method, :validate){errors.add(:name, 'bad')} @c.plugin(:static_cache) @c.map{|o| o.valid?}.must_equal [true, true] end static_cache_specs = Module.new do extend Minitest::Spec::DSL it "should use a ruby hash as a cache of all model instances" do @c.cache.must_equal(1=>@c.load(:id=>1), 2=>@c.load(:id=>2)) @c.cache[1].must_be_same_as(@c1) @c.cache[2].must_be_same_as(@c2) end it "should make .[] method with primary key use the cache" do @c[1].must_equal @c1 @c[2].must_equal @c2 @c[3].must_be_nil @c[[1, 2]].must_be_nil @c[nil].must_be_nil @c[].must_be_nil @db.sqls.must_equal [] end it "should have .[] with a hash not use the cache" do @db.fetch = {:id=>2} @c[:id=>2].must_equal @c2 @db.sqls.must_equal ['SELECT * FROM t WHERE (id = 2) LIMIT 1'] end it "should support cache_get_pk" do @c.cache_get_pk(1).must_equal @c1 @c.cache_get_pk(2).must_equal @c2 @c.cache_get_pk(3).must_be_nil @db.sqls.must_equal [] end it "should have first just return instances without sending a query" do @c.first.must_equal @c1 @c.first(2).must_equal [@c1, @c2] @c.first(0).must_equal [] @db.sqls.must_equal [] end it "should have first just return instances while sending a query" do @db.fetch = lambda do |s| case s when /id = '?(\d+)'?/ id = $1.to_i id <= 2 ? { id: id } : nil when /id >= '?(\d+)'?/ id = $1.to_i id <= 2 ?
(id..2).map { |i| { id: i } } : [] end end @c.first(id: 2).must_equal @c2 @c.first(id: '2').must_equal @c2 @c.first(id: 3).must_be_nil @c.first { id >= 2 }.must_equal @c2 @c.first(2) { id >= 1 }.must_equal [@c1, @c2] @c.first(Sequel.lit('id = ?', 2)).must_equal @c2 @db.sqls.must_equal [ "SELECT * FROM t WHERE (id = 2) LIMIT 1", "SELECT * FROM t WHERE (id = '2') LIMIT 1", "SELECT * FROM t WHERE (id = 3) LIMIT 1", "SELECT * FROM t WHERE (id >= 2) LIMIT 1", "SELECT * FROM t WHERE (id >= 1) LIMIT 2", "SELECT * FROM t WHERE (id = 2) LIMIT 1" ] end it "should have each just iterate over the hash's values without sending a query" do a = [] @c.each{|o| a << o} a = a.sort_by{|o| o.id} a.first.must_equal @c1 a.last.must_equal @c2 @db.sqls.must_equal [] end it "should have map just iterate over the hash's values without sending a query if no argument is given" do @c.map{|v| v.id}.sort.must_equal [1, 2] @db.sqls.must_equal [] end it "should have count with no argument or block not issue a query" do @c.count.must_equal 2 @db.sqls.must_equal [] end it "should have count with argument or block issue a query" do @db.fetch = [[{:count=>1}], [{:count=>2}]] @c.count(:a).must_equal 1 @c.count{b}.must_equal 2 @db.sqls.must_equal ["SELECT count(a) AS count FROM t LIMIT 1", "SELECT count(b) AS count FROM t LIMIT 1"] end it "should have map not send a query if given an argument" do @c.map(:id).sort.must_equal [1, 2] @db.sqls.must_equal [] @c.map([:id,:id]).sort.must_equal [[1,1], [2,2]] @db.sqls.must_equal [] end it "should have map without a block or argument not raise an exception or issue a query" do @c.map.to_a.must_equal @c.all @db.sqls.must_equal [] end it "should have map without a block not return a frozen object" do @c.map.frozen?.must_equal false end it "should have map without a block return an Enumerator" do @c.map.class.must_equal Enumerator end it "should have map with a block and argument raise" do proc{@c.map(:id){}}.must_raise(Sequel::Error) end it "should have other enumerable methods work without sending a query" do a = @c.sort_by{|o| o.id} a.first.must_equal @c1 a.last.must_equal @c2 @db.sqls.must_equal [] end it "should have all return all objects" do a = @c.all.sort_by{|o| o.id} a.first.must_equal @c1 a.last.must_equal @c2 @db.sqls.must_equal [] end it "should have all not return a frozen object" do @c.all.frozen?.must_equal false end it "should have all return things in dataset order" do @c.all.must_equal [@c1, @c2] end it "should have all receiving block" do a = [] b = @c.all { |o| a << o } a.must_equal [@c1, @c2] a.must_equal b @db.sqls.must_equal [] end it "should have as_hash/to_hash without arguments run without a query" do a = @c.to_hash a.must_equal(1=>@c1, 2=>@c2) a[1].must_equal @c1 a[2].must_equal @c2 a = @c.as_hash a.must_equal(1=>@c1, 2=>@c2) a[1].must_equal @c1 a[2].must_equal @c2 @db.sqls.must_equal [] end it "should have as_hash handle :hash option" do h = {} a = @c.as_hash(nil, nil, :hash=>h) a.must_be_same_as h a.must_equal(1=>@c1, 2=>@c2) a[1].must_equal @c1 a[2].must_equal @c2 h = {} a = @c.as_hash(:id, nil, :hash=>h) a.must_be_same_as h a.must_equal(1=>@c1, 2=>@c2) a[1].must_equal @c1 a[2].must_equal @c2 @db.sqls.must_equal [] end it "should have as_hash with arguments return results without a query" do a = @c.as_hash(:id) a.must_equal(1=>@c1, 2=>@c2) a[1].must_equal @c1 a[2].must_equal @c2 a = @c.as_hash([:id]) a.must_equal([1]=>@c1, [2]=>@c2) a[[1]].must_equal @c1 a[[2]].must_equal @c2 @c.as_hash(:id, :id).must_equal(1=>1, 2=>2) @c.as_hash([:id],
:id).must_equal([1]=>1, [2]=>2) @c.as_hash(:id, [:id]).must_equal(1=>[1], 2=>[2]) @c.as_hash([:id], [:id]).must_equal([1]=>[1], [2]=>[2]) @db.sqls.must_equal [] end it "should have as_hash not return a frozen object" do @c.as_hash.frozen?.must_equal false end it "should have to_hash_groups without arguments return the cached objects without a query" do a = @c.to_hash_groups(:id) a.must_equal(1=>[@c1], 2=>[@c2]) a[1].first.must_equal @c1 a[2].first.must_equal @c2 a = @c.to_hash_groups([:id]) a.must_equal([1]=>[@c1], [2]=>[@c2]) a[[1]].first.must_equal @c1 a[[2]].first.must_equal @c2 @c.to_hash_groups(:id, :id).must_equal(1=>[1], 2=>[2]) @c.to_hash_groups([:id], :id).must_equal([1]=>[1], [2]=>[2]) @c.to_hash_groups(:id, [:id]).must_equal(1=>[[1]], 2=>[[2]]) @c.to_hash_groups([:id], [:id]).must_equal([1]=>[[1]], [2]=>[[2]]) @db.sqls.must_equal [] end it "should have to_hash_groups handle :hash option" do h = {} a = @c.to_hash_groups(:id, nil, :hash=>h) a.must_be_same_as h a.must_equal(1=>[@c1], 2=>[@c2]) a[1].first.must_equal @c1 a[2].first.must_equal @c2 end it "should have as_hash_groups without arguments return the cached objects without a query" do a = @c.to_hash_groups(:id) a.must_equal(1=>[@c1], 2=>[@c2]) a[1].first.must_equal @c1 a[2].first.must_equal @c2 a = @c.to_hash_groups([:id]) a.must_equal([1]=>[@c1], [2]=>[@c2]) a[[1]].first.must_equal @c1 a[[2]].first.must_equal @c2 @c.to_hash_groups(:id, :id).must_equal(1=>[1], 2=>[2]) @c.to_hash_groups([:id], :id).must_equal([1]=>[1], [2]=>[2]) @c.to_hash_groups(:id, [:id]).must_equal(1=>[[1]], 2=>[[2]]) @c.to_hash_groups([:id], [:id]).must_equal([1]=>[[1]], [2]=>[[2]]) @db.sqls.must_equal [] end it "subclasses should work correctly" do c = Class.new(@c) c.all.must_equal [c.load(:id=>1), c.load(:id=>2)] c.as_hash.must_equal(1=>c.load(:id=>1), 2=>c.load(:id=>2)) @db.sqls.must_equal ['SELECT * FROM t'] end it "set_dataset should work correctly" do ds = @c.dataset.from(:t2).columns(:id).with_fetch(:id=>3) @c.dataset = ds @c.all.must_equal [@c.load(:id=>3)] @c.as_hash.must_equal(3=>@c.load(:id=>3)) @c.as_hash[3].must_equal @c.all.first @db.sqls.must_equal ['SELECT * FROM t2'] end it "should have load_cache" do a = @c.all.sort_by{|o| o.id} a.first.must_equal @c1 a.last.must_equal @c2 @db.sqls.must_equal [] @c.load_cache a = @c.all.sort_by{|o| o.id} a.first.must_equal @c1 a.last.must_equal @c2 @db.sqls.must_equal ['SELECT * FROM t'] end end describe "without options" do before do @c.plugin :static_cache @c1 = @c.cache[1] @c2 = @c.cache[2] @db.sqls end include static_cache_specs it "should work correctly with composite keys" do @db.fetch = [{:id=>1, :id2=>1}, {:id=>2, :id2=>1}] @c = Class.new(Sequel::Model(@db[:t])) @c.columns :id, :id2 @c.set_primary_key([:id, :id2]) @c.plugin :static_cache @db.sqls @c1 = @c.cache[[1, 2]] @c2 = @c.cache[[2, 1]] @c[[1, 2]].must_be_same_as(@c1) @c[[2, 1]].must_be_same_as(@c2) @db.sqls.must_equal [] end it "all of the static cache values (model instances) should be frozen" do @c.all.all?{|o| o.frozen?}.must_equal true end it "should make .[] method with primary key return cached instances" do @c[1].must_be_same_as(@c1) @c[2].must_be_same_as(@c2) end it "should have cache_get_pk return cached instances" do @c.cache_get_pk(1).must_be_same_as(@c1) @c.cache_get_pk(2).must_be_same_as(@c2) end it "should have each yield cached objects" do a = [] @c.each{|o| a << o} a = a.sort_by{|o| o.id} a.first.must_be_same_as(@c1) a.last.must_be_same_as(@c2) end it "should have other enumerable methods work yield cached objects" do a = 
@c.sort_by{|o| o.id} a.first.must_be_same_as(@c1) a.last.must_be_same_as(@c2) end it "should have all return cached instances" do a = @c.all.sort_by{|o| o.id} a.first.must_be_same_as(@c1) a.last.must_be_same_as(@c2) end it "should have as_hash without arguments use cached instances" do a = @c.as_hash a[1].must_be_same_as(@c1) a[2].must_be_same_as(@c2) end it "should have as_hash with arguments return cached instances" do a = @c.as_hash(:id) a[1].must_be_same_as(@c1) a[2].must_be_same_as(@c2) a = @c.as_hash([:id]) a[[1]].must_be_same_as(@c1) a[[2]].must_be_same_as(@c2) end it "should have to_hash_groups without single argument return the cached instances" do a = @c.to_hash_groups(:id) a[1].first.must_be_same_as(@c1) a[2].first.must_be_same_as(@c2) a = @c.to_hash_groups([:id]) a[[1]].first.must_be_same_as(@c1) a[[2]].first.must_be_same_as(@c2) end it "should not allow the saving of new objects" do proc{@c.create}.must_raise(Sequel::HookFailed) end it "should not allow the saving of existing objects" do @db.fetch = {:id=>1} proc{@c.first(:id=>1).save}.must_raise(Sequel::HookFailed) end it "should not allow the destroying of existing objects" do @db.fetch = {:id=>1} proc{@c.first(:id=>1).destroy}.must_raise(Sequel::HookFailed) end end describe "with :frozen=>false option" do before do @c.plugin :static_cache, :frozen=>false @c1 = @c.cache[1] @c2 = @c.cache[2] @db.sqls end include static_cache_specs it "record retrieved by primary key should not be frozen" do @c[1].frozen?.must_equal false @c.cache_get_pk(1).frozen?.must_equal false end it "none of values returned in #all should be frozen" do @c.all.all?{|o| o.frozen?}.must_equal true end it "none of values yielded by each should be frozen" do a = [] @c.each{|o| a << o} a.all?{|o| !o.frozen?}.must_equal true end it "none of values yielded by Enumerable method should be frozen" do @c.sort_by{|o| o.id}.all?{|o| !o.frozen?}.must_equal true end it "none of values returned by map without an argument or block should be frozen" do @c.map{|o| o}.all?{|o| !o.frozen?}.must_equal true @c.map.all?{|o| !o.frozen?}.must_equal true end it "none of values in the hash returned by as_hash without an argument should be frozen" do @c.as_hash.values.all?{|o| !o.frozen?}.must_equal true end it "none of values in the hash returned by as_hash with a single argument should be frozen" do @c.as_hash(:id).values.all?{|o| !o.frozen?}.must_equal true end it "none of values in the hash returned by as_hash with a single array argument should be frozen" do @c.as_hash([:id, :id]).values.all?{|o| !o.frozen?}.must_equal true end it "none of values in the hash returned by to_hash_groups with a single argument should be frozen" do @c.to_hash_groups(:id).values.flatten.all?{|o| !o.frozen?}.must_equal true end it "none of values in the hash returned by to_hash_groups with a single array argument should be frozen" do @c.to_hash_groups([:id, :id]).values.flatten.all?{|o| !o.frozen?}.must_equal true end it "should not automatically update the cache when creating new model objects" do o = @c.new o.id = 3 @db.autoid = 3 @db.fetch = [[{:id=>1}, {:id=>2}, {:id=>3}], [{:id=>3}]] o.save @c[3].must_be_nil end it "should not automatically update the cache when updating model objects" do o = @c[2] @db.fetch = [[{:id=>1}, {:id=>2, :name=>'a'}]] o.update(:name=>'a') @c[2].values.must_equal(:id=>2) end it "should not automatically update the cache when destroying model objects" do o = @c[2] @db.fetch = [[{:id=>1}]] o.destroy @c[2].must_equal @c2 end end end
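# Usage sketch (illustrative, not part of the Sequel test suite): the
# static_cache behavior specified above, for a small, rarely-changing
# lookup table. The table and model names are hypothetical.
require 'sequel'

DB = Sequel.sqlite # assumes the sqlite3 gem
DB.create_table(:colors){primary_key :id; String :name}
DB[:colors].import([:name], [['red'], ['green']])

class Color < Sequel::Model
  # Rows are loaded once and frozen; pass :frozen=>false to return
  # unfrozen copies instead (as in the specs above).
  plugin :static_cache
end

Color[1]         # served from the in-memory cache, no query
Color.count      # => 2, no query
Color.load_cache # re-reads the table after out-of-band changes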
���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/string_agg_spec.rb����������������������������������������������������0000664�0000000�0000000�00000007514�14342141206�0021723�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "string_agg extension" do dbf = lambda do |db_type| db = Sequel.connect("mock://#{db_type}") db.extension :string_agg db end before(:all) do Sequel.extension :string_agg end before do @sa1 = Sequel.string_agg(:c) @sa2 = Sequel.string_agg(:c, '-') @sa3 = Sequel.string_agg(:c, '-').order(:o) @sa4 = Sequel.string_agg(:c).order(:o).distinct @sa5 = Sequel.string_agg(:c).distinct.order(:o) end it "should use existing method" do db = Sequel.mock db.extend_datasets do def string_agg_sql_append(sql, sa) sql << "sa(#{sa.expr})" end end db.extension :string_agg db.literal(Sequel.string_agg(:c)).must_equal "sa(c)" end it "should correctly literalize on Postgres" do ds = dbf.call(:postgres).dataset.with_quote_identifiers(false) ds.literal(@sa1).must_equal "string_agg(c, ',')" ds.literal(@sa2).must_equal "string_agg(c, '-')" ds.literal(@sa3).must_equal "string_agg(c, '-' ORDER BY o)" ds.literal(@sa4).must_equal "string_agg(DISTINCT c, ',' ORDER BY o)" ds.literal(@sa5).must_equal "string_agg(DISTINCT c, ',' ORDER BY o)" end it "should correctly literalize on SQLAnywhere" do ds = dbf.call(:sqlanywhere).dataset.with_quote_identifiers(false).with_extend{def input_identifier(v) v.to_s end} ds.literal(@sa1).must_equal "list(c, ',')" ds.literal(@sa2).must_equal "list(c, '-')" ds.literal(@sa3).must_equal "list(c, '-' ORDER BY o)" ds.literal(@sa4).must_equal "list(DISTINCT c, ',' ORDER BY o)" ds.literal(@sa5).must_equal "list(DISTINCT c, ',' ORDER BY o)" end it "should correctly literalize on MySQL, H2, HSQLDB" do [:mysql, :h2, :hsqldb].each do |type| db = dbf.call(type) db.define_singleton_method(:database_type){type} ds = db.dataset.with_quote_identifiers(false).with_extend{def input_identifier(v) v.to_s end} ds.literal(@sa1).upcase.must_equal "GROUP_CONCAT(C SEPARATOR ',')" ds.literal(@sa2).upcase.must_equal "GROUP_CONCAT(C SEPARATOR '-')" ds.literal(@sa3).upcase.must_equal "GROUP_CONCAT(C ORDER BY O SEPARATOR '-')" ds.literal(@sa4).upcase.must_equal "GROUP_CONCAT(DISTINCT C ORDER BY O SEPARATOR ',')" ds.literal(@sa5).upcase.must_equal "GROUP_CONCAT(DISTINCT C ORDER BY O SEPARATOR ',')" end end it "should correctly literalize on Oracle and DB2" do [:oracle, :db2].each do |type| ds = dbf.call(type).dataset.with_quote_identifiers(false).with_extend{def input_identifier(v) v.to_s end} ds.literal(@sa1).must_equal "listagg(c, ',') WITHIN GROUP (ORDER BY 1)" ds.literal(@sa2).must_equal "listagg(c, '-') WITHIN GROUP (ORDER BY 1)" ds.literal(@sa3).must_equal "listagg(c, '-') WITHIN GROUP 
(ORDER BY o)" proc{ds.literal(@sa4)}.must_raise Sequel::Error proc{ds.literal(@sa5)}.must_raise Sequel::Error end end it "should raise Sequel::Error on unsupported database" do proc{dbf.call(:foo).literal(@sa1)}.must_raise Sequel::Error end it "should handle order without arguments" do db = dbf.call(:postgres) db.dataset.with_quote_identifiers(false).literal(@sa1.order).must_equal "string_agg(c, ',')" end it "should handle operations on object" do ds = dbf.call(:postgres).dataset.with_quote_identifiers(false) ds.literal(@sa1 + 'b').must_equal "(string_agg(c, ',') || 'b')" ds.literal(@sa1.like('b')).must_equal "(string_agg(c, ',') LIKE 'b')" ds.literal(@sa1 < 'b').must_equal "(string_agg(c, ',') < 'b')" ds.literal(@sa1.as(:b)).must_equal "string_agg(c, ',') AS b" ds.literal(@sa1.cast(:b)).must_equal "CAST(string_agg(c, ',') AS b)" ds.literal(@sa1.desc).must_equal "string_agg(c, ',') DESC" ds.literal(@sa1 =~ /a/).must_equal "(string_agg(c, ',') ~ 'a')" ds.literal(@sa1.sql_subscript(1)).must_equal "(string_agg(c, ','))[1]" end end ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/string_date_time_spec.rb����������������������������������������������0000664�0000000�0000000�00000006066�14342141206�0023121�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" Sequel.extension :string_date_time describe "String#to_time" do it "should convert the string into a Time object" do "2007-07-11".to_time.must_equal Time.parse("2007-07-11") "06:30".to_time.must_equal Time.parse("06:30") end it "should raise InvalidValue for an invalid time" do proc {'0000-00-00'.to_time}.must_raise(Sequel::InvalidValue) end end describe "String#to_date" do after do Sequel.convert_two_digit_years = true end it "should convert the string into a Date object" do "2007-07-11".to_date.must_equal Date.parse("2007-07-11") end it "should convert 2 digit years by default" do "July 11, 07".to_date.must_equal Date.parse("2007-07-11") end it "should not convert 2 digit years if set not to" do Sequel.convert_two_digit_years = false "July 11, 07".to_date.must_equal Date.parse("0007-07-11") end it "should raise InvalidValue for an invalid date" do proc {'0000-00-00'.to_date}.must_raise(Sequel::InvalidValue) end end describe "String#to_datetime" do after do Sequel.convert_two_digit_years = true end it "should convert the string into a DateTime object" do "2007-07-11 10:11:12a".to_datetime.must_equal DateTime.parse("2007-07-11 10:11:12a") end it "should convert 2 digit years by default" do "July 11, 07 10:11:12a".to_datetime.must_equal DateTime.parse("2007-07-11 10:11:12a") end it "should not convert 2 digit years if set not to" do Sequel.convert_two_digit_years = false "July 11, 07 10:11:12a".to_datetime.must_equal DateTime.parse("0007-07-11 10:11:12a") end it "should raise InvalidValue for an invalid date" do proc {'0000-00-00'.to_datetime}.must_raise(Sequel::InvalidValue) end end describe "String#to_sequel_time" do after do Sequel.datetime_class = Time Sequel.convert_two_digit_years = true end it "should convert the string into a Time object by default" 
do "2007-07-11 10:11:12a".to_sequel_time.class.must_equal Time "2007-07-11 10:11:12a".to_sequel_time.must_equal Time.parse("2007-07-11 10:11:12a") end it "should convert the string into a DateTime object if that is set" do Sequel.datetime_class = DateTime "2007-07-11 10:11:12a".to_sequel_time.class.must_equal DateTime "2007-07-11 10:11:12a".to_sequel_time.must_equal DateTime.parse("2007-07-11 10:11:12a") end it "should convert 2 digit years by default if using DateTime class" do Sequel.datetime_class = DateTime "July 11, 07 10:11:12a".to_sequel_time.must_equal DateTime.parse("2007-07-11 10:11:12a") end it "should not convert 2 digit years if set not to when using DateTime class" do Sequel.datetime_class = DateTime Sequel.convert_two_digit_years = false "July 11, 07 10:11:12a".to_sequel_time.must_equal DateTime.parse("0007-07-11 10:11:12a") end it "should raise InvalidValue for an invalid time" do proc {'0000-00-00'.to_sequel_time}.must_raise(Sequel::InvalidValue) Sequel.datetime_class = DateTime proc {'0000-00-00'.to_sequel_time}.must_raise(Sequel::InvalidValue) end end ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/string_stripper_spec.rb�����������������������������������������������0000664�0000000�0000000�00000004012�14342141206�0023023�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel::Plugins::StringStripper" do before do @db = Sequel.mock @c = Class.new(Sequel::Model(@db[:test])) @c.columns :name, :b @c.db_schema[:b][:type] = :blob @c.plugin :string_stripper @o = @c.new end it "should strip all input strings" do @o.name = ' name ' @o.name.must_equal 'name' end it "should not affect other types" do @o.name = 1 @o.name.must_equal 1 @o.name = Date.today @o.name.must_equal Date.today end it "should not strip strings for blob arguments" do v = Sequel.blob(' name ') @o.name = v @o.name.must_be_same_as(v) end it "should not strip strings for blob columns" do @o.b = ' name ' @o.b.must_be_kind_of(Sequel::SQL::Blob) @o.b.must_equal Sequel.blob(' name ') end it "should allow skipping of columns using Model.skip_string_stripping" do @c.skip_string_stripping?(:name).must_equal false @c.skip_string_stripping :name @c.skip_string_stripping?(:name).must_equal true v = ' name ' @o.name = v @o.name.must_be_same_as(v) end it "should work correctly in subclasses" do o = Class.new(@c).new o.name = ' name ' o.name.must_equal 'name' o.b = ' name ' o.b.must_be_kind_of(Sequel::SQL::Blob) o.b.must_equal Sequel.blob(' name ') end it "should work correctly for dataset changes" do c = Class.new(Sequel::Model(@db[:test])) c.plugin :string_stripper def @db.supports_schema_parsing?() true end def @db.schema(*) [[:name, {}], [:b, {:type=>:blob}]] end c.set_dataset(@db[:test2]) o = c.new o.name = ' name ' o.name.must_equal 'name' o.b = ' 
name ' o.b.must_be_kind_of(Sequel::SQL::Blob) o.b.must_equal Sequel.blob(' name ') end it "should handle classes without datasets" do @db = Sequel.mock @c = Class.new(Sequel::Model) @c.plugin :string_stripper @c.dataset = @db[:test] @c.columns :name, :b @o = @c.new @o.name = ' name ' @o.name.must_equal 'name' end end ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/subclasses_spec.rb����������������������������������������������������0000664�0000000�0000000�00000005731�14342141206�0021745�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe Sequel::Model, "Subclasses plugin" do before do @c = Class.new(Sequel::Model) @c.plugin :subclasses end it "#subclasses should record direct subclasses of the given model" do @c.subclasses.must_equal [] sc1 = Class.new(@c) def sc1.name; 'A'; end @c.subclasses.must_equal [sc1] sc1.subclasses.must_equal [] sc2 = Class.new(@c) def sc2.name; 'B'; end @c.subclasses.sort_by(&:name).must_equal [sc1, sc2] sc1.subclasses.must_equal [] sc2.subclasses.must_equal [] ssc1 = Class.new(sc1) @c.subclasses.sort_by(&:name).must_equal [sc1, sc2] sc1.subclasses.must_equal [ssc1] sc2.subclasses.must_equal [] end it "#descendants should record all descendent subclasses of the given model" do @c.descendants.must_equal [] sc1 = Class.new(@c) def sc1.name; 'A'; end @c.descendants.must_equal [sc1] sc1.descendants.must_equal [] sc2 = Class.new(@c) def sc2.name; 'B'; end @c.descendants.sort_by(&:name).must_equal [sc1, sc2] sc1.descendants.must_equal [] sc2.descendants.must_equal [] ssc1 = Class.new(sc1) def ssc1.name; 'C'; end @c.descendants.sort_by(&:name).must_equal [sc1, sc2, ssc1] sc1.descendants.must_equal [ssc1] sc2.descendants.must_equal [] ssc1.descendants.must_equal [] sssc1 = Class.new(ssc1) def sssc1.name; 'D'; end @c.descendants.sort_by(&:name).must_equal [sc1, sc2, ssc1, sssc1] sc1.descendants.must_equal [ssc1, sssc1] sc2.descendants.must_equal [] ssc1.descendants.must_equal [sssc1] sssc1.descendants.must_equal [] end it "#descendents should be an alias of descendants" do @c.descendents.must_equal [] sc1 = Class.new(@c) @c.descendents.must_equal [sc1] sc1.descendents.must_equal [] end it "#freeze_descendants should finalize the associations for all descendants" do sc1 = Class.new(@c) sc1.set_dataset :bars sc1.set_primary_key :foo sc2 = Class.new(@c) sc2.set_dataset :bazs sc2.many_to_one :bar, :class=>sc1 @c.freeze_descendants sc1.frozen?.must_equal true sc2.frozen?.must_equal true sc2.association_reflection(:bar)[:primary_key].must_equal :foo end it "#freeze_descendents should be an alias of freeze_descendants" do sc1 = Class.new(@c) sc1.set_dataset :bars sc1.set_primary_key :foo sc2 = Class.new(@c) sc2.set_dataset :bazs sc2.many_to_one :bar, :class=>sc1 
@c.freeze_descendents sc1.frozen?.must_equal true sc2.frozen?.must_equal true sc2.association_reflection(:bar)[:primary_key].must_equal :foo end it "plugin block should be called with each subclass created" do c = Class.new(Sequel::Model) a = [] c.plugin(:subclasses){|sc| a << sc} sc1 = Class.new(c) a.must_equal [sc1] sc2 = Class.new(c) a.must_equal [sc1, sc2] sc3 = Class.new(sc1) a.must_equal [sc1, sc2, sc3] sc4 = Class.new(sc3) a.must_equal [sc1, sc2, sc3, sc4] end end
sequel-5.63.0/spec/extensions/subset_conditions_spec.rb
require_relative "spec_helper" describe "subset_conditions plugin" do before do @c = Class.new(Sequel::Model(:a)) @c.plugin :subset_conditions end it "should provide *_conditions methods returning the arguments passed" do @c.dataset_module{subset(:published, :published => true)} @c.where(@c.published_conditions).sql.must_equal @c.published.sql @c.dataset_module{where(:active, :active)} @c.where(@c.active_conditions).sql.must_equal @c.active.sql @c.dataset_module{subset(:active_published, Sequel.&(:active, :published => true))} @c.where(@c.active_published_conditions).sql.must_equal @c.active_published.sql @c.where(Sequel.&(@c.active_conditions, @c.published_conditions)).sql.must_equal @c.active_published.sql @c.where(Sequel.|(@c.active_conditions, @c.published_conditions)).sql.must_equal "SELECT * FROM a WHERE (active OR (published IS TRUE))" @c.where(Sequel.|(@c.active_published_conditions, :foo)).sql.must_equal "SELECT * FROM a WHERE ((active AND (published IS TRUE)) OR foo)" end it "should work with blocks" do p1 = proc{{:published=>true}} @c.dataset_module{subset(:published, &p1)} @c.where(@c.published_conditions).sql.must_equal @c.published.sql p2 = :active @c.dataset_module{subset(:active, p2)} @c.where(@c.active_conditions).sql.must_equal @c.active.sql @c.dataset_module{subset(:active_published, p2, &p1)} @c.where(@c.active_published_conditions).sql.must_equal @c.active_published.sql @c.where(Sequel.&(@c.active_conditions, @c.published_conditions)).sql.must_equal @c.active_published.sql @c.where(Sequel.|(@c.active_conditions, @c.published_conditions)).sql.must_equal "SELECT * FROM a WHERE (active OR (published IS TRUE))" @c.where(Sequel.|(@c.active_published_conditions, :foo)).sql.must_equal "SELECT * FROM a WHERE ((active AND (published IS TRUE)) OR foo)" end end
sequel-5.63.0/spec/extensions/symbol_aref_refinement_spec.rb
require_relative "spec_helper" if (RUBY_VERSION >= '2.0.0' && RUBY_ENGINE
== 'ruby') || (RUBY_VERSION >= '2.3.0' && RUBY_ENGINE == 'jruby') Sequel.extension :symbol_aref_refinement using Sequel::SymbolAref describe "symbol_aref_refinement extension" do before do @db = Sequel.mock end it "Symbol#[] should create qualified identifier if given a symbol" do @db.literal(:x[:y]).must_equal "x.y" end it "Symbol#[] should create qualified identifier if given an identifier" do @db.literal(:x[Sequel[:y]]).must_equal "x.y" end it "Symbol#[] should create qualified identifier if given a qualified identifier" do @db.literal(:x[:y[:z]]).must_equal "x.y.z" end it "should not affect other arguments to Symbol#[]" do :x[0].must_equal "x" end end end ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/symbol_as_refinement_spec.rb������������������������������������������0000664�0000000�0000000�00000001041�14342141206�0023770�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" if (RUBY_VERSION >= '2.0.0' && RUBY_ENGINE == 'ruby') || (RUBY_VERSION >= '2.3.0' && RUBY_ENGINE == 'jruby') Sequel.extension :symbol_as_refinement using Sequel::SymbolAs describe "symbol_as_refinement extension" do before do @db = Sequel.mock end it "Symbol#as should create aliased expression" do @db.literal(:x.as(:y)).must_equal "x AS y" end it "Symbol#as should create aliased expression with columns" do @db.literal(:x.as(:y, [:c1, :c2])).must_equal "x AS y(c1, c2)" end end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/synchronize_sql_spec.rb�����������������������������������������������0000664�0000000�0000000�00000011234�14342141206�0023023�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative 'spec_helper' describe "synchronize_sql extension" do module Sync private def literal_string_append(sql, v) db.synchronize{super} end end before do @db = Sequel.mock @db.pool.extend(Module.new do def assign_connection(*args) r = super @times_connection_acquired ||= 0 @times_connection_acquired += 1 if r return r end def times_connection_acquired v = @times_connection_acquired @times_connection_acquired = 0 v || 0 end end) @db.extend_datasets(Sync) @ds = @db[:tab1] end it 'does not checkout a connection if SQL is given as a string' do @ds.extension(:synchronize_sql).with_sql('SELECT 1').sql 
@db.pool.times_connection_acquired.must_equal 0 end it 'checks out an extra connection on insert_sql if there are no strings' do @ds.insert_sql(:numeric_foo => 8) @db.pool.times_connection_acquired.must_equal 0 extds = @ds.extension(:synchronize_sql) extds.insert_sql(:numeric_foo => 8) @db.pool.times_connection_acquired.must_equal 1 end it 'checks out just one connection on insert_sql if there are multiple strings' do @ds.insert_sql(:string_foo1 => 'eight', :string_foo2 => 'nine', :string_foo3 => 'ten') @db.pool.times_connection_acquired.must_equal 3 extds = @ds.extension(:synchronize_sql) extds.insert_sql(:string_foo1 => 'eight', :string_foo2 => 'nine', :string_foo3 => 'ten') @db.pool.times_connection_acquired.must_equal 1 end it 'checks out an extra connection on update_sql if there are no strings' do @ds.where(:numeric_foo => [1, 2, 3, 4, 5]).update_sql(:numeric_foo => 99) @db.pool.times_connection_acquired.must_equal 0 extds = @ds.extension(:synchronize_sql) extds.where(:numeric_foo => [1, 2, 3, 4, 5]).update_sql(:numeric_foo => 99) @db.pool.times_connection_acquired.must_equal 1 end it 'checks out just one connection on update_sql if there are multiple strings' do @ds.where(:numeric_foo => [1, 2, 3, 4, 5]).update_sql(:string_foo1 => 'eight', :string_foo2 => 'nine', :string_foo3 => 'ten') @db.pool.times_connection_acquired.must_equal 3 extds = @ds.extension(:synchronize_sql) extds.where(:numeric_foo => [1, 2, 3, 4, 5]).update_sql(:string_foo1 => 'eight', :string_foo2 => 'nine', :string_foo3 => 'ten') @db.pool.times_connection_acquired.must_equal 1 end it 'checks out an extra connection on delete_sql if there are no strings' do @ds.where(:numeric_foo => [1, 2, 3]).delete_sql @db.pool.times_connection_acquired.must_equal 0 extds = @ds.extension(:synchronize_sql) extds.where(:numeric_foo => [1, 2, 3]).delete_sql @db.pool.times_connection_acquired.must_equal 1 end it 'checks out just one connection on delete_sql if there are multiple strings' do @ds.where(:string_foo => ['one', 'two', 'three', 'four']).delete_sql @db.pool.times_connection_acquired.must_equal 4 extds = @ds.extension(:synchronize_sql) extds.where(:string_foo => ['one', 'two', 'three', 'four']).delete_sql @db.pool.times_connection_acquired.must_equal 1 end it 'checks out an extra connection on select_sql if there are no strings' do @ds.where(:numeric_foo => [1, 2, 3]).select_sql @db.pool.times_connection_acquired.must_equal 0 extds = @ds.extension(:synchronize_sql) extds.where(:numeric_foo => [1, 2, 3]).select_sql @db.pool.times_connection_acquired.must_equal 1 end it 'checks out just one connection on select_sql if there are multiple strings' do @ds.where(:string_foo => ['one', 'two', 'three', 'four']).select_sql @db.pool.times_connection_acquired.must_equal 4 extds = @ds.extension(:synchronize_sql) extds.where(:string_foo => ['one', 'two', 'three', 'four']).select_sql @db.pool.times_connection_acquired.must_equal 1 end it 'checks out an extra connection on fetch if there are no strings' do @db.fetch('SELECT * FROM tab1 WHERE numeric_foo IN (?, ?, ?, ?)', 1, 2, 3, 4).select_sql @db.pool.times_connection_acquired.must_equal 0 @db.extension(:synchronize_sql) @db.fetch('SELECT * FROM tab1 WHERE numeric_foo IN (?, ?, ?, ?)', 1, 2, 3, 4).select_sql @db.pool.times_connection_acquired.must_equal 1 end it 'checks out just one connection on fetch if there are multiple strings' do @db.fetch('SELECT * FROM tab1 WHERE string_foo IN (?, ?, ?, ?)', 'one', 'two', 'three', 'four').select_sql @db.pool.times_connection_acquired.must_equal 4
@db.extension(:synchronize_sql) @db.fetch('SELECT * FROM tab1 WHERE string_foo IN (?, ?, ?, ?)', 'one', 'two', 'three', 'four').select_sql @db.pool.times_connection_acquired.must_equal 1 end end ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/table_select_spec.rb��������������������������������������������������0000664�0000000�0000000�00000005710�14342141206�0022221�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel::Plugins::TableSelect" do before do @Album = Class.new(Sequel::Model(Sequel.mock[:albums])) end it "should add a table.* selection to existing dataset without explicit selection" do @Album.plugin :table_select @Album.dataset.sql.must_equal 'SELECT albums.* FROM albums' @Album.dataset = :albs @Album.dataset.sql.must_equal 'SELECT albs.* FROM albs' @Album.dataset = Sequel.identifier(:albs) @Album.dataset.sql.must_equal 'SELECT albs.* FROM albs' end it "should handle qualified tables" do @Album.dataset = Sequel.qualify(:s2, :albums) @Album.plugin :table_select @Album.dataset.sql.must_equal 'SELECT s2.albums.* FROM s2.albums' end it "should handle aliases" do @Album.dataset = Sequel.as(:albums, :b) @Album.plugin :table_select @Album.dataset.sql.must_equal 'SELECT b.* FROM albums AS b' @Album.dataset = @Album.db[:albums].from_self @Album.dataset.sql.must_equal 'SELECT t1.* FROM (SELECT * FROM albums) AS t1' @Album.dataset = Sequel.as(@Album.db[:albums], :b) @Album.dataset.sql.must_equal 'SELECT b.* FROM (SELECT * FROM albums) AS b' end with_symbol_splitting "should handle splittable symbols" do @Album.dataset = :albums___a @Album.plugin :table_select @Album.dataset.sql.must_equal 'SELECT a.* FROM albums AS a' @Album.dataset = :s__albums___a @Album.dataset.sql.must_equal 'SELECT a.* FROM s.albums AS a' @Album.dataset = :s__albums @Album.dataset.sql.must_equal 'SELECT s.albums.* FROM s.albums' end it "should not add a table.* selection on existing dataset with explicit selection" do @Album.dataset = @Album.dataset.select(:name) @Album.plugin :table_select @Album.dataset.sql.must_equal 'SELECT name FROM albums' @Album.dataset = @Album.dataset.select(:name, :artist) @Album.dataset.sql.must_equal 'SELECT name, artist FROM albums' end it "should add a table.* selection on existing dataset with subquery" do @Album.dataset = @Album.db.from(:a1, :a2).from_self(:alias=>:foo) @Album.plugin :table_select @Album.dataset.sql.must_equal 'SELECT foo.* FROM (SELECT * FROM a1, a2) AS foo' @Album.dataset = @Album.db.from(:a1).cross_join(:a2).from_self(:alias=>:foo) @Album.dataset.sql.must_equal 'SELECT foo.* FROM (SELECT * FROM a1 CROSS JOIN a2) AS foo' end it "should work with implicit subqueries used for joined datasets" do @Album.dataset = @Album.db.from(:a1, :a2) @Album.plugin :table_select @Album.dataset.sql.must_equal 'SELECT a1.* FROM (SELECT * FROM a1, a2) AS a1' @Album.dataset = @Album.db.from(:a1).cross_join(:a2) 
@Album.dataset.sql.must_equal 'SELECT a1.* FROM (SELECT * FROM a1 CROSS JOIN a2) AS a1' end it "works correctly when loaded on model without a dataset" do c = Class.new(Sequel::Model) c.plugin :table_select sc = Class.new(c) sc.dataset = :a sc.dataset.sql.must_equal "SELECT a.* FROM a" end end ��������������������������������������������������������sequel-5.63.0/spec/extensions/tactical_eager_loading_spec.rb����������������������������������������0000664�0000000�0000000�00000062062�14342141206�0024222�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "tactical_eager_loading plugin" do def sql_match(*args) sqls = DB.sqls sqls.length.must_equal args.length sqls.zip(args).each do |is, should| if should.is_a?(Regexp) is.must_match should else is.must_equal should end end end attr_reader :ts before do class ::TacticalEagerLoadingModel < Sequel::Model(:t) plugin :tactical_eager_loading columns :id, :parent_id many_to_one :parent, :class=>self one_to_many :children, :class=>self, :key=>:parent_id set_dataset dataset.with_fetch(proc do |sql| if sql !~ /WHERE/ [{:id=>1, :parent_id=>101}, {:id=>2, :parent_id=>102}, {:id=>101, :parent_id=>nil}, {:id=>102, :parent_id=>nil}] elsif sql =~ /WHERE.*\bid = (\d+)/ [{:id=>$1.to_i, :parent_id=>nil}] elsif sql =~ /WHERE.*\bid IN \(([\d, ]*)\)/ $1.split(', ').map{|x| {:id=>x.to_i, :parent_id=>nil}} elsif sql =~ /WHERE.*\bparent_id = (\d+)/ {:id=>$1.to_i - 100, :parent_id=>$1.to_i} if $1.to_i > 100 elsif sql =~ /WHERE.*\bparent_id IN \(([\d, ]*)\)/ $1.split(', ').map{|x| {:id=>x.to_i - 100, :parent_id=>x.to_i} if x.to_i > 100}.compact end end) end @c = ::TacticalEagerLoadingModel @ds = TacticalEagerLoadingModel.dataset DB.reset @ts = @c.all sql_match('SELECT * FROM t') end after do Object.send(:remove_const, :TacticalEagerLoadingModel) sql_match end it "Dataset#all should set the retrieved_by and retrieved_with attributes" do ts.map{|x| [x.retrieved_by, x.retrieved_with]}.must_equal [[@ds,ts], [@ds,ts], [@ds,ts], [@ds,ts]] end it "Dataset#all shouldn't raise an error if a Sequel::Model instance is not returned" do @c.naked.all sql_match('SELECT * FROM t') end it "association getter methods should eagerly load the association if the association isn't cached" do ts.map{|x| x.parent}.must_equal [ts[2], ts[3], nil, nil] sql_match(/\ASELECT \* FROM t WHERE \(t\.id IN \(10[12], 10[12]\)\)\z/) ts.map{|x| x.children}.must_equal [[], [], [ts[0]], [ts[1]]] sql_match(/\ASELECT \* FROM t WHERE \(t\.parent_id IN/) end it "association getter methods should not eagerly load the association if the association is cached" do ts.map{|x| x.parent}.must_equal [ts[2], ts[3], nil, nil] sql_match(/\ASELECT \* FROM t WHERE \(t\.id IN \(10[12], 10[12]\)\)\z/) @c.dataset = @c.dataset.with_extend{private; def eager_load(*) raise end} ts.map{|x| x.parent}.must_equal [ts[2], ts[3], nil, nil] end it "association getter methods should not eagerly load the association if a block is given" do ts.map{|x| x.parent{|ds| ds}}.must_equal [ts[2], ts[3], nil, nil] sql_match('SELECT * FROM t WHERE (t.id = 101) LIMIT 1', 'SELECT * FROM t WHERE (t.id = 102) LIMIT 1') end it "association getter methods should not eagerly load the association if a callback proc is 
given" do ts.map{|x| x.parent(:callback=>proc{|ds| ds})}.must_equal [ts[2], ts[3], nil, nil] sql_match('SELECT * FROM t WHERE (t.id = 101) LIMIT 1', 'SELECT * FROM t WHERE (t.id = 102) LIMIT 1') end it "association getter methods should not eagerly load the association if :reload=>true is passed" do ts.map{|x| x.parent(:reload=>true)}.must_equal [ts[2], ts[3], nil, nil] sql_match('SELECT * FROM t WHERE id = 101', 'SELECT * FROM t WHERE id = 102') end it "association getter methods should eagerly reload the association if :eager_reload=>true is passed" do ts.first.parent(:reload=>true) sql_match('SELECT * FROM t WHERE id = 101') ts.map{|x| x.associations.fetch(:parent, 1)}.must_equal [ts[2], 1, 1, 1] ts.first.parent(:eager_reload=>true) sql_match(/\ASELECT \* FROM t WHERE \(t\.id IN \(10[12], 10[12]\)\)\z/) ts.map{|x| x.associations.fetch(:parent, 1)}.must_equal [ts[2], ts[3], nil, nil] end it "association getter methods should not clear associations for objects that already have a cached association" do ts.first.parent(:reload=>true) sql_match('SELECT * FROM t WHERE id = 101') ts.map{|x| x.associations.fetch(:parent, 1)}.must_equal [ts[2], 1, 1, 1] ts[1].associations.delete(:parent) ts[1].parent sql_match(/\ASELECT \* FROM t WHERE \(t\.id IN \(102\)\)\z/) ts.map{|x| x.associations.fetch(:parent, 1)}.must_equal [ts[2], ts[3], nil, nil] end it "association getter methods should support eagerly loading dependent associations via :eager" do parents = ts.map{|x| x.parent(:eager=>:children)} sql_match(/\ASELECT \* FROM t WHERE \(t\.id IN \(10[12], 10[12]\)\)\z/, /\ASELECT \* FROM t WHERE \(t\.parent_id IN/) parents.must_equal [ts[2], ts[3], nil, nil] parents[0..1].map{|x| x.children}.must_equal [[ts[0]], [ts[1]]] end it "association getter methods should support eager callbacks via :eager" do parents = ts.map{|x| x.parent(:eager=>proc{|ds| ds.where{name > 'M'}.eager(:children)})} sql_match(/\ASELECT \* FROM t WHERE \(\(t\.id IN \(10[12], 10[12]\)\) AND \(name > 'M'\)\)\z/, /\ASELECT \* FROM t WHERE \(t\.parent_id IN/) parents.must_equal [ts[2], ts[3], nil, nil] parents[0..1].map{|x| x.children}.must_equal [[ts[0]], [ts[1]]] end it "should not eager load when association uses :allow_eager=>false option" do @c.many_to_one :parent, :clone=>:parent, :allow_eager=>false @c.one_to_many :children, :clone=>:children, :allow_eager=>false ts.map{|x| x.parent}.must_equal [ts[2], ts[3], nil, nil] sql_match('SELECT * FROM t WHERE id = 101', 'SELECT * FROM t WHERE id = 102') ts.map{|x| x.children}.must_equal [[], [], [ts[0]], [ts[1]]] sql_match('SELECT * FROM t WHERE (t.parent_id = 1)', 'SELECT * FROM t WHERE (t.parent_id = 2)', 'SELECT * FROM t WHERE (t.parent_id = 101)', 'SELECT * FROM t WHERE (t.parent_id = 102)') end it "should handle loading single_table_inheritance after tactical_eager_loading and loading associations defined in parent and subclasses" do c = Class.new(@c) c.many_to_one :parent2, :class=>@c, :key=>:parent_id c.plugin :single_table_inheritance, nil objs = @c.dataset.with_row_proc(proc{|r| (r[:parent_id] == 101 ? c : @c).call(r)}).all{|x| x.parent2 if x.is_a?(c)} sql_match('SELECT * FROM t', 'SELECT * FROM t WHERE (t.id IN (101))') objs[0].associations.keys.must_equal [:parent2] objs[1].associations.keys.must_equal [] objs = @c.dataset.with_row_proc(proc{|r| (r[:parent_id] == 101 ? 
c : @c).call(r)}).all{|x| x.parent} sql_match('SELECT * FROM t', 'SELECT * FROM t WHERE (t.id IN (101, 102))') objs[0].associations.keys.must_equal [:parent] objs[1].associations.keys.must_equal [:parent] end it "should handle case where an association is defined in a subclass when loading single_table_inheritance before tactical_eager_loading" do Object.send(:remove_const, :TacticalEagerLoadingModel) class ::TacticalEagerLoadingModel < Sequel::Model(:t) plugin :single_table_inheritance, nil plugin :tactical_eager_loading columns :id, :parent_id many_to_one :parent, :class=>self one_to_many :children, :class=>self, :key=>:parent_id set_dataset dataset.with_fetch(proc do |sql| if sql !~ /WHERE/ [{:id=>1, :parent_id=>101}, {:id=>2, :parent_id=>102}, {:id=>101, :parent_id=>nil}, {:id=>102, :parent_id=>nil}] elsif sql =~ /WHERE.*\bid = (\d+)/ [{:id=>$1.to_i, :parent_id=>nil}] elsif sql =~ /WHERE.*\bid IN \(([\d, ]*)\)/ $1.split(', ').map{|x| {:id=>x.to_i, :parent_id=>nil}} elsif sql =~ /WHERE.*\bparent_id = (\d+)/ {:id=>$1.to_i - 100, :parent_id=>$1.to_i} if $1.to_i > 100 elsif sql =~ /WHERE.*\bparent_id IN \(([\d, ]*)\)/ $1.split(', ').map{|x| {:id=>x.to_i - 100, :parent_id=>x.to_i} if x.to_i > 100}.compact end end) end @c = ::TacticalEagerLoadingModel @ds = TacticalEagerLoadingModel.dataset DB.reset c = Class.new(@c) c.many_to_one :parent2, :class=>@c, :key=>:parent_id objs = @c.dataset.with_row_proc(proc{|r| (r[:parent_id] == 101 ? c : @c).call(r)}).all{|x| x.parent2 if x.is_a?(c)} sql_match('SELECT * FROM t', 'SELECT * FROM t WHERE (t.id IN (101))') objs[0].associations.keys.must_equal [:parent2] objs[1].associations.keys.must_equal [] objs = @c.dataset.with_row_proc(proc{|r| (r[:parent_id] == 101 ? c : @c).call(r)}).all{|x| x.parent} sql_match('SELECT * FROM t', 'SELECT * FROM t WHERE (t.id IN (101, 102))') objs[0].associations.keys.must_equal [:parent] objs[1].associations.keys.must_equal [:parent] end it "association getter methods should not eagerly load the association if an instance is frozen" do ts.first.freeze ts.map{|x| x.parent}.must_equal [ts[2], ts[3], nil, nil] sql_match('SELECT * FROM t WHERE id = 101', 'SELECT * FROM t WHERE (t.id IN (102))') ts.map{|x| x.children}.must_equal [[], [], [ts[0]], [ts[1]]] sql_match('SELECT * FROM t WHERE (t.parent_id = 1)', /\ASELECT \* FROM t WHERE \(t\.parent_id IN/) ts.map{|x| x.parent}.must_equal [ts[2], ts[3], nil, nil] sql_match('SELECT * FROM t WHERE id = 101') ts.map{|x| x.children}.must_equal [[], [], [ts[0]], [ts[1]]] sql_match('SELECT * FROM t WHERE (t.parent_id = 1)') end it "#marshallable should make marshalling not fail" do Marshal.dump(ts.map{|x| x.marshallable!}) end end describe "tactical_eager_loading plugin eager_graph_support" do before do @c = Class.new(Sequel::Model) @c.class_eval do set_dataset DB[:t] columns :id, :parent_id plugin :tactical_eager_loading many_to_one :parent, :class=>self one_to_many :children, :class=>self, :key=>:parent_id end DB.reset end it "should allow eager loading of associated objects from one_to_many associated objects retrieved via eager_graph" do a = @c.eager_graph(:children). 
with_fetch([ {:id=>1, :parent_id=>nil, :children_id=>3, :children_parent_id=>1}, {:id=>1, :parent_id=>nil, :children_id=>4, :children_parent_id=>1}, {:id=>2, :parent_id=>nil, :children_id=>5, :children_parent_id=>2} ]).all @c.db.sqls.must_equal ["SELECT t.id, t.parent_id, children.id AS children_id, children.parent_id AS children_parent_id FROM t LEFT OUTER JOIN t AS children ON (children.parent_id = t.id)"] a.must_equal [@c.load(:id=>1, :parent_id=>nil), @c.load(:id=>2, :parent_id=>nil)] a.map(&:children).must_equal [ [@c.load(:id=>3, :parent_id=>1), @c.load(:id=>4, :parent_id=>1)], [@c.load(:id=>5, :parent_id=>2)]] @c.db.sqls.must_equal [] @c.dataset = @c.dataset.with_fetch([[{:id=>6, :parent_id=>3}, {:id=>7, :parent_id=>4}, {:id=>8, :parent_id=>5}], [{:id=>9, :parent_id=>6}, {:id=>10, :parent_id=>7}, {:id=>11, :parent_id=>8}]]) a.map(&:children).map{|v| v.map(&:children)}.must_equal [ [[@c.load(:id=>6, :parent_id=>3)], [@c.load(:id=>7, :parent_id=>4)]], [[@c.load(:id=>8, :parent_id=>5)]]] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.parent_id IN (3, 4, 5))"] a.map(&:children).map{|v| v.map(&:children).map{|v1| v1.map(&:children)}}.must_equal [ [[[@c.load(:id=>9, :parent_id=>6)]], [[@c.load(:id=>10, :parent_id=>7)]]], [[[@c.load(:id=>11, :parent_id=>8)]]]] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.parent_id IN (6, 7, 8))"] end it "should allow eager loading of associated objects from many_to_one associated objects retrieved via eager_graph" do a = @c.eager_graph(:parent). with_fetch([ {:id=>9, :parent_id=>6, :parent_id_0=>6, :parent_parent_id=>3}, {:id=>10, :parent_id=>7, :parent_id_0=>7, :parent_parent_id=>4}, {:id=>11, :parent_id=>8, :parent_id_0=>8, :parent_parent_id=>5} ]).all @c.db.sqls.must_equal ["SELECT t.id, t.parent_id, parent.id AS parent_id_0, parent.parent_id AS parent_parent_id FROM t LEFT OUTER JOIN t AS parent ON (parent.id = t.parent_id)"] a.must_equal [@c.load(:id=>9, :parent_id=>6), @c.load(:id=>10, :parent_id=>7), @c.load(:id=>11, :parent_id=>8)] a.map(&:parent).must_equal [@c.load(:id=>6, :parent_id=>3), @c.load(:id=>7, :parent_id=>4), @c.load(:id=>8, :parent_id=>5)] @c.db.sqls.must_equal [] @c.dataset = @c.dataset.with_fetch([[{:id=>5, :parent_id=>2}, {:id=>4, :parent_id=>nil}, {:id=>3, :parent_id=>1}], [{:id=>2, :parent_id=>nil}, {:id=>1, :parent_id=>nil}]]) a.map(&:parent).map(&:parent).must_equal [@c.load(:id=>3, :parent_id=>1), @c.load(:id=>4, :parent_id=>nil), @c.load(:id=>5, :parent_id=>2)] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.id IN (3, 4, 5))"] a.map(&:parent).map(&:parent).map(&:parent).must_equal [@c.load(:id=>1, :parent_id=>nil), nil, @c.load(:id=>2, :parent_id=>nil)] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.id IN (2, 1))"] end it "should allow eager loading of associated objects when using chained one_to_many associations" do a = @c.eager_graph(:children=>:children). 
with_fetch([ {:id=>1, :parent_id=>nil, :children_id=>3, :children_parent_id=>1, :children_0_id=>6, :children_0_parent_id=>3}, {:id=>1, :parent_id=>nil, :children_id=>4, :children_parent_id=>1, :children_0_id=>7, :children_0_parent_id=>4}, {:id=>2, :parent_id=>nil, :children_id=>5, :children_parent_id=>2, :children_0_id=>8, :children_0_parent_id=>5} ]).all @c.db.sqls.must_equal ["SELECT t.id, t.parent_id, children.id AS children_id, children.parent_id AS children_parent_id, children_0.id AS children_0_id, children_0.parent_id AS children_0_parent_id FROM t LEFT OUTER JOIN t AS children ON (children.parent_id = t.id) LEFT OUTER JOIN t AS children_0 ON (children_0.parent_id = children.id)"] a.must_equal [@c.load(:id=>1, :parent_id=>nil), @c.load(:id=>2, :parent_id=>nil)] a.map(&:children).must_equal [ [@c.load(:id=>3, :parent_id=>1), @c.load(:id=>4, :parent_id=>1)], [@c.load(:id=>5, :parent_id=>2)]] a.map(&:children).map{|v| v.map(&:children)}.must_equal [ [[@c.load(:id=>6, :parent_id=>3)], [@c.load(:id=>7, :parent_id=>4)]], [[@c.load(:id=>8, :parent_id=>5)]]] @c.db.sqls.must_equal [] @c.dataset = @c.dataset.with_fetch([{:id=>9, :parent_id=>6}, {:id=>10, :parent_id=>7}, {:id=>11, :parent_id=>8}]) a.map(&:children).map{|v| v.map(&:children).map{|v1| v1.map(&:children)}}.must_equal [ [[[@c.load(:id=>9, :parent_id=>6)]], [[@c.load(:id=>10, :parent_id=>7)]]], [[[@c.load(:id=>11, :parent_id=>8)]]]] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.parent_id IN (6, 7, 8))"] end it "should allow eager loading of associated objects when using chained many_to_one associations" do a = @c.eager_graph(:parent=>:parent). with_fetch([ {:id=>9, :parent_id=>6, :parent_id_0=>6, :parent_parent_id=>3, :parent_0_id=>3, :parent_0_parent_id=>1}, {:id=>10, :parent_id=>7, :parent_id_0=>7, :parent_parent_id=>4, :parent_0_id=>4, :parent_0_parent_id=>1}, {:id=>11, :parent_id=>8, :parent_id_0=>8, :parent_parent_id=>5, :parent_0_id=>5, :parent_0_parent_id=>2} ]).all @c.db.sqls.must_equal ["SELECT t.id, t.parent_id, parent.id AS parent_id_0, parent.parent_id AS parent_parent_id, parent_0.id AS parent_0_id, parent_0.parent_id AS parent_0_parent_id FROM t LEFT OUTER JOIN t AS parent ON (parent.id = t.parent_id) LEFT OUTER JOIN t AS parent_0 ON (parent_0.id = parent.parent_id)"] a.must_equal [@c.load(:id=>9, :parent_id=>6), @c.load(:id=>10, :parent_id=>7), @c.load(:id=>11, :parent_id=>8)] a.map(&:parent).must_equal [@c.load(:id=>6, :parent_id=>3), @c.load(:id=>7, :parent_id=>4), @c.load(:id=>8, :parent_id=>5)] a.map(&:parent).map(&:parent).must_equal [@c.load(:id=>3, :parent_id=>1), @c.load(:id=>4, :parent_id=>1), @c.load(:id=>5, :parent_id=>2)] @c.db.sqls.must_equal [] @c.dataset = @c.dataset.with_fetch([{:id=>2, :parent_id=>nil}, {:id=>1, :parent_id=>nil}]) a.map(&:parent).map(&:parent).map(&:parent).must_equal [@c.load(:id=>1, :parent_id=>nil), @c.load(:id=>1, :parent_id=>nil), @c.load(:id=>2, :parent_id=>nil)] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.id IN (1, 2))"] end it "should allow eager loading of associated objects when using chained many_to_one=>one_to_many associations" do a = @c.eager_graph(:parent=>:children). 
with_fetch([ {:id=>9, :parent_id=>6, :parent_id_0=>6, :parent_parent_id=>3, :children_id=>9, :children_parent_id=>6}, {:id=>10, :parent_id=>7, :parent_id_0=>7, :parent_parent_id=>4, :children_id=>10, :children_parent_id=>7}, {:id=>11, :parent_id=>8, :parent_id_0=>8, :parent_parent_id=>5, :children_id=>11, :children_parent_id=>8}, {:id=>9, :parent_id=>6, :parent_id_0=>6, :parent_parent_id=>3, :children_id=>12, :children_parent_id=>6}, {:id=>10, :parent_id=>7, :parent_id_0=>7, :parent_parent_id=>4, :children_id=>13, :children_parent_id=>7}, {:id=>11, :parent_id=>8, :parent_id_0=>8, :parent_parent_id=>5, :children_id=>14, :children_parent_id=>8} ]).all @c.db.sqls.must_equal ["SELECT t.id, t.parent_id, parent.id AS parent_id_0, parent.parent_id AS parent_parent_id, children.id AS children_id, children.parent_id AS children_parent_id FROM t LEFT OUTER JOIN t AS parent ON (parent.id = t.parent_id) LEFT OUTER JOIN t AS children ON (children.parent_id = parent.id)"] a.must_equal [@c.load(:id=>9, :parent_id=>6), @c.load(:id=>10, :parent_id=>7), @c.load(:id=>11, :parent_id=>8)] a.map(&:parent).must_equal [@c.load(:id=>6, :parent_id=>3), @c.load(:id=>7, :parent_id=>4), @c.load(:id=>8, :parent_id=>5)] a.map(&:parent).map(&:children).must_equal [ [@c.load(:id=>9, :parent_id=>6), @c.load(:id=>12, :parent_id=>6)], [@c.load(:id=>10, :parent_id=>7), @c.load(:id=>13, :parent_id=>7)], [@c.load(:id=>11, :parent_id=>8), @c.load(:id=>14, :parent_id=>8)]] @c.db.sqls.must_equal [] @c.dataset = @c.dataset.with_fetch([{:id=>19, :parent_id=>9}, {:id=>24, :parent_id=>14}]) a.map(&:parent).map(&:children).map{|v| v.map(&:children)} #.must_equal [ # [[@c.load(:id=>19, :parent_id=>9)], []], # [[], []], # [[], @c.load(:id=>24, :parent_id=>14)]] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.parent_id IN (9, 10, 11, 12, 13, 14))"] end it "should allow eager loading of associated objects when using chained one_to_many associations with partial data" do a = @c.eager_graph(:children=>:children). with_fetch([ {:id=>1, :parent_id=>nil, :children_id=>3, :children_parent_id=>1, :children_0_id=>6, :children_0_parent_id=>3}, {:id=>1, :parent_id=>nil, :children_id=>4, :children_parent_id=>1, :children_0_id=>nil, :children_0_parent_id=>nil}, {:id=>2, :parent_id=>nil, :children_id=>nil, :children_parent_id=>nil, :children_0_id=>nil, :children_0_parent_id=>nil} ]).all @c.db.sqls.must_equal ["SELECT t.id, t.parent_id, children.id AS children_id, children.parent_id AS children_parent_id, children_0.id AS children_0_id, children_0.parent_id AS children_0_parent_id FROM t LEFT OUTER JOIN t AS children ON (children.parent_id = t.id) LEFT OUTER JOIN t AS children_0 ON (children_0.parent_id = children.id)"] a.must_equal [@c.load(:id=>1, :parent_id=>nil), @c.load(:id=>2, :parent_id=>nil)] a.map(&:children).must_equal [ [@c.load(:id=>3, :parent_id=>1), @c.load(:id=>4, :parent_id=>1)], []] a.map(&:children).map{|v| v.map(&:children)}.must_equal [ [[@c.load(:id=>6, :parent_id=>3)], []], []] @c.db.sqls.must_equal [] @c.dataset = @c.dataset.with_fetch([{:id=>9, :parent_id=>6}]) a.map(&:children).map{|v| v.map(&:children).map{|v1| v1.map(&:children)}}.must_equal [ [[[@c.load(:id=>9, :parent_id=>6)]], []], []] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.parent_id IN (6))"] end it "should allow eager loading of associated objects when using chained many_to_one associations with partial data" do a = @c.eager_graph(:parent=>:parent). 
with_fetch([ {:id=>9, :parent_id=>6, :parent_id_0=>6, :parent_parent_id=>3, :parent_0_id=>3, :parent_0_parent_id=>1}, {:id=>10, :parent_id=>7, :parent_id_0=>7, :parent_parent_id=>nil, :parent_0_id=>nil, :parent_0_parent_id=>nil}, {:id=>11, :parent_id=>nil, :parent_id_0=>nil, :parent_parent_id=>nil, :parent_0_id=>nil, :parent_0_parent_id=>nil} ]).all @c.db.sqls.must_equal ["SELECT t.id, t.parent_id, parent.id AS parent_id_0, parent.parent_id AS parent_parent_id, parent_0.id AS parent_0_id, parent_0.parent_id AS parent_0_parent_id FROM t LEFT OUTER JOIN t AS parent ON (parent.id = t.parent_id) LEFT OUTER JOIN t AS parent_0 ON (parent_0.id = parent.parent_id)"] a.must_equal [@c.load(:id=>9, :parent_id=>6), @c.load(:id=>10, :parent_id=>7), @c.load(:id=>11, :parent_id=>nil)] a.map(&:parent).must_equal [@c.load(:id=>6, :parent_id=>3), @c.load(:id=>7, :parent_id=>nil), nil] a.map(&:parent).map{|v| v.parent if v}.must_equal [@c.load(:id=>3, :parent_id=>1), nil, nil] @c.db.sqls.must_equal [] @c.dataset = @c.dataset.with_fetch([{:id=>1, :parent_id=>nil}]) a.map(&:parent).map{|v| v.parent.parent if v && v.parent}.must_equal [@c.load(:id=>1, :parent_id=>nil), nil, nil] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.id IN (1))"] end it "should skip setup of eager loading when using eager_graph for association not using plugin" do c = Class.new(Sequel::Model) c.class_eval do set_dataset DB[:t] columns :id, :t_id end @c.many_to_one :f, :class=>c, :key=>:parent_id @c.one_to_many :fs, :class=>c c.many_to_one :t, :class=>@c c.one_to_many :ts, :class=>@c, :key=>:parent_id a = @c.eager_graph(:f, :parent, :fs=>:t). with_fetch([ {:id=>5, :parent_id=>4, :f_id=>4, :t_id=>20, :parent_id_0=>4, :parent_parent_id=>3, :fs_id=>5, :fs_t_id=>30, :t_0_id=>30, :t_0_parent_id=>40}, {:id=>15, :parent_id=>14, :f_id=>14, :t_id=>30, :parent_id_0=>14, :parent_parent_id=>13, :fs_id=>15, :fs_t_id=>40, :t_0_id=>40, :t_0_parent_id=>50}, ]). 
all @c.db.sqls.must_equal ["SELECT t.id, t.parent_id, f.id AS f_id, f.t_id, parent.id AS parent_id_0, parent.parent_id AS parent_parent_id, fs.id AS fs_id, fs.t_id AS fs_t_id, t_0.id AS t_0_id, t_0.parent_id AS t_0_parent_id FROM t LEFT OUTER JOIN t AS f ON (f.id = t.parent_id) LEFT OUTER JOIN t AS parent ON (parent.id = t.parent_id) LEFT OUTER JOIN t AS fs ON (fs._id = t.id) LEFT OUTER JOIN t AS t_0 ON (t_0.id = fs.t_id)"] a.must_equal [@c.load(:id=>5, :parent_id=>4), @c.load(:id=>15, :parent_id=>14)] a.map(&:f).must_equal [c.load(:id=>4, :t_id=>20), c.load(:id=>14, :t_id=>30)] a.map(&:parent).must_equal [@c.load(:id=>4, :parent_id=>3), @c.load(:id=>14, :parent_id=>13)] a.map(&:fs).must_equal [[c.load(:id=>5, :t_id=>30)], [c.load(:id=>15, :t_id=>40)]] a.map(&:fs).map{|v| v.map(&:t)}.must_equal [[@c.load(:id=>30, :parent_id=>40)], [@c.load(:id=>40, :parent_id=>50)]] @c.db.sqls.must_equal [] @c.dataset = @c.dataset.with_fetch([[{:id=>3, :parent_id=>1}, {:id=>13, :parent_id=>1}], [{:id=>1, :parent_id=>nil}], [{:id=>20, :parent_id=>nil}], [{:id=>30, :parent_id=>nil}], [{:id=>50, :parent_id=>nil}, {:id=>40, :parent_id=>nil}] ]) a.map(&:parent).map(&:parent).must_equal [@c.load(:id=>3, :parent_id=>1), @c.load(:id=>13, :parent_id=>1)] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.id IN (3, 13))"] a.map(&:parent).map(&:parent).map(&:parent).must_equal [@c.load(:id=>1, :parent_id=>nil), @c.load(:id=>1, :parent_id=>nil)] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.id IN (1))"] a.map(&:f).map(&:t).must_equal [@c.load(:id=>20, :parent_id=>nil), @c.load(:id=>30, :parent_id=>nil)] @c.db.sqls.must_equal ["SELECT * FROM t WHERE id = 20", "SELECT * FROM t WHERE id = 30"] a.map(&:fs).map{|v| v.map(&:t).map(&:parent)}.must_equal [[@c.load(:id=>40, :parent_id=>nil)], [@c.load(:id=>50, :parent_id=>nil)]] @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.id IN (40, 50))"] end it "should skip frozen objects when eager loading for model objects" do a = @c.eager_graph(:parent). 
with_fetch([ {:id=>9, :parent_id=>6, :parent_id_0=>6, :parent_parent_id=>3}, {:id=>10, :parent_id=>7, :parent_id_0=>7, :parent_parent_id=>4}, {:id=>11, :parent_id=>8, :parent_id_0=>8, :parent_parent_id=>5} ]).all @c.db.sqls.must_equal ["SELECT t.id, t.parent_id, parent.id AS parent_id_0, parent.parent_id AS parent_parent_id FROM t LEFT OUTER JOIN t AS parent ON (parent.id = t.parent_id)"] a.must_equal [@c.load(:id=>9, :parent_id=>6), @c.load(:id=>10, :parent_id=>7), @c.load(:id=>11, :parent_id=>8)] a.map(&:parent).must_equal [@c.load(:id=>6, :parent_id=>3), @c.load(:id=>7, :parent_id=>4), @c.load(:id=>8, :parent_id=>5)] @c.db.sqls.must_equal [] @c.dataset = @c.dataset.with_fetch([[{:id=>3, :parent_id=>1}, {:id=>5, :parent_id=>2}], [{:id=>4, :parent_id=>nil}]]) parents = a.map(&:parent) parents[1].freeze parents[0].parent.must_equal @c.load(:id=>3, :parent_id=>1) @c.db.sqls.must_equal ["SELECT * FROM t WHERE (t.id IN (3, 5))"] parents[1].parent.must_equal @c.load(:id=>4, :parent_id=>nil) @c.db.sqls.must_equal ["SELECT * FROM t WHERE id = 4"] parents[2].parent.must_equal @c.load(:id=>5, :parent_id=>2) @c.db.sqls.must_equal [] end end
sequel-5.63.0/spec/extensions/thread_local_timezones_spec.rb
require_relative "spec_helper" Sequel.extension :thread_local_timezones describe "Sequel thread_local_timezones extension" do after do Sequel.default_timezone = nil Sequel.thread_application_timezone = nil Sequel.thread_database_timezone = nil Sequel.thread_typecast_timezone = nil end it "should allow specifying thread local timezones via thread_*_timezone=" do Sequel.thread_application_timezone = :local Sequel.thread_database_timezone = :utc Sequel.thread_typecast_timezone = nil end it "should use thread local timezone if available" do Sequel.thread_application_timezone = :local Sequel.application_timezone.must_equal :local Sequel.thread_database_timezone = :utc Sequel.database_timezone.must_equal :utc Sequel.thread_typecast_timezone = nil Sequel.typecast_timezone.must_be_nil end it "should fallback to default timezone if no thread_local timezone" do Sequel.default_timezone = :utc Sequel.application_timezone.must_equal :utc Sequel.database_timezone.must_equal :utc Sequel.typecast_timezone.must_equal :utc end it "should use a nil thread_local_timezone if set instead of falling back to the default timezone if thread_local_timezone is set to :nil" do Sequel.typecast_timezone = :utc Sequel.thread_typecast_timezone = nil Sequel.typecast_timezone.must_equal :utc Sequel.thread_typecast_timezone = :nil Sequel.typecast_timezone.must_be_nil end it "should be thread safe" do q, q1, q2 = Queue.new, Queue.new,
Queue.new tz1, tz2 = nil, nil t1 = Thread.new do Sequel.thread_application_timezone = :utc q2.push nil q.pop tz1 = Sequel.application_timezone end t2 = Thread.new do Sequel.thread_application_timezone = :local q2.push nil q1.pop tz2 = Sequel.application_timezone end q2.pop q2.pop q.push nil q1.push nil t1.join t2.join tz1.must_equal :utc tz2.must_equal :local end end
sequel-5.63.0/spec/extensions/throw_failures_spec.rb
require_relative "spec_helper" describe "throw_failures plugin" do before do @c = Class.new(Sequel::Model(:items)) do plugin :throw_failures columns :x set_primary_key :x unrestrict_primary_key def before_create super cancel_action 'bc' if x == 2 end def before_destroy super cancel_action 'bd' if x == 2 end def validate super errors.add(:x, "3") if x == 3 end end DB.reset end it "should work normally if no exceptions are thrown/raised" do o = @c.create(:x=>1) o.must_be_kind_of @c o.valid?.must_equal true o.destroy.must_equal o end it "should work normally when not rescuing exceptions internally when calling save" do @c.new.set(:x => 2).save(:raise_on_failure=>false).must_be_nil @c.raise_on_save_failure = false @c.create(:x => 2).must_be_nil @c.load(:x => 2).destroy(:raise_on_failure=>false).must_be_nil end it "should work normally when not rescuing exceptions internally when calling valid?"
do @c.send(:define_method, :before_validation){cancel_action "bv"} @c.new(:x => 2).valid?.must_equal false end it "should raise exceptions if no catch blocks have been setup and set to raise on failure" do begin @c.create(:x => 2) rescue Sequel::HookFailed => e e.backtrace.wont_be_empty 1 end.must_equal 1 begin @c.create(:x => 3) rescue Sequel::ValidationFailed => e e.backtrace.wont_be_empty 1 end.must_equal 1 end it "should allow catching exceptions instead of rescuing them" do e = catch(Sequel::HookFailed){@c.create(:x => 2)} e.must_be_kind_of Sequel::HookFailed e.backtrace.must_be_nil e = catch(Sequel::ValidationFailed){@c.create(:x => 3)} e.must_be_kind_of Sequel::ValidationFailed e.backtrace.must_be_nil e = catch(Sequel::HookFailed){@c.load(:x => 2).destroy} e.must_be_kind_of Sequel::HookFailed e.backtrace.must_be_nil end end
sequel-5.63.0/spec/extensions/timestamps_spec.rb
require_relative "spec_helper" describe "Sequel::Plugins::Timestamps" do before do dc = Object.new dc.instance_eval do def now '2009-08-01' end singleton_class.send(:alias_method, :now, :now) end Sequel.datetime_class = dc @c = Class.new(Sequel::Model(:t)) @c.class_eval do columns :id, :created_at, :updated_at plugin :timestamps private def _save_refresh(*) end db.reset end end after do Sequel.datetime_class = Time end it "should handle validations on the timestamp fields for new objects" do @c.plugin :timestamps, :update_on_create=>true o = @c.new def o.validate errors.add(model.create_timestamp_field, 'not present') unless send(model.create_timestamp_field) errors.add(model.update_timestamp_field, 'not present') unless send(model.update_timestamp_field) end o.valid?.must_equal true end it "should set timestamp fields when skipping validations" do @c.plugin :timestamps @c.new.save(:validate=>false) @c.db.sqls.must_equal ["INSERT INTO t (created_at) VALUES ('2009-08-01')"] end it "should set the create timestamp field on creation" do o = @c.create @c.db.sqls.must_equal ["INSERT INTO t (created_at) VALUES ('2009-08-01')"] o.created_at.must_equal '2009-08-01' end it "should set the update timestamp field on update" do o = @c.load(:id=>1).save @c.db.sqls.must_equal ["UPDATE t SET updated_at = '2009-08-01' WHERE (id = 1)"] o.updated_at.must_equal '2009-08-01' end it "should leave manually set update timestamp, if :allow_manual_update was given" do o = @c.load(:id=>1).update(:updated_at=>Date.new(2016)) @c.db.sqls.must_equal ["UPDATE t SET updated_at = '2009-08-01' WHERE (id = 1)"] o.updated_at.must_equal '2009-08-01' @c.plugin :timestamps, :allow_manual_update=>true o = @c.load(:id=>1).update(:updated_at=>Date.new(2016)) @c.db.sqls.must_equal ["UPDATE t SET updated_at = '2016-01-01' WHERE (id = 1)"] o.updated_at.must_equal Date.new(2016) end it "should work with current_datetime_timestamp extension" do Sequel.datetime_class = Time @c.dataset = @c.dataset.extension(:current_datetime_timestamp) @c.create @c.db.sqls.must_equal ["INSERT INTO t (created_at) VALUES (CURRENT_TIMESTAMP)"] @c.load(:id=>1).save @c.db.sqls.must_equal ["UPDATE t SET updated_at =
CURRENT_TIMESTAMP WHERE (id = 1)"] end it "should not update the update timestamp on creation" do @c.create.updated_at.must_be_nil end it "should use the same value for the creation and update timestamps when creating if the :update_on_create option is given" do @c.plugin :timestamps, :update_on_create=>true o = @c.create @c.db.sqls.must_equal ["INSERT INTO t (created_at, updated_at) VALUES ('2009-08-01', '2009-08-01')"] o.created_at.must_be :===, o.updated_at end it "should allow specifying the create timestamp field via the :create option" do c = Class.new(Sequel::Model(:t)) c.class_eval do columns :id, :c plugin :timestamps, :create=>:c private def _save_refresh(*) end end o = c.create c.db.sqls.must_equal ["INSERT INTO t (c) VALUES ('2009-08-01')"] o.c.must_equal '2009-08-01' end it "should allow specifying the update timestamp field via the :update option" do c = Class.new(Sequel::Model(:t)) c.class_eval do columns :id, :u plugin :timestamps, :update=>:u db.reset end o = c.load(:id=>1).save c.db.sqls.must_equal ["UPDATE t SET u = '2009-08-01' WHERE (id = 1)"] o.u.must_equal '2009-08-01' end it "should not raise an error if the model doesn't have the timestamp columns" do c = Class.new(Sequel::Model(:t)) c.class_eval do columns :id, :x plugin :timestamps db.reset private def _save_refresh; self end end c.create(:x=>2) c.load(:id=>1, :x=>2).save c.db.sqls.must_equal ["INSERT INTO t (x) VALUES (2)", "UPDATE t SET x = 2 WHERE (id = 1)"] end it "should not overwrite an existing create timestamp" do o = @c.create(:created_at=>'2009-08-03') @c.db.sqls.must_equal ["INSERT INTO t (created_at) VALUES ('2009-08-03')"] o.created_at.must_equal '2009-08-03' end it "should overwrite an existing create timestamp if the :force option is used" do @c.plugin :timestamps, :force=>true o = @c.create(:created_at=>'2009-08-03') @c.db.sqls.must_equal ["INSERT INTO t (created_at) VALUES ('2009-08-01')"] o.created_at.must_equal '2009-08-01' end it "should set update timestamp to same timestamp as create timestamp when setting the create timestamp" do i = 1 Sequel.datetime_class.define_singleton_method(:now){"2009-08-0#{i+=1}"} @c.plugin :timestamps, :update_on_create=>true o = @c.create sqls = @c.db.sqls sqls.length.must_equal 1 ["INSERT INTO t (created_at, updated_at) VALUES ('2009-08-02', '2009-08-02')", "INSERT INTO t (updated_at, created_at) VALUES ('2009-08-02', '2009-08-02')"].must_include sqls.first o.created_at.must_equal '2009-08-02' o.updated_at.must_equal '2009-08-02' end it "should set update timestamp when not overriding the create timestamp" do i = 1 Sequel.datetime_class.define_singleton_method(:now){"2009-08-0#{i+=1}"} @c.plugin :timestamps, :update_on_create=>true o = @c.create(:created_at=>'2009-08-10') sqls = @c.db.sqls sqls.length.must_equal 1 ["INSERT INTO t (created_at, updated_at) VALUES ('2009-08-10', '2009-08-02')", "INSERT INTO t (updated_at, created_at) VALUES ('2009-08-02', '2009-08-10')"].must_include sqls.first o.created_at.must_equal '2009-08-10' o.updated_at.must_equal '2009-08-02' end it "should have create_timestamp_field give the create timestamp field" do @c.create_timestamp_field.must_equal :created_at @c.plugin :timestamps, :create=>:c @c.create_timestamp_field.must_equal :c end it "should have update_timestamp_field give the update timestamp field" do @c.update_timestamp_field.must_equal :updated_at @c.plugin :timestamps, :update=>:u @c.update_timestamp_field.must_equal :u end it "should have create_timestamp_overwrite?
give whether to overwrite an existing create timestamp" do @c.create_timestamp_overwrite?.must_equal false @c.plugin :timestamps, :force=>true @c.create_timestamp_overwrite?.must_equal true end it "should have set_update_timestamp_on_create? give whether to set the update timestamp on create" do @c.set_update_timestamp_on_create?.must_equal false @c.plugin :timestamps, :update_on_create=>true @c.set_update_timestamp_on_create?.must_equal true end it "should work with subclasses" do c = Class.new(@c) o = c.create o.created_at.must_equal '2009-08-01' o.updated_at.must_be_nil o = c.load(:id=>1).save o.updated_at.must_equal '2009-08-01' c.db.sqls.must_equal ["INSERT INTO t (created_at) VALUES ('2009-08-01')", "UPDATE t SET updated_at = '2009-08-01' WHERE (id = 1)"] c.create(:created_at=>'2009-08-03').created_at.must_equal '2009-08-03' c.class_eval do columns :id, :c, :u plugin :timestamps, :create=>:c, :update=>:u, :force=>true, :update_on_create=>true end c2 = Class.new(c) c2.db.reset o = c2.create o.c.must_equal '2009-08-01' o.u.must_be :===, o.c c2.db.sqls.must_equal ["INSERT INTO t (c, u) VALUES ('2009-08-01', '2009-08-01')"] c2.db.reset o = c2.load(:id=>1).save o.u.must_equal '2009-08-01' c2.db.sqls.must_equal ["UPDATE t SET u = '2009-08-01' WHERE (id = 1)"] c2.create(:c=>'2009-08-03').c.must_equal '2009-08-01' end end
sequel-5.63.0/spec/extensions/to_dot_spec.rb
require_relative "spec_helper" describe Sequel::Model, "to_dot extension" do def dot(ds) Sequel::ToDot.new(ds).instance_variable_get(:@dot)[4...-1] end before do @db = DB @ds = @db.dataset.extension(:to_dot) end it "should output a string suitable for input to the graphviz dot program" do @ds.to_dot.must_equal((<<END).strip) digraph G { 0 [label="self"]; 0 -> 1 [label=""]; 1 [label="Dataset"]; } END end it "should handle an empty dataset" do dot(@ds).must_equal [] end it "should handle WITH" do a = dot(@ds.with_extend{def supports_cte?(*) true end}.with(:a, @ds)) a.must_equal [ "1 -> 2 [label=\"with\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"Hash\"];", "3 -> 4 [label=\"name\"];", "4 [label=\":a\"];", "3 -> 5 [label=\"dataset\"];", "5 [label=\"Dataset\"];"] end it "should handle WITH with nil key option" do a = dot(@ds.with_extend{def supports_cte?(*) true end}.with(:a, @ds, nil=>:a)) a.must_equal [ "1 -> 2 [label=\"with\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"Hash\"];", "3 -> 4 [label=\"<nil>\"];", "4 [label=\":a\"];", "3 -> 5 [label=\"name\"];", "5 [label=\":a\"];", "3 -> 6 [label=\"dataset\"];", "6 [label=\"Dataset\"];"] end it "should handle DISTINCT" do dot(@ds.distinct).must_equal ["1 -> 2
[label=\"distinct\"];", "2 [label=\"Array\"];"] end it "should handle FROM" do dot(@ds.from(:a)).must_equal ["1 -> 2 [label=\"from\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\":a\"];"] end it "should handle JOIN" do dot(@ds.join(:a)).must_equal ["1 -> 2 [label=\"join\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"INNER JOIN\"];", "3 -> 4 [label=\"table\"];", "4 [label=\":a\"];"] end it "should handle WHERE" do dot(@ds.filter(true)).must_equal ["1 -> 2 [label=\"where\"];", "2 [label=\"ComplexExpression: NOOP\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"true\"];"] end it "should handle GROUP" do dot(@ds.group(:a)).must_equal ["1 -> 2 [label=\"group\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\":a\"];"] end it "should handle HAVING" do dot(@ds.having(:a)).must_equal ["1 -> 2 [label=\"having\"];", "2 [label=\":a\"];"] end it "should handle UNION" do dot(@ds.union(@ds)).must_equal ["1 -> 2 [label=\"from\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"AliasedExpression\"];", "3 -> 4 [label=\"expression\"];", "4 [label=\"Dataset\"];", "4 -> 5 [label=\"compounds\"];", "5 [label=\"Array\"];", "5 -> 6 [label=\"0\"];", "6 [label=\"Array\"];", "6 -> 7 [label=\"0\"];", "7 [label=\":union\"];", "6 -> 8 [label=\"1\"];", "8 [label=\"Dataset\"];", "6 -> 9 [label=\"2\"];", "9 [label=\"nil\"];", "3 -> 10 [label=\"alias\"];", "10 [label=\":t1\"];"] end it "should handle INTERSECT" do dot(@ds.intersect(@ds)).must_equal ["1 -> 2 [label=\"from\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"AliasedExpression\"];", "3 -> 4 [label=\"expression\"];", "4 [label=\"Dataset\"];", "4 -> 5 [label=\"compounds\"];", "5 [label=\"Array\"];", "5 -> 6 [label=\"0\"];", "6 [label=\"Array\"];", "6 -> 7 [label=\"0\"];", "7 [label=\":intersect\"];", "6 -> 8 [label=\"1\"];", "8 [label=\"Dataset\"];", "6 -> 9 [label=\"2\"];", "9 [label=\"nil\"];", "3 -> 10 [label=\"alias\"];", "10 [label=\":t1\"];"] end it "should handle EXCEPT" do dot(@ds.except(@ds)).must_equal ["1 -> 2 [label=\"from\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"AliasedExpression\"];", "3 -> 4 [label=\"expression\"];", "4 [label=\"Dataset\"];", "4 -> 5 [label=\"compounds\"];", "5 [label=\"Array\"];", "5 -> 6 [label=\"0\"];", "6 [label=\"Array\"];", "6 -> 7 [label=\"0\"];", "7 [label=\":except\"];", "6 -> 8 [label=\"1\"];", "8 [label=\"Dataset\"];", "6 -> 9 [label=\"2\"];", "9 [label=\"nil\"];", "3 -> 10 [label=\"alias\"];", "10 [label=\":t1\"];"] end it "should handle ORDER" do dot(@ds.order(:a)).must_equal ["1 -> 2 [label=\"order\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\":a\"];"] end it "should handle LIMIT and OFFSET" do dot(@ds.limit(1, 2)).must_equal ["1 -> 2 [label=\"limit\"];", "2 [label=\"1\"];", "1 -> 3 [label=\"offset\"];", "3 [label=\"2\"];"] end it "should handle FOR UPDATE" do dot(@ds.for_update).must_equal ["1 -> 2 [label=\"lock\"];", "2 [label=\":update\"];"] end it "should handle LiteralStrings" do dot(@ds.filter(Sequel.lit('a'))).must_equal ["1 -> 2 [label=\"where\"];", "2 [label=\"Sequel.lit(\\\"(a)\\\")\"];"] end it "should handle true, false, nil" do dot(@ds.select(true, false, nil)).must_equal ["1 -> 2 [label=\"select\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"true\"];", "2 -> 4 [label=\"1\"];", "4 [label=\"false\"];", "2 -> 5 [label=\"2\"];", "5 [label=\"nil\"];"] end it "should handle SQL::ComplexExpressions" do dot(@ds.filter(:a=>:b)).must_equal ["1 -> 2 [label=\"where\"];", 
"2 [label=\"ComplexExpression: =\"];", "2 -> 3 [label=\"0\"];", "3 [label=\":a\"];", "2 -> 4 [label=\"1\"];", "4 [label=\":b\"];"] end it "should handle SQL::Identifiers" do dot(@ds.select{a}).must_equal ["1 -> 2 [label=\"select\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"Identifier\"];", "3 -> 4 [label=\"value\"];", "4 [label=\":a\"];"] end it "should handle SQL::QualifiedIdentifiers" do dot(@ds.select{a[b]}).must_equal ["1 -> 2 [label=\"select\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"QualifiedIdentifier\"];", "3 -> 4 [label=\"table\"];", "4 [label=\"\\\"a\\\"\"];", "3 -> 5 [label=\"column\"];", "5 [label=\"\\\"b\\\"\"];"] end it "should handle SQL::OrderedExpressions" do dot(@ds.order(Sequel.asc(:a))).must_equal ["1 -> 2 [label=\"order\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"OrderedExpression: ASC\"];", "3 -> 4 [label=\"expression\"];", "4 [label=\":a\"];"] dot(@ds.order(Sequel.desc(:a, :nulls=>:last))).must_equal ["1 -> 2 [label=\"order\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"OrderedExpression: DESC NULLS LAST\"];", "3 -> 4 [label=\"expression\"];", "4 [label=\":a\"];"] end it "should handle SQL::AliasedExpressions" do dot(@ds.from(Sequel.as(:a, :b))).must_equal ["1 -> 2 [label=\"from\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"AliasedExpression\"];", "3 -> 4 [label=\"expression\"];", "4 [label=\":a\"];", "3 -> 5 [label=\"alias\"];", "5 [label=\":b\"];"] end it "should handle SQL::AliasedExpressions with column aliases" do dot(@ds.from(Sequel.as(:a, :b, [:c, :d]))).must_equal ["1 -> 2 [label=\"from\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"AliasedExpression\"];", "3 -> 4 [label=\"expression\"];", "4 [label=\":a\"];", "3 -> 5 [label=\"alias\"];", "5 [label=\":b\"];", "3 -> 6 [label=\"columns\"];", "6 [label=\"Array\"];", "6 -> 7 [label=\"0\"];", "7 [label=\":c\"];", "6 -> 8 [label=\"1\"];", "8 [label=\":d\"];"] end it "should handle SQL::CaseExpressions" do dot(@ds.select(Sequel.case({:a=>:b}, :c))).must_equal ["1 -> 2 [label=\"select\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"CaseExpression\"];", "3 -> 4 [label=\"conditions\"];", "4 [label=\"Array\"];", "4 -> 5 [label=\"0\"];", "5 [label=\"Array\"];", "5 -> 6 [label=\"0\"];", "6 [label=\":a\"];", "5 -> 7 [label=\"1\"];", "7 [label=\":b\"];", "3 -> 8 [label=\"default\"];", "8 [label=\":c\"];"] dot(@ds.select(Sequel.case({:a=>:b}, :c, :d))).must_equal ["1 -> 2 [label=\"select\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"CaseExpression\"];", "3 -> 4 [label=\"expression\"];", "4 [label=\":d\"];", "3 -> 5 [label=\"conditions\"];", "5 [label=\"Array\"];", "5 -> 6 [label=\"0\"];", "6 [label=\"Array\"];", "6 -> 7 [label=\"0\"];", "7 [label=\":a\"];", "6 -> 8 [label=\"1\"];", "8 [label=\":b\"];", "3 -> 9 [label=\"default\"];", "9 [label=\":c\"];"] end it "should handle SQL::Cast" do dot(@ds.select(Sequel.cast(:a, Integer))).must_equal ["1 -> 2 [label=\"select\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"Cast\"];", "3 -> 4 [label=\"expr\"];", "4 [label=\":a\"];", "3 -> 5 [label=\"type\"];", "5 [label=\"Integer\"];"] end it "should handle SQL::Function" do dot(@ds.select{a(b)}).must_equal ["1 -> 2 [label=\"select\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"Function: a\"];", "3 -> 4 [label=\"0\"];", "4 [label=\"Identifier\"];", "4 -> 5 [label=\"value\"];", "5 [label=\":b\"];", "3 -> 6 [label=\"args\"];", "6 
[label=\"Array\"];", "6 -> 7 [label=\"0\"];", "7 [label=\"Identifier\"];", "7 -> 8 [label=\"value\"];", "8 [label=\":b\"];", "3 -> 9 [label=\"opts\"];", "9 [label=\"Hash\"];"] end it "should handle SQL::Subscript" do dot(@ds.select(Sequel.subscript(:a, 1))).must_equal ["1 -> 2 [label=\"select\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"Subscript\"];", "3 -> 4 [label=\"f\"];", "4 [label=\":a\"];", "3 -> 5 [label=\"sub\"];", "5 [label=\"Array\"];", "5 -> 6 [label=\"0\"];", "6 [label=\"1\"];"] end it "should handle SQL::Function with a window" do dot(@ds.select(Sequel.function(:sum).over(:partition=>:a))).must_equal ["1 -> 2 [label=\"select\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"Function: sum\"];", "3 -> 4 [label=\"args\"];", "4 [label=\"Array\"];", "3 -> 5 [label=\"opts\"];", "5 [label=\"Hash\"];", "5 -> 6 [label=\"over\"];", "6 [label=\"Window\"];", "6 -> 7 [label=\"opts\"];", "7 [label=\"Hash\"];", "7 -> 8 [label=\"partition\"];", "8 [label=\":a\"];"] end it "should handle SQL::PlaceholderLiteralString" do dot(@ds.where(Sequel.lit("?", true))).must_equal ["1 -> 2 [label=\"where\"];", "2 [label=\"PlaceholderLiteralString: \\\"(?)\\\"\"];", "2 -> 3 [label=\"args\"];", "3 [label=\"Array\"];", "3 -> 4 [label=\"0\"];", "4 [label=\"true\"];"] dot(@ds.select(Sequel::SQL::PlaceholderLiteralString.new("(?)", [true]))).must_equal ["1 -> 2 [label=\"select\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"PlaceholderLiteralString: \\\"(?)\\\"\"];", "3 -> 4 [label=\"args\"];", "4 [label=\"Array\"];", "4 -> 5 [label=\"0\"];", "5 [label=\"true\"];"] end it "should handle JOIN ON" do dot(@ds.from(:a).join(:d, :b=>:c)).must_equal ["1 -> 2 [label=\"from\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\":a\"];", "1 -> 4 [label=\"join\"];", "4 [label=\"Array\"];", "4 -> 5 [label=\"0\"];", "5 [label=\"INNER JOIN ON\"];", "5 -> 6 [label=\"table\"];", "6 [label=\":d\"];", "5 -> 7 [label=\"on\"];", "7 [label=\"ComplexExpression: =\"];", "7 -> 8 [label=\"0\"];", "8 [label=\"QualifiedIdentifier\"];", "8 -> 9 [label=\"table\"];", "9 [label=\"\\\"d\\\"\"];", "8 -> 10 [label=\"column\"];", "10 [label=\"\\\"b\\\"\"];", "7 -> 11 [label=\"1\"];", "11 [label=\"QualifiedIdentifier\"];", "11 -> 12 [label=\"table\"];", "12 [label=\"\\\"a\\\"\"];", "11 -> 13 [label=\"column\"];", "13 [label=\"\\\"c\\\"\"];"] end it "should handle JOIN USING" do dot(@ds.from(:a).join(:d, [:c], :table_alias=>:c)).must_equal ["1 -> 2 [label=\"from\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\":a\"];", "1 -> 4 [label=\"join\"];", "4 [label=\"Array\"];", "4 -> 5 [label=\"0\"];", "5 [label=\"INNER JOIN USING\"];", "5 -> 6 [label=\"table\"];", "6 [label=\"AliasedExpression\"];", "6 -> 7 [label=\"expression\"];", "7 [label=\":d\"];", "6 -> 8 [label=\"alias\"];", "8 [label=\":c\"];", "5 -> 9 [label=\"using\"];", "9 [label=\"Array\"];", "9 -> 10 [label=\"0\"];", "10 [label=\":c\"];"] end it "should handle other types" do o = Object.new def o.inspect "blah" end dot(@ds.select(o)).must_equal ["1 -> 2 [label=\"select\"];", "2 [label=\"Array\"];", "2 -> 3 [label=\"0\"];", "3 [label=\"Unhandled: blah\"];"] end end 
sequel-5.63.0/spec/extensions/touch_spec.rb
require_relative "spec_helper" describe "Touch plugin" do before do @c = Class.new(Sequel::Model) p = proc{private; def touch_instance_value; touch_association_value; end} @Artist = Class.new(@c, &p).set_dataset(:artists) @Album = Class.new(@c, &p).set_dataset(:albums) @Artist.columns :id, :updated_at, :modified_on @Artist.one_to_many :albums, :class=>@Album, :key=>:artist_id @Album.columns :id, :updated_at, :modified_on, :artist_id, :original_album_id @Album.one_to_many :followup_albums, :class=>@Album, :key=>:original_album_id @Album.many_to_one :artist, :class=>@Artist @a = @Artist.load(:id=>1) DB.reset end it "should default to using Time.now when setting the column values for model instances" do c = Class.new(Sequel::Model).set_dataset(:a) c.plugin :touch c.columns :id, :updated_at c.load(:id=>1).touch DB.sqls.first.must_match(/UPDATE a SET updated_at = '[-0-9 :.]+' WHERE \(id = 1\)/) end it "should work with current_datetime_timestamp extension" do c = Class.new(Sequel::Model).set_dataset(:a) c.dataset = c.dataset.extension(:current_datetime_timestamp) c.plugin :touch c.columns :id, :updated_at c.load(:id=>1).touch DB.sqls.must_equal ["UPDATE a SET updated_at = CURRENT_TIMESTAMP WHERE (id = 1)"] end it "should allow #touch instance method for updating the updated_at column" do @Artist.plugin :touch @a.touch DB.sqls.must_equal ["UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (id = 1)"] end it "should have #touch take an argument for the column to touch" do @Artist.plugin :touch @a.touch(:modified_on) DB.sqls.must_equal ["UPDATE artists SET modified_on = CURRENT_TIMESTAMP WHERE (id = 1)"] end it "should be able to specify the default column to touch in the plugin call using the :column option" do @Artist.plugin :touch, :column=>:modified_on @a.touch DB.sqls.must_equal ["UPDATE artists SET modified_on = CURRENT_TIMESTAMP WHERE (id = 1)"] end it "should be able to specify the default column to touch using the touch_column model accessor" do @Artist.plugin :touch @Artist.touch_column = :modified_on @a.touch DB.sqls.must_equal ["UPDATE artists SET modified_on = CURRENT_TIMESTAMP WHERE (id = 1)"] end it "should be able to specify the associations to touch in the plugin call using the :associations option" do @Artist.plugin :touch, :associations=>:albums @a.touch DB.sqls.must_equal ["UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (id = 1)", "UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE (albums.artist_id = 1)"] end it "should clear associations after touching them when using the :associations option" do @Artist.plugin :touch, :associations=>:albums @a.associations[:albums] = [@Album.call(:id=>1)] @a.touch @a.associations[:albums].must_be_nil DB.sqls.must_equal ["UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (id = 1)", "UPDATE
albums SET updated_at = CURRENT_TIMESTAMP WHERE (albums.artist_id = 1)"] end it "should be able to give an array to the :associations option specifying multiple associations" do @Album.plugin :touch, :associations=>[:artist, :followup_albums] @Album.load(:id=>4, :artist_id=>1).touch sqls = DB.sqls sqls.shift.must_equal "UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE (id = 4)" sqls.sort.must_equal ["UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE (albums.original_album_id = 4)", "UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (artists.id = 1)"] end it "should be able to give a hash to the :associations option specifying the column to use for each association" do @Artist.plugin :touch, :associations=>{:albums=>:modified_on} @a.touch DB.sqls.must_equal ["UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (id = 1)", "UPDATE albums SET modified_on = CURRENT_TIMESTAMP WHERE (albums.artist_id = 1)"] end it "should default to using the touch_column as the default touch column for associations" do @Artist.plugin :touch, :column=>:modified_on, :associations=>:albums @a.touch DB.sqls.must_equal ["UPDATE artists SET modified_on = CURRENT_TIMESTAMP WHERE (id = 1)", "UPDATE albums SET modified_on = CURRENT_TIMESTAMP WHERE (albums.artist_id = 1)"] end it "should allow the mixed use of symbols and hashes inside an array for the :associations option" do @Album.plugin :touch, :associations=>[:artist, {:followup_albums=>:modified_on}] @Album.load(:id=>4, :artist_id=>1).touch sqls = DB.sqls sqls.shift.must_equal "UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE (id = 4)" sqls.sort.must_equal ["UPDATE albums SET modified_on = CURRENT_TIMESTAMP WHERE (albums.original_album_id = 4)", "UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (artists.id = 1)"] end it "should be able to specify the associations to touch via a touch_associations_method" do @Album.plugin :touch @Album.touch_associations(:artist, {:followup_albums=>:modified_on}) @Album.load(:id=>4, :artist_id=>1).touch sqls = DB.sqls sqls.shift.must_equal "UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE (id = 4)" sqls.sort.must_equal ["UPDATE albums SET modified_on = CURRENT_TIMESTAMP WHERE (albums.original_album_id = 4)", "UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (artists.id = 1)"] end it "should touch associated objects when destroying an object" do @Album.plugin :touch @Album.touch_associations(:artist, {:followup_albums=>:modified_on}) @Album.load(:id=>4, :artist_id=>1).destroy sqls = DB.sqls sqls.shift.must_equal "DELETE FROM albums WHERE id = 4" sqls.sort.must_equal ["UPDATE albums SET modified_on = CURRENT_TIMESTAMP WHERE (albums.original_album_id = 4)", "UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (artists.id = 1)"] end it "should be able to touch many_to_one associations" do @Album.plugin :touch, :associations=>:artist @Album.load(:id=>3, :artist_id=>4).touch DB.sqls.must_equal ["UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE (id = 3)", "UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (artists.id = 4)"] end it "should be able to touch many_to_one associations on create" do @Album.plugin :touch, :associations=>:artist @Album.plugin :skip_create_refresh @Album.create(:artist_id=>4) DB.sqls.must_equal ["INSERT INTO albums (artist_id) VALUES (4)", "UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (artists.id = 4)"] end it "should be able to touch one_to_one associations" do @Artist.one_to_one :album, :class=>@Album, :key=>:artist_id @Artist.plugin :touch,
:associations=>:album @a.touch DB.sqls.must_equal ["UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (id = 1)", "UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE (albums.artist_id = 1)"] end it "should be able to touch many_to_many associations" do @Artist.many_to_many :albums, :class=>@Album, :left_key=>:artist_id, :join_table=>:aa @Artist.plugin :touch, :associations=>:albums @a.touch DB.sqls.must_equal ["UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (id = 1)", "SELECT albums.* FROM albums INNER JOIN aa ON (aa.album_id = albums.id) WHERE (aa.artist_id = 1)", "UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE (id = 1)"] end it "should be able to touch many_through_many associations" do @Artist.plugin :many_through_many @Artist.many_through_many :albums, [[:aa, :artist_id, :album_id]], :class=>@Album @Artist.plugin :touch, :associations=>:albums @a.touch DB.sqls.must_equal ["UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (id = 1)", "SELECT albums.* FROM albums INNER JOIN aa ON (aa.album_id = albums.id) WHERE (aa.artist_id = 1)", "UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE (id = 1)"] end it "should handle touching many_to_one associations with no associated object" do @Album.plugin :touch, :associations=>:artist @Album.load(:id=>3, :artist_id=>nil).touch DB.sqls.must_equal ["UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE (id = 3)"] end it "should not update a column that doesn't exist" do @Album.plugin :touch, :column=>:x a = @Album.load(:id=>1) a.touch DB.sqls.must_equal [] a.artist_id = 1 a.touch DB.sqls.must_equal ['UPDATE albums SET artist_id = 1 WHERE (id = 1)'] end it "should raise an error if given a column argument in touch that doesn't exist" do @Artist.plugin :touch proc{@a.touch(:x)}.must_raise(Sequel::MassAssignmentRestriction) end it "should raise an Error when a nonexistent association is given" do @Artist.plugin :touch proc{@Artist.plugin :touch, :associations=>:blah}.must_raise(Sequel::Error) end it "should work correctly in subclasses" do @Artist.plugin :touch c1 = Class.new(@Artist) c1.load(:id=>4).touch DB.sqls.must_equal ["UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (id = 4)"] c1.touch_column = :modified_on c1.touch_associations :albums c1.load(:id=>1).touch DB.sqls.must_equal ["UPDATE artists SET modified_on = CURRENT_TIMESTAMP WHERE (id = 1)", "UPDATE albums SET modified_on = CURRENT_TIMESTAMP WHERE (albums.artist_id = 1)"] @a.touch DB.sqls.must_equal ["UPDATE artists SET updated_at = CURRENT_TIMESTAMP WHERE (id = 1)"] @Artist.plugin :touch, :column=>:modified_on, :associations=>:albums c2 = Class.new(@Artist) c2.load(:id=>4).touch DB.sqls.must_equal ["UPDATE artists SET modified_on = CURRENT_TIMESTAMP WHERE (id = 4)", "UPDATE albums SET modified_on = CURRENT_TIMESTAMP WHERE (albums.artist_id = 4)"] end it "should freeze touched associations when freezing model class" do @Artist.plugin :touch, :associations=>:albums @Artist.freeze @Artist.touched_associations.frozen?.must_equal true end end 
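Before the tree plugin specs that follow, here is a minimal hedged sketch of the touch plugin usage exercised above. The plugin name, options, and #touch call come from the specs; the Artist/Album classes, table layout, and an already-established database connection are illustrative assumptions.

# Hedged sketch: touch updates a timestamp column, optionally cascading
# the touch to associated records. Names below mirror the spec setup only.
class Album < Sequel::Model; end

class Artist < Sequel::Model
  one_to_many :albums, :class=>:Album, :key=>:artist_id
  # :column picks the timestamp column; :associations cascades the touch
  plugin :touch, :column=>:modified_on, :associations=>:albums
end

artist = Artist[1]
artist.touch # UPDATE artists SET modified_on = CURRENT_TIMESTAMP ...,
             # then touches the artist's albums as well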
sequel-5.63.0/spec/extensions/tree_spec.rb
require_relative "spec_helper" describe Sequel::Model, "tree plugin" do def klass(opts={}) @db = DB c = Class.new(Sequel::Model(@db[:nodes])) c.class_eval do def self.name; 'Node'; end columns :id, :name, :parent_id, :i, :pi plugin :tree, opts end c end before do @c = klass @ds = @c.dataset @o = @c.load(:id=>2, :parent_id=>1, :name=>'AA', :i=>3, :pi=>4) @db.reset end it "should define the correct associations" do @c.associations.sort_by{|x| x.to_s}.must_equal [:children, :parent] end it "should define the correct reciprocals" do @c.associations.sort_by{|x| x.to_s}.map{|x| @c.association_reflection(x).reciprocal}.must_equal [:parent, :children] end it "should define the correct associations when giving options" do klass(:children=>{:name=>:cs}, :parent=>{:name=>:p}).associations.sort_by{|x| x.to_s}.must_equal [:cs, :p] end it "should use the correct SQL for lazy associations" do @o.parent_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.id = 1) LIMIT 1' @o.children_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.parent_id = 2)' end it "should use the correct SQL for lazy associations when giving options" do o = klass(:primary_key=>:i, :key=>:pi, :order=>:name, :children=>{:name=>:cs}, :parent=>{:name=>:p}).load(:id=>2, :parent_id=>1, :name=>'AA', :i=>3, :pi=>4) o.p_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.i = 4) ORDER BY name LIMIT 1' o.cs_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.pi = 3) ORDER BY name' end it "should have parent_column give the symbol of the parent column" do @c.parent_column.must_equal :parent_id klass(:key=>:p_id).parent_column.must_equal :p_id end it "should have qualified_parent_column give a qualified identifier for the parent column" do @c.qualified_parent_column.must_equal Sequel.qualify(:nodes, :parent_id) klass(:key=>:p_id).qualified_parent_column.must_equal Sequel.qualify(:nodes, :p_id) end it "should have tree_order give the order of the association" do @c.tree_order.must_be_nil klass(:order=>:name).tree_order.must_equal :name klass(:order=>[:parent_id, :name]).tree_order.must_equal [:parent_id, :name] end it "should work correctly in subclasses" do o = Class.new(klass(:primary_key=>:i, :key=>:pi, :order=>:name, :children=>{:name=>:cs}, :parent=>{:name=>:p})).load(:id=>2, :parent_id=>1, :name=>'AA', :i=>3, :pi=>4) o.p_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.i = 4) ORDER BY name LIMIT 1' o.cs_dataset.sql.must_equal 'SELECT * FROM nodes WHERE (nodes.pi = 3) ORDER BY name' end it "should have roots return an array of the tree's roots" do @c.dataset = @c.dataset.with_fetch([{:id=>1, :parent_id=>nil, :name=>'r'}]) @c.roots.must_equal [@c.load(:id=>1, :parent_id=>nil, :name=>'r')] @db.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.parent_id IS NULL)"] @c.exclude(id: 2).roots.must_equal [@c.load(:id=>1, :parent_id=>nil, :name=>'r')] @db.sqls.must_equal ["SELECT *
FROM nodes WHERE ((id != 2) AND (nodes.parent_id IS NULL))"] end it "should have roots_dataset be a dataset representing the tree's roots" do @c.roots_dataset.sql.must_equal "SELECT * FROM nodes WHERE (nodes.parent_id IS NULL)" @c.exclude(id: 2).roots_dataset.sql.must_equal "SELECT * FROM nodes WHERE ((id != 2) AND (nodes.parent_id IS NULL))" end it "should have roots_dataset include an order if the tree has an order" do @c.tree_order = :id @c.roots_dataset.sql.must_equal "SELECT * FROM nodes WHERE (nodes.parent_id IS NULL) ORDER BY id" @c.exclude(id: 2).roots_dataset.sql.must_equal "SELECT * FROM nodes WHERE ((id != 2) AND (nodes.parent_id IS NULL)) ORDER BY id" end it "should have ancestors return the ancestors of the current node" do @c.dataset = @c.dataset.with_fetch([[{:id=>1, :parent_id=>5, :name=>'r'}], [{:id=>5, :parent_id=>nil, :name=>'r2'}]]) @o.ancestors.must_equal [@c.load(:id=>1, :parent_id=>5, :name=>'r'), @c.load(:id=>5, :parent_id=>nil, :name=>'r2')] @db.sqls.must_equal ["SELECT * FROM nodes WHERE id = 1", "SELECT * FROM nodes WHERE id = 5"] end it "should have descendants return the descendants of the current node" do @c.dataset = @c.dataset.with_fetch([[{:id=>3, :parent_id=>2, :name=>'r'}, {:id=>4, :parent_id=>2, :name=>'r2'}], [{:id=>5, :parent_id=>4, :name=>'r3'}], []]) @o.descendants.must_equal [@c.load(:id=>3, :parent_id=>2, :name=>'r'), @c.load(:id=>4, :parent_id=>2, :name=>'r2'), @c.load(:id=>5, :parent_id=>4, :name=>'r3')] @db.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.parent_id = 2)", "SELECT * FROM nodes WHERE (nodes.parent_id = 3)", "SELECT * FROM nodes WHERE (nodes.parent_id = 5)", "SELECT * FROM nodes WHERE (nodes.parent_id = 4)"] end it "should have root return the root of the current node" do @c.dataset = @c.dataset.with_fetch([[{:id=>1, :parent_id=>5, :name=>'r'}], [{:id=>5, :parent_id=>nil, :name=>'r2'}]]) @o.root.must_equal @c.load(:id=>5, :parent_id=>nil, :name=>'r2') @db.sqls.must_equal ["SELECT * FROM nodes WHERE id = 1", "SELECT * FROM nodes WHERE id = 5"] end it "should have root? return true for a root node and false for a child node" do @c.load(:parent_id => nil).root?.must_equal true @c.load(:parent_id => 1).root?.must_equal false end it "should have root? 
return false for a new node" do @c.new.root?.must_equal false end it "should have self_and_siblings return the children of the current node's parent" do @c.dataset = @c.dataset.with_fetch([[{:id=>1, :parent_id=>3, :name=>'r'}], [{:id=>7, :parent_id=>1, :name=>'r2'}, @o.values.dup]]) @o.self_and_siblings.must_equal [@c.load(:id=>7, :parent_id=>1, :name=>'r2'), @o] @db.sqls.must_equal ["SELECT * FROM nodes WHERE id = 1", "SELECT * FROM nodes WHERE (nodes.parent_id = 1)"] end it "should have self_and_siblings return the roots if the current object is a root" do h = {:id=>2, :parent_id=>nil, :name=>'AA'} @c.dataset = @c.dataset.with_fetch(h) @c.load(h).self_and_siblings.must_equal [@c.load(h)] @db.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.parent_id IS NULL)"] end it "should have siblings return the children of the current node's parent, except for the current node" do @c.dataset = @c.dataset.with_fetch([[{:id=>1, :parent_id=>3, :name=>'r'}], [{:id=>7, :parent_id=>1, :name=>'r2'}, @o.values.dup]]) @o.siblings.must_equal [@c.load(:id=>7, :parent_id=>1, :name=>'r2')] @db.sqls.must_equal ["SELECT * FROM nodes WHERE id = 1", "SELECT * FROM nodes WHERE (nodes.parent_id = 1)"] end it "should have methods work correctly with custom association names" do o = klass(:primary_key=>:i, :key=>:pi, :order=>:name, :children=>{:name=>:cs}, :parent=>{:name=>:p}).load(:id=>2, :parent_id=>1, :name=>'AA', :i=>3, :pi=>4) o.model.parent_association_name.must_equal :p o.model.children_association_name.must_equal :cs o.model.dataset = o.model.dataset.with_fetch(lambda do |sql| case sql when "SELECT * FROM nodes WHERE (nodes.i = 4) ORDER BY name LIMIT 1" {:id=>7, :parent_id=>8, :name=>'r2', :i=>4, :pi=>5} when "SELECT * FROM nodes WHERE (nodes.i = 5) ORDER BY name LIMIT 1" {:id=>10, :parent_id=>11, :name=>'r3', :i=>5, :pi=>nil} when 'SELECT * FROM nodes WHERE (nodes.pi = 3) ORDER BY name' {:id=>12, :parent_id=>13, :name=>'r4', :i=>7, :pi=>3} when 'SELECT * FROM nodes WHERE (nodes.pi = 7) ORDER BY name' {:id=>14, :parent_id=>15, :name=>'r5', :i=>8, :pi=>7} when 'SELECT * FROM nodes WHERE (nodes.pi = 8) ORDER BY name' [] when 'SELECT * FROM nodes WHERE (nodes.pi = 4) ORDER BY name' [{:id=>2, :parent_id=>1, :name=>'AA', :i=>3, :pi=>4}, {:id=>20, :parent_id=>21, :name=>'r6', :i=>9, :pi=>4}] else raise sql end end) o.db.sqls.must_equal [] o.ancestors.must_equal [o.model.load(:id=>7, :parent_id=>8, :name=>'r2', :i=>4, :pi=>5), o.model.load(:id=>10, :parent_id=>11, :name=>'r3', :i=>5, :pi=>nil)] o.db.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.i = 4) ORDER BY name LIMIT 1", "SELECT * FROM nodes WHERE (nodes.i = 5) ORDER BY name LIMIT 1"] o.descendants.must_equal [o.model.load(:id=>12, :parent_id=>13, :name=>'r4', :i=>7, :pi=>3), o.model.load(:id=>14, :parent_id=>15, :name=>'r5', :i=>8, :pi=>7)] o.db.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.pi = 3) ORDER BY name", "SELECT * FROM nodes WHERE (nodes.pi = 7) ORDER BY name", "SELECT * FROM nodes WHERE (nodes.pi = 8) ORDER BY name"] o.siblings.must_equal [o.model.load(:id=>20, :parent_id=>21, :name=>'r6', :i=>9, :pi=>4)] o.db.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.pi = 4) ORDER BY name"] end describe ":single_root option" do before do @c = klass(:single_root => true) end it "should have root class method return the root" do @c.dataset = @c.dataset.with_fetch([{:id=>1, :parent_id=>nil, :name=>'r'}]) @c.root.must_equal @c.load(:id=>1, :parent_id=>nil, :name=>'r') end it "prevents creating a second root" do @c.dataset = 
@c.dataset.with_fetch([{:id=>1, :parent_id=>nil, :name=>'r'}]) lambda { @c.create }.must_raise(Sequel::Plugins::Tree::TreeMultipleRootError) end it "errors when promoting an existing record to a second root" do @c.dataset = @c.dataset.with_fetch([{:id=>1, :parent_id=>nil, :name=>'r'}]) n = @c.load(:id => 2, :parent_id => 1) lambda { n.update(:parent_id => nil) }.must_raise(Sequel::Plugins::Tree::TreeMultipleRootError) end it "allows updating existing root" do @c.dataset = @c.dataset.with_fetch([{:id=>1, :parent_id=>nil, :name=>'r'}]) @c.root.update(:name => 'fdsa') end end end describe Sequel::Model, "tree plugin with composite keys" do def klass(opts={}) @db = DB c = Class.new(Sequel::Model(@db[:nodes])) c.class_eval do def self.name; 'Node'; end columns :id, :id2, :name, :parent_id, :parent_id2, :i, :pi set_primary_key [:id, :id2] plugin :tree, opts.merge(:key=>[:parent_id, :parent_id2]) def self.set_dataset(ds) super set_primary_key [:id, :id2] end end c end before do @c = klass @ds = @c.dataset @o = @c.load(:id=>2, :id2=>5, :parent_id=>1, :parent_id2=>6, :name=>'AA', :i=>3, :pi=>4) @db.reset end it "should use the correct SQL for lazy associations" do @o.parent_dataset.sql.must_equal 'SELECT * FROM nodes WHERE ((nodes.id = 1) AND (nodes.id2 = 6)) LIMIT 1' @o.children_dataset.sql.must_equal 'SELECT * FROM nodes WHERE ((nodes.parent_id = 2) AND (nodes.parent_id2 = 5))' end it "should have parent_column give an array of symbols of the parent column" do @c.parent_column.must_equal [:parent_id, :parent_id2] end it "should have roots return an array of the tree's roots" do @c.dataset = @c.dataset.with_fetch([{:id=>1, :parent_id=>nil, :parent_id2=>nil, :name=>'r'}]) @c.roots.must_equal [@c.load(:id=>1, :parent_id=>nil, :parent_id2=>nil, :name=>'r')] @db.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.parent_id IS NULL) OR (nodes.parent_id2 IS NULL))"] end it "should have roots_dataset be a dataset representing the tree's roots" do @c.roots_dataset.sql.must_equal "SELECT * FROM nodes WHERE ((nodes.parent_id IS NULL) OR (nodes.parent_id2 IS NULL))" end it "should have ancestors return the ancestors of the current node" do @c.dataset = @c.dataset.with_fetch([[{:id=>1, :id2=>6, :parent_id=>5, :parent_id2=>7, :name=>'r'}], [{:id=>5, :id2=>7, :parent_id=>nil, :parent_id2=>nil, :name=>'r2'}]]) @o.ancestors.must_equal [@c.load(:id=>1, :id2=>6, :parent_id=>5, :parent_id2=>7, :name=>'r'), @c.load(:id=>5, :id2=>7, :parent_id=>nil, :parent_id2=>nil, :name=>'r2')] sqls = @db.sqls sqls.length.must_equal 2 ["SELECT * FROM nodes WHERE ((id = 1) AND (id2 = 6)) LIMIT 1", "SELECT * FROM nodes WHERE ((id2 = 6) AND (id = 1)) LIMIT 1"].must_include(sqls[0]) ["SELECT * FROM nodes WHERE ((id = 5) AND (id2 = 7)) LIMIT 1", "SELECT * FROM nodes WHERE ((id2 = 7) AND (id = 5)) LIMIT 1"].must_include(sqls[1]) end it "should have descendants return the descendants of the current node" do @c.dataset = @c.dataset.with_fetch([[{:id=>3, :id2=>7, :parent_id=>2, :parent_id2=>5, :name=>'r'}, {:id=>4, :id2=>8, :parent_id=>2, :parent_id2=>5, :name=>'r2'}], [{:id=>5, :id2=>9, :parent_id=>4, :parent_id2=>8, :name=>'r3'}], []]) @o.descendants.must_equal [@c.load(:id=>3, :id2=>7, :parent_id=>2, :parent_id2=>5, :name=>'r'), @c.load(:id=>4, :id2=>8, :parent_id=>2, :parent_id2=>5, :name=>'r2'), @c.load(:id=>5, :id2=>9, :parent_id=>4, :parent_id2=>8, :name=>'r3')] @db.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.parent_id = 2) AND (nodes.parent_id2 = 5))", "SELECT * FROM nodes WHERE ((nodes.parent_id = 3) AND (nodes.parent_id2 = 
7))", "SELECT * FROM nodes WHERE ((nodes.parent_id = 5) AND (nodes.parent_id2 = 9))", "SELECT * FROM nodes WHERE ((nodes.parent_id = 4) AND (nodes.parent_id2 = 8))"] end it "should have root return the root of the current node" do @c.dataset = @c.dataset.with_fetch([[{:id=>1, :id2=>6, :parent_id=>5, :parent_id2=>7, :name=>'r'}], [{:id=>5, :id2=>7, :parent_id=>nil, :parent_id2=>nil, :name=>'r2'}]]) @o.root.must_equal @c.load(:id=>5, :id2=>7, :parent_id=>nil, :parent_id2=>nil, :name=>'r2') sqls = @db.sqls sqls.length.must_equal 2 ["SELECT * FROM nodes WHERE ((id = 1) AND (id2 = 6)) LIMIT 1", "SELECT * FROM nodes WHERE ((id2 = 6) AND (id = 1)) LIMIT 1"].must_include(sqls[0]) ["SELECT * FROM nodes WHERE ((id = 5) AND (id2 = 7)) LIMIT 1", "SELECT * FROM nodes WHERE ((id2 = 7) AND (id = 5)) LIMIT 1"].must_include(sqls[1]) end it "should have root? return true for a root node and false for a child node" do @c.load(:parent_id => nil, :parent_id2=>nil).root?.must_equal true @c.load(:parent_id => 1, :parent_id2=>nil).root?.must_equal true @c.load(:parent_id => nil, :parent_id2=>2).root?.must_equal true @c.load(:parent_id => 1, :parent_id2=>2).root?.must_equal false end it "should have root? return false for an new node" do @c.new.root?.must_equal false end it "should have self_and_siblings return the children of the current node's parent" do @c.dataset = @c.dataset.with_fetch([[{:id=>1, :id2=>6, :parent_id=>3, :parent_id2=>7, :name=>'r'}], [{:id=>7, :id2=>9, :parent_id=>1, :parent_id2=>6, :name=>'r2'}, @o.values.dup]]) @o.self_and_siblings.must_equal [@c.load(:id=>7, :id2=>9, :parent_id=>1, :parent_id2=>6, :name=>'r2'), @o] sqls = @db.sqls sqls.length.must_equal 2 ["SELECT * FROM nodes WHERE ((id = 1) AND (id2 = 6)) LIMIT 1", "SELECT * FROM nodes WHERE ((id2 = 6) AND (id = 1)) LIMIT 1"].must_include(sqls[0]) sqls[1].must_equal "SELECT * FROM nodes WHERE ((nodes.parent_id = 1) AND (nodes.parent_id2 = 6))" end it "should have siblings return the children of the current node's parent, except for the current node" do @c.dataset = @c.dataset.with_fetch([[{:id=>1, :id2=>6, :parent_id=>3, :parent_id2=>7, :name=>'r'}], [{:id=>7, :id2=>9, :parent_id=>1, :parent_id2=>6, :name=>'r2'}, @o.values.dup]]) @o.siblings.must_equal [@c.load(:id=>7, :id2=>9, :parent_id=>1, :parent_id2=>6, :name=>'r2')] sqls = @db.sqls sqls.length.must_equal 2 ["SELECT * FROM nodes WHERE ((id = 1) AND (id2 = 6)) LIMIT 1", "SELECT * FROM nodes WHERE ((id2 = 6) AND (id = 1)) LIMIT 1"].must_include(sqls[0]) sqls[1].must_equal "SELECT * FROM nodes WHERE ((nodes.parent_id = 1) AND (nodes.parent_id2 = 6))" end describe ":single_root option" do before do @c = klass(:single_root => true) end it "prevents creating a second root" do @c.dataset = @c.dataset.with_fetch([{:id=>1, :id2=>6, :parent_id=>nil, :parent_id2=>nil, :name=>'r'}]) lambda { @c.create }.must_raise(Sequel::Plugins::Tree::TreeMultipleRootError) @c.dataset = @c.dataset.with_fetch([{:id=>1, :id2=>6, :parent_id=>1, :parent_id2=>nil, :name=>'r'}]) lambda { @c.create(:parent_id2=>1) }.must_raise(Sequel::Plugins::Tree::TreeMultipleRootError) @c.dataset = @c.dataset.with_fetch([{:id=>1, :id2=>6, :parent_id=>nil, :parent_id2=>2, :name=>'r'}]) lambda { @c.create(:parent_id=>2) }.must_raise(Sequel::Plugins::Tree::TreeMultipleRootError) end it "errors when promoting an existing record to a second root" do @c.dataset = @c.dataset.with_fetch([{:id=>1, :id2=>6, :parent_id=>nil, :parent_id2=>nil, :name=>'r'}]) lambda { @c.load(:id => 2, :id2=>7, :parent_id => 1, :parent_id2=>2).update(:parent_id 
=> nil, :parent_id2=>nil) }.must_raise(Sequel::Plugins::Tree::TreeMultipleRootError) @c.dataset = @c.dataset.with_fetch([{:id=>1, :id2=>6, :parent_id=>1, :parent_id2=>nil, :name=>'r'}]) lambda { @c.load(:id => 2, :id2=>7, :parent_id => 1, :parent_id2=>2).update(:parent_id => nil) }.must_raise(Sequel::Plugins::Tree::TreeMultipleRootError) @c.dataset = @c.dataset.with_fetch([{:id=>1, :id2=>6, :parent_id=>nil, :parent_id2=>2, :name=>'r'}]) lambda { @c.load(:id => 2, :id2=>7, :parent_id => 1, :parent_id2=>2).update(:parent_id2 => nil) }.must_raise(Sequel::Plugins::Tree::TreeMultipleRootError) end it "allows updating existing root" do @c.dataset = @c.dataset.with_fetch(:id=>1, :id2=>6, :parent_id=>nil, :parent_id2=>nil, :name=>'r') @c.root.update(:name => 'fdsa') @c.dataset = @c.dataset.with_fetch(:id=>1, :id2=>6, :parent_id=>1, :parent_id2=>nil, :name=>'r') @c.root.update(:name => 'fdsa') @c.dataset = @c.dataset.with_fetch(:id=>1, :id2=>6, :parent_id=>nil, :parent_id2=>2, :name=>'r') @c.root.update(:name => 'fdsa') end it "handles case where tree_order is not an array when freezing" do @c.tree_order = :id @c.freeze @c.tree_order.must_equal :id end it "freezes tree_order if it is an array" do @c.tree_order = [:id] @c.freeze @c.tree_order.frozen?.must_equal true end end end
sequel-5.63.0/spec/extensions/typecast_on_load_spec.rb
require_relative "spec_helper" describe Sequel::Model, "TypecastOnLoad plugin" do before do @db = Sequel.mock(:fetch=>{:id=>1, :b=>"1", :y=>"0"}, :columns=>[:id, :b, :y], :numrows=>1) def @db.supports_schema_parsing?() true end def @db.schema(*args) [[:id, {}], [:y, {:type=>:boolean, :db_type=>'tinyint(1)'}], [:b, {:type=>:integer, :db_type=>'integer'}]] end @c = Class.new(Sequel::Model(@db[:items])) do attr_accessor :bset def b=(x) self.bset = true super end end end it "should call setter method with value when loading the object, for all given columns" do @c.plugin :typecast_on_load, :b o = @c.load(:id=>1, :b=>"1", :y=>"0") o.values.must_equal(:id=>1, :b=>1, :y=>"0") o.bset.must_equal true end it "should not call setter method with value when loading the object if value is not present or nil" do @c.plugin :typecast_on_load, :b o = @c.load(:id=>1, :y=>"0") o.values.must_equal(:id=>1, :y=>"0") o = @c.load(:id=>1, :b=>nil, :y=>"0") o.values.must_equal(:id=>1, :b=>nil, :y=>"0") o.bset.must_be_nil end it "should call setter method with value when reloading the object, for all given columns" do @c.plugin :typecast_on_load, :b o = @c.load(:id=>1, :b=>"1", :y=>"0") o.refresh o.values.must_equal(:id=>1, :b=>1, :y=>"0") o.bset.must_equal true end it "should call setter method with value when automatically reloading the object on creation" do @c.plugin :typecast_on_load, :b o = @c.new(:b=>"1", :y=>"0") o.save.values.must_equal(:id=>1, :b=>1, :y=>"0") o.bset.must_equal true end it "should call setter method with value when automatically reloading the object on creation via insert_select" do @c.plugin :typecast_on_load, :b @c.dataset = 
@c.dataset.with_extend{def insert_select(h) insert(h); first end} o = @c.new(:b=>"1", :y=>"0") o.save.values.must_equal(:id=>1, :b=>1, :y=>"0") o.bset.must_equal true end it "should allow setting columns separately via add_typecast_on_load_columns" do @c.plugin :typecast_on_load @c.load(:id=>1, :b=>"1", :y=>"0").values.must_equal(:id=>1, :b=>"1", :y=>"0") @c.add_typecast_on_load_columns :b @c.load(:id=>1, :b=>"1", :y=>"0").values.must_equal(:id=>1, :b=>1, :y=>"0") @c.add_typecast_on_load_columns :y @c.load(:id=>1, :b=>"1", :y=>"0").values.must_equal(:id=>1, :b=>1, :y=>false) end it "should work with subclasses" do @c.plugin :typecast_on_load @c.load(:id=>1, :b=>"1", :y=>"0").values.must_equal(:id=>1, :b=>"1", :y=>"0") c1 = Class.new(@c) @c.add_typecast_on_load_columns :b @c.load(:id=>1, :b=>"1", :y=>"0").values.must_equal(:id=>1, :b=>1, :y=>"0") c1.load(:id=>1, :b=>"1", :y=>"0").values.must_equal(:id=>1, :b=>"1", :y=>"0") c2 = Class.new(@c) @c.add_typecast_on_load_columns :y @c.load(:id=>1, :b=>"1", :y=>"0").values.must_equal(:id=>1, :b=>1, :y=>false) c2.load(:id=>1, :b=>"1", :y=>"0").values.must_equal(:id=>1, :b=>1, :y=>"0") c1.add_typecast_on_load_columns :y c1.load(:id=>1, :b=>"1", :y=>"0").values.must_equal(:id=>1, :b=>"1", :y=>false) end it "should not mark the object as modified" do @c.plugin :typecast_on_load, :b @c.load(:id=>1, :b=>"1", :y=>"0").modified?.must_equal false end it "should freeze typecast_on_load columns when freezing model class" do @c.plugin :typecast_on_load, :b @c.freeze @c.typecast_on_load_columns.frozen?.must_equal true end end
sequel-5.63.0/spec/extensions/unlimited_update_spec.rb
require_relative "spec_helper" describe "Sequel::Plugins::UnlimitedUpdate" do before do @db = Sequel.mock(:host=>'mysql', :numrows=>1) @db.extend_datasets{def quote_identifiers?; false end} @c = Class.new(Sequel::Model(@db[:test])) @c.columns :id, :name @o = @c.load(:id=>1, :name=>'a') @db.sqls end it "should remove limit from update dataset" do @o.save @db.sqls.must_equal ["UPDATE test SET name = 'a' WHERE (id = 1) LIMIT 1"] @c.plugin :unlimited_update @o.save @db.sqls.must_equal ["UPDATE test SET name = 'a' WHERE (id = 1)"] end end
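# Added for illustration (not part of the original spec file): a minimal
# usage sketch of the unlimited_update plugin exercised above, using Sequel's
# mock adapter so it runs without a real MySQL server. The albums table and
# its columns are hypothetical names chosen for this example.
require 'sequel'
db = Sequel.mock(:host=>'mysql', :numrows=>1)
db.extend_datasets{def quote_identifiers?; false end} # match the spec's unquoted SQL
album = Class.new(Sequel::Model(db[:albums]))
album.columns :id, :name
album.plugin :unlimited_update # drop the LIMIT 1 Sequel adds to updates on MySQL
album.load(:id=>1, :name=>'a').save
db.sqls # => ["UPDATE albums SET name = 'a' WHERE (id = 1)"] (no trailing LIMIT 1)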
sequel-5.63.0/spec/extensions/unused_associations_spec.rb
require_relative "spec_helper" describe "unused_associations plugin" do require 'rbconfig' require 'json' ua_file = "spec/tua-#{$$}.json" uac_file = "spec/tua-coverage-#{$$}.json" after do File.delete(ua_file) if File.file?(ua_file) File.delete(uac_file) if File.file?(uac_file) end def check(code, env={}) ruby = File.join(RbConfig::CONFIG['bindir'], RbConfig::CONFIG['RUBY_INSTALL_NAME']) runner = File.expand_path('../files/unused_associations/run_tua.rb', File.dirname(__FILE__)) input_read, input_write = IO.pipe output_read, output_write = IO.pipe Process.spawn(env, ruby, runner, :in=>input_read, :out=>output_write) input_write.write(code) input_write.close output_write.close result = output_read.read input_read.close output_read.close res = Sequel.parse_json(result) raise res if res.is_a?(String) res end it "should correctly determine which associations are unused or partially used" do ua, uao = check("TUA::O.a1") ua.must_equal [["TUA", "a2s"], ["TUA", "a3"], ["TUA", "a4s"], ["TUA", "a5"], ["TUA", "a6s"], ["TUA::SC", "a7"]] uao.must_equal [["TUA", "a1", {"read_only"=>true, "no_dataset_method"=>true}]] ua, uao = check(<<-RUBY) obj = TUA::O obj.a1 obj.a1_dataset obj.a1 = nil obj.a2s_dataset obj.remove_all_a2s obj.a3 = nil obj.add_a4(obj) obj.remove_a6(obj) obj = TUA::SC::O obj.a7 obj.a7_dataset RUBY ua.must_equal [["TUA", "a5"]] uao.must_equal [ ["TUA", "a2s", {"no_association_method"=>true, "adder"=>nil, "remover"=>nil}], ["TUA", "a3", {"no_dataset_method"=>true, "no_association_method"=>true}], ["TUA", "a4s", {"no_dataset_method"=>true, "no_association_method"=>true, "remover"=>nil, "clearer"=>nil}], ["TUA", "a6s", {"no_association_method"=>true, "adder"=>nil, "clearer"=>nil}], ["TUA::SC", "a7", {"read_only"=>true}]] end it "should use association reflection access to determine which associations are used" do ua, uao = check("TUA.association_reflection(:a1); TUA::O.a2s") ua.must_equal [["TUA", "a3"], ["TUA", "a4s"], ["TUA", "a5"], ["TUA", "a6s"], ["TUA::SC", "a7"]] uao.must_equal [ ["TUA", "a1", {"read_only"=>true, "no_dataset_method"=>true, "no_association_method"=>true}], ["TUA", "a2s", {"read_only"=>true, "no_dataset_method"=>true}]] end it "should work with :file and :coverage_file plugin options" do ua, uao = check("TUA::O.a1", 'PLUGIN_OPTS'=>Sequel.object_to_json(:coverage_file=>uac_file, :file=>ua_file)) ua.must_equal [["TUA", "a2s"], ["TUA", "a3"], ["TUA", "a4s"], ["TUA", "a5"], ["TUA", "a6s"], ["TUA::SC", "a7"]] uao.must_equal [["TUA", "a1", {"read_only"=>true, "no_dataset_method"=>true}]] Sequel.parse_json(File.binread(ua_file)).must_be_kind_of(Hash) 
File.file?(uac_file).must_equal false end it "should work without arguments when using :file and :coverage_file plugin options" do ua, uao = check("TUA::O.a1", 'PLUGIN_OPTS'=>Sequel.object_to_json(:coverage_file=>uac_file, :file=>ua_file), 'NO_COVERAGE_RESULT'=>'1', 'NO_COVERAGE_DATA'=>'1', 'NO_DATA'=>'1') ua.must_equal [["TUA", "a2s"], ["TUA", "a3"], ["TUA", "a4s"], ["TUA", "a5"], ["TUA", "a6s"], ["TUA::SC", "a7"]] uao.must_equal [["TUA", "a1", {"read_only"=>true, "no_dataset_method"=>true}]] Sequel.parse_json(File.binread(ua_file)).must_be_kind_of(Hash) File.file?(uac_file).must_equal false end it "should be able to combine information from multiple coverage runs" do ua, uao = check("TUA::O.a1", 'KEEP_COVERAGE'=>'1', 'PLUGIN_OPTS'=>Sequel.object_to_json(:coverage_file=>uac_file, :file=>ua_file)) ua.must_equal [["TUA", "a2s"], ["TUA", "a3"], ["TUA", "a4s"], ["TUA", "a5"], ["TUA", "a6s"], ["TUA::SC", "a7"]] uao.must_equal [["TUA", "a1", {"read_only"=>true, "no_dataset_method"=>true}]] Sequel.parse_json(File.binread(ua_file)).must_be_kind_of(Hash) Sequel.parse_json(File.binread(uac_file)).must_be_kind_of(Hash) ua, uao = check("TUA::O.a2s", 'KEEP_COVERAGE'=>'1', 'PLUGIN_OPTS'=>Sequel.object_to_json(:coverage_file=>uac_file, :file=>ua_file)) ua.must_equal [["TUA", "a3"], ["TUA", "a4s"], ["TUA", "a5"], ["TUA", "a6s"], ["TUA::SC", "a7"]] uao.must_equal [ ["TUA", "a1", {"read_only"=>true, "no_dataset_method"=>true}], ["TUA", "a2s", {"read_only"=>true, "no_dataset_method"=>true}]] end it "should respect association_reflection information from multiple coverage runs" do check("", 'KEEP_COVERAGE'=>'1', 'PLUGIN_OPTS'=>Sequel.object_to_json(:coverage_file=>uac_file, :file=>ua_file)) ua, uao = check("TUA.association_reflection(:a1); TUA::O.a2s", 'KEEP_COVERAGE'=>'1', 'PLUGIN_OPTS'=>Sequel.object_to_json(:coverage_file=>uac_file, :file=>ua_file)) ua.must_equal [["TUA", "a3"], ["TUA", "a4s"], ["TUA", "a5"], ["TUA", "a6s"], ["TUA::SC", "a7"]] uao.must_equal [ ["TUA", "a1", {"read_only"=>true, "no_dataset_method"=>true, "no_association_method"=>true}], ["TUA", "a2s", {"read_only"=>true, "no_dataset_method"=>true}]] end it "should not define unused associations when using :modify_associations and :file plugin options" do check(<<-RUBY, 'PLUGIN_OPTS'=>Sequel.object_to_json(:file=>ua_file), 'NO_DATA'=>'1') obj = TUA::O obj.a1 obj.a1_dataset obj.a1 = nil obj.a2s_dataset obj.remove_all_a2s obj.a3 = nil obj.add_a4(obj) obj.remove_a6(obj) RUBY assocs, meths = check(<<-RUBY, 'PLUGIN_OPTS'=>Sequel.object_to_json(:modify_associations=>true, :file=>ua_file), 'NO_COVERAGE_RESULT'=>'1') print Sequel.object_to_json([TUA.associations.sort, TUA.instance_methods]) exit RUBY assocs.must_equal %w'a1 a2s a3 a4s a6s' %w'a1 a1_dataset a1= a2s_dataset remove_all_a2s a3= add_a4 a6s_dataset remove_a6'.each do |meth| meths.must_include meth end %w'a2s add_a2 remove_a2 a3 a3_dataset a4s a4s_dataset remove_a4 remove_all_a4s a5 a5_dataset a5= a6s add_a6 remove_all_a6s'.each do |meth| meths.wont_include meth end end it "should not define unused associations when using :modify_associations and :unused_associations_data options" do check(<<-RUBY, 'PLUGIN_OPTS'=>Sequel.object_to_json(:coverage_file=>uac_file, :file=>ua_file), 'NO_COVERAGE_DATA'=>'1') obj = TUA::O obj.a1 obj.a1_dataset obj.a1 = nil obj.a2s_dataset obj.remove_all_a2s obj.a3 = nil obj.add_a4(obj) obj.remove_a6(obj) RUBY assocs, meths = check(<<-RUBY, 'PLUGIN_OPTS'=>Sequel.object_to_json(:modify_associations=>true, 
:unused_associations_data=>Sequel.parse_json(File.binread(ua_file)))) print Sequel.object_to_json([TUA.associations.sort, TUA.instance_methods]) exit RUBY assocs.must_equal %w'a1 a2s a3 a4s a6s' %w'a1 a1_dataset a1= a2s_dataset remove_all_a2s a3= add_a4 a6s_dataset remove_a6'.each do |meth| meths.must_include meth end %w'a2s add_a2 remove_a2 a3 a3_dataset a4s a4s_dataset remove_a4 remove_all_a4s a5 a5_dataset a5= a6s add_a6 remove_all_a6s'.each do |meth| meths.wont_include meth end end it "should respect :is_used association option when modifying associations" do check(<<-RUBY, 'PLUGIN_OPTS'=>Sequel.object_to_json(:file=>ua_file), 'NO_DATA'=>'1') obj = TUA::O obj.a1 obj.a1_dataset obj.a1 = nil obj.a2s_dataset obj.remove_all_a2s obj.a3 = nil obj.add_a4(obj) obj.remove_a6(obj) RUBY assocs, meths = check(<<-RUBY, 'PLUGIN_OPTS'=>Sequel.object_to_json(:modify_associations=>true, :file=>ua_file), 'NO_COVERAGE_RESULT'=>'1', 'A5_IS_USED'=>'1', 'A6S_IS_USED'=>'1') print Sequel.object_to_json([TUA.associations.sort, TUA.instance_methods]) exit RUBY assocs.must_equal %w'a1 a2s a3 a4s a5 a6s' %w'a1 a1_dataset a1= a2s_dataset remove_all_a2s a3= add_a4 a5 a5_dataset a5= a6s a6s_dataset add_a6 remove_a6 remove_all_a6s'.each do |meth| meths.must_include meth end %w'a2s add_a2 remove_a2 a3 a3_dataset a4s a4s_dataset remove_a4 remove_all_a4s'.each do |meth| meths.wont_include meth end end it "should skip associations that use :methods_module" do ua, uao = check("TUA::O.a1", 'A4S_METHODS_MODULE'=>'1') ua.must_equal [["TUA", "a2s"], ["TUA", "a3"], ["TUA", "a5"], ["TUA", "a6s"], ["TUA::SC", "a7"]] uao.must_equal [["TUA", "a1", {"read_only"=>true, "no_dataset_method"=>true}]] end it "should ignore association modification methods for read_only associations" do ua, uao = check("TUA::O.a4s", 'A4S_READ_ONLY'=>'1') ua.must_equal [["TUA", "a1"], ["TUA", "a2s"], ["TUA", "a3"], ["TUA", "a5"], ["TUA", "a6s"], ["TUA::SC", "a7"]] uao.must_equal [["TUA", "a4s", {"no_dataset_method"=>true}]] end it "should ignore missing association modification methods" do ua, uao = check("nil", 'A4S_NO_METHODS'=>'1') ua.must_equal [["TUA", "a1"], ["TUA", "a2s"], ["TUA", "a3"], ["TUA", "a5"], ["TUA", "a6s"], ["TUA::SC", "a7"]] uao.must_equal [] end end if RUBY_VERSION >= '2.5' && RUBY_ENGINE == 'ruby'
sequel-5.63.0/spec/extensions/update_or_create_spec.rb
require_relative "spec_helper" describe "Sequel::Plugins::UpdateOrCreate" do before do @db = Sequel.mock(:autoid=>proc{1}, :numrows=>1) @c = Class.new(Sequel::Model(@db[:test])) @c.plugin :update_or_create @c.columns :id, :a, :b @db.sqls end it ".update_or_create should update an existing record if one exists" do @db.fetch = [[{:id=>1, :a=>2, :b=>3}]] @c.update_or_create(:a=>2){|t| t.b = 4}.must_equal @c.load(:id=>1, :a=>2, :b=>4) @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 2) LIMIT 1", "UPDATE test SET b = 4 WHERE (id = 1)"] @db.fetch = [[{:id=>1, :a=>2, :b=>3}]] @c.update_or_create({:a=>2}, :b=>4).must_equal @c.load(:id=>1, :a=>2, :b=>4) @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 2) LIMIT 1", "UPDATE test SET 
b = 4 WHERE (id = 1)"] @db.fetch = [[{:id=>1, :a=>2, :b=>3}]] @c.update_or_create({:a=>2}, :a=>3){|t| t.b = 4}.must_equal @c.load(:id=>1, :a=>3, :b=>4) @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 2) LIMIT 1", 'UPDATE test SET a = 3, b = 4 WHERE (id = 1)'] end it ".update_or_create should create a record if an existing record does not exist" do @db.fetch = [[], [{:id=>1, :a=>1, :b=>4}]] @c.update_or_create(:a=>1){|t| t.b = 4}.must_equal @c.load(:id=>1, :a=>1, :b=>4) @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 1) LIMIT 1", "INSERT INTO test (a, b) VALUES (1, 4)", "SELECT * FROM test WHERE (id = 1) LIMIT 1"] @db.fetch = [[], [{:id=>1, :a=>1, :b=>4}]] @c.update_or_create({:a=>1}, :b=>4).must_equal @c.load(:id=>1, :a=>1, :b=>4) @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 1) LIMIT 1", "INSERT INTO test (a, b) VALUES (1, 4)", "SELECT * FROM test WHERE (id = 1) LIMIT 1"] @db.fetch = [[], [{:id=>1, :a=>3, :b=>4}]] @c.update_or_create({:a=>1}, :a=>3){|t| t.b = 4}.must_equal @c.load(:id=>1, :a=>3, :b=>4) @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 1) LIMIT 1", "INSERT INTO test (a, b) VALUES (3, 4)", "SELECT * FROM test WHERE (id = 1) LIMIT 1"] end it ".update_or_create should return an existing record even if no changes necessary" do @db.fetch = [[{:id=>1, :a=>2, :b=>3}]] @c.update_or_create(:a=>2){|t| t.b = 3}.must_equal @c.load(:id=>1, :a=>2, :b=>3) @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 2) LIMIT 1"] end it ".find_or_new should return an existing record" do @db.fetch = [[{:id=>1, :a=>2, :b=>3}]] @c.find_or_new(:a=>2){|t| t.b = 4}.must_equal @c.load(:id=>1, :a=>2, :b=>4) @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 2) LIMIT 1"] @db.fetch = [[{:id=>1, :a=>2, :b=>3}]] @c.find_or_new({:a=>2}, :b=>4).must_equal @c.load(:id=>1, :a=>2, :b=>4) @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 2) LIMIT 1"] @db.fetch = [[{:id=>1, :a=>2, :b=>3}]] @c.find_or_new({:a=>2}, :a=>3){|t| t.b = 4}.must_equal @c.load(:id=>1, :a=>3, :b=>4) @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 2) LIMIT 1"] end it ".find_or_new should return a new record if no record exists" do o = @c.find_or_new(:a=>1){|t| t.b = 4} o.must_equal @c.load(:a=>1, :b=>4) o.new?.must_equal true @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 1) LIMIT 1"] o = @c.find_or_new({:a=>1}, :b=>4) o.must_equal @c.load(:a=>1, :b=>4) o.new?.must_equal true @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 1) LIMIT 1"] o = @c.find_or_new({:a=>1}, :a=>3){|t| t.b = 4} o.must_equal @c.load(:a=>3, :b=>4) o.new?.must_equal true @db.sqls.must_equal ["SELECT * FROM test WHERE (a = 1) LIMIT 1"] end end 
sequel-5.63.0/spec/extensions/update_primary_key_spec.rb
require_relative "spec_helper" describe "Sequel::Plugins::UpdatePrimaryKey" do before do @c = Class.new(Sequel::Model(:a)) @c.plugin :update_primary_key @c.columns :a, :b def @c.set_dataset(*) super set_primary_key :a end @c.set_primary_key :a @c.unrestrict_primary_key @ds = @c.dataset DB.reset end it "should handle regular updates" do @c.dataset = @c.dataset.with_fetch([[{:a=>1, :b=>3}], [{:a=>1, :b=>4}], [{:a=>1, :b=>4}], [{:a=>1, :b=>5}], [{:a=>1, :b=>5}], [{:a=>1, :b=>6}], [{:a=>1, :b=>6}]]) @c.first.update(:b=>4) @c.all.must_equal [@c.load(:a=>1, :b=>4)] DB.sqls.must_equal ["SELECT * FROM a LIMIT 1", "UPDATE a SET b = 4 WHERE (a = 1)", "SELECT * FROM a"] @c.first.set(:b=>5).save @c.all.must_equal [@c.load(:a=>1, :b=>5)] DB.sqls.must_equal ["SELECT * FROM a LIMIT 1", "UPDATE a SET b = 5 WHERE (a = 1)", "SELECT * FROM a"] @c.first.set(:b=>6).save(:columns=>:b) @c.all.must_equal [@c.load(:a=>1, :b=>6)] DB.sqls.must_equal ["SELECT * FROM a LIMIT 1", "UPDATE a SET b = 6 WHERE (a = 1)", "SELECT * FROM a"] end it "should handle updating the primary key field with another field" do @c.dataset = @c.dataset.with_fetch([[{:a=>1, :b=>3}], [{:a=>2, :b=>4}]]) @c.first.update(:a=>2, :b=>4) @c.all.must_equal [@c.load(:a=>2, :b=>4)] sqls = DB.sqls ["UPDATE a SET a = 2, b = 4 WHERE (a = 1)", "UPDATE a SET b = 4, a = 2 WHERE (a = 1)"].must_include(sqls.slice!(1)) sqls.must_equal ["SELECT * FROM a LIMIT 1", "SELECT * FROM a"] end it "should handle updating the primary key field with another field when using composite keys" do @c = Class.new(Sequel::Model(:a)) @c.plugin :update_primary_key @c.columns :a, :b, :c @c.dataset = @c.dataset.with_fetch([[{:a=>1, :b=>3, :c=>5}], [{:a=>2, :b=>4, :c=>6}]]) @c.set_primary_key [:a, :b] @c.unrestrict_primary_key DB.reset @c.first.update(:a=>2, :b=>4, :c=>6) @c.all.must_equal [@c.load(:a=>2, :b=>4, :c=>6)] DB.sqls.must_equal ["SELECT * FROM a LIMIT 1", "UPDATE a SET a = 2, b = 4, c = 6 WHERE ((a = 1) AND (b = 3))", "SELECT * FROM a"] end it "should handle updating just the primary key field when saving changes" do @c.dataset = @c.dataset.with_fetch([[{:a=>1, :b=>3}], [{:a=>2, :b=>3}], [{:a=>2, :b=>3}], [{:a=>3, :b=>3}]]) @c.first.update(:a=>2) @c.all.must_equal [@c.load(:a=>2, :b=>3)] DB.sqls.must_equal ["SELECT * FROM a LIMIT 1", "UPDATE a SET a = 2 WHERE (a = 1)", "SELECT * FROM a"] @c.first.set(:a=>3).save(:columns=>:a) @c.all.must_equal [@c.load(:a=>3, :b=>3)] DB.sqls.must_equal ["SELECT * FROM a LIMIT 1", "UPDATE a SET a = 3 WHERE (a = 2)", "SELECT * FROM a"] end it "should handle saving after modifying the primary 
key field with another field" do @c.dataset = @c.dataset.with_fetch([[{:a=>1, :b=>3}], [{:a=>2, :b=>4}]]) @c.first.set(:a=>2, :b=>4).save @c.all.must_equal [@c.load(:a=>2, :b=>4)] sqls = DB.sqls ["UPDATE a SET a = 2, b = 4 WHERE (a = 1)", "UPDATE a SET b = 4, a = 2 WHERE (a = 1)"].must_include(sqls.slice!(1)) sqls.must_equal ["SELECT * FROM a LIMIT 1", "SELECT * FROM a"] end it "should handle saving after modifying just the primary key field" do @c.dataset = @c.dataset.with_fetch([[{:a=>1, :b=>3}], [{:a=>2, :b=>3}]]) @c.first.set(:a=>2).save @c.all.must_equal [@c.load(:a=>2, :b=>3)] sqls = DB.sqls ["UPDATE a SET a = 2, b = 3 WHERE (a = 1)", "UPDATE a SET b = 3, a = 2 WHERE (a = 1)"].must_include(sqls.slice!(1)) sqls.must_equal ["SELECT * FROM a LIMIT 1", "SELECT * FROM a"] end it "should handle saving after updating the primary key" do @c.dataset = @c.dataset.with_fetch([[{:a=>1, :b=>3}], [{:a=>2, :b=>5}]]) @c.first.update(:a=>2).update(:b=>4).set(:b=>5).save @c.all.must_equal [@c.load(:a=>2, :b=>5)] DB.sqls.must_equal ["SELECT * FROM a LIMIT 1", "UPDATE a SET a = 2 WHERE (a = 1)", "UPDATE a SET b = 4 WHERE (a = 2)", "UPDATE a SET b = 5 WHERE (a = 2)", "SELECT * FROM a"] end it "should work correctly when using the prepared_statements plugin" do @c.plugin :prepared_statements @c.dataset = @c.dataset.with_fetch([[{:a=>1, :b=>3}], [{:a=>2, :b=>4}], [{:a=>3}]]) o = @c.first o.update(:a=>2, :b=>4) @c.all.must_equal [@c.load(:a=>2, :b=>4)] sqls = DB.sqls ["UPDATE a SET a = 2, b = 4 WHERE (a = 1)", "UPDATE a SET b = 4, a = 2 WHERE (a = 1)"].must_include(sqls.slice!(1)) sqls.must_equal ["SELECT * FROM a LIMIT 1", "SELECT * FROM a"] @c.create(:a=>3) DB.sqls.must_equal ["INSERT INTO a (a) VALUES (3)", "SELECT * FROM a WHERE a = 3"] end it "should work correctly when loading the prepared_statements plugin first" do @c = Class.new(Sequel::Model(:a)) @c.plugin :prepared_statements @c.plugin :update_primary_key @c.columns :a, :b def @c.set_dataset(*) super set_primary_key :a end @c.set_primary_key :a @c.unrestrict_primary_key @ds = @c.dataset DB.reset @c.dataset = @c.dataset.with_fetch([[{:a=>1, :b=>3}], [{:a=>2, :b=>4}], [{:a=>3}]]) o = @c.first o.update(:a=>2, :b=>4) @c.all.must_equal [@c.load(:a=>2, :b=>4)] sqls = DB.sqls ["UPDATE a SET a = 2, b = 4 WHERE (a = 1)", "UPDATE a SET b = 4, a = 2 WHERE (a = 1)"].must_include(sqls.slice!(1)) sqls.must_equal ["SELECT * FROM a LIMIT 1", "SELECT * FROM a"] @c.create(:a=>3) DB.sqls.must_equal ["INSERT INTO a (a) VALUES (3)", "SELECT * FROM a WHERE a = 3"] end it "should clear the associations cache of non-many_to_one associations when changing the primary key" do @c.one_to_many :cs, :class=>@c @c.many_to_one :c, :class=>@c o = @c.new(:a=>1) o.associations[:cs] = @c.new o.associations[:c] = o2 = @c.new o.a = 2 o.associations.must_equal(:c=>o2) end it "should handle frozen instances" do o = @c.new o.a = 1 o.freeze o.pk_hash.must_equal(:a=>1) end end
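# Added for illustration (not part of the original spec file): a minimal
# usage sketch of the update_primary_key plugin exercised above, against
# Sequel's mock adapter. The items table and its columns are hypothetical.
require 'sequel'
db = Sequel.mock(:numrows=>1)
item = Class.new(Sequel::Model(db[:items]))
item.columns :code, :name
item.set_primary_key :code
item.unrestrict_primary_key # allow mass assignment of the primary key
item.plugin :update_primary_key
# The WHERE clause uses the primary key value from before the change:
item.load(:code=>1, :name=>'a').update(:code=>2)
db.sqls # => ["UPDATE items SET code = 2 WHERE (code = 1)"]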
sequel-5.63.0/spec/extensions/update_refresh_spec.rb
require_relative "spec_helper" describe "Sequel::Plugins::UpdateRefresh" do before do @db = Sequel.mock(:numrows=>1, :fetch=>{:id=>1, :name=>'b'}) @c = Class.new(Sequel::Model(@db[:test])) @ds = @c.dataset @c.columns :id, :name @c.plugin :update_refresh @db.sqls end it "should refresh the instance after updating" do o = @c.load(:id=>1, :name=>'a') o.save @db.sqls.must_equal ["UPDATE test SET name = 'a' WHERE (id = 1)", "SELECT * FROM test WHERE (id = 1) LIMIT 1"] o.name.must_equal 'b' end it "should refresh the instance after updating when the dataset supports RETURNING" do @db.extend_datasets{def supports_returning?(x) true end; def update_sql(*); sql = super; update_returning_sql(sql); sql end} @c.dataset = @db[:test] @db.sqls o = @c.load(:id=>1, :name=>'a') o.save @db.sqls.must_equal ["UPDATE test SET name = 'a' WHERE (id = 1) RETURNING *"] o.name.must_equal 'b' end it "should support specifying columns to return" do @db.extend_datasets{def supports_returning?(x) true end; def update_sql(*); sql = super; update_returning_sql(sql); sql end} @c.plugin :update_refresh, :columns => [ :a ] @c.dataset = @db[:test] @db.sqls o = @c.load(:id=>1, :name=>'a') o.save @db.sqls.must_equal ["UPDATE test SET name = 'a' WHERE (id = 1) RETURNING a"] o.name.must_equal 'b' end it "should refresh the instance after updating when returning specific columns" do @db.extend_datasets{def supports_returning?(x) true end; def update_sql(*); sql = super; update_returning_sql(sql); sql end} @c.plugin :insert_returning_select @c.dataset = @db[:test].select(:id, :name) @db.sqls o = @c.load(:id=>1, :name=>'a') o.instance_variable_set(:@this, o.this.returning(:id, :name)) o.save @db.sqls.must_equal ["UPDATE test SET name = 'a' WHERE (id = 1) RETURNING id, name"] o.name.must_equal 'b' end it "should raise error without refreshing when updating when returning specific columns modifies multiple rows" do @db.fetch = [{:id=>1, :name=>'b'}, {:id=>1, :name=>'c'}] @db.extend_datasets{def supports_returning?(x) true end; def update_sql(*); sql = super; update_returning_sql(sql); sql end} @c.plugin :insert_returning_select @c.dataset = @db[:test].select(:id, :name) @db.sqls o = @c.load(:id=>1, :name=>'a') o.instance_variable_set(:@this, o.this.returning(:id, :name)) proc{o.save}.must_raise Sequel::NoExistingObject o.values.must_equal(:id=>1, :name=>'a') @db.sqls.must_equal ["UPDATE test SET name = 'a' WHERE (id = 1) RETURNING id, name"] end it "should freeze update refresh columns when freezing model class" do @db.extend_datasets{def supports_returning?(x) true end; def update_sql(*); sql = super; update_returning_sql(sql); sql end} @c.plugin :update_refresh, :columns => [ :a ] @c.freeze @c.update_refresh_columns.frozen?.must_equal 
true end end
sequel-5.63.0/spec/extensions/uuid_spec.rb
require_relative "spec_helper" describe "Sequel::Plugins::Uuid" do before do uuid = @uuid = '57308544-4e83-47b8-b87f-6f68b987f4f9' @alt_uuid = 'd5d1ec46-5e8e-4a7b-adc9-50e76b819e19' @c = Class.new(Sequel::Model(:t)) @c.class_eval do columns :id, :uuid plugin :uuid private def _save_refresh(*) end define_method(:create_uuid) do uuid end db.reset end end it "should handle validations on the uuid field for new objects" do @c.plugin :uuid, :force=>true o = @c.new def o.validate errors.add(model.uuid_field, 'not present') unless send(model.uuid_field) end o.valid?.must_equal true end it "should set uuid field when skipping validations" do @c.plugin :uuid @c.new.save(:validate=>false) @c.db.sqls.must_equal ["INSERT INTO t (uuid) VALUES ('#{@uuid}')"] end it "should set the uuid field on creation" do o = @c.create @c.db.sqls.must_equal ["INSERT INTO t (uuid) VALUES ('#{@uuid}')"] o.uuid.must_equal @uuid end it "should allow specifying the uuid field via the :field option" do c = Class.new(Sequel::Model(:t)) c.class_eval do columns :id, :u plugin :uuid, :field=>:u private def _save_refresh(*) end end o = c.create c.db.sqls.must_equal ["INSERT INTO t (u) VALUES ('#{o.u}')"] end it "should not raise an error if the model doesn't have the uuid column" do @c.columns :id, :x @c.send(:undef_method, :uuid) @c.create(:x=>2) @c.load(:id=>1, :x=>2).save @c.db.sqls.must_equal ["INSERT INTO t (x) VALUES (2)", "UPDATE t SET x = 2 WHERE (id = 1)"] end it "should not overwrite an existing uuid value" do o = @c.create(:uuid=>@alt_uuid) @c.db.sqls.must_equal ["INSERT INTO t (uuid) VALUES ('#{@alt_uuid}')"] o.uuid.must_equal @alt_uuid end it "should overwrite an existing uuid if the :force option is used" do @c.plugin :uuid, :force=>true o = @c.create(:uuid=>@alt_uuid) @c.db.sqls.must_equal ["INSERT INTO t (uuid) VALUES ('#{@uuid}')"] o.uuid.must_equal @uuid end it "should have uuid_field give the uuid field" do @c.uuid_field.must_equal :uuid @c.plugin :uuid, :field=>:u @c.uuid_field.must_equal :u end it "should have uuid_overwrite? 
give whether to overwrite an existing uuid" do @c.uuid_overwrite?.must_equal false @c.plugin :uuid, :force=>true @c.uuid_overwrite?.must_equal true end it "should work with subclasses" do c = Class.new(@c) o = c.create o.uuid.must_equal @uuid c.db.sqls.must_equal ["INSERT INTO t (uuid) VALUES ('#{@uuid}')"] c.create(:uuid=>@alt_uuid).uuid.must_equal @alt_uuid c.class_eval do columns :id, :u plugin :uuid, :field=>:u, :force=>true end c2 = Class.new(c) c2.db.reset o = c2.create o.u.must_equal @uuid c2.db.sqls.must_equal ["INSERT INTO t (u) VALUES ('#{@uuid}')"] end end
sequel-5.63.0/spec/extensions/validate_associated_spec.rb
require_relative "spec_helper" describe "ValidatesAssociated plugin" do before do @db = Sequel.mock(:autoid=>1, :numrows=>1, :fetch=>{:id=>1, :name=>'a', :c_id=>nil}) @c = Class.new(Sequel::Model(@db[:cs])) @c.plugin :validate_associated @c.columns :id, :name, :c_id @c.one_to_many :cs, :class=>@c, :key=>:c_id @o = @c.load(:id=>1, :name=>'a') @db.sqls end it "should return nil when saving if the associated object is invalid when raise_on_save_failure is false" do @c.raise_on_save_failure = false @c.send(:define_method, :validate){|*| errors.add(:name, 'is b') if name == 'b'} o = @c.load(:id=>2, :name=>'b') @o.send(:delay_validate_associated_object, @c.association_reflection(:cs), o) @o.save.must_be_nil @o.errors[:cs].must_equal ["name is b"] o.errors[:name].must_equal ['is b'] end it "should support creating new one_to_many and one_to_one objects with presence validations on the foreign key" do @c.class_eval do plugin :validation_helpers def validate validates_presence :c_id errors.add([:name, :c_id], 'compound error') unless c_id super end end o = @c.new(:name=>'a', :c_id=>1) c = @c.new(:name=>'b') o.valid?.must_equal true c.valid?.must_equal false o.send(:delay_validate_associated_object, @c.association_reflection(:cs), c) o.valid?.must_equal true c.valid?.must_equal false @db.sqls.must_equal [] o.save @db.sqls.must_equal ["INSERT INTO cs (name, c_id) VALUES ('a', 1)", "SELECT * FROM cs WHERE (id = 1) LIMIT 1"] end it "should handle other errors when validating" do @c.class_eval do plugin :validation_helpers def validate unless c_id validates_presence :id errors.add([:name, :id], 'compound error') end super end end o = @c.new(:name=>'a', :c_id=>1) c = @c.new(:name=>'b') o.valid?.must_equal true c.valid?.must_equal false o.send(:delay_validate_associated_object, @c.association_reflection(:cs), c) o.valid?.must_equal false o.errors.on(:cs).must_equal ["id is not present", "name and id compound error"] c.valid?.must_equal false @db.sqls.must_equal [] end it "should not remove existing values from object when validating" do o = @c.load(:id=>2, :name=>'b', :c_id=>3) @o.send(:delay_validate_associated_object, @c.association_reflection(:cs), o) @o.valid?.must_equal true o.c_id.must_equal 3 end it "should not attempt to validate associated_object if the :validate=>false option is passed to save" do @c.one_to_many :cs, 
:class=>@c, :key=>:c_id @c.send(:define_method, :validate){|*| errors.add(:name, 'is b') if name == 'b'} o = @c.load(:id=>2, :name=>'b', :c_id=>3) @o.send(:delay_validate_associated_object, @c.association_reflection(:cs), o) @o.save(:validate=>false).must_equal @o @db.sqls.must_equal ["UPDATE cs SET name = 'a' WHERE (id = 1)"] end end
sequel-5.63.0/spec/extensions/validation_class_methods_spec.rb
require_relative "spec_helper" model_class = proc do |klass, &block| c = Class.new(klass) c.plugin :validation_class_methods c.class_eval(&block) if block c end describe Sequel::Model do before do @c = model_class.call Sequel::Model do def self.validates_coolness_of(attr) validates_each(attr) {|o, a, v| o.errors.add(a, 'is not cool') if v != :cool} end end end it "should freeze validation metadata when freezing model class" do @c.validates_acceptance_of(:a) @c.freeze @c.validations.frozen?.must_equal true @c.validations.values.all?(&:frozen?).must_equal true @c.validation_reflections.frozen?.must_equal true @c.validation_reflections.values.all? do |vs| vs.frozen? && vs.all? do |v| v.frozen? && v.last.frozen? end end.must_equal true end it "should respond to validations, has_validations?, and validation_reflections" do @c.must_respond_to(:validations) @c.must_respond_to(:has_validations?) @c.must_respond_to(:validation_reflections) end it "should be able to reflect on validations" do @c.validation_reflections.must_equal({}) @c.validates_acceptance_of(:a) @c.validation_reflections.must_equal(:a=>[[:acceptance, {:tag=>:acceptance, :message=>"is not accepted", :allow_nil=>true, :accept=>"1"}]]) @c.validates_presence_of(:a) @c.validation_reflections[:a].length.must_equal 2 @c.validation_reflections[:a].last.must_equal [:presence, {:tag=>:presence, :message=>"is not present"}] end it "should handle validation reflections correctly when subclassing" do @c.validates_acceptance_of(:a) c = Class.new(@c) c.validation_reflections.map{|k,v| k}.must_equal [:a] c.validates_presence_of(:a) @c.validation_reflections.must_equal(:a=>[[:acceptance, {:tag=>:acceptance, :message=>"is not accepted", :allow_nil=>true, :accept=>"1"}]]) c.validation_reflections[:a].last.must_equal [:presence, {:tag=>:presence, :message=>"is not present"}] end it "should accept validation definitions using validates_each" do @c.validates_each(:xx, :yy) {|o, a, v| o.errors.add(a, 'too low') if v < 50} o = @c.new def o.xx; 40; end def o.yy; 60; end o.valid?.must_equal false o.errors.full_messages.must_equal ['xx too low'] end it "should return true/false for has_validations?" do @c.has_validations?.must_equal false @c.validates_each(:xx) {1} @c.has_validations?.must_equal true end it "should validate multiple attributes at once" do o = @c.new def o.xx 1 end def o.yy 2 end vals = nil atts = nil @c.validates_each([:xx, :yy]){|obj,a,v| atts=a; vals=v} o.valid? 
vals.must_equal [1,2] atts.must_equal [:xx, :yy] end it "should respect allow_missing option when using multiple attributes" do o = @c.new def o.xx self[:xx] end def o.yy self[:yy] end vals = nil atts = nil @c.validates_each([:xx, :yy], :allow_missing=>true){|obj,a,v| atts=a; vals=v} o.values[:xx] = 1 o.valid? vals.must_equal [1,nil] atts.must_equal [:xx, :yy] vals = nil atts = nil o.values.clear o.values[:yy] = 2 o.valid? vals.must_equal [nil, 2] atts.must_equal [:xx, :yy] vals = nil atts = nil o.values.clear o.valid?.must_equal true vals.must_be_nil atts.must_be_nil end it "should overwrite existing validation with the same tag and attribute" do @c.validates_each(:xx, :xx, :tag=>:low) {|o, a, v| o.xxx; o.errors.add(a, 'too low') if v < 50} @c.validates_each(:yy, :yy) {|o, a, v| o.yyy; o.errors.add(a, 'too low') if v < 50} @c.validates_presence_of(:zz, :zz) @c.validates_length_of(:aa, :aa, :tag=>:blah) o = @c.new def o.zz @a ||= 0 @a += 1 end def o.aa @b ||= 0 @b += 1 end def o.xx; 40; end def o.yy; 60; end def o.xxx; end def o.yyy; end o.valid?.must_equal false o.zz.must_equal 2 o.aa.must_equal 2 o.errors.full_messages.must_equal ['xx too low'] end it "should provide a validates method that takes block with validation definitions" do @c.validates do coolness_of :blah end @c.validations[:blah].wont_be :empty? o = @c.new def o.blah; end o.valid?.must_equal false o.errors.full_messages.must_equal ['blah is not cool'] end it "should have the validates block have appropriate respond_to?" do c = nil @c.validates{c = respond_to?(:foo)} c.must_equal false @c.validates{c = respond_to?(:length_of)} c.must_equal true end end describe Sequel::Model do before do @c = model_class.call Sequel::Model do columns :score validates_each :score do |o, a, v| o.errors.add(a, 'too low') if v < 87 end end @o = @c.new end it "should supply a #valid? method that returns true if validations pass" do @o.score = 50 @o.wont_be :valid? @o.score = 100 @o.must_be :valid? end it "should provide an errors object" do @o.score = 100 @o.must_be :valid? @o.errors.must_be :empty? @o.score = 86 @o.wont_be :valid? @o.errors[:score].must_equal ['too low'] @o.errors.on(:blah).must_be_nil end end describe "Sequel::Plugins::ValidationClassMethods::ClassMethods::Generator" do before do @testit = testit = [] @c = model_class.call Sequel::Model do singleton_class.send(:define_method, :validates_blah) do testit << 1324 end end end it "should instance_eval the block, sending everything to its receiver" do @c.validates do blah end @testit.must_equal [1324] end end describe Sequel::Model do before do @c = model_class.call Sequel::Model do columns :value def self.where(*args) o = Object.new def o.count; 2; end o end def skip; false; end def dont_skip; true; end end @m = @c.new end it "should validate acceptance_of" do @c.validates_acceptance_of :value @m.must_be :valid? @m.value = '1' @m.must_be :valid? end it "should validate acceptance_of with accept" do @c.validates_acceptance_of :value, :accept => 'true' @m.value = '1' @m.wont_be :valid? @m.value = 'true' @m.must_be :valid? end it "should validate acceptance_of with allow_nil => false" do @c.validates_acceptance_of :value, :allow_nil => false @m.wont_be :valid? end it "should validate acceptance_of with allow_missing => true" do @c.validates_acceptance_of :value, :allow_missing => true @m.must_be :valid? 
end it "should validate acceptance_of with allow_missing => true and allow_nil => false" do @c.validates_acceptance_of :value, :allow_missing => true, :allow_nil => false @m.must_be :valid? @m.value = nil @m.wont_be :valid? end it "should validate acceptance_of with allow_blank => true" do @c.validates_acceptance_of :value, :allow_blank => true @m.must_be :valid? @m.value = '' @m.must_be :valid? @m.value = '0' @m.wont_be :valid? end it "should validate acceptance_of with if => true" do @c.validates_acceptance_of :value, :if => :dont_skip @m.value = '0' @m.wont_be :valid? end it "should validate acceptance_of with if => false" do @c.validates_acceptance_of :value, :if => :skip @m.value = '0' @m.must_be :valid? end it "should validate acceptance_of with if proc that evaluates to true" do @c.validates_acceptance_of :value, :if => proc{true} @m.value = '0' @m.wont_be :valid? end it "should validate acceptance_of with if proc that evaluates to false" do @c.validates_acceptance_of :value, :if => proc{false} @m.value = '0' @m.must_be :valid? end it "should raise an error if :if option is not a Symbol, Proc, or nil" do @c.validates_acceptance_of :value, :if => 1 @m.value = '0' proc{@m.valid?}.must_raise(Sequel::Error) end it "should validate confirmation_of" do @c.send(:attr_accessor, :value_confirmation) @c.validates_confirmation_of :value @m.value = 'blah' @m.wont_be :valid? @m.value_confirmation = 'blah' @m.must_be :valid? end it "should validate confirmation_of with if => true" do @c.send(:attr_accessor, :value_confirmation) @c.validates_confirmation_of :value, :if => :dont_skip @m.value = 'blah' @m.wont_be :valid? end it "should validate confirmation_of with if => false" do @c.send(:attr_accessor, :value_confirmation) @c.validates_confirmation_of :value, :if => :skip @m.value = 'blah' @m.must_be :valid? end it "should validate confirmation_of with allow_missing => true" do @c.send(:attr_accessor, :value_confirmation) @c.validates_confirmation_of :value, :allow_missing => true @m.must_be :valid? @m.value_confirmation = 'blah' @m.must_be :valid? @m.value = nil @m.wont_be :valid? end it "should validate format_of" do @c.validates_format_of :value, :with => /.+_.+/ @m.value = 'abc_' @m.wont_be :valid? @m.value = 'abc_def' @m.must_be :valid? end it "should raise for validate_format_of without regexp" do proc {@c.validates_format_of :value}.must_raise(ArgumentError) proc {@c.validates_format_of :value, :with => :blah}.must_raise(ArgumentError) end it "should validate format_of with if => true" do @c.validates_format_of :value, :with => /_/, :if => :dont_skip @m.value = 'a' @m.wont_be :valid? end it "should validate format_of with if => false" do @c.validates_format_of :value, :with => /_/, :if => :skip @m.value = 'a' @m.must_be :valid? end it "should validate format_of with allow_missing => true" do @c.validates_format_of :value, :allow_missing => true, :with=>/./ @m.must_be :valid? @m.value = nil @m.wont_be :valid? end it "should validate length_of with maximum" do @c.validates_length_of :value, :maximum => 5 @m.wont_be :valid? @m.value = '12345' @m.must_be :valid? @m.value = '123456' @m.wont_be :valid? @m.errors[:value].must_equal ['is too long'] @m.value = nil @m.wont_be :valid? @m.errors[:value].must_equal ['is not present'] end it "should validate length_of with maximum using customized error messages" do @c.validates_length_of :value, :maximum => 5, :too_long=>'tl', :nil_message=>'np' @m.value = '123456' @m.wont_be :valid? @m.errors[:value].must_equal ['tl'] @m.value = nil @m.wont_be :valid? 
    @m.errors[:value].must_equal ['np']
  end

  it "should validate length_of with minimum" do
    @c.validates_length_of :value, :minimum => 5
    @m.wont_be :valid?
    @m.value = '12345'
    @m.must_be :valid?
    @m.value = '1234'
    @m.wont_be :valid?
  end

  it "should validate length_of with within" do
    @c.validates_length_of :value, :within => 2..5
    @m.wont_be :valid?
    @m.value = '12345'
    @m.must_be :valid?
    @m.value = '1'
    @m.wont_be :valid?
    @m.value = '123456'
    @m.wont_be :valid?
  end

  it "should validate length_of with within given as an array" do
    @c.validates_length_of :value, :within => [2,3,4,5]
    @m.wont_be :valid?
    @m.value = '12345'
    @m.must_be :valid?
    @m.value = '1'
    @m.wont_be :valid?
    @m.value = '123456'
    @m.wont_be :valid?
  end

  it "should validate length_of with is" do
    @c.validates_length_of :value, :is => 3
    @m.wont_be :valid?
    @m.value = '123'
    @m.must_be :valid?
    @m.value = '12'
    @m.wont_be :valid?
    @m.value = '1234'
    @m.wont_be :valid?
  end

  it "should validate length_of with allow_nil" do
    @c.validates_length_of :value, :is => 3, :allow_nil => true
    @m.must_be :valid?
  end

  it "should validate length_of with if => true" do
    @c.validates_length_of :value, :is => 3, :if => :dont_skip
    @m.value = 'a'
    @m.wont_be :valid?
  end

  it "should validate length_of with if => false" do
    @c.validates_length_of :value, :is => 3, :if => :skip
    @m.value = 'a'
    @m.must_be :valid?
  end

  it "should validate length_of with allow_missing => true" do
    @c.validates_length_of :value, :allow_missing => true, :minimum => 5
    @m.must_be :valid?
    @m.value = nil
    @m.wont_be :valid?
  end

  it "should allow multiple calls to validates_length_of with different options without overwriting" do
    @c.validates_length_of :value, :maximum => 5
    @c.validates_length_of :value, :minimum => 5
    @m.wont_be :valid?
    @m.value = '12345'
    @m.must_be :valid?
    @m.value = '123456'
    @m.wont_be :valid?
    @m.value = '12345'
    @m.must_be :valid?
    @m.value = '1234'
    @m.wont_be :valid?
  end

  it "should validate numericality_of" do
    @c.validates_numericality_of :value
    @m.value = 'blah'
    @m.wont_be :valid?
    @m.value = '123'
    @m.must_be :valid?
    @m.value = '123.1231'
    @m.must_be :valid?
    @m.value = '+1'
    @m.must_be :valid?
    @m.value = '-1'
    @m.must_be :valid?
    @m.value = '+1.123'
    @m.must_be :valid?
    @m.value = '-0.123'
    @m.must_be :valid?
    @m.value = '-0.123E10'
    @m.must_be :valid?
    @m.value = '32.123e10'
    @m.must_be :valid?
    @m.value = '+32.123E10'
    @m.must_be :valid?
    @m.value = '.0123'
    @m.must_be :valid?
  end

  it "should validate numericality_of with only_integer" do
    @c.validates_numericality_of :value, :only_integer => true
    @m.value = 'blah'
    @m.wont_be :valid?
    @m.value = '123'
    @m.must_be :valid?
    @m.value = '123.1231'
    @m.wont_be :valid?
  end

  it "should validate numericality_of with if => true" do
    @c.validates_numericality_of :value, :if => :dont_skip
    @m.value = 'a'
    @m.wont_be :valid?
  end

  it "should validate numericality_of with if => false" do
    @c.validates_numericality_of :value, :if => :skip
    @m.value = 'a'
    @m.must_be :valid?
  end

  it "should validate numericality_of with allow_missing => true" do
    @c.validates_numericality_of :value, :allow_missing => true
    @m.must_be :valid?
    @m.value = nil
    @m.wont_be :valid?
  end

  it "should validate presence_of" do
    @c.validates_presence_of :value
    @m.wont_be :valid?
    @m.value = ''
    @m.wont_be :valid?
    @m.value = 1234
    @m.must_be :valid?
    @m.value = nil
    @m.wont_be :valid?
    @m.value = true
    @m.must_be :valid?
    @m.value = false
    @m.must_be :valid?
  end
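  # An illustrative usage sketch (hypothetical Ticket model, not part of this
  # spec file): validates_inclusion_of restricts a column to a fixed set of
  # values, and :allow_nil permits leaving the column unset, mirroring the
  # inclusion tests that follow.
  #
  #   class Ticket < Sequel::Model
  #     plugin :validation_class_methods
  #     validates_inclusion_of :status, :in => %w'open closed', :allow_nil => true
  #   end
  #
  #   Ticket.new(:status => 'open').valid?  # => true
  #   Ticket.new(:status => 'bogus').valid? # => false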
  it "should validate inclusion_of with an array" do
    @c.validates_inclusion_of :value, :in => [1,2]
    @m.wont_be :valid?
    @m.value = 1
    @m.must_be :valid?
    @m.value = 1.5
    @m.wont_be :valid?
    @m.value = 2
    @m.must_be :valid?
    @m.value = 3
    @m.wont_be :valid?
  end

  it "should validate inclusion_of with a range" do
    @c.validates_inclusion_of :value, :in => 1..4
    @m.wont_be :valid?
    @m.value = 1
    @m.must_be :valid?
    @m.value = 1.5
    @m.must_be :valid?
    @m.value = 0
    @m.wont_be :valid?
    @m.value = 5
    @m.wont_be :valid?
  end

  it "should raise an error if inclusion_of doesn't receive a valid :in option" do
    lambda{@c.validates_inclusion_of :value}.must_raise(ArgumentError)
    lambda{@c.validates_inclusion_of :value, :in => 1}.must_raise(ArgumentError)
  end

  it "should have inclusion_of handle :allow_nil too" do
    @c.validates_inclusion_of :value, :in => 1..4, :allow_nil => true
    @m.value = nil
    @m.must_be :valid?
    @m.value = 0
    @m.wont_be :valid?
  end

  it "should validate presence_of with if => true" do
    @c.validates_presence_of :value, :if => :dont_skip
    @m.wont_be :valid?
  end

  it "should validate presence_of with if => false" do
    @c.validates_presence_of :value, :if => :skip
    @m.must_be :valid?
  end

  it "should validate presence_of with allow_missing => true" do
    @c.validates_presence_of :value, :allow_missing => true
    @m.must_be :valid?
    @m.value = nil
    @m.wont_be :valid?
  end

  it "should validate uniqueness_of with if => true" do
    @c.validates_uniqueness_of :value, :if => :dont_skip
    @m.value = 'a'
    @m.wont_be :valid?
  end

  it "should validate uniqueness_of with if => false" do
    @c.validates_uniqueness_of :value, :if => :skip
    @m.value = 'a'
    @m.must_be :valid?
  end

  it "should validate uniqueness_of with allow_missing => true" do
    @c.validates_uniqueness_of :value, :allow_missing => true
    @m.must_be :valid?
    @m.value = 1
    @m.wont_be :valid?
  end
end

describe "Superclass validations" do
  before do
    @c1 = model_class.call Sequel::Model do
      columns :value
      validates_length_of :value, :minimum => 5
    end
    @c2 = Class.new(@c1)
    @c2.class_eval do
      columns :value
      validates_format_of :value, :with => /^[a-z]+$/
    end
  end

  it "should be checked when validating" do
    o = @c2.new
    o.value = 'ab'
    o.valid?.must_equal false
    o.errors.full_messages.must_equal ['value is too short']
    o.value = '12'
    o.valid?.must_equal false
    o.errors.full_messages.must_equal ['value is too short', 'value is invalid']
    o.value = 'abcde'
    o.valid?.must_equal true
  end

  it "should have skip_superclass_validations? return whether superclass validations were skipped" do
    @c2.skip_superclass_validations?.must_be_nil
    @c2.skip_superclass_validations
    @c2.skip_superclass_validations?.must_equal true
  end

  it "should be skipped if skip_superclass_validations is called" do
    @c2.skip_superclass_validations
    o = @c2.new
    o.value = 'ab'
    o.valid?.must_equal true
    o.value = '12'
    o.valid?.must_equal false
    o.errors.full_messages.must_equal ['value is invalid']
    o.value = 'abcde'
    o.valid?.must_equal true
  end

  it "should handle case where superclass validation has already been deleted" do
    @c1.class_eval do
      validates_length_of :foo, :minimum => 5
    end
    @c2 = Class.new(@c1)
    @c2.class_eval do
      columns :value
      validates_format_of :value, :with => /^[a-z]+$/
    end
    @c2.validations.delete(:foo)
    @c2.skip_superclass_validations
    o = @c2.new
    o.value = 'ab'
    o.valid?.must_equal true
    o.value = '12'
    o.valid?.must_equal false
    o.errors.full_messages.must_equal ['value is invalid']
    o.value = 'abcde'
    o.valid?.must_equal true
  end
end

describe ".validates with block" do
  it "should support calling .each" do
    @c = model_class.call Sequel::Model do
      columns :vvv
      validates do
        each :vvv do |o, a, v|
          o.errors.add(a, "is less than zero") if v.to_i < 0
        end
      end
    end
    o = @c.new
    o.vvv = 1
    o.must_be :valid?
    o.vvv = -1
    o.wont_be :valid?
  end
end
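# An illustrative usage sketch (hypothetical Item model, not part of this
# spec file): the validates block groups several validations together, and
# `each` defines an ad hoc per-column check with the same
# (object, attribute, value) block signature exercised above.
#
#   class Item < Sequel::Model
#     plugin :validation_class_methods
#     validates do
#       presence_of :name
#       each :price do |obj, attr, value|
#         obj.errors.add(attr, 'must be positive') unless value.to_i > 0
#       end
#     end
#   end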
describe Sequel::Model, "Validations" do
  before do
    class ::Person < Sequel::Model
      plugin :validation_class_methods
      columns :id, :name, :first_name, :last_name, :middle_name, :initials, :age, :terms
    end

    class ::Smurf < Person
    end

    class ::Can < Sequel::Model
      plugin :validation_class_methods
      columns :id, :name
    end

    class ::Cow < Sequel::Model
      plugin :validation_class_methods
      columns :id, :name, :got_milk
    end

    class ::User < Sequel::Model
      plugin :validation_class_methods
      columns :id, :username, :password
    end

    class ::Address < Sequel::Model
      plugin :validation_class_methods
      columns :id, :zip_code
    end
  end
  after do
    [:Person, :Smurf, :Can, :Cow, :User, :Address].each{|c| Object.send(:remove_const, c)}
  end

  it "should validate the acceptance of a column" do
    class ::Cow < Sequel::Model
      validations.clear
      validates_acceptance_of :got_milk, :accept => 'blah', :allow_nil => false
    end
    @cow = Cow.new
    @cow.wont_be :valid?
    @cow.errors.full_messages.must_equal ["got_milk is not accepted"]
    @cow.got_milk = "blah"
    @cow.must_be :valid?
  end

  it "should validate the confirmation of a column" do
    class ::User < Sequel::Model
      def password_confirmation
        "test"
      end
      validations.clear
      validates_confirmation_of :password
    end
    @user = User.new
    @user.wont_be :valid?
    @user.errors.full_messages.must_equal ["password is not confirmed"]
    @user.password = "test"
    @user.must_be :valid?
  end

  it "should validate format of column" do
    class ::Person < Sequel::Model
      validates_format_of :first_name, :with => /^[a-zA-Z]+$/
    end
    @person = Person.new :first_name => "Lancelot99"
    @person.valid?.must_equal false
    @person = Person.new :first_name => "Anita"
    @person.valid?.must_equal true
  end

  it "should validate length of column" do
    class ::Person < Sequel::Model
      validations.clear
      validates_length_of :first_name, :maximum => 30
      validates_length_of :last_name, :minimum => 30
      validates_length_of :middle_name, :within => 1..5
      validates_length_of :initials, :is => 2
    end
    @person = Person.new(
      :first_name => "Anamethatiswaytofreakinglongandwayoverthirtycharacters",
      :last_name => "Alastnameunderthirtychars",
      :initials => "LGC",
      :middle_name => "danger"
    )
    @person.wont_be :valid?
    @person.errors.full_messages.size.must_equal 4
    @person.errors.full_messages.sort.must_equal [
      'first_name is too long',
      'initials is the wrong length',
      'last_name is too short',
      'middle_name is the wrong length'
    ]
    @person.first_name = "Lancelot"
    @person.last_name = "1234567890123456789012345678901"
    @person.initials = "LC"
    @person.middle_name = "Will"
    @person.must_be :valid?
  end

  it "should validate that a column has the correct type for the schema column" do
    p = model_class.call Sequel::Model do
      columns :age, :d, :b
      self.raise_on_typecast_failure = false
      validates_schema_type :age
      validates_schema_type :d, :message=>'is a bad choice'
      validates_schema_type :b
      @db_schema = {:age=>{:type=>:integer}, :d=>{:type=>:date}, :b=>{:type=>:datetime}}
    end
    @person = p.new
    @person.must_be :valid?
    @person.age = 'a'
    @person.wont_be :valid?
    @person.errors.full_messages.must_equal ['age is not a valid integer']
    @person.age = 1
    @person.must_be :valid?
    @person.d = 'a'
    @person.wont_be :valid?
    @person.errors.full_messages.must_equal ['d is a bad choice']
    @person.d = Date.today
    @person.must_be :valid?
    @person.b = 'XX'
    @person.wont_be :valid?
    @person.errors.full_messages.must_equal ['b is not a valid time or datetime']
    @person.b = DateTime.now
    @person.must_be :valid?
  end
end it "should validate numericality of column" do class ::Person < Sequel::Model validations.clear validates_numericality_of :age end @person = Person.new :age => "Twenty" @person.wont_be :valid? @person.errors.full_messages.must_equal ['age is not a number'] @person.age = 20 @person.must_be :valid? end it "should validate the presence of a column" do class ::Cow < Sequel::Model validations.clear validates_presence_of :name end @cow = Cow.new @cow.wont_be :valid? @cow.errors.full_messages.must_equal ['name is not present'] @cow.name = "Betsy" @cow.must_be :valid? end it "should validate the uniqueness of a column" do class ::User < Sequel::Model validations.clear validates do uniqueness_of :username end end User.dataset = User.dataset.with_fetch(proc do |sql| case sql when /count.*username = '0records'/ {:v => 0} when /count.*username = '2records'/ {:v => 2} when /count.*username = '1record'/ {:v => 1} when /username = '1record'/ {:id => 3, :username => "1record", :password => "test"} end end) @user = User.new(:username => "2records", :password => "anothertest") @user.wont_be :valid? @user.errors.full_messages.must_equal ['username is already taken'] @user = User.new(:username => "1record", :password => "anothertest") @user.wont_be :valid? @user.errors.full_messages.must_equal ['username is already taken'] @user = User.load(:id=>4, :username => "1record", :password => "anothertest") @user.wont_be :valid? @user.errors.full_messages.must_equal ['username is already taken'] @user = User.load(:id=>3, :username => "1record", :password => "anothertest") @user.must_be :valid? @user.errors.full_messages.must_equal [] @user = User.new(:username => "0records", :password => "anothertest") @user.must_be :valid? @user.errors.full_messages.must_equal [] User.db.sqls @user = User.new(:password => "anothertest") @user.must_be :valid? @user.errors.full_messages.must_equal [] User.db.sqls.must_equal [] end it "should validate the uniqueness of multiple columns" do class ::User < Sequel::Model validations.clear validates do uniqueness_of [:username, :password] end end User.dataset = User.dataset.with_fetch(proc do |sql| case sql when /count.*username = '0records'/ {:v => 0} when /count.*username = '2records'/ {:v => 2} when /count.*username = '1record'/ {:v => 1} when /username = '1record'/ if sql =~ /password = 'anothertest'/ {:id => 3, :username => "1record", :password => "anothertest"} else {:id => 4, :username => "1record", :password => "test"} end end end) @user = User.new(:username => "2records", :password => "anothertest") @user.wont_be :valid? @user.errors.full_messages.must_equal ['username and password is already taken'] @user = User.new(:username => "1record", :password => "anothertest") @user.wont_be :valid? @user.errors.full_messages.must_equal ['username and password is already taken'] @user = User.load(:id=>4, :username => "1record", :password => "anothertest") @user.wont_be :valid? @user.errors.full_messages.must_equal ['username and password is already taken'] @user = User.load(:id=>3, :username => "1record", :password => "test") @user.wont_be :valid? @user.errors.full_messages.must_equal ['username and password is already taken'] @user = User.load(:id=>3, :username => "1record", :password => "anothertest") @user.must_be :valid? @user.errors.full_messages.must_equal [] @user = User.new(:username => "0records", :password => "anothertest") @user.must_be :valid? @user.errors.full_messages.must_equal [] User.db.sqls @user = User.new(:password => "anothertest") @user.must_be :valid? 
    @user.errors.full_messages.must_equal []
    @user = User.new(:username => "0records")
    @user.must_be :valid?
    @user.errors.full_messages.must_equal []
    @user = User.new
    @user.must_be :valid?
    @user.errors.full_messages.must_equal []
    User.db.sqls.must_equal []
  end

  it "should have a validates block that contains multiple validations" do
    class ::Person < Sequel::Model
      validations.clear
      validates do
        format_of :first_name, :with => /^[a-zA-Z]+$/
        length_of :first_name, :maximum => 30
      end
    end
    Person.validations[:first_name].size.must_equal 2
    @person = Person.new :first_name => "Lancelot99"
    @person.valid?.must_equal false
    @person2 = Person.new :first_name => "Wayne"
    @person2.valid?.must_equal true
  end

  it "should allow 'longhand' validations directly within the model." do
    class ::Person < Sequel::Model
      validations.clear
      validates_length_of :first_name, :maximum => 30
    end
    Person.validations.length.must_equal(1)
  end

  it "should define a has_validations? method which returns true if the model has validations, false otherwise" do
    class ::Person < Sequel::Model
      validations.clear
      validates do
        format_of :first_name, :with => /\w+/
        length_of :first_name, :maximum => 30
      end
    end
    class ::Smurf < Person
      validations.clear
    end
    Person.validations.wont_be :empty?
    Smurf.validations.must_be :empty?
  end

  it "should validate correctly instances initialized with string keys" do
    class ::Can < Sequel::Model
      validates_length_of :name, :minimum => 4
    end
    Can.new('name' => 'ab').wont_be :valid?
    Can.new('name' => 'abcd').must_be :valid?
  end
end

describe "Model#save" do
  before do
    @c = model_class.call Sequel::Model(:people) do
      columns :id, :x
      validates_each :x do |o, a, v|
        o.errors.add(a, 'blah') unless v == 7
      end
    end
    @m = @c.load(:id => 4, :x=>6)
    DB.reset
  end

  it "should save only if validations pass" do
    @m.raise_on_save_failure = false
    @m.wont_be :valid?
    @m.save
    DB.sqls.must_be :empty?
    @m.x = 7
    @m.must_be :valid?
    @m.save.wont_equal false
    DB.sqls.must_equal ['UPDATE people SET x = 7 WHERE (id = 4)']
  end
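  # An illustrative sketch (hypothetical Album model, not part of this spec
  # file): how save interacts with failed validations in application code.
  # With raise_on_save_failure left at its default of true, save raises
  # Sequel::ValidationFailed; with it set to false, save returns nil, and
  # :validate=>false bypasses validation entirely, as the tests below show.
  #
  #   album = Album.new(:name => '')
  #   album.raise_on_save_failure = false
  #   album.save                     # => nil, no SQL issued
  #   album.save(:validate => false) # skips validation and issues the INSERT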
  it "should skip validations if the :validate=>false option is used" do
    @m.raise_on_save_failure = false
    @m.wont_be :valid?
    @m.save(:validate=>false)
    DB.sqls.must_equal ['UPDATE people SET x = 6 WHERE (id = 4)']
  end

  it "should raise error if validations fail and raise_on_save_failure is true" do
    proc{@m.save}.must_raise(Sequel::ValidationFailed)
  end

  it "should return nil if validations fail and raise_on_save_failure is false" do
    @m.raise_on_save_failure = false
    @m.save.must_be_nil
  end
end

sequel-5.63.0/spec/extensions/validation_contexts_spec.rb

require_relative "spec_helper"

describe "Sequel::Plugins::ValidationContexts" do
  before do
    @c = Class.new(Sequel::Model(:foo))
    @c.class_eval do
      columns :a, :b, :c
      plugin :validation_contexts
      def validate
        errors.add(:a, 'bad') if a == 1 && validation_context == :create
        errors.add(:b, 'bad') if b == 2 && validation_context == :update
        errors.add(:c, 'bad') if c == 3 && validation_context == :foo
      end
    end
  end

  it "should support :validation_context option to valid?" do
    @c.new(:c=>3).valid?.must_equal true
    @c.new(:c=>3).valid?(:validation_context=>:foo).must_equal false
  end
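  # An illustrative sketch (hypothetical Post model, not part of this spec
  # file): the validation_contexts plugin exposes validation_context inside
  # validate, so a model can apply extra rules only for specific callers.
  #
  #   class Post < Sequel::Model
  #     plugin :validation_contexts
  #     def validate
  #       super
  #       if title.nil? && validation_context == :publish
  #         errors.add(:title, 'is required to publish')
  #       end
  #     end
  #   end
  #
  #   post.valid?(:validation_context => :publish)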
  it "should support :validation_context option to save" do
    @c.new(:c=>3).save
    proc{@c.new(:c=>3).save(:validation_context=>:foo)}.must_raise Sequel::ValidationFailed
  end

  it "should raise error if using a validation context on a frozen model instance" do
    @c.new(:c=>3).freeze.valid?.must_equal true
    proc{@c.new(:c=>3).freeze.valid?(:validation_context=>:foo)}.must_raise RuntimeError, TypeError
  end
end

sequel-5.63.0/spec/extensions/validation_helpers_spec.rb

require_relative "spec_helper"

describe "Sequel::Plugins::ValidationHelpers" do
  before do
    @c = Class.new(Sequel::Model) do
      def self.set_validations(&block)
        define_method(:validate, &block)
        alias_method(:validate, :validate)
      end
      columns :value
    end
    @c.plugin :validation_helpers
    @m = @c.new
  end

  it "should take an :allow_blank option" do
    @c.set_validations{validates_format(/.+_.+/, :value, :allow_blank=>true)}
    @m.value = 'abc_'
    @m.wont_be :valid?
    @m.value = '1_1'
    @m.must_be :valid?
    o = String.new
    o.singleton_class.send(:undef_method, :blank?)
    @m.value = o
    @m.must_be :valid?
    o = Object.new
    @m.value = o
    @m.wont_be :valid?
    def o.blank?
      true
    end
    @m.must_be :valid?
  end

  it "should take an :allow_missing option" do
    @c.set_validations{validates_format(/.+_.+/, :value, :allow_missing=>true)}
    @m.values.clear
    @m.must_be :valid?
    @m.value = nil
    @m.wont_be :valid?
    @m.value = '1_1'
    @m.must_be :valid?
  end

  it "should take an :allow_nil option" do
    @c.set_validations{validates_format(/.+_.+/, :value, :allow_nil=>true)}
    @m.value = 'abc_'
    @m.wont_be :valid?
    @m.value = '1_1'
    @m.must_be :valid?
    @m.value = nil
    @m.must_be :valid?
  end

  it "should take a :message option" do
    @c.set_validations{validates_format(/.+_.+/, :value, :message=>"is so blah")}
    @m.value = 'abc_'
    @m.wont_be :valid?
    @m.errors.full_messages.must_equal ['value is so blah']
    @m.value = '1_1'
    @m.must_be :valid?
  end

  it "should take a :from=>:values option to lookup in values hash" do
    @c.set_validations{validates_max_length(50, :value, :from=>:values)}
    @c.send(:define_method, :value){super() * 2}
    @m.value = ' ' * 26
    @m.must_be :valid?
  end

  it "should take a :skip_invalid option to not validate a column that already has an error" do
    @c.set_validations{validates_not_null(:value); validates_not_null(:value, :skip_invalid=>true)}
    @m.wont_be :valid?
    @m.errors.on(:value).must_equal ['is not present']
  end
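  # An illustrative sketch (hypothetical Profile model, not part of this
  # spec file): every validates_* helper accepts :allow_nil, :allow_blank,
  # and :allow_missing options to skip the check when the value is absent,
  # as the tests above exercise.
  #
  #   class Profile < Sequel::Model
  #     plugin :validation_helpers
  #     def validate
  #       super
  #       validates_format(/\Ahttps?:/, :website, :allow_nil => true)
  #     end
  #   end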
  it "should add validation errors even for columns that already have an error" do
    @c.set_validations{validates_not_null(:value); validates_not_null(:value)}
    @m.wont_be :valid?
    @m.errors.on(:value).must_equal ['is not present', 'is not present']
  end

  it "should allow a proc for the :message option" do
    @c.set_validations{validates_format(/.+_.+/, :value, :message=>proc{|f| "doesn't match #{f.inspect}"})}
    @m.value = 'abc_'
    @m.wont_be :valid?
    @m.errors.must_equal(:value=>["doesn't match /.+_.+/"])
  end

  it "should take multiple attributes in the same call" do
    @c.columns :value, :value2
    @c.set_validations{validates_presence([:value, :value2])}
    @m.wont_be :valid?
    @m.value = 1
    @m.wont_be :valid?
    @m.value2 = 1
    @m.must_be :valid?
  end

  it "should support modifying default validation options for a particular model" do
    @c.set_validations{validates_presence(:value)}
    @m.wont_be :valid?
    @m.errors.must_equal(:value=>['is not present'])
    @c.class_eval do
      private

      def default_validation_helpers_options(type)
        {:allow_missing=>true, :message=>proc{'was not entered'}}
      end
    end
    @m.value = nil
    @m.wont_be :valid?
    @m.errors.must_equal(:value=>["was not entered"])
    @m.value = 1
    @m.must_be :valid?
    @m.values.clear
    @m.must_be :valid?

    c = Class.new(Sequel::Model)
    c.class_eval do
      plugin :validation_helpers
      attr_accessor :value
      def validate
        validates_presence(:value)
      end
    end
    m = c.new
    m.wont_be :valid?
    m.errors.must_equal(:value=>['is not present'])
  end

  it "should support validates_exact_length" do
    @c.set_validations{validates_exact_length(3, :value)}
    @m.wont_be :valid?
    @m.value = '123'
    @m.must_be :valid?
    @m.value = '12'
    @m.wont_be :valid?
    @m.value = '1234'
    @m.wont_be :valid?
  end

  it "should support validates_format" do
    @c.set_validations{validates_format(/.+_.+/, :value)}
    @m.value = 'abc_'
    @m.wont_be :valid?
    @m.value = 'abc_def'
    @m.must_be :valid?
  end

  it "should support validates_includes with an array" do
    @c.set_validations{validates_includes([1,2], :value)}
    @m.wont_be :valid?
    @m.value = 1
    @m.must_be :valid?
    @m.value = 1.5
    @m.wont_be :valid?
    @m.value = 2
    @m.must_be :valid?
    @m.value = 3
    @m.wont_be :valid?
  end

  it "should support validates_includes with a range" do
    @c.set_validations{validates_includes(1..4, :value)}
    @m.wont_be :valid?
    @m.value = 1
    @m.must_be :valid?
    @m.value = 1.5
    @m.must_be :valid?
    @m.value = 0
    @m.wont_be :valid?
    @m.value = 5
    @m.wont_be :valid?
  end

  it "should support validates_integer" do
    @c.set_validations{validates_integer(:value)}
    @m.value = 'blah'
    @m.wont_be :valid?
    @m.value = '123'
    @m.must_be :valid?
    @m.value = '123.1231'
    @m.wont_be :valid?
  end

  it "should support validates_length_range" do
    @c.set_validations{validates_length_range(2..5, :value)}
    @m.wont_be :valid?
    @m.value = '12345'
    @m.must_be :valid?
    @m.value = '1'
    @m.wont_be :valid?
    @m.value = '123456'
    @m.wont_be :valid?
  end

  it "should support validates_max_length" do
    @c.set_validations{validates_max_length(5, :value)}
    @m.wont_be :valid?
    @m.value = '12345'
    @m.must_be :valid?
    @m.value = '123456'
    @m.wont_be :valid?
    @m.errors[:value].must_equal ['is longer than 5 characters']
    @m.value = nil
    @m.wont_be :valid?
    @m.errors[:value].must_equal ['is not present']
  end

  it "should support validates_max_length with customized error messages" do
    @c.set_validations{validates_max_length(5, :value, :message=>'tl', :nil_message=>'np')}
    @m.value = '123456'
    @m.wont_be :valid?
    @m.errors[:value].must_equal ['tl']
    @m.value = nil
    @m.wont_be :valid?
    @m.errors[:value].must_equal ['np']
  end

  it "should support validates_min_length" do
    @c.set_validations{validates_min_length(5, :value)}
    @m.wont_be :valid?
    @m.value = '12345'
    @m.must_be :valid?
    @m.value = '1234'
    @m.wont_be :valid?
  end
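  # An illustrative sketch (not part of this spec file): the length helpers
  # take a :message for values that fail the check and a separate
  # :nil_message for nil values, as the tests above verify, e.g.:
  #
  #   validates_max_length 255, :title, :message => 'is too long', :nil_message => 'is required'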
end it "should support validates_max_value" do @c.set_validations{validates_max_value(5, :value)} @m.must_be :valid? @m.value = 6 @m.wont_be :valid? @m.errors[:value].must_equal ['is greater than maximum allowed value'] @m.value = 5 @m.must_be :valid? end it "should support validates_min_value" do @c.set_validations{validates_min_value(7, :value)} @m.must_be :valid? @m.value = 6 @m.wont_be :valid? @m.errors[:value].must_equal ['is less than minimum allowed value'] @m.value = 7 @m.must_be :valid? end it "should support validates_schema_types" do @c.set_validations{validates_schema_types} @m.value = 123 @m.must_be :valid? @m.value = '123' @m.must_be :valid? def @m.db_schema; {:value=>{:type=>:integer}} end @m.singleton_class.send(:alias_method, :db_schema, :db_schema) @m.wont_be :valid? @m.errors.full_messages.must_equal ['value is not a valid integer'] @c.set_validations{validates_schema_types(:value)} def @m.db_schema; {:value=>{:type=>:integer}} end @m.singleton_class.send(:alias_method, :db_schema, :db_schema) @m.wont_be :valid? @m.errors.full_messages.must_equal ['value is not a valid integer'] @c.set_validations{validates_schema_types(:value, :message=>'is bad')} def @m.db_schema; {:value=>{:type=>:integer}} end @m.wont_be :valid? @m.errors.full_messages.must_equal ['value is bad'] end it "should support validates_numeric" do @c.set_validations{validates_numeric(:value)} @m.value = 'blah' @m.wont_be :valid? @m.value = '123' @m.must_be :valid? @m.value = '123.1231' @m.must_be :valid? @m.value = '+1' @m.must_be :valid? @m.value = '-1' @m.must_be :valid? @m.value = '+1.123' @m.must_be :valid? @m.value = '-0.123' @m.must_be :valid? @m.value = '-0.123E10' @m.must_be :valid? @m.value = '32.123e10' @m.must_be :valid? @m.value = '+32.123E10' @m.must_be :valid? @m.must_be :valid? @m.value = '.0123' end it "should support validates_type" do @c.set_validations{validates_type(Integer, :value)} @m.value = 123 @m.must_be :valid? @m.value = '123' @m.wont_be :valid? @m.errors.full_messages.must_equal ['value is not a valid integer'] @c.set_validations{validates_type(:String, :value)} @m.value = '123' @m.must_be :valid? @m.value = 123 @m.wont_be :valid? @m.errors.full_messages.must_equal ['value is not a valid string'] @c.set_validations{validates_type('Integer', :value)} @m.value = 123 @m.must_be :valid? @m.value = 123.05 @m.wont_be :valid? @m.errors.full_messages.must_equal ['value is not a valid integer'] @c.set_validations{validates_type(Integer, :value)} @m.value = 1 @m.must_be :valid? @m.value = false @m.wont_be :valid? @c.set_validations{validates_type([Integer, Float], :value)} @m.value = 1 @m.must_be :valid? @m.value = 1.0 @m.must_be :valid? @m.value = BigDecimal('1.0') @m.wont_be :valid? @m.errors.full_messages.must_equal ['value is not a valid integer or float'] end it "should support validates_not_null" do @c.set_validations{validates_not_null(:value)} @m.wont_be :valid? @m.value = '' @m.must_be :valid? @m.value = 1234 @m.must_be :valid? @m.value = nil @m.wont_be :valid? @m.value = true @m.must_be :valid? @m.value = false @m.must_be :valid? @m.value = Time.now @m.must_be :valid? end it "should support validates_no_null_byte" do @c.set_validations{validates_no_null_byte(:value)} @m.must_be :valid? @m.value = '' @m.must_be :valid? @m.value = 1234 @m.must_be :valid? @m.value = "asdfasl\0asdf" @m.wont_be :valid? @m.value = true @m.must_be :valid? @m.value = false @m.must_be :valid? @m.value = Time.now @m.must_be :valid? 
end it "should support validates_presence" do @c.set_validations{validates_presence(:value)} @m.wont_be :valid? @m.value = '' @m.wont_be :valid? @m.value = 1234 @m.must_be :valid? @m.value = nil @m.wont_be :valid? @m.value = true @m.must_be :valid? @m.value = false @m.must_be :valid? @m.value = Time.now @m.must_be :valid? end it "should support validates_unique with a single attribute" do @c.columns(:id, :username, :password) @c.set_dataset DB[:items] @c.set_validations{validates_unique(:username, :only_if_modified=>false)} @c.dataset = @c.dataset.with_fetch(proc do |sql| case sql when /count.*username = '0records'/ {:v => 0} when /count.*username = '1record'/ {:v => 1} end end) @user = @c.new(:username => "0records", :password => "anothertest") @user.must_be :valid? @user = @c.load(:id=>3, :username => "0records", :password => "anothertest") @user.must_be :valid? DB.sqls @user = @c.new(:password => "anothertest") @user.must_be :valid? DB.sqls.must_equal [] @user = @c.new(:username => "1record", :password => "anothertest") @user.wont_be :valid? @user.errors.full_messages.must_equal ['username is already taken'] @user = @c.load(:id=>4, :username => "1record", :password => "anothertest") @user.wont_be :valid? @user.errors.full_messages.must_equal ['username is already taken'] @user = @c.load(:id=>1, :username => "0records", :password => "anothertest") @user.must_be :valid? DB.sqls.last.must_equal "SELECT count(*) AS count FROM items WHERE ((username = '0records') AND (id != 1)) LIMIT 1" @user = @c.new(:username => "0records", :password => "anothertest") @user.must_be :valid? DB.sqls.last.must_equal "SELECT count(*) AS count FROM items WHERE (username = '0records') LIMIT 1" end it "should support validates_unique with multiple attributes" do @c.columns(:id, :username, :password) @c.set_dataset DB[:items] @c.set_validations{validates_unique([:username, :password], :only_if_modified=>false)} @c.dataset = @c.dataset.with_fetch(proc do |sql| case sql when /count.*username = '0records'/ {:v => 0} when /count.*username = '1record'/ {:v => 1} end end) @user = @c.new(:username => "0records", :password => "anothertest") @user.must_be :valid? @user = @c.load(:id=>3, :username => "0records", :password => "anothertest") @user.must_be :valid? DB.sqls @user = @c.new(:password => "anothertest") @user.must_be :valid? @user.errors.full_messages.must_equal [] @user = @c.new(:username => "0records") @user.must_be :valid? @user.errors.full_messages.must_equal [] @user = @c.new @user.must_be :valid? @user.errors.full_messages.must_equal [] DB.sqls.must_equal [] @user = @c.new(:username => "1record", :password => "anothertest") @user.wont_be :valid? @user.errors.full_messages.must_equal ['username and password is already taken'] @user = @c.load(:id=>4, :username => "1record", :password => "anothertest") @user.wont_be :valid? @user.errors.full_messages.must_equal ['username and password is already taken'] @user = @c.load(:id=>1, :username => "0records", :password => "anothertest") @user.must_be :valid? DB.sqls.last.must_equal "SELECT count(*) AS count FROM items WHERE ((username = '0records') AND (password = 'anothertest') AND (id != 1)) LIMIT 1" @user = @c.new(:username => "0records", :password => "anothertest") @user.must_be :valid? 
    DB.sqls.last.must_equal "SELECT count(*) AS count FROM items WHERE ((username = '0records') AND (password = 'anothertest')) LIMIT 1"
  end

  it "should support validates_unique with a block" do
    @c.columns(:id, :username, :password)
    @c.set_dataset DB[:items]
    @c.set_validations{validates_unique(:username, :only_if_modified=>false){|ds| ds.filter(:active)}}
    @c.dataset = @c.dataset.with_fetch(:v=>0)
    DB.reset
    @c.new(:username => "0records", :password => "anothertest").must_be :valid?
    @c.load(:id=>3, :username => "0records", :password => "anothertest").must_be :valid?
    DB.sqls.must_equal ["SELECT count(*) AS count FROM items WHERE ((username = '0records') AND active) LIMIT 1",
      "SELECT count(*) AS count FROM items WHERE ((username = '0records') AND active AND (id != 3)) LIMIT 1"]
  end

  it "should support validates_unique with :where option" do
    @c.columns(:id, :username, :password)
    @c.set_dataset DB[:items]
    @c.set_validations{validates_unique(:username, :only_if_modified=>false, :where=>proc{|ds, obj, cols| ds.where(cols.map{|c| [Sequel.function(:lower, c), obj.send(c).downcase]})})}
    @c.dataset = @c.dataset.with_fetch(:v=>0)
    DB.reset
    @c.new(:username => "0RECORDS", :password => "anothertest").must_be :valid?
    @c.load(:id=>3, :username => "0RECORDS", :password => "anothertest").must_be :valid?
    DB.sqls.must_equal ["SELECT count(*) AS count FROM items WHERE (lower(username) = '0records') LIMIT 1",
      "SELECT count(*) AS count FROM items WHERE ((lower(username) = '0records') AND (id != 3)) LIMIT 1"]
  end

  it "should support validates_unique with :dataset option" do
    @c.columns(:id, :username, :password)
    @c.set_dataset DB[:items]
    c = @c
    @c.set_validations{validates_unique(:username, :only_if_modified=>false, :dataset=>c.where(:a=>[1,2,3]))}
    @c.dataset = @c.dataset.with_fetch(:v=>0)
    DB.reset
    @c.new(:username => "0records", :password => "anothertest").must_be :valid?
    @c.load(:id=>3, :username => "0records", :password => "anothertest").must_be :valid?
    DB.sqls.must_equal ["SELECT count(*) AS count FROM items WHERE ((a IN (1, 2, 3)) AND (username = '0records')) LIMIT 1",
      "SELECT count(*) AS count FROM items WHERE ((a IN (1, 2, 3)) AND (username = '0records') AND (id != 3)) LIMIT 1"]
  end

  it "should use qualified primary keys for validates_unique when the dataset is joined" do
    @c.columns(:id, :username, :password)
    @c.set_dataset DB[:items]
    c = @c
    @c.set_validations{validates_unique(:username, :only_if_modified=>false, :dataset=>c.cross_join(:a))}
    @c.dataset = @c.dataset.with_fetch(:v=>0)
    DB.reset
    @c.new(:username => "0records", :password => "anothertest").must_be :valid?
    @c.load(:id=>3, :username => "0records", :password => "anothertest").must_be :valid?
    DB.sqls.must_equal ["SELECT count(*) AS count FROM items CROSS JOIN a WHERE (username = '0records') LIMIT 1",
      "SELECT count(*) AS count FROM items CROSS JOIN a WHERE ((username = '0records') AND (items.id != 3)) LIMIT 1"]
  end

  it "should not have validates_unique check uniqueness for existing records if values haven't changed" do
    @c.columns(:id, :username, :password)
    @c.set_dataset DB[:items]
    @c.set_validations{validates_unique([:username, :password])}
    @c.dataset = @c.dataset.with_fetch(:v=>0)
    DB.reset
    @c.new(:username => "0records", :password => "anothertest").must_be :valid?
    DB.sqls.must_equal ["SELECT count(*) AS count FROM items WHERE ((username = '0records') AND (password = 'anothertest')) LIMIT 1"]
    DB.reset
    m = @c.load(:id=>3, :username => "0records", :password => "anothertest")
    m.must_be :valid?
    DB.sqls.must_equal []

    m.username = '1'
    m.must_be :valid?
DB.sqls.must_equal ["SELECT count(*) AS count FROM items WHERE ((username = '1') AND (password = 'anothertest') AND (id != 3)) LIMIT 1"] m = @c.load(:id=>3, :username => "0records", :password => "anothertest") DB.reset m.password = '1' m.must_be :valid? DB.sqls.must_equal ["SELECT count(*) AS count FROM items WHERE ((username = '0records') AND (password = '1') AND (id != 3)) LIMIT 1"] DB.reset m.username = '2' m.must_be :valid? DB.sqls.must_equal ["SELECT count(*) AS count FROM items WHERE ((username = '2') AND (password = '1') AND (id != 3)) LIMIT 1"] end it "should not attempt a database query if the underlying columns have validation errors" do @c.columns(:id, :username, :password) @c.set_dataset DB[:items] @c.set_validations{errors.add(:username, 'foo'); validates_unique([:username, :password])} @c.dataset = @c.dataset.with_fetch(:v=>0) DB.reset m = @c.new(:username => "1", :password => "anothertest") m.wont_be :valid? DB.sqls.must_equal [] end it "should support validates_operator" do @c.set_validations{validates_operator(:>, 3, :value)} @m.value = 1 @m.wont_be :valid? @m.errors.full_messages.must_equal ['value is not > 3'] @m.value = 3 @m.wont_be :valid? @m.value = nil @m.wont_be :valid? @m.value = 4 @m.must_be :valid? end end �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/extensions/whitelist_security_spec.rb��������������������������������������������0000664�0000000�0000000�00000013173�14342141206�0023540�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe Sequel::Model, "#(set|update)_(all|only)" do before do @c = Class.new(Sequel::Model(:items)) do set_primary_key :id columns :x, :y, :z, :id end @c.plugin :whitelist_security @c.set_allowed_columns :x @c.strict_param_setting = false @o1 = @c.new DB.reset end it "should raise errors if not all hash fields can be set and strict_param_setting is true" do @c.strict_param_setting = true proc{@c.new.set_all(:x => 1, :y => 2, :z=>3, :use_transactions => false)}.must_raise(Sequel::MassAssignmentRestriction) (o = @c.new).set_all(:x => 1, :y => 2, :z=>3) o.values.must_equal(:x => 1, :y => 2, :z=>3) proc{@c.new.set_only({:x => 1, :y => 2, :z=>3, :id=>4}, :x, :y)}.must_raise(Sequel::MassAssignmentRestriction) proc{@c.new.set_only({:x => 1, :y => 2, :z=>3}, :x, :y)}.must_raise(Sequel::MassAssignmentRestriction) (o = @c.new).set_only({:x => 1, :y => 2}, :x, :y) o.values.must_equal(:x => 1, :y => 2) end it "#set_all should set all attributes including the primary key" do @o1.set_all(:x => 1, :y => 2, :z=>3, :id=>4) @o1.values.must_equal(:id =>4, :x => 1, :y => 2, :z=>3) end it "#set_all should set not set restricted fields" do @o1.use_transactions.must_equal false @o1.set_all(:x => 1, :use_transactions => true) @o1.use_transactions.must_equal false @o1.values.must_equal(:x => 1) end it "#set_only should only set given attributes" do @o1.set_only({:x => 1, :y => 2, :z=>3, :id=>4}, [:x, :y]) @o1.values.must_equal(:x => 1, :y => 2) @o1.set_only({:x => 4, :y => 5, :z=>6, :id=>7}, :x, :y) @o1.values.must_equal(:x => 4, :y 
    @o1.set_only({:x => 9, :y => 8, :z=>6, :id=>7}, :x, :y, :id)
    @o1.values.must_equal(:x => 9, :y => 8, :id=>7)
  end

  it "#update_all should update all attributes" do
    @c.new.update_all(:x => 1)
    DB.sqls.must_equal ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE id = 10"]
    @c.new.update_all(:y => 1)
    DB.sqls.must_equal ["INSERT INTO items (y) VALUES (1)", "SELECT * FROM items WHERE id = 10"]
    @c.new.update_all(:z => 1)
    DB.sqls.must_equal ["INSERT INTO items (z) VALUES (1)", "SELECT * FROM items WHERE id = 10"]
  end

  it "#update_only should only update given attributes" do
    @o1.update_only({:x => 1, :y => 2, :z=>3, :id=>4}, [:x])
    DB.sqls.must_equal ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE id = 10"]
    @c.new.update_only({:x => 1, :y => 2, :z=>3, :id=>4}, :x)
    DB.sqls.must_equal ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE id = 10"]
  end
end

describe Sequel::Model, "#(set|update)_(all|only) without set_allowed_columns" do
  before do
    @c = Class.new(Sequel::Model(:items)) do
      set_primary_key :id
      columns :x, :y, :z, :id
    end
    @c.plugin :whitelist_security
    @c.strict_param_setting = false
    @o1 = @c.new
    DB.reset
  end
end

describe Sequel::Model, ".strict_param_setting" do
  before do
    @c = Class.new(Sequel::Model(:blahblah)) do
      columns :x, :y, :z, :id
    end
    @c.plugin :whitelist_security
    @c.set_allowed_columns :x, :y
  end

  it "should raise an error if a missing/restricted column/method is accessed" do
    proc{@c.new(:z=>1)}.must_raise(Sequel::MassAssignmentRestriction)
    proc{@c.create(:z=>1)}.must_raise(Sequel::MassAssignmentRestriction)
    c = @c.new
    proc{c.set(:z=>1)}.must_raise(Sequel::MassAssignmentRestriction)
    proc{c.update(:z=>1)}.must_raise(Sequel::MassAssignmentRestriction)
    proc{c.set_all(:use_after_commit_rollback => false)}.must_raise(Sequel::MassAssignmentRestriction)
    proc{c.set_only({:x=>1}, :y)}.must_raise(Sequel::MassAssignmentRestriction)
    proc{c.update_all(:use_after_commit_rollback=>false)}.must_raise(Sequel::MassAssignmentRestriction)
    proc{c.update_only({:x=>1}, :y)}.must_raise(Sequel::MassAssignmentRestriction)
  end
end

describe Sequel::Model, ".allowed_columns" do
  before do
    @c = Class.new(Sequel::Model(:blahblah)) do
      columns :x, :y, :z
    end
    @c.plugin :whitelist_security
    @c.strict_param_setting = false
    @c.instance_variable_set(:@columns, [:x, :y, :z])
    DB.reset
  end

  it "should set the allowed columns correctly" do
    @c.allowed_columns.must_be_nil
    @c.set_allowed_columns :x
    @c.allowed_columns.must_equal [:x]
    @c.set_allowed_columns :x, :y
    @c.allowed_columns.must_equal [:x, :y]
  end

  it "should not change behavior if allowed_columns are not set" do
    i = @c.new(:x => 1, :y => 2, :z => 3)
    i.values.must_equal(:x => 1, :y => 2, :z => 3)
    i.set(:x => 4, :y => 5, :z => 6)
    i.values.must_equal(:x => 4, :y => 5, :z => 6)
    @c.dataset = @c.dataset.with_fetch(:x => 7)
    i = @c.new
    i.update(:x => 7)
    i.values.must_equal(:x => 7)
    DB.sqls.must_equal ["INSERT INTO blahblah (x) VALUES (7)", "SELECT * FROM blahblah WHERE id = 10"]
  end

  it "should only set allowed columns by default" do
    @c.set_allowed_columns :x, :y
    i = @c.new(:x => 1, :y => 2, :z => 3)
    i.values.must_equal(:x => 1, :y => 2)
    i.set(:x => 4, :y => 5, :z => 6)
    i.values.must_equal(:x => 4, :y => 5)
    @c.dataset = @c.dataset.with_fetch(:x => 7)
    i = @c.new
    i.update(:x => 7, :z => 9)
    i.values.must_equal(:x => 7)
    DB.sqls.must_equal ["INSERT INTO blahblah (x) VALUES (7)", "SELECT * FROM blahblah WHERE id = 10"]
  end
end

describe "Sequel::Model.freeze" do
  it "should freeze the model class and not allow any changes" do
    model = Class.new(Sequel::Model(:items))
    model.plugin :whitelist_security
    model.set_allowed_columns [:id]
    model.freeze
    model.allowed_columns.frozen?.must_equal true
  end
end

sequel-5.63.0/spec/extensions/xml_serializer_spec.rb

require_relative "spec_helper"

begin
  require 'nokogiri'
rescue LoadError
  warn "Skipping test of xml_serializer plugin: can't load nokogiri"
else
describe "Sequel::Plugins::XmlSerializer" do
  before do
    class ::Artist < Sequel::Model
      unrestrict_primary_key
      plugin :xml_serializer
      columns :id, :name
      @db_schema = {:id=>{:type=>:integer}, :name=>{:type=>:string}}
      one_to_many :albums
    end
    class ::Album < Sequel::Model
      unrestrict_primary_key
      attr_accessor :blah
      plugin :xml_serializer
      columns :id, :name, :artist_id
      @db_schema2 = @db_schema = {:id=>{:type=>:integer}, :name=>{:type=>:string}, :artist_id=>{:type=>:integer}}
      def self.set_dataset(*)
        super
        @db_schema = @db_schema2
      end
      many_to_one :artist
    end
    @artist = Artist.load(:id=>2, :name=>'YJM')
    @artist.associations[:albums] = []
    @album = Album.load(:id=>1, :name=>'RF')
    @album.artist = @artist
    @album.blah = 'Blah'
  end
  after do
    Object.send(:remove_const, :Artist)
    Object.send(:remove_const, :Album)
  end

  it "should round trip successfully" do
    Artist.from_xml(@artist.to_xml).must_equal @artist
    Album.from_xml(@album.to_xml).must_equal @album
  end

  it "should round trip successfully for namespaced models" do
    module XmlSerializerTest
      class Artist < Sequel::Model
        unrestrict_primary_key
        plugin :xml_serializer
        columns :id, :name
        @db_schema = {:id=>{:type=>:integer}, :name=>{:type=>:string}}
      end
    end
    artist = XmlSerializerTest::Artist.load(:id=>2, :name=>'YJM')
    XmlSerializerTest::Artist.from_xml(artist.to_xml).must_equal artist
  end

  it "should round trip successfully with empty strings" do
    artist = Artist.load(:id=>2, :name=>'')
    Artist.from_xml(artist.to_xml).must_equal artist
  end

  it "should round trip successfully with nil values" do
    artist = Artist.load(:id=>2, :name=>nil)
    Artist.from_xml(artist.to_xml).must_equal artist
  end

  it "should handle the :only option" do
    Artist.from_xml(@artist.to_xml(:only=>:name)).must_equal Artist.load(:name=>@artist.name)
    Album.from_xml(@album.to_xml(:only=>[:id, :name])).must_equal Album.load(:id=>@album.id, :name=>@album.name)
  end

  it "should handle the :except option" do
    Artist.from_xml(@artist.to_xml(:except=>:id)).must_equal Artist.load(:name=>@artist.name)
    Album.from_xml(@album.to_xml(:except=>[:id, :artist_id])).must_equal Album.load(:name=>@album.name)
  end
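  # An illustrative sketch (not part of this spec file): to_xml and from_xml
  # round trip a model instance, with :only/:except restricting the
  # serialized columns, e.g. for the Artist model defined above:
  #
  #   xml = artist.to_xml(:only => :name)
  #   Artist.from_xml(xml)   # => Artist instance with only the name set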
  it "should handle the :include option for associations" do
    Artist.from_xml(@artist.to_xml(:include=>:albums), :associations=>:albums).albums.must_equal [@album]
    Album.from_xml(@album.to_xml(:include=>:artist), :associations=>:artist).artist.must_equal @artist
  end

  it "should handle the :include option for arbitrary attributes" do
    Album.from_xml(@album.to_xml(:include=>:blah)).blah.must_equal @album.blah
  end

  it "should handle multiple inclusions using an array for the :include option" do
    a = Album.from_xml(@album.to_xml(:include=>[:blah, :artist]), :associations=>:artist)
    a.blah.must_equal @album.blah
    a.artist.must_equal @artist
  end

  it "should handle cascading using a hash for the :include option" do
    Artist.from_xml(@artist.to_xml(:include=>{:albums=>{:include=>:artist}}), :associations=>{:albums=>{:associations=>:artist}}).albums.map{|a| a.artist}.must_equal [@artist]
    Album.from_xml(@album.to_xml(:include=>{:artist=>{:include=>:albums}}), :associations=>{:artist=>{:associations=>:albums}}).artist.albums.must_equal [@album]
    Artist.from_xml(@artist.to_xml(:include=>{:albums=>{:only=>:name}}), :associations=>{:albums=>{:fields=>%w'name'}}).albums.must_equal [Album.load(:name=>@album.name)]
    Album.from_xml(@album.to_xml(:include=>{:artist=>{:except=>:name}}), :associations=>:artist).artist.must_equal Artist.load(:id=>@artist.id)
    Artist.from_xml(@artist.to_xml(:include=>{:albums=>{:include=>{:artist=>{:include=>:albums}}}}), :associations=>{:albums=>{:associations=>{:artist=>{:associations=>:albums}}}}).albums.map{|a| a.artist.albums}.must_equal [[@album]]
    Album.from_xml(@album.to_xml(:include=>{:artist=>{:include=>{:albums=>{:only=>:name}}}}), :associations=>{:artist=>{:associations=>{:albums=>{:fields=>%w'name'}}}}).artist.albums.must_equal [Album.load(:name=>@album.name)]
  end

  it "should handle the :include option cascading with an empty hash" do
    Album.from_xml(@album.to_xml(:include=>{:artist=>{}}), :associations=>:artist).artist.must_equal @artist
    Album.from_xml(@album.to_xml(:include=>{:blah=>{}})).blah.must_equal @album.blah
  end

  it "should support #from_xml to set column values" do
    @artist.from_xml('<album><name>AS</name></album>')
    @artist.name.must_equal 'AS'
    @artist.id.must_equal 2
  end

  it "should support a :name_proc option when serializing and deserializing" do
    Album.from_xml(@album.to_xml(:name_proc=>proc{|s| s.reverse}), :name_proc=>proc{|s| s.reverse}).must_equal @album
  end

  it "should support a :camelize option when serializing and :underscore option when deserializing" do
    Album.from_xml(@album.to_xml(:camelize=>true), :underscore=>true).must_equal @album
  end

  it "should support a :dasherize option when serializing and :underscore option when deserializing" do
    Album.from_xml(@album.to_xml(:dasherize=>true), :underscore=>true).must_equal @album
  end

  it "should support a block when serializing" do
    @artist.to_xml{|x| x.foo "bar"}.gsub(/\n */m, '').must_equal "<?xml version=\"1.0\"?><artist><id>2</id><name>YJM</name><foo>bar</foo></artist>"
  end

  it "should support an :encoding option when serializing" do
    @artist.to_xml(:encoding=>'UTF-8').gsub(/\n */m, '').must_equal "<?xml version=\"1.0\" encoding=\"UTF-8\"?><artist><id>2</id><name>YJM</name></artist>"
  end

  it "should support a :builder_opts option when serializing" do
    @artist.to_xml(:builder_opts=>{:encoding=>'UTF-8'}).gsub(/\n */m, '').must_equal "<?xml version=\"1.0\" encoding=\"UTF-8\"?><artist><id>2</id><name>YJM</name></artist>"
  end

  it "should support a :types option when serializing" do
    @artist.to_xml(:types=>true).gsub(/\n */m, '').must_equal "<?xml version=\"1.0\"?><artist><id type=\"integer\">2</id><name type=\"string\">YJM</name></artist>"
  end
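  # An illustrative sketch (not part of this spec file): tag names can be
  # customized when serializing and mapped back to column names when
  # deserializing, e.g.:
  #
  #   xml = album.to_xml(:dasherize => true)   # emits <artist-id> instead of <artist_id>
  #   Album.from_xml(xml, :underscore => true) # maps the dashed tags back to columns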
  it "should support a :root_name option when serializing" do
    @artist.to_xml(:root_name=>'ar').gsub(/\n */m, '').must_equal "<?xml version=\"1.0\"?><ar><id>2</id><name>YJM</name></ar>"
  end

  it "should support an :array_root_name option when serializing arrays" do
    artist = @artist
    Artist.dataset = Artist.dataset.with_extend{define_method(:all){[artist]}}
    Artist.to_xml(:array_root_name=>'ars', :root_name=>'ar').gsub(/\n */m, '').must_equal "<?xml version=\"1.0\"?><ars><ar><id>2</id><name>YJM</name></ar></ars>"
  end

  it "should raise an exception for xml tags that aren't associations, columns, or setter methods" do
    Album.send(:undef_method, :blah=)
    proc{Album.from_xml(@album.to_xml(:include=>:blah))}.must_raise(Sequel::MassAssignmentRestriction)
  end

  it "should support a to_xml class and dataset method" do
    album = @album
    Album.dataset = Album.dataset.with_extend{define_method(:all){[album]}}
    Album.array_from_xml(Album.to_xml).must_equal [@album]
    Album.array_from_xml(Album.to_xml(:include=>:artist), :associations=>:artist).map{|x| x.artist}.must_equal [@artist]
    Album.array_from_xml(Album.dataset.to_xml(:only=>:name)).must_equal [Album.load(:name=>@album.name)]
  end

  it "should have to_xml dataset method respect an :array option" do
    a = Album.load(:id=>1, :name=>'RF', :artist_id=>3)
    Album.array_from_xml(Album.to_xml(:array=>[a])).must_equal [a]
    a.associations[:artist] = artist = Artist.load(:id=>3, :name=>'YJM')
    Album.array_from_xml(Album.to_xml(:array=>[a], :include=>:artist), :associations=>:artist).first.artist.must_equal artist
    artist.associations[:albums] = [a]
    x = Artist.array_from_xml(Artist.to_xml(:array=>[artist], :include=>:albums), :associations=>[:albums])
    x.must_equal [artist]
    x.first.albums.must_equal [a]
  end

  it "should work correctly for eager graphed datasets" do
    ds = Album.dataset.eager_graph(:artist).with_fetch(:id=>1, :name=>'RF', :artist_id=>2, :artist_id_0=>2, :artist_name=>'YJM')
    albums = Album.array_from_xml(ds.to_xml(:only=>:name, :include=>{:artist=>{:only=>:name}}), :associations=>:artist)
    albums.must_equal [Album.load(:name=>@album.name)]
    albums.first.artist.must_equal Artist.load(:name=>@artist.name)
  end

  it "should raise an error if the dataset does not have a row_proc" do
    proc{Album.dataset.naked.to_xml}.must_raise(Sequel::Error)
  end

  it "should raise an error when parsing empty xml" do
    proc{Artist.from_xml("<?xml version=\"1.0\"?>\n")}.must_raise(Sequel::Error)
    proc{Artist.array_from_xml("<?xml version=\"1.0\"?>\n")}.must_raise(Sequel::Error)
  end

  it "should raise an error if attempting to set a restricted column and :all_columns is not used" do
    Artist.restrict_primary_key
    proc{Artist.from_xml(@artist.to_xml)}.must_raise(Sequel::MassAssignmentRestriction)
  end

  it "should raise an error if an unsupported association is passed in the :associations option" do
    Artist.association_reflections.delete(:albums)
    proc{Artist.from_xml(@artist.to_xml(:include=>:albums), :associations=>:albums)}.must_raise(Sequel::Error)
  end

  it "should raise an error if using from_xml and XML represents an array" do
    proc{Artist.from_xml(Artist.to_xml(:array=>[@artist]))}.must_raise(Sequel::MassAssignmentRestriction)
  end

  it "should raise an error if using array_from_xml and XML does not represent an array" do
    proc{Artist.array_from_xml(@artist.to_xml)}.must_raise(Sequel::Error)
  end

  it "should raise an error if using an unsupported :associations option" do
    proc{Artist.from_xml(@artist.to_xml, :associations=>'')}.must_raise(Sequel::Error)
  end
end
end
sequel-5.63.0/spec/files/
sequel-5.63.0/spec/files/bad_down_migration/
sequel-5.63.0/spec/files/bad_down_migration/001_create_alt_basic.rb

Sequel.migration do
  up{create_table(:sm11111){Integer :smc1}}
  down{get(:asdfsadfsa)}
end

sequel-5.63.0/spec/files/bad_down_migration/002_create_alt_advanced.rb

Sequel.migration do
  up{create_table(:sm22222){Integer :smc2}}
  down{drop_table(:sm22222)}
end
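# An illustrative sketch (not part of these fixture files): a reversible
# migration pairs an up block with a down block that undoes it. The "bad"
# fixtures above intentionally reference an undefined dataset
# (get(:asdfsadfsa)) so the migrator specs can exercise failure handling.
#
#   Sequel.migration do
#     up{create_table(:artists){primary_key :id; String :name}}
#     down{drop_table(:artists)}
#   end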
sequel-5.63.0/spec/files/bad_timestamped_migrations/
sequel-5.63.0/spec/files/bad_timestamped_migrations/1273253849_create_sessions.rb

class CreateSessions < Sequel::Migration
  def up
    create_table(:sm1111){Integer :smc1}
  end

  def down
    get(:asdfsadfas)
  end
end

sequel-5.63.0/spec/files/bad_timestamped_migrations/1273253851_create_nodes.rb

Class.new(Sequel::Migration) do
  def up
    create_table(:sm2222){Integer :smc2}
  end

  def down
    drop_table(:sm2222)
  end
end

sequel-5.63.0/spec/files/bad_timestamped_migrations/1273253853_3_create_users.rb

Sequel.migration do
  up{get(:asdfsadfas)}
end
sequel-5.63.0/spec/files/bad_up_migration/001_create_alt_basic.rb:
Sequel.migration do
  up{create_table(:sm11111){Integer :smc1}}
  down{drop_table(:sm11111)}
end

sequel-5.63.0/spec/files/bad_up_migration/002_create_alt_advanced.rb:
Sequel.migration do
  # Fixture: intentionally raises on the way up.
  up{get(:asdfassfd)}
end
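This directory is the mirror image of bad_down_migration: here 002's up raises instead. A sketch of what the specs can assert around it, assuming the default integer-migrator bookkeeping (it records each applied version in schema_info as it goes):

  Sequel::Migrator.run(DB, 'spec/files/bad_up_migration') rescue nil # 002's up raises
  DB[:schema_info].get(:version) # => 1; 001 was applied and recorded before the failure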
sequel-5.63.0/spec/files/convert_to_timestamp_migrations/001_create_sessions.rb:
class CreateSessions < Sequel::Migration
  def up
    create_table(:sm1111){Integer :smc1}
  end

  def down
    drop_table(:sm1111)
  end
end

sequel-5.63.0/spec/files/convert_to_timestamp_migrations/002_create_nodes.rb:
Class.new(Sequel::Migration) do
  def up
    create_table(:sm2222){Integer :smc2}
  end

  def down
    drop_table(:sm2222)
  end
end

sequel-5.63.0/spec/files/convert_to_timestamp_migrations/003_3_create_users.rb:
Sequel.migration do
  up{create_table(:sm3333){Integer :smc3}}
  down{drop_table(:sm3333)}
end

sequel-5.63.0/spec/files/convert_to_timestamp_migrations/1273253850_create_artists.rb:
class CreateArtists < Sequel::Migration
  def up
    create_table(:sm1122){Integer :smc12}
  end

  def down
    drop_table(:sm1122)
  end
end

sequel-5.63.0/spec/files/convert_to_timestamp_migrations/1273253852_create_albums.rb:
class CreateAlbums < Sequel::Migration
  def up
    create_table(:sm2233){Integer :smc23}
  end

  def down
    drop_table(:sm2233)
  end
end

sequel-5.63.0/spec/files/double_migration/001_create_sessions.rb:
class CreateSessions < Sequel::Migration
  def up
    create_table(:sm1111){Integer :smc1}
  end

  def down
    drop_table(:sm1111)
  end
end

sequel-5.63.0/spec/files/double_migration/002_create_nodes.rb:
# Fixture: two migrations in a single file, which the migrator rejects.
Class.new(Sequel::Migration) do
  def up
    create_table(:sm2222){Integer :smc2}
  end

  def down
    drop_table(:sm2222)
  end
end

Class.new(Sequel::Migration) do
  def up
    create_table(:sm2443){Integer :smc2}
  end

  def down
    drop_table(:sm2443)
  end
end

sequel-5.63.0/spec/files/double_migration/003_3_create_users.rb:
Sequel.migration do
  up{create_table(:sm3333){Integer :smc3}}
  down{drop_table(:sm3333)}
end

sequel-5.63.0/spec/files/duplicate_integer_migrations/001_create_alt_advanced.rb:
Sequel.migration do
  up{create_table(:sm33333){Integer :smc3}}
  down{drop_table(:sm33333)}
end

sequel-5.63.0/spec/files/duplicate_integer_migrations/001_create_alt_basic.rb:
Sequel.migration do
  up{create_table(:sm11111){Integer :smc1}}
  down{drop_table(:sm11111)}
end

(Both files above share version 001 -- the duplicate the directory name refers to.)

sequel-5.63.0/spec/files/duplicate_timestamped_migrations/1273253849_create_sessions.rb:
class CreateSessions < Sequel::Migration
  def up
    create_table(:sm1111){Integer :smc1}
  end

  def down
    drop_table(:sm1111)
  end
end

sequel-5.63.0/spec/files/duplicate_timestamped_migrations/1273253853_create_nodes.rb:
Class.new(Sequel::Migration) do
  def up
    create_table(:sm2222){Integer :smc2}
  end

  def down
    drop_table(:sm2222)
  end
end

sequel-5.63.0/spec/files/duplicate_timestamped_migrations/1273253853_create_users.rb:
Sequel.migration do
  up{create_table(:sm3333){Integer :smc3}}
  down{drop_table(:sm3333)}
end

(The last two files share timestamp 1273253853 -- the duplicate the directory name refers to.)

sequel-5.63.0/spec/files/empty_migration/001_create_sessions.rb:
class CreateSessions < Sequel::Migration
  def up
    create_table(:sm1111){Integer :smc1}
  end

  def down
    drop_table(:sm1111)
  end
end
sequel-5.63.0/spec/files/empty_migration/002_create_nodes.rb: (empty file)

sequel-5.63.0/spec/files/empty_migration/003_3_create_users.rb:
Sequel.migration do
  up{create_table(:sm3333){Integer :smc3}}
  down{drop_table(:sm3333)}
end

sequel-5.63.0/spec/files/empty_migration_folder/.gitkeep: (empty placeholder so git tracks the otherwise-empty directory)

sequel-5.63.0/spec/files/integer_migrations/001_create_sessions.rb:
class CreateSessions < Sequel::Migration
  def up
    create_table(:sm1111){Integer :smc1}
  end

  def down
    drop_table(:sm1111)
  end
end

sequel-5.63.0/spec/files/integer_migrations/002_create_nodes.rb:
Class.new(Sequel::Migration) do
  def up
    create_table(:sm2222){Integer :smc2}
  end

  def down
    drop_table(:sm2222)
  end
end

sequel-5.63.0/spec/files/integer_migrations/003_3_create_users.rb:
Sequel.migration do
  up{create_table(:sm3333){Integer :smc3}}
  down{drop_table(:sm3333)}
end
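The integer_migrations directory is the canonical happy path: three sequentially numbered files with symmetric up/down pairs. A sketch of applying them and partially reverting, again with the illustrative DB:

  Sequel::Migrator.run(DB, 'spec/files/integer_migrations')
  DB[:schema_info].get(:version) # => 3

  # :target migrates to an exact version, running down blocks as needed.
  Sequel::Migrator.run(DB, 'spec/files/integer_migrations', target: 1)
  DB.table_exists?(:sm2222) # => false once 002 has been reversed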
sequel-5.63.0/spec/files/interleaved_timestamped_migrations/1273253849_create_sessions.rb:
class CreateSessions < Sequel::Migration
  def up
    create_table(:sm1111){Integer :smc1}
  end

  def down
    drop_table(:sm1111)
  end
end

sequel-5.63.0/spec/files/interleaved_timestamped_migrations/1273253850_create_artists.rb:
class CreateArtists < Sequel::Migration
  def up
    create_table(:sm1122){Integer :smc12}
  end

  def down
    drop_table(:sm1122)
  end
end

sequel-5.63.0/spec/files/interleaved_timestamped_migrations/1273253851_create_nodes.rb:
Class.new(Sequel::Migration) do
  def up
    create_table(:sm2222){Integer :smc2}
  end

  def down
    drop_table(:sm2222)
  end
end

sequel-5.63.0/spec/files/interleaved_timestamped_migrations/1273253852_create_albums.rb:
class CreateAlbums < Sequel::Migration
  def up
    create_table(:sm2233){Integer :smc23}
  end

  def down
    drop_table(:sm2233)
  end
end

sequel-5.63.0/spec/files/interleaved_timestamped_migrations/1273253853_3_create_users.rb:
Sequel.migration do
  up{create_table(:sm3333){Integer :smc3}}
  down{drop_table(:sm3333)}
end

sequel-5.63.0/spec/files/missing_integer_migrations/001_create_alt_basic.rb:
Sequel.migration do
  up{create_table(:sm11111){Integer :smc1}}
  down{drop_table(:sm11111)}
end
sequel-5.63.0/spec/files/missing_integer_migrations/003_create_alt_advanced.rb:
Sequel.migration do
  up{create_table(:sm33333){Integer :smc3}}
  down{drop_table(:sm33333)}
end

sequel-5.63.0/spec/files/missing_integer_migrations_missing_last_version/001_create_alt_basic.rb:
Sequel.migration do
  up{create_table(:sm11111){Integer :smc1}}
  down{drop_table(:sm11111)}
end
sequel-5.63.0/spec/files/missing_timestamped_migrations/1273253849_create_sessions.rb:
class CreateSessions < Sequel::Migration
  def up
    create_table(:sm1111){Integer :smc1}
  end

  def down
    drop_table(:sm1111)
  end
end

sequel-5.63.0/spec/files/missing_timestamped_migrations/1273253853_3_create_users.rb:
Sequel.migration do
  up{create_table(:sm3333){Integer :smc3}}
  down{drop_table(:sm3333)}
end
sequel-5.63.0/spec/files/reversible_migrations/001_reversible.rb:
Sequel.migration do
  change do
    create_table(:a){Integer :a}
  end
end

sequel-5.63.0/spec/files/reversible_migrations/002_reversible.rb:
Sequel.migration do
  change do
    add_column :a, :b, String
  end
end

sequel-5.63.0/spec/files/reversible_migrations/003_reversible.rb:
Sequel.migration do
  change do
    rename_column :a, :b, :c
  end
end
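Each reversible fixture uses a change block, which Sequel runs as written when migrating up and inverts automatically when migrating down; fixtures 004-007 below extend the same idea to table renames and foreign keys. A hedged sketch of the reversal rule, using 002 as the example:

  # Up:   add_column :a, :b, String
  # Down: Sequel derives the inverse, drop_column :a, :b
  # Only operations with a known inverse (create_table, add_column,
  # add_index, rename_column/rename_table, add_foreign_key, ...) may
  # appear in a change block; reversing anything else raises.
  Sequel::Migrator.run(DB, 'spec/files/reversible_migrations')            # apply 001-007
  Sequel::Migrator.run(DB, 'spec/files/reversible_migrations', target: 0) # and undo them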
sequel-5.63.0/spec/files/reversible_migrations/004_reversible.rb:
Sequel.migration do
  change do
    rename_table :a, :b
  end
end

sequel-5.63.0/spec/files/reversible_migrations/005_reversible.rb:
Sequel.migration do
  change do
    alter_table(:b) do
      add_column :d, String
    end
    alter_table(:b) do
      rename_column :d, :e
    end
  end
end

sequel-5.63.0/spec/files/reversible_migrations/006_reversible.rb:
Sequel.migration do
  change do
    create_table(:c) do
      primary_key :id
    end
    alter_table(:b) do
      add_foreign_key :f, :c
    end
  end
end
sequel-5.63.0/spec/files/reversible_migrations/007_reversible.rb:
Sequel.migration do
  change do
    create_table(:d) do
      primary_key :id
    end
    alter_table(:b) do
      add_foreign_key :g, :d, :foreign_key_constraint_name=>:b_f_foo
    end
  end
end

sequel-5.63.0/spec/files/timestamped_migrations/1273253849_create_sessions.rb:
class CreateSessions < Sequel::Migration
  def up
    create_table(:sm1111){Integer :smc1}
  end

  def down
    drop_table(:sm1111)
  end
end

sequel-5.63.0/spec/files/timestamped_migrations/1273253851_create_nodes.rb:
Class.new(Sequel::Migration) do
  def up
    create_table(:sm2222){Integer :smc2}
  end

  def down
    drop_table(:sm2222)
  end
end
sequel-5.63.0/spec/files/timestamped_migrations/1273253853_3_create_users.rb:
Sequel.migration do
  up{create_table(:sm3333){Integer :smc3}}
  down{drop_table(:sm3333)}
end

sequel-5.63.0/spec/files/transaction_specified_migrations/001_create_alt_basic.rb:
Sequel.migration do
  transaction
  change{create_table(:sm11111){Integer :smc1}}
end

sequel-5.63.0/spec/files/transaction_specified_migrations/002_create_basic.rb:
Sequel.migration do
  no_transaction
  change{create_table(:sm){Integer :smc1}}
end
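The transaction and no_transaction declarations force or suppress the per-migration transaction the migrator would otherwise choose by default. The usual real-world reason to opt out is DDL that refuses to run inside a transaction; an illustrative sketch with PostgreSQL syntax assumed (the index and table names are hypothetical):

  Sequel.migration do
    no_transaction # CREATE INDEX CONCURRENTLY cannot run inside a transaction
    up{run 'CREATE INDEX CONCURRENTLY idx_items_name ON items (name)'}
    down{run 'DROP INDEX CONCURRENTLY idx_items_name'}
  end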
sequel-5.63.0/spec/files/transaction_unspecified_migrations/001_create_alt_basic.rb:
Sequel.migration do
  change{create_table(:sm11111){Integer :smc1}}
end

sequel-5.63.0/spec/files/transaction_unspecified_migrations/002_create_basic.rb:
Sequel.migration do
  change{create_table(:sm){Integer :smc1}}
end
�������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/files/unused_associations/�������������������������������������������������������0000775�0000000�0000000�00000000000�14342141206�0021216�5����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/files/unused_associations/run_tua.rb���������������������������������������������0000664�0000000�0000000�00000002433�14342141206�0023222�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������$:.unshift(File.expand_path('../../../lib', File.dirname(__FILE__))) require 'json' require 'coverage' Coverage.start(methods: true) require 'sequel' DB = Sequel.mock(:columns=>[:id, :t_id], :fetch=>{:id=>1, :t_id=>2}, :numrows=>1) opts = ENV['PLUGIN_OPTS'] ? Sequel.parse_json(ENV['PLUGIN_OPTS']).transform_keys(&:to_sym) : {} Sequel::Model.plugin :unused_associations, opts require_relative 'tua' eval($stdin.read) begin cov_data = if ENV['NO_COVERAGE_RESULT'] Sequel::Model.update_associations_coverage else Sequel::Model.update_associations_coverage(coverage_result: Coverage.result) end data = if ENV['NO_COVERAGE_DATA'] Sequel::Model.update_unused_associations_data elsif ENV['KEEP_COVERAGE'] Sequel::Model.update_unused_associations_data(:keep_coverage=>true) else Sequel::Model.update_unused_associations_data(coverage_data: cov_data) end result = if ENV['NO_DATA'] [Sequel::Model.unused_associations.sort, Sequel::Model.unused_association_options.sort] else [Sequel::Model.unused_associations(unused_associations_data: data).sort, Sequel::Model.unused_association_options(unused_associations_data: data).sort] end rescue => e result = "#{e.class}: #{e.message}\n#{e.backtrace.join("\n")}" end print Sequel.object_to_json(result) �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/files/unused_associations/tua.rb�������������������������������������������������0000664�0000000�0000000�00000002060�14342141206�0022332�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������class TUA < Sequel::Model many_to_one :a1, :key=>:t_id, :class=>self one_to_many :a2s, :key=>:t_id, 
class TUA < Sequel::Model
  many_to_one :a1, :key=>:t_id, :class=>self
  one_to_many :a2s, :key=>:t_id, :class=>self
  one_to_one :a3, :key=>:t_id, :class=>self

  a4s_opts = {:right_key=>:t_id, :left_key=>:t_id, :class=>self}
  include(a4s_opts[:methods_module] = Module.new) if ENV['A4S_METHODS_MODULE']
  a4s_opts[:read_only] = true if ENV['A4S_READ_ONLY']
  if ENV['A4S_NO_METHODS']
    a4s_opts[:no_dataset_method] = a4s_opts[:no_association_method] = true
    a4s_opts[:adder] = a4s_opts[:remover] = a4s_opts[:clearer] = nil
  end
  many_to_many :a4s, a4s_opts

  one_through_one :a5, :right_key=>:t_id, :left_key=>:t_id, :class=>self, :is_used=>!!ENV['A5_IS_USED']
  one_to_many :a6s, :key=>:t_id, :class=>self, :is_used=>!!ENV['A6S_IS_USED']

  O = load(:id=>1, :t_id=>2)

  class SC < self
    many_to_one :a7, :key=>:t_id, :class=>self
    O = load(:id=>1, :t_id=>2)
  end
end

# Class with no associations
class TUA2 < Sequel::Model
end

# Anonymous class with associations
Class.new(Sequel::Model(DB[:tuas])) do
  many_to_one :a1, :key=>:t_id, :class=>self
end

sequel-5.63.0/spec/files/uppercase_timestamped_migrations/
sequel-5.63.0/spec/files/uppercase_timestamped_migrations/1273253849_CREATE_SESSIONS.RB
class CreateSessions < Sequel::Migration
  def up
    create_table(:sm1111){Integer :smc1}
  end

  def down
    drop_table(:sm1111)
  end
end

sequel-5.63.0/spec/files/uppercase_timestamped_migrations/1273253851_CREATE_NODES.RB
Class.new(Sequel::Migration) do
  def up
    create_table(:sm2222){Integer :smc2}
  end

  def down
    drop_table(:sm2222)
  end
end
sequel-5.63.0/spec/files/uppercase_timestamped_migrations/1273253853_3_CREATE_USERS.RB
Sequel.migration do
  up{create_table(:sm3333){Integer :smc3}}
  down{drop_table(:sm3333)}
end

sequel-5.63.0/spec/guards_helper.rb
ENV['MT_NO_PLUGINS'] = '1' # Work around stupid autoloading of plugins
gem 'minitest'
require 'minitest/global_expectations/autorun'
require 'minitest/hooks/default'
require_relative "deprecation_helper"

def Sequel.guarded?(*checked)
  unless ENV['SEQUEL_NO_PENDING']
    checked.each do |c|
      case c
      when DB.database_type
        return c
      when Array
        case c.length
        when 1
          return c if c.first == DB.adapter_scheme
        when 2
          if c.first.is_a?(Proc)
            return c if c.last == DB.database_type && c.first.call(DB)
          elsif c.last.is_a?(Proc)
            return c if c.first == DB.adapter_scheme && c.last.call(DB)
          else
            return c if c.first == DB.adapter_scheme && c.last == DB.database_type
          end
        when 3
          return c if c[0] == DB.adapter_scheme && c[1] == DB.database_type && c[2].call(DB)
        end
      end
    end
  end
  false
end
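# cspecify works like +it+, but marks the spec as pending on the database
# types/adapters matched by Sequel.guarded?, skipping it unless
# SEQUEL_CHECK_PENDING is set.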
module Minitest::Spec::DSL
  def cspecify(message, *checked, &block)
    if pending = Sequel.guarded?(*checked)
      it(message) do
        proc{instance_exec(&block)}.must_raise(Exception) if ENV['SEQUEL_CHECK_PENDING']
        skip "Not yet working on #{Array(pending).map{|x| x.is_a?(Proc) ? :proc : x}.join(', ')}"
      end
    else
      it(message, &block)
    end
  end
end

class Minitest::HooksSpec
  def log
    begin
      DB.loggers << Logger.new(STDOUT)
      yield
    ensure
      DB.loggers.pop
    end
  end
end

sequel-5.63.0/spec/integration/
sequel-5.63.0/spec/integration/associations_test.rb
require_relative "spec_helper"
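# The modules below are shared example groups for association eager limit
# strategies.  Each is included into describe blocks that set @els (the
# association options under test, including :eager_limit_strategy) and
# fixtures such as @artist, @album, @tag, and the @pr/@diff_album/@same_album
# helper procs.  Schematically (hypothetical names, for illustration only):
#
#   describe "with window function strategy" do
#     before do
#       @els = {:eager_limit_strategy=>:window_function}
#       # ... create @artist, @album, @tag, @pr, etc. ...
#     end
#     include eager_limit_strategies
#   end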
one_to_one_eager_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "eager loading one_to_one associations should work correctly" do
    Artist.one_to_one :first_album, {:clone=>:first_album}.merge(@els) if @els
    Artist.one_to_one :last_album, {:clone=>:last_album}.merge(@els) if @els
    Artist.one_to_one :second_album, {:clone=>:second_album}.merge(@els) if @els && @els[:eager_limit_strategy] != :distinct_on
    @album.update(:artist => @artist)
    diff_album = @diff_album.call
    ar = @pr.call[1]
    a = Artist.eager(:first_album, :last_album, :second_album).order(:name).all
    a.must_equal [@artist, ar]
    a.first.first_album.must_equal @album
    a.first.last_album.must_equal diff_album
    a.first.second_album.must_equal diff_album
    a.last.first_album.must_be_nil
    a.last.last_album.must_be_nil
    a.last.second_album.must_be_nil
    # Check that no extra columns got added by the eager loading
    a.first.first_album.values.must_equal @album.values
    a.first.last_album.values.must_equal diff_album.values
    a.first.second_album.values.must_equal diff_album.values
    same_album = @same_album.call
    a = Artist.eager(:first_album).order(:name).all
    a.must_equal [@artist, ar]
    [@album, same_album].must_include(a.first.first_album)
    a.last.first_album.must_be_nil
  end
end

one_to_one_eager_graph_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "eager graphing one_to_one associations should work correctly" do
    @album.update(:artist => @artist)
    diff_album = @diff_album.call
    ar = @pr.call[1]
    ds = Artist.order(Sequel[:artists][:name])
    limit_strategy = {:limit_strategy=>@els[:eager_limit_strategy]}
    a = ds.eager_graph_with_options(:first_album, limit_strategy).all
    a.must_equal [@artist, ar]
    a.first.first_album.must_equal @album
    a.last.first_album.must_be_nil
    a.first.first_album.values.must_equal @album.values
    a = ds.eager_graph_with_options(:last_album, limit_strategy).all
    a = ds.eager_graph(:last_album).all
    a.must_equal [@artist, ar]
    a.first.last_album.must_equal diff_album
    a.last.last_album.must_be_nil
    a.first.last_album.values.must_equal diff_album.values
    if @els[:eager_limit_strategy] != :distinct_on && (@els[:eager_limit_strategy] != :correlated_subquery || Album.dataset.supports_offsets_in_correlated_subqueries?)
      a = ds.eager_graph_with_options(:second_album, limit_strategy).all
      a = ds.eager_graph(:second_album).all
      a.must_equal [@artist, ar]
      a.first.second_album.must_equal diff_album
      a.last.second_album.must_be_nil
      a.first.second_album.values.must_equal diff_album.values
    end
    same_album = @same_album.call
    a = ds.eager_graph_with_options(:first_album, limit_strategy).all
    a.must_equal [@artist, ar]
    [@album, same_album].must_include(a.first.first_album)
    a.last.first_album.must_be_nil
  end
end
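# Shared examples for one_to_many associations with limits and offsets
# (eager and eager_graph variants).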
one_to_many_eager_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "should correctly handle limits and offsets when eager loading one_to_many associations" do
    Artist.one_to_many :first_two_albums, {:clone=>:first_two_albums}.merge(@els) if @els
    Artist.one_to_many :second_two_albums, {:clone=>:second_two_albums}.merge(@els) if @els
    Artist.one_to_many :not_first_albums, {:clone=>:not_first_albums}.merge(@els) if @els
    Artist.one_to_many :last_two_albums, {:clone=>:last_two_albums}.merge(@els) if @els
    @album.update(:artist => @artist)
    middle_album = @middle_album.call
    diff_album = @diff_album.call
    ar = @pr.call[1]
    ars = Artist.eager(:first_two_albums, :second_two_albums, :not_first_albums, :last_two_albums).order(:name).all
    ars.must_equal [@artist, ar]
    ars.first.first_two_albums.must_equal [@album, middle_album]
    ars.first.second_two_albums.must_equal [middle_album, diff_album]
    ars.first.not_first_albums.must_equal [middle_album, diff_album]
    ars.first.last_two_albums.must_equal [diff_album, middle_album]
    ars.last.first_two_albums.must_equal []
    ars.last.second_two_albums.must_equal []
    ars.last.not_first_albums.must_equal []
    ars.last.last_two_albums.must_equal []
    # Check that no extra columns got added by the eager loading
    ars.first.first_two_albums.map{|x| x.values}.must_equal [@album, middle_album].map{|x| x.values}
    ars.first.second_two_albums.map{|x| x.values}.must_equal [middle_album, diff_album].map{|x| x.values}
    ars.first.not_first_albums.map{|x| x.values}.must_equal [middle_album, diff_album].map{|x| x.values}
    ars.first.last_two_albums.map{|x| x.values}.must_equal [diff_album, middle_album].map{|x| x.values}
  end
end

one_to_many_eager_graph_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "should correctly handle limits and offsets when eager graphing one_to_many associations" do
    @album.update(:artist => @artist)
    middle_album = @middle_album.call
    diff_album = @diff_album.call
    ar = @pr.call[1]
    ds = Artist.order(Sequel[:artists][:name])
    limit_strategy = {:limit_strategy=>@els[:eager_limit_strategy]}
    ars = ds.eager_graph_with_options(:first_two_albums, limit_strategy).all
    ars.must_equal [@artist, ar]
    ars.first.first_two_albums.must_equal [@album, middle_album]
    ars.last.first_two_albums.must_equal []
    ars.first.first_two_albums.map{|x| x.values}.must_equal [@album, middle_album].map{|x| x.values}
    if @els[:eager_limit_strategy] != :correlated_subquery || Album.dataset.supports_offsets_in_correlated_subqueries?
      ars = ds.eager_graph_with_options(:second_two_albums, limit_strategy).all
      ars.must_equal [@artist, ar]
      ars.first.second_two_albums.must_equal [middle_album, diff_album]
      ars.last.second_two_albums.must_equal []
      ars.first.second_two_albums.map{|x| x.values}.must_equal [middle_album, diff_album].map{|x| x.values}
      ars = ds.eager_graph_with_options(:not_first_albums, limit_strategy).all
      ars.must_equal [@artist, ar]
      ars.first.not_first_albums.must_equal [middle_album, diff_album]
      ars.last.not_first_albums.must_equal []
      ars.first.not_first_albums.map{|x| x.values}.must_equal [middle_album, diff_album].map{|x| x.values}
    end
    ars = ds.eager_graph_with_options(:last_two_albums, limit_strategy).all
    ars.must_equal [@artist, ar]
    ars.first.last_two_albums.must_equal [diff_album, middle_album]
    ars.last.last_two_albums.must_equal []
    ars.first.last_two_albums.map{|x| x.values}.must_equal [diff_album, middle_album].map{|x| x.values}
  end
end
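# Shared examples for one_through_one associations with offsets
# (eager and eager_graph variants).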
one_through_one_eager_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "should correctly handle offsets when eager loading one_through_one associations" do
    Album.one_through_one :first_tag, {:clone=>:first_tag}.merge(@els) if @els
    Album.one_through_one :second_tag, {:clone=>:second_tag}.merge(@els) if @els && @els[:eager_limit_strategy] != :distinct_on
    Album.one_through_one :last_tag, {:clone=>:last_tag}.merge(@els) if @els
    tu, tv = @other_tags.call
    al = @pr.call.first
    als = Album.eager(:first_tag, :second_tag, :last_tag).order(:name).all
    als.must_equal [@album, al]
    als.first.first_tag.must_equal @tag
    als.first.second_tag.must_equal tu
    als.first.last_tag.must_equal tv
    als.last.first_tag.must_be_nil
    als.last.second_tag.must_be_nil
    als.last.last_tag.must_be_nil
    # Check that no extra columns got added by the eager loading
    als.first.first_tag.values.must_equal @tag.values
    als.first.second_tag.values.must_equal tu.values
    als.first.last_tag.values.must_equal tv.values
  end
end

one_through_one_eager_graph_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "should correctly handle offsets when eager graphing one_through_one associations" do
    tu, tv = @other_tags.call
    al = @pr.call.first
    ds = Album.order(Sequel[:albums][:name])
    limit_strategy = {:limit_strategy=>@els[:eager_limit_strategy]}
    als = ds.eager_graph_with_options(:first_tag, limit_strategy).all
    als.must_equal [@album, al]
    als.first.first_tag.must_equal @tag
    als.last.first_tag.must_be_nil
    als.first.first_tag.values.must_equal @tag.values
    als = ds.eager_graph_with_options(:second_tag, @els[:eager_limit_strategy] != :distinct_on ? limit_strategy : {}).all
    als.must_equal [@album, al]
    als.first.second_tag.must_equal tu
    als.last.second_tag.must_be_nil
    als.first.second_tag.values.must_equal tu.values
    als = ds.eager_graph_with_options(:last_tag, limit_strategy).all
    als.must_equal [@album, al]
    als.first.last_tag.must_equal tv
    als.last.last_tag.must_be_nil
    als.first.last_tag.values.must_equal tv.values
  end
end
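# Shared examples for many_to_many associations with limits and offsets
# (eager and eager_graph variants).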
many_to_many_eager_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "should correctly handle limits and offsets when eager loading many_to_many associations" do
    Album.send @many_to_many_method||:many_to_many, :first_two_tags, {:clone=>:first_two_tags}.merge(@els) if @els
    Album.send @many_to_many_method||:many_to_many, :second_two_tags, {:clone=>:second_two_tags}.merge(@els) if @els
    Album.send @many_to_many_method||:many_to_many, :not_first_tags, {:clone=>:not_first_tags}.merge(@els) if @els
    Album.send @many_to_many_method||:many_to_many, :last_two_tags, {:clone=>:last_two_tags}.merge(@els) if @els
    tu, tv = @other_tags.call
    al = @pr.call.first
    al.add_tag(tu)
    als = Album.eager(:first_two_tags, :second_two_tags, :not_first_tags, :last_two_tags).order(:name).all
    als.must_equal [@album, al]
    als.first.first_two_tags.must_equal [@tag, tu]
    als.first.second_two_tags.must_equal [tu, tv]
    als.first.not_first_tags.must_equal [tu, tv]
    als.first.last_two_tags.must_equal [tv, tu]
    als.last.first_two_tags.must_equal [tu]
    als.last.second_two_tags.must_equal []
    als.last.last_two_tags.must_equal [tu]
    # Check that no extra columns got added by the eager loading
    als.first.first_two_tags.map{|x| x.values}.must_equal [@tag, tu].map{|x| x.values}
    als.first.second_two_tags.map{|x| x.values}.must_equal [tu, tv].map{|x| x.values}
    als.first.not_first_tags.map{|x| x.values}.must_equal [tu, tv].map{|x| x.values}
    als.first.last_two_tags.map{|x| x.values}.must_equal [tv, tu].map{|x| x.values}
  end
end

many_to_many_eager_graph_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "should correctly handle limits and offsets when eager loading many_to_many associations" do
    tu, tv = @other_tags.call
    al = @pr.call.first
    al.add_tag(tu)
    ds = Album.order(Sequel[:albums][:name])
    limit_strategy = {:limit_strategy=>(@els||{})[:eager_limit_strategy]}
    als = ds.eager_graph_with_options(:first_two_tags, limit_strategy).all
    als.must_equal [@album, al]
    als.first.first_two_tags.must_equal [@tag, tu]
    als.last.first_two_tags.must_equal [tu]
    als.first.first_two_tags.map{|x| x.values}.must_equal [@tag, tu].map{|x| x.values}
    als = ds.eager_graph_with_options(:second_two_tags, limit_strategy).all
    als.must_equal [@album, al]
    als.first.second_two_tags.must_equal [tu, tv]
    als.last.second_two_tags.must_equal []
    als.first.second_two_tags.map{|x| x.values}.must_equal [tu, tv].map{|x| x.values}
    als = ds.eager_graph_with_options(:not_first_tags, limit_strategy).all
    als.must_equal [@album, al]
    als.first.not_first_tags.must_equal [tu, tv]
    als.last.not_first_tags.must_equal []
    als.first.not_first_tags.map{|x| x.values}.must_equal [tu, tv].map{|x| x.values}
    als = ds.eager_graph_with_options(:last_two_tags, limit_strategy).all
    als.must_equal [@album, al]
    als.first.last_two_tags.must_equal [tv, tu]
    als.last.last_two_tags.must_equal [tu]
    als.first.last_two_tags.map{|x| x.values}.must_equal [tv, tu].map{|x| x.values}
  end
end
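# Shared examples for many_through_many associations with limits and offsets
# (eager and eager_graph variants).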
many_through_many_eager_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "should correctly handle limits and offsets when eager loading many_through_many associations" do
    Artist.many_through_many :first_two_tags, {:clone=>:first_two_tags}.merge(@els) if @els
    Artist.many_through_many :second_two_tags, {:clone=>:second_two_tags}.merge(@els) if @els
    Artist.many_through_many :not_first_tags, {:clone=>:not_first_tags}.merge(@els) if @els
    Artist.many_through_many :last_two_tags, {:clone=>:last_two_tags}.merge(@els) if @els
    @album.update(:artist => @artist)
    tu, tv = @other_tags.call
    al, ar, _ = @pr.call
    al.update(:artist=>ar)
    al.add_tag(tu)
    ars = Artist.eager(:first_two_tags, :second_two_tags, :not_first_tags, :last_two_tags).order(:name).all
    ars.must_equal [@artist, ar]
    ars.first.first_two_tags.must_equal [@tag, tu]
    ars.first.second_two_tags.must_equal [tu, tv]
    ars.first.not_first_tags.must_equal [tu, tv]
    ars.first.last_two_tags.must_equal [tv, tu]
    ars.last.first_two_tags.must_equal [tu]
    ars.last.second_two_tags.must_equal []
    ars.last.not_first_tags.must_equal []
    ars.last.last_two_tags.must_equal [tu]
    # Check that no extra columns got added by the eager loading
    ars.first.first_two_tags.map{|x| x.values}.must_equal [@tag, tu].map{|x| x.values}
    ars.first.second_two_tags.map{|x| x.values}.must_equal [tu, tv].map{|x| x.values}
    ars.first.not_first_tags.map{|x| x.values}.must_equal [tu, tv].map{|x| x.values}
    ars.first.last_two_tags.map{|x| x.values}.must_equal [tv, tu].map{|x| x.values}
  end
end

many_through_many_eager_graph_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "should correctly handle limits and offsets when eager loading many_through_many associations" do
    @album.update(:artist => @artist)
    tu, tv = @other_tags.call
    al, ar, _ = @pr.call
    al.update(:artist=>ar)
    al.add_tag(tu)
    ds = Artist.order(Sequel[:artists][:name])
    limit_strategy = {:limit_strategy=>@els[:eager_limit_strategy]}
    ars = ds.eager_graph_with_options(:first_two_tags, limit_strategy).all
    ars.must_equal [@artist, ar]
    ars.first.first_two_tags.must_equal [@tag, tu]
    ars.last.first_two_tags.must_equal [tu]
    ars.first.first_two_tags.map{|x| x.values}.must_equal [@tag, tu].map{|x| x.values}
    ars = ds.eager_graph_with_options(:second_two_tags, limit_strategy).all
    ars.must_equal [@artist, ar]
    ars.first.second_two_tags.must_equal [tu, tv]
    ars.last.second_two_tags.must_equal []
    ars.first.second_two_tags.map{|x| x.values}.must_equal [tu, tv].map{|x| x.values}
    ars = ds.eager_graph_with_options(:not_first_tags, limit_strategy).all
    ars.must_equal [@artist, ar]
    ars.first.not_first_tags.must_equal [tu, tv]
    ars.last.not_first_tags.must_equal []
    ars.first.not_first_tags.map{|x| x.values}.must_equal [tu, tv].map{|x| x.values}
    ars = ds.eager_graph_with_options(:last_two_tags, limit_strategy).all
    ars.must_equal [@artist, ar]
    ars.first.last_two_tags.must_equal [tv, tu]
    ars.last.last_two_tags.must_equal [tu]
    ars.first.last_two_tags.map{|x| x.values}.must_equal [tv, tu].map{|x| x.values}
  end
end
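# Shared examples for one_through_many associations with offsets (eager and
# eager_graph variants), followed by combined modules that pull in all of the
# eager (respectively eager_graph) example groups above.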
one_through_many_eager_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "should correctly handle offsets when eager loading one_through_many associations" do
    Artist.one_through_many :first_tag, {:clone=>:first_tag}.merge(@els) if @els
    Artist.one_through_many :second_tag, {:clone=>:second_tag}.merge(@els) if @els && @els[:eager_limit_strategy] != :distinct_on
    Artist.one_through_many :last_tag, {:clone=>:last_tag}.merge(@els) if @els
    @album.update(:artist => @artist)
    tu, tv = @other_tags.call
    al, ar, _ = @pr.call
    al.update(:artist=>ar)
    al.add_tag(tu)
    ars = Artist.eager(:first_tag, :second_tag, :last_tag).order(:name).all
    ars.must_equal [@artist, ar]
    ars.first.first_tag.must_equal @tag
    ars.first.second_tag.must_equal tu
    ars.first.last_tag.must_equal tv
    ars.last.first_tag.must_equal tu
    ars.last.second_tag.must_be_nil
    ars.last.last_tag.must_equal tu
    # Check that no extra columns got added by the eager loading
    ars.first.first_tag.values.must_equal @tag.values
    ars.first.second_tag.values.must_equal tu.values
    ars.first.last_tag.values.must_equal tv.values
  end
end

one_through_many_eager_graph_limit_strategies = Module.new do
  extend Minitest::Spec::DSL

  it "should correctly handle offsets when eager graphing one_through_many associations" do
    @album.update(:artist => @artist)
    tu, tv = @other_tags.call
    al, ar, _ = @pr.call
    al.update(:artist=>ar)
    al.add_tag(tu)
    ds = Artist.order(Sequel[:artists][:name])
    limit_strategy = {:limit_strategy=>@els[:eager_limit_strategy]}
    ars = ds.eager_graph_with_options(:first_tag, limit_strategy).all
    ars.must_equal [@artist, ar]
    ars.first.first_tag.must_equal @tag
    ars.last.first_tag.must_equal tu
    ars.first.first_tag.values.must_equal @tag.values
    ars = ds.eager_graph_with_options(:second_tag, @els[:eager_limit_strategy] != :distinct_on ? limit_strategy : {}).all
    ars.must_equal [@artist, ar]
    ars.first.second_tag.must_equal tu
    ars.last.second_tag.must_be_nil
    ars.first.second_tag.values.must_equal tu.values
    ars = ds.eager_graph_with_options(:last_tag, limit_strategy).all
    ars.must_equal [@artist, ar]
    ars.first.last_tag.must_equal tv
    ars.last.last_tag.must_equal tu
    ars.first.last_tag.values.must_equal tv.values
  end
end

eager_limit_strategies = Module.new do
  include one_to_one_eager_limit_strategies
  include one_to_many_eager_limit_strategies
  include many_to_many_eager_limit_strategies
  include one_through_one_eager_limit_strategies
  include many_through_many_eager_limit_strategies
  include one_through_many_eager_limit_strategies
end

eager_graph_limit_strategies = Module.new do
  include one_to_one_eager_graph_limit_strategies
  include one_to_many_eager_graph_limit_strategies
  include many_to_many_eager_graph_limit_strategies
  include one_through_one_eager_graph_limit_strategies
  include many_through_many_eager_graph_limit_strategies
  include one_through_many_eager_graph_limit_strategies
end
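# Shared examples for filtering and excluding model datasets by association
# (including association datasets and associations with conditions), and for
# association inner/left joins.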
filter_by_associations = Module.new do
  extend Minitest::Spec::DSL

  it "should handle association inner joins" do
    @Artist.association_join(:albums).select(Sequel.qualify(@Artist.first_source, @Artist.columns.first)).all.must_equal []
    @Artist.association_join(:first_album).select(Sequel.qualify(@Artist.first_source, @Artist.columns.first)).all.must_equal []
    @Album.association_join(:artist).select(Sequel.qualify(@Album.first_source, @Album.columns.first)).all.must_equal []
    @Album.association_join(:tags).select(Sequel.qualify(@Album.first_source, @Album.columns.first)).all.must_equal []
    @Album.association_join(:alias_tags).select(Sequel.qualify(@Album.first_source, @Album.columns.first)).all.must_equal []
    @Tag.association_join(:albums).select(Sequel.qualify(@Tag.first_source, @Tag.columns.first)).all.must_equal []
    unless @no_many_through_many
      @Artist.association_join(:tags).select(Sequel.qualify(@Artist.first_source, @Artist.columns.first)).all.must_equal []
      @Artist.association_join(:first_tag).select(Sequel.qualify(@Artist.first_source, @Artist.columns.first)).all.must_equal []
    end
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    @Artist.association_join(:albums).select_all(:artists).all.must_equal [@artist]
    @Artist.association_join(:first_album).select_all(:artists).all.must_equal [@artist]
    @Album.association_join(:artist).select_all(:albums).all.must_equal [@album]
    @Album.association_join(:tags).select_all(:albums).all.must_equal [@album]
    @Album.association_join(:alias_tags).select_all(:albums).all.must_equal [@album]
    @Tag.association_join(:albums).select_all(:tags).all.must_equal [@tag]
    unless @no_many_through_many
      @Artist.association_join(:tags).select_all(:artists).all.must_equal [@artist]
      @Artist.association_join(:first_tag).select_all(:artists).all.must_equal [@artist]
    end
    @Artist.association_join(:albums).select_all(:albums).naked.all.must_equal [@album.values]
    @Artist.association_join(:first_album).select_all(:first_album).naked.all.must_equal [@album.values]
    @Album.association_join(:artist).select_all(:artist).naked.all.must_equal [@artist.values]
    @Album.association_join(:tags).select_all(:tags).naked.all.must_equal [@tag.values]
    @Album.association_join(:alias_tags).select_all(:alias_tags).naked.all.must_equal [@tag.values]
    @Tag.association_join(:albums).select_all(:albums).naked.all.must_equal [@album.values]
    unless @no_many_through_many
      @Artist.association_join(:tags).select_all(:tags).naked.all.must_equal [@tag.values]
      @Artist.association_join(:first_tag).select_all(:first_tag).naked.all.must_equal [@tag.values]
    end
  end

  it "should handle association left joins" do
    @Artist.association_left_join(:albums).select_all(:artists).all.must_equal [@artist]
    @Artist.association_left_join(:first_album).select_all(:artists).all.must_equal [@artist]
    @Album.association_left_join(:artist).select_all(:albums).all.must_equal [@album]
    @Album.association_left_join(:tags).select_all(:albums).all.must_equal [@album]
    @Album.association_left_join(:alias_tags).select_all(:albums).all.must_equal [@album]
    @Tag.association_left_join(:albums).select_all(:tags).all.must_equal [@tag]
    unless @no_many_through_many
      @Artist.association_left_join(:tags).select_all(:artists).all.must_equal [@artist]
      @Artist.association_left_join(:first_tag).select_all(:artists).all.must_equal [@artist]
    end
    nil_hash = lambda{|obj| [obj.values.keys.inject({}){|h,k| h[k] = nil; h}]}
    @Artist.association_left_join(:albums).select_all(:albums).naked.all.must_equal nil_hash[@album]
    @Artist.association_left_join(:first_album).select_all(:first_album).naked.all.must_equal nil_hash[@album]
    @Album.association_left_join(:artist).select_all(:artist).naked.all.must_equal nil_hash[@artist]
    @Album.association_left_join(:tags).select_all(:tags).naked.all.must_equal nil_hash[@tag]
    @Album.association_left_join(:alias_tags).select_all(:alias_tags).naked.all.must_equal nil_hash[@tag]
    @Tag.association_left_join(:albums).select_all(:albums).naked.all.must_equal nil_hash[@album]
    unless @no_many_through_many
      @Artist.association_left_join(:tags).select_all(:tags).naked.all.must_equal nil_hash[@tag]
      @Artist.association_left_join(:first_tag).select_all(:first_tag).naked.all.must_equal nil_hash[@tag]
    end
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    @Artist.association_left_join(:albums).select_all(:albums).naked.all.must_equal [@album.values]
    @Artist.association_left_join(:first_album).select_all(:first_album).naked.all.must_equal [@album.values]
    @Album.association_left_join(:artist).select_all(:artist).naked.all.must_equal [@artist.values]
    @Album.association_left_join(:tags).select_all(:tags).naked.all.must_equal [@tag.values]
    @Album.association_left_join(:alias_tags).select_all(:alias_tags).naked.all.must_equal [@tag.values]
    @Tag.association_left_join(:albums).select_all(:albums).naked.all.must_equal [@album.values]
    unless @no_many_through_many
      @Artist.association_left_join(:tags).select_all(:tags).naked.all.must_equal [@tag.values]
      @Artist.association_left_join(:first_tag).select_all(:first_tag).naked.all.must_equal [@tag.values]
    end
  end

  it "should work correctly when filtering by associations" do
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    @Artist.filter(:albums=>@album).all.must_equal [@artist]
    @Artist.filter(:first_album=>@album).all.must_equal [@artist]
    unless @no_many_through_many
      @Artist.filter(:tags=>@tag).all.must_equal [@artist]
      @Artist.filter(:first_tag=>@tag).all.must_equal [@artist]
    end
    @Album.filter(:artist=>@artist).all.must_equal [@album]
    @Album.filter(:tags=>@tag).all.must_equal [@album]
    @Album.filter(:alias_tags=>@tag).all.must_equal [@album]
    @Tag.filter(:albums=>@album).all.must_equal [@tag]
    @Album.filter(:artist=>@artist, :tags=>@tag).all.must_equal [@album]
    @artist.albums_dataset.filter(:tags=>@tag).all.must_equal [@album]
  end

  it "should work correctly when excluding by associations" do
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    album, artist, tag = @pr.call
    @Artist.exclude(:albums=>@album).all.must_equal [artist]
    @Artist.exclude(:first_album=>@album).all.must_equal [artist]
    unless @no_many_through_many
      @Artist.exclude(:tags=>@tag).all.must_equal [artist]
      @Artist.exclude(:first_tag=>@tag).all.must_equal [artist]
    end
    @Album.exclude(:artist=>@artist).all.must_equal [album]
    @Album.exclude(:tags=>@tag).all.must_equal [album]
    @Album.exclude(:alias_tags=>@tag).all.must_equal [album]
    @Tag.exclude(:albums=>@album).all.must_equal [tag]
    @Album.exclude(:artist=>@artist, :tags=>@tag).all.must_equal [album]
  end
  it "should work correctly when filtering by associations with conditions" do
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    @Artist.filter(:a_albums=>@album).all.must_equal [@artist]
    @Artist.filter(:first_a_album=>@album).all.must_equal [@artist]
    @album.update(:name=>'Foo')
    @Artist.filter(:a_albums=>@album).all.must_equal []
    @Artist.filter(:first_a_album=>@album).all.must_equal []
    @Album.filter(:a_artist=>@artist).all.must_equal [@album]
    @artist.update(:name=>'Foo')
    @Album.filter(:a_artist=>@artist).all.must_equal []
    @Album.filter(:t_tags=>@tag).all.must_equal [@album]
    @Album.filter(:alias_t_tags=>@tag).all.must_equal [@album]
    unless @no_many_through_many
      @Album.filter(:t_tag=>@tag).all.must_equal [@album]
      @Album.filter(:alias_t_tag=>@tag).all.must_equal [@album]
      @Artist.filter(:t_tags=>@tag).all.must_equal [@artist]
      @Artist.filter(:t_tag=>@tag).all.must_equal [@artist]
    end
    @tag.update(:name=>'Foo')
    @Album.filter(:t_tags=>@tag).all.must_equal []
    @Album.filter(:alias_t_tags=>@tag).all.must_equal []
    unless @no_many_through_many
      @Album.filter(:t_tag=>@tag).all.must_equal []
      @Album.filter(:alias_t_tag=>@tag).all.must_equal []
      @Artist.filter(:t_tags=>@tag).all.must_equal []
      @Artist.filter(:t_tag=>@tag).all.must_equal []
    end
  end

  it "should work correctly when excluding by associations with conditions" do
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    @Artist.exclude(:a_albums=>@album).all.must_equal []
    @Artist.exclude(:first_a_album=>@album).all.must_equal []
    @album.update(:name=>'Foo')
    @Artist.exclude(:a_albums=>@album).all.must_equal [@artist]
    @Artist.exclude(:first_a_album=>@album).all.must_equal [@artist]
    @Album.exclude(:a_artist=>@artist).all.must_equal []
    @artist.update(:name=>'Foo')
    @Album.exclude(:a_artist=>@artist).all.must_equal [@album]
    @Album.exclude(:t_tags=>@tag).all.must_equal []
    @Album.exclude(:alias_t_tags=>@tag).all.must_equal []
    unless @no_many_through_many
      @Album.exclude(:t_tag=>@tag).all.must_equal []
      @Album.exclude(:alias_t_tag=>@tag).all.must_equal []
      @Artist.exclude(:t_tags=>@tag).all.must_equal []
      @Artist.exclude(:t_tag=>@tag).all.must_equal []
    end
    @tag.update(:name=>'Foo')
    @Album.exclude(:t_tags=>@tag).all.must_equal [@album]
    @Album.exclude(:alias_t_tags=>@tag).all.must_equal [@album]
    unless @no_many_through_many
      @Album.exclude(:t_tag=>@tag).all.must_equal [@album]
      @Album.exclude(:alias_t_tag=>@tag).all.must_equal [@album]
      @Artist.exclude(:t_tags=>@tag).all.must_equal [@artist]
      @Artist.exclude(:t_tag=>@tag).all.must_equal [@artist]
    end
  end

  it "should work correctly when filtering by multiple associations" do
    album, artist, tag = @pr.call
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    @Artist.filter(:albums=>[@album, album]).all.must_equal [@artist]
    @Artist.filter(:first_album=>[@album, album]).all.must_equal [@artist]
    @Album.filter(:artist=>[@artist, artist]).all.must_equal [@album]
    @Album.filter(:tags=>[@tag, tag]).all.must_equal [@album]
    @Album.filter(:alias_tags=>[@tag, tag]).all.must_equal [@album]
    @Tag.filter(:albums=>[@album, album]).all.must_equal [@tag]
    @Album.filter(:artist=>[@artist, artist], :tags=>[@tag, tag]).all.must_equal [@album]
    @artist.albums_dataset.filter(:tags=>[@tag, tag]).all.must_equal [@album]
    unless @no_many_through_many
      @Artist.filter(:tags=>[@tag, tag]).all.must_equal [@artist]
      @Artist.filter(:first_tag=>[@tag, tag]).all.must_equal [@artist]
    end
    album.add_tag(tag)
    @Artist.filter(:albums=>[@album, album]).all.must_equal [@artist]
    @Artist.filter(:first_album=>[@album, album]).all.must_equal [@artist]
    @Album.filter(:artist=>[@artist, artist]).all.must_equal [@album]
    @Album.filter(:tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.filter(:alias_tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Tag.filter(:albums=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [@tag, tag]
    @Album.filter(:artist=>[@artist, artist], :tags=>[@tag, tag]).all.must_equal [@album]
    unless @no_many_through_many
      @Artist.filter(:tags=>[@tag, tag]).all.must_equal [@artist]
      @Artist.filter(:first_tag=>[@tag, tag]).all.must_equal [@artist]
    end
    album.update(:artist => artist)
    @Artist.filter(:albums=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Artist.filter(:first_album=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Album.filter(:artist=>[@artist, artist]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.filter(:tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.filter(:alias_tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Tag.filter(:albums=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [@tag, tag]
    @Album.filter(:artist=>[@artist, artist], :tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    unless @no_many_through_many
      @Artist.filter(:tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
      @Artist.filter(:first_tag=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    end
  end

  it "should work correctly when excluding by multiple associations" do
    album, artist, tag = @pr.call
    @Artist.exclude(:albums=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Artist.exclude(:first_album=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Album.exclude(:artist=>[@artist, artist]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.exclude(:tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.exclude(:alias_tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Tag.exclude(:albums=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [@tag, tag]
    @Album.exclude(:artist=>[@artist, artist], :tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    unless @no_many_through_many
      @Artist.exclude(:tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
      @Artist.exclude(:first_tag=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    end
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    @Artist.exclude(:albums=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [artist]
    @Artist.exclude(:first_album=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [artist]
    @Album.exclude(:artist=>[@artist, artist]).all.sort_by{|x| x.pk}.must_equal [album]
    @Album.exclude(:tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [album]
    @Album.exclude(:alias_tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [album]
    @Tag.exclude(:albums=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [tag]
    @Album.exclude(:artist=>[@artist, artist], :tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [album]
    unless @no_many_through_many
      @Artist.exclude(:tags=>[@tag, tag]).all.must_equal [artist]
      @Artist.exclude(:first_tag=>[@tag, tag]).all.must_equal [artist]
    end
    album.add_tag(tag)
    @Artist.exclude(:albums=>[@album, album]).all.must_equal [artist]
    @Artist.exclude(:first_album=>[@album, album]).all.must_equal [artist]
    @Album.exclude(:artist=>[@artist, artist]).all.must_equal [album]
    @Album.exclude(:tags=>[@tag, tag]).all.must_equal []
    @Album.exclude(:alias_tags=>[@tag, tag]).all.must_equal []
    @Tag.exclude(:albums=>[@album, album]).all.must_equal []
    @Album.exclude(:artist=>[@artist, artist], :tags=>[@tag, tag]).all.must_equal [album]
    unless @no_many_through_many
      @Artist.exclude(:tags=>[@tag, tag]).all.must_equal [artist]
      @Artist.exclude(:first_tag=>[@tag, tag]).all.must_equal [artist]
    end
    album.update(:artist => artist)
    @Artist.exclude(:albums=>[@album, album]).all.must_equal []
    @Artist.exclude(:first_album=>[@album, album]).all.must_equal []
    @Album.exclude(:artist=>[@artist, artist]).all.must_equal []
    @Album.exclude(:tags=>[@tag, tag]).all.must_equal []
    @Album.exclude(:alias_tags=>[@tag, tag]).all.must_equal []
    @Tag.exclude(:albums=>[@album, album]).all.must_equal []
    @Album.exclude(:artist=>[@artist, artist], :tags=>[@tag, tag]).all.must_equal []
    unless @no_many_through_many
      @Artist.exclude(:tags=>[@tag, tag]).all.must_equal []
      @Artist.exclude(:first_tag=>[@tag, tag]).all.must_equal []
    end
  end
  it "should work correctly when filtering associations with conditions with multiple objects" do
    album, artist, tag = @pr.call
    album.update(:name=>@album.name)
    artist.update(:name=>@artist.name)
    tag.update(:name=>@tag.name)
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    album.update(:artist => @artist)
    tag.add_album(@album)
    @Artist.filter(:a_albums=>[@album, album]).all.must_equal [@artist]
    @Artist.filter(:first_a_album=>[@album, album]).all.must_equal [@artist]
    @album.update(:name=>'Foo')
    @Artist.filter(:a_albums=>[@album, album]).all.must_equal [@artist]
    @Artist.filter(:first_a_album=>[@album, album]).all.must_equal [@artist]
    album.update(:name=>'Foo')
    @Artist.filter(:a_albums=>[@album, album]).all.must_equal []
    @Artist.filter(:first_a_album=>[@album, album]).all.must_equal []
    album.update(:artist => nil)
    artist.add_album(@album)
    @Album.filter(:a_artist=>[@artist, artist]).all.must_equal [@album]
    @artist.update(:name=>'Foo')
    @Album.filter(:a_artist=>[@artist, artist]).all.must_equal [@album]
    artist.update(:name=>'Foo')
    @Album.filter(:a_artist=>[@artist, artist]).all.must_equal []
    @Album.filter(:t_tags=>[@tag, tag]).all.must_equal [@album]
    @Album.filter(:alias_t_tags=>[@tag, tag]).all.must_equal [@album]
    unless @no_many_through_many
      @Album.filter(:t_tag=>[@tag, tag]).all.must_equal [@album]
      @Album.filter(:alias_t_tag=>[@tag, tag]).all.must_equal [@album]
      @Artist.filter(:t_tags=>[@tag, tag]).all.must_equal [artist]
      @Artist.filter(:t_tag=>[@tag, tag]).all.must_equal [artist]
    end
    @tag.update(:name=>'Foo')
    @Album.filter(:t_tags=>[@tag, tag]).all.must_equal [@album]
    @Album.filter(:alias_t_tags=>[@tag, tag]).all.must_equal [@album]
    unless @no_many_through_many
      @Album.filter(:t_tag=>[@tag, tag]).all.must_equal [@album]
      @Album.filter(:alias_t_tag=>[@tag, tag]).all.must_equal [@album]
      @Artist.filter(:t_tags=>[@tag, tag]).all.must_equal [artist]
      @Artist.filter(:t_tag=>[@tag, tag]).all.must_equal [artist]
    end
    tag.update(:name=>'Foo')
    @Album.filter(:t_tags=>[@tag, tag]).all.must_equal []
    @Album.filter(:alias_t_tags=>[@tag, tag]).all.must_equal []
    unless @no_many_through_many
      @Album.filter(:t_tag=>[@tag, tag]).all.must_equal []
      @Album.filter(:alias_t_tag=>[@tag, tag]).all.must_equal []
      @Artist.filter(:t_tags=>[@tag, tag]).all.must_equal []
      @Artist.filter(:t_tag=>[@tag, tag]).all.must_equal []
    end
  end

  it "should work correctly when excluding associations with conditions with multiple objects" do
    album, artist, tag = @pr.call
    album.update(:name=>@album.name)
    artist.update(:name=>@artist.name)
    tag.update(:name=>@tag.name)
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    album.update(:artist => @artist)
    tag.add_album(@album)
    artist.add_album(@album)
    @Artist.exclude(:a_albums=>[@album, album]).all.must_equal []
    @Artist.exclude(:first_a_album=>[@album, album]).all.must_equal []
    @album.update(:name=>'Foo')
    @Artist.exclude(:a_albums=>[@album, album]).all.must_equal [artist]
    @Artist.exclude(:first_a_album=>[@album, album]).all.must_equal [artist]
    album.update(:name=>'Foo')
    @Artist.exclude(:a_albums=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Artist.exclude(:first_a_album=>[@album, album]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Album.exclude(:a_artist=>[@artist, artist]).all.must_equal []
    album.update(:artist => nil)
    @artist.update(:name=>'Foo')
    @Album.exclude(:a_artist=>[@artist, artist]).all.must_equal [album]
    artist.update(:name=>'Foo')
    @Album.exclude(:a_artist=>[@artist, artist]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @tag.add_album(album)
    @Album.exclude(:t_tags=>[@tag, tag]).all.must_equal []
    @Album.exclude(:alias_t_tags=>[@tag, tag]).all.must_equal []
    unless @no_many_through_many
      @Album.exclude(:t_tag=>[@tag, tag]).all.must_equal []
      @Album.exclude(:alias_t_tag=>[@tag, tag]).all.must_equal []
      @Artist.exclude(:t_tags=>[@tag, tag]).all.must_equal [@artist]
      @Artist.exclude(:t_tag=>[@tag, tag]).all.must_equal [@artist]
    end
    @tag.update(:name=>'Foo')
    @Album.exclude(:t_tags=>[@tag, tag]).all.must_equal [album]
    @Album.exclude(:alias_t_tags=>[@tag, tag]).all.must_equal [album]
    unless @no_many_through_many
      @Album.exclude(:t_tag=>[@tag, tag]).all.must_equal [album]
      @Album.exclude(:alias_t_tag=>[@tag, tag]).all.must_equal [album]
      @Artist.exclude(:t_tags=>[@tag, tag]).all.must_equal [@artist]
      @Artist.exclude(:t_tag=>[@tag, tag]).all.must_equal [@artist]
    end
    tag.update(:name=>'Foo')
    @Album.exclude(:t_tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.exclude(:alias_t_tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
    unless @no_many_through_many
      @Album.exclude(:t_tag=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
      @Album.exclude(:alias_t_tag=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@album, album]
      @Artist.exclude(:t_tags=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
      @Artist.exclude(:t_tag=>[@tag, tag]).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    end
  end

  it "should work correctly when excluding by associations in regards to NULL values" do
    @Artist.exclude(:albums=>@album).all.must_equal [@artist]
    @Artist.exclude(:first_album=>@album).all.must_equal [@artist]
    @Album.exclude(:artist=>@artist).all.must_equal [@album]
    @Album.exclude(:tags=>@tag).all.must_equal [@album]
    @Album.exclude(:alias_tags=>@tag).all.must_equal [@album]
    @Tag.exclude(:albums=>@album).all.must_equal [@tag]
    @Album.exclude(:artist=>@artist, :tags=>@tag).all.must_equal [@album]
    @Artist.exclude(:a_albums=>@album).all.must_equal [@artist]
    @Artist.exclude(:first_a_album=>@album).all.must_equal [@artist]
    @Album.exclude(:a_artist=>@artist).all.must_equal [@album]
    @Album.exclude(:t_tags=>@tag).all.must_equal [@album]
    @Album.exclude(:alias_t_tags=>@tag).all.must_equal [@album]
    unless @no_many_through_many
      @Album.exclude(:t_tag=>@tag).all.must_equal [@album]
      @Album.exclude(:alias_t_tag=>@tag).all.must_equal [@album]
      @Artist.exclude(:t_tags=>@tag).all.must_equal [@artist]
      @Artist.exclude(:t_tag=>@tag).all.must_equal [@artist]
    end
    @album.update(:artist => @artist)
    @artist.albums_dataset.exclude(:tags=>@tag).all.must_equal [@album]
  end

  it "should handle NULL values in join table correctly when filtering/excluding many_to_many associations" do
    @ins.call
    @Album.exclude(:tags=>@tag).all.must_equal [@album]
    @Album.exclude(:alias_tags=>@tag).all.must_equal [@album]
    @Album.exclude(:t_tags=>@tag).all.must_equal [@album]
    @Album.exclude(:alias_t_tags=>@tag).all.must_equal [@album]
    @album.add_tag(@tag)
    @Album.filter(:tags=>@tag).all.must_equal [@album]
    @Album.filter(:alias_tags=>@tag).all.must_equal [@album]
    @Album.filter(:t_tags=>@tag).all.must_equal [@album]
    @Album.filter(:alias_t_tags=>@tag).all.must_equal [@album]
    album, tag = @pr.call.values_at(0, 2)
    @Album.exclude(:tags=>@tag).all.must_equal [album]
    @Album.exclude(:alias_tags=>@tag).all.must_equal [album]
    @Album.exclude(:t_tags=>@tag).all.must_equal [album]
    @Album.exclude(:alias_t_tags=>@tag).all.must_equal [album]
    @Album.exclude(:tags=>tag).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.exclude(:alias_tags=>tag).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.exclude(:t_tags=>tag).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.exclude(:alias_t_tags=>tag).all.sort_by{|x| x.pk}.must_equal [@album, album]
  end

  it "should work correctly when filtering by association datasets" do
    album, artist, tag = @pr.call
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    album.add_tag(tag)
    album.update(:artist => artist)
    @Artist.filter(:albums=>@Album).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Artist.filter(:albums=>@Album.filter(Array(Album.primary_key).map{|k| Sequel.qualify(Album.table_name, k)}.zip(Array(album.pk)))).all.sort_by{|x| x.pk}.must_equal [artist]
    @Artist.filter(:albums=>@Album.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    @Artist.filter(:first_album=>@Album).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Artist.filter(:first_album=>@Album.filter(Array(Album.primary_key).map{|k| Sequel.qualify(Album.table_name, k)}.zip(Array(album.pk)))).all.sort_by{|x| x.pk}.must_equal [artist]
    @Artist.filter(:first_album=>@Album.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    @Album.filter(:artist=>@Artist).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.filter(:artist=>@Artist.filter(Array(Artist.primary_key).map{|k| Sequel.qualify(Artist.table_name, k)}.zip(Array(artist.pk)))).all.sort_by{|x| x.pk}.must_equal [album]
    @Album.filter(:artist=>@Artist.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    @Album.filter(:tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.filter(:tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [album]
    @Album.filter(:tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    @Album.filter(:alias_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.filter(:alias_tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [album]
    @Album.filter(:alias_tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    @Tag.filter(:albums=>@Album).all.sort_by{|x| x.pk}.must_equal [@tag, tag]
    @Tag.filter(:albums=>@Album.filter(Array(Album.primary_key).map{|k| Sequel.qualify(Album.table_name, k)}.zip(Array(album.pk)))).all.sort_by{|x| x.pk}.must_equal [tag]
    @Tag.filter(:albums=>@Album.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    unless @no_many_through_many
      @Artist.filter(:tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
      @Artist.filter(:tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [artist]
      @Artist.filter(:tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
      @Artist.filter(:first_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
      @Artist.filter(:first_tag=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [artist]
      @Artist.filter(:first_tag=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    end
  end

  it "should work correctly when excluding by association datasets" do
    album, artist, tag = @pr.call
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    album.add_tag(tag)
    album.update(:artist => artist)
    @Artist.exclude(:albums=>@Album).all.sort_by{|x| x.pk}.must_equal []
    @Artist.exclude(:albums=>@Album.filter(Array(Album.primary_key).map{|k| Sequel.qualify(Album.table_name, k)}.zip(Array(album.pk)))).all.sort_by{|x| x.pk}.must_equal [@artist]
    @Artist.exclude(:albums=>@Album.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Artist.exclude(:first_album=>@Album).all.sort_by{|x| x.pk}.must_equal []
    @Artist.exclude(:first_album=>@Album.filter(Array(Album.primary_key).map{|k| Sequel.qualify(Album.table_name, k)}.zip(Array(album.pk)))).all.sort_by{|x| x.pk}.must_equal [@artist]
    @Artist.exclude(:first_album=>@Album.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Album.exclude(:artist=>@Artist).all.sort_by{|x| x.pk}.must_equal []
    @Album.exclude(:artist=>@Artist.filter(Array(Artist.primary_key).map{|k| Sequel.qualify(Artist.table_name, k)}.zip(Array(artist.pk)))).all.sort_by{|x| x.pk}.must_equal [@album]
    @Album.exclude(:artist=>@Artist.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.exclude(:tags=>@Tag).all.sort_by{|x| x.pk}.must_equal []
    @Album.exclude(:tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [@album]
    @Album.exclude(:tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.exclude(:alias_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal []
    @Album.exclude(:alias_tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [@album]
    @Album.exclude(:alias_tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Tag.exclude(:albums=>@Album).all.sort_by{|x| x.pk}.must_equal []
    @Tag.exclude(:albums=>@Album.filter(Array(Album.primary_key).map{|k| Sequel.qualify(Album.table_name, k)}.zip(Array(album.pk)))).all.sort_by{|x| x.pk}.must_equal [@tag]
    @Tag.exclude(:albums=>@Album.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@tag, tag]
    unless @no_many_through_many
      @Artist.exclude(:tags=>@Tag).all.sort_by{|x| x.pk}.must_equal []
      @Artist.exclude(:tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [@artist]
      @Artist.exclude(:tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
      @Artist.exclude(:first_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal []
      @Artist.exclude(:first_tag=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [@artist]
      @Artist.exclude(:first_tag=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    end
  end

  it "should work correctly when filtering by association datasets with conditions" do
    album, artist, tag = @pr.call
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    album.add_tag(tag)
    album.update(:artist => artist)
    @Artist.filter(:a_albums=>@Album).all.sort_by{|x| x.pk}.must_equal [@artist]
    @Artist.filter(:first_a_album=>@Album).all.sort_by{|x| x.pk}.must_equal [@artist]
    @Album.filter(:a_artist=>@Artist).all.sort_by{|x| x.pk}.must_equal [@album]
    @Album.filter(:t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [@album]
    @Album.filter(:alias_t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [@album]
    unless @no_many_through_many
      @Album.filter(:t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal [@album]
      @Album.filter(:alias_t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal [@album]
      @Artist.filter(:t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [@artist]
      @Artist.filter(:t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal [@artist]
    end
    artist.update(:name=>@artist.name)
    album.update(:name=>@album.name)
    tag.update(:name=>@tag.name)
    @Artist.filter(:a_albums=>@Album).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Artist.filter(:first_a_album=>@Album).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Album.filter(:a_artist=>@Artist).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.filter(:t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.filter(:alias_t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [@album, album]
    unless @no_many_through_many
      @Album.filter(:t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal [@album, album]
      @Album.filter(:alias_t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal [@album, album]
      @Artist.filter(:t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
      @Artist.filter(:t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    end
    @Artist.filter(:a_albums=>@Album.filter(Array(Album.primary_key).map{|k| Sequel.qualify(Album.table_name, k)}.zip(Array(album.pk)))).all.sort_by{|x| x.pk}.must_equal [artist]
    @Artist.filter(:first_a_album=>@Album.filter(Array(Album.primary_key).map{|k| Sequel.qualify(Album.table_name, k)}.zip(Array(album.pk)))).all.sort_by{|x| x.pk}.must_equal [artist]
    @Album.filter(:a_artist=>@Artist.filter(Array(Artist.primary_key).map{|k| Sequel.qualify(Artist.table_name, k)}.zip(Array(artist.pk)))).all.sort_by{|x| x.pk}.must_equal [album]
    @Album.filter(:t_tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [album]
    @Album.filter(:alias_t_tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [album]
    unless @no_many_through_many
      @Album.filter(:t_tag=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [album]
      @Album.filter(:alias_t_tag=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [album]
      @Artist.filter(:t_tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [artist]
      @Artist.filter(:t_tag=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [artist]
    end
    @Artist.filter(:a_albums=>@Album.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    @Artist.filter(:first_a_album=>@Album.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    @Album.filter(:a_artist=>@Artist.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    @Album.filter(:t_tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [album]
    @Album.filter(:t_tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    @Album.filter(:alias_t_tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    unless @no_many_through_many
      @Album.filter(:t_tag=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [album]
      @Album.filter(:t_tag=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
      @Album.filter(:alias_t_tag=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
      @Artist.filter(:t_tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
      @Artist.filter(:t_tag=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal []
    end
  end

  it "should work correctly when excluding by association datasets with conditions" do
    album, artist, tag = @pr.call
    @album.update(:artist => @artist)
    @album.add_tag(@tag)
    album.add_tag(tag)
    album.update(:artist => artist)
    @Artist.exclude(:a_albums=>@Album).all.sort_by{|x| x.pk}.must_equal [artist]
    @Artist.exclude(:first_a_album=>@Album).all.sort_by{|x| x.pk}.must_equal [artist]
    @Album.exclude(:a_artist=>@Artist).all.sort_by{|x| x.pk}.must_equal [album]
    @Album.exclude(:t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [album]
    @Album.exclude(:alias_t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [album]
    unless @no_many_through_many
      @Album.exclude(:t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal [album]
      @Album.exclude(:alias_t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal [album]
      @Artist.exclude(:t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal [artist]
      @Artist.exclude(:t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal [artist]
    end
    artist.update(:name=>@artist.name)
    album.update(:name=>@album.name)
    tag.update(:name=>@tag.name)
    @Artist.exclude(:a_albums=>@Album).all.sort_by{|x| x.pk}.must_equal []
    @Artist.exclude(:first_a_album=>@Album).all.sort_by{|x| x.pk}.must_equal []
    @Album.exclude(:a_artist=>@Artist).all.sort_by{|x| x.pk}.must_equal []
    @Album.exclude(:t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal []
    @Album.exclude(:alias_t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal []
    unless @no_many_through_many
      @Album.exclude(:t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal []
      @Album.exclude(:alias_t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal []
      @Artist.exclude(:t_tags=>@Tag).all.sort_by{|x| x.pk}.must_equal []
      @Artist.exclude(:t_tag=>@Tag).all.sort_by{|x| x.pk}.must_equal []
    end
    @Artist.exclude(:a_albums=>@Album.filter(Array(Album.primary_key).map{|k| Sequel.qualify(Album.table_name, k)}.zip(Array(album.pk)))).all.sort_by{|x| x.pk}.must_equal [@artist]
    @Artist.exclude(:first_a_album=>@Album.filter(Array(Album.primary_key).map{|k| Sequel.qualify(Album.table_name, k)}.zip(Array(album.pk)))).all.sort_by{|x| x.pk}.must_equal [@artist]
    @Album.exclude(:a_artist=>@Artist.filter(Array(Artist.primary_key).map{|k| Sequel.qualify(Artist.table_name, k)}.zip(Array(artist.pk)))).all.sort_by{|x| x.pk}.must_equal [@album]
    @Album.exclude(:t_tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [@album]
    @Album.exclude(:alias_t_tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [@album]
    unless @no_many_through_many
      @Album.exclude(:t_tag=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [@album]
      @Album.exclude(:alias_t_tag=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [@album]
      @Artist.exclude(:t_tags=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [@artist]
      @Artist.exclude(:t_tag=>@Tag.filter(Array(Tag.primary_key).map{|k| Sequel.qualify(Tag.table_name, k)}.zip(Array(tag.pk)))).all.sort_by{|x| x.pk}.must_equal [@artist]
    end
    @Artist.exclude(:a_albums=>@Album.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Artist.exclude(:first_a_album=>@Album.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    @Album.exclude(:a_artist=>@Artist.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.exclude(:t_tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@album, album]
    @Album.exclude(:alias_t_tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@album, album]
    unless @no_many_through_many
      @Album.exclude(:t_tag=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@album, album]
      @Album.exclude(:alias_t_tag=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@album, album]
      @Artist.exclude(:t_tags=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
      @Artist.exclude(:t_tag=>@Tag.filter(1=>0)).all.sort_by{|x| x.pk}.must_equal [@artist, artist]
    end
  end
end
ds.exclude(:first_album=>@album).all.must_equal [ar] ds.exclude(:first_album=>diff_album).all.must_equal [@artist, ar] if @els[:eager_limit_strategy] != :distinct_on && (@els[:eager_limit_strategy] != :correlated_subquery || Album.dataset.supports_offsets_in_correlated_subqueries?) ds.where(:second_album=>@album).all.must_equal [] ds.where(:second_album=>diff_album).all.must_equal [@artist] ds.exclude(:second_album=>@album).all.must_equal [@artist, ar] ds.exclude(:second_album=>diff_album).all.must_equal [ar] end ds.where(:last_album=>@album).all.must_equal [] ds.where(:last_album=>diff_album).all.must_equal [@artist] ds.exclude(:last_album=>@album).all.must_equal [@artist, ar] ds.exclude(:last_album=>diff_album).all.must_equal [ar] Artist.one_to_one :first_album, :clone=>:first_album do |ads| ads.where(Sequel[:albums][:name]=>diff_album.name) end ar.add_album(diff_album) ds.where(:first_album=>[@album, diff_album]).all.must_equal [ar] ds.exclude(:first_album=>[@album, diff_album]).all.must_equal [@artist] end end filter_by_associations_singular_limit_strategies = Module.new do extend Minitest::Spec::DSL include filter_by_associations_one_to_one_limit_strategies it "dataset associations with limited one_to_one associations should work correctly" do Artist.one_to_one :first_album, {:clone=>:first_album}.merge(@els) Artist.one_to_one :last_album, {:clone=>:last_album}.merge(@els) Artist.one_to_one :second_album, {:clone=>:second_album}.merge(@els) if @els[:eager_limit_strategy] != :distinct_on @album.update(:artist => @artist) diff_album = @diff_album.call ar = @pr.call[1] ds = Artist ds.where(@artist.pk_hash).first_albums.all.must_equal [@album] ds.where(@artist.pk_hash).second_albums.all.must_equal [diff_album] ds.where(@artist.pk_hash).last_albums.all.must_equal [diff_album] ds.where(ar.pk_hash).first_albums.all.must_equal [] ds.where(ar.pk_hash).second_albums.all.must_equal [] ds.where(ar.pk_hash).last_albums.all.must_equal [] Artist.one_to_one :first_album, :clone=>:first_album do |ads| ads.where(Sequel[:albums][:name]=>diff_album.name) end ar.add_album(diff_album) ds.where(@artist.pk_hash).first_albums.all.must_equal [] ds.where(ar.pk_hash).first_albums.all.must_equal [diff_album] end it "filter by associations with limited one_through_one associations should work correctly" do Album.one_through_one :first_tag, {:clone=>:first_tag}.merge(@els) Album.one_through_one :second_tag, {:clone=>:second_tag}.merge(@els) if @els[:eager_limit_strategy] != :distinct_on Album.one_through_one :last_tag, {:clone=>:last_tag}.merge(@els) tu, tv = @other_tags.call al = @pr.call.first ds = Album.order(:name) al.add_tag(tu) ds.where(:first_tag=>@tag).all.must_equal [@album] ds.where(:first_tag=>tu).all.must_equal [al] ds.where(:first_tag=>tv).all.must_equal [] ds.exclude(:first_tag=>@tag).all.must_equal [al] ds.exclude(:first_tag=>tu).all.must_equal [@album] ds.exclude(:first_tag=>tv).all.must_equal [@album, al] ds.where(:second_tag=>@tag).all.must_equal [] ds.where(:second_tag=>tu).all.must_equal [@album] ds.where(:second_tag=>tv).all.must_equal [] ds.exclude(:second_tag=>@tag).all.must_equal [@album, al] ds.exclude(:second_tag=>tu).all.must_equal [al] ds.exclude(:second_tag=>tv).all.must_equal [@album, al] ds.where(:last_tag=>@tag).all.must_equal [] ds.where(:last_tag=>tu).all.must_equal [al] ds.where(:last_tag=>tv).all.must_equal [@album] ds.exclude(:last_tag=>@tag).all.must_equal [@album, al] ds.exclude(:last_tag=>tu).all.must_equal [@album] ds.exclude(:last_tag=>tv).all.must_equal [al] 
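# The associations are redefined below with block conditions to verify that filtering by association also respects association-level blocks, not just declared options.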
Album.one_through_one :first_tag, :clone=>:first_tag do |ads| ads.where(Sequel[:tags][:name]=>tu.name) end Album.one_through_one :second_tag, :clone=>:second_tag do |ads| ads.where(Sequel[:tags][:name]=>[tu.name, tv.name]) end ds.where(:first_tag=>[@tag, tu]).all.must_equal [@album, al] ds.exclude(:first_tag=>[@tag, tu]).all.must_equal [] al.add_tag(tv) ds.where(:second_tag=>[tv, tu]).all.must_equal [@album, al] ds.exclude(:second_tag=>[tv, tu]).all.must_equal [] end it "dataset associations with limited one_through_one associations should work correctly" do Album.one_through_one :first_tag, {:clone=>:first_tag}.merge(@els) Album.one_through_one :second_tag, {:clone=>:second_tag}.merge(@els) if @els[:eager_limit_strategy] != :distinct_on Album.one_through_one :last_tag, {:clone=>:last_tag}.merge(@els) tu, tv = @other_tags.call al = @pr.call.first ds = Album al.add_tag(tu) ds.where(@album.pk_hash).first_tags.all.must_equal [@tag] ds.where(@album.pk_hash).second_tags.all.must_equal [tu] ds.where(@album.pk_hash).last_tags.all.must_equal [tv] ds.where(al.pk_hash).first_tags.all.must_equal [tu] ds.where(al.pk_hash).second_tags.all.must_equal [] ds.where(al.pk_hash).last_tags.all.must_equal [tu] Album.one_through_one :first_tag, :clone=>:first_tag do |ads| ads.where(Sequel[:tags][:name]=>tu.name) end Album.one_through_one :second_tag, :clone=>:second_tag do |ads| ads.where(Sequel[:tags][:name]=>[tu.name, tv.name]) end ds.where(@album.pk_hash).first_tags.all.must_equal [tu] ds.where(@album.pk_hash).second_tags.all.must_equal [tv] ds.where(al.pk_hash).first_tags.all.must_equal [tu] ds.where(al.pk_hash).second_tags.all.must_equal [] al.add_tag(tv) ds.where(@album.pk_hash).first_tags.all.must_equal [tu] ds.where(@album.pk_hash).second_tags.all.must_equal [tv] ds.where(al.pk_hash).first_tags.all.must_equal [tu] ds.where(al.pk_hash).second_tags.all.must_equal [tv] end it "filter by associations with limited one_through_many associations should work correctly" do Artist.one_through_many :first_tag, {:clone=>:first_tag}.merge(@els) Artist.one_through_many :second_tag, {:clone=>:second_tag}.merge(@els) if @els[:eager_limit_strategy] != :distinct_on Artist.one_through_many :last_tag, {:clone=>:last_tag}.merge(@els) @album.update(:artist => @artist) tu, tv = @other_tags.call al, ar, _ = @pr.call al.update(:artist=>ar) al.add_tag(tu) ds = Artist.order(:name) ds.where(:first_tag=>@tag).all.must_equal [@artist] ds.where(:first_tag=>tu).all.must_equal [ar] ds.where(:first_tag=>tv).all.must_equal [] ds.exclude(:first_tag=>@tag).all.must_equal [ar] ds.exclude(:first_tag=>tu).all.must_equal [@artist] ds.exclude(:first_tag=>tv).all.must_equal [@artist, ar] ds.where(:second_tag=>@tag).all.must_equal [] ds.where(:second_tag=>tu).all.must_equal [@artist] ds.where(:second_tag=>tv).all.must_equal [] ds.exclude(:second_tag=>@tag).all.must_equal [@artist, ar] ds.exclude(:second_tag=>tu).all.must_equal [ar] ds.exclude(:second_tag=>tv).all.must_equal [@artist, ar] ds.where(:last_tag=>@tag).all.must_equal [] ds.where(:last_tag=>tu).all.must_equal [ar] ds.where(:last_tag=>tv).all.must_equal [@artist] ds.exclude(:last_tag=>@tag).all.must_equal [@artist, ar] ds.exclude(:last_tag=>tu).all.must_equal [@artist] ds.exclude(:last_tag=>tv).all.must_equal [ar] Artist.one_through_many :first_tag, :clone=>:first_tag do |ads| ads.where(Sequel[:tags][:name]=>tu.name) end Artist.one_through_many :second_tag, :clone=>:second_tag do |ads| ads.where(Sequel[:tags][:name]=>[tu.name, tv.name]) end ds.where(:first_tag=>[@tag, tu]).all.must_equal 
[@artist, ar] ds.exclude(:first_tag=>[@tag, tu]).all.must_equal [] al.add_tag(tv) ds.where(:second_tag=>[tv, tu]).all.must_equal [@artist, ar] ds.exclude(:second_tag=>[tv, tu]).all.must_equal [] end it "dataset associations with limited one_through_many associations should work correctly" do Artist.one_through_many :first_tag, {:clone=>:first_tag}.merge(@els) Artist.one_through_many :second_tag, {:clone=>:second_tag}.merge(@els) if @els[:eager_limit_strategy] != :distinct_on Artist.one_through_many :last_tag, {:clone=>:last_tag}.merge(@els) @album.update(:artist => @artist) tu, tv = @other_tags.call al, ar, _ = @pr.call al.update(:artist=>ar) al.add_tag(tu) ds = Artist.order(:name) ds.where(@artist.pk_hash).first_tags.all.must_equal [@tag] ds.where(@artist.pk_hash).second_tags.all.must_equal [tu] ds.where(@artist.pk_hash).last_tags.all.must_equal [tv] ds.where(ar.pk_hash).first_tags.all.must_equal [tu] ds.where(ar.pk_hash).second_tags.all.must_equal [] ds.where(ar.pk_hash).last_tags.all.must_equal [tu] Artist.one_through_many :first_tag, :clone=>:first_tag do |ads| ads.where(Sequel[:tags][:name]=>tu.name) end Artist.one_through_many :second_tag, :clone=>:second_tag do |ads| ads.where(Sequel[:tags][:name]=>[tu.name, tv.name]) end ds.where(@artist.pk_hash).first_tags.all.must_equal [tu] ds.where(@artist.pk_hash).second_tags.all.must_equal [tv] ds.where(ar.pk_hash).first_tags.all.must_equal [tu] ds.where(ar.pk_hash).second_tags.all.must_equal [] al.add_tag(tv) ds.where(@artist.pk_hash).first_tags.all.must_equal [tu] ds.where(@artist.pk_hash).second_tags.all.must_equal [tv] ds.where(ar.pk_hash).first_tags.all.must_equal [tu] ds.where(ar.pk_hash).second_tags.all.must_equal [tv] end end filter_by_associations_one_to_many_limit_strategies = Module.new do extend Minitest::Spec::DSL it "filter by associations with limited one_to_many associations should work correctly" do Artist.one_to_many :first_two_albums, {:clone=>:first_two_albums}.merge(@els) Artist.one_to_many :second_two_albums, {:clone=>:second_two_albums}.merge(@els) Artist.one_to_many :not_first_albums, {:clone=>:not_first_albums}.merge(@els) Artist.one_to_many :last_two_albums, {:clone=>:last_two_albums}.merge(@els) @album.update(:artist => @artist) middle_album = @middle_album.call diff_album = @diff_album.call ar = @pr.call[1] ds = Artist.order(:name) ds.where(:first_two_albums=>@album).all.must_equal [@artist] ds.where(:first_two_albums=>middle_album).all.must_equal [@artist] ds.where(:first_two_albums=>diff_album).all.must_equal [] ds.exclude(:first_two_albums=>@album).all.must_equal [ar] ds.exclude(:first_two_albums=>middle_album).all.must_equal [ar] ds.exclude(:first_two_albums=>diff_album).all.must_equal [@artist, ar] assocs = if @els[:eager_limit_strategy] != :correlated_subquery || Album.dataset.supports_offsets_in_correlated_subqueries? 
[:second_two_albums, :not_first_albums, :last_two_albums] else [:last_two_albums] end assocs.each do |a| ds.where(a=>@album).all.must_equal [] ds.where(a=>middle_album).all.must_equal [@artist] ds.where(a=>diff_album).all.must_equal [@artist] ds.exclude(a=>@album).all.must_equal [@artist, ar] ds.exclude(a=>middle_album).all.must_equal [ar] ds.exclude(a=>diff_album).all.must_equal [ar] end Artist.one_to_one :first_two_albums, :clone=>:first_two_albums do |ads| ads.where(Sequel[:albums][:name]=>diff_album.name) end ar.add_album(diff_album) ds.where(:first_two_albums=>[@album, diff_album]).all.must_equal [ar] ds.exclude(:first_two_albums=>[@album, diff_album]).all.must_equal [@artist] end end filter_by_associations_limit_strategies = Module.new do extend Minitest::Spec::DSL include filter_by_associations_singular_limit_strategies include filter_by_associations_one_to_many_limit_strategies it "dataset associations with limited one_to_many associations should work correctly" do Artist.one_to_many :first_two_albums, {:clone=>:first_two_albums}.merge(@els) Artist.one_to_many :second_two_albums, {:clone=>:second_two_albums}.merge(@els) Artist.one_to_many :not_first_albums, {:clone=>:not_first_albums}.merge(@els) Artist.one_to_many :last_two_albums, {:clone=>:last_two_albums}.merge(@els) @album.update(:artist => @artist) middle_album = @middle_album.call diff_album = @diff_album.call ar = @pr.call[1] ds = Artist.order(:name) ds.where(@artist.pk_hash).first_two_albums.all.must_equal [@album, middle_album] ds.where(@artist.pk_hash).second_two_albums.all.must_equal [middle_album, diff_album] ds.where(@artist.pk_hash).not_first_albums.all.must_equal [middle_album, diff_album] ds.where(@artist.pk_hash).last_two_albums.all.must_equal [diff_album, middle_album] ds.where(ar.pk_hash).first_two_albums.all.must_equal [] ds.where(ar.pk_hash).second_two_albums.all.must_equal [] ds.where(ar.pk_hash).not_first_albums.all.must_equal [] ds.where(ar.pk_hash).last_two_albums.all.must_equal [] Artist.one_to_one :first_two_albums, :clone=>:first_two_albums do |ads| ads.where(Sequel[:albums][:name]=>[diff_album.name, middle_album.name]) end ar.add_album(diff_album) ds.where(@artist.pk_hash).first_two_albums.all.must_equal [middle_album] ds.where(ar.pk_hash).first_two_albums.all.must_equal [diff_album] end it "filter by associations with limited many_to_many associations should work correctly" do Album.send :many_to_many, :first_two_tags, {:clone=>:first_two_tags}.merge(@els) Album.send :many_to_many, :second_two_tags, {:clone=>:second_two_tags}.merge(@els) Album.send :many_to_many, :not_first_tags, {:clone=>:not_first_tags}.merge(@els) Album.send :many_to_many, :last_two_tags, {:clone=>:last_two_tags}.merge(@els) tu, tv = @other_tags.call al = @pr.call.first al.add_tag(tu) ds = Album.order(:name) ds.where(:first_two_tags=>@tag).all.must_equal [@album] ds.where(:first_two_tags=>tu).all.must_equal [@album, al] ds.where(:first_two_tags=>tv).all.must_equal [] ds.exclude(:first_two_tags=>@tag).all.must_equal [al] ds.exclude(:first_two_tags=>tu).all.must_equal [] ds.exclude(:first_two_tags=>tv).all.must_equal [@album, al] ds.where(:second_two_tags=>@tag).all.must_equal [] ds.where(:second_two_tags=>tu).all.must_equal [@album] ds.where(:second_two_tags=>tv).all.must_equal [@album] ds.exclude(:second_two_tags=>@tag).all.must_equal [@album, al] ds.exclude(:second_two_tags=>tu).all.must_equal [al] ds.exclude(:second_two_tags=>tv).all.must_equal [al] ds.where(:not_first_tags=>@tag).all.must_equal [] 
ds.where(:not_first_tags=>tu).all.must_equal [@album] ds.where(:not_first_tags=>tv).all.must_equal [@album] ds.exclude(:not_first_tags=>@tag).all.must_equal [@album, al] ds.exclude(:not_first_tags=>tu).all.must_equal [al] ds.exclude(:not_first_tags=>tv).all.must_equal [al] ds.where(:last_two_tags=>@tag).all.must_equal [] ds.where(:last_two_tags=>tu).all.must_equal [@album, al] ds.where(:last_two_tags=>tv).all.must_equal [@album] ds.exclude(:last_two_tags=>@tag).all.must_equal [@album, al] ds.exclude(:last_two_tags=>tu).all.must_equal [] ds.exclude(:last_two_tags=>tv).all.must_equal [al] Album.many_to_many :first_two_tags, :clone=>:first_two_tags do |ads| ads.where(Sequel[:tags][:name]=>tu.name) end Album.many_to_many :second_two_tags, :clone=>:second_two_tags do |ads| ads.where(Sequel[:tags][:name]=>[tu.name, tv.name]) end ds.where(:first_two_tags=>[@tag, tu]).all.must_equal [@album, al] ds.exclude(:first_two_tags=>[@tag, tu]).all.must_equal [] al.add_tag(tv) ds.where(:second_two_tags=>[tv, tu]).all.must_equal [@album, al] ds.exclude(:second_two_tags=>[tv, tu]).all.must_equal [] end it "dataset associations with limited many_to_many associations should work correctly" do Album.send :many_to_many, :first_two_tags, {:clone=>:first_two_tags}.merge(@els) Album.send :many_to_many, :second_two_tags, {:clone=>:second_two_tags}.merge(@els) Album.send :many_to_many, :not_first_tags, {:clone=>:not_first_tags}.merge(@els) Album.send :many_to_many, :last_two_tags, {:clone=>:last_two_tags}.merge(@els) tu, tv = @other_tags.call al = @pr.call.first al.add_tag(tu) ds = Album.order(:name) ds.where(@album.pk_hash).first_two_tags.all.must_equal [@tag, tu] ds.where(@album.pk_hash).second_two_tags.all.must_equal [tu, tv] ds.where(@album.pk_hash).not_first_tags.all.must_equal [tu, tv] ds.where(@album.pk_hash).last_two_tags.all.must_equal [tv, tu] ds.where(al.pk_hash).first_two_tags.all.must_equal [tu] ds.where(al.pk_hash).second_two_tags.all.must_equal [] ds.where(al.pk_hash).not_first_tags.all.must_equal [] ds.where(al.pk_hash).last_two_tags.all.must_equal [tu] Album.many_to_many :first_two_tags, :clone=>:first_two_tags do |ads| ads.where(Sequel[:tags][:name]=>tu.name) end Album.many_to_many :second_two_tags, :clone=>:second_two_tags do |ads| ads.where(Sequel[:tags][:name]=>[tu.name, tv.name]) end ds.where(@album.pk_hash).first_two_tags.all.must_equal [tu] ds.where(@album.pk_hash).second_two_tags.all.must_equal [tv] ds.where(al.pk_hash).first_two_tags.all.must_equal [tu] ds.where(al.pk_hash).second_two_tags.all.must_equal [] al.add_tag(tv) ds.where(@album.pk_hash).first_two_tags.all.must_equal [tu] ds.where(@album.pk_hash).second_two_tags.all.must_equal [tv] ds.where(al.pk_hash).first_two_tags.all.must_equal [tu] ds.where(al.pk_hash).second_two_tags.all.must_equal [tv] end it "filter by associations with limited many_through_many associations should work correctly" do Artist.many_through_many :first_two_tags, {:clone=>:first_two_tags}.merge(@els) Artist.many_through_many :second_two_tags, {:clone=>:second_two_tags}.merge(@els) Artist.many_through_many :not_first_tags, {:clone=>:not_first_tags}.merge(@els) Artist.many_through_many :last_two_tags, {:clone=>:last_two_tags}.merge(@els) @album.update(:artist => @artist) tu, tv = @other_tags.call al, ar, _ = @pr.call al.update(:artist=>ar) al.add_tag(tu) ds = Artist.order(:name) ds.where(:first_two_tags=>@tag).all.must_equal [@artist] ds.where(:first_two_tags=>tu).all.must_equal [@artist, ar] ds.where(:first_two_tags=>tv).all.must_equal [] 
ds.exclude(:first_two_tags=>@tag).all.must_equal [ar] ds.exclude(:first_two_tags=>tu).all.must_equal [] ds.exclude(:first_two_tags=>tv).all.must_equal [@artist, ar] ds.where(:second_two_tags=>@tag).all.must_equal [] ds.where(:second_two_tags=>tu).all.must_equal [@artist] ds.where(:second_two_tags=>tv).all.must_equal [@artist] ds.exclude(:second_two_tags=>@tag).all.must_equal [@artist, ar] ds.exclude(:second_two_tags=>tu).all.must_equal [ar] ds.exclude(:second_two_tags=>tv).all.must_equal [ar] ds.where(:not_first_tags=>@tag).all.must_equal [] ds.where(:not_first_tags=>tu).all.must_equal [@artist] ds.where(:not_first_tags=>tv).all.must_equal [@artist] ds.exclude(:not_first_tags=>@tag).all.must_equal [@artist, ar] ds.exclude(:not_first_tags=>tu).all.must_equal [ar] ds.exclude(:not_first_tags=>tv).all.must_equal [ar] ds.where(:last_two_tags=>@tag).all.must_equal [] ds.where(:last_two_tags=>tu).all.must_equal [@artist, ar] ds.where(:last_two_tags=>tv).all.must_equal [@artist] ds.exclude(:last_two_tags=>@tag).all.must_equal [@artist, ar] ds.exclude(:last_two_tags=>tu).all.must_equal [] ds.exclude(:last_two_tags=>tv).all.must_equal [ar] Artist.many_through_many :first_two_tags, :clone=>:first_two_tags do |ads| ads.where(Sequel[:tags][:name]=>tu.name) end Artist.many_through_many :second_two_tags, :clone=>:second_two_tags do |ads| ads.where(Sequel[:tags][:name]=>[tv.name, tu.name]) end ds.where(:first_two_tags=>[@tag, tu]).all.must_equal [@artist, ar] ds.exclude(:first_two_tags=>[@tag, tu]).all.must_equal [] al.add_tag(tv) ds.where(:second_two_tags=>[tv, tu]).all.must_equal [@artist, ar] ds.exclude(:second_two_tags=>[tv, tu]).all.must_equal [] end it "dataset associations with limited many_through_many associations should work correctly" do Artist.many_through_many :first_two_tags, {:clone=>:first_two_tags}.merge(@els) Artist.many_through_many :second_two_tags, {:clone=>:second_two_tags}.merge(@els) Artist.many_through_many :not_first_tags, {:clone=>:not_first_tags}.merge(@els) Artist.many_through_many :last_two_tags, {:clone=>:last_two_tags}.merge(@els) @album.update(:artist => @artist) tu, tv = @other_tags.call al, ar, _ = @pr.call al.update(:artist=>ar) al.add_tag(tu) ds = Artist.order(:name) ds.where(@artist.pk_hash).first_two_tags.all.must_equal [@tag, tu] ds.where(@artist.pk_hash).second_two_tags.all.must_equal [tu, tv] ds.where(@artist.pk_hash).not_first_tags.all.must_equal [tu, tv] ds.where(@artist.pk_hash).last_two_tags.all.must_equal [tv, tu] ds.where(ar.pk_hash).first_two_tags.all.must_equal [tu] ds.where(ar.pk_hash).second_two_tags.all.must_equal [] ds.where(ar.pk_hash).not_first_tags.all.must_equal [] ds.where(ar.pk_hash).last_two_tags.all.must_equal [tu] Artist.many_through_many :first_two_tags, :clone=>:first_two_tags do |ads| ads.where(Sequel[:tags][:name]=>tu.name) end Artist.many_through_many :second_two_tags, :clone=>:second_two_tags do |ads| ads.where(Sequel[:tags][:name]=>[tu.name, tv.name]) end ds.where(@artist.pk_hash).first_two_tags.all.must_equal [tu] ds.where(@artist.pk_hash).second_two_tags.all.must_equal [tv] ds.where(ar.pk_hash).first_two_tags.all.must_equal [tu] ds.where(ar.pk_hash).second_two_tags.all.must_equal [] al.add_tag(tv) ds.where(@artist.pk_hash).first_two_tags.all.must_equal [tu] ds.where(@artist.pk_hash).second_two_tags.all.must_equal [tv] ds.where(ar.pk_hash).first_two_tags.all.must_equal [tu] ds.where(ar.pk_hash).second_two_tags.all.must_equal [tv] end end basic_regular_and_composite_key_associations = lambda do describe "when filtering/excluding by associations"
do before do @Artist = Artist.dataset @Album = Album.dataset @Tag = Tag.dataset end include filter_by_associations end it "should return no objects if none are associated" do @album.artist.must_be_nil @album.artist_dataset.first.must_be_nil @artist.first_album.must_be_nil @artist.first_album_dataset.first.must_be_nil @artist.albums.must_equal [] @artist.albums_dataset.all.must_equal [] @album.tags.must_equal [] @album.tags_dataset.all.must_equal [] @album.alias_tags.must_equal [] @album.alias_tags_dataset.all.must_equal [] @tag.albums.must_equal [] @tag.albums_dataset.all.must_equal [] unless @no_many_through_many @album.first_tag.must_be_nil @album.first_tag_dataset.first.must_be_nil end end it "should have set methods work" do # many to one @album.update(:artist => @artist) @album.reload.artist.must_equal @artist @album.update(:artist => nil) @album.reload.artist.must_be_nil # one to one @artist.update(:first_album => @album) @artist.reload.first_album.must_equal @album @artist.update(:first_album => nil) @artist.reload.first_album.must_be_nil unless @no_many_through_many tag = @pr.call.last # one through one @album.update(:first_tag => @tag) @album.reload.first_tag.must_equal @tag @album.update(:first_tag => @tag) @album.reload.first_tag.must_equal @tag @album.update(:first_tag => tag) @album.reload.first_tag.must_equal tag @album.update(:first_tag => nil) @album.reload.first_tag.must_be_nil @album.update(:first_tag => nil) @album.reload.first_tag.must_be_nil # one through one with alias @album.update(:alias_t_tag => @tag) @album.reload.alias_t_tag.must_equal @tag @album.update(:alias_t_tag => nil) @album.reload.alias_t_tag.must_be_nil end end it "should have add and remove methods work" do # one to many @artist.add_album(@album) @artist.reload.albums.must_equal [@album] @artist.remove_album(@album) @artist.reload.albums.must_equal [] # many to many @album.add_tag(@tag) @album.reload.tags.must_equal [@tag] @tag.reload.albums.must_equal [@album] @album.alias_tags.must_equal [@tag] @album.remove_tag(@tag) @album.reload.tags.must_equal [] # many to many with alias @album.add_alias_tag(@tag) @album.reload.alias_tags.must_equal [@tag] @album.remove_alias_tag(@tag) @album.reload.alias_tags.must_equal [] end it "should have remove_all methods work" do # one to many @artist.add_album(@album) @artist.remove_all_albums @artist.reload.albums.must_equal [] # many to many @album.add_tag(@tag) @album.remove_all_tags @album.reload.tags.must_equal [] # many to many with alias @album.add_alias_tag(@tag) @album.remove_all_alias_tags @album.reload.alias_tags.must_equal [] end it "should have working dataset associations" do album, artist, tag = @pr.call Tag.albums.all.must_equal [] Album.artists.all.must_equal [] Album.tags.all.must_equal [] Album.alias_tags.all.must_equal [] Artist.albums.all.must_equal [] unless @no_many_through_many Album.first_tags.all.must_equal [] Artist.tags.all.must_equal [] Artist.first_tags.all.must_equal [] Tag.tags.all.must_equal [] end Artist.albums.tags.all.must_equal [] @album.update(:artist => @artist) @album.add_tag(@tag) Tag.albums.all.must_equal [@album] Album.artists.all.must_equal [@artist] Album.tags.all.must_equal [@tag] Album.alias_tags.all.must_equal [@tag] Artist.albums.all.must_equal [@album] unless @no_many_through_many Album.mthm_tags.all.must_equal [@tag] Album.first_tags.all.must_equal [@tag] Artist.tags.all.must_equal [@tag] Artist.first_tags.all.must_equal [@tag] Tag.tags.all.must_equal [@tag] end Artist.albums.tags.all.must_equal [@tag] album.add_tag(tag) 
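# A second, parallel album/artist/tag graph is associated here; the dataset association methods should then return rows for every matching parent, and qualified_primary_key_hash narrows them back down to a single row's associations.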
album.update(:artist => artist) Tag.albums.order(:name).all.must_equal [@album, album] Album.artists.order(:name).all.must_equal [@artist, artist] Album.tags.order(:name).all.must_equal [@tag, tag] Album.alias_tags.order(:name).all.must_equal [@tag, tag] Artist.albums.order(:name).all.must_equal [@album, album] unless @no_many_through_many Album.first_tags.order(:name).all.must_equal [@tag, tag] Artist.tags.order(:name).all.must_equal [@tag, tag] Artist.first_tags.order(:name).all.must_equal [@tag, tag] Tag.tags.order(:name).all.must_equal [@tag, tag] end Artist.albums.tags.order(:name).all.must_equal [@tag, tag] Tag.filter(Tag.qualified_primary_key_hash(tag.pk)).albums.all.must_equal [album] Album.filter(Album.qualified_primary_key_hash(album.pk)).artists.all.must_equal [artist] Album.filter(Album.qualified_primary_key_hash(album.pk)).tags.all.must_equal [tag] Album.filter(Album.qualified_primary_key_hash(album.pk)).alias_tags.all.must_equal [tag] Artist.filter(Artist.qualified_primary_key_hash(artist.pk)).albums.all.must_equal [album] unless @no_many_through_many Album.filter(Album.qualified_primary_key_hash(album.pk)).first_tags.all.must_equal [tag] Artist.filter(Artist.qualified_primary_key_hash(artist.pk)).tags.all.must_equal [tag] Artist.filter(Artist.qualified_primary_key_hash(artist.pk)).first_tags.all.must_equal [tag] Tag.filter(Tag.qualified_primary_key_hash(tag.pk)).tags.all.must_equal [tag] end Artist.filter(Artist.qualified_primary_key_hash(artist.pk)).albums.tags.all.must_equal [tag] Artist.filter(Artist.qualified_primary_key_hash(artist.pk)).albums.filter(Album.qualified_primary_key_hash(album.pk)).tags.all.must_equal [tag] Artist.filter(Artist.qualified_primary_key_hash(@artist.pk)).albums.filter(Album.qualified_primary_key_hash(@album.pk)).tags.all.must_equal [@tag] Artist.filter(Artist.qualified_primary_key_hash(@artist.pk)).albums.filter(Album.qualified_primary_key_hash(album.pk)).tags.all.must_equal [] Artist.filter(Artist.qualified_primary_key_hash(artist.pk)).albums.filter(Album.qualified_primary_key_hash(@album.pk)).tags.all.must_equal [] end it "should eager load via eager correctly" do @album.update(:artist => @artist) @album.add_tag(@tag) a = Artist.eager(:albums=>[:tags, :alias_tags]).eager(:first_album).all a.must_equal [@artist] a.first.albums.must_equal [@album] a.first.first_album.must_equal @album a.first.albums.first.tags.must_equal [@tag] a.first.albums.first.alias_tags.must_equal [@tag] a = Tag.eager(:albums=>:artist).all a.must_equal [@tag] a.first.albums.must_equal [@album] a.first.albums.first.artist.must_equal @artist end it "should eager load via eager_graph correctly" do @album.update(:artist => @artist) @album.add_tag(@tag) a = Artist.eager_graph(:albums=>[:tags, :alias_tags]).eager_graph(:first_album).all a.must_equal [@artist] a.first.albums.must_equal [@album] a.first.first_album.must_equal @album a.first.albums.first.tags.must_equal [@tag] a.first.albums.first.alias_tags.must_equal [@tag] a = Tag.eager_graph(:albums=>:artist).all a.must_equal [@tag] a.first.albums.must_equal [@album] a.first.albums.first.artist.must_equal @artist end it "should be able to eager_graph dependent eager associations using eager callback" do @album.update(:artist => @artist) @album.add_tag(@tag) a = Artist.eager(:albums=>proc{|ds| ds.eager_graph(:tags, :alias_tags).unordered}).eager(:first_album).all a.must_equal [@artist] a.first.albums.must_equal [@album] a.first.first_album.must_equal @album a.first.albums.first.tags.must_equal [@tag] 
a.first.albums.first.alias_tags.must_equal [@tag] a = Tag.eager(:albums=>proc{|ds| ds.eager_graph(:artist).unordered}).all a.must_equal [@tag] a.first.albums.must_equal [@album] a.first.albums.first.artist.must_equal @artist end it "should be able to eager dependent eager_graph associations using eager_graph_eager" do @album.update(:artist => @artist) @album.add_tag(@tag) a = Artist.eager_graph(:albums, :first_album).eager_graph_eager([:albums], :tags, :alias_tags).all a.must_equal [@artist] a.first.albums.must_equal [@album] a.first.first_album.must_equal @album a.first.albums.first.tags.must_equal [@tag] a.first.albums.first.alias_tags.must_equal [@tag] a = Tag.eager_graph(:albums).eager_graph_eager([:albums], :artist).all a.must_equal [@tag] a.first.albums.must_equal [@album] a.first.albums.first.artist.must_equal @artist end end regular_and_composite_key_associations = lambda do describe "when filtering/excluding by associations when joining" do def self_join(c) c.join(Sequel.as(c.table_name, :b), Array(c.primary_key).zip(Array(c.primary_key))).select_all(c.table_name) end before do @Artist = self_join(Artist) @Album = self_join(Album) @Tag = self_join(Tag) end include filter_by_associations end describe "with default/union :eager_limit_strategy" do before do @els = {} end include eager_limit_strategies end describe "with :eager_limit_strategy=>:ruby" do before do @els = {:eager_limit_strategy=>:ruby} end include eager_limit_strategies include eager_graph_limit_strategies end describe "with :eager_limit_strategy=>:distinct_on" do before do @els = {:eager_limit_strategy=>:distinct_on} end include one_to_one_eager_limit_strategies include one_through_one_eager_limit_strategies include one_through_many_eager_limit_strategies include one_to_one_eager_graph_limit_strategies include one_through_one_eager_graph_limit_strategies include one_through_many_eager_graph_limit_strategies include filter_by_associations_singular_limit_strategies end if DB.dataset.supports_ordered_distinct_on? describe "with :eager_limit_strategy=>:window_function" do def setup super @els = {:eager_limit_strategy=>:window_function} end include eager_limit_strategies include eager_graph_limit_strategies include filter_by_associations_limit_strategies end if DB.dataset.supports_window_functions? 
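# The two specs below exercise many_through_many and one_through_many associations end to end: regular loading, eager loading via eager and eager_graph, nested eager loading through a many_to_one association, and eager_graph_eager.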
it "should work with a many_through_many association" do @album.update(:artist => @artist) @album.add_tag(@tag) @album.reload @artist.reload @tag.reload @album.tags.must_equal [@tag] a = Artist.eager(:tags).all a.must_equal [@artist] a.first.tags.must_equal [@tag] a = Artist.eager_graph(:tags).all a.must_equal [@artist] a.first.tags.must_equal [@tag] a = Album.eager(:artist=>:tags).all a.must_equal [@album] a.first.artist.must_equal @artist a.first.artist.tags.must_equal [@tag] a = Album.eager(:artist=>proc{|ds| ds.eager_graph(:tags)}).all a.must_equal [@album] a.first.artist.must_equal @artist a.first.artist.tags.must_equal [@tag] a = Album.eager_graph(:artist=>:tags).all a.must_equal [@album] a.first.artist.must_equal @artist a.first.artist.tags.must_equal [@tag] a = Album.eager_graph(:artist).eager_graph_eager([:artist], :tags).all a.must_equal [@album] a.first.artist.must_equal @artist a.first.artist.tags.must_equal [@tag] end it "should work with a one_through_many association" do @album.update(:artist => @artist) @album.add_tag(@tag) @album.reload @artist.reload @tag.reload @album.tags.must_equal [@tag] a = Artist.eager(:first_tag).all a.must_equal [@artist] a.first.first_tag.must_equal @tag a = Artist.eager_graph(:first_tag).all a.must_equal [@artist] a.first.first_tag.must_equal @tag a = Album.eager(:artist=>:first_tag).all a.must_equal [@album] a.first.artist.must_equal @artist a.first.artist.first_tag.must_equal @tag a = Album.eager(:artist=>proc{|ds| ds.eager_graph(:first_tag)}).all a.must_equal [@album] a.first.artist.must_equal @artist a.first.artist.first_tag.must_equal @tag a = Album.eager_graph(:artist=>:first_tag).all a.must_equal [@album] a.first.artist.must_equal @artist a.first.artist.first_tag.must_equal @tag a = Album.eager_graph(:artist).eager_graph_eager([:artist], :first_tag).all a.must_equal [@album] a.first.artist.must_equal @artist a.first.artist.first_tag.must_equal @tag end end describe "Sequel::Model Simple Associations" do before(:all) do @db = DB @db.drop_table?(:albums_tags, :tags, :albums, :artists) @db.create_table(:artists) do primary_key :id String :name end @db.create_table(:albums) do primary_key :id String :name foreign_key :artist_id, :artists end @db.create_table(:tags) do primary_key :id String :name end @db.create_table(:albums_tags) do foreign_key :album_id, :albums foreign_key :tag_id, :tags end end define_method(:setup) do [:albums_tags, :tags, :albums, :artists].each{|t| @db[t].delete} (::Artist = Class.new(Sequel::Model(@db[:artists]))).class_eval do plugin :dataset_associations plugin :eager_graph_eager one_to_many :albums, :order=>:name one_to_one :first_album, :clone=>:albums one_to_one :second_album, :clone=>:albums, :limit=>[nil, 1] one_to_one :last_album, :class=>:Album, :order=>Sequel.desc(:name) one_to_many :first_two_albums, :clone=>:albums, :limit=>2 one_to_many :second_two_albums, :clone=>:albums, :limit=>[2, 1] one_to_many :not_first_albums, :clone=>:albums, :limit=>[nil, 1] one_to_many :last_two_albums, :class=>:Album, :order=>Sequel.desc(:name), :limit=>2 one_to_many :a_albums, :clone=>:albums, :conditions=>{:name=>'Al'} one_to_one :first_a_album, :clone=>:a_albums plugin :many_through_many many_through_many :tags, [[:albums, :artist_id, :id], [:albums_tags, :album_id, :tag_id]] many_through_many :first_two_tags, :clone=>:tags, :order=>Sequel[:tags][:name], :limit=>2, :graph_order=>:name many_through_many :second_two_tags, :clone=>:tags, :order=>Sequel[:tags][:name], :limit=>[2, 1], :graph_order=>:name many_through_many 
:not_first_tags, :clone=>:tags, :order=>Sequel[:tags][:name], :limit=>[nil, 1], :graph_order=>:name many_through_many :last_two_tags, :clone=>:tags, :order=>Sequel.desc(Sequel[:tags][:name]), :limit=>2, :graph_order=>Sequel.desc(:name) many_through_many :t_tags, :clone=>:tags, :conditions=>{Sequel[:tags][:name]=>'T'} one_through_many :first_tag, [[:albums, :artist_id, :id], [:albums_tags, :album_id, :tag_id]], :order=>Sequel[:tags][:name], :graph_order=>:name, :class=>:Tag one_through_many :second_tag, :clone=>:first_tag, :limit=>[nil, 1] one_through_many :last_tag, :clone=>:first_tag, :order=>Sequel.desc(Sequel[:tags][:name]), :graph_order=>Sequel.desc(:name) one_through_many :t_tag, :clone=>:first_tag, :conditions=>{Sequel[:tags][:name]=>'T'} end (::Album = Class.new(Sequel::Model(@db[:albums]))).class_eval do plugin :dataset_associations plugin :eager_graph_eager many_to_one :artist, :reciprocal=>nil many_to_one :a_artist, :clone=>:artist, :conditions=>{:name=>'Ar'}, :key=>:artist_id many_to_many :tags, :right_key=>:tag_id plugin :many_through_many many_through_many :mthm_tags, [[:albums_tags, :album_id, :tag_id]], :class=>:Tag many_to_many :alias_tags, :clone=>:tags, :join_table=>Sequel[:albums_tags].as(:at) many_to_many :first_two_tags, :clone=>:tags, :order=>:name, :limit=>2 many_to_many :second_two_tags, :clone=>:tags, :order=>:name, :limit=>[2, 1] many_to_many :not_first_tags, :clone=>:tags, :order=>:name, :limit=>[nil, 1] many_to_many :last_two_tags, :clone=>:tags, :order=>Sequel.desc(:name), :limit=>2 many_to_many :t_tags, :clone=>:tags, :conditions=>{:name=>'T'} many_to_many :alias_t_tags, :clone=>:t_tags, :join_table=>Sequel[:albums_tags].as(:at) one_through_one :first_tag, :clone=>:tags, :order=>:name one_through_one :second_tag, :clone=>:first_tag, :limit=>[nil, 1] one_through_one :last_tag, :clone=>:tags, :order=>Sequel.desc(:name) one_through_one :t_tag, :clone=>:t_tags one_through_one :alias_t_tag, :clone=>:alias_t_tags end (::Tag = Class.new(Sequel::Model(@db[:tags]))).class_eval do plugin :dataset_associations plugin :eager_graph_eager many_to_many :albums plugin :many_through_many many_through_many :tags, [[:albums_tags, :tag_id, :album_id], [:albums, :id, :artist_id], [:albums, :artist_id, :id], [:albums_tags, :album_id, :tag_id]], :class=>:Tag end @album = Album.create(:name=>'Al') @artist = Artist.create(:name=>'Ar') @tag = Tag.create(:name=>'T') @same_album = lambda{Album.create(:name=>'Al', :artist_id=>@artist.id)} @diff_album = lambda{Album.create(:name=>'lA', :artist_id=>@artist.id)} @middle_album = lambda{Album.create(:name=>'Bl', :artist_id=>@artist.id)} @other_tags = lambda{t = [Tag.create(:name=>'U'), Tag.create(:name=>'V')]; @db[:albums_tags].insert([:album_id, :tag_id], Tag.select(@album.id, :id)); t} @pr = lambda{[Album.create(:name=>'Al2'),Artist.create(:name=>'Ar2'),Tag.create(:name=>'T2')]} @ins = lambda{@db[:albums_tags].insert(:tag_id=>@tag.id)} super() end after do [:Tag, :Album, :Artist].each{|x| Object.send(:remove_const, x)} end after(:all) do @db.drop_table?(:albums_tags, :tags, :albums, :artists) end instance_exec(&basic_regular_and_composite_key_associations) instance_exec(®ular_and_composite_key_associations) describe "with :correlated_subquery limit strategy" do before do @els = {:eager_limit_strategy=>:correlated_subquery} end include one_to_one_eager_graph_limit_strategies include one_to_many_eager_graph_limit_strategies include filter_by_associations_one_to_one_limit_strategies include filter_by_associations_one_to_many_limit_strategies 
end if DB.dataset.supports_limits_in_correlated_subqueries? it "should handle eager loading limited associations for many objects" do @db[:artists].import([:name], (1..99).map{|i| [i.to_s]}) artists = Artist.eager(:albums).all artists.length.must_equal 100 artists.each{|a| a.albums.must_equal []} artists = Artist.eager(:first_two_albums).all artists.length.must_equal 100 artists.each{|a| a.first_two_albums.must_equal []} @db[:albums].insert([:artist_id], @db[:artists].select(:id)) artists = Artist.eager(:albums).all artists.length.must_equal 100 artists.each{|a| a.albums.length.must_equal 1} artists = Artist.eager(:first_two_albums).all artists.length.must_equal 100 artists.each{|a| a.first_two_albums.length.must_equal 1} end it "should handle the :eager_limit option in eager-loading callbacks" do @db[:artists].import([:name], (1..4).map{|i| ['test']}) artist_ids = @db[:artists].where(:name => 'test').select_map([:id]) @db[:albums].import([:artist_id], artist_ids * 3) ads = Artist.where(:id => artist_ids) artists = ads.eager(:albums => proc{|ds| ds.clone(:eager_limit => 1)}).all artists.length.must_equal 4 artists.each{|a| a.albums.length.must_equal 1} artists = ads.eager(:albums => proc{|ds| ds.clone(:eager_limit => 2)}).all artists.length.must_equal 4 artists.each{|a| a.albums.length.must_equal 2} artists = ads.eager(:albums => proc{|ds| ds.clone(:eager_limit => 3)}).all artists.length.must_equal 4 artists.each{|a| a.albums.length.must_equal 3} artists = ads.eager(:albums => proc{|ds| ds.clone(:eager_limit => 4)}).all artists.length.must_equal 4 artists.each{|a| a.albums.length.must_equal 3} end it "should handle many_to_one associations with same name as :key" do Album.def_column_alias(:artist_id_id, :artist_id) Album.many_to_one :artist_id, :key_column =>:artist_id, :class=>Artist @album.update(:artist_id_id => @artist.id) @album.artist_id.must_equal @artist as = Album.eager(:artist_id).all as.must_equal [@album] as.map{|a| a.artist_id}.must_equal [@artist] as = Album.eager_graph(:artist_id).all as.must_equal [@album] as.map{|a| a.artist_id}.must_equal [@artist] end it "should handle aliased tables when eager_graphing" do @album.update(:artist => @artist) @album.add_tag(@tag) Artist.set_dataset(Sequel[:artists].as(:ar)) Album.set_dataset(Sequel[:albums].as(:a)) Tag.set_dataset(Sequel[:tags].as(:t)) Artist.one_to_many :balbums, :class=>Album, :key=>:artist_id, :reciprocal=>nil Album.many_to_many :btags, :class=>Tag, :join_table=>:albums_tags, :right_key=>:tag_id Album.many_to_one :bartist, :class=>Artist, :key=>:artist_id Tag.many_to_many :balbums, :class=>Album, :join_table=>:albums_tags, :right_key=>:album_id a = Artist.eager_graph(:balbums=>:btags).all a.must_equal [@artist] a.first.balbums.must_equal [@album] a.first.balbums.first.btags.must_equal [@tag] a = Tag.eager_graph(:balbums=>:bartist).all a.must_equal [@tag] a.first.balbums.must_equal [@album] a.first.balbums.first.bartist.must_equal @artist end it "should have add method accept hashes and create new records" do @artist.remove_all_albums Album.dataset.delete @album = @artist.add_album(:name=>'Al2') Album.first[:name].must_equal 'Al2' @artist.albums_dataset.first[:name].must_equal 'Al2' @album.remove_all_tags Tag.dataset.delete @album.add_tag(:name=>'T2') Tag.first[:name].must_equal 'T2' @album.tags_dataset.first[:name].must_equal 'T2' end it "should have add method accept primary key and add related records" do @artist.remove_all_albums @artist.add_album(@album.id) @artist.albums_dataset.first[:id].must_equal @album.id 
@album.remove_all_tags @album.add_tag(@tag.id) @album.tags_dataset.first[:id].must_equal @tag.id end it "should have remove method accept primary key and remove related album" do @artist.add_album(@album) @artist.reload.remove_album(@album.id) @artist.reload.albums.must_equal [] @album.add_tag(@tag) @album.reload.remove_tag(@tag.id) @tag.reload.albums.must_equal [] end it "should handle dynamic callbacks for regular loading" do @artist.add_album(@album) @artist.albums.must_equal [@album] @artist.albums{|ds| ds.exclude(:id=>@album.id)}.must_equal [] @artist.albums{|ds| ds.filter(:id=>@album.id)}.must_equal [@album] @album.artist.must_equal @artist @album.artist{|ds| ds.exclude(:id=>@artist.id)}.must_be_nil @album.artist{|ds| ds.filter(:id=>@artist.id)}.must_equal @artist @artist.albums{|ds| ds.exclude(:id=>@album.id)}.must_equal [] @artist.albums{|ds| ds.filter(:id=>@album.id)}.must_equal [@album] @album.artist{|ds| ds.exclude(:id=>@artist.id)}.must_be_nil @album.artist{|ds| ds.filter(:id=>@artist.id)}.must_equal @artist end it "should handle dynamic callbacks for eager loading via eager and eager_graph" do @artist.add_album(@album) @album.add_tag(@tag) album2 = @artist.add_album(:name=>'Foo') tag2 = album2.add_tag(:name=>'T2') artist = Artist.eager(:albums=>:tags).all.first artist.albums.must_equal [@album, album2] artist.albums.map{|x| x.tags}.must_equal [[@tag], [tag2]] artist = Artist.eager_graph(:albums=>:tags).all.first artist.albums.must_equal [@album, album2] artist.albums.map{|x| x.tags}.must_equal [[@tag], [tag2]] artist = Artist.eager(:albums=>{proc{|ds| ds.where(:id=>album2.id)}=>:tags}).all.first artist.albums.must_equal [album2] artist.albums.first.tags.must_equal [tag2] artist = Artist.eager_graph(:albums=>{proc{|ds| ds.where(:id=>album2.id)}=>:tags}).all.first artist.albums.must_equal [album2] artist.albums.first.tags.must_equal [tag2] end it "should not produce duplicates when eager graphing many_to_one=>one_to_many association" do @pr.call @album.update(:artist => @artist) album2 = Album.last album2.update(:artist => @artist) a = Album.eager_graph(:artist=>:albums).order{[albums[:id], albums_0[:id]]}.all a.must_equal [@album, album2] a.map(&:artist).must_equal [@artist, @artist] a.map(&:artist).map(&:albums).must_equal [[@album, album2], [@album, album2]] a = Album.eager_graph(:artist=>:albums).eager_graph_eager([:artist], :tags).order{[albums[:id], albums_0[:id]]}.all a.must_equal [@album, album2] a.map(&:artist).must_equal [@artist, @artist] a.map(&:artist).map(&:albums).must_equal [[@album, album2], [@album, album2]] a.map(&:artist).map{|artist| artist.associations[:tags]}.must_equal [[], []] end it "should support using custom join types in nested associations" do @album.update(:artist_id=>@artist.id) ds = Artist.eager_graph(:albums=>Sequel.as(:tags, :t, :join_type=>:inner)).order{[artists[:id], albums[:id], t[:id]]} ds.all.must_equal [] @db[:albums_tags].insert(:album_id=>@album.id, :tag_id=>@tag.id) a = ds.all a.must_equal [@artist] a = a.first a.associations[:albums].must_equal [@album] a = a.albums.first a.associations[:tags].must_equal [@tag] end it "should have remove method raise an error for one_to_many records if the object isn't already associated" do proc{@artist.remove_album(@album.id)}.must_raise(Sequel::Error) proc{@artist.remove_album(@album)}.must_raise(Sequel::Error) end it "should handle dataset associations with :dataset_associations_join options" do Album.many_to_many :tags, :right_key=>:tag_id, :select=>[Sequel.expr(:tags).*, 
Sequel[:albums_tags][:tag_id].as(:atid)], :dataset_associations_join=>true Artist.many_through_many :tags, [[:albums, :artist_id, :id], [:albums_tags, :album_id, :tag_id]], :select=>[Sequel.expr(:tags).*, Sequel[:albums_tags][:tag_id].as(:atid), Sequel[:albums][:artist_id].as(:aid)], :dataset_associations_join=>true Album.tags.order(Sequel[:tags][:name]).first.must_be_nil Artist.tags.order(Sequel[:tags][:name]).first.must_be_nil @album.add_tag(@tag) @artist.add_album(@album) Album.tags.order(Sequel[:tags][:name]).first.must_equal Tag.load(:id=>@tag.id, :name=>"T", :atid=>@tag.id) Artist.tags.order(Sequel[:tags][:name]).first.must_equal Tag.load(:id=>@tag.id, :name=>"T", :atid=>@tag.id, :aid=>@artist.id) end end describe "Sequel::Model Composite Key Associations" do before(:all) do @db = DB @db.drop_table?(:albums_tags, :tags, :albums, :artists) @db.create_table(:artists) do Integer :id1 Integer :id2 String :name primary_key [:id1, :id2] end @db.create_table(:albums) do Integer :id1 Integer :id2 String :name Integer :artist_id1 Integer :artist_id2 foreign_key [:artist_id1, :artist_id2], :artists primary_key [:id1, :id2] end @db.create_table(:tags) do Integer :id1 Integer :id2 String :name primary_key [:id1, :id2] end @db.create_table(:albums_tags) do Integer :album_id1 Integer :album_id2 Integer :tag_id1 Integer :tag_id2 foreign_key [:album_id1, :album_id2], :albums foreign_key [:tag_id1, :tag_id2], :tags end end define_method(:setup) do [:albums_tags, :tags, :albums, :artists].each{|t| @db[t].delete} (::Artist = Class.new(Sequel::Model(@db[:artists]))).class_eval do plugin :dataset_associations plugin :eager_graph_eager set_primary_key [:id1, :id2] unrestrict_primary_key one_to_many :albums, :key=>[:artist_id1, :artist_id2], :order=>:name one_to_one :first_album, :clone=>:albums one_to_one :last_album, :clone=>:albums, :order=>Sequel.desc(:name) one_to_one :second_album, :clone=>:albums, :limit=>[nil, 1] one_to_many :first_two_albums, :clone=>:albums, :order=>:name, :limit=>2 one_to_many :second_two_albums, :clone=>:albums, :order=>:name, :limit=>[2, 1] one_to_many :not_first_albums, :clone=>:albums, :order=>:name, :limit=>[nil, 1] one_to_many :last_two_albums, :clone=>:albums, :order=>Sequel.desc(:name), :limit=>2 one_to_many :a_albums, :clone=>:albums do |ds| ds.where(:name=>'Al') end one_to_one :first_a_album, :clone=>:a_albums plugin :many_through_many many_through_many :tags, [[:albums, [:artist_id1, :artist_id2], [:id1, :id2]], [:albums_tags, [:album_id1, :album_id2], [:tag_id1, :tag_id2]]] many_through_many :first_two_tags, :clone=>:tags, :order=>Sequel[:tags][:name], :limit=>2, :graph_order=>:name many_through_many :second_two_tags, :clone=>:tags, :order=>Sequel[:tags][:name], :limit=>[2, 1], :graph_order=>:name many_through_many :not_first_tags, :clone=>:tags, :order=>Sequel[:tags][:name], :limit=>[nil, 1], :graph_order=>:name many_through_many :last_two_tags, :clone=>:tags, :order=>Sequel.desc(Sequel[:tags][:name]), :limit=>2, :graph_order=>Sequel.desc(:name) many_through_many :t_tags, :clone=>:tags do |ds| ds.where(Sequel[:tags][:name]=>'T') end one_through_many :first_tag, [[:albums, [:artist_id1, :artist_id2], [:id1, :id2]], [:albums_tags, [:album_id1, :album_id2], [:tag_id1, :tag_id2]]], :order=>Sequel[:tags][:name], :graph_order=>:name, :class=>:Tag one_through_many :second_tag, :clone=>:first_tag, :limit=>[nil, 1] one_through_many :last_tag, :clone=>:first_tag, :order=>Sequel.desc(Sequel[:tags][:name]), :graph_order=>Sequel.desc(:name) one_through_many :t_tag, 
:clone=>:first_tag do |ds| ds.where(Sequel[:tags][:name]=>'T') end end (::Album = Class.new(Sequel::Model(@db[:albums]))).class_eval do plugin :dataset_associations plugin :eager_graph_eager set_primary_key [:id1, :id2] unrestrict_primary_key many_to_one :artist, :key=>[:artist_id1, :artist_id2], :reciprocal=>nil many_to_one(:a_artist, :clone=>:artist){|ds| ds.where(:name=>'Ar')} many_to_many :tags, :left_key=>[:album_id1, :album_id2], :right_key=>[:tag_id1, :tag_id2] plugin :many_through_many many_through_many :mthm_tags, [[:albums_tags, [:album_id1, :album_id2], [:tag_id1, :tag_id2]]], :class=>:Tag many_to_many :alias_tags, :clone=>:tags, :join_table=>Sequel[:albums_tags].as(:at) many_to_many :first_two_tags, :clone=>:tags, :order=>:name, :limit=>2 many_to_many :second_two_tags, :clone=>:tags, :order=>:name, :limit=>[2, 1] many_to_many :not_first_tags, :clone=>:tags, :order=>:name, :limit=>[nil, 1] many_to_many :last_two_tags, :clone=>:tags, :order=>Sequel.desc(:name), :limit=>2 many_to_many :t_tags, :clone=>:tags do |ds| ds.where(:name=>'T') end many_to_many :alias_t_tags, :clone=>:t_tags, :join_table=>Sequel[:albums_tags].as(:at) one_through_one :first_tag, :clone=>:tags, :order=>:name one_through_one :second_tag, :clone=>:first_tag, :limit=>[nil, 1] one_through_one :last_tag, :clone=>:tags, :order=>Sequel.desc(:name) one_through_one :t_tag, :clone=>:t_tags one_through_one :alias_t_tag, :clone=>:alias_t_tags end (::Tag = Class.new(Sequel::Model(@db[:tags]))).class_eval do plugin :dataset_associations plugin :eager_graph_eager set_primary_key [:id1, :id2] unrestrict_primary_key many_to_many :albums, :right_key=>[:album_id1, :album_id2], :left_key=>[:tag_id1, :tag_id2] plugin :many_through_many many_through_many :tags, [[:albums_tags, [:tag_id1, :tag_id2], [:album_id1, :album_id2]], [:albums, [:id1, :id2], [:artist_id1, :artist_id2]], [:albums, [:artist_id1, :artist_id2], [:id1, :id2]], [:albums_tags, [:album_id1, :album_id2], [:tag_id1, :tag_id2]]], :class=>:Tag end @album = Album.create(:name=>'Al', :id1=>1, :id2=>2) @artist = Artist.create(:name=>'Ar', :id1=>3, :id2=>4) @tag = Tag.create(:name=>'T', :id1=>5, :id2=>6) @same_album = lambda{Album.create(:name=>'Al', :id1=>7, :id2=>8, :artist_id1=>3, :artist_id2=>4)} @diff_album = lambda{Album.create(:name=>'lA', :id1=>9, :id2=>10, :artist_id1=>3, :artist_id2=>4)} @middle_album = lambda{Album.create(:name=>'Bl', :id1=>13, :id2=>14, :artist_id1=>3, :artist_id2=>4)} @other_tags = lambda{t = [Tag.create(:name=>'U', :id1=>17, :id2=>18), Tag.create(:name=>'V', :id1=>19, :id2=>20)]; @db[:albums_tags].insert([:album_id1, :album_id2, :tag_id1, :tag_id2], Tag.select(1, 2, :id1, :id2)); t} @pr = lambda{[Album.create(:name=>'Al2', :id1=>11, :id2=>12),Artist.create(:name=>'Ar2', :id1=>13, :id2=>14),Tag.create(:name=>'T2', :id1=>15, :id2=>16)]} @ins = lambda{@db[:albums_tags].insert(:tag_id1=>@tag.id1, :tag_id2=>@tag.id2)} super() end after do [:Tag, :Album, :Artist].each{|x| Object.send(:remove_const, x)} end after(:all) do @db.drop_table?(:albums_tags, :tags, :albums, :artists) end instance_exec(&basic_regular_and_composite_key_associations) instance_exec(®ular_and_composite_key_associations) describe "with :correlated_subquery limit strategy" do before do @els = {:eager_limit_strategy=>:correlated_subquery} end include one_to_one_eager_graph_limit_strategies include one_to_many_eager_graph_limit_strategies include filter_by_associations_one_to_one_limit_strategies include filter_by_associations_one_to_many_limit_strategies end if 
DB.dataset.supports_limits_in_correlated_subqueries? && DB.dataset.supports_multiple_column_in? it "should have add method accept hashes and create new records" do @artist.remove_all_albums Album.dataset.delete @artist.add_album(:id1=>1, :id2=>2, :name=>'Al2') Album.first[:name].must_equal 'Al2' @artist.albums_dataset.first[:name].must_equal 'Al2' @album.remove_all_tags Tag.dataset.delete @album.add_tag(:id1=>1, :id2=>2, :name=>'T2') Tag.first[:name].must_equal 'T2' @album.tags_dataset.first[:name].must_equal 'T2' end it "should have add method accept primary key and add related records" do @artist.remove_all_albums @artist.add_album([@album.id1, @album.id2]) @artist.albums_dataset.first.pk.must_equal [@album.id1, @album.id2] @album.remove_all_tags @album.add_tag([@tag.id1, @tag.id2]) @album.tags_dataset.first.pk.must_equal [@tag.id1, @tag.id2] end it "should have remove method accept primary key and remove related album" do @artist.add_album(@album) @artist.reload.remove_album([@album.id1, @album.id2]) @artist.reload.albums.must_equal [] @album.add_tag(@tag) @album.reload.remove_tag([@tag.id1, @tag.id2]) @tag.reload.albums.must_equal [] end it "should have remove method raise an error for one_to_many records if the object isn't already associated" do proc{@artist.remove_album([@album.id1, @album.id2])}.must_raise(Sequel::Error) proc{@artist.remove_album(@album)}.must_raise(Sequel::Error) end it "should not have association method or dataset method return rows with NULL keys" do Album.one_to_many :other_albums, :class=>Album, :key=>[:artist_id1, :artist_id2], :primary_key=>[:artist_id1, :artist_id2] @album.update(:artist_id1=>1) @album.other_albums.must_equal [] @album.other_albums_dataset.all.must_equal [] end end describe "Sequel::Model pg_array_to_many" do before(:all) do db = @db = DB @db.extension :pg_array unless @db.frozen? 
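# Load PostgreSQL array support: the pg_array Database extension for array column typecasting, plus the pg_array_ops global extension for the array operator DSL. Here the array of foreign keys lives on the albums side (albums.tag_ids), so Album uses pg_array_to_many.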
Sequel.extension :pg_array_ops @db.drop_table?(:tags, :albums, :artists) @db.create_table(:artists) do primary_key :id String :name end @db.create_table(:albums) do primary_key :id String :name foreign_key :artist_id, :artists column :tag_ids, "#{db.send(:type_literal, :type=>Integer)}[]" end @db.create_table(:tags) do primary_key :id String :name end end before do [:tags, :albums, :artists].each{|t| @db[t].delete} (::Artist = Class.new(Sequel::Model(@db[:artists]))).class_eval do plugin :dataset_associations plugin :eager_graph_eager one_to_many :albums, :order=>:name one_to_one :first_album, :clone=>:albums one_to_many :a_albums, :clone=>:albums do |ds| ds.where(:name=>'Al') end one_to_one :first_a_album, :clone=>:a_albums end (::Album = Class.new(Sequel::Model(@db[:albums]))).class_eval do plugin :dataset_associations plugin :pg_array_associations plugin :eager_graph_eager many_to_one :artist, :reciprocal=>nil many_to_one :a_artist, :clone=>:artist, :key=>:artist_id do |ds| ds.where(:name=>'Ar') end pg_array_to_many :tags, :key=>:tag_ids, :save_after_modify=>true pg_array_to_many :alias_tags, :clone=>:tags pg_array_to_many :first_two_tags, :clone=>:tags, :order=>:name, :limit=>2 pg_array_to_many :second_two_tags, :clone=>:tags, :order=>:name, :limit=>[2, 1] pg_array_to_many :not_first_tags, :clone=>:tags, :order=>:name, :limit=>[nil, 1] pg_array_to_many :last_two_tags, :clone=>:tags, :order=>Sequel.desc(:name), :limit=>2 pg_array_to_many :t_tags, :clone=>:tags do |ds| ds.where(Sequel[:tags][:name]=>'T') end pg_array_to_many :alias_t_tags, :clone=>:t_tags end (::Tag = Class.new(Sequel::Model(@db[:tags]))).class_eval do plugin :dataset_associations plugin :pg_array_associations plugin :eager_graph_eager many_to_pg_array :albums end @album = Album.create(:name=>'Al') @artist = Artist.create(:name=>'Ar') @tag = Tag.create(:name=>'T') @many_to_many_method = :pg_array_to_many @no_many_through_many = true @same_album = lambda{Album.create(:name=>'Al', :artist_id=>@artist.id)} @diff_album = lambda{Album.create(:name=>'lA', :artist_id=>@artist.id)} @middle_album = lambda{Album.create(:name=>'Bl', :artist_id=>@artist.id)} @other_tags = lambda{t = [Tag.create(:name=>'U'), Tag.create(:name=>'V')]; Tag.all{|x| @album.add_tag(x)}; t} @pr = lambda{[Album.create(:name=>'Al2'),Artist.create(:name=>'Ar2'),Tag.create(:name=>'T2')]} @ins = lambda{} end after do [:Tag, :Album, :Artist].each{|x| Object.send(:remove_const, x)} end after(:all) do @db.drop_table?(:tags, :albums, :artists) end instance_exec(&basic_regular_and_composite_key_associations) include many_to_many_eager_limit_strategies include many_to_many_eager_graph_limit_strategies it "should handle adding and removing entries in array" do a = Album.create a.typecast_on_assignment = false a.add_tag(@tag) a.remove_tag(@tag) a.save end end if DB.database_type == :postgres && [:postgres, :jdbc].include?(DB.adapter_scheme) && DB.server_version >= 90300 describe "Sequel::Model many_to_pg_array" do before(:all) do db = @db = DB @db.extension :pg_array unless @db.frozen? 
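# Mirror of the pg_array_to_many setup above, but with the key array stored on the tags side (tags.album_ids), so Album uses many_to_pg_array and Tag uses pg_array_to_many.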
Sequel.extension :pg_array_ops @db.drop_table?(:tags, :albums, :artists) @db.create_table(:artists) do primary_key :id String :name end @db.create_table(:albums) do primary_key :id String :name foreign_key :artist_id, :artists end @db.create_table(:tags) do primary_key :id String :name column :album_ids, "#{db.send(:type_literal, :type=>Integer)}[]" end end before do [:tags, :albums, :artists].each{|t| @db[t].delete} (::Artist = Class.new(Sequel::Model(@db[:artists]))).class_eval do plugin :dataset_associations plugin :eager_graph_eager one_to_many :albums, :order=>:name one_to_one :first_album, :class=>:Album, :order=>:name one_to_many :a_albums, :clone=>:albums do |ds| ds.where(:name=>'Al') end one_to_one :first_a_album, :clone=>:a_albums end (::Album = Class.new(Sequel::Model(@db[:albums]))).class_eval do plugin :dataset_associations plugin :pg_array_associations plugin :eager_graph_eager many_to_one :artist, :reciprocal=>nil many_to_one :a_artist, :clone=>:artist, :key=>:artist_id do |ds| ds.where(:name=>'Ar') end many_to_pg_array :tags many_to_pg_array :alias_tags, :clone=>:tags many_to_pg_array :first_two_tags, :clone=>:tags, :order=>:name, :limit=>2 many_to_pg_array :second_two_tags, :clone=>:tags, :order=>:name, :limit=>[2, 1] many_to_pg_array :not_first_tags, :clone=>:tags, :order=>:name, :limit=>[nil, 1] many_to_pg_array :last_two_tags, :clone=>:tags, :order=>Sequel.desc(:name), :limit=>2 many_to_pg_array :t_tags, :clone=>:tags do |ds| ds.where(Sequel[:tags][:name]=>'T') end many_to_pg_array :alias_t_tags, :clone=>:t_tags end (::Tag = Class.new(Sequel::Model(@db[:tags]))).class_eval do plugin :dataset_associations plugin :pg_array_associations plugin :eager_graph_eager pg_array_to_many :albums, :save_after_modify=>true end @album = Album.create(:name=>'Al') @artist = Artist.create(:name=>'Ar') @tag = Tag.create(:name=>'T') @many_to_many_method = :pg_array_to_many @no_many_through_many = true @same_album = lambda{Album.create(:name=>'Al', :artist_id=>@artist.id)} @diff_album = lambda{Album.create(:name=>'lA', :artist_id=>@artist.id)} @middle_album = lambda{Album.create(:name=>'Bl', :artist_id=>@artist.id)} @other_tags = lambda{t = [Tag.create(:name=>'U'), Tag.create(:name=>'V')]; Tag.all{|x| @album.add_tag(x)}; @tag.refresh; t.each{|x| x.refresh}; t} @pr = lambda{[Album.create(:name=>'Al2'),Artist.create(:name=>'Ar2'),Tag.create(:name=>'T2')]} @ins = lambda{} end after do [:Tag, :Album, :Artist].each{|x| Object.send(:remove_const, x)} end after(:all) do @db.drop_table?(:tags, :albums, :artists) end instance_exec(&basic_regular_and_composite_key_associations) include many_to_many_eager_limit_strategies include many_to_many_eager_graph_limit_strategies it "should handle adding and removing entries in array" do a = Album.create @tag.typecast_on_assignment = false a.add_tag(@tag) a.remove_tag(@tag) end end if DB.database_type == :postgres && [:postgres, :jdbc].include?(DB.adapter_scheme) && DB.server_version >= 90300 describe "Sequel::Model Associations with clashing column names" do before(:all) do @db = DB @db.drop_table?(:bars_foos, :bars, :foos) @db.create_table(:foos) do primary_key :id Integer :object_id end @db.create_table(:bars) do primary_key :id Integer :object_id end @db.create_table(:bars_foos) do Integer :foo_id Integer :object_id primary_key [:foo_id, :object_id] end end before do [:bars_foos, :bars, :foos].each{|t| @db[t].delete} @Foo = Class.new(Sequel::Model(:foos)) @Bar = Class.new(Sequel::Model(:bars)) @Foo.def_column_alias(:obj_id, :object_id) 
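# object_id clashes with Ruby's built-in Object#object_id, so these specs
# expose the column through an aliased accessor instead. A minimal sketch of
# what def_column_alias sets up (assuming standard Sequel::Model column
# accessor behavior):
#
#   def obj_id; self[:object_id]; end         # reader for the object_id column
#   def obj_id=(v); self[:object_id] = v; end # writer for the object_id column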
@Bar.def_column_alias(:obj_id, :object_id) @Foo.one_to_many :bars, :primary_key=>:obj_id, :primary_key_column=>:object_id, :key=>:object_id, :key_method=>:obj_id, :class=>@Bar @Foo.one_to_one :bar, :primary_key=>:obj_id, :primary_key_column=>:object_id, :key=>:object_id, :key_method=>:obj_id, :class=>@Bar @Bar.many_to_one :foo, :key=>:obj_id, :key_column=>:object_id, :primary_key=>:object_id, :primary_key_method=>:obj_id, :class=>@Foo @Foo.many_to_many :mtmbars, :join_table=>:bars_foos, :left_primary_key=>:obj_id, :left_primary_key_column=>:object_id, :right_primary_key=>:object_id, :right_primary_key_method=>:obj_id, :left_key=>:foo_id, :right_key=>:object_id, :class=>@Bar @Foo.one_through_one :mtmbar, :join_table=>:bars_foos, :left_primary_key=>:obj_id, :left_primary_key_column=>:object_id, :right_primary_key=>:object_id, :right_primary_key_method=>:obj_id, :left_key=>:foo_id, :right_key=>:object_id, :class=>@Bar @Bar.many_to_many :mtmfoos, :join_table=>:bars_foos, :left_primary_key=>:obj_id, :left_primary_key_column=>:object_id, :right_primary_key=>:object_id, :right_primary_key_method=>:obj_id, :left_key=>:object_id, :right_key=>:foo_id, :class=>@Foo @foo = @Foo.create(:obj_id=>2) @bar = @Bar.create(:obj_id=>2) @Foo.db[:bars_foos].insert(2, 2) end after(:all) do @db.drop_table?(:bars_foos, :bars, :foos) end it "should have working regular association methods" do @Bar.first.foo.must_equal @foo @Foo.first.bars.must_equal [@bar] @Foo.first.bar.must_equal @bar @Foo.first.mtmbars.must_equal [@bar] @Foo.first.mtmbar.must_equal @bar @Bar.first.mtmfoos.must_equal [@foo] end it "should have working eager loading methods" do @Bar.eager(:foo).all.map{|o| [o, o.foo]}.must_equal [[@bar, @foo]] @Foo.eager(:bars).all.map{|o| [o, o.bars]}.must_equal [[@foo, [@bar]]] @Foo.eager(:bar).all.map{|o| [o, o.bar]}.must_equal [[@foo, @bar]] @Foo.eager(:mtmbars).all.map{|o| [o, o.mtmbars]}.must_equal [[@foo, [@bar]]] @Foo.eager(:mtmbar).all.map{|o| [o, o.mtmbar]}.must_equal [[@foo, @bar]] @Bar.eager(:mtmfoos).all.map{|o| [o, o.mtmfoos]}.must_equal [[@bar, [@foo]]] end it "should have working eager graphing methods" do @Bar.eager_graph(:foo).all.map{|o| [o, o.foo]}.must_equal [[@bar, @foo]] @Foo.eager_graph(:bars).all.map{|o| [o, o.bars]}.must_equal [[@foo, [@bar]]] @Foo.eager_graph(:bar).all.map{|o| [o, o.bar]}.must_equal [[@foo, @bar]] @Foo.eager_graph(:mtmbars).all.map{|o| [o, o.mtmbars]}.must_equal [[@foo, [@bar]]] @Foo.eager_graph(:mtmbar).all.map{|o| [o, o.mtmbar]}.must_equal [[@foo, @bar]] @Bar.eager_graph(:mtmfoos).all.map{|o| [o, o.mtmfoos]}.must_equal [[@bar, [@foo]]] end it "should have working modification methods" do b = @Bar.create(:obj_id=>3) f = @Foo.create(:obj_id=>3) @bar.foo = f @bar.obj_id.must_equal 3 @foo.bar = @bar @bar.obj_id.must_equal 2 @foo.add_bar(b) @foo.bars.sort_by{|x| x.id}.must_equal [@bar, b] @foo.remove_bar(b) @foo.bars.must_equal [@bar] @foo.remove_all_bars @foo.bars.must_equal [] @bar.refresh.update(:obj_id=>2) b.refresh.update(:obj_id=>3) @foo.mtmbars.must_equal [@bar] @foo.remove_all_mtmbars @foo.mtmbars.must_equal [] @foo.add_mtmbar(b) @foo.mtmbars.must_equal [b] @foo.remove_mtmbar(b) @foo.mtmbars.must_equal [] @bar.add_mtmfoo(f) @bar.mtmfoos.must_equal [f] @bar.remove_all_mtmfoos @bar.mtmfoos.must_equal [] @bar.add_mtmfoo(f) @bar.mtmfoos.must_equal [f] @bar.remove_mtmfoo(f) @bar.mtmfoos.must_equal [] end end describe "Sequel::Model query per join table support" do before(:all) do @dbs = 4.times.map do |i| url = if DB.adapter_scheme == :sqlite 'sqlite:/' else 
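# For non-SQLite runs, each of the four databases used by these multi-database
# specs is connected via its own URL from the environment; the guard at the
# bottom of this describe block skips the whole group unless all four are set.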
ENV["SEQUEL_QUERY_PER_ASSOCIATION_DB_#{i}_URL"] end Sequel.connect(url, :keep_reference=>false) end end after(:all) do @dbs.each(&:disconnect) end after do db1, db2, db3, db4 = @dbs db1.drop_table?(:bs) db2.drop_table?(:cs) db3.drop_table?(:bs_cs) db4.drop_table?(:bs_cs2) end it "should support a query per join table approach with scalar keys" do db1, db2, db3, db4 = @dbs db1.create_table!(:bs) do primary_key :id end db2.create_table!(:cs) do primary_key :id end db3.create_table!(:bs_cs) do Integer :b_id Integer :c_id end db4.create_table!(:bs_cs2) do Integer :b2_id Integer :c2_id end _C = Sequel::Model(db2[:cs]) _B = Sequel::Model(db1[:bs]) _B.class_eval do plugin :many_through_many many_to_many :cs, :class=>_C, :join_table_db=>db3, :order=>:id, :join_table=>:bs_cs, :left_key=>:b_id one_through_one :c, :clone=>:cs many_through_many :mtm_cs, [{:table=>:bs_cs, :left=>:b_id, :right=>:c_id, :db=>db3}, {:table=>:bs_cs2, :left=>:b2_id, :right=>:c2_id, :db=>db4}], :class=>_C, :order=>:id one_through_many :mtm_c, :clone=>:mtm_cs end b1 = _B.create b2 = _B.create b3 = _B.create c1 = _C.create c2 = _C.create c3 = _C.create c4 = _C.create b1.add_c(c1) b1.add_c(c2) b2.add_c(c2) b2.add_c(c3) b1.cs.must_equal [c1, c2] b2.cs.must_equal [c2, c3] b3.cs.must_equal [] b1.c.must_equal c1 b2.c.must_equal c2 b3.c.must_be_nil b1, b2, b3 = _B.eager(:cs, :c).order(:id).all b1.associations[:cs].must_equal [c1, c2] b2.associations[:cs].must_equal [c2, c3] b3.associations[:cs].must_equal [] b1.associations[:c].must_equal c1 b2.associations[:c].must_equal c2 b3.associations[:c].must_be_nil db4[:bs_cs2].insert(:b2_id=>1, :c2_id=>3) db4[:bs_cs2].insert(:b2_id=>1, :c2_id=>4) db4[:bs_cs2].insert(:b2_id=>2, :c2_id=>2) db4[:bs_cs2].insert(:b2_id=>2, :c2_id=>3) db4[:bs_cs2].insert(:b2_id=>3, :c2_id=>1) db3[:bs_cs].insert(:b_id=>3, :c_id=>5) b1.mtm_cs.must_equal [c2, c3, c4] b2.mtm_cs.must_equal [c1, c2, c3] b3.mtm_cs.must_equal [] b1.mtm_c.must_equal c2 b2.mtm_c.must_equal c1 b3.mtm_c.must_be_nil b1, b2, b3 = _B.eager(:mtm_cs, :mtm_c).order(:id).all b1.associations[:mtm_cs].must_equal [c2, c3, c4] b2.associations[:mtm_cs].must_equal [c1, c2, c3] b3.associations[:mtm_cs].must_equal [] b1.associations[:mtm_c].must_equal c2 b2.associations[:mtm_c].must_equal c1 b3.associations[:mtm_c].must_be_nil end it "should support a query per join table approach with composite keys" do db1, db2, db3, db4 = @dbs db1.create_table!(:bs) do Integer :bs_pk1 Integer :bs_pk2 primary_key [:bs_pk1, :bs_pk2] end db2.create_table!(:cs) do Integer :cs_pk1 Integer :cs_pk2 primary_key [:cs_pk1, :cs_pk2] end db3.create_table!(:bs_cs) do Integer :b_id1 Integer :b_id2 Integer :c_id1 Integer :c_id2 end db4.create_table!(:bs_cs2) do Integer :b2_id1 Integer :b2_id2 Integer :c2_id1 Integer :c2_id2 end _C = Sequel::Model(db2[:cs]) _B = Sequel::Model(db1[:bs]) _C.unrestrict_primary_key _B.class_eval do unrestrict_primary_key plugin :many_through_many many_to_many :cs, :class=>_C, :join_table_db=>db3, :order=>:cs_pk1, :join_table=>:bs_cs, :left_key=>[:b_id1, :b_id2], :right_key=>[:c_id1, :c_id2] one_through_one :c, :clone=>:cs many_through_many :mtm_cs, [{:table=>:bs_cs, :left=>[:b_id1, :b_id2], :right=>[:c_id1, :c_id2], :db=>db3}, {:table=>:bs_cs2, :left=>[:b2_id1, :b2_id2], :right=>[:c2_id1, :c2_id2], :db=>db4}], :class=>_C, :order=>:cs_pk1 one_through_many :mtm_c, :clone=>:mtm_cs end b1 = _B.create(:bs_pk1=>1, :bs_pk2=>2) b2 = _B.create(:bs_pk1=>3, :bs_pk2=>4) b3 = _B.create(:bs_pk1=>5, :bs_pk2=>6) c1 = _C.create(:cs_pk1=>10, :cs_pk2=>11) c2 = 
_C.create(:cs_pk1=>12, :cs_pk2=>13) c3 = _C.create(:cs_pk1=>14, :cs_pk2=>15) c4 = _C.create(:cs_pk1=>16, :cs_pk2=>17) b1.add_c(c1) b1.add_c(c2) b2.add_c(c2) b2.add_c(c3) b1.cs.must_equal [c1, c2] b2.cs.must_equal [c2, c3] b3.cs.must_equal [] b1.c.must_equal c1 b2.c.must_equal c2 b3.c.must_be_nil b1, b2, b3 = _B.eager(:cs, :c).order(:bs_pk1).all b1.associations[:cs].must_equal [c1, c2] b2.associations[:cs].must_equal [c2, c3] b3.associations[:cs].must_equal [] b1.associations[:c].must_equal c1 b2.associations[:c].must_equal c2 b3.associations[:c].must_be_nil db4[:bs_cs2].insert(:b2_id1=>10, :b2_id2=>11, :c2_id1=>14, :c2_id2=>15) db4[:bs_cs2].insert(:b2_id1=>10, :b2_id2=>11, :c2_id1=>16, :c2_id2=>17) db4[:bs_cs2].insert(:b2_id1=>12, :b2_id2=>13, :c2_id1=>12, :c2_id2=>13) db4[:bs_cs2].insert(:b2_id1=>12, :b2_id2=>13, :c2_id1=>14, :c2_id2=>15) db4[:bs_cs2].insert(:b2_id1=>14, :b2_id2=>15, :c2_id1=>10, :c2_id2=>11) db3[:bs_cs].insert(:b_id1=>1, :b_id2=>3, :c_id1=>10, :c_id2=>11) db3[:bs_cs].insert(:b_id1=>1, :b_id2=>2, :c_id1=>10, :c_id2=>5) db3[:bs_cs].insert(:b_id1=>3, :b_id2=>6, :c_id1=>10, :c_id2=>11) b1.mtm_cs.must_equal [c2, c3, c4] b2.mtm_cs.must_equal [c1, c2, c3] b3.mtm_cs.must_equal [] b1.mtm_c.must_equal c2 b2.mtm_c.must_equal c1 b3.mtm_c.must_be_nil b1, b2, b3 = _B.eager(:mtm_cs, :mtm_c).order(:bs_pk1).all b1.associations[:mtm_cs].must_equal [c2, c3, c4] b2.associations[:mtm_cs].must_equal [c1, c2, c3] b3.associations[:mtm_cs].must_equal [] b1.associations[:mtm_c].must_equal c2 b2.associations[:mtm_c].must_equal c1 b3.associations[:mtm_c].must_be_nil end end if DB.adapter_scheme == :sqlite || 4.times.all?{|i| ENV["SEQUEL_QUERY_PER_ASSOCIATION_DB_#{i}_URL"]}
sequel-5.63.0/spec/integration/database_test.rb
require_relative "spec_helper" describe Sequel::Database do before do @db = DB end it "should provide disconnect functionality" do @db.disconnect @db.pool.size.must_equal 0 @db.test_connection @db.pool.size.must_equal 1 end it "should provide disconnect functionality after preparing a statement" do @db.create_table!(:items){Integer :i} @db[:items].prepare(:first, :a).call @db.disconnect @db.pool.size.must_equal 0 @db.drop_table?(:items) end it "should raise Sequel::DatabaseError on invalid SQL" do proc{@db << "S"}.must_raise(Sequel::DatabaseError) end it "should have Sequel::DatabaseError#sql give the SQL causing the error" do (@db << "SELECT") rescue (e = $!)
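# The modifier rescue above captures the raised Sequel::DatabaseError in e so
# the next assertion can check DatabaseError#sql; the ENV['SEQUEL_ERROR_SQL']
# guard below reflects that recording error SQL is opt-in for this suite.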
e.sql.must_equal "SELECT" end if ENV['SEQUEL_ERROR_SQL'] describe "constraint violations" do before do @db.drop_table?(:test2, :test) end after do @db.drop_table?(:test2, :test) end cspecify "should raise Sequel::UniqueConstraintViolation when a unique constraint is violated", [:jdbc, :sqlite] do @db.create_table!(:test){String :a, :unique=>true, :null=>false} @db[:test].insert('1') proc{@db[:test].insert('1')}.must_raise(Sequel::UniqueConstraintViolation) @db[:test].insert('2') proc{@db[:test].update(:a=>'1')}.must_raise(Sequel::UniqueConstraintViolation) end cspecify "should raise Sequel::UniqueConstraintViolation when a unique constraint is violated for composite primary keys", [:jdbc, :sqlite] do @db.create_table!(:test){String :a; String :b; primary_key [:a, :b]} @db[:test].insert(:a=>'1', :b=>'2') proc{@db[:test].insert(:a=>'1', :b=>'2')}.must_raise(Sequel::UniqueConstraintViolation) @db[:test].insert(:a=>'3', :b=>'4') proc{@db[:test].update(:a=>'1', :b=>'2')}.must_raise(Sequel::UniqueConstraintViolation) end cspecify "should raise Sequel::CheckConstraintViolation when a check constraint is violated", [proc{|db| !db.mariadb? || db.server_version <= 100200}, :mysql], [proc{|db| db.sqlite_version < 30802}, :sqlite] do @db.create_table!(:test){String :a; check Sequel.~(:a=>'1')} proc{@db[:test].insert('1')}.must_raise(Sequel::CheckConstraintViolation) @db[:test].insert('2') proc{@db[:test].insert('1')}.must_raise(Sequel::CheckConstraintViolation) end cspecify "should raise Sequel::ForeignKeyConstraintViolation when a foreign key constraint is violated", [:jdbc, :sqlite] do @db.create_table!(:test, :engine=>:InnoDB){primary_key :id} @db.create_table!(:test2, :engine=>:InnoDB){foreign_key :tid, :test} proc{@db[:test2].insert(:tid=>1)}.must_raise(Sequel::ForeignKeyConstraintViolation) @db[:test].insert @db[:test2].insert(:tid=>1) proc{@db[:test2].where(:tid=>1).update(:tid=>3)}.must_raise(Sequel::ForeignKeyConstraintViolation) proc{@db[:test].where(:id=>1).delete}.must_raise(Sequel::ForeignKeyConstraintViolation) end cspecify "should raise Sequel::NotNullConstraintViolation when a not null constraint is violated", [:jdbc, :sqlite] do @db.create_table!(:test){Integer :a, :null=>false} proc{@db[:test].insert(:a=>nil)}.must_raise(Sequel::NotNullConstraintViolation) unless @db.database_type == :mysql # Broken mysql silently changes NULL here to 0, and doesn't raise an exception. 
@db[:test].insert(2) proc{@db[:test].update(:a=>nil)}.must_raise(Sequel::NotNullConstraintViolation) end end end it "should store underlying wrapped exception in Sequel::DatabaseError" do begin @db << "SELECT" rescue Sequel::DatabaseError=>e if defined?(Java::JavaLang::Exception) (e.wrapped_exception.is_a?(Exception) || e.wrapped_exception.is_a?(Java::JavaLang::Exception)).must_equal true else e.wrapped_exception.must_be_kind_of(Exception) end end end it "should not have the connection pool swallow non-StandardError based exceptions" do proc{@db.pool.hold{raise Interrupt, "test"}}.must_raise(Interrupt) end it "should be able to disconnect connections more than once without exceptions" do conn = @db.synchronize{|c| c} @db.disconnect @db.disconnect_connection(conn) @db.disconnect_connection(conn) end it "should provide ability to check connections for validity" do conn = @db.synchronize{|c| c} @db.valid_connection?(conn).must_equal true @db.disconnect @db.valid_connection?(conn).must_equal false end end
sequel-5.63.0/spec/integration/dataset_test.rb
require_relative "spec_helper" describe "Simple Dataset operations" do before do @db = DB @db.create_table!(:items) do primary_key :id Integer :number end @ds = @db[:items] @ds.insert(:number=>10) @ds = @ds.async if async?
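# When SEQUEL_ASYNC_THREAD_POOL is set, these specs exercise the
# async_thread_pool extension: @ds.async returns a dataset whose query methods
# return proxy objects evaluated on a worker thread. The wait{} helper used
# below (presumably defined in spec_helper) forces the proxied result, e.g.:
#
#   wait{@ds.insert(:number=>20)}  # block until the async insert has finished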
end after do @db.drop_table?(:items) end it "should support sequential primary keys" do wait{@ds.insert(:number=>20)} wait{@ds.insert(:number=>30)} @ds.order(:number).all.must_equal [ {:id => 1, :number=>10}, {:id => 2, :number=>20}, {:id => 3, :number=>30} ] end it "should support sequential primary keys with a Bignum" do @db.create_table!(:items) do primary_key :id, :type=>:Bignum Integer :number end wait{@ds.insert(:number=>20)} wait{@ds.insert(:number=>30)} @ds.order(:number).all.must_equal [{:id => 1, :number=>20}, {:id => 2, :number=>30}] end cspecify "should insert with a primary key specified", :db2, :mssql do wait{@ds.insert(:id=>100, :number=>20)} @ds.count.must_equal 2 @ds.order(:id).all.must_equal [{:id=>1, :number=>10}, {:id=>100, :number=>20}] end it "should support ordering considering NULLS" do wait{@ds.insert(:number=>20)} wait{@ds.insert(:number=>nil)} @ds.order(Sequel[:number].asc(:nulls=>:first)).select_map(:number).must_equal [nil, 10, 20] @ds.order(Sequel[:number].asc(:nulls=>:last)).select_map(:number).must_equal [10, 20, nil] @ds.order(Sequel[:number].desc(:nulls=>:first)).select_map(:number).must_equal [nil, 20, 10] @ds.order(Sequel[:number].desc(:nulls=>:last)).select_map(:number).must_equal [20, 10, nil] end it "should have insert return primary key value" do @ds.insert(:number=>20).must_equal 2 @ds.filter(:id=>2).first[:number].must_equal 20 end it "should have insert work correctly with static SQL" do wait{@db["INSERT INTO #{@ds.literal(:items)} (#{@ds.literal(:number)}) VALUES (20)"].insert} @ds.filter(:id=>2).first[:number].must_equal 20 end it "should join correctly" do @ds.join(Sequel[:items].as(:b), :id=>:id).select_all(:items).all.must_equal [{:id=>1, :number=>10}] end it "should handle LATERAL subqueries correctly" do wait{@ds.insert(:number=>20)} @ds.from(Sequel[:items].as(:i), @ds.where(Sequel[:items][:number]=>Sequel[:i][:number]).lateral).select_order_map([Sequel[:i][:number].as(:n), Sequel[:t1][:number]]).must_equal [[10, 10], [20, 20]] @ds.from(Sequel[:items].as(:i)).cross_join(@ds.where(Sequel[:items][:number]=>Sequel[:i][:number]).lateral).select_order_map([Sequel[:i][:number].as(:n), Sequel[:t1][:number]]).must_equal [[10, 10], [20, 20]] @ds.from(Sequel[:items].as(:i)).join(@ds.where(Sequel[:items][:number]=>Sequel[:i][:number]).lateral, 1=>1).select_order_map([Sequel[:i][:number].as(:n), Sequel[:t1][:number]]).must_equal [[10, 10], [20, 20]] @ds.from(Sequel[:items].as(:i)).join(@ds.where(Sequel[:items][:number]=>Sequel[:i][:number]).lateral, 1=>0).select_order_map([Sequel[:i][:number].as(:n), Sequel[:t1][:number]]).must_equal [] @ds.from(Sequel[:items].as(:i)).left_join(@ds.from(Sequel[:items].as(:i2)).where(Sequel[:i2][:number]=>Sequel[:i][:number]).lateral, 1=>1).select_order_map([Sequel[:i][:number].as(:n), Sequel[:t1][:number]]).must_equal [[10, 10], [20, 20]] @ds.from(Sequel[:items].as(:i)).left_join(@ds.from(Sequel[:items].as(:i2)).where(Sequel[:i2][:number]=>Sequel[:i][:number]).lateral, 1=>0).select_order_map([Sequel[:i][:number].as(:n), Sequel[:t1][:number]]).must_equal [[10, nil], [20, nil]] end if DB.dataset.supports_lateral_subqueries? 
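# A rough sketch of the SQL shape exercised by the LATERAL spec above
# (PostgreSQL-style syntax assumed); .lateral marks a subquery as allowed to
# reference columns from earlier tables in the FROM/JOIN list:
#
#   SELECT i.number AS n, t1.number
#   FROM items AS i
#   CROSS JOIN LATERAL (SELECT * FROM items WHERE (items.number = i.number)) AS t1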
it "should correctly deal with qualified columns and subselects" do @ds.from_self(:alias=>:a).select(Sequel[:a][:id], Sequel.qualify(:a, :number)).all.must_equal [{:id=>1, :number=>10}] @ds.join(@ds.as(:a), :id=>:id).select(Sequel[:a][:id], Sequel.qualify(:a, :number)).all.must_equal [{:id=>1, :number=>10}] end it "should graph correctly" do a = [{:items=>{:id=>1, :number=>10}, :b=>{:id=>1, :number=>10}}] pr = proc{|t| @ds.graph(t, {:id=>:id}, :table_alias=>:b).extension(:graph_each).all.must_equal a} pr[:items] pr[Sequel[:items].as(:foo)] pr[Sequel.identifier(:items)] pr[Sequel.identifier('items')] pr[Sequel.as(:items, :foo)] pr[Sequel.as(Sequel.identifier('items'), 'foo')] end it "should graph correctly with a subselect" do @ds.from_self(:alias=>:items).graph(@ds.from_self, {:id=>:id}, :table_alias=>:b).extension(:graph_each).all.must_equal [{:items=>{:id=>1, :number=>10}, :b=>{:id=>1, :number=>10}}] end cspecify "should have insert and update work with Sequel::DEFAULT", :sqlite do @db.create_table!(:items) do Integer :number, :default=>10 end wait{@ds.insert(:number=>Sequel::DEFAULT)} @ds.select_map(:number).must_equal [10] wait{@ds.insert(:number=>20)} wait{@ds.update(:number=>Sequel::DEFAULT)} @ds.select_map(:number).must_equal [10, 10] end cspecify "should have insert work correctly when inserting a row with all NULL values", :hsqldb do @db.create_table!(:items) do String :name Integer :number end wait{@ds.insert} @ds.all.must_equal [{:name=>nil, :number=>nil}] end it "should delete correctly" do @ds.filter(1=>1).delete.must_equal 1 @ds.count.must_equal 0 end it "should update correctly" do @ds.update(:number=>Sequel.expr(:number)+1).must_equal 1 @ds.all.must_equal [{:id=>1, :number=>11}] end it "should have update return the number of matched rows" do @ds.update(:number=>:number).must_equal 1 @ds.filter(:id=>1).update(:number=>:number).must_equal 1 @ds.filter(:id=>2).update(:number=>:number).must_equal 0 @ds.all.must_equal [{:id=>1, :number=>10}] end it "should iterate over records as they come in" do called = false wait{@ds.each{|row| called = true; _(row).must_equal(:id=>1, :number=>10)}} called.must_equal true end it "should support iterating over large numbers of records with paged_each" do (2..100).map{|i| wait{@ds.insert(:number=>i*10)}} [:offset, :filter].each do |strategy| rows = [] wait{@ds.order(:number).paged_each(:rows_per_fetch=>5, :strategy=>strategy){|row| rows << row}} rows.must_equal((1..100).map{|i| {:id=>i, :number=>i*10}}) rows = [] wait{@ds.order(:number).paged_each(:rows_per_fetch=>3, :strategy=>strategy){|row| rows << row}} rows.must_equal((1..100).map{|i| {:id=>i, :number=>i*10}}) rows = [] wait{@ds.order(:number, :id).paged_each(:rows_per_fetch=>5, :strategy=>strategy){|row| rows << row}} rows.must_equal((1..100).map{|i| {:id=>i, :number=>i*10}}) rows = [] wait{@ds.reverse_order(:number).paged_each(:rows_per_fetch=>5, :strategy=>strategy){|row| rows << row}} rows.must_equal((1..100).map{|i| {:id=>i, :number=>i*10}}.reverse) rows = [] wait{@ds.order(Sequel.desc(:number), :id).paged_each(:rows_per_fetch=>5, :strategy=>strategy){|row| rows << row}} rows.must_equal((1..100).map{|i| {:id=>i, :number=>i*10}}.reverse) end rows = [] wait{@ds.order(:number).limit(50, 25).paged_each(:rows_per_fetch=>3).each{|row| rows << row}} rows.must_equal((26..75).map{|i| {:id=>i, :number=>i*10}}) rows = [] wait{@ds.order(:number).limit(50, 25).paged_each(:rows_per_fetch=>3){|row| rows << row}} rows.must_equal((26..75).map{|i| {:id=>i, :number=>i*10}}) rows = [] 
wait{@ds.order(Sequel.*(:number, 2)).paged_each(:rows_per_fetch=>5){|row| rows << row}} rows.must_equal((1..100).map{|i| {:id=>i, :number=>i*10}}) rows = [] wait{@ds.order(Sequel.*(:number, 2)).paged_each(:rows_per_fetch=>5, :strategy=>:filter, :filter_values=>proc{|row, _| [row[:number] * 2]}){|row| rows << row}} rows.must_equal((1..100).map{|i| {:id=>i, :number=>i*10}}) if DB.adapter_scheme == :jdbc # check retrieval with varying fetch sizes array = (1..100).to_a [1, 2, 5, 10, 33, 50, 100, 1000].each do |i| @ds.with_fetch_size(i).select_order_map(:id).must_equal array end end end it "should fetch all results correctly" do @ds.all.must_equal [{:id=>1, :number=>10}] end it "should skip locked rows correctly" do skip if async? # async doesn't work with transactions @ds.insert(:number=>10) q1 = Queue.new q2 = Queue.new ds = @ds.order(:id).for_update.skip_locked begin t = Thread.new{@db.transaction(:isolation=>:committed){q2.push(ds.get(:id)); q1.pop}} q2.pop.must_equal 1 # Some databases do row level locking, others do page level locking [2, nil].must_include @db.transaction(:isolation=>:committed){ds.get(:id)} ensure q1.push(nil) t.join # Keep only one active connection, as some other specs expect that @db.disconnect end end if DB.dataset.supports_skip_locked? it "should raise error instead of waiting for rows correctly" do skip if async? # async doesn't work with transactions @ds.insert(:number=>10) q1 = Queue.new q2 = Queue.new ds = @ds.order(:id).for_update.nowait begin t = Thread.new{@db.transaction(:isolation=>:committed){q2.push(ds.get(:id)); q1.pop}} q2.pop.must_equal 1 # Some databases do row level locking, others do page level locking proc{@db.transaction(:isolation=>:committed){ds.get(:id)}}.must_raise Sequel::DatabaseLockTimeout ensure q1.push(nil) t.join # Keep only one active connection, as some other specs expect that @db.disconnect end end if DB.dataset.supports_nowait? it "should raise exception if raising on duplicate columns" do proc{@ds.select_map([:id, :id])}.must_raise Sequel::DuplicateColumnError end if DB.opts[:on_duplicate_columns] == :raise it "should fetch a single row correctly" do @ds.first.must_equal(:id=>1, :number=>10) @ds.single_record.must_equal(:id=>1, :number=>10) @ds.single_record!.must_equal(:id=>1, :number=>10) end it "should work correctly when returning from each without iterating over the whole result set" do skip if async?
# break not allowed in async blocks (break from proc closure error) @ds.insert(:number=>20) @ds.order(:id).each{|v| break v}.must_equal(:id=>1, :number=>10) @ds.reverse(:id).each{|v| break v}.must_equal(:id=>2, :number=>20) end it "should fetch a single value correctly" do @ds.get(:id).must_equal 1 @ds.select(:id).single_value.must_equal 1 @ds.select(:id).single_value!.must_equal 1 end it "should have distinct work with limit" do @ds.limit(1).distinct.all.must_equal [{:id=>1, :number=>10}] end it "should fetch correctly with a limit" do @ds.order(:id).limit(2).all.must_equal [{:id=>1, :number=>10}] wait{@ds.insert(:number=>20)} @ds.order(:id).limit(1).all.must_equal [{:id=>1, :number=>10}] @ds.order(:id).limit(2).all.must_equal [{:id=>1, :number=>10}, {:id=>2, :number=>20}] end it "should fetch correctly with a limit and offset" do @ds.order(:id).limit(2, 0).all.must_equal [{:id=>1, :number=>10}] @ds.order(:id).limit(2, 1).all.must_equal [] wait{@ds.insert(:number=>20)} @ds.order(:id).limit(1, 1).all.must_equal [{:id=>2, :number=>20}] @ds.order(:id).limit(2, 0).all.must_equal [{:id=>1, :number=>10}, {:id=>2, :number=>20}] @ds.order(:id).limit(2, 1).all.must_equal [{:id=>2, :number=>20}] end it "should fetch correctly with just offset" do @ds.order(:id).offset(0).all.must_equal [{:id=>1, :number=>10}] @ds.order(:id).offset(1).all.must_equal [] wait{@ds.insert(:number=>20)} @ds.order(:id).offset(0).all.must_equal [{:id=>1, :number=>10}, {:id=>2, :number=>20}] @ds.order(:id).offset(1).all.must_equal [{:id=>2, :number=>20}] @ds.order(:id).offset(2).all.must_equal [] end it "should fetch correctly with a limit and offset using separate methods" do @ds.order(:id).limit(2).offset(0).all.must_equal [{:id=>1, :number=>10}] @ds.order(:id).limit(2).offset(1).all.must_equal [] wait{@ds.insert(:number=>20)} @ds.order(:id).limit(1).offset(1).all.must_equal [{:id=>2, :number=>20}] @ds.order(:id).limit(2).offset(0).all.must_equal [{:id=>1, :number=>10}, {:id=>2, :number=>20}] @ds.order(:id).limit(2).offset(1).all.must_equal [{:id=>2, :number=>20}] end it "should provide correct columns when using a limit and offset" do ds = @ds.order(:id).limit(1, 1) wait{ds.all} ds.columns.must_equal [:id, :number] @ds.order(:id).limit(1, 1).columns.must_equal [:id, :number] end it "should fetch correctly with a limit and offset for different combinations of from and join tables" do @db.create_table!(:items2){primary_key :id2; Integer :number2} wait{@db[:items2].insert(:number2=>10)} @ds.from(:items, :items2).order(:id).limit(2, 0).all.must_equal [{:id=>1, :number=>10, :id2=>1, :number2=>10}] @ds.from(Sequel[:items].as(:i), Sequel[:items2].as(:i2)).order(:id).limit(2, 0).all.must_equal [{:id=>1, :number=>10, :id2=>1, :number2=>10}] @ds.cross_join(:items2).order(:id).limit(2, 0).all.must_equal [{:id=>1, :number=>10, :id2=>1, :number2=>10}] @ds.from(Sequel[:items].as(:i)).cross_join(Sequel[:items2].as(:i2)).order(:id).limit(2, 0).all.must_equal [{:id=>1, :number=>10, :id2=>1, :number2=>10}] @ds.cross_join(Sequel[:items2].as(:i)).cross_join(@db[:items2].select(Sequel[:id2].as(:id3), Sequel[:number2].as(:number3))).order(:id).limit(2, 0).all.must_equal [{:id=>1, :number=>10, :id2=>1, :number2=>10, :id3=>1, :number3=>10}] @ds.from(:items, :items2).order(:id).limit(2, 1).all.must_equal [] @ds.from(Sequel[:items].as(:i), Sequel[:items2].as(:i2)).order(:id).limit(2, 1).all.must_equal [] @ds.cross_join(:items2).order(:id).limit(2, 1).all.must_equal []
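# Note: the two-argument limit(2, 1) form is shorthand for limit(2).offset(1),
# i.e. LIMIT 2 OFFSET 1, so with a single joined row these queries return [].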
@ds.from(Sequel[:items].as(:i)).cross_join(Sequel[:items2].as(:i2)).order(:id).limit(2, 1).all.must_equal [] @ds.cross_join(Sequel[:items2].as(:i)).cross_join(@db[:items2].select(Sequel[:id2].as(:id3), Sequel[:number2].as(:number3))).order(:id).limit(2, 1).all.must_equal [] @db.drop_table(:items2) end it "should fetch correctly with a limit and offset without an order" do @ds.limit(2, 1).all.must_equal [] @ds.join(Sequel[:items].as(:i), :id=>:id).select(Sequel[:items][:id].as(:s), Sequel[:i][:id].as(:id2)).limit(2, 1).all.must_equal [] @ds.join(Sequel[:items].as(:i), :id=>:id).select(Sequel[:items][:id]).limit(2, 1).all.must_equal [] @ds.join(Sequel[:items].as(:i), :id=>:id).select(Sequel.qualify(:items, :id)).limit(2, 1).all.must_equal [] @ds.join(Sequel[:items].as(:i), :id=>:id).select(Sequel.qualify(:items, :id).as(:s)).limit(2, 1).all.must_equal [] end it "should be orderable by column number" do wait{@ds.insert(:number=>20)} wait{@ds.insert(:number=>10)} @ds.order(2, 1).select_map([:id, :number]).must_equal [[1, 10], [3, 10], [2, 20]] end it "should fetch correctly with a limit in an IN subselect" do @ds.where(:id=>@ds.select(:id).order(:id).limit(2)).all.must_equal [{:id=>1, :number=>10}] wait{@ds.insert(:number=>20)} @ds.where(:id=>@ds.select(:id).order(:id).limit(1)).all.must_equal [{:id=>1, :number=>10}] @ds.where(:id=>@ds.select(:id).order(:id).limit(2)).order(:id).all.must_equal [{:id=>1, :number=>10}, {:id=>2, :number=>20}] end it "should fetch correctly with a limit and offset in an IN subselect" do @ds.where(:id=>@ds.select(:id).order(:id).limit(2, 0)).all.must_equal [{:id=>1, :number=>10}] @ds.where(:id=>@ds.select(:id).order(:id).limit(2, 1)).all.must_equal [] wait{@ds.insert(:number=>20)} @ds.where(:id=>@ds.select(:id).order(:id).limit(1, 1)).all.must_equal [{:id=>2, :number=>20}] @ds.where(:id=>@ds.select(:id).order(:id).limit(2, 0)).order(:id).all.must_equal [{:id=>1, :number=>10}, {:id=>2, :number=>20}] @ds.where(:id=>@ds.select(:id).order(:id).limit(2, 1)).all.must_equal [{:id=>2, :number=>20}] end it "should fetch correctly when using limit and offset in a from_self" do wait{@ds.insert(:number=>20)} ds = @ds.order(:id).limit(1, 1).from_self ds.all.must_equal [{:number=>20, :id=>2}] ds.columns.must_equal [:id, :number] @ds.order(:id).limit(1, 1).columns.must_equal [:id, :number] end it "should fetch correctly when using nested limit and offset in a from_self" do wait{@ds.insert(:number=>20)} wait{@ds.insert(:number=>30)} ds = @ds.order(:id).limit(2, 1).from_self.reverse_order(:number).limit(1, 1) ds.all.must_equal [{:number=>20, :id=>2}] ds.columns.must_equal [:id, :number] @ds.order(:id).limit(2, 1).from_self.reverse_order(:number).limit(1, 1).columns.must_equal [:id, :number] ds = @ds.order(:id).limit(3, 1).from_self.limit(2, 1).from_self.limit(1, 1) ds.all.must_equal [] ds.columns.must_equal [:id, :number] wait{@ds.insert(:number=>40)} ds = @ds.order(:id).limit(3, 1).from_self.reverse_order(:number).limit(2, 1).from_self.reverse_order(:id).limit(1, 1) ds.all.must_equal [{:number=>20, :id=>2}] ds.columns.must_equal [:id, :number] end it "should alias columns correctly" do @ds.select(Sequel[:id].as(:x), Sequel[:number].as(:n)).first.must_equal(:x=>1, :n=>10) end it "should support table aliases with column aliases" do DB.from(@ds.as(:i, [:x, :n])).first.must_equal(:x=>1, :n=>10) end if DB.dataset.supports_derived_column_lists? 
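# A derived column list renames a subquery's columns in the SQL itself; the
# spec above generates a statement shaped roughly like:
#
#   SELECT * FROM (SELECT * FROM items) AS i(x, n)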
it "should handle true/false properly" do @ds.filter(Sequel::TRUE).select_map(:number).must_equal [10] @ds.filter(Sequel::FALSE).select_map(:number).must_equal [] @ds.filter(true).select_map(:number).must_equal [10] @ds.filter(false).select_map(:number).must_equal [] end it "should support the sql_comments extension" do ds = @ds.extension(:sql_comments).comment("Some\rComment\r\nHere") ds.all.must_equal [{:id=>1, :number=>10}] ds.insert(:number=>20).must_equal 2 ds.update(:number=>30).must_equal 2 ds.delete.must_equal 2 end it "should support execution using an async thread pool" do q = Queue.new ds = @ds.async vals = 3.times.map{ds.all{q.pop}} 3.times{q.push nil} vals.each{|v| v[0][:number].must_equal(10)} vals = 3.times.map{ds.first} vals.each{|v| v[:number].must_equal(10)} vals = 3.times.map{ds.get(:number)} vals.each{|v| v.must_equal(10)} vals = [ ds.all{q.pop}, ds.first, ds.get(:number) ] q.push nil vals[0][0][:number].must_equal 10 vals[1][:number].must_equal 10 vals[2].must_equal 10 end if ENV['SEQUEL_ASYNC_THREAD_POOL'] end describe "Simple dataset operations with nasty table names" do before do @db = DB @table = :"i`t' [e]\"m\\s" end cspecify "should work correctly", :oracle, :sqlanywhere, [:jdbc, :mssql] do @db.create_table!(@table) do primary_key :id Integer :number end @ds = @db[@table] @ds.insert(:number=>10).must_equal 1 @ds.all.must_equal [{:id=>1, :number=>10}] @ds.update(:number=>20).must_equal 1 @ds.all.must_equal [{:id=>1, :number=>20}] @ds.delete.must_equal 1 @ds.count.must_equal 0 @db.drop_table?(@table) end end if DB.dataset.quote_identifiers? describe Sequel::Dataset do before do DB.create_table!(:test) do String :name Integer :value end @d = DB[:test] @d = @d.async if async? end after do DB.drop_table?(:test) end it "should return the correct record count" do @d.count.must_equal 0 wait{@d.insert(:name => 'abc', :value => 123)} wait{@d.insert(:name => 'abc', :value => 456)} wait{@d.insert(:name => 'def', :value => nil)} 5.times do @d.count.must_equal 3 @d.count(:name).must_equal 3 @d.count(:value).must_equal 2 end end it "should handle functions with identifier names correctly" do wait{@d.insert(:name => 'abc', :value => 6)} @d.get{sum.function(:value)}.must_equal 6 end it "should handle aggregate methods on limited datasets correctly" do wait{@d.insert(:name => 'abc', :value => 6)} wait{@d.insert(:name => 'bcd', :value => 12)} wait{@d.insert(:name => 'def', :value => 18)} @d = @d.order(:name).limit(2) @d.count.must_equal 2 @d.avg(:value).to_i.must_equal 9 @d.min(:value).to_i.must_equal 6 @d.reverse.min(:value).to_i.must_equal 12 @d.max(:value).to_i.must_equal 12 @d.sum(:value).to_i.must_equal 18 @d.extension(:sequel_4_dataset_methods).interval(:value).to_i.must_equal 6 end it "should support or emulate filtered aggregate functions" do wait{@d.insert(:name => 'abc', :value => 123)} wait{@d.insert(:name => 'abc', :value => 456)} wait{@d.insert(:name => 'def', :value => 324)} @d.get{count.function.*.filter{value > 100}}.must_equal 3 @d.get{count.function.*.filter{value > 200}}.must_equal 2 @d.get{count.function.*.filter{value > 400}}.must_equal 1 @d.get{count.function.*.filter{value > 500}}.must_equal 0 @d.get{count(:value).filter{value > 100}}.must_equal 3 @d.get{count(:value).filter{value > 200}}.must_equal 2 @d.get{count(:value).filter{value > 400}}.must_equal 1 @d.get{count(:value).filter{value > 500}}.must_equal 0 end it "should return the correct records" do @d.to_a.must_equal [] wait{@d.insert(:name => 'abc', :value => 123)} wait{@d.insert(:name => 'abc', :value 
=> 456)} wait{@d.insert(:name => 'def', :value => 789)} @d.order(:value).to_a.must_equal [ {:name => 'abc', :value => 123}, {:name => 'abc', :value => 456}, {:name => 'def', :value => 789} ] end it "should update records correctly" do wait{@d.insert(:name => 'abc', :value => 123)} wait{@d.insert(:name => 'abc', :value => 456)} wait{@d.insert(:name => 'def', :value => 789)} wait{@d.filter(:name => 'abc').update(:value => 530)} @d[:name => 'def'][:value].must_equal 789 @d.filter(:value => 530).count.must_equal 2 end it "should delete records correctly" do wait{@d.insert(:name => 'abc', :value => 123)} wait{@d.insert(:name => 'abc', :value => 456)} wait{@d.insert(:name => 'def', :value => 789)} wait{@d.filter(:name => 'abc').delete} @d.count.must_equal 1 @d.first[:name].must_equal 'def' end it "should be able to truncate the table" do wait{@d.insert(:name => 'abc', :value => 123)} wait{@d.insert(:name => 'abc', :value => 456)} wait{@d.insert(:name => 'def', :value => 789)} @d.count.must_equal 3 @d.truncate.must_be_nil @d.count.must_equal 0 end it "should be able to literalize booleans" do @d.literal(true) @d.literal(false) end end describe Sequel::Database do it "should correctly escape strings" do ["\\\n", "\\\\\n", "\\\r\n", "\\\\\r\n", "\\\\\n\n", "\\\\\r\n\r\n", "\\dingo", "\\'dingo", "\\\\''dingo", ].each do |str| DB.get(Sequel.cast(str, String)).must_equal str str = "1#{str}1" DB.get(Sequel.cast(str, String)).must_equal str str = "#{str}#{str}" DB.get(Sequel.cast(str, String)).must_equal str end end cspecify "should properly escape binary data", [:odbc], [:jdbc, :hsqldb], :oracle do DB.get(Sequel.cast(Sequel.blob("\1\2\3"), File).as(:a)).must_equal "\1\2\3" end cspecify "should properly handle empty blobs", [:jdbc, :hsqldb], :oracle do DB.get(Sequel.cast(Sequel.blob(""), File).as(:a)).must_equal "" end cspecify "should properly escape identifiers", :db2, :oracle, :sqlanywhere do DB.create_table!(:"\\'\"[]"){Integer :id} DB.drop_table(:"\\'\"[]") end it "should have a working table_exists?" do t = :basdfdsafsaddsaf DB.drop_table?(t) DB.table_exists?(t).must_equal false DB.create_table(t){Integer :a} begin DB.table_exists?(t).must_equal true ensure DB.drop_table(t) end end end describe Sequel::Dataset do before do DB.create_table! :items do primary_key :id Integer :value end @d = DB[:items] @d.insert(:value => 123) @d.insert(:value => 456) @d.insert(:value => 789) @d = @d.async if async? end after do DB.drop_table?(:items) end it "should correctly return avg" do @d.avg(:value).to_i.must_equal 456 end it "should correctly return sum" do @d.sum(:value).to_i.must_equal 1368 end it "should correctly return max" do @d.max(:value).to_i.must_equal 789 end it "should correctly return min" do @d.min(:value).to_i.must_equal 123 end end describe "Simple Dataset operations" do before(:all) do DB.create_table!(:items) do Integer :number TrueClass :flag end @ds = DB[:items].order(:number) @ds.insert(:number=>1, :flag=>true) @ds.insert(:number=>2, :flag=>false) @ds.insert(:number=>3, :flag=>nil) @ds = @ds.async if async? 
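# The rows above give one record per boolean state (true, false, NULL). The
# specs below rely on Sequel inverting boolean conditions in a NULL-aware way
# (e.g. flag IS NOT TRUE), so exclude(:flag=>true) still returns the NULL row
# that plain SQL three-valued logic would otherwise drop.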
end after(:all) do DB.drop_table?(:items) end it "should deal with boolean conditions correctly" do @ds.filter(:flag=>true).map(:number).must_equal [1] @ds.filter(:flag=>false).map(:number).must_equal [2] @ds.filter(:flag=>nil).map(:number).must_equal [3] @ds.exclude(:flag=>true).map(:number).must_equal [2, 3] @ds.exclude(:flag=>false).map(:number).must_equal [1, 3] @ds.exclude(:flag=>nil).map(:number).must_equal [1, 2] end cspecify "should deal with boolean equality conditions correctly", :derby do @ds.filter(true=>:flag).map(:number).must_equal [1] @ds.filter(false=>:flag).map(:number).must_equal [2] @ds.filter(nil=>:flag).map(:number).must_equal [] @ds.exclude(true=>:flag).map(:number).must_equal [2] @ds.exclude(false=>:flag).map(:number).must_equal [1] @ds.exclude(nil=>:flag).map(:number).must_equal [] end cspecify "should have exclude_or_null work correctly", :mssql, :derby, :oracle, :db2, :sqlanywhere do @ds = @ds.extension(:exclude_or_null) @ds.exclude_or_null(true=>:flag).map(:number).must_equal [2, 3] @ds.exclude_or_null(false=>:flag).map(:number).must_equal [1, 3] @ds.exclude_or_null(nil=>:flag).map(:number).must_equal [1, 2, 3] end end describe "Simple Dataset operations in transactions" do before do DB.create_table!(:items) do primary_key :id integer :number end @ds = DB[:items] end after do DB.drop_table?(:items) end cspecify "should insert correctly with a primary key specified inside a transaction", :db2, :mssql do DB.transaction do @ds.insert(:id=>100, :number=>20) @ds.count.must_equal 1 @ds.order(:id).all.must_equal [{:id=>100, :number=>20}] end end it "should have insert return primary key value inside a transaction" do DB.transaction do @ds.insert(:number=>20).must_equal 1 @ds.count.must_equal 1 @ds.order(:id).all.must_equal [{:id=>1, :number=>20}] end end it "should support for_update" do DB.transaction{@ds.for_update.all.must_equal []} end end describe "Dataset UNION, EXCEPT, and INTERSECT" do before do DB.create_table!(:i1){integer :number} DB.create_table!(:i2){integer :number} @ds1 = DB[:i1] @ds1.insert(:number=>10) @ds1.insert(:number=>20) @ds2 = DB[:i2] @ds2.insert(:number=>10) @ds2.insert(:number=>30) if async? @ds1 = @ds1.async @ds2 = @ds2.async end end after do DB.drop_table?(:i1, :i2, :i3) end it "should give the correct results for UNION with an existing order" do @ds1.order(:number).union(@ds2).order(:number).map{|x| x[:number].to_s}.must_equal %w'10 20 30' end it "should give the correct results for simple UNION, EXCEPT, and INTERSECT" do @ds1.union(@ds2).order(:number).map{|x| x[:number].to_s}.must_equal %w'10 20 30' if @ds1.supports_intersect_except? 
@ds1.except(@ds2).order(:number).map{|x| x[:number].to_s}.must_equal %w'20' @ds1.intersect(@ds2).order(:number).map{|x| x[:number].to_s}.must_equal %w'10' end end it "should give the correct results for UNION, EXCEPT, and INTERSECT when used with ordering and limits and offsets" do [%w'10 30', %w'10 20 30'].must_include @ds1.limit(1).union(@ds2).order(:number).map{|x| x[:number].to_s} [%w'10 30', %w'10 20 30'].must_include @ds1.offset(1).union(@ds2).order(:number).map{|x| x[:number].to_s} wait{@ds1.insert(:number=>8)} wait{@ds2.insert(:number=>9)} wait{@ds1.insert(:number=>38)} wait{@ds2.insert(:number=>39)} @ds1.reverse_order(:number).union(@ds2).order(:number).map{|x| x[:number].to_s}.must_equal %w'8 9 10 20 30 38 39' @ds1.union(@ds2.reverse_order(:number)).order(:number).map{|x| x[:number].to_s}.must_equal %w'8 9 10 20 30 38 39' @ds1.reverse_order(:number).limit(1).union(@ds2).order(:number).map{|x| x[:number].to_s}.must_equal %w'9 10 30 38 39' @ds2.reverse_order(:number).limit(1).union(@ds1).order(:number).map{|x| x[:number].to_s}.must_equal %w'8 10 20 38 39' @ds1.reverse_order(:number).limit(1, 1).union(@ds2).order(:number).map{|x| x[:number].to_s}.must_equal %w'9 10 20 30 39' @ds2.reverse_order(:number).limit(1, 1).union(@ds1).order(:number).map{|x| x[:number].to_s}.must_equal %w'8 10 20 30 38' @ds1.reverse_order(:number).offset(1).union(@ds2).order(:number).map{|x| x[:number].to_s}.must_equal %w'8 9 10 20 30 39' @ds2.reverse_order(:number).offset(1).union(@ds1).order(:number).map{|x| x[:number].to_s}.must_equal %w'8 9 10 20 30 38' @ds1.union(@ds2.order(:number).limit(1)).order(:number).map{|x| x[:number].to_s}.must_equal %w'8 9 10 20 38' @ds2.union(@ds1.order(:number).limit(1)).order(:number).map{|x| x[:number].to_s}.must_equal %w'8 9 10 30 39' @ds1.union(@ds2.order(:number).limit(1, 1)).order(:number).map{|x| x[:number].to_s}.must_equal %w'8 10 20 38' @ds2.union(@ds1.order(:number).limit(1, 1)).order(:number).map{|x| x[:number].to_s}.must_equal %w'9 10 30 39' @ds1.union(@ds2.order(:number).offset(1)).order(:number).map{|x| x[:number].to_s}.must_equal %w'8 10 20 30 38 39' @ds2.union(@ds1.order(:number).offset(1)).order(:number).map{|x| x[:number].to_s}.must_equal %w'9 10 20 30 38 39' @ds1.union(@ds2).limit(2).order(:number).map{|x| x[:number].to_s}.must_equal %w'8 9' @ds2.union(@ds1).reverse_order(:number).limit(2).map{|x| x[:number].to_s}.must_equal %w'39 38' @ds1.union(@ds2).limit(2, 1).order(:number).map{|x| x[:number].to_s}.must_equal %w'9 10' @ds2.union(@ds1).reverse_order(:number).limit(2, 1).map{|x| x[:number].to_s}.must_equal %w'38 30' @ds1.union(@ds2).offset(1).order(:number).map{|x| x[:number].to_s}.must_equal %w'9 10 20 30 38 39' @ds2.union(@ds1).reverse_order(:number).offset(1).map{|x| x[:number].to_s}.must_equal %w'38 30 20 10 9 8' @ds1.reverse_order(:number).limit(2).union(@ds2.reverse_order(:number).limit(2)).order(:number).limit(3).map{|x| x[:number].to_s}.must_equal %w'20 30 38' @ds2.order(:number).limit(2).union(@ds1.order(:number).limit(2)).reverse_order(:number).limit(3).map{|x| x[:number].to_s}.must_equal %w'10 9 8' @ds1.reverse_order(:number).limit(2, 1).union(@ds2.reverse_order(:number).limit(2, 1)).order(:number).limit(3, 1).map{|x| x[:number].to_s}.must_equal %w'20 30' @ds2.order(:number).limit(2, 1).union(@ds1.order(:number).limit(2, 1)).reverse_order(:number).limit(3, 1).map{|x| x[:number].to_s}.must_equal %w'20 10' @ds1.reverse_order(:number).offset(1).union(@ds2.reverse_order(:number).offset(1)).order(:number).offset(1).map{|x| 
x[:number].to_s}.must_equal %w'9 10 20 30' @ds2.order(:number).offset(1).union(@ds1.order(:number).offset(1)).reverse_order(:number).offset(1).map{|x| x[:number].to_s}.must_equal %w'38 30 20 10' end it "should give the correct results for compound UNION, EXCEPT, and INTERSECT" do DB.create_table!(:i3){integer :number} @ds3 = DB[:i3] @ds3.insert(:number=>10) @ds3.insert(:number=>40) @ds1.union(@ds2).union(@ds3).order(:number).map{|x| x[:number].to_s}.must_equal %w'10 20 30 40' @ds1.union(@ds2.union(@ds3)).order(:number).map{|x| x[:number].to_s}.must_equal %w'10 20 30 40' if @ds1.supports_intersect_except? @ds1.union(@ds2).except(@ds3).order(:number).map{|x| x[:number].to_s}.must_equal %w'20 30' @ds1.union(@ds2.except(@ds3)).order(:number).map{|x| x[:number].to_s}.must_equal %w'10 20 30' @ds1.union(@ds2).intersect(@ds3).order(:number).map{|x| x[:number].to_s}.must_equal %w'10 ' @ds1.union(@ds2.intersect(@ds3)).order(:number).map{|x| x[:number].to_s}.must_equal %w'10 20' @ds1.except(@ds2).union(@ds3).order(:number).map{|x| x[:number].to_s}.must_equal %w'10 20 40' @ds1.except(@ds2.union(@ds3)).order(:number).map{|x| x[:number].to_s}.must_equal %w'20' @ds1.except(@ds2).except(@ds3).order(:number).map{|x| x[:number].to_s}.must_equal %w'20' @ds1.except(@ds2.except(@ds3)).order(:number).map{|x| x[:number].to_s}.must_equal %w'10 20' @ds1.except(@ds2).intersect(@ds3).order(:number).map{|x| x[:number].to_s}.must_equal %w'' @ds1.except(@ds2.intersect(@ds3)).order(:number).map{|x| x[:number].to_s}.must_equal %w'20' @ds1.intersect(@ds2).union(@ds3).order(:number).map{|x| x[:number].to_s}.must_equal %w'10 40' @ds1.intersect(@ds2.union(@ds3)).order(:number).map{|x| x[:number].to_s}.must_equal %w'10' @ds1.intersect(@ds2).except(@ds3).order(:number).map{|x| x[:number].to_s}.must_equal %w'' @ds1.intersect(@ds2.except(@ds3)).order(:number).map{|x| x[:number].to_s}.must_equal %w'' @ds1.intersect(@ds2).intersect(@ds3).order(:number).map{|x| x[:number].to_s}.must_equal %w'10' @ds1.intersect(@ds2.intersect(@ds3)).order(:number).map{|x| x[:number].to_s}.must_equal %w'10' end end end if DB.dataset.supports_cte? describe "Common Table Expressions" do before(:all) do @db = DB @db.create_table!(:i1){Integer :id; Integer :parent_id} @ds = @db[:i1] @ds.insert(:id=>1) @ds.insert(:id=>2) @ds.insert(:id=>3, :parent_id=>1) @ds.insert(:id=>4, :parent_id=>1) @ds.insert(:id=>5, :parent_id=>3) @ds.insert(:id=>6, :parent_id=>5) @ds = @ds.async if async? 
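# The rows above form a small tree: 1 and 2 are roots, 3 and 4 are children of
# 1, 5 of 3, and 6 of 5. The recursive WITH spec below builds SQL shaped
# roughly like:
#
#   WITH t(i, pi) AS (
#     SELECT * FROM i1 WHERE (parent_id IS NULL)
#     UNION ALL
#     SELECT i1.id, i1.parent_id FROM i1 INNER JOIN t ON (t.i = i1.parent_id)
#   ) SELECT i AS id, pi AS parent_id FROM t ORDER BY id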
end after(:all) do @db.drop_table?(:i1) end it "should give correct results for WITH" do @db[:t].with(:t, @ds.filter(:parent_id=>nil).select(:id)).order(:id).map(:id).must_equal [1, 2] end cspecify "should give correct results for recursive WITH", :db2 do ds = @db[:t].select(Sequel[:i].as(:id), Sequel[:pi].as(:parent_id)).with_recursive(:t, @ds.filter(:parent_id=>nil), @ds.join(:t, :i=>:parent_id).select(Sequel[:i1][:id], Sequel[:i1][:parent_id]), :args=>[:i, :pi]).order(:id) ds.all.must_equal [{:parent_id=>nil, :id=>1}, {:parent_id=>nil, :id=>2}, {:parent_id=>1, :id=>3}, {:parent_id=>1, :id=>4}, {:parent_id=>3, :id=>5}, {:parent_id=>5, :id=>6}] ps = @db[:t].select(Sequel[:i].as(:id), Sequel[:pi].as(:parent_id)).with_recursive(:t, @ds.filter(:parent_id=>:$n), @ds.join(:t, :i=>:parent_id).filter(Sequel[:t][:i]=>:parent_id).select(Sequel[:i1][:id], Sequel[:i1][:parent_id]), :args=>[:i, :pi]).order(:id).prepare(:select, :cte_sel) ps.call(:n=>1).must_equal [{:id=>3, :parent_id=>1}, {:id=>4, :parent_id=>1}, {:id=>5, :parent_id=>3}, {:id=>6, :parent_id=>5}] ps.call(:n=>3).must_equal [{:id=>5, :parent_id=>3}, {:id=>6, :parent_id=>5}] ps.call(:n=>5).must_equal [{:id=>6, :parent_id=>5}] end it "should support joining a dataset with a CTE" do @ds.inner_join(@db[:t].with(:t, @ds.filter(:parent_id=>nil)), :id => :id).select(Sequel[:i1][:id]).order(Sequel[:i1][:id]).map(:id).must_equal [1,2] @db[:t].with(:t, @ds).inner_join(@db[:s].with(:s, @ds.filter(:parent_id=>nil)), :id => :id).select(Sequel[:t][:id]).order(Sequel[:t][:id]).map(:id).must_equal [1,2] end it "should support a subselect in the FROM clause with a CTE" do @ds.from(@db[:t].with(:t, @ds)).select_order_map(:id).must_equal [1,2,3,4,5,6] @db[:t].with(:t, @ds).from_self.select_order_map(:id).must_equal [1,2,3,4,5,6] end it "should support using a CTE inside a CTE" do @db[:s].with(:s, @db[:t].with(:t, @ds)).select_order_map(:id).must_equal [1,2,3,4,5,6] @db[:s].with_recursive(:s, @db[:t].with(:t, @ds), @db[:t2].with(:t2, @ds)).select_order_map(:id).must_equal [1,1,2,2,3,3,4,4,5,5,6,6] end it "should support using a CTE inside UNION/EXCEPT/INTERSECT" do @ds.union(@db[:t].with(:t, @ds)).select_order_map(:id).must_equal [1,2,3,4,5,6] if @ds.supports_intersect_except? @ds.intersect(@db[:t].with(:t, @ds)).select_order_map(:id).must_equal [1,2,3,4,5,6] @ds.except(@db[:t].with(:t, @ds)).select_order_map(:id).must_equal [] end end it "should give correct results for WITH AS [NOT] MATERIALIZED" do @db[:t].with(:t, @ds, :materialized=>nil).order(:id).map(:id).must_equal [1, 2, 3, 4, 5, 6] @db[:t].with(:t, @ds, :materialized=>true).order(:id).map(:id).must_equal [1, 2, 3, 4, 5, 6] @db[:t].with(:t, @ds, :materialized=>false).order(:id).map(:id).must_equal [1, 2, 3, 4, 5, 6] end if (DB.database_type == :postgres && DB.server_version >= 120000) || (DB.database_type == :sqlite && DB.sqlite_version > 33500) end end if DB.dataset.supports_cte?(:insert) || DB.dataset.supports_cte?(:update) || DB.dataset.supports_cte?(:delete) describe "Common Table Expressions" do before do @db = DB @db.create_table!(:i1){Integer :id} @ds = @db[:i1] @ds2 = @ds.with(:t, @ds) @ds.insert(:id=>1) @ds.insert(:id=>2) @ds = @ds.async if async? 
end after do @db.drop_table?(:i1) end it "should give correct results for WITH in insert" do wait{@ds2.insert(@db[:t])} @ds.select_order_map(:id).must_equal [1, 1, 2, 2] end if DB.dataset.supports_cte?(:insert) it "should give correct results for WITH in update" do wait{@ds2.filter(:id=>@db[:t].select{max(id)}).update(:id=>Sequel.+(:id, 1))} @ds.select_order_map(:id).must_equal [1, 3] end if DB.dataset.supports_cte?(:update) it "should give correct results for WITH in delete" do wait{@ds2.filter(:id=>@db[:t].select{max(id)}).delete} @ds.select_order_map(:id).must_equal [1] end if DB.dataset.supports_cte?(:delete) it "should support a subselect in an subquery used for INSERT" do wait{@ds.insert([:id], @db[:foo].with(:foo, @ds.select{(id + 10).as(:id)}))} @ds.select_order_map(:id).must_equal [1,2,11,12] end end end if DB.dataset.supports_returning?(:insert) describe "RETURNING clauses in INSERT" do before do @db = DB @db.create_table!(:i1){Integer :id; Integer :foo} @ds = @db[:i1] @ds = @ds.async if async? end after do @db.drop_table?(:i1) end it "should give correct results" do h = {} wait{@ds.returning(:foo).insert(1, 2){|r| h = r}} h.must_equal(:foo=>2) wait{@ds.returning(:id).insert(3, 4){|r| h = r}} h.must_equal(:id=>3) wait{@ds.returning.insert(5, 6){|r| h = r}} h.must_equal(:id=>5, :foo=>6) wait{@ds.returning(Sequel[:id].as(:foo), Sequel[:foo].as(:id)).insert(7, 8){|r| h = r}} h.must_equal(:id=>8, :foo=>7) end end end if DB.dataset.supports_returning?(:update) # Assume DELETE support as well describe "RETURNING clauses in UPDATE/DELETE" do before do @db = DB @db.create_table!(:i1){Integer :id; Integer :foo} @ds = @db[:i1] @ds.insert(1, 2) @ds = @ds.async if async? end after do @db.drop_table?(:i1) end it "should give correct results" do h = [] wait{@ds.returning(:foo).update(:id=>Sequel.+(:id, 1), :foo=>Sequel.*(:foo, 2)){|r| h << r}} h.must_equal [{:foo=>4}] h.clear wait{@ds.returning(:id).update(:id=>Sequel.+(:id, 1), :foo=>Sequel.*(:foo, 2)){|r| h << r}} h.must_equal [{:id=>3}] h.clear wait{@ds.returning.update(:id=>Sequel.+(:id, 1), :foo=>Sequel.*(:foo, 2)){|r| h << r}} h.must_equal [{:id=>4, :foo=>16}] h.clear wait{@ds.returning(Sequel[:id].as(:foo), Sequel[:foo].as(:id)).update(:id=>Sequel.+(:id, 1), :foo=>Sequel.*(:foo, 2)){|r| h << r}} h.must_equal [{:id=>32, :foo=>5}] h.clear wait{@ds.returning.delete{|r| h << r}} h.must_equal [{:id=>5, :foo=>32}] h.clear wait{@ds.returning.delete{|r| h << r}} h.must_equal [] end end end if DB.dataset.supports_window_functions? describe "Window Functions" do before(:all) do @db = DB @db.create_table!(:i1){Integer :id; Integer :group_id; Integer :amount} @ds = @db[:i1].order(:id) @ds.insert(:id=>1, :group_id=>1, :amount=>1) @ds.insert(:id=>2, :group_id=>1, :amount=>10) @ds.insert(:id=>3, :group_id=>1, :amount=>100) @ds.insert(:id=>4, :group_id=>2, :amount=>1000) @ds.insert(:id=>5, :group_id=>2, :amount=>10000) @ds.insert(:id=>6, :group_id=>2, :amount=>100000) @ds = @ds.async if async? end after(:all) do @db.drop_table?(:i1) end it "should give correct results for aggregate window functions" do @ds.select(:id){sum(:amount).over(:partition=>:group_id).as(:sum)}.all. must_equal [{:sum=>111, :id=>1}, {:sum=>111, :id=>2}, {:sum=>111, :id=>3}, {:sum=>111000, :id=>4}, {:sum=>111000, :id=>5}, {:sum=>111000, :id=>6}] @ds.select(:id){sum(:amount).over.as(:sum)}.all. 
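# Amounts are powers of ten, so each sum below encodes exactly which rows the
# window frame covered (a sum of 111 means rows 1-3, 11000 means rows 4-5).
# Frame shorthands used in these specs: :all means ROWS BETWEEN UNBOUNDED
# PRECEDING AND UNBOUNDED FOLLOWING, :rows means ROWS BETWEEN UNBOUNDED
# PRECEDING AND CURRENT ROW, with :range and :groups the RANGE/GROUPS analogs.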
must_equal [{:sum=>111111, :id=>1}, {:sum=>111111, :id=>2}, {:sum=>111111, :id=>3}, {:sum=>111111, :id=>4}, {:sum=>111111, :id=>5}, {:sum=>111111, :id=>6}] end it "should give correct results for ranking window functions with orders" do @ds.select(:id){rank.function.over(:partition=>:group_id, :order=>:id).as(:rank)}.all. must_equal [{:rank=>1, :id=>1}, {:rank=>2, :id=>2}, {:rank=>3, :id=>3}, {:rank=>1, :id=>4}, {:rank=>2, :id=>5}, {:rank=>3, :id=>6}] @ds.select(:id){rank.function.over(:order=>id).as(:rank)}.all. must_equal [{:rank=>1, :id=>1}, {:rank=>2, :id=>2}, {:rank=>3, :id=>3}, {:rank=>4, :id=>4}, {:rank=>5, :id=>5}, {:rank=>6, :id=>6}] end it "should give correct results for aggregate window functions with orders" do @ds.select(:id){sum(:amount).over(:partition=>:group_id, :order=>:id).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1000, :id=>4}, {:sum=>11000, :id=>5}, {:sum=>111000, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:id).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1111, :id=>4}, {:sum=>11111, :id=>5}, {:sum=>111111, :id=>6}] @ds.select(:id){sum(:amount).over(:partition=>:group_id, :order=>id.desc, :frame=>:rows).as(:sum)}.all. must_equal [{:sum=>111, :id=>1}, {:sum=>110, :id=>2}, {:sum=>100, :id=>3}, {:sum=>111000, :id=>4}, {:sum=>110000, :id=>5}, {:sum=>100000, :id=>6}] end it "should give correct results for aggregate window functions with frames" do @ds.select(:id){sum(:amount).over(:partition=>:group_id, :order=>:id, :frame=>:all).as(:sum)}.all. must_equal [{:sum=>111, :id=>1}, {:sum=>111, :id=>2}, {:sum=>111, :id=>3}, {:sum=>111000, :id=>4}, {:sum=>111000, :id=>5}, {:sum=>111000, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:id, :frame=>:all).as(:sum)}.all. must_equal [{:sum=>111111, :id=>1}, {:sum=>111111, :id=>2}, {:sum=>111111, :id=>3}, {:sum=>111111, :id=>4}, {:sum=>111111, :id=>5}, {:sum=>111111, :id=>6}] @ds.select(:id){sum(:amount).over(:partition=>:group_id, :order=>:id, :frame=>:rows).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1000, :id=>4}, {:sum=>11000, :id=>5}, {:sum=>111000, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:id, :frame=>:rows).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1111, :id=>4}, {:sum=>11111, :id=>5}, {:sum=>111111, :id=>6}] end it "should give correct results for aggregate window functions with ranges" do @ds.select(:id){sum(:amount).over(:partition=>:group_id, :order=>:id, :frame=>:range).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1000, :id=>4}, {:sum=>11000, :id=>5}, {:sum=>111000, :id=>6}] @ds.select(:id){sum(:amount).over(:partition=>:group_id, :order=>:group_id, :frame=>:range).as(:sum)}.all. must_equal [{:sum=>111, :id=>1}, {:sum=>111, :id=>2}, {:sum=>111, :id=>3}, {:sum=>111000, :id=>4}, {:sum=>111000, :id=>5}, {:sum=>111000, :id=>6}] end if DB.dataset.supports_window_function_frame_option?(:range) it "should give correct results for aggregate window functions with groups" do @ds.select(:id){sum(:amount).over(:partition=>:group_id, :order=>:id, :frame=>:groups).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1000, :id=>4}, {:sum=>11000, :id=>5}, {:sum=>111000, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:group_id, :frame=>:groups).as(:sum)}.all. 
must_equal [{:sum=>111, :id=>1}, {:sum=>111, :id=>2}, {:sum=>111, :id=>3}, {:sum=>111111, :id=>4}, {:sum=>111111, :id=>5}, {:sum=>111111, :id=>6}] end if DB.dataset.supports_window_function_frame_option?(:groups) if DB.dataset.supports_window_function_frame_option?(:offset) it "should give correct results for aggregate window functions with offsets for ROWS" do @ds.select(:id){sum(:amount).over(:order=>:id, :frame=>{:type=>:rows, :start=>1}).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>110, :id=>3}, {:sum=>1100, :id=>4}, {:sum=>11000, :id=>5}, {:sum=>110000, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:id, :frame=>{:type=>:rows, :start=>1, :end=>1}).as(:sum)}.all. must_equal [{:sum=>11, :id=>1}, {:sum=>111, :id=>2}, {:sum=>1110, :id=>3}, {:sum=>11100, :id=>4}, {:sum=>111000, :id=>5}, {:sum=>110000, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:id, :frame=>{:type=>:rows, :start=>2, :end=>[1, :preceding]}).as(:sum)}.all. must_equal [{:sum=>nil, :id=>1}, {:sum=>1, :id=>2}, {:sum=>11, :id=>3}, {:sum=>110, :id=>4}, {:sum=>1100, :id=>5}, {:sum=>11000, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:id, :frame=>{:type=>:rows, :start=>[1, :following], :end=>2}).as(:sum)}.order(:id).all. must_equal [{:sum=>110, :id=>1}, {:sum=>1100, :id=>2}, {:sum=>11000, :id=>3}, {:sum=>110000, :id=>4}, {:sum=>100000, :id=>5}, {:sum=>nil, :id=>6}] end cspecify "should give correct results for aggregate window functions with offsets for RANGES", :mssql, [proc{DB.sqlite_version < 32800}, :sqlite], [proc{DB.server_version < 110000}, :postgres] do @ds.select(:id){sum(:amount).over(:order=>:group_id, :frame=>{:type=>:range, :start=>1}).as(:sum)}.all. must_equal [{:sum=>111, :id=>1}, {:sum=>111, :id=>2}, {:sum=>111, :id=>3}, {:sum=>111111, :id=>4}, {:sum=>111111, :id=>5}, {:sum=>111111, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:group_id, :frame=>{:type=>:range, :start=>0, :end=>1}).as(:sum)}.all. must_equal [{:sum=>111111, :id=>1}, {:sum=>111111, :id=>2}, {:sum=>111111, :id=>3}, {:sum=>111000, :id=>4}, {:sum=>111000, :id=>5}, {:sum=>111000, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:amount, :frame=>{:type=>:range, :start=>100, :end=>1000}).as(:sum)}.all. must_equal [{:sum=>1111, :id=>1}, {:sum=>1111, :id=>2}, {:sum=>1111, :id=>3}, {:sum=>1000, :id=>4}, {:sum=>10000, :id=>5}, {:sum=>100000, :id=>6}] end if DB.dataset.supports_window_function_frame_option?(:range) it "should give correct results for aggregate window functions with offsets for GROUPS" do @ds.select(:id){sum(:amount).over(:order=>:group_id, :frame=>{:type=>:groups, :start=>1}).as(:sum)}.all. must_equal [{:sum=>111, :id=>1}, {:sum=>111, :id=>2}, {:sum=>111, :id=>3}, {:sum=>111111, :id=>4}, {:sum=>111111, :id=>5}, {:sum=>111111, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:group_id, :frame=>{:type=>:groups, :start=>0, :end=>1}).as(:sum)}.all. must_equal [{:sum=>111111, :id=>1}, {:sum=>111111, :id=>2}, {:sum=>111111, :id=>3}, {:sum=>111000, :id=>4}, {:sum=>111000, :id=>5}, {:sum=>111000, :id=>6}] end if DB.dataset.supports_window_function_frame_option?(:groups) end it "should give correct results for aggregate window functions with exclusions" do @ds.select(:id){sum(:amount).over(:order=>:id, :frame=>{:type=>:rows, :start=>:preceding, :exclude=>:current}).as(:sum)}.all. 
must_equal [{:sum=>nil, :id=>1}, {:sum=>1, :id=>2}, {:sum=>11, :id=>3}, {:sum=>111, :id=>4}, {:sum=>1111, :id=>5}, {:sum=>11111, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:group_id, :frame=>{:type=>:rows, :start=>:preceding, :exclude=>:group}).as(:sum)}.all. must_equal [{:sum=>nil, :id=>1}, {:sum=>nil, :id=>2}, {:sum=>nil, :id=>3}, {:sum=>111, :id=>4}, {:sum=>111, :id=>5}, {:sum=>111, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:group_id, :frame=>{:type=>:rows, :start=>:preceding, :exclude=>:ties}).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>10, :id=>2}, {:sum=>100, :id=>3}, {:sum=>1111, :id=>4}, {:sum=>10111, :id=>5}, {:sum=>100111, :id=>6}] @ds.select(:id){sum(:amount).over(:order=>:id, :frame=>{:type=>:rows, :start=>:preceding, :exclude=>:no_others}).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1111, :id=>4}, {:sum=>11111, :id=>5}, {:sum=>111111, :id=>6}] end if DB.dataset.supports_window_function_frame_option?(:exclude) it "should give correct results for window functions" do @ds.window(:win, :partition=>:group_id, :order=>:id).select(:id){sum(:amount).over(:window=>win, :frame=>:rows).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1000, :id=>4}, {:sum=>11000, :id=>5}, {:sum=>111000, :id=>6}] @ds.window(:win, :partition=>:group_id).select(:id){sum(:amount).over(:window=>win, :order=>id, :frame=>:rows).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1000, :id=>4}, {:sum=>11000, :id=>5}, {:sum=>111000, :id=>6}] @ds.window(:win, {}).select(:id){sum(:amount).over(:window=>:win, :order=>id, :frame=>:rows).as(:sum)}.all. must_equal [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1111, :id=>4}, {:sum=>11111, :id=>5}, {:sum=>111111, :id=>6}] @ds.window(:win, :partition=>:group_id).select(:id){sum(:amount).over(:window=>:win, :order=>id, :frame=>:all).as(:sum)}.all. must_equal [{:sum=>111, :id=>1}, {:sum=>111, :id=>2}, {:sum=>111, :id=>3}, {:sum=>111000, :id=>4}, {:sum=>111000, :id=>5}, {:sum=>111000, :id=>6}] end if DB.dataset.supports_window_clause? end end describe Sequel::SQL::Constants do before do @db = DB @ds = @db[:constants] @c = proc do |v| case v when Time v when DateTime, String Time.parse(v.to_s) else v end end @c2 = proc{|v| v.is_a?(Date) ? 
v : Date.parse(v) } end after do @db.drop_table?(:constants) end cspecify "should have working CURRENT_DATE", [:jdbc, :sqlite], :oracle do @db.create_table!(:constants){Date :d} @ds.insert(:d=>Sequel::CURRENT_DATE) d = @c2[@ds.get(:d)] d.must_be_kind_of(Date) d.to_s.must_equal Date.today.to_s end cspecify "should have working CURRENT_TIME", [:jdbc, :sqlite], [:mysql2], [:tinytds], [:ado] do @db.create_table!(:constants){Time :t, :only_time=>true} @ds.insert(:t=>Sequel::CURRENT_TIME) (Time.now - @c[@ds.get(:t)]).must_be_close_to 0, 60 end cspecify "should have working CURRENT_TIMESTAMP", [:jdbc, :sqlite] do @db.create_table!(:constants){DateTime :ts} @ds.insert(:ts=>Sequel::CURRENT_TIMESTAMP) (Time.now - @c[@ds.get(:ts)]).must_be_close_to 0, 60 end cspecify "should have working CURRENT_TIMESTAMP when used as a column default", [:jdbc, :sqlite] do @db.create_table!(:constants){DateTime :ts, :default=>Sequel::CURRENT_TIMESTAMP} @ds.insert (Time.now - @c[@ds.get(:ts)]).must_be_close_to 0, 60 end end describe "Sequel::Dataset#import and #multi_insert" do before(:all) do @db = DB @db.create_table!(:imp){Integer :i} @ids = @db[:imp].order(:i) end before do @ids.delete end after(:all) do @db.drop_table?(:imp) end it "should import with multi_insert and an array of hashes" do @ids.multi_insert([{:i=>10}, {:i=>20}]) @ids.all.must_equal [{:i=>10}, {:i=>20}] end it "should import with an array of arrays of values" do @ids.import([:i], [[10], [20]]) @ids.all.must_equal [{:i=>10}, {:i=>20}] end it "should import with a dataset" do @db.create_table!(:exp2){Integer :i} @db[:exp2].import([:i], [[10], [20]]) @ids.import([:i], @db[:exp2]) @ids.all.must_equal [{:i=>10}, {:i=>20}] @db.drop_table(:exp2) end it "should have import work with the :slice_size option" do @ids.import([:i], [[10], [20], [30]], :slice_size=>1) @ids.all.must_equal [{:i=>10}, {:i=>20}, {:i=>30}] @ids.delete @ids.import([:i], [[10], [20], [30]], :slice_size=>2) @ids.all.must_equal [{:i=>10}, {:i=>20}, {:i=>30}] @ids.delete @ids.import([:i], [[10], [20], [30]], :slice_size=>3) @ids.all.must_equal [{:i=>10}, {:i=>20}, {:i=>30}] end it "should import many rows at once" do @ids.import([:i], (1..1000).to_a.map{|x| [x]}) @ids.select_order_map(:i).must_equal((1..1000).to_a) end end describe "Sequel::Dataset#import and #multi_insert :return=>:primary_key" do before do @db = DB @db.create_table!(:imp){primary_key :id; Integer :i} @ds = @db[:imp] @ds = @ds.async if async? 
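    # With :return=>:primary_key, #import and #multi_insert return the
    # autogenerated primary key values for the inserted rows instead of nil.
    # A minimal sketch of the expected behavior (values assumed):
    #
    #   DB[:imp].multi_insert([{:i=>10}, {:i=>20}], :return=>:primary_key)
    #   # => [1, 2]
    #
    # Adapters without multi-row RETURNING support may fall back to one
    # INSERT statement per row in order to collect the keys.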
end after do @db.drop_table?(:imp) end it "should return primary key values" do @ds.multi_insert([{:i=>10}, {:i=>20}, {:i=>30}], :return=>:primary_key).must_equal [1, 2, 3] @ds.import([:i], [[40], [50], [60]], :return=>:primary_key).must_equal [4, 5, 6] @ds.order(:id).map([:id, :i]).must_equal [[1, 10], [2, 20], [3, 30], [4, 40], [5, 50], [6, 60]] end it "should handle dataset with row_proc" do ds = @ds.with_row_proc(lambda{|h| Object.new}) ds.multi_insert([{:i=>10}, {:i=>20}, {:i=>30}], :return=>:primary_key).must_equal [1, 2, 3] ds.import([:i], [[40], [50], [60]], :return=>:primary_key).must_equal [4, 5, 6] end it "should return primary key values when :slice is used" do @ds.multi_insert([{:i=>10}, {:i=>20}, {:i=>30}], :return=>:primary_key, :slice=>2).must_equal [1, 2, 3] @ds.import([:i], [[40], [50], [60]], :return=>:primary_key, :slice=>2).must_equal [4, 5, 6] @ds.order(:id).map([:id, :i]).must_equal [[1, 10], [2, 20], [3, 30], [4, 40], [5, 50], [6, 60]] end end describe "Sequel::Dataset convenience methods" do before(:all) do @db = DB @db.create_table!(:a){Integer :a; Integer :b; Integer :c} @ds = @db[:a] @ds.insert(1, 3, 5) @ds.insert(1, 3, 6) @ds.insert(1, 4, 5) @ds.insert(2, 3, 5) @ds.insert(2, 4, 6) @ds = @ds.async if async? end after(:all) do @db.drop_table?(:a) end it "#group_rollup should include hierarchy of groupings" do @ds.group_by(:a).group_rollup.select_map([:a, Sequel.function(:sum, :b).cast(Integer).as(:b), Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, 17, 27], [1, 10, 16], [2, 7, 11]] @ds.group_by(:a, :b).group_rollup.select_map([:a, :b, Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, nil, 27], [1, nil, 16], [1, 3, 11], [1, 4, 5], [2, nil, 11], [2, 3, 5], [2, 4, 6]] end if DB.dataset.supports_group_rollup? it "#group_cube should include all combinations of groupings" do @ds.group_by(:a).group_cube.select_map([:a, Sequel.function(:sum, :b).cast(Integer).as(:b), Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, 17, 27], [1, 10, 16], [2, 7, 11]] @ds.group_by(:a, :b).group_cube.select_map([:a, :b, Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, nil, 27], [nil, 3, 16], [nil, 4, 11], [1, nil, 16], [1, 3, 11], [1, 4, 5], [2, nil, 11], [2, 3, 5], [2, 4, 6]] end if DB.dataset.supports_group_cube? it "#grouping_sets should include sets specified in group" do @ds.group_by(:a, []).grouping_sets.select_map([:a, Sequel.function(:sum, :b).cast(Integer).as(:b), Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, 17, 27], [1, 10, 16], [2, 7, 11]] @ds.group_by([:a, :b], :a, :b, []).grouping_sets.select_map([:a, :b, Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, nil, 27], [nil, 3, 16], [nil, 4, 11], [1, nil, 16], [1, 3, 11], [1, 4, 5], [2, nil, 11], [2, 3, 5], [2, 4, 6]] end if DB.dataset.supports_grouping_sets? end describe "Sequel::Dataset convenience methods" do before(:all) do @db = DB @db.create_table!(:a){Integer :a; Integer :b} @ds = @db[:a].order(:a) @ds = @ds.async if async? end before do wait{@ds.delete} end after(:all) do @db.drop_table?(:a) end it "#empty? should return whether the dataset returns no rows" do @ds.empty?.must_equal true wait{@ds.insert(20, 10)} @ds.empty?.must_equal false end it "#empty? 
should work correctly for datasets with limits" do ds = @ds.limit(1) ds.empty?.must_equal true wait{ds.insert(20, 10)} ds.empty?.must_equal false end it "#empty? should work correctly for datasets with limits and offsets" do ds = @ds.limit(1, 1) ds.empty?.must_equal true wait{ds.insert(20, 10)} ds.empty?.must_equal true wait{ds.insert(20, 10)} ds.empty?.must_equal false end it "#group_and_count should return a grouping by count" do @ds.group_and_count(:a).order{count(:a)}.all.must_equal [] wait{@ds.insert(20, 10)} @ds.group_and_count(:a).order{count(:a)}.all.each{|h| h[:count] = h[:count].to_i}.must_equal [{:a=>20, :count=>1}] wait{@ds.insert(20, 30)} @ds.group_and_count(:a).order{count(:a)}.all.each{|h| h[:count] = h[:count].to_i}.must_equal [{:a=>20, :count=>2}] wait{@ds.insert(30, 30)} @ds.group_and_count(:a).order{count(:a)}.all.each{|h| h[:count] = h[:count].to_i}.must_equal [{:a=>30, :count=>1}, {:a=>20, :count=>2}] end it "#group_and_count should support column aliases" do @ds.group_and_count(Sequel[:a].as(:c)).order{count(:a)}.all.must_equal [] wait{@ds.insert(20, 10)} @ds.group_and_count(Sequel[:a].as(:c)).order{count(:a)}.all.each{|h| h[:count] = h[:count].to_i}.must_equal [{:c=>20, :count=>1}] wait{@ds.insert(20, 30)} @ds.group_and_count(Sequel[:a].as(:c)).order{count(:a)}.all.each{|h| h[:count] = h[:count].to_i}.must_equal [{:c=>20, :count=>2}] wait{@ds.insert(30, 30)} @ds.group_and_count(Sequel[:a].as(:c)).order{count(:a)}.all.each{|h| h[:count] = h[:count].to_i}.must_equal [{:c=>30, :count=>1}, {:c=>20, :count=>2}] end it "#range should return the range between the maximum and minimum values" do @ds = @ds.unordered.extension(:sequel_4_dataset_methods) wait{@ds.insert(20, 10)} wait{@ds.insert(30, 10)} @ds.range(:a).must_equal(20..30) @ds.range(:b).must_equal(10..10) end it "#interval should return the difference between the maximum and minimum values" do @ds = @ds.unordered.extension(:sequel_4_dataset_methods) wait{@ds.insert(20, 10)} wait{@ds.insert(30, 10)} @ds.interval(:a).to_i.must_equal 10 @ds.interval(:b).to_i.must_equal 0 end end describe "Sequel::Dataset main SQL methods" do before(:all) do @db = DB @db.create_table!(:d){Integer :a; Integer :b} @ds = @db[:d].order(:a) @ds = @ds.async if async? 
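    # These specs exercise the core SQL clause builders. For example, #exists
    # (used below) wraps a dataset in an EXISTS subquery suitable for #where,
    # generating SQL roughly like the following (quoting and parenthesization
    # vary by adapter):
    #
    #   SELECT * FROM d WHERE (EXISTS (SELECT * FROM d AS c WHERE (c.a = d.b)))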
end before do wait{@ds.delete} end after(:all) do @db.drop_table?(:d) end it "#exists should return a usable exists clause" do @ds.filter(@db[Sequel[:d].as(:c)].filter(Sequel[:c][:a]=>Sequel[:d][:b]).exists).all.must_equal [] wait{@ds.insert(20, 30)} wait{@ds.insert(10, 20)} @ds.filter(@db[Sequel[:d].as(:c)].filter(Sequel[:c][:a]=>Sequel[:d][:b]).exists).all.must_equal [{:a=>10, :b=>20}] end it "#filter and #exclude should work with placeholder strings" do wait{@ds.insert(20, 30)} @ds.filter(Sequel.lit("a > ?", 15)).all.must_equal [{:a=>20, :b=>30}] @ds.exclude(Sequel.lit("b < ?", 15)).all.must_equal [{:a=>20, :b=>30}] @ds.filter(Sequel.lit("b < ?", 15)).invert.all.must_equal [{:a=>20, :b=>30}] end it "#where and #or should work correctly" do wait{@ds.insert(20, 30)} @ds.filter(:a=>20).where(:b=>30).all.must_equal [{:a=>20, :b=>30}] @ds.filter(:a=>20).where(:b=>15).all.must_equal [] @ds.filter(:a=>20).or(:b=>15).all.must_equal [{:a=>20, :b=>30}] @ds.filter(:a=>10).or(:b=>15).all.must_equal [] end it "#select_group should work correctly" do @ds = @ds.unordered @ds.select_group(:a).all.must_equal [] wait{@ds.insert(20, 30)} @ds.select_group(:a).all.must_equal [{:a=>20}] @ds.select_group(:b).all.must_equal [{:b=>30}] wait{@ds.insert(20, 40)} @ds.select_group(:a).all.must_equal [{:a=>20}] @ds.order(:b).select_group(:b).all.must_equal [{:b=>30}, {:b=>40}] end it "#select_group should work correctly when aliasing" do @ds = @ds.unordered wait{@ds.insert(20, 30)} @ds.select_group(Sequel[:b].as(:c)).all.must_equal [{:c=>30}] end it "#having should work correctly" do @ds = @ds.unordered @ds.select{[b, max(a).as(c)]}.group(:b).having{max(a) > 30}.all.must_equal [] wait{@ds.insert(20, 30)} @ds.select{[b, max(a).as(c)]}.group(:b).having{max(a) > 30}.all.must_equal [] wait{@ds.insert(40, 20)} @ds.select{[b, max(a).as(c)]}.group(:b).having{max(a) > 30}.all.each{|h| h[:c] = h[:c].to_i}.must_equal [{:b=>20, :c=>40}] end cspecify "#having should work without a previous group", [proc{|db| db.sqlite_version < 33900}, :sqlite] do @ds = @ds.unordered @ds.select{max(a).as(c)}.having{max(a) > 30}.all.must_equal [] wait{@ds.insert(20, 30)} @ds.select{max(a).as(c)}.having{max(a) > 30}.all.must_equal [] wait{@ds.insert(40, 20)} @ds.select{max(a).as(c)}.having{max(a) > 30}.all.each{|h| h[:c] = h[:c].to_i}.must_equal [{:c=>40}] end end describe "Sequel::Dataset convenience methods" do before(:all) do @db = DB @db.create_table!(:a){Integer :a; Integer :b; Integer :c; Integer :d} @ds = @db[:a].order(:a) end before do wait{@ds.delete} wait{@ds.insert(1, 2, 3, 4)} wait{@ds.insert(5, 6, 7, 8)} end after(:all) do @db.drop_table?(:a) end it "should have working #map" do @ds.map(:a).must_equal [1, 5] @ds.map(:b).must_equal [2, 6] @ds.map([:a, :b]).must_equal [[1, 2], [5, 6]] end it "should have working #as_hash" do @ds.to_hash(:a).must_equal(1=>{:a=>1, :b=>2, :c=>3, :d=>4}, 5=>{:a=>5, :b=>6, :c=>7, :d=>8}) @ds.as_hash(:a).must_equal(1=>{:a=>1, :b=>2, :c=>3, :d=>4}, 5=>{:a=>5, :b=>6, :c=>7, :d=>8}) @ds.as_hash(:b).must_equal(2=>{:a=>1, :b=>2, :c=>3, :d=>4}, 6=>{:a=>5, :b=>6, :c=>7, :d=>8}) @ds.as_hash([:a, :b]).must_equal([1, 2]=>{:a=>1, :b=>2, :c=>3, :d=>4}, [5, 6]=>{:a=>5, :b=>6, :c=>7, :d=>8}) @ds.as_hash(:a, :b).must_equal(1=>2, 5=>6) @ds.as_hash([:a, :c], :b).must_equal([1, 3]=>2, [5, 7]=>6) @ds.as_hash(:a, [:b, :c]).must_equal(1=>[2, 3], 5=>[6, 7]) @ds.as_hash([:a, :c], [:b, :d]).must_equal([1, 3]=>[2, 4], [5, 7]=>[6, 8]) @ds.extension(:null_dataset).nullify.as_hash([:a, :c], [:b, :d]).must_equal({}) @ds.as_hash(:a, :b, 
:hash => (tmp = {})).must_be_same_as(tmp) end it "should have working #to_hash_groups" do ds = @ds.order(*@ds.columns) wait{ds.insert(1, 2, 3, 9)} ds.to_hash_groups(:a).must_equal(1=>[{:a=>1, :b=>2, :c=>3, :d=>4}, {:a=>1, :b=>2, :c=>3, :d=>9}], 5=>[{:a=>5, :b=>6, :c=>7, :d=>8}]) ds.to_hash_groups(:b).must_equal(2=>[{:a=>1, :b=>2, :c=>3, :d=>4}, {:a=>1, :b=>2, :c=>3, :d=>9}], 6=>[{:a=>5, :b=>6, :c=>7, :d=>8}]) ds.to_hash_groups([:a, :b]).must_equal([1, 2]=>[{:a=>1, :b=>2, :c=>3, :d=>4}, {:a=>1, :b=>2, :c=>3, :d=>9}], [5, 6]=>[{:a=>5, :b=>6, :c=>7, :d=>8}]) ds.to_hash_groups(:a, :d).must_equal(1=>[4, 9], 5=>[8]) ds.to_hash_groups([:a, :c], :d).must_equal([1, 3]=>[4, 9], [5, 7]=>[8]) ds.to_hash_groups(:a, [:b, :d]).must_equal(1=>[[2, 4], [2, 9]], 5=>[[6, 8]]) ds.to_hash_groups([:a, :c], [:b, :d]).must_equal([1, 3]=>[[2, 4], [2, 9]], [5, 7]=>[[6, 8]]) @ds.extension(:null_dataset).nullify.to_hash_groups([:a, :c], [:b, :d]).must_equal({}) ds.to_hash_groups(:a, :d, :hash => (tmp = {})).must_be_same_as(tmp) end it "should have working #select_map" do @ds.select_map(:a).must_equal [1, 5] @ds.select_map(:b).must_equal [2, 6] @ds.select_map([:a]).must_equal [[1], [5]] @ds.select_map([:a, :b]).must_equal [[1, 2], [5, 6]] @ds.extension(:null_dataset).nullify.select_map([:a, :b]).must_equal [] @ds.select_map(Sequel[:a].as(:e)).must_equal [1, 5] @ds.select_map(Sequel[:b].as(:e)).must_equal [2, 6] @ds.select_map([Sequel[:a].as(:e), Sequel[:b].as(:f)]).must_equal [[1, 2], [5, 6]] @ds.select_map([Sequel[:a][:a].as(:e), Sequel[:a][:b].as(:f)]).must_equal [[1, 2], [5, 6]] @ds.select_map([Sequel.expr(Sequel[:a][:a]).as(:e), Sequel.expr(Sequel[:a][:b]).as(:f)]).must_equal [[1, 2], [5, 6]] @ds.select_map([Sequel.qualify(:a, :a).as(:e), Sequel.qualify(:a, :b).as(:f)]).must_equal [[1, 2], [5, 6]] @ds.select_map([Sequel.identifier(:a).qualify(:a).as(:e), Sequel.qualify(:a, :b).as(:f)]).must_equal [[1, 2], [5, 6]] end it "should have working #select_order_map" do @ds.select_order_map(:a).must_equal [1, 5] @ds.select_order_map(Sequel.desc(Sequel[:a][:b])).must_equal [6, 2] @ds.select_order_map(Sequel.desc(Sequel[:a][:b].as(:e))).must_equal [6, 2] @ds.select_order_map(Sequel.qualify(:a, :b).as(:e)).must_equal [2, 6] @ds.select_order_map([:a]).must_equal [[1], [5]] @ds.select_order_map([Sequel.desc(:a), :b]).must_equal [[5, 6], [1, 2]] @ds.extension(:null_dataset).nullify.select_order_map(:a).must_equal [] @ds.select_order_map(Sequel[:a].as(:e)).must_equal [1, 5] @ds.select_order_map(Sequel[:b].as(:e)).must_equal [2, 6] @ds.select_order_map([Sequel.desc(Sequel[:a].as(:e)), Sequel[:b].as(:f)]).must_equal [[5, 6], [1, 2]] @ds.select_order_map([Sequel.desc(Sequel[:a][:a].as(:e)), Sequel[:a][:b].as(:f)]).must_equal [[5, 6], [1, 2]] @ds.select_order_map([Sequel.desc(Sequel[:a][:a]), Sequel.expr(Sequel[:a][:b]).as(:f)]).must_equal [[5, 6], [1, 2]] @ds.select_order_map([Sequel.qualify(:a, :a).desc, Sequel.qualify(:a, :b).as(:f)]).must_equal [[5, 6], [1, 2]] @ds.select_order_map([Sequel.identifier(:a).qualify(:a).desc, Sequel.qualify(:a, :b).as(:f)]).must_equal [[5, 6], [1, 2]] end it "should have working #select_hash" do @ds.select_hash(:a, :b).must_equal(1=>2, 5=>6) @ds.select_hash(Sequel[:a][:a].as(:e), :b).must_equal(1=>2, 5=>6) @ds.select_hash(Sequel.expr(Sequel[:a][:a]).as(:e), :b).must_equal(1=>2, 5=>6) @ds.select_hash(Sequel.qualify(:a, :a).as(:e), :b).must_equal(1=>2, 5=>6) @ds.select_hash(Sequel.identifier(:a).qualify(:a).as(:e), :b).must_equal(1=>2, 5=>6) @ds.select_hash([:a, :c], :b).must_equal([1, 3]=>2, [5, 
7]=>6) @ds.select_hash(:a, [:b, :c]).must_equal(1=>[2, 3], 5=>[6, 7]) @ds.select_hash([:a, :c], [:b, :d]).must_equal([1, 3]=>[2, 4], [5, 7]=>[6, 8]) @ds.select_hash(:a, :b, :hash => (tmp = {})).must_be_same_as(tmp) @ds.extension(:null_dataset).nullify.select_hash(:a, :b).must_equal({}) end it "should have working #select_hash_groups" do ds = @ds.order(*@ds.columns) wait{ds.insert(1, 2, 3, 9)} ds.select_hash_groups(:a, :d).must_equal(1=>[4, 9], 5=>[8]) ds.select_hash_groups(Sequel[:a][:a].as(:e), :d).must_equal(1=>[4, 9], 5=>[8]) ds.select_hash_groups(Sequel.expr(Sequel[:a][:a]).as(:e), :d).must_equal(1=>[4, 9], 5=>[8]) ds.select_hash_groups(Sequel.qualify(:a, :a).as(:e), :d).must_equal(1=>[4, 9], 5=>[8]) ds.select_hash_groups(Sequel.identifier(:a).qualify(:a).as(:e), :d).must_equal(1=>[4, 9], 5=>[8]) ds.select_hash_groups([:a, :c], :d).must_equal([1, 3]=>[4, 9], [5, 7]=>[8]) ds.select_hash_groups(:a, [:b, :d]).must_equal(1=>[[2, 4], [2, 9]], 5=>[[6, 8]]) ds.select_hash_groups([:a, :c], [:b, :d]).must_equal([1, 3]=>[[2, 4], [2, 9]], [5, 7]=>[[6, 8]]) ds.select_hash_groups(:a, :d, :hash => (tmp = {})).must_be_same_as(tmp) @ds.extension(:null_dataset).nullify.select_hash_groups(:a, :d).must_equal({}) end end describe "Sequel::Dataset DSL support" do before(:all) do @db = DB @db.create_table!(:a){Integer :a; Integer :b} @ds = @db[:a].order(:a) @ds = @ds.async if async? end before do wait{@ds.delete} end after(:all) do @db.drop_table?(:a) end it "should work with standard mathematical operators" do wait{@ds.insert(20, 10)} @ds.get{a + b}.to_i.must_equal 30 @ds.get{a - b}.to_i.must_equal 10 @ds.get{a * b}.to_i.must_equal 200 @ds.get{a / b}.to_i.must_equal 2 end it "should work with exponentiation operator" do wait{@ds.insert(:a=>2)} (-4..4).each do |i| # Allow minor differences due to emulation issues on some adapters @ds.get{(a / 1.0) ** i}.to_f.must_be_close_to((2**i).to_f) end end cspecify "should work with bitwise shift operators", :derby do wait{@ds.insert(3, 2)} b = Sequel[:b] b = b.cast(:integer) if @db.database_type == :postgres @ds.get{a.sql_number << b}.to_i.must_equal 12 @ds.get{a.sql_number >> b}.to_i.must_equal 0 @ds.get{a.sql_number << b << 1}.to_i.must_equal 24 wait{@ds.delete} wait{@ds.insert(3, 1)} @ds.get{a.sql_number << b}.to_i.must_equal 6 @ds.get{a.sql_number >> b}.to_i.must_equal 1 @ds.get{a.sql_number >> b >> 1}.to_i.must_equal 0 end cspecify "should work with bitwise AND and OR operators", :derby do wait{@ds.insert(3, 5)} @ds.get{a.sql_number | b}.to_i.must_equal 7 @ds.get{a.sql_number & b}.to_i.must_equal 1 @ds.get{a.sql_number | b | 8}.to_i.must_equal 15 @ds.get{a.sql_number & b & 8}.to_i.must_equal 0 end it "should work with the bitwise complement operator" do wait{@ds.insert(-3, 3)} @ds.get{~a.sql_number}.to_i.must_equal 2 @ds.get{~b.sql_number}.to_i.must_equal(-4) end cspecify "should work with the bitwise xor operator", :derby do wait{@ds.insert(3, 5)} @ds.get{a.sql_number ^ b}.to_i.must_equal 6 @ds.get{a.sql_number ^ b ^ 1}.to_i.must_equal 7 end it "should work with the modulus operator" do wait{@ds.insert(3, 5)} @ds.get{a.sql_number % 4}.to_i.must_equal 3 @ds.get{b.sql_number % 4}.to_i.must_equal 1 @ds.get{a.sql_number % 4 % 2}.to_i.must_equal 1 end it "should work with inequality operators" do wait{@ds.insert(10, 11)} wait{@ds.insert(11, 11)} wait{@ds.insert(20, 19)} wait{@ds.insert(20, 20)} @ds.filter{a > b}.select_order_map(:a).must_equal [20] @ds.filter{a >= b}.select_order_map(:a).must_equal [11, 20, 20] @ds.filter{a < b}.select_order_map(:a).must_equal [10] 
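    # <= (like >= above) includes ties, so the rows where a equals b also match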
@ds.filter{a <= b}.select_order_map(:a).must_equal [10, 11, 20] end it "should work with casting and string concatenation" do wait{@ds.insert(20, 20)} @ds.get{Sequel.cast(a, String).sql_string + Sequel.cast(b, String)}.must_equal '2020' end it "should work with ordering" do wait{@ds.insert(10, 20)} wait{@ds.insert(20, 10)} @ds.order(:a, :b).all.must_equal [{:a=>10, :b=>20}, {:a=>20, :b=>10}] @ds.order(Sequel.asc(:a), Sequel.asc(:b)).all.must_equal [{:a=>10, :b=>20}, {:a=>20, :b=>10}] @ds.order(Sequel.desc(:a), Sequel.desc(:b)).all.must_equal [{:a=>20, :b=>10}, {:a=>10, :b=>20}] end it "should work with qualifying" do wait{@ds.insert(10, 20)} @ds.get(Sequel[:a][:b]).must_equal 20 @ds.get{a[:b]}.must_equal 20 @ds.get(Sequel.qualify(:a, :b)).must_equal 20 end it "should work with aliasing" do wait{@ds.insert(10, 20)} @ds.get(Sequel[:a][:b].as(:c)).must_equal 20 @ds.get{a[:b].as(c)}.must_equal 20 @ds.get(Sequel.qualify(:a, :b).as(:c)).must_equal 20 @ds.get(Sequel.as(:b, :c)).must_equal 20 end it "should work with selecting all columns of a table" do wait{@ds.insert(20, 10)} @ds.select_all(:a).all.must_equal [{:a=>20, :b=>10}] end it "should work with ranges as hash values" do wait{@ds.insert(20, 10)} @ds.filter(:a=>(10..30)).all.must_equal [{:a=>20, :b=>10}] @ds.filter(:a=>(25..30)).all.must_equal [] @ds.filter(:a=>(10..15)).all.must_equal [] @ds.exclude(:a=>(10..30)).all.must_equal [] @ds.exclude(:a=>(25..30)).all.must_equal [{:a=>20, :b=>10}] @ds.exclude(:a=>(10..15)).all.must_equal [{:a=>20, :b=>10}] end it "should work with nil as hash value" do wait{@ds.insert(20, nil)} @ds.filter(:a=>nil).all.must_equal [] @ds.filter(:b=>nil).all.must_equal [{:a=>20, :b=>nil}] @ds.exclude(:b=>nil).all.must_equal [] @ds.exclude(:a=>nil).all.must_equal [{:a=>20, :b=>nil}] end it "should work with arrays as hash values" do wait{@ds.insert(20, 10)} @ds.filter(:a=>[10]).all.must_equal [] @ds.filter(:a=>[20, 10]).all.must_equal [{:a=>20, :b=>10}] @ds.exclude(:a=>[10]).all.must_equal [{:a=>20, :b=>10}] @ds.exclude(:a=>[20, 10]).all.must_equal [] end it "should work with endless ranges as hash values" do wait{@ds.insert(20, 10)} @ds.filter(:a=>eval('(30..)')).all.must_equal [] @ds.filter(:a=>eval('(20...)')).all.must_equal [{:a=>20, :b=>10}] @ds.filter(:a=>eval('(20..)')).all.must_equal [{:a=>20, :b=>10}] @ds.filter(:a=>eval('(10..)')).all.must_equal [{:a=>20, :b=>10}] end if RUBY_VERSION >= '2.6' it "should work with startless ranges as hash values" do wait{@ds.insert(20, 10)} @ds.filter(:a=>eval('(..30)')).all.must_equal [{:a=>20, :b=>10}] @ds.filter(:a=>eval('(...30)')).all.must_equal [{:a=>20, :b=>10}] @ds.filter(:a=>eval('(..20)')).all.must_equal [{:a=>20, :b=>10}] @ds.filter(:a=>eval('(...20)')).all.must_equal [] @ds.filter(:a=>eval('(..10)')).all.must_equal [] @ds.filter(:a=>eval('(...10)')).all.must_equal [] @ds.filter(:a=>eval('nil..nil')).all.must_equal [{:a=>20, :b=>10}] end if RUBY_VERSION >= '2.7' it "should work with CASE statements" do wait{@ds.insert(20, 10)} @ds.filter(Sequel.case({{:a=>20}=>20}, 0) > 0).all.must_equal [{:a=>20, :b=>10}] @ds.filter(Sequel.case({{:a=>15}=>20}, 0) > 0).all.must_equal [] @ds.filter(Sequel.case({20=>20}, 0, :a) > 0).all.must_equal [{:a=>20, :b=>10}] @ds.filter(Sequel.case({15=>20}, 0, :a) > 0).all.must_equal [] end it "should work with multiple value arrays" do wait{@ds.insert(20, 10)} @ds.filter([:a, :b]=>[[20, 10]]).all.must_equal [{:a=>20, :b=>10}] @ds.filter([:a, :b]=>[[10, 20]]).all.must_equal [] @ds.filter([:a, :b]=>[[20, 10], [1, 2]]).all.must_equal 
[{:a=>20, :b=>10}] @ds.filter([:a, :b]=>[[10, 10], [20, 20]]).all.must_equal [] @ds.exclude([:a, :b]=>[[20, 10]]).all.must_equal [] @ds.exclude([:a, :b]=>[[10, 20]]).all.must_equal [{:a=>20, :b=>10}] @ds.exclude([:a, :b]=>[[20, 10], [1, 2]]).all.must_equal [] @ds.exclude([:a, :b]=>[[10, 10], [20, 20]]).all.must_equal [{:a=>20, :b=>10}] end it "should work with IN/NOT IN with datasets" do wait{@ds.insert(20, 10)} ds = @ds.unordered @ds.filter(:a=>ds.select(:a)).all.must_equal [{:a=>20, :b=>10}] @ds.filter(:a=>ds.select(:a).where(:a=>15)).all.must_equal [] @ds.exclude(:a=>ds.select(:a)).all.must_equal [] @ds.exclude(:a=>ds.select(:a).where(:a=>15)).all.must_equal [{:a=>20, :b=>10}] @ds.filter([:a, :b]=>ds.select(:a, :b)).all.must_equal [{:a=>20, :b=>10}] @ds.filter([:a, :b]=>ds.select(:b, :a)).all.must_equal [] @ds.exclude([:a, :b]=>ds.select(:a, :b)).all.must_equal [] @ds.exclude([:a, :b]=>ds.select(:b, :a)).all.must_equal [{:a=>20, :b=>10}] @ds.filter([:a, :b]=>ds.select(:a, :b).where(:a=>15)).all.must_equal [] @ds.exclude([:a, :b]=>ds.select(:a, :b).where(:a=>15)).all.must_equal [{:a=>20, :b=>10}] end it "should work with empty arrays" do wait{@ds.insert(20, 10)} @ds.filter(:a=>[]).all.must_equal [] @ds.exclude(:a=>[]).all.must_equal [{:a=>20, :b=>10}] @ds.filter([:a, :b]=>[]).all.must_equal [] @ds.exclude([:a, :b]=>[]).all.must_equal [{:a=>20, :b=>10}] end it "should work with empty arrays with nulls when using empty_array_consider_nulls extension" do @ds = @ds.extension(:empty_array_consider_nulls) wait{@ds.insert(nil, nil)} @ds.filter(:a=>[]).all.must_equal [] @ds.exclude(:a=>[]).all.must_equal [] @ds.filter([:a, :b]=>[]).all.must_equal [] @ds.exclude([:a, :b]=>[]).all.must_equal [] unless Sequel.guarded?(:mssql, :oracle, :db2, :sqlanywhere) # Some databases don't like boolean results in the select list pr = proc{|r| r.is_a?(Integer) ? (r != 0) : r} pr[@ds.get(Sequel.expr(:a=>[]))].must_be_nil pr[@ds.get(~Sequel.expr(:a=>[]))].must_be_nil pr[@ds.get(Sequel.expr([:a, :b]=>[]))].must_be_nil pr[@ds.get(~Sequel.expr([:a, :b]=>[]))].must_be_nil end end it "should work with empty arrays with nulls" do ds = @ds wait{ds.insert(nil, nil)} ds.filter(:a=>[]).all.must_equal [] ds.exclude(:a=>[]).all.must_equal [{:a=>nil, :b=>nil}] ds.filter([:a, :b]=>[]).all.must_equal [] ds.exclude([:a, :b]=>[]).all.must_equal [{:a=>nil, :b=>nil}] unless Sequel.guarded?(:mssql, :oracle, :db2, :sqlanywhere) # Some databases don't like boolean results in the select list pr = proc{|r| r.is_a?(Integer) ? (r != 0) : r} pr[ds.get(Sequel.expr(:a=>[]))].must_equal false pr[ds.get(~Sequel.expr(:a=>[]))].must_equal true pr[ds.get(Sequel.expr([:a, :b]=>[]))].must_equal false pr[ds.get(~Sequel.expr([:a, :b]=>[]))].must_equal true end end it "should work with multiple conditions" do wait{@ds.insert(20, 10)} @ds.filter(:a=>20, :b=>10).all.must_equal [{:a=>20, :b=>10}] @ds.filter([[:a, 20], [:b, 10]]).all.must_equal [{:a=>20, :b=>10}] @ds.filter(Sequel.&({:a=>20}, {:b=>10})).all.must_equal [{:a=>20, :b=>10}] @ds.filter(Sequel.|({:a=>20}, {:b=>5})).all.must_equal [{:a=>20, :b=>10}] @ds.filter(Sequel.~(:a=>10)).all.must_equal [{:a=>20, :b=>10}] end end describe "SQL Extract Function" do before do @db = DB @db.create_table!(:a){DateTime :a} @ds = @db[:a].order(:a) @ds = @ds.async if async? 
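    # Sequel's #extract maps to the SQL standard EXTRACT(field FROM source)
    # expression (emulated on databases without native support), e.g.
    # @ds.get{a.extract(:year)} runs SQL roughly like:
    #
    #   SELECT extract(year FROM a) FROM a LIMIT 1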
end after do @db.drop_table?(:a) end it "should return the part of the datetime asked for" do t = Time.now @ds = @ds.with_extend{def supports_timestamp_timezones?() false end} wait{@ds.insert(t)} @ds.get{a.extract(:year)}.must_equal t.year @ds.get{a.extract(:month)}.must_equal t.month @ds.get{a.extract(:day)}.must_equal t.day @ds.get{a.extract(:hour)}.must_equal t.hour @ds.get{a.extract(:minute)}.must_equal t.min sec = @ds.get{a.extract(:second)}.to_i if t.usec >= 999500 sec.must_be_close_to 1, t.sec else sec.to_i.must_equal t.sec end end end describe "Dataset string methods" do before(:all) do @db = DB csc = {} cic = {} if @db.database_type == :mssql csc[:collate] = 'Latin1_General_CS_AS' cic[:collate] = 'Latin1_General_CI_AS' end @db.create_table!(:a) do String :a, csc String :b, cic end @ds = @db[:a].order(:a) @ds = @ds.async if async? end before do wait{@ds.delete} end after(:all) do @db.drop_table?(:a) end it "#grep should return matching rows" do wait{@ds.insert('foo', 'bar')} @ds.grep(:a, 'foo').all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.grep(:b, 'foo').all.must_equal [] @ds.grep(:b, 'bar').all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.grep(:a, 'bar').all.must_equal [] @ds.grep([:a, :b], %w'foo bar').all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.grep([:a, :b], %w'boo far').all.must_equal [] end it "#grep should work with :all_patterns and :all_columns options" do wait{@ds.insert('foo bar', ' ')} wait{@ds.insert('foo d', 'bar')} wait{@ds.insert('foo e', ' ')} wait{@ds.insert(' ', 'bar')} wait{@ds.insert('foo f', 'baz')} wait{@ds.insert('foo baz', 'bar baz')} wait{@ds.insert('foo boo', 'boo foo')} @ds.grep([:a, :b], %w'%foo% %bar%', :all_patterns=>true).all.must_equal [{:a=>'foo bar', :b=>' '}, {:a=>'foo baz', :b=>'bar baz'}, {:a=>'foo d', :b=>'bar'}] @ds.grep([:a, :b], %w'%foo% %bar% %blob%', :all_patterns=>true).all.must_equal [] @ds.grep([:a, :b], %w'%bar% %foo%', :all_columns=>true).all.must_equal [{:a=>"foo baz", :b=>"bar baz"}, {:a=>"foo boo", :b=>"boo foo"}, {:a=>"foo d", :b=>"bar"}] @ds.grep([:a, :b], %w'%baz%', :all_columns=>true).all.must_equal [{:a=>'foo baz', :b=>'bar baz'}] @ds.grep([:a, :b], %w'%baz% %foo%', :all_columns=>true, :all_patterns=>true).all.must_equal [] @ds.grep([:a, :b], %w'%boo% %foo%', :all_columns=>true, :all_patterns=>true).all.must_equal [{:a=>'foo boo', :b=>'boo foo'}] end it "#like should return matching rows" do wait{@ds.insert('foo', 'bar')} @ds.filter(Sequel.expr(:a).like('foo')).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.filter(Sequel.expr(:a).like('bar')).all.must_equal [] @ds.filter(Sequel.expr(:a).like('foo', 'bar')).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:a).like('foo')).all.must_equal [] @ds.exclude(Sequel.expr(:a).like('bar')).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:a).like('foo', 'bar')).all.must_equal [] end it "#like should be case sensitive" do wait{@ds.insert('foo', 'bar')} @ds.filter(Sequel.expr(:a).like('Foo')).all.must_equal [] @ds.filter(Sequel.expr(:b).like('baR')).all.must_equal [] @ds.filter(Sequel.expr(:a).like('FOO', 'BAR')).all.must_equal [] @ds.exclude(Sequel.expr(:a).like('Foo')).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:b).like('baR')).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:a).like('FOO', 'BAR')).all.must_equal [{:a=>'foo', :b=>'bar'}] end it "#ilike should return matching rows, in a case insensitive manner" do wait{@ds.insert('foo', 'bar')} @ds.filter(Sequel.expr(:a).ilike('Foo')).all.must_equal [{:a=>'foo', :b=>'bar'}] 
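    # 'baR' is matched against column :a here, which holds 'foo', so even a
    # case-insensitive match finds nothing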
@ds.filter(Sequel.expr(:a).ilike('baR')).all.must_equal [] @ds.filter(Sequel.expr(:a).ilike('FOO', 'BAR')).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:a).ilike('Foo')).all.must_equal [] @ds.exclude(Sequel.expr(:a).ilike('baR')).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:a).ilike('FOO', 'BAR')).all.must_equal [] end it "#escape_like should escape any metacharacters" do wait{@ds.insert('foo', 'bar')} wait{@ds.insert('foo.', 'bar..')} wait{@ds.insert('foo\\..', 'bar\\..')} wait{@ds.insert('foo\\_', 'bar\\%')} wait{@ds.insert('foo_', 'bar%')} wait{@ds.insert('foo_.', 'bar%.')} wait{@ds.insert('foo_..', 'bar%..')} wait{@ds.insert('[f#*?oo_]', '[bar%]')} @ds.filter(Sequel.expr(:a).like(@ds.escape_like('foo_'))).select_order_map(:a).must_equal ['foo_'] @ds.filter(Sequel.expr(:b).like(@ds.escape_like('bar%'))).select_order_map(:b).must_equal ['bar%'] @ds.filter(Sequel.expr(:a).like(@ds.escape_like('foo\\_'))).select_order_map(:a).must_equal ['foo\\_'] @ds.filter(Sequel.expr(:b).like(@ds.escape_like('bar\\%'))).select_order_map(:b).must_equal ['bar\\%'] @ds.filter(Sequel.expr(:a).like(@ds.escape_like('[f#*?oo_]'))).select_order_map(:a).must_equal ['[f#*?oo_]'] @ds.filter(Sequel.expr(:b).like(@ds.escape_like('[bar%]'))).select_order_map(:b).must_equal ['[bar%]'] @ds.filter(Sequel.expr(:b).like("#{@ds.escape_like('bar%')}_")).select_order_map(:b).must_equal ['bar%.'] @ds.filter(Sequel.expr(:b).like("#{@ds.escape_like('bar%')}%")).select_order_map(:b).must_equal ['bar%', 'bar%.', 'bar%..'] @ds.filter(Sequel.expr(:a).ilike(@ds.escape_like('Foo_'))).select_order_map(:a).must_equal ['foo_'] @ds.filter(Sequel.expr(:b).ilike(@ds.escape_like('Bar%'))).select_order_map(:b).must_equal ['bar%'] @ds.filter(Sequel.expr(:a).ilike(@ds.escape_like('Foo\\_'))).select_order_map(:a).must_equal ['foo\\_'] @ds.filter(Sequel.expr(:b).ilike(@ds.escape_like('Bar\\%'))).select_order_map(:b).must_equal ['bar\\%'] @ds.filter(Sequel.expr(:a).ilike(@ds.escape_like('[F#*?oo_]'))).select_order_map(:a).must_equal ['[f#*?oo_]'] @ds.filter(Sequel.expr(:b).ilike(@ds.escape_like('[Bar%]'))).select_order_map(:b).must_equal ['[bar%]'] @ds.filter(Sequel.expr(:b).ilike("#{@ds.escape_like('Bar%')}_")).select_order_map(:b).must_equal ['bar%.'] @ds.filter(Sequel.expr(:b).ilike("#{@ds.escape_like('Bar%')}%")).select_order_map(:b).must_equal ['bar%', 'bar%.', 'bar%..'] Sequel.extension(:escaped_like) @ds.filter(Sequel.expr(:a).escaped_like('?', 'Foo_')).select_order_map(:a).must_equal [] @ds.filter(Sequel.expr(:a).escaped_like('?', 'foo_')).select_order_map(:a).must_equal ['foo_'] @ds.filter(Sequel.expr(:b).escaped_like('?', ['bar%'])).select_order_map(:b).must_equal ['bar%'] @ds.filter(Sequel.expr(:a).escaped_like('??', ['fo', 'o\\_'])).select_order_map(:a).must_equal ['foo\\_'] @ds.filter(Sequel.expr(:b).escaped_like('?', 'bar\\%')).select_order_map(:b).must_equal ['bar\\%'] @ds.filter(Sequel.expr(:a).escaped_like('?', '[f#*?oo_]')).select_order_map(:a).must_equal ['[f#*?oo_]'] @ds.filter(Sequel.expr(:b).escaped_like('?', '[bar%]')).select_order_map(:b).must_equal ['[bar%]'] @ds.filter(Sequel.expr(:b).escaped_like('?_', 'bar%')).select_order_map(:b).must_equal ['bar%.'] @ds.filter(Sequel.expr(:b).escaped_like('?%', 'bar%')).select_order_map(:b).must_equal ['bar%', 'bar%.', 'bar%..'] @ds.filter(Sequel.expr(:a).escaped_ilike('?', 'Foo_')).select_order_map(:a).must_equal ['foo_'] @ds.filter(Sequel.expr(:a).escaped_ilike('?', 'Foo_')).select_order_map(:a).must_equal ['foo_'] 
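    # escaped_like/escaped_ilike (from the escaped_like extension loaded above)
    # build a LIKE pattern from a format string, escaping the LIKE
    # metacharacters (% and _) in each ? placeholder argument, so '?%' with
    # 'Bar%' matches strings beginning with the literal text "bar%".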
@ds.filter(Sequel.expr(:b).escaped_ilike('?', ['Bar%'])).select_order_map(:b).must_equal ['bar%'] @ds.filter(Sequel.expr(:a).escaped_ilike('??', ['Fo', 'o\\_'])).select_order_map(:a).must_equal ['foo\\_'] @ds.filter(Sequel.expr(:b).escaped_ilike('?', 'Bar\\%')).select_order_map(:b).must_equal ['bar\\%'] @ds.filter(Sequel.expr(:a).escaped_ilike('?', '[F#*?oo_]')).select_order_map(:a).must_equal ['[f#*?oo_]'] @ds.filter(Sequel.expr(:b).escaped_ilike('?', '[Bar%]')).select_order_map(:b).must_equal ['[bar%]'] @ds.filter(Sequel.expr(:b).escaped_ilike('?_', 'Bar%')).select_order_map(:b).must_equal ['bar%.'] @ds.filter(Sequel.expr(:b).escaped_ilike('?%', 'Bar%')).select_order_map(:b).must_equal ['bar%', 'bar%.', 'bar%..'] end if DB.dataset.supports_regexp? it "#like with regexp should return matching rows" do wait{@ds.insert('foo', 'bar')} @ds.filter(Sequel.expr(:a).like(/fo/)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.filter(Sequel.expr(:a).like(/fo$/)).all.must_equal [] @ds.filter(Sequel.expr(:a).like(/fo/, /ar/)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:a).like(/fo/)).all.must_equal [] @ds.exclude(Sequel.expr(:a).like(/fo$/)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:a).like(/fo/, /ar/)).all.must_equal [] end it "#like with regexp should be case sensitive if regexp is case sensitive" do wait{@ds.insert('foo', 'bar')} @ds.filter(Sequel.expr(:a).like(/Fo/)).all.must_equal [] @ds.filter(Sequel.expr(:b).like(/baR/)).all.must_equal [] @ds.filter(Sequel.expr(:a).like(/FOO/, /BAR/)).all.must_equal [] @ds.exclude(Sequel.expr(:a).like(/Fo/)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:b).like(/baR/)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:a).like(/FOO/, /BAR/)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.filter(Sequel.expr(:a).like(/Fo/i)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.filter(Sequel.expr(:b).like(/baR/i)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.filter(Sequel.expr(:a).like(/FOO/i, /BAR/i)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:a).like(/Fo/i)).all.must_equal [] @ds.exclude(Sequel.expr(:b).like(/baR/i)).all.must_equal [] @ds.exclude(Sequel.expr(:a).like(/FOO/i, /BAR/i)).all.must_equal [] end it "#ilike with regexp should return matching rows, in a case insensitive manner" do wait{@ds.insert('foo', 'bar')} @ds.filter(Sequel.expr(:a).ilike(/Fo/)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.filter(Sequel.expr(:b).ilike(/baR/)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.filter(Sequel.expr(:a).ilike(/FOO/, /BAR/)).all.must_equal [{:a=>'foo', :b=>'bar'}] @ds.exclude(Sequel.expr(:a).ilike(/Fo/)).all.must_equal [] @ds.exclude(Sequel.expr(:b).ilike(/baR/)).all.must_equal [] @ds.exclude(Sequel.expr(:a).ilike(/FOO/, /BAR/)).all.must_equal [] end end it "should work with strings created with Sequel.join" do wait{@ds.insert('foo', 'bar')} @ds.get(Sequel.join([:a, "bar"])).must_equal 'foobar' @ds.get(Sequel.join(["foo", :b], ' ')).must_equal 'foo bar' end end describe "Dataset identifier methods" do before(:all) do class ::String def uprev upcase.reverse end end @db = DB @db.create_table!(:a){Integer :ab} @db[:a].insert(1) end before do @ds = @db[:a].order(:ab) end after(:all) do @db.drop_table?(:a) end it "#identifier_output_method should change how identifiers are output" do @ds.with_identifier_output_method(:upcase).first.must_equal(:AB=>1) @ds.with_identifier_output_method(:uprev).first.must_equal(:BA=>1) end it "should work with a nil identifier_output_method" do [{:ab=>1}, 
{:AB=>1}].must_include(@ds.with_identifier_output_method(nil).first) end it "should work when not quoting identifiers" do @ds.with_quote_identifiers(false).first.must_equal(:ab=>1) end end if IDENTIFIER_MANGLING if DB.dataset.supports_updating_joins? || DB.dataset.supports_deleting_joins? describe "Modifying joined datasets" do before do @db = DB @db.create_table!(:a){Integer :a; Integer :d} @db.create_table!(:b){Integer :b; Integer :e} @db.create_table!(:c){Integer :c; Integer :f} @ds = @db.from(:a, :b).join(:c, {:c=>Sequel.identifier(:e)}, :qualify=>:symbol).where(:d=>:b, :f=>6) @db[:a].insert(1, 2) @db[:a].insert(3, 4) @db[:b].insert(2, 5) @db[:c].insert(5, 6) @db[:b].insert(4, 7) @db[:c].insert(7, 8) @ds = @ds.async if async? end after do @db.drop_table?(:a, :b, :c) end if DB.dataset.supports_updating_joins? it "#update should allow updating joined datasets" do wait{@ds.update(:a=>10)} @ds.all.must_equal [{:c=>5, :b=>2, :a=>10, :d=>2, :e=>5, :f=>6}] @db[:a].order(:a).all.must_equal [{:a=>3, :d=>4}, {:a=>10, :d=>2}] @db[:b].order(:b).all.must_equal [{:b=>2, :e=>5}, {:b=>4, :e=>7}] @db[:c].order(:c).all.must_equal [{:c=>5, :f=>6}, {:c=>7, :f=>8}] end end if DB.dataset.supports_deleting_joins? it "#delete should allow deleting from joined datasets" do wait{@ds.delete} @ds.all.must_equal [] @db[:a].order(:a).all.must_equal [{:a=>3, :d=>4}] @db[:b].order(:b).all.must_equal [{:b=>2, :e=>5}, {:b=>4, :e=>7}] @db[:c].order(:c).all.must_equal [{:c=>5, :f=>6}, {:c=>7, :f=>8}] end end end end describe "Emulated functions" do before(:all) do @db = DB @db.create_table!(:a){String :a} @ds = @db[:a] @ds = @ds.async if async? end after(:all) do @db.drop_table?(:a) end after do wait{@ds.delete} end it "Sequel.char_length should return the length of characters in the string" do @ds.get(Sequel.char_length(:a)).must_be_nil wait{@ds.insert(:a=>'foo')} @ds.get(Sequel.char_length(:a)).must_equal 3 # Check behavior with leading/trailing blanks wait{@ds.update(:a=>' foo22 ')} @ds.get(Sequel.char_length(:a)).must_equal 7 end it "Sequel.trim should return the string with spaces trimmed from both sides" do @ds.get(Sequel.trim(:a)).must_be_nil wait{@ds.insert(:a=>'foo')} @ds.get(Sequel.trim(:a)).must_equal 'foo' # Check behavior with leading/trailing blanks wait{@ds.update(:a=>' foo22 ')} @ds.get(Sequel.trim(:a)).must_equal 'foo22' end end describe "Dataset replace" do before do DB.create_table!(:items){Integer :id, :unique=>true; Integer :value} @d = DB[:items] end after do DB.drop_table?(:items) end it "should support arrays, datasets, and multiple values" do @d.replace([1, 2]) @d.all.must_equal [{:id=>1, :value=>2}] @d.replace(1, 2) @d.all.must_equal [{:id=>1, :value=>2}] @d.replace(@d) @d.all.must_equal [{:id=>1, :value=>2}] end it "should create a record if the condition is not met" do @d.replace(:id => 111, :value => 333) @d.all.must_equal [{:id => 111, :value => 333}] end it "should update a record if the condition is met" do @d.insert(:id => 111) @d.all.must_equal [{:id => 111, :value => nil}] @d.replace(:id => 111, :value => 333) @d.all.must_equal [{:id => 111, :value => 333}] end end if DB.dataset.supports_replace? 
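# REPLACE is only supported on a few databases (MySQL and SQLite, for
# example). It behaves like INSERT, except that any existing row conflicting
# on a unique constraint is deleted first, which is why the specs above can
# overwrite the id=111 row without an explicit UPDATE. A rough SQL sketch:
#
#   REPLACE INTO items (id, value) VALUES (111, 333)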
describe "Concurrent access" do before do @ds = DB.select(Sequel[1].as(:v)).union(DB.select 2) end after do DB.disconnect end it "should support multiple threads" do threads = 4.times.map do q = Queue.new q2 = Queue.new [q, q2, Thread.new{@ds.each{|r| q.push(r[:v]); q2.pop}}] end threads.each{|q,| q.pop.must_equal 1} threads.each{|_,q2| q2.push nil} threads.each{|q,| q.pop.must_equal 2} threads.each{|_,q2| q2.push nil} threads.each{|_,_,t| t.join} end if ENV["SEQUEL_FIBER_CONCURRENCY"] it "should support multiple enumerators" do enums = 4.times.map{@ds.to_enum} enums.each{|e| e.next[:v].must_equal 1} enums.each{|e| e.next[:v].must_equal 2} enums.each{|e| proc{e.next}.must_raise StopIteration} end it "should support multiple fibers" do fibers = 4.times.map{Fiber.new{@ds.each{|r| Fiber.yield r[:v]}; 3}} fibers.each{|f| f.resume.must_equal 1} fibers.each{|f| f.resume.must_equal 2} fibers.each{|f| f.resume.must_equal 3} end end end if [:threaded, :sharded_threaded].include?(DB.pool.pool_type) && DB.pool.max_size >= 4 && DB.database_type != :derby && DB.database_type != :sqlanywhere describe "MERGE" do before(:all) do @db = DB @db.create_table!(:m1){Integer :i1; Integer :a} @db.create_table!(:m2){Integer :i2; Integer :b} @m1 = @db[:m1] @m2 = @db[:m2] end after do @m1.delete @m2.delete end after(:all) do @db.drop_table?(:m1, :m2) end def check(ds) @m2.insert(1, 2) @m1.all.must_equal [] # INSERT ds.merge @m1.all.must_equal [{:i1=>1, :a=>13}] # UPDATE ds.merge @m1.all.must_equal [{:i1=>12, :a=>35}] # DELETE MATCHING current row, INSERT NOT MATCHED new row @m2.insert(12, 3) ds.merge @m1.all.must_equal [{:i1=>1, :a=>13}] # MATCHED DO NOTHING @m2.where(:i2=>12).delete @m1.update(:a=>51) ds.merge @m1.all.must_equal [{:i1=>1, :a=>51}] # NOT MATCHED DO NOTHING @m1.delete @m2.update(:b=>51) ds.merge @m1.all.must_equal [] end it "should allow inserts, updates, and deletes based on conditions in a single MERGE statement" do ds = @m1. merge_using(:m2, :i1=>:i2). merge_insert(:i1=>Sequel[:i2], :a=>Sequel[:b]+11){b <= 50}. merge_delete{{:a => 30..50}}. merge_update(:i1=>Sequel[:i1]+:i2+10, :a=>Sequel[:a]+:b+20){a <= 50} check(ds) end cspecify "should support WITH clauses", :db2 do ds = @m1. with(:m3, @db[:m2]). merge_using(:m3, :i1=>:i2). merge_insert(:i1=>Sequel[:i2], :a=>Sequel[:b]+11){b <= 50}. merge_delete{{:a => 30..50}}. merge_update(:i1=>Sequel[:i1]+:i2+10, :a=>Sequel[:a]+:b+20){a <= 50} check(ds) end if DB.dataset.supports_cte? it "should support inserts with just columns" do ds = @m1. merge_using(:m2, :i1=>:i2). merge_insert(Sequel[:i2], Sequel[:b]+11){b <= 50}. merge_delete{{:a => 30..50}}. merge_update(:i1=>Sequel[:i1]+:i2+10, :a=>Sequel[:a]+:b+20){a <= 50} check(ds) end it "should call inserts, updates, and deletes without conditions" do @m2.insert(1, 2) ds = @m1.merge_using(:m2, :i1=>:i2) ds.merge_insert(:i2, :b).merge @m1.all.must_equal [{:i1=>1, :a=>2}] ds.merge_update(:a=>Sequel[:a]+1).merge @m1.all.must_equal [{:i1=>1, :a=>3}] ds.merge_delete.merge @m1.all.must_equal [] end it "should raise if a merge is attempted without WHEN clauses" do proc{@m1.merge_using(:m2, :i1=>:i2).merge}.must_raise Sequel::Error end it "should raise if a merge is attempted without a merge source" do proc{@m1.merge_delete.merge}.must_raise Sequel::Error end it "should handle uncachable SQL" do v = true @m2.insert(1, 2) ds = @m1. merge_using(:m2, :i1=>:i2). 
merge_insert(Sequel[:i2], Sequel[:b]+11){Sequel.delay{v}} ds.merge @m1.all.must_equal [{:i1=>1, :a=>13}] @m1.delete v = false ds.merge @m1.all.must_equal [] end end if DB.dataset.supports_merge? && DB.database_type != :oracle sequel-5.63.0/spec/integration/eager_loader_test.rb000066400000000000000000000575301434214120600223640ustar00rootroot00000000000000require_relative "spec_helper" describe "Eagerly loading a tree structure" do before(:all) do DB.instance_variable_get(:@schemas).clear DB.create_table!(:nodes) do primary_key :id foreign_key :parent_id, :nodes end class ::Node < Sequel::Model many_to_one :parent one_to_many :children, :key=>:parent_id # Only useful when eager loading many_to_one :ancestors, :eager_loader_key=>nil, :eager_loader=>(proc do |eo| # Handle cases where the root node has the same parent_id as primary_key # and also when it is NULL non_root_nodes = eo[:rows].reject do |n| if [nil, n.pk].include?(n.parent_id) # Make sure root nodes have their parent association set to nil n.associations[:parent] = nil true else false end end unless non_root_nodes.empty? id_map = {} # Create a map of parent_ids to nodes that have that parent id non_root_nodes.each{|n| (id_map[n.parent_id] ||= []) << n} # Doesn't cause an infinite loop, because when only the root node # is left, this is not called. Node.filter(Node.primary_key=>id_map.keys.sort).eager(:ancestors).all do |node| # Populate the parent association for each node id_map[node.pk].each{|n| n.associations[:parent] = node} end end end) many_to_one :descendants, :eager_loader_key=>nil, :eager_loader=>(proc do |eo| id_map = {} eo[:rows].each do |n| # Initialize an empty array of child associations for each parent node n.associations[:children] = [] # Populate identity map of nodes id_map[n.pk] = n end # Doesn't cause an infinite loop, because the :eager_loader is not called # if no records are returned. Exclude id = parent_id to avoid infinite loop # if the root node is one of the returned records and it has parent_id = id # instead of parent_id = NULL. 
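        # Each recursive pass loads one more level of children, so eagerly
        # loading a whole subtree issues roughly one query per level of tree
        # depth rather than one query per node.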
Node.filter(:parent_id=>id_map.keys.sort).exclude(:id=>:parent_id).eager(:descendants).all do |node| # Get the parent from the identity map parent = id_map[node.parent_id] # Set the child's parent association to the parent node.associations[:parent] = parent # Add the child association to the array of children in the parent parent.associations[:children] << node end end) end Node.insert(:parent_id=>1) Node.insert(:parent_id=>1) Node.insert(:parent_id=>1) Node.insert(:parent_id=>2) Node.insert(:parent_id=>4) Node.insert(:parent_id=>5) Node.insert(:parent_id=>6) end after(:all) do DB.drop_table :nodes Object.send(:remove_const, :Node) end it "#descendants should get all descendants in one call" do nodes = Node.filter(:id=>1).eager(:descendants).all nodes.length.must_equal 1 node = nodes.first node.pk.must_equal 1 node.children.length.must_equal 2 node.children.collect{|x| x.pk}.sort.must_equal [2, 3] node.children.collect{|x| x.parent}.must_equal [node, node] node = nodes.first.children.find{|x| x.pk == 2} node.children.length.must_equal 1 node.children.first.pk.must_equal 4 node.children.first.parent.must_equal node node = node.children.first node.children.length.must_equal 1 node.children.first.pk.must_equal 5 node.children.first.parent.must_equal node node = node.children.first node.children.length.must_equal 1 node.children.first.pk.must_equal 6 node.children.first.parent.must_equal node node = node.children.first node.children.length.must_equal 1 node.children.first.pk.must_equal 7 node.children.first.parent.must_equal node end it "#ancestors should get all ancestors in one call" do nodes = Node.filter(:id=>[7,3]).order(:id).eager(:ancestors).all nodes.length.must_equal 2 nodes.collect{|x| x.pk}.must_equal [3, 7] nodes.first.parent.pk.must_equal 1 nodes.first.parent.parent.must_be_nil node = nodes.last node.parent.pk.must_equal 6 node = node.parent node.parent.pk.must_equal 5 node = node.parent node.parent.pk.must_equal 4 node = node.parent node.parent.pk.must_equal 2 node = node.parent node.parent.pk.must_equal 1 node.parent.parent.must_be_nil end end describe "Association Extensions" do before do module ::FindOrCreate def find_or_create(vals) first(vals) || model.create(vals.merge(:author_id=>model_object.pk)) end def find_or_create_by_name(name) first(:name=>name) || model.create(:name=>name, :author_id=>model_object.pk) end end DB.instance_variable_get(:@schemas).clear DB.create_table!(:authors) do primary_key :id end class ::Author < Sequel::Model one_to_many :authorships, :extend=>FindOrCreate end DB.create_table!(:authorships) do primary_key :id foreign_key :author_id, :authors String :name end class ::Authorship < Sequel::Model many_to_one :author end @author = Author.create end after do DB.drop_table :authorships, :authors Object.send(:remove_const, :Author) Object.send(:remove_const, :Authorship) end it "should allow methods to be called on the dataset method" do Authorship.count.must_equal 0 authorship = @author.authorships_dataset.find_or_create_by_name('Bob') Authorship.count.must_equal 1 Authorship.first.must_equal authorship authorship.name.must_equal 'Bob' authorship.author_id.must_equal @author.id @author.authorships_dataset.find_or_create_by_name('Bob').must_equal authorship Authorship.count.must_equal 1 authorship2 = @author.authorships_dataset.find_or_create(:name=>'Jim') Authorship.count.must_equal 2 Authorship.order(:name).map(:name).must_equal ['Bob', 'Jim'] authorship2.name.must_equal 'Jim' authorship2.author_id.must_equal @author.id 
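    # calling find_or_create again with the same values should find the
    # existing row rather than creating a duplicate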
@author.authorships_dataset.find_or_create(:name=>'Jim').must_equal authorship2 end end describe "has_many :through has_many and has_one :through belongs_to" do before(:all) do DB.instance_variable_get(:@schemas).clear DB.create_table!(:firms) do primary_key :id end class ::Firm < Sequel::Model one_to_many :clients one_to_many :invoices, :read_only=>true, \ :dataset=>proc{Invoice.eager_graph(:client).filter(Sequel[:client][:firm_id]=>pk)}, \ :after_load=>(proc do |firm, invs| invs.each do |inv| inv.client.associations[:firm] = inv.associations[:firm] = firm end end), \ :eager_loader=>(proc do |eo| id_map = eo[:id_map] eo[:rows].each{|firm| firm.associations[:invoices] = []} Invoice.eager_graph(:client).filter(Sequel[:client][:firm_id]=>id_map.keys).all do |inv| id_map[inv.client.firm_id].each do |firm| firm.associations[:invoices] << inv end end end) end DB.create_table!(:clients) do primary_key :id foreign_key :firm_id, :firms end class ::Client < Sequel::Model many_to_one :firm one_to_many :invoices end DB.create_table!(:invoices) do primary_key :id foreign_key :client_id, :clients end class ::Invoice < Sequel::Model many_to_one :client many_to_one :firm, :key=>nil, :read_only=>true, \ :dataset=>proc{Firm.eager_graph(:clients).filter(Sequel[:clients][:id]=>client_id)}, \ :after_load=>(proc do |inv, firm| # Delete the cached associations from firm, because it only has the # client with this invoice, instead of all clients of the firm if c = firm.associations.delete(:clients) firm.associations[:invoice_client] = c.first end inv.associations[:client] ||= firm.associations[:invoice_client] end), \ :eager_loader=>(proc do |eo| id_map = {} eo[:rows].each do |inv| inv.associations[:firm] = nil (id_map[inv.client_id] ||= []) << inv end Firm.eager_graph(:clients).filter(Sequel[:clients][:id]=>id_map.keys).all do |firm| # Delete the cached associations from firm, because it only has the # clients related to the invoices being eagerly loaded, instead of all # clients of the firm. 
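        # (eager_graph loads the firms together with the relevant clients in
        # a single JOIN query, so both sides of the association can be wired
        # up in one pass below.)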
firm.associations[:clients].each do |client| id_map[client.pk].each do |inv| inv.associations[:firm] = firm inv.associations[:client] = client end end end end) end @firm1 = Firm.create @firm2 = Firm.create @client1 = Client.create(:firm => @firm1) @client2 = Client.create(:firm => @firm1) @client3 = Client.create(:firm => @firm2) @invoice1 = Invoice.create(:client => @client1) @invoice2 = Invoice.create(:client => @client1) @invoice3 = Invoice.create(:client => @client2) @invoice4 = Invoice.create(:client => @client3) @invoice5 = Invoice.create(:client => @client3) end after(:all) do DB.drop_table :invoices, :clients, :firms Object.send(:remove_const, :Firm) Object.send(:remove_const, :Client) Object.send(:remove_const, :Invoice) end it "should return has_many :through has_many records for a single object" do invs = @firm1.invoices.sort_by{|x| x.pk} invs.must_equal [@invoice1, @invoice2, @invoice3] invs[0].client.must_equal @client1 invs[1].client.must_equal @client1 invs[2].client.must_equal @client2 invs.collect{|i| i.firm}.must_equal [@firm1, @firm1, @firm1] invs.collect{|i| i.client.firm}.must_equal [@firm1, @firm1, @firm1] end it "should eagerly load has_many :through has_many records for multiple objects" do firms = Firm.order(:id).eager(:invoices).all firms.must_equal [@firm1, @firm2] firm1, firm2 = firms invs1 = firm1.invoices.sort_by{|x| x.pk} invs2 = firm2.invoices.sort_by{|x| x.pk} invs1.must_equal [@invoice1, @invoice2, @invoice3] invs2.must_equal [@invoice4, @invoice5] invs1[0].client.must_equal @client1 invs1[1].client.must_equal @client1 invs1[2].client.must_equal @client2 invs2[0].client.must_equal @client3 invs2[1].client.must_equal @client3 invs1.collect{|i| i.firm}.must_equal [@firm1, @firm1, @firm1] invs2.collect{|i| i.firm}.must_equal [@firm2, @firm2] invs1.collect{|i| i.client.firm}.must_equal [@firm1, @firm1, @firm1] invs2.collect{|i| i.client.firm}.must_equal [@firm2, @firm2] end it "should return has_one :through belongs_to records for a single object" do firm = @invoice1.firm firm.must_equal @firm1 @invoice1.client.must_equal @client1 @invoice1.client.firm.must_equal @firm1 firm.associations[:clients].must_be_nil end it "should eagerly load has_one :through belongs_to records for multiple objects" do invs = Invoice.order(:id).eager(:firm).all invs.must_equal [@invoice1, @invoice2, @invoice3, @invoice4, @invoice5] invs[0].firm.must_equal @firm1 invs[0].client.must_equal @client1 invs[0].client.firm.must_equal @firm1 invs[0].firm.associations[:clients].must_be_nil invs[1].firm.must_equal @firm1 invs[1].client.must_equal @client1 invs[1].client.firm.must_equal @firm1 invs[1].firm.associations[:clients].must_be_nil invs[2].firm.must_equal @firm1 invs[2].client.must_equal @client2 invs[2].client.firm.must_equal @firm1 invs[2].firm.associations[:clients].must_be_nil invs[3].firm.must_equal @firm2 invs[3].client.must_equal @client3 invs[3].client.firm.must_equal @firm2 invs[3].firm.associations[:clients].must_be_nil invs[4].firm.must_equal @firm2 invs[4].client.must_equal @client3 invs[4].client.firm.must_equal @firm2 invs[4].firm.associations[:clients].must_be_nil end end describe "Polymorphic Associations" do before(:all) do DB.instance_variable_get(:@schemas).clear DB.create_table!(:assets) do primary_key :id Integer :attachable_id String :attachable_type end class ::Asset < Sequel::Model m = method(:constantize) many_to_one :attachable, :reciprocal=>:assets, :reciprocal_type=>:one_to_many, :setter=>(proc do |attachable| self[:attachable_id] = (attachable.pk if 
describe "Polymorphic Associations" do
  before(:all) do
    DB.instance_variable_get(:@schemas).clear
    DB.create_table!(:assets) do
      primary_key :id
      Integer :attachable_id
      String :attachable_type
    end
    class ::Asset < Sequel::Model
      m = method(:constantize)
      many_to_one :attachable, :reciprocal=>:assets, :reciprocal_type=>:one_to_many,
        :setter=>(proc do |attachable|
          self[:attachable_id] = (attachable.pk if attachable)
          self[:attachable_type] = (attachable.class.name if attachable)
        end),
        :dataset=>(proc do
          klass = m.call(attachable_type)
          klass.where(klass.primary_key=>attachable_id)
        end),
        :eager_loader=>(proc do |eo|
          id_map = {}
          eo[:rows].each do |asset|
            asset.associations[:attachable] = nil
            ((id_map[asset.attachable_type] ||= {})[asset.attachable_id] ||= []) << asset
          end
          id_map.each do |klass_name, idmap|
            klass = m.call(klass_name)
            klass.where(klass.primary_key=>idmap.keys).all do |attach|
              idmap[attach.pk].each do |asset|
                asset.associations[:attachable] = attach
              end
            end
          end
        end)
    end

    DB.create_table!(:posts) do
      primary_key :id
    end
    class ::Post < Sequel::Model
      one_to_many :assets, :key=>:attachable_id, :reciprocal=>:attachable, :conditions=>{:attachable_type=>'Post'},
        :adder=>proc{|asset| asset.update(:attachable_id=>pk, :attachable_type=>'Post')},
        :remover=>proc{|asset| asset.update(:attachable_id=>nil, :attachable_type=>nil)},
        :clearer=>proc{assets_dataset.update(:attachable_id=>nil, :attachable_type=>nil)}
    end

    DB.create_table!(:notes) do
      primary_key :id
    end
    class ::Note < Sequel::Model
      one_to_many :assets, :key=>:attachable_id, :reciprocal=>:attachable, :conditions=>{:attachable_type=>'Note'},
        :adder=>proc{|asset| asset.update(:attachable_id=>pk, :attachable_type=>'Note')},
        :remover=>proc{|asset| asset.update(:attachable_id=>nil, :attachable_type=>nil)},
        :clearer=>proc{assets_dataset.update(:attachable_id=>nil, :attachable_type=>nil)}
    end
  end
  before do
    [:assets, :posts, :notes].each{|t| DB[t].delete}
    @post = Post.create
    Note.create
    @note = Note.create
    @asset1 = Asset.create(:attachable=>@post)
    @asset2 = Asset.create(:attachable=>@note)
    @asset1.associations.clear
    @asset2.associations.clear
  end
  after(:all) do
    DB.drop_table :assets, :posts, :notes
    Object.send(:remove_const, :Asset)
    Object.send(:remove_const, :Post)
    Object.send(:remove_const, :Note)
  end

  it "should load the correct associated object for a single object" do
    @asset1.attachable.must_equal @post
    @asset2.attachable.must_equal @note
  end

  it "should eagerly load the correct associated object for a group of objects" do
    assets = Asset.order(:id).eager(:attachable).all
    assets.must_equal [@asset1, @asset2]
    assets[0].attachable.must_equal @post
    assets[1].attachable.must_equal @note
  end

  it "should set items correctly" do
    @asset1.attachable = @note
    @asset2.attachable = @post
    @asset1.attachable.must_equal @note
    @asset1.attachable_id.must_equal @note.pk
    @asset1.attachable_type.must_equal 'Note'
    @asset2.attachable.must_equal @post
    @asset2.attachable_id.must_equal @post.pk
    @asset2.attachable_type.must_equal 'Post'
    @asset1.attachable = nil
    @asset1.attachable.must_be_nil
    @asset1.attachable_id.must_be_nil
    @asset1.attachable_type.must_be_nil
  end

  it "should add items correctly" do
    @post.assets.must_equal [@asset1]
    @post.add_asset(@asset2)
    @post.assets.must_equal [@asset1, @asset2]
    @asset2.attachable.must_equal @post
    @asset2.attachable_id.must_equal @post.pk
    @asset2.attachable_type.must_equal 'Post'
  end

  it "should remove items correctly" do
    @note.assets.must_equal [@asset2]
    @note.remove_asset(@asset2)
    @note.assets.must_equal []
    @asset2.attachable.must_be_nil
    @asset2.attachable_id.must_be_nil
    @asset2.attachable_type.must_be_nil
  end

  it "should remove all items correctly" do
    @post.remove_all_assets
    @note.remove_all_assets
    @asset1.reload.attachable.must_be_nil
    @asset2.reload.attachable.must_be_nil
  end
end
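# A minimal sketch of the lookup the polymorphic :dataset proc above
# performs: the type column names the owning class and the id column holds
# its primary key value (constantize here stands in for whatever helper
# resolves the class name, captured as m in the setup above):
#
#   klass = constantize(asset.attachable_type)           # e.g. Post
#   klass.where(klass.primary_key=>asset.attachable_id)  # the owner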
describe "many_to_one/one_to_many not referencing primary key" do
  before(:all) do
    DB.instance_variable_get(:@schemas).clear
    DB.create_table!(:clients) do
      primary_key :id
      String :name
    end
    class ::Client < Sequel::Model
      one_to_many :invoices, :reciprocal=>:client,
        :adder=>(proc do |invoice|
          invoice.client_name = name
          invoice.save
        end),
        :remover=>(proc do |invoice|
          invoice.client_name = nil
          invoice.save
        end),
        :clearer=>proc{invoices_dataset.update(:client_name=>nil)},
        :dataset=>proc{Invoice.filter(:client_name=>name)},
        :eager_loader=>(proc do |eo|
          id_map = {}
          eo[:rows].each do |client|
            id_map[client.name] = client
            client.associations[:invoices] = []
          end
          Invoice.filter(:client_name=>id_map.keys.sort).all do |inv|
            inv.associations[:client] = client = id_map[inv.client_name]
            client.associations[:invoices] << inv
          end
        end)
    end

    DB.create_table!(:invoices) do
      primary_key :id
      String :client_name
    end
    class ::Invoice < Sequel::Model
      many_to_one :client, :key=>:client_name,
        :setter=>proc{|client| self.client_name = (client.name if client)},
        :dataset=>proc{Client.filter(:name=>client_name)},
        :eager_loader=>(proc do |eo|
          id_map = eo[:id_map]
          eo[:rows].each{|inv| inv.associations[:client] = nil}
          Client.filter(:name=>id_map.keys).all do |client|
            id_map[client.name].each{|inv| inv.associations[:client] = client}
          end
        end)
    end
  end
  before do
    Client.dataset.delete
    Invoice.dataset.delete
    @client1 = Client.create(:name=>'X')
    @client2 = Client.create(:name=>'Y')
    @invoice1 = Invoice.create(:client_name=>'X')
    @invoice2 = Invoice.create(:client_name=>'X')
  end
  after(:all) do
    DB.drop_table :invoices, :clients
    Object.send(:remove_const, :Client)
    Object.send(:remove_const, :Invoice)
  end

  it "should load all associated one_to_many objects for a single object" do
    invs = @client1.invoices
    invs.sort_by{|x| x.pk}.must_equal [@invoice1, @invoice2]
    invs[0].client.must_equal @client1
    invs[1].client.must_equal @client1
  end

  it "should load the associated many_to_one object for a single object" do
    client = @invoice1.client
    client.must_equal @client1
  end

  it "should eagerly load all associated one_to_many objects for a group of objects" do
    clients = Client.order(:id).eager(:invoices).all
    clients.must_equal [@client1, @client2]
    clients[1].invoices.must_equal []
    invs = clients[0].invoices.sort_by{|x| x.pk}
    invs.must_equal [@invoice1, @invoice2]
    invs[0].client.must_equal @client1
    invs[1].client.must_equal @client1
  end

  it "should eagerly load the associated many_to_one object for a group of objects" do
    invoices = Invoice.order(:id).eager(:client).all
    invoices.must_equal [@invoice1, @invoice2]
    invoices[0].client.must_equal @client1
    invoices[1].client.must_equal @client1
  end

  it "should set the associated object correctly" do
    @invoice1.client = @client2
    @invoice1.client.must_equal @client2
    @invoice1.client_name.must_equal 'Y'
    @invoice1.client = nil
    @invoice1.client_name.must_be_nil
  end

  it "should add the associated object correctly" do
    @client2.invoices.must_equal []
    @client2.add_invoice(@invoice1)
    @client2.invoices.must_equal [@invoice1]
    @invoice1.client_name.must_equal 'Y'
    @invoice1.client = nil
    @invoice1.client_name.must_be_nil
  end

  it "should remove the associated object correctly" do
    invs = @client1.invoices.sort_by{|x| x.pk}
    invs.must_equal [@invoice1, @invoice2]
    @client1.remove_invoice(@invoice1)
    @client1.invoices.must_equal [@invoice2]
    @invoice1.client_name.must_be_nil
    @invoice1.client.must_be_nil
  end

  it "should remove all associated objects correctly" do
    @client1.remove_all_invoices
    @invoice1.refresh.client.must_be_nil
    @invoice1.client_name.must_be_nil
    @invoice2.refresh.client.must_be_nil
    @invoice2.client_name.must_be_nil
  end
end
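# The associations above join through clients.name rather than a foreign key
# to clients.id. A rough sketch of the queries the custom :dataset procs
# issue, assuming a client named 'X':
#
#   Invoice.where(:client_name=>'X')   # client.invoices
#   Client.where(:name=>'X')           # invoice.client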
describe "statistics associations" do
  before(:all) do
    DB.create_table!(:projects) do
      primary_key :id
      String :name
    end
    class ::Project < Sequel::Model
      many_to_one :ticket_hours, :read_only=>true, :key=>:id, :class=>:Ticket,
        :dataset=>proc{Ticket.filter(:project_id=>id).select{sum(hours).as(hours)}},
        :eager_loader=>(proc do |eo|
          eo[:rows].each{|p| p.associations[:ticket_hours] = nil}
          Ticket.filter(:project_id=>eo[:id_map].keys).
            select_group(:project_id).
            select_append{sum(hours).as(hours)}.
            all do |t|
              p = eo[:id_map][t.values.delete(:project_id)].first
              p.associations[:ticket_hours] = t
            end
        end)
      def ticket_hours
        if s = super
          s[:hours]
        end
      end
    end

    DB.create_table!(:tickets) do
      primary_key :id
      foreign_key :project_id, :projects
      Integer :hours
    end
    class ::Ticket < Sequel::Model
      many_to_one :project
    end

    @project1 = Project.create(:name=>'X')
    @project2 = Project.create(:name=>'Y')
    @ticket1 = Ticket.create(:project=>@project1, :hours=>1)
    @ticket2 = Ticket.create(:project=>@project1, :hours=>10)
    @ticket3 = Ticket.create(:project=>@project2, :hours=>2)
    @ticket4 = Ticket.create(:project=>@project2, :hours=>20)
  end
  after(:all) do
    DB.drop_table :tickets, :projects
    Object.send(:remove_const, :Project)
    Object.send(:remove_const, :Ticket)
  end

  it "should give the correct sum of ticket hours for each project" do
    @project1.ticket_hours.to_i.must_equal 11
    @project2.ticket_hours.to_i.must_equal 22
  end

  it "should give the correct sum of ticket hours for each project when eager loading" do
    p1, p2 = Project.order(:name).eager(:ticket_hours).all
    p1.ticket_hours.to_i.must_equal 11
    p2.ticket_hours.to_i.must_equal 22
  end
end

describe "one to one associations" do
  before(:all) do
    DB.create_table!(:books) do
      primary_key :id
    end
    class ::Book < Sequel::Model
      one_to_one :first_page, :class=>:Page, :conditions=>{:page_number=>1}, :reciprocal=>nil
      one_to_one :second_page, :class=>:Page, :conditions=>{:page_number=>2}, :reciprocal=>nil
    end

    DB.create_table!(:pages) do
      primary_key :id
      foreign_key :book_id, :books
      Integer :page_number
    end
    class ::Page < Sequel::Model
      many_to_one :book, :reciprocal=>nil
    end

    @book1 = Book.create
    @book2 = Book.create
    @page1 = Page.create(:book=>@book1, :page_number=>1)
    @page2 = Page.create(:book=>@book1, :page_number=>2)
    @page3 = Page.create(:book=>@book2, :page_number=>1)
    @page4 = Page.create(:book=>@book2, :page_number=>2)
  end
  after(:all) do
    DB.drop_table :pages, :books
    Object.send(:remove_const, :Book)
    Object.send(:remove_const, :Page)
  end

  it "should be eager loadable" do
    bk1, bk2 = Book.filter(Sequel[:books][:id]=>[1,2]).eager(:first_page).all
    bk1.first_page.must_equal @page1
    bk2.first_page.must_equal @page3
  end

  it "should be eager graphable" do
    bk1, bk2 = Book.filter(Sequel[:books][:id]=>[1,2]).eager_graph(:first_page).all
    bk1.first_page.must_equal @page1
    bk2.first_page.must_equal @page3
  end

  it "should be eager graphable two at once" do
    bk1, bk2 = Book.filter(Sequel[:books][:id]=>[1,2]).eager_graph(:first_page, :second_page).all
    bk1.first_page.must_equal @page1
    bk1.second_page.must_equal @page2
    bk2.first_page.must_equal @page3
    bk2.second_page.must_equal @page4
  end
end
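# A rough sketch of the query a one_to_one association with :conditions
# issues for Book#first_page above; the condition is merged into the
# association dataset, so only the matching page is returned:
#
#   book.first_page_dataset.sql
#   # => roughly: SELECT * FROM pages WHERE ((page_number = 1) AND (book_id = 1)) LIMIT 1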
sequel-5.63.0/spec/integration/migrator_test.rb

require_relative "spec_helper"

Sequel.extension :migration

describe Sequel::Migrator do
  before do
    @db = DB
    @m = Sequel::Migrator
  end
  after do
    @db.drop_table?(:schema_info, :schema_migrations, :sm1111, :sm1122, :sm2222, :sm2233, :sm3333, :sm11111, :sm22222, :a, :b, :c, :d)
  end

  it "should be able to migrate up and down all the way successfully" do
    @dir = 'spec/files/integer_migrations'
    @m.apply(@db, @dir)
    [:schema_info, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_info].get(:version).must_equal 3
    @m.apply(@db, @dir, 0)
    [:sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_info].get(:version).must_equal 0
  end

  it "should be able to migrate up and down to specific versions successfully" do
    @dir = 'spec/files/integer_migrations'
    @m.apply(@db, @dir, 2)
    [:schema_info, :sm1111, :sm2222].each{|n| @db.table_exists?(n).must_equal true}
    @db.table_exists?(:sm3333).must_equal false
    @db[:schema_info].get(:version).must_equal 2
    @m.apply(@db, @dir, 1)
    [:sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db.table_exists?(:sm1111).must_equal true
    @db[:schema_info].get(:version).must_equal 1
  end

  it "should correctly set migration version to the last successful migration if the migration raises an error when migrating up" do
    @dir = 'spec/files/bad_up_migration'
    proc{@m.apply(@db, @dir)}.must_raise Sequel::DatabaseError
    [:schema_info, :sm11111].each{|n| @db.table_exists?(n).must_equal true}
    @db.table_exists?(:sm22222).must_equal false
    @db[:schema_info].get(:version).must_equal 1
    @m.apply(@db, @dir, 0)
    [:sm11111, :sm22222].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_info].get(:version).must_equal 0
  end

  it "should correctly set migration version to the last successful migration if the migration raises an error when migrating down" do
    @dir = 'spec/files/bad_down_migration'
    @m.apply(@db, @dir)
    [:schema_info, :sm11111, :sm22222].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_info].get(:version).must_equal 2
    proc{@m.apply(@db, @dir, 0)}.must_raise Sequel::DatabaseError
    [:sm22222].each{|n| @db.table_exists?(n).must_equal false}
    @db.table_exists?(:sm11111).must_equal true
    @db[:schema_info].get(:version).must_equal 1
  end

  it "should handle migrating up or down all the way with timestamped migrations" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    [:schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb 1273253853_3_create_users.rb'
    @m.apply(@db, @dir, 0)
    [:sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal []
  end

  it "should handle migrating up or down to specific timestamps with timestamped migrations" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir, 1273253851)
    [:schema_migrations, :sm1111, :sm2222].each{|n| @db.table_exists?(n).must_equal true}
    @db.table_exists?(:sm3333).must_equal false
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb'
    @m.apply(@db, @dir, 1273253849)
    [:sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db.table_exists?(:sm1111).must_equal true
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb'
  end

  it "should apply all missing files when migrating up with timestamped migrations" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    @dir = 'spec/files/interleaved_timestamped_migrations'
    @m.apply(@db, @dir)
    [:schema_migrations, :sm1111, :sm1122, :sm2222, :sm2233, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253850_create_artists.rb 1273253851_create_nodes.rb 1273253852_create_albums.rb 1273253853_3_create_users.rb'
  end

  it "should not apply down action to migrations where up action hasn't been applied" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    @dir = 'spec/files/interleaved_timestamped_migrations'
    @m.apply(@db, @dir, 0)
    [:sm1111, :sm1122, :sm2222, :sm2233, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal []
  end

  it "should handle updating to a specific timestamp when interleaving migrations with timestamps" do
    @dir = 'spec/files/timestamped_migrations'
    @m.apply(@db, @dir)
    @dir = 'spec/files/interleaved_timestamped_migrations'
    @m.apply(@db, @dir, 1273253851)
    [:schema_migrations, :sm1111, :sm1122, :sm2222].each{|n| @db.table_exists?(n).must_equal true}
    [:sm2233, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253850_create_artists.rb 1273253851_create_nodes.rb'
  end

  it "should correctly update schema_migrations table when an error occurs when migrating up or down using timestamped migrations" do
    @dir = 'spec/files/bad_timestamped_migrations'
    proc{@m.apply(@db, @dir)}.must_raise Sequel::DatabaseError
    [:schema_migrations, :sm1111, :sm2222].each{|n| @db.table_exists?(n).must_equal true}
    @db.table_exists?(:sm3333).must_equal false
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253851_create_nodes.rb'
    proc{@m.apply(@db, @dir, 0)}.must_raise Sequel::DatabaseError
    [:sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db.table_exists?(:sm1111).must_equal true
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb'
  end
  it "should handle multiple migrations with the same timestamp correctly" do
    @dir = 'spec/files/duplicate_timestamped_migrations'
    @m.apply(@db, @dir)
    [:schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253853_create_nodes.rb 1273253853_create_users.rb'
    @m.apply(@db, @dir, 1273253853)
    [:sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb 1273253853_create_nodes.rb 1273253853_create_users.rb'
    @m.apply(@db, @dir, 1273253849)
    [:sm1111].each{|n| @db.table_exists?(n).must_equal true}
    [:sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'1273253849_create_sessions.rb'
    @m.apply(@db, @dir, 1273253848)
    [:sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal []
  end

  it "should convert schema_info table to schema_migrations table" do
    @dir = 'spec/files/integer_migrations'
    @m.apply(@db, @dir)
    [:schema_info, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}

    @dir = 'spec/files/convert_to_timestamp_migrations'
    @m.apply(@db, @dir)
    [:schema_info, :sm1111, :sm2222, :sm3333, :schema_migrations, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'001_create_sessions.rb 002_create_nodes.rb 003_3_create_users.rb 1273253850_create_artists.rb 1273253852_create_albums.rb'

    @m.apply(@db, @dir, 4)
    [:schema_info, :schema_migrations, :sm1111, :sm2222, :sm3333].each{|n| @db.table_exists?(n).must_equal true}
    [:sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'001_create_sessions.rb 002_create_nodes.rb 003_3_create_users.rb'

    @m.apply(@db, @dir, 0)
    [:schema_info, :schema_migrations].each{|n| @db.table_exists?(n).must_equal true}
    [:sm1111, :sm2222, :sm3333, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal []
  end

  it "should handle unapplied migrations when migrating schema_info table to schema_migrations table" do
    @dir = 'spec/files/integer_migrations'
    @m.apply(@db, @dir, 2)
    [:schema_info, :sm1111, :sm2222].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :sm3333, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}

    @dir = 'spec/files/convert_to_timestamp_migrations'
    @m.apply(@db, @dir, 1273253850)
    [:schema_info, :sm1111, :sm2222, :sm3333, :schema_migrations, :sm1122].each{|n| @db.table_exists?(n).must_equal true}
    [:sm2233].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'001_create_sessions.rb 002_create_nodes.rb 003_3_create_users.rb 1273253850_create_artists.rb'
  end

  it "should handle unapplied migrations when migrating schema_info table to schema_migrations table and target is less than last integer migration version" do
    @dir = 'spec/files/integer_migrations'
    @m.apply(@db, @dir, 1)
    [:schema_info, :sm1111].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :sm2222, :sm3333, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}

    @dir = 'spec/files/convert_to_timestamp_migrations'
    @m.apply(@db, @dir, 2)
    [:schema_info, :sm1111, :sm2222, :schema_migrations].each{|n| @db.table_exists?(n).must_equal true}
    [:sm3333, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal false}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'001_create_sessions.rb 002_create_nodes.rb'

    @m.apply(@db, @dir)
    [:schema_info, :sm1111, :sm2222, :schema_migrations, :sm3333, :sm1122, :sm2233].each{|n| @db.table_exists?(n).must_equal true}
    @db[:schema_migrations].select_order_map(:filename).must_equal %w'001_create_sessions.rb 002_create_nodes.rb 003_3_create_users.rb 1273253850_create_artists.rb 1273253852_create_albums.rb'
  end

  it "should handle reversible migrations" do
    @dir = 'spec/files/reversible_migrations'
    @db.drop_table?(:a, :b)
    @m.apply(@db, @dir, 1)
    [:schema_info, :a].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :b].each{|n| @db.table_exists?(n).must_equal false}
    @db[:a].columns.must_equal [:a]

    @m.apply(@db, @dir, 2)
    [:schema_info, :a].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :b].each{|n| @db.table_exists?(n).must_equal false}
    @db[:a].columns.must_equal [:a, :b]

    @m.apply(@db, @dir, 3)
    [:schema_info, :a].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :b].each{|n| @db.table_exists?(n).must_equal false}
    @db[:a].columns.must_equal [:a, :c]

    @m.apply(@db, @dir, 4)
    [:schema_info, :b].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :a].each{|n| @db.table_exists?(n).must_equal false}
    @db[:b].columns.must_equal [:a, :c]

    @m.apply(@db, @dir, 5)
    [:schema_info, :b].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :a].each{|n| @db.table_exists?(n).must_equal false}
    @db[:b].columns.must_equal [:a, :c, :e]

    if @db.supports_foreign_key_parsing?
      @m.apply(@db, @dir, 6)
      [:schema_info, :b, :c].each{|n| @db.table_exists?(n).must_equal true}
      [:schema_migrations, :a].each{|n| @db.table_exists?(n).must_equal false}
      @db[:b].columns.must_equal [:a, :c, :e, :f]

      @m.apply(@db, @dir, 7)
      [:schema_info, :b, :c, :d].each{|n| @db.table_exists?(n).must_equal true}
      [:schema_migrations, :a].each{|n| @db.table_exists?(n).must_equal false}
      @db[:b].columns.must_equal [:a, :c, :e, :f, :g]

      @m.apply(@db, @dir, 6)
      [:schema_info, :b, :c].each{|n| @db.table_exists?(n).must_equal true}
      [:schema_migrations, :a, :d].each{|n| @db.table_exists?(n).must_equal false}
      @db[:b].columns.must_equal [:a, :c, :e, :f]
    end

    @m.apply(@db, @dir, 5)
    [:schema_info, :b].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :a, :c].each{|n| @db.table_exists?(n).must_equal false}
    @db[:b].columns.must_equal [:a, :c, :e]

    @m.apply(@db, @dir, 4)
    [:schema_info, :b].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :a].each{|n| @db.table_exists?(n).must_equal false}
    @db[:b].columns.must_equal [:a, :c]

    @m.apply(@db, @dir, 3)
    [:schema_info, :a].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :b].each{|n| @db.table_exists?(n).must_equal false}
    @db[:a].columns.must_equal [:a, :c]

    @m.apply(@db, @dir, 2)
    [:schema_info, :a].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :b].each{|n| @db.table_exists?(n).must_equal false}
    @db[:a].columns.must_equal [:a, :b]

    @m.apply(@db, @dir, 1)
    [:schema_info, :a].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :b].each{|n| @db.table_exists?(n).must_equal false}
    @db[:a].columns.must_equal [:a]

    @m.apply(@db, @dir, 0)
    [:schema_info].each{|n| @db.table_exists?(n).must_equal true}
    [:schema_migrations, :a, :b].each{|n| @db.table_exists?(n).must_equal false}
  end
end
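# The reversible migrations exercised above can be written with a change
# block, which Sequel applies as written when migrating up and reverses
# automatically when migrating down (for reversible operations such as
# create_table and add_column), roughly:
#
#   Sequel.migration do
#     change{create_table(:a){Integer :a}}
#   end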
support" do before do @db = DB @db.create_table!(:items, :engine=>:InnoDB) do primary_key :id String :name end class ::Item < Sequel::Model(@db) end end after do @db.drop_table?(:items) Object.send(:remove_const, :Item) end it ".find should return first matching item" do Item.all.must_equal [] Item.find(:name=>'J').must_be_nil Item.create(:name=>'J') Item.find(:name=>'J').must_equal Item.load(:id=>1, :name=>'J') end it ".finder should create method that returns first matching item" do def Item.by_name(name) where(:name=>name) end Item.plugin :finder Item.finder :by_name Item.first_by_name('J').must_be_nil Item.create(:name=>'J') Item.first_by_name('J').must_equal Item.load(:id=>1, :name=>'J') Item.first_by_name(['J', 'K']).must_equal Item.load(:id=>1, :name=>'J') end it ".prepared_finder should create method that returns first matching item" do def Item.by_name(name) where(:name=>name) end Item.plugin :finder Item.prepared_finder :by_name Item.first_by_name('J').must_be_nil Item.create(:name=>'J') Item.first_by_name('J').must_equal Item.load(:id=>1, :name=>'J') end it ".find_or_create should return first matching item, or create it if it doesn't exist" do Item.all.must_equal [] Item.find_or_create(:name=>'J').must_equal Item.load(:id=>1, :name=>'J') Item.all.must_equal [Item.load(:id=>1, :name=>'J')] Item.find_or_create(:name=>'J').must_equal Item.load(:id=>1, :name=>'J') Item.all.must_equal [Item.load(:id=>1, :name=>'J')] end it "should raise an error if the implied database table doesn't exist" do proc do class ::Item::Thing < Sequel::Model end end.must_raise Sequel::Error end it "should not raise an error if the implied database table doesn't exist if require_valid_table is false" do c = Sequel::Model(@db) c.require_valid_table = false class ::Item::Thing < c set_dataset :items end Item.create(:name=>'J') Item::Thing.first.must_equal Item::Thing.load(:id=>1, :name=>'J') end it "should create accessors for all table columns even if all dataset columns aren't selected" do c = Class.new(Sequel::Model(@db[:items].select(:id))) o = c.new o.name = 'A' o.save.must_equal c.load(:id=>1) c.select_map(:name).must_equal ['A'] end it "should work correctly when a dataset restricts the colums it selects" do class ::Item::Thing < Sequel::Model(@db[:items].select(:name)) end Item.create(:name=>'J') Item::Thing.first.must_equal Item::Thing.load(:name=>'J') end it "#delete should delete items correctly" do i = Item.create(:name=>'J') Item.count.must_equal 1 i.delete Item.count.must_equal 0 end it "#save should return nil if raise_on_save_failure is false and save isn't successful" do i = Item.new(:name=>'J') i.use_transactions = true def i.after_save raise Sequel::Rollback end i.save.must_be_nil end it "#exists? 
  it "#exists? should return whether the item is still in the database" do
    i = Item.create(:name=>'J')
    i.exists?.must_equal true
    Item.dataset.delete
    i.exists?.must_equal false
  end

  it "#save should only update specified columns when saving" do
    @db.create_table!(:items) do
      primary_key :id
      String :name
      Integer :num
    end
    Item.dataset = Item.dataset
    i = Item.create(:name=>'J', :num=>1)
    Item.all.must_equal [Item.load(:id=>1, :name=>'J', :num=>1)]
    i.set(:name=>'K', :num=>2)
    i.save(:columns=>:name)
    Item.all.must_equal [Item.load(:id=>1, :name=>'K', :num=>1)]
    i.set(:name=>'L')
    i.save(:columns=>:num)
    Item.all.must_equal [Item.load(:id=>1, :name=>'K', :num=>2)]
  end

  it "#save should check that only a single row is modified, unless require_modification is false" do
    i = Item.create(:name=>'a')
    i.require_modification = true
    i.delete
    proc{i.save}.must_raise(Sequel::NoExistingObject)
    proc{i.delete}.must_raise(Sequel::NoExistingObject)
    i.require_modification = false
    i.save
    i.delete
  end

  it ".to_hash should return a hash keyed on primary key if no argument provided" do
    Item.create(:name=>'J')
    Item.to_hash.must_equal(1=>Item.load(:id=>1, :name=>'J'))
  end

  it ".to_hash should return a hash keyed on argument if one argument provided" do
    Item.create(:name=>'J')
    Item.to_hash(:name).must_equal('J'=>Item.load(:id=>1, :name=>'J'))
  end

  it "should be marshallable before and after saving if marshallable! is called" do
    i = Item.new(:name=>'J')
    s = nil
    i2 = nil
    i.marshallable!
    s = Marshal.dump(i)
    i2 = Marshal.load(s)
    i2.must_equal i

    i.save
    i.marshallable!
    s = Marshal.dump(i)
    i2 = Marshal.load(s)
    i2.must_equal i

    i.save
    i.marshallable!
    s = Marshal.dump(i)
    i2 = Marshal.load(s)
    i2.must_equal i
  end

  it "#lock! should lock records" do
    Item.db.transaction do
      i = Item.create(:name=>'J')
      i.lock!
      i.update(:name=>'K')
    end
  end
end

describe "Sequel::Model with no existing table" do
  it "should raise an error when setting the dataset" do
    db = DB
    db.drop_table?(:items)
    c = Class.new(Sequel::Model)
    proc{c.set_dataset(db[:items])}.must_raise Sequel::Error
    db.transaction do
      c = Class.new(Sequel::Model)
      proc{c.dataset = db[:items]}.must_raise Sequel::Error
      db.get(Sequel.cast(1, Integer)).must_equal 1
    end
  end

  it "should not raise an error when setting the dataset when require_valid_table is false" do
    db = DB
    db.drop_table?(:items)
    c = Class.new(Sequel::Model)
    c.require_valid_table = false
    c.set_dataset(db[:items])
    db.transaction do
      c = Class.new(Sequel::Model)
      c.require_valid_table = false
      c.dataset = db[:items]
      db.get(Sequel.cast(1, Integer)).must_equal 1
    end
  end
end

sequel-5.63.0/spec/integration/plugin_test.rb

require_relative "spec_helper"

describe "Class Table Inheritance Plugin" do
  before(:all) do
    @db = DB
    @db.instance_variable_get(:@schemas).clear
    @db.drop_table?(:staff, :executives, :managers, :employees)
    @db.create_table(:employees) do
      primary_key :id
      String :name
      String :kind
    end
    @db.create_table(:managers) do
      foreign_key :id, :employees, :primary_key=>true
      Integer :num_staff
    end
    @db.create_table(:executives) do
      foreign_key :id, :managers, :primary_key=>true
      Integer :num_managers
    end
    @db.create_table(:staff) do
      foreign_key :id, :employees, :primary_key=>true
      foreign_key :manager_id, :managers
    end
  end
  before do
    [:staff, :executives, :managers, :employees].each{|t| @db[t].delete}
    class ::Employee < Sequel::Model(@db)
      plugin :class_table_inheritance, :key=>:kind, :table_map=>{:Staff=>:staff}
    end
    class ::Manager < Employee
      one_to_many :staff_members, :class=>:Staff
      one_to_one :first_staff_member, :clone=>:staff_members, :order=>:id
    end
    class ::Executive < Manager
    end
    class ::Ceo < Executive
    end
    class ::Staff < Employee
      many_to_one :manager
    end
    class ::Intern < Employee
    end

    @i1 = @db[:employees].insert(:name=>'E', :kind=>'Employee')
    @i2 = @db[:employees].insert(:name=>'S', :kind=>'Staff')
    @i3 = @db[:employees].insert(:name=>'M', :kind=>'Manager')
    @db[:managers].insert(:id=>@i3, :num_staff=>7)
    @i4 = @db[:employees].insert(:name=>'Ex', :kind=>'Executive')
    @db[:managers].insert(:id=>@i4, :num_staff=>5)
    @db[:executives].insert(:id=>@i4, :num_managers=>6)
    @i5 = @db[:employees].insert(:name=>'C', :kind=>'Ceo')
    @db[:managers].insert(:id=>@i5, :num_staff=>2)
    @db[:executives].insert(:id=>@i5, :num_managers=>1)
    @db[:staff].insert(:id=>@i2, :manager_id=>@i4)
    @i6 = @db[:employees].insert(:name=>'I', :kind=>'Intern')
  end
  after do
    [:Intern, :Ceo, :Executive, :Manager, :Staff, :Employee].each{|s| Object.send(:remove_const, s)}
  end
  after(:all) do
    @db.drop_table? :staff, :executives, :managers, :employees
  end

  it "should return rows as subclass instances" do
    Employee.order(:id).all.must_equal [
      Employee.load(:id=>@i1, :name=>'E', :kind=>'Employee'),
      Staff.load(:id=>@i2, :name=>'S', :kind=>'Staff'),
      Manager.load(:id=>@i3, :name=>'M', :kind=>'Manager'),
      Executive.load(:id=>@i4, :name=>'Ex', :kind=>'Executive'),
      Ceo.load(:id=>@i5, :name=>'C', :kind=>'Ceo'),
      Intern.load(:id=>@i6, :name=>'I', :kind=>'Intern'),
    ]
  end

  it "should lazily load columns in subclass tables" do
    Employee[@i2][:manager_id].must_be_nil
    Employee[@i2].manager_id.must_equal @i4
    Employee[@i3][:num_staff].must_be_nil
    Employee[@i3].num_staff.must_equal 7
    Employee[@i4][:num_staff].must_be_nil
    Employee[@i4].num_staff.must_equal 5
    Employee[@i4][:num_managers].must_be_nil
    Employee[@i4].num_managers.must_equal 6
    Employee[@i5][:num_managers].must_be_nil
    Employee[@i5].num_managers.must_equal 1
  end

  it "should eagerly load columns in subclass tables when retrieving multiple objects" do
    a = Employee.order(:id).all
    a[1][:manager_id].must_be_nil
    a[1].manager_id.must_equal @i4
    a[2][:num_staff].must_be_nil
    a[2].num_staff.must_equal 7
    a[3][:num_staff].must_equal 5 # eagerly loaded by previous call
    a[3].num_staff.must_equal 5
    a[3][:num_managers].must_be_nil
    a[3].num_managers.must_equal 6
    a[4][:num_managers].must_equal 1
    a[4].num_managers.must_equal 1
  end

  it "should include schema for columns for tables for ancestor classes" do
    Employee.db_schema.keys.sort_by{|x| x.to_s}.must_equal [:id, :kind, :name]
    Staff.db_schema.keys.sort_by{|x| x.to_s}.must_equal [:id, :kind, :manager_id, :name]
    Manager.db_schema.keys.sort_by{|x| x.to_s}.must_equal [:id, :kind, :name, :num_staff]
    Executive.db_schema.keys.sort_by{|x| x.to_s}.must_equal [:id, :kind, :name, :num_managers, :num_staff]
    Ceo.db_schema.keys.sort_by{|x| x.to_s}.must_equal [:id, :kind, :name, :num_managers, :num_staff]
    Intern.db_schema.keys.sort_by{|x| x.to_s}.must_equal [:id, :kind, :name]
  end

  it "should include columns for tables for ancestor classes" do
    Employee.columns.must_equal [:id, :name, :kind]
    Staff.columns.must_equal [:id, :name, :kind, :manager_id]
    Manager.columns.must_equal [:id, :name, :kind, :num_staff]
    Executive.columns.must_equal [:id, :name, :kind, :num_staff, :num_managers]
    Ceo.columns.must_equal [:id, :name, :kind, :num_staff, :num_managers]
    Intern.columns.must_equal [:id, :name, :kind]
  end

  it "should delete rows from all tables" do
    e = Ceo.first
    i = e.id
    e.staff_members_dataset.destroy
    e.destroy
    @db[:executives][:id=>i].must_be_nil
    @db[:managers][:id=>i].must_be_nil
    @db[:employees][:id=>i].must_be_nil
  end

  it "should handle associations only defined in subclasses" do
    Employee.filter(Sequel[:employees][:id]=>@i2).all.first.manager.id.must_equal @i4
  end

  it "should insert rows into all tables" do
    e = Ceo.create(:name=>'Ex2', :num_managers=>8, :num_staff=>9)
    i = e.id
    @db[:employees][:id=>i].must_equal(:id=>i, :name=>'Ex2', :kind=>'Ceo')
    @db[:managers][:id=>i].must_equal(:id=>i, :num_staff=>9)
    @db[:executives][:id=>i].must_equal(:id=>i, :num_managers=>8)
  end

  it "should update rows in all tables" do
    Executive[:id=>@i4].update(:name=>'Ex2', :num_managers=>8, :num_staff=>9)
    @db[:employees][:id=>@i4].must_equal(:id=>@i4, :name=>'Ex2', :kind=>'Executive')
    @db[:managers][:id=>@i4].must_equal(:id=>@i4, :num_staff=>9)
    @db[:executives][:id=>@i4].must_equal(:id=>@i4, :num_managers=>8)
  end

  it "should handle many_to_one relationships" do
    m = Staff.first.manager
    m.must_equal Manager[@i4]
    m.must_be_kind_of(Executive)
    Staff.first.update(:manager => Manager[@i3])
    Staff.first.manager.must_equal Manager[@i3]
  end

  it "should handle eagerly loading many_to_one relationships" do
    Staff.limit(1).eager(:manager).all.map{|x| x.manager}.must_equal [Manager[@i4]]
  end

  it "should handle eagerly graphing many_to_one relationships" do
    ss = Staff.eager_graph(:manager).all
    ss.must_equal [Staff[@i2]]
    ss.map{|x| x.manager}.must_equal [Manager[@i4]]
  end

  it "should handle one_to_many relationships" do
    Executive.first(:name=>'Ex').staff_members.must_equal [Staff[@i2]]
    i6 = @db[:employees].insert(:name=>'S2', :kind=>'Staff')
    @db[:staff].insert(:id=>i6, :manager_id=>@i4)
    Executive.first(:name=>'Ex').add_staff_member(i6)
    Executive.first(:name=>'Ex').staff_members{|ds| ds.order(:id)}.must_equal [Staff[@i2], Staff[i6]]
  end

  it "should handle one_to_one relationships" do
    Executive.first(:name=>'Ex').first_staff_member.must_equal Staff[@i2]
    i6 = @db[:employees].insert(:name=>'S2', :kind=>'Staff')
    @db[:staff].insert(:id=>i6, :manager_id=>@i4)
    Executive.first(:name=>'Ex').first_staff_member = Staff[i6]
    Executive.first(:name=>'Ex').staff_members.must_equal [Staff[i6]]
  end

  it "should handle eagerly loading one_to_many relationships" do
    Executive.where(:name=>'Ex').eager(:staff_members).first.staff_members.must_equal [Staff[@i2]]
  end

  it "should handle eagerly graphing one_to_many relationships" do
    es = Executive.where(Sequel[:employees][:name]=>'Ex').eager_graph(:staff_members).all
    es.must_equal [Executive[@i4]]
    es.map{|x| x.staff_members}.must_equal [[Staff[@i2]]]
  end
end
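# A condensed sketch of the class_table_inheritance mapping exercised above:
# the employees table holds the shared columns plus the :kind discriminator,
# and each subclass table (managers, executives, staff) adds only its own
# columns, joined to employees by primary key:
#
#   class Employee < Sequel::Model
#     plugin :class_table_inheritance, :key=>:kind
#   end
#   class Manager < Employee; end   # employees JOIN managers
#   class Executive < Manager; end  # employees JOIN managers JOIN executives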
describe "Many Through Many Plugin" do
  before(:all) do
    @db = DB
    @db.instance_variable_get(:@schemas).clear
    @db.drop_table?(:albums_artists, :albums, :artists)
    @db.create_table(:albums) do
      primary_key :id
      String :name
    end
    @db.create_table(:artists) do
      primary_key :id
      String :name
    end
    @db.create_table(:albums_artists) do
      foreign_key :album_id, :albums
      foreign_key :artist_id, :artists
    end
  end
  before do
    [:albums_artists, :albums, :artists].each{|t| @db[t].delete}
    class ::Album < Sequel::Model(@db)
      many_to_many :artists
    end
    class ::Artist < Sequel::Model(@db)
      plugin :many_through_many
    end

    @artist1 = Artist.create(:name=>'1')
    @artist2 = Artist.create(:name=>'2')
    @artist3 = Artist.create(:name=>'3')
    @artist4 = Artist.create(:name=>'4')
    @album1 = Album.create(:name=>'A')
    @album1.add_artist(@artist1)
    @album1.add_artist(@artist2)
    @album2 = Album.create(:name=>'B')
    @album2.add_artist(@artist3)
    @album2.add_artist(@artist4)
    @album3 = Album.create(:name=>'C')
    @album3.add_artist(@artist2)
    @album3.add_artist(@artist3)
    @album4 = Album.create(:name=>'D')
    @album4.add_artist(@artist1)
    @album4.add_artist(@artist4)
  end
  after do
    [:albums_artists, :albums, :artists].each{|t| @db[t].delete}
    [:Album, :Artist].each{|s| Object.send(:remove_const, s)}
  end
  after(:all) do
    @db.drop_table? :albums_artists, :albums, :artists
  end

  def self_join(c)
    c.join(Sequel.as(c.table_name, :b), Array(c.primary_key).zip(Array(c.primary_key))).select_all(c.table_name)
  end

  it "should handle super simple case with 1 join table" do
    Artist.many_through_many :albums, [[:albums_artists, :artist_id, :album_id]]
    Artist[@artist1.id].albums.map{|x| x.name}.sort.must_equal %w'A D'
    Artist[@artist2.id].albums.map{|x| x.name}.sort.must_equal %w'A C'
    Artist[@artist3.id].albums.map{|x| x.name}.sort.must_equal %w'B C'
    Artist[@artist4.id].albums.map{|x| x.name}.sort.must_equal %w'B D'
    Artist[@artist1.id].albums.map{|x| x.name}.sort.must_equal %w'A D'
    Artist[@artist2.id].albums.map{|x| x.name}.sort.must_equal %w'A C'
    Artist[@artist3.id].albums.map{|x| x.name}.sort.must_equal %w'B C'
    Artist[@artist4.id].albums.map{|x| x.name}.sort.must_equal %w'B D'

    Artist.filter(:id=>@artist1.id).eager(:albums).all.map{|x| x.albums.map{|a| a.name}}.flatten.sort.must_equal %w'A D'
    Artist.filter(:id=>@artist2.id).eager(:albums).all.map{|x| x.albums.map{|a| a.name}}.flatten.sort.must_equal %w'A C'
    Artist.filter(:id=>@artist3.id).eager(:albums).all.map{|x| x.albums.map{|a| a.name}}.flatten.sort.must_equal %w'B C'
    Artist.filter(:id=>@artist4.id).eager(:albums).all.map{|x| x.albums.map{|a| a.name}}.flatten.sort.must_equal %w'B D'

    Artist.filter(Sequel[:artists][:id]=>@artist1.id).eager_graph(:albums).all.map{|x| x.albums.map{|a| a.name}}.flatten.sort.must_equal %w'A D'
    Artist.filter(Sequel[:artists][:id]=>@artist2.id).eager_graph(:albums).all.map{|x| x.albums.map{|a| a.name}}.flatten.sort.must_equal %w'A C'
    Artist.filter(Sequel[:artists][:id]=>@artist3.id).eager_graph(:albums).all.map{|x| x.albums.map{|a| a.name}}.flatten.sort.must_equal %w'B C'
    Artist.filter(Sequel[:artists][:id]=>@artist4.id).eager_graph(:albums).all.map{|x| x.albums.map{|a| a.name}}.flatten.sort.must_equal %w'B D'

    Artist.filter(:albums=>@album1).all.map{|a| a.name}.sort.must_equal %w'1 2'
    Artist.filter(:albums=>@album2).all.map{|a| a.name}.sort.must_equal %w'3 4'
    Artist.filter(:albums=>@album3).all.map{|a| a.name}.sort.must_equal %w'2 3'
    Artist.filter(:albums=>@album4).all.map{|a| a.name}.sort.must_equal %w'1 4'
    Artist.exclude(:albums=>@album1).all.map{|a| a.name}.sort.must_equal %w'3 4'
    Artist.exclude(:albums=>@album2).all.map{|a| a.name}.sort.must_equal %w'1 2'
    Artist.exclude(:albums=>@album3).all.map{|a| a.name}.sort.must_equal %w'1 4'
    Artist.exclude(:albums=>@album4).all.map{|a| a.name}.sort.must_equal %w'2 3'
    Artist.filter(:albums=>[@album1, @album3]).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    Artist.filter(:albums=>[@album2, @album4]).all.map{|a| a.name}.sort.must_equal %w'1 3 4'
    Artist.exclude(:albums=>[@album1, @album3]).all.map{|a| a.name}.sort.must_equal %w'4'
    Artist.exclude(:albums=>[@album2, @album4]).all.map{|a| a.name}.sort.must_equal %w'2'
    Artist.filter(:albums=>Album.filter(:id=>[@album1.id, @album3.id])).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    Artist.exclude(:albums=>Album.filter(:id=>[@album1.id, @album3.id])).all.map{|a| a.name}.sort.must_equal %w'4'

    c = self_join(Artist)
    c.filter(:albums=>@album1).all.map{|a| a.name}.sort.must_equal %w'1 2'
    c.filter(:albums=>@album2).all.map{|a| a.name}.sort.must_equal %w'3 4'
    c.filter(:albums=>@album3).all.map{|a| a.name}.sort.must_equal %w'2 3'
    c.filter(:albums=>@album4).all.map{|a| a.name}.sort.must_equal %w'1 4'
    c.exclude(:albums=>@album1).all.map{|a| a.name}.sort.must_equal %w'3 4'
    c.exclude(:albums=>@album2).all.map{|a| a.name}.sort.must_equal %w'1 2'
    c.exclude(:albums=>@album3).all.map{|a| a.name}.sort.must_equal %w'1 4'
    c.exclude(:albums=>@album4).all.map{|a| a.name}.sort.must_equal %w'2 3'
    c.filter(:albums=>[@album1, @album3]).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    c.filter(:albums=>[@album2, @album4]).all.map{|a| a.name}.sort.must_equal %w'1 3 4'
    c.exclude(:albums=>[@album1, @album3]).all.map{|a| a.name}.sort.must_equal %w'4'
    c.exclude(:albums=>[@album2, @album4]).all.map{|a| a.name}.sort.must_equal %w'2'
    c.filter(:albums=>self_join(Album).filter(Sequel[:albums][:id]=>[@album1.id, @album3.id])).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    c.exclude(:albums=>self_join(Album).filter(Sequel[:albums][:id]=>[@album1.id, @album3.id])).all.map{|a| a.name}.sort.must_equal %w'4'
  end

  it "should handle typical case with 3 join tables" do
    Artist.many_through_many :related_artists, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_artists, :album_id, :artist_id]], :class=>Artist, :distinct=>true, :delay_pks=>false
    Artist[@artist1.id].related_artists.map{|x| x.name}.sort.must_equal %w'1 2 4'
    Artist[@artist2.id].related_artists.map{|x| x.name}.sort.must_equal %w'1 2 3'
    Artist[@artist3.id].related_artists.map{|x| x.name}.sort.must_equal %w'2 3 4'
    Artist[@artist4.id].related_artists.map{|x| x.name}.sort.must_equal %w'1 3 4'
    Artist[@artist1.id].related_artists.map{|x| x.name}.sort.must_equal %w'1 2 4'
    Artist[@artist2.id].related_artists.map{|x| x.name}.sort.must_equal %w'1 2 3'
    Artist[@artist3.id].related_artists.map{|x| x.name}.sort.must_equal %w'2 3 4'
    Artist[@artist4.id].related_artists.map{|x| x.name}.sort.must_equal %w'1 3 4'

    Artist.filter(:id=>@artist1.id).eager(:related_artists).all.map{|x| x.related_artists.map{|a| a.name}}.flatten.sort.must_equal %w'1 2 4'
    Artist.filter(:id=>@artist2.id).eager(:related_artists).all.map{|x| x.related_artists.map{|a| a.name}}.flatten.sort.must_equal %w'1 2 3'
    Artist.filter(:id=>@artist3.id).eager(:related_artists).all.map{|x| x.related_artists.map{|a| a.name}}.flatten.sort.must_equal %w'2 3 4'
    Artist.filter(:id=>@artist4.id).eager(:related_artists).all.map{|x| x.related_artists.map{|a| a.name}}.flatten.sort.must_equal %w'1 3 4'

    Artist.filter(Sequel[:artists][:id]=>@artist1.id).eager_graph(:related_artists).all.map{|x| x.related_artists.map{|a| a.name}}.flatten.sort.must_equal %w'1 2 4'
    Artist.filter(Sequel[:artists][:id]=>@artist2.id).eager_graph(:related_artists).all.map{|x| x.related_artists.map{|a| a.name}}.flatten.sort.must_equal %w'1 2 3'
    Artist.filter(Sequel[:artists][:id]=>@artist3.id).eager_graph(:related_artists).all.map{|x| x.related_artists.map{|a| a.name}}.flatten.sort.must_equal %w'2 3 4'
    Artist.filter(Sequel[:artists][:id]=>@artist4.id).eager_graph(:related_artists).all.map{|x| x.related_artists.map{|a| a.name}}.flatten.sort.must_equal %w'1 3 4'

    Artist.filter(:related_artists=>@artist1).all.map{|a| a.name}.sort.must_equal %w'1 2 4'
    Artist.filter(:related_artists=>@artist2).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    Artist.filter(:related_artists=>@artist3).all.map{|a| a.name}.sort.must_equal %w'2 3 4'
    Artist.filter(:related_artists=>@artist4).all.map{|a| a.name}.sort.must_equal %w'1 3 4'
    Artist.exclude(:related_artists=>@artist1).all.map{|a| a.name}.sort.must_equal %w'3'
    Artist.exclude(:related_artists=>@artist2).all.map{|a| a.name}.sort.must_equal %w'4'
    Artist.exclude(:related_artists=>@artist3).all.map{|a| a.name}.sort.must_equal %w'1'
    Artist.exclude(:related_artists=>@artist4).all.map{|a| a.name}.sort.must_equal %w'2'
    Artist.filter(:related_artists=>[@artist1, @artist4]).all.map{|a| a.name}.sort.must_equal %w'1 2 3 4'
    Artist.exclude(:related_artists=>[@artist1, @artist4]).all.map{|a| a.name}.sort.must_equal %w''
    Artist.filter(:related_artists=>Artist.filter(:id=>@artist1.id)).all.map{|a| a.name}.sort.must_equal %w'1 2 4'
    Artist.exclude(:related_artists=>Artist.filter(:id=>@artist1.id)).all.map{|a| a.name}.sort.must_equal %w'3'

    c = self_join(Artist)
    c.filter(:related_artists=>@artist1).all.map{|a| a.name}.sort.must_equal %w'1 2 4'
    c.filter(:related_artists=>@artist2).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    c.filter(:related_artists=>@artist3).all.map{|a| a.name}.sort.must_equal %w'2 3 4'
    c.filter(:related_artists=>@artist4).all.map{|a| a.name}.sort.must_equal %w'1 3 4'
    c.exclude(:related_artists=>@artist1).all.map{|a| a.name}.sort.must_equal %w'3'
    c.exclude(:related_artists=>@artist2).all.map{|a| a.name}.sort.must_equal %w'4'
    c.exclude(:related_artists=>@artist3).all.map{|a| a.name}.sort.must_equal %w'1'
    c.exclude(:related_artists=>@artist4).all.map{|a| a.name}.sort.must_equal %w'2'
    c.filter(:related_artists=>[@artist1, @artist4]).all.map{|a| a.name}.sort.must_equal %w'1 2 3 4'
    c.exclude(:related_artists=>[@artist1, @artist4]).all.map{|a| a.name}.sort.must_equal %w''
    c.filter(:related_artists=>c.filter(Sequel[:artists][:id]=>@artist1.id)).all.map{|a| a.name}.sort.must_equal %w'1 2 4'
    c.exclude(:related_artists=>c.filter(Sequel[:artists][:id]=>@artist1.id)).all.map{|a| a.name}.sort.must_equal %w'3'
  end

  it "should handle extreme case with 5 join tables" do
    Artist.many_through_many :related_albums, [[:albums_artists, :artist_id, :album_id], [:albums, :id, :id], [:albums_artists, :album_id, :artist_id], [:artists, :id, :id], [:albums_artists, :artist_id, :album_id]], :class=>Album, :distinct=>true
    @db[:albums_artists].delete
    @album1.add_artist(@artist1)
    @album1.add_artist(@artist2)
    @album2.add_artist(@artist2)
    @album2.add_artist(@artist3)
    @album3.add_artist(@artist1)
    @album4.add_artist(@artist3)
    @album4.add_artist(@artist4)

    Artist[@artist1.id].related_albums.map{|x| x.name}.sort.must_equal %w'A B C'
    Artist[@artist2.id].related_albums.map{|x| x.name}.sort.must_equal %w'A B C D'
    Artist[@artist3.id].related_albums.map{|x| x.name}.sort.must_equal %w'A B D'
    Artist[@artist4.id].related_albums.map{|x| x.name}.sort.must_equal %w'B D'
    Artist[@artist1.id].related_albums.map{|x| x.name}.sort.must_equal %w'A B C'
    Artist[@artist2.id].related_albums.map{|x| x.name}.sort.must_equal %w'A B C D'
    Artist[@artist3.id].related_albums.map{|x| x.name}.sort.must_equal %w'A B D'
    Artist[@artist4.id].related_albums.map{|x| x.name}.sort.must_equal %w'B D'

    Artist.filter(:id=>@artist1.id).eager(:related_albums).all.map{|x| x.related_albums.map{|a| a.name}}.flatten.sort.must_equal %w'A B C'
    Artist.filter(:id=>@artist2.id).eager(:related_albums).all.map{|x| x.related_albums.map{|a| a.name}}.flatten.sort.must_equal %w'A B C D'
    Artist.filter(:id=>@artist3.id).eager(:related_albums).all.map{|x| x.related_albums.map{|a| a.name}}.flatten.sort.must_equal %w'A B D'
    Artist.filter(:id=>@artist4.id).eager(:related_albums).all.map{|x| x.related_albums.map{|a| a.name}}.flatten.sort.must_equal %w'B D'

    Artist.filter(Sequel[:artists][:id]=>@artist1.id).eager_graph(:related_albums).all.map{|x| x.related_albums.map{|a| a.name}}.flatten.sort.must_equal %w'A B C'
    Artist.filter(Sequel[:artists][:id]=>@artist2.id).eager_graph(:related_albums).all.map{|x| x.related_albums.map{|a| a.name}}.flatten.sort.must_equal %w'A B C D'
    Artist.filter(Sequel[:artists][:id]=>@artist3.id).eager_graph(:related_albums).all.map{|x| x.related_albums.map{|a| a.name}}.flatten.sort.must_equal %w'A B D'
    Artist.filter(Sequel[:artists][:id]=>@artist4.id).eager_graph(:related_albums).all.map{|x| x.related_albums.map{|a| a.name}}.flatten.sort.must_equal %w'B D'

    Artist.filter(:related_albums=>@album1).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    Artist.filter(:related_albums=>@album2).all.map{|a| a.name}.sort.must_equal %w'1 2 3 4'
    Artist.filter(:related_albums=>@album3).all.map{|a| a.name}.sort.must_equal %w'1 2'
    Artist.filter(:related_albums=>@album4).all.map{|a| a.name}.sort.must_equal %w'2 3 4'
    Artist.exclude(:related_albums=>@album1).all.map{|a| a.name}.sort.must_equal %w'4'
    Artist.exclude(:related_albums=>@album2).all.map{|a| a.name}.sort.must_equal %w''
    Artist.exclude(:related_albums=>@album3).all.map{|a| a.name}.sort.must_equal %w'3 4'
    Artist.exclude(:related_albums=>@album4).all.map{|a| a.name}.sort.must_equal %w'1'
    Artist.filter(:related_albums=>[@album1, @album3]).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    Artist.filter(:related_albums=>[@album3, @album4]).all.map{|a| a.name}.sort.must_equal %w'1 2 3 4'
    Artist.exclude(:related_albums=>[@album1, @album3]).all.map{|a| a.name}.sort.must_equal %w'4'
    Artist.exclude(:related_albums=>[@album2, @album4]).all.map{|a| a.name}.sort.must_equal %w''
    Artist.filter(:related_albums=>Album.filter(:id=>[@album1.id, @album3.id])).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    Artist.exclude(:related_albums=>Album.filter(:id=>[@album1.id, @album3.id])).all.map{|a| a.name}.sort.must_equal %w'4'

    c = self_join(Artist)
    c.filter(:related_albums=>@album1).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    c.filter(:related_albums=>@album2).all.map{|a| a.name}.sort.must_equal %w'1 2 3 4'
    c.filter(:related_albums=>@album3).all.map{|a| a.name}.sort.must_equal %w'1 2'
    c.filter(:related_albums=>@album4).all.map{|a| a.name}.sort.must_equal %w'2 3 4'
    c.exclude(:related_albums=>@album1).all.map{|a| a.name}.sort.must_equal %w'4'
    c.exclude(:related_albums=>@album2).all.map{|a| a.name}.sort.must_equal %w''
    c.exclude(:related_albums=>@album3).all.map{|a| a.name}.sort.must_equal %w'3 4'
    c.exclude(:related_albums=>@album4).all.map{|a| a.name}.sort.must_equal %w'1'
    c.filter(:related_albums=>[@album1, @album3]).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    c.filter(:related_albums=>[@album3, @album4]).all.map{|a| a.name}.sort.must_equal %w'1 2 3 4'
    c.exclude(:related_albums=>[@album1, @album3]).all.map{|a| a.name}.sort.must_equal %w'4'
    c.exclude(:related_albums=>[@album2, @album4]).all.map{|a| a.name}.sort.must_equal %w''
    c.filter(:related_albums=>self_join(Album).filter(Sequel[:albums][:id]=>[@album1.id, @album3.id])).all.map{|a| a.name}.sort.must_equal %w'1 2 3'
    c.exclude(:related_albums=>self_join(Album).filter(Sequel[:albums][:id]=>[@album1.id, @album3.id])).all.map{|a| a.name}.sort.must_equal %w'4'
  end
end
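# Each element of a many_through_many join path above is a three-element
# array of [join_table, left_key, right_key], read from the current model
# outward. The three-table path used in the tests walks artist -> album ->
# artist:
#
#   many_through_many :related_artists,
#     [[:albums_artists, :artist_id, :album_id],   # artists to join table
#      [:albums, :id, :id],                        # through albums
#      [:albums_artists, :album_id, :artist_id]],  # back to artists
#     :class=>Artist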
describe "Lazy Attributes plugin" do
  before(:all) do
    @db = DB
    @db.create_table!(:items) do
      primary_key :id
      String :name
      Integer :num
    end
    @db[:items].delete
    class ::Item < Sequel::Model(@db)
      plugin :lazy_attributes, :num
    end
    Item.create(:name=>'J', :num=>1)
  end
  after(:all) do
    @db.drop_table?(:items)
    Object.send(:remove_const, :Item)
  end

  it "should not include lazy attribute columns by default" do
    Item.first.must_equal Item.load(:id=>1, :name=>'J')
  end

  it "should load lazy attribute on access" do
    Item.first.num.must_equal 1
  end

  it "should typecast lazy attribute in setter" do
    i = Item.new
    i.num = '1'
    i.num.must_equal 1
  end

  it "should typecast lazy attribute in setter when selecting from a subquery" do
    c = Sequel::Model(@db[:items].from_self)
    c.instance_variable_set(:@db_schema, Item.db_schema)
    c.plugin :lazy_attributes, :num
    i = c.new
    i.num = '1'
    i.num.must_equal 1
  end

  it "should load lazy attribute for all items returned when accessing any item" do
    Item.create(:name=>'K', :num=>2)
    a = Item.order(:name).all
    a.must_equal [Item.load(:id=>1, :name=>'J'), Item.load(:id=>2, :name=>'K')]
    a.map{|x| x[:num]}.must_equal [nil, nil]
    a.first.num.must_equal 1
    a.map{|x| x[:num]}.must_equal [1, 2]
    a.last.num.must_equal 2
  end
end

describe "Tactical Eager Loading Plugin" do
  before(:all) do
    @db = DB
    @db.instance_variable_get(:@schemas).clear
    @db.drop_table?(:albums_artists)
    @db.create_table!(:artists) do
      primary_key :id
      String :name
    end
    @db.create_table!(:albums) do
      primary_key :id
      String :name
      foreign_key :artist_id, :artists
    end
  end
  before do
    @db[:albums].delete
    @db[:artists].delete
    class ::Album < Sequel::Model(@db)
      plugin :tactical_eager_loading
      many_to_one :artist
    end
    class ::Artist < Sequel::Model(@db)
      plugin :tactical_eager_loading
      one_to_many :albums, :order=>:name
    end

    @artist1 = Artist.create(:name=>'1')
    @artist2 = Artist.create(:name=>'2')
    @artist3 = Artist.create(:name=>'3')
    @artist4 = Artist.create(:name=>'4')
    @album1 = Album.create(:name=>'A', :artist=>@artist1)
    @album2 = Album.create(:name=>'B', :artist=>@artist1)
    @album3 = Album.create(:name=>'C', :artist=>@artist2)
    @album4 = Album.create(:name=>'D', :artist=>@artist3)
  end
  after do
    [:Album, :Artist].each{|s| Object.send(:remove_const, s)}
  end
  after(:all) do
    @db.drop_table? :albums, :artists
  end

  it "should eagerly load associations for all items when accessing any item" do
    a = Artist.order(:name).all
    a.map{|x| x.associations}.must_equal [{}, {}, {}, {}]
    a.first.albums.must_equal [@album1, @album2]
    a.map{|x| x.associations}.must_equal [{:albums=>[@album1, @album2]}, {:albums=>[@album3]}, {:albums=>[@album4]}, {:albums=>[]}]

    a = Album.order(:name).all
    a.map{|x| x.associations}.must_equal [{}, {}, {}, {}]
    a.first.artist.must_equal @artist1
    a.map{|x| x.associations}.must_equal [{:artist=>@artist1}, {:artist=>@artist1}, {:artist=>@artist2}, {:artist=>@artist3}]
  end
end
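# The behavior verified above: with tactical_eager_loading, calling an
# unloaded association on one instance eager loads it for every instance
# retrieved by the same dataset, turning a potential N+1 into two queries:
#
#   artists = Artist.order(:name).all   # query 1
#   artists.first.albums                # query 2, primes all four artists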
describe "Touch plugin" do
  before(:all) do
    @db = DB
    @db.drop_table? :albums_artists, :albums, :artists
    @db.create_table(:artists) do
      primary_key :id
      String :name
      DateTime :updated_at
    end
    @db.create_table(:albums) do
      primary_key :id
      String :name
      foreign_key :artist_id, :artists
      DateTime :updated_at
    end
    @db.create_join_table({:album_id=>:albums, :artist_id=>:artists}, :no_index=>true)
  end
  before do
    @db[:albums].delete
    @db[:artists].delete
    class ::Album < Sequel::Model(@db)
    end
    class ::Artist < Sequel::Model(@db)
    end

    @artist = Artist.create(:name=>'1')
    @album = Album.create(:name=>'A', :artist_id=>@artist.id)
  end
  after do
    [:Album, :Artist].each{|s| Object.send(:remove_const, s)}
  end
  after(:all) do
    @db.drop_table? :albums_artists, :albums, :artists
  end

  def around
    DB.transaction(:rollback=>:always){super}
  end

  it "should update the timestamp column when touching the record" do
    Album.plugin :touch
    @album.updated_at.must_be_nil
    @album.touch
    @album.updated_at.to_i.must_be_close_to Time.now.to_i, 2
  end

  cspecify "should update the timestamp column for many_to_one associated records when the record is updated or destroyed", [:jdbc, :sqlite] do
    Album.many_to_one :artist
    Album.plugin :touch, :associations=>:artist
    @artist.updated_at.must_be_nil
    @album.update(:name=>'B')
    ua = @artist.reload.updated_at
    if ua.is_a?(Time)
      ua.to_i.must_be_close_to Time.now.to_i, 60
    else
      (DateTime.now - ua).must_be_close_to 0, 60.0/86400
    end
    @artist.update(:updated_at=>nil)
    @album.destroy
    # Reload to pick up the timestamp set by the destroy, instead of
    # rechecking the stale value from the update above.
    ua = @artist.reload.updated_at
    if ua.is_a?(Time)
      ua.to_i.must_be_close_to Time.now.to_i, 60
    else
      (DateTime.now - ua).must_be_close_to 0, 60.0/86400
    end
  end

  cspecify "should update the timestamp column for one_to_many associated records when the record is updated", [:jdbc, :sqlite] do
    Artist.one_to_many :albums
    Artist.plugin :touch, :associations=>:albums
    @album.updated_at.must_be_nil
    @artist.update(:name=>'B')
    ua = @album.reload.updated_at
    if ua.is_a?(Time)
      ua.to_i.must_be_close_to Time.now.to_i, 60
    else
      (DateTime.now - ua).must_be_close_to 0, 60.0/86400
    end
  end

  cspecify "should update the timestamp column for many_to_many associated records when the record is updated", [:jdbc, :sqlite] do
    Artist.many_to_many :albums
    Artist.plugin :touch, :associations=>:albums
    @artist.add_album(@album)
    @album.updated_at.must_be_nil
    @artist.update(:name=>'B')
    ua = @album.reload.updated_at
    if ua.is_a?(Time)
      ua.to_i.must_be_close_to Time.now.to_i, 60
    else
      (DateTime.now - ua).must_be_close_to 0, 60.0/86400
    end
  end
end

describe "Serialization plugin" do
  before do
    @db = DB
    @db.create_table!(:items) do
      primary_key :id
      String :stuff
    end
    class ::Item < Sequel::Model(@db)
      plugin :serialization, :marshal, :stuff
    end
  end
  after do
    @db.drop_table?(:items)
    Object.send(:remove_const, :Item)
  end

  it "should serialize and deserialize items as needed" do
    i = Item.create(:stuff=>{:a=>1})
    i.stuff.must_equal(:a=>1)
    i.stuff = [1, 2, 3]
    i.save
    Item.first.stuff.must_equal [1, 2, 3]
    i.update(:stuff=>Item.new)
    Item.first.stuff.must_equal Item.new
  end
end
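# The serialization plugin above registers :marshal for the stuff column;
# the same column could instead use another registered format such as :json
# or :yaml, with the serializer applied on save and the deserializer on
# access:
#
#   plugin :serialization, :json, :stuff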
destroying a stale record" do p1 = Person[@p.id] p2 = Person[@p.id] p1.update(:name=>'Jim') proc{p2.destroy}.must_raise(Sequel::Plugins::OptimisticLocking::Error) end it "should not raise an error when updating the same record twice" do p1 = Person[@p.id] p1.update(:name=>'Jim') p1.update(:name=>'Bob') end end describe "Composition plugin" do before do @db = DB @db.create_table!(:events) do primary_key :id Integer :year Integer :month Integer :day end class ::Event < Sequel::Model(@db) plugin :composition composition :date, :composer=>proc{Date.new(year, month, day) if year && month && day}, :decomposer=>(proc do if date self.year = date.year self.month = date.month self.day = date.day else self.year, self.month, self.day = nil end end) composition :date, :mapping=>[:year, :month, :day] end @e1 = Event.create(:year=>2010, :month=>2, :day=>15) @e2 = Event.create(:year=>nil) end after do @db.drop_table?(:events) Object.send(:remove_const, :Event) end it "should return a composed object if the underlying columns have a value" do @e1.date.must_equal Date.civil(2010, 2, 15) @e2.date.must_be_nil end it "should decompose the object when saving the record" do @e1.date = Date.civil(2009, 1, 2) @e1.save @e1.year.must_equal 2009 @e1.month.must_equal 1 @e1.day.must_equal 2 end it "should save all columns when saving changes" do @e2.date = Date.civil(2009, 10, 2) @e2.save_changes @e2.reload @e2.year.must_equal 2009 @e2.month.must_equal 10 @e2.day.must_equal 2 end end describe "RcteTree Plugin" do rcte_tree_plugin_specs = Module.new do extend Minitest::Spec::DSL it "should load all standard (not-CTE) methods correctly" do @a.children.must_equal [@aa, @ab] @b.children.must_equal [@ba, @bb] @aa.children.must_equal [@aaa, @aab] @ab.children.must_equal [@aba, @abb] @ba.children.must_equal [] @bb.children.must_equal [] @aaa.children.must_equal [@aaaa, @aaab] @aab.children.must_equal [] @aba.children.must_equal [] @abb.children.must_equal [] @aaaa.children.must_equal [@aaaaa] @aaab.children.must_equal [] @aaaaa.children.must_equal [] @a.parent.must_be_nil @b.parent.must_be_nil @aa.parent.must_equal @a @ab.parent.must_equal @a @ba.parent.must_equal @b @bb.parent.must_equal @b @aaa.parent.must_equal @aa @aab.parent.must_equal @aa @aba.parent.must_equal @ab @abb.parent.must_equal @ab @aaaa.parent.must_equal @aaa @aaab.parent.must_equal @aaa @aaaaa.parent.must_equal @aaaa end it "should load all ancestors and descendants lazily for a given instance" do @a.descendants.must_equal [@aa, @aaa, @aaaa, @aaaaa, @aaab, @aab, @ab, @aba, @abb] @b.descendants.must_equal [@ba, @bb] @aa.descendants.must_equal [@aaa, @aaaa, @aaaaa, @aaab, @aab] @ab.descendants.must_equal [@aba, @abb] @ba.descendants.must_equal [] @bb.descendants.must_equal [] @aaa.descendants.must_equal [@aaaa, @aaaaa, @aaab] @aab.descendants.must_equal [] @aba.descendants.must_equal [] @abb.descendants.must_equal [] @aaaa.descendants.must_equal [@aaaaa] @aaab.descendants.must_equal [] @aaaaa.descendants.must_equal [] @a.ancestors.must_equal [] @b.ancestors.must_equal [] @aa.ancestors.must_equal [@a] @ab.ancestors.must_equal [@a] @ba.ancestors.must_equal [@b] @bb.ancestors.must_equal [@b] @aaa.ancestors.must_equal [@a, @aa] @aab.ancestors.must_equal [@a, @aa] @aba.ancestors.must_equal [@a, @ab] @abb.ancestors.must_equal [@a, @ab] @aaaa.ancestors.must_equal [@a, @aa, @aaa] @aaab.ancestors.must_equal [@a, @aa, @aaa] @aaaaa.ancestors.must_equal [@a, @aa, @aaa, @aaaa] end it "should eagerly load all ancestors and descendants for a dataset" do nodes = 
@Node.filter(@Node.primary_key=>[@a.pk, @b.pk, @aaa.pk]).order(:name).eager(:ancestors, :descendants).all nodes.must_equal [@a, @aaa, @b] nodes[0].descendants.must_equal [@aa, @aaa, @aaaa, @aaaaa, @aaab, @aab, @ab, @aba, @abb] nodes[1].descendants.must_equal [@aaaa, @aaaaa, @aaab] nodes[2].descendants.must_equal [@ba, @bb] nodes[0].ancestors.must_equal [] nodes[1].ancestors.must_equal [@a, @aa] nodes[2].ancestors.must_equal [] end it "should eagerly load descendants to a given level" do nodes = @Node.filter(@Node.primary_key=>[@a.pk, @b.pk, @aaa.pk]).order(:name).eager(:descendants=>1).all nodes.must_equal [@a, @aaa, @b] nodes[0].descendants.must_equal [@aa, @ab] nodes[1].descendants.must_equal [@aaaa, @aaab] nodes[2].descendants.must_equal [@ba, @bb] nodes = @Node.filter(@Node.primary_key=>[@a.pk, @b.pk, @aaa.pk]).order(:name).eager(:descendants=>2).all nodes.must_equal [@a, @aaa, @b] nodes[0].descendants.must_equal [@aa, @aaa, @aab, @ab, @aba, @abb] nodes[1].descendants.must_equal [@aaaa, @aaaaa, @aaab] nodes[2].descendants.must_equal [@ba, @bb] end it "should populate all :children associations when eagerly loading descendants for a dataset" do nodes = @Node.filter(@Node.primary_key=>[@a.pk, @b.pk, @aaa.pk]).order(:name).eager(:descendants).all nodes[0].associations[:children].must_equal [@aa, @ab] nodes[1].associations[:children].must_equal [@aaaa, @aaab] nodes[2].associations[:children].must_equal [@ba, @bb] nodes[0].associations[:children].map{|c1| c1.associations[:children]}.must_equal [[@aaa, @aab], [@aba, @abb]] nodes[1].associations[:children].map{|c1| c1.associations[:children]}.must_equal [[@aaaaa], []] nodes[2].associations[:children].map{|c1| c1.associations[:children]}.must_equal [[], []] nodes[0].associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children]}}.must_equal [[[@aaaa, @aaab], []], [[], []]] nodes[1].associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children]}}.must_equal [[[]], []] nodes[0].associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children].map{|c3| c3.associations[:children]}}}.must_equal [[[[@aaaaa], []], []], [[], []]] nodes[0].associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children].map{|c3| c3.associations[:children].map{|c4| c4.associations[:children]}}}}.must_equal [[[[[]], []], []], [[], []]] end it "should not populate :children associations for final level when loading descendants to a given level" do nodes = @Node.filter(@Node.primary_key=>[@a.pk, @b.pk, @aaa.pk]).order(:name).eager(:descendants=>1).all nodes[0].associations[:children].must_equal [@aa, @ab] nodes[0].associations[:children].map{|c1| c1.associations[:children]}.must_equal [nil, nil] nodes[1].associations[:children].must_equal [@aaaa, @aaab] nodes[1].associations[:children].map{|c1| c1.associations[:children]}.must_equal [nil, nil] nodes[2].associations[:children].must_equal [@ba, @bb] nodes[2].associations[:children].map{|c1| c1.associations[:children]}.must_equal [nil, nil] nodes[0].associations[:children].map{|c1| c1.children}.must_equal [[@aaa, @aab], [@aba, @abb]] nodes[1].associations[:children].map{|c1| c1.children}.must_equal [[@aaaaa], []] nodes[2].associations[:children].map{|c1| c1.children}.must_equal [[], []] nodes = @Node.filter(@Node.primary_key=>[@a.pk, @b.pk, @aaa.pk]).order(:name).eager(:descendants=>2).all nodes[0].associations[:children].must_equal [@aa, @ab] nodes[0].associations[:children].map{|c1| 
c1.associations[:children]}.must_equal [[@aaa, @aab], [@aba, @abb]] nodes[0].associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children]}}.must_equal [[[@aaaa, @aaab], nil], [nil, nil]] nodes[0].associations[:children].map{|c1| c1.associations[:children].map{|c2| (cc2 = c2.associations[:children]) ? cc2.map{|c3| c3.associations[:children]} : nil}}.must_equal [[[[@aaaaa], []], nil], [nil, nil]] nodes[0].associations[:children].map{|c1| c1.associations[:children].map{|c2| (cc2 = c2.associations[:children]) ? cc2.map{|c3| (cc3 = c3.associations[:children]) ? cc3.map{|c4| c4.associations[:children]} : nil} : nil}}.must_equal [[[[nil], []], nil], [nil, nil]] nodes[1].associations[:children].must_equal [@aaaa, @aaab] nodes[1].associations[:children].map{|c1| c1.associations[:children]}.must_equal [[@aaaaa], []] nodes[1].associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children]}}.must_equal [[nil], []] nodes[2].associations[:children].must_equal [@ba, @bb] nodes[2].associations[:children].map{|c1| c1.associations[:children]}.must_equal [[], []] nodes[0].associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.children}}.must_equal [[[@aaaa, @aaab], []], [[], []]] nodes[0].associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.children.map{|c3| c3.children}}}.must_equal [[[[@aaaaa], []], []], [[], []]] nodes[0].associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.children.map{|c3| c3.children.map{|c4| c4.children}}}}.must_equal [[[[[]], []], []], [[], []]] nodes[1].associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.children}}.must_equal [[[]], []] end it "should populate all :children associations when lazily loading descendants" do @a.descendants @a.associations[:children].must_equal [@aa, @ab] @a.associations[:children].map{|c1| c1.associations[:children]}.must_equal [[@aaa, @aab], [@aba, @abb]] @a.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children]}}.must_equal [[[@aaaa, @aaab], []], [[], []]] @a.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children].map{|c3| c3.associations[:children]}}}.must_equal [[[[@aaaaa], []], []], [[], []]] @a.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children].map{|c3| c3.associations[:children].map{|c4| c4.associations[:children]}}}}.must_equal [[[[[]], []], []], [[], []]] @b.descendants @b.associations[:children].must_equal [@ba, @bb] @b.associations[:children].map{|c1| c1.associations[:children]}.must_equal [[], []] @aaa.descendants @aaa.associations[:children].map{|c1| c1.associations[:children]}.must_equal [[@aaaaa], []] @aaa.associations[:children].map{|c1| c1.associations[:children].map{|c2| c2.associations[:children]}}.must_equal [[[]], []] end it "should populate all :parent associations when eagerly loading ancestors for a dataset" do nodes = @Node.filter(@Node.primary_key=>[@a.pk, @ba.pk, @aaa.pk, @aaaaa.pk]).order(:name).eager(:ancestors).all nodes[0].associations.fetch(:parent, 1).must_be_nil nodes[1].associations[:parent].must_equal @aa nodes[1].associations[:parent].associations[:parent].must_equal @a nodes[1].associations[:parent].associations[:parent].associations.fetch(:parent, 1).must_be_nil nodes[2].associations[:parent].must_equal @aaaa nodes[2].associations[:parent].associations[:parent].must_equal @aaa 
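# Note: the rcte_tree :ancestors eager loader fetches the whole ancestor
# chain with a single recursive CTE query and caches each level's :parent
# association, which is what the assertions around this point verify:
# walking .parent repeatedly uses the cached objects instead of issuing
# further queries.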
nodes[2].associations[:parent].associations[:parent].associations[:parent].must_equal @aa nodes[2].associations[:parent].associations[:parent].associations[:parent].associations[:parent].must_equal @a nodes[2].associations[:parent].associations[:parent].associations[:parent].associations[:parent].associations.fetch(:parent, 1).must_be_nil nodes[3].associations[:parent].must_equal @b nodes[3].associations[:parent].associations.fetch(:parent, 1).must_be_nil end it "should populate all :parent associations when lazily loading ancestors" do @a.reload @a.ancestors @a.associations[:parent].must_be_nil @ba.reload @ba.ancestors @ba.associations[:parent].must_equal @b @ba.associations[:parent].associations.fetch(:parent, 1).must_be_nil @ba.reload @aaaaa.ancestors @aaaaa.associations[:parent].must_equal @aaaa @aaaaa.associations[:parent].associations[:parent].must_equal @aaa @aaaaa.associations[:parent].associations[:parent].associations[:parent].must_equal @aa @aaaaa.associations[:parent].associations[:parent].associations[:parent].associations[:parent].must_equal @a @aaaaa.associations[:parent].associations[:parent].associations[:parent].associations[:parent].associations.fetch(:parent, 1).must_be_nil end end before do @nodes.each{|n| n.associations.clear} end describe "with single key" do before(:all) do @db = DB @db.create_table!(:nodes) do primary_key :id Integer :parent_id String :name end @Node = Class.new(Sequel::Model(@db[:nodes])) @Node.plugin :rcte_tree, :order=>:name @nodes = [] @nodes << @a = @Node.create(:name=>'a') @nodes << @b = @Node.create(:name=>'b') @nodes << @aa = @Node.create(:name=>'aa', :parent=>@a) @nodes << @ab = @Node.create(:name=>'ab', :parent=>@a) @nodes << @ba = @Node.create(:name=>'ba', :parent=>@b) @nodes << @bb = @Node.create(:name=>'bb', :parent=>@b) @nodes << @aaa = @Node.create(:name=>'aaa', :parent=>@aa) @nodes << @aab = @Node.create(:name=>'aab', :parent=>@aa) @nodes << @aba = @Node.create(:name=>'aba', :parent=>@ab) @nodes << @abb = @Node.create(:name=>'abb', :parent=>@ab) @nodes << @aaaa = @Node.create(:name=>'aaaa', :parent=>@aaa) @nodes << @aaab = @Node.create(:name=>'aaab', :parent=>@aaa) @nodes << @aaaaa = @Node.create(:name=>'aaaaa', :parent=>@aaaa) end after(:all) do @db.drop_table? 
:nodes end include rcte_tree_plugin_specs it "should work correctly if not all columns are selected" do c = Class.new(Sequel::Model(@db[:nodes])) c.plugin :rcte_tree, :order=>:name c.plugin :lazy_attributes, :name c[:name=>'aaaa'].descendants.must_equal [c.load(:parent_id=>11, :id=>13)] c[:name=>'aa'].ancestors.must_equal [c.load(:parent_id=>nil, :id=>1)] nodes = c.filter(:id=>[@a.id, @b.id, @aaa.id]).order(:name).eager(:ancestors, :descendants).all nodes.must_equal [{:parent_id=>nil, :id=>1}, {:parent_id=>3, :id=>7}, {:parent_id=>nil, :id=>2}].map{|x| c.load(x)} nodes[2].descendants.must_equal [{:parent_id=>2, :id=>5}, {:parent_id=>2, :id=>6}].map{|x| c.load(x)} nodes[1].ancestors.must_equal [{:parent_id=>nil, :id=>1}, {:parent_id=>1, :id=>3}].map{|x| c.load(x)} end end describe "with composite keys" do before(:all) do @db = DB @db.create_table!(:nodes) do Integer :id Integer :id2 Integer :parent_id Integer :parent_id2 String :name primary_key [:id, :id2] end @Node = Class.new(Sequel::Model(@db[:nodes])) @Node.plugin :rcte_tree, :order=>:name, :key=>[:parent_id, :parent_id2] @Node.unrestrict_primary_key @nodes = [] @nodes << @a = @Node.create(:id=>1, :id2=>1, :name=>'a') @nodes << @b = @Node.create(:id=>1, :id2=>2, :name=>'b') @nodes << @aa = @Node.create(:id=>2, :id2=>1, :name=>'aa', :parent=>@a) @nodes << @ab = @Node.create(:id=>2, :id2=>2, :name=>'ab', :parent=>@a) @nodes << @ba = @Node.create(:id=>3, :id2=>1, :name=>'ba', :parent=>@b) @nodes << @bb = @Node.create(:id=>3, :id2=>2, :name=>'bb', :parent=>@b) @nodes << @aaa = @Node.create(:id=>3, :id2=>3, :name=>'aaa', :parent=>@aa) @nodes << @aab = @Node.create(:id=>1, :id2=>3, :name=>'aab', :parent=>@aa) @nodes << @aba = @Node.create(:id=>2, :id2=>3, :name=>'aba', :parent=>@ab) @nodes << @abb = @Node.create(:id=>4, :id2=>1, :name=>'abb', :parent=>@ab) @nodes << @aaaa = @Node.create(:id=>1, :id2=>4, :name=>'aaaa', :parent=>@aaa) @nodes << @aaab = @Node.create(:id=>2, :id2=>4, :name=>'aaab', :parent=>@aaa) @nodes << @aaaaa = @Node.create(:id=>3, :id2=>4, :name=>'aaaaa', :parent=>@aaaa) end after(:all) do @db.drop_table? :nodes end include rcte_tree_plugin_specs end end if DB.dataset.supports_cte? 
and !Sequel.guarded?(:db2) describe "Instance Filters plugin" do before(:all) do @db = DB @db.create_table!(:items) do primary_key :id String :name Integer :cost Integer :number end class ::Item < Sequel::Model(@db) plugin :instance_filters end end before do @db[:items].delete @i = Item.create(:name=>'J', :number=>1, :cost=>2) @i.instance_filter(:number=>1) @i.set(:name=>'K') end after(:all) do @db.drop_table?(:items) Object.send(:remove_const, :Item) end it "should not raise an error if saving only updates one row" do @i.save @i.refresh.name.must_equal 'K' end it "should raise error if saving doesn't update a row" do @i.this.update(:number=>2) proc{@i.save}.must_raise(Sequel::NoExistingObject) end it "should apply all instance filters" do @i.instance_filter{cost <= 2} @i.this.update(:number=>2) proc{@i.save}.must_raise(Sequel::NoExistingObject) @i.this.update(:number=>1, :cost=>3) proc{@i.save}.must_raise(Sequel::NoExistingObject) @i.this.update(:cost=>2) @i.save @i.refresh.name.must_equal 'K' end it "should clear instance filters after successful save" do @i.save @i.this.update(:number=>2) @i.update(:name=>'L') @i.refresh.name.must_equal 'L' end it "should not raise an error if deleting only deletes one row" do @i.destroy proc{@i.refresh}.must_raise(Sequel::Error, 'Record not found') end it "should raise error if destroying doesn't delete a row" do @i.this.update(:number=>2) proc{@i.destroy}.must_raise(Sequel::NoExistingObject) end end describe "UpdatePrimaryKey plugin" do before(:all) do @db = DB @db.create_table!(:t) do Integer :a, :primary_key=>true Integer :b end @ds = @db[:t] @c = Class.new(Sequel::Model(@ds)) @c.set_primary_key(:a) @c.unrestrict_primary_key @c.plugin :update_primary_key end before do @ds.delete @ds.insert(:a=>1, :b=>3) end after(:all) do @db.drop_table?(:t) end it "should handle regular updates" do @c.first.update(:b=>4) @db[:t].all.must_equal [{:a=>1, :b=>4}] @c.first.set(:b=>5).save @db[:t].all.must_equal [{:a=>1, :b=>5}] @c.first.set(:b=>6).save(:columns=>:b) @db[:t].all.must_equal [{:a=>1, :b=>6}] end it "should handle updating the primary key field with another field" do @c.first.update(:a=>2, :b=>4) @db[:t].all.must_equal [{:a=>2, :b=>4}] end it "should handle updating just the primary key field when saving changes" do @c.first.update(:a=>2) @db[:t].all.must_equal [{:a=>2, :b=>3}] @c.first.set(:a=>3).save(:columns=>:a) @db[:t].all.must_equal [{:a=>3, :b=>3}] end it "should handle saving after modifying the primary key field with another field" do @c.first.set(:a=>2, :b=>4).save @db[:t].all.must_equal [{:a=>2, :b=>4}] end it "should handle saving after modifying just the primary key field" do @c.first.set(:a=>2).save @db[:t].all.must_equal [{:a=>2, :b=>3}] end it "should handle saving after updating the primary key" do @c.first.update(:a=>2).update(:b=>4).set(:b=>5).save @db[:t].all.must_equal [{:a=>2, :b=>5}] end end describe "AssociationPks plugin" do before(:all) do @db = DB @db.drop_table?(:albums_tags, :albums_vocalists, :vocalists_instruments, :vocalists_hits, :hits, :instruments, :vocalists, :tags, :albums, :artists) @db.create_table(:artists) do primary_key :id String :name end @db.create_table(:albums) do primary_key :id String :name foreign_key :artist_id, :artists end @db.create_table(:tags) do primary_key :id String :name end @db.create_table(:albums_tags) do foreign_key :album_id, :albums foreign_key :tag_id, :tags end @db.create_table(:vocalists) do String :first String :last primary_key [:first, :last] foreign_key :album_id, :albums end 
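# The remaining tables pair composite keys with the single-key tables above:
# vocalists and hits have composite primary keys, and the *_vocalists,
# *_instruments, and *_hits join tables use composite foreign keys, so the
# association_pks specs that follow can cover single and composite keys on
# both sides of one_to_many and many_to_many associations.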
@db.create_table(:albums_vocalists) do foreign_key :album_id, :albums String :first String :last foreign_key [:first, :last], :vocalists end @db.create_table(:instruments) do primary_key :id String :first String :last foreign_key [:first, :last], :vocalists end @db.create_table(:vocalists_instruments) do String :first String :last foreign_key [:first, :last], :vocalists foreign_key :instrument_id, :instruments end @db.create_table(:hits) do Integer :year Integer :week primary_key [:year, :week] String :first String :last foreign_key [:first, :last], :vocalists end @db.create_table(:vocalists_hits) do String :first String :last foreign_key [:first, :last], :vocalists Integer :year Integer :week foreign_key [:year, :week], :hits end class ::Artist < Sequel::Model plugin :association_pks one_to_many :albums, :order=>:id, :delay_pks=>false end class ::Album < Sequel::Model plugin :association_pks many_to_many :tags, :order=>:id, :delay_pks=>false many_to_many :uat_tags, :order=>Sequel[:tags][:id], :delay_pks=>false, :class=>:Tag, :right_key=>:tag_id, :association_pks_use_associated_table=>true end class ::Tag < Sequel::Model end class ::Vocalist < Sequel::Model set_primary_key [:first, :last] plugin :association_pks end class ::Instrument < Sequel::Model plugin :association_pks end class ::Hit < Sequel::Model set_primary_key [:year, :week] end end before do [:albums_tags, :albums_vocalists, :vocalists_instruments, :vocalists_hits, :hits, :instruments, :vocalists, :tags, :albums, :artists].each{|t| @db[t].delete} @ar1 =@db[:artists].insert(:name=>'YJM') @ar2 =@db[:artists].insert(:name=>'AS') @al1 =@db[:albums].insert(:name=>'RF', :artist_id=>@ar1) @al2 =@db[:albums].insert(:name=>'MO', :artist_id=>@ar1) @al3 =@db[:albums].insert(:name=>'T', :artist_id=>@ar1) @t1 = @db[:tags].insert(:name=>'A') @t2 = @db[:tags].insert(:name=>'B') @t3 = @db[:tags].insert(:name=>'C') {@al1=>[@t1, @t2, @t3], @al2=>[@t2]}.each do |aid, tids| tids.each{|tid| @db[:albums_tags].insert([aid, tid])} end @v1 = ['F1', 'L1'] @v2 = ['F2', 'L2'] @v3 = ['F3', 'L3'] @db[:vocalists].insert(@v1 + [@al1]) @db[:vocalists].insert(@v2 + [@al1]) @db[:vocalists].insert(@v3 + [@al1]) @i1 = @db[:instruments].insert([:first, :last], @v1) @i2 = @db[:instruments].insert([:first, :last], @v1) @i3 = @db[:instruments].insert([:first, :last], @v1) @h1 = [1997, 1] @h2 = [1997, 2] @h3 = [1997, 3] @db[:hits].insert(@h1 + @v1) @db[:hits].insert(@h2 + @v1) @db[:hits].insert(@h3 + @v1) {@al1=>[@v1, @v2, @v3], @al2=>[@v2]}.each do |aid, vids| vids.each{|vid| @db[:albums_vocalists].insert([aid] + vid)} end {@v1=>[@i1, @i2, @i3], @v2=>[@i2]}.each do |vid, iids| iids.each{|iid| @db[:vocalists_instruments].insert(vid + [iid])} end {@v1=>[@h1, @h2, @h3], @v2=>[@h2]}.each do |vid, hids| hids.each{|hid| @db[:vocalists_hits].insert(vid + hid)} end end after(:all) do @db.drop_table? 
:albums_tags, :albums_vocalists, :vocalists_instruments, :vocalists_hits, :hits, :instruments, :vocalists, :tags, :albums, :artists [:Artist, :Album, :Tag, :Vocalist, :Instrument, :Hit].each{|s| Object.send(:remove_const, s)} end it "should return correct associated pks for one_to_many associations" do Artist.order(:id).all.map{|a| a.album_pks}.must_equal [[@al1, @al2, @al3], []] end it "should return correct associated pks for many_to_many associations" do Album.order(:id).all.map{|a| a.tag_pks.sort}.must_equal [[@t1, @t2, @t3], [@t2], []] end it "should return correct associated pks for one_to_many associations using dataset" do Artist.order(:id).all.map{|a| a.album_pks_dataset.map(:id).sort}.must_equal [[@al1, @al2, @al3], []] end it "should return correct associated pks for many_to_many associations using dataset" do Album.order(:id).all.map{|a| a.tag_pks_dataset.map(:tag_id).sort}.must_equal [[@t1, @t2, @t3], [@t2], []] end it "should return correct associated pks for many_to_many associations using :association_pks_use_associated_table" do Album.order(:id).all.map{|a| a.uat_tag_pks.sort}.must_equal [[@t1, @t2, @t3], [@t2], []] end it "should return correct associated right-side cpks for one_to_many associations" do Album.one_to_many :vocalists, :order=>:first Album.order(:id).all.map{|a| a.vocalist_pks.sort}.must_equal [[@v1, @v2, @v3], [], []] end it "should return correct associated right-side cpks for many_to_many associations" do Album.many_to_many :vocalists, :join_table=>:albums_vocalists, :right_key=>[:first, :last], :order=>:first Album.order(:id).all.map{|a| a.vocalist_pks.sort}.must_equal [[@v1, @v2, @v3], [@v2], []] end it "should return correct associated right-side cpks for many_to_many associations when using :association_pks_use_associated_table" do Album.many_to_many :vocalists, :join_table=>:albums_vocalists, :right_key=>[:first, :last], :order=>Sequel[:vocalists][:first], :association_pks_use_associated_table=>true Album.order(:id).all.map{|a| a.vocalist_pks.sort}.must_equal [[@v1, @v2, @v3], [@v2], []] end it "should return correct associated pks for left-side cpks for one_to_many associations" do Vocalist.one_to_many :instruments, :key=>[:first, :last], :order=>:id Vocalist.order(:first, :last).all.map{|a| a.instrument_pks.sort}.must_equal [[@i1, @i2, @i3], [], []] end it "should return correct associated pks for left-side cpks for many_to_many associations" do Vocalist.many_to_many :instruments, :join_table=>:vocalists_instruments, :left_key=>[:first, :last], :order=>:id Vocalist.order(:first, :last).all.map{|a| a.instrument_pks.sort}.must_equal [[@i1, @i2, @i3], [@i2], []] end it "should return correct associated pks for left-side cpks for many_to_many associations when using :association_pks_use_associated_table" do Vocalist.many_to_many :instruments, :join_table=>:vocalists_instruments, :left_key=>[:first, :last], :order=>:id, :association_pks_use_associated_table=>true Vocalist.order(:first, :last).all.map{|a| a.instrument_pks.sort}.must_equal [[@i1, @i2, @i3], [@i2], []] end it "should return correct associated right-side cpks for left-side cpks for one_to_many associations" do Vocalist.one_to_many :hits, :key=>[:first, :last], :order=>:week Vocalist.order(:first, :last).all.map{|a| a.hit_pks.sort}.must_equal [[@h1, @h2, @h3], [], []] end it "should return correct associated right-side cpks for left-side cpks for many_to_many associations" do Vocalist.many_to_many :hits, :join_table=>:vocalists_hits, :left_key=>[:first, :last], :right_key=>[:year, :week],
:order=>:week Vocalist.order(:first, :last).all.map{|a| a.hit_pks.sort}.must_equal [[@h1, @h2, @h3], [@h2], []] end it "should return correct associated right-side cpks for left-side cpks for many_to_many associations when using :association_pks_use_associated_table" do Vocalist.many_to_many :hits, :join_table=>:vocalists_hits, :left_key=>[:first, :last], :right_key=>[:year, :week], :order=>Sequel[:vocalists_hits][:week], :association_pks_use_associated_table=>true Vocalist.order(:first, :last).all.map{|a| a.hit_pks.sort}.must_equal [[@h1, @h2, @h3], [@h2], []] end it "should default to delaying association_pks setter method changes until saving" do album_class = Class.new(Album) album_class.many_to_many :tags, :clone=>:tags, :delay_pks=>true, :join_table=>:albums_tags, :left_key=>:album_id album = album_class.with_pk!(@al1) album.tag_pks.sort.must_equal [@t1, @t2, @t3] album.tag_pks = [@t1, @t2] album.tag_pks.must_equal [@t1, @t2] album.save_changes album_class.with_pk!(album.pk).tag_pks.sort.must_equal [@t1, @t2] album.tag_pks = [] album.tag_pks.must_equal [] album.save_changes album_class.with_pk!(album.pk).tag_pks.sort.must_equal [] end it "should set associated pks correctly for a one_to_many association" do Artist.use_transactions = true Album.order(:id).select_map(:artist_id).must_equal [@ar1, @ar1, @ar1] Artist[@ar2].album_pks = [@al1, @al3] Artist[@ar1].album_pks.must_equal [@al2] Album.order(:id).select_map(:artist_id).must_equal [@ar2, @ar1, @ar2] Artist[@ar1].album_pks = [@al1] Artist[@ar2].album_pks.must_equal [@al3] Album.order(:id).select_map(:artist_id).must_equal [@ar1, nil, @ar2] Artist[@ar1].album_pks = [@al1, @al2] Artist[@ar2].album_pks.must_equal [@al3] Album.order(:id).select_map(:artist_id).must_equal [@ar1, @ar1, @ar2] Artist[@ar1].album_pks = [] Album.order(:id).select_map(:artist_id).must_equal [nil, nil, @ar2] end it "should set associated pks correctly for a many_to_many association" do Artist.use_transactions = true @db[:albums_tags].filter(:album_id=>@al1).select_order_map(:tag_id).must_equal [@t1, @t2, @t3] Album[@al1].tag_pks = [@t1, @t3] @db[:albums_tags].filter(:album_id=>@al1).select_order_map(:tag_id).must_equal [@t1, @t3] Album[@al1].tag_pks = [] @db[:albums_tags].filter(:album_id=>@al1).select_order_map(:tag_id).must_equal [] @db[:albums_tags].filter(:album_id=>@al2).select_order_map(:tag_id).must_equal [@t2] Album[@al2].tag_pks = [@t1, @t2] @db[:albums_tags].filter(:album_id=>@al2).select_order_map(:tag_id).must_equal [@t1, @t2] Album[@al2].tag_pks = [] @db[:albums_tags].filter(:album_id=>@al1).select_order_map(:tag_id).must_equal [] @db[:albums_tags].filter(:album_id=>@al3).select_order_map(:tag_id).must_equal [] Album[@al3].tag_pks = [@t1, @t3] @db[:albums_tags].filter(:album_id=>@al3).select_order_map(:tag_id).must_equal [@t1, @t3] Album[@al3].tag_pks = [] @db[:albums_tags].filter(:album_id=>@al1).select_order_map(:tag_id).must_equal [] end it "should set associated right-side cpks correctly for a one_to_many association" do Album.use_transactions = true Album.one_to_many :vocalists, :order=>:first, :delay_pks=>false Album.order(:id).all.map{|a| a.vocalist_pks.sort}.must_equal [[@v1, @v2, @v3], [], []] Album[@al2].vocalist_pks = [@v1, @v3] Album[@al1].vocalist_pks.must_equal [@v2] Vocalist.order(:first, :last).select_map(:album_id).must_equal [@al2, @al1, @al2] Album[@al1].vocalist_pks = [@v1] Album[@al2].vocalist_pks.must_equal [@v3] Vocalist.order(:first, :last).select_map(:album_id).must_equal [@al1, nil, @al2] Album[@al1].vocalist_pks = [@v1, 
@v2] Album[@al2].vocalist_pks.must_equal [@v3] Vocalist.order(:first, :last).select_map(:album_id).must_equal [@al1, @al1, @al2] Album[@al1].vocalist_pks = [] Vocalist.order(:first, :last).select_map(:album_id).must_equal [nil, nil, @al2] end it "should set associated right-side cpks correctly for a many_to_many association" do Album.use_transactions = true Album.many_to_many :vocalists, :join_table=>:albums_vocalists, :right_key=>[:first, :last], :order=>:first, :delay_pks=>false @db[:albums_vocalists].filter(:album_id=>@al1).select_order_map([:first, :last]).must_equal [@v1, @v2, @v3] Album[@al1].vocalist_pks = [@v1, @v3] @db[:albums_vocalists].filter(:album_id=>@al1).select_order_map([:first, :last]).must_equal [@v1, @v3] Album[@al1].vocalist_pks = [] @db[:albums_vocalists].filter(:album_id=>@al1).select_order_map([:first, :last]).must_equal [] @db[:albums_vocalists].filter(:album_id=>@al2).select_order_map([:first, :last]).must_equal [@v2] Album[@al2].vocalist_pks = [@v1, @v2] @db[:albums_vocalists].filter(:album_id=>@al2).select_order_map([:first, :last]).must_equal [@v1, @v2] Album[@al2].vocalist_pks = [] @db[:albums_vocalists].filter(:album_id=>@al1).select_order_map([:first, :last]).must_equal [] @db[:albums_vocalists].filter(:album_id=>@al3).select_order_map([:first, :last]).must_equal [] Album[@al3].vocalist_pks = [@v1, @v3] @db[:albums_vocalists].filter(:album_id=>@al3).select_order_map([:first, :last]).must_equal [@v1, @v3] Album[@al3].vocalist_pks = [] @db[:albums_vocalists].filter(:album_id=>@al1).select_order_map([:first, :last]).must_equal [] end it "should set associated pks correctly with left-side cpks for a one_to_many association" do Vocalist.use_transactions = true Vocalist.one_to_many :instruments, :key=>[:first, :last], :order=>:id, :delay_pks=>false Vocalist.order(:first, :last).all.map{|a| a.instrument_pks.sort}.must_equal [[@i1, @i2, @i3], [], []] Vocalist[@v2].instrument_pks = [@i1, @i3] Vocalist[@v1].instrument_pks.must_equal [@i2] Instrument.order(:id).select_map([:first, :last]).must_equal [@v2, @v1, @v2] Vocalist[@v1].instrument_pks = [@i1] Vocalist[@v2].instrument_pks.must_equal [@i3] Instrument.order(:id).select_map([:first, :last]).must_equal [@v1, [nil, nil], @v2] Vocalist[@v1].instrument_pks = [@i1, @i2] Vocalist[@v2].instrument_pks.must_equal [@i3] Instrument.order(:id).select_map([:first, :last]).must_equal [@v1, @v1, @v2] Vocalist[@v1].instrument_pks = [] Instrument.order(:id).select_map([:first, :last]).must_equal [[nil, nil], [nil, nil], @v2] end it "should set associated pks correctly with left-side cpks for a many_to_many association" do Vocalist.use_transactions = true Vocalist.many_to_many :instruments, :join_table=>:vocalists_instruments, :left_key=>[:first, :last], :order=>:id, :delay_pks=>false @db[:vocalists_instruments].filter([:first, :last]=>[@v1]).select_order_map(:instrument_id).must_equal [@i1, @i2, @i3] Vocalist[@v1].instrument_pks = [@i1, @i3] @db[:vocalists_instruments].filter([:first, :last]=>[@v1]).select_order_map(:instrument_id).must_equal [@i1, @i3] Vocalist[@v1].instrument_pks = [] @db[:vocalists_instruments].filter([:first, :last]=>[@v1]).select_order_map(:instrument_id).must_equal [] @db[:vocalists_instruments].filter([:first, :last]=>[@v2]).select_order_map(:instrument_id).must_equal [@i2] Vocalist[@v2].instrument_pks = [@i1, @i2] @db[:vocalists_instruments].filter([:first, :last]=>[@v2]).select_order_map(:instrument_id).must_equal [@i1, @i2] Vocalist[@v2].instrument_pks = [] @db[:vocalists_instruments].filter([:first, 
:last]=>[@v1]).select_order_map(:instrument_id).must_equal [] @db[:vocalists_instruments].filter([:first, :last]=>[@v3]).select_order_map(:instrument_id).must_equal [] Vocalist[@v3].instrument_pks = [@i1, @i3] @db[:vocalists_instruments].filter([:first, :last]=>[@v3]).select_order_map(:instrument_id).must_equal [@i1, @i3] Vocalist[@v3].instrument_pks = [] @db[:vocalists_instruments].filter([:first, :last]=>[@v1]).select_order_map(:instrument_id).must_equal [] end it "should set associated right-side cpks correctly with left-side cpks for a one_to_many association" do Vocalist.use_transactions = true Vocalist.one_to_many :hits, :key=>[:first, :last], :order=>:week, :delay_pks=>false Vocalist.order(:first, :last).all.map{|a| a.hit_pks.sort}.must_equal [[@h1, @h2, @h3], [], []] Vocalist[@v2].hit_pks = [@h1, @h3] Vocalist[@v1].hit_pks.must_equal [@h2] Hit.order(:year, :week).select_map([:first, :last]).must_equal [@v2, @v1, @v2] Vocalist[@v1].hit_pks = [@h1] Vocalist[@v2].hit_pks.must_equal [@h3] Hit.order(:year, :week).select_map([:first, :last]).must_equal [@v1, [nil, nil], @v2] Vocalist[@v1].hit_pks = [@h1, @h2] Vocalist[@v2].hit_pks.must_equal [@h3] Hit.order(:year, :week).select_map([:first, :last]).must_equal [@v1, @v1, @v2] Vocalist[@v1].hit_pks = [] Hit.order(:year, :week).select_map([:first, :last]).must_equal [[nil, nil], [nil, nil], @v2] end it "should set associated right-side cpks correctly with left-side cpks for a many_to_many association" do Vocalist.use_transactions = true Vocalist.many_to_many :hits, :join_table=>:vocalists_hits, :left_key=>[:first, :last], :right_key=>[:year, :week], :order=>:week, :delay_pks=>false @db[:vocalists_hits].filter([:first, :last]=>[@v1]).select_order_map([:year, :week]).must_equal [@h1, @h2, @h3] Vocalist[@v1].hit_pks = [@h1, @h3] @db[:vocalists_hits].filter([:first, :last]=>[@v1]).select_order_map([:year, :week]).must_equal [@h1, @h3] Vocalist[@v1].hit_pks = [] @db[:vocalists_hits].filter([:first, :last]=>[@v1]).select_order_map([:year, :week]).must_equal [] @db[:vocalists_hits].filter([:first, :last]=>[@v2]).select_order_map([:year, :week]).must_equal [@h2] Vocalist[@v2].hit_pks = [@h1, @h2] @db[:vocalists_hits].filter([:first, :last]=>[@v2]).select_order_map([:year, :week]).must_equal [@h1, @h2] Vocalist[@v2].hit_pks = [] @db[:vocalists_hits].filter([:first, :last]=>[@v1]).select_order_map([:year, :week]).must_equal [] @db[:vocalists_hits].filter([:first, :last]=>[@v3]).select_order_map([:year, :week]).must_equal [] Vocalist[@v3].hit_pks = [@h1, @h3] @db[:vocalists_hits].filter([:first, :last]=>[@v3]).select_order_map([:year, :week]).must_equal [@h1, @h3] Vocalist[@v3].hit_pks = [] @db[:vocalists_hits].filter([:first, :last]=>[@v1]).select_order_map([:year, :week]).must_equal [] end end describe "List plugin without a scope" do before(:all) do @db = DB @db.create_table!(:sites) do primary_key :id String :name Integer :position end @c = Class.new(Sequel::Model(@db[:sites])) @c.plugin :list end before do @c.dataset.delete @c.create :name => "abc" @c.create :name => "def" @c.create :name => "hig" end after(:all) do @db.drop_table?(:sites) end it "should return rows in order of position" do @c.map(:position).must_equal [1,2,3] @c.map(:name).must_equal %w[ abc def hig ] end it "should define prev and next" do i = @c[:name => "abc"] i.prev.must_be_nil i = @c[:name => "def"] i.prev.must_equal @c[:name => "abc"] i.next.must_equal @c[:name => "hig"] i = @c[:name => "hig"] i.next.must_be_nil end it "should define move_to" do @c[:name => 
"def"].move_to(1) @c.map(:name).must_equal %w[ def abc hig ] @c[:name => "abc"].move_to(3) @c.map(:name).must_equal %w[ def hig abc ] @c[:name => "abc"].move_to(-1) @c.map(:name).must_equal %w[ abc def hig ] @c[:name => "abc"].move_to(10) @c.map(:name).must_equal %w[ def hig abc ] end it "should define move_to_top and move_to_bottom" do @c[:name => "def"].move_to_top @c.map(:name).must_equal %w[ def abc hig ] @c[:name => "def"].move_to_bottom @c.map(:name).must_equal %w[ abc hig def ] end it "should define move_up and move_down" do @c[:name => "def"].move_up @c.map(:name).must_equal %w[ def abc hig ] @c[:name => "abc"].move_down @c.map(:name).must_equal %w[ def hig abc ] @c[:name => "abc"].move_up(2) @c.map(:name).must_equal %w[ abc def hig ] @c[:name => "abc"].move_down(2) @c.map(:name).must_equal %w[ def hig abc ] @c[:name => "abc"].move_up(10) @c.map(:name).must_equal %w[ abc def hig ] @c[:name => "abc"].move_down(10) @c.map(:name).must_equal %w[ def hig abc ] end it "should update positions on destroy" do @c[:name => "def"].destroy @c.select_map([:position, :name]).must_equal [[1, 'abc'], [2, 'hig']] end end describe "List plugin with a scope" do before(:all) do @db = DB @db.create_table!(:pages) do primary_key :id String :name Integer :pos Integer :parent_id end @c = Class.new(Sequel::Model(@db[:pages])) @c.plugin :list, :field => :pos, :scope => :parent_id end before do @c.dataset.delete p1 = @c.create :name => "Hm", :parent_id => 0 p2 = @c.create :name => "Ps", :parent_id => p1.id @c.create :name => "P1", :parent_id => p2.id @c.create :name => "P2", :parent_id => p2.id @c.create :name => "P3", :parent_id => p2.id @c.create :name => "Au", :parent_id => p1.id end after(:all) do @db.drop_table?(:pages) end it "should return rows in order of position" do @c.map(:name).must_equal %w[ Hm Ps Au P1 P2 P3 ] end it "should define prev and next" do @c[:name => "Ps"].next.name.must_equal 'Au' @c[:name => "Au"].prev.name.must_equal 'Ps' @c[:name => "P1"].next.name.must_equal 'P2' @c[:name => "P2"].prev.name.must_equal 'P1' @c[:name => "P1"].next(2).name.must_equal 'P3' @c[:name => "P2"].next(-1).name.must_equal 'P1' @c[:name => "P3"].prev(2).name.must_equal 'P1' @c[:name => "P2"].prev(-1).name.must_equal 'P3' @c[:name => "Ps"].prev.must_be_nil @c[:name => "Au"].next.must_be_nil @c[:name => "P1"].prev.must_be_nil @c[:name => "P3"].next.must_be_nil end it "should define move_to" do @c[:name => "P2"].move_to(1) @c.map(:name).must_equal %w[ Hm Ps Au P2 P1 P3 ] @c[:name => "P2"].move_to(3) @c.map(:name).must_equal %w[ Hm Ps Au P1 P3 P2 ] @c[:name => "P2"].move_to(-1) @c.map(:name).must_equal %w[ Hm Ps Au P2 P1 P3 ] @c[:name => "P2"].move_to(10) @c.map(:name).must_equal %w[ Hm Ps Au P1 P3 P2 ] end it "should define move_to_top and move_to_bottom" do @c[:name => "Au"].move_to_top @c.map(:name).must_equal %w[ Hm Au Ps P1 P2 P3 ] @c[:name => "Au"].move_to_bottom @c.map(:name).must_equal %w[ Hm Ps Au P1 P2 P3 ] end it "should define move_up and move_down" do @c[:name => "P2"].move_up @c.map(:name).must_equal %w[ Hm Ps Au P2 P1 P3 ] @c[:name => "P1"].move_down @c.map(:name).must_equal %w[ Hm Ps Au P2 P3 P1 ] @c[:name => "P1"].move_up(10) @c.map(:name).must_equal %w[ Hm Ps Au P1 P2 P3 ] @c[:name => "P1"].move_down(10) @c.map(:name).must_equal %w[ Hm Ps Au P2 P3 P1 ] end it "should update positions on destroy" do @c[:name => "P2"].destroy @c.select_order_map([:pos, :name]).must_equal [[1, "Hm"], [1, "P1"], [1, "Ps"], [2, "Au"], [2, "P3"]] end end describe "Sequel::Plugins::Tree" do tree_plugin_specs = 
Module.new do extend Minitest::Spec::DSL it "should instantiate" do @Node.all.size.must_equal 12 end it "should find all descendants of a node" do @Node.find(:name => 'two').descendants.map{|m| m.name}.must_equal %w'two.one two.two two.three two.two.one' end it "should find all ancestors of a node" do @Node.find(:name => "two.two.one").ancestors.map{|m| m.name}.must_equal %w'two.two two' end it "should find all siblings of a node, excepting self" do @Node.find(:name=>"two.one").siblings.map{|m| m.name}.must_equal %w'two.two two.three' end it "should find all siblings of a node, including self" do @Node.find(:name=>"two.one").self_and_siblings.map{|m| m.name}.must_equal %w'two.one two.two two.three' end it "should find siblings for root nodes" do @Node.find(:name=>'three').self_and_siblings.map{|m| m.name}.must_equal %w'one two three four five' end it "should find correct root for a node" do @Node.find(:name=>"two.two.one").root.name.must_equal 'two' @Node.find(:name=>"three").root.name.must_equal 'three' @Node.find(:name=>"five.one").root.name.must_equal 'five' end it "iterate top-level nodes in order" do @Node.roots_dataset.count.must_equal 5 @Node.roots.map(&:name).must_equal %w'one two three four five' @Node.where(:name=>%w'one two.one').roots_dataset.count.must_equal 1 @Node.where(:name=>%w'one two.one').roots.map(&:name).must_equal %w'one' end it "should have children" do @Node.find(:name=>'one').children.map{|m| m.name}.must_equal %w'one.one one.two' end end describe "with simple key" do before(:all) do @db = DB @db.create_table!(:nodes) do Integer :id, :primary_key=>true String :name Integer :parent_id Integer :position end @nodes = [{:id => 1, :name => 'one', :parent_id => nil, :position => 1}, {:id => 2, :name => 'two', :parent_id => nil, :position => 2}, {:id => 3, :name => 'three', :parent_id => nil, :position => 3}, {:id => 4, :name => "two.one", :parent_id => 2, :position => 1}, {:id => 5, :name => "two.two", :parent_id => 2, :position => 2}, {:id => 6, :name => "two.two.one", :parent_id => 5, :position => 1}, {:id => 7, :name => "one.two", :parent_id => 1, :position => 2}, {:id => 8, :name => "one.one", :parent_id => 1, :position => 1}, {:id => 9, :name => "five", :parent_id => nil, :position => 5}, {:id => 10, :name => "four", :parent_id => nil, :position => 4}, {:id => 11, :name => "five.one", :parent_id => 9, :position => 1}, {:id => 12, :name => "two.three", :parent_id => 2, :position => 3}] @nodes.each{|node| @db[:nodes].insert(node)} @Node = Class.new(Sequel::Model(:nodes)) @Node.plugin :tree, :order=>:position end after(:all) do @db.drop_table?(:nodes) end include tree_plugin_specs end describe "with composite key" do before(:all) do @db = DB @db.create_table!(:nodes) do Integer :id Integer :id2 String :name Integer :parent_id Integer :parent_id2 Integer :position primary_key [:id, :id2] end @nodes = [{:id => 1, :id2=> 1, :name => 'one', :parent_id => nil, :parent_id2 => nil, :position => 1}, {:id => 2, :id2=> 1, :name => 'two', :parent_id => nil, :parent_id2 => nil, :position => 2}, {:id => 1, :id2=> 2, :name => 'three', :parent_id => nil, :parent_id2 => nil, :position => 3}, {:id => 2, :id2=> 2, :name => "two.one", :parent_id => 2, :parent_id2 => 1, :position => 1}, {:id => 3, :id2=> 1, :name => "two.two", :parent_id => 2, :parent_id2 => 1, :position => 2}, {:id => 3, :id2=> 2, :name => "two.two.one", :parent_id => 3, :parent_id2 => 1, :position => 1}, {:id => 3, :id2=> 3, :name => "one.two", :parent_id => 1, :parent_id2 => 1, :position => 2}, {:id => 1, :id2=> 3, 
:name => "one.one", :parent_id => 1, :parent_id2 => 1, :position => 1}, {:id => 2, :id2=> 3, :name => "five", :parent_id => nil, :parent_id2 => nil, :position => 5}, {:id => 4, :id2=> 1, :name => "four", :parent_id => nil, :parent_id2 => nil, :position => 4}, {:id => 1, :id2=> 4, :name => "five.one", :parent_id => 2, :parent_id2 => 3, :position => 1}, {:id => 2, :id2=> 4, :name => "two.three", :parent_id => 2, :parent_id2 => 1, :position => 3}] @nodes.each{|node| @db[:nodes].insert(node)} @Node = Class.new(Sequel::Model(:nodes)) @Node.plugin :tree, :order=>:position, :key=>[:parent_id, :parent_id2] end after(:all) do @db.drop_table?(:nodes) end include tree_plugin_specs end end describe "Sequel::Plugins::UpdateRefresh" do before(:all) do @db = DB @db.create_table!(:tests) do primary_key :id String :name Integer :i end @c = Class.new(Sequel::Model(@db[:tests])) @c.plugin :update_refresh end before do @c.dataset.delete @foo = @c.create(:name=>'foo', :i=>10) end after(:all) do @db.drop_table?(:tests) end it "should refresh when updating" do @foo.this.update(:i=>20) @foo.update(:name=>'bar') @foo.name.must_equal 'bar' @foo.i.must_equal 20 end end describe "Sequel::Plugins::PreparedStatements" do before(:all) do @db = DB @db.create_table!(:ps_test) do primary_key :id String :name Integer :i end @c = Class.new(Sequel::Model(@db[:ps_test])) end before do @c.dataset.delete @foo = @c.create(:name=>'foo', :i=>10) @bar = @c.create(:name=>'bar', :i=>20) end after(:all) do @db.drop_table?(:ps_test) end it "should work with looking up using Model.[]" do @c[@foo.id].must_equal @foo @c[@bar.id].must_equal @bar @c[0].must_be_nil @c[nil].must_be_nil end it "should work with looking up using Dataset#with_pk" do @c.dataset.with_pk(@foo.id).must_equal @foo @c.dataset.with_pk(@bar.id).must_equal @bar @c.dataset.with_pk(0).must_be_nil @c.dataset.with_pk(nil).must_be_nil @c.dataset.filter(:i=>0).with_pk(@foo.id).must_be_nil @c.dataset.filter(:i=>10).with_pk(@foo.id).must_equal @foo @c.dataset.filter(:i=>20).with_pk(@bar.id).must_equal @bar @c.dataset.filter(:i=>10).with_pk(nil).must_be_nil @c.dataset.filter(:name=>'foo').with_pk(@foo.id).must_equal @foo @c.dataset.filter(:name=>'bar').with_pk(@bar.id).must_equal @bar @c.dataset.filter(:name=>'baz').with_pk(@bar.id).must_be_nil @c.dataset.filter(:name=>'bar').with_pk(nil).must_be_nil end it "should work with Model#destroy" do @foo.destroy @bar.destroy @c[@foo.id].must_be_nil @c[@bar.id].must_be_nil end it "should work with Model#update" do @foo.update(:name=>'foo2', :i=>30) @c[@foo.id].must_equal @c.load(:id=>@foo.id, :name=>'foo2', :i=>30) @foo.update(:name=>'foo3') @c[@foo.id].must_equal @c.load(:id=>@foo.id, :name=>'foo3', :i=>30) @foo.update(:i=>40) @c[@foo.id].must_equal @c.load(:id=>@foo.id, :name=>'foo3', :i=>40) @foo.update(:i=>nil) @c[@foo.id].must_equal @c.load(:id=>@foo.id, :name=>'foo3', :i=>nil) end it "should work with Model#create" do o = @c.create(:name=>'foo2', :i=>30) @c[o.id].must_equal @c.load(:id=>o.id, :name=>'foo2', :i=>30) o = @c.create(:name=>'foo2') @c[o.id].must_equal @c.load(:id=>o.id, :name=>'foo2', :i=>nil) o = @c.create(:i=>30) @c[o.id].must_equal @c.load(:id=>o.id, :name=>nil, :i=>30) o = @c.create(:name=>nil, :i=>40) @c[o.id].must_equal @c.load(:id=>o.id, :name=>nil, :i=>40) end end describe "Sequel::Plugins::PreparedStatements with schema changes" do before do @db = DB @db.create_table!(:ps_test) do primary_key :id String :name end @c = Class.new(Sequel::Model(@db[:ps_test])) @c.many_to_one :ps_test, :key=>:id, :class=>@c 
@c.one_to_many :ps_tests, :key=>:id, :class=>@c @c.many_to_many :mps_tests, :left_key=>:id, :right_key=>:id, :class=>@c, :join_table=>Sequel[:ps_test].as(:x) @c.plugin :prepared_statements end after do @db.drop_table?(:ps_test) end it "should handle added columns" do foo = @c.create(:name=>'foo') @c[foo.id].name.must_equal 'foo' foo.ps_test.name.must_equal 'foo' foo.ps_tests.map{|x| x.name}.must_equal %w'foo' foo.mps_tests.map{|x| x.name}.must_equal %w'foo' foo.update(:name=>'foo2') @c[foo.id].name.must_equal 'foo2' foo.delete foo.exists?.must_equal false @db.alter_table(:ps_test){add_column :i, Integer} foo = @c.create(:name=>'foo') @c[foo.id].name.must_equal 'foo' foo.ps_test.name.must_equal 'foo' foo.ps_tests.map{|x| x.name}.must_equal %w'foo' foo.mps_tests.map{|x| x.name}.must_equal %w'foo' foo.update(:name=>'foo2') @c[foo.id].name.must_equal 'foo2' foo.delete foo.exists?.must_equal false end end describe "Caching plugins" do before(:all) do @db = DB @db.drop_table?(:albums, :artists) @db.create_table(:artists) do primary_key :id end @db.create_table(:albums) do primary_key :id foreign_key :artist_id, :artists end @db[:artists].insert @db[:albums].insert(:artist_id=>1) end before do @Album = Class.new(Sequel::Model(@db[:albums])) end after(:all) do @db.drop_table?(:albums, :artists) end caching_plugin_specs = Module.new do extend Minitest::Spec::DSL it "should work with looking up using Model.[]" do @Artist[1].must_be_same_as(@Artist[1]) @Artist[:id=>1].must_equal @Artist[1] @Artist[0].must_be_nil @Artist[nil].must_be_nil end it "should work with lookup up many_to_one associated objects" do a = @Artist[1] @Album.first.artist.must_be_same_as(a) end end describe "caching plugin" do before do @cache_class = Class.new(Hash) do def set(k, v, ttl) self[k] = v end alias get [] end @cache = @cache_class.new @Artist = Class.new(Sequel::Model(@db[:artists])) @Artist.plugin :caching, @cache @Album.many_to_one :artist, :class=>@Artist end include caching_plugin_specs end describe "static_cache plugin" do before do @Artist = Class.new(Sequel::Model(@db[:artists])) @Artist.plugin :static_cache @Album.many_to_one :artist, :class=>@Artist end include caching_plugin_specs it "should have first retrieve correct values" do @Artist.first.must_equal @Artist.load(:id=>1) @Artist.first(1).must_equal [@Artist.load(:id=>1)] @Artist.first(:id=>1).must_equal @Artist.load(:id=>1) @Artist.first{id =~ 1}.must_equal @Artist.load(:id=>1) end end end describe "Sequel::Plugins::AutoValidations" do before(:all) do @db = DB @db.create_table!(:auto_validations_test) do primary_key :id String :s, :size=>50 String :n, :null=>false Integer :u, :null=>false index :u, :unique=>true end end before do @c = Sequel::Model(:auto_validations_test) @c.plugin :auto_validations, :skip_invalid=>true @c.dataset.delete @o = @c.new(:s=>'s', :n=>'n', :u=>1) end after(:all) do @db.drop_table?(:auto_validations_test) end it "should setup type validations automatically" do @o.s = '' @o.u = 'a' @o.valid?.must_equal false @o.errors.must_equal(:u=>["is not a valid integer"]) end it "should setup not_null validations automatically" do @o.u = nil @o.n = '' @o.valid?.must_equal false @o.errors.must_equal(:u=>["is not present"]) end it "should setup presence validations if configured" do @c.plugin :auto_validations, not_null: :presence @o.n = '' @o.valid?.must_equal false @o.errors.must_equal(:n=>["is not present"]) end it "should setup unique validations automatically" do @o.save o = @c.new(:s=>'s', :n=>'n', :u=>1) o.valid?.must_equal false 
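# The uniqueness validation here is derived from the parsed unique index on
# :u, which is why this example is guarded just below with
# DB.supports_index_parsing?.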
o.errors.must_equal(:u=>["is already taken"]) end if DB.supports_index_parsing? it "should setup no null byte validations automatically" do @o.s = "a\0b" @o.valid?.must_equal false @o.errors.must_equal(:s=>["contains a null byte"]) end it "should setup max length validations automatically" do @o.s = "a" * 51 @o.valid?.must_equal false @o.errors.must_equal(:s=>["is longer than 50 characters"]) end cspecify "should setup max value validations for integers automatically", :sqlite, :oracle, [proc{|db| !db.send(:supports_check_constraints?)}, :mysql] do @o.u = 2147483648 @o.valid?.must_equal false @o.errors.must_equal(:u=>["is greater than maximum allowed value"]) end cspecify "should setup min value validations for integers automatically", :sqlite, :oracle, [proc{|db| !db.send(:supports_check_constraints?)}, :mysql] do @o.u = -2147483649 @o.valid?.must_equal false @o.errors.must_equal(:u=>["is less than minimum allowed value"]) end end describe "Sequel::Plugins::ConstraintValidations" do before(:all) do @db = DB @db.extension(:constraint_validations) unless @db.frozen? @db.drop_table?(:sequel_constraint_validations) @db.create_constraint_validations_table @ds = @db[:cv_test] @regexp = regexp = @db.dataset.supports_regexp? @validation_opts = {} opts_proc = proc{@validation_opts} @validate_block = proc do |opts| opts = opts_proc.call presence :pre, opts.merge(:name=>:p) exact_length 5, :exactlen, opts.merge(:name=>:el) min_length 5, :minlen, opts.merge(:name=>:minl) max_length 5, :maxlen, opts.merge(:name=>:maxl) length_range 3..5, :lenrange, opts.merge(:name=>:lr) if regexp format(/^foo\d+/, :form, opts.merge(:name=>:f)) end like 'foo%', :lik, opts.merge(:name=>:l) ilike 'foo%', :ilik, opts.merge(:name=>:il) includes %w'abc def', :inc, opts.merge(:name=>:i) unique :uniq, opts.merge(:name=>:u) max_length 6, :minlen, opts.merge(:name=>:maxl2) operator :<, 'm', :exactlen, opts.merge(:name=>:lt) operator :>=, 5, :num, opts.merge(:name=>:gte) presence [:m1, :m2, :m3], opts.merge(:name=>:pm) end @valid_row = {:pre=>'a', :exactlen=>'12345', :minlen=>'12345', :maxlen=>'12345', :lenrange=>'1234', :lik=>'fooabc', :ilik=>'FooABC', :inc=>'abc', :uniq=>'u', :num=>5, :m1=>'a', :m2=>1, :m3=>'a'} @violations = [ [:pre, [nil, '', ' ']], [:exactlen, [nil, '', '1234', '123456', 'n1234']], [:minlen, [nil, '', '1234']], [:maxlen, [nil, '123456']], [:lenrange, [nil, '', '12', '123456']], [:lik, [nil, '', 'fo', 'fotabc', 'FOOABC']], [:ilik, [nil, '', 'fo', 'fotabc']], [:inc, [nil, '', 'ab', 'abcd']], [:num, [nil, 3, 4]], ] if @regexp @valid_row[:form] = 'foo1' @violations << [:form, [nil, '', 'foo', 'fooa']] end end after(:all) do @db.drop_constraint_validations_table end constraint_validations_specs = Module.new do extend Minitest::Spec::DSL cspecify "should set up constraints that work even outside the model", [proc{|db| !db.mariadb? || db.server_version <= 100200}, :mysql] do @ds.insert(@valid_row) # Test for unique constraint proc{@ds.insert(@valid_row)}.must_raise(Sequel::DatabaseError) @ds.delete @violations.each do |col, vals| try = @valid_row.dup vals += ['1234567'] if col == :minlen vals.each do |val| next if val.nil? 
&& @validation_opts[:allow_nil] next if val == '' && @validation_opts[:allow_nil] && @db.database_type == :oracle try[col] = val proc{@ds.insert(try)}.must_raise(Sequel::DatabaseError) end end try = @valid_row.dup if @validation_opts[:allow_nil] [:m1, :m2, :m3].each do |c| @ds.insert(try.merge(c=>nil)) @ds.delete end @ds.insert(try.merge(:m1=>nil, :m2=>nil)) @ds.delete @ds.insert(try.merge(:m1=>nil, :m3=>nil)) @ds.delete @ds.insert(try.merge(:m2=>nil, :m3=>nil)) @ds.delete @ds.insert(try.merge(:m1=>nil, :m2=>nil, :m3=>nil)) @ds.delete else [:m1, :m2, :m3].each do |c| proc{@ds.insert(try.merge(c=>nil))}.must_raise(Sequel::DatabaseError) end proc{@ds.insert(try.merge(:m1=>nil, :m2=>nil))}.must_raise(Sequel::DatabaseError) proc{@ds.insert(try.merge(:m1=>nil, :m3=>nil))}.must_raise(Sequel::DatabaseError) proc{@ds.insert(try.merge(:m2=>nil, :m3=>nil))}.must_raise(Sequel::DatabaseError) proc{@ds.insert(try.merge(:m1=>nil, :m2=>nil, :m3=>nil))}.must_raise(Sequel::DatabaseError) end unless @db.database_type == :oracle [:m1, :m3].each do |c| proc{@ds.insert(try.merge(c=>''))}.must_raise(Sequel::DatabaseError) end proc{@ds.insert(try.merge(:m1=>'', :m3=>''))}.must_raise(Sequel::DatabaseError) proc{@ds.insert(try.merge(:m1=>'', :m2=>nil))}.must_raise(Sequel::DatabaseError) proc{@ds.insert(try.merge(:m1=>nil, :m3=>''))}.must_raise(Sequel::DatabaseError) proc{@ds.insert(try.merge(:m2=>nil, :m3=>''))}.must_raise(Sequel::DatabaseError) proc{@ds.insert(try.merge(:m1=>'', :m2=>nil, :m3=>''))}.must_raise(Sequel::DatabaseError) end unless @db.database_type == :mysql && @db.mariadb? && @db.server_version >= 10500 # Test for dropping of constraint @db.alter_table(:cv_test){validate{drop :maxl2}} @ds.insert(@valid_row.merge(:minlen=>'1234567')) end end it "should set up automatic validations inside the model" do skip if @db.frozen? c = Class.new(Sequel::Model(@ds)) c.plugin :constraint_validations c.dataset.delete c.create(@valid_row) # Test for unique validation c.new(@valid_row).wont_be :valid? c.dataset.delete @violations.each do |col, vals| try = @valid_row.dup vals.each do |val| next if val.nil? && @validation_opts[:allow_nil] try[col] = val c.new(try).wont_be :valid? end end try = @valid_row.dup if @validation_opts[:allow_nil] [:m1, :m2, :m3].each do |col| c.new(try.merge(col=>nil)).must_be :valid? end c.new(try.merge(:m1=>nil, :m2=>nil)).must_be :valid? c.new(try.merge(:m1=>nil, :m3=>nil)).must_be :valid? c.new(try.merge(:m2=>nil, :m3=>nil)).must_be :valid? c.new(try.merge(:m1=>nil, :m2=>nil, :m3=>nil)).must_be :valid? else [:m1, :m2, :m3].each do |col| c.new(try.merge(col=>nil)).wont_be :valid? end c.new(try.merge(:m1=>nil, :m2=>nil)).wont_be :valid? c.new(try.merge(:m1=>nil, :m3=>nil)).wont_be :valid? c.new(try.merge(:m2=>nil, :m3=>nil)).wont_be :valid? c.new(try.merge(:m1=>nil, :m2=>nil, :m3=>nil)).wont_be :valid? end c.new(try.merge(:m1=>'', :m2=>nil)).wont_be :valid? c.new(try.merge(:m1=>nil, :m3=>'')).wont_be :valid? c.new(try.merge(:m2=>nil, :m3=>'')).wont_be :valid? c.new(try.merge(:m1=>'', :m2=>nil, :m3=>'')).wont_be :valid? [:m1, :m3].each do |col| c.new(try.merge(col=>'')).wont_be :valid? end c.new(try.merge(:m1=>'', :m3=>'')).wont_be :valid? 
c.db.constraint_validations = nil end end describe "via create_table" do before(:all) do @table_block = proc do regexp = @regexp validate_block = @validate_block @db.create_table!(:cv_test) do primary_key :id String :pre String :exactlen String :minlen String :maxlen String :lenrange if regexp String :form end String :lik String :ilik String :inc String :uniq, :null=>false Integer :num String :m1 Integer :m2 String :m3 validate(&validate_block) end end end after(:all) do @db.drop_table?(:cv_test) @db.drop_constraint_validations_for(:table=>:cv_test) end describe "with :allow_nil=>true" do before(:all) do @validation_opts = {:allow_nil=>true} @table_block.call end include constraint_validations_specs end describe "with :allow_nil=>false" do before(:all) do @table_block.call end include constraint_validations_specs end end describe "via alter_table" do before(:all) do @table_block = proc do regexp = @regexp validate_block = @validate_block @db.create_table!(:cv_test) do primary_key :id String :lik String :ilik String :inc String :uniq, :null=>false end @db.alter_table(:cv_test) do add_column :pre, String add_column :exactlen, String add_column :minlen, String add_column :maxlen, String add_column :lenrange, String if regexp add_column :form, String end add_column :num, Integer add_column :m1, String add_column :m2, Integer add_column :m3, String validate(&validate_block) end end end after(:all) do @db.drop_table?(:cv_test) @db.drop_constraint_validations_for(:table=>:cv_test) end describe "with :allow_nil=>true" do before(:all) do @validation_opts = {:allow_nil=>true} @table_block.call end include constraint_validations_specs end describe "with :allow_nil=>false" do before(:all) do @table_block.call end include constraint_validations_specs end end end describe "is_distinct_from extension" do before do @db = DB @db.create_table!(:is_distinct_from) do Integer :a Integer :b Integer :c end @ds = @db[:is_distinct_from].extension(:is_distinct_from) @ds.insert(1, nil, nil) @ds.insert(2, nil, 1) @ds.insert(3, 1, nil) @ds.insert(4, 1, 1) @ds.insert(5, 1, 2) end after do @db.drop_table?(:is_distinct_from) end it "should support is_distinct_from" do @ds.where(Sequel.is_distinct_from(:b, :c)).select_order_map(:a).must_equal [2, 3, 5] @ds.exclude(Sequel.is_distinct_from(:b, :c)).select_order_map(:a).must_equal [1, 4] @ds.where(Sequel.is_distinct_from(nil, nil)).count.must_equal 0 @ds.where(Sequel.is_distinct_from(1, nil)).count.must_equal 5 @ds.where(Sequel.is_distinct_from(nil, 2)).count.must_equal 5 @ds.where(Sequel.is_distinct_from(1, 1)).count.must_equal 0 @ds.where(Sequel.is_distinct_from(1, 2)).count.must_equal 5 @ds.exclude(Sequel.is_distinct_from(nil, nil)).count.must_equal 5 @ds.exclude(Sequel.is_distinct_from(1, nil)).count.must_equal 0 @ds.exclude(Sequel.is_distinct_from(nil, 2)).count.must_equal 0 @ds.exclude(Sequel.is_distinct_from(1, 1)).count.must_equal 5 @ds.exclude(Sequel.is_distinct_from(1, 2)).count.must_equal 0 end end describe "date_arithmetic extension" do asd = begin require 'active_support' require 'active_support/duration' require 'active_support/inflector' require 'active_support/core_ext/string/inflections' true rescue LoadError false end before(:all) do @db = DB @db.extension(:date_arithmetic) unless @db.frozen? skip if @db.database_type == :sqlite && @db.frozen? 
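# SQLite stores timestamps as strings, so timestamp timezone designators are
# turned off just below (and restored in the after(:all) hook), presumably
# to keep the values the @check lambda parses back comparable to the
# expected local Times.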
if @db.database_type == :sqlite @db.use_timestamp_timezones = false end @date = Date.civil(2010, 7, 12) @dt = Time.local(2010, 7, 12) if asd @d0 = ActiveSupport::Duration.new(0, [[:days, 0]]) @d1 = ActiveSupport::Duration.new(1, [[:days, 1]]) @d2 = ActiveSupport::Duration.new(1, [[:years, 1], [:months, 1], [:days, 1], [:minutes, 61], [:seconds, 1]]) end @h0 = {:days=>0} @h1 = {:days=>1, :years=>nil, :hours=>0} @h2 = {:years=>1, :months=>1, :days=>1, :hours=>1, :minutes=>1, :seconds=>1} @a1 = Time.local(2010, 7, 13) @a2 = Time.local(2011, 8, 13, 1, 1, 1) @s1 = Time.local(2010, 7, 11) @s2 = Time.local(2009, 6, 10, 22, 58, 59) @check = lambda do |meth, in_date, in_interval, should| output = @db.get(Sequel.send(meth, in_date, in_interval)) output = Time.parse(output.to_s) unless output.is_a?(Time) || output.is_a?(DateTime) output.year.must_equal should.year output.month.must_equal should.month output.day.must_equal should.day output.hour.must_equal should.hour output.min.must_equal should.min output.sec.must_equal should.sec end end after(:all) do if @db.database_type == :sqlite @db.use_timestamp_timezones = true end end if asd it "be able to use Sequel.date_add to add ActiveSupport::Duration objects to dates and datetimes" do @check.call(:date_add, @date, @d0, @dt) @check.call(:date_add, @date, @d1, @a1) @check.call(:date_add, @date, @d2, @a2) @check.call(:date_add, @dt, @d0, @dt) @check.call(:date_add, @dt, @d1, @a1) @check.call(:date_add, @dt, @d2, @a2) end it "be able to use Sequel.date_sub to subtract ActiveSupport::Duration objects from dates and datetimes" do @check.call(:date_sub, @date, @d0, @dt) @check.call(:date_sub, @date, @d1, @s1) @check.call(:date_sub, @date, @d2, @s2) @check.call(:date_sub, @dt, @d0, @dt) @check.call(:date_sub, @dt, @d1, @s1) @check.call(:date_sub, @dt, @d2, @s2) end end it "be able to use Sequel.date_add to add interval hashes to dates and datetimes" do @check.call(:date_add, @date, @h0, @dt) @check.call(:date_add, @date, @h1, @a1) @check.call(:date_add, @date, @h2, @a2) @check.call(:date_add, @dt, @h0, @dt) @check.call(:date_add, @dt, @h1, @a1) @check.call(:date_add, @dt, @h2, @a2) end it "be able to use Sequel.date_sub to subtract interval hashes from dates and datetimes" do @check.call(:date_sub, @date, @h0, @dt) @check.call(:date_sub, @date, @h1, @s1) @check.call(:date_sub, @date, @h2, @s2) @check.call(:date_sub, @dt, @h0, @dt) @check.call(:date_sub, @dt, @h1, @s1) @check.call(:date_sub, @dt, @h2, @s2) end it "be able to use expressions as interval values" do zero = Sequel[1] - 1 one = Sequel[0] + 1 h0 = {:days=>zero} h1 = {:days=>one, :years=>nil, :hours=>zero} h2 = {:years=>one, :months=>one, :days=>one, :hours=>one, :minutes=>one, :seconds=>one} @check.call(:date_add, @date, h0, @dt) @check.call(:date_add, @date, h1, @a1) @check.call(:date_add, @date, h2, @a2) @check.call(:date_add, @dt, h0, @dt) @check.call(:date_add, @dt, h1, @a1) @check.call(:date_add, @dt, h2, @a2) @check.call(:date_sub, @date, h0, @dt) @check.call(:date_sub, @date, h1, @s1) @check.call(:date_sub, @date, h2, @s2) @check.call(:date_sub, @dt, h0, @dt) @check.call(:date_sub, @dt, h1, @s1) @check.call(:date_sub, @dt, h2, @s2) end if DB.database_type == :postgres && DB.server_version >= 90400 end describe "string_agg extension" do before(:all) do @db = DB @db.extension(:string_agg) unless @db.frozen? 
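# Sequel.string_agg(:s) aggregates grouped values into a single delimited
# string (comma-separated by default); on PostgreSQL, for example, it
# emits something like string_agg(s, ','). The exact SQL is
# adapter-specific, which is why several databases are excluded via
# cspecify below.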
@db.create_table!(:string_agg_test) do Integer :id String :s Integer :o end @db[:string_agg_test].import([:id, :s, :o], [[1, 'a', 3], [1, 'a', 3], [1, 'b', 5], [1, 'c', 4], [2, 'aa', 2], [2, 'bb', 1]]) @ds = @db[:string_agg_test].select_group(:id).order(:id) end after(:all) do @db.drop_table?(:string_agg_test) end cspecify "should have string_agg return aggregated concatenation", :mssql, :sqlite, :derby do h = @ds.select_append(Sequel.string_agg(:s).as(:v)).to_hash(:id, :v) h[1].must_match(/\A[abc],[abc],[abc],[abc]\z/) h[2].must_match(/\A(aa|bb),(aa|bb)\z/) @ds.select_append(Sequel.string_agg(:s).order(:o).as(:v)).map([:id, :v]).must_equal [[1, 'a,a,c,b'], [2, 'bb,aa']] @ds.select_append(Sequel.string_agg(:s, '-').order(:o).as(:v)).map([:id, :v]).must_equal [[1, 'a-a-c-b'], [2, 'bb-aa']] end cspecify "should have string_agg return aggregated concatenation for distinct values", :mssql, :sqlite, :oracle, :db2, :derby do @ds.select_group(:id).select_append(Sequel.string_agg(:s).order(:s).distinct.as(:v)).map([:id, :v]).must_equal [[1, 'a,b,c'], [2, 'aa,bb']] end end if (DB.database_type != :postgres || DB.server_version >= 90000) describe "insert_conflict plugin" do before(:all) do @db = DB @db.create_table!(:ic_test) do primary_key :id String :s, :unique=>true Integer :o end @model = Class.new(Sequel::Model) @model.set_dataset @db[:ic_test] @model.plugin :insert_conflict end after(:all) do @db.drop_table?(:ic_test) end it "should allow Model#insert_conflict to work" do ic_opts = {:target=>:s, :update => {:o => Sequel[:excluded][:o]}} @model.new(:s=>'A', :o=>1).insert_conflict(ic_opts).save @model.select_order_map([:s, :o]).must_equal [['A', 1]] @model.new(:s=>'A', :o=>2).insert_conflict(ic_opts).save @model.select_order_map([:s, :o]).must_equal [['A', 2]] @model.new(:s=>'B', :o=>3).insert_conflict(ic_opts).save @model.select_order_map([:s, :o]).must_equal [['A', 2], ['B', 3]] end end if (DB.database_type == :postgres && DB.server_version >= 90500) || (DB.database_type == :sqlite && DB.sqlite_version >= 32400) describe "column_encryption plugin" do before(:all) do @db = DB @db.create_table!(:ce_test) do primary_key :id String :not_enc String :enc end end before do @model = Class.new(Sequel::Model) @model.set_dataset @db[:ce_test] @model.plugin :column_encryption do |enc| enc.key 0, "0"*32 enc.column :enc end @obj = @model.create(:not_enc=>'123', :enc=>'Abc') end after do @db[:ce_test].delete end after(:all) do @db.drop_table?(:ce_test) end it "should store columns encrypted" do @obj.not_enc.must_equal '123' @obj[:not_enc].must_equal '123' @obj.enc.must_equal 'Abc' @obj[:enc].start_with?('AAAA').must_equal true end it "should support searching encrypted columns" do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end @model.with_encrypted_value(:enc, 'Abc').must_be_empty @obj.reencrypt @model.with_encrypted_value(:enc, 'Abc').all.must_equal [@obj] @model.with_encrypted_value(:enc, 'abc').must_be_empty end it "should support case insensitive searching encrypted columns" do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive end @model.with_encrypted_value(:enc, 'Abc').must_be_empty @obj.reencrypt @model.with_encrypted_value(:enc, 'Abc').all.must_equal [@obj] @model.with_encrypted_value(:enc, 'abc').all.must_equal [@obj] @model.with_encrypted_value(:enc, 'Abd').must_be_empty end it "should support searching columns encrypted with previous keys" do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end 
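# Reencrypting below rewrites the stored ciphertext (including its
# searchable prefix) under the currently configured key, before key 1 is
# added to the configuration.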
@obj.reencrypt @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true do |cenc| cenc.key 1, "1"*32 cenc.key 0, "0"*32 end end obj = @model.with_encrypted_value(:enc, 'Abc').first @model.with_encrypted_value(:enc, 'abc').must_be_empty obj.must_equal @obj obj[:enc].start_with?('AQAA').must_equal true obj.reencrypt obj = @model.with_encrypted_value(:enc, 'Abc').first @model.with_encrypted_value(:enc, 'abc').must_be_empty obj.wont_equal @obj obj.id.must_equal @obj.id obj.enc.must_equal 'Abc' obj[:enc].start_with?('AQAB').must_equal true end it "should support case insensitive searching columns encrypted with previous keys" do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive end @obj.reencrypt @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive do |cenc| cenc.key 1, "1"*32 cenc.key 0, "0"*32 end end obj = @model.with_encrypted_value(:enc, 'Abc').first @model.with_encrypted_value(:enc, 'abc').all.must_equal [obj] obj.must_equal @obj obj[:enc].start_with?('AgAA').must_equal true obj.reencrypt obj = @model.with_encrypted_value(:enc, 'Abc').first @model.with_encrypted_value(:enc, 'abc').all.must_equal [obj] obj.wont_equal @obj obj.id.must_equal @obj.id obj.enc.must_equal 'Abc' obj[:enc].start_with?('AgAB').must_equal true @model.with_encrypted_value(:enc, 'Abd').must_be_empty end it "should support searching columns encrypted with previous keys and different case sensitivity setting" do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end @obj.reencrypt obj2 = @model.create(:not_enc=>'234', :enc=>'Def') @model.plugin :column_encryption do |enc| enc.key 1, "1"*32 enc.key 0, "0"*32 enc.column :enc, :searchable=>:case_insensitive end @model.with_encrypted_value(:enc, 'Abc').must_be_empty @model.with_encrypted_value(:enc, 'Def').must_be_empty @model.plugin :column_encryption do |enc| enc.key 2, "2"*32 enc.key 1, "1"*32 enc.key 0, "0"*32 enc.column :enc, :searchable=>:case_insensitive, :search_both=>true end @model.with_encrypted_value(:enc, 'abc').must_be_empty obj = @model.with_encrypted_value(:enc, 'Abc').first obj.reencrypt @model.with_encrypted_value(:enc, 'Abc').all.must_equal [obj] @model.with_encrypted_value(:enc, 'abc').all.must_equal [obj] obj.wont_equal @obj obj.id.must_equal @obj.id obj.enc.must_equal 'Abc' obj[:enc].start_with?('AgAC').must_equal true @model.with_encrypted_value(:enc, 'Def').all.must_equal [obj2] @model.with_encrypted_value(:enc, 'Abd').must_be_empty @model.plugin :column_encryption do |enc| enc.key 3, "3"*32 enc.key 2, "2"*32 enc.key 1, "1"*32 enc.key 0, "0"*32 enc.column :enc, :searchable=>true, :search_both=>true end obj = @model.with_encrypted_value(:enc, 'Abc').first @model.with_encrypted_value(:enc, 'abc').all.must_equal [obj] obj.reencrypt obj = @model.with_encrypted_value(:enc, 'Abc').first @model.with_encrypted_value(:enc, 'abc').must_be_empty obj.wont_equal @obj obj.id.must_equal @obj.id obj.enc.must_equal 'Abc' obj[:enc].start_with?('AQAD').must_equal true @model.with_encrypted_value(:enc, 'Def').all.must_equal [obj2] @model.with_encrypted_value(:enc, 'Abd').must_be_empty end it "should not return searching encrypted columns with NULL values" do @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end @obj.update(:enc=>nil) @obj.reencrypt.must_be_nil @model.with_encrypted_value(:enc, 'Abc').must_be_empty @model.with_encrypted_value(:enc, 'abc').must_be_empty end it "should raise an error when trying to decrypt 
with missing key" do @model.plugin :column_encryption do |enc| enc.key 1, "1"*32 enc.column :enc, :searchable=>true end obj = @model.first proc{obj.enc}.must_raise Sequel::Error end it "should raise an error when trying to decrypt with invalid key" do @model.plugin :column_encryption do |enc| enc.key 0, "1"*32 enc.column :enc, :searchable=>true end obj = @model.first proc{obj.enc}.must_raise end it "should raise an error when trying to decrypt with invalid auth data" do @model.plugin :column_encryption do |enc| enc.key 0, "0"*32, :auth_data=>'Foo' enc.column :enc, :searchable=>true end obj = @model.first proc{obj.enc}.must_raise Sequel::Error end it "should support a configurable amount of padding" do @model.plugin :column_encryption do |enc| enc.key 1, "0"*32, :padding=>110 enc.key 0, "0"*32 enc.column :enc end encrypt_len = @obj[:enc].bytesize @obj.reencrypt @obj[:enc].bytesize.must_be(:>, encrypt_len + 100) end it "should support not using padding" do @model.plugin :column_encryption do |enc| enc.key 1, "0"*32, :padding=>false enc.key 0, "0"*32 enc.column :enc end encrypt_len = @obj[:enc].bytesize @obj.reencrypt @obj[:enc].bytesize.must_be(:<, encrypt_len) end it "should support reencrypting rows that need reencryption" do obj = @model.create(:not_enc=>'234', :enc=>'Def') @model.plugin :column_encryption do |enc| enc.key 1, "1"*32 enc.key 0, "0"*32 enc.column :enc end @model.needing_reencryption.count.must_equal 2 @model.needing_reencryption.all(&:reencrypt) @model.needing_reencryption.must_be_empty @model.plugin :column_encryption do |enc| enc.key 1, "1"*32 enc.column :enc end @model.needing_reencryption.must_be_empty @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end @model.needing_reencryption.count.must_equal 2 @model.needing_reencryption.all(&:reencrypt) @model.needing_reencryption.must_be_empty @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive end @model.needing_reencryption.count.must_equal 2 obj.refresh.reencrypt @model.needing_reencryption.count.must_equal 1 @obj.refresh.reencrypt @model.needing_reencryption.must_be_empty @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive enc.column :not_enc, :searchable=>true end @obj.values.delete(:not_enc) obj.values.delete(:not_enc) @obj.update(:enc=>nil, :not_enc=>'abc') obj.set(:not_enc=>nil).save @model.needing_reencryption.must_be_empty @model.plugin :column_encryption do |enc| enc.key 2, "2"*32 enc.key 1, "1"*32 enc.column :enc, :searchable=>:case_insensitive enc.column :not_enc, :searchable=>true end @model.needing_reencryption.count.must_equal 2 obj.refresh.reencrypt @model.needing_reencryption.count.must_equal 1 @obj.refresh.reencrypt @model.needing_reencryption.must_be_empty @obj.update(:not_enc=>nil) obj.update(:enc=>nil) @model.needing_reencryption.must_be_empty end it "should support encrypted columns with a registered serialization format" do require 'json' @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true, :format=>:json end @model.dataset.delete obj = @model.create(:not_enc=>'123', :enc=>{'a'=>1}) @model[obj.id].enc['a'].must_equal 1 @model.with_encrypted_value(:enc, 'a'=>1).all.must_equal [obj] @model.with_encrypted_value(:enc, 'a'=>2).must_be_empty @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true, :format=>:json do |cenc| cenc.key 1, "1"*32 cenc.key 0, "0"*32 end end @model[obj.id].enc['a'].must_equal 1 @model.with_encrypted_value(:enc, 'a'=>1).all.must_equal 
[obj] @model.with_encrypted_value(:enc, 'a'=>2).must_be_empty obj.reencrypt @model[obj.id].enc['a'].must_equal 1 @model.with_encrypted_value(:enc, 'a'=>1).all.must_equal [obj] @model.with_encrypted_value(:enc, 'a'=>2).must_be_empty end it "should support encrypted columns with a custom serialization format" do require 'json' @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true, :format=>[:to_json.to_proc, JSON.method(:parse)] end @model.dataset.delete obj = @model.create(:not_enc=>'123', :enc=>{'a'=>1}) @model[obj.id].enc['a'].must_equal 1 @model.with_encrypted_value(:enc, 'a'=>1).all.must_equal [obj] @model.with_encrypted_value(:enc, 'a'=>2).must_be_empty @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true, :format=>[:to_json.to_proc, JSON.method(:parse)] do |cenc| cenc.key 1, "1"*32 cenc.key 0, "0"*32 end end @model[obj.id].enc['a'].must_equal 1 @model.with_encrypted_value(:enc, 'a'=>1).all.must_equal [obj] @model.with_encrypted_value(:enc, 'a'=>2).must_be_empty obj.reencrypt @model[obj.id].enc['a'].must_equal 1 @model.with_encrypted_value(:enc, 'a'=>1).all.must_equal [obj] @model.with_encrypted_value(:enc, 'a'=>2).must_be_empty obj = @model.create(:not_enc=>'123', :enc=>nil) @model[obj.id].enc.must_be_nil end it "should support unique indexes on searchable value" do begin DB.add_index(:ce_test, Sequel.function(:substring, :enc, 0, 48), :unique=>true, :name=>:ce_enc_idx) @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end obj = @model.create(:enc=>"DEF") proc{@model.create(:enc=>"DEF")}.must_raise Sequel::UniqueConstraintViolation @model.create(:enc=>"def").delete @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive end obj.reencrypt proc{@model.create(:enc=>"DEF")}.must_raise Sequel::UniqueConstraintViolation proc{@model.create(:enc=>"def")}.must_raise Sequel::UniqueConstraintViolation ensure DB.drop_index(:ce_test, :enc, :name=>:ce_enc_idx) rescue nil end end if DB.database_type == :postgres it "should support CHECK constraint on column" do begin DB.alter_table(:ce_test) do c = Sequel[:enc] add_constraint(:enc_format, c.like('AA__A%') | c.like('Ag__A%') | c.like('AQ__A%')) add_constraint(:enc_length, Sequel.char_length(c) >= 88) end @model.create(:enc=>"def") proc{@model.insert(:enc=>"def")}.must_raise Sequel::CheckConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end @model.needing_reencryption.all(&:reencrypt).size.must_equal 2 @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive end @model.needing_reencryption.all(&:reencrypt).size.must_equal 2 ensure DB.alter_table(:ce_test) do drop_constraint(:enc_length, :type=>:check) drop_constraint(:enc_format, :type=>:check) end end end unless DB.database_type == :mysql && DB.server_version < (DB.mariadb? ? 
100201 : 80016) it "should support CHECK constraint on column enforcing urlsafe base64 of sufficient length" do begin DB.alter_table(:ce_test) do add_constraint(:enc_base64){octet_length(decode(regexp_replace(regexp_replace(:enc, '_', '/', 'g'), '-', '+', 'g'), 'base64')) >= 65} end @model.create(:enc=>"def") proc{@model.insert(:enc=>"def")}.must_raise Sequel::CheckConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>true end @model.needing_reencryption.all(&:reencrypt).size.must_equal 2 @model.plugin :column_encryption do |enc| enc.column :enc, :searchable=>:case_insensitive end @model.needing_reencryption.all(&:reencrypt).size.must_equal 2 ensure DB.alter_table(:ce_test) do drop_constraint(:enc_base64, :type=>:check) end end end if DB.database_type == :postgres end if RUBY_VERSION >= '2.3' && (begin; require 'sequel/plugins/column_encryption'; true; rescue LoadError; false end)
sequel-5.63.0/spec/integration/prepared_statement_test.rb
require_relative "spec_helper" describe "Prepared Statements and Bound Arguments" do before do @db = DB @db.create_table!(:items) do primary_key :id integer :numb end @c = Class.new(Sequel::Model(:items)) @ds = @db[:items] @ds.insert(:numb=>10) @pr = @ds.requires_placeholder_type_specifiers? ?
proc{|i| :"#{i}__integer"} : proc{|i| i} end after do @db.drop_table?(:items) end it "should support bound variables when selecting" do @ds.filter(:numb=>:$n).call(:each, :n=>10){|h| h.must_equal(:id=>1, :numb=>10)} @ds.filter(:numb=>:$n).call(:select, :n=>10).must_equal [{:id=>1, :numb=>10}] @ds.filter(:numb=>:$n).call(:all, :n=>10).must_equal [{:id=>1, :numb=>10}] @ds.filter(:numb=>:$n).call(:first, :n=>10).must_equal(:id=>1, :numb=>10) @ds.select(:numb).filter(:numb=>:$n).call(:single_value, :n=>10).must_equal(10) @ds.filter(:numb=>:$n).call([:map, :numb], :n=>10).must_equal [10] @ds.filter(:numb=>:$n).call([:as_hash, :id, :numb], :n=>10).must_equal(1=>10) @ds.filter(:numb=>:$n).call([:to_hash, :id, :numb], :n=>10).must_equal(1=>10) @ds.filter(:numb=>:$n).call([:to_hash_groups, :id, :numb], :n=>10).must_equal(1=>[10]) end it "should support blocks for each, select, all, and map when using bound variables" do a = [] @ds.filter(:numb=>:$n).call(:each, :n=>10){|r| r[:numb] *= 2; a << r}; a.must_equal [{:id=>1, :numb=>20}] @ds.filter(:numb=>:$n).call(:select, :n=>10){|r| r[:numb] *= 2}.must_equal [{:id=>1, :numb=>20}] @ds.filter(:numb=>:$n).call(:all, :n=>10){|r| r[:numb] *= 2}.must_equal [{:id=>1, :numb=>20}] @ds.filter(:numb=>:$n).call([:map], :n=>10){|r| r[:numb] * 2}.must_equal [20] end it "should support binding variables before the call with #bind" do @ds.filter(:numb=>:$n).bind(:n=>10).call(:select).must_equal [{:id=>1, :numb=>10}] @ds.filter(:numb=>:$n).bind(:n=>10).call(:all).must_equal [{:id=>1, :numb=>10}] @ds.filter(:numb=>:$n).bind(:n=>10).call(:first).must_equal(:id=>1, :numb=>10) @ds.select(:numb).filter(:numb=>:$n).bind(:n=>10).call(:single_value).must_equal(10) @ds.bind(:n=>10).filter(:numb=>:$n).call(:select).must_equal [{:id=>1, :numb=>10}] @ds.bind(:n=>10).filter(:numb=>:$n).call(:all).must_equal [{:id=>1, :numb=>10}] @ds.bind(:n=>10).filter(:numb=>:$n).call(:first).must_equal(:id=>1, :numb=>10) @ds.bind(:n=>10).select(:numb).filter(:numb=>:$n).call(:single_value).must_equal(10) end it "should allow overriding variables specified with #bind" do @ds.filter(:numb=>:$n).bind(:n=>1).call(:select, :n=>10).must_equal [{:id=>1, :numb=>10}] @ds.filter(:numb=>:$n).bind(:n=>1).call(:all, :n=>10).must_equal [{:id=>1, :numb=>10}] @ds.filter(:numb=>:$n).bind(:n=>1).call(:first, :n=>10).must_equal(:id=>1, :numb=>10) @ds.select(:numb).filter(:numb=>:$n).bind(:n=>1).call(:single_value, :n=>10).must_equal(10) @ds.filter(:numb=>:$n).bind(:n=>1).bind(:n=>10).call(:select).must_equal [{:id=>1, :numb=>10}] @ds.filter(:numb=>:$n).bind(:n=>1).bind(:n=>10).call(:all).must_equal [{:id=>1, :numb=>10}] @ds.filter(:numb=>:$n).bind(:n=>1).bind(:n=>10).call(:first).must_equal(:id=>1, :numb=>10) @ds.select(:numb).filter(:numb=>:$n).bind(:n=>1).bind(:n=>10).call(:single_value).must_equal(10) end it "should support placeholder literal strings with call" do @ds.filter(Sequel.lit("numb = ?", :$n)).call(:select, :n=>10).must_equal [{:id=>1, :numb=>10}] end it "should support named placeholder literal strings and handle multiple named placeholders correctly with call" do @ds.filter(Sequel.lit("numb = :n", :n=>:$n)).call(:select, :n=>10).must_equal [{:id=>1, :numb=>10}] @ds.insert(:numb=>20) @ds.insert(:numb=>30) @ds.filter(Sequel.lit("numb > :n1 AND numb < :n2 AND numb = :n3", :n3=>:$n3, :n2=>:$n2, :n1=>:$n1)).call(:select, :n3=>20, :n2=>30, :n1=>10).must_equal [{:id=>2, :numb=>20}] end it "should support datasets with static sql and placeholders with call" do @db["SELECT * FROM items WHERE numb = ?", 
:$n].call(:select, :n=>10).must_equal [{:id=>1, :numb=>10}] end it "should support subselects with call" do @ds.filter(:id=>:$i).filter(:numb=>@ds.select(:numb).filter(:numb=>:$n)).filter(:id=>:$j).call(:select, :n=>10, :i=>1, :j=>1).must_equal [{:id=>1, :numb=>10}] end it "should support subselects with exists with call" do @ds.filter(:id=>:$i).filter(@ds.select(:numb).filter(:numb=>:$n).exists).filter(:id=>:$j).call(:select, :n=>10, :i=>1, :j=>1).must_equal [{:id=>1, :numb=>10}] end it "should support subselects with literal strings with call" do @ds.filter(:id=>:$i, :numb=>@ds.select(:numb).filter(Sequel.lit("numb = ?", :$n))).call(:select, :n=>10, :i=>1).must_equal [{:id=>1, :numb=>10}] end it "should support subselects with static sql and placeholders with call" do @ds.filter(:id=>:$i, :numb=>@db["SELECT numb FROM items WHERE numb = ?", :$n]).call(:select, :n=>10, :i=>1).must_equal [{:id=>1, :numb=>10}] end it "should support subselects of subselects with call" do @ds.filter(:id=>:$i).filter(:numb=>@ds.select(:numb).filter(:numb=>@ds.select(:numb).filter(:numb=>:$n))).filter(:id=>:$j).call(:select, :n=>10, :i=>1, :j=>1).must_equal [{:id=>1, :numb=>10}] end cspecify "should support using a bound variable for a limit and offset", [:jdbc, :db2] do @ds.insert(:numb=>20) ds = @ds.limit(:$n, :$n2).order(:id) ds.call(:select, :n=>1, :n2=>0).must_equal [{:id=>1, :numb=>10}] ds.call(:select, :n=>1, :n2=>1).must_equal [{:id=>2, :numb=>20}] ds.call(:select, :n=>1, :n2=>2).must_equal [] ds.call(:select, :n=>2, :n2=>0).must_equal [{:id=>1, :numb=>10}, {:id=>2, :numb=>20}] ds.call(:select, :n=>2, :n2=>1).must_equal [{:id=>2, :numb=>20}] end it "should support bound variables with insert" do @ds.call(:insert, {:n=>20}, :numb=>:$n) @ds.count.must_equal 2 @ds.order(:id).map(:numb).must_equal [10, 20] end it "should support bound variables with NULL values" do @ds.delete @ds.call(:insert, {:n=>nil}, :numb=>@pr[:$n]) @ds.count.must_equal 1 @ds.map(:numb).must_equal [nil] end it "should have insert return primary key value when using bound arguments" do @ds.call(:insert, {:n=>20}, :numb=>:$n).must_equal 2 @ds.filter(:id=>2).first[:numb].must_equal 20 end it "should support bound variables with insert_select" do @ds.call(:insert_select, {:n=>20}, :numb=>:$n).must_equal(:id=>2, :numb=>20) @ds.count.must_equal 2 @ds.order(:id).map(:numb).must_equal [10, 20] end if DB.dataset.supports_insert_select? 
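# The RETURNING-based variants below only run where the dataset reports
# support for them, since not all adapters implement
# INSERT/UPDATE/DELETE ... RETURNING.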
it "should support bound variables with insert returning" do @ds.returning.call(:insert, {:n=>20}, :numb=>:$n).must_equal([{:id=>2, :numb=>20}]) @ds.count.must_equal 2 @ds.order(:id).map(:numb).must_equal [10, 20] end if DB.dataset.supports_returning?(:insert) it "should support bound variables with update returning" do @ds.returning.call(:update, {:n=>20}, :numb=>:$n).must_equal([{:id=>1, :numb=>20}]) @ds.count.must_equal 1 @ds.order(:id).map(:numb).must_equal [20] end if DB.dataset.supports_returning?(:update) it "should support bound variables with delete returning" do @ds.where(:id=>:$id).returning.call(:delete, :id=>1).must_equal([{:id=>1, :numb=>10}]) @ds.count.must_equal 0 end if DB.dataset.supports_returning?(:delete) it "should support bound variables with delete" do @ds.filter(:numb=>:$n).call(:delete, :n=>10).must_equal 1 @ds.count.must_equal 0 end it "should support bound variables with update" do @ds.filter(:numb=>:$n).call(:update, {:n=>10, :nn=>20}, :numb=>Sequel.+(:numb, :$nn)).must_equal 1 @ds.all.must_equal [{:id=>1, :numb=>30}] end it "should support prepared statements when selecting" do @ds.filter(:numb=>:$n).prepare(:each, :select_n) @db.call(:select_n, :n=>10){|h| h.must_equal(:id=>1, :numb=>10)} @ds.filter(:numb=>:$n).prepare(:select, :select_n) @db.call(:select_n, :n=>10).must_equal [{:id=>1, :numb=>10}] @ds.filter(:numb=>:$n).prepare(:all, :select_n) @db.call(:select_n, :n=>10).must_equal [{:id=>1, :numb=>10}] @ds.filter(:numb=>:$n).prepare(:first, :select_n) @db.call(:select_n, :n=>10).must_equal(:id=>1, :numb=>10) @ds.select(:numb).filter(:numb=>:$n).prepare(:single_value, :select_n) @db.call(:select_n, :n=>10).must_equal(10) @ds.filter(:numb=>:$n).prepare([:map, :numb], :select_n) @db.call(:select_n, :n=>10).must_equal [10] @ds.filter(:numb=>:$n).prepare([:as_hash, :id, :numb], :select_n) @db.call(:select_n, :n=>10).must_equal(1=>10) @ds.filter(:numb=>:$n).prepare([:to_hash, :id, :numb], :select_n) @db.call(:select_n, :n=>10).must_equal(1=>10) end it "should support blocks for each, select, all, and map when using prepared statements" do a = [] @ds.filter(:numb=>:$n).prepare(:each, :select_n).call(:n=>10){|r| r[:numb] *= 2; a << r}; a.must_equal [{:id=>1, :numb=>20}] a = [] @db.call(:select_n, :n=>10){|r| r[:numb] *= 2; a << r}; a.must_equal [{:id=>1, :numb=>20}] @ds.filter(:numb=>:$n).prepare(:select, :select_n).call(:n=>10){|r| r[:numb] *= 2}.must_equal [{:id=>1, :numb=>20}] @db.call(:select_n, :n=>10){|r| r[:numb] *= 2}.must_equal [{:id=>1, :numb=>20}] @ds.filter(:numb=>:$n).prepare(:all, :select_n).call(:n=>10){|r| r[:numb] *= 2}.must_equal [{:id=>1, :numb=>20}] @db.call(:select_n, :n=>10){|r| r[:numb] *= 2}.must_equal [{:id=>1, :numb=>20}] @ds.filter(:numb=>:$n).prepare([:map], :select_n).call(:n=>10){|r| r[:numb] *= 2}.must_equal [20] @db.call(:select_n, :n=>10){|r| r[:numb] *= 2}.must_equal [20] end it "should support prepared statements being called multiple times with different arguments" do @ds.filter(:numb=>:$n).prepare(:select, :select_n) @db.call(:select_n, :n=>10).must_equal [{:id=>1, :numb=>10}] @db.call(:select_n, :n=>0).must_equal [] @db.call(:select_n, :n=>10).must_equal [{:id=>1, :numb=>10}] end it "should support placeholder literal strings with prepare" do @ds.filter(Sequel.lit("numb = ?", :$n)).prepare(:select, :seq_select).call(:n=>10).must_equal [{:id=>1, :numb=>10}] end it "should support named placeholder literal strings and handle multiple named placeholders correctly with prepare" do @ds.filter(Sequel.lit("numb = :n", 
:n=>:$n)).prepare(:select, :seq_select).call(:n=>10).must_equal [{:id=>1, :numb=>10}] @ds.insert(:numb=>20) @ds.insert(:numb=>30) @ds.filter(Sequel.lit("numb > :n1 AND numb < :n2 AND numb = :n3", :n3=>:$n3, :n2=>:$n2, :n1=>:$n1)).call(:select, :n3=>20, :n2=>30, :n1=>10).must_equal [{:id=>2, :numb=>20}] end it "should support datasets with static sql and placeholders with prepare" do @db["SELECT * FROM items WHERE numb = ?", :$n].prepare(:select, :seq_select).call(:n=>10).must_equal [{:id=>1, :numb=>10}] end it "should support subselects with prepare" do @ds.filter(:id=>:$i).filter(:numb=>@ds.select(:numb).filter(:numb=>:$n)).filter(:id=>:$j).prepare(:select, :seq_select).call(:n=>10, :i=>1, :j=>1).must_equal [{:id=>1, :numb=>10}] end it "should support subselects with exists with prepare" do @ds.filter(:id=>:$i).filter(@ds.select(:numb).filter(:numb=>:$n).exists).filter(:id=>:$j).prepare(:select, :seq_select).call(:n=>10, :i=>1, :j=>1).must_equal [{:id=>1, :numb=>10}] end it "should support subselects with literal strings with prepare" do @ds.filter(:id=>:$i, :numb=>@ds.select(:numb).filter(Sequel.lit("numb = ?", :$n))).prepare(:select, :seq_select).call(:n=>10, :i=>1).must_equal [{:id=>1, :numb=>10}] end it "should support subselects with static sql and placeholders with prepare" do @ds.filter(:id=>:$i, :numb=>@db["SELECT numb FROM items WHERE numb = ?", :$n]).prepare(:select, :seq_select).call(:n=>10, :i=>1).must_equal [{:id=>1, :numb=>10}] end it "should support subselects of subselects with prepare" do @ds.filter(:id=>:$i).filter(:numb=>@ds.select(:numb).filter(:numb=>@ds.select(:numb).filter(:numb=>:$n))).filter(:id=>:$j).prepare(:select, :seq_select).call(:n=>10, :i=>1, :j=>1).must_equal [{:id=>1, :numb=>10}] end cspecify "should support using a prepared_statement for a limit and offset", :db2 do @ds.insert(:numb=>20) ps = @ds.limit(:$n, :$n2).order(:id).prepare(:select, :seq_select) ps.call(:n=>1, :n2=>0).must_equal [{:id=>1, :numb=>10}] ps.call(:n=>1, :n2=>1).must_equal [{:id=>2, :numb=>20}] ps.call(:n=>1, :n2=>2).must_equal [] ps.call(:n=>2, :n2=>0).must_equal [{:id=>1, :numb=>10}, {:id=>2, :numb=>20}] ps.call(:n=>2, :n2=>1).must_equal [{:id=>2, :numb=>20}] end it "should support prepared statements with insert" do @ds.prepare(:insert, :insert_n, :numb=>:$n) @db.call(:insert_n, :n=>20) @ds.count.must_equal 2 @ds.order(:id).map(:numb).must_equal [10, 20] end it "should support prepared statements with NULL values" do @ds.delete @ds.prepare(:insert, :insert_n, :numb=>@pr[:$n]) @db.call(:insert_n, :n=>nil) @ds.count.must_equal 1 @ds.map(:numb).must_equal [nil] end it "should have insert return primary key value when using prepared statements" do @ds.prepare(:insert, :insert_n, :numb=>:$n) @db.call(:insert_n, :n=>20).must_equal 2 @ds.filter(:id=>2).first[:numb].must_equal 20 end it "should support prepared_statements with insert_select" do @ds.prepare(:insert_select, :insert_select_n, :numb=>:$n).call(:n=>20).must_equal(:id=>2, :numb=>20) @ds.count.must_equal 2 @ds.order(:id).map(:numb).must_equal [10, 20] end if DB.dataset.supports_insert_select? 
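# Dataset#prepare both returns the prepared statement object and
# registers it on the Database under the given name, so it can be
# invoked directly via #call or later via @db.call(name, args).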
it "should support bound variables with insert returning" do @ds.returning.prepare(:insert, :insert_rn, :numb=>:$n).call(:n=>20).must_equal([{:id=>2, :numb=>20}]) @ds.count.must_equal 2 @ds.order(:id).map(:numb).must_equal [10, 20] end if DB.dataset.supports_returning?(:insert) it "should support bound variables with update returning" do @ds.returning.prepare(:update, :update_rn, :numb=>:$n).call(:n=>20).must_equal([{:id=>1, :numb=>20}]) @ds.count.must_equal 1 @ds.order(:id).map(:numb).must_equal [20] end if DB.dataset.supports_returning?(:update) it "should support bound variables with delete returning" do @ds.where(:id=>:$id).returning.prepare(:delete, :delete_rn).call(:id=>1).must_equal([{:id=>1, :numb=>10}]) @ds.count.must_equal 0 end if DB.dataset.supports_returning?(:delete) it "should support prepared statements with delete" do @ds.filter(:numb=>:$n).prepare(:delete, :delete_n) @db.call(:delete_n, :n=>10).must_equal 1 @ds.count.must_equal 0 end it "should support prepared statements with update" do @ds.filter(:numb=>:$n).prepare(:update, :update_n, :numb=>Sequel.+(:numb, :$nn)) @db.call(:update_n, :n=>10, :nn=>20).must_equal 1 @ds.all.must_equal [{:id=>1, :numb=>30}] end it "model datasets should return model instances when using select, all, and first with bound variables" do @c.filter(:numb=>:$n).call(:select, :n=>10).must_equal [@c.load(:id=>1, :numb=>10)] @c.filter(:numb=>:$n).call(:all, :n=>10).must_equal [@c.load(:id=>1, :numb=>10)] @c.filter(:numb=>:$n).call(:first, :n=>10).must_equal @c.load(:id=>1, :numb=>10) end it "model datasets should return model instances when using select, all, and first with prepared statements" do @c.filter(:numb=>:$n).prepare(:select, :select_n1) @db.call(:select_n1, :n=>10).must_equal [@c.load(:id=>1, :numb=>10)] @c.filter(:numb=>:$n).prepare(:all, :select_n1) @db.call(:select_n1, :n=>10).must_equal [@c.load(:id=>1, :numb=>10)] @c.filter(:numb=>:$n).prepare(:first, :select_n1) @db.call(:select_n1, :n=>10).must_equal @c.load(:id=>1, :numb=>10) end end describe "Bound Argument Types" do before(:all) do @db = DB @db.create_table!(:items) do primary_key :id Date :d DateTime :dt File :file String :s Time :t Float :f TrueClass :b end @ds = @db[:items] @vs = {:d=>Date.civil(2010, 10, 11), :dt=>DateTime.civil(2010, 10, 12, 13, 14, 15), :f=>1.0, :s=>'str', :t=>Time.at(Time.now.to_i), :file=>Sequel::SQL::Blob.new('blob'), :b=>true} end before do @ds.delete @ds.insert(@vs) end after do Sequel.default_timezone = nil Sequel.datetime_class = Time end after(:all) do @db.drop_table?(:items) end cspecify "should handle date type", [:tinytds], [:jdbc, :mssql], [:jdbc, :sqlite], :oracle do @ds.filter(:d=>:$x).prepare(:first, :ps_date).call(:x=>@vs[:d])[:d].must_equal @vs[:d] end cspecify "should handle datetime type", [:mysql2], [:jdbc, :sqlite], [:tinytds], [:oracle] do Sequel.datetime_class = DateTime @ds.filter(:dt=>:$x).prepare(:first, :ps_datetime).call(:x=>@vs[:dt])[:dt].must_equal @vs[:dt] end cspecify "should handle datetime type with fractional seconds", [:jdbc, :sqlite], [:jdbc, :mysql], [:oracle] do Sequel.datetime_class = DateTime Sequel.default_timezone = :utc fract_time = DateTime.parse('2010-10-12 13:14:15.500000') @ds.prepare(:update, :ps_datetime_up, :dt=>:$x).call(:x=>fract_time) dt = @ds.filter(:dt=>:$x).prepare(:first, :ps_datetime).call(:x=>fract_time)[:dt] @ds.literal(dt).must_equal @ds.literal(fract_time) end cspecify "should handle time type", [:jdbc, :sqlite] do @ds.filter(:t=>:$x).prepare(:first, :ps_time).call(:x=>@vs[:t])[:t].must_equal 
@vs[:t] end cspecify "should handle time type with fractional seconds", [:jdbc, :sqlite], [:jdbc, :mysql] do fract_time = @vs[:t] + 0.5 @ds.prepare(:update, :ps_time_up, :t=>:$x).call(:x=>fract_time) @ds.literal(@ds.filter(:t=>:$x).prepare(:first, :ps_time).call(:x=>fract_time)[:t]).must_equal @ds.literal(fract_time) end cspecify "should handle blob type", [:odbc] do @ds.delete @ds.prepare(:insert, :ps_blob, {:file=>:$x}).call(:x=>@vs[:file]) @ds.get(:file).must_equal @vs[:file] end cspecify "should handle blob type with special characters", [:odbc] do @ds.delete blob = Sequel.blob("\"'[]`a0 ") @ds.prepare(:insert, :ps_blob, {:file=>:$x}).call(:x=>blob) @ds.get(:file).must_equal blob end cspecify "should handle blob type with nil values", [:oracle], [:tinytds], [:jdbc, :mssql] do @ds.delete @ds.prepare(:insert, :ps_blob, {:file=>:$x}).call(:x=>nil) @ds.get(:file).must_be_nil end cspecify "should handle blob type with embedded zeros", [:odbc] do zero_blob = Sequel::SQL::Blob.new("a\0"*100) @ds.delete @ds.prepare(:insert, :ps_blob, {:file=>:$x}).call(:x=>zero_blob) @ds.get(:file).must_equal zero_blob end it "should handle float type" do @ds.filter(:f=>:$x).prepare(:first, :ps_float).call(:x=>@vs[:f])[:f].must_equal @vs[:f] end it "should handle string type" do @ds.filter(:s=>:$x).prepare(:first, :ps_string).call(:x=>@vs[:s])[:s].must_equal @vs[:s] end cspecify "should handle boolean type", [:jdbc, :sqlite], [:jdbc, :db2], :oracle do @ds.filter(:b=>:$x).prepare(:first, :ps_string).call(:x=>@vs[:b])[:b].must_equal @vs[:b] end end
sequel-5.63.0/spec/integration/schema_test.rb
require_relative "spec_helper" describe "Database schema parser" do after do DB.drop_table?(:items) end describe "with identifier mangling" do before do @iom = DB.identifier_output_method @iim = DB.identifier_input_method @qi = DB.quote_identifiers? end after do DB.identifier_output_method = @iom DB.identifier_input_method = @iim DB.quote_identifiers = @qi end it "should handle a database with identifier methods" do DB.identifier_output_method = :reverse DB.identifier_input_method = :reverse DB.quote_identifiers = true DB.create_table!(:items){Integer :number} begin DB.schema(:items, :reload=>true).must_be_kind_of(Array) DB.schema(:items, :reload=>true).first.first.must_equal :number ensure end end it "should handle a dataset with identifier methods different from the database's" do DB.identifier_output_method = :reverse DB.identifier_input_method = :reverse DB.quote_identifiers = true DB.create_table!(:items){Integer :number} DB.identifier_output_method = @iom DB.identifier_input_method = @iim ds = DB[:items]. with_identifier_output_method(:reverse).
with_identifier_input_method(:reverse) begin DB.schema(ds, :reload=>true).must_be_kind_of(Array) DB.schema(ds, :reload=>true).first.first.must_equal :number ensure DB.identifier_output_method = :reverse DB.identifier_input_method = :reverse DB.drop_table(:items) end end end if IDENTIFIER_MANGLING && !DB.frozen? it "should not issue an sql query if the schema has been loaded unless :reload is true" do DB.create_table!(:items){Integer :number} DB.schema(:items, :reload=>true) DB.schema(:items) DB.schema(:items, :reload=>true) end it "Model schema should include columns in the table, even if they aren't selected" do DB.create_table!(:items){String :a; Integer :number} m = Sequel::Model(DB[:items].select(:a)) m.columns.must_equal [:a] m.db_schema[:number][:type].must_equal :integer end it "should raise an error when the table doesn't exist" do proc{DB.schema(:no_table)}.must_raise(Sequel::Error, Sequel::DatabaseError) end it "should return the schema correctly" do DB.create_table!(:items){Integer :number} schema = DB.schema(:items, :reload=>true) schema.must_be_kind_of(Array) schema.length.must_equal 1 col = schema.first col.must_be_kind_of(Array) col.length.must_equal 2 col.first.must_equal :number col_info = col.last col_info.must_be_kind_of(Hash) col_info[:type].must_equal :integer DB.schema(:items) end it "should parse primary keys from the schema properly" do DB.create_table!(:items){Integer :number} DB.schema(:items).collect{|k,v| k if v[:primary_key]}.compact.must_equal [] DB.create_table!(:items){primary_key :number} DB.schema(:items).collect{|k,v| k if v[:primary_key]}.compact.must_equal [:number] DB.create_table!(:items){Integer :number1; Integer :number2; primary_key [:number1, :number2]} DB.schema(:items).collect{|k,v| k if v[:primary_key]}.compact.must_equal [:number1, :number2] end cspecify "should parse autoincrementing primary keys from the schema properly", :sqlite, :oracle do DB.create_table!(:items){Integer :number} DB.schema(:items).collect{|k,v| k if v[:primary_key] && v[:auto_increment]}.compact.must_equal [] DB.create_table!(:items){primary_key :number} DB.schema(:items).collect{|k,v| k if v[:primary_key] && v[:auto_increment]}.compact.must_equal [:number] DB.create_table!(:items){Integer :number, :primary_key=>true} DB.schema(:items).collect{|k,v| k if v[:primary_key] && v[:auto_increment]}.compact.must_equal [] end it "should parse NULL/NOT NULL from the schema properly" do DB.create_table!(:items){Integer :number, :null=>true} DB.schema(:items).first.last[:allow_null].must_equal true DB.create_table!(:items){Integer :number, :null=>false} DB.schema(:items).first.last[:allow_null].must_equal false end it "should parse defaults from the schema properly" do DB.create_table!(:items){Integer :number} DB.schema(:items).first.last[:ruby_default].must_be_nil DB.create_table!(:items){Integer :number, :default=>0} DB.schema(:items).first.last[:ruby_default].must_equal 0 DB.create_table!(:items){String :a, :default=>"blah"} DB.schema(:items).first.last[:ruby_default].must_equal 'blah' end it "should make :default nil for a NULL default" do DB.create_table!(:items){Integer :number} DB.schema(:items).first.last[:default].must_be_nil DB.create_table!(:items){Integer :number, :default=>0} DB.schema(:items).first.last[:default].wont_equal nil end it "should parse current timestamp defaults from the schema properly" do DB.create_table!(:items){Time :a, :default=>Sequel::CURRENT_TIMESTAMP} DB.schema(:items).first.last[:ruby_default].must_equal Sequel::CURRENT_TIMESTAMP end cspecify 
"should parse current date defaults from the schema properly", [proc{|db| !db.mariadb? || db.server_version <= 100200}, :mysql], :oracle do DB.create_table!(:items){Date :a, :default=>Sequel::CURRENT_DATE} DB.schema(:items).first.last[:ruby_default].must_equal Sequel::CURRENT_DATE end cspecify "should parse types from the schema properly", [:jdbc, :db2], :oracle do DB.create_table!(:items){Integer :number} DB.schema(:items).first.last[:type].must_equal :integer DB.create_table!(:items){Fixnum :number} DB.schema(:items).first.last[:type].must_equal :integer DB.create_table!(:items){Bignum :number} DB.schema(:items).first.last[:type].must_equal :integer DB.create_table!(:items){Float :number} DB.schema(:items).first.last[:type].must_equal :float DB.create_table!(:items){BigDecimal :number, :size=>[11, 2]} DB.schema(:items).first.last[:type].must_equal :decimal DB.create_table!(:items){Numeric :number, :size=>[12, 0]} DB.schema(:items).first.last[:type].must_equal :integer DB.create_table!(:items){String :number} DB.schema(:items).first.last[:type].must_equal :string DB.create_table!(:items){Date :number} DB.schema(:items).first.last[:type].must_equal :date DB.create_table!(:items){Time :number} DB.schema(:items).first.last[:type].must_equal :datetime DB.create_table!(:items){DateTime :number} DB.schema(:items).first.last[:type].must_equal :datetime DB.create_table!(:items){File :number} DB.schema(:items).first.last[:type].must_equal :blob DB.create_table!(:items){TrueClass :number} DB.schema(:items).first.last[:type].must_equal :boolean DB.create_table!(:items){FalseClass :number} DB.schema(:items).first.last[:type].must_equal :boolean end it "should round trip database types from the schema properly" do DB.create_table!(:items){String :number, :size=>50} db_type = DB.schema(:items).first.last[:db_type] DB.create_table!(:items){column :number, db_type} DB.schema(:items).first.last[:db_type].must_equal db_type DB.create_table!(:items){Numeric :number, :size=>[11,3]} db_type = DB.schema(:items).first.last[:db_type] DB.create_table!(:items){column :number, db_type} DB.schema(:items).first.last[:db_type].must_equal db_type end int_types = [Integer, :Bignum] case DB.database_type when :postgres int_types.concat([:smallint, :int2, :int4, :int8]) when :mysql if DB.send(:supports_check_constraints?) int_types.concat([:tinyint, :smallint, :mediumint, 'int(9)', 'tinyint(2)', "integer unsigned", "bigint unsigned", "tinyint unsigned", "smallint unsigned", "mediumint unsigned", 'int(9) unsigned', 'tinyint(2) unsigned']) else int_types.clear end when :mssql, :h2, :hsqldb int_types.concat([:smallint, :tinyint]) when :derby, :access int_types.concat([:smallint]) when :sqlanywhere int_types.concat([:tinyint]) when :sqlite, :oracle # SQLite doesn't enforce integer type values, even on strict tables. # Oracle only has a number type with variable precision, not a standard integer type. int_types.clear end if int_types.empty? 
it "should not parse maximum and minimum values for integer columns" do DB.create_table!(:items){Integer :a} sch = DB.schema(:items).first.last sch.keys.wont_include :max_value sch.keys.wont_include :min_value end end int_types.each do |type| it "should correctly parse maximum and minimum values for #{type} columns" do DB.create_table!(:items){column :a, type} sch = DB.schema(:items).first.last max = sch[:max_value] min = sch[:min_value] max.must_be_kind_of Integer min.must_be_kind_of Integer ds = DB[:items] proc{ds.insert(max+1)}.must_raise(Sequel::DatabaseError, Sequel::InvalidValue) proc{ds.insert(min-1)}.must_raise(Sequel::DatabaseError, Sequel::InvalidValue) ds.insert(max) ds.insert(min) ds.select_order_map(:a).must_equal [min, max] end end it "should parse maximum length for string columns" do DB.create_table!(:items){String :a, :size=>4} DB.schema(:items).first.last[:max_length].must_equal 4 DB.create_table!(:items){String :a, :fixed=>true, :size=>3} DB.schema(:items).first.last[:max_length].must_equal 3 end end if DB.supports_schema_parsing? describe "Database index parsing" do after do DB.drop_table?(:items) end it "should parse indexes into a hash" do [:items, Sequel.identifier(:items)].each do |table| # Delete :deferrable entry, since not all adapters implement it f = lambda{h = DB.indexes(table); h.values.each{|h2| h2.delete(:deferrable)}; h} DB.create_table!(table){Integer :n; Integer :a} f.call.must_equal({}) DB.add_index(table, :n) f.call.must_equal(:items_n_index=>{:columns=>[:n], :unique=>false}) DB.drop_index(table, :n) f.call.must_equal({}) DB.add_index(table, :n, :unique=>true, :name=>:blah_blah_index) f.call.must_equal(:blah_blah_index=>{:columns=>[:n], :unique=>true}) DB.add_index(table, [:n, :a]) f.call.must_equal(:blah_blah_index=>{:columns=>[:n], :unique=>true}, :items_n_a_index=>{:columns=>[:n, :a], :unique=>false}) DB.drop_index(table, :n, :name=>:blah_blah_index) f.call.must_equal(:items_n_a_index=>{:columns=>[:n, :a], :unique=>false}) DB.drop_index(table, [:n, :a]) f.call.must_equal({}) end end it "should not include a primary key index" do DB.create_table!(:items){primary_key :n} DB.indexes(:items).must_equal({}) DB.create_table!(:items){Integer :n; Integer :a; primary_key [:n, :a]} DB.indexes(:items).must_equal({}) end cspecify "should not include partial indexes", [proc{|db| db.sqlite_version < 30808}, :sqlite] do DB.create_table!(:items){Integer :n; Integer :a; index :n, :where=>proc{n > 10}} DB.indexes(:items).must_equal({}) end if DB.supports_partial_indexes? end if DB.supports_index_parsing? 
describe "Database foreign key parsing" do before do @db = DB @pr = lambda do |table, *expected| actual = @db.foreign_key_list(table).sort_by{|c| c[:columns].map{|s| s.to_s}.join << (c[:key]||[]).map{|s| s.to_s}.join}.map{|v| v.values_at(:columns, :table, :key)} actual.zip(expected).each do |a, e| if e.last.first == :pk if a.last == nil a.pop e.pop else e.last.shift end end a.must_equal e end actual.length.must_equal expected.length end end after do @db.drop_table?(:b, :a) end it "should parse foreign key information into an array of hashes" do @db.create_table!(:a, :engine=>:InnoDB){primary_key :c; Integer :d, :null => false, :unique => true} @db.create_table!(:b, :engine=>:InnoDB){foreign_key :e, :a} @pr[:a] @pr[:b, [[:e], :a, [:pk, :c]]] @db.alter_table(:b){add_foreign_key :f, :a, :key=>[:d]} @pr[:b, [[:e], :a, [:pk, :c]], [[:f], :a, [:d]]] @db.alter_table(:b){add_foreign_key [:f], :a, :key=>[:c]} @pr[:b, [[:e], :a, [:pk, :c]], [[:f], :a, [:c]], [[:f], :a, [:d]]] @db.alter_table(:a){add_unique_constraint [:d, :c]} @db.alter_table(:b){add_foreign_key [:f, :e], :a, :key=>[:d, :c]} @pr[:b, [[:e], :a, [:pk, :c]], [[:f], :a, [:c]], [[:f], :a, [:d]], [[:f, :e], :a, [:d, :c]]] @db.alter_table(:b){drop_foreign_key [:f, :e]} @pr[:b, [[:e], :a, [:pk, :c]], [[:f], :a, [:c]], [[:f], :a, [:d]]] @db.alter_table(:b){drop_foreign_key :e} @pr[:b, [[:f], :a, [:c]], [[:f], :a, [:d]]] proc{@db.alter_table(:b){drop_foreign_key :f}}.must_raise(Sequel::Error, Sequel::DatabaseError) @pr[:b, [[:f], :a, [:c]], [[:f], :a, [:d]]] end it "should handle composite foreign and primary keys" do @db.create_table!(:a, :engine=>:InnoDB){Integer :b, :null=>false; Integer :c, :null=>false; Integer :d, :null=>false; primary_key [:b, :c]; unique [:d, :c]} @db.create_table!(:b, :engine=>:InnoDB){Integer :e, :null=>false; Integer :f, :null=>false; Integer :g, :null=>false; foreign_key [:e, :f], :a; foreign_key [:g, :f], :a, :key=>[:d, :c]} @pr[:b, [[:e, :f], :a, [:pk, :b, :c]], [[:g, :f], :a, [:d, :c]]] end it "should handle self-referential composite foreign and primary keys" do @db.create_table!(:a, :engine=>:InnoDB){Integer :b, :null=>false; Integer :c, :null=>false; Integer :d, :null=>false; primary_key [:b, :c]; unique [:d, :b]} @db.alter_table(:a){add_foreign_key [:b, :d], :a; add_foreign_key [:d, :c], :a; add_foreign_key [:c, :b], :a, :key=>[:d, :b]} @pr[:a, [[:b, :d], :a, [:pk, :b, :c]], [[:c, :b], :a, [:d, :b]], [[:d, :c], :a, [:pk, :b, :c]]] end end if DB.supports_foreign_key_parsing? describe "Database" do after do DB.drop_table(:items_temp) rescue nil end it "should create temporary tables without raising an exception" do DB.disconnect DB.create_table!(:items_temp, :temp=>true){Integer :number} end end describe "Database schema modifiers" do before do @db = DB @ds = @db[:items] end after do # Use instead of drop_table? 
to work around issues on jdbc/db2 @db.drop_table(:items) rescue nil @db.drop_table(:items2) rescue nil end it "should create tables correctly" do @db.create_table!(:items){Integer :number} @db.table_exists?(:items).must_equal true @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number] @ds.insert([10]) @ds.columns!.must_equal [:number] end it "should create tables from select statements correctly" do @db.create_table!(:items){Integer :number} @ds.insert([10]) @db.create_table(:items2, :as=>@db[:items]) @db.schema(:items2, :reload=>true).map{|x| x.first}.must_equal [:number] @db[:items2].columns.must_equal [:number] @db[:items2].all.must_equal [{:number=>10}] end it "should not raise an error if table doesn't exist when using drop_table :if_exists" do @db.drop_table(:items, :if_exists=>true) end if DB.supports_drop_table_if_exists? describe "views" do before do @db.drop_view(:items_view2) rescue nil @db.drop_view(:items_view) rescue nil @db.create_table!(:items){Integer :number} @ds.insert(:number=>1) @ds.insert(:number=>2) end after do @db.drop_view(:items_view2) rescue nil @db.drop_view(:items_view) rescue nil end it "should create views correctly" do @db.create_view(:items_view, @ds.where(:number=>1)) @db[:items_view].map(:number).must_equal [1] end it "should create views with check options correctly" do @db.create_view(:items_view, @ds.where{number > 2}, :check=>true) proc{@db[:items_view].insert(1)}.must_raise(Sequel::DatabaseError) @db[:items_view].insert(3) @db[:items_view].select_order_map(:number).must_equal [3] @db.create_view(:items_view2, @db[:items_view].where{number > 1}, :check=>true) proc{@db[:items_view2].insert(1)}.must_raise(Sequel::DatabaseError) proc{@db[:items_view2].insert(2)}.must_raise(Sequel::DatabaseError) @db[:items_view2].insert(4) @db[:items_view2].select_order_map(:number).must_equal [3, 4] @ds.select_order_map(:number).must_equal [1, 2, 3, 4] end if DB.supports_views_with_check_option? it "should create views with local check options correctly" do @db.create_view(:items_view, @ds.where{number > 2}) @db[:items_view].insert(3) @db[:items_view].select_order_map(:number).must_equal [3] @db.create_view(:items_view2, @db[:items_view].where{number > 1}, :check=>:local) proc{@db[:items_view2].insert(1)}.must_raise(Sequel::DatabaseError) @db[:items_view2].insert(2) @db[:items_view2].insert(4) @db[:items_view2].select_order_map(:number).must_equal [3, 4] @ds.select_order_map(:number).must_equal [1, 2, 2, 3, 4] end if DB.supports_views_with_local_check_option? cspecify "should create views with explicit columns correctly", [proc{|db| db.sqlite_version < 30900}, :sqlite] do @db.create_view(:items_view, @ds.where(:number=>1), :columns=>[:n]) @db[:items_view].map(:n).must_equal [1] end it "should drop views correctly" do @db.create_view(:items_view, @ds.where(:number=>1)) @db.drop_view(:items_view) proc{@db[:items_view].map(:number)}.must_raise(Sequel::DatabaseError) end it "should not raise an error if view doesn't exist when using drop_view :if_exists" do @db.drop_view(:items_view, :if_exists=>true) end if DB.supports_drop_table_if_exists? 
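# create_or_replace_view uses CREATE OR REPLACE VIEW where the database
# supports it, and otherwise drops and recreates the view.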
it "should create or replace views correctly" do @db.create_or_replace_view(:items_view, @ds.where(:number=>1)) @db[:items_view].map(:number).must_equal [1] @db.create_or_replace_view(:items_view, @ds.where(:number=>2)) @db[:items_view].map(:number).must_equal [2] end end it "should handle create table in a rolled back transaction" do @db.drop_table?(:items) @db.transaction(:rollback=>:always){@db.create_table(:items){Integer :number}} @db.table_exists?(:items).must_equal false end if DB.supports_transactional_ddl? it "should handle errors creating indexes when ignoring index errors" do @db.drop_table?(:items) @db.transaction do @db.create_table(:items, :ignore_index_errors=>true) do Integer :n1 Integer :n2 index :n1, :name=>'items_n1' index :foo, :name=>'items_f' index :n2, :name=>'items_n2' index :bar, :name=>'items_g' end end @db.table_exists?(:items).must_equal true indexes = @db.indexes(:items).keys indexes.must_include :items_n1 indexes.must_include :items_n2 indexes.wont_include :items_f indexes.wont_include :items_g end if DB.supports_transactional_ddl? && DB.database_type != :mssql describe "join tables" do after do @db.drop_join_table(:cat_id=>:cats, :dog_id=>:dogs) if @db.table_exists?(:cats_dogs) @db.drop_table(:cats, :dogs) @db.table_exists?(:cats_dogs).must_equal false end it "should create join tables correctly" do @db.create_table!(:cats){primary_key :id} @db.create_table!(:dogs){primary_key :id} @db.create_join_table(:cat_id=>:cats, :dog_id=>:dogs) @db.table_exists?(:cats_dogs).must_equal true end end it "should have create_table? only create the table if it doesn't already exist" do @db.create_table!(:items){String :a} @db.create_table?(:items){String :b} @db[:items].columns.must_equal [:a] @db.drop_table?(:items) @db.create_table?(:items){String :b} @db[:items].columns.must_equal [:b] end it "should have create_table? work correctly with indexes" do @db.create_table!(:items){String :a, :index=>true} @db.create_table?(:items){String :b, :index=>true} @db[:items].columns.must_equal [:a] @db.drop_table?(:items) @db.create_table?(:items){String :b, :index=>true} @db[:items].columns.must_equal [:b] end it "should rename tables correctly" do @db.drop_table?(:items) @db.create_table!(:items2){Integer :number} @db.rename_table(:items2, :items) @db.table_exists?(:items).must_equal true @db.table_exists?(:items2).must_equal false @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number] @ds.insert([10]) @ds.columns!.must_equal [:number] end it "should allow creating indexes with tables" do @db.create_table!(:items){Integer :number; index :number} @db.table_exists?(:items).must_equal true @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number] @ds.insert([10]) @ds.columns!.must_equal [:number] end it "should allow creating partial indexes with tables" do @db.create_table!(:items){Integer :number; index :number, :where=>proc{number > 10}} @db.table_exists?(:items).must_equal true @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number] @ds.insert([10]) @ds.columns!.must_equal [:number] end if DB.supports_partial_indexes? 
it "should handle combination of default, unique, and not null" do @db.create_table!(:items){Integer :number, :default=>0, :null=>false, :unique=>true} @db.table_exists?(:items).must_equal true @db.schema(:items, :reload=>true).map{|x| x.last}.first.values_at(:ruby_default, :allow_null).must_equal [0, false] @ds.insert([10]) end it "should be able to specify constraint names for column constraints" do @db.create_table!(:items2){primary_key :id, :primary_key_constraint_name=>:foo_pk} @db.create_table!(:items){foreign_key :id, :items2, :unique=>true, :foreign_key_constraint_name => :foo_fk, :unique_constraint_name => :foo_uk, :null=>false} @db.alter_table(:items){drop_constraint :foo_fk, :type=>:foreign_key; drop_constraint :foo_uk, :type=>:unique} @db.alter_table(:items2){drop_constraint :foo_pk, :type=>:primary_key} end it "should handle foreign keys correctly when creating tables" do @db.create_table!(:items) do primary_key :id foreign_key :item_id, :items unique [:item_id, :id] foreign_key [:id, :item_id], :items, :key=>[:item_id, :id] end @db.table_exists?(:items).must_equal true @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :item_id] @ds.columns!.must_equal [:id, :item_id] end it "should add columns to tables correctly" do @db.create_table!(:items){Integer :number} @ds.insert(:number=>10) @db.alter_table(:items){add_column :name, String} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number, :name] @ds.columns!.must_equal [:number, :name] @ds.all.must_equal [{:number=>10, :name=>nil}] end cspecify "should add primary key columns to tables correctly", :derby, :h2 do @db.create_table!(:items){Integer :number} @ds.insert(:number=>10) @db.alter_table(:items){add_primary_key :id} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number, :id] @ds.columns!.must_equal [:number, :id] @ds.map(:number).must_equal [10] proc{@ds.insert(:id=>@ds.map(:id).first)}.must_raise Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError end it "should drop primary key constraints from tables correctly" do @db.create_table!(:items){Integer :number; primary_key [:number], :name=>:items_pk} @ds.insert(:number=>10) @db.alter_table(:items){drop_constraint :items_pk, :type=>:primary_key} @ds.map(:number).must_equal [10] @ds.insert(10) end it "should add foreign key columns to tables correctly" do @db.create_table!(:items){primary_key :id} @ds.insert i = @ds.get(:id) @db.alter_table(:items){add_foreign_key :item_id, :items} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :item_id] @ds.columns!.must_equal [:id, :item_id] @ds.all.must_equal [{:id=>i, :item_id=>nil}] end it "should not allow NULLs in a primary key" do @db.create_table!(:items){String :id, :primary_key=>true} proc{@ds.insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) end it "should not allow NULLs when adding a primary key column" do @db.create_table!(:items){String :foo} @db.alter_table(:items){add_column :id, String, :primary_key=>true, :default=>'a'} proc{@ds.insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) end it "should not allow NULLs when creating table with primary key constraint" do @db.create_table!(:items){String :id1; String :id2; primary_key [:id1, :id2]} proc{@ds.insert(:id1=>nil, :id2=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) 
proc{@ds.insert(:id1=>nil, :id2=>'1')}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) proc{@ds.insert(:id1=>'1', :id2=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) end it "should not allow NULLs when adding a primary key constraint" do @db.create_table!(:items){String :id1; String :id2} @db.alter_table(:items){add_primary_key [:id1, :id2]} proc{@ds.insert(:id1=>nil, :id2=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) proc{@ds.insert(:id1=>nil, :id2=>'1')}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) proc{@ds.insert(:id1=>'1', :id2=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) end it "should rename columns correctly" do @db.create_table!(:items){Integer :id} @ds.insert(:id=>10) @db.alter_table(:items){rename_column :id, :id2} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id2] @ds.columns!.must_equal [:id2] @ds.all.must_equal [{:id2=>10}] end it "should rename columns with defaults correctly" do @db.create_table!(:items){String :n, :default=>'blah'} @ds.insert @db.alter_table(:items){rename_column :n, :n2} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:n2] @ds.columns!.must_equal [:n2] @ds.insert @ds.all.must_equal [{:n2=>'blah'}, {:n2=>'blah'}] end it "should rename columns with not null constraints" do @db.create_table!(:items, :engine=>:InnoDB){String :n, :null=>false} @ds.insert(:n=>'blah') @db.alter_table(:items){rename_column :n, :n2} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:n2] @ds.columns!.must_equal [:n2] @ds.insert(:n2=>'blah') @ds.all.must_equal [{:n2=>'blah'}, {:n2=>'blah'}] proc{@ds.insert(:n=>nil)}.must_raise(Sequel::DatabaseError) end it "should rename columns when the table is referenced by a foreign key" do @db.create_table!(:items2){primary_key :id; Integer :a} @db.create_table!(:items){Integer :id, :primary_key=>true; foreign_key :items_id, :items2} @db[:items2].insert(:a=>10) @ds.insert(:id=>1) @db.alter_table(:items2){rename_column :a, :b} @db[:items2].insert(:b=>20) @ds.insert(:id=>2) @db[:items2].select_order_map([:id, :b]).must_equal [[1, 10], [2, 20]] end cspecify "should rename primary_key columns correctly", :db2 do @db.create_table!(:items){Integer :id, :primary_key=>true} @ds.insert(:id=>10) @db.alter_table(:items){rename_column :id, :id2} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id2] @ds.columns!.must_equal [:id2] @ds.all.must_equal [{:id2=>10}] end cspecify "should set column NULL/NOT NULL correctly", [:jdbc, :db2] do @db.create_table!(:items, :engine=>:InnoDB){Integer :id} @ds.insert(:id=>10) @db.alter_table(:items){set_column_allow_null :id, false} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id] @ds.columns!.must_equal [:id] proc{@ds.insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) @db.alter_table(:items){set_column_allow_null :id, true} @ds.insert(:id=>nil) @ds.all.must_equal [{:id=>10}, {:id=>nil}] end it "should set column defaults correctly" do @db.create_table!(:items){Integer :id} @ds.insert(:id=>10) @db.alter_table(:items){set_column_default :id, 20} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id] @ds.columns!.must_equal [:id] @ds.insert @ds.all.must_equal [{:id=>10}, {:id=>20}] end it 
"should set column defaults correctly if column has existing default" do @db.create_table!(:items){Integer :id, :default=>10} @ds.insert @ds.all.must_equal [{:id=>10}] @db.alter_table(:items){set_column_default :id, 20} @ds.insert @ds.all.must_equal [{:id=>10}, {:id=>20}] end it "should set column defaults to nil correctly" do @db.create_table!(:items){Integer :id} @ds.insert(:id=>10) @db.alter_table(:items){set_column_default :id, nil} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id] @ds.columns!.must_equal [:id] @ds.insert @ds.all.must_equal [{:id=>10}, {:id=>nil}] end it "should set column defaults to nil for NOT NULL columns correctly" do @db.create_table!(:items){Integer :id, :null=>false} @ds.insert(:id=>10) @db.alter_table(:items){set_column_default :id, nil} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id] @ds.columns!.must_equal [:id] @ds.insert(20) @ds.all.must_equal [{:id=>10}, {:id=>20}] end cspecify "should set column types correctly", [:jdbc, :db2], :oracle do @db.create_table!(:items){Integer :id} @ds.insert(:id=>10) @db.alter_table(:items){set_column_type :id, String} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id] @ds.columns!.must_equal [:id] @ds.insert(:id=>'20') @ds.order(:id).all.must_equal [{:id=>"10"}, {:id=>"20"}] end cspecify "should set column types without modifying NULL/NOT NULL", [:jdbc, :db2], :derby do @db.create_table!(:items){Integer :id, :null=>false, :default=>2} proc{@ds.insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) @db.alter_table(:items){set_column_type :id, String} proc{@ds.insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) @db.create_table!(:items){Integer :id} @ds.insert(:id=>nil) @db.alter_table(:items){set_column_type :id, String} @ds.insert(:id=>nil) @ds.map(:id).must_equal [nil, nil] end cspecify "should set column types without modifying defaults", [:jdbc, :db2], :oracle, :derby do @db.create_table!(:items){Integer :id, :default=>0} @ds.insert @ds.map(:id).must_equal [0] @db.alter_table(:items){set_column_type :id, String} @ds.insert @ds.map(:id).must_equal ['0', '0'] @db.create_table!(:items){String :id, :default=>'a'} @ds.insert @ds.map(:id).must_equal %w'a' @db.alter_table(:items){set_column_type :id, String, :size=>1} @ds.insert @ds.map(:id).must_equal %w'a a' end it "should add unnamed unique constraints and foreign key table constraints correctly" do @db.create_table!(:items, :engine=>:InnoDB){Integer :id, :null => false; Integer :item_id, :null => false} @db.alter_table(:items) do add_unique_constraint [:item_id, :id] add_foreign_key [:id, :item_id], :items, :key=>[:item_id, :id] end @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :item_id] @ds.columns!.must_equal [:id, :item_id] @ds.insert(1, 1) proc{@ds.insert(1, 1)}.must_raise Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError proc{@ds.insert(1, 2)}.must_raise Sequel::ForeignKeyConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError end it "should add named unique constraints and foreign key table constraints correctly" do @db.create_table!(:items, :engine=>:InnoDB){Integer :id, :null=>false; Integer :item_id, :null=>false} @db.alter_table(:items) do add_unique_constraint [:item_id, :id], :name=>:unique_iii add_foreign_key [:id, :item_id], :items, :key=>[:item_id, :id], :name=>:fk_iii end @db.schema(:items, 
:reload=>true).map{|x| x.first}.must_equal [:id, :item_id] @ds.columns!.must_equal [:id, :item_id] @ds.insert(1, 1) proc{@ds.insert(1, 1)}.must_raise Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError proc{@ds.insert(1, 2)}.must_raise Sequel::ForeignKeyConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError end it "should drop unique constraints and foreign key table constraints correctly" do @db.create_table!(:items) do Integer :id Integer :item_id unique [:item_id, :id], :name=>:items_uk foreign_key [:id, :item_id], :items, :key=>[:item_id, :id], :name=>:items_fk end @db.alter_table(:items) do drop_constraint(:items_fk, :type=>:foreign_key) drop_constraint(:items_uk, :type=>:unique) end @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :item_id] @ds.columns!.must_equal [:id, :item_id] @ds.insert(1, 2) @ds.insert(1, 2) end it "should remove columns from tables correctly" do @db.create_table!(:items) do primary_key :id Integer :i end @ds.insert(:i=>10) @db.drop_column(:items, :i) @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id] end it "should remove columns with defaults from tables correctly" do @db.create_table!(:items) do primary_key :id Integer :i, :default=>20 Integer :j, :default=>10 String :s, :default=>'a' end @ds.insert(:i=>10, :j=>20, :s=>'b') @db.drop_column(:items, :i) @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :j, :s] @ds.first.must_equal(:id=>1, :j=>20, :s=>'b') @ds.insert @ds.first(:id=>2).must_equal(:id=>2, :j=>10, :s=>'a') end it "should remove foreign key columns from tables correctly" do @db.create_table!(:items, :engine=>:InnoDB) do primary_key :id Integer :i foreign_key :item_id, :items end @ds.insert(:i=>10) @db.alter_table(:items){drop_foreign_key :item_id} @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :i] end if DB.supports_foreign_key_parsing? 
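# As a usage sketch of the constraint DDL covered above (constraint names here
# are arbitrary), adding and later dropping composite table constraints looks
# roughly like:
#
#   DB.alter_table(:items) do
#     add_unique_constraint [:item_id, :id], :name=>:items_uk
#     add_foreign_key [:id, :item_id], :items, :key=>[:item_id, :id], :name=>:items_fk
#   end
#   DB.alter_table(:items) do
#     drop_constraint :items_fk, :type=>:foreign_key
#     drop_constraint :items_uk, :type=>:unique
#   end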
it "should remove multiple columns in a single alter_table block" do @db.create_table!(:items) do primary_key :id String :name Integer :number end @ds.insert(:number=>10) @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :name, :number] @db.alter_table(:items) do drop_column :name drop_column :number end @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id] end cspecify "should work correctly with many operations in a single alter_table call", [:jdbc, :db2] do @db.create_table!(:items) do primary_key :id String :name2 String :number2 constraint :bar, Sequel.~(:number2=>nil, :name2=>nil) end @ds.insert(:name2=>'A12') @db.alter_table(:items) do add_column :number, Integer drop_constraint :bar drop_column :number2 rename_column :name2, :name set_column_not_null :name set_column_default :name, 'A13' add_constraint :foo, Sequel.like(:name, 'A%') end @db[:items].first.must_equal(:id=>1, :name=>'A12', :number=>nil) @db[:items].delete proc{@db[:items].insert(:name=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) @db[:items].insert(:number=>1) @db[:items].get(:name).must_equal 'A13' end it "should support deferrable foreign key constraints" do @db.create_table!(:items2){Integer :id, :primary_key=>true} @db.create_table!(:items){foreign_key :id, :items2, :deferrable=>true} proc{@db[:items].insert(1)}.must_raise(Sequel::ForeignKeyConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) proc{@db.transaction{@db[:items].insert(1)}}.must_raise(Sequel::ForeignKeyConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) end if DB.supports_deferrable_foreign_key_constraints? it "should support deferrable unique constraints when creating or altering tables" do @db.create_table!(:items){Integer :t; unique [:t], :name=>:atest_def, :deferrable=>true, :using=>:btree} @db[:items].insert(1) @db[:items].insert(2) proc{@db[:items].insert(2)}.must_raise(Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) proc{@db.transaction{@db[:items].insert(2)}}.must_raise(Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) @db.create_table!(:items){Integer :t} @db.alter_table(:items){add_unique_constraint [:t], :name=>:atest_def, :deferrable=>true, :using=>:btree} @db[:items].insert(1) @db[:items].insert(2) proc{@db[:items].insert(2)}.must_raise(Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) proc{@db.transaction{@db[:items].insert(2)}}.must_raise(Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError) end if DB.supports_deferrable_constraints? end describe "Database#tables and #views" do before do class ::String @@xxxxx = 0 def xxxxx "xxxxx#{@@xxxxx += 1}" end end @db = DB @db.drop_view(:sequel_test_view) rescue nil @db.drop_table?(:sequel_test_table) @db.create_table(:sequel_test_table){Integer :a} @db.create_view :sequel_test_view, @db[:sequel_test_table] end after do @db.drop_view :sequel_test_view @db.drop_table :sequel_test_table end it "#tables should return an array of symbols" do ts = @db.tables ts.must_be_kind_of(Array) ts.each{|t| t.must_be_kind_of(Symbol)} ts.must_include(:sequel_test_table) ts.wont_include(:sequel_test_view) end if DB.supports_table_listing?
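# In short, the listing behavior asserted here and in the #views spec below:
#
#   DB.tables # => includes :sequel_test_table, excludes :sequel_test_view
#   DB.views  # => includes :sequel_test_view, excludes :sequel_test_table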
it "#views should return an array of symbols" do ts = @db.views ts.must_be_kind_of(Array) ts.each{|t| t.must_be_kind_of(Symbol)} ts.wont_include(:sequel_test_table) ts.must_include(:sequel_test_view) end if DB.supports_view_listing? describe "with identifier mangling" do before do @iom = @db.identifier_output_method @iim = @db.identifier_input_method end after do @db.identifier_output_method = @iom @db.identifier_input_method = @iim end it "#tables should respect the database's identifier_output_method" do @db.identifier_output_method = :xxxxx @db.identifier_input_method = :xxxxx @db.tables.each{|t| t.to_s.must_match(/\Ax{5}\d+\z/)} end if DB.supports_table_listing? it "#views should respect the database's identifier_output_method" do @db.identifier_output_method = :xxxxx @db.identifier_input_method = :xxxxx @db.views.each{|t| t.to_s.must_match(/\Ax{5}\d+\z/)} end if DB.supports_view_listing? end if IDENTIFIER_MANGLING && !DB.frozen? end

sequel-5.63.0/spec/integration/spec_helper.rb

require 'logger' if ENV['COVERAGE'] require_relative "../sequel_coverage" SimpleCov.sequel_coverage(:group=>%r{lib/sequel/adapters}) end $:.unshift(File.join(File.dirname(File.expand_path(__FILE__)), "../../lib/")) require_relative "../../lib/sequel" require_relative "../visibility_checking" if ENV['CHECK_METHOD_VISIBILITY'] begin require_relative "../spec_config" unless defined?(DB) rescue LoadError end Sequel::Deprecation.backtrace_filter = lambda{|line, lineno| lineno < 4 || line =~ /_(spec|test)\.rb/} Sequel.extension :fiber_concurrency if ENV['SEQUEL_FIBER_CONCURRENCY'] Sequel::DB = nil unless Sequel.constants.include?(:DB) Sequel.split_symbols = true if ENV['SEQUEL_SPLIT_SYMBOLS'] Sequel::Database.extension :columns_introspection if ENV['SEQUEL_COLUMNS_INTROSPECTION'] Sequel::Model.cache_associations = false if ENV['SEQUEL_NO_CACHE_ASSOCIATIONS'] Sequel::Model.plugin :prepared_statements if ENV['SEQUEL_MODEL_PREPARED_STATEMENTS'] Sequel::Model.plugin :throw_failures if ENV['SEQUEL_MODEL_THROW_FAILURES'] Sequel::Model.plugin :primary_key_lookup_check_values if ENV['SEQUEL_PRIMARY_KEY_LOOKUP_CHECK_VALUES'] Sequel::Model.use_transactions = false Sequel::Model.cache_anonymous_models = false require_relative '../guards_helper' DB = Sequel.connect(ENV['SEQUEL_INTEGRATION_URL']) unless defined?(DB) IDENTIFIER_MANGLING = !!ENV['SEQUEL_IDENTIFIER_MANGLING'] unless defined?(IDENTIFIER_MANGLING) DB.extension(:identifier_mangling) if IDENTIFIER_MANGLING if DB.adapter_scheme == :ibmdb || (DB.adapter_scheme == :ado && DB.database_type == :access) def DB.drop_table(*tables) super rescue Sequel::DatabaseError disconnect super end end require_relative '../async_spec_helper' DB.extension :index_caching if ENV['SEQUEL_INDEX_CACHING'] DB.extension :error_sql if ENV['SEQUEL_ERROR_SQL'] DB.extension :synchronize_sql if ENV['SEQUEL_SYNCHRONIZE_SQL'] DB.extension :integer64 if ENV['SEQUEL_INTEGER64'] if ENV['SEQUEL_CONNECTION_VALIDATOR']
DB.extension(:connection_validator) DB.pool.connection_validation_timeout = -1 end if dch = ENV['SEQUEL_DUPLICATE_COLUMNS_HANDLER'] DB.extension :duplicate_columns_handler DB.opts[:on_duplicate_columns] = dch.to_sym unless dch.empty? end if ENV['SEQUEL_FREEZE_DATABASE'] DB.extension(:constraint_validations, :string_agg, :date_arithmetic) DB.extension(:pg_array) if DB.database_type == :postgres DB.freeze end version = if DB.respond_to?(:server_version) DB.server_version elsif DB.respond_to?(:sqlite_version) DB.sqlite_version end puts "running #{defined?(SEQUEL_ADAPTER_TEST) ? SEQUEL_ADAPTER_TEST : "integration (database type: #{DB.database_type})"} specs on #{RUBY_ENGINE} #{defined?(JRUBY_VERSION) ? JRUBY_VERSION : RUBY_VERSION} with #{DB.adapter_scheme} adapter#{" (database version: #{version})" if version}"

sequel-5.63.0/spec/integration/timezone_test.rb

require_relative "spec_helper" describe "Sequel timezone support" do def _test_timezone(timezone=Sequel.application_timezone) Sequel.datetime_class = Time
# Tests should cover both DST and non-DST times.
[Time.now, Time.local(2010,1,1,12), Time.local(2010,6,1,12)].each do |t| @db[:t].insert(t) t2 = @db[:t].single_value t2 = @db.to_application_timestamp(t2.to_s) unless t2.is_a?(Time) (t2 - t).must_be_close_to 0, 2 t2.utc_offset.must_equal 0 if timezone == :utc t2.utc_offset.must_equal t.getlocal.utc_offset if timezone == :local @db[:t].delete end Sequel.datetime_class = DateTime local_dst_offset = Time.local(2010, 6).utc_offset/86400.0 local_std_offset = Time.local(2010, 1).utc_offset/86400.0 [DateTime.now, DateTime.civil(2010,1,1,12,0,0,local_std_offset), DateTime.civil(2010,6,1,12,0,0,local_dst_offset)].each do |dt| @db[:t].insert(dt) dt2 = @db[:t].single_value dt2 = @db.to_application_timestamp(dt2.to_s) unless dt2.is_a?(DateTime) (dt2 - dt).must_be_close_to 0, 0.00002 dt2.offset.must_equal 0 if timezone == :utc dt2.offset.must_equal dt.offset if timezone == :local @db[:t].delete end end before do @db = DB @db.create_table!(:t){DateTime :t} end after do @db.timezone = nil Sequel.default_timezone = nil Sequel.datetime_class = Time @db.drop_table(:t) end cspecify "should support using UTC for database storage and local time for the application", [:tinytds], [:oracle] do Sequel.database_timezone = :utc Sequel.application_timezone = :local _test_timezone Sequel.database_timezone = nil @db.timezone = :utc _test_timezone end cspecify "should support using local time for database storage and UTC for the application", [:tinytds], [:oracle] do Sequel.database_timezone = :local Sequel.application_timezone = :utc _test_timezone Sequel.database_timezone = nil @db.timezone = :local _test_timezone end cspecify "should support using UTC for both database storage and for application", [:oracle] do Sequel.default_timezone = :utc _test_timezone
Sequel.database_timezone = :local @db.timezone = :utc _test_timezone end cspecify "should support using local time for both database storage and for application", [:oracle] do Sequel.default_timezone = :local _test_timezone Sequel.database_timezone = :utc @db.timezone = :local _test_timezone end it "should allow overriding the database_timezone on a per-database basis" do Sequel.database_timezone = :utc @db.timezone = :local t = Time.now @db[:t].insert(t) s = @db[:t].get(Sequel.cast(:t, String)) if o = Date._parse(s)[:offset] o.must_equal t.utc_offset end end end unless DB.frozen?

sequel-5.63.0/spec/integration/transaction_test.rb

require_relative "spec_helper" describe "Database transactions" do before(:all) do @db = DB @db.disconnect @db.create_table!(:items, :engine=>'InnoDB'){String :name; Integer :value} @d = @db[:items] end before do @d.delete end after(:all) do @db.drop_table?(:items) end it "should support transactions" do @db.transaction{@d.insert(:name => 'abc', :value => 1)} @d.count.must_equal 1 end it "should have #transaction yield the connection" do @db.transaction{|conn| conn.wont_equal nil} end it "should have #in_transaction?
work correctly" do @db.in_transaction?.must_equal false c = nil @db.transaction{c = @db.in_transaction?} c.must_equal true end it "should correctly rollback transactions" do proc do @db.transaction do @d.insert(:name => 'abc', :value => 1) raise Interrupt, 'asdf' end end.must_raise(Interrupt) @db.transaction do @d.insert(:name => 'abc', :value => 1) raise Sequel::Rollback end.must_be_nil proc do @db.transaction(:rollback=>:reraise) do @d.insert(:name => 'abc', :value => 1) raise Sequel::Rollback end end.must_raise(Sequel::Rollback) @db.transaction(:rollback=>:always) do @d.insert(:name => 'abc', :value => 1) 2 end.must_equal 2 @d.count.must_equal 0 end it "should support nested transactions" do @db.transaction do @db.transaction do @d.insert(:name => 'abc', :value => 1) end end @d.count.must_equal 1 @d.delete @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction do raise Sequel::Rollback end end @d.count.must_equal 0 proc {@db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction do raise Interrupt, 'asdf' end end}.must_raise(Interrupt) @d.count.must_equal 0 end it "should support rollback_on_exit" do @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.rollback_on_exit end @d.must_be_empty catch(:foo) do @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.rollback_on_exit throw :foo end end @d.must_be_empty lambda do @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.rollback_on_exit return true end end.call @d.must_be_empty @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.rollback_on_exit @db.rollback_on_exit(:cancel=>true) end @d.count.must_equal 1 @d.delete @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.rollback_on_exit(:cancel=>true) end @d.count.must_equal 1 @d.delete @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.rollback_on_exit @db.rollback_on_exit(:cancel=>true) @db.rollback_on_exit end @d.must_be_empty end if DB.supports_savepoints?
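# A brief sketch of the savepoint interaction pinned down by the specs below
# (assuming the database supports savepoints):
#
#   DB.transaction do
#     DB.transaction(:savepoint=>true) do
#       DB.rollback_on_exit                    # marks the entire transaction
#     end
#   end                                        # => all changes rolled back
#
#   DB.transaction do
#     DB.transaction(:savepoint=>true) do
#       DB.rollback_on_exit(:savepoint=>true)  # marks only this savepoint
#     end
#   end                                        # => outer changes still commit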
it "should support rollback_on_exit inside savepoints" do @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction(:savepoint=>true) do @d.insert(:name => 'def', :value => 2) @db.rollback_on_exit end end @d.must_be_empty @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction(:savepoint=>true) do @d.insert(:name => 'def', :value => 2) @db.rollback_on_exit @db.transaction(:savepoint=>true) do @d.insert(:name => 'ghi', :value => 3) end end end @d.must_be_empty @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction(:savepoint=>true) do @d.insert(:name => 'def', :value => 2) @db.transaction(:savepoint=>true) do @db.rollback_on_exit @d.insert(:name => 'ghi', :value => 3) end end end @d.must_be_empty end it "should support rollback_on_exit with :savepoint option" do @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction(:savepoint=>true) do @d.insert(:name => 'def', :value => 2) @db.rollback_on_exit(:savepoint=>true) end end @d.select_order_map(:value).must_equal [1] @d.delete @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction(:savepoint=>true) do @d.insert(:name => 'def', :value => 2) @db.rollback_on_exit(:savepoint=>true) @db.transaction(:savepoint=>true) do @db.rollback_on_exit(:savepoint=>true) @d.insert(:name => 'ghi', :value => 3) end end end @d.select_order_map(:value).must_equal [1] end it "should support rollback_on_exit with :savepoint=>Integer" do @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction(:savepoint=>true) do @d.insert(:name => 'def', :value => 2) @db.rollback_on_exit(:savepoint=>2) end end @d.must_be_empty @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction(:savepoint=>true) do @d.insert(:name => 'def', :value => 2) @db.rollback_on_exit(:savepoint=>3) end end @d.must_be_empty @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction(:savepoint=>true) do @d.insert(:name => 'def', :value => 2) @db.transaction(:savepoint=>true) do @db.rollback_on_exit(:savepoint=>2) @d.insert(:name => 'ghi', :value => 3) end end end @d.select_order_map(:value).must_equal [1] end it "should support rollback_on_exit with :savepoint=>Integer and :cancel" do @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction(:savepoint=>true) do @db.rollback_on_exit(:savepoint=>true) @d.insert(:name => 'def', :value => 2) @db.transaction(:savepoint=>true) do @db.rollback_on_exit(:savepoint=>2, :cancel=>true) @d.insert(:name => 'ghi', :value => 3) end end end @d.select_order_map(:value).must_equal [1, 2, 3] @d.delete @db.transaction do @db.rollback_on_exit(:savepoint=>true) @d.insert(:name => 'abc', :value => 1) @db.transaction(:savepoint=>true) do @db.rollback_on_exit(:savepoint=>true) @d.insert(:name => 'def', :value => 2) @db.transaction(:savepoint=>true) do @db.rollback_on_exit(:savepoint=>3, :cancel=>true) @d.insert(:name => 'ghi', :value => 3) end end end @d.select_order_map(:value).must_equal [1, 2, 3] @d.delete @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.rollback_on_exit(:savepoint=>true) @db.transaction(:savepoint=>true) do @d.insert(:name => 'def', :value => 2) @db.transaction(:savepoint=>true) do @db.rollback_on_exit(:savepoint=>4, :cancel=>true) @d.insert(:name => 'ghi', :value => 3) end end end @d.select_order_map(:value).must_equal [1, 2, 3] @d.delete @db.transaction do @d.insert(:name => 'abc', :value => 1) @db.transaction(:savepoint=>true) do @db.rollback_on_exit(:savepoint=>2) @d.insert(:name => 'def', 
:value => 2) @db.transaction(:savepoint=>true) do @db.rollback_on_exit(:savepoint=>2, :cancel=>true) @d.insert(:name => 'ghi', :value => 3) end end end @d.must_be_empty end it "should handle table_exists? failures inside transactions" do @db.transaction do @d.insert(:name => '1') @db.table_exists?(:asadf098asd9asd98sa).must_equal false @d.insert(:name => '2') end @d.select_order_map(:name).must_equal %w'1 2' end it "should handle :rollback=>:always inside transactions" do @db.transaction do @db.transaction(:rollback=>:always) do @d.insert(:name => 'abc', :value => 1) 2 end.must_equal 2 end @d.select_order_map(:value).must_equal [] end it "should handle table_exists? failures inside savepoints" do @db.transaction do @d.insert(:name => '1') @db.transaction(:savepoint=>true) do @d.insert(:name => '2') @db.table_exists?(:asadf098asd9asd98sa).must_equal false @d.insert(:name => '3') end @d.insert(:name => '4') end @d.select_order_map(:name).must_equal %w'1 2 3 4' end it "should support nested transactions through savepoints using the savepoint option" do @db.transaction do @d.insert(:name => '1') @db.transaction(:savepoint=>true) do @d.insert(:name => '2') @db.transaction do @d.insert(:name => '3') raise Sequel::Rollback end end @d.insert(:name => '4') @db.transaction do @d.insert(:name => '6') @db.transaction(:savepoint=>true) do @d.insert(:name => '7') raise Sequel::Rollback end end @d.insert(:name => '5') end @d.order(:name).map(:name).must_equal %w{1 4 5 6} end it "should support nested transactions through savepoints using the auto_savepoint option" do @db.transaction(:auto_savepoint=>true) do @d.insert(:name => '1') @db.transaction do @d.insert(:name => '2') @db.transaction do @d.insert(:name => '3') raise Sequel::Rollback end end @d.insert(:name => '4') @db.transaction(:auto_savepoint=>true) do @d.insert(:name => '6') @db.transaction do @d.insert(:name => '7') raise Sequel::Rollback end end @d.insert(:name => '5') end @d.order(:name).map(:name).must_equal %w{1 4 5 6} end end it "should handle returning inside of the block by committing" do def ret_commit @db.transaction do @db[:items].insert(:name => 'abc') return end end @d.count.must_equal 0 ret_commit @d.count.must_equal 1 ret_commit @d.count.must_equal 2 proc do @db.transaction do raise Interrupt, 'asdf' end end.must_raise(Interrupt) @d.count.must_equal 2 end if DB.supports_prepared_transactions? it "should allow saving and destroying of model objects" do c = Class.new(Sequel::Model(@d)) c.set_primary_key :name c.unrestrict_primary_key @db.transaction(:prepare=>'XYZ'){c.create(:name => '1'); c.create(:name => '2').destroy} @db.commit_prepared_transaction('XYZ') @d.select_order_map(:name).must_equal ['1'] end it "should commit prepared transactions using commit_prepared_transaction" do @db.transaction(:prepare=>'XYZ'){@d.insert(:name => '1')} @db.commit_prepared_transaction('XYZ') @d.select_order_map(:name).must_equal ['1'] end it "should rollback prepared transactions using rollback_prepared_transaction" do @db.transaction(:prepare=>'XYZ'){@d.insert(:name => '1')} @db.rollback_prepared_transaction('XYZ') @d.select_order_map(:name).must_equal [] end if DB.supports_savepoints_in_prepared_transactions? 
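# For context, the two-phase commit API driven by the surrounding specs.
# 'XYZ' is an arbitrary caller-chosen transaction id:
#
#   DB.transaction(:prepare=>'XYZ'){DB[:items].insert(1)} # prepares, does not commit
#   DB.commit_prepared_transaction('XYZ')                 # finish it later, or:
#   DB.rollback_prepared_transaction('XYZ')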
it "should support savepoints when using prepared transactions" do @db.transaction(:prepare=>'XYZ'){@db.transaction(:savepoint=>true){@d.insert(:name => '1')}} @db.commit_prepared_transaction('XYZ') @d.select_order_map(:name).must_equal ['1'] end end end it "should support all transaction isolation levels" do [:uncommitted, :committed, :repeatable, :serializable].each_with_index do |l, i| @db.transaction(:isolation=>l){@d.insert(:name => 'abc', :value => 1)} @d.count.must_equal i + 1 end end it "should support after_commit outside transactions" do c = nil @db.after_commit{c = 1} c.must_equal 1 end it "should support after_rollback outside transactions" do c = nil @db.after_rollback{c = 1} c.must_be_nil end it "should support after_commit inside transactions" do c = nil @db.transaction{@db.after_commit{c = 1}; c.must_be_nil} c.must_equal 1 end it "should support after_rollback inside transactions" do c = nil @db.transaction{@db.after_rollback{c = 1}; c.must_be_nil} c.must_be_nil end it "should not call after_commit if the transaction rolls back" do c = nil @db.transaction{@db.after_commit{c = 1}; c.must_be_nil; raise Sequel::Rollback} c.must_be_nil end it "should call after_rollback if the transaction rolls back" do c = nil @db.transaction{@db.after_rollback{c = 1}; c.must_be_nil; raise Sequel::Rollback} c.must_equal 1 end it "should support multiple after_commit blocks inside transactions" do c = [] @db.transaction{@db.after_commit{c << 1}; @db.after_commit{c << 2}; c.must_equal []} c.must_equal [1, 2] end it "should support multiple after_rollback blocks inside transactions" do c = [] @db.transaction{@db.after_rollback{c << 1}; @db.after_rollback{c << 2}; c.must_equal []; raise Sequel::Rollback} c.must_equal [1, 2] end it "should support after_commit inside nested transactions" do c = nil @db.transaction{@db.transaction{@db.after_commit{c = 1}}; c.must_be_nil} c.must_equal 1 end it "should support after_rollback inside nested transactions" do c = nil @db.transaction{@db.transaction{@db.after_rollback{c = 1}}; c.must_be_nil; raise Sequel::Rollback} c.must_equal 1 end if DB.supports_savepoints? 
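# Summarizing the hook semantics asserted above:
#
#   DB.transaction do
#     DB.after_commit{...}   # runs only after a successful COMMIT
#     DB.after_rollback{...} # runs only if the transaction rolls back
#   end
#
# Outside a transaction, after_commit blocks run immediately and
# after_rollback blocks are discarded.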
it "should support after_commit inside savepoints" do c = nil @db.transaction{@db.transaction(:savepoint=>true){@db.after_commit{c = 1}}; c.must_be_nil} c.must_equal 1 end it "should support after_rollback inside savepoints" do c = nil @db.transaction{@db.transaction(:savepoint=>true){@db.after_rollback{c = 1}}; c.must_be_nil; raise Sequel::Rollback} c.must_equal 1 end it "should support after_commit inside savepoints with :savepoint_option" do c = nil @db.transaction{@db.transaction(:savepoint=>true){@db.after_commit(:savepoint=>true){c = 1}}; c.must_be_nil} c.must_equal 1 c = nil @db.transaction{@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.after_commit(:savepoint=>true){c = 1}}}; c.must_be_nil} c.must_equal 1 c = nil @db.transaction{@db.transaction(:savepoint=>true, :rollback=>:always){@db.after_commit(:savepoint=>true){c = 1}}} c.must_be_nil @db.transaction(:rollback=>:always){@db.transaction(:savepoint=>true){@db.after_commit(:savepoint=>true){c = 1}}} c.must_be_nil @db.transaction(:rollback=>:always){@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.after_commit(:savepoint=>true){c = 1}}}} c.must_be_nil end it "should support after_rollback inside savepoints with :savepoint_option" do c = nil @db.transaction{@db.transaction(:savepoint=>true, :rollback=>:always){@db.after_rollback(:savepoint=>true){c = 1}; c.must_be_nil}; c.must_equal 1} c.must_equal 1 c = nil @db.transaction(:rollback=>:always){@db.transaction(:savepoint=>true){@db.after_rollback(:savepoint=>true){c = 1}}; c.must_be_nil} c.must_equal 1 c = nil @db.transaction(:rollback=>:always){@db.transaction(:savepoint=>true, :rollback=>:always){@db.after_rollback(:savepoint=>true){c = 1}; c.must_be_nil}; c.must_equal 1} c.must_equal 1 c = nil @db.transaction(:rollback=>:always){@db.transaction(:savepoint=>true){@db.after_rollback(:savepoint=>true){c = 1}}; c.must_be_nil} c.must_equal 1 c = nil @db.transaction(:rollback=>:always){@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true){@db.after_rollback(:savepoint=>true){c = 1}}; c.must_be_nil}} c.must_equal 1 c = nil @db.transaction(:rollback=>:always){@db.transaction(:savepoint=>true){@db.transaction(:savepoint=>true, :rollback=>:always){@db.after_rollback(:savepoint=>true){c = 1}; c.must_be_nil}; c.must_equal 1}} c.must_equal 1 c = nil @db.transaction{@db.transaction(:savepoint=>true, :rollback=>:always){@db.transaction(:savepoint=>true){@db.after_rollback(:savepoint=>true){c = 1}}; c.must_be_nil}; c.must_equal 1} c.must_equal 1 end end if DB.supports_prepared_transactions? it "should raise an error if you attempt to use after_commit or after_rollback inside a prepared transaction" do proc{@db.transaction(:prepare=>'XYZ'){@db.after_commit{}}}.must_raise(Sequel::Error) proc{@db.transaction(:prepare=>'XYZ'){@db.after_rollback{}}}.must_raise(Sequel::Error) end if DB.supports_savepoints_in_prepared_transactions? 
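# Transaction hooks cannot be combined with prepared transactions, since a
# prepared transaction's outcome is only decided later by
# commit_prepared_transaction/rollback_prepared_transaction. Roughly:
#
#   DB.transaction(:prepare=>'XYZ'){DB.after_commit{}} # => raises Sequel::Error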
it "should raise an error if you attempt to use after_commit or after rollback inside a savepoint in a prepared transaction" do proc{@db.transaction(:prepare=>'XYZ'){@db.transaction(:savepoint=>true){@db.after_commit{}}}}.must_raise(Sequel::Error) proc{@db.transaction(:prepare=>'XYZ'){@db.transaction(:savepoint=>true){@db.after_rollback{}}}}.must_raise(Sequel::Error) end end end describe "Database transaction retrying" do before(:all) do @db = DB @db.create_table!(:items, :engine=>'InnoDB'){String :a, :unique=>true, :null=>false} @d = @db[:items] end before do @d.delete end after(:all) do @db.drop_table?(:items) end it "should be supported using the :retry_on option" do @d.insert('b') @d.insert('c') s = 'a' @db.transaction(:retry_on=>Sequel::ConstraintViolation) do s = s.succ @d.insert(s) end @d.select_order_map(:a).must_equal %w'b c d' end it "should limit number of retries via the :num_retries option" do @d.insert('b') @d.insert('c') s = 'a' lambda do @db.transaction(:num_retries=>1, :retry_on=>Sequel::ConstraintViolation) do s = s.succ @d.insert(s) end end.must_raise(Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation) @d.select_order_map(:a).must_equal %w'b c' end end

sequel-5.63.0/spec/integration/type_test.rb

require_relative "spec_helper" describe "Supported types" do def create_items_table_with_column(name, type, opts={}) DB.create_table!(:items){column name, type, opts} DB[:items] end after(:all) do DB.drop_table?(:items) end it "should support casting correctly" do ds = create_items_table_with_column(:number, Integer) ds.insert(:number => 1) ds.select(Sequel.cast(:number, String).as(:n)).map(:n).must_equal %w'1' ds = create_items_table_with_column(:name, String) ds.insert(:name=> '1') ds.select(Sequel.cast(:name, Integer).as(:n)).map(:n).must_equal [1] end it "should support NULL correctly" do ds = create_items_table_with_column(:number, Integer) ds.insert(:number => nil) ds.all.must_equal [{:number=>nil}] end it "should support generic integer type" do ds = create_items_table_with_column(:number, Integer) ds.insert(:number => 2) ds.all.must_equal [{:number=>2}] end it "should support generic bignum type" do ds = create_items_table_with_column(:number, :Bignum) ds.insert(:number => 2**34) ds.all.must_equal [{:number=>2**34}] end it "should support generic float type" do ds = create_items_table_with_column(:number, Float) ds.insert(:number => 2.1) ds.all.must_equal [{:number=>2.1}] end cspecify "should support generic numeric type", [:odbc, :mssql] do ds = create_items_table_with_column(:number, Numeric, :size=>[15, 10]) ds.insert(:number => BigDecimal('2.123456789')) ds.all.must_equal [{:number=>BigDecimal('2.123456789')}] ds = create_items_table_with_column(:number, BigDecimal, :size=>[15, 10]) ds.insert(:number => BigDecimal('2.123456789')) ds.all.must_equal [{:number=>BigDecimal('2.123456789')}] end it "should
support generic string type" do ds = create_items_table_with_column(:name, String) ds.insert(:name => 'Test User') ds.all.must_equal [{:name=>'Test User'}] end it "should support generic text type" do ds = create_items_table_with_column(:name, String, :text=>true) ds.insert(:name => 'Test User'*100) ds.all.must_equal [{:name=>'Test User'*100}] ds.update(:name=>ds.get(:name)) ds.all.must_equal [{:name=>'Test User'*100}] end cspecify "should support generic date type", [:jdbc, :sqlite], [:tinytds], [:jdbc, :mssql], :oracle do ds = create_items_table_with_column(:dat, Date) d = Date.today ds.insert(:dat => d) ds.first[:dat].must_be_kind_of(Date) ds.first[:dat].to_s.must_equal d.to_s end cspecify "should support generic time type", [:odbc], [:jdbc, :mssql], [:jdbc, :sqlite], [:mysql2], [:tinytds], :oracle, [:ado] do ds = create_items_table_with_column(:tim, Time, :only_time=>true) t = Sequel::SQLTime.now ds.insert(:tim => t) v = ds.first[:tim] ds.literal(v).must_equal ds.literal(t) v.must_be_kind_of(Sequel::SQLTime) ds.delete ds.insert(:tim => v) v2 = ds.first[:tim] ds.literal(v2).must_equal ds.literal(t) v2.must_be_kind_of(Sequel::SQLTime) end cspecify "should support generic datetime type", [:jdbc, :sqlite] do ds = create_items_table_with_column(:tim, DateTime) t = DateTime.now ds.insert(:tim => t) ds.first[:tim].strftime('%Y%m%d%H%M%S').must_equal t.strftime('%Y%m%d%H%M%S') ds = create_items_table_with_column(:tim, Time) t = Time.now ds.insert(:tim => t) ds.first[:tim].strftime('%Y%m%d%H%M%S').must_equal t.strftime('%Y%m%d%H%M%S') end cspecify "should support generic file type", [:odbc, :mssql], [:mysql2], [:tinytds] do ds = create_items_table_with_column(:name, File) ds.insert(:name =>Sequel.blob("a\0"*300)) ds.all.must_equal [{:name=>Sequel.blob("a\0"*300)}] ds.first[:name].must_be_kind_of(::Sequel::SQL::Blob) end cspecify "should support generic boolean type", [:jdbc, :sqlite], [:jdbc, :db2], :oracle do ds = create_items_table_with_column(:number, TrueClass) ds.insert(:number => true) ds.all.must_equal [{:number=>true}] ds = create_items_table_with_column(:number, FalseClass) ds.insert(:number => true) ds.all.must_equal [{:number=>true}] end cspecify "should support generic boolean type with defaults", [:jdbc, :sqlite], [:jdbc, :db2], :oracle do ds = create_items_table_with_column(:number, TrueClass, :default=>true) ds.insert ds.all.must_equal [{:number=>true}] ds = create_items_table_with_column(:number, FalseClass, :default=>false) ds.insert ds.all.must_equal [{:number=>false}] end end 
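# As a quick reference, the generic Ruby classes used as column types in this
# file map to adapter-specific SQL types, approximately (exact types vary by
# database; the :generic_types table name is hypothetical):
#
#   DB.create_table(:generic_types) do
#     column :i, Integer                # integer
#     column :big, :Bignum              # bigint
#     column :f, Float                  # double precision
#     column :d, BigDecimal             # numeric
#     column :s, String                 # varchar(255)
#     column :t, String, :text=>true    # text
#     column :blb, File                 # blob/bytea
#     column :b, TrueClass              # boolean
#   end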
sequel-5.63.0/spec/model/association_reflection_spec.rb

require_relative "spec_helper" describe Sequel::Model::Associations::AssociationReflection, "#associated_class" do before do @c = Class.new(Sequel::Model(:foo)) class ::ParParent < Sequel::Model; end end after do Object.send(:remove_const, :ParParent) end it "should use the :class value if present" do @c.many_to_one :c, :class=>ParParent @c.association_reflection(:c).keys.must_include(:class) @c.association_reflection(:c).associated_class.must_equal ParParent end it "should use the :class value if present" do @c.many_to_one :c, :class=>@c @c.one_to_many :cs, :class=>@c c = @c.association_reflection(:c) cs = @c.association_reflection(:cs) c.association_method.must_equal :c c.dataset_method.must_equal :c_dataset c.setter_method.must_equal :c= c._setter_method.must_equal :_c= cs.association_method.must_equal :cs cs.dataset_method.must_equal :cs_dataset cs.add_method.must_equal :add_c cs._add_method.must_equal :_add_c cs.remove_method.must_equal :remove_c cs._remove_method.must_equal :_remove_c cs.remove_all_method.must_equal :remove_all_cs cs._remove_all_method.must_equal :_remove_all_cs end it "should have inspect include association class and representation of association definition " do ParParent.many_to_one :c ParParent.association_reflection(:c).inspect.must_equal "#<Sequel::Model::Associations::ManyToOneAssociationReflection ParParent.many_to_one :c>" ParParent.many_to_one :c, :class=>ParParent ParParent.association_reflection(:c).inspect.must_equal "#<Sequel::Model::Associations::ManyToOneAssociationReflection ParParent.many_to_one :c, :class=>ParParent>" ParParent.many_to_one :c, :class=>ParParent, :key=>:c_id ParParent.association_reflection(:c).inspect.must_equal "#<Sequel::Model::Associations::ManyToOneAssociationReflection ParParent.many_to_one :c, :key=>:c_id, :class=>ParParent>" @c.one_to_many :foos do |ds| ds end @c.association_reflection(:foos).inspect.must_equal "#<Sequel::Model::Associations::OneToManyAssociationReflection #{@c.to_s}.one_to_many :foos, :block=>#{@c.association_reflection(:foos)[:block].inspect}>" end it "should figure out the class if the :class value is not present" do @c.many_to_one :c, :class=>'ParParent' @c.association_reflection(:c).keys.wont_include(:class) @c.association_reflection(:c).associated_class.must_equal ParParent end it "should respect :class_namespace option for specifying the namespace" do class ::ParParent class ParParent < Sequel::Model; end end ParParent.many_to_one :par_parent, :class=>'ParParent'
ParParent.association_reflection(:par_parent).associated_class.must_equal ParParent ParParent.many_to_one :par_parent, :class=>'ParParent', :class_namespace=>'ParParent' ParParent.association_reflection(:par_parent).associated_class.must_equal ParParent::ParParent end it "should include association inspect output if an exception would be raised" do r = @c.many_to_one(:c) begin r.associated_class rescue NameError => e end e.message.must_include r.inspect end end describe Sequel::Model::Associations::AssociationReflection, "#primary_key" do before do @c = Class.new(Sequel::Model(:foo)) class ::ParParent < Sequel::Model; end end after do Object.send(:remove_const, :ParParent) end it "should use the :primary_key value if present" do @c.many_to_one :c, :class=>ParParent, :primary_key=>:blah__blah @c.association_reflection(:c).keys.must_include(:primary_key) @c.association_reflection(:c).primary_key.must_equal :blah__blah end it "should use the associated table's primary key if :primary_key is not present" do @c.many_to_one :c, :class=>'ParParent' @c.association_reflection(:c).keys.wont_include(:primary_key) @c.association_reflection(:c).primary_key.must_equal :id end end describe Sequel::Model::Associations::AssociationReflection, "#reciprocal_type" do it "should include a specific type if only one matches" do c = Class.new(Sequel::Model(:a)) c.one_to_many :cs, :class=>c, :key=>:c_id c.many_to_one :c, :class=>c, :key=>:c_id c.association_reflection(:c).send(:reciprocal_type).must_equal :one_to_many end it "should work if not caching" do c = Class.new(Sequel::Model(:a)) c.cache_associations = false c.one_to_many :cs, :class=>c, :key=>:c_id c.many_to_one :c, :class=>c, :key=>:c_id c.association_reflection(:c).reciprocal.must_equal :cs c.association_reflection(:c).send(:reciprocal_type).must_equal :one_to_many end end describe Sequel::Model::Associations::AssociationReflection, "#reciprocal" do before do class ::ParParent < Sequel::Model; end class ::ParParentTwo < Sequel::Model; end class ::ParParentThree < Sequel::Model; end end after do Object.send(:remove_const, :ParParent) Object.send(:remove_const, :ParParentTwo) Object.send(:remove_const, :ParParentThree) end it "should use the :reciprocal value if present" do @c = Class.new(Sequel::Model(:foo)) @d = Class.new(Sequel::Model(:foo)) @c.many_to_one :c, :class=>@d, :reciprocal=>:xx @c.association_reflection(:c).keys.must_include(:reciprocal) @c.association_reflection(:c).reciprocal.must_equal :xx end it "should not raise an error if some reciprocal associations have invalid associated classes" do @c = Class.new(Sequel::Model(:foo)) @c.one_to_many :sadfakloasdfioas @c.many_to_one :c, :class=>@c @c.association_reflection(:c).reciprocal end it "should require the associated class is the current class to be a reciprocal" do ParParent.many_to_one :par_parent_two, :key=>:blah ParParent.many_to_one :par_parent_three, :key=>:blah ParParentTwo.one_to_many :par_parents, :key=>:blah ParParentThree.one_to_many :par_parents, :key=>:blah ParParentTwo.association_reflection(:par_parents).reciprocal.must_equal :par_parent_two ParParentThree.association_reflection(:par_parents).reciprocal.must_equal :par_parent_three ParParent.many_to_many :par_parent_twos, :left_key=>:l, :right_key=>:r, :join_table=>:jt ParParent.many_to_many :par_parent_threes, :left_key=>:l, :right_key=>:r, :join_table=>:jt ParParentTwo.many_to_many :par_parents, :right_key=>:l, :left_key=>:r, :join_table=>:jt ParParentThree.many_to_many :par_parents, :right_key=>:l, :left_key=>:r, 
:join_table=>:jt ParParentTwo.association_reflection(:par_parents).reciprocal.must_equal :par_parent_twos ParParentThree.association_reflection(:par_parents).reciprocal.must_equal :par_parent_threes end it "should handle composite keys" do ParParent.many_to_one :par_parent_two, :key=>[:a, :b], :primary_key=>[:c, :b] ParParent.many_to_one :par_parent_three, :key=>[:d, :e], :primary_key=>[:c, :b] ParParentTwo.one_to_many :par_parents, :primary_key=>[:c, :b], :key=>[:a, :b] ParParentThree.one_to_many :par_parents, :primary_key=>[:c, :b], :key=>[:d, :e] ParParentTwo.association_reflection(:par_parents).reciprocal.must_equal :par_parent_two ParParentThree.association_reflection(:par_parents).reciprocal.must_equal :par_parent_three ParParent.many_to_many :par_parent_twos, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:pl1, :pl2], :right_primary_key=>[:pr1, :pr2], :join_table=>:jt ParParent.many_to_many :par_parent_threes, :right_key=>[:l1, :l2], :left_key=>[:r1, :r2], :left_primary_key=>[:pl1, :pl2], :right_primary_key=>[:pr1, :pr2], :join_table=>:jt ParParentTwo.many_to_many :par_parents, :right_key=>[:l1, :l2], :left_key=>[:r1, :r2], :right_primary_key=>[:pl1, :pl2], :left_primary_key=>[:pr1, :pr2], :join_table=>:jt ParParentThree.many_to_many :par_parents, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :right_primary_key=>[:pl1, :pl2], :left_primary_key=>[:pr1, :pr2], :join_table=>:jt ParParentTwo.association_reflection(:par_parents).reciprocal.must_equal :par_parent_twos ParParentThree.association_reflection(:par_parents).reciprocal.must_equal :par_parent_threes end it "should figure out the reciprocal if the :reciprocal value is not present" do ParParent.many_to_one :par_parent_two ParParentTwo.one_to_many :par_parents ParParent.many_to_many :par_parent_threes ParParentThree.many_to_many :par_parents ParParent.association_reflection(:par_parent_two).keys.wont_include(:reciprocal) ParParent.association_reflection(:par_parent_two).reciprocal.must_equal :par_parents ParParentTwo.association_reflection(:par_parents).keys.wont_include(:reciprocal) ParParentTwo.association_reflection(:par_parents).reciprocal.must_equal :par_parent_two ParParent.association_reflection(:par_parent_threes).keys.wont_include(:reciprocal) ParParent.association_reflection(:par_parent_threes).reciprocal.must_equal :par_parents ParParentThree.association_reflection(:par_parents).keys.wont_include(:reciprocal) ParParentThree.association_reflection(:par_parents).reciprocal.must_equal :par_parent_threes end it "should handle ambiguous reciprocals where only one doesn't have conditions/blocks" do ParParent.many_to_one :par_parent_two, :class=>ParParentTwo, :key=>:par_parent_two_id ParParent.many_to_one :par_parent_two2, :clone=>:par_parent_two, :conditions=>{:id=>:id} ParParentTwo.one_to_many :par_parents ParParent.many_to_many :par_parent_threes, :class=>ParParentThree, :right_key=>:par_parent_three_id ParParent.many_to_many :par_parent_threes2, :clone=>:par_parent_threes do |ds| ds end ParParentThree.many_to_many :par_parents ParParentTwo.association_reflection(:par_parents).reciprocal.must_equal :par_parent_two ParParentThree.association_reflection(:par_parents).reciprocal.must_equal :par_parent_threes end it "should handle ambiguous reciprocals where only one has matching primary keys" do ParParent.many_to_one :par_parent_two, :class=>ParParentTwo, :key=>:par_parent_two_id ParParent.many_to_one :par_parent_two2, :clone=>:par_parent_two, :primary_key=>:foo ParParentTwo.one_to_many :par_parents, 
:class=>ParParent, :key=>:par_parent_two_id ParParentTwo.one_to_many :par_parents2, :clone=>:par_parents, :primary_key=>:foo ParParent.many_to_many :par_parent_threes, :class=>ParParentThree, :right_key=>:par_parent_three_id ParParent.many_to_many :par_parent_threes2, :clone=>:par_parent_threes, :right_primary_key=>:foo ParParent.many_to_many :par_parent_threes3, :clone=>:par_parent_threes, :left_primary_key=>:foo ParParentThree.many_to_many :par_parents ParParent.association_reflection(:par_parent_two).reciprocal.must_equal :par_parents ParParent.association_reflection(:par_parent_two2).reciprocal.must_equal :par_parents2 ParParentTwo.association_reflection(:par_parents).reciprocal.must_equal :par_parent_two ParParentTwo.association_reflection(:par_parents2).reciprocal.must_equal :par_parent_two2 ParParentThree.association_reflection(:par_parents).reciprocal.must_equal :par_parent_threes end it "should handle reciprocals where current association has conditions/block" do ParParent.many_to_one :par_parent_two, :conditions=>{:id=>:id} ParParentTwo.one_to_many :par_parents ParParent.many_to_many :par_parent_threes do |ds| ds end ParParentThree.many_to_many :par_parents ParParent.association_reflection(:par_parent_two).reciprocal.must_equal :par_parents ParParent.association_reflection(:par_parent_threes).reciprocal.must_equal :par_parents end end describe Sequel::Model::Associations::AssociationReflection, "#select" do before do @c = Class.new(Sequel::Model(:foo)) class ::ParParent < Sequel::Model; end end after do Object.send(:remove_const, :ParParent) end it "should use the :select value if present" do @c.many_to_one :c, :class=>ParParent, :select=>[:par_parents__id] @c.association_reflection(:c).keys.must_include(:select) @c.association_reflection(:c).select.must_equal [:par_parents__id] end it "should be the associated_table.* if :select is not present for a many_to_many associaiton" do @c.many_to_many :cs, :class=>'ParParent' @c.association_reflection(:cs).keys.wont_include(:select) @c.association_reflection(:cs).select.must_equal Sequel::SQL::ColumnAll.new(:par_parents) end it "should be blank if :select is not present for a many_to_one and one_to_many associaiton" do @c.one_to_many :cs, :class=>'ParParent' @c.association_reflection(:cs).keys.wont_include(:select) @c.association_reflection(:cs).select.must_be_nil @c.many_to_one :c, :class=>'ParParent' @c.association_reflection(:c).keys.wont_include(:select) @c.association_reflection(:c).select.must_be_nil end end describe Sequel::Model::Associations::AssociationReflection, "#can_have_associated_objects?" 
do it "should be true for any given object (for backward compatibility)" do Sequel::Model::Associations::AssociationReflection.new.can_have_associated_objects?(Object.new).must_equal true end end describe Sequel::Model::Associations::AssociationReflection, "#associated_object_keys" do before do @c = Class.new(Sequel::Model(:foo)) class ::ParParent < Sequel::Model; end end after do Object.send(:remove_const, :ParParent) end it "should use the primary keys for a many_to_one association" do @c.many_to_one :c, :class=>ParParent @c.association_reflection(:c).associated_object_keys.must_equal [:id] @c.many_to_one :c, :class=>ParParent, :primary_key=>:d_id @c.association_reflection(:c).associated_object_keys.must_equal [:d_id] @c.many_to_one :c, :class=>ParParent, :key=>[:c_id1, :c_id2], :primary_key=>[:id1, :id2] @c.association_reflection(:c).associated_object_keys.must_equal [:id1, :id2] end it "should use the keys for a one_to_many association" do ParParent.one_to_many :cs, :class=>ParParent ParParent.association_reflection(:cs).associated_object_keys.must_equal [:par_parent_id] @c.one_to_many :cs, :class=>ParParent, :key=>:d_id @c.association_reflection(:cs).associated_object_keys.must_equal [:d_id] @c.one_to_many :cs, :class=>ParParent, :key=>[:c_id1, :c_id2], :primary_key=>[:id1, :id2] @c.association_reflection(:cs).associated_object_keys.must_equal [:c_id1, :c_id2] end it "should use the right primary keys for a many_to_many association" do @c.many_to_many :cs, :class=>ParParent @c.association_reflection(:cs).associated_object_keys.must_equal [:id] @c.many_to_many :cs, :class=>ParParent, :right_primary_key=>:d_id @c.association_reflection(:cs).associated_object_keys.must_equal [:d_id] @c.many_to_many :cs, :class=>ParParent, :right_key=>[:c_id1, :c_id2], :right_primary_key=>[:id1, :id2] @c.association_reflection(:cs).associated_object_keys.must_equal [:id1, :id2] end end describe Sequel::Model::Associations::AssociationReflection do before do @c = Class.new(Sequel::Model(:foo)) def @c.name() "C" end end it "one_to_many #qualified_primary_key should be a qualified version of the primary key" do @c.one_to_many :cs, :class=>@c @c.dataset.literal(@c.association_reflection(:cs).qualified_primary_key).must_equal 'foo.id' end it "many_to_many #associated_key_column should be the left key" do @c.many_to_many :cs, :class=>@c @c.association_reflection(:cs).associated_key_column.must_equal :c_id end it "many_to_many #qualified_right_key should be a qualified version of the primary key" do @c.many_to_many :cs, :class=>@c, :right_key=>:c2_id @c.dataset.literal(@c.association_reflection(:cs).qualified_right_key).must_equal 'cs_cs.c2_id' end it "many_to_many #qualified_right_primary_key should be a qualified version of the primary key" do @c.many_to_many :cs, :class=>@c @c.dataset.literal(@c.association_reflection(:cs).qualified_right_primary_key).must_equal 'foo.id' end end describe Sequel::Model::Associations::AssociationReflection, "#remove_before_destroy?" 
do before do @c = Class.new(Sequel::Model(:foo)) end it "should be true for many_to_one and many_to_many associations" do @c.many_to_one :c, :class=>@c @c.association_reflection(:c).remove_before_destroy?.must_equal true @c.many_to_many :cs, :class=>@c @c.association_reflection(:cs).remove_before_destroy?.must_equal true end it "should be false for one_to_one and one_to_many associations" do @c.one_to_one :c, :class=>@c @c.association_reflection(:c).remove_before_destroy?.must_equal false @c.one_to_many :cs, :class=>@c @c.association_reflection(:cs).remove_before_destroy?.must_equal false end end describe Sequel::Model::Associations::AssociationReflection, "#filter_by_associations_limit_strategy" do before do @db = Sequel.mock @c = Class.new(Sequel::Model(@db[:a])) end after do Sequel::Model.default_eager_limit_strategy = true end it "should be nil by default for *_one associations" do @c.many_to_one :c, :class=>@c @c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_be_nil @c.one_to_one :c, :class=>@c @c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_be_nil @c.one_through_one :c, :class=>@c @c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_be_nil end it "should be :correlated_subquery by default for one_to_many and one_to_one with :order associations" do @c.one_to_one :c, :class=>@c, :order=>:a @c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_equal :correlated_subquery @c.one_to_many :cs, :class=>@c, :limit=>1 @c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :correlated_subquery end it "should be :ruby by default for many_to_many and one_through_one with :order associations" do @c.one_through_one :c, :class=>@c, :order=>:a @c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_equal :ruby @c.many_to_many :cs, :class=>@c, :limit=>1 @c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :ruby end it "should be nil for many_to_one associations even if :eager_limit_strategy or :filter_limit_strategy is used" do @c.many_to_one :c, :class=>@c, :eager_limit_strategy=>true @c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_be_nil @c.many_to_one :c, :class=>@c, :eager_limit_strategy=>:distinct_on @c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_be_nil @c.many_to_one :c, :class=>@c, :filter_limit_strategy=>true @c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_be_nil end it "should be a symbol for other associations if given a symbol" do @c.one_to_one :c, :class=>@c, :eager_limit_strategy=>:distinct_on @c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_equal :distinct_on @c.one_to_many :cs, :class=>@c, :eager_limit_strategy=>:window_function, :limit=>1 @c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :window_function end it "should use :distinct_on for one_to_one associations if picking and the association dataset supports ordered distinct on" do @c.dataset = @c.dataset.with_extend{def supports_ordered_distinct_on?; true end} @c.one_to_one :c, :class=>@c, :eager_limit_strategy=>true @c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_equal :distinct_on end it "should use :window_function for associations if picking and the association dataset supports window functions" do @c.dataset = 
@c.dataset.with_extend{def supports_window_functions?; true end} @c.one_to_one :c, :class=>@c, :eager_limit_strategy=>true @c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_equal :window_function @c.one_to_many :cs, :class=>@c, :eager_limit_strategy=>true, :limit=>1 @c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :window_function @c.many_to_many :cs, :class=>@c, :eager_limit_strategy=>true, :limit=>1 @c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :window_function end it "should use :ruby for one_to_many associations if the database doesn't support limits in subqueries" do @c.dataset = @c.dataset.with_extend{def supports_limits_in_correlated_subqueries?; false end} @c.one_to_many :cs, :class=>@c, :eager_limit_strategy=>true, :limit=>1 @c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :ruby end it "should use :ruby for one_to_many associations if offset doesn't work in correlated subqueries and an offset is used" do @c.dataset = @c.dataset.with_extend{def supports_offsets_in_correlated_subqueries?; false end} @c.one_to_many :cs, :class=>@c, :eager_limit_strategy=>true, :limit=>1 @c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :correlated_subquery @c.one_to_many :cs, :class=>@c, :eager_limit_strategy=>true, :limit=>[1, 1] @c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :ruby end it "should use :ruby for one_to_many associations if composite primary key is used and database does not support multiple columns in IN" do @c.dataset = @c.dataset.with_extend{def supports_multiple_column_in?; false end} @c.set_primary_key [:id, :id2] @c.one_to_many :cs, :class=>@c, :eager_limit_strategy=>true, :limit=>1, :key=>[:id, :id2] @c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :ruby end it "should use :ruby for many_to_many associations if picking and the association dataset doesn't support window functions" do @c.many_to_many :cs, :class=>@c, :eager_limit_strategy=>true, :limit=>1 @c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :ruby end it "should respect Model.default_eager_limit_strategy for *_many associations" do Sequel::Model.default_eager_limit_strategy = :window_function Sequel::Model.default_eager_limit_strategy.must_equal :window_function c = Class.new(Sequel::Model) c.dataset = :a c.default_eager_limit_strategy.must_equal :window_function c.one_to_many :cs, :class=>c, :limit=>1 c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :window_function c.many_to_many :cs, :class=>c, :limit=>1 c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :window_function Sequel::Model.default_eager_limit_strategy = true c = Class.new(Sequel::Model) c.dataset = :a c.one_to_many :cs, :class=>c, :limit=>1 c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :correlated_subquery c.dataset = c.dataset.with_extend{def supports_window_functions?; true end} c.many_to_many :cs, :class=>c, :limit=>1 c.association_reflection(:cs).send(:filter_by_associations_limit_strategy).must_equal :window_function end it "should ignore Model.default_eager_limit_strategy for one_to_one associations" do @c.default_eager_limit_strategy = :window_function @c.one_to_one :c, :class=>@c
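# Aside: the strategy picking exercised in these specs maps to options an
# application would set itself. A hedged usage sketch (Artist and Album are
# hypothetical models, not defined in these specs):
#
#   Artist.one_to_many :recent_albums, :class=>:Album,
#     :order=>Sequel.desc(:release_date), :limit=>5,
#     :eager_limit_strategy=>:window_function
#
# or, as tested above, globally via
# Sequel::Model.default_eager_limit_strategy = true.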
@c.association_reflection(:c).send(:filter_by_associations_limit_strategy).must_be_nil end end describe Sequel::Model::Associations::AssociationReflection, "#apply_eager_dataset_changes" do before do @c = Class.new(Sequel::Model(:foo)) end it "should apply the eager block as well as the association options to the dataset" do @c.one_to_many :cs, :class=>@c, :select=>:a, :order=>:b do |ds| ds.where(:c) end @c.association_reflection(:cs).apply_eager_dataset_changes(@c.dataset).sql.must_equal 'SELECT a FROM foo WHERE c ORDER BY b' end it "should handle :eager_block option" do @c.one_to_many :cs, :class=>@c, :select=>:a, :order=>:b, :eager_block=>proc{|ds| ds.where(:c)} @c.association_reflection(:cs).apply_eager_dataset_changes(@c.dataset).sql.must_equal 'SELECT a FROM foo WHERE c ORDER BY b' end it "should handle no block" do @c.one_to_many :cs, :class=>@c, :select=>:a, :order=>:b @c.association_reflection(:cs).apply_eager_dataset_changes(@c.dataset).sql.must_equal 'SELECT a FROM foo ORDER BY b' end end describe Sequel::Model, " association reflection methods" do before do @c1 = Class.new(Sequel::Model(:nodes)) do def self.name; 'Node'; end def self.to_s; 'Node'; end end DB.reset end it "#all_association_reflections should include all association reflection hashes" do @c1.all_association_reflections.must_equal [] @c1.associate :many_to_one, :parent, :class => @c1 @c1.all_association_reflections.collect{|v| v[:name]}.must_equal [:parent] @c1.all_association_reflections.collect{|v| v[:type]}.must_equal [:many_to_one] @c1.all_association_reflections.collect{|v| v[:class]}.must_equal [@c1] @c1.associate :one_to_many, :children, :class => @c1 @c1.all_association_reflections.sort_by{|x|x[:name].to_s} @c1.all_association_reflections.sort_by{|x|x[:name].to_s}.collect{|v| v[:name]}.must_equal [:children, :parent] @c1.all_association_reflections.sort_by{|x|x[:name].to_s}.collect{|v| v[:type]}.must_equal [:one_to_many, :many_to_one] @c1.all_association_reflections.sort_by{|x|x[:name].to_s}.collect{|v| v[:class]}.must_equal [@c1, @c1] end it "#association_reflection should return nil for nonexistent association" do @c1.association_reflection(:blah).must_be_nil end it "#association_reflection should return association reflection hash if association exists" do @c1.associate :many_to_one, :parent, :class => @c1 @c1.association_reflection(:parent).must_be_kind_of(Sequel::Model::Associations::AssociationReflection) @c1.association_reflection(:parent)[:name].must_equal :parent @c1.association_reflection(:parent)[:type].must_equal :many_to_one @c1.association_reflection(:parent)[:class].must_equal @c1 @c1.associate :one_to_many, :children, :class => @c1 @c1.association_reflection(:children).must_be_kind_of(Sequel::Model::Associations::AssociationReflection) @c1.association_reflection(:children)[:name].must_equal :children @c1.association_reflection(:children)[:type].must_equal :one_to_many @c1.association_reflection(:children)[:class].must_equal @c1 end it "#associations should include all association names" do @c1.associations.must_equal [] @c1.associate :many_to_one, :parent, :class => @c1 @c1.associations.must_equal [:parent] @c1.associate :one_to_many, :children, :class => @c1 @c1.associations.sort_by{|x|x.to_s}.must_equal [:children, :parent] end it "association reflections should be copied upon subclassing" do @c1.associate :many_to_one, :parent, :class => @c1 c = Class.new(@c1) @c1.associations.must_equal [:parent] c.associations.must_equal [:parent] c.associate :many_to_one, :parent2, :class => @c1
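# A hedged sketch of the copy-on-subclass behavior asserted below (Base and
# Sub are hypothetical names, not used elsewhere in these specs):
#
#   class Base < Sequel::Model(:items); many_to_one :owner; end
#   class Sub < Base; many_to_one :auditor; end
#   Base.associations # => [:owner]
#   Sub.associations  # => includes both :owner and :auditor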
@c1.associations.must_equal [:parent] c.associations.sort_by{|x| x.to_s}.must_equal [:parent, :parent2] c.instance_methods.must_include(:parent) end end describe Sequel::Model::Associations::AssociationReflection, "with caching disabled" do before do @db = Sequel.mock @c = Class.new(Sequel::Model) @c.dataset = @db[:foo] @c.cache_associations = false end it "should not cache metadata" do begin class ::ParParent < Sequel::Model; end c = ParParent @c.many_to_one :c, :class=>:ParParent @c.association_reflection(:c).associated_class.must_equal c Object.send(:remove_const, :ParParent) class ::ParParent < Sequel::Model; end c = ParParent @c.association_reflection(:c).associated_class.must_equal c ensure Object.send(:remove_const, :ParParent) end end it "should not use cached schema" do def @db.supports_schema_parsing?; true end def @db.schema(table, opts={}) [[opts[:reload] ? :reload : :id, {}]] end @c.dataset = @db[:items] @c.columns.must_equal [:reload] @c.cache_associations = true @c.dataset = @db[:items] @c.columns.must_equal [:id] end end describe Sequel::Model::Associations::AssociationReflection, "with default association options" do before do @db = Sequel.mock @c = Class.new(Sequel::Model) @c.dataset = @db[:foo] end it "should use default_association_options as defaults" do @c.default_association_options = {:foo=>1, :bar=>2} @c.many_to_one :c, :class=>@c, :foo=>3 r = @c.association_reflection(:c) r[:foo].must_equal 3 r[:bar].must_equal 2 end it "should inherit default_association_options" do @c.default_association_options = {:foo=>1, :bar=>2} c = Class.new(@c) c.many_to_one :c, :class=>c, :foo=>3 r = c.association_reflection(:c) r[:foo].must_equal 3 r[:bar].must_equal 2 @c.default_association_options[:bar] = 4 c.many_to_one :d, :class=>c, :foo=>3 r = c.association_reflection(:d) r[:foo].must_equal 3 r[:bar].must_equal 2 end it "should have default_association_type_options take precedence over default_association_options" do @c.default_association_options = {:foo=>2, :bar=>3} @c.default_association_type_options[:many_to_one] = {:foo=>1, :bar=>2} @c.many_to_one :c, :class=>@c, :foo=>3 r = @c.association_reflection(:c) r[:foo].must_equal 3 r[:bar].must_equal 2 end it "should use default_association_type_options as defaults" do @c.default_association_type_options[:many_to_one] = {:foo=>1, :bar=>2} @c.many_to_one :c, :class=>@c, :foo=>3 r = @c.association_reflection(:c) r[:foo].must_equal 3 r[:bar].must_equal 2 @c.one_to_many :cs, :class=>@c, :foo=>3 r = @c.association_reflection(:cs) r[:foo].must_equal 3 r[:bar].must_be_nil end it "should inherit default_association_type_options" do @c.default_association_type_options[:many_to_one] = {:foo=>1, :bar=>2} c = Class.new(@c) c.many_to_one :c, :class=>c, :foo=>3 r = c.association_reflection(:c) r[:foo].must_equal 3 r[:bar].must_equal 2 @c.default_association_type_options[:many_to_one][:bar] = 4 c.many_to_one :d, :class=>c, :foo=>3 r = c.association_reflection(:d) r[:foo].must_equal 3 r[:bar].must_equal 2 c.one_to_many :ds, :class=>c, :foo=>3 r = c.association_reflection(:ds) r[:foo].must_equal 3 r[:bar].must_be_nil end end describe "Sequel::Model.freeze" do it "should freeze the model class and not allow any changes to associations" do model = Class.new(Sequel::Model(:items)) model.many_to_one :foo, :class=>model, :key=>:id model.default_association_options = {:read_only=>true} model.freeze model.association_reflections.frozen?.must_equal true model.association_reflection(:foo).frozen?.must_equal true
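# Freezing also propagates to the per-class association caches checked by
# the assertions that follow. A minimal sketch of the intended production
# pattern (Widget is a hypothetical model):
#
#   class Widget < Sequel::Model; many_to_one :owner; end
#   Widget.finalize_associations
#   Widget.freeze
#   Widget.association_reflection(:owner).frozen? # => true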
model.autoreloading_associations.frozen?.must_equal true model.autoreloading_associations[:id].frozen?.must_equal true model.default_association_options.frozen?.must_equal true end it "should allow subclasses of frozen model classes to modify associations" do model = Class.new(Sequel::Model(:items)) model.many_to_one :foo, :class=>model, :key=>:id model.freeze model = Class.new(model) model.dataset = :items2 model.association_reflection(:foo).frozen?.must_equal true model.autoreloading_associations.frozen?.must_equal false model.autoreloading_associations[:id].frozen?.must_equal false model.many_to_one :bar, :class=>model, :key=>:id model.many_to_one :foo, :class=>model, :key=>:id model.association_reflections.frozen?.must_equal false model.association_reflection(:foo).frozen?.must_equal false model.association_reflection(:bar).frozen?.must_equal false model.default_association_options.frozen?.wont_equal true model.default_association_options = {:read_only=>true} model.default_association_options.frozen?.wont_equal true end end describe "Sequel::Model.finalize_associations" do before do class ::MtmItem < Sequel::Model set_primary_key :mtm_id many_to_many :items many_to_one :item end class ::OtoItem < Sequel::Model set_primary_key :oto_id end class ::Item < Sequel::Model many_to_one :item one_to_many :items, :limit=>10 one_to_one :mtm_item many_to_many :mtm_items one_through_one :oto_item end [MtmItem, OtoItem, Item].each(&:finalize_associations) end after do Object.send(:remove_const, :Item) Object.send(:remove_const, :MtmItem) Object.send(:remove_const, :OtoItem) end it "AssociationReflection should have default finalize_settings method" do Sequel::Model::Associations::AssociationReflection.new.finalize_settings[:associated_class].must_equal :class end it "should finalize many_to_one associations" do r = Item.association_reflection(:item) r[:class].must_equal Item r[:_dataset].sql.must_equal "SELECT * FROM items LIMIT 1" r[:associated_eager_dataset].sql.must_equal "SELECT * FROM items" r[:filter_by_associations_conditions_dataset].sql.must_equal "SELECT items.id FROM items WHERE (items.id IS NOT NULL)" r[:placeholder_loader].wont_be_nil r[:predicate_key].must_equal Sequel.qualify(:items, :id) r[:primary_key].must_equal :id r[:primary_keys].must_equal [:id] r[:primary_key_method].must_equal :id r[:primary_key_methods].must_equal [:id] r[:qualified_primary_key].must_equal Sequel.qualify(:items, :id) r.fetch(:reciprocal_type).must_equal :one_to_many r.fetch(:reciprocal).must_equal :items end it "should finalize one_to_many associations" do r = Item.association_reflection(:items) r[:class].must_equal Item r[:_dataset].sql.must_equal "SELECT * FROM items LIMIT 10" r[:associated_eager_dataset].sql.must_equal "SELECT * FROM items" r[:_eager_limit_strategy].must_equal :union r[:filter_by_associations_conditions_dataset].sql.must_equal "SELECT items.item_id FROM items WHERE ((items.item_id IS NOT NULL) AND (items.id IN (SELECT t1.id FROM items AS t1 WHERE (t1.item_id = items.item_id) LIMIT 10)))" r[:placeholder_loader].wont_be_nil r[:predicate_key].must_equal Sequel.qualify(:items, :item_id) r[:predicate_keys].must_equal [Sequel.qualify(:items, :item_id)] r[:qualified_primary_key].must_equal Sequel.qualify(:items, :id) r.fetch(:reciprocal).must_equal :item end it "should finalize one_to_one associations" do r = Item.association_reflection(:mtm_item) r[:class].must_equal MtmItem r[:_dataset].sql.must_equal "SELECT * FROM mtm_items LIMIT 1" r[:associated_eager_dataset].sql.must_equal "SELECT * FROM 
mtm_items" r[:_eager_limit_strategy].must_be_nil r[:filter_by_associations_conditions_dataset].sql.must_equal "SELECT mtm_items.item_id FROM mtm_items WHERE (mtm_items.item_id IS NOT NULL)" r[:placeholder_loader].wont_be_nil r[:predicate_key].must_equal Sequel.qualify(:mtm_items, :item_id) r[:predicate_keys].must_equal [Sequel.qualify(:mtm_items, :item_id)] r[:qualified_primary_key].must_equal Sequel.qualify(:items, :id) r.fetch(:reciprocal).must_equal :item end it "should finalize many_to_many associations" do r = Item.association_reflection(:mtm_items) r[:class].must_equal MtmItem r[:_dataset].sql.must_equal "SELECT mtm_items.* FROM mtm_items INNER JOIN items_mtm_items ON (items_mtm_items.mtm_item_id = mtm_items.mtm_id)" r[:associated_eager_dataset].sql.must_equal "SELECT mtm_items.* FROM mtm_items INNER JOIN items_mtm_items ON (items_mtm_items.mtm_item_id = mtm_items.mtm_id)" r[:_eager_limit_strategy].must_be_nil r[:filter_by_associations_conditions_dataset].sql.must_equal "SELECT items_mtm_items.item_id FROM mtm_items INNER JOIN items_mtm_items ON (items_mtm_items.mtm_item_id = mtm_items.mtm_id) WHERE (items_mtm_items.item_id IS NOT NULL)" r[:placeholder_loader].wont_be_nil r[:predicate_key].must_equal Sequel.qualify(:items_mtm_items, :item_id) r[:predicate_keys].must_equal [Sequel.qualify(:items_mtm_items, :item_id)] r.fetch(:reciprocal).must_equal :items r[:associated_key_array].must_equal [Sequel.qualify(:items_mtm_items, :item_id).as(:x_foreign_key_x)] r[:qualified_right_key].must_equal Sequel.qualify(:items_mtm_items, :mtm_item_id) r[:join_table_source].must_equal :items_mtm_items r[:join_table_alias].must_equal :items_mtm_items r[:qualified_right_primary_key].must_equal Sequel.qualify(:mtm_items, :mtm_id) r[:right_primary_key].must_equal :mtm_id r[:right_primary_keys].must_equal [:mtm_id] r[:right_primary_key_method].must_equal :mtm_id r[:right_primary_key_methods].must_equal [:mtm_id] r[:select].must_equal Sequel::SQL::ColumnAll.new(:mtm_items) end it "should finalize one_through_one associations" do r = Item.association_reflection(:oto_item) r[:class].must_equal OtoItem r[:_dataset].sql.must_equal "SELECT oto_items.* FROM oto_items INNER JOIN items_oto_items ON (items_oto_items.oto_item_id = oto_items.oto_id) LIMIT 1" r[:associated_eager_dataset].sql.must_equal "SELECT oto_items.* FROM oto_items INNER JOIN items_oto_items ON (items_oto_items.oto_item_id = oto_items.oto_id)" r[:_eager_limit_strategy].must_be_nil r[:filter_by_associations_conditions_dataset].sql.must_equal "SELECT items_oto_items.item_id FROM oto_items INNER JOIN items_oto_items ON (items_oto_items.oto_item_id = oto_items.oto_id) WHERE (items_oto_items.item_id IS NOT NULL)" r[:placeholder_loader].wont_be_nil r[:predicate_key].must_equal Sequel.qualify(:items_oto_items, :item_id) r[:predicate_keys].must_equal [Sequel.qualify(:items_oto_items, :item_id)] r[:associated_key_array].must_equal [Sequel.qualify(:items_oto_items, :item_id).as(:x_foreign_key_x)] r[:qualified_right_key].must_equal Sequel.qualify(:items_oto_items, :oto_item_id) r[:join_table_source].must_equal :items_oto_items r[:join_table_alias].must_equal :items_oto_items r[:qualified_right_primary_key].must_equal Sequel.qualify(:oto_items, :oto_id) r[:right_primary_key].must_equal :oto_id r[:right_primary_keys].must_equal [:oto_id] r[:right_primary_key_method].must_equal :oto_id r[:right_primary_key_methods].must_equal [:oto_id] r[:select].must_equal Sequel::SQL::ColumnAll.new(:oto_items) end it "should have finalize work when not caching associations" 
do Item.cache_associations = false Item.many_to_many :smtm_items, :class=>MtmItem, :reciprocal=>:items Item.finalize_associations r = Item.association_reflection(:smtm_items) r[:cache].must_be_nil end end describe Sequel::Model::Associations::AssociationReflection, "#assign_singular?" do it "should be true unless singular association without offset" do c = Class.new(Sequel::Model(:a)) c.many_to_one :c, :class=>c, :key=>:c_id c.association_reflection(:c).assign_singular?.must_equal true c.one_to_many :cs, :class=>c, :key=>:c_id c.association_reflection(:cs).assign_singular?.must_equal false c.one_to_one :c, :class=>c, :key=>:c_id c.association_reflection(:c).assign_singular?.must_equal true c.one_to_one :c, :class=>c, :key=>:c_id, :eager_limit_strategy=>:ruby c.association_reflection(:c).assign_singular?.must_equal false c.one_to_one :c, :class=>c, :key=>:c_id, :limit=>[nil, 1], :eager_limit_strategy=>:ruby c.association_reflection(:c).assign_singular?.must_equal false end end
sequel-5.63.0/spec/model/associations_spec.rb
require_relative "spec_helper" describe Sequel::Model, "associate" do it "should use explicit class if given a class, symbol, or string" do begin klass = Class.new(Sequel::Model(:nodes)) class ::ParParent < Sequel::Model; end klass.associate :many_to_one, :par_parent0, :class=>ParParent klass.associate :one_to_many, :par_parent1s, :class=>'ParParent' klass.associate :many_to_many, :par_parent2s, :class=>:ParParent klass.association_reflection(:"par_parent0").associated_class.must_equal ParParent klass.association_reflection(:"par_parent1s").associated_class.must_equal ParParent klass.association_reflection(:"par_parent2s").associated_class.must_equal ParParent ensure Object.send(:remove_const, :ParParent) end end it "should default to associating to other models in the same scope" do begin class ::AssociationModuleTest class Album < Sequel::Model many_to_one :artist many_to_many :tags end class Artist< Sequel::Model one_to_many :albums end class Tag < Sequel::Model many_to_many :albums end end ::AssociationModuleTest::Album.association_reflection(:artist).associated_class.must_equal ::AssociationModuleTest::Artist ::AssociationModuleTest::Album.association_reflection(:tags).associated_class.must_equal ::AssociationModuleTest::Tag ::AssociationModuleTest::Artist.association_reflection(:albums).associated_class.must_equal ::AssociationModuleTest::Album ::AssociationModuleTest::Tag.association_reflection(:albums).associated_class.must_equal ::AssociationModuleTest::Album ensure Object.send(:remove_const, :AssociationModuleTest) end end it "should add a model_object and
association_reflection accessors to the dataset, and return it with the current model object" do klass = Class.new(Sequel::Model(:nodes)) do columns :id, :a_id end mod = Module.new do def blah filter{|o| o.__send__(association_reflection[:key]) > model_object.id*2} end end klass.associate :many_to_one, :a, :class=>klass klass.associate :one_to_many, :bs, :key=>:b_id, :class=>klass, :extend=>mod klass.associate :many_to_many, :cs, :class=>klass node = klass.load(:id=>1) node.a_dataset.model_object.must_equal node node.bs_dataset.model_object.must_equal node node.cs_dataset.model_object.must_equal node node.a_dataset.association_reflection.must_equal klass.association_reflection(:a) node.bs_dataset.association_reflection.must_equal klass.association_reflection(:bs) node.cs_dataset.association_reflection.must_equal klass.association_reflection(:cs) node.bs_dataset.blah.sql.must_equal 'SELECT * FROM nodes WHERE ((nodes.b_id = 1) AND (b_id > 2))' end it "should allow extending the dataset with :extend option" do klass = Class.new(Sequel::Model(:nodes)) do columns :id, :a_id end mod = Module.new do def blah 1 end end mod2 = Module.new do def blar 2 end end klass.associate :many_to_one, :a, :class=>klass, :extend=>mod klass.associate :one_to_many, :bs, :class=>klass, :extend=>[mod] klass.associate :many_to_many, :cs, :class=>klass, :extend=>[mod, mod2] node = klass.load(:id=>1) node.a_dataset.blah.must_equal 1 node.bs_dataset.blah.must_equal 1 node.cs_dataset.blah.must_equal 1 node.cs_dataset.blar.must_equal 2 end it "should clone an existing association with the :clone option" do begin class ::ParParent < Sequel::Model; end class ::ParParent2 < Sequel::Model; end klass = Class.new(Sequel::Model(:nodes)) klass.many_to_one(:par_parent, :order=>:a){|ds| 1} klass.one_to_many(:par_parent1s, :class=>'ParParent', :limit=>12){|ds| 4} klass.many_to_many(:par_parent2s, :class=>:ParParent, :uniq=>true){|ds| 2} klass.many_to_one :par, :clone=>:par_parent, :select=>:b klass.one_to_many :par1s, :clone=>:par_parent1s, :order=>:b, :limit=>10, :block=>nil klass.many_to_many(:par2s, :clone=>:par_parent2s, :order=>:c, :class=>:ParParent2){|ds| 3} klass.many_to_one :par3, :clone=>:par klass.association_reflection(:par).associated_class.must_equal ParParent klass.association_reflection(:par1s).associated_class.must_equal ParParent klass.association_reflection(:par2s).associated_class.must_equal ParParent2 klass.association_reflection(:par)[:order].must_equal :a klass.association_reflection(:par).select.must_equal :b klass.association_reflection(:par)[:block].call.must_equal 1 klass.association_reflection(:par)[:eager_block].call.must_equal 1 klass.association_reflection(:par1s)[:limit].must_equal 10 klass.association_reflection(:par1s)[:order].must_equal :b klass.association_reflection(:par1s)[:block].must_be_nil klass.association_reflection(:par2s)[:after_load].length.must_equal 1 klass.association_reflection(:par2s)[:order].must_equal :c klass.association_reflection(:par2s)[:block].call.must_equal 3 klass.association_reflection(:par3)[:block].call.must_equal 1 klass.association_reflection(:par3)[:eager_block].call.must_equal 1 ensure Object.send(:remove_const, :ParParent) Object.send(:remove_const, :ParParent2) end end it "should respect :no_dataset_method option to not create a dataset method" do c = Class.new(Sequel::Model(:c)) c.many_to_one :c, :no_dataset_method=>true c.method_defined?(:c_dataset).must_equal false c.method_defined?(:c).must_equal true end it "should respect :no_association_method option to not 
create an association method" do c = Class.new(Sequel::Model(:c)) c.many_to_one :c, :no_association_method=>true c.method_defined?(:c_dataset).must_equal true c.method_defined?(:c).must_equal false end it "should respect :setter=>nil option to not create a setter method" do c = Class.new(Sequel::Model(:c)) c.many_to_one :c, :setter=>nil c.method_defined?(:c).must_equal true c.method_defined?(:c=).must_equal false c = Class.new(Sequel::Model(:c)) c.one_to_one :c, :setter=>nil c.method_defined?(:c).must_equal true c.method_defined?(:c=).must_equal false c = Class.new(Sequel::Model(:c)) c.one_through_one :c, :setter=>nil c.method_defined?(:c).must_equal true c.method_defined?(:c=).must_equal false end it "should respect :adder=>nil option to not create a add_* method" do c = Class.new(Sequel::Model(:c)) c.one_to_many :cs, :adder=>nil c.method_defined?(:add_c).must_equal false c.method_defined?(:remove_c).must_equal true c.method_defined?(:remove_all_cs).must_equal true c = Class.new(Sequel::Model(:c)) c.many_to_many :cs, :adder=>nil c.method_defined?(:add_c).must_equal false c.method_defined?(:remove_c).must_equal true c.method_defined?(:remove_all_cs).must_equal true end it "should respect :remover=>nil option to not create a remove_* method" do c = Class.new(Sequel::Model(:c)) c.one_to_many :cs, :remover=>nil c.method_defined?(:add_c).must_equal true c.method_defined?(:remove_c).must_equal false c.method_defined?(:remove_all_cs).must_equal true c = Class.new(Sequel::Model(:c)) c.many_to_many :cs, :remover=>nil c.method_defined?(:add_c).must_equal true c.method_defined?(:remove_c).must_equal false c.method_defined?(:remove_all_cs).must_equal true end it "should respect :clearer=>nil option to not create a remove_all_* method" do c = Class.new(Sequel::Model(:c)) c.one_to_many :cs, :clearer=>nil c.method_defined?(:add_c).must_equal true c.method_defined?(:remove_c).must_equal true c.method_defined?(:remove_all_cs).must_equal false c = Class.new(Sequel::Model(:c)) c.many_to_many :cs, :clearer=>nil c.method_defined?(:add_c).must_equal true c.method_defined?(:remove_c).must_equal true c.method_defined?(:remove_all_cs).must_equal false end it "should raise an error if attempting to clone an association of differing type" do c = Class.new(Sequel::Model(:c)) c.many_to_one :c proc{c.one_to_many :cs, :clone=>:c}.must_raise(Sequel::Error) end it "should allow overriding the :instance_specific option" do c = Class.new(Sequel::Model(:c)) c.many_to_one :c, :instance_specific=>true c.association_reflection(:c)[:instance_specific].must_equal true c.many_to_one :c, :instance_specific=>false do |ds| ds end c.association_reflection(:c)[:instance_specific].must_equal false end it "should set :allow_eager false by default if :instance_specific option is set and eager loading not otherwise allowed" do c = Class.new(Sequel::Model(:c)) c.many_to_one :c, :instance_specific=>true c.association_reflection(:c)[:allow_eager].must_equal false c.many_to_one :c, :instance_specific=>true, :allow_eager=>true c.association_reflection(:c)[:allow_eager].must_equal true c.many_to_one :c, :instance_specific=>true, :eager_loader=>proc{} c.association_reflection(:c)[:allow_eager].must_be_nil c.many_to_one :c do |_| end c.association_reflection(:c)[:allow_eager].must_be_nil c.many_to_one :c, :dataset=>proc{|_|} c.association_reflection(:c)[:allow_eager].must_equal false end it "should allow cloning of one_to_many to one_to_one associations and vice-versa" do c = Class.new(Sequel::Model(:c)) c.one_to_one :c c.one_to_many :cs, 
:clone=>:c c.one_to_one :c2, :clone=>:cs end it "should allow cloning of many_to_many to one_through_one associations and vice-versa" do c = Class.new(Sequel::Model(:c)) c.many_to_many :c c.one_through_one :cs, :clone=>:c c.many_to_many :c2, :clone=>:cs end it "should clear associations cache when refreshing object manually" do c = Class.new(Sequel::Model(:c)) c.many_to_one :c o = c.new o.associations[:c] = 1 o.refresh o.associations.must_equal({}) end it "should not clear associations cache when refreshing object after save" do c = Class.new(Sequel::Model(:c)) c.many_to_one :c o = c.new o.associations[:c] = 1 o.save o.associations.must_equal(:c=>1) end it "should not clear associations cache when saving with insert_select" do ds = Sequel::Model.db[:c].with_extend do def supports_insert_select?; true end def insert_select(*) {:id=>1} end end c = Class.new(Sequel::Model(ds)) c.many_to_one :c o = c.new o.associations[:c] = 1 o.save o.associations.must_equal(:c=>1) end it "should not autoreload associations when the current foreign key value is nil" do c = Class.new(Sequel::Model(Sequel::Model.db[:c])) c.many_to_one :c o = c.new o.associations[:c] = 1 o[:c_id] = 2 o.associations[:c].must_equal 1 o = c.load({}) o.associations[:c] = 1 o[:c_id] = 2 o.associations[:c].must_equal 1 end it "should autoreload associations when the current foreign key is nil and the current associated value is nil" do c = Class.new(Sequel::Model(Sequel::Model.db[:c])) c.many_to_one :c o = c.new o.associations[:c] = nil o[:c_id] = 2 o.associations.must_be_empty o = c.load({}) o.associations[:c] = nil o[:c_id] = 2 o.associations.must_be_empty end it "should handle autoreloading for multiple associations when the current foreign key is nil" do c = Class.new(Sequel::Model(Sequel::Model.db[:c])) c.many_to_one :c c.many_to_one :d, :key=>:c_id o = c.new o.associations[:c] = nil o.associations[:d] = 1 o[:c_id] = nil o.associations.must_equal(:c=>nil, :d=>1) o[:c_id] = 2 o.associations.must_equal(:d=>1) o[:c_id] = 2 o.associations.must_equal(:d=>1) o[:c_id] = nil o.associations.must_be_empty o = c.load({:c_id=>nil}) o.associations[:c] = nil o.associations[:d] = 1 o[:c_id] = nil o.associations.must_equal(:c=>nil, :d=>1) o[:c_id] = 2 o.associations.must_equal(:d=>1) o[:c_id] = 2 o.associations.must_equal(:d=>1) o[:c_id] = nil o.associations.must_be_empty end it "should raise error for unsupported type" do klass = Class.new(Sequel::Model(:nodes)) proc{klass.associate(:foo, :bar)}.must_raise Sequel::Error end it "should raise error for unsupported name" do klass = Class.new(Sequel::Model(:nodes)) proc{klass.associate(:many_to_one, "bar")}.must_raise Sequel::Error end end describe Sequel::Model, "many_to_one" do before do @c2 = Class.new(Sequel::Model(:nodes)) do unrestrict_primary_key columns :id, :parent_id, :par_parent_id, :blah end @dataset = @c2.dataset DB.reset end it "should raise an error if associated class does not have a primary key, and :primary_key is not specified" do @c2.no_primary_key @c2.many_to_one :parent, :class => @c2 d = @c2.new(:id => 1, :parent_id => 234) proc{d.parent}.must_raise(Sequel::Error) DB.sqls.must_equal [] end it "should raise an error if associated class does not have a primary key, and :primary_key is not specified, with an association block" do @c2.no_primary_key @c2.many_to_one :parent, :class => @c2 do |ds| ds end d = @c2.new(:id => 1, :parent_id => 234) proc{d.parent}.must_raise(Sequel::Error) DB.sqls.must_equal [] end it "should use implicit key if omitted" do @c2.many_to_one :parent, 
:class => @c2 d = @c2.new(:id => 1, :parent_id => 234) p = d.parent p.class.must_equal @c2 p.values.must_equal(:x => 1, :id => 1) DB.sqls.must_equal ["SELECT * FROM nodes WHERE id = 234"] end it "should allow association with the same name as the key if :key_column is given" do @c2.def_column_alias(:parent_id_id, :parent_id) @c2.many_to_one :parent_id, :key_column=>:parent_id, :class => @c2 d = @c2.load(:id => 1, :parent_id => 234) d.parent_id_dataset.sql.must_equal "SELECT * FROM nodes WHERE (nodes.id = 234) LIMIT 1" d.parent_id.must_equal @c2.load(:x => 1, :id => 1) d.parent_id_id.must_equal 234 d[:parent_id].must_equal 234 DB.sqls.must_equal ["SELECT * FROM nodes WHERE id = 234"] d.parent_id_id = 3 d.parent_id_id.must_equal 3 d[:parent_id].must_equal 3 end it "should use implicit class if omitted" do begin class ::ParParent < Sequel::Model; end @c2.many_to_one :par_parent @c2.new(:id => 1, :par_parent_id => 234).par_parent.class.must_equal ParParent DB.sqls.must_equal ["SELECT * FROM par_parents WHERE id = 234"] ensure Object.send(:remove_const, :ParParent) end end it "should use class inside module if given as a string" do begin module ::Par class Parent < Sequel::Model; end end @c2.many_to_one :par_parent, :class=>"Par::Parent" @c2.new(:id => 1, :par_parent_id => 234).par_parent.class.must_equal Par::Parent DB.sqls.must_equal ["SELECT * FROM parents WHERE id = 234"] ensure Object.send(:remove_const, :Par) end end it "should use explicit key if given" do @c2.many_to_one :parent, :class => @c2, :key => :blah d = @c2.new(:id => 1, :blah => 567) p = d.parent p.class.must_equal @c2 p.values.must_equal(:x => 1, :id => 1) DB.sqls.must_equal ["SELECT * FROM nodes WHERE id = 567"] end it "should use explicit composite key if given" do @c2.set_primary_key [:blah, :id] @c2.many_to_one :parent, :class => @c2, :key => [:id, :blah], :primary_key=>[:blah, :id] d = @c2.new(:id => 1, :blah => 567) p = d.parent p.class.must_equal @c2 p.values.must_equal(:x => 1, :id => 1) DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((blah = 1) AND (id = 567)) LIMIT 1"] end it "should respect :qualify => false option" do @c2.many_to_one :parent, :class => @c2, :key => :blah, :qualify=>false @c2.new(:id => 1, :blah => 567).parent DB.sqls.must_equal ["SELECT * FROM nodes WHERE id = 567"] end it "should use :primary_key option if given" do @c2.many_to_one :parent, :class => @c2, :key => :blah, :primary_key => :pk @c2.new(:id => 1, :blah => 567).parent DB.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.pk = 567) LIMIT 1"] end it "should support composite keys" do @c2.many_to_one :parent, :class => @c2, :key=>[:id, :parent_id], :primary_key=>[:parent_id, :id] @c2.new(:id => 1, :parent_id => 234).parent DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.parent_id = 1) AND (nodes.id = 234)) LIMIT 1"] end it "should not issue query if not all keys have values" do @c2.many_to_one :parent, :class => @c2, :key=>[:id, :parent_id], :primary_key=>[:parent_id, :id] @c2.new(:id => 1, :parent_id => nil).parent.must_be_nil DB.sqls.must_equal [] end it "should raise an Error unless same number of composite keys used" do proc{@c2.many_to_one :parent, :class => @c2, :primary_key=>[:parent_id, :id]}.must_raise(Sequel::Error) proc{@c2.many_to_one :parent, :class => @c2, :key=>[:id, :parent_id], :primary_key=>:id}.must_raise(Sequel::Error) proc{@c2.many_to_one :parent, :class => @c2, :key=>:id, :primary_key=>[:parent_id, :id]}.must_raise(Sequel::Error) proc{@c2.many_to_one :parent, :class => @c2, :key=>[:id, :parent_id, :blah], 
:primary_key=>[:parent_id, :id]}.must_raise(Sequel::Error) end it "should use :select option if given" do @c2.many_to_one :parent, :class => @c2, :key => :blah, :select=>[:id, :name] @c2.new(:id => 1, :blah => 567).parent DB.sqls.must_equal ["SELECT id, name FROM nodes WHERE (nodes.id = 567) LIMIT 1"] end it "should use :conditions option if given" do @c2.many_to_one :parent, :class => @c2, :key => :blah, :conditions=>{:a=>32} @c2.new(:id => 1, :blah => 567).parent DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((a = 32) AND (nodes.id = 567)) LIMIT 1"] @c2.many_to_one :parent, :class => @c2, :key => :blah, :conditions=>:a @c2.new(:id => 1, :blah => 567).parent DB.sqls.must_equal ["SELECT * FROM nodes WHERE (a AND (nodes.id = 567)) LIMIT 1"] end it "should support :order, :limit (only for offset), and :dataset options, as well as a block" do @c2.many_to_one :child_20, :class => @c2, :key=>:id, :dataset=>proc{model.filter(:parent_id=>pk)}, :limit=>[10,20], :order=>:name do |ds| ds.filter{x > 1} end @c2.load(:id => 100).child_20 DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((parent_id = 100) AND (x > 1)) ORDER BY name LIMIT 1 OFFSET 20"] end it "should return nil if key value is nil" do @c2.many_to_one :parent, :class => @c2 @c2.new(:id => 1).parent.must_be_nil DB.sqls.must_equal [] end it "should cache negative lookup" do @c2.many_to_one :parent, :class => @c2 @c2.dataset = @c2.dataset.with_fetch([]) d = @c2.new(:id => 1, :parent_id=>555) DB.sqls.must_equal [] d.parent.must_be_nil DB.sqls.must_equal ['SELECT * FROM nodes WHERE id = 555'] d.parent.must_be_nil DB.sqls.must_equal [] end it "should define a setter method" do @c2.many_to_one :parent, :class => @c2 d = @c2.new(:id => 1) d.parent = @c2.new(:id => 4321) d.values.must_equal(:id => 1, :parent_id => 4321) d.parent = nil d.values.must_equal(:id => 1, :parent_id => nil) e = @c2.new(:id => 6677) d.parent = e d.values.must_equal(:id => 1, :parent_id => 6677) end it "should have the setter method respect the :primary_key option" do @c2.many_to_one :parent, :class => @c2, :primary_key=>:blah d = @c2.new(:id => 1) d.parent = @c2.new(:id => 4321, :blah=>444) d.values.must_equal(:id => 1, :parent_id => 444) d.parent = nil d.values.must_equal(:id => 1, :parent_id => nil) e = @c2.new(:id => 6677, :blah=>8) d.parent = e d.values.must_equal(:id => 1, :parent_id => 8) end it "should have the setter method respect composite keys" do @c2.many_to_one :parent, :class => @c2, :key=>[:id, :parent_id], :primary_key=>[:parent_id, :id] d = @c2.new(:id => 1, :parent_id=> 234) d.parent = @c2.new(:id => 4, :parent_id=>52) d.values.must_equal(:id => 52, :parent_id => 4) d.parent = nil d.values.must_equal(:id => nil, :parent_id => nil) e = @c2.new(:id => 6677, :parent_id=>8) d.parent = e d.values.must_equal(:id => 8, :parent_id => 6677) end it "should not persist changes until saved" do @c2.many_to_one :parent, :class => @c2 d = @c2.load(:id => 1) DB.reset d.parent = @c2.new(:id => 345) DB.sqls.must_equal [] d.save_changes DB.sqls.must_equal ['UPDATE nodes SET parent_id = 345 WHERE (id = 1)'] end it "should populate cache when accessed" do @c2.many_to_one :parent, :class => @c2 d = @c2.load(:id => 1) d.parent_id = 234 d.associations[:parent].must_be_nil @c2.dataset = @c2.dataset.with_fetch(:id=>234) e = d.parent DB.sqls.must_equal ["SELECT * FROM nodes WHERE id = 234"] d.associations[:parent].must_equal e end it "should populate cache when assigned" do @c2.many_to_one :parent, :class => @c2 d = @c2.create(:id => 1) DB.reset d.associations[:parent].must_be_nil 
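# The associations hash is the per-instance cache consulted before issuing
# SQL, so the setter call below should populate it without a query. A hedged
# sketch of the same idea outside these specs (album/artist hypothetical):
#
#   album.artist                 # first access queries, then caches
#   album.associations[:artist]  # returns the cached object (or nil)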
d.parent = @c2.new(:id => 234) e = d.parent d.associations[:parent].must_equal e DB.sqls.must_equal [] end it "should use cache if available" do @c2.many_to_one :parent, :class => @c2 d = @c2.create(:id => 1, :parent_id => 234) DB.reset d.associations[:parent] = 42 d.parent.must_equal 42 DB.sqls.must_equal [] end it "should not use cache if asked to reload" do @c2.many_to_one :parent, :class => @c2 d = @c2.create(:id => 1) DB.reset d.parent_id = 234 d.associations[:parent] = 42 d.parent(:reload=>true).wont_equal 42 DB.sqls.must_equal ["SELECT * FROM nodes WHERE id = 234"] end it "should use a callback if given one as the argument" do @c2.many_to_one :parent, :class => @c2 d = @c2.create(:id => 1) DB.reset d.parent_id = 234 d.associations[:parent] = 42 d.parent{|ds| ds.where{name > 'M'}}.wont_equal 42 DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 234) AND (name > 'M')) LIMIT 1"] end it "should use a block given to the association method as a callback" do @c2.many_to_one :parent, :class => @c2 d = @c2.create(:id => 1) DB.reset d.parent_id = 234 d.associations[:parent] = 42 d.parent{|ds| ds.filter{name > 'M'}}.wont_equal 42 DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 234) AND (name > 'M')) LIMIT 1"] end it "should have the setter add to the reciprocal one_to_many cached association array if it exists" do @c2.many_to_one :parent, :class => @c2 @c2.one_to_many :children, :class => @c2, :key=>:parent_id @c2.dataset = @c2.dataset.with_fetch([]) d = @c2.new(:id => 1) e = @c2.new(:id => 2) DB.sqls.must_equal [] d.parent = e e.children.wont_include(d) DB.sqls.must_equal ['SELECT * FROM nodes WHERE (nodes.parent_id = 2)'] d = @c2.new(:id => 1) e = @c2.new(:id => 2) e.children.wont_include(d) DB.sqls.must_equal ['SELECT * FROM nodes WHERE (nodes.parent_id = 2)'] d.parent = e e.children.must_include(d) DB.sqls.must_equal [] end it "should have setter deal with a one_to_one reciprocal" do @c2.many_to_one :parent, :class => @c2, :key=>:parent_id @c2.one_to_one :child, :class => @c2, :key=>:parent_id d = @c2.new(:id => 1) e = @c2.new(:id => 2) e.associations[:child] = nil d.parent = e e.child.must_equal d d.parent = nil e.child.must_be_nil d.parent = e e.child.must_equal d f = @c2.new(:id => 3) d.parent = nil e.child.must_be_nil e.associations[:child] = f d.parent = e e.child.must_equal d end it "should have the setter remove the object from the previous associated object's reciprocal one_to_many cached association array if it exists" do @c2.many_to_one :parent, :class => @c2 @c2.one_to_many :children, :class => @c2, :key=>:parent_id @c2.dataset = @c2.dataset.with_fetch([]) d = @c2.new(:id => 1) e = @c2.new(:id => 2) f = @c2.new(:id => 3) e.children.wont_include(d) f.children.wont_include(d) DB.reset d.parent = e e.children.must_include(d) d.parent = f f.children.must_include(d) e.children.wont_include(d) d.parent = nil f.children.wont_include(d) DB.sqls.must_equal [] end it "should have the setter not modify the reciprocal if set to same value as current" do @c2.many_to_one :parent, :class => @c2 @c2.one_to_many :children, :class => @c2, :key=>:parent_id c1 = @c2.load(:id => 1, :parent_id=>nil) c2 = @c2.load(:id => 2, :parent_id=>1) c3 = @c2.load(:id => 3, :parent_id=>1) c1.associations[:children] = [c2, c3] c2.associations[:parent] = c1 c2.parent = c1 c1.children.must_equal [c2, c3] DB.sqls.must_equal [] end it "should get all matching records and only return the first if :key option is set to nil" do @c2.dataset = @c2.dataset.with_fetch([{:id=>1, :parent_id=>0, 
:par_parent_id=>3, :blah=>4, :children_id=>2, :children_parent_id=>1, :children_par_parent_id=>5, :children_blah=>6}, {}]) @c2.dataset.columns(:id, :parent_id, :par_parent_id, :blah) @c2.one_to_many :children, :class => @c2, :key=>:parent_id @c2.many_to_one :first_grand_parent, :class => @c2, :key=>nil, :eager_graph=>:children, :dataset=>proc{model.filter(:children_id=>parent_id)} p = @c2.new(:parent_id=>2) fgp = p.first_grand_parent DB.sqls.must_equal ["SELECT nodes.id, nodes.parent_id, nodes.par_parent_id, nodes.blah, children.id AS children_id, children.parent_id AS children_parent_id, children.par_parent_id AS children_par_parent_id, children.blah AS children_blah FROM nodes LEFT OUTER JOIN nodes AS children ON (children.parent_id = nodes.id) WHERE (children_id = 2)"] fgp.values.must_equal(:id=>1, :parent_id=>0, :par_parent_id=>3, :blah=>4) fgp.children.first.values.must_equal(:id=>2, :parent_id=>1, :par_parent_id=>5, :blah=>6) end it "should not create the setter method if :read_only option is used" do @c2.many_to_one :parent, :class => @c2, :read_only=>true @c2.instance_methods.must_include(:parent) @c2.instance_methods.wont_include(:parent=) end it "should not add associations methods directly to class" do @c2.many_to_one :parent, :class => @c2 @c2.instance_methods.must_include(:parent) @c2.instance_methods.must_include(:parent=) @c2.instance_methods(false).wont_include(:parent) @c2.instance_methods(false).wont_include(:parent=) end it "should add associations methods to the :methods_module option" do m = Module.new @c2.many_to_one :parent, :class => @c2, :methods_module=>m m.instance_methods.must_include(:parent) m.instance_methods.must_include(:parent=) @c2.instance_methods.wont_include(:parent) @c2.instance_methods.wont_include(:parent=) end it "should add associations methods directly to class if :methods_module is the class itself" do @c2.many_to_one :parent, :class => @c2, :methods_module=>@c2 @c2.instance_methods(false).must_include(:parent) @c2.instance_methods(false).must_include(:parent=) end it "should raise an error if trying to set a model object that doesn't have a valid primary key" do @c2.many_to_one :parent, :class => @c2 p = @c2.new c = @c2.load(:id=>123) proc{c.parent = p}.must_raise(Sequel::Error) end it "should make the change to the foreign_key value inside a _association= method" do @c2.many_to_one :parent, :class => @c2 @c2.private_instance_methods.must_include(:_parent=) p = @c2.new c = @c2.load(:id=>123) def p._parent=(x) @x = x end def p.parent_id=; raise; end p.parent = c p.instance_variable_get(:@x).must_equal c end it "should have the :setter option define the _association= method" do @c2.many_to_one :parent, :class => @c2, :setter=>proc{|x| @x = x} p = @c2.new c = @c2.load(:id=>123) def p.parent_id=; raise; end p.parent = c p.instance_variable_get(:@x).must_equal c end it "should support (before|after)_set callbacks" do h = [] @c2.many_to_one :parent, :class => @c2, :before_set=>[proc{|x,y| h << x.pk; h << (y ? -y.pk : :y)}, :blah], :after_set=>proc{h << 3} @c2.class_eval do self::Foo = h def []=(a, v) a == :parent_id ? (model::Foo << (v ? 4 : 5)) : super end def blah(x) model::Foo << (x ? 
x.pk : :x) end def blahr(x) model::Foo << 6 end end p = @c2.load(:id=>10) c = @c2.load(:id=>123) h.must_equal [] p.parent = c h.must_equal [10, -123, 123, 4, 3] p.parent = nil h.must_equal [10, -123, 123, 4, 3, 10, :y, :x, 5, 3] end it "should support after_load association callback" do h = [] @c2.many_to_one :parent, :class => @c2, :after_load=>[proc{|x,y| h << [x.pk, y.pk]}, :al] @c2.class_eval do self::Foo = h def al(v) model::Foo << v.pk end set_dataset dataset.with_fetch(:id=>20) end p = @c2.load(:id=>10, :parent_id=>20) parent = p.parent h.must_equal [[10, 20], 20] parent.pk.must_equal 20 end it "should support after_load association callback that changes the cached object" do @c2.many_to_one :parent, :class => @c2, :after_load=>:al @c2.class_eval do def al(v) associations[:parent] = :foo end end p = @c2.load(:id=>10, :parent_id=>20) p.parent.must_equal :foo p.associations[:parent].must_equal :foo end it "should raise error and not call internal add or remove method if before callback calls cancel_action, even if raise_on_save_failure is false" do p = @c2.new c = @c2.load(:id=>123) p.raise_on_save_failure = false @c2.many_to_one :parent, :class => @c2, :before_set=>:bs def p.bs(x) cancel_action end def p._parent=; raise; end proc{p.parent = c}.must_raise(Sequel::HookFailed) p.parent.must_be_nil p.associations[:parent] = c p.parent.must_equal c proc{p.parent = nil}.must_raise(Sequel::HookFailed) end it "should raise an error if a callback is not a proc or symbol" do @c2.many_to_one :parent, :class => @c2, :before_set=>Object.new proc{@c2.new.parent = @c2.load(:id=>1)}.must_raise(Sequel::Error) end it "should have association dataset use false condition if any key is nil" do @c2.many_to_one :parent, :class => @c2 @c2.load({}).parent_dataset.sql.must_equal "SELECT * FROM nodes WHERE 'f' LIMIT 1" end end describe Sequel::Model, "one_to_one" do before do @c1 = Class.new(Sequel::Model(:attributes)) do unrestrict_primary_key columns :id, :node_id, :y end @c2 = Class.new(Sequel::Model(:nodes)) do unrestrict_primary_key attr_accessor :xxx def self.name; 'Node'; end def self.to_s; 'Node'; end columns :id, :x, :parent_id, :par_parent_id, :blah, :node_id end @dataset = @c2.dataset @dataset = @dataset.with_fetch({}) @c1.dataset = @c1.dataset.with_fetch({}) DB.reset end it "should have the getter method return a single object" do @c2.one_to_one :attribute, :class => @c1 att = @c2.new(:id => 1234).attribute DB.sqls.must_equal ['SELECT * FROM attributes WHERE (attributes.node_id = 1234) LIMIT 1'] att.must_be_kind_of(@c1) att.values.must_equal({}) end it "should not add a setter method if the :read_only option is true" do @c2.one_to_one :attribute, :class => @c1, :read_only=>true im = @c2.instance_methods im.must_include(:attribute) im.wont_include(:attribute=) end it "should add a setter method" do @c2.one_to_one :attribute, :class => @c1 attrib = @c1.new(:id=>3) @c1.dataset = @c1.dataset.with_fetch(:id=>3) @c2.new(:id => 1234).attribute = attrib DB.sqls.must_equal ['UPDATE attributes SET node_id = NULL WHERE (node_id = 1234)', 'INSERT INTO attributes (id, node_id) VALUES (3, 1234)', "SELECT * FROM attributes WHERE id = 3"] @c2.new(:id => 1234).attribute.must_equal attrib attrib = @c1.load(:id=>3) @c2.new(:id => 1234).attribute = attrib DB.sqls.must_equal ["SELECT * FROM attributes WHERE (attributes.node_id = 1234) LIMIT 1", 'UPDATE attributes SET node_id = NULL WHERE ((node_id = 1234) AND (id != 3))', "UPDATE attributes SET node_id = 1234 WHERE (id = 3)"] end it "should use a transaction in the 
setter method" do @c2.one_to_one :attribute, :class => @c1 @c2.use_transactions = true attrib = @c1.load(:id=>3) @c2.new(:id => 1234).attribute = attrib DB.sqls.must_equal ['BEGIN', 'UPDATE attributes SET node_id = NULL WHERE ((node_id = 1234) AND (id != 3))', "UPDATE attributes SET node_id = 1234 WHERE (id = 3)", 'COMMIT'] end it "should have setter method respect association filters" do @c2.one_to_one :attribute, :class => @c1, :conditions=>{:a=>1} do |ds| ds.filter(:b=>2) end attrib = @c1.load(:id=>3) @c2.new(:id => 1234).attribute = attrib DB.sqls.must_equal ['UPDATE attributes SET node_id = NULL WHERE ((a = 1) AND (node_id = 1234) AND (b = 2) AND (id != 3))', "UPDATE attributes SET node_id = 1234 WHERE (id = 3)"] end it "should have the setter method respect the :primary_key option" do @c2.one_to_one :attribute, :class => @c1, :primary_key=>:xxx attrib = @c1.new(:id=>3) @c1.dataset = @c1.dataset.with_fetch(:id=>3) @c2.new(:id => 1234, :xxx=>5).attribute = attrib DB.sqls.must_equal ['UPDATE attributes SET node_id = NULL WHERE (node_id = 5)', 'INSERT INTO attributes (id, node_id) VALUES (3, 5)', "SELECT * FROM attributes WHERE id = 3"] @c2.new(:id => 321, :xxx=>5).attribute.must_equal attrib attrib = @c1.load(:id=>3) @c2.new(:id => 621, :xxx=>5).attribute = attrib DB.sqls.must_equal ["SELECT * FROM attributes WHERE (attributes.node_id = 5) LIMIT 1", 'UPDATE attributes SET node_id = NULL WHERE ((node_id = 5) AND (id != 3))', 'UPDATE attributes SET node_id = 5 WHERE (id = 3)'] end it "should have the setter method respect composite keys" do @c2.one_to_one :attribute, :class => @c1, :key=>[:node_id, :y], :primary_key=>[:id, :x] attrib = @c1.load(:id=>3, :y=>6) @c1.dataset = @c1.dataset.with_fetch(:id=>3, :y=>6) @c2.load(:id => 1234, :x=>5).attribute = attrib DB.sqls.must_equal ["UPDATE attributes SET node_id = NULL, y = NULL WHERE ((node_id = 1234) AND (y = 5) AND (id != 3))", "UPDATE attributes SET y = 5, node_id = 1234 WHERE (id = 3)"] end it "should have setter method handle associations to models with joined datasets" do db = Sequel.mock c = Class.new(Sequel::Model(db)) do set_dataset(db[:attributes].join(:foo, :attribute_id=>:id)) def _insert_dataset db[:attributes] end def _update_dataset db[:attributes].where(pk_hash) end @instance_dataset = dataset.limit(1).naked.skip_limit_check unrestrict_primary_key columns :id, :node_id, :y end @c2.one_to_one :attribute, :class => c attrib = c.new(:id=>3) db.fetch = [[], {:id=>3}] @c2.load(:id => 1234).attribute = attrib DB.sqls.must_equal [] db.sqls.must_equal [ "SELECT * FROM (SELECT * FROM attributes INNER JOIN foo ON (foo.attribute_id = attributes.id)) AS attributes LIMIT 0", "SELECT * FROM (SELECT * FROM attributes INNER JOIN foo ON (foo.attribute_id = attributes.id)) AS attributes WHERE (node_id = 1234) LIMIT 1", "INSERT INTO attributes (id, node_id) VALUES (3, 1234)", "SELECT * FROM (SELECT * FROM attributes INNER JOIN foo ON (foo.attribute_id = attributes.id)) AS attributes WHERE (id = 3) LIMIT 1"] db.fetch = [[{:id=>4}], {:id=>3, :node_id=>1234}] db.numrows = 1 @c2.load(:id => 1234).attribute = c.load(:id=>3) db.sqls.must_equal [ "SELECT * FROM (SELECT * FROM attributes INNER JOIN foo ON (foo.attribute_id = attributes.id)) AS attributes WHERE (node_id = 1234) LIMIT 1", "UPDATE attributes SET node_id = NULL WHERE (id = 4)", "UPDATE attributes SET node_id = 1234 WHERE (id = 3)"] db.fetch = [[{:id=>4}], {:id=>3, :node_id=>1234}] @c2.load(:id => 1234).attribute = c.new(:id=>3) db.sqls.must_equal [ "SELECT * FROM (SELECT * FROM attributes 
INNER JOIN foo ON (foo.attribute_id = attributes.id)) AS attributes WHERE (node_id = 1234) LIMIT 1", "UPDATE attributes SET node_id = NULL WHERE (id = 4)", "INSERT INTO attributes (id, node_id) VALUES (3, 1234)", "SELECT * FROM (SELECT * FROM attributes INNER JOIN foo ON (foo.attribute_id = attributes.id)) AS attributes WHERE (id = 3) LIMIT 1"] end it "should use implicit key if omitted" do @c2.dataset = @c2.dataset.with_fetch({}) @c2.one_to_one :parent, :class => @c2 d = @c2.new(:id => 234) p = d.parent p.class.must_equal @c2 p.values.must_equal({}) DB.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.node_id = 234) LIMIT 1"] end it "should use implicit class if omitted" do begin class ::ParParent < Sequel::Model; end @c2.one_to_one :par_parent @c2.new(:id => 234).par_parent.class.must_equal ParParent DB.sqls.must_equal ["SELECT * FROM par_parents WHERE (par_parents.node_id = 234) LIMIT 1"] ensure Object.send(:remove_const, :ParParent) end end it "should use class inside module if given as a string" do begin module ::Par class Parent < Sequel::Model; end end @c2.one_to_one :par_parent, :class=>"Par::Parent" @c2.new(:id => 234).par_parent.class.must_equal Par::Parent DB.sqls.must_equal ["SELECT * FROM parents WHERE (parents.node_id = 234) LIMIT 1"] ensure Object.send(:remove_const, :Par) end end it "should use explicit key if given" do @c2.dataset = @c2.dataset.with_fetch({}) @c2.one_to_one :parent, :class => @c2, :key => :blah d = @c2.new(:id => 234) p = d.parent p.class.must_equal @c2 p.values.must_equal({}) DB.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.blah = 234) LIMIT 1"] end it "should use :primary_key option if given" do @c2.one_to_one :parent, :class => @c2, :key => :pk, :primary_key => :blah @c2.new(:id => 1, :blah => 567).parent DB.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.pk = 567) LIMIT 1"] end it "should support composite keys" do @c2.one_to_one :parent, :class => @c2, :primary_key=>[:id, :parent_id], :key=>[:parent_id, :id] @c2.new(:id => 1, :parent_id => 234).parent DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.parent_id = 1) AND (nodes.id = 234)) LIMIT 1"] end it "should not issue query if not all keys have values" do @c2.one_to_one :parent, :class => @c2, :key=>[:id, :parent_id], :primary_key=>[:parent_id, :id] @c2.new(:id => 1, :parent_id => nil).parent.must_be_nil DB.sqls.must_equal [] end it "should raise an Error unless same number of composite keys used" do proc{@c2.one_to_one :parent, :class => @c2, :primary_key=>[:parent_id, :id]}.must_raise(Sequel::Error) proc{@c2.one_to_one :parent, :class => @c2, :key=>[:id, :parent_id], :primary_key=>:id}.must_raise(Sequel::Error) proc{@c2.one_to_one :parent, :class => @c2, :key=>:id, :primary_key=>[:parent_id, :id]}.must_raise(Sequel::Error) proc{@c2.one_to_one :parent, :class => @c2, :key=>[:id, :parent_id, :blah], :primary_key=>[:parent_id, :id]}.must_raise(Sequel::Error) end it "should use :select option if given" do @c2.one_to_one :parent, :class => @c2, :select=>[:id, :name] @c2.new(:id => 567).parent DB.sqls.must_equal ["SELECT id, name FROM nodes WHERE (nodes.node_id = 567) LIMIT 1"] end it "should use :conditions option if given" do @c2.one_to_one :parent, :class => @c2, :conditions=>{:a=>32} @c2.new(:id => 567).parent DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((a = 32) AND (nodes.node_id = 567)) LIMIT 1"] @c2.one_to_one :parent, :class => @c2, :conditions=>:a @c2.new(:id => 567).parent DB.sqls.must_equal ["SELECT * FROM nodes WHERE (a AND (nodes.node_id = 567)) LIMIT 1"] end it "should 

  it "should support :order, :limit (only for offset), and :dataset options, as well as a block" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :dataset=>proc{model.filter(:parent_id=>pk)}, :limit=>[10,20], :order=>:name do |ds|
      ds.filter{x > 1}
    end
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((parent_id = 100) AND (x > 1)) ORDER BY name LIMIT 1 OFFSET 20"]
  end

  it "should support :dataset options with different types of arity" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :dataset=>proc{model.filter(:parent_id=>pk)}
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (parent_id = 100) LIMIT 1"]

    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :dataset=>proc{|_| model.filter(:parent_id=>pk)}
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (parent_id = 100) LIMIT 1"]

    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :dataset=>proc{|_, *| model.filter(:parent_id=>pk)}
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (parent_id = 100) LIMIT 1"]

    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :dataset=>proc{|*| model.filter(:parent_id=>pk)}
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (parent_id = 100) LIMIT 1"]
  end

  deprecated "should support a :dataset option that requires multiple arguments" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :dataset=>proc{|_, _| model.filter(:parent_id=>pk)}
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (parent_id = 100) LIMIT 1"]
  end

  deprecated "should support an association block that requires no arguments" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id do
      model.filter(:parent_id=>pk)
    end
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (parent_id = 100)"]
  end

  deprecated "should support an association block that requires multiple arguments" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id do |_, _|
      model.filter(:parent_id=>pk)
    end
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (parent_id = 100)"]
  end

  it "should support instance specific behavior in association block" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id do |ds|
      ds.where(:x=>pk)
    end
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 100) AND (x = 100)) LIMIT 1"]
  end

  it "should support instance specific behavior in association block when finalizing associations" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id do |ds|
      ds.where(:x=>pk)
    end
    @c2.load(:id => 100).child_20
    @c2.finalize_associations
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 100) AND (x = 100)) LIMIT 1"]
  end

  it "should handle associations with blocks that are marked as instance specific" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :instance_specific=>true do |ds|
      ds.where(:x=>100)
    end
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 100) AND (x = 100)) LIMIT 1"]
  end

  it "should handle associations with blocks that are marked as not-instance specific" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :instance_specific=>false do |ds|
      ds.where(:x=>100)
    end
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 100) AND (x = 100)) LIMIT 1"]
  end

  it "should handle associations with blocks that are marked as instance specific when finalizing associations" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :instance_specific=>true do |ds|
      ds.where(:x=>100)
    end
    @c2.finalize_associations
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 100) AND (x = 100)) LIMIT 1"]
  end

  it "should handle associations with blocks that are marked as not-instance specific when finalizing associations" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :instance_specific=>false do |ds|
      ds.where(:x=>100)
    end
    @c2.finalize_associations
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 100) AND (x = 100)) LIMIT 1"]
  end

  it "should handle associations with :dataset option that are marked as instance specific" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :instance_specific=>true, :dataset=>proc{|r| r.associated_dataset.where(:x=>100)}
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (x = 100) LIMIT 1"]
  end

  it "should handle associations with :dataset option that are marked as not-instance specific" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :instance_specific=>false, :dataset=>proc{|r| r.associated_dataset.where(:x=>100)}
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (x = 100) LIMIT 1"]
  end

  it "should handle associations with :dataset option that are marked as instance specific when finalizing associations" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :instance_specific=>true, :dataset=>proc{|r| r.associated_dataset.where(:x=>100)}
    @c2.finalize_associations
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (x = 100) LIMIT 1"]
  end

  it "should handle associations with :dataset option that are marked as not-instance specific when finalizing associations" do
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :instance_specific=>false, :dataset=>proc{|r| r.associated_dataset.where(:x=>100)}
    @c2.finalize_associations
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (x = 100) LIMIT 1"]
  end

  it "should handle associations with blocks that are marked as not-instance specific and use Sequel.delay" do
    x = 100
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :instance_specific=>false do |ds|
      ds.where(:x=>Sequel.delay{x})
    end
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 100) AND (x = 100)) LIMIT 1"]
    x = 2
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 100) AND (x = 2)) LIMIT 1"]
  end

  it "should handle associations with blocks that are marked as not-instance specific and use Sequel.delay when finalizing associations" do
    x = 100
    @c2.one_to_one :child_20, :class => @c2, :key=>:id, :instance_specific=>false do |ds|
      ds.where(:x=>Sequel.delay{x})
    end
    @c2.finalize_associations
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 100) AND (x = 100)) LIMIT 1"]
    x = 2
    @c2.load(:id => 100).child_20
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE ((nodes.id = 100) AND (x = 2)) LIMIT 1"]
  end

  it "should return nil if primary_key value is nil" do
    @c2.one_to_one :parent, :class => @c2, :primary_key=>:node_id
    @c2.new(:id => 1).parent.must_be_nil
    DB.sqls.must_equal []
  end

  it "should cache negative lookup" do
    @c2.one_to_one :parent, :class => @c2
    @c2.dataset = @c2.dataset.with_fetch([])
    d = @c2.new(:id => 555)
    DB.sqls.must_equal []
    d.parent.must_be_nil
    DB.sqls.must_equal ['SELECT * FROM nodes WHERE (nodes.node_id = 555) LIMIT 1']
    d.parent.must_be_nil
    DB.sqls.must_equal []
  end

  it "should have the setter method respect the :key option" do
    @c2.one_to_one :parent, :class => @c2, :key=>:blah
    d = @c2.new(:id => 3)
    e = @c2.new(:id => 4321, :blah=>444)
    @c2.dataset = @c2.dataset.with_fetch(:id => 4321, :blah => 3)
    d.parent = e
    e.values.must_equal(:id => 4321, :blah => 3)
    DB.sqls.must_equal ["UPDATE nodes SET blah = NULL WHERE (blah = 3)",
      "INSERT INTO nodes (id, blah) VALUES (4321, 3)",
      "SELECT * FROM nodes WHERE id = 4321"]
  end

  it "should persist changes to associated object when the setter is called" do
    @c2.one_to_one :parent, :class => @c2
    d = @c2.load(:id => 1)
    d.parent = @c2.load(:id => 3, :node_id=>345)
    DB.sqls.must_equal ["UPDATE nodes SET node_id = NULL WHERE ((node_id = 1) AND (id != 3))",
      "UPDATE nodes SET node_id = 1 WHERE (id = 3)"]
  end

  it "should populate cache when accessed" do
    @c2.one_to_one :parent, :class => @c2
    d = @c2.load(:id => 1)
    d.associations[:parent].must_be_nil
    @c2.dataset = @c2.dataset.with_fetch(:id=>234)
    e = d.parent
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.node_id = 1) LIMIT 1"]
    d.parent
    DB.sqls.must_equal []
    d.associations[:parent].must_equal e
  end

  it "should populate cache when assigned" do
    @c2.one_to_one :parent, :class => @c2
    d = @c2.load(:id => 1)
    d.associations[:parent].must_be_nil
    e = @c2.load(:id => 234)
    d.parent = e
    f = d.parent
    d.associations[:parent].must_equal e
    e.must_equal f
  end

  it "should use cache if available" do
    @c2.one_to_one :parent, :class => @c2
    d = @c2.load(:id => 1, :parent_id => 234)
    d.associations[:parent] = 42
    d.parent.must_equal 42
    DB.sqls.must_equal []
  end

  it "should not use cache if asked to reload" do
    @c2.one_to_one :parent, :class => @c2
    d = @c2.load(:id => 1)
    d.associations[:parent] = [42]
    d.parent(:reload=>true).wont_equal 42
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.node_id = 1) LIMIT 1"]
  end

  it "should have the setter set the reciprocal many_to_one cached association" do
    @c2.one_to_one :parent, :class => @c2, :key=>:parent_id
    @c2.many_to_one :child, :class => @c2, :key=>:parent_id
    d = @c2.load(:id => 1)
    e = @c2.load(:id => 2)
    d.parent = e
    e.child.must_equal d
    DB.sqls.must_equal ["UPDATE nodes SET parent_id = NULL WHERE ((parent_id = 1) AND (id != 2))",
      "UPDATE nodes SET parent_id = 1 WHERE (id = 2)"]
    d.parent = nil
    e.child.must_be_nil
    DB.sqls.must_equal ["UPDATE nodes SET parent_id = NULL WHERE (parent_id = 1)"]
  end

  it "should have the setter remove the object from the previous associated object's reciprocal many_to_one cached association array if it exists" do
    @c2.one_to_one :parent, :class => @c2, :key=>:parent_id
    @c2.many_to_one :child, :class => @c2, :key=>:parent_id
    @c2.dataset = @c2.dataset.with_fetch([])
    d = @c2.load(:id => 1)
    e = @c2.load(:id => 2)
    f = @c2.load(:id => 3)
    e.child.must_be_nil
    f.child.must_be_nil
    d.parent = e
    e.child.must_equal d
    d.parent = f
    f.child.must_equal d
    e.child.must_be_nil
    d.parent = nil
    f.child.must_be_nil
  end

  it "should have the setter not modify the reciprocal if set to same value as current" do
    @c2.one_to_one :parent, :class => @c2, :key=>:parent_id
    @c2.many_to_one :child, :class => @c2, :key=>:parent_id
    c1 = @c2.load(:id => 1, :parent_id=>nil)
    c2 = @c2.load(:id => 2, :parent_id=>1)
    c1.associations[:child] = c2
    c2.associations[:parent] = c1
    c2.parent = c1
    c1.child.must_equal c2
    DB.sqls.must_equal []
  end

  it "should have setter not unset reciprocal during save if reciprocal is the same as current" do
    @c2.many_to_one :parent, :class => @c2, :key=>:parent_id
    @c2.one_to_one :child, :class => @c2, :key=>:parent_id, :reciprocal=>:parent
    d = @c2.new(:id => 1)
    e = @c2.new(:id => 2)
    e2 = @c2.new(:id => 3)
    e3 = @c2.new(:id => 4)
    d.associations[:parent] = e
    e.associations[:child] = d
    e2.associations[:child] = d
    e3.associations[:child] = e

    assoc = nil
    d.define_singleton_method(:after_save) do
      super()
      assoc = associations
    end

    def e.set_associated_object_if_same?; true; end
    e.child = d
    assoc.must_equal(:parent=>e)

    def e2.set_associated_object_if_same?; true; end
    e2.child = e
    assoc.must_equal(:parent=>nil)

    d.associations.clear
    e3.child = d
    assoc.must_equal({})
  end

  it "should have setter method handle case where there is no reciprocal" do
    @c2.many_to_one :parent, :class => @c2, :key=>:parent_id, :reciprocal=>nil
    @c2.one_to_one :child, :class => @c2, :key=>:parent_id, :reciprocal=>nil
    d = @c2.new(:id => 1)
    e = @c2.new(:id => 2)
    e2 = @c2.new(:id => 3)
    e3 = @c2.new(:id => 4)
    d.associations[:parent] = e
    e.associations[:child] = d
    e2.associations[:child] = d
    e3.associations[:child] = e

    def e.set_associated_object_if_same?; true; end
    e.child = d
    d.parent.must_equal e
  end

  it "should not add associations methods directly to class" do
    @c2.one_to_one :parent, :class => @c2
    @c2.instance_methods.must_include(:parent)
    @c2.instance_methods.must_include(:parent=)
    @c2.instance_methods(false).wont_include(:parent)
    @c2.instance_methods(false).wont_include(:parent=)
  end

  it "should raise an error if the current model object doesn't have a valid primary key" do
    @c2.one_to_one :parent, :class => @c2
    p = @c2.new
    c = @c2.load(:id=>123)
    proc{p.parent = c}.must_raise(Sequel::Error)
  end

  it "should make the change to the foreign_key value inside a _association= method" do
    @c2.one_to_one :parent, :class => @c2
    @c2.private_instance_methods.must_include(:_parent=)
    c = @c2.new
    p = @c2.load(:id=>123)
    def p._parent=(x)
      @x = x
    end
    def p.parent_id=; raise; end
    p.parent = c
    p.instance_variable_get(:@x).must_equal c
  end

  it "should have a :setter option define the _association= method" do
    @c2.one_to_one :parent, :class => @c2, :setter=>proc{|x| @x = x}
    c = @c2.new
    p = @c2.load(:id=>123)
    def p.parent_id=; raise; end
    p.parent = c
    p.instance_variable_get(:@x).must_equal c
  end

  it "should support (before|after)_set callbacks" do
    h = []
    @c2.one_to_one :parent, :class => @c2, :before_set=>[proc{|x,y| h << x.pk; h << (y ? -y.pk : :y)}, :blah], :after_set=>proc{h << 3}
    @c2.class_eval do
      self::Foo = h
      def blah(x)
        model::Foo << (x ? x.pk : :x)
      end
      def blahr(x)
        model::Foo << 6
      end
    end
    p = @c2.load(:id=>10)
    c = @c2.load(:id=>123)
    h.must_equal []
    p.parent = c
    h.must_equal [10, -123, 123, 3]
    p.parent = nil
    h.must_equal [10, -123, 123, 3, 10, :y, :x, 3]
  end

  it "should support after_load association callback" do
    h = []
    @c2.one_to_one :parent, :class => @c2, :after_load=>[proc{|x,y| h << [x.pk, y.pk]}, :al]
    @c2.class_eval do
      self::Foo = h
      def al(v)
        model::Foo << v.pk
      end
      @dataset = @dataset.with_fetch(:id=>20)
    end
    p = @c2.load(:id=>10)
    parent = p.parent
    h.must_equal [[10, 20], 20]
    parent.pk.must_equal 20
  end

  it "should raise error and not call internal add or remove method if before callback calls cancel_action, even if raise_on_save_failure is false" do
    p = @c2.load(:id=>321)
    c = @c2.load(:id=>123)
    p.raise_on_save_failure = false
    @c2.one_to_one :parent, :class => @c2, :before_set=>:bs
    def p.bs(x) cancel_action end
    def p._parent=; raise; end
    proc{p.parent = c}.must_raise(Sequel::HookFailed)
    p.associations[:parent].must_be_nil
    p.associations[:parent] = c
    p.parent.must_equal c
    proc{p.parent = nil}.must_raise(Sequel::HookFailed)
  end

  it "should not validate the associated object in setter if the :validate=>false option is used" do
    @c2.one_to_one :parent, :class => @c2, :validate=>false
    n = @c2.new(:id => 1234)
    a = @c2.new(:id => 2345)
    def a.validate() errors.add(:id, 'foo') end
    (n.parent = a).must_equal a
  end

  it "should raise an error if a callback is not a proc or symbol" do
    @c2.one_to_one :parent, :class => @c2, :before_set=>Object.new
    proc{@c2.new.parent = @c2.load(:id=>1)}.must_raise(Sequel::Error)
  end

  it "should work correctly when used with associate" do
    @c2.dataset = @c2.dataset.with_fetch({})
    @c2.associate :one_to_one, :parent, :class => @c2
    @c2.load(:id => 567).parent.must_equal @c2.load({})
    DB.sqls.must_equal ["SELECT * FROM nodes WHERE (nodes.node_id = 567) LIMIT 1"]
  end

  it "should have association dataset use false condition if any key is nil" do
    @c2.one_to_one :parent, :class => @c2, :primary_key=>:parent_id
    @c2.load(:id=>1).parent_dataset.sql.must_equal "SELECT * FROM nodes WHERE 'f' LIMIT 1"
  end
end
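
# A quick orientation for the one_to_many specs below: declaring
# `one_to_many :attributes` on a model defines #attributes and
# #attributes_dataset readers, plus #add_attribute, #remove_attribute, and
# #remove_all_attributes mutators, all of which are exercised here against
# the mock database.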
describe Sequel::Model, "one_to_many" do
  before do
    @c1 = Class.new(Sequel::Model(:attributes)) do
      unrestrict_primary_key
      columns :id, :node_id, :y, :z
    end

    @c2 = Class.new(Sequel::Model(:nodes)) do
      def _refresh(ds); end
      unrestrict_primary_key
      attr_accessor :xxx

      def self.name; 'Node'; end
      def self.to_s; 'Node'; end
      columns :id, :x
    end
    @dataset = @c2.dataset = @c2.dataset.with_fetch({})
    @c1.dataset = @c1.dataset.with_fetch(proc{|sql| sql =~ /SELECT 1/ ? {:a=>1} : {}})
    DB.reset
  end

  it "should raise an error if current class does not have a primary key, and :primary_key is not specified" do
    @c2.no_primary_key
    proc{@c2.one_to_many :attributes, :class => @c1}.must_raise(Sequel::Error)
    DB.sqls.must_equal []
  end

  it "should use implicit key if omitted" do
    @c2.one_to_many :attributes, :class => @c1
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT * FROM attributes WHERE (attributes.node_id = 1234)'
  end

  it "should use implicit class if omitted" do
    begin
      class ::HistoricalValue < Sequel::Model; end
      @c2.one_to_many :historical_values
      v = @c2.new(:id => 1234).historical_values_dataset
      v.must_be_kind_of(Sequel::Dataset)
      v.sql.must_equal 'SELECT * FROM historical_values WHERE (historical_values.node_id = 1234)'
      v.model.must_equal HistoricalValue
    ensure
      Object.send(:remove_const, :HistoricalValue)
    end
  end

  it "should use class inside a module if given as a string" do
    begin
      module ::Historical
        class Value < Sequel::Model; end
      end
      @c2.one_to_many :historical_values, :class=>'Historical::Value'
      v = @c2.new(:id => 1234).historical_values_dataset
      v.must_be_kind_of(Sequel::Dataset)
      v.sql.must_equal 'SELECT * FROM values WHERE (values.node_id = 1234)'
      v.model.must_equal Historical::Value
    ensure
      Object.send(:remove_const, :Historical)
    end
  end

  it "should use a callback if given one as a block" do
    @c2.one_to_many :attributes, :class => @c1, :key => :nodeid
    d = @c2.load(:id => 1234)
    d.associations[:attributes] = []
    d.attributes{|ds| ds.where{name > 'M'}}.wont_equal []
    DB.sqls.must_equal ["SELECT * FROM attributes WHERE ((attributes.nodeid = 1234) AND (name > 'M'))"]
  end

  it "should use explicit key if given" do
    @c2.one_to_many :attributes, :class => @c1, :key => :nodeid
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT * FROM attributes WHERE (attributes.nodeid = 1234)'
  end

  it "should support composite keys" do
    @c2.one_to_many :attributes, :class => @c1, :key =>[:node_id, :id], :primary_key=>[:id, :x]
    @c2.load(:id => 1234, :x=>234).attributes_dataset.sql.must_equal 'SELECT * FROM attributes WHERE ((attributes.node_id = 1234) AND (attributes.id = 234))'
  end

  it "should not issue query if not all keys have values" do
    @c2.one_to_many :attributes, :class => @c1, :key =>[:node_id, :id], :primary_key=>[:id, :x]
    @c2.load(:id => 1234, :x=>nil).attributes.must_equal []
    DB.sqls.must_equal []
  end

  it "should raise an Error unless same number of composite keys used" do
    proc{@c2.one_to_many :attributes, :class => @c1, :key=>[:node_id, :id]}.must_raise(Sequel::Error)
    proc{@c2.one_to_many :attributes, :class => @c1, :primary_key=>[:node_id, :id]}.must_raise(Sequel::Error)
    proc{@c2.one_to_many :attributes, :class => @c1, :key=>[:node_id, :id], :primary_key=>:id}.must_raise(Sequel::Error)
    proc{@c2.one_to_many :attributes, :class => @c1, :key=>:id, :primary_key=>[:node_id, :id]}.must_raise(Sequel::Error)
    proc{@c2.one_to_many :attributes, :class => @c1, :key=>[:node_id, :id, :x], :primary_key=>[:parent_id, :id]}.must_raise(Sequel::Error)
  end

  it "should define an add_ method that works on existing records" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.load(:id => 1234)
    a = @c1.load(:id => 2345)
    a.must_equal n.add_attribute(a)
    a.values.must_equal(:node_id => 1234, :id => 2345)
    DB.sqls.must_equal ['UPDATE attributes SET node_id = 1234 WHERE (id = 2345)']
  end

  it "should define an add_ method that works on new records" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.load(:id => 1234)
    a = @c1.new(:id => 234)
    @c1.dataset = @c1.dataset.with_fetch(:id=>234, :node_id=>1234)
    a.must_equal n.add_attribute(a)
    DB.sqls.must_equal ["INSERT INTO attributes (id, node_id) VALUES (234, 1234)",
      "SELECT * FROM attributes WHERE id = 234"]
    a.values.must_equal(:node_id => 1234, :id => 234)
  end

  it "should define a remove_ method that works on existing records" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.load(:id => 1234)
    a = @c1.load(:id => 2345, :node_id => 1234)
    a.must_equal n.remove_attribute(a)
    a.values.must_equal(:node_id => nil, :id => 2345)
    DB.sqls.must_equal ["SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (id = 2345)) LIMIT 1",
      'UPDATE attributes SET node_id = NULL WHERE (id = 2345)']
  end

  it "should have the remove_ method raise an error if the passed object is not already associated" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    a = @c1.load(:id => 2345, :node_id => 1234)
    @c1.dataset = @c1.dataset.with_fetch([])
    proc{n.remove_attribute(a)}.must_raise(Sequel::Error)
    DB.sqls.must_equal ["SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (id = 2345)) LIMIT 1"]
  end

  it "should have the remove_ method raise an error if the passed object id is not already associated" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    a = @c1.load(:id => 2345, :node_id => 1234)
    @c1.dataset = @c1.dataset.with_fetch([])
    proc{n.remove_attribute(a.id)}.must_raise(Sequel::Error)
    DB.sqls.must_equal ["SELECT * FROM attributes WHERE ((attributes.node_id = 1234) AND (attributes.id = 2345)) LIMIT 1"]
  end

  it "should accept a hash for the add_ method and create a new record" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    DB.reset
    @c1.dataset = @c1.dataset.with_fetch(:node_id => 1234, :id => 234)
    n.add_attribute(:id => 234).must_equal @c1.load(:node_id => 1234, :id => 234)
    DB.sqls.must_equal ["INSERT INTO attributes (id, node_id) VALUES (234, 1234)",
      "SELECT * FROM attributes WHERE id = 234"]
  end

  it "should accept a primary key for the add_ method" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    @c1.dataset = @c1.dataset.with_fetch(:node_id => nil, :id => 234)
    n.add_attribute(234).must_equal @c1.load(:node_id => 1234, :id => 234)
    DB.sqls.must_equal ["SELECT * FROM attributes WHERE id = 234",
      "UPDATE attributes SET node_id = 1234 WHERE (id = 234)"]
  end

  it "should raise an error if the primary key passed to the add_ method does not match an existing record" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    @c1.dataset = @c1.dataset.with_fetch([])
    proc{n.add_attribute(234)}.must_raise(Sequel::NoMatchingRow)
    DB.sqls.must_equal ["SELECT * FROM attributes WHERE id = 234"]
  end

  it "should raise an error in the add_ method if the passed associated object is not of the correct type" do
    @c2.one_to_many :attributes, :class => @c1
    proc{@c2.new(:id => 1234).add_attribute(@c2.new)}.must_raise(Sequel::Error)
  end

  it "should accept a primary key for the remove_ method and remove an existing record" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    @c1.dataset = @c1.dataset.with_fetch(:id=>234, :node_id=>1234)
    n.remove_attribute(234).must_equal @c1.load(:node_id => nil, :id => 234)
    DB.sqls.must_equal ['SELECT * FROM attributes WHERE ((attributes.node_id = 1234) AND (attributes.id = 234)) LIMIT 1',
      'UPDATE attributes SET node_id = NULL WHERE (id = 234)']
  end

  it "should raise an error in the remove_ method if the passed associated object is not of the correct type" do
    @c2.one_to_many :attributes, :class => @c1
    proc{@c2.new(:id => 1234).remove_attribute(@c2.new)}.must_raise(Sequel::Error)
  end

  it "should have add_ method respect the :primary_key option" do
    @c2.one_to_many :attributes, :class => @c1, :primary_key=>:xxx
    n = @c2.new(:id => 1234, :xxx=>5)
    a = @c1.load(:id => 2345)
    n.add_attribute(a).must_equal a
    DB.sqls.must_equal ['UPDATE attributes SET node_id = 5 WHERE (id = 2345)']
  end

  it "should have add_ method not add the same object to the cached association array if the object is already in the array" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    a = @c1.load(:id => 2345)
    n.associations[:attributes] = []
    a.must_equal n.add_attribute(a)
    a.must_equal n.add_attribute(a)
    a.values.must_equal(:node_id => 1234, :id => 2345)
    n.attributes.must_equal [a]
    DB.sqls.must_equal ['UPDATE attributes SET node_id = 1234 WHERE (id = 2345)'] * 2
  end

  it "should have add_ method respect composite keys" do
    @c2.one_to_many :attributes, :class => @c1, :key =>[:node_id, :y], :primary_key=>[:id, :x]
    n = @c2.load(:id => 1234, :x=>5)
    a = @c1.load(:id => 2345)
    n.add_attribute(a).must_equal a
    DB.sqls.must_equal ["UPDATE attributes SET node_id = 1234, y = 5 WHERE (id = 2345)"]
  end

  it "should have add_ method accept a composite key" do
    @c1.dataset = @c1.dataset.with_fetch(:id=>2345, :node_id=>1234, :z=>8, :y=>5)
    @c1.set_primary_key [:id, :z]
    @c2.one_to_many :attributes, :class => @c1, :key =>[:node_id, :y], :primary_key=>[:id, :x]
    n = @c2.load(:id => 1234, :x=>5)
    a = @c1.load(:id => 2345, :z => 8, :node_id => 1234, :y=>5)
    n.add_attribute([2345, 8]).must_equal a
    DB.sqls.must_equal ["SELECT * FROM attributes WHERE ((id = 2345) AND (z = 8)) LIMIT 1",
      "UPDATE attributes SET node_id = 1234, y = 5 WHERE ((id = 2345) AND (z = 8))"]
  end

  it "should have remove_ method respect composite keys" do
    @c2.one_to_many :attributes, :class => @c1, :key =>[:node_id, :y], :primary_key=>[:id, :x]
    n = @c2.load(:id => 1234, :x=>5)
    a = @c1.load(:id => 2345, :node_id=>1234, :y=>5)
    n.remove_attribute(a).must_equal a
    DB.sqls.must_equal ["SELECT 1 AS one FROM attributes WHERE ((attributes.node_id = 1234) AND (attributes.y = 5) AND (id = 2345)) LIMIT 1",
      "UPDATE attributes SET node_id = NULL, y = NULL WHERE (id = 2345)"]
  end

  it "should accept an array of composite primary key values for the remove_ method and remove an existing record" do
    @c1.dataset = @c1.dataset.with_fetch(:id=>234, :node_id=>123, :y=>5)
    @c1.set_primary_key [:id, :y]
    @c2.one_to_many :attributes, :class => @c1, :key=>:node_id, :primary_key=>:id
    n = @c2.new(:id => 123)
    n.remove_attribute([234, 5]).must_equal @c1.load(:node_id => nil, :y => 5, :id => 234)
    DB.sqls.must_equal ["SELECT * FROM attributes WHERE ((attributes.node_id = 123) AND (attributes.id = 234) AND (attributes.y = 5)) LIMIT 1",
      "UPDATE attributes SET node_id = NULL WHERE ((id = 234) AND (y = 5))"]
  end

  it "should raise an error in add_ and remove_ if the passed object returns false to save (is not valid)" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    a = @c1.new(:id => 2345)
    def a.validate() errors.add(:id, 'foo') end
    proc{n.add_attribute(a)}.must_raise(Sequel::ValidationFailed)
    proc{n.remove_attribute(a)}.must_raise(Sequel::ValidationFailed)
  end

  it "should not validate the associated object in add_ and remove_ if the :validate=>false option is used" do
    @c2.one_to_many :attributes, :class => @c1, :validate=>false
    n = @c2.new(:id => 1234)
    a = @c1.new(:id => 2345)
    def a.validate() errors.add(:id, 'foo') end
    n.add_attribute(a).must_equal a
    n.remove_attribute(a).must_equal a
  end

  it "should not raise exception in add_ and remove_ if the :raise_on_save_failure=>false option is used" do
    @c2.one_to_many :attributes, :class => @c1, :raise_on_save_failure=>false
    n = @c2.new(:id => 1234)
    a = @c1.new(:id => 2345)
    def a.validate() errors.add(:id, 'foo') end
    n.associations[:attributes] = []
    n.add_attribute(a).must_be_nil
    n.associations[:attributes].must_equal []
    n.remove_attribute(a).must_be_nil
    n.associations[:attributes].must_equal []
  end

  it "should raise an error if the model object doesn't have a valid primary key" do
    @c2.one_to_many :attributes, :class => @c1
    a = @c2.new
    n = @c1.load(:id=>123)
    proc{a.attributes_dataset}.must_raise(Sequel::Error)
    proc{a.add_attribute(n)}.must_raise(Sequel::Error)
    proc{a.remove_attribute(n)}.must_raise(Sequel::Error)
    proc{a.remove_all_attributes}.must_raise(Sequel::Error)
  end

  it "should use :primary_key option if given" do
    @c1.one_to_many :nodes, :class => @c2, :primary_key => :node_id, :key=>:id
    @c1.load(:id => 1234, :node_id=>4321).nodes_dataset.sql.must_equal "SELECT * FROM nodes WHERE (nodes.id = 4321)"
  end

  it "should support a select option" do
    @c2.one_to_many :attributes, :class => @c1, :select => [:id, :name]
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal "SELECT id, name FROM attributes WHERE (attributes.node_id = 1234)"
  end

  it "should support a conditions option" do
    @c2.one_to_many :attributes, :class => @c1, :conditions => {:a=>32}
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal "SELECT * FROM attributes WHERE ((a = 32) AND (attributes.node_id = 1234))"

    @c2.one_to_many :attributes, :class => @c1, :conditions => Sequel.~(:a)
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal "SELECT * FROM attributes WHERE (NOT a AND (attributes.node_id = 1234))"

    @c2.one_to_many :attributes, :class => @c1, :conditions => [Sequel.~(:a)]
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal "SELECT * FROM attributes WHERE (NOT a AND (attributes.node_id = 1234))"
  end

  it "should support an order option" do
    @c2.one_to_many :attributes, :class => @c1, :order => :kind
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal "SELECT * FROM attributes WHERE (attributes.node_id = 1234) ORDER BY kind"
  end

  it "should support an array for the order option" do
    @c2.one_to_many :attributes, :class => @c1, :order => [:kind1, :kind2]
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal "SELECT * FROM attributes WHERE (attributes.node_id = 1234) ORDER BY kind1, kind2"
  end

  it "should have a dataset method for the associated object dataset" do
    @c2.one_to_many :attributes, :class => @c1
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT * FROM attributes WHERE (attributes.node_id = 1234)'
  end

  it "should accept a block" do
    @c2.one_to_many :attributes, :class => @c1 do |ds|
      ds.filter(:xxx => nil)
    end
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT * FROM attributes WHERE ((attributes.node_id = 1234) AND (xxx IS NULL))'
  end

  it "should support :order option with block" do
    @c2.one_to_many :attributes, :class => @c1, :order => :kind do |ds|
      ds.filter(:xxx => nil)
    end
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT * FROM attributes WHERE ((attributes.node_id = 1234) AND (xxx IS NULL)) ORDER BY kind'
  end

  it "should have the block argument affect the _dataset method" do
    @c2.one_to_many :attributes, :class => @c1 do |ds|
      ds.filter(:xxx => 456)
    end
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT * FROM attributes WHERE ((attributes.node_id = 1234) AND (xxx = 456))'
  end

  it "should support a :dataset option that is used instead of the default" do
    c1 = @c1
    @c1.dataset = @c1.dataset.with_fetch({})
    @c2.one_to_many :all_other_attributes, :class => @c1, :dataset=>proc{c1.exclude(:nodeid=>pk)}, :order=>:a, :limit=>10 do |ds|
      ds.filter(:xxx => 5)
    end
    @c2.new(:id => 1234).all_other_attributes_dataset.sql.must_equal 'SELECT * FROM attributes WHERE ((nodeid != 1234) AND (xxx = 5)) ORDER BY a LIMIT 10'
    @c2.new(:id => 1234).all_other_attributes.must_equal [@c1.load({})]
    DB.sqls.must_equal ['SELECT * FROM attributes WHERE ((nodeid != 1234) AND (xxx = 5)) ORDER BY a LIMIT 10']
  end

  it "should support a :limit option" do
    @c2.one_to_many :attributes, :class => @c1, :limit=>10
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT * FROM attributes WHERE (attributes.node_id = 1234) LIMIT 10'

    @c2.one_to_many :attributes, :class => @c1, :limit=>[10,10]
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT * FROM attributes WHERE (attributes.node_id = 1234) LIMIT 10 OFFSET 10'
  end

  it "should have the :eager option affect the _dataset method" do
    @c2.one_to_many :attributes, :class => @c2, :eager=>:attributes
    @c2.new(:id => 1234).attributes_dataset.opts[:eager].must_equal(:attributes=>nil)
  end

  it "should populate cache when accessed" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    n.associations.include?(:attributes).must_equal false
    atts = n.attributes
    atts.must_equal n.associations[:attributes]
    DB.sqls.must_equal ['SELECT * FROM attributes WHERE (attributes.node_id = 1234)']
  end

  it "should use cache if available" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    n.associations[:attributes] = 42
    n.attributes.must_equal 42
    DB.sqls.must_equal []
  end

  it "should not use cache if asked to reload" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    n.associations[:attributes] = 42
    n.attributes(:reload=>true).wont_equal 42
    DB.sqls.must_equal ['SELECT * FROM attributes WHERE (attributes.node_id = 1234)']
  end

  it "should add item to cache if it exists when calling add_" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.new(:id => 1234)
    att = @c1.load(:id => 345)
    a = []
    n.associations[:attributes] = a
    n.add_attribute(att)
    a.must_equal [att]
  end

  it "should set object to item's reciprocal cache when calling add_" do
    @c2.one_to_many :attributes, :class => @c1
    @c1.many_to_one :node, :class => @c2
    n = @c2.new(:id => 1234)
    att = @c1.new(:id => 345)
    n.add_attribute(att)
    att.node.must_equal n
  end

  it "should remove item from cache if it exists when calling remove_" do
    @c2.one_to_many :attributes, :class => @c1
    n = @c2.load(:id => 1234)
    att = @c1.load(:id => 345)
    a = [att]
    n.associations[:attributes] = a
    n.remove_attribute(att)
    a.must_equal []
  end

  it "should remove item's reciprocal cache when calling remove_" do
    @c2.one_to_many :attributes, :class => @c1
    @c1.many_to_one :node, :class => @c2
    n = @c2.new(:id => 1234)
    att = @c1.new(:id => 345)
    att.associations[:node] = n
    att.node.must_equal n
    n.remove_attribute(att)
    att.node.must_be_nil
  end

  it "should not create the add_, remove_, or remove_all_ methods if :read_only option is used" do
    @c2.one_to_many :attributes, :class => @c1, :read_only=>true
    im = @c2.instance_methods
    im.must_include(:attributes)
    im.must_include(:attributes_dataset)
    im.wont_include(:add_attribute)
    im.wont_include(:remove_attribute)
    im.wont_include(:remove_all_attributes)
  end

  it "should not add associations methods directly to class" do
    @c2.one_to_many :attributes, :class => @c1
    im = @c2.instance_methods
    im.must_include(:attributes)
    im.must_include(:attributes_dataset)
    im.must_include(:add_attribute)
    im.must_include(:remove_attribute)
    im.must_include(:remove_all_attributes)
    im2 = @c2.instance_methods(false)
    im2.wont_include(:attributes)
    im2.wont_include(:attributes_dataset)
    im2.wont_include(:add_attribute)
    im2.wont_include(:remove_attribute)
    im2.wont_include(:remove_all_attributes)
  end

  it "should populate the reciprocal many_to_one cache when loading the one_to_many association" do
    @c2.one_to_many :attributes, :class => @c1, :key => :node_id
    @c1.many_to_one :node, :class => @c2, :key => :node_id
    n = @c2.new(:id => 1234)
    atts = n.attributes
    DB.sqls.must_equal ['SELECT * FROM attributes WHERE (attributes.node_id = 1234)']
    atts.must_equal [@c1.load({})]
    atts.map{|a| a.node}.must_equal [n]
    DB.sqls.must_equal []
  end

  it "should use an explicit :reciprocal option if given" do
    @c2.one_to_many :attributes, :class => @c1, :key => :node_id, :reciprocal=>:wxyz
    n = @c2.new(:id => 1234)
    atts = n.attributes
    DB.sqls.must_equal ['SELECT * FROM attributes WHERE (attributes.node_id = 1234)']
    atts.must_equal [@c1.load({})]
    atts.map{|a| a.associations[:wxyz]}.must_equal [n]
    DB.sqls.must_equal []
  end

  it "should have a remove_all_ method that removes all associated objects" do
    @c2.one_to_many :attributes, :class => @c1
    @c2.new(:id => 1234).remove_all_attributes
    DB.sqls.must_equal ['UPDATE attributes SET node_id = NULL WHERE (node_id = 1234)']
  end

  it "should have remove_all method respect association filters" do
    @c2.one_to_many :attributes, :class => @c1, :conditions=>{:a=>1} do |ds|
      ds.filter(:b=>2)
    end
    @c2.new(:id => 1234).remove_all_attributes
    DB.sqls.must_equal ['UPDATE attributes SET node_id = NULL WHERE ((a = 1) AND (node_id = 1234) AND (b = 2))']
  end

  it "should have the remove_all_ method respect the :primary_key option" do
    @c2.one_to_many :attributes, :class => @c1, :primary_key=>:xxx
    @c2.new(:id => 1234, :xxx=>5).remove_all_attributes
    DB.sqls.must_equal ['UPDATE attributes SET node_id = NULL WHERE (node_id = 5)']
  end

  it "should have the remove_all_ method respect composite keys" do
    @c2.one_to_many :attributes, :class => @c1, :key=>[:node_id, :y], :primary_key=>[:id, :x]
    @c2.new(:id => 1234, :x=>5).remove_all_attributes
    DB.sqls.must_equal ["UPDATE attributes SET node_id = NULL, y = NULL WHERE ((node_id = 1234) AND (y = 5))"]
  end

  it "remove_all should set the cache to []" do
    @c2.one_to_many :attributes, :class => @c1
    node = @c2.new(:id => 1234)
    node.remove_all_attributes
    node.associations[:attributes].must_equal []
  end

  it "remove_all should return the array of previously associated items if the cache is populated" do
    @c2.one_to_many :attributes, :class => @c1
    attrib = @c1.new(:id=>3)
    node = @c2.new(:id => 1234)
    @c1.dataset = @c1.dataset.with_fetch([[], [{:id=>3, :node_id=>1234}]])
    node.attributes.must_equal []
    node.add_attribute(attrib)
    node.associations[:attributes].must_equal [attrib]
    node.remove_all_attributes.must_equal [attrib]
  end

  it "remove_all should return nil if the cache is not populated" do
    @c2.one_to_many :attributes, :class => @c1
    @c2.new(:id => 1234).remove_all_attributes.must_be_nil
  end

  it "remove_all should remove the current item from all reciprocal association caches if they are populated" do
    @c2.one_to_many :attributes, :class => @c1
    @c1.many_to_one :node, :class => @c2
    @c2.dataset = @c2.dataset.with_fetch([])
    @c1.dataset = @c1.dataset.with_fetch([[], [{:id=>3, :node_id=>1234}]])
    attrib = @c1.new(:id=>3)
    node = @c2.load(:id => 1234)
    node.attributes.must_equal []
    attrib.node.must_be_nil
    node.add_attribute(attrib)
    attrib.associations[:node].must_equal node
    node.remove_all_attributes
    attrib.associations.fetch(:node, 2).must_be_nil
  end

  it "should call an _add_ method internally to add attributes" do
    @c2.one_to_many :attributes, :class => @c1
    @c2.private_instance_methods.must_include(:_add_attribute)
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    def p._add_attribute(x)
      @x = x
    end
    def c._node_id=; raise; end
    p.add_attribute(c)
    p.instance_variable_get(:@x).must_equal c
  end

  it "should support an :adder option for defining the _add_ method" do
    @c2.one_to_many :attributes, :class => @c1, :adder=>proc{|x| @x = x}
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    def c._node_id=; raise; end
    p.add_attribute(c)
    p.instance_variable_get(:@x).must_equal c
  end

  it "should support an :adder option that accepts keywords" do
    @c2.one_to_many :attributes, :class => @c1, :adder=>eval('proc{|x, foo: nil| @x = [x, foo]}')
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    def c._node_id=; raise; end
    p.add_attribute(c, foo: 1)
    p.instance_variable_get(:@x).must_equal [c, 1]
  end if RUBY_VERSION >= '2.0'

  it "should allow additional arguments given to the add_ method and pass them onwards to the _add_ method" do
    @c2.one_to_many :attributes, :class => @c1
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    def p._add_attribute(x,*y)
      @x = x
      @y = y
    end
    def c._node_id=; raise; end
    p.add_attribute(c,:foo,:bar=>:baz)
    p.instance_variable_get(:@x).must_equal c
    p.instance_variable_get(:@y).must_equal [:foo,{:bar=>:baz}]
  end

  it "should call a _remove_ method internally to remove attributes" do
    @c2.one_to_many :attributes, :class => @c1
    @c2.private_instance_methods.must_include(:_remove_attribute)
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    def p._remove_attribute(x)
      @x = x
    end
    def c._node_id=; raise; end
    p.remove_attribute(c)
    p.instance_variable_get(:@x).must_equal c
  end

  it "should support a :remover option for defining the _remove_ method" do
    @c2.one_to_many :attributes, :class => @c1, :remover=>proc{|x| @x = x}
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    def c._node_id=; raise; end
    p.remove_attribute(c)
    p.instance_variable_get(:@x).must_equal c
  end

  it "should support a :remover option that accepts keywords" do
    @c2.one_to_many :attributes, :class => @c1, :remover=>eval('proc{|x, foo: nil| @x = [x, foo]}')
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    def c._node_id=; raise; end
    p.remove_attribute(c, foo: 1)
    p.instance_variable_get(:@x).must_equal [c, 1]
  end if RUBY_VERSION >= '2.0'

  it "should allow additional arguments given to the remove_ method and pass them onwards to the _remove_ method" do
    @c2.one_to_many :attributes, :class => @c1, :reciprocal=>nil
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    def p._remove_attribute(x,*y)
      @x = x
      @y = y
    end
    def c._node_id=; raise; end
    p.remove_attribute(c,:foo,:bar=>:baz)
    p.instance_variable_get(:@x).must_equal c
    p.instance_variable_get(:@y).must_equal [:foo,{:bar=>:baz}]
  end

  it "should allow additional arguments given to the remove_all_ method and pass them onwards to the _remove_all_ method" do
    @c2.one_to_many :attributes, :class => @c1
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    def p._remove_all_attributes(*y)
      @y = y
    end
    def c._node_id=; raise; end
    p.remove_all_attributes(:foo,:bar=>:baz)
    p.instance_variable_get(:@y).must_equal [:foo,{:bar=>:baz}]
  end

  it "should call a _remove_all_ method internally to remove attributes" do
    @c2.one_to_many :attributes, :class => @c1
    @c2.private_instance_methods.must_include(:_remove_all_attributes)
    p = @c2.load(:id=>10)
    def p._remove_all_attributes
      @x = :foo
    end
    p.remove_all_attributes
    p.instance_variable_get(:@x).must_equal :foo
  end

  it "should support a :clearer option for defining the _remove_all_ method" do
    @c2.one_to_many :attributes, :class => @c1, :clearer=>proc{@x = :foo}
    p = @c2.load(:id=>10)
    p.remove_all_attributes
    p.instance_variable_get(:@x).must_equal :foo
  end

  it "should support a :clearer option that supports keywords" do
    @c2.one_to_many :attributes, :class => @c1, :clearer=>eval('proc{|foo: nil| @x = foo}')
    p = @c2.load(:id=>10)
    p.remove_all_attributes(foo: 1)
    p.instance_variable_get(:@x).must_equal 1
  end if RUBY_VERSION >= '2.0'

  it "should support (before|after)_(add|remove) callbacks" do
    h = []
    @c2.one_to_many :attributes, :class => @c1, :before_add=>[proc{|x,y| h << x.pk; h << -y.pk}, :blah], :after_add=>proc{h << 3}, :before_remove=>:blah, :after_remove=>[:blahr]
    @c2.class_eval do
      self::Foo = h
      def _add_attribute(v)
        model::Foo << 4
      end
      def _remove_attribute(v)
        model::Foo << 5
      end
      def blah(x)
        model::Foo << x.pk
      end
      def blahr(x)
        model::Foo << 6
      end
    end
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    h.must_equal []
    p.add_attribute(c)
    h.must_equal [10, -123, 123, 4, 3]
    p.remove_attribute(c)
    h.must_equal [10, -123, 123, 4, 3, 123, 5, 6]
  end

  it "should support after_load association callback" do
    h = []
    @c2.one_to_many :attributes, :class => @c1, :after_load=>[proc{|x,y| h << [x.pk, y.collect{|z|z.pk}]}, :al]
    @c2.class_eval do
      self::Foo = h
      def al(v)
        v.each{|x| model::Foo << x.pk}
      end
    end
    @c1.dataset = @c1.dataset.with_fetch([{:id=>20}, {:id=>30}])
    p = @c2.load(:id=>10, :parent_id=>20)
    attributes = p.attributes
    h.must_equal [[10, [20, 30]], 20, 30]
    attributes.collect{|a| a.pk}.must_equal [20, 30]
  end

  it "should raise error and not call internal add or remove method if before callback calls cancel_action if raise_on_save_failure is true" do
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    @c2.one_to_many :attributes, :class => @c1, :before_add=>:ba, :before_remove=>:br
    def p.ba(o); cancel_action; end
    def p._add_attribute; raise; end
    def p._remove_attribute; raise; end
    p.associations[:attributes] = []
    proc{p.add_attribute(c)}.must_raise(Sequel::HookFailed)
    p.attributes.must_equal []
    p.associations[:attributes] = [c]
    def p.br(o); cancel_action; end
    proc{p.remove_attribute(c)}.must_raise(Sequel::HookFailed)
    p.attributes.must_equal [c]
  end

  it "should return nil and not call internal add or remove method if before callback calls cancel_action if raise_on_save_failure is false" do
    p = @c2.load(:id=>10)
    c = @c1.load(:id=>123)
    p.raise_on_save_failure = false
    @c2.one_to_many :attributes, :class => @c1, :before_add=>:ba, :before_remove=>:br
    def p.ba(o); cancel_action; end
    def p._add_attribute; raise; end
    def p._remove_attribute; raise; end
    p.associations[:attributes] = []
    p.add_attribute(c).must_be_nil
    p.attributes.must_equal []
    p.associations[:attributes] = [c]
    def p.br(o); cancel_action; end
    p.remove_attribute(c).must_be_nil
    p.attributes.must_equal [c]
  end

  it "should have association dataset use false condition if any key is nil" do
    @c1.one_to_many :children, :class => @c1, :primary_key=>:node_id
    @c1.load(:id=>1).children_dataset.sql.must_equal "SELECT * FROM attributes WHERE 'f'"
  end
end
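
# The many_to_many specs below go through a join table. With no :join_table
# option, Sequel derives the join table name from the underscored model names
# in alphabetical order (attributes_nodes here), with an _id-suffixed key
# column for each side.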
describe Sequel::Model, "many_to_many" do
  before do
    @c1 = Class.new(Sequel::Model(:attributes)) do
      unrestrict_primary_key
      attr_accessor :yyy

      def self.name; 'Attribute'; end
      def self.to_s; 'Attribute'; end
      columns :id, :y, :z
    end

    @c2 = Class.new(Sequel::Model(:nodes)) do
      unrestrict_primary_key
      attr_accessor :xxx

      def self.name; 'Node'; end
      def self.to_s; 'Node'; end
      columns :id, :x
    end
    @dataset = @c2.dataset
    @c1.dataset = @c1.dataset.with_autoid(1)
    [@c1, @c2].each{|c| c.dataset = c.dataset.with_fetch({})}
    DB.reset
  end

  it "should raise an error if current class does not have a primary key, and :left_primary_key is not specified" do
    @c2.no_primary_key
    proc{@c2.many_to_many :attributes, :class => @c1}.must_raise(Sequel::Error)
    DB.sqls.must_equal []
  end

  it "should raise an error if associated class does not have a primary key, and :right_primary_key is not specified" do
    @c1.no_primary_key
    @c2.many_to_many :attributes, :class => @c1
    d = @c2.new(:id => 1234)
    proc{d.attributes}.must_raise(Sequel::Error)
    DB.sqls.must_equal []
  end

  it "should use implicit key values and join table if omitted" do
    @c2.many_to_many :attributes, :class => @c1
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)'
  end

  it "should use implicit key values and join table if omitted for one_through_one" do
    @c2.one_through_one :attribute, :class => @c1
    @c2.new(:id => 1234).attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) LIMIT 1'
  end

  it "should use implicit class if omitted" do
    begin
      class ::Tag < Sequel::Model; end
      @c2.many_to_many :tags
      @c2.new(:id => 1234).tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN nodes_tags ON (nodes_tags.tag_id = tags.id) WHERE (nodes_tags.node_id = 1234)'
    ensure
      Object.send(:remove_const, :Tag)
    end
  end

  it "should use class inside module if given as a string" do
    begin
      module ::Historical
        class Tag < Sequel::Model; end
      end
      @c2.many_to_many :tags, :class=>'::Historical::Tag'
      @c2.new(:id => 1234).tags_dataset.sql.must_equal 'SELECT tags.* FROM tags INNER JOIN nodes_tags ON (nodes_tags.tag_id = tags.id) WHERE (nodes_tags.node_id = 1234)'
    ensure
      Object.send(:remove_const, :Historical)
    end
  end

  it "should not override a selection consisting completely of qualified columns using Sequel::SQL::QualifiedIdentifier" do
    @c1.dataset = @c1.dataset.select(Sequel.qualify(:attributes, :id), Sequel.qualify(:attributes, :b))
    @c2.many_to_many :attributes, :class => @c1
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.id, attributes.b FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)'
  end

  with_symbol_splitting "should not override a selection consisting completely of qualified columns using symbols" do
    @c1.dataset = @c1.dataset.select(:attributes__id, :attributes__b)
    @c2.many_to_many :attributes, :class => @c1
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.id, attributes.b FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)'
  end

  it "should not override a selection consisting completely of qualified columns using Sequel::SQL::AliasedExpression" do
    @c1.dataset = @c1.dataset.select(Sequel.qualify(:attributes, :id).as(:a), Sequel[:attributes][:b].as(:c))
    @c2.many_to_many :attributes, :class => @c1
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.id AS a, attributes.b AS c FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)'
  end
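
  # with_symbol_splitting is a spec helper that runs the example with Sequel's
  # deprecated symbol splitting syntax (e.g. :attributes__b) enabled.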
with_symbol_splitting "should not override a selection consisting completely of qualified columns using Sequel::SQL::AliasedExpression with qualified symbol" do @c1.dataset = @c1.dataset.select(Sequel.qualify(:attributes, :id).as(:a), Sequel.as(:attributes__b, :c)) @c2.many_to_many :attributes, :class => @c1 @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.id AS a, attributes.b AS c FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)' end it "should override a selection consisting of non qualified columns" do @c1.dataset = @c1.dataset.select{foo(:bar)} @c2.many_to_many :attributes, :class => @c1 @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)' end it "should respect :predicate_key when lazily loading" do @c2.many_to_many :attributes, :class => @c1, :predicate_key=>Sequel.subscript(Sequel[:attributes_nodes][:node_id], 0) @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id[0] = 1234)' end it "should use explicit key values and join table if given" do @c2.many_to_many :attributes, :class => @c1, :left_key => :nodeid, :right_key => :attributeid, :join_table => :attribute2node @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attribute2node ON (attribute2node.attributeid = attributes.id) WHERE (attribute2node.nodeid = 1234)' end it "should support a conditions option" do @c2.many_to_many :attributes, :class => @c1, :conditions => {:a=>32} @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((a = 32) AND (attributes_nodes.node_id = 1234))' @c2.many_to_many :attributes, :class => @c1, :conditions => Sequel.lit('a = ?', 32) @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((a = 32) AND (attributes_nodes.node_id = 1234))' @c2.new(:id => 1234).attributes.must_equal [@c1.load({})] end it "should support an order option" do @c2.many_to_many :attributes, :class => @c1, :order => :blah @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) ORDER BY blah' end it "should support an array for the order option" do @c2.many_to_many :attributes, :class => @c1, :order => [:blah1, :blah2] @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) ORDER BY blah1, blah2' end it "should support :left_primary_key and :right_primary_key options" do @c2.many_to_many :attributes, :class => @c1, :left_primary_key=>:xxx, :right_primary_key=>:yyy @c2.new(:id => 1234, :xxx=>5).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.yyy) WHERE (attributes_nodes.node_id = 5)' end it "should support composite keys" do 
    @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y]
    @c2.load(:id => 1234, :x=>5).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON ((attributes_nodes.r1 = attributes.id) AND (attributes_nodes.r2 = attributes.y)) WHERE ((attributes_nodes.l1 = 1234) AND (attributes_nodes.l2 = 5))'
  end

  it "should not issue query if not all keys have values" do
    @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y]
    @c2.load(:id => 1234, :x=>nil).attributes.must_equal []
    DB.sqls.must_equal []
  end

  it "should raise an Error unless same number of composite keys used" do
    proc{@c2.many_to_many :attributes, :class => @c1, :left_key=>[:node_id, :id]}.must_raise(Sequel::Error)
    proc{@c2.many_to_many :attributes, :class => @c1, :left_primary_key=>[:node_id, :id]}.must_raise(Sequel::Error)
    proc{@c2.many_to_many :attributes, :class => @c1, :left_key=>[:node_id, :id], :left_primary_key=>:id}.must_raise(Sequel::Error)
    proc{@c2.many_to_many :attributes, :class => @c1, :left_key=>:id, :left_primary_key=>[:node_id, :id]}.must_raise(Sequel::Error)
    proc{@c2.many_to_many :attributes, :class => @c1, :left_key=>[:node_id, :id, :x], :left_primary_key=>[:parent_id, :id]}.must_raise(Sequel::Error)

    proc{@c2.many_to_many :attributes, :class => @c1, :right_primary_key=>[:node_id, :id]}.must_raise(Sequel::Error)
    proc{@c2.many_to_many :attributes, :class => @c1, :right_key=>[:node_id, :id], :right_primary_key=>:id}.must_raise(Sequel::Error)
    proc{@c2.many_to_many :attributes, :class => @c1, :right_key=>:id, :left_primary_key=>[:node_id, :id]}.must_raise(Sequel::Error)
    proc{@c2.many_to_many :attributes, :class => @c1, :right_key=>[:node_id, :id, :x], :right_primary_key=>[:parent_id, :id]}.must_raise(Sequel::Error)
  end

  it "should support a select option" do
    @c2.many_to_many :attributes, :class => @c1, :select => :blah
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT blah FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)'
  end

  it "should support an array for the select option" do
    @c2.many_to_many :attributes, :class => @c1, :select => [Sequel::SQL::ColumnAll.new(:attributes), Sequel[:attribute_nodes][:blah2]]
    @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.*, attribute_nodes.blah2 FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)'
  end

  it "should accept a block" do
    @c2.many_to_many :attributes, :class => @c1 do |ds|
      ds.filter(:xxx => @xxx)
    end
    n = @c2.new(:id => 1234)
    n.xxx = 555
    n.attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((attributes_nodes.node_id = 1234) AND (xxx = 555))'
  end

  it "should allow the :order option while accepting a block" do
    @c2.many_to_many :attributes, :class => @c1, :order=>[:blah1, :blah2] do |ds|
      ds.filter(:xxx => @xxx)
    end
    n = @c2.new(:id => 1234)
    n.xxx = 555
    n.attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((attributes_nodes.node_id = 1234) AND (xxx = 555)) ORDER BY blah1, blah2'
  end
default" do c1 = @c1 @c2.many_to_many :attributes, :class => @c1, :dataset=>proc{c1.join_table(:natural, :an).filter(Sequel[:an][:nodeid]=>pk)}, :order=> :a, :limit=>10, :select=>nil do |ds| ds.filter(:xxx => @xxx) end n = @c2.new(:id => 1234) n.xxx = 555 n.attributes_dataset.sql.must_equal 'SELECT * FROM attributes NATURAL JOIN an WHERE ((an.nodeid = 1234) AND (xxx = 555)) ORDER BY a LIMIT 10' n.attributes.must_equal [@c1.load({})] DB.sqls.must_equal ['SELECT * FROM attributes NATURAL JOIN an WHERE ((an.nodeid = 1234) AND (xxx = 555)) ORDER BY a LIMIT 10'] end it "should support a :dataset option that accepts the reflection as an argument" do @c2.many_to_many :attributes, :class => @c1, :dataset=>lambda{|opts| opts.associated_class.natural_join(:an).filter(Sequel[:an][:nodeid]=>pk)}, :order=> :a, :limit=>10, :select=>nil do |ds| ds.filter(:xxx => @xxx) end n = @c2.new(:id => 1234) n.xxx = 555 n.attributes_dataset.sql.must_equal 'SELECT * FROM attributes NATURAL JOIN an WHERE ((an.nodeid = 1234) AND (xxx = 555)) ORDER BY a LIMIT 10' n.attributes.must_equal [@c1.load({})] DB.sqls.must_equal ['SELECT * FROM attributes NATURAL JOIN an WHERE ((an.nodeid = 1234) AND (xxx = 555)) ORDER BY a LIMIT 10'] end it "should support a :limit option" do @c2.many_to_many :attributes, :class => @c1 , :limit=>10 @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) LIMIT 10' @c2.many_to_many :attributes, :class => @c1 , :limit=>[10, 10] @c2.new(:id => 1234).attributes_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) LIMIT 10 OFFSET 10' end it "should have the :eager option affect the _dataset method" do @c2.many_to_many :attributes, :class => @c2 , :eager=>:attributes @c2.new(:id => 1234).attributes_dataset.opts[:eager].must_equal(:attributes=>nil) end it "should handle an aliased join table" do @c2.many_to_many :attributes, :class => @c1, :join_table => Sequel[:attribute2node].as(:attributes_nodes) n = @c2.load(:id => 1234) a = @c1.load(:id => 2345) n.attributes_dataset.sql.must_equal "SELECT attributes.* FROM attributes INNER JOIN attribute2node AS attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)" a.must_equal n.add_attribute(a) a.must_equal n.remove_attribute(a) n.remove_all_attributes DB.sqls.must_equal ["INSERT INTO attribute2node (node_id, attribute_id) VALUES (1234, 2345)", "DELETE FROM attribute2node WHERE ((node_id = 1234) AND (attribute_id = 2345))", "DELETE FROM attribute2node WHERE (node_id = 1234)"] end with_symbol_splitting "should handle an aliased symbol join table" do @c2.many_to_many :attributes, :class => @c1, :join_table => :attribute2node___attributes_nodes n = @c2.load(:id => 1234) a = @c1.load(:id => 2345) n.attributes_dataset.sql.must_equal "SELECT attributes.* FROM attributes INNER JOIN attribute2node AS attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)" a.must_equal n.add_attribute(a) a.must_equal n.remove_attribute(a) n.remove_all_attributes DB.sqls.must_equal ["INSERT INTO attribute2node (node_id, attribute_id) VALUES (1234, 2345)", "DELETE FROM attribute2node WHERE ((node_id = 1234) AND (attribute_id = 2345))", "DELETE FROM attribute2node WHERE (node_id = 1234)"] end it "should 
define an add_ method that works on existing records" do @c2.many_to_many :attributes, :class => @c1 n = @c2.load(:id => 1234) a = @c1.load(:id => 2345) n.add_attribute(a).must_equal a DB.sqls.must_equal ["INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (1234, 2345)"] end it "should define an add_ method that works with a primary key" do @c2.many_to_many :attributes, :class => @c1 n = @c2.load(:id => 1234) a = @c1.load(:id => 2345) @c1.dataset = @c1.dataset.with_fetch(:id=>2345) n.add_attribute(2345).must_equal a DB.sqls.must_equal ["SELECT * FROM attributes WHERE id = 2345", "INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (1234, 2345)"] end it "should raise an error if the primary key passed to the add_ method does not match an existing record" do @c2.many_to_many :attributes, :class => @c1 n = @c2.load(:id => 1234) @c1.dataset = @c1.dataset.with_fetch([]) proc{n.add_attribute(2345)}.must_raise(Sequel::NoMatchingRow) DB.sqls.must_equal ["SELECT * FROM attributes WHERE id = 2345"] end it "should allow passing a hash to the add_ method which creates a new record" do @c2.many_to_many :attributes, :class => @c1 n = @c2.load(:id => 1234) @c1.dataset = @c1.dataset.with_fetch(:id=>1) n.add_attribute(:id => 1).must_equal @c1.load(:id => 1) DB.sqls.must_equal ['INSERT INTO attributes (id) VALUES (1)', "SELECT * FROM attributes WHERE id = 1", "INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (1234, 1)"] end it "should define a remove_ method that works on existing records" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) a = @c1.new(:id => 2345) n.remove_attribute(a).must_equal a DB.sqls.must_equal ['DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 2345))'] end it "should raise an error in the add_ method if the passed associated object is not of the correct type" do @c2.many_to_many :attributes, :class => @c1 proc{@c2.new(:id => 1234).add_attribute(@c2.new)}.must_raise(Sequel::Error) end it "should accept a primary key for the remove_ method and remove an existing record" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) @c1.dataset = @c1.dataset.with_fetch(:id=>234) n.remove_attribute(234).must_equal @c1.load(:id => 234) DB.sqls.must_equal ["SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((attributes_nodes.node_id = 1234) AND (attributes.id = 234)) LIMIT 1", "DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 234))"] end it "should raise an error in the remove_ method if the passed associated object is not of the correct type" do @c2.many_to_many :attributes, :class => @c1 proc{@c2.new(:id => 1234).remove_attribute(@c2.new)}.must_raise(Sequel::Error) end it "should have the add_ method respect the :left_primary_key and :right_primary_key options" do @c2.many_to_many :attributes, :class => @c1, :left_primary_key=>:xxx, :right_primary_key=>:yyy n = @c2.load(:id => 1234).set(:xxx=>5) a = @c1.load(:id => 2345).set(:yyy=>8) n.add_attribute(a).must_equal a DB.sqls.must_equal ["INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (5, 8)"] end it "should have add_ method not add the same object to the cached association array if the object is already in the array" do @c2.many_to_many :attributes, :class => @c1 n = @c2.load(:id => 1234).set(:xxx=>5) a = @c1.load(:id => 2345).set(:yyy=>8) n.associations[:attributes] = [] a.must_equal n.add_attribute(a) a.must_equal n.add_attribute(a) 
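# add_attribute was called twice above with the same object; the assertion on the
# next line checks that the cached association array still contains only one entry.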
n.attributes.must_equal [a] end it "should have the add_ method respect composite keys" do @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :z] n = @c2.load(:id => 1234, :x=>5) a = @c1.load(:id => 2345, :z=>8) a.must_equal n.add_attribute(a) sqls = DB.sqls m = /INSERT INTO attributes_nodes \((\w+), (\w+), (\w+), (\w+)\) VALUES \((\d+), (\d+), (\d+), (\d+)\)/.match(sqls.pop) sqls.must_equal [] m.wont_equal nil map = {'l1'=>1234, 'l2'=>5, 'r1'=>2345, 'r2'=>8} %w[l1 l2 r1 r2].each do |x| v = false 4.times do |i| i += 1 if m[i] == x m[i+4].must_equal map[x].to_s v = true end end v.must_equal true end end it "should have the add_ method respect composite keys when passed a composite primary key value" do @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :z] @c1.dataset = @c1.dataset.with_fetch(:id=>2345, :z=>8) @c1.set_primary_key [:id, :z] n = @c2.load(:id => 1234, :x=>5) a = @c1.load(:id => 2345, :z=>8) n.add_attribute([2345, 8]).must_equal a DB.sqls.must_equal ["SELECT * FROM attributes WHERE ((id = 2345) AND (z = 8)) LIMIT 1", "INSERT INTO attributes_nodes (l1, l2, r1, r2) VALUES (1234, 5, 2345, 8)"] end it "should have the remove_ method respect the :left_primary_key and :right_primary_key options" do @c2.many_to_many :attributes, :class => @c1, :left_primary_key=>:xxx, :right_primary_key=>:yyy n = @c2.new(:id => 1234, :xxx=>5) a = @c1.new(:id => 2345, :yyy=>8) n.remove_attribute(a).must_equal a DB.sqls.must_equal ['DELETE FROM attributes_nodes WHERE ((node_id = 5) AND (attribute_id = 8))'] end it "should have the remove_ method respect composite keys" do @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :z] n = @c2.load(:id => 1234, :x=>5) a = @c1.load(:id => 2345, :z=>8) a.must_equal n.remove_attribute(a) DB.sqls.must_equal ["DELETE FROM attributes_nodes WHERE ((l1 = 1234) AND (l2 = 5) AND (r1 = 2345) AND (r2 = 8))"] end it "should accept an array of composite primary key values for the remove_ method and remove an existing record" do @c1.dataset = @c1.dataset.with_fetch(:id=>234, :y=>8) @c1.set_primary_key [:id, :y] @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) @c1.load(:id => 234, :y=>8).must_equal n.remove_attribute([234, 8]) DB.sqls.must_equal ["SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((attributes_nodes.node_id = 1234) AND (attributes.id = 234) AND (attributes.y = 8)) LIMIT 1", "DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 234))"] end it "should raise an error if the model object doesn't have a valid primary key" do @c2.many_to_many :attributes, :class => @c1 a = @c2.new n = @c1.load(:id=>123) proc{a.attributes_dataset}.must_raise(Sequel::Error) proc{a.add_attribute(n)}.must_raise(Sequel::Error) proc{a.remove_attribute(n)}.must_raise(Sequel::Error) proc{a.remove_all_attributes}.must_raise(Sequel::Error) end it "should save the associated object first in add_ if passed a new model object" do @c2.many_to_many :attributes, :class => @c1 n = @c1.new a = @c2.load(:id=>123) n.new?.must_equal true @c1.dataset = @c1.dataset.with_fetch(:id=>1) a.add_attribute(n) n.new?.must_equal false end it "should raise a ValidationFailed in add_ if the associated object is new and invalid" do @c2.many_to_many :attributes, :class
=> @c1 n = @c1.new a = @c2.load(:id=>123) def n.validate() errors.add(:id, 'foo') end proc{a.add_attribute(n)}.must_raise(Sequel::ValidationFailed) end it "should raise an Error in add_ if the associated object is new and invalid and raise_on_save_failure is false" do @c2.many_to_many :attributes, :class => @c1 n = @c1.new n.raise_on_save_failure = false a = @c2.load(:id=>123) def n.validate() errors.add(:id, 'foo') end proc{a.add_attribute(n)}.must_raise(Sequel::Error) end it "should not attempt to validate the associated object in add_ if the :validate=>false option is used" do @c2.many_to_many :attributes, :class => @c1, :validate=>false n = @c1.new a = @c2.load(:id=>123) def n.validate() errors.add(:id, 'foo') end @c1.dataset = @c1.dataset.with_fetch(:id=>1) a.add_attribute(n) n.new?.must_equal false end it "should raise an error if trying to remove a model object that doesn't have a valid primary key" do @c2.many_to_many :attributes, :class => @c1 n = @c1.new a = @c2.load(:id=>123) proc{a.remove_attribute(n)}.must_raise(Sequel::Error) end it "should provide an array with all members of the association" do @c2.many_to_many :attributes, :class => @c1 @c2.new(:id => 1234).attributes.must_equal [@c1.load({})] DB.sqls.must_equal ['SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)'] end it "should populate cache when accessed" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) n.associations.include?(:attributes).must_equal false atts = n.attributes atts.must_equal n.associations[:attributes] end it "should use cache if available" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) n.associations[:attributes] = 42 n.attributes.must_equal 42 DB.sqls.must_equal [] end it "should not use cache if asked to reload" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) n.associations[:attributes] = 42 n.attributes(:reload=>true).wont_equal 42 DB.sqls.must_equal ["SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234)"] end it "should add item to cache if it exists when calling add_" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) att = @c1.load(:id => 345) a = [] n.associations[:attributes] = a n.add_attribute(att) a.must_equal [att] end it "should add item to reciprocal's cache if it exists when calling add_" do @c2.many_to_many :attributes, :class => @c1 @c1.many_to_many :nodes, :class => @c2 n = @c2.new(:id => 1234) att = @c1.load(:id => 345) att.associations[:nodes] = [] n.add_attribute(att) att.nodes.must_equal [n] end it "should remove item from cache if it exists when calling remove_" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) att = @c1.load(:id => 345) a = [att] n.associations[:attributes] = a n.remove_attribute(att) a.must_equal [] end it "should remove item from reciprocal's cache if it exists when calling remove_" do @c2.many_to_many :attributes, :class => @c1 @c1.many_to_many :nodes, :class => @c2 n = @c2.new(:id => 1234) att = @c1.new(:id => 345) att.associations[:nodes] = [n] n.remove_attribute(att) att.nodes.must_equal [] end it "should not create the add_, remove_, or remove_all_ methods if :read_only option is used" do @c2.many_to_many :attributes, :class => @c1, :read_only=>true im = @c2.instance_methods im.must_include(:attributes) im.must_include(:attributes_dataset)
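# With :read_only=>true the reader and dataset methods above are still defined;
# the mutation methods (add_/remove_/remove_all_) checked below must be absent.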
im.wont_include(:add_attribute) im.wont_include(:remove_attribute) im.wont_include(:remove_all_attributes) end it "should not add association methods directly to class" do @c2.many_to_many :attributes, :class => @c1 im = @c2.instance_methods im.must_include(:attributes) im.must_include(:attributes_dataset) im.must_include(:add_attribute) im.must_include(:remove_attribute) im.must_include(:remove_all_attributes) im2 = @c2.instance_methods(false) im2.wont_include(:attributes) im2.wont_include(:attributes_dataset) im2.wont_include(:add_attribute) im2.wont_include(:remove_attribute) im2.wont_include(:remove_all_attributes) end it "should have a remove_all_ method that removes all associations" do @c2.many_to_many :attributes, :class => @c1 @c2.new(:id => 1234).remove_all_attributes DB.sqls.must_equal ['DELETE FROM attributes_nodes WHERE (node_id = 1234)'] end it "should have the remove_all_ method respect the :left_primary_key option" do @c2.many_to_many :attributes, :class => @c1, :left_primary_key=>:xxx @c2.new(:id => 1234, :xxx=>5).remove_all_attributes DB.sqls.must_equal ['DELETE FROM attributes_nodes WHERE (node_id = 5)'] end it "should have the remove_all_ method respect composite keys" do @c2.many_to_many :attributes, :class => @c1, :left_primary_key=>[:id, :x], :left_key=>[:l1, :l2] @c2.load(:id => 1234, :x=>5).remove_all_attributes DB.sqls.must_equal ['DELETE FROM attributes_nodes WHERE ((l1 = 1234) AND (l2 = 5))'] end it "remove_all should set the cached instance variable to []" do @c2.many_to_many :attributes, :class => @c1 node = @c2.new(:id => 1234) node.remove_all_attributes node.associations[:attributes].must_equal [] end it "remove_all should return the array of previously associated items if the cached instance variable exists" do @c2.many_to_many :attributes, :class => @c1 attrib = @c1.load(:id=>3) node = @c2.load(:id => 1234) @c1.dataset = @c1.dataset.with_fetch([]) node.attributes.must_equal [] node.add_attribute(attrib) node.associations[:attributes].must_equal [attrib] node.remove_all_attributes.must_equal [attrib] end it "remove_all should return nil if the cached instance variable does not exist" do @c2.many_to_many :attributes, :class => @c1 @c2.new(:id => 1234).remove_all_attributes.must_be_nil end it "remove_all should remove the current item from all reciprocal instance variables if its cached instance variable exists" do @c2.many_to_many :attributes, :class => @c1 @c1.many_to_many :nodes, :class => @c2 @c1.dataset = @c1.dataset.with_fetch([]) @c2.dataset = @c2.dataset.with_fetch([]) attrib = @c1.load(:id=>3) node = @c2.new(:id => 1234) node.attributes.must_equal [] attrib.nodes.must_equal [] node.add_attribute(attrib) attrib.associations[:nodes].must_equal [node] node.remove_all_attributes attrib.associations[:nodes].must_equal [] end it "add, remove, and remove_all methods should respect :join_table_block option" do @c2.many_to_many :attributes, :class => @c1, :join_table_block=>proc{|ds| ds.filter(:x=>123)} o = @c2.load(:id => 1234) o.add_attribute(@c1.load(:id=>44)) o.remove_attribute(@c1.load(:id=>45)) o.remove_all_attributes sqls = DB.sqls sqls.shift =~ /INSERT INTO attributes_nodes \((node_id|attribute_id), (node_id|attribute_id)\) VALUES \((1234|44), (1234|44)\)/ sqls.must_equal ["DELETE FROM attributes_nodes WHERE ((x = 123) AND (node_id = 1234) AND (attribute_id = 45))", "DELETE FROM attributes_nodes WHERE ((x = 123) AND (node_id = 1234))"] end it "should call an _add_ method internally to add attributes" do @c2.many_to_many :attributes, :class => @c1
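# The public add_ method delegates to a private _add_ method; overriding
# _add_attribute below means no join table INSERT is issued, so DB.sqls stays empty.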
@c2.private_instance_methods.must_include(:_add_attribute) p = @c2.load(:id=>10) c = @c1.load(:id=>123) def p._add_attribute(x) @x = x end p.add_attribute(c) p.instance_variable_get(:@x).must_equal c DB.sqls.must_equal [] end it "should support an :adder option for defining the _add_ method" do @c2.many_to_many :attributes, :class => @c1, :adder=>proc{|x| @x = x} p = @c2.load(:id=>10) c = @c1.load(:id=>123) p.add_attribute(c) p.instance_variable_get(:@x).must_equal c DB.sqls.must_equal [] end it "should allow additional arguments given to the add_ method and pass them onwards to the _add_ method" do @c2.many_to_many :attributes, :class => @c1 p = @c2.load(:id=>10) c = @c1.load(:id=>123) def p._add_attribute(x,*y) @x = x @y = y end p.add_attribute(c,:foo,:bar=>:baz) p.instance_variable_get(:@x).must_equal c p.instance_variable_get(:@y).must_equal [:foo,{:bar=>:baz}] end it "should call a _remove_ method internally to remove attributes" do @c2.many_to_many :attributes, :class => @c1 @c2.private_instance_methods.must_include(:_remove_attribute) p = @c2.load(:id=>10) c = @c1.load(:id=>123) def p._remove_attribute(x) @x = x end p.remove_attribute(c) p.instance_variable_get(:@x).must_equal c DB.sqls.must_equal [] end it "should support a :remover option for defining the _remove_ method" do @c2.many_to_many :attributes, :class => @c1, :remover=>proc{|x| @x = x} p = @c2.load(:id=>10) c = @c1.load(:id=>123) p.remove_attribute(c) p.instance_variable_get(:@x).must_equal c DB.sqls.must_equal [] end it "should allow additional arguments given to the remove_ method and pass them onwards to the _remove_ method" do @c2.many_to_many :attributes, :class => @c1 p = @c2.load(:id=>10) c = @c1.load(:id=>123) def p._remove_attribute(x,*y) @x = x @y = y end p.remove_attribute(c,:foo,:bar=>:baz) p.instance_variable_get(:@x).must_equal c p.instance_variable_get(:@y).must_equal [:foo,{:bar=>:baz}] end it "should allow additional arguments given to the remove_all_ method and pass them onwards to the _remove_all_ method" do @c2.many_to_many :attributes, :class => @c1 p = @c2.load(:id=>10) def p._remove_all_attributes(*y) @y = y end p.remove_all_attributes(:foo,:bar=>:baz) p.instance_variable_get(:@y).must_equal [:foo,{:bar=>:baz}] end it "should call a _remove_all_ method internally to remove attributes" do @c2.many_to_many :attributes, :class => @c1 @c2.private_instance_methods.must_include(:_remove_all_attributes) p = @c2.load(:id=>10) def p._remove_all_attributes @x = :foo end p.remove_all_attributes p.instance_variable_get(:@x).must_equal :foo DB.sqls.must_equal [] end it "should support a :clearer option for defining the _remove_all_ method" do @c2.many_to_many :attributes, :class => @c1, :clearer=>proc{@x = :foo} p = @c2.load(:id=>10) p.remove_all_attributes p.instance_variable_get(:@x).must_equal :foo DB.sqls.must_equal [] end it "should support (before|after)_(add|remove) callbacks" do h = [] @c2.many_to_many :attributes, :class => @c1, :before_add=>[proc{|x,y| h << x.pk; h << -y.pk}, :blah], :after_add=>proc{h << 3}, :before_remove=>:blah, :after_remove=>[:blahr] @c2.class_eval do self::Foo = h def _add_attribute(v) model::Foo << 4 end def _remove_attribute(v) model::Foo << 5 end def blah(x) model::Foo << x.pk end def blahr(x) model::Foo << 6 end end p = @c2.load(:id=>10) c = @c1.load(:id=>123) h.must_equal [] p.add_attribute(c) h.must_equal [10, -123, 123, 4, 3] p.remove_attribute(c) h.must_equal [10, -123, 123, 4, 3, 123, 5, 6] end it "should support after_load association callback" do h = [] 
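# :after_load callbacks (procs or method name symbols) run after the association
# query, receiving the model instance and the array of loaded associated objects.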
@c2.many_to_many :attributes, :class => @c1, :after_load=>[proc{|x,y| h << [x.pk, y.collect{|z|z.pk}]}, :al] @c2.class_eval do self::Foo = h def al(v) v.each{|x| model::Foo << x.pk} end end @c1.dataset = @c1.dataset.with_fetch([{:id=>20}, {:id=>30}]) p = @c2.load(:id=>10, :parent_id=>20) attributes = p.attributes h.must_equal [[10, [20, 30]], 20, 30] attributes.collect{|a| a.pk}.must_equal [20, 30] end it "should raise error and not call internal add or remove method if before callback calls cancel_action if raise_on_save_failure is true" do p = @c2.load(:id=>10) c = @c1.load(:id=>123) @c2.many_to_many :attributes, :class => @c1, :before_add=>:ba, :before_remove=>:br def p.ba(o) cancel_action end def p._add_attribute; raise; end def p._remove_attribute; raise; end p.associations[:attributes] = [] p.raise_on_save_failure = true proc{p.add_attribute(c)}.must_raise(Sequel::HookFailed) p.attributes.must_equal [] p.associations[:attributes] = [c] def p.br(o) cancel_action end proc{p.remove_attribute(c)}.must_raise(Sequel::HookFailed) p.attributes.must_equal [c] end it "should return nil and not call internal add or remove method if before callback calls cancel_action if raise_on_save_failure is false" do p = @c2.load(:id=>10) c = @c1.load(:id=>123) p.raise_on_save_failure = false @c2.many_to_many :attributes, :class => @c1, :before_add=>:ba, :before_remove=>:br def p.ba(o) cancel_action end def p._add_attribute; raise; end def p._remove_attribute; raise; end p.associations[:attributes] = [] p.add_attribute(c).must_be_nil p.attributes.must_equal [] p.associations[:attributes] = [c] def p.br(o) cancel_action end p.remove_attribute(c).must_be_nil p.attributes.must_equal [c] end it "should support a :uniq option that removes duplicates from the association" do @c2.many_to_many :attributes, :class => @c1, :uniq=>true @c1.dataset = @c1.dataset.with_fetch([{:id=>20}, {:id=>30}, {:id=>20}, {:id=>30}]) @c2.load(:id=>10, :parent_id=>20).attributes.must_equal [@c1.load(:id=>20), @c1.load(:id=>30)] end it "should support a :distinct option that uses the DISTINCT clause" do @c2.many_to_many :attributes, :class => @c1, :distinct=>true @c2.load(:id=>10).attributes_dataset.sql.must_equal "SELECT DISTINCT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 10)" end it "should not apply association options when removing all associated records" do @c2.many_to_many :attributes, :class => @c1 do |ds| ds.filter(:name=>'John') end @c2.load(:id=>1).remove_all_attributes DB.sqls.must_equal ["DELETE FROM attributes_nodes WHERE (node_id = 1)"] end it "should use association's dataset when grabbing a record to remove from the association by primary key" do @c2.many_to_many :attributes, :class => @c1 do |ds| ds.filter(:join_table_att=>3) end @c1.dataset = @c1.dataset.with_fetch(:id=>2) @c2.load(:id=>1).remove_attribute(2) DB.sqls.must_equal ["SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((attributes_nodes.node_id = 1) AND (join_table_att = 3) AND (attributes.id = 2)) LIMIT 1", "DELETE FROM attributes_nodes WHERE ((node_id = 1) AND (attribute_id = 2))"] end it "should have association dataset use false condition if any key is nil" do @c1.many_to_many :attributes, :class => @c1, :left_primary_key=>:y @c1.load(:id=>1).attributes_dataset.sql.must_equal "SELECT attributes.* FROM attributes INNER JOIN attributes_attributes ON (attributes_attributes.attribute_id =
attributes.id) WHERE 'f'" end end describe Sequel::Model, "one_through_one" do before do @c1 = Class.new(Sequel::Model(:attributes)) do unrestrict_primary_key attr_accessor :yyy def self.name; 'Attribute'; end def self.to_s; 'Attribute'; end columns :id, :y, :z end @c2 = Class.new(Sequel::Model(:nodes)) do unrestrict_primary_key attr_accessor :xxx def self.name; 'Node'; end def self.to_s; 'Node'; end columns :id, :x end @dataset = @c2.dataset @c1.dataset = @c1.dataset.with_autoid(1) [@c1, @c2].each{|c| c.dataset = c.dataset.with_fetch({})} DB.reset end after do DB.fetch = {:id => 1, :x => 1} end it "should use implicit key values and join table if omitted" do @c2.one_through_one :attribute, :class => @c1 @c2.new(:id => 1234).attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) LIMIT 1' end it "should respect :predicate_key when lazily loading" do @c2.one_through_one :attribute, :class => @c1, :predicate_key=>Sequel.subscript(Sequel[:attributes_nodes][:node_id], 0) @c2.new(:id => 1234).attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id[0] = 1234) LIMIT 1' end it "should use explicit key values and join table if given" do @c2.one_through_one :attribute, :class => @c1, :left_key => :nodeid, :right_key => :attributeid, :join_table => :attribute2node @c2.new(:id => 1234).attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attribute2node ON (attribute2node.attributeid = attributes.id) WHERE (attribute2node.nodeid = 1234) LIMIT 1' end it "should support a conditions option" do @c2.one_through_one :attribute, :class => @c1, :conditions => {:a=>32} @c2.new(:id => 1234).attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((a = 32) AND (attributes_nodes.node_id = 1234)) LIMIT 1' @c2.one_through_one :attribute, :class => @c1, :conditions => Sequel.lit('a = ?', 32) @c2.new(:id => 1234).attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((a = 32) AND (attributes_nodes.node_id = 1234)) LIMIT 1' @c2.new(:id => 1234).attribute.must_equal @c1.load({}) end it "should support an order option" do @c2.one_through_one :attribute, :class => @c1, :order => :blah @c2.new(:id => 1234).attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) ORDER BY blah LIMIT 1' end it "should support an array for the order option" do @c2.one_through_one :attribute, :class => @c1, :order => [:blah1, :blah2] @c2.new(:id => 1234).attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) ORDER BY blah1, blah2 LIMIT 1' end it "should support :left_primary_key and :right_primary_key options" do @c2.one_through_one :attribute, :class => @c1, :left_primary_key=>:xxx, :right_primary_key=>:yyy @c2.new(:id => 1234, :xxx=>5).attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.yyy) WHERE 
(attributes_nodes.node_id = 5) LIMIT 1' end it "should support composite keys" do @c2.one_through_one :attribute, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] @c2.load(:id => 1234, :x=>5).attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON ((attributes_nodes.r1 = attributes.id) AND (attributes_nodes.r2 = attributes.y)) WHERE ((attributes_nodes.l1 = 1234) AND (attributes_nodes.l2 = 5)) LIMIT 1' end it "should not issue query if not all keys have values" do @c2.one_through_one :attribute, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] @c2.load(:id => 1234, :x=>nil).attribute.must_be_nil DB.sqls.must_equal [] end it "should raise an Error unless same number of composite keys used" do proc{@c2.one_through_one :attribute, :class => @c1, :left_key=>[:node_id, :id]}.must_raise(Sequel::Error) proc{@c2.one_through_one :attribute, :class => @c1, :left_primary_key=>[:node_id, :id]}.must_raise(Sequel::Error) proc{@c2.one_through_one :attribute, :class => @c1, :left_key=>[:node_id, :id], :left_primary_key=>:id}.must_raise(Sequel::Error) proc{@c2.one_through_one :attribute, :class => @c1, :left_key=>:id, :left_primary_key=>[:node_id, :id]}.must_raise(Sequel::Error) proc{@c2.one_through_one :attribute, :class => @c1, :left_key=>[:node_id, :id, :x], :left_primary_key=>[:parent_id, :id]}.must_raise(Sequel::Error) proc{@c2.one_through_one :attribute, :class => @c1, :right_primary_key=>[:node_id, :id]}.must_raise(Sequel::Error) proc{@c2.one_through_one :attribute, :class => @c1, :right_key=>[:node_id, :id], :right_primary_key=>:id}.must_raise(Sequel::Error) proc{@c2.one_through_one :attribute, :class => @c1, :right_key=>:id, :left_primary_key=>[:node_id, :id]}.must_raise(Sequel::Error) proc{@c2.one_through_one :attribute, :class => @c1, :right_key=>[:node_id, :id, :x], :right_primary_key=>[:parent_id, :id]}.must_raise(Sequel::Error) end it "should support a select option" do @c2.one_through_one :attribute, :class => @c1, :select => :blah @c2.new(:id => 1234).attribute_dataset.sql.must_equal 'SELECT blah FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) LIMIT 1' end it "should support an array for the select option" do @c2.one_through_one :attribute, :class => @c1, :select => [Sequel::SQL::ColumnAll.new(:attributes), Sequel[:attribute_nodes][:blah2]] @c2.new(:id => 1234).attribute_dataset.sql.must_equal 'SELECT attributes.*, attribute_nodes.blah2 FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) LIMIT 1' end it "should accept a block" do @c2.one_through_one :attribute, :class => @c1 do |ds| ds.filter(:xxx => @xxx) end n = @c2.new(:id => 1234) n.xxx = 555 n.attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE ((attributes_nodes.node_id = 1234) AND (xxx = 555)) LIMIT 1' end it "should allow the :order option while accepting a block" do @c2.one_through_one :attribute, :class => @c1, :order=>[:blah1, :blah2] do |ds| ds.filter(:xxx => @xxx) end n = @c2.new(:id => 1234) n.xxx = 555 n.attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE 
((attributes_nodes.node_id = 1234) AND (xxx = 555)) ORDER BY blah1, blah2 LIMIT 1' end it "should support a :dataset option that is used instead of the default" do c1 = @c1 @c2.one_through_one :attribute, :class => @c1, :dataset=>proc{c1.join_table(:natural, :an).filter(Sequel[:an][:nodeid]=>pk)}, :order=> :a, :select=>nil do |ds| ds.filter(:xxx => @xxx) end n = @c2.new(:id => 1234) n.xxx = 555 n.attribute_dataset.sql.must_equal 'SELECT * FROM attributes NATURAL JOIN an WHERE ((an.nodeid = 1234) AND (xxx = 555)) ORDER BY a LIMIT 1' n.attribute.must_equal @c1.load({}) DB.sqls.must_equal ['SELECT * FROM attributes NATURAL JOIN an WHERE ((an.nodeid = 1234) AND (xxx = 555)) ORDER BY a LIMIT 1'] end it "should support a :dataset option that accepts the reflection as an argument" do @c2.one_through_one :attribute, :class => @c1, :dataset=>lambda{|opts| opts.associated_class.natural_join(:an).filter(Sequel[:an][:nodeid]=>pk)}, :order=> :a, :select=>nil do |ds| ds.filter(:xxx => @xxx) end n = @c2.new(:id => 1234) n.xxx = 555 n.attribute_dataset.sql.must_equal 'SELECT * FROM attributes NATURAL JOIN an WHERE ((an.nodeid = 1234) AND (xxx = 555)) ORDER BY a LIMIT 1' n.attribute.must_equal @c1.load({}) DB.sqls.must_equal ['SELECT * FROM attributes NATURAL JOIN an WHERE ((an.nodeid = 1234) AND (xxx = 555)) ORDER BY a LIMIT 1'] end it "should support a :limit option to specify an offset" do @c2.one_through_one :attribute, :class => @c1 , :limit=>[nil, 10] @c2.new(:id => 1234).attribute_dataset.sql.must_equal 'SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) LIMIT 1 OFFSET 10' end it "should have the :eager option affect the _dataset method" do @c2.one_through_one :attribute, :class => @c2 , :eager=>:attribute @c2.new(:id => 1234).attribute_dataset.opts[:eager].must_equal(:attribute=>nil) end it "should handle an aliased join table" do @c2.one_through_one :attribute, :class => @c1, :join_table => Sequel[:attribute2node].as(:attributes_nodes) n = @c2.load(:id => 1234) n.attribute_dataset.sql.must_equal "SELECT attributes.* FROM attributes INNER JOIN attribute2node AS attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) LIMIT 1" end with_symbol_splitting "should handle an aliased join table with splittable symbol" do @c2.one_through_one :attribute, :class => @c1, :join_table => :attribute2node___attributes_nodes n = @c2.load(:id => 1234) n.attribute_dataset.sql.must_equal "SELECT attributes.* FROM attributes INNER JOIN attribute2node AS attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) LIMIT 1" end it "should raise an error if the model object doesn't have a valid primary key" do @c2.one_through_one :attribute, :class => @c1 a = @c2.new proc{a.attribute_dataset}.must_raise(Sequel::Error) end it "should provide the single member of the association" do @c2.one_through_one :attribute, :class => @c1 @c2.new(:id => 1234).attribute.must_equal @c1.load({}) DB.sqls.must_equal ['SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) LIMIT 1'] end it "should populate cache when accessed" do @c2.one_through_one :attribute, :class => @c1 n = @c2.new(:id => 1234) n.associations.include?(:attribute).must_equal false atts = n.attribute atts.must_equal n.associations[:attribute] end it "should use
cache if available" do @c2.one_through_one :attribute, :class => @c1 n = @c2.new(:id => 1234) n.associations[:attribute] = 42 n.attribute.must_equal 42 DB.sqls.must_equal [] end it "should not use cache if asked to reload" do @c2.one_through_one :attribute, :class => @c1 n = @c2.new(:id => 1234) n.associations[:attribute] = 42 n.attribute(:reload=>true).wont_equal 42 DB.sqls.must_equal ["SELECT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 1234) LIMIT 1"] end it "should not add associations methods directly to class" do @c2.one_through_one :attribute, :class => @c1 im = @c2.instance_methods im.must_include(:attribute) im.must_include(:attribute_dataset) im2 = @c2.instance_methods(false) im2.wont_include(:attribute) im2.wont_include(:attribute_dataset) end it "should support after_load association callback" do h = [] @c2.one_through_one :attribute, :class => @c1, :after_load=>[proc{|x,y| h << [x.pk, y.pk]}, :al] @c2.class_eval do self::Foo = h def al(v) model::Foo << v.pk end end @c1.dataset = @c1.dataset.with_fetch([{:id=>20}]) p = @c2.load(:id=>10, :parent_id=>20) attribute = p.attribute h.must_equal [[10, 20], 20] attribute.pk.must_equal 20 end it "should support a :distinct option that uses the DISTINCT clause" do @c2.one_through_one :attribute, :class => @c1, :distinct=>true @c2.load(:id=>10).attribute_dataset.sql.must_equal "SELECT DISTINCT attributes.* FROM attributes INNER JOIN attributes_nodes ON (attributes_nodes.attribute_id = attributes.id) WHERE (attributes_nodes.node_id = 10) LIMIT 1" end it "should not add a setter method if the :read_only option is true" do @c2.one_through_one :attribute, :class => @c1, :read_only=>true im = @c2.instance_methods im.must_include(:attribute) im.wont_include(:attribute=) end it "should add a setter method" do @c2.one_through_one :attribute, :class => @c1 attrib = @c1.new(:id=>3) DB.fetch = [] o = @c2.load(:id => 1234) o.attribute = nil DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (node_id = 1234) LIMIT 1"] o.attribute = attrib DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (node_id = 1234) LIMIT 1", "INSERT INTO attributes_nodes (attribute_id, node_id) VALUES (3, 1234)"] DB.fetch = {:node_id=>1234, :attribute_id=>5} o.attribute = nil DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (node_id = 1234) LIMIT 1", "DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 5))"] o.attribute = attrib DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (node_id = 1234) LIMIT 1", "UPDATE attributes_nodes SET attribute_id = 3 WHERE ((node_id = 1234) AND (attribute_id = 5))"] @c2.load(:id => 1234).attribute = @c1.new(:id=>5) DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (node_id = 1234) LIMIT 1"] end it "should use a transaction in the setter method" do @c2.one_through_one :attribute, :class => @c1 @c2.use_transactions = true @c1.load(:id=>3) DB.fetch = [] @c2.new(:id => 1234).attribute = nil DB.sqls.must_equal ['BEGIN', "SELECT * FROM attributes_nodes WHERE (node_id = 1234) LIMIT 1", 'COMMIT'] end it "should have setter method respect :join_table_block option" do @c2.one_through_one :attribute, :class => @c1, :join_table_block=>proc{|ds| ds.where(:a)} attrib = @c1.new(:id=>3) DB.fetch = [] o = @c2.new(:id => 1234) o.attribute = nil DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (a AND (node_id = 1234)) LIMIT 1"] o.attribute = attrib DB.sqls.must_equal ["SELECT * FROM attributes_nodes 
WHERE (a AND (node_id = 1234)) LIMIT 1", "INSERT INTO attributes_nodes (attribute_id, node_id) VALUES (3, 1234)"] DB.fetch = {:node_id=>1234, :attribute_id=>5} o.attribute = nil DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (a AND (node_id = 1234)) LIMIT 1", "DELETE FROM attributes_nodes WHERE (a AND (node_id = 1234) AND (attribute_id = 5))"] o.attribute = attrib DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (a AND (node_id = 1234)) LIMIT 1", "UPDATE attributes_nodes SET attribute_id = 3 WHERE (a AND (node_id = 1234) AND (attribute_id = 5))"] end it "should have the setter method respect the :left_primary_key and :right_primary_key option" do @c2.one_through_one :attribute, :class => @c1, :left_primary_key=>:xxx, :right_primary_key=>:yyy attrib = @c1.new(:id=>3, :yyy=>7) DB.fetch = [] o = @c2.new(:id => 1234, :xxx=>5) o.attribute = nil DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (node_id = 5) LIMIT 1"] o.attribute = attrib DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (node_id = 5) LIMIT 1", "INSERT INTO attributes_nodes (attribute_id, node_id) VALUES (7, 5)"] DB.fetch = {:node_id=>1234, :attribute_id=>9} o.attribute = nil DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (node_id = 5) LIMIT 1", "DELETE FROM attributes_nodes WHERE ((node_id = 5) AND (attribute_id = 9))"] o.attribute = attrib DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE (node_id = 5) LIMIT 1", "UPDATE attributes_nodes SET attribute_id = 7 WHERE ((node_id = 5) AND (attribute_id = 9))"] end it "should have the setter method respect composite keys" do @c2.one_through_one :attribute, :class => @c1, :left_key=>[:node_id, :y], :left_primary_key=>[:id, :x], :right_key=>[:attribute_id, :z], :right_primary_key=>[:id, :w] attrib = @c1.load(:id=>3, :w=>7) @c1.def_column_alias :w, :w DB.fetch = [] o = @c2.new(:id => 1234, :x=>5) o.attribute = nil DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE ((node_id = 1234) AND (y = 5)) LIMIT 1"] o.attribute = attrib DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE ((node_id = 1234) AND (y = 5)) LIMIT 1", "INSERT INTO attributes_nodes (attribute_id, z, node_id, y) VALUES (3, 7, 1234, 5)"] DB.fetch = {:node_id=>1234, :attribute_id=>10, :y=>6, :z=>8} o.attribute = nil DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE ((node_id = 1234) AND (y = 5)) LIMIT 1", "DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (y = 5) AND (attribute_id = 10) AND (z = 8))"] o.attribute = attrib DB.sqls.must_equal ["SELECT * FROM attributes_nodes WHERE ((node_id = 1234) AND (y = 5)) LIMIT 1", "UPDATE attributes_nodes SET attribute_id = 3, z = 7 WHERE ((node_id = 1234) AND (y = 5) AND (attribute_id = 10) AND (z = 8))"] end it "should raise an error if the current model object that doesn't have a valid primary key" do @c2.one_through_one :attribute, :class => @c1 p = @c2.new c = @c2.load(:id=>123) proc{c.attribute = p}.must_raise(Sequel::Error) end it "should raise an error if the associated object that doesn't have a valid primary key" do @c2.one_through_one :attribute, :class => @c1 p = @c2.new c = @c2.load(:id=>123) proc{p.attribute = c}.must_raise(Sequel::Error) end it "should make the change to the foreign_key value inside a _association= method" do @c2.one_through_one :attribute, :class => @c1 @c2.private_instance_methods.must_include(:_attribute=) attrib = @c1.new(:id=>3) o = @c2.new(:id => 1234) def o._attribute=(x) @x = x end o.attribute = attrib o.instance_variable_get(:@x).must_equal attrib end it "should have 
a :setter option define the _association= method" do @c2.one_through_one :attribute, :class => @c1, :setter=>proc{|x| @x = x} attrib = @c1.new(:id=>3) o = @c2.new(:id => 1234) o.attribute = attrib o.instance_variable_get(:@x).must_equal attrib end it "should support (before|after)_set callbacks" do h = [] @c2.one_through_one :attribute, :class => @c1, :before_set=>[proc{|x,y| h << x.pk; h << (y ? -y.pk : :y)}, :blah], :after_set=>proc{h << :l} @c2.class_eval do self::Foo = h def blah(x) model::Foo << (x ? x.pk : :x) end end attrib = @c1.new(:id=>3) o = @c2.new(:id => 1234) h.must_equal [] o.attribute = attrib h.must_equal [1234, -3, 3, :l] o.attribute = nil h.must_equal [1234, -3, 3, :l, 1234, :y, :x, :l] end it "should have association dataset use false condition if any key is nil" do @c1.one_through_one :attribute, :class => @c1, :left_primary_key=>:y @c1.load(:id=>1).attribute_dataset.sql.must_equal "SELECT attributes.* FROM attributes INNER JOIN attributes_attributes ON (attributes_attributes.attribute_id = attributes.id) WHERE 'f' LIMIT 1" end end describe "many_to_many/one_through_one associations with :join_table_db" do before do @db1, @db2, @db3 = @dbs = 3.times.map{Sequel.mock(:fetch=>{:id => 1, :x => 1}, :numrows=>1, :autoid=>proc{|sql| 10})} @c1 = Class.new(Sequel::Model(@db1[:attributes])) do unrestrict_primary_key attr_accessor :yyy def self.name; 'Attribute'; end def self.to_s; 'Attribute'; end columns :id, :y, :z end @c2 = Class.new(Sequel::Model(@db2[:nodes])) do unrestrict_primary_key attr_accessor :xxx def self.name; 'Node'; end def self.to_s; 'Node'; end columns :id, :x end @c1.default_association_options[:join_table_db] = @db3 @c2.default_association_options[:join_table_db] = @db3 @db3.fetch = {:attribute_id=>555} @db1.fetch = {:id=>555} sqls end def sqls @dbs.map(&:sqls) end it "should support dataset method" do @c2.many_to_many :attributes, :class => @c1 @c2.new(:id => 1234).attributes_dataset.sql.must_equal "SELECT attributes.* FROM attributes WHERE (id IN (555))" sqls.must_equal [[], [], ["SELECT attribute_id FROM attributes_nodes WHERE (node_id = 1234)"]] end it "should support association method" do @c2.many_to_many :attributes, :class => @c1 @c2.new(:id => 1234).attributes.must_equal [@c1.load(:id=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555))"], [], ["SELECT attribute_id FROM attributes_nodes WHERE (node_id = 1234)"]] @c2.one_through_one :attribute, :class => @c1 @db1.fetch = [{:id=>555, :x=>1}, {:id=>555, :x=>2}] @c2.new(:id => 1234).attribute.must_equal @c1.load(:id=>555, :x=>1) sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555)) LIMIT 1"], [], ["SELECT attribute_id FROM attributes_nodes WHERE (node_id = 1234)"]] end it "should support an existing selection on the dataset" do @c1.dataset = @c1.dataset.select(Sequel.qualify(:attributes, :id), Sequel.qualify(:attributes, :b)) @db1.sqls @c2.many_to_many :attributes, :class => @c1 @db1.fetch = {:id=>555, :b=>10} @c2.new(:id => 1234).attributes.must_equal [@c1.load(:id=>555, :b=>10)] sqls.must_equal [["SELECT attributes.id, attributes.b FROM attributes WHERE (id IN (555))"], [], ["SELECT attribute_id FROM attributes_nodes WHERE (node_id = 1234)"]] end it "should support a conditions option" do @c2.many_to_many :attributes, :class => @c1, :conditions => {:a=>32} @c2.new(:id => 1234).attributes.must_equal [@c1.load(:id=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE ((a = 32) AND (id IN (555)))"], [], ["SELECT attribute_id FROM 
attributes_nodes WHERE (node_id = 1234)"]] end it "should support an order option" do @c2.many_to_many :attributes, :class => @c1, :order => :blah @c2.new(:id => 1234).attributes.must_equal [@c1.load(:id=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555)) ORDER BY blah"], [], ["SELECT attribute_id FROM attributes_nodes WHERE (node_id = 1234)"]] end it "should support :left_primary_key and :right_primary_key options" do @c2.many_to_many :attributes, :class => @c1, :left_primary_key=>:xxx, :right_primary_key=>:yyy @db3.fetch = {:attribute_id=>555, :node_id=>5} @db1.fetch = {:id=>14, :yyy=>555} @c2.new(:id => 1234, :xxx=>5).attributes.must_equal [@c1.load(:id=>14, :yyy=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (yyy IN (555))"], [], ["SELECT attribute_id FROM attributes_nodes WHERE (node_id = 5)"]] end it "should support composite keys" do @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] @db3.fetch = {:l1=>555, :l2=>5, :r1=>14, :r2=>555} @db1.fetch = {:id=>14, :y=>555} @c2.new(:id => 1234, :x=>5).attributes.must_equal [@c1.load(:id=>14, :y=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE ((id, y) IN ((14, 555)))"], [], ["SELECT r1, r2 FROM attributes_nodes WHERE ((l1 = 1234) AND (l2 = 5))"]] end it "should handle case where join table query does not produce any rows" do @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] @db3.fetch = [] @db1.fetch = [] @c2.load(:id => 1234, :x=>5).attributes.must_equal [] sqls.must_equal [[], [], ["SELECT r1, r2 FROM attributes_nodes WHERE ((l1 = 1234) AND (l2 = 5))"]] end it "should handle case where join table query returns a NULL value" do @db1.fetch = [] @c2.many_to_many :attributes, :class => @c1 @db3.fetch = {:attribute_id=>nil} @c2.new(:id => 1234).attributes.must_equal [] sqls.must_equal [[], [], ["SELECT attribute_id FROM attributes_nodes WHERE (node_id = 1234)"]] @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] @db3.fetch = {:r1=>nil, :r2=>3} @c2.load(:id => 1234, :x=>5).attributes.must_equal [] sqls.must_equal [[], [], ["SELECT r1, r2 FROM attributes_nodes WHERE ((l1 = 1234) AND (l2 = 5))"]] end it "should support a select option" do @c2.many_to_many :attributes, :class => @c1, :select => :blah @db1.fetch = {:blah=>19} @c2.new(:id => 1234).attributes.must_equal [@c1.load(:blah=>19)] sqls.must_equal [["SELECT blah FROM attributes WHERE (id IN (555))"], [], ["SELECT attribute_id FROM attributes_nodes WHERE (node_id = 1234)"]] end it "should accept a block" do @c2.many_to_many :attributes, :class => @c1 do |ds| ds.filter(:xxx => @xxx) end n = @c2.new(:id => 1234) n.xxx = 444 n.attributes.must_equal [@c1.load(:id=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE ((id IN (555)) AND (xxx = 444))"], [], ["SELECT attribute_id FROM attributes_nodes WHERE (node_id = 1234)"]] end it "should handle an aliased join table" do @c2.many_to_many :attributes, :class => @c1, :join_table => Sequel[:attribute2node].as(:attributes_nodes) n = @c2.load(:id => 1234) a = @c1.load(:id => 2345) n.attributes.must_equal [@c1.load(:id=>555)] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555))"], [], ["SELECT attribute_id FROM attribute2node AS attributes_nodes WHERE 
(node_id = 1234)"]] a.must_equal n.add_attribute(a) a.must_equal n.remove_attribute(a) n.remove_all_attributes sqls.must_equal [[], [], ["INSERT INTO attribute2node (node_id, attribute_id) VALUES (1234, 2345)", "DELETE FROM attribute2node WHERE ((node_id = 1234) AND (attribute_id = 2345))", "DELETE FROM attribute2node WHERE (node_id = 1234)"]] end it "should define an add_ method that works on existing records" do @c2.many_to_many :attributes, :class => @c1 n = @c2.load(:id => 1234) a = @c1.load(:id => 2345) n.add_attribute(a).must_equal a sqls.must_equal [[], [], ["INSERT INTO attributes_nodes (node_id, attribute_id) VALUES (1234, 2345)"]] end it "should define a remove_ method that works on existing records" do @c2.many_to_many :attributes, :class => @c1 n = @c2.new(:id => 1234) a = @c1.new(:id => 2345) n.remove_attribute(a).must_equal a sqls.must_equal [[], [], ['DELETE FROM attributes_nodes WHERE ((node_id = 1234) AND (attribute_id = 2345))']] end it "should have an remove_all_ method that removes all associations" do @c2.many_to_many :attributes, :class => @c1 @c2.new(:id => 1234).remove_all_attributes sqls.must_equal [[], [], ['DELETE FROM attributes_nodes WHERE (node_id = 1234)']] end it "should support eager loading" do @db2.fetch = [{:id=>1234}, {:id=>33}] @db3.fetch = [{:attribute_id=>555, :node_id=>1234}] @c2.many_to_many :attributes, :class => @c1 a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attributes].must_equal [@c1.load(:id=>555)] a[1].associations[:attributes].must_equal [] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555))"], ["SELECT * FROM nodes"], ["SELECT attribute_id, node_id FROM attributes_nodes WHERE (node_id IN (1234, 33))"]] @c2.one_through_one :attribute, :clone=>:attributes @db1.fetch = [{:id=>555, :x=>1}, {:id=>555, :x=>2}] a = @c2.eager(:attribute).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attribute].must_equal @c1.load(:id=>555, :x=>1) a[1].associations[:attribute].must_be_nil sqls.must_equal [["SELECT attributes.* FROM attributes WHERE (id IN (555))"], ["SELECT * FROM nodes"], ["SELECT attribute_id, node_id FROM attributes_nodes WHERE (node_id IN (1234, 33))"]] end it "should skip loading associated table when the join table has no results" do @db2.fetch = [{:id=>1234}, {:id=>33}] @db3.fetch = [] @c2.many_to_many :attributes, :class => @c1 a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attributes].must_equal [] a[1].associations[:attributes].must_equal [] sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT attribute_id, node_id FROM attributes_nodes WHERE (node_id IN (1234, 33))"]] @c2.one_through_one :attribute, :clone=>:attributes @db1.fetch = [{:id=>555, :x=>1}, {:id=>555, :x=>2}] a = @c2.eager(:attribute).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attribute].must_be_nil a[1].associations[:attribute].must_be_nil sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT attribute_id, node_id FROM attributes_nodes WHERE (node_id IN (1234, 33))"]] end it "should support eager loading when the join table includes NULL values" do @db2.fetch = [{:id=>1234}, {:id=>33}] @db3.fetch = [{:attribute_id=>nil, :node_id=>1234}] @c2.many_to_many :attributes, :class => @c1 a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attributes].must_equal [] a[1].associations[:attributes].must_equal [] sqls.must_equal [[], 
["SELECT * FROM nodes"], ["SELECT attribute_id, node_id FROM attributes_nodes WHERE (node_id IN (1234, 33))"]] @c2.one_through_one :attribute, :clone=>:attributes a = @c2.eager(:attribute).all a.must_equal [@c2.load(:id=>1234), @c2.load(:id=>33)] a[0].associations[:attribute].must_be_nil a[1].associations[:attribute].must_be_nil sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT attribute_id, node_id FROM attributes_nodes WHERE (node_id IN (1234, 33))"]] end it "should support eager loading when using composite keys" do @db1.fetch = {:id=>14, :y=>555} @db2.fetch = [{:id=>1234, :x=>333}, {:id=>33, :x=>4}] @db3.fetch = [{:l1=>1234, :l2=>333, :r1=>14, :r2=>555}] @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234, :x=>333), @c2.load(:id=>33, :x=>4)] a[0].associations[:attributes].must_equal [@c1.load(:id=>14, :y=>555)] a[1].associations[:attributes].must_equal [] sqls.must_equal [["SELECT attributes.* FROM attributes WHERE ((id, y) IN ((14, 555)))"], ["SELECT * FROM nodes"], ["SELECT r1, r2, l1, l2 FROM attributes_nodes WHERE ((l1, l2) IN ((1234, 333), (33, 4)))"]] @c2.one_through_one :attribute, :clone=>:attributes @db1.fetch = [{:id=>14, :y=>555, :z=>2}, {:id=>14, :y=>555, :z=>3}] a = @c2.eager(:attribute).all a.must_equal [@c2.load(:id=>1234, :x=>333), @c2.load(:id=>33, :x=>4)] a[0].associations[:attribute].must_equal @c1.load(:id=>14, :y=>555, :z=>2) a[1].associations[:attribute].must_be_nil sqls.must_equal [["SELECT attributes.* FROM attributes WHERE ((id, y) IN ((14, 555)))"], ["SELECT * FROM nodes"], ["SELECT r1, r2, l1, l2 FROM attributes_nodes WHERE ((l1, l2) IN ((1234, 333), (33, 4)))"]] end it "should support eager loading when using composite keys when the join table includes NULL values" do @db2.fetch = [{:id=>1234, :x=>333}, {:id=>33, :x=>4}] @db3.fetch = [{:l1=>1234, :l2=>333, :r1=>nil, :r2=>555}, {:l1=>1234, :l2=>333, :r1=>14, :r2=>nil}, {:l1=>1234, :l2=>333, :r1=>nil, :r2=>nil}] @c2.many_to_many :attributes, :class => @c1, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :left_primary_key=>[:id, :x], :right_primary_key=>[:id, :y] a = @c2.eager(:attributes).all a.must_equal [@c2.load(:id=>1234, :x=>333), @c2.load(:id=>33, :x=>4)] a[0].associations[:attributes].must_equal [] a[1].associations[:attributes].must_equal [] sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT r1, r2, l1, l2 FROM attributes_nodes WHERE ((l1, l2) IN ((1234, 333), (33, 4)))"]] @c2.one_through_one :attribute, :clone=>:attributes a = @c2.eager(:attribute).all a.must_equal [@c2.load(:id=>1234, :x=>333), @c2.load(:id=>33, :x=>4)] a[0].associations[:attribute].must_be_nil a[1].associations[:attribute].must_be_nil sqls.must_equal [[], ["SELECT * FROM nodes"], ["SELECT r1, r2, l1, l2 FROM attributes_nodes WHERE ((l1, l2) IN ((1234, 333), (33, 4)))"]] end end describe "Filtering by associations" do before(:all) do db = Sequel.mock db.extend_datasets do def supports_window_functions?; true; end def supports_distinct_on?; true; end end @Album = Class.new(Sequel::Model(db[:albums])) artist = @Artist = Class.new(Sequel::Model(db[:artists])) tag = @Tag = Class.new(Sequel::Model(db[:tags])) track = @Track = Class.new(Sequel::Model(db[:tracks])) album_info = @AlbumInfo = Class.new(Sequel::Model(db[:album_infos])) @Artist.columns :id, :id1, :id2 @Tag.columns :id, :tid1, :tid2 @Track.columns :id, :album_id, :album_id1, :album_id2 @AlbumInfo.columns :id, 
:album_id, :album_id1, :album_id2 @Album.class_eval do columns :id, :id1, :id2, :artist_id, :artist_id1, :artist_id2 b = lambda{|ds| ds.where(:name=>'B')} c = {:name=>'A'} many_to_one :no_artist, :class=>artist, :key=>:artist_id, :allow_filtering_by=>false many_to_one :artist, :class=>artist, :key=>:artist_id one_to_many :tracks, :class=>track, :key=>:album_id one_to_one :track, :class=>track, :key=>:album_id one_to_one :album_info, :class=>album_info, :key=>:album_id many_to_many :tags, :class=>tag, :left_key=>:album_id, :join_table=>:albums_tags, :right_key=>:tag_id many_to_one :a_artist, :clone=>:artist, :conditions=>c one_to_many :a_tracks, :clone=>:tracks, :conditions=>c one_to_one :a_album_info, :clone=>:album_info, :conditions=>c many_to_many :a_tags, :clone=>:tags, :conditions=>c many_to_one :b_artist, :clone=>:artist, &b one_to_many :b_tracks, :clone=>:tracks, &b one_to_one :b_album_info, :clone=>:album_info, &b many_to_many :b_tags, :clone=>:tags, &b one_to_many :l_tracks, :clone=>:tracks, :limit=>10 one_to_one :l_track, :clone=>:tracks, :order=>:name many_to_many :l_tags, :clone=>:tags, :limit=>10 one_through_one :l_tag, :clone=>:tags, :order=>:name one_to_many :al_tracks, :clone=>:l_tracks, :conditions=>c one_to_one :al_track, :clone=>:l_track, :conditions=>c many_to_many :al_tags, :clone=>:l_tags, :conditions=>c one_through_one :al_tag, :clone=>:l_tag, :conditions=>c many_to_one :cartist, :class=>artist, :key=>[:artist_id1, :artist_id2], :primary_key=>[:id1, :id2] one_to_many :ctracks, :class=>track, :key=>[:album_id1, :album_id2], :primary_key=>[:id1, :id2] one_to_one :calbum_info, :class=>album_info, :key=>[:album_id1, :album_id2], :primary_key=>[:id1, :id2] many_to_many :ctags, :class=>tag, :left_key=>[:album_id1, :album_id2], :left_primary_key=>[:id1, :id2], :right_key=>[:tag_id1, :tag_id2], :right_primary_key=>[:tid1, :tid2], :join_table=>:albums_tags many_to_one :a_cartist, :clone=>:cartist, :conditions=>c one_to_many :a_ctracks, :clone=>:ctracks, :conditions=>c one_to_one :a_calbum_info, :clone=>:calbum_info, :conditions=>c many_to_many :a_ctags, :clone=>:ctags, :conditions=>c many_to_one :b_cartist, :clone=>:cartist, &b one_to_many :b_ctracks, :clone=>:ctracks, &b one_to_one :b_calbum_info, :clone=>:calbum_info, &b many_to_many :b_ctags, :clone=>:ctags, &b one_to_many :l_ctracks, :clone=>:ctracks, :limit=>10 one_to_one :l_ctrack, :clone=>:ctracks, :order=>:name many_to_many :l_ctags, :clone=>:ctags, :limit=>10 one_through_one :l_ctag, :clone=>:ctags, :order=>:name one_to_many :al_ctracks, :clone=>:l_ctracks, :conditions=>c one_to_one :al_ctrack, :clone=>:l_ctrack, :conditions=>c many_to_many :al_ctags, :clone=>:l_ctags, :conditions=>c one_through_one :al_ctag, :clone=>:l_ctag, :conditions=>c end end after do @Album.default_eager_limit_strategy = true end it "should be able to filter on many_to_one associations" do @Album.filter(:artist=>@Artist.load(:id=>3)).sql.must_equal 'SELECT * FROM albums WHERE (albums.artist_id = 3)' end it "should be able to filter on one_to_many associations" do @Album.filter(:tracks=>@Track.load(:album_id=>3)).sql.must_equal 'SELECT * FROM albums WHERE (albums.id = 3)' end it "should be able to filter on one_to_one associations" do @Album.filter(:album_info=>@AlbumInfo.load(:album_id=>3)).sql.must_equal 'SELECT * FROM albums WHERE (albums.id = 3)' end it "should be able to filter on many_to_many associations" do @Album.filter(:tags=>@Tag.load(:id=>3)).sql.must_equal 'SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM 
albums_tags WHERE ((albums_tags.tag_id = 3) AND (albums_tags.album_id IS NOT NULL))))' end it "should be able to filter on many_to_one associations with :conditions" do @Album.filter(:a_artist=>@Artist.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists WHERE ((name = 'A') AND (artists.id IS NOT NULL) AND (artists.id = 3))))" end it "should be able to filter on one_to_many associations with :conditions" do @Album.filter(:a_tracks=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'A') AND (tracks.album_id IS NOT NULL) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :conditions" do @Album.filter(:a_album_info=>@AlbumInfo.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id = 5))))" end it "should be able to filter on many_to_many associations with :conditions" do @Album.filter(:a_tags=>@Tag.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'A') AND (albums_tags.album_id IS NOT NULL) AND (tags.id = 3))))" end it "should be able to filter on many_to_one associations with block" do @Album.filter(:b_artist=>@Artist.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists WHERE ((name = 'B') AND (artists.id IS NOT NULL) AND (artists.id = 3))))" end it "should be able to filter on one_to_many associations with block" do @Album.filter(:b_tracks=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'B') AND (tracks.album_id IS NOT NULL) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with block" do @Album.filter(:b_album_info=>@AlbumInfo.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id = 5))))" end it "should be able to filter on many_to_many associations with block" do @Album.filter(:b_tags=>@Tag.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'B') AND (albums_tags.album_id IS NOT NULL) AND (tags.id = 3))))" end it "should not use an eager limit strategy if the strategy is not specified and the model defaults to not using one" do @Album.default_eager_limit_strategy = nil @Album.one_to_many :l_tracks2, :clone=>:l_tracks @Album.filter(:l_tracks2=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((tracks.album_id IS NOT NULL) AND (tracks.id = 5))))" end it "should be able to filter on one_to_many associations with :limit" do @Album.filter(:l_tracks=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT id FROM (SELECT tracks.id, row_number() OVER (PARTITION BY tracks.album_id) AS x_sequel_row_number_x FROM tracks) AS t1 WHERE 
(x_sequel_row_number_x <= 10))) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :order" do @Album.filter(:l_track=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT DISTINCT ON (tracks.album_id) tracks.id FROM tracks ORDER BY tracks.album_id, name)) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :filter_limit_strategy" do @Album.one_to_one :l_track2, :clone=>:track, :filter_limit_strategy=>:window_function @Album.filter(:l_track2=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT id FROM (SELECT tracks.id, row_number() OVER (PARTITION BY tracks.album_id) AS x_sequel_row_number_x FROM tracks) AS t1 WHERE (x_sequel_row_number_x = 1))) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :eager_limit_strategy" do @Album.one_to_one :l_track2, :clone=>:track, :eager_limit_strategy=>:window_function @Album.filter(:l_track2=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT id FROM (SELECT tracks.id, row_number() OVER (PARTITION BY tracks.album_id) AS x_sequel_row_number_x FROM tracks) AS t1 WHERE (x_sequel_row_number_x = 1))) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :order and :filter_limit_strategy" do @Album.one_to_one :l_track2, :clone=>:l_track, :filter_limit_strategy=>:window_function @Album.filter(:l_track2=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT id FROM (SELECT tracks.id, row_number() OVER (PARTITION BY tracks.album_id ORDER BY name) AS x_sequel_row_number_x FROM tracks) AS t1 WHERE (x_sequel_row_number_x = 1))) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :order and :eager_limit_strategy" do @Album.one_to_one :l_track2, :clone=>:l_track, :eager_limit_strategy=>:window_function @Album.filter(:l_track2=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT id FROM (SELECT tracks.id, row_number() OVER (PARTITION BY tracks.album_id ORDER BY name) AS x_sequel_row_number_x FROM tracks) AS t1 WHERE (x_sequel_row_number_x = 1))) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :order and Model.default_eager_limit_strategy" do @Album.default_eager_limit_strategy = :window_function @Album.one_to_one :l_track2, :clone=>:l_track @Album.filter(:l_track2=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT id FROM (SELECT tracks.id, row_number() OVER (PARTITION BY tracks.album_id ORDER BY name) AS x_sequel_row_number_x FROM tracks) AS t1 WHERE (x_sequel_row_number_x = 1))) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :order and :eager_limit_strategy=>:union" do 
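    # There is no union-based strategy for *filtering* by associations;
    # judging by the expected SQL below, :eager_limit_strategy=>:union falls
    # back to the same DISTINCT ON form used when the default strategy is in
    # effect (compare the :ruby strategy test that follows).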
@Album.one_to_one :l_track2, :clone=>:l_track, :eager_limit_strategy=>:union @Album.filter(:l_track2=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT DISTINCT ON (tracks.album_id) tracks.id FROM tracks ORDER BY tracks.album_id, name)) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :order and :eager_limit_strategy=>:ruby" do @Album.one_to_one :l_track2, :clone=>:l_track, :eager_limit_strategy=>:ruby @Album.filter(:l_track2=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT DISTINCT ON (tracks.album_id) tracks.id FROM tracks ORDER BY tracks.album_id, name)) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :filter_limit_strategy :correlated_subquery" do @Album.one_to_one :l_track2, :clone=>:track, :filter_limit_strategy=>:correlated_subquery @Album.filter(:l_track2=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT t1.id FROM tracks AS t1 WHERE (t1.album_id = tracks.album_id) LIMIT 1)) AND (tracks.id = 5))))" end it "should be able to filter on many_to_many associations with :limit" do @Album.filter(:l_tags=>@Tag.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((albums_tags.album_id IS NOT NULL) AND ((albums_tags.album_id, tags.id) IN (SELECT b, c FROM (SELECT albums_tags.album_id AS b, tags.id AS c, row_number() OVER (PARTITION BY albums_tags.album_id) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id)) AS t1 WHERE (x_sequel_row_number_x <= 10))) AND (tags.id = 3))))" end it "should be able to filter on one_through_one associations with :order" do @Album.filter(:l_tag=>@Tag.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((albums_tags.album_id IS NOT NULL) AND ((albums_tags.album_id, tags.id) IN (SELECT DISTINCT ON (albums_tags.album_id) albums_tags.album_id, tags.id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) ORDER BY albums_tags.album_id, name)) AND (tags.id = 3))))" end it "should be able to filter on one_to_many associations with :limit and :conditions" do @Album.filter(:al_tracks=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'A') AND (tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT id FROM (SELECT tracks.id, row_number() OVER (PARTITION BY tracks.album_id) AS x_sequel_row_number_x FROM tracks WHERE (name = 'A')) AS t1 WHERE (x_sequel_row_number_x <= 10))) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :order and :conditions" do @Album.filter(:al_track=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'A') AND (tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT DISTINCT ON (tracks.album_id) tracks.id FROM tracks WHERE (name = 'A') ORDER BY 
tracks.album_id, name)) AND (tracks.id = 5))))" end it "should be able to filter on many_to_many associations with :limit and :conditions" do @Album.filter(:al_tags=>@Tag.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'A') AND (albums_tags.album_id IS NOT NULL) AND ((albums_tags.album_id, tags.id) IN (SELECT b, c FROM (SELECT albums_tags.album_id AS b, tags.id AS c, row_number() OVER (PARTITION BY albums_tags.album_id) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (name = 'A')) AS t1 WHERE (x_sequel_row_number_x <= 10))) AND (tags.id = 3))))" end it "should be able to filter on one_through_one associations with :order and :conditions" do @Album.filter(:al_tag=>@Tag.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'A') AND (albums_tags.album_id IS NOT NULL) AND ((albums_tags.album_id, tags.id) IN (SELECT DISTINCT ON (albums_tags.album_id) albums_tags.album_id, tags.id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE (name = 'A') ORDER BY albums_tags.album_id, name)) AND (tags.id = 3))))" end it "should be able to filter on many_to_one associations with composite keys" do @Album.filter(:cartist=>@Artist.load(:id1=>3, :id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id1 = 3) AND (albums.artist_id2 = 4))' end it "should be able to filter on one_to_many associations with composite keys" do @Album.filter(:ctracks=>@Track.load(:album_id1=>3, :album_id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1 = 3) AND (albums.id2 = 4))' end it "should be able to filter on one_to_one associations with composite keys" do @Album.filter(:calbum_info=>@AlbumInfo.load(:album_id1=>3, :album_id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1 = 3) AND (albums.id2 = 4))' end it "should be able to filter on many_to_many associations with composite keys" do @Album.filter(:ctags=>@Tag.load(:tid1=>3, :tid2=>4)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM albums_tags WHERE ((albums_tags.tag_id1 = 3) AND (albums_tags.tag_id2 = 4) AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL))))' end it "should be able to filter on many_to_one associations with :conditions and composite keys" do @Album.filter(:a_cartist=>@Artist.load(:id=>5, :id1=>3, :id2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'A') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id = 5))))" end it "should be able to filter on one_to_many associations with :conditions and composite keys" do @Album.filter(:a_ctracks=>@Track.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'A') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :conditions and composite keys" do @Album.filter(:a_calbum_info=>@AlbumInfo.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE 
((albums.id1, albums.id2) IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id = 5))))" end it "should be able to filter on many_to_many associations with :conditions and composite keys" do @Album.filter(:a_ctags=>@Tag.load(:id=>5, :tid1=>3, :tid2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'A') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id = 5))))" end it "should be able to filter on many_to_one associations with block and composite keys" do @Album.filter(:b_cartist=>@Artist.load(:id=>5, :id1=>3, :id2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'B') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id = 5))))" end it "should be able to filter on one_to_many associations with block and composite keys" do @Album.filter(:b_ctracks=>@Track.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'B') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with block and composite keys" do @Album.filter(:b_calbum_info=>@AlbumInfo.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id = 5))))" end it "should be able to filter on many_to_many associations with block and composite keys" do @Album.filter(:b_ctags=>@Tag.load(:id=>5, :tid1=>3, :tid2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'B') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id = 5))))" end it "should be able to filter on one_to_many associations with :limit and composite keys" do @Album.filter(:l_ctracks=>@Track.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (SELECT id FROM (SELECT tracks.id, row_number() OVER (PARTITION BY tracks.album_id1, tracks.album_id2) AS x_sequel_row_number_x FROM tracks) AS t1 WHERE (x_sequel_row_number_x <= 10))) AND (tracks.id = 5))))" end it "should be able to filter on one_to_many associations with composite keys and :filter_limit_strategy :correlated_subquery" do @Album.one_to_one :l_ctracks2, :clone=>:l_ctracks, :filter_limit_strategy=>:correlated_subquery @Album.filter(:l_ctracks2=>@Track.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks
WHERE ((tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (SELECT t1.id FROM tracks AS t1 WHERE ((t1.album_id1 = tracks.album_id1) AND (t1.album_id2 = tracks.album_id2)) LIMIT 1)) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :order and composite keys" do @Album.filter(:l_ctrack=>@Track.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (SELECT DISTINCT ON (tracks.album_id1, tracks.album_id2) tracks.id FROM tracks ORDER BY tracks.album_id1, tracks.album_id2, name)) AND (tracks.id = 5))))" end it "should be able to filter on many_to_many associations with :limit and composite keys" do @Album.filter(:l_ctags=>@Tag.load(:id=>5, :tid1=>3, :tid2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND ((albums_tags.album_id1, albums_tags.album_id2, tags.id) IN (SELECT b, c, d FROM (SELECT albums_tags.album_id1 AS b, albums_tags.album_id2 AS c, tags.id AS d, row_number() OVER (PARTITION BY albums_tags.album_id1, albums_tags.album_id2) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2))) AS t1 WHERE (x_sequel_row_number_x <= 10))) AND (tags.id = 5))))" end it "should be able to filter on one_through_one associations with :order and composite keys" do @Album.filter(:l_ctag=>@Tag.load(:id=>5, :tid1=>3, :tid2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND ((albums_tags.album_id1, albums_tags.album_id2, tags.id) IN (SELECT DISTINCT ON (albums_tags.album_id1, albums_tags.album_id2) albums_tags.album_id1, albums_tags.album_id2, tags.id FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) ORDER BY albums_tags.album_id1, albums_tags.album_id2, name)) AND (tags.id = 5))))" end it "should be able to filter on one_to_many associations with :limit and :conditions and composite keys" do @Album.filter(:al_ctracks=>@Track.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'A') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (SELECT id FROM (SELECT tracks.id, row_number() OVER (PARTITION BY tracks.album_id1, tracks.album_id2) AS x_sequel_row_number_x FROM tracks WHERE (name = 'A')) AS t1 WHERE (x_sequel_row_number_x <= 10))) AND (tracks.id = 5))))" end it "should be able to filter on one_to_one associations with :order and :conditions and composite keys" do @Album.filter(:al_ctrack=>@Track.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'A') AND 
(tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (SELECT DISTINCT ON (tracks.album_id1, tracks.album_id2) tracks.id FROM tracks WHERE (name = 'A') ORDER BY tracks.album_id1, tracks.album_id2, name)) AND (tracks.id = 5))))" end it "should be able to filter on many_to_many associations with :limit and :conditions and composite keys" do @Album.filter(:al_ctags=>@Tag.load(:id=>5, :tid1=>3, :tid2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'A') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND ((albums_tags.album_id1, albums_tags.album_id2, tags.id) IN (SELECT b, c, d FROM (SELECT albums_tags.album_id1 AS b, albums_tags.album_id2 AS c, tags.id AS d, row_number() OVER (PARTITION BY albums_tags.album_id1, albums_tags.album_id2) AS x_sequel_row_number_x FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE (name = 'A')) AS t1 WHERE (x_sequel_row_number_x <= 10))) AND (tags.id = 5))))" end it "should be able to filter on one_through_one associations with :order and :conditions and composite keys" do @Album.filter(:al_ctag=>@Tag.load(:id=>5, :tid1=>3, :tid2=>4)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'A') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND ((albums_tags.album_id1, albums_tags.album_id2, tags.id) IN (SELECT DISTINCT ON (albums_tags.album_id1, albums_tags.album_id2) albums_tags.album_id1, albums_tags.album_id2, tags.id FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE (name = 'A') ORDER BY albums_tags.album_id1, albums_tags.album_id2, name)) AND (tags.id = 5))))" end it "should work inside a complex filter" do artist = @Artist.load(:id=>3) @Album.filter{foo & {:artist=>artist}}.sql.must_equal 'SELECT * FROM albums WHERE (foo AND (albums.artist_id = 3))' track = @Track.load(:album_id=>4) @Album.filter{foo & [[:artist, artist], [:tracks, track]]}.sql.must_equal 'SELECT * FROM albums WHERE (foo AND (albums.artist_id = 3) AND (albums.id = 4))' end it "should raise for an invalid association name" do proc{@Album.filter(:foo=>@Artist.load(:id=>3)).sql}.must_raise(Sequel::Error) end it "should raise for an invalid association type" do @Album.many_to_many :iatags, :clone=>:tags @Album.association_reflection(:iatags)[:type] = :foo proc{@Album.filter(:iatags=>@Tag.load(:id=>3)).sql}.must_raise(Sequel::Error) end it "should raise for an invalid associated object class" do proc{@Album.filter(:tags=>@Artist.load(:id=>3)).sql}.must_raise(Sequel::Error) end it "should raise for an invalid associated object class when multiple objects are used" do proc{@Album.filter(:tags=>[@Tag.load(:id=>3), @Artist.load(:id=>3)]).sql}.must_raise(Sequel::Error) end it "should correctly handle the case when a multiple value association is used" do proc{@Album.filter(:tags=>[@Tag.load(:id=>3), @Artist.load(:id=>3)]).sql}.must_raise(Sequel::Error) end it "should not affect non-association IN/NOT IN filtering with an empty array" do @Album.filter(:tag_id=>[]).sql.must_equal 'SELECT *
FROM albums WHERE (1 = 0)' @Album.exclude(:tag_id=>[]).sql.must_equal 'SELECT * FROM albums WHERE (1 = 1)' end it "should work correctly in subclasses" do c = Class.new(@Album) c.many_to_one :sartist, :class=>@Artist c.filter(:sartist=>@Artist.load(:id=>3)).sql.must_equal 'SELECT * FROM albums WHERE (albums.sartist_id = 3)' end it "should be able to exclude on many_to_one associations" do @Album.exclude(:artist=>@Artist.load(:id=>3)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id != 3) OR (albums.artist_id IS NULL))' end it "should be able to exclude on one_to_many associations" do @Album.exclude(:tracks=>@Track.load(:album_id=>3)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id != 3) OR (albums.id IS NULL))' end it "should be able to exclude on one_to_one associations" do @Album.exclude(:album_info=>@AlbumInfo.load(:album_id=>3)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id != 3) OR (albums.id IS NULL))' end it "should be able to exclude on many_to_many associations" do @Album.exclude(:tags=>@Tag.load(:id=>3)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT albums_tags.album_id FROM albums_tags WHERE ((albums_tags.tag_id = 3) AND (albums_tags.album_id IS NOT NULL)))) OR (albums.id IS NULL))' end it "should be able to exclude on many_to_one associations with :conditions" do @Album.exclude(:a_artist=>@Artist.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id NOT IN (SELECT artists.id FROM artists WHERE ((name = 'A') AND (artists.id IS NOT NULL) AND (artists.id = 3)))) OR (albums.artist_id IS NULL))" end it "should be able to exclude on one_to_many associations with :conditions" do @Album.exclude(:a_tracks=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'A') AND (tracks.album_id IS NOT NULL) AND (tracks.id = 5)))) OR (albums.id IS NULL))" end it "should be able to exclude on one_to_one associations with :conditions" do @Album.exclude(:a_album_info=>@AlbumInfo.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id = 5)))) OR (albums.id IS NULL))" end it "should be able to exclude on many_to_many associations with :conditions" do @Album.exclude(:a_tags=>@Tag.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'A') AND (albums_tags.album_id IS NOT NULL) AND (tags.id = 3)))) OR (albums.id IS NULL))" end it "should be able to exclude on many_to_one associations with block" do @Album.exclude(:b_artist=>@Artist.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id NOT IN (SELECT artists.id FROM artists WHERE ((name = 'B') AND (artists.id IS NOT NULL) AND (artists.id = 3)))) OR (albums.artist_id IS NULL))" end it "should be able to exclude on one_to_many associations with block" do @Album.exclude(:b_tracks=>@Track.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'B') AND (tracks.album_id IS NOT NULL) AND (tracks.id = 5)))) OR (albums.id IS NULL))" end it "should be able to exclude on one_to_one associations with block" do @Album.exclude(:b_album_info=>@AlbumInfo.load(:id=>5, :album_id=>3)).sql.must_equal "SELECT * FROM 
albums WHERE ((albums.id NOT IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id = 5)))) OR (albums.id IS NULL))" end it "should be able to exclude on many_to_many associations with block" do @Album.exclude(:b_tags=>@Tag.load(:id=>3)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'B') AND (albums_tags.album_id IS NOT NULL) AND (tags.id = 3)))) OR (albums.id IS NULL))" end it "should be able to exclude on many_to_one associations with composite keys" do @Album.exclude(:cartist=>@Artist.load(:id1=>3, :id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id1 != 3) OR (albums.artist_id2 != 4) OR (albums.artist_id1 IS NULL) OR (albums.artist_id2 IS NULL))' end it "should be able to exclude on one_to_many associations with composite keys" do @Album.exclude(:ctracks=>@Track.load(:album_id1=>3, :album_id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1 != 3) OR (albums.id2 != 4) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should be able to exclude on one_to_one associations with composite keys" do @Album.exclude(:calbum_info=>@AlbumInfo.load(:album_id1=>3, :album_id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1 != 3) OR (albums.id2 != 4) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should be able to exclude on many_to_many associations with composite keys" do @Album.exclude(:ctags=>@Tag.load(:tid1=>3, :tid2=>4)).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM albums_tags WHERE ((albums_tags.tag_id1 = 3) AND (albums_tags.tag_id2 = 4) AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should be able to exclude on many_to_one associations with :conditions and composite keys" do @Album.exclude(:a_cartist=>@Artist.load(:id=>5, :id1=>3, :id2=>4)).sql.must_equal "SELECT * FROM albums WHERE (((albums.artist_id1, albums.artist_id2) NOT IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'A') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id = 5)))) OR (albums.artist_id1 IS NULL) OR (albums.artist_id2 IS NULL))" end it "should be able to exclude on one_to_many associations with :conditions and composite keys" do @Album.exclude(:a_ctracks=>@Track.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'A') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id = 5)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on one_to_one associations with :conditions and composite keys" do @Album.exclude(:a_calbum_info=>@AlbumInfo.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id = 5)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on many_to_many associations with :conditions and composite keys" do @Album.exclude(:a_ctags=>@Tag.load(:id=>5, :tid1=>3,
:tid2=>4)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'A') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id = 5)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on many_to_one associations with block and composite keys" do @Album.exclude(:b_cartist=>@Artist.load(:id=>5, :id1=>3, :id2=>4)).sql.must_equal "SELECT * FROM albums WHERE (((albums.artist_id1, albums.artist_id2) NOT IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'B') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id = 5)))) OR (albums.artist_id1 IS NULL) OR (albums.artist_id2 IS NULL))" end it "should be able to exclude on one_to_many associations with block and composite keys" do @Album.exclude(:b_ctracks=>@Track.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'B') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id = 5)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on one_to_one associations with block and composite keys" do @Album.exclude(:b_calbum_info=>@AlbumInfo.load(:id=>5, :album_id1=>3, :album_id2=>4)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id = 5)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on many_to_many associations with block and composite keys" do @Album.exclude(:b_ctags=>@Tag.load(:id=>5, :tid1=>3, :tid2=>4)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'B') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id = 5)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to filter on multiple many_to_one associations" do @Album.filter(:artist=>[@Artist.load(:id=>3), @Artist.load(:id=>4)]).sql.must_equal 'SELECT * FROM albums WHERE (albums.artist_id IN (3, 4))' end it "should be able to filter on multiple one_to_many associations" do @Album.filter(:tracks=>[@Track.load(:album_id=>3), @Track.load(:album_id=>4)]).sql.must_equal 'SELECT * FROM albums WHERE (albums.id IN (3, 4))' end it "should be able to filter on multiple one_to_one associations" do @Album.filter(:album_info=>[@AlbumInfo.load(:album_id=>3), @AlbumInfo.load(:album_id=>4)]).sql.must_equal 'SELECT * FROM albums WHERE (albums.id IN (3, 4))' end it "should be able to filter on multiple many_to_many associations" do @Album.filter(:tags=>[@Tag.load(:id=>3), @Tag.load(:id=>4)]).sql.must_equal 'SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM albums_tags WHERE ((albums_tags.tag_id IN (3, 4)) AND (albums_tags.album_id IS NOT NULL))))' end it "should be able to filter on multiple many_to_one associations with :conditions" do 
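    # Passing an array of associated instances filters on all of them at
    # once.  Because this association has :conditions, the keys are checked
    # via a subquery on the associated table rather than a plain IN list.
    # An illustrative sketch (Artist[n] primary key lookups assumed):
    #
    #   Album.where(:a_artist=>[Artist[3], Artist[4]])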
@Album.filter(:a_artist=>[@Artist.load(:id=>3), @Artist.load(:id=>4)]).sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists WHERE ((name = 'A') AND (artists.id IS NOT NULL) AND (artists.id IN (3, 4)))))" end it "should be able to filter on multiple one_to_many associations with :conditions" do @Album.filter(:a_tracks=>[@Track.load(:id=>5, :album_id=>3), @Track.load(:id=>6, :album_id=>4)]).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'A') AND (tracks.album_id IS NOT NULL) AND (tracks.id IN (5, 6)))))" end it "should be able to filter on multiple one_to_one associations with :conditions" do @Album.filter(:a_album_info=>[@AlbumInfo.load(:id=>5, :album_id=>3), @AlbumInfo.load(:id=>6, :album_id=>4)]).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id IN (5, 6)))))" end it "should be able to filter on multiple many_to_many associations with :conditions" do @Album.filter(:a_tags=>[@Tag.load(:id=>3), @Tag.load(:id=>4)]).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'A') AND (albums_tags.album_id IS NOT NULL) AND (tags.id IN (3, 4)))))" end it "should be able to filter on multiple many_to_one associations with block" do @Album.filter(:b_artist=>[@Artist.load(:id=>3), @Artist.load(:id=>4)]).sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists WHERE ((name = 'B') AND (artists.id IS NOT NULL) AND (artists.id IN (3, 4)))))" end it "should be able to filter on multiple one_to_many associations with block" do @Album.filter(:b_tracks=>[@Track.load(:id=>5, :album_id=>3), @Track.load(:id=>6, :album_id=>4)]).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'B') AND (tracks.album_id IS NOT NULL) AND (tracks.id IN (5, 6)))))" end it "should be able to filter on multiple one_to_one associations with block" do @Album.filter(:b_album_info=>[@AlbumInfo.load(:id=>5, :album_id=>3), @AlbumInfo.load(:id=>6, :album_id=>4)]).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id IN (5, 6)))))" end it "should be able to filter on multiple many_to_many associations with block" do @Album.filter(:b_tags=>[@Tag.load(:id=>3), @Tag.load(:id=>4)]).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'B') AND (albums_tags.album_id IS NOT NULL) AND (tags.id IN (3, 4)))))" end it "should be able to filter on multiple many_to_one associations with composite keys" do @Album.filter(:cartist=>[@Artist.load(:id1=>3, :id2=>4), @Artist.load(:id1=>5, :id2=>6)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN ((3, 4), (5, 6)))' end it "should be able to filter on multiple one_to_many associations with composite keys" do @Album.filter(:ctracks=>[@Track.load(:album_id1=>3, :album_id2=>4), @Track.load(:album_id1=>5, :album_id2=>6)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN ((3, 4), (5, 6)))' end it "should be able to filter on multiple one_to_one associations with composite keys" do 
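    # With composite keys, filtering on multiple instances pairs each
    # object's key values into a row-value IN expression such as
    # ((albums.id1, albums.id2) IN ((3, 4), (5, 6))), as the expected SQL
    # below shows.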
@Album.filter(:calbum_info=>[@AlbumInfo.load(:album_id1=>3, :album_id2=>4), @AlbumInfo.load(:album_id1=>5, :album_id2=>6)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN ((3, 4), (5, 6)))' end it "should be able to filter on multiple many_to_many associations with composite keys" do @Album.filter(:ctags=>[@Tag.load(:tid1=>3, :tid2=>4), @Tag.load(:tid1=>5, :tid2=>6)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM albums_tags WHERE (((albums_tags.tag_id1, albums_tags.tag_id2) IN ((3, 4), (5, 6))) AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL))))' end it "should be able to filter on multiple many_to_one associations with :conditions and composite keys" do @Album.filter(:a_cartist=>[@Artist.load(:id=>7, :id1=>3, :id2=>4), @Artist.load(:id=>8, :id1=>5, :id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'A') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id IN (7, 8)))))" end it "should be able to filter on multiple one_to_many associations with :conditions and composite keys" do @Album.filter(:a_ctracks=>[@Track.load(:id=>7, :album_id1=>3, :album_id2=>4), @Track.load(:id=>8, :album_id1=>5, :album_id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'A') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (7, 8)))))" end it "should be able to filter on multiple one_to_one associations with :conditions and composite keys" do @Album.filter(:a_calbum_info=>[@AlbumInfo.load(:id=>7, :album_id1=>3, :album_id2=>4), @AlbumInfo.load(:id=>8, :album_id1=>5, :album_id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id IN (7, 8)))))" end it "should be able to filter on multiple many_to_many associations with :conditions and composite keys" do @Album.filter(:a_ctags=>[@Tag.load(:id=>7, :tid1=>3, :tid2=>4), @Tag.load(:id=>8, :tid1=>5, :tid2=>6)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'A') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id IN (7, 8)))))" end it "should be able to filter on multiple many_to_one associations with block and composite keys" do @Album.filter(:b_cartist=>[@Artist.load(:id=>7, :id1=>3, :id2=>4), @Artist.load(:id=>8, :id1=>5, :id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'B') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id IN (7, 8)))))" end it "should be able to filter on multiple one_to_many associations with block and composite keys" do @Album.filter(:b_ctracks=>[@Track.load(:id=>7, :album_id1=>3, :album_id2=>4), @Track.load(:id=>8, :album_id1=>5, :album_id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM
tracks WHERE ((name = 'B') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (7, 8)))))" end it "should be able to filter on multiple one_to_one associations with block and composite keys" do @Album.filter(:b_calbum_info=>[@AlbumInfo.load(:id=>7, :album_id1=>3, :album_id2=>4), @AlbumInfo.load(:id=>8, :album_id1=>5, :album_id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id IN (7, 8)))))" end it "should be able to filter on multiple many_to_many associations with block and composite keys" do @Album.filter(:b_ctags=>[@Tag.load(:id=>7, :tid1=>3, :tid2=>4), @Tag.load(:id=>8, :tid1=>5, :tid2=>6)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'B') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id IN (7, 8)))))" end it "should be able to exclude on multiple many_to_one associations" do @Album.exclude(:artist=>[@Artist.load(:id=>3), @Artist.load(:id=>4)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id NOT IN (3, 4)) OR (albums.artist_id IS NULL))' end it "should be able to exclude on multiple one_to_many associations" do @Album.exclude(:tracks=>[@Track.load(:album_id=>3), @Track.load(:album_id=>4)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id NOT IN (3, 4)) OR (albums.id IS NULL))' end it "should be able to exclude on multiple one_to_one associations" do @Album.exclude(:album_info=>[@AlbumInfo.load(:album_id=>3), @AlbumInfo.load(:album_id=>4)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id NOT IN (3, 4)) OR (albums.id IS NULL))' end it "should be able to exclude on multiple many_to_many associations" do @Album.exclude(:tags=>[@Tag.load(:id=>3), @Tag.load(:id=>4)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT albums_tags.album_id FROM albums_tags WHERE ((albums_tags.tag_id IN (3, 4)) AND (albums_tags.album_id IS NOT NULL)))) OR (albums.id IS NULL))' end it "should be able to exclude on multiple many_to_one associations with :conditions" do @Album.exclude(:a_artist=>[@Artist.load(:id=>3), @Artist.load(:id=>4)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id NOT IN (SELECT artists.id FROM artists WHERE ((name = 'A') AND (artists.id IS NOT NULL) AND (artists.id IN (3, 4))))) OR (albums.artist_id IS NULL))" end it "should be able to exclude on multiple one_to_many associations with :conditions" do @Album.exclude(:a_tracks=>[@Track.load(:id=>5, :album_id=>3), @Track.load(:id=>6, :album_id=>4)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'A') AND (tracks.album_id IS NOT NULL) AND (tracks.id IN (5, 6))))) OR (albums.id IS NULL))" end it "should be able to exclude on multiple one_to_one associations with :conditions" do @Album.exclude(:a_album_info=>[@AlbumInfo.load(:id=>5, :album_id=>3), @AlbumInfo.load(:id=>6, :album_id=>4)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id IN (5, 6))))) OR (albums.id IS NULL))" end it "should be 
able to exclude on multiple many_to_many associations with :conditions" do @Album.exclude(:a_tags=>[@Tag.load(:id=>3), @Tag.load(:id=>4)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'A') AND (albums_tags.album_id IS NOT NULL) AND (tags.id IN (3, 4))))) OR (albums.id IS NULL))" end it "should be able to exclude on multiple many_to_one associations with block" do @Album.exclude(:b_artist=>[@Artist.load(:id=>3), @Artist.load(:id=>4)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id NOT IN (SELECT artists.id FROM artists WHERE ((name = 'B') AND (artists.id IS NOT NULL) AND (artists.id IN (3, 4))))) OR (albums.artist_id IS NULL))" end it "should be able to exclude on multiple one_to_many associations with block" do @Album.exclude(:b_tracks=>[@Track.load(:id=>5, :album_id=>3), @Track.load(:id=>6, :album_id=>4)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'B') AND (tracks.album_id IS NOT NULL) AND (tracks.id IN (5, 6))))) OR (albums.id IS NULL))" end it "should be able to exclude on multiple one_to_one associations with block" do @Album.exclude(:b_album_info=>[@AlbumInfo.load(:id=>5, :album_id=>3), @AlbumInfo.load(:id=>6, :album_id=>4)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id IN (5, 6))))) OR (albums.id IS NULL))" end it "should be able to exclude on multiple many_to_many associations with block" do @Album.exclude(:b_tags=>[@Tag.load(:id=>3), @Tag.load(:id=>4)]).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'B') AND (albums_tags.album_id IS NOT NULL) AND (tags.id IN (3, 4))))) OR (albums.id IS NULL))" end it "should be able to exclude on multiple many_to_one associations with composite keys" do @Album.exclude(:cartist=>[@Artist.load(:id1=>3, :id2=>4), @Artist.load(:id1=>5, :id2=>6)]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.artist_id1, albums.artist_id2) NOT IN ((3, 4), (5, 6))) OR (albums.artist_id1 IS NULL) OR (albums.artist_id2 IS NULL))' end it "should be able to exclude on multiple one_to_many associations with composite keys" do @Album.exclude(:ctracks=>[@Track.load(:album_id1=>3, :album_id2=>4), @Track.load(:album_id1=>5, :album_id2=>6)]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN ((3, 4), (5, 6))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should be able to exclude on multiple one_to_one associations with composite keys" do @Album.exclude(:calbum_info=>[@AlbumInfo.load(:album_id1=>3, :album_id2=>4), @AlbumInfo.load(:album_id1=>5, :album_id2=>6)]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN ((3, 4), (5, 6))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should be able to exclude on multiple many_to_many associations with composite keys" do @Album.exclude(:ctags=>[@Tag.load(:tid1=>3, :tid2=>4), @Tag.load(:tid1=>5, :tid2=>6)]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM albums_tags WHERE (((albums_tags.tag_id1, albums_tags.tag_id2) IN ((3, 4), (5, 6))) AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 
IS NOT NULL)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should be able to exclude on multiple many_to_one associations with :conditions and composite keys" do @Album.exclude(:a_cartist=>[@Artist.load(:id=>7, :id1=>3, :id2=>4), @Artist.load(:id=>8, :id1=>5, :id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE (((albums.artist_id1, albums.artist_id2) NOT IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'A') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id IN (7, 8))))) OR (albums.artist_id1 IS NULL) OR (albums.artist_id2 IS NULL))" end it "should be able to exclude on multiple one_to_many associations with :conditions and composite keys" do @Album.exclude(:a_ctracks=>[@Track.load(:id=>7, :album_id1=>3, :album_id2=>4), @Track.load(:id=>8, :album_id1=>5, :album_id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'A') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (7, 8))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on multiple one_to_one associations with :conditions and composite keys" do @Album.exclude(:a_calbum_info=>[@AlbumInfo.load(:id=>7, :album_id1=>3, :album_id2=>4), @AlbumInfo.load(:id=>8, :album_id1=>5, :album_id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id IN (7, 8))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on multiple many_to_many associations with :conditions and composite keys" do @Album.exclude(:a_ctags=>[@Tag.load(:id=>7, :tid1=>3, :tid2=>4), @Tag.load(:id=>8, :tid1=>5, :tid2=>6)]).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'A') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id IN (7, 8))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on multiple many_to_one associations with block and composite keys" do @Album.exclude(:b_cartist=>[@Artist.load(:id=>7, :id1=>3, :id2=>4), @Artist.load(:id=>8, :id1=>5, :id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE (((albums.artist_id1, albums.artist_id2) NOT IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'B') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id IN (7, 8))))) OR (albums.artist_id1 IS NULL) OR (albums.artist_id2 IS NULL))" end it "should be able to exclude on multiple one_to_many associations with block and composite keys" do @Album.exclude(:b_ctracks=>[@Track.load(:id=>7, :album_id1=>3, :album_id2=>4), @Track.load(:id=>8, :album_id1=>5, :album_id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'B') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (7, 8))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on multiple one_to_one associations with block and composite keys" do 
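    # Exclusion negates the filter subquery and appends explicit
    # "OR (key IS NULL)" checks, so rows whose key columns are NULL (and so
    # can never be associated) are also returned by exclude, as the expected
    # SQL below shows.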
@Album.exclude(:b_calbum_info=>[@AlbumInfo.load(:id=>7, :album_id1=>3, :album_id2=>4), @AlbumInfo.load(:id=>8, :album_id1=>5, :album_id2=>6)]).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id IN (7, 8))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on multiple many_to_many associations with block and composite keys" do @Album.exclude(:b_ctags=>[@Tag.load(:id=>7, :tid1=>3, :tid2=>4), @Tag.load(:id=>8, :tid1=>5, :tid2=>6)]).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'B') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id IN (7, 8))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to handle NULL values when filtering many_to_one associations" do @Album.filter(:artist=>@Artist.new).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end it "should be able to handle NULL values when filtering one_to_many associations" do @Album.filter(:tracks=>@Track.new).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end it "should be able to handle NULL values when filtering one_to_one associations" do @Album.filter(:album_info=>@AlbumInfo.new).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end it "should be able to handle NULL values when filtering many_to_many associations" do @Album.filter(:tags=>@Tag.new).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end it "should be able to handle filtering with NULL values for many_to_one associations with composite keys" do @Album.filter(:cartist=>@Artist.load(:id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' @Album.filter(:cartist=>@Artist.load(:id1=>3)).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' @Album.filter(:cartist=>@Artist.new).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end it "should be able to filter with NULL values for one_to_many associations with composite keys" do @Album.filter(:ctracks=>@Track.load(:album_id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' @Album.filter(:ctracks=>@Track.load(:album_id1=>3)).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' @Album.filter(:ctracks=>@Track.new).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end it "should be able to filter with NULL values for one_to_one associations with composite keys" do @Album.filter(:calbum_info=>@AlbumInfo.load(:album_id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' @Album.filter(:calbum_info=>@AlbumInfo.load(:album_id1=>3)).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' @Album.filter(:calbum_info=>@AlbumInfo.new).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end it "should be able to filter with NULL values for many_to_many associations with composite keys" do @Album.filter(:ctags=>@Tag.load(:tid1=>3)).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' @Album.filter(:ctags=>@Tag.load(:tid2=>4)).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' @Album.filter(:ctags=>@Tag.new).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end it "should be able to handle NULL values when excluding many_to_one associations" do @Album.exclude(:artist=>@Artist.new).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' 
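    # When any part of the composite key is missing, no row can match the
    # association, so the exclusion reduces to a constant true filter ('t'
    # in the mock adapter's SQL).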
end it "should be able to handle NULL values when excluding one_to_many associations" do @Album.exclude(:tracks=>@Track.new).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' end it "should be able to handle NULL values when excluding one_to_one associations" do @Album.exclude(:album_info=>@AlbumInfo.new).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' end it "should be able to handle NULL values when excluding many_to_many associations" do @Album.exclude(:tags=>@Tag.new).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' end it "should be able to handle excluding with NULL values for many_to_one associations with composite keys" do @Album.exclude(:cartist=>@Artist.load(:id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' @Album.exclude(:cartist=>@Artist.load(:id1=>3)).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' @Album.exclude(:cartist=>@Artist.new).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' end it "should be able to excluding with NULL values for one_to_many associations with composite keys" do @Album.exclude(:ctracks=>@Track.load(:album_id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' @Album.exclude(:ctracks=>@Track.load(:album_id1=>3)).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' @Album.exclude(:ctracks=>@Track.new).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' end it "should be able to excluding with NULL values for one_to_one associations with composite keys" do @Album.exclude(:calbum_info=>@AlbumInfo.load(:album_id2=>4)).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' @Album.exclude(:calbum_info=>@AlbumInfo.load(:album_id1=>3)).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' @Album.exclude(:calbum_info=>@AlbumInfo.new).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' end it "should be able to excluding with NULL values for many_to_many associations with composite keys" do @Album.exclude(:ctags=>@Tag.load(:tid1=>3)).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' @Album.exclude(:ctags=>@Tag.load(:tid2=>4)).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' @Album.exclude(:ctags=>@Tag.new).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' end it "should be able to handle NULL values when filtering multiple many_to_one associations" do @Album.filter(:artist=>[@Artist.load(:id=>3), @Artist.new]).sql.must_equal 'SELECT * FROM albums WHERE (albums.artist_id IN (3))' @Album.filter(:artist=>[@Artist.new, @Artist.new]).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end it "should be able to handle NULL values when filtering multiple one_to_many associations" do @Album.filter(:tracks=>[@Track.load(:album_id=>3), @Track.new]).sql.must_equal 'SELECT * FROM albums WHERE (albums.id IN (3))' @Album.filter(:tracks=>[@Track.new, @Track.new]).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end it "should be able to handle NULL values when filtering multiple one_to_one associations" do @Album.filter(:album_info=>[@AlbumInfo.load(:album_id=>3), @AlbumInfo.new]).sql.must_equal 'SELECT * FROM albums WHERE (albums.id IN (3))' @Album.filter(:album_info=>[@AlbumInfo.new, @AlbumInfo.new]).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end it "should be able to handle NULL values when filtering multiple many_to_many associations" do @Album.filter(:tags=>[@Tag.load(:id=>3), @Tag.new]).sql.must_equal 'SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM albums_tags WHERE ((albums_tags.tag_id IN (3)) AND (albums_tags.album_id IS NOT NULL))))' @Album.filter(:tags=>[@Tag.new, @Tag.new]).sql.must_equal 'SELECT * FROM albums WHERE \'f\'' end 
it "should be able to handle NULL values when filtering multiple many_to_one associations with composite keys" do @Album.filter(:cartist=>[@Artist.load(:id1=>3, :id2=>4), @Artist.load(:id1=>3)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN ((3, 4)))' @Album.filter(:cartist=>[@Artist.load(:id1=>3, :id2=>4), @Artist.new]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN ((3, 4)))' end it "should be able handle NULL values when filtering multiple one_to_many associations with composite keys" do @Album.filter(:ctracks=>[@Track.load(:album_id1=>3, :album_id2=>4), @Track.load(:album_id1=>3)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN ((3, 4)))' @Album.filter(:ctracks=>[@Track.load(:album_id1=>3, :album_id2=>4), @Track.new]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN ((3, 4)))' end it "should be able to handle NULL values when filtering multiple one_to_one associations with composite keys" do @Album.filter(:calbum_info=>[@AlbumInfo.load(:album_id1=>3, :album_id2=>4), @AlbumInfo.load(:album_id1=>5)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN ((3, 4)))' @Album.filter(:calbum_info=>[@AlbumInfo.load(:album_id1=>3, :album_id2=>4), @AlbumInfo.new]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN ((3, 4)))' end it "should be able to handle NULL values when filtering multiple many_to_many associations with composite keys" do @Album.filter(:ctags=>[@Tag.load(:tid1=>3, :tid2=>4), @Tag.load(:tid1=>5)]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM albums_tags WHERE (((albums_tags.tag_id1, albums_tags.tag_id2) IN ((3, 4))) AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL))))' @Album.filter(:ctags=>[@Tag.load(:tid1=>3, :tid2=>4), @Tag.new]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM albums_tags WHERE (((albums_tags.tag_id1, albums_tags.tag_id2) IN ((3, 4))) AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL))))' end it "should be able to handle NULL values when excluding multiple many_to_one associations" do @Album.exclude(:artist=>[@Artist.load(:id=>3), @Artist.new]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id NOT IN (3)) OR (albums.artist_id IS NULL))' @Album.exclude(:artist=>[@Artist.new, @Artist.new]).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' end it "should be able to handle NULL values when excluding multiple one_to_many associations" do @Album.exclude(:tracks=>[@Track.load(:album_id=>3), @Track.new]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id NOT IN (3)) OR (albums.id IS NULL))' @Album.exclude(:tracks=>[@Track.new, @Track.new]).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' end it "should be able to handle NULL values when excluding multiple one_to_one associations" do @Album.exclude(:album_info=>[@AlbumInfo.load(:album_id=>3), @AlbumInfo.new]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id NOT IN (3)) OR (albums.id IS NULL))' @Album.exclude(:album_info=>[@AlbumInfo.new, @AlbumInfo.new]).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' end it "should be able to handle NULL values when excluding multiple many_to_many associations" do @Album.exclude(:tags=>[@Tag.load(:id=>3), @Tag.new]).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id NOT 
IN (SELECT albums_tags.album_id FROM albums_tags WHERE ((albums_tags.tag_id IN (3)) AND (albums_tags.album_id IS NOT NULL)))) OR (albums.id IS NULL))' @Album.exclude(:tags=>[@Tag.new, @Tag.new]).sql.must_equal 'SELECT * FROM albums WHERE \'t\'' end it "should be able to handle NULL values when excluding multiple many_to_one associations with composite keys" do @Album.exclude(:cartist=>[@Artist.load(:id1=>3, :id2=>4), @Artist.load(:id1=>3)]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.artist_id1, albums.artist_id2) NOT IN ((3, 4))) OR (albums.artist_id1 IS NULL) OR (albums.artist_id2 IS NULL))' @Album.exclude(:cartist=>[@Artist.load(:id1=>3, :id2=>4), @Artist.new]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.artist_id1, albums.artist_id2) NOT IN ((3, 4))) OR (albums.artist_id1 IS NULL) OR (albums.artist_id2 IS NULL))' end it "should be able to handle NULL values when excluding multiple one_to_many associations with composite keys" do @Album.exclude(:ctracks=>[@Track.load(:album_id1=>3, :album_id2=>4), @Track.load(:album_id1=>3)]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN ((3, 4))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' @Album.exclude(:ctracks=>[@Track.load(:album_id1=>3, :album_id2=>4), @Track.new]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN ((3, 4))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should be able to handle NULL values when excluding multiple one_to_one associations with composite keys" do @Album.exclude(:calbum_info=>[@AlbumInfo.load(:album_id1=>3, :album_id2=>4), @AlbumInfo.load(:album_id1=>5)]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN ((3, 4))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' @Album.exclude(:calbum_info=>[@AlbumInfo.load(:album_id1=>3, :album_id2=>4), @AlbumInfo.new]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN ((3, 4))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should be able to handle NULL values when excluding multiple many_to_many associations with composite keys" do @Album.exclude(:ctags=>[@Tag.load(:tid1=>3, :tid2=>4), @Tag.load(:tid1=>5)]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM albums_tags WHERE (((albums_tags.tag_id1, albums_tags.tag_id2) IN ((3, 4))) AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' @Album.exclude(:ctags=>[@Tag.load(:tid1=>3, :tid2=>4), @Tag.new]).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM albums_tags WHERE (((albums_tags.tag_id1, albums_tags.tag_id2) IN ((3, 4))) AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should not allow filtering on associations with allow_filtering_by: false" do proc{@Album.filter(:no_artist=>@Artist.filter(:x=>1)).sql}.must_raise Sequel::Error end it "should be able to filter on many_to_one association datasets" do @Album.filter(:artist=>@Artist.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists WHERE ((x = 1) AND (artists.id IS NOT NULL))))' end it "should be able to filter on one_to_many association datasets" do @Album.filter(:tracks=>@Track.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE
(albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((x = 1) AND (tracks.album_id IS NOT NULL))))' end it "should be able to filter on one_to_one association datasets" do @Album.filter(:album_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE (albums.id IN (SELECT album_infos.album_id FROM album_infos WHERE ((x = 1) AND (album_infos.album_id IS NOT NULL))))' end it "should be able to filter on many_to_many association datasets" do @Album.filter(:tags=>@Tag.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM albums_tags WHERE ((albums_tags.tag_id IN (SELECT tags.id FROM tags WHERE ((x = 1) AND (tags.id IS NOT NULL)))) AND (albums_tags.album_id IS NOT NULL))))' end it "should be able to filter on many_to_one association datasets with :conditions" do @Album.filter(:a_artist=>@Artist.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists WHERE ((name = 'A') AND (artists.id IS NOT NULL) AND (artists.id IN (SELECT artists.id FROM artists WHERE (x = 1))))))" end it "should be able to filter on one_to_many association datasets with :conditions" do @Album.filter(:a_tracks=>@Track.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'A') AND (tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT tracks.id FROM tracks WHERE (x = 1))))))" end it "should be able to filter on one_to_one association datasets with :conditions" do @Album.filter(:a_album_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id IN (SELECT album_infos.id FROM album_infos WHERE (x = 1))))))" end it "should be able to filter on many_to_many association datasets with :conditions" do @Album.filter(:a_tags=>@Tag.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'A') AND (albums_tags.album_id IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1))))))" end it "should be able to filter on many_to_one association datasets with block" do @Album.filter(:b_artist=>@Artist.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (albums.artist_id IN (SELECT artists.id FROM artists WHERE ((name = 'B') AND (artists.id IS NOT NULL) AND (artists.id IN (SELECT artists.id FROM artists WHERE (x = 1))))))" end it "should be able to filter on one_to_many association datasets with block" do @Album.filter(:b_tracks=>@Track.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'B') AND (tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT tracks.id FROM tracks WHERE (x = 1))))))" end it "should be able to filter on one_to_one association datasets with block" do @Album.filter(:b_album_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id IN (SELECT album_infos.id FROM album_infos WHERE (x = 1))))))" end it "should be able to filter on many_to_many association datasets with block" do @Album.filter(:b_tags=>@Tag.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (albums.id IN (SELECT albums_tags.album_id FROM tags 
INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'B') AND (albums_tags.album_id IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1))))))" end it "should be able to filter on many_to_one association datasets with composite keys" do @Album.filter(:cartist=>@Artist.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((x = 1) AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL))))' end it "should be able to filter on one_to_many association datasets with composite keys" do @Album.filter(:ctracks=>@Track.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((x = 1) AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL))))' end it "should be able to filter on one_to_one association datasets with composite keys" do @Album.filter(:calbum_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((x = 1) AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL))))' end it "should be able to filter on many_to_many association datasets with composite keys" do @Album.filter(:ctags=>@Tag.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM albums_tags WHERE (((albums_tags.tag_id1, albums_tags.tag_id2) IN (SELECT tags.tid1, tags.tid2 FROM tags WHERE ((x = 1) AND (tags.tid1 IS NOT NULL) AND (tags.tid2 IS NOT NULL)))) AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL))))' end it "should be able to filter on many_to_one association datasets with :conditions and composite keys" do @Album.filter(:a_cartist=>@Artist.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'A') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id IN (SELECT artists.id FROM artists WHERE (x = 1))))))" end it "should be able to filter on one_to_many association datasets with :conditions and composite keys" do @Album.filter(:a_ctracks=>@Track.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'A') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (SELECT tracks.id FROM tracks WHERE (x = 1))))))" end it "should be able to filter on one_to_one association datasets with :conditions and composite keys" do @Album.filter(:a_calbum_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id IN (SELECT album_infos.id FROM album_infos WHERE (x = 1))))))" end it "should be able to filter on many_to_many association datasets with :conditions and composite keys" do @Album.filter(:a_ctags=>@Tag.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = 
tags.tid2)) WHERE ((name = 'A') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1))))))" end it "should be able to filter on many_to_one association datasets with block and composite keys" do @Album.filter(:b_cartist=>@Artist.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id1, albums.artist_id2) IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'B') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id IN (SELECT artists.id FROM artists WHERE (x = 1))))))" end it "should be able to filter on one_to_many association datasets with block and composite keys" do @Album.filter(:b_ctracks=>@Track.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'B') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (SELECT tracks.id FROM tracks WHERE (x = 1))))))" end it "should be able to filter on one_to_one association datasets with block and composite keys" do @Album.filter(:b_calbum_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id IN (SELECT album_infos.id FROM album_infos WHERE (x = 1))))))" end it "should be able to filter on many_to_many association datasets with block and composite keys" do @Album.filter(:b_ctags=>@Tag.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id1, albums.id2) IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'B') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1))))))" end it "should be able to exclude on many_to_one association datasets" do @Album.exclude(:artist=>@Artist.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.artist_id NOT IN (SELECT artists.id FROM artists WHERE ((x = 1) AND (artists.id IS NOT NULL)))) OR (albums.artist_id IS NULL))' end it "should be able to exclude on one_to_many association datasets" do @Album.exclude(:tracks=>@Track.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT tracks.album_id FROM tracks WHERE ((x = 1) AND (tracks.album_id IS NOT NULL)))) OR (albums.id IS NULL))' end it "should be able to exclude on one_to_one association datasets" do @Album.exclude(:album_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT album_infos.album_id FROM album_infos WHERE ((x = 1) AND (album_infos.album_id IS NOT NULL)))) OR (albums.id IS NULL))' end it "should be able to exclude on many_to_many association datasets" do @Album.exclude(:tags=>@Tag.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT albums_tags.album_id FROM albums_tags WHERE ((albums_tags.tag_id IN (SELECT tags.id FROM tags WHERE ((x = 1) AND (tags.id IS NOT NULL)))) AND (albums_tags.album_id IS NOT NULL)))) OR (albums.id IS NULL))' end it "should be able to exclude on many_to_one association datasets with :conditions" do @Album.exclude(:a_artist=>@Artist.filter(:x=>1)).sql.must_equal "SELECT * FROM albums 
WHERE ((albums.artist_id NOT IN (SELECT artists.id FROM artists WHERE ((name = 'A') AND (artists.id IS NOT NULL) AND (artists.id IN (SELECT artists.id FROM artists WHERE (x = 1)))))) OR (albums.artist_id IS NULL))" end it "should be able to exclude on one_to_many association datasets with :conditions" do @Album.exclude(:a_tracks=>@Track.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'A') AND (tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT tracks.id FROM tracks WHERE (x = 1)))))) OR (albums.id IS NULL))" end it "should be able to exclude on one_to_one association datasets with :conditions" do @Album.exclude(:a_album_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id IN (SELECT album_infos.id FROM album_infos WHERE (x = 1)))))) OR (albums.id IS NULL))" end it "should be able to exclude on many_to_many association datasets with :conditions" do @Album.exclude(:a_tags=>@Tag.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'A') AND (albums_tags.album_id IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1)))))) OR (albums.id IS NULL))" end it "should be able to exclude on many_to_one association datasets with block" do @Album.exclude(:b_artist=>@Artist.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.artist_id NOT IN (SELECT artists.id FROM artists WHERE ((name = 'B') AND (artists.id IS NOT NULL) AND (artists.id IN (SELECT artists.id FROM artists WHERE (x = 1)))))) OR (albums.artist_id IS NULL))" end it "should be able to exclude on one_to_many association datasets with block" do @Album.exclude(:b_tracks=>@Track.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT tracks.album_id FROM tracks WHERE ((name = 'B') AND (tracks.album_id IS NOT NULL) AND (tracks.id IN (SELECT tracks.id FROM tracks WHERE (x = 1)))))) OR (albums.id IS NULL))" end it "should be able to exclude on one_to_one association datasets with block" do @Album.exclude(:b_album_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT album_infos.album_id FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id IS NOT NULL) AND (album_infos.id IN (SELECT album_infos.id FROM album_infos WHERE (x = 1)))))) OR (albums.id IS NULL))" end it "should be able to exclude on many_to_many association datasets with block" do @Album.exclude(:b_tags=>@Tag.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE ((albums.id NOT IN (SELECT albums_tags.album_id FROM tags INNER JOIN albums_tags ON (albums_tags.tag_id = tags.id) WHERE ((name = 'B') AND (albums_tags.album_id IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1)))))) OR (albums.id IS NULL))" end it "should be able to exclude on many_to_one association datasets with composite keys" do @Album.exclude(:cartist=>@Artist.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE (((albums.artist_id1, albums.artist_id2) NOT IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((x = 1) AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL)))) OR (albums.artist_id1 IS NULL) OR (albums.artist_id2 IS NULL))' end it "should be able to exclude on one_to_many association 
datasets with composite keys" do @Album.exclude(:ctracks=>@Track.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((x = 1) AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should be able to exclude on one_to_one association datasets with composite keys" do @Album.exclude(:calbum_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((x = 1) AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should be able to exclude on many_to_many association datasets with composite keys" do @Album.exclude(:ctags=>@Tag.filter(:x=>1)).sql.must_equal 'SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM albums_tags WHERE (((albums_tags.tag_id1, albums_tags.tag_id2) IN (SELECT tags.tid1, tags.tid2 FROM tags WHERE ((x = 1) AND (tags.tid1 IS NOT NULL) AND (tags.tid2 IS NOT NULL)))) AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL)))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))' end it "should be able to exclude on many_to_one association datasets with :conditions and composite keys" do @Album.exclude(:a_cartist=>@Artist.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (((albums.artist_id1, albums.artist_id2) NOT IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'A') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id IN (SELECT artists.id FROM artists WHERE (x = 1)))))) OR (albums.artist_id1 IS NULL) OR (albums.artist_id2 IS NULL))" end it "should be able to exclude on one_to_many association datasets with :conditions and composite keys" do @Album.exclude(:a_ctracks=>@Track.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'A') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (SELECT tracks.id FROM tracks WHERE (x = 1)))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on one_to_one association datasets with :conditions and composite keys" do @Album.exclude(:a_calbum_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'A') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id IN (SELECT album_infos.id FROM album_infos WHERE (x = 1)))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on many_to_many association datasets with :conditions and composite keys" do @Album.exclude(:a_ctags=>@Tag.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'A') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1)))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should 
be able to exclude on many_to_one association datasets with block and composite keys" do @Album.exclude(:b_cartist=>@Artist.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (((albums.artist_id1, albums.artist_id2) NOT IN (SELECT artists.id1, artists.id2 FROM artists WHERE ((name = 'B') AND (artists.id1 IS NOT NULL) AND (artists.id2 IS NOT NULL) AND (artists.id IN (SELECT artists.id FROM artists WHERE (x = 1)))))) OR (albums.artist_id1 IS NULL) OR (albums.artist_id2 IS NULL))" end it "should be able to exclude on one_to_many association datasets with block and composite keys" do @Album.exclude(:b_ctracks=>@Track.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT tracks.album_id1, tracks.album_id2 FROM tracks WHERE ((name = 'B') AND (tracks.album_id1 IS NOT NULL) AND (tracks.album_id2 IS NOT NULL) AND (tracks.id IN (SELECT tracks.id FROM tracks WHERE (x = 1)))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on one_to_one association datasets with block and composite keys" do @Album.exclude(:b_calbum_info=>@AlbumInfo.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT album_infos.album_id1, album_infos.album_id2 FROM album_infos WHERE ((name = 'B') AND (album_infos.album_id1 IS NOT NULL) AND (album_infos.album_id2 IS NOT NULL) AND (album_infos.id IN (SELECT album_infos.id FROM album_infos WHERE (x = 1)))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should be able to exclude on many_to_many association datasets with block and composite keys" do @Album.exclude(:b_ctags=>@Tag.filter(:x=>1)).sql.must_equal "SELECT * FROM albums WHERE (((albums.id1, albums.id2) NOT IN (SELECT albums_tags.album_id1, albums_tags.album_id2 FROM tags INNER JOIN albums_tags ON ((albums_tags.tag_id1 = tags.tid1) AND (albums_tags.tag_id2 = tags.tid2)) WHERE ((name = 'B') AND (albums_tags.album_id1 IS NOT NULL) AND (albums_tags.album_id2 IS NOT NULL) AND (tags.id IN (SELECT tags.id FROM tags WHERE (x = 1)))))) OR (albums.id1 IS NULL) OR (albums.id2 IS NULL))" end it "should do a regular IN query if the dataset for a different model is used" do @Album.filter(:artist=>@Album.select(:x)).sql.must_equal 'SELECT * FROM albums WHERE (artist IN (SELECT x FROM albums))' end it "should do a regular IN query if a non-model dataset is used" do @Album.filter(:artist=>@Album.db.from(:albums).select(:x)).sql.must_equal 'SELECT * FROM albums WHERE (artist IN (SELECT x FROM albums))' end end describe "Sequel::Model Associations with clashing column names" do before do @db = Sequel.mock(:fetch=>{:id=>1, :object_id=>2}) @Foo = Class.new(Sequel::Model(@db[:foos])) @Bar = Class.new(Sequel::Model(@db[:bars])) @Foo.columns :id, :object_id @Bar.columns :id, :object_id @Foo.def_column_alias(:obj_id, :object_id) @Bar.def_column_alias(:obj_id, :object_id) @Foo.one_to_many :bars, :primary_key=>:obj_id, :primary_key_column=>:object_id, :key=>:object_id, :key_method=>:obj_id, :class=>@Bar @Foo.one_to_one :bar, :primary_key=>:obj_id, :primary_key_column=>:object_id, :key=>:object_id, :key_method=>:obj_id, :class=>@Bar @Bar.many_to_one :foo, :key=>:obj_id, :key_column=>:object_id, :primary_key=>:object_id, :primary_key_method=>:obj_id, :class=>@Foo @Foo.many_to_many :mtmbars, :join_table=>:bars_foos, :left_primary_key=>:obj_id, :left_primary_key_column=>:object_id, :right_primary_key=>:object_id, :right_primary_key_method=>:obj_id, :left_key=>:foo_id, :right_key=>:object_id, :class=>@Bar 
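# The object_id column here clashes with Ruby's built-in Object#object_id, so
# the setup reads values through the obj_id alias created by def_column_alias,
# wiring it in via the *_method/:primary_key options while the plain
# column/key options keep naming the real SQL column. A minimal sketch of the
# alias itself:
#
#   @Foo.def_column_alias(:obj_id, :object_id)  # obj_id returns values[:object_id]
#   @Foo.load(:object_id=>2).obj_id              # => 2, without touching Object#object_id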
@Bar.many_to_many :mtmfoos, :join_table=>:bars_foos, :left_primary_key=>:obj_id, :left_primary_key_column=>:object_id, :right_primary_key=>:object_id, :right_primary_key_method=>:obj_id, :left_key=>:object_id, :right_key=>:foo_id, :class=>@Foo @foo = @Foo.load(:id=>1, :object_id=>2) @bar = @Bar.load(:id=>1, :object_id=>2) @db.sqls end it "should have working regular association methods" do @Bar.first.foo.must_equal @foo @db.sqls.must_equal ["SELECT * FROM bars LIMIT 1", "SELECT * FROM foos WHERE (foos.object_id = 2) LIMIT 1"] @Foo.first.bars.must_equal [@bar] @db.sqls.must_equal ["SELECT * FROM foos LIMIT 1", "SELECT * FROM bars WHERE (bars.object_id = 2)"] @Foo.first.bar.must_equal @bar @db.sqls.must_equal ["SELECT * FROM foos LIMIT 1", "SELECT * FROM bars WHERE (bars.object_id = 2) LIMIT 1"] @Foo.first.mtmbars.must_equal [@bar] @db.sqls.must_equal ["SELECT * FROM foos LIMIT 1", "SELECT bars.* FROM bars INNER JOIN bars_foos ON (bars_foos.object_id = bars.object_id) WHERE (bars_foos.foo_id = 2)"] @Bar.first.mtmfoos.must_equal [@foo] @db.sqls.must_equal ["SELECT * FROM bars LIMIT 1", "SELECT foos.* FROM foos INNER JOIN bars_foos ON (bars_foos.foo_id = foos.object_id) WHERE (bars_foos.object_id = 2)"] end it "should have working eager loading methods" do @Bar.eager(:foo).all.map{|o| [o, o.foo]}.must_equal [[@bar, @foo]] @db.sqls.must_equal ["SELECT * FROM bars", "SELECT * FROM foos WHERE (foos.object_id IN (2))"] @Foo.eager(:bars).all.map{|o| [o, o.bars]}.must_equal [[@foo, [@bar]]] @db.sqls.must_equal ["SELECT * FROM foos", "SELECT * FROM bars WHERE (bars.object_id IN (2))"] @Foo.eager(:bar).all.map{|o| [o, o.bar]}.must_equal [[@foo, @bar]] @db.sqls.must_equal ["SELECT * FROM foos", "SELECT * FROM bars WHERE (bars.object_id IN (2))"] @db.fetch = [[{:id=>1, :object_id=>2}], [{:id=>1, :object_id=>2, :x_foreign_key_x=>2}]] @Foo.eager(:mtmbars).all.map{|o| [o, o.mtmbars]}.must_equal [[@foo, [@bar]]] @db.sqls.must_equal ["SELECT * FROM foos", "SELECT bars.*, bars_foos.foo_id AS x_foreign_key_x FROM bars INNER JOIN bars_foos ON (bars_foos.object_id = bars.object_id) WHERE (bars_foos.foo_id IN (2))"] @db.fetch = [[{:id=>1, :object_id=>2}], [{:id=>1, :object_id=>2, :x_foreign_key_x=>2}]] @Bar.eager(:mtmfoos).all.map{|o| [o, o.mtmfoos]}.must_equal [[@bar, [@foo]]] @db.sqls.must_equal ["SELECT * FROM bars", "SELECT foos.*, bars_foos.object_id AS x_foreign_key_x FROM foos INNER JOIN bars_foos ON (bars_foos.foo_id = foos.object_id) WHERE (bars_foos.object_id IN (2))"] end it "should have working eager graphing methods" do @db.fetch = {:id=>1, :object_id=>2, :foo_id=>1, :foo_object_id=>2} @Bar.eager_graph(:foo).all.map{|o| [o, o.foo]}.must_equal [[@bar, @foo]] @db.sqls.must_equal ["SELECT bars.id, bars.object_id, foo.id AS foo_id, foo.object_id AS foo_object_id FROM bars LEFT OUTER JOIN foos AS foo ON (foo.object_id = bars.object_id)"] @db.fetch = {:id=>1, :object_id=>2, :bars_id=>1, :bars_object_id=>2} @Foo.eager_graph(:bars).all.map{|o| [o, o.bars]}.must_equal [[@foo, [@bar]]] @db.sqls.must_equal ["SELECT foos.id, foos.object_id, bars.id AS bars_id, bars.object_id AS bars_object_id FROM foos LEFT OUTER JOIN bars ON (bars.object_id = foos.object_id)"] @db.fetch = {:id=>1, :object_id=>2, :bar_id=>1, :bar_object_id=>2} @Foo.eager_graph(:bar).all.map{|o| [o, o.bar]}.must_equal [[@foo, @bar]] @db.sqls.must_equal ["SELECT foos.id, foos.object_id, bar.id AS bar_id, bar.object_id AS bar_object_id FROM foos LEFT OUTER JOIN bars AS bar ON (bar.object_id = foos.object_id)"] @db.fetch = {:id=>1, :object_id=>2, 
:mtmfoos_id=>1, :mtmfoos_object_id=>2} @Bar.eager_graph(:mtmfoos).all.map{|o| [o, o.mtmfoos]}.must_equal [[@bar, [@foo]]] @db.sqls.must_equal ["SELECT bars.id, bars.object_id, mtmfoos.id AS mtmfoos_id, mtmfoos.object_id AS mtmfoos_object_id FROM bars LEFT OUTER JOIN bars_foos ON (bars_foos.object_id = bars.object_id) LEFT OUTER JOIN foos AS mtmfoos ON (mtmfoos.object_id = bars_foos.foo_id)"] @db.fetch = {:id=>1, :object_id=>2, :mtmbars_id=>1, :mtmbars_object_id=>2} @Foo.eager_graph(:mtmbars).all.map{|o| [o, o.mtmbars]}.must_equal [[@foo, [@bar]]] @db.sqls.must_equal ["SELECT foos.id, foos.object_id, mtmbars.id AS mtmbars_id, mtmbars.object_id AS mtmbars_object_id FROM foos LEFT OUTER JOIN bars_foos ON (bars_foos.foo_id = foos.object_id) LEFT OUTER JOIN bars AS mtmbars ON (mtmbars.object_id = bars_foos.object_id)"] end it "should not have filter by associations code break if using IN/NOT IN with a set-returning function" do @Bar.where(Sequel::SQL::BooleanExpression.new(:IN, :foo, Sequel.function(:srf))).sql.must_equal 'SELECT * FROM bars WHERE (foo IN srf())' @Bar.exclude(Sequel::SQL::BooleanExpression.new(:IN, :foo, Sequel.function(:srf))).sql.must_equal 'SELECT * FROM bars WHERE (foo NOT IN srf())' end it "should have working eager graphing methods when using SQL::Identifier inside SQL::AliasedExpression" do @db.fetch = {:id=>1, :object_id=>2, :f_id=>1, :f_object_id=>2} @Bar.eager_graph(Sequel[:foo].as(:f)).all.map{|o| [o, o.foo]}.must_equal [[@bar, @foo]] @db.sqls.must_equal ["SELECT bars.id, bars.object_id, f.id AS f_id, f.object_id AS f_object_id FROM bars LEFT OUTER JOIN foos AS f ON (f.object_id = bars.object_id)"] end it "should have working filter by associations with model instances" do @Bar.first(:foo=>@foo).must_equal @bar @db.sqls.must_equal ["SELECT * FROM bars WHERE (bars.object_id = 2) LIMIT 1"] @Foo.first(:bars=>@bar).must_equal @foo @db.sqls.must_equal ["SELECT * FROM foos WHERE (foos.object_id = 2) LIMIT 1"] @Foo.first(:bar=>@bar).must_equal @foo @db.sqls.must_equal ["SELECT * FROM foos WHERE (foos.object_id = 2) LIMIT 1"] @Foo.first(:mtmbars=>@bar).must_equal @foo @db.sqls.must_equal ["SELECT * FROM foos WHERE (foos.object_id IN (SELECT bars_foos.foo_id FROM bars_foos WHERE ((bars_foos.object_id = 2) AND (bars_foos.foo_id IS NOT NULL)))) LIMIT 1"] @Bar.first(:mtmfoos=>@foo).must_equal @bar @db.sqls.must_equal ["SELECT * FROM bars WHERE (bars.object_id IN (SELECT bars_foos.object_id FROM bars_foos WHERE ((bars_foos.foo_id = 2) AND (bars_foos.object_id IS NOT NULL)))) LIMIT 1"] end it "should have working filter by associations for associations with :conditions with model instances" do @Bar.many_to_one :foo, :clone=>:foo, :conditions=>{:name=>'A'} @Foo.one_to_many :bars, :clone=>:bars, :conditions=>{:name=>'A'} @Foo.one_to_one :bar, :clone=>:bars @Foo.many_to_many :mtmbars, :clone=>:mtmbars, :conditions=>{:name=>'A'} @Bar.many_to_many :mtmfoos, :clone=>:mtmfoos, :conditions=>{:name=>'A'} @Bar.where(:foo=>@foo).sql.must_equal "SELECT * FROM bars WHERE (bars.object_id IN (SELECT foos.object_id FROM foos WHERE ((name = 'A') AND (foos.object_id IS NOT NULL) AND (foos.id = 1))))" @Foo.where(:bars=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_id IN (SELECT bars.object_id FROM bars WHERE ((name = 'A') AND (bars.object_id IS NOT NULL) AND (bars.id = 1))))" @Foo.where(:bar=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_id IN (SELECT bars.object_id FROM bars WHERE ((name = 'A') AND (bars.object_id IS NOT NULL) AND (bars.id = 1))))"
@Foo.where(:mtmbars=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_id IN (SELECT bars_foos.foo_id FROM bars INNER JOIN bars_foos ON (bars_foos.object_id = bars.object_id) WHERE ((name = 'A') AND (bars_foos.foo_id IS NOT NULL) AND (bars.id = 1))))" @Bar.where(:mtmfoos=>@foo).sql.must_equal "SELECT * FROM bars WHERE (bars.object_id IN (SELECT bars_foos.object_id FROM foos INNER JOIN bars_foos ON (bars_foos.foo_id = foos.object_id) WHERE ((name = 'A') AND (bars_foos.object_id IS NOT NULL) AND (foos.id = 1))))" end it "should have working filter by associations for associations with block with model instances" do b = lambda{|ds| ds.where(:name=>'A')} @Bar.many_to_one :foo, :clone=>:foo, &b @Foo.one_to_many :bars, :clone=>:bars, &b @Foo.one_to_one :bar, :clone=>:bars @Foo.many_to_many :mtmbars, :clone=>:mtmbars, &b @Bar.many_to_many :mtmfoos, :clone=>:mtmfoos, &b @Bar.where(:foo=>@foo).sql.must_equal "SELECT * FROM bars WHERE (bars.object_id IN (SELECT foos.object_id FROM foos WHERE ((name = 'A') AND (foos.object_id IS NOT NULL) AND (foos.id = 1))))" @Foo.where(:bars=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_id IN (SELECT bars.object_id FROM bars WHERE ((name = 'A') AND (bars.object_id IS NOT NULL) AND (bars.id = 1))))" @Foo.where(:bar=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_id IN (SELECT bars.object_id FROM bars WHERE ((name = 'A') AND (bars.object_id IS NOT NULL) AND (bars.id = 1))))" @Foo.where(:mtmbars=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_id IN (SELECT bars_foos.foo_id FROM bars INNER JOIN bars_foos ON (bars_foos.object_id = bars.object_id) WHERE ((name = 'A') AND (bars_foos.foo_id IS NOT NULL) AND (bars.id = 1))))" @Bar.where(:mtmfoos=>@foo).sql.must_equal "SELECT * FROM bars WHERE (bars.object_id IN (SELECT bars_foos.object_id FROM foos INNER JOIN bars_foos ON (bars_foos.foo_id = foos.object_id) WHERE ((name = 'A') AND (bars_foos.object_id IS NOT NULL) AND (foos.id = 1))))" end it "should have working modification methods" do b = @Bar.load(:id=>2, :object_id=>3) f = @Foo.load(:id=>2, :object_id=>3) @db.numrows = 1 @bar.foo = f @bar.obj_id.must_equal 3 @foo.bar = @bar @bar.obj_id.must_equal 2 @foo.add_bar(b) @db.fetch = [[{:id=>1, :object_id=>2}, {:id=>2, :object_id=>2}], [{:id=>1, :object_id=>2}]] @foo.bars.must_equal [@bar, b] @foo.remove_bar(b) @foo.bars.must_equal [@bar] @foo.remove_all_bars @foo.bars.must_equal [] @db.fetch = [[{:id=>1, :object_id=>2}], [], [{:id=>2, :object_id=>2}]] @bar = @Bar.load(:id=>1, :object_id=>2) @foo.mtmbars.must_equal [@bar] @foo.remove_all_mtmbars @foo.mtmbars.must_equal [] @foo.add_mtmbar(b) @foo.mtmbars.must_equal [b] @foo.remove_mtmbar(b) @foo.mtmbars.must_equal [] @db.fetch = [[{:id=>2, :object_id=>3}], [], [{:id=>2, :object_id=>3}]] @bar.add_mtmfoo(f) @bar.mtmfoos.must_equal [f] @bar.remove_all_mtmfoos @bar.mtmfoos.must_equal [] @bar.add_mtmfoo(f) @bar.mtmfoos.must_equal [f] @bar.remove_mtmfoo(f) @bar.mtmfoos.must_equal [] end end describe "Sequel::Model Associations with non-column expression keys" do before do @db = Sequel.mock(:fetch=>{:id=>1, :object_ids=>[2]}) @Foo = Class.new(Sequel::Model(@db[:foos])) @Bar = Class.new(Sequel::Model(@db[:bars])) @Foo.columns :id, :object_ids @Bar.columns :id, :object_ids m = Module.new{def obj_id; object_ids[0]; end} @Foo.include m @Bar.include m @Foo.one_to_many :bars, :primary_key=>:obj_id, :primary_key_column=>Sequel.subscript(:object_ids, 0), :key=>Sequel.subscript(:object_ids, 0), :key_method=>:obj_id, :class=>@Bar 
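# Sequel.subscript(:object_ids, 0) is an SQL expression rather than a plain
# column, so it can only be used on the column/key sides of the association
# options (it literalizes as object_ids[0] in the expected SQL below); the
# Ruby-side value still has to come from a real method, here the obj_id
# helper defined in the module above.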
@Foo.one_to_one :bar, :primary_key=>:obj_id, :primary_key_column=>Sequel.subscript(:object_ids, 0), :key=>Sequel.subscript(:object_ids, 0), :key_method=>:obj_id, :class=>@Bar @Bar.many_to_one :foo, :key=>:obj_id, :key_column=>Sequel.subscript(:object_ids, 0), :primary_key=>Sequel.subscript(:object_ids, 0), :primary_key_method=>:obj_id, :class=>@Foo @Foo.many_to_many :mtmbars, :join_table=>:bars_foos, :left_primary_key=>:obj_id, :left_primary_key_column=>Sequel.subscript(:object_ids, 0), :right_primary_key=>Sequel.subscript(:object_ids, 0), :right_primary_key_method=>:obj_id, :left_key=>Sequel.subscript(:foo_ids, 0), :right_key=>Sequel.subscript(:bar_ids, 0), :class=>@Bar @Bar.many_to_many :mtmfoos, :join_table=>:bars_foos, :left_primary_key=>:obj_id, :left_primary_key_column=>Sequel.subscript(:object_ids, 0), :right_primary_key=>Sequel.subscript(:object_ids, 0), :right_primary_key_method=>:obj_id, :left_key=>Sequel.subscript(:bar_ids, 0), :right_key=>Sequel.subscript(:foo_ids, 0), :class=>@Foo, :reciprocal=>nil @foo = @Foo.load(:id=>1, :object_ids=>[2]) @bar = @Bar.load(:id=>1, :object_ids=>[2]) @db.sqls end it "should have working regular association methods" do @Bar.first.foo.must_equal @foo @db.sqls.must_equal ["SELECT * FROM bars LIMIT 1", "SELECT * FROM foos WHERE (foos.object_ids[0] = 2) LIMIT 1"] @Foo.first.bars.must_equal [@bar] @db.sqls.must_equal ["SELECT * FROM foos LIMIT 1", "SELECT * FROM bars WHERE (bars.object_ids[0] = 2)"] @Foo.first.bar.must_equal @bar @db.sqls.must_equal ["SELECT * FROM foos LIMIT 1", "SELECT * FROM bars WHERE (bars.object_ids[0] = 2) LIMIT 1"] @Foo.first.mtmbars.must_equal [@bar] @db.sqls.must_equal ["SELECT * FROM foos LIMIT 1", "SELECT bars.* FROM bars INNER JOIN bars_foos ON (bars_foos.bar_ids[0] = bars.object_ids[0]) WHERE (bars_foos.foo_ids[0] = 2)"] @Bar.first.mtmfoos.must_equal [@foo] @db.sqls.must_equal ["SELECT * FROM bars LIMIT 1", "SELECT foos.* FROM foos INNER JOIN bars_foos ON (bars_foos.foo_ids[0] = foos.object_ids[0]) WHERE (bars_foos.bar_ids[0] = 2)"] end it "should have working eager loading methods" do @Bar.eager(:foo).all.map{|o| [o, o.foo]}.must_equal [[@bar, @foo]] @db.sqls.must_equal ["SELECT * FROM bars", "SELECT * FROM foos WHERE (foos.object_ids[0] IN (2))"] @Foo.eager(:bars).all.map{|o| [o, o.bars]}.must_equal [[@foo, [@bar]]] @db.sqls.must_equal ["SELECT * FROM foos", "SELECT * FROM bars WHERE (bars.object_ids[0] IN (2))"] @Foo.eager(:bar).all.map{|o| [o, o.bar]}.must_equal [[@foo, @bar]] @db.sqls.must_equal ["SELECT * FROM foos", "SELECT * FROM bars WHERE (bars.object_ids[0] IN (2))"] @db.fetch = [[{:id=>1, :object_ids=>[2]}], [{:id=>1, :object_ids=>[2], :x_foreign_key_x=>2}]] @Foo.eager(:mtmbars).all.map{|o| [o, o.mtmbars]}.must_equal [[@foo, [@bar]]] @db.sqls.must_equal ["SELECT * FROM foos", "SELECT bars.*, bars_foos.foo_ids[0] AS x_foreign_key_x FROM bars INNER JOIN bars_foos ON (bars_foos.bar_ids[0] = bars.object_ids[0]) WHERE (bars_foos.foo_ids[0] IN (2))"] @db.fetch = [[{:id=>1, :object_ids=>[2]}], [{:id=>1, :object_ids=>[2], :x_foreign_key_x=>2}]] @Bar.eager(:mtmfoos).all.map{|o| [o, o.mtmfoos]}.must_equal [[@bar, [@foo]]] @db.sqls.must_equal ["SELECT * FROM bars", "SELECT foos.*, bars_foos.bar_ids[0] AS x_foreign_key_x FROM foos INNER JOIN bars_foos ON (bars_foos.foo_ids[0] = foos.object_ids[0]) WHERE (bars_foos.bar_ids[0] IN (2))"] end it "should have working eager graphing methods" do @db.fetch = {:id=>1, :object_ids=>[2], :foo_id=>1, :foo_object_ids=>[2]} @Bar.eager_graph(:foo).all.map{|o| [o, o.foo]}.must_equal 
[[@bar, @foo]] @db.sqls.must_equal ["SELECT bars.id, bars.object_ids, foo.id AS foo_id, foo.object_ids AS foo_object_ids FROM bars LEFT OUTER JOIN foos AS foo ON (foo.object_ids[0] = bars.object_ids[0])"] @db.fetch = {:id=>1, :object_ids=>[2], :bars_id=>1, :bars_object_ids=>[2]} @Foo.eager_graph(:bars).all.map{|o| [o, o.bars]}.must_equal [[@foo, [@bar]]] @db.sqls.must_equal ["SELECT foos.id, foos.object_ids, bars.id AS bars_id, bars.object_ids AS bars_object_ids FROM foos LEFT OUTER JOIN bars ON (bars.object_ids[0] = foos.object_ids[0])"] @db.fetch = {:id=>1, :object_ids=>[2], :bar_id=>1, :bar_object_ids=>[2]} @Foo.eager_graph(:bar).all.map{|o| [o, o.bar]}.must_equal [[@foo, @bar]] @db.sqls.must_equal ["SELECT foos.id, foos.object_ids, bar.id AS bar_id, bar.object_ids AS bar_object_ids FROM foos LEFT OUTER JOIN bars AS bar ON (bar.object_ids[0] = foos.object_ids[0])"] @db.fetch = {:id=>1, :object_ids=>[2], :mtmfoos_id=>1, :mtmfoos_object_ids=>[2]} @Bar.eager_graph(:mtmfoos).all.map{|o| [o, o.mtmfoos]}.must_equal [[@bar, [@foo]]] @db.sqls.must_equal ["SELECT bars.id, bars.object_ids, mtmfoos.id AS mtmfoos_id, mtmfoos.object_ids AS mtmfoos_object_ids FROM bars LEFT OUTER JOIN bars_foos ON (bars_foos.bar_ids[0] = bars.object_ids[0]) LEFT OUTER JOIN foos AS mtmfoos ON (mtmfoos.object_ids[0] = bars_foos.foo_ids[0])"] @db.fetch = {:id=>1, :object_ids=>[2], :mtmbars_id=>1, :mtmbars_object_ids=>[2]} @Foo.eager_graph(:mtmbars).all.map{|o| [o, o.mtmbars]}.must_equal [[@foo, [@bar]]] @db.sqls.must_equal ["SELECT foos.id, foos.object_ids, mtmbars.id AS mtmbars_id, mtmbars.object_ids AS mtmbars_object_ids FROM foos LEFT OUTER JOIN bars_foos ON (bars_foos.foo_ids[0] = foos.object_ids[0]) LEFT OUTER JOIN bars AS mtmbars ON (mtmbars.object_ids[0] = bars_foos.bar_ids[0])"] end it "should have working filter by associations with model instances" do @Bar.first(:foo=>@foo).must_equal @bar @db.sqls.must_equal ["SELECT * FROM bars WHERE (bars.object_ids[0] = 2) LIMIT 1"] @Foo.first(:bars=>@bar).must_equal @foo @db.sqls.must_equal ["SELECT * FROM foos WHERE (foos.object_ids[0] = 2) LIMIT 1"] @Foo.first(:bar=>@bar).must_equal @foo @db.sqls.must_equal ["SELECT * FROM foos WHERE (foos.object_ids[0] = 2) LIMIT 1"] @Foo.first(:mtmbars=>@bar).must_equal @foo @db.sqls.must_equal ["SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT bars_foos.foo_ids[0] FROM bars_foos WHERE ((bars_foos.bar_ids[0] = 2) AND (bars_foos.foo_ids[0] IS NOT NULL)))) LIMIT 1"] @Bar.first(:mtmfoos=>@foo).must_equal @bar @db.sqls.must_equal ["SELECT * FROM bars WHERE (bars.object_ids[0] IN (SELECT bars_foos.bar_ids[0] FROM bars_foos WHERE ((bars_foos.foo_ids[0] = 2) AND (bars_foos.bar_ids[0] IS NOT NULL)))) LIMIT 1"] end it "should have working filter by associations for associations with :conditions with model instances" do @Bar.many_to_one :foo, :clone=>:foo, :conditions=>{:name=>'A'} @Foo.one_to_many :bars, :clone=>:bars, :conditions=>{:name=>'A'} @Foo.one_to_one :bar, :clone=>:bars @Foo.many_to_many :mtmbars, :clone=>:mtmbars, :conditions=>{:name=>'A'} @Bar.many_to_many :mtmfoos, :clone=>:mtmfoos, :conditions=>{:name=>'A'} @Bar.where(:foo=>@foo).sql.must_equal "SELECT * FROM bars WHERE (bars.object_ids[0] IN (SELECT foos.object_ids[0] FROM foos WHERE ((name = 'A') AND (foos.object_ids[0] IS NOT NULL) AND (foos.id = 1))))" @Foo.where(:bars=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT bars.object_ids[0] FROM bars WHERE ((name = 'A') AND (bars.object_ids[0] IS NOT NULL) AND (bars.id = 1))))" 
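# (The assertions in this test all follow the same shape: the subquery selects
# the key expression from the associated table, applies the association's
# :conditions (name = 'A'), adds an IS NOT NULL guard on the key expression,
# and pins the result to the given instance's primary key.)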
@Foo.where(:bar=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT bars.object_ids[0] FROM bars WHERE ((name = 'A') AND (bars.object_ids[0] IS NOT NULL) AND (bars.id = 1))))" @Foo.where(:mtmbars=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT bars_foos.foo_ids[0] FROM bars INNER JOIN bars_foos ON (bars_foos.bar_ids[0] = bars.object_ids[0]) WHERE ((name = 'A') AND (bars_foos.foo_ids[0] IS NOT NULL) AND (bars.id = 1))))" @Bar.where(:mtmfoos=>@foo).sql.must_equal "SELECT * FROM bars WHERE (bars.object_ids[0] IN (SELECT bars_foos.bar_ids[0] FROM foos INNER JOIN bars_foos ON (bars_foos.foo_ids[0] = foos.object_ids[0]) WHERE ((name = 'A') AND (bars_foos.bar_ids[0] IS NOT NULL) AND (foos.id = 1))))" end it "should have working filter by associations for associations with block with model instances" do b = lambda{|ds| ds.where(:name=>'A')} @Bar.many_to_one :foo, :clone=>:foo, &b @Foo.one_to_many :bars, :clone=>:bars, &b @Foo.one_to_one :bar, :clone=>:bars @Foo.many_to_many :mtmbars, :clone=>:mtmbars, &b @Bar.many_to_many :mtmfoos, :clone=>:mtmfoos, &b @Bar.where(:foo=>@foo).sql.must_equal "SELECT * FROM bars WHERE (bars.object_ids[0] IN (SELECT foos.object_ids[0] FROM foos WHERE ((name = 'A') AND (foos.object_ids[0] IS NOT NULL) AND (foos.id = 1))))" @Foo.where(:bars=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT bars.object_ids[0] FROM bars WHERE ((name = 'A') AND (bars.object_ids[0] IS NOT NULL) AND (bars.id = 1))))" @Foo.where(:bar=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT bars.object_ids[0] FROM bars WHERE ((name = 'A') AND (bars.object_ids[0] IS NOT NULL) AND (bars.id = 1))))" @Foo.where(:mtmbars=>@bar).sql.must_equal "SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT bars_foos.foo_ids[0] FROM bars INNER JOIN bars_foos ON (bars_foos.bar_ids[0] = bars.object_ids[0]) WHERE ((name = 'A') AND (bars_foos.foo_ids[0] IS NOT NULL) AND (bars.id = 1))))" @Bar.where(:mtmfoos=>@foo).sql.must_equal "SELECT * FROM bars WHERE (bars.object_ids[0] IN (SELECT bars_foos.bar_ids[0] FROM foos INNER JOIN bars_foos ON (bars_foos.foo_ids[0] = foos.object_ids[0]) WHERE ((name = 'A') AND (bars_foos.bar_ids[0] IS NOT NULL) AND (foos.id = 1))))" end it "should have working filter by associations with model datasets" do @Bar.first(:foo=>@Foo.where(:id=>@foo.id)).must_equal @bar @db.sqls.must_equal ["SELECT * FROM bars WHERE (bars.object_ids[0] IN (SELECT foos.object_ids[0] FROM foos WHERE ((id = 1) AND (foos.object_ids[0] IS NOT NULL)))) LIMIT 1"] @Foo.first(:bars=>@Bar.where(:id=>@bar.id)).must_equal @foo @db.sqls.must_equal ["SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT bars.object_ids[0] FROM bars WHERE ((id = 1) AND (bars.object_ids[0] IS NOT NULL)))) LIMIT 1"] @Foo.first(:bar=>@Bar.where(:id=>@bar.id)).must_equal @foo @db.sqls.must_equal ["SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT bars.object_ids[0] FROM bars WHERE ((id = 1) AND (bars.object_ids[0] IS NOT NULL)))) LIMIT 1"] @Foo.first(:mtmbars=>@Bar.where(:id=>@bar.id)).must_equal @foo @db.sqls.must_equal ["SELECT * FROM foos WHERE (foos.object_ids[0] IN (SELECT bars_foos.foo_ids[0] FROM bars_foos WHERE ((bars_foos.bar_ids[0] IN (SELECT bars.object_ids[0] FROM bars WHERE ((id = 1) AND (bars.object_ids[0] IS NOT NULL)))) AND (bars_foos.foo_ids[0] IS NOT NULL)))) LIMIT 1"] @Bar.first(:mtmfoos=>@Foo.where(:id=>@foo.id)).must_equal @bar @db.sqls.must_equal ["SELECT * FROM bars WHERE (bars.object_ids[0] IN (SELECT 
bars_foos.bar_ids[0] FROM bars_foos WHERE ((bars_foos.foo_ids[0] IN (SELECT foos.object_ids[0] FROM foos WHERE ((id = 1) AND (foos.object_ids[0] IS NOT NULL)))) AND (bars_foos.bar_ids[0] IS NOT NULL)))) LIMIT 1"] end end describe Sequel::Model, "#refresh" do before do @c = Class.new(Sequel::Model(:items)) do unrestrict_primary_key columns :id, :x end DB.reset end it "should remove cached associations" do @c.many_to_one :node, :class=>@c @m = @c.new(:id => 555) @m.associations[:node] = 15 @m.reload @m.associations.must_equal({}) end end describe "Model#freeze" do before do class ::Album < Sequel::Model columns :id class B < Sequel::Model columns :id, :album_id many_to_one :album, :class=>Album end one_to_one :b, :key=>:album_id, :class=>B end @o = Album.load(:id=>1).freeze DB.sqls end after do Object.send(:remove_const, :Album) end it "should freeze the object's associations" do @o.associations.frozen?.must_equal true end it "should freeze associations after validating" do Album.send(:define_method, :validate){super(); b} @o = Album.load(:id=>1) @o.freeze @o.associations.fetch(:b).id.must_equal 1 end it "should not break association getters" do Album::B.dataset = Album::B.dataset.with_fetch(:album_id=>1, :id=>2) @o.b.must_equal Album::B.load(:id=>2, :album_id=>1) @o.associations[:b].must_be_nil @o = @o.dup @o.b.must_equal Album::B.load(:id=>2, :album_id=>1) @o.associations[:b].must_equal Album::B.load(:id=>2, :album_id=>1) end it "should not break reciprocal associations" do b = Album::B.load(:id=>2, :album_id=>nil) b.album = @o @o.associations[:b].must_be_nil @o = @o.dup b = Album::B.load(:id=>2, :album_id=>nil) b.album = @o @o.associations[:b].must_equal Album::B.load(:id=>2, :album_id=>1) end end describe "association autoreloading" do before do @c = Class.new(Sequel::Model) @Artist = Class.new(@c).set_dataset(:artists) @Artist.dataset = @Artist.dataset.with_fetch(:id=>2, :name=>'Ar') @Album = Class.new(@c).set_dataset(:albums) @Artist.columns :id, :name @Album.columns :id, :name, :artist_id @Album.db_schema[:artist_id][:type] = :integer @Album.many_to_one :artist, :class=>@Artist DB.reset end it "should not reload many_to_one association when foreign key is not modified" do album = @Album.load(:id => 1, :name=>'Al', :artist_id=>1) album.artist DB.sqls.must_equal ['SELECT * FROM artists WHERE id = 1'] album.artist_id = 1 album.artist DB.sqls.must_equal [] album = @Album.new(:name=>'Al', :artist_id=>1) album.artist DB.sqls.must_equal ['SELECT * FROM artists WHERE id = 1'] album.artist_id = 1 album.artist DB.sqls.must_equal [] end it "should reload many_to_one association when foreign key is modified" do album = @Album.load(:id => 1, :name=>'Al', :artist_id=>2) album.artist DB.sqls.must_equal ['SELECT * FROM artists WHERE id = 2'] album.artist_id = 1 album.artist DB.sqls.must_equal ['SELECT * FROM artists WHERE id = 1'] end it "should handle multiple many_to_one associations with the same foreign key" do @Album.many_to_one :artist2, :key=>:artist_id, :class=>@Artist album = @Album.load(:id => 1, :name=>'Al', :artist_id=>2) album.artist album.artist2 DB.sqls.must_equal ['SELECT * FROM artists WHERE id = 2'] * 2 album.artist album.artist2 DB.sqls.must_equal [] album.artist_id = 1 album.artist album.artist2 DB.sqls.must_equal ['SELECT * FROM artists WHERE id = 1'] * 2 end it "should not reload when value has not changed" do album = @Album.load(:id => 1, :name=>'Al', :artist_id=>2) album.artist DB.sqls.must_equal ['SELECT * FROM artists WHERE id = 2'] album.artist_id = 2 album.artist
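# Assigning a value equal to the current one should be a no-op for the cached
# association; the assertions below expect no new query even when the assigned
# value is the string "2", since it typecasts to the same integer as the
# current foreign key value.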
DB.sqls.must_equal [] album.artist_id = "2" album.artist DB.sqls.must_equal [] end it "should reload all associations which use the foreign key" do @Album.many_to_one :other_artist, :key => :artist_id, :foreign_key => :id, :class => @Artist album = @Album.load(:id => 1, :name=>'Al', :artist_id=>2) album.artist album.other_artist DB.reset album.artist_id = 1 album.artist DB.sqls.must_equal ['SELECT * FROM artists WHERE id = 1'] album.other_artist DB.sqls.must_equal ['SELECT * FROM artists WHERE id = 1'] end it "should work with composite keys" do @Album.many_to_one :composite_artist, :key => [:artist_id, :name], :primary_key => [:id, :name], :class => @Artist album = @Album.load(:id => 1, :name=>'Al', :artist_id=>2) album.composite_artist DB.reset album.artist_id = 1 album.composite_artist DB.sqls.must_equal ["SELECT * FROM artists WHERE ((artists.id = 1) AND (artists.name = 'Al')) LIMIT 1"] album.name = 'Al2' album.composite_artist DB.sqls.must_equal ["SELECT * FROM artists WHERE ((artists.id = 1) AND (artists.name = 'Al2')) LIMIT 1"] end it "should work with subclasses" do salbum = Class.new(@Album) oartist = Class.new(@c).set_dataset(:oartist) oartist.columns :id, :name salbum.many_to_one :artist2, :class=>oartist, :key=>:artist_id album = salbum.load(:id => 1, :name=>'Al', :artist_id=>2) album.artist DB.sqls.must_equal ['SELECT * FROM artists WHERE id = 2'] album.artist_id = 1 album.artist DB.sqls.must_equal ['SELECT * FROM artists WHERE id = 1'] end end describe Sequel::Model, ".dataset_module" do before do @c = Class.new(Sequel::Model(:items)) end it "should have dataset_module support an eager method" do @c.many_to_one :foo, :class=>@c @c.many_to_one :bar, :class=>@c @c.many_to_one :baz, :class=>@c @c.many_to_one :quux, :class=>@c @c.dataset_module{eager(:foo, {:foo=>{:bar=>:baz}}, :quux)} @c.foo.opts[:eager].must_equal(:foo=>{:bar=>:baz}, :quux=>nil) @c.where(:bar).foo.opts[:eager].must_equal(:foo=>{:bar=>:baz}, :quux=>nil) end end
sequel-5.63.0/spec/model/base_spec.rb
require_relative "spec_helper" describe "Model attribute setters" do before do @c = Class.new(Sequel::Model(:items)) do columns :id, :x, :y, :"x y" end @o = @c.new DB.reset end it "refresh should return self" do @o = @c[1] def @o._refresh(*) [] end @o.refresh.must_equal @o end it "should mark the column value as changed" do @o.changed_columns.must_equal [] @o.x = 2 @o.changed_columns.must_equal [:x] @o.y = 3 @o.changed_columns.must_equal [:x, :y] @o.changed_columns.clear @o[:x] = 2 @o.changed_columns.must_equal [:x] @o[:y] = 3 @o.changed_columns.must_equal [:x, :y] end
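# Both the attribute writer (o.x = 2) and the []= form (o[:x] = 2) funnel
# through the same change tracking, which is why the test above expects the
# same changed_columns result from either form. A minimal sketch:
#
#   o = @c.new
#   o.x = 2; o[:y] = 3
#   o.changed_columns  # => [:x, :y]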
it "should handle columns that can't be called like normal ruby methods" do @o.send(:"x y=", 3) @o.changed_columns.must_equal [:"x y"] @o.values.must_equal(:"x y"=>3) @o.send(:"x y").must_equal 3 end end describe "Model attribute getters/setters" do before do a = @a = [] @c = Class.new(Sequel::Model(:items)) do columns :id, :x, :"x y", :require_modification [:x, :"x y"].each do |c| define_method(c) do a << c super() end define_method(:"#{c}=") do |v| a << :"#{c}=" << v super(v) end end end DB.reset end it "should not override existing methods" do @o = @c.new @o.values.merge!(:x=>4, :"x y"=>5, :require_modification=>6) @o.x.must_equal 4 @o.x = 1 @o.send(:"x y").must_equal 5 @o.send(:"x y=", 2) @o.require_modification.must_equal true @o.require_modification = 3 @o.values.must_equal(:x=>1, :"x y"=>2, :require_modification=>6) @a.must_equal [:x, :x=, 1, :"x y", :"x y=", 2] end it "should not override existing methods in subclasses" do @c = Class.new(@c) @c.columns(:id, :x, :y, :"x y", :require_modification) @o = @c.new @o.values.merge!(:x=>4, :"x y"=>5, :require_modification=>6) @o.x.must_equal 4 @o.x = 1 @o.send(:"x y").must_equal 5 @o.send(:"x y=", 2) @o.require_modification.must_equal true @o.require_modification = 3 @o.values.must_equal(:x=>1, :"x y"=>2, :require_modification=>6) @a.must_equal [:x, :x=, 1, :"x y", :"x y=", 2] end end describe "Model.def_column_alias" do before do @o = Class.new(Sequel::Model(:items)) do columns :id def_column_alias(:id2, :id) end.load(:id=>1) DB.reset end it "should create an getter alias for the column" do @o.id2.must_equal 1 end it "should create an setter alias for the column" do @o.id2 = 2 @o.id2.must_equal 2 @o.values.must_equal(:id => 2) end end describe Sequel::Model, "dataset" do before do @a = Class.new(Sequel::Model(:items)) @b = Class.new(Sequel::Model) class ::Elephant < Sequel::Model(:ele1); end class ::Maggot < Sequel::Model; end class ::ShoeSize < Sequel::Model; end class ::BootSize < ShoeSize; end end after do [:Elephant, :Maggot, :ShoeSize, :BootSize].each{|x| Object.send(:remove_const, x)} end it "should default to the plural of the class name" do Maggot.dataset.sql.must_equal 'SELECT * FROM maggots' ShoeSize.dataset.sql.must_equal 'SELECT * FROM shoe_sizes' end it "should return the dataset for the superclass if available" do BootSize.dataset.sql.must_equal 'SELECT * FROM shoe_sizes' end it "should return the correct dataset if set explicitly" do Elephant.dataset.sql.must_equal 'SELECT * FROM ele1' @a.dataset.sql.must_equal 'SELECT * FROM items' end it "should raise if no dataset is explicitly set and the class is anonymous" do proc {@b.dataset}.must_raise(Sequel::Error) end it "should not override dataset explicitly set when subclassing" do sc = Class.new(::Elephant) do set_dataset :foo end sc.table_name.must_equal :foo end end describe Sequel::Model, "has_dataset?" 
do it "should return whether the model has a dataset" do c = Class.new(Sequel::Model) c.has_dataset?.must_equal false c.dataset = c.db[:table] c.has_dataset?.must_equal true end end describe Sequel::Model, "implicit table names" do after do Object.send(:remove_const, :BlahBlah) end it "should disregard namespaces for the table name" do module ::BlahBlah class MwaHaHa < Sequel::Model end end BlahBlah::MwaHaHa.dataset.sql.must_equal 'SELECT * FROM mwa_ha_has' end it "should automatically set datasets when anonymous class of Sequel::Model is used as superclass" do class BlahBlah < Class.new(Sequel::Model); end BlahBlah.dataset.sql.must_equal 'SELECT * FROM blah_blahs' end end describe Sequel::Model, ".dataset_module" do before do @c = Class.new(Sequel::Model(:items)) end it "should extend the dataset with the module if the model has a dataset" do @c.dataset_module{def return_3() 3 end} @c.dataset.return_3.must_equal 3 end it "should also extend the instance_dataset with the module if the model has a dataset" do @c.dataset_module{def return_3() 3 end} @c.instance_dataset.return_3.must_equal 3 end it "should add methods defined in the module to the class" do @c.dataset_module{def return_3() 3 end} @c.return_3.must_equal 3 end it "should add methods defined in the module outside the block to the class" do @c.dataset_module.module_eval{def return_3() 3 end} @c.return_3.must_equal 3 end it "should add methods that can't be called with normal method syntax as class methods" do @c.dataset_module.module_eval{define_method(:'return 3'){3}} @c.send(:'return 3').must_equal 3 end it "should not add private or protected methods defined in the module to the class" do @c.dataset_module{private; def return_3() 3 end} @c.dataset_module{protected; def return_4() 4 end} @c.respond_to?(:return_3).must_equal false @c.respond_to?(:return_4).must_equal false end it "should cache calls and readd methods if set_dataset is used" do @c.dataset_module{def return_3() 3 end} @c.set_dataset :items @c.return_3.must_equal 3 @c.dataset.return_3.must_equal 3 end it "should readd methods to subclasses, if set_dataset is used in a subclass" do @c.dataset_module{def return_3() 3 end} c = Class.new(@c) c.set_dataset :items c.return_3.must_equal 3 c.dataset.return_3.must_equal 3 end it "should only have a single dataset_module per class" do @c.dataset_module{def return_3() 3 end; alias return_3 return_3} @c.dataset_module{def return_3() 3 + (begin; super; rescue NoMethodError; 1; end) end} @c.return_3.must_equal 4 end it "should not have subclasses share the dataset_module" do @c.dataset_module{def return_3() 3 end} c = Class.new(@c) c.dataset_module{def return_3() 3 + (begin; super; rescue NoMethodError; 1; end) end} c.return_3.must_equal 6 end it "should accept a module object and extend the dataset with it" do @c.dataset_module Module.new{def return_3() 3 end} @c.dataset.return_3.must_equal 3 end it "should be able to call dataset_module with a module multiple times" do @c.dataset_module Module.new{def return_3() 3 end} @c.dataset_module Module.new{def return_4() 4 end} @c.dataset.return_3.must_equal 3 @c.dataset.return_4.must_equal 4 end it "should be able mix dataset_module calls with and without arguments" do @c.dataset_module{def return_3() 3 end} @c.dataset_module Module.new{def return_4() 4 end} @c.dataset.return_3.must_equal 3 @c.dataset.return_4.must_equal 4 end it "should have modules provided to dataset_module extend subclass datasets" do @c.dataset_module{def return_3() 3 end} @c.dataset_module Module.new{def 
return_4() 4 end} c = Class.new(@c) c.set_dataset :a c.dataset.return_3.must_equal 3 c.dataset.return_4.must_equal 4 end it "should return the dataset module if given a block" do Object.new.extend(@c.dataset_module{def return_3() 3 end}).return_3.must_equal 3 end it "should return the argument if given one" do Object.new.extend(@c.dataset_module Module.new{def return_3() 3 end}).return_3.must_equal 3 end it "should have dataset_module support a subset method" do @c.dataset_module{subset :released, :released} @c.released.sql.must_equal 'SELECT * FROM items WHERE released' @c.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE (foo AND released)' end it "should have dataset_module support a where method" do @c.dataset_module{where :released, :released} @c.released.sql.must_equal 'SELECT * FROM items WHERE released' @c.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE (foo AND released)' end if Sequel::Model.dataset_module_class == Sequel::Model::DatasetModule it "should have dataset_module not support an eager method" do proc{@c.dataset_module{eager :foo}}.must_raise NoMethodError end end it "should have dataset_module support a having method" do @c.dataset_module{having(:released){released}} @c.released.sql.must_equal 'SELECT * FROM items HAVING released' @c.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE foo HAVING released' end it "should have dataset_module support an exclude method" do @c.dataset_module{exclude :released, :released} @c.released.sql.must_equal 'SELECT * FROM items WHERE NOT released' @c.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE (foo AND NOT released)' end it "should have dataset_module support an exclude_having method" do @c.dataset_module{exclude_having :released, :released} @c.released.sql.must_equal 'SELECT * FROM items HAVING NOT released' @c.where(:foo).released.sql.must_equal 'SELECT * FROM items WHERE foo HAVING NOT released' end it "should have dataset_module support a distinct method" do @c.dataset = @c.dataset.with_extend{def supports_distinct_on?; true end} @c.dataset_module{distinct :foo, :baz} @c.foo.sql.must_equal 'SELECT DISTINCT ON (baz) * FROM items' @c.where(:bar).foo.sql.must_equal 'SELECT DISTINCT ON (baz) * FROM items WHERE bar' end it "should have dataset_module support a grep method" do @c.dataset_module{grep :foo, :baz, 'quux%'} @c.foo.sql.must_equal 'SELECT * FROM items WHERE ((baz LIKE \'quux%\' ESCAPE \'\\\'))' @c.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE (bar AND ((baz LIKE \'quux%\' ESCAPE \'\\\')))' end it "should have dataset_module support a group method" do @c.dataset_module{group :foo, :baz} @c.foo.sql.must_equal 'SELECT * FROM items GROUP BY baz' @c.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar GROUP BY baz' end it "should have dataset_module support a group_and_count method" do @c.dataset_module{group_and_count :foo, :baz} @c.foo.sql.must_equal 'SELECT baz, count(*) AS count FROM items GROUP BY baz' @c.where(:bar).foo.sql.must_equal 'SELECT baz, count(*) AS count FROM items WHERE bar GROUP BY baz' end it "should have dataset_module support a group_append method" do @c.dataset_module{group_append :foo, :baz} @c.foo.sql.must_equal 'SELECT * FROM items GROUP BY baz' @c.group(:bar).foo.sql.must_equal 'SELECT * FROM items GROUP BY bar, baz' end it "should have dataset_module support a limit method" do @c.dataset_module{limit :foo, 1} @c.foo.sql.must_equal 'SELECT * FROM items LIMIT 1' @c.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar 
LIMIT 1' end it "should have dataset_module support an offset method" do @c.dataset_module{offset :foo, 1} @c.foo.sql.must_equal 'SELECT * FROM items OFFSET 1' @c.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar OFFSET 1' end it "should have dataset_module support an order method" do @c.dataset_module{order(:foo){:baz}} @c.foo.sql.must_equal 'SELECT * FROM items ORDER BY baz' @c.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar ORDER BY baz' end it "should have dataset_module support an order_append method" do @c.dataset_module{order_append :foo, :baz} @c.foo.sql.must_equal 'SELECT * FROM items ORDER BY baz' @c.order(:bar).foo.sql.must_equal 'SELECT * FROM items ORDER BY bar, baz' end it "should have dataset_module support an order_prepend method" do @c.dataset_module{order_prepend :foo, :baz} @c.foo.sql.must_equal 'SELECT * FROM items ORDER BY baz' @c.order(:bar).foo.sql.must_equal 'SELECT * FROM items ORDER BY baz, bar' end it "should have dataset_module support a reverse method" do @c.dataset_module{reverse(:foo){:baz}} @c.foo.sql.must_equal 'SELECT * FROM items ORDER BY baz DESC' @c.where(:bar).foo.sql.must_equal 'SELECT * FROM items WHERE bar ORDER BY baz DESC' end it "should have dataset_module support a select method" do @c.dataset_module{select :foo, :baz} @c.foo.sql.must_equal 'SELECT baz FROM items' @c.where(:bar).foo.sql.must_equal 'SELECT baz FROM items WHERE bar' end it "should have dataset_module support a select_all method" do @c.dataset_module{select_all :foo, :baz} @c.foo.sql.must_equal 'SELECT baz.* FROM items' @c.where(:bar).foo.sql.must_equal 'SELECT baz.* FROM items WHERE bar' end it "should have dataset_module support a select_append method" do @c.dataset_module{select_append :foo, :baz} @c.foo.sql.must_equal 'SELECT *, baz FROM items' @c.where(:bar).foo.sql.must_equal 'SELECT *, baz FROM items WHERE bar' end it "should have dataset_module support a select_group method" do @c.dataset_module{select_group :foo, :baz} @c.foo.sql.must_equal 'SELECT baz FROM items GROUP BY baz' @c.where(:bar).foo.sql.must_equal 'SELECT baz FROM items WHERE bar GROUP BY baz' end it "should have dataset_module support a server method" do @c.dataset_module{server :foo, :baz} @c.foo.opts[:server].must_equal :baz @c.where(:bar).foo.opts[:server].must_equal :baz end it "should raise error if called with both an argument and a block" do proc{@c.dataset_module(Module.new{def return_3() 3 end}){}}.must_raise(Sequel::Error) end it "should have dataset_module support a method with keyword arguments" do @c.dataset_module { eval('def with_foo(name: (raise)); end') } proc{@c.with_foo}.must_raise(StandardError) end if RUBY_VERSION >= '2.0' end describe "A model class with implicit table name" do before do class ::Donkey < Sequel::Model end end after do Object.send(:remove_const, :Donkey) end it "should have a dataset associated with the model class" do Donkey.dataset.model.must_equal Donkey end end describe "A model inheriting from a model" do before do class ::Feline < Sequel::Model; end class ::Leopard < Feline; end end after do Object.send(:remove_const, :Leopard) Object.send(:remove_const, :Feline) end it "should have a dataset associated with itself" do Feline.dataset.model.must_equal Feline Leopard.dataset.model.must_equal Leopard end end describe "A model inheriting from a custom base that sets @dataset" do before do ::Feline = Class.new(Sequel::Model) def Feline.inherited(subclass) subclass.instance_variable_set(:@dataset, nil) super end class ::Leopard < Feline; end end
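# The inherited hook above pre-populates @dataset with nil, which is how a
# custom base class can opt its subclasses out of Sequel's implicit
# table-name inference (the test below expects Leopard.dataset to raise).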
after do Object.send(:remove_const, :Leopard) Object.send(:remove_const, :Feline) end it "should not infer the dataset of the subclass" do proc{Leopard.dataset}.must_raise(Sequel::Error) end end describe "Model.primary_key" do before do @c = Class.new(Sequel::Model) end it "should default to id" do @c.primary_key.must_equal :id end it "should be overridden by set_primary_key" do @c.set_primary_key :cid @c.primary_key.must_equal :cid @c.set_primary_key([:id1, :id2]) @c.primary_key.must_equal [:id1, :id2] end it "should use nil for no primary key" do @c.no_primary_key @c.primary_key.must_be_nil end end describe "Model.primary_key_hash" do before do @c = Class.new(Sequel::Model) end it "should handle a single primary key" do @c.primary_key_hash(1).must_equal(:id=>1) end it "should handle a composite primary key" do @c.set_primary_key([:id1, :id2]) @c.primary_key_hash([1, 2]).must_equal(:id1=>1, :id2=>2) end it "should raise an error for no primary key" do @c.no_primary_key proc{@c.primary_key_hash(1)}.must_raise(Sequel::Error) end end describe "Model.qualified_primary_key_hash" do before do @c = Class.new(Sequel::Model(:items)) end it "should handle a single primary key" do @c.qualified_primary_key_hash(1).must_equal(Sequel.qualify(:items, :id)=>1) end it "should handle a composite primary key" do @c.set_primary_key([:id1, :id2]) @c.qualified_primary_key_hash([1, 2]).must_equal(Sequel.qualify(:items, :id1)=>1, Sequel.qualify(:items, :id2)=>2) end it "should raise an error for no primary key" do @c.no_primary_key proc{@c.qualified_primary_key_hash(1)}.must_raise(Sequel::Error) end it "should allow specifying a different qualifier" do @c.qualified_primary_key_hash(1, :apple).must_equal(Sequel.qualify(:apple, :id)=>1) @c.set_primary_key([:id1, :id2]) @c.qualified_primary_key_hash([1, 2], :bear).must_equal(Sequel.qualify(:bear, :id1)=>1, Sequel.qualify(:bear, :id2)=>2) end end describe "Model.db" do before do @db = Sequel.mock @databases = Sequel::DATABASES.dup @model_db = Sequel::Model.db Sequel::Model.db = nil Sequel::DATABASES.clear end after do Sequel::Model.instance_variable_get(:@db).must_be_nil Sequel::DATABASES.replace(@databases) Sequel::Model.db = @model_db end it "should be required when creating named model classes" do begin proc{class ModelTest < Sequel::Model; end}.must_raise(Sequel::Error) ensure Object.send(:remove_const, :ModelTest) end end it "should be required when creating anonymous model classes without a database" do proc{Sequel::Model(:foo)}.must_raise(Sequel::Error) end it "should not be required when creating anonymous model classes with a database" do Sequel::Model(@db).db.must_equal @db Sequel::Model(@db[:foo]).db.must_equal @db end it "should work correctly when subclassing anonymous model classes with a database" do begin Class.new(Sequel::Model(@db)).db.must_equal @db Class.new(Sequel::Model(@db[:foo])).db.must_equal @db class ModelTest < Sequel::Model(@db) db.must_equal @db end class ModelTest2 < Sequel::Model(@db[:foo]) db.must_equal @db end ModelTest.instance_variable_set(:@db, nil) ModelTest.db.must_equal @db ensure Object.send(:remove_const, :ModelTest) Object.send(:remove_const, :ModelTest2) end end end describe "Model.db=" do before do @db1 = Sequel.mock @db2 = Sequel.mock @m = Class.new(Sequel::Model(@db1)) end it "should change database for model" do @m.db = @db2 @m.db.must_equal @db2 end it "should raise Error for model with existing dataset" do @m.dataset = :table proc{@m.db = @db2}.must_raise Sequel::Error end it "should use the database for subclasses" 
do Class.new(@m).db.must_equal @db1 end end describe Sequel::Model, ".(un)?restrict_primary_key\\??" do before do @c = Class.new(Sequel::Model(:blahblah)) do set_primary_key :id columns :x, :y, :z, :id end @c.strict_param_setting = false end it "should restrict updates to primary key by default" do i = @c.new(:x => 1, :y => 2, :id => 3) i.values.must_equal(:x => 1, :y => 2) i.set(:x => 4, :y => 5, :id => 6) i.values.must_equal(:x => 4, :y => 5) end it "should allow updates to primary key if unrestrict_primary_key is used" do @c.unrestrict_primary_key i = @c.new(:x => 1, :y => 2, :id => 3) i.values.must_equal(:x => 1, :y => 2, :id=>3) i.set(:x => 4, :y => 5, :id => 6) i.values.must_equal(:x => 4, :y => 5, :id=>6) end it "should have restrict_primary_key? return true or false depending on the current setting" do @c.restrict_primary_key?.must_equal true @c.unrestrict_primary_key @c.restrict_primary_key?.must_equal false c1 = Class.new(@c) c1.restrict_primary_key?.must_equal false @c.restrict_primary_key @c.restrict_primary_key?.must_equal true c1.restrict_primary_key?.must_equal false c2 = Class.new(@c) c2.restrict_primary_key?.must_equal true end end describe Sequel::Model, ".strict_param_setting" do before do @c = Class.new(Sequel::Model(:blahblah)) do columns :x, :y, :z, :id end end it "should be enabled by default" do @c.strict_param_setting.must_equal true end it "should raise an error if a missing/restricted column/method is accessed" do proc{@c.new(:a=>1)}.must_raise(Sequel::MassAssignmentRestriction) proc{@c.create(:a=>1)}.must_raise(Sequel::MassAssignmentRestriction) c = @c.new proc{c.set(:a=>1)}.must_raise(Sequel::MassAssignmentRestriction) proc{c.update(:a=>1)}.must_raise(Sequel::MassAssignmentRestriction) end it "should be disabled by strict_param_setting = false" do @c.strict_param_setting = false @c.strict_param_setting.must_equal false @c.new(:a=>1) end end describe Sequel::Model, ".require_modification" do before do @ds1 = DB[:items].with_extend{def provides_accurate_rows_matched?; false end} @ds2 = DB[:items].with_extend{def provides_accurate_rows_matched?; true end} end after do Sequel::Model.require_modification = nil end it "should depend on whether the dataset provides an accurate number of rows matched by default" do Class.new(Sequel::Model).set_dataset(@ds1).require_modification.must_equal false Class.new(Sequel::Model).set_dataset(@ds2).require_modification.must_equal true end it "should obey global setting regardless of dataset support if set" do Sequel::Model.require_modification = true Class.new(Sequel::Model).set_dataset(@ds1).require_modification.must_equal true Class.new(Sequel::Model).set_dataset(@ds2).require_modification.must_equal true Sequel::Model.require_modification = false Class.new(Sequel::Model).set_dataset(@ds1).require_modification.must_equal false Class.new(Sequel::Model).set_dataset(@ds2).require_modification.must_equal false end end describe Sequel::Model, ".[] optimization" do before do @db = Sequel.mock def @db.schema(*) [[:id, {:primary_key=>true}]] end def @db.supports_schema_parsing?() true end @c = Class.new(Sequel::Model(@db)) @ds = @db.dataset.with_quote_identifiers(true) end it "should set simple_pk to the literalized primary key column name if a single primary key" do @c.set_primary_key :id @c.simple_pk.must_equal 'id' @c.set_primary_key :b @c.simple_pk.must_equal 'b' @c.set_primary_key Sequel.identifier(:b__a) @c.simple_pk.must_equal 'b__a' end it "should have simple_pk be blank if compound or no primary key" do @c.no_primary_key @c.simple_pk.must_be_nil
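# A composite primary key disables the simple_pk optimization as well: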
@c.set_primary_key [:b, :a] @c.simple_pk.must_be_nil end it "should have simple_table set if passed a Symbol to set_dataset" do @c.set_dataset :a @c.simple_table.must_equal 'a' @c.set_dataset :b @c.simple_table.must_equal 'b' end it "should have simple_table set if passed a simple select all dataset to set_dataset" do @c.set_dataset @ds.from(:a) @c.simple_table.must_equal '"a"' @c.set_dataset @ds.from(:b) @c.simple_table.must_equal '"b"' @c.set_dataset @ds.from(Sequel[:b][:a]) @c.simple_table.must_equal '"b"."a"' end with_symbol_splitting "should have simple_table set using qualified symbol" do @c.set_dataset :b__a @c.simple_table.must_equal 'b.a' @c.set_dataset @ds.from(:b__a) @c.simple_table.must_equal '"b"."a"' end it "should have simple_table = nil if passed an aliased expression to set_dataset" do @c.set_dataset Sequel.as(:a, :b) @c.simple_table.must_be_nil end it "should have simple_table = nil if passed a literal string" do @c.set_dataset Sequel.lit('a') @c.simple_table.must_be_nil end it "should have simple_table = nil if passed a non-simple select all dataset to set_dataset" do @c.set_dataset @c.db[:a].filter(:active) @c.simple_table.must_be_nil end it "should have simple_table inherit superclass's setting" do Class.new(@c).simple_table.must_be_nil @c.set_dataset :a Class.new(@c).simple_table.must_equal 'a' end it "should use Dataset#with_sql if simple_table and simple_pk are true" do @c.set_dataset @db[:a].with_fetch(:id=>1) @c[1].must_equal @c.load(:id=>1) @db.sqls.must_equal ['SELECT * FROM a WHERE id = 1'] end it "should not use Dataset#with_sql if placeholder literalizers are not supported" do @c.set_dataset @db[:a].with_fetch(:id=>1).with_extend{def supports_placeholder_literalizer?; false end} @c[1].must_equal @c.load(:id=>1) @db.sqls.must_equal ['SELECT * FROM a WHERE (id = 1) LIMIT 1'] end it "should not use Dataset#with_sql if either simple_table or simple_pk is nil" do @c.set_dataset @db[:a].where(:active).with_fetch(:id=>1) @c[1].must_equal @c.load(:id=>1) @db.sqls.must_equal ['SELECT * FROM a WHERE (active AND (id = 1)) LIMIT 1'] end it "should return first matching record for hash argument" do @c.set_dataset @db[:a].with_fetch(:id=>1, :a=>2) @c[:a=>2].values.must_equal(:id=>1, :a=>2) @db.sqls.must_equal ['SELECT * FROM a WHERE (a = 2) LIMIT 1'] end it "should return nil for nil argument" do @c[nil].must_be_nil @db.sqls.must_equal [] end end describe "Model datasets #with_pk and #with_pk!"
do before do @c = Class.new(Sequel::Model(:a)) @ds = @c.dataset = @c.dataset.with_fetch(:id=>1) DB.reset end it "should be callable on the model class with optimized SQL" do @c.with_pk(1).must_equal @c.load(:id=>1) DB.sqls.must_equal ["SELECT * FROM a WHERE id = 1"] @c.with_pk!(1).must_equal @c.load(:id=>1) DB.sqls.must_equal ["SELECT * FROM a WHERE id = 1"] end it "should return the first record where the primary key matches" do @ds.with_pk(1).must_equal @c.load(:id=>1) DB.sqls.must_equal ["SELECT * FROM a WHERE (a.id = 1) LIMIT 1"] @ds.with_pk!(1).must_equal @c.load(:id=>1) DB.sqls.must_equal ["SELECT * FROM a WHERE (a.id = 1) LIMIT 1"] end it "should work when called repeatedly on a frozen dataset" do @ds.freeze 5.times do @ds.with_pk(1).must_equal @c.load(:id=>1) DB.sqls.must_equal ["SELECT * FROM a WHERE (a.id = 1) LIMIT 1"] end end it "should handle existing filters" do @ds.filter(:a=>2).with_pk(1) DB.sqls.must_equal ["SELECT * FROM a WHERE ((a = 2) AND (a.id = 1)) LIMIT 1"] @ds.filter(:a=>2).with_pk!(1) DB.sqls.must_equal ["SELECT * FROM a WHERE ((a = 2) AND (a.id = 1)) LIMIT 1"] end it "should work with string values" do @ds.with_pk("foo") DB.sqls.must_equal ["SELECT * FROM a WHERE (a.id = 'foo') LIMIT 1"] @ds.with_pk!("foo") DB.sqls.must_equal ["SELECT * FROM a WHERE (a.id = 'foo') LIMIT 1"] end it "should handle an array for composite primary keys" do @c.set_primary_key [:id1, :id2] @ds.with_pk([1, 2]) DB.sqls.must_equal ["SELECT * FROM a WHERE ((a.id1 = 1) AND (a.id2 = 2)) LIMIT 1"] @ds.with_pk!([1, 2]) DB.sqls.must_equal ["SELECT * FROM a WHERE ((a.id1 = 1) AND (a.id2 = 2)) LIMIT 1"] end it "should work with composite primary keys when called repeatedly on a frozen dataset" do @c.set_primary_key [:id1, :id2] @ds.freeze 5.times do @ds.with_pk([1,2]) DB.sqls.must_equal ["SELECT * FROM a WHERE ((a.id1 = 1) AND (a.id2 = 2)) LIMIT 1"] end end it "should have with_pk return nil and with_pk! raise if no rows match" do @ds = @ds.with_fetch([]) @ds.with_pk(1).must_be_nil DB.sqls.must_equal ["SELECT * FROM a WHERE (a.id = 1) LIMIT 1"] proc{@ds.with_pk!(1)}.must_raise(Sequel::NoMatchingRow) DB.sqls.must_equal ["SELECT * FROM a WHERE (a.id = 1) LIMIT 1"] end it "should have with_pk return nil and with_pk!
raise if no rows match when calling the class method" do @c.dataset = @c.dataset.with_fetch([]) @c.with_pk(1).must_be_nil DB.sqls.must_equal ["SELECT * FROM a WHERE id = 1"] proc{@c.with_pk!(1)}.must_raise(Sequel::NoMatchingRow) DB.sqls.must_equal ["SELECT * FROM a WHERE id = 1"] end it "should have #[] consider an integer as a primary key lookup" do @ds[1].must_equal @c.load(:id=>1) DB.sqls.must_equal ["SELECT * FROM a WHERE (a.id = 1) LIMIT 1"] end it "should not have #[] consider a literal string as a primary key lookup" do @ds[Sequel.lit('foo')].must_equal @c.load(:id=>1) DB.sqls.must_equal ["SELECT * FROM a WHERE (foo) LIMIT 1"] end it "should raise Error if called on a dataset with no primary key" do @c.no_primary_key @ds.freeze 5.times do proc{@ds.with_pk(1)}.must_raise Sequel::Error end end end describe "Model::include" do it "shouldn't change the signature of Module::include" do mod1 = Module.new mod2 = Module.new including_class = Class.new(Sequel::Model(:items)) do include(mod1, mod2) end including_class.included_modules.must_include(mod1) including_class.included_modules.must_include(mod2) end end
sequel-5.63.0/spec/model/class_dataset_methods_spec.rb
require_relative "spec_helper" describe Sequel::Model, "class dataset methods" do before do @db = Sequel.mock @c = Class.new(Sequel::Model(@db[:items].with_extend{def supports_cte?(*) true end}.with_fetch(:id=>1).with_autoid(1).with_numrows(0))) @d = @c.dataset @db.sqls end it "should call the dataset method of the same name with the same args" do @c.all.must_equal [@c.load(:id=>1)] @db.sqls.must_equal ["SELECT * FROM items"] @c.avg(:id).must_equal 1 @db.sqls.must_equal ["SELECT avg(id) AS avg FROM items LIMIT 1"] @c.count.must_equal 1 @db.sqls.must_equal ["SELECT count(*) AS count FROM items LIMIT 1"] @c.cross_join(@c.table_name).sql.must_equal "SELECT * FROM items CROSS JOIN items" @c.distinct.sql.must_equal "SELECT DISTINCT * FROM items" @c.each{|r| r.must_equal @c.load(:id=>1)}.must_equal @d @db.sqls.must_equal ["SELECT * FROM items"] @c.each_server{|r| r.opts[:server].must_equal :default} @c.empty?.must_equal false @db.sqls.must_equal ["SELECT 1 AS one FROM items LIMIT 1"] @c.except(@d, :from_self=>false).sql.must_equal "SELECT * FROM items EXCEPT SELECT * FROM items" @c.exclude(:a).sql.must_equal "SELECT * FROM items WHERE NOT a" @c.exclude_having(:a).sql.must_equal "SELECT * FROM items HAVING NOT a" @c.fetch_rows("S"){|r| r.must_equal(:id=>1)} @db.sqls.must_equal ["S"] @c.filter(:a).sql.must_equal "SELECT * FROM items WHERE a" @c.first.must_equal @c.load(:id=>1) @db.sqls.must_equal ["SELECT * FROM items LIMIT 1"] @c.first!.must_equal @c.load(:id=>1) @db.sqls.must_equal ["SELECT * FROM items LIMIT 1"] @c.for_update.sql.must_equal "SELECT * FROM items FOR UPDATE" @c.from.sql.must_equal "SELECT *" @c.from_self.sql.must_equal "SELECT * FROM (SELECT * FROM items) AS t1" @c.full_join(@c.table_name).sql.must_equal "SELECT * FROM items FULL JOIN items" @c.full_outer_join(@c.table_name).sql.must_equal "SELECT * FROM items FULL OUTER JOIN items" @c.get(:a).must_equal
1 @db.sqls.must_equal ["SELECT a FROM items LIMIT 1"] @c.graph(@c.table_name, nil, :table_alias=>:a).sql.must_equal "SELECT * FROM items LEFT OUTER JOIN items AS a" @db.sqls @c.grep(:id, 'a%').sql.must_equal "SELECT * FROM items WHERE ((id LIKE 'a%' ESCAPE '\\'))" @c.group(:a).sql.must_equal "SELECT * FROM items GROUP BY a" @c.group_append(:a).sql.must_equal "SELECT * FROM items GROUP BY a" @c.group_and_count(:a).sql.must_equal "SELECT a, count(*) AS count FROM items GROUP BY a" @c.group_by(:a).sql.must_equal "SELECT * FROM items GROUP BY a" @c.having(:a).sql.must_equal "SELECT * FROM items HAVING a" @c.import([:id], [[1]]) @db.sqls.must_equal ["INSERT INTO items (id) VALUES (1)"] @c.inner_join(@c.table_name).sql.must_equal "SELECT * FROM items INNER JOIN items" @c.insert.must_equal 1 @db.sqls.must_equal ["INSERT INTO items DEFAULT VALUES"] @c.intersect(@d, :from_self=>false).sql.must_equal "SELECT * FROM items INTERSECT SELECT * FROM items" @c.join(@c.table_name).sql.must_equal "SELECT * FROM items INNER JOIN items" @c.join_table(:inner, @c.table_name).sql.must_equal "SELECT * FROM items INNER JOIN items" @c.last.must_equal @c.load(:id=>1) @db.sqls.must_equal ["SELECT * FROM items ORDER BY id DESC LIMIT 1"] @c.left_join(@c.table_name).sql.must_equal "SELECT * FROM items LEFT JOIN items" @c.left_outer_join(@c.table_name).sql.must_equal "SELECT * FROM items LEFT OUTER JOIN items" @c.limit(2).sql.must_equal "SELECT * FROM items LIMIT 2" @c.lock_style(:update).sql.must_equal "SELECT * FROM items FOR UPDATE" @c.map(:id).must_equal [1] @db.sqls.must_equal ["SELECT * FROM items"] @c.max(:id).must_equal 1 @db.sqls.must_equal ["SELECT max(id) AS max FROM items LIMIT 1"] @c.min(:id).must_equal 1 @db.sqls.must_equal ["SELECT min(id) AS min FROM items LIMIT 1"] @c.multi_insert([{:id=>1}]) @db.sqls.must_equal ["INSERT INTO items (id) VALUES (1)"] @c.naked.row_proc.must_be_nil @c.natural_full_join(@c.table_name).sql.must_equal "SELECT * FROM items NATURAL FULL JOIN items" @c.natural_join(@c.table_name).sql.must_equal "SELECT * FROM items NATURAL JOIN items" @c.natural_left_join(@c.table_name).sql.must_equal "SELECT * FROM items NATURAL LEFT JOIN items" @c.natural_right_join(@c.table_name).sql.must_equal "SELECT * FROM items NATURAL RIGHT JOIN items" @c.offset(2).sql.must_equal "SELECT * FROM items OFFSET 2" @c.order(:a).sql.must_equal "SELECT * FROM items ORDER BY a" @c.order_append(:a).sql.must_equal "SELECT * FROM items ORDER BY a" @c.order_by(:a).sql.must_equal "SELECT * FROM items ORDER BY a" @c.order_more(:a).sql.must_equal "SELECT * FROM items ORDER BY a" @c.order_prepend(:a).sql.must_equal "SELECT * FROM items ORDER BY a" @c.paged_each{|r| r.must_equal @c.load(:id=>1)} @db.sqls.must_equal ["BEGIN", "SELECT * FROM items ORDER BY id LIMIT 1000 OFFSET 0", "COMMIT"] @c.qualify.sql.must_equal 'SELECT items.* FROM items' @c.right_join(@c.table_name).sql.must_equal "SELECT * FROM items RIGHT JOIN items" @c.right_outer_join(@c.table_name).sql.must_equal "SELECT * FROM items RIGHT OUTER JOIN items" @c.select(:a).sql.must_equal "SELECT a FROM items" @c.select_all(:items).sql.must_equal "SELECT items.* FROM items" @c.select_append(:a).sql.must_equal "SELECT *, a FROM items" @c.select_group(:a).sql.must_equal "SELECT a FROM items GROUP BY a" @c.select_hash(:id, :id).must_equal(1=>1) @db.sqls.must_equal ["SELECT id, id FROM items"] @c.select_hash_groups(:id, :id).must_equal(1=>[1]) @db.sqls.must_equal ["SELECT id, id FROM items"] @c.select_map(:id).must_equal [1] @db.sqls.must_equal ["SELECT id FROM items"] 
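# select_order_map behaves like select_map, but also orders by the selected column: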
@c.select_order_map(:id).must_equal [1] @db.sqls.must_equal ["SELECT id FROM items ORDER BY id"] @c.server(:a).opts[:server].must_equal :a @c.single_record.must_equal @c.load(:id=>1) @db.sqls.must_equal ["SELECT * FROM items LIMIT 1"] @c.single_record!.must_equal @c.load(:id=>1) @db.sqls.must_equal ["SELECT * FROM items"] @c.single_value.must_equal 1 @db.sqls.must_equal ["SELECT * FROM items LIMIT 1"] @c.single_value!.must_equal 1 @db.sqls.must_equal ["SELECT * FROM items"] @c.sum(:id).must_equal 1 @db.sqls.must_equal ["SELECT sum(id) AS sum FROM items LIMIT 1"] @c.as_hash(:id, :id).must_equal(1=>1) @db.sqls.must_equal ["SELECT * FROM items"] @c.to_hash(:id, :id).must_equal(1=>1) @db.sqls.must_equal ["SELECT * FROM items"] @c.to_hash_groups(:id, :id).must_equal(1=>[1]) @db.sqls.must_equal ["SELECT * FROM items"] @c.truncate @db.sqls.must_equal ["TRUNCATE TABLE items"] @c.union(@d, :from_self=>false).sql.must_equal "SELECT * FROM items UNION SELECT * FROM items" @c.where(:a).sql.must_equal "SELECT * FROM items WHERE a" @c.with(:a, @d).sql.must_equal "WITH a AS (SELECT * FROM items) SELECT * FROM items" @c.with_recursive(:a, @d, @d).sql.must_equal "WITH a AS (SELECT * FROM items UNION ALL SELECT * FROM items) SELECT * FROM items" @c.with_sql('S').sql.must_equal "S" @c.where_all(:id=>1){|r|}.must_equal [@c.load(:id=>1)] @db.sqls.must_equal ["SELECT * FROM items WHERE (id = 1)"] @c.where_each(:id=>1){|r|} @db.sqls.must_equal ["SELECT * FROM items WHERE (id = 1)"] @c.where_single_value(:id=>1).must_equal 1 @db.sqls.must_equal ["SELECT * FROM items WHERE (id = 1) LIMIT 1"] sc = Class.new(@c) sc.set_dataset(@d.where(:a).order(:a).select(:a).group(:a).limit(2)) @db.sqls sc.invert.sql.must_equal 'SELECT a FROM items WHERE NOT a GROUP BY a ORDER BY a LIMIT 2' sc.dataset = sc.dataset.with_fetch(:v1=>1, :v2=>2) @db.sqls sc.reverse.sql.must_equal 'SELECT a FROM items WHERE a GROUP BY a ORDER BY a DESC LIMIT 2' sc.reverse_order.sql.must_equal 'SELECT a FROM items WHERE a GROUP BY a ORDER BY a DESC LIMIT 2' sc.select_more(:a).sql.must_equal 'SELECT a, a FROM items WHERE a GROUP BY a ORDER BY a LIMIT 2' sc.unfiltered.sql.must_equal 'SELECT a FROM items GROUP BY a ORDER BY a LIMIT 2' sc.ungrouped.sql.must_equal 'SELECT a FROM items WHERE a ORDER BY a LIMIT 2' sc.unordered.sql.must_equal 'SELECT a FROM items WHERE a GROUP BY a LIMIT 2' sc.unlimited.sql.must_equal 'SELECT a FROM items WHERE a GROUP BY a ORDER BY a' sc.dataset.graph(:a).ungraphed.opts[:graph].must_be_nil end it 'should not initialize model for aggregate methods when placeholder cached' do @c.define_singleton_method(:call) { |*| raise 'Should not be called for aggregate methods' } 5.times do @c.max(:id) end end end 
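# The suite above covers Sequel::Model's delegation of dataset methods to the
# model class. A minimal usage sketch, assuming an in-memory SQLite database
# and a hypothetical Album model (neither is part of this spec file):
#
#   require 'sequel'
#   DB = Sequel.sqlite # needs the sqlite3 gem
#   DB.create_table(:albums){primary_key :id; String :name}
#   class Album < Sequel::Model; end
#
#   Album.where(name: 'X').count # same as Album.dataset.where(name: 'X').count
#   Album.order(:name).all       # dataset rows come back as Album instances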
sequel-5.63.0/spec/model/dataset_methods_spec.rb
require_relative "spec_helper" describe Sequel::Model::DatasetMethods, "#destroy" do before do @c = Class.new(Sequel::Model(:items)) do self::Destroyed = [] def destroy model::Destroyed << self end end @d = @c.dataset @d = @d.with_fetch([{:id=>1}, {:id=>2}]) DB.reset end it "should instantiate objects in the dataset and call destroy on each" do @d.destroy @c::Destroyed.collect{|x| x.values}.must_equal [{:id=>1}, {:id=>2}] end it "should return the number of records destroyed" do @d.destroy.must_equal 2 @d = @d.with_fetch([[{:id=>1}], []]) @d.destroy.must_equal 1 @d.destroy.must_equal 0 end it "should use a transaction if use_transactions is true for the model" do @c.use_transactions = true @d.destroy DB.sqls.must_equal ["BEGIN", "SELECT * FROM items", "COMMIT"] end it "should not use a transaction if use_transactions is false for the model" do @c.use_transactions = false @d.destroy DB.sqls.must_equal ["SELECT * FROM items"] end end describe Sequel::Model::DatasetMethods, "#as_hash" do before do @c = Class.new(Sequel::Model(:items)) do set_primary_key :name end @d = @c.dataset end it "should result in a hash with primary key value keys and model object values" do @d = @d.with_fetch([{:name=>1}, {:name=>2}]) h = @d.as_hash h.must_be_kind_of(Hash) a = h.to_a a.collect{|x| x[1].class}.must_equal [@c, @c] a.sort_by{|x| x[0]}.collect{|x| [x[0], x[1].values]}.must_equal [[1, {:name=>1}], [2, {:name=>2}]] end it "should be aliased as to_hash" do @d = @d.with_fetch([{:name=>1}, {:name=>2}]) h = @d.to_hash h.must_be_kind_of(Hash) a = h.to_a a.collect{|x| x[1].class}.must_equal [@c, @c] a.sort_by{|x| x[0]}.collect{|x| [x[0], x[1].values]}.must_equal [[1, {:name=>1}], [2, {:name=>2}]] end it "should result in a hash with given value keys and model object values" do @d = @d.with_fetch([{:name=>1, :number=>3}, {:name=>2, :number=>4}]) h = @d.as_hash(:number) h.must_be_kind_of(Hash) a = h.to_a a.collect{|x| x[1].class}.must_equal [@c, @c] a.sort_by{|x| x[0]}.collect{|x| [x[0], x[1].values]}.must_equal [[3, {:name=>1, :number=>3}], [4, {:name=>2, :number=>4}]] end it "should raise an error if the class doesn't have a primary key" do @c.no_primary_key proc{@d.as_hash}.must_raise(Sequel::Error) end end describe Sequel::Model::DatasetMethods do before do @c = Class.new(Sequel::Model(:items)) @c.columns :id @c.db.reset end it "#first should handle no primary key" do @c.no_primary_key @c.first.must_be_kind_of(@c) @c.db.sqls.must_equal ['SELECT * FROM items LIMIT 1'] end it "#last should reverse order by primary key if not already ordered" do
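# With no explicit order, #last reverses the primary key order and limits to one row: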
@c.last.must_be_kind_of(@c) @c.db.sqls.must_equal ['SELECT * FROM items ORDER BY id DESC LIMIT 1'] @c.where(:id=>2).last(:foo=>2){{bar=>3}}.must_be_kind_of(@c) @c.db.sqls.must_equal ['SELECT * FROM items WHERE ((id = 2) AND (foo = 2) AND (bar = 3)) ORDER BY id DESC LIMIT 1'] end it "#last should use existing order if there is one" do @c.order(:foo).last.must_be_kind_of(@c) @c.db.sqls.must_equal ['SELECT * FROM items ORDER BY foo DESC LIMIT 1'] end it "#last should handle a composite primary key" do @c.set_primary_key [:id1, :id2] @c.last.must_be_kind_of(@c) @c.db.sqls.must_equal ['SELECT * FROM items ORDER BY id1 DESC, id2 DESC LIMIT 1'] end it "#last should raise an error if no primary key" do @c.no_primary_key proc{@c.last}.must_raise(Sequel::Error) end it "#paged_each should order by primary key if not already ordered" do @c.paged_each{|r| r.must_be_kind_of(@c)} @c.db.sqls.must_equal ['BEGIN', 'SELECT * FROM items ORDER BY id LIMIT 1000 OFFSET 0', 'COMMIT'] @c.paged_each(:rows_per_fetch=>5){|r|} @c.db.sqls.must_equal ['BEGIN', 'SELECT * FROM items ORDER BY id LIMIT 5 OFFSET 0', 'COMMIT'] end it "#paged_each should use existing order if there is one" do @c.order(:foo).paged_each{|r| r.must_be_kind_of(@c)} @c.db.sqls.must_equal ['BEGIN', 'SELECT * FROM items ORDER BY foo LIMIT 1000 OFFSET 0', 'COMMIT'] end it "#paged_each should handle a composite primary key" do @c.set_primary_key [:id1, :id2] @c.paged_each{|r| r.must_be_kind_of(@c)} @c.db.sqls.must_equal ['BEGIN', 'SELECT * FROM items ORDER BY id1, id2 LIMIT 1000 OFFSET 0', 'COMMIT'] end it "#paged_each should raise an error if no primary key" do @c.no_primary_key proc{@c.paged_each{|r| }}.must_raise(Sequel::Error) end end describe Sequel::Model::DatasetMethods, "#where_all" do before do @c = Class.new(Sequel::Model(DB[:items].freeze)) DB.reset end it "should filter dataset with condition, and return related rows" do 5.times do @c.where_all(:id=>1).must_equal [@c.load(:id=>1, :x=>1)] @c.db.sqls.must_equal ['SELECT * FROM items WHERE (id = 1)'] end end it "should yield each row to the given block" do 5.times do a = [] @c.where_all(:id=>1){|r| a << r}.must_equal [@c.load(:id=>1, :x=>1)] a.must_equal [@c.load(:id=>1, :x=>1)] @c.db.sqls.must_equal ['SELECT * FROM items WHERE (id = 1)'] end end end describe Sequel::Model::DatasetMethods, "#where_each" do before do @c = Class.new(Sequel::Model(DB[:items].freeze)) DB.reset end it "should yield each row to the given block" do 5.times do a = [] @c.where_each(:id=>1){|r| a << r} a.must_equal [@c.load(:id=>1, :x=>1)] @c.db.sqls.must_equal ['SELECT * FROM items WHERE (id = 1)'] end end end describe Sequel::Model::DatasetMethods, "#where_single_value" do before do @c = Class.new(Sequel::Model(DB[:items].freeze)) @c.class_eval do dataset_module do select :only_id, :id end end DB.reset end it "should return single value" do 5.times do @c.only_id.where_single_value(:id=>1).must_equal 1 @c.db.sqls.must_equal ['SELECT id FROM items WHERE (id = 1) LIMIT 1'] end end end 
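# where_all, where_each, and where_single_value are the cached, optimized
# variants of where(...).all / .each / .single_value exercised above. A hedged
# sketch against a hypothetical Album model (not defined in this suite):
#
#   Album.where_all(id: 1)                      # => [Album instances]
#   Album.where_each(id: 1){|album| p album}    # yields each matching row
#   Album.select(:id).where_single_value(id: 1) # first column of first row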
sequel-5.63.0/spec/model/eager_loading_spec.rb
require_relative "spec_helper" describe Sequel::Model, "#eager" do before do class ::EagerAlbum < Sequel::Model(:albums) columns :id, :band_id many_to_one :band, :class=>'EagerBand', :key=>:band_id one_to_many :tracks, :class=>'EagerTrack', :key=>:album_id many_to_many :genres, :class=>'EagerGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag one_through_one :genre, :clone=>:genres one_to_many :good_tracks, :class=>'EagerTrack', :reciprocal=>nil, :key=>:album_id do |ds| ds.filter(:name=>'Good') end many_to_one :band_name, :class=>'EagerBand', :key=>:band_id, :select=>[:id, :name] one_to_many :track_names, :class=>'EagerTrack', :key=>:album_id, :select=>[:id, :name] many_to_many :genre_names, :class=>'EagerGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :select=>[:id] def band_id3 band_id*3 end end class ::EagerBand < Sequel::Model(:bands) columns :id, :p_k one_to_many :albums, :class=>'EagerAlbum', :key=>:band_id, :eager=>:tracks, :reciprocal=>:band one_to_many :graph_albums, :class=>'EagerAlbum', :key=>:band_id, :eager_graph=>:tracks, :reciprocal=>nil many_to_many :members, :class=>'EagerBandMember', :left_key=>:band_id, :right_key=>:member_id, :join_table=>:bm many_to_many :graph_members, :clone=>:members, :eager_graph=>:bands one_to_many :good_albums, :class=>'EagerAlbum', :key=>:band_id, :reciprocal=>nil, :eager_block=>proc{|ds| ds.filter(:name=>'good')} do |ds| ds.filter(:name=>'Good') end one_to_many :self_titled_albums, :class=>'EagerAlbum', :key=>:band_id, :allow_eager=>false do |ds| ds.filter(:name=>name) end one_to_many :albums_by_name, :class=>'EagerAlbum', :key=>:band_id, :order=>:name, :allow_eager=>false one_to_many :top_10_albums, :class=>'EagerAlbum', :key=>:band_id, :limit=>10 def id3 id/3 end end class ::EagerTrack < Sequel::Model(:tracks) columns :id, :album_id many_to_one :album, :class=>'EagerAlbum', :key=>:album_id end class ::EagerGenre < Sequel::Model(:genres) columns :id, :xxx many_to_many :albums, :class=>'EagerAlbum', :left_key=>:genre_id, :right_key=>:album_id, :join_table=>:ag end class ::EagerBandMember < Sequel::Model(:members) columns :id many_to_many :bands, :class=>'EagerBand', :left_key=>:member_id, :right_key=>:band_id, :join_table=>:bm, :order =>:id end EagerAlbum.dataset = EagerAlbum.dataset.with_fetch(proc do |sql| h = if sql =~ /101/ {:id => 101, :band_id=> 101} else {:id => 1, :band_id=> 2} end h[:x_foreign_key_x] = 4 if sql =~ /ag\.genre_id/ h end) EagerAlbum.dataset.columns(:id, :band_id) EagerBand.dataset =
EagerBand.dataset.with_fetch(proc do |sql| case sql when /id IN (101)/ # nothing when /id > 100/ [{:id => 101}, {:id => 102}] else h = {:id => 2} h[:x_foreign_key_x] = 5 if sql =~ /bm\.member_id/ h end end) EagerTrack.dataset = EagerTrack.dataset.with_fetch(:id => 3, :album_id => 1) EagerGenre.dataset = EagerGenre.dataset.with_fetch(proc do |sql| h = {:id => 4} h[:x_foreign_key_x] = 1 if sql =~ /ag\.album_id/ h end) EagerBandMember.dataset = EagerBandMember.dataset.with_fetch(proc do |sql| h = {:id => 5} h[:x_foreign_key_x] = 2 if sql =~ /bm\.band_id/ h end) DB.reset end after do [:EagerAlbum, :EagerBand, :EagerTrack, :EagerGenre, :EagerBandMember].each{|x| Object.send(:remove_const, x)} end it "should populate :key_hash and :id_map option correctly for custom eager loaders" do khs = {} pr = proc{|a, m| proc{|h| khs[a] = h[:key_hash][m]; h[:id_map].must_equal h[:key_hash][m]}} EagerAlbum.many_to_one :sband, :clone=>:band, :eager_loader=>pr.call(:sband, :band_id) EagerAlbum.one_to_many :stracks, :clone=>:tracks, :eager_loader=>pr.call(:stracks, :id) EagerAlbum.many_to_many :sgenres, :clone=>:genres, :eager_loader=>pr.call(:sgenres, :id) EagerAlbum.eager(:sband, :stracks, :sgenres).all khs.must_equal(:sband=>{2=>[EagerAlbum.load(:band_id=>2, :id=>1)]}, :stracks=>{1=>[EagerAlbum.load(:band_id=>2, :id=>1)]}, :sgenres=>{1=>[EagerAlbum.load(:band_id=>2, :id=>1)]}) end it "should populate :key_hash using the method symbol" do khs = {} pr = proc{|a, m| proc{|h| khs[a] = h[:key_hash][m]}} EagerAlbum.many_to_one :sband, :clone=>:band, :eager_loader=>pr.call(:sband, :band_id), :key=>:band_id, :key_column=>:b_id EagerAlbum.one_to_many :stracks, :clone=>:tracks, :eager_loader=>pr.call(:stracks, :id), :primary_key=>:id, :primary_key_column=>:i EagerAlbum.many_to_many :sgenres, :clone=>:genres, :eager_loader=>pr.call(:sgenres, :id), :left_primary_key=>:id, :left_primary_key_column=>:i EagerAlbum.eager(:sband, :stracks, :sgenres).all khs.must_equal(:sband=>{2=>[EagerAlbum.load(:band_id=>2, :id=>1)]}, :stracks=>{1=>[EagerAlbum.load(:band_id=>2, :id=>1)]}, :sgenres=>{1=>[EagerAlbum.load(:band_id=>2, :id=>1)]}) end it "should raise an error if called without a symbol or hash" do proc{EagerAlbum.eager(Object.new)}.must_raise(Sequel::Error) end it "should eagerly load a single many_to_one association" do a = EagerAlbum.eager(:band).all DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM bands WHERE (bands.id IN (2))'] a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.first.band.must_equal EagerBand.load(:id=>2) DB.sqls.must_equal [] end it "should eagerly load when using to_hash" do h = EagerAlbum.eager(:band).to_hash DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM bands WHERE (bands.id IN (2))'] h.must_equal(1=>EagerAlbum.load(:id => 1, :band_id => 2)) h[1].band.must_equal EagerBand.load(:id=>2) DB.sqls.must_equal [] end it "should eagerly load when using to_hash_groups" do h = EagerAlbum.eager(:band).to_hash_groups(:id) DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM bands WHERE (bands.id IN (2))'] h.must_equal(1=>[EagerAlbum.load(:id => 1, :band_id => 2)]) h[1].first.band.must_equal EagerBand.load(:id=>2) DB.sqls.must_equal [] end it "should work correctly if there are no records returned" do EagerAlbum.dataset = EagerAlbum.dataset.with_fetch([]) a = EagerAlbum.eager(:band).all DB.sqls.must_equal ['SELECT * FROM albums'] a.must_equal [] end it "should skip eager loading for a many_to_one association with no matching keys" do EagerAlbum.dataset = 
EagerAlbum.dataset.with_fetch([{:id=>1, :band_id=>nil}]) a = EagerAlbum.eager(:band).all DB.sqls.must_equal ['SELECT * FROM albums'] a.must_equal [EagerAlbum.load(:id => 1, :band_id => nil)] a.first.associations.fetch(:band).must_be_nil a.first.band.must_be_nil DB.sqls.must_equal [] end it "should eagerly load a single many_to_one association with the same name as the column" do EagerAlbum.def_column_alias(:band_id_id, :band_id) EagerAlbum.many_to_one :band_id, :key_column=>:band_id, :class=>EagerBand a = EagerAlbum.eager(:band_id).all DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM bands WHERE (bands.id IN (2))'] a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.first.band_id.must_equal EagerBand.load(:id=>2) DB.sqls.must_equal [] end it "should eagerly load a single one_to_one association" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) DB.sqls.must_equal [] end it "should not break if the dataset does not have a row proc" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id a = EagerAlbum.eager(:track).naked.all a.must_equal [{:id => 1, :band_id => 2}] DB.sqls.must_equal ['SELECT * FROM albums'] end it "should eagerly load a single one_to_one association with a composite key with some parts nil" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>[:album_id, :id], :primary_key=>[:id, :band_id] EagerAlbum.dataset = EagerAlbum.dataset.with_fetch([{:id => 1, :band_id=>2}, {:id => 2, :band_id=>nil}]) EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:id => 2, :album_id=>1}, {:id => nil, :album_id=>2}]) a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2), EagerAlbum.load(:id => 2, :band_id => nil)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM tracks WHERE ((tracks.album_id, tracks.id) IN ((1, 2)))"] a.first.track.must_equal EagerTrack.load(:id => 2, :album_id=>1) a.last.track.must_be_nil DB.sqls.must_equal [] end it "should eagerly load a single one_to_one association without an order" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:id => 3, :album_id=>1}, {:id => 4, :album_id=>1}]) a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) DB.sqls.must_equal [] end it "should eagerly load a single one_to_one association with an order" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :order=>:a a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = tracks.album_id) ORDER BY a LIMIT 1) AS t1'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) DB.sqls.must_equal [] end it "should eagerly load a single one_to_one association using the :distinct_on strategy" do EagerTrack.dataset = EagerTrack.dataset.with_extend{def supports_distinct_on?; true end} EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :order=>:a, :eager_limit_strategy=>:distinct_on a = EagerAlbum.eager(:track).all 
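# The :distinct_on strategy uses DISTINCT ON (the association key), ordering by the key and then the association's :order: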
a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT DISTINCT ON (tracks.album_id) * FROM tracks WHERE (tracks.album_id IN (1)) ORDER BY tracks.album_id, a'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) DB.sqls.must_equal [] end it "should eagerly load a single one_to_one association using the :window_function strategy" do EagerTrack.dataset = EagerTrack.dataset.with_extend{def supports_window_functions?; true end} EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :order=>:name, :eager_limit_strategy=>:window_function a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id ORDER BY name) AS x_sequel_row_number_x FROM tracks WHERE (tracks.album_id IN (1))) AS t1 WHERE (x_sequel_row_number_x = 1)'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) DB.sqls.must_equal [] end it "should eagerly load a single one_to_one association using the :window_function strategy on MySQL" do odb = DB db = Class.new do def database_type; :mysql; end define_method(:method_missing) do |*args, &block| odb.send(*args, &block) end end.new begin EagerTrack.dataset = EagerTrack.dataset.with_extend do def supports_window_functions?; true end define_method(:db){db} end EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :order=>:name, :eager_limit_strategy=>:window_function a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id ORDER BY name) AS x_sequel_row_number_x FROM tracks WHERE (tracks.album_id IN (1))) AS t1 WHERE (x_sequel_row_number_x = 1) ORDER BY x_sequel_row_number_x'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) DB.sqls.must_equal [] ensure db = DB end end it "should eagerly load a single one_to_one association with an offset and an eager callback" do EagerTrack.dataset = EagerTrack.dataset.with_extend{def supports_window_functions?; true end}.with_fetch([{:id => 3, :album_id=>1, :x_sequel_row_number_x=>2}]) EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :order=>:name, :limit=>[nil, 1] a = EagerAlbum.eager(:track=>proc{|ds| ds}).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id ORDER BY name) AS x_sequel_row_number_x FROM tracks WHERE (tracks.album_id IN (1))) AS t1 WHERE (x_sequel_row_number_x = 2)'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) DB.sqls.must_equal [] end it "should automatically use an eager limit strategy if the association has an offset" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :limit=>[1,1] EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:id => 4, :album_id=>1}]) a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = tracks.album_id) LIMIT 1 OFFSET 1) AS t1'] a.first.track.must_equal EagerTrack.load(:id => 4, :album_id=>1) DB.sqls.must_equal [] end it "should handle offsets when using the :ruby eager limit strategy" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :limit=>[1,1],
:eager_limit_strategy=>:ruby EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:id => 3, :album_id=>1}, {:id => 4, :album_id=>1}]) a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))'] a.first.track.must_equal EagerTrack.load(:id => 4, :album_id=>1) DB.sqls.must_equal [] end it "should handle :ruby eager limit strategy without a limit or offset" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :eager_limit_strategy=>:ruby EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:id => 3, :album_id=>1}, {:id => 4, :album_id=>1}]) a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) DB.sqls.must_equal [] end it "should support a :subqueries_per_union option for the number of subqueries in a union" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :limit=>[1,1], :subqueries_per_union=>1 EagerAlbum.dataset = EagerAlbum.dataset.with_fetch([{:id => 1, :band_id => 2}, {:id => 2, :band_id => 3}, {:id => 3, :band_id => 4}]) EagerTrack.dataset = EagerTrack.dataset.with_fetch([[{:id => 4, :album_id=>1}], [{:id=>5, :album_id=>2}], [{:id=>6, :album_id=>3}]]) a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2), EagerAlbum.load(:id => 2, :band_id => 3), EagerAlbum.load(:id => 3, :band_id => 4)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = tracks.album_id) LIMIT 1 OFFSET 1) AS t1', 'SELECT * FROM (SELECT * FROM tracks WHERE (2 = tracks.album_id) LIMIT 1 OFFSET 1) AS t1', 'SELECT * FROM (SELECT * FROM tracks WHERE (3 = tracks.album_id) LIMIT 1 OFFSET 1) AS t1'] a.first.track.must_equal EagerTrack.load(:id => 4, :album_id=>1) DB.sqls.must_equal [] EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :limit=>[1,1], :subqueries_per_union=>2 EagerTrack.dataset = EagerTrack.dataset.with_fetch([[{:id => 4, :album_id=>1}, {:id=>5, :album_id=>2}], [{:id=>6, :album_id=>3}]]) a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2), EagerAlbum.load(:id => 2, :band_id => 3), EagerAlbum.load(:id => 3, :band_id => 4)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = tracks.album_id) LIMIT 1 OFFSET 1) AS t1 UNION ALL SELECT * FROM (SELECT * FROM tracks WHERE (2 = tracks.album_id) LIMIT 1 OFFSET 1) AS t1', 'SELECT * FROM (SELECT * FROM tracks WHERE (3 = tracks.album_id) LIMIT 1 OFFSET 1) AS t1'] a.first.track.must_equal EagerTrack.load(:id => 4, :album_id=>1) DB.sqls.must_equal [] EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :limit=>[1,1], :subqueries_per_union=>3 EagerTrack.dataset = EagerTrack.dataset.with_fetch([[{:id => 4, :album_id=>1}, {:id=>5, :album_id=>2}, {:id=>6, :album_id=>3}]]) a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2), EagerAlbum.load(:id => 2, :band_id => 3), EagerAlbum.load(:id => 3, :band_id => 4)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = tracks.album_id) LIMIT 1 OFFSET 1) AS t1 UNION ALL SELECT * FROM (SELECT * FROM tracks WHERE (2 = tracks.album_id) LIMIT 1 OFFSET 1) AS t1 UNION ALL SELECT * FROM (SELECT * FROM tracks WHERE (3 =
tracks.album_id) LIMIT 1 OFFSET 1) AS t1'] a.first.track.must_equal EagerTrack.load(:id => 4, :album_id=>1) DB.sqls.must_equal [] end it "should handle eager loading with UNION when using an eager block" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :limit=>[1,1], :subqueries_per_union=>2, :instance_specific=>false do |ds| ds.where(:x=>4) end EagerAlbum.dataset = EagerAlbum.dataset.with_fetch([{:id => 1, :band_id => 2}, {:id => 2, :band_id => 3}, {:id => 3, :band_id => 4}]) EagerTrack.dataset = EagerTrack.dataset.with_fetch([[{:id => 4, :album_id=>1}, {:id=>5, :album_id=>2}], [{:id=>6, :album_id=>3}]]) a = EagerAlbum.eager(:track).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2), EagerAlbum.load(:id => 2, :band_id => 3), EagerAlbum.load(:id => 3, :band_id => 4)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE ((x = 4) AND (1 = tracks.album_id)) LIMIT 1 OFFSET 1) AS t1 UNION ALL SELECT * FROM (SELECT * FROM tracks WHERE ((x = 4) AND (2 = tracks.album_id)) LIMIT 1 OFFSET 1) AS t1', 'SELECT * FROM (SELECT * FROM tracks WHERE ((x = 4) AND (3 = tracks.album_id)) LIMIT 1 OFFSET 1) AS t1'] a.first.track.must_equal EagerTrack.load(:id => 4, :album_id=>1) DB.sqls.must_equal [] end it "should eagerly load a single one_to_many association" do a = EagerAlbum.eager(:tracks).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))'] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] end it "should eagerly load a single one_to_many association with an :eager_loader_key" do EagerAlbum.one_to_many :tracks, :eager_loader_key=>:id, :class=>:EagerTrack, :key=>:album_id a = EagerAlbum.eager(:tracks).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))'] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] end it "should eagerly load a single one_through_one association" do a = EagerAlbum.eager(:genre).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] a.first.genre.must_equal EagerGenre.load(:id=>4) DB.sqls.must_equal [] end it "should use first matching entry when eager loading one_through_one association" do EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:id => 3, :x_foreign_key_x=>1}, {:id => 4, :x_foreign_key_x=>1}]) a = EagerAlbum.eager(:genre).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] a.first.genre.must_equal EagerGenre.load(:id=>3) DB.sqls.must_equal [] end it "should eagerly load a single one_through_one association with an order" do EagerAlbum.one_through_one :genre, :clone=>:genre, :order=>:a a = EagerAlbum.eager(:genre).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM (SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (1 = ag.album_id) ORDER BY a LIMIT 1) AS t1"] a.first.genre.must_equal EagerGenre.load(:id=>4) DB.sqls.must_equal [] end it "should eagerly
load a single one_through_one association using the :distinct_on strategy" do EagerGenre.dataset = EagerGenre.dataset.with_extend{def supports_distinct_on?; true end} EagerAlbum.one_through_one :genre, :clone=>:genre, :order=>:a, :eager_limit_strategy=>:distinct_on a = EagerAlbum.eager(:genre).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT DISTINCT ON (ag.album_id) genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1)) ORDER BY ag.album_id, a"] a.first.genre.must_equal EagerGenre.load(:id=>4) DB.sqls.must_equal [] end it "should eagerly load a single one_through_one association using the :window_function strategy" do EagerGenre.dataset = EagerGenre.dataset.with_extend{def supports_window_functions?; true end} EagerAlbum.one_through_one :genre, :clone=>:genre, :eager_limit_strategy=>:window_function a = EagerAlbum.eager(:genre).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM (SELECT genres.*, ag.album_id AS x_foreign_key_x, row_number() OVER (PARTITION BY ag.album_id) AS x_sequel_row_number_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))) AS t1 WHERE (x_sequel_row_number_x = 1)"] a.first.genre.must_equal EagerGenre.load(:id=>4) DB.sqls.must_equal [] end it "should automatically use an eager limit strategy if the association has an offset" do EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:id => 3, :x_foreign_key_x=>1}, {:id => 4, :x_foreign_key_x=>1}]) a = EagerAlbum.eager(:genre).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] a.first.genre.must_equal EagerGenre.load(:id=>3) DB.sqls.must_equal [] end it "should eagerly load a single many_to_many association" do a = EagerAlbum.eager(:genres).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] a.first.genres.must_equal [EagerGenre.load(:id=>4)] DB.sqls.must_equal [] end it "should support using a custom :key option when eager loading many_to_one associations" do EagerAlbum.many_to_one :sband, :clone=>:band, :key=>:band_id3 EagerBand.dataset = EagerBand.dataset.with_fetch(:id=>6) a = EagerAlbum.eager(:sband).all DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM bands WHERE (bands.id IN (6))'] a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.first.sband.must_equal EagerBand.load(:id=>6) DB.sqls.must_equal [] end it "should support using a custom :eager_loader_key option when eager loading many_to_one associations" do EagerAlbum.many_to_one :sband, :clone=>:band, :eager_loader_key=>:band_id3, :key=>:band_id3 EagerBand.dataset = EagerBand.dataset.with_fetch(:id=>6) a = EagerAlbum.eager(:sband).all DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM bands WHERE (bands.id IN (6))'] a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.first.sband.must_equal EagerBand.load(:id=>6) DB.sqls.must_equal [] end it "should support using a custom :primary_key option when eager loading one_to_many associations" do EagerBand.one_to_many :salbums, :clone=>:albums, :primary_key=>:id3, :eager=>nil, :reciprocal=>nil
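# :primary_key=>:id3 keys the eager load on EagerBand#id3 (id/3), so the band with id 6 is looked up via band_id IN (2):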
EagerBand.dataset = EagerBand.dataset.with_fetch(:id=>6) a = EagerBand.eager(:salbums).all DB.sqls.must_equal ['SELECT * FROM bands', 'SELECT * FROM albums WHERE (albums.band_id IN (2))'] a.must_equal [EagerBand.load(:id => 6)] a.first.salbums.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal [] end it "should support using a custom :left_primary_key option when eager loading many_to_many associations" do EagerAlbum.many_to_many :sgenres, :clone=>:genres, :left_primary_key=>:band_id3 EagerGenre.dataset = EagerGenre.dataset.with_fetch(:id=>4, :x_foreign_key_x=>6) a = EagerAlbum.eager(:sgenres).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (6))"] a.first.sgenres.must_equal [EagerGenre.load(:id=>4)] DB.sqls.must_equal [] end it "should support using a custom :left_primary_key option when eager loading one_through_one associations" do EagerAlbum.one_through_one :sgenre, :clone=>:genre, :left_primary_key=>:band_id3 EagerGenre.dataset = EagerGenre.dataset.with_fetch(:id=>4, :x_foreign_key_x=>6) a = EagerAlbum.eager(:sgenre).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (6))"] a.first.sgenre.must_equal EagerGenre.load(:id=>4) DB.sqls.must_equal [] end it "should handle a :predicate_key option to change the SQL used in the lookup, for many_to_one associations" do EagerAlbum.many_to_one :sband, :clone=>:band, :predicate_key=>(Sequel[:bands][:id] / 3), :primary_key_method=>:id3 EagerBand.dataset = EagerBand.dataset.with_fetch(:id=>6) a = EagerAlbum.eager(:sband).all DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM bands WHERE ((bands.id / 3) IN (2))'] a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.first.sband.must_equal EagerBand.load(:id=>6) DB.sqls.must_equal [] end it "should handle a :predicate_key option to change the SQL used in the lookup, for one_to_many associations" do EagerBand.one_to_many :salbums, :clone=>:albums, :predicate_key=>(Sequel[:albums][:band_id] * 3), :key_method=>:band_id3, :eager=>nil, :reciprocal=>nil EagerBand.dataset = EagerBand.dataset.with_fetch(:id=>6) a = EagerBand.eager(:salbums).all DB.sqls.must_equal ['SELECT * FROM bands', 'SELECT * FROM albums WHERE ((albums.band_id * 3) IN (6))'] a.must_equal [EagerBand.load(:id => 6)] a.first.salbums.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal [] end it "should handle a :predicate_key option to change the SQL used in the lookup, for many_to_many associations" do EagerAlbum.many_to_many :sgenres, :clone=>:genres, :predicate_key=>(Sequel[:ag][:album_id] * 1) a = EagerAlbum.eager(:sgenres).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, (ag.album_id * 1) AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE ((ag.album_id * 1) IN (1))"] a.first.sgenres.must_equal [EagerGenre.load(:id=>4)] DB.sqls.must_equal [] end it "should handle a :predicate_key option to change the SQL used in the lookup, for one_through_one associations" do EagerAlbum.one_through_one :sgenre, :clone=>:genre, :predicate_key=>(Sequel[:ag][:album_id] * 1) a = EagerAlbum.eager(:sgenre).all a.must_equal [EagerAlbum.load(:id => 1, 
:band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, (ag.album_id * 1) AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE ((ag.album_id * 1) IN (1))"] a.first.sgenre.must_equal EagerGenre.load(:id=>4) DB.sqls.must_equal [] end it "should raise an error for an unhandled :eager_loader_key option" do EagerAlbum.many_to_many :sgenres, :clone=>:genres, :eager_loader_key=>1 ds = EagerAlbum.eager(:sgenres) proc{ds.all}.must_raise(Sequel::Error) end it "should not add entry to key_hash for :eager_loader_key=>nil option" do eo = nil EagerAlbum.many_to_many :sgenres, :clone=>:genres, :eager_loader_key=>nil, :eager_loader=>proc{|o| eo = o} ds = EagerAlbum.eager(:sgenres) ds.all eo[:key_hash].must_equal({}) eo[:id_map].must_be_nil end it "should have eager_load_results work with :eager_loader_key=>nil and :initialize_rows=>false options" do EagerAlbum.many_to_many :sgenres, :clone=>:genres, :eager_loader_key=>nil, :eager_loader=>(proc do |eo| rows = eo[:rows] EagerAlbum.eager_load_results(EagerAlbum.association_reflection(:sgenres), eo.merge(:initialize_rows=>false)) do |assoc_record| hash_key = assoc_record.values.delete(:x_foreign_key_x) objects = rows.select{|x| x.id == hash_key} objects.each do |object| (object.associations[:sgenres] ||= []).push(assoc_record) end end end) ds = EagerAlbum.eager(:sgenres) a = ds.all a.must_equal [EagerAlbum.load(:id=>1, :band_id=>2)] a.first.sgenres.must_equal [EagerGenre.load(:id=>4)] end it "should correctly handle a :select=>[] option to many_to_many" do EagerAlbum.many_to_many :sgenres, :clone=>:genres, :select=>[] EagerAlbum.eager(:sgenres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT *, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] end it "should correctly handle a :select=>[] option to one_through_one" do EagerAlbum.one_through_one :sgenre, :clone=>:genre, :select=>[] EagerAlbum.eager(:sgenre).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT *, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] end with_symbol_splitting "should correctly handle an aliased join table symbol in many_to_many" do EagerAlbum.many_to_many :sgenres, :clone=>:genres, :join_table=>:ag___ga EagerAlbum.eager(:sgenres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ga.album_id AS x_foreign_key_x FROM genres INNER JOIN ag AS ga ON (ga.genre_id = genres.id) WHERE (ga.album_id IN (1))"] end with_symbol_splitting "should correctly handle an aliased join table symbol in one_through_one" do EagerAlbum.one_through_one :sgenre, :clone=>:genre, :join_table=>:ag___ga EagerAlbum.eager(:sgenre).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ga.album_id AS x_foreign_key_x FROM genres INNER JOIN ag AS ga ON (ga.genre_id = genres.id) WHERE (ga.album_id IN (1))"] end it "should correctly handle an aliased join table in many_to_many" do EagerAlbum.many_to_many :sgenres, :clone=>:genres, :join_table=>Sequel[:ag].as(:ga) EagerAlbum.eager(:sgenres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ga.album_id AS x_foreign_key_x FROM genres INNER JOIN ag AS ga ON (ga.genre_id = genres.id) WHERE (ga.album_id IN (1))"] end it "should correctly handle an aliased join table in one_through_one" do EagerAlbum.one_through_one :sgenre, :clone=>:genre, :join_table=>Sequel[:ag].as(:ga) EagerAlbum.eager(:sgenre).all DB.sqls.must_equal ['SELECT * FROM albums', 
"SELECT genres.*, ga.album_id AS x_foreign_key_x FROM genres INNER JOIN ag AS ga ON (ga.genre_id = genres.id) WHERE (ga.album_id IN (1))"] end it "should eagerly load multiple associations in a single call" do a = EagerAlbum.eager(:genres, :tracks, :band).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] sqls = DB.sqls sqls.shift.must_equal 'SELECT * FROM albums' sqls.sort.must_equal ['SELECT * FROM bands WHERE (bands.id IN (2))', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))', 'SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))'] a = a.first a.band.must_equal EagerBand.load(:id=>2) a.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] a.genres.must_equal [EagerGenre.load(:id => 4)] DB.sqls.must_equal [] end it "should eagerly load multiple associations in separate calls" do a = EagerAlbum.eager(:genres).eager(:tracks).eager(:band).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] sqls = DB.sqls sqls.shift.must_equal 'SELECT * FROM albums' sqls.sort.must_equal ['SELECT * FROM bands WHERE (bands.id IN (2))', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))', 'SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))'] a = a.first a.band.must_equal EagerBand.load(:id=>2) a.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] a.genres.must_equal [EagerGenre.load(:id => 4)] DB.sqls.must_equal [] end it "should allow cascading of eager loading for associations of associated models" do a = EagerTrack.eager(:album=>{:band=>:members}).all a.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal ['SELECT * FROM tracks', 'SELECT * FROM albums WHERE (albums.id IN (1))', 'SELECT * FROM bands WHERE (bands.id IN (2))', "SELECT members.*, bm.band_id AS x_foreign_key_x FROM members INNER JOIN bm ON (bm.member_id = members.id) WHERE (bm.band_id IN (2))"] a = a.first a.album.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.album.band.must_equal EagerBand.load(:id => 2) a.album.band.members.must_equal [EagerBandMember.load(:id => 5)] DB.sqls.must_equal [] end it "should cascade eager loading when using a UNION strategy for eager loading limited associations" do EagerTrack.many_to_one :album2, :clone=>:album EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :order=>:a a = EagerAlbum.eager(:track=>:album2).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = tracks.album_id) ORDER BY a LIMIT 1) AS t1', 'SELECT * FROM albums WHERE (albums.id IN (1))'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) a.first.track.album2.must_equal EagerAlbum.load(:id => 1, :band_id => 2) DB.sqls.must_equal [] a = EagerAlbum.eager(:track=>[:album2]).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = tracks.album_id) ORDER BY a LIMIT 1) AS t1', 'SELECT * FROM albums WHERE (albums.id IN (1))'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) a.first.track.album2.must_equal EagerAlbum.load(:id => 1, :band_id => 2) DB.sqls.must_equal [] a = EagerAlbum.eager(:track=>{:album2=>:track}).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = tracks.album_id) ORDER BY a LIMIT 1) 
AS t1', 'SELECT * FROM albums WHERE (albums.id IN (1))', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = tracks.album_id) ORDER BY a LIMIT 1) AS t1'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) a.first.track.album2.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.first.track.album2.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) DB.sqls.must_equal [] end it "should call post_load when eager loading limited associations" do EagerTrack.many_to_one :album2, :clone=>:album a = [] m = Module.new do define_method(:post_load) do |objs| a << 1 super(objs) end end EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :order=>:a, :extend=>m EagerAlbum.eager(:track).all a.must_equal [1] end it "should cascade eagerly loading when the :eager association option is used" do a = EagerBand.eager(:albums).all a.must_equal [EagerBand.load(:id=>2)] DB.sqls.must_equal ['SELECT * FROM bands', 'SELECT * FROM albums WHERE (albums.band_id IN (2))', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))'] a.first.albums.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.first.albums.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal [] end it "should respect :eager when lazily loading an association" do a = EagerBand.all a.must_equal [EagerBand.load(:id=>2)] DB.sqls.must_equal ['SELECT * FROM bands'] a = a.first.albums DB.sqls.must_equal ['SELECT * FROM albums WHERE (albums.band_id = 2)', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))'] a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal [] end it "should respect :eager with cascaded hash when lazily loading an association" do EagerBand.one_to_many :albums, :eager=>{:tracks=>:album}, :clone=>:albums a = EagerBand.all a.must_equal [EagerBand.load(:id=>2)] DB.sqls.must_equal ['SELECT * FROM bands'] a = a.first.albums DB.sqls.must_equal ['SELECT * FROM albums WHERE (albums.band_id = 2)', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))', 'SELECT * FROM albums WHERE (albums.id IN (1))'] a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] a.first.tracks.first.album.must_equal a.first DB.sqls.must_equal [] end it "should cascade eagerly loading when the :eager_graph association option is used" do EagerAlbum.dataset = EagerAlbum.dataset.with_fetch(:id=>1, :band_id=>2, :tracks_id=>3, :album_id=>1) EagerAlbum.dataset.columns(:id, :band_id) EagerTrack.dataset.columns(:id, :album_id) a = EagerBand.eager(:graph_albums).all a.must_equal [EagerBand.load(:id=>2)] DB.sqls.must_equal ['SELECT * FROM bands', 'SELECT albums.id, albums.band_id, tracks.id AS tracks_id, tracks.album_id FROM albums LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) WHERE (albums.band_id IN (2))'] a.first.graph_albums.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.first.graph_albums.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal [] end it "should raise an Error when eager loading a many_to_many association with the :eager_graph option" do proc{EagerBand.eager(:graph_members).all}.must_raise(Sequel::Error) end it "should respect :eager_graph when lazily loading an association" do a = EagerBand.all a.must_equal [EagerBand.load(:id=>2)] DB.sqls.must_equal ['SELECT * FROM bands'] a = a.first EagerAlbum.dataset = EagerAlbum.dataset.with_fetch(:id=>1, :band_id=>2, :tracks_id=>3, :album_id=>1) 
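# Because this association is defined with the :eager_graph option, even a
# lazy load goes through a single JOINed query (asserted below); with_fetch
# seeds the mock adapter with the joined row that query should return.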
EagerAlbum.dataset.columns(:id, :band_id) EagerTrack.dataset.columns(:id, :album_id) a.graph_albums DB.sqls.must_equal ['SELECT albums.id, albums.band_id, tracks.id AS tracks_id, tracks.album_id FROM albums LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) WHERE (albums.band_id = 2)'] a.graph_albums.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.graph_albums.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal [] end it "should respect :eager_graph when lazily loading a many_to_many association" do EagerBandMember.dataset = EagerBandMember.dataset.with_fetch([{:id=>5, :bands_id=>2, :p_k=>6}, {:id=>5, :bands_id=>3, :p_k=>6}]).with_extend{def columns; [:id] end} a = EagerBand.load(:id=>2) EagerBand.dataset.columns(:id, :p_k) a.graph_members.must_equal [EagerBandMember.load(:id=>5)] DB.sqls.must_equal ['SELECT members.id, bands.id AS bands_id, bands.p_k FROM (SELECT members.* FROM members INNER JOIN bm ON (bm.member_id = members.id) WHERE (bm.band_id = 2)) AS members LEFT OUTER JOIN bm AS bm_0 ON (bm_0.member_id = members.id) LEFT OUTER JOIN bands ON (bands.id = bm_0.band_id) ORDER BY bands.id'] a.graph_members.first.bands.must_equal [EagerBand.load(:id=>2, :p_k=>6), EagerBand.load(:id=>3, :p_k=>6)] DB.sqls.must_equal [] end it "should respect :conditions when eagerly loading" do EagerBandMember.many_to_many :good_bands, :clone=>:bands, :conditions=>{:a=>32} a = EagerBandMember.eager(:good_bands).all a.must_equal [EagerBandMember.load(:id => 5)] DB.sqls.must_equal ['SELECT * FROM members', 'SELECT bands.*, bm.member_id AS x_foreign_key_x FROM bands INNER JOIN bm ON (bm.band_id = bands.id) WHERE ((a = 32) AND (bm.member_id IN (5))) ORDER BY id'] a.first.good_bands.must_equal [EagerBand.load(:id => 2)] DB.sqls.must_equal [] EagerBandMember.many_to_many :good_bands, :clone=>:bands, :conditions=>Sequel.lit("x = 1") a = EagerBandMember.eager(:good_bands).all DB.sqls.must_equal ['SELECT * FROM members', 'SELECT bands.*, bm.member_id AS x_foreign_key_x FROM bands INNER JOIN bm ON (bm.band_id = bands.id) WHERE ((x = 1) AND (bm.member_id IN (5))) ORDER BY id'] end it "should respect :order when eagerly loading" do a = EagerBandMember.eager(:bands).all a.must_equal [EagerBandMember.load(:id => 5)] DB.sqls.must_equal ['SELECT * FROM members', 'SELECT bands.*, bm.member_id AS x_foreign_key_x FROM bands INNER JOIN bm ON (bm.band_id = bands.id) WHERE (bm.member_id IN (5)) ORDER BY id'] a.first.bands.must_equal [EagerBand.load(:id => 2)] DB.sqls.must_equal [] end it "should populate the reciprocal many_to_one association when eagerly loading the one_to_many association" do a = EagerAlbum.eager(:tracks).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))'] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] a.first.tracks.first.album.must_equal a.first DB.sqls.must_equal [] end it "should cache the negative lookup when eagerly loading a many_to_one association" do a = EagerAlbum.eager(:band).filter(:id=>101).all a.must_equal [EagerAlbum.load(:id => 101, :band_id => 101)] DB.sqls.must_equal ['SELECT * FROM albums WHERE (id = 101)', 'SELECT * FROM bands WHERE (bands.id IN (101))'] a.first.associations.fetch(:band, 2).must_be_nil a.first.band.must_be_nil DB.sqls.must_equal [] end it "should cache the negative lookup when eagerly loading a *_to_many associations" do a = EagerBand.eager(:albums).where{id > 100}.all a.must_equal 
[EagerBand.load(:id => 101), EagerBand.load(:id =>102)] DB.sqls.must_equal ['SELECT * FROM bands WHERE (id > 100)', 'SELECT * FROM albums WHERE (albums.band_id IN (101, 102))', "SELECT * FROM tracks WHERE (tracks.album_id IN (101))"] a.map{|b| b.associations[:albums]}.must_equal [[EagerAlbum.load({:band_id=>101, :id=>101})], []] DB.sqls.must_equal [] end it "should use the association's block when eager loading by default" do EagerAlbum.eager(:good_tracks).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM tracks WHERE ((name = 'Good') AND (tracks.album_id IN (1)))"] end it "should use the eager_block option when eager loading if given" do EagerBand.eager(:good_albums).all DB.sqls.must_equal ['SELECT * FROM bands', "SELECT * FROM albums WHERE ((name = 'good') AND (albums.band_id IN (2)))"] EagerBand.eager(:good_albums=>:good_tracks).all DB.sqls.must_equal ['SELECT * FROM bands', "SELECT * FROM albums WHERE ((name = 'good') AND (albums.band_id IN (2)))", "SELECT * FROM tracks WHERE ((name = 'Good') AND (tracks.album_id IN (1)))"] end it "should raise an error when attempting to eagerly load an association with the :allow_eager option set to false" do proc{EagerBand.eager(:self_titled_albums).all}.must_raise(Sequel::Error) proc{EagerBand.eager(:albums_by_name).all}.must_raise(Sequel::Error) end it "should respect the association's :select option" do EagerAlbum.eager(:band_name).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT id, name FROM bands WHERE (bands.id IN (2))"] EagerAlbum.eager(:track_names).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT id, name FROM tracks WHERE (tracks.album_id IN (1))"] EagerAlbum.eager(:genre_names).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT id, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] end it "should respect many_to_one association's :qualify option" do EagerAlbum.many_to_one :special_band, :class=>:EagerBand, :qualify=>false, :key=>:band_id EagerBand.dataset = EagerBand.dataset.with_fetch(:id=>2) as = EagerAlbum.eager(:special_band).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM bands WHERE (id IN (2))"] as.map{|a| a.special_band}.must_equal [EagerBand.load(:id=>2)] DB.sqls.must_equal [] end it "should respect the association's :primary_key option" do EagerAlbum.many_to_one :special_band, :class=>:EagerBand, :primary_key=>:p_k, :key=>:band_id EagerBand.dataset = EagerBand.dataset.with_fetch(:p_k=>2, :id=>1) as = EagerAlbum.eager(:special_band).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM bands WHERE (bands.p_k IN (2))"] as.length.must_equal 1 as.first.special_band.must_equal EagerBand.load(:p_k=>2, :id=>1) EagerAlbum.one_to_many :special_tracks, :class=>:EagerTrack, :primary_key=>:band_id, :key=>:album_id, :reciprocal=>nil EagerTrack.dataset = EagerTrack.dataset.with_fetch(:album_id=>2, :id=>1) as = EagerAlbum.eager(:special_tracks).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM tracks WHERE (tracks.album_id IN (2))"] as.length.must_equal 1 as.first.special_tracks.must_equal [EagerTrack.load(:album_id=>2, :id=>1)] end it "should respect the many_to_one association's composite keys" do EagerAlbum.many_to_one :special_band, :class=>:EagerBand, :primary_key=>[:id, :p_k], :key=>[:band_id, :id] EagerBand.dataset = EagerBand.dataset.with_fetch(:p_k=>1, :id=>2) as = EagerAlbum.eager(:special_band).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM bands WHERE ((bands.id, bands.p_k) IN ((2, 
1)))"] as.length.must_equal 1 as.first.special_band.must_equal EagerBand.load(:p_k=>1, :id=>2) end it "should respect the one_to_many association's composite keys" do EagerAlbum.one_to_many :special_tracks, :class=>:EagerTrack, :primary_key=>[:band_id, :id], :key=>[:id, :album_id] EagerTrack.dataset = EagerTrack.dataset.with_fetch(:album_id=>1, :id=>2) as = EagerAlbum.eager(:special_tracks).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM tracks WHERE ((tracks.id, tracks.album_id) IN ((2, 1)))"] as.length.must_equal 1 as.first.special_tracks.must_equal [EagerTrack.load(:album_id=>1, :id=>2)] end it "should respect many_to_many association's composite keys" do EagerAlbum.many_to_many :special_genres, :class=>:EagerGenre, :left_primary_key=>[:band_id, :id], :left_key=>[:l1, :l2], :right_primary_key=>[:xxx, :id], :right_key=>[:r1, :r2], :join_table=>:ag EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_0_x=>2, :x_foreign_key_1_x=>1, :id=>5}, {:x_foreign_key_0_x=>2, :x_foreign_key_1_x=>1, :id=>6}]) as = EagerAlbum.eager(:special_genres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.l1 AS x_foreign_key_0_x, ag.l2 AS x_foreign_key_1_x FROM genres INNER JOIN ag ON ((ag.r1 = genres.xxx) AND (ag.r2 = genres.id)) WHERE ((ag.l1, ag.l2) IN ((2, 1)))"] as.length.must_equal 1 as.first.special_genres.must_equal [EagerGenre.load(:id=>5), EagerGenre.load(:id=>6)] end it "should respect one_through_one association's composite keys" do EagerAlbum.one_through_one :special_genre, :class=>:EagerGenre, :left_primary_key=>[:band_id, :id], :left_key=>[:l1, :l2], :right_primary_key=>[:xxx, :id], :right_key=>[:r1, :r2], :join_table=>:ag EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_0_x=>2, :x_foreign_key_1_x=>1, :id=>5}]) as = EagerAlbum.eager(:special_genre).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.l1 AS x_foreign_key_0_x, ag.l2 AS x_foreign_key_1_x FROM genres INNER JOIN ag ON ((ag.r1 = genres.xxx) AND (ag.r2 = genres.id)) WHERE ((ag.l1, ag.l2) IN ((2, 1)))"] as.length.must_equal 1 as.first.special_genre.must_equal EagerGenre.load(:id=>5) end it "should respect many_to_many association's :left_primary_key and :right_primary_key options" do EagerAlbum.many_to_many :special_genres, :class=>:EagerGenre, :left_primary_key=>:band_id, :left_key=>:album_id, :right_primary_key=>:xxx, :right_key=>:genre_id, :join_table=>:ag EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>2, :id=>5}, {:x_foreign_key_x=>2, :id=>6}]) as = EagerAlbum.eager(:special_genres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.xxx) WHERE (ag.album_id IN (2))"] as.length.must_equal 1 as.first.special_genres.must_equal [EagerGenre.load(:id=>5), EagerGenre.load(:id=>6)] end it "should respect one_through_one association's :left_primary_key and :right_primary_key options" do EagerAlbum.one_through_one :special_genre, :class=>:EagerGenre, :left_primary_key=>:band_id, :left_key=>:album_id, :right_primary_key=>:xxx, :right_key=>:genre_id, :join_table=>:ag EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>2, :id=>5}]) as = EagerAlbum.eager(:special_genre).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.xxx) WHERE (ag.album_id IN (2))"] as.length.must_equal 1 as.first.special_genre.must_equal EagerGenre.load(:id=>5) 
end it "should respect the :limit option on a one_to_many association using the :ruby strategy" do EagerAlbum.one_to_many :first_two_tracks, :class=>:EagerTrack, :key=>:album_id, :limit=>2, :eager_limit_strategy=>:ruby EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:album_id=>1, :id=>2}, {:album_id=>1, :id=>3}, {:album_id=>1, :id=>4}]) as = EagerAlbum.eager(:first_two_tracks).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM tracks WHERE (tracks.album_id IN (1))"] as.length.must_equal 1 as.first.first_two_tracks.must_equal [EagerTrack.load(:album_id=>1, :id=>2), EagerTrack.load(:album_id=>1, :id=>3)] DB.reset EagerAlbum.one_to_many :first_two_tracks, :class=>:EagerTrack, :key=>:album_id, :limit=>[1,1], :eager_limit_strategy=>:ruby as = EagerAlbum.eager(:first_two_tracks).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM tracks WHERE (tracks.album_id IN (1))"] as.length.must_equal 1 as.first.first_two_tracks.must_equal [EagerTrack.load(:album_id=>1, :id=>3)] DB.reset EagerAlbum.one_to_many :first_two_tracks, :class=>:EagerTrack, :key=>:album_id, :limit=>[nil,1], :eager_limit_strategy=>:ruby as = EagerAlbum.eager(:first_two_tracks).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM tracks WHERE (tracks.album_id IN (1))"] as.length.must_equal 1 as.first.first_two_tracks.must_equal [EagerTrack.load(:album_id=>1, :id=>3), EagerTrack.load(:album_id=>1, :id=>4)] end it "should support :ruby strategy without limit" do EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:album_id=>1, :id=>2}, {:album_id=>1, :id=>3}, {:album_id=>1, :id=>4}]) EagerAlbum.one_to_many :first_two_tracks, :class=>:EagerTrack, :key=>:album_id, :eager_limit_strategy=>:ruby as = EagerAlbum.eager(:first_two_tracks).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM tracks WHERE (tracks.album_id IN (1))"] as.length.must_equal 1 as.first.first_two_tracks.must_equal [EagerTrack.load(:album_id=>1, :id=>2), EagerTrack.load(:album_id=>1, :id=>3), EagerTrack.load(:album_id=>1, :id=>4)] end it "should support :ruby strategy without limit and with block" do EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:album_id=>1, :id=>2}, {:album_id=>1, :id=>3}, {:album_id=>1, :id=>4}]) EagerAlbum.one_to_many :first_two_tracks, :class=>:EagerTrack, :key=>:album_id, :eager_limit_strategy=>:ruby do |ds| ds.clone(:eager_limit_strategy=>:ruby).where(:active) end as = EagerAlbum.eager(:first_two_tracks=>proc{|ds| ds.where(:a)}).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM tracks WHERE (active AND (tracks.album_id IN (1)) AND a)"] as.length.must_equal 1 as.first.first_two_tracks.must_equal [EagerTrack.load(:album_id=>1, :id=>2), EagerTrack.load(:album_id=>1, :id=>3), EagerTrack.load(:album_id=>1, :id=>4)] end it "should respect the :limit option on a one_to_many association" do EagerAlbum.one_to_many :tracks, :class=>'EagerTrack', :key=>:album_id, :order=>:name, :limit=>2 a = EagerAlbum.eager(:tracks).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = tracks.album_id) ORDER BY name LIMIT 2) AS t1'] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] EagerAlbum.one_to_many :tracks, :class=>'EagerTrack', :key=>:album_id, :order=>:name, :limit=>[2, 1] a = EagerAlbum.eager(:tracks).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = 
tracks.album_id) ORDER BY name LIMIT 2 OFFSET 1) AS t1'] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] EagerAlbum.one_to_many :tracks, :class=>'EagerTrack', :key=>:album_id, :order=>:name, :limit=>[nil, 1] a = EagerAlbum.eager(:tracks).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT * FROM tracks WHERE (1 = tracks.album_id) ORDER BY name OFFSET 1) AS t1'] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] end it "should respect the :limit option on a one_to_many association with an association block" do EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:id=>2, :album_id=>1}, {:id=>3, :album_id=>1}, {:id=>4, :album_id=>1}]) EagerAlbum.one_to_many :tracks, :class=>'EagerTrack', :key=>:album_id, :order=>:name, :limit=>2 do |ds| ds.where(:a=>1) end a = EagerAlbum.eager(:tracks).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM tracks WHERE ((a = 1) AND (tracks.album_id IN (1))) ORDER BY name'] a.first.tracks.must_equal [EagerTrack.load(:id => 2, :album_id=>1), EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] end it "should respect the :limit option on a one_to_many association using the :window_function strategy" do EagerTrack.dataset = EagerTrack.dataset.with_extend{def supports_window_functions?; true end} EagerAlbum.one_to_many :tracks, :class=>'EagerTrack', :key=>:album_id, :order=>:name, :limit=>2, :eager_limit_strategy=>:window_function a = EagerAlbum.eager(:tracks).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id ORDER BY name) AS x_sequel_row_number_x FROM tracks WHERE (tracks.album_id IN (1))) AS t1 WHERE (x_sequel_row_number_x <= 2)'] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] EagerAlbum.one_to_many :tracks, :class=>'EagerTrack', :key=>:album_id, :order=>:name, :limit=>[2, 1], :eager_limit_strategy=>:window_function a = EagerAlbum.eager(:tracks).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id ORDER BY name) AS x_sequel_row_number_x FROM tracks WHERE (tracks.album_id IN (1))) AS t1 WHERE ((x_sequel_row_number_x >= 2) AND (x_sequel_row_number_x < 4))'] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] EagerAlbum.one_to_many :tracks, :class=>'EagerTrack', :key=>:album_id, :order=>:name, :limit=>[nil, 1], :eager_limit_strategy=>:window_function a = EagerAlbum.eager(:tracks).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id ORDER BY name) AS x_sequel_row_number_x FROM tracks WHERE (tracks.album_id IN (1))) AS t1 WHERE (x_sequel_row_number_x >= 2)'] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] end it "should use a ruby strategy for limit if :eager_graph option is used" do EagerTrack.many_to_one :album2, :clone=>:album EagerAlbum.one_to_many :first_two_tracks, :class=>:EagerTrack, :key=>:album_id, :limit=>2, :eager_graph=>:album2 EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:album_id=>1, :id=>2, :album2_id=>1, :band_id=>5}, 
{:album_id=>1, :id=>3, :album2_id=>1, :band_id=>5}, {:album_id=>1, :id=>4, :album2_id=>1, :band_id=>5}]) EagerTrack.dataset.columns(:id, :album_id) as = EagerAlbum.eager(:first_two_tracks).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT tracks.id, tracks.album_id, album2.id AS album2_id, album2.band_id FROM tracks LEFT OUTER JOIN albums AS album2 ON (album2.id = tracks.album_id) WHERE (tracks.album_id IN (1))"] as.length.must_equal 1 tracks = as.first.first_two_tracks tracks.must_equal [EagerTrack.load(:album_id=>1, :id=>2), EagerTrack.load(:album_id=>1, :id=>3)] tracks.first.album2.must_equal EagerAlbum.load(:id=>1, :band_id=>5) tracks.last.album2.must_equal EagerAlbum.load(:id=>1, :band_id=>5) end it "should not use a union strategy for limit by default if providing a per-eager load callback" do EagerTrack.dataset = EagerTrack.dataset.with_extend{def supports_window_functions?; true end} EagerAlbum.one_to_many :tracks, :class=>'EagerTrack', :key=>:album_id, :order=>:name, :limit=>2 a = EagerAlbum.eager(:tracks=>proc{|ds| ds.where(:id=>3)}).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id ORDER BY name) AS x_sequel_row_number_x FROM tracks WHERE ((tracks.album_id IN (1)) AND (id = 3))) AS t1 WHERE (x_sequel_row_number_x <= 2)'] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] end it "should respect the limit option on a many_to_many association using the :ruby strategy" do EagerAlbum.many_to_many :first_two_genres, :class=>:EagerGenre, :left_primary_key=>:band_id, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>2, :eager_limit_strategy=>:ruby EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>2, :id=>5}, {:x_foreign_key_x=>2, :id=>6}, {:x_foreign_key_x=>2, :id=>7}]) as = EagerAlbum.eager(:first_two_genres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (2))"] as.length.must_equal 1 as.first.first_two_genres.must_equal [EagerGenre.load(:id=>5), EagerGenre.load(:id=>6)] EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>2, :id=>5}, {:x_foreign_key_x=>2, :id=>6}, {:x_foreign_key_x=>2, :id=>7}]) EagerAlbum.many_to_many :first_two_genres, :class=>:EagerGenre, :left_primary_key=>:band_id, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>[1, 1], :eager_limit_strategy=>:ruby as = EagerAlbum.eager(:first_two_genres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (2))"] as.length.must_equal 1 as.first.first_two_genres.must_equal [EagerGenre.load(:id=>6)] EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>2, :id=>5}, {:x_foreign_key_x=>2, :id=>6}, {:x_foreign_key_x=>2, :id=>7}]) EagerAlbum.many_to_many :first_two_genres, :class=>:EagerGenre, :left_primary_key=>:band_id, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>[nil, 1], :eager_limit_strategy=>:ruby as = EagerAlbum.eager(:first_two_genres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (2))"] as.length.must_equal 1 as.first.first_two_genres.must_equal 
[EagerGenre.load(:id=>6), EagerGenre.load(:id=>7)] end it "should respect the limit option on a many_to_many association" do EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>2, :id=>5}, {:x_foreign_key_x=>2, :id=>6}]).with_extend{def supports_window_functions?; true end} EagerAlbum.many_to_many :first_two_genres, :class=>:EagerGenre, :left_primary_key=>:band_id, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>2, :order=>:name as = EagerAlbum.eager(:first_two_genres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM (SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (2 = ag.album_id) ORDER BY name LIMIT 2) AS t1"] as.length.must_equal 1 as.first.first_two_genres.must_equal [EagerGenre.load(:id=>5), EagerGenre.load(:id=>6)] EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>2, :id=>5}]) EagerAlbum.many_to_many :first_two_genres, :class=>:EagerGenre, :left_primary_key=>:band_id, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>[1, 1], :order=>:name as = EagerAlbum.eager(:first_two_genres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM (SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (2 = ag.album_id) ORDER BY name LIMIT 1 OFFSET 1) AS t1"] as.length.must_equal 1 as.first.first_two_genres.must_equal [EagerGenre.load(:id=>5)] EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>2, :id=>5}, {:x_foreign_key_x=>2, :id=>6}]) EagerAlbum.many_to_many :first_two_genres, :class=>:EagerGenre, :left_primary_key=>:band_id, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>[nil, 1], :order=>:name as = EagerAlbum.eager(:first_two_genres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM (SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (2 = ag.album_id) ORDER BY name OFFSET 1) AS t1"] as.length.must_equal 1 as.first.first_two_genres.must_equal [EagerGenre.load(:id=>5), EagerGenre.load(:id=>6)] end it "should respect the limit option on a many_to_many association using the :window_function strategy" do EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>2, :id=>5}, {:x_foreign_key_x=>2, :id=>6}]).with_extend{def supports_window_functions?; true end} EagerAlbum.many_to_many :first_two_genres, :class=>:EagerGenre, :left_primary_key=>:band_id, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>2, :order=>:name, :eager_limit_strategy=>:window_function as = EagerAlbum.eager(:first_two_genres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM (SELECT genres.*, ag.album_id AS x_foreign_key_x, row_number() OVER (PARTITION BY ag.album_id ORDER BY name) AS x_sequel_row_number_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (2))) AS t1 WHERE (x_sequel_row_number_x <= 2)"] as.length.must_equal 1 as.first.first_two_genres.must_equal [EagerGenre.load(:id=>5), EagerGenre.load(:id=>6)] EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>2, :id=>5}]) EagerAlbum.many_to_many :first_two_genres, :class=>:EagerGenre, :left_primary_key=>:band_id, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>[1, 1], :order=>:name, :eager_limit_strategy=>:window_function as = EagerAlbum.eager(:first_two_genres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM (SELECT 
genres.*, ag.album_id AS x_foreign_key_x, row_number() OVER (PARTITION BY ag.album_id ORDER BY name) AS x_sequel_row_number_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (2))) AS t1 WHERE ((x_sequel_row_number_x >= 2) AND (x_sequel_row_number_x < 3))"] as.length.must_equal 1 as.first.first_two_genres.must_equal [EagerGenre.load(:id=>5)] EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>2, :id=>5}, {:x_foreign_key_x=>2, :id=>6}]) EagerAlbum.many_to_many :first_two_genres, :class=>:EagerGenre, :left_primary_key=>:band_id, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>[nil, 1], :order=>:name, :eager_limit_strategy=>:window_function as = EagerAlbum.eager(:first_two_genres).all DB.sqls.must_equal ['SELECT * FROM albums', "SELECT * FROM (SELECT genres.*, ag.album_id AS x_foreign_key_x, row_number() OVER (PARTITION BY ag.album_id ORDER BY name) AS x_sequel_row_number_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (2))) AS t1 WHERE (x_sequel_row_number_x >= 2)"] as.length.must_equal 1 as.first.first_two_genres.must_equal [EagerGenre.load(:id=>5), EagerGenre.load(:id=>6)] end it "should use the :eager_loader association option when eager loading" do EagerAlbum.many_to_one :special_band, :key=>:band_id, :eager_loader=>(proc do |eo| item = EagerBand.filter(:album_id=>eo[:rows].collect{|r| [r.pk, r.pk*2]}.flatten).order(:name).first eo[:rows].each{|r| r.associations[:special_band] = item} end) EagerAlbum.one_to_many :special_tracks, :eager_loader=>(proc do |eo| items = EagerTrack.filter(:album_id=>eo[:rows].collect{|r| [r.pk, r.pk*2]}.flatten).all eo[:rows].each{|r| r.associations[:special_tracks] = items} end) EagerAlbum.many_to_many :special_genres, :class=>:EagerGenre, :eager_loader=>(proc do |eo| items = EagerGenre.inner_join(:ag, [:genre_id]).filter(:album_id=>eo[:rows].collect{|r| r.pk}).all eo[:rows].each{|r| r.associations[:special_genres] = items} end) a = EagerAlbum.eager(:special_genres, :special_tracks, :special_band).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] sqls = DB.sqls sqls.shift.must_equal 'SELECT * FROM albums' sqls.sort.must_equal ['SELECT * FROM bands WHERE (album_id IN (1, 2)) ORDER BY name LIMIT 1', 'SELECT * FROM genres INNER JOIN ag USING (genre_id) WHERE (album_id IN (1))', 'SELECT * FROM tracks WHERE (album_id IN (1, 2))'] a = a.first a.special_band.must_equal EagerBand.load(:id => 2) a.special_tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] a.special_genres.must_equal [EagerGenre.load(:id => 4)] DB.sqls.must_equal [] end it "should respect :after_load callbacks on associations when eager loading" do EagerAlbum.many_to_one :al_band, :class=>'EagerBand', :key=>:band_id, :after_load=>proc{|o, a| a.id *=2} EagerAlbum.one_to_many :al_tracks, :class=>'EagerTrack', :key=>:album_id, :after_load=>proc{|o, os| os.each{|a| a.id *=2}} EagerAlbum.many_to_many :al_genres, :class=>'EagerGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :after_load=>proc{|o, os| os.each{|a| a.id *=2}} a = EagerAlbum.eager(:al_band, :al_tracks, :al_genres).all.first a.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.al_band.must_equal EagerBand.load(:id=>4) a.al_tracks.must_equal [EagerTrack.load(:id=>6, :album_id=>1)] a.al_genres.must_equal [EagerGenre.load(:id=>8)] end it "should respect :uniq option when eagerly loading many_to_many associations" do EagerAlbum.many_to_many :al_genres, :class=>'EagerGenre', :left_key=>:album_id, 
:right_key=>:genre_id, :join_table=>:ag, :uniq=>true EagerGenre.dataset = EagerGenre.dataset.with_fetch([{:x_foreign_key_x=>1, :id=>8}, {:x_foreign_key_x=>1, :id=>8}]) a = EagerAlbum.eager(:al_genres).all.first DB.sqls.must_equal ['SELECT * FROM albums', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] a.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.al_genres.must_equal [EagerGenre.load(:id=>8)] end it "should respect :distinct option when eagerly loading many_to_many associations" do EagerAlbum.many_to_many :al_genres, :class=>'EagerGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :distinct=>true a = EagerAlbum.eager(:al_genres).all.first DB.sqls.must_equal ['SELECT * FROM albums', "SELECT DISTINCT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] a.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.al_genres.must_equal [EagerGenre.load(:id=>4)] end it "should eagerly load a many_to_one association with custom eager block" do a = EagerAlbum.eager(:band => proc {|ds| ds.select(:id, :name)}).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT id, name FROM bands WHERE (bands.id IN (2))'] a.first.band.must_equal EagerBand.load(:id => 2) DB.sqls.must_equal [] end it "should eagerly load a one_to_one association with custom eager block" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id a = EagerAlbum.eager(:track => proc {|ds| ds.select(:id)}).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT id FROM tracks WHERE (tracks.album_id IN (1))'] a.first.track.must_equal EagerTrack.load(:id => 3, :album_id=>1) DB.sqls.must_equal [] end it "should eagerly load a one_to_many association with custom eager block" do a = EagerAlbum.eager(:tracks => proc {|ds| ds.select(:id)}).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT id FROM tracks WHERE (tracks.album_id IN (1))'] a.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] end it "should eagerly load a many_to_many association with custom eager block" do a = EagerAlbum.eager(:genres => proc {|ds| ds.select(:name)}).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT name, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] a.first.genres.must_equal [EagerGenre.load(:id => 4)] DB.sqls.must_equal [] end it "should eagerly load a one_through_one association with custom eager block" do a = EagerAlbum.eager(:genre => proc {|ds| ds.select(:name)}).all a.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] DB.sqls.must_equal ['SELECT * FROM albums', "SELECT name, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] a.first.genre.must_equal EagerGenre.load(:id => 4) DB.sqls.must_equal [] end it "should allow cascading of eager loading within a custom eager block" do a = EagerTrack.eager(:album => proc {|ds| ds.eager(:band => :members)}).all a.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal ['SELECT * FROM tracks', 'SELECT * FROM albums WHERE (albums.id IN (1))', 'SELECT * FROM bands WHERE (bands.id IN (2))', "SELECT members.*, 
bm.band_id AS x_foreign_key_x FROM members INNER JOIN bm ON (bm.member_id = members.id) WHERE (bm.band_id IN (2))"] a = a.first a.album.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.album.band.must_equal EagerBand.load(:id => 2) a.album.band.members.must_equal [EagerBandMember.load(:id => 5)] DB.sqls.must_equal [] end it "should allow cascading of eager loading with custom callback with hash value" do a = EagerTrack.eager(:album=>{proc{|ds| ds.select(:id, :band_id)}=>{:band => :members}}).all a.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal ['SELECT * FROM tracks', 'SELECT id, band_id FROM albums WHERE (albums.id IN (1))', 'SELECT * FROM bands WHERE (bands.id IN (2))', "SELECT members.*, bm.band_id AS x_foreign_key_x FROM members INNER JOIN bm ON (bm.member_id = members.id) WHERE (bm.band_id IN (2))"] a = a.first a.album.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.album.band.must_equal EagerBand.load(:id => 2) a.album.band.members.must_equal [EagerBandMember.load(:id => 5)] DB.sqls.must_equal [] end it "should allow cascading of eager loading with custom callback with symbol value" do a = EagerTrack.eager(:album=>{proc{|ds| ds.select(:id, :band_id)}=>:band}).all a.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal ['SELECT * FROM tracks', 'SELECT id, band_id FROM albums WHERE (albums.id IN (1))', 'SELECT * FROM bands WHERE (bands.id IN (2))'] a = a.first a.album.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.album.band.must_equal EagerBand.load(:id => 2) DB.sqls.must_equal [] end it "should allow cascading of eager loading with custom callback with symbol value when association has a limit" do EagerAlbum.dataset = EagerAlbum.dataset.with_fetch((1..11).map{|i| {:band_id=>2, :id=>i}}) EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:id=>3, :album_id=>1}]) a = EagerBand.eager(:top_10_albums=>{proc{|ds| ds.select(:id, :name)}=>:tracks}).all a.must_equal [EagerBand.load(:id => 2)] DB.sqls.must_equal ['SELECT * FROM bands', 'SELECT id, name FROM albums WHERE (albums.band_id IN (2))', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))'] a = a.first a.top_10_albums.must_equal((1..10).map{|i| EagerAlbum.load(:band_id=>2, :id=>i)}) a.top_10_albums.map{|x| x.tracks}.must_equal [[EagerTrack.load(:id => 3, :album_id=>1)]] + ([[]] * 9) DB.sqls.must_equal [] end it "should allow cascading of eager loading with custom callback with symbol value when association has a limit when using window function eager limit strategy" do EagerAlbum.dataset = EagerAlbum.dataset.with_fetch(:band_id=>2, :id=>1).with_extend{def supports_window_functions?; true end} EagerTrack.dataset = EagerTrack.dataset.with_fetch([{:id=>3, :album_id=>1}]) a = EagerBand.eager(:top_10_albums=>{proc{|ds| ds.select(:id, :name)}=>:tracks}).all a.must_equal [EagerBand.load(:id => 2)] DB.sqls.must_equal ['SELECT * FROM bands', 'SELECT * FROM (SELECT id, name, row_number() OVER (PARTITION BY albums.band_id) AS x_sequel_row_number_x FROM albums WHERE (albums.band_id IN (2))) AS t1 WHERE (x_sequel_row_number_x <= 10)', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))'] a = a.first a.top_10_albums.must_equal [EagerAlbum.load(:band_id=>2, :id=>1)] a.top_10_albums.first.tracks.must_equal [EagerTrack.load(:id => 3, :album_id=>1)] DB.sqls.must_equal [] end it "should allow cascading of eager loading with custom callback with array value" do a = EagerTrack.eager(:album=>{proc{|ds| ds.select(:id, :band_id)}=>[:band, :band_name]}).all 
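# The hash form used above, eager(:assoc=>{callback_proc=>deps}), applies the
# proc to the association's dataset and then cascades eager loading to deps;
# with an array value, each listed association is loaded in its own query.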
a.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] sqls = DB.sqls sqls.slice!(0..1).must_equal ['SELECT * FROM tracks', 'SELECT id, band_id FROM albums WHERE (albums.id IN (1))'] sqls.sort.must_equal ['SELECT * FROM bands WHERE (bands.id IN (2))', 'SELECT id, name FROM bands WHERE (bands.id IN (2))'] a = a.first a.album.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.album.band.must_equal EagerBand.load(:id => 2) a.album.band_name.must_equal EagerBand.load(:id => 2) DB.sqls.must_equal [] end it "should call both association and custom eager blocks" do EagerBand.eager(:good_albums => proc {|ds| ds.select(:name)}).all DB.sqls.must_equal ['SELECT * FROM bands', "SELECT name FROM albums WHERE ((name = 'good') AND (albums.band_id IN (2)))"] end it "should respect an :eager_limit option passed in a custom callback" do EagerTrack.dataset = EagerTrack.dataset.with_extend{def supports_window_functions?; true end} a = EagerAlbum.eager(:tracks=> proc{|ds| ds.clone(:eager_limit=>5)}).all a.must_equal [EagerAlbum.load(:id => 1, :band_id=> 2)] sqls = DB.sqls sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id) AS x_sequel_row_number_x FROM tracks WHERE (tracks.album_id IN (1))) AS t1 WHERE (x_sequel_row_number_x <= 5)'] a = a.first a.tracks.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal [] end it "should respect an :eager_limit option that includes an offset" do EagerTrack.dataset = EagerTrack.dataset.with_extend{def supports_window_functions?; true end} EagerAlbum.eager(:tracks=> proc{|ds| ds.clone(:eager_limit=>[5, 5])}).all DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id) AS x_sequel_row_number_x FROM tracks WHERE (tracks.album_id IN (1))) AS t1 WHERE ((x_sequel_row_number_x >= 6) AND (x_sequel_row_number_x < 11))'] end it "should have an :eager_limit option passed in a custom callback override a :limit defined in the association" do EagerTrack.dataset = EagerTrack.dataset.with_extend{def supports_window_functions?; true end} EagerAlbum.one_to_many :first_two_tracks, :class=>:EagerTrack, :key=>:album_id, :limit=>2 EagerAlbum.eager(:first_two_tracks=> proc{|ds| ds.clone(:eager_limit=>5)}).all DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id) AS x_sequel_row_number_x FROM tracks WHERE (tracks.album_id IN (1))) AS t1 WHERE (x_sequel_row_number_x <= 5)'] end it "should respect an :eager_limit_strategy option passed in a custom callback" do EagerTrack.dataset = EagerTrack.dataset.with_fetch((1..4).map{|i| {:album_id=>1, :id=>i}}).with_extend{def supports_window_functions?; true end} a = EagerAlbum.eager(:tracks=> proc{|ds| ds.clone(:eager_limit=>2, :eager_limit_strategy=>:ruby)}).all a.must_equal [EagerAlbum.load(:id => 1, :band_id=> 2)] sqls = DB.sqls sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1))'] a = a.first a.tracks.must_equal [EagerTrack.load(:id => 1, :album_id => 1), EagerTrack.load(:id => 2, :album_id => 1)] DB.sqls.must_equal [] end it "should have an :eager_limit_strategy option passed in a custom callback override a :eager_limit_strategy defined in the association" do EagerTrack.dataset = EagerTrack.dataset.with_extend{def supports_window_functions?; true end} EagerAlbum.one_to_many :first_two_tracks, :class=>:EagerTrack, :key=>:album_id, :limit=>2, :eager_limit_strategy=>:ruby 
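# The per-call :eager_limit_strategy in the callback below should win over
# the :ruby strategy configured on the association above, switching the load
# to a single windowed query instead of slicing the result array in Ruby.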
EagerAlbum.eager(:first_two_tracks=> proc{|ds| ds.clone(:eager_limit_strategy=>:window_function)}).all DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id) AS x_sequel_row_number_x FROM tracks WHERE (tracks.album_id IN (1))) AS t1 WHERE (x_sequel_row_number_x <= 2)'] end it "should raise error if using :eager_limit for a singular association" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id proc{EagerAlbum.eager(:track=> proc{|ds| ds.clone(:eager_limit=>1)}).all}.must_raise Sequel::Error DB.sqls.must_equal ['SELECT * FROM albums'] end it "should allow a custom callback to override the :order for a singular association" do EagerAlbum.one_to_one :track, :class=>'EagerTrack', :key=>:album_id, :order=>:name a = EagerAlbum.eager(:track=> proc{|ds| ds.order(:foo)}).all DB.sqls.must_equal ['SELECT * FROM albums', 'SELECT * FROM tracks WHERE (tracks.album_id IN (1)) ORDER BY foo'] a = a.first a.track.must_equal EagerTrack.load(:id => 3, :album_id => 1) DB.sqls.must_equal [] end it "should support eager load of many_to_one with eager_graph of many_to_one in custom callback" do a = EagerTrack.eager(:album=>proc{|ds| ds.eager_graph(:band).with_fetch(:id=>1, :band_id=>2, :band_id_0=>2)}).all a.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal ["SELECT * FROM tracks", "SELECT albums.id, albums.band_id, band.id AS band_id_0 FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) WHERE (albums.id IN (1))"] a = a.first a.album.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.album.band.must_equal EagerBand.load(:id => 2) DB.sqls.must_equal [] end it "should support eager load of many_to_one with eager_graph of one_to_many in custom callback" do a = EagerTrack.eager(:album=>proc{|ds| ds.eager_graph(:tracks).with_fetch(:id=>1, :band_id=>2, :tracks_id=>3)}).all a.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal ["SELECT * FROM tracks", "SELECT albums.id, albums.band_id, tracks.id AS tracks_id FROM albums LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) WHERE (albums.id IN (1))"] a = a.first a.album.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.album.tracks.must_equal [EagerTrack.load(:id=>3)] DB.sqls.must_equal [] end it "should support eager load of many_to_one with eager_graph of many_to_many in custom callback" do a = EagerTrack.eager(:album=>proc{|ds| ds.eager_graph(:genres).with_fetch(:id=>1, :band_id=>2, :genres_id=>4)}).all a.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal ["SELECT * FROM tracks", "SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = ag.genre_id) WHERE (albums.id IN (1))"] a = a.first a.album.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.album.genres.must_equal [EagerGenre.load(:id=>4)] DB.sqls.must_equal [] end it "should support eager load of many_to_many with eager_graph of many_to_one in custom callback" do a = EagerGenre.eager(:albums=>proc{|ds| ds.columns(:id, :band_id, :x_foreign_key_x).eager_graph(:band).with_fetch(:id=>1, :band_id=>2, :x_foreign_key_x=>4, :band_id_0=>2)}).all a.must_equal [EagerGenre.load(:id => 4)] DB.sqls.must_equal ["SELECT * FROM genres", "SELECT albums.id, albums.band_id, albums.x_foreign_key_x, band.id AS band_id_0 FROM (SELECT albums.*, ag.genre_id AS x_foreign_key_x FROM albums INNER JOIN ag ON (ag.album_id = albums.id) WHERE (ag.genre_id IN (4))) AS albums 
LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id)"] a = a.first a.albums.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.albums.first.band.must_equal EagerBand.load(:id=>2) DB.sqls.must_equal [] end it "should support eager load of many_to_many with eager_graph of one_to_many in custom callback" do a = EagerGenre.eager(:albums=>proc{|ds| ds.columns(:id, :band_id, :x_foreign_key_x).eager_graph(:tracks).with_fetch(:id=>1, :band_id=>2, :x_foreign_key_x=>4, :tracks_id=>5)}).all a.must_equal [EagerGenre.load(:id => 4)] DB.sqls.must_equal ["SELECT * FROM genres", "SELECT albums.id, albums.band_id, albums.x_foreign_key_x, tracks.id AS tracks_id FROM (SELECT albums.*, ag.genre_id AS x_foreign_key_x FROM albums INNER JOIN ag ON (ag.album_id = albums.id) WHERE (ag.genre_id IN (4))) AS albums LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id)"] a = a.first a.albums.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.albums.first.tracks.must_equal [EagerTrack.load(:id=>5)] DB.sqls.must_equal [] end it "should support eager load of many_to_many with eager_graph of many_to_many in custom callback" do a = EagerGenre.eager(:albums=>proc{|ds| ds.columns(:id, :band_id, :x_foreign_key_x).eager_graph(:genres).with_fetch(:id=>1, :band_id=>2, :x_foreign_key_x=>4, :genres_id=>4)}).all a.must_equal [EagerGenre.load(:id => 4)] DB.sqls.must_equal ["SELECT * FROM genres", "SELECT albums.id, albums.band_id, albums.x_foreign_key_x, genres.id AS genres_id FROM (SELECT albums.*, ag.genre_id AS x_foreign_key_x FROM albums INNER JOIN ag ON (ag.album_id = albums.id) WHERE (ag.genre_id IN (4))) AS albums LEFT OUTER JOIN ag AS ag_0 ON (ag_0.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = ag_0.genre_id)"] a = a.first a.albums.must_equal [EagerAlbum.load(:id => 1, :band_id => 2)] a.albums.first.genres.must_equal [EagerGenre.load(:id=>4)] DB.sqls.must_equal [] end it "should support eager_graph usage with cascaded associations in custom callback" do a = EagerTrack.eager(:album=>proc{|ds| ds.eager_graph(:band=>:members).with_fetch(:id=>1, :band_id=>2, :band_id_0=>2, :members_id=>5)}).all a.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal ["SELECT * FROM tracks", "SELECT albums.id, albums.band_id, band.id AS band_id_0, members.id AS members_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) LEFT OUTER JOIN bm ON (bm.band_id = band.id) LEFT OUTER JOIN members ON (members.id = bm.member_id) WHERE (albums.id IN (1))"] a = a.first a.album.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.album.band.must_equal EagerBand.load(:id => 2) a.album.band.members.must_equal [EagerBandMember.load(:id => 5)] DB.sqls.must_equal [] end it "should support eager_graph usage in custom callback with dependencies" do a = EagerTrack.eager(:album=>{proc{|ds| ds.eager_graph(:band).with_fetch(:id=>1, :band_id=>2, :band_id_0=>2)}=>:genre}).all a.must_equal [EagerTrack.load(:id => 3, :album_id => 1)] DB.sqls.must_equal ["SELECT * FROM tracks", "SELECT albums.id, albums.band_id, band.id AS band_id_0 FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) WHERE (albums.id IN (1))", "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] a = a.first a.album.must_equal EagerAlbum.load(:id => 1, :band_id => 2) a.album.band.must_equal EagerBand.load(:id => 2) a.album.genre.must_equal EagerGenre.load(:id => 4) DB.sqls.must_equal [] end end describe Sequel::Model, "#eager_graph" do 
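# Unlike #eager, #eager_graph loads everything in one JOINed query, aliasing
# columns to avoid clashes and then splitting each row back into model
# objects. A minimal sketch of the SQL shape (mirroring assertions below):
#
#   GraphAlbum.eager_graph(:band).sql
#   # SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id
#   # FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id)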
  before(:all) do
    class ::GraphAlbum < Sequel::Model(:albums)
      columns :id, :band_id
      many_to_one :band, :class=>'GraphBand', :key=>:band_id
      one_to_many :tracks, :class=>'GraphTrack', :key=>:album_id
      one_to_one :track, :class=>'GraphTrack', :key=>:album_id
      many_to_many :genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag
      one_through_one :genre, :clone=>:genres
      many_to_one :previous_album, :class=>'GraphAlbum'
    end

    class ::GraphBand < Sequel::Model(:bands)
      columns :id, :vocalist_id
      many_to_one :vocalist, :class=>'GraphBandMember', :key=>:vocalist_id
      one_to_many :albums, :class=>'GraphAlbum', :key=>:band_id
      many_to_many :members, :class=>'GraphBandMember', :left_key=>:band_id, :right_key=>:member_id, :join_table=>:bm
      many_to_many :genres, :class=>'GraphGenre', :left_key=>:band_id, :right_key=>:genre_id, :join_table=>:bg
    end

    class ::GraphTrack < Sequel::Model(:tracks)
      columns :id, :album_id
      many_to_one :album, :class=>'GraphAlbum', :key=>:album_id
    end

    class ::GraphGenre < Sequel::Model(:genres)
      columns :id
      many_to_many :albums, :class=>'GraphAlbum', :left_key=>:genre_id, :right_key=>:album_id, :join_table=>:ag
    end

    class ::GraphBandMember < Sequel::Model(:members)
      columns :id
      many_to_many :no_bands, :class=>'GraphBand', :left_key=>:member_id, :right_key=>:band_id, :join_table=>:bm, :allow_eager_graph=>false
      many_to_many :bands, :class=>'GraphBand', :left_key=>:member_id, :right_key=>:band_id, :join_table=>:bm
    end
  end
  before do
    DB.sqls
  end
  after(:all) do
    [:GraphAlbum, :GraphBand, :GraphTrack, :GraphGenre, :GraphBandMember].each{|x| Object.send(:remove_const, x)}
  end

  it "should raise an error if called with an association with allow_eager_graph: false" do
    proc{GraphBandMember.eager_graph(:no_bands)}.must_raise(Sequel::Error)
  end

  it "should raise an error if called without a symbol or hash" do
    proc{GraphAlbum.eager_graph(Object.new)}.must_raise(Sequel::Error)
  end

  it "should raise an error if called with invalid association" do
    proc{GraphAlbum.eager_graph(:foo_bars)}.must_raise(Sequel::UndefinedAssociation)
  end

  it "should work correctly with select_map" do
    ds = GraphAlbum.eager_graph(:band)
    ds.with_fetch([{:id=>1}, {:id=>2}]).select_map(Sequel[:albums][:id]).must_equal [1, 2]
    DB.sqls.must_equal ['SELECT albums.id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id)']
    ds.with_fetch([{:id=>1}, {:id=>2}]).select_map([Sequel[:albums][:id], Sequel[:albums][:id]]).must_equal [[1, 1], [2, 2]]
    DB.sqls.must_equal ['SELECT albums.id, albums.id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id)']
  end

  it "should work correctly with single_value" do
    ds = GraphAlbum.eager_graph(:band).select(Sequel[:albums][:id])
    ds.with_fetch([{:id=>1}]).single_value.must_equal 1
    DB.sqls.must_equal ['SELECT albums.id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) LIMIT 1']
  end

  it "should not split results and assign associations if ungraphed is called" do
    ds = GraphAlbum.eager_graph(:band).ungraphed
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id)'
    ds.with_fetch(:id=>1, :band_id=>2, :band_id_0=>2, :vocalist_id=>3).all.must_equal [GraphAlbum.load(:id=>1, :band_id=>2, :band_id_0=>2, :vocalist_id=>3)]
  end

  it "should not modify existing dataset" do
    ds1 = GraphAlbum.dataset
    ds2 = ds1.eager_graph(:band)
    ds1.eager_graph(:band)
    ds2.eager_graph(:tracks)
    ds2.eager_graph(:tracks)
  end

  it "should allow manually selecting the alias base per call via an AliasedExpression" do
    ds = GraphAlbum.eager_graph(Sequel.as(:band, :b))
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, b.id AS b_id, b.vocalist_id FROM albums LEFT OUTER JOIN bands AS b ON (b.id = albums.band_id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :b_id=>2, :vocalist_id=>3).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>3)
  end

  it "should allow manually selecting the alias base per call via an AliasedExpression containing an Identifier with a symbol" do
    ds = GraphAlbum.eager_graph(Sequel.as(Sequel.identifier(:band), :b))
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, b.id AS b_id, b.vocalist_id FROM albums LEFT OUTER JOIN bands AS b ON (b.id = albums.band_id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :b_id=>2, :vocalist_id=>3).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>3)
  end

  it "should support calling eager_graph with no associations" do
    ds = GraphAlbum.eager_graph.eager_graph(Sequel.as(:band, :b)).eager_graph
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, b.id AS b_id, b.vocalist_id FROM albums LEFT OUTER JOIN bands AS b ON (b.id = albums.band_id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :b_id=>2, :vocalist_id=>3).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>3)
  end

  it "should handle multiple associations using the same alias base" do
    ds = GraphAlbum.eager_graph(Sequel.as(:genres, :b), Sequel.as(:tracks, :b), Sequel.as(:band, :b))
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, b.id AS b_id, b_0.id AS b_0_id, b_0.album_id, b_1.id AS b_1_id, b_1.vocalist_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS b ON (b.id = ag.genre_id) LEFT OUTER JOIN tracks AS b_0 ON (b_0.album_id = albums.id) LEFT OUTER JOIN bands AS b_1 ON (b_1.id = albums.band_id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :b_id=>4, :b_0_id=>3, :album_id=>1, :b_1_id=>2, :vocalist_id=>6).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a = a.first
    a.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>6)
    a.tracks.must_equal [GraphTrack.load({:id => 3, :album_id=>1})]
    a.genres.must_equal [GraphGenre.load(:id => 4)]

    ds = GraphTrack.eager_graph(Sequel.as(:album, :b)=>{Sequel.as(:band, :b)=>Sequel.as(:members, :b)})
    ds.sql.must_equal 'SELECT tracks.id, tracks.album_id, b.id AS b_id, b.band_id, b_0.id AS b_0_id, b_0.vocalist_id, b_1.id AS b_1_id FROM tracks LEFT OUTER JOIN albums AS b ON (b.id = tracks.album_id) LEFT OUTER JOIN bands AS b_0 ON (b_0.id = b.band_id) LEFT OUTER JOIN bm ON (bm.band_id = b_0.id) LEFT OUTER JOIN members AS b_1 ON (b_1.id = bm.member_id)'
    a = ds.with_fetch(:id=>3, :album_id=>1, :b_id=>1, :band_id=>2, :b_1_id=>5, :b_0_id=>2, :vocalist_id=>6).all
    a.must_equal [GraphTrack.load(:id => 3, :album_id => 1)]
    a = a.first
    a.album.must_equal GraphAlbum.load(:id => 1, :band_id => 2)
    a.album.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>6)
    a.album.band.members.must_equal [GraphBandMember.load(:id => 5)]
  end

  it "should allow manually customizing the join type per association via an AliasedExpression" do
    ds = GraphAlbum.eager_graph(Sequel.as(:band, :b, :join_type=>:inner))
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, b.id AS b_id, b.vocalist_id FROM albums INNER JOIN bands AS b ON (b.id = albums.band_id)'
    ds = ds.with_fetch(:id=>1, :band_id=>2, :b_id=>2, :vocalist_id=>3)
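    # Editorial note: the assertions are repeated below (5.times) rather than
    # run once. This most likely exercises the row-splitting machinery that
    # Sequel caches on the frozen graphed dataset, checking that repeated
    # enumeration of the same dataset keeps producing identical graphed
    # objects. That reading is an editorial inference from the spec structure,
    # not something stated in the source.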
    5.times do
      a = ds.all
      a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
      a.first.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>3)
    end
  end

  it "should allow manually customizing the join type per association via an AliasedExpression while using the default alias name" do
    ds = GraphAlbum.eager_graph(Sequel.as(:band, nil, :join_type=>:inner))
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id FROM albums INNER JOIN bands AS band ON (band.id = albums.band_id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :band_id_0=>2, :vocalist_id=>3).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>3)
  end

  it "should set up correct inner joins when using association_join" do
    GraphAlbum.association_join(:band).sql.must_equal 'SELECT * FROM albums INNER JOIN bands AS band ON (band.id = albums.band_id)'
    GraphAlbum.association_join(:track).sql.must_equal 'SELECT * FROM albums INNER JOIN tracks AS track ON (track.album_id = albums.id)'
    GraphAlbum.association_join(:tracks).sql.must_equal 'SELECT * FROM albums INNER JOIN tracks ON (tracks.album_id = albums.id)'
    GraphAlbum.association_join(:genres).sql.must_equal 'SELECT * FROM albums INNER JOIN ag ON (ag.album_id = albums.id) INNER JOIN genres ON (genres.id = ag.genre_id)'
    GraphAlbum.association_join(:genre).sql.must_equal 'SELECT * FROM albums INNER JOIN ag ON (ag.album_id = albums.id) INNER JOIN genres AS genre ON (genre.id = ag.genre_id)'
  end

  it "should handle custom selects when using association_join" do
    GraphAlbum.select{a(b)}.association_join(:band).sql.must_equal 'SELECT a(b) FROM albums INNER JOIN bands AS band ON (band.id = albums.band_id)'
    GraphAlbum.select{a(b)}.association_join(:track).sql.must_equal 'SELECT a(b) FROM albums INNER JOIN tracks AS track ON (track.album_id = albums.id)'
    GraphAlbum.select{a(b)}.association_join(:tracks).sql.must_equal 'SELECT a(b) FROM albums INNER JOIN tracks ON (tracks.album_id = albums.id)'
    GraphAlbum.select{a(b)}.association_join(:genres).sql.must_equal 'SELECT a(b) FROM albums INNER JOIN ag ON (ag.album_id = albums.id) INNER JOIN genres ON (genres.id = ag.genre_id)'
    GraphAlbum.select{a(b)}.association_join(:genre).sql.must_equal 'SELECT a(b) FROM albums INNER JOIN ag ON (ag.album_id = albums.id) INNER JOIN genres AS genre ON (genre.id = ag.genre_id)'
  end

  it "should set up correct join types when using association_*_join" do
    GraphAlbum.association_inner_join(:band).sql.must_equal 'SELECT * FROM albums INNER JOIN bands AS band ON (band.id = albums.band_id)'
    GraphAlbum.association_left_join(:track).sql.must_equal 'SELECT * FROM albums LEFT JOIN tracks AS track ON (track.album_id = albums.id)'
    GraphAlbum.association_right_join(:tracks).sql.must_equal 'SELECT * FROM albums RIGHT JOIN tracks ON (tracks.album_id = albums.id)'
    GraphAlbum.association_full_join(:genres).sql.must_equal 'SELECT * FROM albums FULL JOIN ag ON (ag.album_id = albums.id) FULL JOIN genres ON (genres.id = ag.genre_id)'
  end

  it "should eagerly load a single many_to_one association" do
    ds = GraphAlbum.eager_graph(:band)
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :band_id_0=>2, :vocalist_id=>3).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>3)
  end

  it "should eagerly load a single many_to_one association with the same name as a column" do
    GraphAlbum.def_column_alias(:band_id_id, :band_id)
    GraphAlbum.many_to_one :band_id, :key_column=>:band_id, :class=>GraphBand
    ds = GraphAlbum.eager_graph(:band_id)
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, band_id.id AS band_id_id, band_id.vocalist_id FROM albums LEFT OUTER JOIN bands AS band_id ON (band_id.id = albums.band_id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :band_id_id=>2, :vocalist_id=>3).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.band_id.must_equal GraphBand.load(:id => 2, :vocalist_id=>3)
  end

  it "should support :join_type eager_graph option one_to_one association" do
    ds = GraphAlbum.eager_graph_with_options(:track, :join_type=>:inner)
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, track.id AS track_id, track.album_id FROM albums INNER JOIN tracks AS track ON (track.album_id = albums.id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :track_id=>3, :album_id=>1).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.track.must_equal GraphTrack.load(:id => 3, :album_id=>1)
  end

  it "should eagerly load a single one_to_one association" do
    ds = GraphAlbum.eager_graph(:track)
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, track.id AS track_id, track.album_id FROM albums LEFT OUTER JOIN tracks AS track ON (track.album_id = albums.id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :track_id=>3, :album_id=>1).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.track.must_equal GraphTrack.load(:id => 3, :album_id=>1)
  end

  it "should eagerly graph a single one_to_one association using the :distinct_on strategy" do
    sub = Class.new(GraphTrack)
    sub.dataset = sub.dataset.with_extend do
      def supports_distinct_on?; true end
      def columns; [:id, :album_id] end
    end
    GraphAlbum.one_to_one :ltrack, :clone=>:track, :class=>sub
    ds = GraphAlbum.eager_graph_with_options(:ltrack, :limit_strategy=>true)
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, ltrack.id AS ltrack_id, ltrack.album_id FROM albums LEFT OUTER JOIN (SELECT DISTINCT ON (tracks.album_id) * FROM tracks ORDER BY tracks.album_id) AS ltrack ON (ltrack.album_id = albums.id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :ltrack_id=>3, :album_id=>1).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.ltrack.must_equal sub.load(:id => 3, :album_id=>1)
  end

  it "should eagerly graph a single one_to_one association using the :window_function strategy" do
    sub = Class.new(GraphTrack)
    sub.dataset = sub.dataset.with_extend do
      def supports_window_functions?; true end
      def columns; [:id, :album_id] end
    end
    GraphAlbum.one_to_one :ltrack, :clone=>:track, :class=>sub
    ds = GraphAlbum.eager_graph_with_options(:ltrack, :limit_strategy=>true)
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, ltrack.id AS ltrack_id, ltrack.album_id FROM albums LEFT OUTER JOIN (SELECT id, album_id FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id) AS x_sequel_row_number_x FROM tracks) AS t1 WHERE (x_sequel_row_number_x = 1)) AS ltrack ON (ltrack.album_id = albums.id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :ltrack_id=>3, :album_id=>1).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.ltrack.must_equal sub.load(:id => 3, :album_id=>1)
  end

  it "should eagerly graph a single one_to_one association with an offset using the :window_function strategy" do
    sub = Class.new(GraphTrack)
    sub.dataset = sub.dataset.with_extend do
      def supports_window_functions?; true end
      def columns; [:id, :album_id] end
    end
    GraphAlbum.one_to_one :ltrack, :clone=>:track, :class=>sub, :limit=>[nil, 1]
    ds = GraphAlbum.eager_graph_with_options(:ltrack, :limit_strategy=>true)
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, ltrack.id AS ltrack_id, ltrack.album_id FROM albums LEFT OUTER JOIN (SELECT id, album_id FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id) AS x_sequel_row_number_x FROM tracks) AS t1 WHERE (x_sequel_row_number_x = 2)) AS ltrack ON (ltrack.album_id = albums.id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :ltrack_id=>3, :album_id=>1).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.ltrack.must_equal sub.load(:id => 3, :album_id=>1)
  end

  it "should eagerly graph a single one_to_one association using the :correlated_subquery strategy" do
    sub = Class.new(GraphTrack)
    sub.dataset = sub.dataset.with_extend do
      def supports_window_functions?; true end
      def columns; [:id, :album_id] end
    end
    GraphAlbum.one_to_one :ltrack, :clone=>:track, :class=>sub
    ds = GraphAlbum.eager_graph_with_options(:ltrack, :limit_strategy=>:correlated_subquery)
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, ltrack.id AS ltrack_id, ltrack.album_id FROM albums LEFT OUTER JOIN (SELECT * FROM tracks WHERE (tracks.id IN (SELECT t1.id FROM tracks AS t1 WHERE (t1.album_id = tracks.album_id) LIMIT 1))) AS ltrack ON (ltrack.album_id = albums.id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :ltrack_id=>3, :album_id=>1).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.ltrack.must_equal sub.load(:id => 3, :album_id=>1)
  end

  it "should eagerly load a single one_to_many association" do
    ds = GraphAlbum.eager_graph(:tracks)
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, tracks.id AS tracks_id, tracks.album_id FROM albums LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :tracks_id=>3, :album_id=>1).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.tracks.must_equal [GraphTrack.load(:id => 3, :album_id=>1)]
  end

  it "should eagerly graph a single one_to_many association using the :window_function strategy" do
    sub = Class.new(GraphTrack)
    sub.dataset = sub.dataset.with_extend do
      def supports_window_functions?; true end
      def columns; [:id, :album_id] end
    end
    GraphAlbum.one_to_many :ltracks, :clone=>:tracks, :limit=>2, :class=>sub
    ds = GraphAlbum.eager_graph_with_options(:ltracks, :limit_strategy=>true)
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, ltracks.id AS ltracks_id, ltracks.album_id FROM albums LEFT OUTER JOIN (SELECT id, album_id FROM (SELECT *, row_number() OVER (PARTITION BY tracks.album_id) AS x_sequel_row_number_x FROM tracks) AS t1 WHERE (x_sequel_row_number_x <= 2)) AS ltracks ON (ltracks.album_id = albums.id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :ltracks_id=>3, :album_id=>1).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.ltracks.must_equal [sub.load(:id => 3, :album_id=>1)]
  end

  it "should eagerly load a single many_to_many association" do
    ds = GraphAlbum.eager_graph(:genres)
    ds.sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = ag.genre_id)'
    a = ds.with_fetch(:id=>1, :band_id=>2, :genres_id=>4).all
    a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)]
    a.first.genres.must_equal [GraphGenre.load(:id => 4)]
  end

  it "should eagerly graph a single many_to_many association using the :ruby strategy" do
    sub = Class.new(GraphGenre)
    sub.dataset = sub.dataset.with_extend do
      def supports_window_functions?; true end
      def columns;
literal(opts[:select]) =~ /x_foreign_key_x/ ? [:id, :x_foreign_key_x] : [:id] end end GraphAlbum.many_to_many :lgenres, :clone=>:genres, :class=>sub, :limit=>2 ds = GraphAlbum.eager_graph_with_options(:lgenres, :limit_strategy=>:ruby) ds.sql.must_equal "SELECT albums.id, albums.band_id, lgenres.id AS lgenres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS lgenres ON (lgenres.id = ag.genre_id)" a = ds.with_fetch(:id=>1, :band_id=>2, :lgenres_id=>4).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.lgenres.must_equal [sub.load(:id => 4)] end it "should eagerly graph a single many_to_many association using the :ruby strategy with an offset" do sub = Class.new(GraphGenre) sub.dataset = sub.dataset.with_extend do def supports_window_functions?; true end def columns; literal(opts[:select]) =~ /x_foreign_key_x/ ? [:id, :x_foreign_key_x] : [:id] end end GraphAlbum.many_to_many :lgenres, :clone=>:genres, :class=>sub, :limit=>[2, 1] ds = GraphAlbum.eager_graph_with_options(:lgenres, :limit_strategy=>:ruby) ds.sql.must_equal "SELECT albums.id, albums.band_id, lgenres.id AS lgenres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS lgenres ON (lgenres.id = ag.genre_id)" a = ds.with_fetch(:id=>1, :band_id=>2, :lgenres_id=>4).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.lgenres.must_equal [] end it "should eagerly graph a single many_to_many association using the :ruby strategy with only an offset" do sub = Class.new(GraphGenre) sub.dataset = sub.dataset.with_extend do def supports_window_functions?; true end def columns; literal(opts[:select]) =~ /x_foreign_key_x/ ? [:id, :x_foreign_key_x] : [:id] end end GraphAlbum.many_to_many :lgenres, :clone=>:genres, :class=>sub, :limit=>[nil, 1] ds = GraphAlbum.eager_graph_with_options(:lgenres, :limit_strategy=>:ruby) ds.sql.must_equal "SELECT albums.id, albums.band_id, lgenres.id AS lgenres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS lgenres ON (lgenres.id = ag.genre_id)" a = ds.with_fetch(:id=>1, :band_id=>2, :lgenres_id=>4).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.lgenres.must_equal [] end it "should eagerly graph a single many_to_many association using the :window_function strategy" do sub = Class.new(GraphGenre) sub.dataset = sub.dataset.with_extend do def supports_window_functions?; true end def columns; literal(opts[:select]) =~ /x_foreign_key_x/ ? 
[:id, :x_foreign_key_x] : [:id] end end GraphAlbum.many_to_many :lgenres, :clone=>:genres, :class=>sub, :limit=>2 ds = GraphAlbum.eager_graph_with_options(:lgenres, :limit_strategy=>true) ds.sql.must_equal 'SELECT albums.id, albums.band_id, lgenres.id AS lgenres_id FROM albums LEFT OUTER JOIN (SELECT id, x_foreign_key_x FROM (SELECT genres.*, ag.album_id AS x_foreign_key_x, row_number() OVER (PARTITION BY ag.album_id) AS x_sequel_row_number_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id)) AS t1 WHERE (x_sequel_row_number_x <= 2)) AS lgenres ON (lgenres.x_foreign_key_x = albums.id)' a = ds.with_fetch(:id=>1, :band_id=>2, :lgenres_id=>4).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.lgenres.must_equal [sub.load(:id => 4)] end it "should eagerly load a single one_through_one association" do ds = GraphAlbum.eager_graph(:genre) ds.sql.must_equal 'SELECT albums.id, albums.band_id, genre.id AS genre_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS genre ON (genre.id = ag.genre_id)' a = ds.with_fetch(:id=>1, :band_id=>2, :genre_id=>4).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.genre.must_equal GraphGenre.load(:id => 4) end it "should eagerly graph a single one_through_one association using the :distinct_on strategy" do sub = Class.new(GraphGenre) sub.dataset = sub.dataset.with_extend do def supports_distinct_on?; true end def columns; [:id] end end GraphAlbum.one_through_one :lgenre, :clone=>:genre, :class=>sub ds = GraphAlbum.eager_graph_with_options(:lgenre, :limit_strategy=>true) ds.sql.must_equal 'SELECT albums.id, albums.band_id, lgenre.id AS lgenre_id FROM albums LEFT OUTER JOIN (SELECT DISTINCT ON (ag.album_id) genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) ORDER BY ag.album_id) AS lgenre ON (lgenre.x_foreign_key_x = albums.id)' a = ds.with_fetch(:id=>1, :band_id=>2, :lgenre_id=>4).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.lgenre.must_equal sub.load(:id => 4) end it "should eagerly graph a single one_through_one association using the :window_function strategy" do sub = Class.new(GraphGenre) sub.dataset = sub.dataset.with_extend do def supports_window_functions?; true end def columns; literal(opts[:select]) =~ /x_foreign_key_x/ ? 
[:id, :x_foreign_key_x] : [:id] end end GraphAlbum.one_through_one :lgenre, :clone=>:genre, :class=>sub ds = GraphAlbum.eager_graph_with_options(:lgenre, :limit_strategy=>true) ds.sql.must_equal 'SELECT albums.id, albums.band_id, lgenre.id AS lgenre_id FROM albums LEFT OUTER JOIN (SELECT id, x_foreign_key_x FROM (SELECT genres.*, ag.album_id AS x_foreign_key_x, row_number() OVER (PARTITION BY ag.album_id) AS x_sequel_row_number_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id)) AS t1 WHERE (x_sequel_row_number_x = 1)) AS lgenre ON (lgenre.x_foreign_key_x = albums.id)' a = ds.with_fetch(:id=>1, :band_id=>2, :lgenre_id=>4).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.lgenre.must_equal sub.load(:id => 4) end it "should correctly handle an aliased join table in many_to_many and one_through_one with graph_conditions or graph_block" do c = Class.new(GraphAlbum) c.many_to_many :genres, :clone=>:genres, :join_table=>Sequel[:ag].as(:ga), :graph_conditions=>{:a=>:b} c.eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag AS ga ON (ga.album_id = albums.id) LEFT OUTER JOIN genres ON ((genres.id = ga.genre_id) AND (genres.a = ga.b))' c.many_to_many :genres, :clone=>:genres, :join_table=>Sequel[:ag].as(:ga), :graph_block => proc{true} c.eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag AS ga ON (ga.album_id = albums.id) LEFT OUTER JOIN genres ON ((genres.id = ga.genre_id) AND (genres.a = ga.b) AND \'t\')' end with_symbol_splitting "should correctly handle an aliased join table symbol in many_to_many and one_through_one with graph_conditions or graph_block" do c = Class.new(GraphAlbum) c.many_to_many :genres, :clone=>:genres, :join_table=>:ag___ga, :graph_conditions=>{:a=>:b} c.eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag AS ga ON (ga.album_id = albums.id) LEFT OUTER JOIN genres ON ((genres.id = ga.genre_id) AND (genres.a = ga.b))' c.many_to_many :genres, :clone=>:genres, :join_table=>:ag___ga, :graph_block => proc{true} c.eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag AS ga ON (ga.album_id = albums.id) LEFT OUTER JOIN genres ON ((genres.id = ga.genre_id) AND (genres.a = ga.b) AND \'t\')' end it "should raise Error when using eager_graph with :conditions option that isn't a conditions specifier" do c = Class.new(GraphAlbum) c.many_to_many :genres, :clone=>:genres, :join_table=>Sequel[:ag].as(:ga), :conditions=>'true' proc{c.eager_graph(:genres)}.must_raise Sequel::Error end with_symbol_splitting "should correctly handle an aliased join table symbol in many_to_many and one_through_one" do c = Class.new(GraphAlbum) c.many_to_many :genres, :clone=>:genres, :join_table=>:ag___ga c.eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag AS ga ON (ga.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = ga.genre_id)' c.many_to_many :genre, :clone=>:genre, :join_table=>:ag___ga c.eager_graph(:genre).sql.must_equal 'SELECT albums.id, albums.band_id, genre.id AS genre_id FROM albums LEFT OUTER JOIN ag AS ga ON (ga.album_id = albums.id) LEFT OUTER JOIN genres AS genre ON (genre.id = ga.genre_id)' c.many_to_many :genres, :clone=>:genres, :join_table=>:ag___albums c.eager_graph(:genres).sql.must_equal 'SELECT 
albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag AS albums_0 ON (albums_0.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = albums_0.genre_id)' c.many_to_many :genres, :clone=>:genres, :join_table=>:ag___genres c.eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag AS genres_0 ON (genres_0.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = genres_0.genre_id)' end it "should correctly handle an aliased join table in many_to_many and one_through_one" do c = Class.new(GraphAlbum) c.many_to_many :genres, :clone=>:genres, :join_table=>Sequel[:ag].as(:ga) c.eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag AS ga ON (ga.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = ga.genre_id)' c.many_to_many :genre, :clone=>:genre, :join_table=>Sequel[:ag].as(:ga) c.eager_graph(:genre).sql.must_equal 'SELECT albums.id, albums.band_id, genre.id AS genre_id FROM albums LEFT OUTER JOIN ag AS ga ON (ga.album_id = albums.id) LEFT OUTER JOIN genres AS genre ON (genre.id = ga.genre_id)' c.many_to_many :genres, :clone=>:genres, :join_table=>Sequel[:ag].as(:albums) c.eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag AS albums_0 ON (albums_0.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = albums_0.genre_id)' c.many_to_many :genres, :clone=>:genres, :join_table=>Sequel[:ag].as(:genres) c.eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag AS genres_0 ON (genres_0.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = genres_0.genre_id)' end it "should handle multiple associations in a single call to association_join" do GraphAlbum.association_join(:genres, :tracks, :band).sql.must_equal 'SELECT * FROM albums INNER JOIN ag ON (ag.album_id = albums.id) INNER JOIN genres ON (genres.id = ag.genre_id) INNER JOIN tracks ON (tracks.album_id = albums.id) INNER JOIN bands AS band ON (band.id = albums.band_id)' end it "should eagerly load multiple associations in a single call" do ds = GraphAlbum.eager_graph(:genres, :tracks, :band) ds.sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id, tracks.id AS tracks_id, tracks.album_id, band.id AS band_id_0, band.vocalist_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = ag.genre_id) LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id)' a = ds.with_fetch(:id=>1, :band_id=>2, :genres_id=>4, :tracks_id=>3, :album_id=>1, :band_id_0=>2, :vocalist_id=>6).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a = a.first a.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>6) a.tracks.must_equal [GraphTrack.load({:id => 3, :album_id=>1})] a.genres.must_equal [GraphGenre.load(:id => 4)] end it "should eagerly load multiple associations with different limit strategies in a single call" do subg = Class.new(GraphGenre) subg.dataset = subg.dataset.with_extend do def supports_distinct_on?; true end def supports_window_functions?; true end def columns; literal(opts[:select]) =~ /x_foreign_key_x/ ? 
[:id, :x_foreign_key_x] : [:id] end end GraphAlbum.one_through_one :lgenre, :clone=>:genre, :class=>subg GraphAlbum.many_to_many :lgenres, :clone=>:genres, :class=>subg, :limit=>2 ds = GraphAlbum.eager_graph_with_options([:lgenre, :lgenres], :limit_strategy=>{:lgenre=>:distinct_on, :lgenres=>:window_function}) ds.sql.must_equal 'SELECT albums.id, albums.band_id, lgenre.id AS lgenre_id, lgenres.id AS lgenres_id FROM albums LEFT OUTER JOIN (SELECT DISTINCT ON (ag.album_id) genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) ORDER BY ag.album_id) AS lgenre ON (lgenre.x_foreign_key_x = albums.id) LEFT OUTER JOIN (SELECT id, x_foreign_key_x FROM (SELECT genres.*, ag.album_id AS x_foreign_key_x, row_number() OVER (PARTITION BY ag.album_id) AS x_sequel_row_number_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id)) AS t1 WHERE (x_sequel_row_number_x <= 2)) AS lgenres ON (lgenres.x_foreign_key_x = albums.id)' a = ds.with_fetch(:id=>1, :band_id=>2, :lgenres_id=>4, :lgenre_id=>3).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a = a.first a.lgenre.must_equal subg.load(:id => 3) a.lgenres.must_equal [subg.load(:id => 4)] end it "should handle multiple associations in separate calls to association_join" do GraphAlbum.association_join(:genres).association_join(:tracks).association_join(:band).sql.must_equal 'SELECT * FROM albums INNER JOIN ag ON (ag.album_id = albums.id) INNER JOIN genres ON (genres.id = ag.genre_id) INNER JOIN tracks ON (tracks.album_id = albums.id) INNER JOIN bands AS band ON (band.id = albums.band_id)' end it "should eagerly load multiple associations in separate calls" do ds = GraphAlbum.eager_graph(:genres).eager_graph(:tracks).eager_graph(:band) ds.sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id, tracks.id AS tracks_id, tracks.album_id, band.id AS band_id_0, band.vocalist_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = ag.genre_id) LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id)' a = ds.with_fetch(:id=>1, :band_id=>2, :genres_id=>4, :tracks_id=>3, :album_id=>1, :band_id_0=>2, :vocalist_id=>6).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a = a.first a.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>6) a.tracks.must_equal [GraphTrack.load({:id => 3, :album_id=>1})] a.genres.must_equal [GraphGenre.load(:id => 4)] end it "should handle cascading associations in a single call to association_join" do GraphTrack.association_join(:album=>{:band=>:members}).sql.must_equal 'SELECT * FROM tracks INNER JOIN albums AS album ON (album.id = tracks.album_id) INNER JOIN bands AS band ON (band.id = album.band_id) INNER JOIN bm ON (bm.band_id = band.id) INNER JOIN members ON (members.id = bm.member_id)' GraphBand.association_join({:albums=>:tracks}, :members).sql.must_equal 'SELECT * FROM bands INNER JOIN albums ON (albums.band_id = bands.id) INNER JOIN tracks ON (tracks.album_id = albums.id) INNER JOIN bm ON (bm.band_id = bands.id) INNER JOIN members ON (members.id = bm.member_id)' end it "should handle matching association names for different models when using association_join" do GraphAlbum.association_join(:genres).association_join(:band=>:genres).sql.must_equal 'SELECT * FROM albums INNER JOIN ag ON (ag.album_id = albums.id) INNER JOIN genres ON (genres.id = ag.genre_id) INNER JOIN bands AS band ON (band.id = albums.band_id) INNER JOIN bg ON (bg.band_id = band.id) 
INNER JOIN genres AS genres_0 ON (genres_0.id = bg.genre_id)' end it "should allow cascading of eager loading for associations of associated models" do ds = GraphTrack.eager_graph(:album=>{:band=>:members}) ds.sql.must_equal 'SELECT tracks.id, tracks.album_id, album.id AS album_id_0, album.band_id, band.id AS band_id_0, band.vocalist_id, members.id AS members_id FROM tracks LEFT OUTER JOIN albums AS album ON (album.id = tracks.album_id) LEFT OUTER JOIN bands AS band ON (band.id = album.band_id) LEFT OUTER JOIN bm ON (bm.band_id = band.id) LEFT OUTER JOIN members ON (members.id = bm.member_id)' a = ds.with_fetch(:id=>3, :album_id=>1, :album_id_0=>1, :band_id=>2, :members_id=>5, :band_id_0=>2, :vocalist_id=>6).all a.must_equal [GraphTrack.load(:id => 3, :album_id => 1)] a = a.first a.album.must_equal GraphAlbum.load(:id => 1, :band_id => 2) a.album.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>6) a.album.band.members.must_equal [GraphBandMember.load(:id => 5)] end it "should allow cascading of eager loading for multiple *_to_many associations, eliminating duplicates caused by cartesian products" do ds = GraphBand.eager_graph({:albums=>:tracks}, :members) ds.sql.must_equal 'SELECT bands.id, bands.vocalist_id, albums.id AS albums_id, albums.band_id, tracks.id AS tracks_id, tracks.album_id, members.id AS members_id FROM bands LEFT OUTER JOIN albums ON (albums.band_id = bands.id) LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) LEFT OUTER JOIN bm ON (bm.band_id = bands.id) LEFT OUTER JOIN members ON (members.id = bm.member_id)' a = ds.with_fetch([{:id=>1, :vocalist_id=>2, :albums_id=>3, :band_id=>1, :tracks_id=>4, :album_id=>3, :members_id=>5}, {:id=>1, :vocalist_id=>2, :albums_id=>3, :band_id=>1, :tracks_id=>4, :album_id=>3, :members_id=>6}, {:id=>1, :vocalist_id=>2, :albums_id=>3, :band_id=>1, :tracks_id=>5, :album_id=>3, :members_id=>5}, {:id=>1, :vocalist_id=>2, :albums_id=>3, :band_id=>1, :tracks_id=>5, :album_id=>3, :members_id=>6}, {:id=>1, :vocalist_id=>2, :albums_id=>4, :band_id=>1, :tracks_id=>6, :album_id=>4, :members_id=>5}, {:id=>1, :vocalist_id=>2, :albums_id=>4, :band_id=>1, :tracks_id=>6, :album_id=>4, :members_id=>6}, {:id=>1, :vocalist_id=>2, :albums_id=>4, :band_id=>1, :tracks_id=>7, :album_id=>4, :members_id=>5}, {:id=>1, :vocalist_id=>2, :albums_id=>4, :band_id=>1, :tracks_id=>7, :album_id=>4, :members_id=>6}, {:id=>2, :vocalist_id=>2, :albums_id=>5, :band_id=>2, :tracks_id=>8, :album_id=>5, :members_id=>5}, {:id=>2, :vocalist_id=>2, :albums_id=>5, :band_id=>2, :tracks_id=>8, :album_id=>5, :members_id=>6}, {:id=>2, :vocalist_id=>2, :albums_id=>5, :band_id=>2, :tracks_id=>9, :album_id=>5, :members_id=>5}, {:id=>2, :vocalist_id=>2, :albums_id=>5, :band_id=>2, :tracks_id=>9, :album_id=>5, :members_id=>6}, {:id=>2, :vocalist_id=>2, :albums_id=>6, :band_id=>2, :tracks_id=>1, :album_id=>6, :members_id=>5}, {:id=>2, :vocalist_id=>2, :albums_id=>6, :band_id=>2, :tracks_id=>1, :album_id=>6, :members_id=>6}, {:id=>2, :vocalist_id=>2, :albums_id=>6, :band_id=>2, :tracks_id=>2, :album_id=>6, :members_id=>5}, {:id=>2, :vocalist_id=>2, :albums_id=>6, :band_id=>2, :tracks_id=>2, :album_id=>6, :members_id=>6}]).all a.must_equal [GraphBand.load(:id=>1, :vocalist_id=>2), GraphBand.load(:id=>2, :vocalist_id=>2)] members = a.map{|x| x.members} members.must_equal [[GraphBandMember.load(:id=>5), GraphBandMember.load(:id=>6)], [GraphBandMember.load(:id=>5), GraphBandMember.load(:id=>6)]] albums = a.map{|x| x.albums} albums.must_equal [[GraphAlbum.load(:id=>3, :band_id=>1), 
GraphAlbum.load(:id=>4, :band_id=>1)], [GraphAlbum.load(:id=>5, :band_id=>2), GraphAlbum.load(:id=>6, :band_id=>2)]] tracks = albums.map{|x| x.map{|y| y.tracks}} tracks.must_equal [[[GraphTrack.load(:id=>4, :album_id=>3), GraphTrack.load(:id=>5, :album_id=>3)], [GraphTrack.load(:id=>6, :album_id=>4), GraphTrack.load(:id=>7, :album_id=>4)]], [[GraphTrack.load(:id=>8, :album_id=>5), GraphTrack.load(:id=>9, :album_id=>5)], [GraphTrack.load(:id=>1, :album_id=>6), GraphTrack.load(:id=>2, :album_id=>6)]]] end it "should populate the reciprocal many_to_one association when eagerly loading the one_to_many association" do DB.reset ds = GraphAlbum.eager_graph(:tracks) ds.sql.must_equal 'SELECT albums.id, albums.band_id, tracks.id AS tracks_id, tracks.album_id FROM albums LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id)' a = ds.with_fetch(:id=>1, :band_id=>2, :tracks_id=>3, :album_id=>1).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a = a.first a.tracks.must_equal [GraphTrack.load(:id => 3, :album_id=>1)] a.tracks.first.album.must_equal a DB.sqls.must_equal ['SELECT albums.id, albums.band_id, tracks.id AS tracks_id, tracks.album_id FROM albums LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id)'] end it "should eager load multiple associations from the same table" do ds = GraphBand.eager_graph(:vocalist, :members) ds.sql.must_equal 'SELECT bands.id, bands.vocalist_id, vocalist.id AS vocalist_id_0, members.id AS members_id FROM bands LEFT OUTER JOIN members AS vocalist ON (vocalist.id = bands.vocalist_id) LEFT OUTER JOIN bm ON (bm.band_id = bands.id) LEFT OUTER JOIN members ON (members.id = bm.member_id)' a = ds.with_fetch(:id=>2, :vocalist_id=>6, :vocalist_id_0=>6, :members_id=>5).all a.must_equal [GraphBand.load(:id => 2, :vocalist_id => 6)] a = a.first a.vocalist.must_equal GraphBandMember.load(:id => 6) a.members.must_equal [GraphBandMember.load(:id => 5)] end it "should give you a plain hash when called without .all" do ds = GraphAlbum.eager_graph(:band) ds.sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id)' ds.with_fetch(:id=>1, :band_id=>2, :band_id_0=>2, :vocalist_id=>3).first.must_equal(:id=>1, :band_id=>2, :band_id_0=>2, :vocalist_id=>3) end it "should not drop any associated objects if the graph could not be a cartesian product" do ds = GraphBand.eager_graph(:members, :vocalist) ds.sql.must_equal 'SELECT bands.id, bands.vocalist_id, members.id AS members_id, vocalist.id AS vocalist_id_0 FROM bands LEFT OUTER JOIN bm ON (bm.band_id = bands.id) LEFT OUTER JOIN members ON (members.id = bm.member_id) LEFT OUTER JOIN members AS vocalist ON (vocalist.id = bands.vocalist_id)' a = ds.with_fetch([{:id=>2, :vocalist_id=>6, :members_id=>5, :vocalist_id_0=>6}, {:id=>2, :vocalist_id=>6, :members_id=>5, :vocalist_id_0=>6}]).all a.must_equal [GraphBand.load(:id => 2, :vocalist_id => 6)] a = a.first a.vocalist.must_equal GraphBandMember.load(:id => 6) a.members.must_equal [GraphBandMember.load(:id => 5), GraphBandMember.load(:id => 5)] end it "should respect the :cartesian_product_number option" do GraphBand.many_to_one :other_vocalist, :class=>'GraphBandMember', :key=>:vocalist_id, :cartesian_product_number=>1 ds = GraphBand.eager_graph(:members, :other_vocalist) ds.sql.must_equal 'SELECT bands.id, bands.vocalist_id, members.id AS members_id, other_vocalist.id AS other_vocalist_id FROM bands LEFT OUTER JOIN bm ON (bm.band_id = bands.id) LEFT OUTER JOIN members ON 
(members.id = bm.member_id) LEFT OUTER JOIN members AS other_vocalist ON (other_vocalist.id = bands.vocalist_id)' a = ds.with_fetch([{:id=>2, :vocalist_id=>6, :members_id=>5, :other_vocalist_id=>6}, {:id=>2, :vocalist_id=>6, :members_id=>5, :other_vocalist_id=>6}]).all a.must_equal [GraphBand.load(:id=>2, :vocalist_id => 6)] a.first.other_vocalist.must_equal GraphBandMember.load(:id=>6) a.first.members.must_equal [GraphBandMember.load(:id=>5)] end it "should drop duplicate items that occur in sequence if the graph could be a cartesian product" do ds = GraphBand.eager_graph(:members, :genres) ds.sql.must_equal 'SELECT bands.id, bands.vocalist_id, members.id AS members_id, genres.id AS genres_id FROM bands LEFT OUTER JOIN bm ON (bm.band_id = bands.id) LEFT OUTER JOIN members ON (members.id = bm.member_id) LEFT OUTER JOIN bg ON (bg.band_id = bands.id) LEFT OUTER JOIN genres ON (genres.id = bg.genre_id)' a = ds.with_fetch([{:id=>2, :vocalist_id=>6, :members_id=>5, :genres_id=>7}, {:id=>2, :vocalist_id=>6, :members_id=>5, :genres_id=>8}, {:id=>2, :vocalist_id=>6, :members_id=>6, :genres_id=>7}, {:id=>2, :vocalist_id=>6, :members_id=>6, :genres_id=>8}]).all a.must_equal [GraphBand.load(:id => 2, :vocalist_id => 6)] a = a.first a.members.must_equal [GraphBandMember.load(:id => 5), GraphBandMember.load(:id => 6)] a.genres.must_equal [GraphGenre.load(:id => 7), GraphGenre.load(:id => 8)] end it "should be able to be used in combination with #eager" do DB.reset ds = GraphAlbum.eager_graph(:tracks).eager(:genres) GraphGenre.dataset = GraphGenre.dataset.with_fetch(:id=>6, :x_foreign_key_x=>1) a = ds.with_fetch(:id=>1, :band_id=>2, :tracks_id=>3, :album_id=>1).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a = a.first a.tracks.must_equal [GraphTrack.load(:id=>3, :album_id=>1)] a.genres.must_equal [GraphGenre.load(:id => 6)] DB.sqls.must_equal ['SELECT albums.id, albums.band_id, tracks.id AS tracks_id, tracks.album_id FROM albums LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id)', "SELECT genres.*, ag.album_id AS x_foreign_key_x FROM genres INNER JOIN ag ON (ag.genre_id = genres.id) WHERE (ag.album_id IN (1))"] end it "should handle no associated records for a single many_to_one association" do ds = GraphAlbum.eager_graph(:band) ds.sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id)' a = ds.with_fetch(:id=>1, :band_id=>2, :band_id_0=>nil, :vocalist_id=>nil).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.band.must_be_nil end it "should handle no associated records for a single one_to_one association" do ds = GraphAlbum.eager_graph(:track) ds.sql.must_equal 'SELECT albums.id, albums.band_id, track.id AS track_id, track.album_id FROM albums LEFT OUTER JOIN tracks AS track ON (track.album_id = albums.id)' a = ds.with_fetch(:id=>1, :band_id=>2, :track_id=>nil, :album_id=>nil).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.track.must_be_nil end it "should handle no associated records for a single one_to_many association" do ds = GraphAlbum.eager_graph(:tracks) ds.sql.must_equal 'SELECT albums.id, albums.band_id, tracks.id AS tracks_id, tracks.album_id FROM albums LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id)' a = ds.with_fetch(:id=>1, :band_id=>2, :tracks_id=>nil, :album_id=>nil).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.tracks.must_equal [] end it "should handle no associated records for a single 
one_through_one association" do ds = GraphAlbum.eager_graph(:genre) ds.sql.must_equal 'SELECT albums.id, albums.band_id, genre.id AS genre_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS genre ON (genre.id = ag.genre_id)' a = ds.with_fetch(:id=>1, :band_id=>2, :genres_id=>nil).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.genre.must_be_nil end it "should handle no associated records for a single many_to_many association" do ds = GraphAlbum.eager_graph(:genres) ds.sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = ag.genre_id)' a = ds.with_fetch(:id=>1, :band_id=>2, :genres_id=>nil).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.genres.must_equal [] end it "should handle missing associated records when loading multiple associations" do ds = GraphAlbum.eager_graph(:genres, :tracks, :band) ds.sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id, tracks.id AS tracks_id, tracks.album_id, band.id AS band_id_0, band.vocalist_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres ON (genres.id = ag.genre_id) LEFT OUTER JOIN tracks ON (tracks.album_id = albums.id) LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id)' a = ds.with_fetch([{:id=>1, :band_id=>2, :genres_id=>nil, :tracks_id=>3, :album_id=>1, :band_id_0=>nil, :vocalist_id=>nil}, {:id=>1, :band_id=>2, :genres_id=>nil, :tracks_id=>4, :album_id=>1, :band_id_0=>nil, :vocalist_id=>nil}, {:id=>1, :band_id=>2, :genres_id=>nil, :tracks_id=>5, :album_id=>1, :band_id_0=>nil, :vocalist_id=>nil}, {:id=>1, :band_id=>2, :genres_id=>nil, :tracks_id=>6, :album_id=>1, :band_id_0=>nil, :vocalist_id=>nil}]).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a = a.first a.tracks.must_equal [GraphTrack.load(:id => 3, :album_id => 1), GraphTrack.load(:id => 4, :album_id => 1), GraphTrack.load(:id => 5, :album_id => 1), GraphTrack.load(:id => 6, :album_id => 1)] a.band.must_be_nil a.genres.must_equal [] end it "should handle missing associated records when cascading eager loading for associations of associated models" do ds = GraphTrack.eager_graph(:album=>{:band=>:members}) ds.sql.must_equal 'SELECT tracks.id, tracks.album_id, album.id AS album_id_0, album.band_id, band.id AS band_id_0, band.vocalist_id, members.id AS members_id FROM tracks LEFT OUTER JOIN albums AS album ON (album.id = tracks.album_id) LEFT OUTER JOIN bands AS band ON (band.id = album.band_id) LEFT OUTER JOIN bm ON (bm.band_id = band.id) LEFT OUTER JOIN members ON (members.id = bm.member_id)' a = ds.with_fetch([{:id=>2, :album_id=>2, :album_id_0=>nil, :band_id=>nil, :members_id=>nil, :band_id_0=>nil, :vocalist_id=>nil}, {:id=>3, :album_id=>3, :album_id_0=>3, :band_id=>3, :members_id=>nil, :band_id_0=>nil, :vocalist_id=>nil}, {:id=>4, :album_id=>4, :album_id_0=>4, :band_id=>2, :members_id=>nil, :band_id_0=>2, :vocalist_id=>6}, {:id=>5, :album_id=>1, :album_id_0=>1, :band_id=>4, :members_id=>5, :band_id_0=>4, :vocalist_id=>8}, {:id=>5, :album_id=>1, :album_id_0=>1, :band_id=>4, :members_id=>6, :band_id_0=>4, :vocalist_id=>8}]).all a.must_equal [GraphTrack.load(:id => 2, :album_id => 2), GraphTrack.load(:id => 3, :album_id => 3), GraphTrack.load(:id => 4, :album_id => 4), GraphTrack.load(:id => 5, :album_id => 1)] a.map{|x| x.album}.must_equal [nil, GraphAlbum.load(:id => 3, :band_id => 3), GraphAlbum.load(:id => 4, :band_id => 
2), GraphAlbum.load(:id => 1, :band_id => 4)] a.map{|x| x.album.band if x.album}.must_equal [nil, nil, GraphBand.load(:id => 2, :vocalist_id=>6), GraphBand.load(:id => 4, :vocalist_id=>8)] a.map{|x| x.album.band.members if x.album && x.album.band}.must_equal [nil, nil, [], [GraphBandMember.load(:id => 5), GraphBandMember.load(:id => 6)]] end it "should respect the association's :primary_key option" do GraphAlbum.many_to_one :inner_band, :class=>'GraphBand', :key=>:band_id, :primary_key=>:vocalist_id ds = GraphAlbum.eager_graph(:inner_band) ds.sql.must_equal 'SELECT albums.id, albums.band_id, inner_band.id AS inner_band_id, inner_band.vocalist_id FROM albums LEFT OUTER JOIN bands AS inner_band ON (inner_band.vocalist_id = albums.band_id)' as = ds.with_fetch(:id=>3, :band_id=>2, :inner_band_id=>5, :vocalist_id=>2).all as.must_equal [GraphAlbum.load(:id=>3, :band_id=>2)] as.first.inner_band.must_equal GraphBand.load(:id=>5, :vocalist_id=>2) GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :primary_key=>:band_id, :reciprocal=>nil ds = GraphAlbum.eager_graph(:right_tracks) ds.sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS right_tracks ON (right_tracks.album_id = albums.band_id)' as = ds.with_fetch([{:id=>3, :band_id=>2, :right_tracks_id=>5, :album_id=>2}, {:id=>3, :band_id=>2, :right_tracks_id=>6, :album_id=>2}]).all as.must_equal [GraphAlbum.load(:id=>3, :band_id=>2)] as.first.right_tracks.must_equal [GraphTrack.load(:id=>5, :album_id=>2), GraphTrack.load(:id=>6, :album_id=>2)] end it "should respect many_to_one association's composite keys" do GraphAlbum.many_to_one :inner_band, :class=>'GraphBand', :key=>[:band_id, :id], :primary_key=>[:vocalist_id, :id] ds = GraphAlbum.eager_graph(:inner_band) ds.sql.must_equal 'SELECT albums.id, albums.band_id, inner_band.id AS inner_band_id, inner_band.vocalist_id FROM albums LEFT OUTER JOIN bands AS inner_band ON ((inner_band.vocalist_id = albums.band_id) AND (inner_band.id = albums.id))' as = ds.with_fetch(:id=>3, :band_id=>2, :inner_band_id=>3, :vocalist_id=>2).all as.must_equal [GraphAlbum.load(:id=>3, :band_id=>2)] as.first.inner_band.must_equal GraphBand.load(:id=>3, :vocalist_id=>2) end it "should respect one_to_many association's composite keys" do GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>[:album_id, :id], :primary_key=>[:band_id, :id] ds = GraphAlbum.eager_graph(:right_tracks) ds.sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS right_tracks ON ((right_tracks.album_id = albums.band_id) AND (right_tracks.id = albums.id))' as = ds.with_fetch(:id=>3, :band_id=>2, :right_tracks_id=>3, :album_id=>2).all as.must_equal [GraphAlbum.load(:id=>3, :band_id=>2)] as.first.right_tracks.must_equal [GraphTrack.load(:id=>3, :album_id=>2)] end it "should respect many_to_many association's composite keys" do GraphAlbum.many_to_many :sbands, :class=>'GraphBand', :left_key=>[:l1, :l2], :left_primary_key=>[:band_id, :id], :right_key=>[:r1, :r2], :right_primary_key=>[:vocalist_id, :id], :join_table=>:b ds = GraphAlbum.eager_graph(:sbands) ds.sql.must_equal 'SELECT albums.id, albums.band_id, sbands.id AS sbands_id, sbands.vocalist_id FROM albums LEFT OUTER JOIN b ON ((b.l1 = albums.band_id) AND (b.l2 = albums.id)) LEFT OUTER JOIN bands AS sbands ON ((sbands.vocalist_id = b.r1) AND (sbands.id = b.r2))' as = 
ds.with_fetch([{:id=>3, :band_id=>2, :sbands_id=>5, :vocalist_id=>6}, {:id=>3, :band_id=>2, :sbands_id=>6, :vocalist_id=>22}]).all as.must_equal [GraphAlbum.load(:id=>3, :band_id=>2)] as.first.sbands.must_equal [GraphBand.load(:id=>5, :vocalist_id=>6), GraphBand.load(:id=>6, :vocalist_id=>22)] end it "should respect many_to_many association's :left_primary_key and :right_primary_key options" do GraphAlbum.many_to_many :inner_genres, :class=>'GraphGenre', :left_key=>:album_id, :left_primary_key=>:band_id, :right_key=>:genre_id, :right_primary_key=>:xxx, :join_table=>:ag ds = GraphAlbum.eager_graph(:inner_genres) ds.sql.must_equal 'SELECT albums.id, albums.band_id, inner_genres.id AS inner_genres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.band_id) LEFT OUTER JOIN genres AS inner_genres ON (inner_genres.xxx = ag.genre_id)' as = ds.with_fetch([{:id=>3, :band_id=>2, :inner_genres_id=>5, :xxx=>12}, {:id=>3, :band_id=>2, :inner_genres_id=>6, :xxx=>22}]).all as.must_equal [GraphAlbum.load(:id=>3, :band_id=>2)] as.first.inner_genres.must_equal [GraphGenre.load(:id=>5), GraphGenre.load(:id=>6)] end it "should respect composite primary keys for classes when eager loading" do c1 = Class.new(GraphAlbum) c2 = Class.new(GraphBand) c1.set_primary_key [:band_id, :id] c2.set_primary_key [:vocalist_id, :id] c1.many_to_many :sbands, :class=>c2, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :join_table=>:b c2.one_to_many :salbums, :class=>c1, :key=>[:band_id, :id] ds = c1.eager_graph(:sbands=>:salbums) ds.sql.must_equal 'SELECT albums.id, albums.band_id, sbands.id AS sbands_id, sbands.vocalist_id, salbums.id AS salbums_id, salbums.band_id AS salbums_band_id FROM albums LEFT OUTER JOIN b ON ((b.l1 = albums.band_id) AND (b.l2 = albums.id)) LEFT OUTER JOIN bands AS sbands ON ((sbands.vocalist_id = b.r1) AND (sbands.id = b.r2)) LEFT OUTER JOIN albums AS salbums ON ((salbums.band_id = sbands.vocalist_id) AND (salbums.id = sbands.id))' as = ds.with_fetch([{:id=>3, :band_id=>2, :sbands_id=>5, :vocalist_id=>6, :salbums_id=>7, :salbums_band_id=>8}, {:id=>3, :band_id=>2, :sbands_id=>5, :vocalist_id=>6, :salbums_id=>9, :salbums_band_id=>10}, {:id=>3, :band_id=>2, :sbands_id=>6, :vocalist_id=>22, :salbums_id=>nil, :salbums_band_id=>nil}, {:id=>7, :band_id=>8, :sbands_id=>nil, :vocalist_id=>nil, :salbums_id=>nil, :salbums_band_id=>nil}]).all as.must_equal [c1.load(:id=>3, :band_id=>2), c1.load(:id=>7, :band_id=>8)] as.map{|x| x.sbands}.must_equal [[c2.load(:id=>5, :vocalist_id=>6), c2.load(:id=>6, :vocalist_id=>22)], []] as.map{|x| x.sbands.map{|y| y.salbums}}.must_equal [[[c1.load(:id=>7, :band_id=>8), c1.load(:id=>9, :band_id=>10)], []], []] end it "should consider all row columns for primary key if any composite primary key components is NULL" do c1 = Class.new(GraphAlbum) c2 = Class.new(GraphBand) c1.columns(:id, :band_id, :foo) c1.set_primary_key [:band_id, :id] c2.set_primary_key [:vocalist_id, :id] c1.many_to_many :sbands, :class=>c2, :left_key=>[:l1, :l2], :right_key=>[:r1, :r2], :join_table=>:b c2.one_to_many :salbums, :class=>c1, :key=>[:band_id, :id] ds = c1.eager_graph(:sbands=>:salbums) ds.sql.must_equal 'SELECT albums.id, albums.band_id, albums.foo, sbands.id AS sbands_id, sbands.vocalist_id, salbums.id AS salbums_id, salbums.band_id AS salbums_band_id, salbums.foo AS salbums_foo FROM albums LEFT OUTER JOIN b ON ((b.l1 = albums.band_id) AND (b.l2 = albums.id)) LEFT OUTER JOIN bands AS sbands ON ((sbands.vocalist_id = b.r1) AND (sbands.id = b.r2)) LEFT OUTER JOIN albums AS salbums ON 
((salbums.band_id = sbands.vocalist_id) AND (salbums.id = sbands.id))' as = ds.with_fetch([{:id=>3, :band_id=>nil, :foo=>1, :sbands_id=>5, :vocalist_id=>6, :salbums_id=>7, :salbums_band_id=>8, :salbums_foo=>9}, {:id=>3, :band_id=>nil, :foo=>1, :sbands_id=>5, :vocalist_id=>6, :salbums_id=>9, :salbums_band_id=>10, :salbums_foo=>9}, {:id=>3, :band_id=>nil, :foo=>2, :sbands_id=>5, :vocalist_id=>6, :salbums_id=>9, :salbums_band_id=>10, :salbums_foo=>9}, {:id=>7, :band_id=>8, :foo=>2, :sbands_id=>6, :vocalist_id=>22, :salbums_id=>nil, :salbums_band_id=>nil, :salbums_foo=>nil}, {:id=>7, :band_id=>8, :foo=>3, :sbands_id=>nil, :vocalist_id=>nil, :salbums_id=>nil, :salbums_band_id=>nil, :salbums_foo=>nil}]).all as.must_equal [c1.load(:id=>3, :band_id=>nil, :foo=>1), c1.load(:id=>3, :band_id=>nil, :foo=>2), c1.load(:id=>7, :band_id=>8, :foo=>2)] as.map{|x| x.sbands}.must_equal [[c2.load(:id=>5, :vocalist_id=>6)], [c2.load(:id=>5, :vocalist_id=>6)], [c2.load(:id=>6, :vocalist_id=>22)]] as.map{|x| x.sbands.map{|y| y.salbums}}.must_equal [[[c1.load(:id=>7, :band_id=>8, :foo=>9), c1.load(:id=>9, :band_id=>10, :foo=>9)]], [[c1.load(:id=>7, :band_id=>8, :foo=>9), c1.load(:id=>9, :band_id=>10, :foo=>9)]], [[]]] end it "should respect the association's :graph_select option" do GraphAlbum.many_to_one :inner_band, :class=>'GraphBand', :key=>:band_id, :graph_select=>:vocalist_id GraphAlbum.eager_graph(:inner_band).sql.must_equal 'SELECT albums.id, albums.band_id, inner_band.vocalist_id FROM albums LEFT OUTER JOIN bands AS inner_band ON (inner_band.id = albums.band_id)' GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :graph_select=>[:album_id] GraphAlbum.eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS right_tracks ON (right_tracks.album_id = albums.id)' GraphAlbum.many_to_many :inner_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_select=>[] GraphAlbum.eager_graph(:inner_genres).sql.must_equal 'SELECT albums.id, albums.band_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS inner_genres ON (inner_genres.id = ag.genre_id)' end it "should respect the association's :graph_alias_base option" do GraphAlbum.many_to_one :inner_band, :class=>'GraphBand', :key=>:band_id, :graph_alias_base=>:foo ds = GraphAlbum.eager_graph(:inner_band) ds.sql.must_equal 'SELECT albums.id, albums.band_id, foo.id AS foo_id, foo.vocalist_id FROM albums LEFT OUTER JOIN bands AS foo ON (foo.id = albums.band_id)' GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :graph_alias_base=>:foo ds.eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, foo.id AS foo_id, foo.vocalist_id, foo_0.id AS foo_0_id, foo_0.album_id FROM albums LEFT OUTER JOIN bands AS foo ON (foo.id = albums.band_id) LEFT OUTER JOIN tracks AS foo_0 ON (foo_0.album_id = albums.id)' end it "should respect the association's :graph_join_type option" do GraphAlbum.many_to_one :inner_band, :class=>'GraphBand', :key=>:band_id, :graph_join_type=>:inner GraphAlbum.eager_graph(:inner_band).sql.must_equal 'SELECT albums.id, albums.band_id, inner_band.id AS inner_band_id, inner_band.vocalist_id FROM albums INNER JOIN bands AS inner_band ON (inner_band.id = albums.band_id)' GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :graph_join_type=>:right_outer GraphAlbum.eager_graph(:right_tracks).sql.must_equal 'SELECT 
albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums RIGHT OUTER JOIN tracks AS right_tracks ON (right_tracks.album_id = albums.id)' GraphAlbum.many_to_many :inner_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_join_type=>:inner GraphAlbum.eager_graph(:inner_genres).sql.must_equal 'SELECT albums.id, albums.band_id, inner_genres.id AS inner_genres_id FROM albums INNER JOIN ag ON (ag.album_id = albums.id) INNER JOIN genres AS inner_genres ON (inner_genres.id = ag.genre_id)' end it "should respect the association's :graph_join_table_join_type option" do GraphAlbum.many_to_many :inner_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_join_table_join_type=>:inner GraphAlbum.eager_graph(:inner_genres).sql.must_equal 'SELECT albums.id, albums.band_id, inner_genres.id AS inner_genres_id FROM albums INNER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS inner_genres ON (inner_genres.id = ag.genre_id)' GraphAlbum.many_to_many :inner_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_join_table_join_type=>:inner, :graph_join_type=>:right_outer GraphAlbum.eager_graph(:inner_genres).sql.must_equal 'SELECT albums.id, albums.band_id, inner_genres.id AS inner_genres_id FROM albums INNER JOIN ag ON (ag.album_id = albums.id) RIGHT OUTER JOIN genres AS inner_genres ON (inner_genres.id = ag.genre_id)' end it "should respect the association's :conditions option" do GraphAlbum.many_to_one :active_band, :class=>'GraphBand', :key=>:band_id, :conditions=>{:active=>true} GraphAlbum.eager_graph(:active_band).sql.must_equal "SELECT albums.id, albums.band_id, active_band.id AS active_band_id, active_band.vocalist_id FROM albums LEFT OUTER JOIN bands AS active_band ON ((active_band.id = albums.band_id) AND (active_band.active IS TRUE))" GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :conditions=>{:id=>(0..100)}, :reciprocal=>nil GraphAlbum.eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS right_tracks ON ((right_tracks.album_id = albums.id) AND (right_tracks.id >= 0) AND (right_tracks.id <= 100))' GraphAlbum.many_to_many :active_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :conditions=>{true=>:active} GraphAlbum.eager_graph(:active_genres).sql.must_equal "SELECT albums.id, albums.band_id, active_genres.id AS active_genres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS active_genres ON ((active_genres.id = ag.genre_id) AND ('t' = ag.active))" end it "should respect the association's :graph_conditions option" do GraphAlbum.many_to_one :active_band, :class=>'GraphBand', :key=>:band_id, :graph_conditions=>{:active=>true} GraphAlbum.eager_graph(:active_band).sql.must_equal "SELECT albums.id, albums.band_id, active_band.id AS active_band_id, active_band.vocalist_id FROM albums LEFT OUTER JOIN bands AS active_band ON ((active_band.id = albums.band_id) AND (active_band.active IS TRUE))" GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :graph_conditions=>{:id=>(0..100)} GraphAlbum.eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS 
right_tracks ON ((right_tracks.album_id = albums.id) AND (right_tracks.id >= 0) AND (right_tracks.id <= 100))' GraphAlbum.many_to_many :active_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_conditions=>{true=>:active} GraphAlbum.eager_graph(:active_genres).sql.must_equal "SELECT albums.id, albums.band_id, active_genres.id AS active_genres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS active_genres ON ((active_genres.id = ag.genre_id) AND ('t' = ag.active))" end it "should respect the association's :graph_join_table_conditions option" do GraphAlbum.many_to_many :active_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_join_table_conditions=>{:active=>true} GraphAlbum.eager_graph(:active_genres).sql.must_equal "SELECT albums.id, albums.band_id, active_genres.id AS active_genres_id FROM albums LEFT OUTER JOIN ag ON ((ag.album_id = albums.id) AND (ag.active IS TRUE)) LEFT OUTER JOIN genres AS active_genres ON (active_genres.id = ag.genre_id)" GraphAlbum.many_to_many :active_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_conditions=>{true=>:active}, :graph_join_table_conditions=>{true=>:active} GraphAlbum.eager_graph(:active_genres).sql.must_equal "SELECT albums.id, albums.band_id, active_genres.id AS active_genres_id FROM albums LEFT OUTER JOIN ag ON ((ag.album_id = albums.id) AND ('t' = albums.active)) LEFT OUTER JOIN genres AS active_genres ON ((active_genres.id = ag.genre_id) AND ('t' = ag.active))" end it "should respect the association's :graph_block option" do GraphAlbum.many_to_one :active_band, :class=>'GraphBand', :key=>:band_id, :graph_block=>proc{|ja,lja,js| {Sequel.qualify(ja, :active)=>true}} GraphAlbum.eager_graph(:active_band).sql.must_equal "SELECT albums.id, albums.band_id, active_band.id AS active_band_id, active_band.vocalist_id FROM albums LEFT OUTER JOIN bands AS active_band ON ((active_band.id = albums.band_id) AND (active_band.active IS TRUE))" GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :graph_block=>proc{|ja,lja,js| {Sequel.qualify(ja, :id)=>(0..100)}} GraphAlbum.eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS right_tracks ON ((right_tracks.album_id = albums.id) AND (right_tracks.id >= 0) AND (right_tracks.id <= 100))' GraphAlbum.many_to_many :active_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_block=>proc{|ja,lja,js| {true=>Sequel.qualify(lja, :active)}} GraphAlbum.eager_graph(:active_genres).sql.must_equal "SELECT albums.id, albums.band_id, active_genres.id AS active_genres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS active_genres ON ((active_genres.id = ag.genre_id) AND ('t' = ag.active))" end it "should respect the association's :graph_join_table_block option" do GraphAlbum.many_to_many :active_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_join_table_block=>proc{|ja,lja,js| {Sequel.qualify(ja, :active)=>true}} GraphAlbum.eager_graph(:active_genres).sql.must_equal "SELECT albums.id, albums.band_id, active_genres.id AS active_genres_id FROM albums LEFT OUTER JOIN ag ON ((ag.album_id = albums.id) AND (ag.active IS TRUE)) LEFT OUTER JOIN genres AS active_genres ON 
(active_genres.id = ag.genre_id)" GraphAlbum.many_to_many :active_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_block=>proc{|ja,lja,js| {true=>Sequel.qualify(lja, :active)}}, :graph_join_table_block=>proc{|ja,lja,js| {true=>Sequel.qualify(lja, :active)}} GraphAlbum.eager_graph(:active_genres).sql.must_equal "SELECT albums.id, albums.band_id, active_genres.id AS active_genres_id FROM albums LEFT OUTER JOIN ag ON ((ag.album_id = albums.id) AND ('t' = albums.active)) LEFT OUTER JOIN genres AS active_genres ON ((active_genres.id = ag.genre_id) AND ('t' = ag.active))" end it "should respect the association's :eager_grapher option" do GraphAlbum.many_to_one :active_band, :class=>'GraphBand', :key=>:band_id, :eager_grapher=>proc{|eo| eo[:self].graph(GraphBand.dataset, {:active=>true}, :table_alias=>eo[:table_alias], :join_type=>:inner)} GraphAlbum.eager_graph(:active_band).sql.must_equal "SELECT albums.id, albums.band_id, active_band.id AS active_band_id, active_band.vocalist_id FROM albums INNER JOIN bands AS active_band ON (active_band.active IS TRUE)" GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :eager_grapher=>proc{|eo| eo[:self].graph(GraphTrack.dataset, nil, :join_type=>:natural, :table_alias=>eo[:table_alias])} GraphAlbum.eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums NATURAL JOIN tracks AS right_tracks' GraphAlbum.many_to_many :active_genres, :class=>'GraphGenre', :eager_grapher=>proc{|eo| eo[:self].graph(:ag, {:album_id=>:id}, :table_alias=>:a123, :implicit_qualifier=>eo[:implicit_qualifier]).graph(GraphGenre.dataset, [:album_id], :table_alias=>eo[:table_alias])} GraphAlbum.eager_graph(:active_genres).sql.must_equal "SELECT albums.id, albums.band_id, active_genres.id AS active_genres_id FROM albums LEFT OUTER JOIN ag AS a123 ON (a123.album_id = albums.id) LEFT OUTER JOIN genres AS active_genres USING (album_id)" end it "should respect the association's :graph_only_conditions option" do GraphAlbum.many_to_one :active_band, :class=>'GraphBand', :key=>:band_id, :graph_only_conditions=>{:active=>true} GraphAlbum.eager_graph(:active_band).sql.must_equal "SELECT albums.id, albums.band_id, active_band.id AS active_band_id, active_band.vocalist_id FROM albums LEFT OUTER JOIN bands AS active_band ON (active_band.active IS TRUE)" GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :graph_only_conditions=>nil, :graph_join_type=>:natural GraphAlbum.eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums NATURAL JOIN tracks AS right_tracks' GraphAlbum.many_to_many :active_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_only_conditions=>[:album_id] GraphAlbum.eager_graph(:active_genres).sql.must_equal "SELECT albums.id, albums.band_id, active_genres.id AS active_genres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS active_genres USING (album_id)" end it "should respect the association's :graph_join_table_only_conditions option" do GraphAlbum.many_to_many :active_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_join_table_only_conditions=>{:active=>true} GraphAlbum.eager_graph(:active_genres).sql.must_equal "SELECT albums.id, albums.band_id, active_genres.id AS 
active_genres_id FROM albums LEFT OUTER JOIN ag ON (ag.active IS TRUE) LEFT OUTER JOIN genres AS active_genres ON (active_genres.id = ag.genre_id)" GraphAlbum.many_to_many :active_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :graph_only_conditions=>(Sequel.expr(:price) + 2 > 100), :graph_join_table_only_conditions=>Sequel.identifier("active") GraphAlbum.eager_graph(:active_genres).sql.must_equal "SELECT albums.id, albums.band_id, active_genres.id AS active_genres_id FROM albums LEFT OUTER JOIN ag ON active LEFT OUTER JOIN genres AS active_genres ON ((price + 2) > 100)" end it "should create unique table aliases for all associations" do GraphAlbum.eager_graph(:previous_album=>{:previous_album=>:previous_album}).sql.must_equal "SELECT albums.id, albums.band_id, previous_album.id AS previous_album_id, previous_album.band_id AS previous_album_band_id, previous_album_0.id AS previous_album_0_id, previous_album_0.band_id AS previous_album_0_band_id, previous_album_1.id AS previous_album_1_id, previous_album_1.band_id AS previous_album_1_band_id FROM albums LEFT OUTER JOIN albums AS previous_album ON (previous_album.id = albums.previous_album_id) LEFT OUTER JOIN albums AS previous_album_0 ON (previous_album_0.id = previous_album.previous_album_id) LEFT OUTER JOIN albums AS previous_album_1 ON (previous_album_1.id = previous_album_0.previous_album_id)" end it "should respect the association's :order" do GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :order=>[:id, :album_id] GraphAlbum.eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS right_tracks ON (right_tracks.album_id = albums.id) ORDER BY right_tracks.id, right_tracks.album_id' end with_symbol_splitting "should not qualify qualified symbols in association's :order" do GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :order=>[Sequel.desc(:blah__id), :blah__id] GraphAlbum.eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS right_tracks ON (right_tracks.album_id = albums.id) ORDER BY blah.id DESC, blah.id' end it "should only qualify unqualified symbols, identifiers, or ordered versions in association's :order" do GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :order=>[Sequel.identifier(:blah__id), Sequel.identifier(:blah__id).desc, Sequel[:blah][:id].desc, Sequel[:blah][:id], :album_id, Sequel.desc(:album_id), 1, Sequel.lit('RANDOM()'), Sequel.qualify(:b, :a)] GraphAlbum.eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS right_tracks ON (right_tracks.album_id = albums.id) ORDER BY right_tracks.blah__id, right_tracks.blah__id DESC, blah.id DESC, blah.id, right_tracks.album_id, right_tracks.album_id DESC, 1, RANDOM(), b.a' end it "should not respect the association's :order if :order_eager_graph is false" do GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :order=>[:id, :album_id], :order_eager_graph=>false GraphAlbum.eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS right_tracks ON (right_tracks.album_id 
= albums.id)' end it "should add the association's :order to the existing order" do GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :order=>[:id, :album_id] GraphAlbum.order(:band_id).eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS right_tracks ON (right_tracks.album_id = albums.id) ORDER BY band_id, right_tracks.id, right_tracks.album_id' end it "should use the association's :graph_order in preference to :order" do GraphAlbum.one_to_many :right_tracks, :class=>'GraphTrack', :key=>:album_id, :order=>[:tracks__id, :tracks__album_id], :graph_order=>[:id, :album_id] GraphAlbum.order(:band_id).eager_graph(:right_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, right_tracks.id AS right_tracks_id, right_tracks.album_id FROM albums LEFT OUTER JOIN tracks AS right_tracks ON (right_tracks.album_id = albums.id) ORDER BY band_id, right_tracks.id, right_tracks.album_id' end it "should add the association's :order for cascading associations" do GraphBand.one_to_many :a_albums, :class=>'GraphAlbum', :key=>:band_id, :order=>:name, :reciprocal=>nil GraphAlbum.one_to_many :b_tracks, :class=>'GraphTrack', :key=>:album_id, :order=>[:id, :album_id] GraphBand.eager_graph(:a_albums=>:b_tracks).sql.must_equal 'SELECT bands.id, bands.vocalist_id, a_albums.id AS a_albums_id, a_albums.band_id, b_tracks.id AS b_tracks_id, b_tracks.album_id FROM bands LEFT OUTER JOIN albums AS a_albums ON (a_albums.band_id = bands.id) LEFT OUTER JOIN tracks AS b_tracks ON (b_tracks.album_id = a_albums.id) ORDER BY a_albums.name, b_tracks.id, b_tracks.album_id' GraphAlbum.one_to_many :albums, :class=>'GraphAlbum', :key=>:band_id, :order=>[:band_id, :id] GraphAlbum.eager_graph(:albums=>{:albums=>:albums}).sql.must_equal 'SELECT albums.id, albums.band_id, albums_0.id AS albums_0_id, albums_0.band_id AS albums_0_band_id, albums_1.id AS albums_1_id, albums_1.band_id AS albums_1_band_id, albums_2.id AS albums_2_id, albums_2.band_id AS albums_2_band_id FROM albums LEFT OUTER JOIN albums AS albums_0 ON (albums_0.band_id = albums.id) LEFT OUTER JOIN albums AS albums_1 ON (albums_1.band_id = albums_0.id) LEFT OUTER JOIN albums AS albums_2 ON (albums_2.band_id = albums_1.id) ORDER BY albums_0.band_id, albums_0.id, albums_1.band_id, albums_1.id, albums_2.band_id, albums_2.id' end it "should add the association's :order for multiple associations" do GraphAlbum.many_to_many :a_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :order=>:id GraphAlbum.one_to_many :b_tracks, :class=>'GraphTrack', :key=>:album_id, :order=>[:id, :album_id] GraphAlbum.eager_graph(:a_genres, :b_tracks).sql.must_equal 'SELECT albums.id, albums.band_id, a_genres.id AS a_genres_id, b_tracks.id AS b_tracks_id, b_tracks.album_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS a_genres ON (a_genres.id = ag.genre_id) LEFT OUTER JOIN tracks AS b_tracks ON (b_tracks.album_id = albums.id) ORDER BY a_genres.id, b_tracks.id, b_tracks.album_id' end it "should use the correct qualifier when graphing multiple tables with extra conditions" do GraphAlbum.many_to_many :a_genres, :class=>'GraphGenre', :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag GraphAlbum.one_to_many :b_tracks, :class=>'GraphTrack', :key=>:album_id, :graph_conditions=>{:a=>:b} GraphAlbum.eager_graph(:a_genres, :b_tracks).sql.must_equal 'SELECT albums.id,
albums.band_id, a_genres.id AS a_genres_id, b_tracks.id AS b_tracks_id, b_tracks.album_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS a_genres ON (a_genres.id = ag.genre_id) LEFT OUTER JOIN tracks AS b_tracks ON ((b_tracks.album_id = albums.id) AND (b_tracks.a = albums.b))' end it "should eagerly load associated records for classes that do not have a primary key" do GraphAlbum.no_primary_key GraphGenre.no_primary_key GraphAlbum.many_to_many :inner_genres, :class=>'GraphGenre', :left_key=>:album_id, :left_primary_key=>:band_id, :right_key=>:genre_id, :right_primary_key=>:xxx, :join_table=>:ag ds = GraphAlbum.eager_graph(:inner_genres) ds.sql.must_equal 'SELECT albums.id, albums.band_id, inner_genres.id AS inner_genres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.band_id) LEFT OUTER JOIN genres AS inner_genres ON (inner_genres.xxx = ag.genre_id)' as = ds.with_fetch([{:id=>3, :band_id=>2, :inner_genres_id=>5, :xxx=>12}, {:id=>3, :band_id=>2, :inner_genres_id=>6, :xxx=>22}]).all as.must_equal [GraphAlbum.load(:id=>3, :band_id=>2)] as.first.inner_genres.must_equal [GraphGenre.load(:id=>5), GraphGenre.load(:id=>6)] GraphAlbum.set_primary_key :id GraphGenre.set_primary_key :id end it "should handle eager loading with schemas and aliases of different types" do GraphAlbum.eager_graph(:band).join(Sequel[:s][:genres], [:b_id]).eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id, genres_0.id AS genres_0_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) INNER JOIN s.genres USING (b_id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS genres_0 ON (genres_0.id = ag.genre_id)' GraphAlbum.eager_graph(:band).join(Sequel.qualify(:s, :genres), [:b_id]).eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id, genres_0.id AS genres_0_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) INNER JOIN s.genres USING (b_id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS genres_0 ON (genres_0.id = ag.genre_id)' GraphAlbum.eager_graph(:band).join(Sequel[:s][:b].as('genres'), [:b_id]).eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id, genres_0.id AS genres_0_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) INNER JOIN s.b AS genres USING (b_id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS genres_0 ON (genres_0.id = ag.genre_id)' GraphAlbum.eager_graph(:band).join(Sequel[:s][:b], [:b_id], :table_alias=>Sequel.identifier(:genres)).eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id, genres_0.id AS genres_0_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) INNER JOIN s.b AS genres USING (b_id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS genres_0 ON (genres_0.id = ag.genre_id)' GraphAlbum.eager_graph(:band).join(Sequel.identifier(:genres), [:b_id]).eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id, genres_0.id AS genres_0_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) INNER JOIN genres USING (b_id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS genres_0 ON (genres_0.id = ag.genre_id)' 
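# Hedged aside, added for clarity (not part of the original spec): the
# variants above and below all exercise the same requirement -- eager_graph
# must pick a fresh alias (genres_0) for its own joins no matter how the
# manually joined table was named, whether via Sequel[:s][:genres],
# Sequel.qualify, an aliased expression, an SQL::Identifier, or, as in the
# next assertion, a plain String.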
GraphAlbum.eager_graph(:band).join('genres', [:b_id]).eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id, genres_0.id AS genres_0_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) INNER JOIN genres USING (b_id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS genres_0 ON (genres_0.id = ag.genre_id)' end with_symbol_splitting "should handle eager loading with splittable symbols" do GraphAlbum.eager_graph(:band).join(:s__genres, [:b_id]).eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id, genres_0.id AS genres_0_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) INNER JOIN s.genres USING (b_id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS genres_0 ON (genres_0.id = ag.genre_id)' GraphAlbum.eager_graph(:band).join(Sequel.expr(:s__b).as('genres'), [:b_id]).eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id, genres_0.id AS genres_0_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) INNER JOIN s.b AS genres USING (b_id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS genres_0 ON (genres_0.id = ag.genre_id)' GraphAlbum.eager_graph(:band).join(:s__b, [:b_id], :table_alias=>Sequel.identifier(:genres)).eager_graph(:genres).sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id, genres_0.id AS genres_0_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) INNER JOIN s.b AS genres USING (b_id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS genres_0 ON (genres_0.id = ag.genre_id)' end it "should raise errors if invalid aliases or table styles are used" do proc{GraphAlbum.from_self(:alias=>Sequel.qualify(:s, :bands)).eager_graph(:band)}.must_raise(Sequel::Error) proc{GraphAlbum.from(Sequel.lit('?', :bands)).eager_graph(:band)}.must_raise(Sequel::Error) end it "should eagerly load schema qualified tables correctly with joins" do c1 = Class.new(GraphAlbum) c2 = Class.new(GraphGenre) ds = c1.dataset.from(Sequel[:s][:a]).with_extend{def columns; [:id] end} c1.dataset = ds ds = c1.dataset c2.dataset = c2.dataset.from(Sequel[:s][:g]) c1.many_to_many :a_genres, :class=>c2, :left_primary_key=>:id, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>Sequel[:s][:ag] ds = c1.join(Sequel[:s][:t], [:b_id]).eager_graph(:a_genres) ds.sql.must_equal 'SELECT a.id, a_genres.id AS a_genres_id FROM (SELECT * FROM s.a INNER JOIN s.t USING (b_id)) AS a LEFT OUTER JOIN s.ag AS ag ON (ag.album_id = a.id) LEFT OUTER JOIN s.g AS a_genres ON (a_genres.id = ag.genre_id)' ds = c1.eager_graph(:a_genres) ds.sql.must_equal 'SELECT s.a.id, a_genres.id AS a_genres_id FROM s.a LEFT OUTER JOIN s.ag AS ag ON (ag.album_id = s.a.id) LEFT OUTER JOIN s.g AS a_genres ON (a_genres.id = ag.genre_id)' end with_symbol_splitting "should eagerly load schema qualified table symbols correctly with joins" do c1 = Class.new(GraphAlbum) c2 = Class.new(GraphGenre) ds = c1.dataset.from(:s__a).with_extend{def columns; [:id] end} c1.dataset = ds ds = c1.dataset c2.dataset = c2.dataset.from(:s__g) c1.many_to_many :a_genres, :class=>c2, :left_primary_key=>:id, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:s__ag ds = c1.join(:s__t, [:b_id]).eager_graph(:a_genres) ds.sql.must_equal 'SELECT a.id, a_genres.id AS a_genres_id FROM (SELECT * 
FROM s.a INNER JOIN s.t USING (b_id)) AS a LEFT OUTER JOIN s.ag AS ag ON (ag.album_id = a.id) LEFT OUTER JOIN s.g AS a_genres ON (a_genres.id = ag.genre_id)' ds = c1.eager_graph(:a_genres) ds.sql.must_equal 'SELECT s.a.id, a_genres.id AS a_genres_id FROM s.a LEFT OUTER JOIN s.ag AS ag ON (ag.album_id = s.a.id) LEFT OUTER JOIN s.g AS a_genres ON (a_genres.id = ag.genre_id)' end it "should respect :after_load callbacks on associations when eager graphing" do GraphAlbum.many_to_one :al_band, :class=>GraphBand, :key=>:band_id, :after_load=>proc{|o, a| a.id *=2} GraphAlbum.one_to_many :al_tracks, :class=>GraphTrack, :key=>:album_id, :after_load=>proc{|o, os| os.each{|a| a.id *=2}} GraphAlbum.many_to_many :al_genres, :class=>GraphGenre, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :after_load=>proc{|o, os| os.each{|a| a.id *=2}} ds = GraphAlbum.eager_graph(:al_band, :al_tracks, :al_genres) ds.sql.must_equal "SELECT albums.id, albums.band_id, al_band.id AS al_band_id, al_band.vocalist_id, al_tracks.id AS al_tracks_id, al_tracks.album_id, al_genres.id AS al_genres_id FROM albums LEFT OUTER JOIN bands AS al_band ON (al_band.id = albums.band_id) LEFT OUTER JOIN tracks AS al_tracks ON (al_tracks.album_id = albums.id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS al_genres ON (al_genres.id = ag.genre_id)" a = ds.with_fetch(:id=>1, :band_id=>2, :al_band_id=>3, :vocalist_id=>4, :al_tracks_id=>5, :album_id=>6, :al_genres_id=>7).all.first a.must_equal GraphAlbum.load(:id => 1, :band_id => 2) a.al_band.must_equal GraphBand.load(:id=>6, :vocalist_id=>4) a.al_tracks.must_equal [GraphTrack.load(:id=>10, :album_id=>6)] a.al_genres.must_equal [GraphGenre.load(:id=>14)] end it "should respect limits on associations when eager graphing" do GraphAlbum.many_to_one :al_band, :class=>GraphBand, :key=>:band_id GraphAlbum.one_to_many :al_tracks, :class=>GraphTrack, :key=>:album_id, :limit=>2 GraphAlbum.many_to_many :al_genres, :class=>GraphGenre, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>2 ds = GraphAlbum.eager_graph(:al_band, :al_tracks, :al_genres) ds.sql.must_equal "SELECT albums.id, albums.band_id, al_band.id AS al_band_id, al_band.vocalist_id, al_tracks.id AS al_tracks_id, al_tracks.album_id, al_genres.id AS al_genres_id FROM albums LEFT OUTER JOIN bands AS al_band ON (al_band.id = albums.band_id) LEFT OUTER JOIN tracks AS al_tracks ON (al_tracks.album_id = albums.id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS al_genres ON (al_genres.id = ag.genre_id)" a = ds.with_fetch([{:id=>1, :band_id=>2, :al_band_id=>3, :vocalist_id=>4, :al_tracks_id=>5, :album_id=>6, :al_genres_id=>7}, {:id=>1, :band_id=>2, :al_band_id=>8, :vocalist_id=>9, :al_tracks_id=>10, :album_id=>11, :al_genres_id=>12}, {:id=>1, :band_id=>2, :al_band_id=>13, :vocalist_id=>14, :al_tracks_id=>15, :album_id=>16, :al_genres_id=>17}]).all.first a.must_equal GraphAlbum.load(:id => 1, :band_id => 2) a.al_band.must_equal GraphBand.load(:id=>3, :vocalist_id=>4) a.al_tracks.must_equal [GraphTrack.load(:id=>5, :album_id=>6), GraphTrack.load(:id=>10, :album_id=>11)] a.al_genres.must_equal [GraphGenre.load(:id=>7), GraphGenre.load(:id=>12)] end it "should handle offsets on associations with no results when eager graphing" do GraphAlbum.one_to_many :al_tracks, :class=>GraphTrack, :key=>:album_id, :limit=>[2, 1] ds = GraphAlbum.eager_graph(:al_tracks) ds.sql.must_equal "SELECT albums.id, albums.band_id, al_tracks.id AS al_tracks_id, al_tracks.album_id FROM albums 
LEFT OUTER JOIN tracks AS al_tracks ON (al_tracks.album_id = albums.id)" a = ds.with_fetch([{:id=>1, :band_id=>2, :al_tracks_id=>nil, :album_id=>nil}]).all.first a.must_equal GraphAlbum.load(:id => 1, :band_id => 2) a.al_tracks.must_equal [] end it "should respect offsets on associations when eager graphing" do GraphAlbum.many_to_one :al_band, :class=>GraphBand, :key=>:band_id GraphAlbum.one_to_many :al_tracks, :class=>GraphTrack, :key=>:album_id, :limit=>[1, 1] GraphAlbum.many_to_many :al_genres, :class=>GraphGenre, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>[1,1] ds = GraphAlbum.eager_graph(:al_band, :al_tracks, :al_genres) ds.sql.must_equal "SELECT albums.id, albums.band_id, al_band.id AS al_band_id, al_band.vocalist_id, al_tracks.id AS al_tracks_id, al_tracks.album_id, al_genres.id AS al_genres_id FROM albums LEFT OUTER JOIN bands AS al_band ON (al_band.id = albums.band_id) LEFT OUTER JOIN tracks AS al_tracks ON (al_tracks.album_id = albums.id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS al_genres ON (al_genres.id = ag.genre_id)" a = ds.with_fetch([{:id=>1, :band_id=>2, :al_band_id=>3, :vocalist_id=>4, :al_tracks_id=>5, :album_id=>6, :al_genres_id=>7}, {:id=>1, :band_id=>2, :al_band_id=>8, :vocalist_id=>9, :al_tracks_id=>10, :album_id=>11, :al_genres_id=>12}, {:id=>1, :band_id=>2, :al_band_id=>13, :vocalist_id=>14, :al_tracks_id=>15, :album_id=>16, :al_genres_id=>17}]).all.first a.must_equal GraphAlbum.load(:id => 1, :band_id => 2) a.al_band.must_equal GraphBand.load(:id=>3, :vocalist_id=>4) a.al_tracks.must_equal [GraphTrack.load(:id=>10, :album_id=>11)] a.al_genres.must_equal [GraphGenre.load(:id=>12)] end it "should respect offsets on associations when eager graphing one_to_one and one_through_one associations" do GraphAlbum.many_to_one :al_band, :class=>GraphBand, :key=>:band_id GraphAlbum.one_to_one :al_track, :class=>GraphTrack, :key=>:album_id, :limit=>[nil, 1] GraphAlbum.one_through_one :al_genre, :class=>GraphGenre, :left_key=>:album_id, :right_key=>:genre_id, :join_table=>:ag, :limit=>[nil,1] ds = GraphAlbum.eager_graph(:al_band, :al_track, :al_genre) ds.sql.must_equal "SELECT albums.id, albums.band_id, al_band.id AS al_band_id, al_band.vocalist_id, al_track.id AS al_track_id, al_track.album_id, al_genre.id AS al_genre_id FROM albums LEFT OUTER JOIN bands AS al_band ON (al_band.id = albums.band_id) LEFT OUTER JOIN tracks AS al_track ON (al_track.album_id = albums.id) LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN genres AS al_genre ON (al_genre.id = ag.genre_id)" a = ds.with_fetch([{:id=>1, :band_id=>2, :al_band_id=>3, :vocalist_id=>4, :al_track_id=>5, :album_id=>6, :al_genre_id=>7}, {:id=>1, :band_id=>2, :al_band_id=>8, :vocalist_id=>9, :al_track_id=>10, :album_id=>11, :al_genre_id=>12}, {:id=>1, :band_id=>2, :al_band_id=>13, :vocalist_id=>14, :al_track_id=>15, :album_id=>16, :al_genre_id=>17}]).all.first a.must_equal GraphAlbum.load(:id => 1, :band_id => 2) a.al_band.must_equal GraphBand.load(:id=>3, :vocalist_id=>4) a.al_track.must_equal GraphTrack.load(:id=>10, :album_id=>11) a.al_genre.must_equal GraphGenre.load(:id=>12) end it "should not include duplicate objects when eager graphing many_to_one=>one_to_many" do ds = GraphAlbum.eager_graph(:band=>:albums) ds.sql.must_equal "SELECT albums.id, albums.band_id, band.id AS band_id_0, band.vocalist_id, albums_0.id AS albums_0_id, albums_0.band_id AS albums_0_band_id FROM albums LEFT OUTER JOIN bands AS band ON (band.id = albums.band_id) LEFT OUTER JOIN 
albums AS albums_0 ON (albums_0.band_id = band.id)" a = ds.with_fetch([ {:id=>1, :band_id=>2, :band_id_0=>2, :vocalist_id=>1, :albums_0_id=>1, :albums_0_band_id=>2}, {:id=>2, :band_id=>2, :band_id_0=>2, :vocalist_id=>1, :albums_0_id=>1, :albums_0_band_id=>2}, {:id=>1, :band_id=>2, :band_id_0=>2, :vocalist_id=>1, :albums_0_id=>2, :albums_0_band_id=>2}, {:id=>2, :band_id=>2, :band_id_0=>2, :vocalist_id=>1, :albums_0_id=>2, :albums_0_band_id=>2} ]).all albums = [GraphAlbum.load(:id => 1, :band_id => 2), GraphAlbum.load(:id => 2, :band_id => 2)] a.must_equal albums a.map(&:band).must_equal [GraphBand.load(:id=>2, :vocalist_id=>1), GraphBand.load(:id=>2, :vocalist_id=>1)] a.map(&:band).map(&:albums).must_equal [albums, albums] end it "should eagerly load a many_to_one association with a custom callback" do ds = GraphAlbum.eager_graph(:band => proc {|ds1| ds1.select(:id).columns(:id)}) ds.sql.must_equal 'SELECT albums.id, albums.band_id, band.id AS band_id_0 FROM albums LEFT OUTER JOIN (SELECT id FROM bands) AS band ON (band.id = albums.band_id)' a = ds.with_fetch(:id=>1, :band_id=>2, :band_id_0=>2).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.band.must_equal GraphBand.load(:id => 2) end it "should eagerly load a one_to_one association with a custom callback" do ds = GraphAlbum.eager_graph(:track => proc {|ds1| ds1.select(:album_id).columns(:album_id)}) ds.sql.must_equal 'SELECT albums.id, albums.band_id, track.album_id FROM albums LEFT OUTER JOIN (SELECT album_id FROM tracks) AS track ON (track.album_id = albums.id)' a = ds.with_fetch(:id=>1, :band_id=>2, :album_id=>1).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.track.must_equal GraphTrack.load(:album_id=>1) end it "should eagerly load a one_to_many association with a custom callback" do ds = GraphAlbum.eager_graph(:tracks => proc {|ds1| ds1.select(:album_id).columns(:album_id)}) ds.sql.must_equal 'SELECT albums.id, albums.band_id, tracks.album_id FROM albums LEFT OUTER JOIN (SELECT album_id FROM tracks) AS tracks ON (tracks.album_id = albums.id)' a = ds.with_fetch(:id=>1, :band_id=>2, :album_id=>1).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.tracks.must_equal [GraphTrack.load(:album_id=>1)] end it "should eagerly load a one_through_one association with a custom callback" do ds = GraphAlbum.eager_graph(:genre => proc {|ds1| ds1.select(:id).columns(:id)}) ds.sql.must_equal 'SELECT albums.id, albums.band_id, genre.id AS genre_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN (SELECT id FROM genres) AS genre ON (genre.id = ag.genre_id)' a = ds.with_fetch(:id=>1, :band_id=>2, :genre_id=>4).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.genre.must_equal GraphGenre.load(:id => 4) end it "should eagerly load a many_to_many association with a custom callback" do ds = GraphAlbum.eager_graph(:genres => proc {|ds1| ds1.select(:id).columns(:id)}) ds.sql.must_equal 'SELECT albums.id, albums.band_id, genres.id AS genres_id FROM albums LEFT OUTER JOIN ag ON (ag.album_id = albums.id) LEFT OUTER JOIN (SELECT id FROM genres) AS genres ON (genres.id = ag.genre_id)' a = ds.with_fetch(:id=>1, :band_id=>2, :genres_id=>4).all a.must_equal [GraphAlbum.load(:id => 1, :band_id => 2)] a.first.genres.must_equal [GraphGenre.load(:id => 4)] end it "should allow cascading of eager loading with a custom callback with hash value" do ds = GraphTrack.eager_graph(:album=>{proc{|ds1| ds1.select(:id, :band_id).columns(:id, :band_id)}=>{:band=>:members}}) 
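# Hedged aside, added for clarity (not part of the original spec): in the
# eager_graph(:assoc=>{callback=>deps}) form used above, the proc key is a
# dataset callback applied to the association's dataset (here it narrows the
# selected columns via a derived table), while the hash value names the
# associations to cascade to, so one call composes the callback with the
# nested graph.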
ds.sql.must_equal 'SELECT tracks.id, tracks.album_id, album.id AS album_id_0, album.band_id, band.id AS band_id_0, band.vocalist_id, members.id AS members_id FROM tracks LEFT OUTER JOIN (SELECT id, band_id FROM albums) AS album ON (album.id = tracks.album_id) LEFT OUTER JOIN bands AS band ON (band.id = album.band_id) LEFT OUTER JOIN bm ON (bm.band_id = band.id) LEFT OUTER JOIN members ON (members.id = bm.member_id)' a = ds.with_fetch(:id=>3, :album_id=>1, :album_id_0=>1, :band_id=>2, :members_id=>5, :band_id_0=>2, :vocalist_id=>6).all a.must_equal [GraphTrack.load(:id => 3, :album_id => 1)] a = a.first a.album.must_equal GraphAlbum.load(:id => 1, :band_id => 2) a.album.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>6) a.album.band.members.must_equal [GraphBandMember.load(:id => 5)] end it "should allow cascading of eager loading with a custom callback with array value" do ds = GraphTrack.eager_graph(:album=>{proc{|ds1| ds1.select(:id, :band_id).columns(:id, :band_id)}=>[:band, :tracks]}) ds.sql.must_equal 'SELECT tracks.id, tracks.album_id, album.id AS album_id_0, album.band_id, band.id AS band_id_0, band.vocalist_id, tracks_0.id AS tracks_0_id, tracks_0.album_id AS tracks_0_album_id FROM tracks LEFT OUTER JOIN (SELECT id, band_id FROM albums) AS album ON (album.id = tracks.album_id) LEFT OUTER JOIN bands AS band ON (band.id = album.band_id) LEFT OUTER JOIN tracks AS tracks_0 ON (tracks_0.album_id = album.id)' a = ds.with_fetch(:id=>3, :album_id=>1, :album_id_0=>1, :band_id=>2, :band_id_0=>2, :vocalist_id=>6, :tracks_0_id=>3, :tracks_0_album_id=>1).all a.must_equal [GraphTrack.load(:id => 3, :album_id => 1)] a = a.first a.album.must_equal GraphAlbum.load(:id => 1, :band_id => 2) a.album.band.must_equal GraphBand.load(:id => 2, :vocalist_id=>6) a.album.tracks.must_equal [GraphTrack.load(:id => 3, :album_id => 1)] end it "should have frozen internal data structures" do ds = GraphAlbum.eager_graph(:band) ds.opts[:eager_graph].must_be :frozen? ds.opts[:eager_graph].each_value{|v| v.must_be :frozen? if v.is_a?(Hash)} ds = ds.eager_graph(:tracks) ds.opts[:eager_graph].must_be :frozen? ds.opts[:eager_graph].each_value{|v| v.must_be :frozen? 
if v.is_a?(Hash)} end end describe "Sequel::Models with double underscores in table names" do before do @db = Sequel.mock(:fetch=>{:id=>1, :foo_id=>2}) @Foo = Class.new(Sequel::Model(@db[Sequel.identifier(:fo__os)])) @Foo.columns :id, :foo_id @Foo.one_to_many :foos, :class=>@Foo @db.sqls end it "should have working eager_graph implementations" do @db.fetch = {:id=>1, :foo_id=>1, :foos_id=>1, :foos_foo_id=>1} foos = @Foo.eager_graph(:foos).all @db.sqls.must_equal ["SELECT fo__os.id, fo__os.foo_id, foos.id AS foos_id, foos.foo_id AS foos_foo_id FROM fo__os LEFT OUTER JOIN fo__os AS foos ON (foos._id = fo__os.id)"] foos.must_equal [@Foo.load(:id=>1, :foo_id=>1)] foos.first.foos.must_equal [@Foo.load(:id=>1, :foo_id=>1)] end it "should have working eager_graph implementations when qualified" do @Foo.dataset = Sequel.identifier(:fo__os).qualify(:s) @Foo.columns :id, :foo_id @db.sqls @db.fetch = {:id=>1, :foo_id=>1, :foos_id=>1, :foos_foo_id=>1} foos = @Foo.eager_graph(:foos).all @db.sqls.must_equal ["SELECT s.fo__os.id, s.fo__os.foo_id, foos.id AS foos_id, foos.foo_id AS foos_foo_id FROM s.fo__os LEFT OUTER JOIN s.fo__os AS foos ON (foos._id = s.fo__os.id)"] foos.must_equal [@Foo.load(:id=>1, :foo_id=>1)] foos.first.foos.must_equal [@Foo.load(:id=>1, :foo_id=>1)] end end

sequel-5.63.0/spec/model/hooks_spec.rb

require_relative "spec_helper" describe "Model#before_create && Model#after_create" do before do @c = Class.new(Sequel::Model(:items)) do columns :x set_primary_key :x unrestrict_primary_key def after_create DB << "BLAH after" end end DB.reset end it "should be called around new record creation" do @c.send(:define_method, :before_create){DB << "BLAH before"} @c.create(:x => 2) DB.sqls.must_equal ['BLAH before', 'INSERT INTO items (x) VALUES (2)', 'SELECT * FROM items WHERE x = 2', 'BLAH after'] end it ".create should cancel the save and raise an error if before_create calls cancel_action and raise_on_save_failure is true" do @c.send(:define_method, :before_create){cancel_action 'not good'} proc{@c.create(:x => 2)}.must_raise(Sequel::HookFailed, 'not good') DB.sqls.must_equal [] @c.load(:id => 2233).save end it ".create should cancel the save and return nil if before_create calls cancel_action and raise_on_save_failure is false" do @c.send(:define_method, :before_create){cancel_action} @c.raise_on_save_failure = false @c.create(:x => 2).must_be_nil DB.sqls.must_equal [] end end describe "Model#before_update && Model#after_update" do before do @c = Class.new(Sequel::Model(:items)) do columns :id, :x def after_update DB << "BLAH after" end end DB.reset end it "should be called around record update" do @c.send(:define_method, :before_update){DB << "BLAH before"} m = @c.load(:id => 2233, :x=>123) m.save DB.sqls.must_equal ['BLAH before', 'UPDATE items SET x = 123 WHERE (id = 2233)', 'BLAH after'] end it
"#save should cancel the save and raise an error if before_update calls cancel_action and raise_on_save_failure is true" do @c.send(:define_method, :before_update){cancel_action} proc{@c.load(:id => 2233).save}.must_raise(Sequel::HookFailed) DB.sqls.must_equal [] end it "#save should cancel the save and return nil if before_update calls cancel_action and raise_on_save_failure is false" do @c.send(:define_method, :before_update){cancel_action} @c.raise_on_save_failure = false @c.load(:id => 2233).save.must_be_nil DB.sqls.must_equal [] end end describe "Model#before_save && Model#after_save" do before do @c = Class.new(Sequel::Model(:items)) do columns :x def after_save DB << "BLAH after" end end DB.reset end it "should be called around record update" do @c.send(:define_method, :before_save){DB << "BLAH before"} m = @c.load(:id => 2233, :x=>123) m.save DB.sqls.must_equal ['BLAH before', 'UPDATE items SET x = 123 WHERE (id = 2233)', 'BLAH after'] end it "should be called around record creation" do @c.send(:define_method, :before_save){DB << "BLAH before"} @c.set_primary_key :x @c.unrestrict_primary_key @c.create(:x => 2) DB.sqls.must_equal ['BLAH before', 'INSERT INTO items (x) VALUES (2)', 'SELECT * FROM items WHERE x = 2', 'BLAH after'] end it "#save should cancel the save and raise an error if before_save calls cancel_action and raise_on_failure option is true" do @c.send(:define_method, :before_save){cancel_action} @c.raise_on_save_failure = false proc{@c.load(:id => 2233).save(:raise_on_failure => true)}.must_raise(Sequel::HookFailed) DB.sqls.must_equal [] end it "#save should cancel the save and return nil if before_save calls cancel_action and raise_on_save_failure is false" do @c.send(:define_method, :before_save){cancel_action} @c.raise_on_save_failure = false @c.load(:id => 2233).save.must_be_nil DB.sqls.must_equal [] end it "#save should have a raised exception reference the model instance" do @c.send(:define_method, :before_save){cancel_action} proc{@c.create(:x => 2233)}.must_raise(Sequel::HookFailed){|e| e.model.must_equal @c.load(:x=>2233)} DB.sqls.must_equal [] end end describe "Model#before_destroy && Model#after_destroy" do before do @c = Class.new(Sequel::Model(:items)) do def after_destroy DB << "BLAH after" end end DB.reset end it "should be called around record destruction" do @c.send(:define_method, :before_destroy){DB << "BLAH before"} m = @c.load(:id => 2233) m.destroy DB.sqls.must_equal ['BLAH before', 'DELETE FROM items WHERE id = 2233', 'BLAH after'] end it "#destroy should cancel the destroy and raise an error if before_destroy calls cancel_action and raise_on_save_failure is true" do @c.send(:define_method, :before_destroy){cancel_action; true} proc{@c.load(:id => 2233).destroy}.must_raise(Sequel::HookFailed) DB.sqls.must_equal [] end it "#destroy should cancel the destroy and return nil if before_destroy calls cancel_action and raise_on_save_failure is false" do @c.send(:define_method, :before_destroy){cancel_action; true} @c.raise_on_save_failure = false @c.load(:id => 2233).destroy.must_be_nil DB.sqls.must_equal [] end end describe "Model#before_validation && Model#after_validation" do before do @c = Class.new(Sequel::Model(:items)) do columns :id def after_validation DB << "BLAH after" end def validate errors.add(:id, 'not valid') unless id == 2233 end end DB.reset end it "should be called around validation" do @c.send(:define_method, :before_validation){DB << "BLAH before"} m = @c.load(:id => 2233) m.must_be :valid? 
DB.sqls.must_equal ['BLAH before', 'BLAH after'] m = @c.load(:id => 22) m.wont_be :valid? DB.sqls.must_equal ['BLAH before', 'BLAH after'] end it "should be called when calling save" do @c.send(:define_method, :before_validation){DB << "BLAH before"} m = @c.load(:id => 2233, :x=>123) m.save.must_equal m DB.sqls.must_equal ['BLAH before', 'BLAH after', 'UPDATE items SET x = 123 WHERE (id = 2233)'] m = @c.load(:id => 22) m.raise_on_save_failure = false m.save.must_be_nil DB.sqls.must_equal ['BLAH before', 'BLAH after'] end it "#save should cancel the save and raise an error if before_validation calls cancel_action and raise_on_save_failure is true" do @c.send(:define_method, :before_validation){cancel_action} proc{@c.load(:id => 2233).save}.must_raise(Sequel::HookFailed) DB.sqls.must_equal [] end it "#save should cancel the save and return nil if before_validation calls cancel_action and raise_on_save_failure is false" do @c.send(:define_method, :before_validation){cancel_action} @c.raise_on_save_failure = false @c.load(:id => 2233).save.must_be_nil DB.sqls.must_equal [] end it "#valid? should return false if before_validation calls cancel_action" do @c.send(:define_method, :before_validation){cancel_action} @c.load(:id => 2233).valid?.must_equal false end end describe "Model around filters" do before do @c = Class.new(Sequel::Model(:items)) do columns :id, :x end DB.reset end it "around_create should be called around new record creation" do @c.class_eval do def around_create DB << 'ac_before' super DB << 'ac_after' end end @c.create(:x => 2) DB.sqls.must_equal ['ac_before', 'INSERT INTO items (x) VALUES (2)', "SELECT * FROM items WHERE id = 10", 'ac_after'] end it "around_destroy should be called around record destruction" do @c.class_eval do def around_destroy DB << 'ad_before' super DB << 'ad_after' end end @c.load(:id=>1, :x => 2).destroy DB.sqls.must_equal ['ad_before', 'DELETE FROM items WHERE id = 1', 'ad_after'] end it "around_update should be called around updating existing records" do @c.class_eval do def around_update DB << 'au_before' super DB << 'au_after' end end @c.load(:id=>1, :x => 2).save DB.sqls.must_equal ['au_before', 'UPDATE items SET x = 2 WHERE (id = 1)', 'au_after'] end it "around_save should be called around saving both new and existing records, around either around_create or around_update" do @c.class_eval do def around_update DB << 'au_before' super DB << 'au_after' end def around_create DB << 'ac_before' super DB << 'ac_after' end def around_save DB << 'as_before' super DB << 'as_after' end end @c.create(:x => 2) DB.sqls.must_equal ['as_before', 'ac_before', 'INSERT INTO items (x) VALUES (2)', "SELECT * FROM items WHERE id = 10", 'ac_after', 'as_after'] @c.load(:id=>1, :x => 2).save DB.sqls.must_equal ['as_before', 'au_before', 'UPDATE items SET x = 2 WHERE (id = 1)', 'au_after', 'as_after'] end it "around_validation should be called around validating records" do @c.class_eval do def around_validation DB << 'av_before' super DB << 'av_after' end def validate DB << 'validate' end end @c.new(:x => 2).valid?.must_equal true DB.sqls.must_equal [ 'av_before', 'validate', 'av_after' ] end it "around_validation should handle cancel_action" do @c.class_eval do def around_validation DB << 'av_before' cancel_action super DB << 'av_after' end def validate DB << 'validate' end end @c.new(:x => 2).valid?.must_equal false DB.sqls.must_equal [ 'av_before' ] end it "around_validation should be able to catch validation errors and modify them" do @c.class_eval do def validate
errors.add(:x, 'foo') end end @c.new(:x => 2).valid?.must_equal false @c.class_eval do def around_validation super errors.clear end end @c.new(:x => 2).valid?.must_equal true end it "around_create that doesn't call super should raise a HookFailed" do @c.send(:define_method, :around_create){} proc{@c.create(:x => 2)}.must_raise(Sequel::HookFailed) end it "around_update that doesn't call super should raise a HookFailed" do @c.send(:define_method, :around_update){} proc{@c.load(:x => 2).save}.must_raise(Sequel::HookFailed) end it "around_save that doesn't call super should raise a HookFailed" do @c.send(:define_method, :around_save){} proc{@c.create(:x => 2)}.must_raise(Sequel::HookFailed) proc{@c.load(:x => 2).save}.must_raise(Sequel::HookFailed) end it "around_destroy that doesn't call super should raise a HookFailed" do @c.send(:define_method, :around_destroy){} proc{@c.load(:x => 2).destroy}.must_raise(Sequel::HookFailed) end it "around_validation that doesn't call super should raise a HookFailed" do @c.send(:define_method, :around_validation){} proc{@c.new.save}.must_raise(Sequel::HookFailed) end it "around_validation that doesn't call super should have valid? return false" do @c.send(:define_method, :around_validation){} @c.new.valid?.must_equal false end it "around_* that doesn't call super should return nil if raise_on_save_failure is false" do @c.raise_on_save_failure = false o = @c.load(:id => 1) def o.around_save() end o.save.must_be_nil o = @c.load(:id => 1) def o.around_update() end o.save.must_be_nil o = @c.new def o.around_create() end o.save.must_be_nil o = @c.new def o.around_validation() end o.save.must_be_nil end end

sequel-5.63.0/spec/model/inflector_spec.rb

require_relative "spec_helper" describe Sequel::Inflections do before do @plurals, @singulars, @uncountables = Sequel.inflections.plurals.dup, Sequel.inflections.singulars.dup, Sequel.inflections.uncountables.dup end after do Sequel.inflections.plurals.replace(@plurals) Sequel.inflections.singulars.replace(@singulars) Sequel.inflections.uncountables.replace(@uncountables) end it "should be possible to clear the list of singulars, plurals, and uncountables" do Sequel.inflections.clear(:plurals) Sequel.inflections.plurals.must_equal [] Sequel.inflections.plural('blah', 'blahs') Sequel.inflections.clear Sequel.inflections.plurals.must_equal [] Sequel.inflections.singulars.must_equal [] Sequel.inflections.uncountables.must_equal [] end it "should be yielded and returned by Sequel.inflections" do Sequel.inflections{|i| i.must_equal Sequel::Inflections}.must_equal Sequel::Inflections end end describe Sequel::Inflections do include Sequel::Inflections it "#camelize should transform the word to CamelCase" do camelize("post").must_equal "Post" camelize("egg_and_hams").must_equal "EggAndHams" camelize("foo/bar").must_equal "Foo::Bar" camelize("foo/").must_equal "Foo::" camelize("foo//bar").must_equal "Foo::/bar"
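# Hedged aside, added for clarity (not part of the original spec): camelize
# upcases snake_case words and maps "/" to the "::" namespace separator, so
# camelize("admin/post") would yield "Admin::Post"; the doubled and tripled
# slash cases here pin down edge-case behavior rather than recommended usage.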
camelize("foo///bar").must_equal "Foo::/::Bar" s = "x".dup def s.camelize; "P" end camelize(s).must_equal "P" end it "#constantize should eval the string to get a constant" do constantize("String").must_equal String constantize("Sequel::Inflections").must_equal Sequel::Inflections proc{constantize("BKSDDF")}.must_raise NameError proc{constantize("++A++")}.must_raise NameError s = "x".dup def s.constantize; "P" end constantize(s).must_equal "P" end it "#demodulize should remove any preceding modules" do demodulize("String::Inflections::Blah").must_equal "Blah" demodulize("String::Inflections").must_equal "Inflections" demodulize("String").must_equal "String" s = "x".dup def s.demodulize; "P" end demodulize(s).must_equal "P" end it "#pluralize should transform words from singular to plural" do pluralize("sheep").must_equal "sheep" pluralize("post").must_equal "posts" pluralize("octopus").must_equal"octopuses" pluralize("the blue mailman").must_equal "the blue mailmen" pluralize("CamelOctopus").must_equal "CamelOctopuses" s = "x".dup def s.pluralize; "P" end pluralize(s).must_equal "P" end it "#singularize should transform words from plural to singular" do singularize("sheep").must_equal "sheep" singularize("posts").must_equal "post" singularize("octopuses").must_equal "octopus" singularize("the blue mailmen").must_equal "the blue mailman" singularize("CamelOctopuses").must_equal "CamelOctopus" s = "x".dup def s.singularize; "P" end singularize(s).must_equal "P" end it "#underscore should convert class name to underscored string" do underscore("Message").must_equal "message" underscore("Admin::Post").must_equal "admin/post" s = "x".dup def s.underscore; "P" end underscore(s).must_equal "P" end end ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������sequel-5.63.0/spec/model/model_spec.rb��������������������������������������������������������������0000664�0000000�0000000�00000074552�14342141206�0017606�0����������������������������������������������������������������������������������������������������ustar�00root����������������������������root����������������������������0000000�0000000������������������������������������������������������������������������������������������������������������������������������������������������������������������������require_relative "spec_helper" describe "Sequel::Model()" do before do @db = Sequel::Model.db end it "should return a model subclass with the given dataset if given a dataset" do ds = @db[:blah] c = Sequel::Model(ds) c.superclass.must_equal Sequel::Model c.dataset.row_proc.must_equal c end it "should return a model subclass with a dataset with the default database and given table name if given a Symbol" do c = Sequel::Model(:blah) c.superclass.must_equal Sequel::Model c.db.must_equal @db c.table_name.must_equal :blah end it "should return a model subclass with a dataset with the default database and given table name if given a LiteralString" do c = Sequel::Model(Sequel.lit('blah')) c.superclass.must_equal Sequel::Model c.db.must_equal @db c.table_name.must_equal Sequel.lit('blah') end it "should return a model subclass with a dataset with the default database and given table name if given an SQL::Identifier" do c = 
Sequel::Model(Sequel.identifier(:blah)) c.superclass.must_equal Sequel::Model c.db.must_equal @db c.table_name.must_equal Sequel.identifier(:blah) end it "should return a model subclass with a dataset with the default database and given table name if given an SQL::QualifiedIdentifier" do c = Sequel::Model(Sequel.qualify(:boo, :blah)) c.superclass.must_equal Sequel::Model c.db.must_equal @db c.table_name.must_equal Sequel.qualify(:boo, :blah) end it "should return a model subclass with a dataset with the default database and given table name if given an SQL::AliasedExpression" do c = Sequel::Model(Sequel.as(:blah, :boo)) c.superclass.must_equal Sequel::Model c.db.must_equal @db c.table_name.must_equal :boo end it "should return a model subclass with the given dataset if given a dataset using an SQL::Identifier" do ds = @db[Sequel.identifier(:blah)] c = Sequel::Model(ds) c.superclass.must_equal Sequel::Model c.dataset.row_proc.must_equal c end it "should be callable on Sequel::Model" do ds = @db[:blah] c = Sequel::Model::Model(ds) c.superclass.must_equal Sequel::Model c.dataset.row_proc.must_equal c end it "should be callable on subclasses of Sequel::Model" do ds = @db[:blah] c = Class.new(Sequel::Model) sc = c::Model(ds) sc.superclass.must_equal c sc.dataset.row_proc.must_equal sc end it "should be callable on other modules if def_Model is used" do m = Module.new Sequel::Model.def_Model(m) ds = @db[:blah] c = m::Model(ds) c.superclass.must_equal Sequel::Model c.dataset.row_proc.must_equal c end it "should be callable using model subclasses on other modules if def_Model is used" do m = Module.new c = Class.new(Sequel::Model) c.def_Model(m) ds = @db[:blah] sc = m::Model(ds) sc.superclass.must_equal c sc.dataset.row_proc.must_equal sc end it "should return a model subclass associated to the given database if given a database" do db = Sequel.mock c = Sequel::Model(db) c.superclass.must_equal Sequel::Model c.db.must_equal db proc{c.dataset}.must_raise(Sequel::Error) class SmBlahTest < c end SmBlahTest.db.must_equal db SmBlahTest.table_name.must_equal :sm_blah_tests end describe "reloading" do before do Sequel::Model.cache_anonymous_models = true end after do Sequel::Model.cache_anonymous_models = false Object.send(:remove_const, :Album) if defined?(::Album) end it "should work without raising an exception with a symbol" do class ::Album < Sequel::Model(:table); end class ::Album < Sequel::Model(:table); end end it "should work without raising an exception with an SQL::Identifier" do class ::Album < Sequel::Model(Sequel.identifier(:table)); end class ::Album < Sequel::Model(Sequel.identifier(:table)); end end it "should work without raising an exception with an SQL::QualifiedIdentifier" do class ::Album < Sequel::Model(Sequel.qualify(:schema, :table)); end class ::Album < Sequel::Model(Sequel.qualify(:schema, :table)); end end it "should work without raising an exception with an SQL::AliasedExpression" do class ::Album < Sequel::Model(Sequel.as(:table, :alias)); end class ::Album < Sequel::Model(Sequel.as(:table, :alias)); end end it "should work without raising an exception with a LiteralString" do class ::Album < Sequel::Model(Sequel.lit('table')); end class ::Album < Sequel::Model(Sequel.lit('table')); end end it "should work without raising an exception with a database" do class ::Album < Sequel::Model(@db); end class ::Album < Sequel::Model(@db); end end it "should work without raising an exception with a dataset" do class ::Album < Sequel::Model(@db[:table]); end class ::Album <
Sequel::Model(@db[:table]); end end it "should work without raising an exception with a dataset with an SQL::Identifier" do class ::Album < Sequel::Model(@db[Sequel.identifier(:table)]); end class ::Album < Sequel::Model(@db[Sequel.identifier(:table)]); end end it "should raise an exception if anonymous model caching is disabled" do Sequel::Model.cache_anonymous_models = false proc do class ::Album < Sequel::Model(@db[Sequel.identifier(:table)]); end class ::Album < Sequel::Model(@db[Sequel.identifier(:table)]); end end.must_raise TypeError end it "should use separate anonymous cache for subclasses" do c = Class.new(Sequel::Model) c.cache_anonymous_models.must_equal true class ::Album < c::Model(:table); end class ::Album < c::Model(:table); end c1 = c::Model(:t1) c1.must_equal c::Model(:t1) c1.wont_equal Sequel::Model(:t1) c.cache_anonymous_models = false Sequel::Model.cache_anonymous_models.must_equal true c1.wont_equal c::Model(:t1) end end end describe "Sequel::Model.freeze" do it "should freeze the model class and not allow any changes" do model = Class.new(Sequel::Model(:items)) model.freeze model.frozen?.must_equal true model.dataset.frozen?.must_equal true model.db_schema.frozen?.must_equal true model.db_schema[:id].frozen?.must_equal true model.columns.frozen?.must_equal true model.setter_methods.frozen?.must_equal true model.send(:overridable_methods_module).frozen?.must_equal true model.default_set_fields_options.frozen?.must_equal true proc{model.dataset_module{}}.must_raise RuntimeError end it "should work if the model is already frozen" do model = Class.new(Sequel::Model(:items)) model.freeze.freeze end it "should freeze a model class without a dataset without breaking" do model = Class.new(Sequel::Model) model.freeze model.frozen?.must_equal true proc{model.dataset}.must_raise Sequel::Error model.db_schema.must_be_nil model.columns.must_be_nil model.setter_methods.must_equal [] model.send(:overridable_methods_module).frozen?.must_equal true model.default_set_fields_options.frozen?.must_equal true proc{model.dataset_module{}}.must_raise RuntimeError end it "should allow subclasses of frozen model classes to work correctly" do model = Class.new(Sequel::Model(:items)) model.freeze model = Class.new(model) model.dataset = :items2 model.dataset_module{} model.plugin Module.new model.frozen?.must_equal false model.db_schema.frozen?.must_equal false model.db_schema[:id].frozen?.must_equal false model.setter_methods.frozen?.must_equal false model.dataset_module{}.frozen?.must_equal false model.send(:overridable_methods_module).frozen?.must_equal false model.default_set_fields_options.frozen?.must_equal false end end describe Sequel::Model do it "should have class method aliased as model" do model_a = Class.new(Sequel::Model(:items)) model_a.new.model.must_be_same_as model_a end it "should be associated with a dataset" do model_a = Class.new(Sequel::Model) { set_dataset DB[:as] } model_a.dataset.must_be_kind_of(Sequel::Mock::Dataset) model_a.dataset.opts[:from].must_equal [:as] model_b = Class.new(Sequel::Model) { set_dataset DB[:bs] } model_b.dataset.must_be_kind_of(Sequel::Mock::Dataset) model_b.dataset.opts[:from].must_equal [:bs] model_a.dataset.opts[:from].must_equal [:as] end end describe Sequel::Model do before do @model = Class.new(Sequel::Model(:items)) DB.reset end it "should not allow dup/clone" do proc{@model.dup}.must_raise NoMethodError proc{@model.clone}.must_raise NoMethodError end it "has table_name return name of table" do @model.table_name.must_equal :items end it 
"defaults to primary key of id" do @model.primary_key.must_equal :id end it "allow primary key change" do @model.set_primary_key :ssn @model.primary_key.must_equal :ssn end it "allow not primary key change for frozen class" do @model.freeze proc{@model.set_primary_key :ssn}.must_raise RuntimeError end it "allows dataset change" do @model.set_dataset(DB[:foo]) @model.table_name.must_equal :foo end it "allows frozen dataset" do @model.set_dataset(DB[:foo].freeze) @model.table_name.must_equal :foo @model.dataset.sql.must_equal 'SELECT * FROM foo' end it "table_name should respect table aliases" do @model.set_dataset(Sequel[:foo].as(:x)) @model.table_name.must_equal :x end with_symbol_splitting "table_name should respect table alias symbols" do @model.set_dataset(:foo___x) @model.table_name.must_equal :x end it "set_dataset should raise an error unless given a Symbol or Dataset" do proc{@model.set_dataset(Object.new)}.must_raise(Sequel::Error) end it "set_dataset should use a subquery for joined datasets" do @model.set_dataset(DB.from(:foo, :bar)) @model.dataset.sql.must_equal 'SELECT * FROM (SELECT * FROM foo, bar) AS foo' @model.set_dataset(DB[:foo].cross_join(:bar)) @model.dataset.sql.must_equal 'SELECT * FROM (SELECT * FROM foo CROSS JOIN bar) AS foo' end it "set_dataset should add the destroy method to the dataset that destroys each object" do ds = DB[:foo] ds.wont_respond_to(:destroy) ds = @model.set_dataset(ds).dataset ds.must_respond_to(:destroy) DB.sqls ds.with_fetch([{:id=>1}, {:id=>2}]).destroy.must_equal 2 DB.sqls.must_equal ["SELECT * FROM foo", "DELETE FROM foo WHERE id = 1", "DELETE FROM foo WHERE id = 2"] end it "set_dataset should add the destroy method that respects sharding with transactions" do db = Sequel.mock(:servers=>{:s1=>{}}) ds = db[:foo].server(:s1) @model.use_transactions = true ds = @model.set_dataset(ds).dataset db.sqls ds.destroy.must_equal 0 db.sqls.must_equal ["BEGIN -- s1", "SELECT * FROM foo -- s1", "COMMIT -- s1"] end it "should raise an error on set_dataset if there is an error connecting to the database" do def @model.columns() raise Sequel::DatabaseConnectionError end proc{@model.set_dataset(Sequel.mock[:foo].join(:blah).from_self)}.must_raise Sequel::DatabaseConnectionError end it "should raise an error on set_dataset if there is a disconnect error" do def @model.columns() raise Sequel::DatabaseDisconnectError end proc{@model.set_dataset(Sequel.mock[:foo].join(:blah).from_self)}.must_raise Sequel::DatabaseDisconnectError end it "should not raise an error if there is a problem getting the columns for a dataset" do def @model.columns() raise Sequel::Error end @model.set_dataset(DB[:foo].join(:blah).from_self) end it "doesn't raise an error on set_dataset if there is an error raised getting the schema" do db = Sequel.mock def db.schema(*) raise Sequel::Error; end @model.set_dataset(db[:foo]) end it "reload_db_schema? 
should be false by default" do c = Class.new c.extend Sequel::Model::ClassMethods c.send(:reload_db_schema?).must_equal false end it "doesn't raise an error on inherited if there is an error setting the dataset" do db = Sequel.mock def db.schema(*) raise Sequel::Error; end @model.dataset = db[:foo] Class.new(@model) end it "uses a savepoint if inside a transaction when getting the columns" do db = Sequel.mock def db.supports_savepoints?; true end Sequel::Model(db[:table]) db.sqls.must_equal ["SELECT * FROM table LIMIT 0"] db.transaction{Sequel::Model(db[:table])} db.sqls.must_equal ["BEGIN", "SAVEPOINT autopoint_1", "SELECT * FROM table LIMIT 0", "RELEASE SAVEPOINT autopoint_1", "COMMIT"] end it "should raise if bad inherited instance variable value is used" do def @model.inherited_instance_variables() super.merge(:@a=>:foo) end @model.instance_eval{@a=1} proc{Class.new(@model)}.must_raise(Sequel::Error) end it "copy inherited instance variables into subclass if set" do def @model.inherited_instance_variables() super.merge(:@a=>nil, :@b=>:dup, :@c=>:hash_dup, :@d=>proc{|v| v * 2}) end @model.instance_eval{@a=1; @b=[2]; @c={3=>[4]}; @d=10} m = Class.new(@model) @model.instance_eval{@a=5; @b << 6; @c[3] << 7; @c[8] = [9]; @d=40} m.instance_eval do @a.must_equal 1 @b.must_equal [2] @c.must_equal(3=>[4]) @d.must_equal 20 end end it "set_dataset should readd dataset method modules" do m = Module.new @model.dataset_module(m) @model.set_dataset(@model.dataset) @model.dataset.singleton_class.ancestors.must_include m end end describe Sequel::Model do before do @model = Class.new(Sequel::Model) DB.reset end it "allows set_dataset to accept a Symbol" do @model.set_dataset(:foo) @model.table_name.must_equal :foo end it "allows set_dataset to accept a LiteralString" do @model.set_dataset(Sequel.lit('foo')) @model.table_name.must_equal Sequel.lit('foo') end it "allows set_dataset to acceptan SQL::Identifier" do @model.set_dataset(Sequel.identifier(:foo)) @model.table_name.must_equal Sequel.identifier(:foo) end it "allows set_dataset to acceptan SQL::QualifiedIdentifier" do @model.set_dataset(Sequel.qualify(:bar, :foo)) @model.table_name.must_equal Sequel.qualify(:bar, :foo) end it "allows set_dataset to acceptan SQL::AliasedExpression" do @model.set_dataset(Sequel.as(:foo, :bar)) @model.table_name.must_equal :bar end end describe Sequel::Model, ".require_valid_table = true" do before do @db = Sequel.mock @db.columns = proc do |sql| raise Sequel::Error if sql =~ /foos/ [:id] end def @db.supports_schema_parsing?; true end def @db.schema(t, *) t.first_source == :foos ? 
(raise Sequel::Error) : [[:id, {}]] end Sequel::Model.db = @db Sequel::Model.require_valid_table = true end after do Sequel::Model.require_valid_table = false Sequel::Model.db = DB if Object.const_defined?(:Bar) Object.send(:remove_const, :Bar) end if Object.const_defined?(:Foo) Object.send(:remove_const, :Foo) end end it "should raise an exception when creating a model with an invalid implicit table" do proc{class ::Foo < Sequel::Model; end}.must_raise Sequel::Error end it "should not raise an exception when creating a model with a valid implicit table" do class ::Bar < Sequel::Model; end Bar.columns.must_equal [:id] end it "should raise an exception when creating a model with an invalid explicit table" do proc{Sequel::Model(@db[:foos])}.must_raise Sequel::Error end it "should not raise an exception when creating a model with a valid explicit table" do c = Sequel::Model(@db[:bars]) c.columns.must_equal [:id] end it "should raise an exception when calling set_dataset with an invalid table" do c = Class.new(Sequel::Model) proc{c.set_dataset @db[:foos]}.must_raise Sequel::Error end it "should not raise an exception when calling set_dataset with a valid table" do c = Class.new(Sequel::Model) c.set_dataset @db[:bars] c.columns.must_equal [:id] end it "should assume nil value is the same as false" do c = Class.new(Sequel::Model) c.require_valid_table = nil ds = @db.dataset.with_extend{def columns; raise Sequel::Error; end} c.set_dataset(ds) end end describe Sequel::Model, "constructors" do before do @m = Class.new(Sequel::Model) @m.columns :a, :b end it "should accept a hash" do m = @m.new(:a => 1, :b => 2) m.values.must_equal(:a => 1, :b => 2) m.must_be :new? end it "should accept a block and yield itself to the block" do block_called = false m = @m.new {|i| block_called = true; i.must_be_kind_of(@m); i.values[:a] = 1} block_called.must_equal true m.values[:a].must_equal 1 end it "should have dataset row_proc create an existing object" do @m.dataset = Sequel.mock.dataset o = @m.dataset.row_proc.call(:a=>1) o.must_be_kind_of(@m) o.values.must_equal(:a=>1) o.new?.must_equal false end it "should have .call create an existing object" do o = @m.call(:a=>1) o.must_be_kind_of(@m) o.values.must_equal(:a=>1) o.new?.must_equal false end it "should have .load create an existing object" do o = @m.load(:a=>1) o.must_be_kind_of(@m) o.values.must_equal(:a=>1) o.new?.must_equal false end end describe Sequel::Model, "new" do before do @m = Class.new(Sequel::Model) do set_dataset DB[:items] columns :x, :id end end it "should be marked as new?" do o = @m.new o.must_be :new? end it "should not be marked as new? once it is saved" do o = @m.new(:x => 1) o.must_be :new? o.save o.wont_be :new?
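# A minimal sketch of the lifecycle the assertions above pin down (Item is a
# hypothetical model backed by an items table, not defined in this spec):
#
#   item = Item.new(:x => 1)  # item.new? => true, nothing persisted yet
#   item.save                 # INSERT issued; item.new? => false afterwards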
end it "should use the last inserted id as primary key if not in values" do @m.dataset = @m.dataset.with_fetch(:x => 1, :id => 1234).with_autoid(1234) o = @m.new(:x => 1) o.save o.id.must_equal 1234 o = @m.load(:x => 1, :id => 333) o.save o.id.must_equal 333 end end describe Sequel::Model, ".find" do before do @c = Class.new(Sequel::Model(:items)) @c.dataset = @c.dataset.with_fetch(:name => 'sharon', :id => 1) DB.reset end it "should return the first record matching the given filter" do @c.find(:name => 'sharon').must_be_kind_of(@c) DB.sqls.must_equal ["SELECT * FROM items WHERE (name = 'sharon') LIMIT 1"] @c.find(Sequel.expr(:name).like('abc%')).must_be_kind_of(@c) DB.sqls.must_equal ["SELECT * FROM items WHERE (name LIKE 'abc%' ESCAPE '\\') LIMIT 1"] end it "should accept filter blocks" do @c.find{id > 1}.must_be_kind_of(@c) DB.sqls.must_equal ["SELECT * FROM items WHERE (id > 1) LIMIT 1"] @c.find{(x > 1) & (y < 2)}.must_be_kind_of(@c) DB.sqls.must_equal ["SELECT * FROM items WHERE ((x > 1) AND (y < 2)) LIMIT 1"] end end describe Sequel::Model, ".fetch" do before do DB.reset @c = Class.new(Sequel::Model(:items)) end it "should return instances of Model" do @c.fetch("SELECT * FROM items").first.must_be_kind_of(@c) end it "should return true for .empty? and not raise an error on empty selection" do @c.dataset = @c.dataset.with_extend do def fetch_rows(sql) yield({:count => 0}) end end @c.fetch("SELECT * FROM items WHERE FALSE").empty? end end describe Sequel::Model, ".find_or_create" do before do @db = Sequel.mock @c = Class.new(Sequel::Model(@db[:items])) do set_primary_key :id columns :x end @db.sqls end it "should find the record" do @db.fetch = [{:x=>1, :id=>1}] @db.autoid = 1 @c.find_or_create(:x => 1).must_equal @c.load(:x=>1, :id=>1) @db.sqls.must_equal ["SELECT * FROM items WHERE (x = 1) LIMIT 1"] end it "should create the record if not found" do @db.fetch = [[], {:x=>1, :id=>1}] @db.autoid = 1 @c.find_or_create(:x => 1).must_equal @c.load(:x=>1, :id=>1) @db.sqls.must_equal ["SELECT * FROM items WHERE (x = 1) LIMIT 1", "INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE id = 1"] end it "should pass the new record to be created to the block if no record is found" do @db.fetch = [[], {:x=>1, :id=>1}] @db.autoid = 1 @c.find_or_create(:x => 1){|x| x[:y] = 2}.must_equal @c.load(:x=>1, :id=>1) @db.sqls.must_equal ["SELECT * FROM items WHERE (x = 1) LIMIT 1", "INSERT INTO items (x, y) VALUES (1, 2)", "SELECT * FROM items WHERE id = 1"] end end describe Sequel::Model, ".all" do it "should return all records in the dataset" do c = Class.new(Sequel::Model(:items)) c.all.must_equal [c.load(:x=>1, :id=>1)] end end describe Sequel::Model, "A model class without a primary key" do before do @c = Class.new(Sequel::Model(:items)) do columns :x no_primary_key end DB.reset end it "should be able to insert records without selecting them back" do i = nil i = @c.create(:x => 1) i.class.wont_be_nil i.values.to_hash.must_equal(:x => 1) DB.sqls.must_equal ['INSERT INTO items (x) VALUES (1)'] end it "should raise when deleting" do proc{@c.load(:x=>1).delete}.must_raise Sequel::Error end it "should raise when updating" do proc{@c.load(:x=>1).update(:x=>2)}.must_raise Sequel::Error end it "should insert a record when saving" do o = @c.new(:x => 2) o.must_be :new? 
o.save DB.sqls.must_equal ['INSERT INTO items (x) VALUES (2)'] end end describe Sequel::Model, "attribute accessors" do before do db = Sequel.mock def db.supports_schema_parsing?() true end def db.schema(*) [[:x, {:type=>:integer}], [:z, {:type=>:integer}]] end @dataset = db[:items].columns(:x, :z) @c = Class.new(Sequel::Model) DB.reset end it "should be created on set_dataset" do a = [:x, :z, :x=, :z=] (a - @c.instance_methods).must_equal a @c.set_dataset(@dataset) (a - @c.instance_methods).must_equal [] o = @c.new (a - o.methods).must_equal [] o.x.must_be_nil o.x = 34 o.x.must_equal 34 end it "should only accept one argument for the write accessor" do @c.set_dataset(@dataset) o = @c.new o.x = 34 o.x.must_equal 34 proc{o.send(:x=)}.must_raise ArgumentError proc{o.send(:x=, 3, 4)}.must_raise ArgumentError end it "should have a working typecasting setter even if the column is not selected" do @c.set_dataset(@dataset.select(:z).columns(:z)) o = @c.new o.x = '34' o.x.must_equal 34 end it "should typecast if the new value is the same as the existing but has a different class" do @c.set_dataset(@dataset.select(:z).columns(:z)) o = @c.new o.x = 34 o.x = 34.0 o.x.must_equal 34.0 o.x = 34 o.x.must_equal 34 end end describe Sequel::Model, ".[]" do before do @c = Class.new(Sequel::Model(:items)) @c.dataset = @c.dataset.with_fetch(:name => 'sharon', :id => 1) DB.reset end it "should return the first record for the given pk" do @c[1].must_equal @c.load(:name => 'sharon', :id => 1) DB.sqls.must_equal ["SELECT * FROM items WHERE id = 1"] @c[9999].must_equal @c.load(:name => 'sharon', :id => 1) DB.sqls.must_equal ["SELECT * FROM items WHERE id = 9999"] end it "should have #[] return nil if no rows match" do @c.dataset = @c.dataset.with_fetch([]) @c[1].must_be_nil DB.sqls.must_equal ["SELECT * FROM items WHERE id = 1"] end it "should work correctly for custom primary key" do @c.set_primary_key :name @c['sharon'].must_equal @c.load(:name => 'sharon', :id => 1) DB.sqls.must_equal ["SELECT * FROM items WHERE name = 'sharon'"] end it "should handle a dataset that uses a subquery" do @c.dataset = @c.dataset.cross_join(:a).from_self(:alias=>:b) @c[1].must_equal @c.load(:name => 'sharon', :id => 1) DB.sqls.must_equal ["SELECT * FROM (SELECT * FROM items CROSS JOIN a) AS b WHERE (id = 1) LIMIT 1"] end it "should work correctly for composite primary key specified as array" do @c.set_primary_key [:node_id, :kind] @c[3921, 201].must_be_kind_of(@c) DB.sqls.must_equal ['SELECT * FROM items WHERE ((node_id = 3921) AND (kind = 201)) LIMIT 1'] end end describe "Model#inspect" do it "should include the class name and the values" do Sequel::Model.load(:x => 333).inspect.must_equal '#<Sequel::Model @values={:x=>333}>' end end describe "Model.db_schema" do before do @c = Class.new(Sequel::Model(:items)) do def self.columns; orig_columns; end end @db = Sequel.mock def @db.supports_schema_parsing?() true end @dataset = @db[:items] end it "should not call database's schema if it isn't supported" do @db.singleton_class.send(:remove_method, :supports_schema_parsing?)
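# Redefining supports_schema_parsing? to return false below forces the
# fallback path: without schema parsing, db_schema is derived from the
# dataset's columns, with an empty hash per column, as the assertions show.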
def @db.supports_schema_parsing?() false end def @db.schema(table, opts = {}) raise Sequel::Error end @dataset = @dataset.with_extend do def columns [:x, :y] end end @c.dataset = @dataset @c.db_schema.must_equal(:x=>{}, :y=>{}) @c.columns.must_equal [:x, :y] @c.instance_eval{@db_schema = nil} @c.db_schema.must_equal(:x=>{}, :y=>{}) @c.columns.must_equal [:x, :y] end it "should use the database's schema and set the columns and dataset columns" do def @db.schema(table, opts = {}) [[:x, {:type=>:integer}], [:y, {:type=>:string}]] end @c.dataset = @dataset @c.db_schema.must_equal(:x=>{:type=>:integer}, :y=>{:type=>:string}) @c.columns.must_equal [:x, :y] @c.dataset.columns.must_equal [:x, :y] end it "should not restrict the schema for datasets with a :select option" do @c.singleton_class.send(:remove_method, :columns) def @c.columns; [:x, :z]; end def @db.schema(table, opts = {}) [[:x, {:type=>:integer}], [:y, {:type=>:string}]] end @c.dataset = @dataset.select(:x, :y___z) @c.db_schema.must_equal(:x=>{:type=>:integer}, :z=>{}, :y=>{:type=>:string}) end it "should not raise error if setting dataset where getting schema and columns raises an error and require_valid_table is false" do @c.require_valid_table = false def @db.schema(table, opts={}) raise Sequel::Error end @c.dataset = @dataset.join(:x, :id).from_self.columns(:id, :x) @c.db_schema.must_equal(:x=>{}, :id=>{}) end it "should raise error if setting dataset where getting schema and columns raises an error and require_valid_table is true" do @c.require_valid_table = true def @db.schema(table, opts={}) raise Sequel::Error end @c.dataset = @dataset.join(:x, :id).from_self.columns(:id, :x) @c.db_schema.must_equal(:x=>{}, :id=>{}) end it "should use dataset columns if getting schema raises an error and require_valid_table is false" do @c.require_valid_table = false def @db.schema(table, opts={}) raise Sequel::Error end @c.dataset = @dataset.join(:x, :id).from_self.columns(:id, :x) @c.db_schema.must_equal(:x=>{}, :id=>{}) end it "should use dataset columns if getting schema raises an error and require_valid_table is true" do @c.require_valid_table = true def @db.schema(table, opts={}) raise Sequel::Error end @c.dataset = @dataset.join(:x, :id).from_self.columns(:id, :x) @c.db_schema.must_equal(:x=>{}, :id=>{}) end it "should automatically set a singular primary key based on the schema" do ds = @dataset d = ds.db def d.schema(table, *opts) [[:x, {:primary_key=>true}]] end @c.primary_key.must_equal :id @c.dataset = ds @c.db_schema.must_equal(:x=>{:primary_key=>true}) @c.primary_key.must_equal :x end it "should automatically set a singular primary key even if there are specific columns selected" do ds = @dataset.select(:a, :b, :x) d = ds.db def d.schema(table, *opts) [[:a, {:primary_key=>false}], [:b, {:primary_key=>false}], [:x, {:primary_key=>true}]] end @c.primary_key.must_equal :id @c.dataset = ds @c.db_schema.must_equal(:a=>{:primary_key=>false}, :b=>{:primary_key=>false}, :x=>{:primary_key=>true}) @c.primary_key.must_equal :x end it "should automatically set the composite primary key based on the schema" do ds = @dataset d = ds.db def d.schema(table, *opts) [[:x, {:primary_key=>true}], [:y, {:primary_key=>true}]] end @c.primary_key.must_equal :id @c.dataset = ds @c.db_schema.must_equal(:x=>{:primary_key=>true}, :y=>{:primary_key=>true}) @c.primary_key.must_equal [:x, :y] end it "should set an immutable composite primary key based on the schema" do ds = @dataset d = ds.db def d.schema(table, *opts) [[:x, {:primary_key=>true}], [:y, 
{:primary_key=>true}]] end @c.dataset = ds @c.primary_key.must_equal [:x, :y] proc{@c.primary_key.pop}.must_raise end it "should automatically set no primary key based on the schema" do ds = @dataset d = ds.db def d.schema(table, *opts) [[:x, {:primary_key=>false}], [:y, {:primary_key=>false}]] end @c.primary_key.must_equal :id @c.dataset = ds @c.db_schema.must_equal(:x=>{:primary_key=>false}, :y=>{:primary_key=>false}) @c.primary_key.must_be_nil end it "should automatically set primary key for dataset selecting table.*" do ds = @dataset.select_all(:items) d = ds.db def d.schema(table, *opts) [[:x, {:primary_key=>true}]] end @c.primary_key.must_equal :id @c.dataset = ds @c.db_schema.must_equal(:x=>{:primary_key=>true}) @c.primary_key.must_equal :x end it "should not modify the primary key unless all column schema hashes have a :primary_key entry" do ds = @dataset d = ds.db def d.schema(table, *opts) [[:x, {:primary_key=>false}], [:y, {}]] end @c.primary_key.must_equal :id @c.dataset = ds @c.db_schema.must_equal(:x=>{:primary_key=>false}, :y=>{}) @c.primary_key.must_equal :id end it "should return nil if the class has no dataset" do Class.new(Sequel::Model).db_schema.must_be_nil end end describe "Model#use_transactions" do before do @c = Class.new(Sequel::Model(:items)) end it "should return class value by default" do @c.use_transactions = true @c.new.use_transactions.must_equal true @c.use_transactions = false @c.new.use_transactions.must_equal false end it "should return set value if manually set" do instance = @c.new instance.use_transactions = false instance.use_transactions.must_equal false @c.use_transactions = true instance.use_transactions.must_equal false instance.use_transactions = true instance.use_transactions.must_equal true @c.use_transactions = false instance.use_transactions.must_equal true end end
sequel-5.63.0/spec/model/plugins_spec.rb
require_relative "spec_helper" describe Sequel::Model, ".plugin" do before do module Sequel::Plugins module Timestamped module InstanceMethods def get_stamp(*args); @values[:stamp] end def abc; 123; end end module ClassMethods def def; 234; end end module DatasetMethods def ghi; 345; end end end end @c = Class.new(Sequel::Model(:items)) @t = Sequel::Plugins::Timestamped end after do Sequel::Plugins.send(:remove_const, :Timestamped) end it "should raise LoadError if the plugin is not found" do proc{@c.plugin :something_or_other}.must_raise(LoadError) end it "should store the plugin in .plugins" do @c.plugins.wont_include(@t) @c.plugin @t @c.plugins.must_include(@t) end it "should be inherited in subclasses" do @c.plugins.wont_include(@t) c1 = Class.new(@c) @c.plugin @t c2 = Class.new(@c) @c.plugins.must_include(@t) c1.plugins.wont_include(@t) c2.plugins.must_include(@t) end it "should accept a symbol and load the module from the Sequel::Plugins namespace" do @c.plugin :timestamped @c.plugins.must_include(@t) end it "should accept a
module" do m = Module.new @c.plugin m @c.plugins.must_include(m) end it "should not attempt to load a plugin twice" do @c.plugins.wont_include(@t) @c.plugin @t @c.plugins.reject{|m| m != @t}.length.must_equal 1 @c.plugin @t @c.plugins.reject{|m| m != @t}.length.must_equal 1 end deprecated "should warn if loading the plugin with an argument" do @c.plugin @t, 1 @c.plugins.must_include(@t) end deprecated "should warn if loading the plugin with a block" do @c.plugin(@t){} @c.plugins.must_include(@t) end it "should call apply and configure if the plugin responds to it, with the args and block used" do m = Module.new do def self.args; @args; end def self.block; @block; end def self.block_call; @block.call; end def self.args2; @args2; end def self.block2; @block2; end def self.block2_call; @block2.call; end def self.apply(model, *args, &block) @args = args @block = block model.send(:define_method, :blah){43} end def self.configure(model, *args, &block) @args2 = args @block2 = block model.send(:define_method, :blag){44} end end b = lambda{42} @c.plugin(m, 123, 1=>2, &b) m.args.must_equal [123, {1=>2}] m.block.must_equal b m.block_call.must_equal 42 @c.new.blah.must_equal 43 m.args2.must_equal [123, {1=>2}] m.block2.must_equal b m.block2_call.must_equal 42 @c.new.blag.must_equal 44 end it "should call configure even if the plugin has already been loaded" do m = Module.new do @args = [] def self.args; @args; end def self.configure(model, *args, &block) @args << [block, *args] end end b = lambda{42} @c.plugin(m, 123, 1=>2, &b) m.args.must_equal [[b, 123, {1=>2}]] b2 = lambda{44} @c.plugin(m, 234, 2=>3, &b2) m.args.must_equal [[b, 123, {1=>2}], [b2, 234, {2=>3}]] end it "should call things in the following order: apply, ClassMethods, InstanceMethods, DatasetMethods, configure" do m = Module.new do @args = [] def self.args; @args; end def self.apply(model, *args, &block) @args << :apply end def self.configure(model, *args, &block) @args << :configure end self::InstanceMethods = Module.new do def self.included(model) model.plugins.last.args << :im end end self::ClassMethods = Module.new do def self.extended(model) model.plugins.last.args << :cm end end self::DatasetMethods = Module.new do def self.extended(dataset) dataset.model.plugins.last.args << :dm end end end b = lambda{44} @c.plugin(m, 123, 1=>2, &b) m.args.must_equal [:apply, :cm, :im, :dm, :configure] @c.plugin(m, 234, 2=>3, &b) m.args.must_equal [:apply, :cm, :im, :dm, :configure, :configure] end if RUBY_VERSION >= '2.7' it "should handle keywords calling apply and configure" do m = Module.new do eval <<-END def self.apply(model, name: (raise), &block) model.instance_variable_set(:@apply, name) end def self.configure(model, name: (raise), &block) model.instance_variable_set(:@configure, name) end END end @c.plugin(m, name: 1) @c.instance_variable_get(:@apply).must_equal 1 @c.instance_variable_get(:@configure).must_equal 1 end end it "should include an InstanceMethods module in the class if the plugin includes it" do @c.plugin @t m = @c.new m.must_respond_to(:get_stamp) m.must_respond_to(:abc) m.abc.must_equal 123 t = Time.now m[:stamp] = t m.get_stamp.must_equal t end it "should extend the class with a ClassMethods module if the plugin includes it" do @c.plugin @t @c.def.must_equal 234 end it "should extend the class's dataset with a DatasetMethods module if the plugin includes it" do @c.plugin @t @c.dataset.ghi.must_equal 345 end it "should save the DatasetMethods module and apply it later if the class doesn't have a dataset" do c = 
Class.new(Sequel::Model) c.plugin @t c.dataset = DB[:i] c.dataset.ghi.must_equal 345 end it "should save the DatasetMethods module and apply it later if the class has a dataset" do @c.plugin @t @c.dataset = DB[:i] @c.dataset.ghi.must_equal 345 end it "should not define class methods for private instance methods in DatasetMethod" do m = Module.new do self::DatasetMethods = Module.new do def b; 2; end private def a; 1; end end end @c.plugin m @c.dataset.b.must_equal 2 lambda{@c.dataset.a}.must_raise(NoMethodError) @c.dataset.send(:a).must_equal 1 lambda{@c.a}.must_raise(NoMethodError) lambda{@c.send(:a)}.must_raise(NoMethodError) end it "should not raise an error if the DatasetMethod module has no public instance methods" do m = Module.new do self::DatasetMethods = Module.new do private def a; 1; end end end @c.plugin m end it "should not raise an error if plugin submodule names exist higher up in the namespace hierarchy" do class ::ClassMethods; end @c.plugin(m = Module.new) Object.send(:remove_const, :ClassMethods) @c.plugins.must_include(m) class ::InstanceMethods; end @c.plugin(m = Module.new) Object.send(:remove_const, :InstanceMethods) @c.plugins.must_include(m) class ::DatasetMethods; end @c.plugin(m = Module.new) Object.send(:remove_const, :DatasetMethods) @c.plugins.must_include(m) end end describe Sequel::Plugins do before do @c = Class.new(Sequel::Model(:items)) end it "should have def_dataset_methods define methods that call methods on the dataset" do m = Module.new do module self::ClassMethods Sequel::Plugins.def_dataset_methods(self, :one) end module self::DatasetMethods def one 1 end end end @c.plugin m @c.one.must_equal 1 end if RUBY_VERSION >= '2.7' it "should handle keywords when delegating" do m = Module.new do module self::ClassMethods Sequel::Plugins.def_dataset_methods(self, :one) end module self::DatasetMethods eval <<-END def one(name: (raise)) name end END end end @c.plugin m @c.one(name: 1).must_equal 1 end end it "should have def_dataset_methods accept an array with multiple methods" do m = Module.new do module self::ClassMethods Sequel::Plugins.def_dataset_methods(self, [:one, :two]) end module self::DatasetMethods def one 1 end def two 2 end end end @c.plugin m @c.one.must_equal 1 @c.two.must_equal 2 end it "should have inherited_instance_variables add instance variables to copy into the subclass" do m = Module.new do def self.apply(model) model.instance_variable_set(:@one, 1) end module self::ClassMethods attr_reader :one Sequel::Plugins.inherited_instance_variables(self, :@one=>nil) end end @c.plugin m Class.new(@c).one.must_equal 1 end it "should have after_set_dataset add a method to call after set_dataset" do m = Module.new do module self::ClassMethods Sequel::Plugins.after_set_dataset(self, :one) def foo; dataset.send(:cache_get, :foo) end private def one; dataset.send(:cache_set, :foo, 1) end end end @c.plugin m @c.foo.must_be_nil @c.set_dataset :blah @c.foo.must_equal 1 end end describe "Sequel::Model.plugin" do before do @c = Class.new(Sequel::Model) end after do Sequel::Plugins.send(:remove_const, :SomethingOrOther) end it "should try loading plugins from sequel/plugins/:plugin" do a = [] m = Module.new @c.define_singleton_method(:require) do |b| a << b Sequel::Plugins.const_set(:SomethingOrOther, m) end @c.plugin :something_or_other @c.plugins.must_include m a.must_equal ['sequel/plugins/something_or_other'] end end describe "Sequel::Plugins.def_sequel_method" do before do @m = Class.new @scope = @m.new end it "should define methods using block" do m0 
= Sequel::Plugins.def_sequel_method(@m, "x", 0){1} m0.must_be_kind_of Symbol m0.must_match(/\A_sequel_x_\d+\z/) @scope.send(m0).must_equal 1 m1 = Sequel::Plugins.def_sequel_method(@m, "x", 1){|x| [x, 2]} m1.must_be_kind_of Symbol m1.must_match(/\A_sequel_x_\d+\z/) @scope.send(m1, 3).must_equal [3, 2] end it "should define private methods" do proc{@scope.public_send(Sequel::Plugins.def_sequel_method(@m, "x", 0){1})}.must_raise NoMethodError end it "should accept symbols as method name and return the same symbol" do m0 = Sequel::Plugins.def_sequel_method(@m, :_roda_foo, 0){1} m0.must_equal :_roda_foo @scope.send(m0).must_equal 1 end it "should handle optional arguments and splats for expected_arity 0" do m2 = Sequel::Plugins.def_sequel_method(@m, "x", 0){|*x| [x, 3]} @scope.send(m2).must_equal [[], 3] m3 = Sequel::Plugins.def_sequel_method(@m, "x", 0){|x=5| [x, 4]} @scope.send(m3).must_equal [5, 4] m4 = Sequel::Plugins.def_sequel_method(@m, "x", 0){|x=6, *y| [x, y, 5]} @scope.send(m4).must_equal [6, [], 5] end it "should handle optional arguments and splats for expected_arity 1" do m2 = Sequel::Plugins.def_sequel_method(@m, "x", 1){|y, *x| [y, x, 3]} @scope.send(m2, :a).must_equal [:a, [], 3] m3 = Sequel::Plugins.def_sequel_method(@m, "x", 1){|y, x=5| [y, x, 4]} @scope.send(m3, :b).must_equal [:b, 5, 4] m4 = Sequel::Plugins.def_sequel_method(@m, "x", 1){|y, x=6, *z| [y, x, z, 5]} @scope.send(m4, :c).must_equal [:c, 6, [], 5] end deprecated "should handle differences in arity" do m0 = Sequel::Plugins.def_sequel_method(@m, "x", 0){|x| [x, 1]} @scope.send(m0).must_equal [nil, 1] m1 = Sequel::Plugins.def_sequel_method(@m, "x", 1){2} @scope.send(m1, 3).must_equal 2 m1 = Sequel::Plugins.def_sequel_method(@m, "x", 1){|x, y| [x, y]} @scope.send(m1, 4).must_equal [4, nil] m1 = Sequel::Plugins.def_sequel_method(@m, "x y", 1){2} @scope.send(m1, 3).must_equal 2 end it "should raise for unexpected expected_arity" do proc{Sequel::Plugins.def_sequel_method(@m, "x", 2){|x|}}.must_raise Sequel::Error end it "should fail if a lambda with invalid arity is passed" do m0 = Sequel::Plugins.def_sequel_method(@m, "x", 0, &lambda{|x| [x, 1]}) proc{@scope.send(m0)}.must_raise ArgumentError m1 = Sequel::Plugins.def_sequel_method(@m, "x", 1, &lambda{2}) proc{@scope.send(m1, 1)}.must_raise ArgumentError end it "should raise if an invalid arg type is used" do o = Object.new def o.parameters; [[:foo]] end proc{Sequel::Plugins.send(:_define_sequel_method_arg_numbers, o)}.must_raise Sequel::Error end if RUBY_VERSION > '2.1' it "should raise for required keyword arguments for expected_arity 0 or 1" do proc{eval("Sequel::Plugins.def_sequel_method(@m, 'x', 0){|b:| [b, 1]}", binding)}.must_raise Sequel::Error proc{eval("Sequel::Plugins.def_sequel_method(@m, 'x', 0){|c=1, b:| [c, b, 1]}", binding)}.must_raise Sequel::Error proc{eval("Sequel::Plugins.def_sequel_method(@m, 'x', 1){|x, b:| [b, 1]}", binding)}.must_raise Sequel::Error proc{eval("Sequel::Plugins.def_sequel_method(@m, 'x', 1){|x, c=1, b:| [c, b, 1]}", binding)}.must_raise Sequel::Error end it "should ignore keyword arguments for expected_arity 0" do @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 0){|b:2| [b, 1]}", binding)).must_equal [2, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 0){|**b| [b, 1]}", binding)).must_equal [{}, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 0){|c=1, b:2| [c, b, 1]}", binding)).must_equal [1, 2, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 0){|c=1, **b| [c, b, 1]}",
binding)).must_equal [1, {}, 1] end deprecated "should ignore keyword arguments for expected_arity 0 with invalid arity" do @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 0){|x, b:2| [x, b, 1]}", binding)).must_equal [nil, 2, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 0){|x, **b| [x, b, 1]}", binding)).must_equal [nil, {}, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 0){|x, c=1, b:2| [x, c, b, 1]}", binding)).must_equal [nil, 1, 2, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 0){|x, c=1, **b| [x, c, b, 1]}", binding)).must_equal [nil, 1, {}, 1] end deprecated "should ignore keyword arguments for expected_arity 1 with invalid arity" do @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 1){|b:2| [b, 1]}", binding), 3).must_equal [2, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 1){|**b| [b, 1]}", binding), 3).must_equal [{}, 1] end it "should ignore keyword arguments for expected_arity 1" do @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 1){|c=1, b:2| [c, b, 1]}", binding), 3).must_equal [3, 2, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 1){|c=1, **b| [c, b, 1]}", binding), 3).must_equal [3, {}, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 1){|x, b:2| [x, b, 1]}", binding), 3).must_equal [3, 2, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 1){|x, **b| [x, b, 1]}", binding), 3).must_equal [3, {}, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 1){|x, c=1, b:2| [x, c, b, 1]}", binding), 3).must_equal [3, 1, 2, 1] @scope.send(eval("Sequel::Plugins.def_sequel_method(@m, 'x', 1){|x, c=1, **b| [x, c, b, 1]}", binding), 3).must_equal [3, 1, {}, 1] end end end
sequel-5.63.0/spec/model/record_spec.rb
require_relative "spec_helper" describe "Model#values" do before do @c = Class.new(Sequel::Model(:items)) end it "should return the hash of model values" do hash = {:x=>1} @c.load(hash).values.must_be_same_as(hash) end it "should be aliased as to_hash" do hash = {:x=>1} @c.load(hash).to_hash.must_be_same_as(hash) end end describe "Model#get_column_value and set_column_value" do before do @c = Class.new(Sequel::Model(:items)) @c.columns :x @o = @c.load(:x=>1) end it "should get and set column values" do @o.get_column_value(:x).must_equal 1 @o.set_column_value(:x=, 2) @o.get_column_value(:x).must_equal 2 @o.x.must_equal 2 end end describe "Model#save server use" do before do @db = Sequel.mock(:autoid=>proc{|sql| 10}, :fetch=>{:x=>1, :id=>10}, :numrows=>1, :servers=>{:blah=>{}, :read_only=>{}}) @c = Class.new(Sequel::Model(@db[:items])) @c.columns :id, :x, :y @c.dataset.columns(:id, :x, :y) @db.sqls end it "should use the :default server if the model
doesn't have one already specified" do @c.new(:x=>1).save.must_equal @c.load(:x=>1, :id=>10) @db.sqls.must_equal ["INSERT INTO items (x) VALUES (1)", 'SELECT * FROM items WHERE (id = 10) LIMIT 1'] end it "should use the model's server if the model has one already specified" do @c.dataset = @c.dataset.server(:blah) @c.new(:x=>1).save.must_equal @c.load(:x=>1, :id=>10) @db.sqls.must_equal ["INSERT INTO items (x) VALUES (1) -- blah", 'SELECT * FROM items WHERE (id = 10) LIMIT 1 -- blah'] end it "should use transactions on the correct server" do @c.use_transactions = true @c.dataset = @c.dataset.server(:blah) @c.new(:x=>1).save.must_equal @c.load(:x=>1, :id=>10) @db.sqls.must_equal ["BEGIN -- blah", "INSERT INTO items (x) VALUES (1) -- blah", 'SELECT * FROM items WHERE (id = 10) LIMIT 1 -- blah', 'COMMIT -- blah'] o = @c.load(:id=>1) o.x = 2 o.this o.save @db.sqls.must_equal ["BEGIN -- blah", "UPDATE items SET x = 2 WHERE (id = 1) -- blah", 'COMMIT -- blah'] end end describe "Model#save" do before do @c = Class.new(Sequel::Model(:items)) do columns :id, :x, :y end @c.dataset = @c.dataset.with_autoid(13) DB.reset end it "should insert a record for a new model instance" do o = @c.new(:x => 1) o.save DB.sqls.must_equal ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE id = 13"] end it "should raise if the object can't be refreshed after save" do o = @c.new(:x => 1) @c.dataset = @c.dataset.with_fetch([]) proc{o.save}.must_raise(Sequel::NoExistingObject) end it "should use dataset's insert_select method if present" do @c.dataset = @c.dataset.with_fetch(:y=>2).with_extend do def supports_insert_select?; true end def insert_select(hash) with_sql_first("INSERT INTO items (y) VALUES (2) RETURNING *") end end o = @c.new(:x => 1) o.save o.values.must_equal(:y=>2) DB.sqls.must_equal ["INSERT INTO items (y) VALUES (2) RETURNING *"] end it "should issue regular insert query if insert_select returns nil" do @c.dataset = @c.dataset.with_fetch(:id=>4, :x=>2).with_autoid(4).with_extend do def supports_insert_select?; true end def insert_select(hash) end end o = @c.new(:x => 1) o.save DB.sqls.must_equal ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE id = 4"] o.values.must_equal(:id=>4, :x=>2) end it "should assume insert statement already ran if insert_select returns false" do @c.dataset = @c.dataset.with_fetch(:y=>2).with_extend do def supports_insert_select?; true end def insert_select(hash) with_sql_first("INSERT INTO items (y) VALUES (2) RETURNING *") false end end o = @c.new(:x => 1) o.save o.values.must_equal(:x=>1) DB.sqls.must_equal ["INSERT INTO items (y) VALUES (2) RETURNING *"] end it "should not use dataset's insert_select method if specific columns are selected" do @c.dataset = @c.dataset.select(:y).with_extend{def insert_select(*) raise; end} @c.new(:x => 1).save end it "should use dataset's insert_select method if the dataset uses returning, even if specific columns are selected" do @c.dataset = @c.dataset.select(:y).with_fetch(:y=>2).with_extend do def supports_returning?(_) true end def supports_insert_select?; true end def insert_select(hash) with_sql_first("INSERT INTO items (y) VALUES (2) RETURNING y") end end.returning(:y) DB.reset o = @c.new(:x => 1) o.save o.values.must_equal(:y=>2) DB.sqls.must_equal ["INSERT INTO items (y) VALUES (2) RETURNING y"] end it "should use value returned by insert as the primary key and refresh the object" do o = @c.new(:x => 11) o.save DB.sqls.must_equal ["INSERT INTO items (x) VALUES (11)", "SELECT * FROM items WHERE id = 13"] 
end it "should allow you to skip refreshing by overridding _save_refresh" do @c.send(:define_method, :_save_refresh){} @c.create(:x => 11) DB.sqls.must_equal ["INSERT INTO items (x) VALUES (11)"] end it "should work correctly for inserting a record without a primary key" do @c.no_primary_key o = @c.new(:x => 11) o.save DB.sqls.must_equal ["INSERT INTO items (x) VALUES (11)"] end it "should set the autoincrementing_primary_key value to the value returned by insert" do @c.unrestrict_primary_key @c.set_primary_key [:x, :y] o = @c.new(:x => 11) def o.autoincrementing_primary_key() :y end o.save DB.sqls.must_equal ["INSERT INTO items (x) VALUES (11)", 'SELECT * FROM items WHERE ((x = 11) AND (y = 13)) LIMIT 1'] end it "should update a record for an existing model instance" do o = @c.load(:id => 3, :x => 1) o.save DB.sqls.must_equal ["UPDATE items SET x = 1 WHERE (id = 3)"] end it "should include primary keys in update statement if they have changed" do o = @c.load(:id => 3, :x => 1) o.id = 4 o.save DB.sqls.must_equal ["UPDATE items SET id = 4, x = 1 WHERE (id = 4)"] end it "should raise a NoExistingObject exception if the dataset update call doesn't return 1, unless require_modification is false" do i = 0 @c.dataset = @c.dataset.with_extend{define_method(:numrows){i}} o = @c.load(:id => 3, :x => 1) proc{o.save}.must_raise(Sequel::NoExistingObject) i = 2 proc{o.save}.must_raise(Sequel::NoExistingObject) i = 1 o.save o.require_modification = false i = 0 o.save i = 2 o.save end it "should respect the :columns option to specify the columns to save" do o = @c.load(:id => 3, :x => 1, :y => nil) o.save(:columns=>:y) DB.sqls.first.must_equal "UPDATE items SET y = NULL WHERE (id = 3)" end it "should mark saved columns as not changed" do o = @c.load(:id => 3, :x => 1, :y => nil) o[:y] = 4 o.changed_columns.must_equal [:y] o.save(:columns=>:x) o.changed_columns.must_equal [:y] o.save(:columns=>:y) o.changed_columns.must_equal [] end it "should mark all columns as not changed if this is a new record" do o = @c.new(:x => 1, :y => nil) o.x = 4 o.changed_columns.must_equal [:x] o.save o.changed_columns.must_equal [] end it "should mark all columns as not changed if this is a new record and insert_select was used" do @c.dataset = @c.dataset.with_extend{def insert_select(h) h.merge(:id=>1) end} o = @c.new(:x => 1, :y => nil) o.x = 4 o.changed_columns.must_equal [:x] o.save o.changed_columns.must_equal [] end it "should use Model's use_transactions setting by default" do @c.use_transactions = true @c.load(:id => 3, :x => 1, :y => nil).save(:columns=>:y) DB.sqls.must_equal ["BEGIN", "UPDATE items SET y = NULL WHERE (id = 3)", "COMMIT"] @c.use_transactions = false @c.load(:id => 3, :x => 1, :y => nil).save(:columns=>:y) DB.sqls.must_equal ["UPDATE items SET y = NULL WHERE (id = 3)"] end it "should inherit Model's use_transactions setting" do @c.use_transactions = true Class.new(@c).load(:id => 3, :x => 1, :y => nil).save(:columns=>:y) DB.sqls.must_equal ["BEGIN", "UPDATE items SET y = NULL WHERE (id = 3)", "COMMIT"] @c.use_transactions = false Class.new(@c).load(:id => 3, :x => 1, :y => nil).save(:columns=>:y) DB.sqls.must_equal ["UPDATE items SET y = NULL WHERE (id = 3)"] end it "should use object's use_transactions setting" do o = @c.load(:id => 3, :x => 1, :y => nil) o.use_transactions = false @c.use_transactions = true o.save(:columns=>:y) DB.sqls.must_equal ["UPDATE items SET y = NULL WHERE (id = 3)"] o = @c.load(:id => 3, :x => 1, :y => nil) o.use_transactions = true @c.use_transactions = false 
o.save(:columns=>:y) DB.sqls.must_equal ["BEGIN", "UPDATE items SET y = NULL WHERE (id = 3)", "COMMIT"] end it "should use :transaction option if given" do o = @c.load(:id => 3, :x => 1, :y => nil) o.use_transactions = true o.save(:columns=>:y, :transaction=>false) DB.sqls.must_equal ["UPDATE items SET y = NULL WHERE (id = 3)"] o = @c.load(:id => 3, :x => 1, :y => nil) o.use_transactions = false o.save(:columns=>:y, :transaction=>true) DB.sqls.must_equal ["BEGIN", "UPDATE items SET y = NULL WHERE (id = 3)", "COMMIT"] end it "should rollback if before_save calls cancel_action and raise_on_save_failure = true" do o = @c.load(:id => 3, :x => 1, :y => nil) o.use_transactions = true o.raise_on_save_failure = true def o.before_save cancel_action end proc { o.save(:columns=>:y) }.must_raise(Sequel::HookFailed) DB.sqls.must_equal ["BEGIN", "ROLLBACK"] end it "should rollback if before_save calls cancel_action and :raise_on_failure option is true" do o = @c.load(:id => 3, :x => 1, :y => nil) o.use_transactions = true o.raise_on_save_failure = false def o.before_save cancel_action end proc { o.save(:columns=>:y, :raise_on_failure => true) }.must_raise(Sequel::HookFailed) DB.sqls.must_equal ["BEGIN", "ROLLBACK"] end it "should not rollback outer transactions if before_save calls cancel_action and raise_on_save_failure = false" do o = @c.load(:id => 3, :x => 1, :y => nil) o.use_transactions = true o.raise_on_save_failure = false def o.before_save cancel_action end DB.transaction do o.save(:columns=>:y).must_be_nil DB.run "BLAH" end DB.sqls.must_equal ["BEGIN", "BLAH", "COMMIT"] end it "should rollback if before_save calls cancel_action and raise_on_save_failure = false" do o = @c.load(:id => 3, :x => 1, :y => nil) o.use_transactions = true o.raise_on_save_failure = false def o.before_save cancel_action end o.save(:columns=>:y).must_be_nil DB.sqls.must_equal ["BEGIN", "ROLLBACK"] end it "should not rollback if before_save throws Rollback and use_transactions = false" do o = @c.load(:id => 3, :x => 1, :y => nil) o.use_transactions = false def o.before_save raise Sequel::Rollback end proc { o.save(:columns=>:y) }.must_raise(Sequel::Rollback) DB.sqls.must_equal [] end it "should support a :server option to set the server/shard to use" do db = Sequel.mock(:fetch=>{:id=>13, :x=>1}, :autoid=>proc{13}, :numrows=>1, :servers=>{:s1=>{}}) c = Class.new(Sequel::Model(db[:items])) c.columns :id, :x db.sqls o = c.new(:x => 1) o.save(:server=>:s1) db.sqls.must_equal ["INSERT INTO items (x) VALUES (1) -- s1", "SELECT * FROM items WHERE (id = 13) LIMIT 1 -- s1"] o.save(:server=>:s1, :transaction=>true) db.sqls.must_equal ["BEGIN -- s1", "UPDATE items SET x = 1 WHERE (id = 13) -- s1", 'COMMIT -- s1'] end end describe "Model#set_server" do before do @db = Sequel.mock(:fetch=>{:id=>13, :x=>1}, :autoid=>proc{13}, :numrows=>1, :servers=>{:s1=>{}}) @c = Class.new(Sequel::Model(@db[:items])) do columns :id, :x end @db.sqls end it "should set the server to use when inserting" do @c.new(:x => 1).set_server(:s1).save @db.sqls.must_equal ["INSERT INTO items (x) VALUES (1) -- s1", "SELECT * FROM items WHERE (id = 13) LIMIT 1 -- s1"] end it "should set the server to use when updating" do @c.load(:id=>13, :x => 1).set_server(:s1).save @db.sqls.must_equal ["UPDATE items SET x = 1 WHERE (id = 13) -- s1"] end it "should set the server to use for transactions when saving" do @c.load(:id=>13, :x => 1).set_server(:s1).save(:transaction=>true) @db.sqls.must_equal ["BEGIN -- s1", "UPDATE items SET x = 1 WHERE (id = 13) -- s1", 'COMMIT -- 
s1'] end it "should set the server to use when deleting" do @c.load(:id=>13).set_server(:s1).delete @db.sqls.must_equal ["DELETE FROM items WHERE (id = 13) -- s1"] end it "should set the server to use when deleting when using optimized delete" do @c.set_primary_key :id @c.load(:id=>13).set_server(:s1).delete @db.sqls.must_equal ["DELETE FROM items WHERE id = 13 -- s1"] end it "should set the server to use for transactions when destroying" do o = @c.load(:id=>13).set_server(:s1) o.use_transactions = true o.destroy @db.sqls.must_equal ["BEGIN -- s1", "DELETE FROM items WHERE (id = 13) -- s1", 'COMMIT -- s1'] end it "should set the server on this if this is already loaded" do o = @c.load(:id=>13, :x => 1) o.this o.set_server(:s1) o.this.opts[:server].must_equal :s1 end it "should set the server on this if this is not already loaded" do @c.load(:id=>13, :x => 1).set_server(:s1).this.opts[:server].must_equal :s1 end end describe "Model#freeze" do before do class ::Album < Sequel::Model columns :id class B < Sequel::Model columns :id, :album_id end end @o = Album.load(:id=>1).freeze DB.sqls end after do Object.send(:remove_const, :Album) end it "should freeze the object" do @o.frozen?.must_equal true end it "should freeze the object if the model doesn't have a primary key" do Album.no_primary_key @o = Album.load(:id=>1).freeze @o.frozen?.must_equal true end it "should freeze the object's values, associations, changed_columns, errors, and this" do @o.values.frozen?.must_equal true @o.changed_columns.frozen?.must_equal true @o.errors.frozen?.must_equal true @o.this.frozen?.must_equal true end it "should still have working class attr overriddable methods" do [:typecast_empty_string_to_nil, :typecast_on_assignment, :strict_param_setting, :raise_on_save_failure, :raise_on_typecast_failure, :require_modification, :use_transactions].each{|m| @o.send(m) == Album.send(m)} end it "should have working new? method" do @o.new?.must_equal false Album.new.freeze.new?.must_equal true end it "should handle cases where validations modify values or changed columns" do o = Album.new def o.validate() self.id = 2 end o.freeze o.valid?.must_equal true end it "should have working valid? method" do @o.valid?.must_equal true o = Album.new def o.validate() errors.add(:foo, '') end o.freeze o.valid?.must_equal false end it "should not call validate if errors is already frozen" do @o.valid?.must_equal true o = Album.new o.errors.freeze def o.validate() errors.add(:foo, '') end o.freeze o.valid?.must_equal true end it "should raise an Error if trying to save/destroy/delete/refresh" do proc{@o.save}.must_raise(Sequel::Error) proc{@o.destroy}.must_raise(Sequel::Error) proc{@o.delete}.must_raise(Sequel::Error) proc{@o.refresh}.must_raise(Sequel::Error) @o.db.sqls.must_equal [] end end describe "Model#dup" do before do @Album = Class.new(Sequel::Model(:albums)) @o = @Album.load(:id=>1) DB.sqls end it "should be equal to existing object" do @o.dup.must_equal @o @o.dup.values.must_equal @o.values @o.dup.changed_columns.must_equal @o.changed_columns @o.dup.errors.must_equal @o.errors @o.dup.this.must_equal @o.this end it "should not use identical structures" do @o.dup.wont_be_same_as(@o) @o.dup.values.wont_be_same_as(@o.values) @o.dup.changed_columns.wont_be_same_as(@o.changed_columns) @o.dup.errors.wont_be_same_as(@o.errors) @o.dup.this.wont_be_same_as(@o.this) end it "should keep new status" do @o.dup.new?.must_equal false @Album.new.dup.new?.must_equal true end it "should not copy frozen status" do this_frozen = @o.this.frozen? 
d = @o.freeze.dup d.wont_be :frozen? d.values.wont_be :frozen? d.changed_columns.wont_be :frozen? d.errors.wont_be :frozen? d.this.frozen?.must_equal this_frozen end end describe "Model#clone" do before do @Album = Class.new(Sequel::Model(:albums)) @o = @Album.load(:id=>1) DB.sqls end it "should be equal to existing object" do @o.clone.must_equal @o @o.clone.values.must_equal @o.values @o.clone.changed_columns.must_equal @o.changed_columns @o.clone.errors.must_equal @o.errors @o.clone.this.must_equal @o.this end it "should not use identical structures" do @o.clone.wont_be_same_as(@o) @o.clone.values.wont_be_same_as(@o.values) @o.clone.changed_columns.wont_be_same_as(@o.changed_columns) @o.clone.errors.wont_be_same_as(@o.errors) @o.clone.this.wont_be_same_as(@o.this) end it "should keep new status" do @o.clone.new?.must_equal false @Album.new.clone.new?.must_equal true end it "should copy frozen status" do @o.freeze.clone.must_be :frozen? @o.freeze.clone.values.must_be :frozen? @o.freeze.clone.changed_columns.must_be :frozen? @o.freeze.clone.errors.must_be :frozen? @o.freeze.clone.this.must_be :frozen? end end describe "Model#marshallable" do before do class ::Album < Sequel::Model columns :id, :x end end after do Object.send(:remove_const, :Album) end it "should make an object marshallable" do i = Album.new(:x=>2) s = nil i2 = nil i.marshallable! s = Marshal.dump(i) i2 = Marshal.load(s) i2.must_equal i i.save i.marshallable! s = Marshal.dump(i) i2 = Marshal.load(s) i2.must_equal i i.save i.marshallable! s = Marshal.dump(i) i2 = Marshal.load(s) i2.must_equal i end end describe "Model#modified?" do before do @c = Class.new(Sequel::Model(:items)) @c.class_eval do columns :id, :x @db_schema = {:x => {:type => :integer}} end DB.reset end it "should be true if the object is new" do @c.new.modified?.must_equal true end it "should be false if the object has not been modified" do @c.load(:id=>1).modified?.must_equal false end it "should be true if the object has been modified" do o = @c.load(:id=>1, :x=>2) o.x = 3 o.modified?.must_equal true end it "should be true if the object is marked modified!" do o = @c.load(:id=>1, :x=>2) o.modified! o.modified?.must_equal true end it "should be false if the object is marked modified! after saving until modified! again" do o = @c.load(:id=>1, :x=>2) o.modified! o.save o.modified?.must_equal false o.modified! o.modified?.must_equal true end it "should be false if a column value is set that is the same as the current value after typecasting" do o = @c.load(:id=>1, :x=>2) o.x = '2' o.modified?.must_equal false end it "should be true if a column value is set that is different from the current value after typecasting" do o = @c.load(:id=>1, :x=>'2') o.x = '2' o.modified?.must_equal true end it "should be true if given a column argument and the column has been changed" do o = @c.new o.modified?(:id).must_equal false o.id = 1 o.modified?(:id).must_equal true end end describe "Model#modified!" do before do @c = Class.new(Sequel::Model(:items)) @c.class_eval do columns :id, :x end DB.reset end it "should mark the object as modified so that save_changes still runs the callbacks" do o = @c.load(:id=>1, :x=>2) def o.after_save values[:x] = 3 end o.update({}) o.x.must_equal 2 o.modified!
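# After modified!, the otherwise empty update below is no longer a no-op:
# the save machinery (including after_save) runs even though no column
# changed, which is why x ends up as 3 this time without any SQL issued.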
o.update({}) o.x.must_equal 3 o.db.sqls.must_equal [] end it "should mark given column argument as modified" do o = @c.load(:id=>1, :x=>2) o.modified!(:x) o.changed_columns.must_equal [:x] o.save o.db.sqls.must_equal ["UPDATE items SET x = 2 WHERE (id = 1)"] end end describe "Model#save_changes" do before do @c = Class.new(Sequel::Model(:items)) do unrestrict_primary_key columns :id, :x, :y end DB.reset end it "should always save if the object is new" do o = @c.new(:x => 1) o.save_changes DB.sqls.first.must_equal "INSERT INTO items (x) VALUES (1)" end it "should take options passed to save" do o = @c.load(:id=>1, :x => 1) o.x = 2 o.save_changes DB.sqls.must_equal ["UPDATE items SET x = 2 WHERE (id = 1)"] o.x = 3 o.save_changes(:transaction=>true) DB.sqls.must_equal ["BEGIN", "UPDATE items SET x = 3 WHERE (id = 1)", "COMMIT"] end it "should do nothing if no changed columns" do o = @c.load(:id => 3, :x => 1, :y => nil) o.save_changes DB.sqls.must_equal [] end it "should do nothing if modified? is false" do o = @c.load(:id => 3, :x => 1, :y => nil) def o.modified?; false; end o.save_changes DB.sqls.must_equal [] end it "should update only changed columns" do o = @c.load(:id => 3, :x => 1, :y => nil) o.x = 2 o.save_changes DB.sqls.must_equal ["UPDATE items SET x = 2 WHERE (id = 3)"] o.save_changes o.save_changes DB.sqls.must_equal [] o.y = 4 o.save_changes DB.sqls.must_equal ["UPDATE items SET y = 4 WHERE (id = 3)"] o.save_changes o.save_changes DB.sqls.must_equal [] end it "should not consider columns changed if the values did not change" do o = @c.load(:id => 3, :x => 1, :y => nil) o.x = 1 o.save_changes DB.sqls.must_equal [] o.x = 3 o.save_changes DB.sqls.must_equal ["UPDATE items SET x = 3 WHERE (id = 3)"] o[:y] = nil o.save_changes DB.sqls.must_equal [] o[:y] = 4 o.save_changes DB.sqls.must_equal ["UPDATE items SET y = 4 WHERE (id = 3)"] end it "should clear changed_columns" do o = @c.load(:id => 3, :x => 1, :y => nil) o.x = 4 o.changed_columns.must_equal [:x] o.save_changes o.changed_columns.must_equal [] end it "should update columns changed in a before_update hook" do o = @c.load(:id => 3, :x => 1, :y => nil) @c.send(:define_method, :before_update){self.x += 1} o.save_changes DB.sqls.must_equal [] o.x = 2 o.save_changes DB.sqls.must_equal ["UPDATE items SET x = 3 WHERE (id = 3)"] o.save_changes DB.sqls.must_equal [] o.x = 4 o.save_changes DB.sqls.must_equal ["UPDATE items SET x = 5 WHERE (id = 3)"] end it "should update columns changed in a before_save hook" do o = @c.load(:id => 3, :x => 1, :y => nil) @c.send(:define_method, :before_save){self.x += 1} o.save_changes DB.sqls.must_equal [] o.x = 2 o.save_changes DB.sqls.must_equal ["UPDATE items SET x = 3 WHERE (id = 3)"] o.save_changes DB.sqls.must_equal [] o.x = 4 o.save_changes DB.sqls.must_equal ["UPDATE items SET x = 5 WHERE (id = 3)"] end end describe "Model#new?" do before do @c = Class.new(Sequel::Model(:items)) do unrestrict_primary_key columns :x end DB.reset end it "should be true for a new instance" do n = @c.new(:x => 1) n.must_be :new? end it "should be false after saving" do n = @c.new(:x => 1) n.save n.wont_be :new?
end end describe Sequel::Model, "with a primary key" do it "should default to :id" do model_a = Class.new Sequel::Model model_a.primary_key.must_equal :id end it "should be changed through 'set_primary_key'" do model_a = Class.new(Sequel::Model){ set_primary_key :a } model_a.primary_key.must_equal :a end it "should accept single argument composite keys" do model_a = Class.new(Sequel::Model){ set_primary_key [:a, :b] } model_a.primary_key.must_equal [:a, :b] end end describe Sequel::Model, "without a primary key" do it "should return nil for primary key" do Class.new(Sequel::Model){no_primary_key}.primary_key.must_be_nil end it "should raise a Sequel::Error on 'this'" do instance = Class.new(Sequel::Model){no_primary_key}.new proc{instance.this}.must_raise(Sequel::Error) end end describe Sequel::Model, "#this" do before do @example = Class.new(Sequel::Model(:examples)) @example.columns :id, :a, :x, :y end it "should return a dataset identifying the record" do instance = @example.load(:id => 3) instance.this.sql.must_equal "SELECT * FROM examples WHERE (id = 3) LIMIT 1" end it "should support arbitrary primary keys" do @example.set_primary_key :a instance = @example.load(:a => 3) instance.this.sql.must_equal "SELECT * FROM examples WHERE (a = 3) LIMIT 1" end it "should use a subquery if the dataset is joined" do @example.dataset = @example.dataset.cross_join(:a) instance = @example.load(:id => 3) instance.this.sql.must_equal "SELECT * FROM (SELECT * FROM examples CROSS JOIN a) AS examples WHERE (id = 3) LIMIT 1" end it "should use a primary key if the dataset uses a subquery" do @example.dataset = @example.dataset.cross_join(:a).from_self(:alias=>:b) instance = @example.load(:id => 3) instance.this.sql.must_equal "SELECT * FROM (SELECT * FROM examples CROSS JOIN a) AS b WHERE (id = 3) LIMIT 1" end it "should support composite primary keys" do @example.set_primary_key [:x, :y] instance = @example.load(:x => 4, :y => 5) instance.this.sql.must_equal 'SELECT * FROM examples WHERE ((x = 4) AND (y = 5)) LIMIT 1' end end describe "Model#pk" do before do @m = Class.new(Sequel::Model) @m.columns :id, :x, :y end it "should by default return the value of the :id column" do m = @m.load(:id => 111, :x => 2, :y => 3) m.pk.must_equal 111 end it "should return the primary key value for custom primary key" do @m.set_primary_key :x m = @m.load(:id => 111, :x => 2, :y => 3) m.pk.must_equal 2 end it "should return the primary key value for composite primary key" do @m.set_primary_key [:y, :x] m = @m.load(:id => 111, :x => 2, :y => 3) m.pk.must_equal [3, 2] end it "should raise if no primary key" do @m.set_primary_key nil m = @m.new(:id => 111, :x => 2, :y => 3) proc {m.pk}.must_raise(Sequel::Error) @m.no_primary_key m = @m.new(:id => 111, :x => 2, :y => 3) proc {m.pk}.must_raise(Sequel::Error) end end describe "Model#pk_hash" do before do @m = Class.new(Sequel::Model) @m.columns :id, :x, :y end it "should by default return a hash with the value of the :id column" do m = @m.load(:id => 111, :x => 2, :y => 3) m.pk_hash.must_equal(:id => 111) end it "should return a hash with the primary key value for custom primary key" do @m.set_primary_key :x m = @m.load(:id => 111, :x => 2, :y => 3) m.pk_hash.must_equal(:x => 2) end it "should return a hash with the primary key values for composite primary key" do @m.set_primary_key [:y, :x] m = @m.load(:id => 111, :x => 2, :y => 3) m.pk_hash.must_equal(:y => 3, :x => 2) end it "should raise if no primary key" do @m.set_primary_key nil m = @m.new(:id => 111, :x => 2, :y => 3)
proc{m.pk_hash}.must_raise(Sequel::Error) @m.no_primary_key m = @m.new(:id => 111, :x => 2, :y => 3) proc{m.pk_hash}.must_raise(Sequel::Error) end end describe "Model#qualified_pk_hash" do before do @m = Class.new(Sequel::Model(:items)) @m.columns :id, :x, :y end it "should by default return a hash with the value of the :id column" do m = @m.load(:id => 111, :x => 2, :y => 3) m.qualified_pk_hash.must_equal(Sequel.qualify(:items, :id) => 111) end it "should accept a custom qualifier" do m = @m.load(:id => 111, :x => 2, :y => 3) m.qualified_pk_hash(:foo).must_equal(Sequel.qualify(:foo, :id) => 111) end it "should return a hash with the primary key value for custom primary key" do @m.set_primary_key :x m = @m.load(:id => 111, :x => 2, :y => 3) m.qualified_pk_hash.must_equal(Sequel.qualify(:items, :x) => 2) end it "should return a hash with the primary key values for composite primary key" do @m.set_primary_key [:y, :x] m = @m.load(:id => 111, :x => 2, :y => 3) m.qualified_pk_hash.must_equal(Sequel.qualify(:items, :y) => 3, Sequel.qualify(:items, :x) => 2) end it "should raise if no primary key" do @m.set_primary_key nil m = @m.new(:id => 111, :x => 2, :y => 3) proc{m.qualified_pk_hash}.must_raise(Sequel::Error) @m.no_primary_key m = @m.new(:id => 111, :x => 2, :y => 3) proc{m.qualified_pk_hash}.must_raise(Sequel::Error) end end describe Sequel::Model, "#set" do before do @c = Class.new(Sequel::Model(:items)) do set_primary_key :id columns :x, :y, :id end @c.strict_param_setting = false @o1 = @c.new @o2 = @c.load(:id => 5) DB.reset end it "should filter the given params using the model columns" do @o1.set(:x => 1, :z => 2) @o1.values.must_equal(:x => 1) DB.sqls.must_equal [] @o2.set(:y => 1, :abc => 2) @o2.values.must_equal(:y => 1, :id=> 5) DB.sqls.must_equal [] end it "should work with both strings and symbols" do @o1.set('x'=> 1, 'z'=> 2) @o1.values.must_equal(:x => 1) DB.sqls.must_equal [] @o2.set('y'=> 1, 'abc'=> 2) @o2.values.must_equal(:y => 1, :id=> 5) DB.sqls.must_equal [] end it "should support virtual attributes" do @c.send(:define_method, :blah=){|v| self.x = v} @o1.set(:blah => 333) @o1.values.must_equal(:x => 333) DB.sqls.must_equal [] @o1.set('blah'=> 334) @o1.values.must_equal(:x => 334) DB.sqls.must_equal [] end it "should not modify the primary key" do @o1.set(:x => 1, :id => 2) @o1.values.must_equal(:x => 1) DB.sqls.must_equal [] @o2.set('y'=> 1, 'id'=> 2) @o2.values.must_equal(:y => 1, :id=> 5) DB.sqls.must_equal [] end it "should return self" do returned_value = @o1.set(:x => 1, :z => 2) returned_value.must_equal @o1 DB.sqls.must_equal [] end it "should raise error if strict_param_setting is true and method does not exist" do @o1.strict_param_setting = true proc{@o1.set('foo' => 1)}.must_raise(Sequel::MassAssignmentRestriction) end it "should raise error if strict_param_setting is true and column is a primary key" do @o1.strict_param_setting = true proc{@o1.set('id' => 1)}.must_raise(Sequel::MassAssignmentRestriction) end it "should raise error if strict_param_setting is true and column is restricted" do @o1.strict_param_setting = true @c.setter_methods.delete("x=") proc{@o1.set('x' => 1)}.must_raise(Sequel::MassAssignmentRestriction) end it "should not create a symbol if strict_param_setting is true and string is given" do @o1.strict_param_setting = true proc{@o1.set('sadojafdso' => 1)}.must_raise(Sequel::MassAssignmentRestriction) Symbol.all_symbols.map(&:to_s).wont_include('sadojafdso') end it "#set should correctly handle cases where an instance method is added to the 
class" do @o1.set(:x => 1) @o1.values.must_equal(:x => 1) @c.class_eval do def z=(v) self[:z] = v end end @o1.set(:x => 2, :z => 3) @o1.values.must_equal(:x => 2, :z=>3) end it "#set should correctly handle cases where a singleton method is added to the object" do @o1.set(:x => 1) @o1.values.must_equal(:x => 1) def @o1.z=(v) self[:z] = v end @o1.set(:x => 2, :z => 3) @o1.values.must_equal(:x => 2, :z=>3) end it "#set should correctly handle cases where a module with a setter method is included in the class" do @o1.set(:x => 1) @o1.values.must_equal(:x => 1) @c.send(:include, Module.new do def z=(v) self[:z] = v end end) @o1.set(:x => 2, :z => 3) @o1.values.must_equal(:x => 2, :z=>3) end it "#set should correctly handle cases where the object extends a module with a setter method" do @o1.set(:x => 1) @o1.values.must_equal(:x => 1) @o1.extend(Module.new do def z=(v) self[:z] = v end end) @o1.set(:x => 2, :z => 3) @o1.values.must_equal(:x => 2, :z=>3) end it "#set should correctly handle cases where the object extends a module with a setter method and primary keys are not restricted" do @c.unrestrict_primary_key @o1.set(:x => 1) @o1.values.must_equal(:x => 1) @o1.extend(Module.new do def z=(v) self[:z] = v end end) @o1.set(:x => 2, :z => 3) @o1.values.must_equal(:x => 2, :z=>3) @o1.set(:id => 8) @o1.values.must_equal(:id => 8, :x => 2, :z=>3) end end describe Sequel::Model, "#update" do before do @c = Class.new(Sequel::Model(:items)) do set_primary_key :id columns :x, :y, :id end @c.strict_param_setting = false @o1 = @c.new @o2 = @c.load(:id => 5) DB.reset end it "should filter the given params using the model columns" do @o1.update(:x => 1, :z => 2) DB.sqls.must_equal ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE id = 10"] DB.reset @o2.update(:y => 1, :abc => 2) DB.sqls.must_equal ["UPDATE items SET y = 1 WHERE (id = 5)"] end it "should support virtual attributes" do @c.send(:define_method, :blah=){|v| self.x = v} @o1.update(:blah => 333) DB.sqls.must_equal ["INSERT INTO items (x) VALUES (333)", "SELECT * FROM items WHERE id = 10"] end it "should not modify the primary key" do @o1.update(:x => 1, :id => 2) DB.sqls.must_equal ["INSERT INTO items (x) VALUES (1)", "SELECT * FROM items WHERE id = 10"] DB.reset @o2.update('y'=> 1, 'id'=> 2) @o2.values.must_equal(:y => 1, :id=> 5) DB.sqls.must_equal ["UPDATE items SET y = 1 WHERE (id = 5)"] end end describe Sequel::Model, "#set_fields" do before do @c = Class.new(Sequel::Model(:items)) do set_primary_key :id columns :x, :y, :z, :id end @o1 = @c.new DB.reset end it "should set only the given fields" do @o1.set_fields({:x => 1, :y => 2, :z=>3, :id=>4}, [:x, :y]) @o1.values.must_equal(:x => 1, :y => 2) @o1.set_fields({:x => 9, :y => 8, :z=>6, :id=>7}, [:x, :y, :id]) @o1.values.must_equal(:x => 9, :y => 8, :id=>7) DB.sqls.must_equal [] end it "should look up values in the hash without checking if the entry exists" do @o1.set_fields({:x => 1}, [:x, :y]) @o1.values.must_equal(:x => 1, :y => nil) @o1.set_fields(Hash.new(2), [:x, :y]) @o1.values.must_equal(:x => 2, :y => 2) end it "should skip missing fields if :missing=>:skip option is used" do @o1.set_fields({:x => 3}, [:x, :y], :missing=>:skip) @o1.values.must_equal(:x => 3) @o1.set_fields({"x" => 4}, [:x, :y], :missing=>:skip) @o1.values.must_equal(:x => 4) @o1.set_fields(Hash.new(2).merge(:x=>2), [:x, :y], :missing=>:skip) @o1.values.must_equal(:x => 2) @o1.set_fields({:x => 1, :y => 2, :z=>3, :id=>4}, [:x, :y], :missing=>:skip) @o1.values.must_equal(:x => 1, :y => 2) end it "should raise
for missing fields if :missing=>:raise option is used" do proc{@o1.set_fields({:x => 1}, [:x, :y], :missing=>:raise)}.must_raise(Sequel::Error) proc{@o1.set_fields(Hash.new(2).merge(:x=>2), [:x, :y], :missing=>:raise)}.must_raise(Sequel::Error) proc{@o1.set_fields({"x" => 1}, [:x, :y], :missing=>:raise)}.must_raise(Sequel::Error) @o1.set_fields({:x => 5, "y"=>2}, [:x, :y], :missing=>:raise) @o1.values.must_equal(:x => 5, :y => 2) @o1.set_fields({:x => 1, :y => 3, :z=>3, :id=>4}, [:x, :y], :missing=>:raise) @o1.values.must_equal(:x => 1, :y => 3) end it "should use default behavior for an unrecognized :missing option" do @o1.set_fields({:x => 1, :y => 2, :z=>3, :id=>4}, [:x, :y], :missing=>:foo) @o1.values.must_equal(:x => 1, :y => 2) @o1.set_fields({:x => 9, :y => 8, :z=>6, :id=>7}, [:x, :y, :id], :missing=>:foo) @o1.values.must_equal(:x => 9, :y => 8, :id=>7) DB.sqls.must_equal [] end it "should respect model's default_set_fields_options" do @c.default_set_fields_options = {:missing=>:skip} @o1.set_fields({:x => 3}, [:x, :y]) @o1.values.must_equal(:x => 3) @o1.set_fields({:x => 4}, [:x, :y], {}) @o1.values.must_equal(:x => 4) proc{@o1.set_fields({:x => 3}, [:x, :y], :missing=>:raise)}.must_raise(Sequel::Error) @c.default_set_fields_options = {:missing=>:raise} proc{@o1.set_fields({:x => 3}, [:x, :y])}.must_raise(Sequel::Error) proc{@o1.set_fields({:x => 3}, [:x, :y], {})}.must_raise(Sequel::Error) @o1.set_fields({:x => 5}, [:x, :y], :missing=>:skip) @o1.values.must_equal(:x => 5) @o1.set_fields({:x => 5}, [:x, :y], :missing=>nil) @o1.values.must_equal(:x => 5, :y=>nil) DB.sqls.must_equal [] end it "should respect model's default_set_fields_options in a subclass" do @c.default_set_fields_options = {:missing=>:skip} o = Class.new(@c).new o.set_fields({:x => 3}, [:x, :y]) o.values.must_equal(:x => 3) end it "should respect set_column_value" do @c.class_eval do def set_column_value(c, v) if c.to_s == 'model=' self[:model] = v else send(c, v) end end end @o1.set_fields({:model=>2, :x=>3}, [:model, :x]) @o1[:model].must_equal 2 @o1.x.must_equal 3 end end describe Sequel::Model, "#update_fields" do before do @c = Class.new(Sequel::Model(:items)) do set_primary_key :id columns :x, :y, :z, :id end @c.strict_param_setting = true @o1 = @c.load(:id=>1) DB.reset end it "should set only the given fields, and then save the changes to the record" do @o1.update_fields({:x => 1, :y => 2, :z=>3, :id=>4}, [:x, :y]) @o1.values.must_equal(:x => 1, :y => 2, :id=>1) DB.sqls.must_equal ['UPDATE items SET x = 1, y = 2 WHERE (id = 1)'] @o1.update_fields({:x => 1, :y => 5, :z=>6, :id=>7}, [:x, :y]) @o1.values.must_equal(:x => 1, :y => 5, :id=>1) DB.sqls.must_equal ["UPDATE items SET y = 5 WHERE (id = 1)"] end it "should support :missing=>:skip option" do @o1.update_fields({:x => 1, :z=>3, :id=>4}, [:x, :y], :missing=>:skip) @o1.values.must_equal(:x => 1, :id=>1) DB.sqls.must_equal ["UPDATE items SET x = 1 WHERE (id = 1)"] end it "should support :missing=>:raise option" do proc{@o1.update_fields({:x => 1}, [:x, :y], :missing=>:raise)}.must_raise(Sequel::Error) end it "should respect model's default_set_fields_options" do @c.default_set_fields_options = {:missing=>:skip} @o1.update_fields({:x => 3}, [:x, :y]) @o1.values.must_equal(:x => 3, :id=>1) DB.sqls.must_equal ["UPDATE items SET x = 3 WHERE (id = 1)"] @c.default_set_fields_options = {:missing=>:raise} proc{@o1.update_fields({:x => 3}, [:x, :y])}.must_raise(Sequel::Error) DB.sqls.must_equal [] end end describe Sequel::Model, "#destroy with filtered dataset" do 
before do @model = Class.new(Sequel::Model(DB[:items].where(:a=>1))) @model.columns :id, :a @instance = @model.load(:id => 1234) DB.reset end it "should raise a NoExistingObject exception if the dataset delete call doesn't return 1" do i = 0 @model.dataset = @model.dataset.with_extend{define_method(:execute_dui){|*| i}} proc{@instance.delete}.must_raise(Sequel::NoExistingObject) i = 2 proc{@instance.delete}.must_raise(Sequel::NoExistingObject) i = 1 @instance.delete @instance.require_modification = false i = 0 @instance.delete i = 2 @instance.delete end it "should include WHERE clause when deleting" do @instance.destroy DB.sqls.must_equal ["DELETE FROM items WHERE ((a = 1) AND (id = 1234))"] end end describe Sequel::Model, "#destroy" do before do @model = Class.new(Sequel::Model(:items)) @model.columns :id @instance = @model.load(:id => 1234) DB.reset end it "should return self" do @model.send(:define_method, :after_destroy){3} @instance.destroy.must_equal @instance end it "should raise a NoExistingObject exception if the dataset delete call doesn't return 1" do i = 0 @model.dataset = @model.dataset.with_extend{define_method(:execute_dui){|*| i}} proc{@instance.delete}.must_raise(Sequel::NoExistingObject) i = 2 proc{@instance.delete}.must_raise(Sequel::NoExistingObject) i = 1 @instance.delete @instance.require_modification = false i = 0 @instance.delete i = 2 @instance.delete end it "should run within a transaction if use_transactions is true" do @instance.use_transactions = true @instance.destroy DB.sqls.must_equal ["BEGIN", "DELETE FROM items WHERE id = 1234", "COMMIT"] end it "should not run within a transaction if use_transactions is false" do @instance.use_transactions = false @instance.destroy DB.sqls.must_equal ["DELETE FROM items WHERE id = 1234"] end it "should run within a transaction if :transaction option is true" do @instance.use_transactions = false @instance.destroy(:transaction => true) DB.sqls.must_equal ["BEGIN", "DELETE FROM items WHERE id = 1234", "COMMIT"] end it "should not run within a transaction if :transaction option is false" do @instance.use_transactions = true @instance.destroy(:transaction => false) DB.sqls.must_equal ["DELETE FROM items WHERE id = 1234"] end it "should run before_destroy and after_destroy hooks" do @model.send(:define_method, :before_destroy){DB.execute('before blah')} @model.send(:define_method, :after_destroy){DB.execute('after blah')} @instance.destroy DB.sqls.must_equal ["before blah", "DELETE FROM items WHERE id = 1234", "after blah"] end it "should not use the optimized delete when the dataset doesn't support placeholder literalizers" do @model.dataset = @model.dataset.with_extend{def supports_placeholder_literalizer?; false end} @instance.destroy DB.sqls.must_equal ["DELETE FROM items WHERE (id = 1234)"] end end describe Sequel::Model, "#exists?"
do before do @model = Class.new(Sequel::Model(:items)) @model.dataset = @model.dataset.with_fetch(proc{|sql| {:x=>1} if sql =~ /id = 1/}) DB.reset end it "should do a query to check if the record exists" do @model.load(:id=>1).exists?.must_equal true DB.sqls.must_equal ['SELECT 1 AS one FROM items WHERE (id = 1) LIMIT 1'] end it "should return false when #this.count == 0" do @model.load(:id=>2).exists?.must_equal false DB.sqls.must_equal ['SELECT 1 AS one FROM items WHERE (id = 2) LIMIT 1'] end it "should return false without issuing a query if the model object is new" do @model.new.exists?.must_equal false DB.sqls.must_equal [] end end describe Sequel::Model, "#each" do before do @model = Class.new(Sequel::Model(:items)) @model.columns :a, :b, :id @m = @model.load(:a => 1, :b => 2, :id => 4444) end it "should iterate over the values" do h = {} @m.each{|k, v| h[k] = v} h.must_equal(:a => 1, :b => 2, :id => 4444) end end describe Sequel::Model, "#keys" do before do @model = Class.new(Sequel::Model(:items)) @model.columns :a, :b, :id @m = @model.load(:a => 1, :b => 2, :id => 4444) end it "should return the value keys" do @m.keys.sort_by{|k| k.to_s}.must_equal [:a, :b, :id] @model.new.keys.must_equal [] end end describe Sequel::Model, "#==" do it "should compare instances by values" do z = Class.new(Sequel::Model) z.columns :id, :x a = z.load(:id => 1, :x => 3) b = z.load(:id => 1, :x => 4) c = z.load(:id => 1, :x => 3) a.wont_equal b a.must_equal c b.wont_equal c end it "should be aliased to #eql?" do z = Class.new(Sequel::Model) z.columns :id, :x a = z.load(:id => 1, :x => 3) b = z.load(:id => 1, :x => 4) c = z.load(:id => 1, :x => 3) a.eql?(b).must_equal false a.eql?(c).must_equal true b.eql?(c).must_equal false end end [:===, :pk_equal?].each do |method_name| describe Sequel::Model, "##{method_name}" do it "should compare instances by class and pk if pk is not nil" do z = Class.new(Sequel::Model) z.columns :id, :x y = Class.new(Sequel::Model) y.columns :id, :x a = z.load(:id => 1, :x => 3) b = z.load(:id => 1, :x => 4) c = z.load(:id => 2, :x => 3) d = y.load(:id => 1, :x => 3) a.must_be method_name, b a.wont_be method_name, c a.wont_be method_name, d end it "should handle composite primary keys" do z = Class.new(Sequel::Model) z.columns :id, :x z.set_primary_key [:id, :x] z.load(:id => 1, :x => 2).must_be method_name, z.load(:id => 1, :x => 2) z.load(:id => 1, :x => 2).wont_be method_name, z.load(:id => 2, :x => 1) z.load(:id => 1, :x => 2).wont_be method_name, z.load(:id => 1, :x => 1) z.load(:id => 1, :x => 2).wont_be method_name, z.load(:id => 2, :x => 2) end it "should always be false if the primary key is nil" do z = Class.new(Sequel::Model) z.columns :id, :x y = Class.new(Sequel::Model) y.columns :id, :x a = z.new(:x => 3) b = z.new(:x => 4) c = z.new(:x => 3) d = y.new(:x => 3) a.wont_be method_name, b a.wont_be method_name, c a.wont_be method_name, d end it "should always be false if the primary key is an array containing nil" do z = Class.new(Sequel::Model) z.columns :id, :x z.set_primary_key [:id, :x] z.load(:id => nil, :x => nil).wont_be method_name, z.load(:id => nil, :x => nil) z.load(:id => 1, :x => nil).wont_be method_name, z.load(:id => 1, :x => nil) z.load(:id => nil, :x => 2).wont_be method_name, z.load(:id => nil, :x => 2) end end end describe Sequel::Model, "#hash" do it "should be the same only for objects with the same class and pk if the pk is not nil" do z = Class.new(Sequel::Model) z.columns :id, :x y = Class.new(Sequel::Model) y.columns :id, :x a = z.load(:id => 
1, :x => 3) a.hash.must_equal z.load(:id => 1, :x => 4).hash a.hash.wont_equal z.load(:id => 2, :x => 3).hash a.hash.wont_equal y.load(:id => 1, :x => 3).hash end it "should be the same only for objects with the same class and values if the pk is nil" do z = Class.new(Sequel::Model) z.columns :id, :x y = Class.new(Sequel::Model) y.columns :id, :x a = z.new(:x => 3) a.hash.wont_equal z.new(:x => 4).hash a.hash.must_equal z.new(:x => 3).hash a.hash.wont_equal y.new(:x => 3).hash end it "should be the same only for objects with the same class and pk if pk is composite and all values are non-NULL" do z = Class.new(Sequel::Model) z.columns :id, :id2, :x z.set_primary_key([:id, :id2]) y = Class.new(Sequel::Model) y.columns :id, :id2, :x y.set_primary_key([:id, :id2]) a = z.load(:id => 1, :id2=>2, :x => 3) a.hash.must_equal z.load(:id => 1, :id2=>2, :x => 4).hash a.hash.wont_equal z.load(:id => 2, :id2=>1, :x => 3).hash a.hash.wont_equal y.load(:id => 1, :id2=>1, :x => 3).hash end it "should be the same only for objects with the same class and value if pk is composite and one value is NULL" do z = Class.new(Sequel::Model) z.columns :id, :id2, :x z.set_primary_key([:id, :id2]) y = Class.new(Sequel::Model) y.columns :id, :id2, :x y.set_primary_key([:id, :id2]) a = z.load(:id => 1, :id2 => nil, :x => 3) a.hash.must_equal z.load(:id => 1, :id2=>nil, :x => 3).hash a.hash.wont_equal z.load(:id => 1, :id2=>nil, :x => 4).hash a.hash.wont_equal y.load(:id => 1, :id2=>nil, :x => 3).hash a = z.load(:id =>nil, :id2 => nil, :x => 3) a.hash.must_equal z.load(:id => nil, :id2=>nil, :x => 3).hash a.hash.wont_equal z.load(:id => nil, :id2=>nil, :x => 4).hash a.hash.wont_equal y.load(:id => nil, :id2=>nil, :x => 3).hash a = z.load(:id => 1, :x => 3) a.hash.must_equal z.load(:id => 1, :x => 3).hash a.hash.wont_equal z.load(:id => 1, :id2=>nil, :x => 3).hash a.hash.wont_equal z.load(:id => 1, :x => 4).hash a.hash.wont_equal y.load(:id => 1, :x => 3).hash a = z.load(:x => 3) a.hash.must_equal z.load(:x => 3).hash a.hash.wont_equal z.load(:id => nil, :id2=>nil, :x => 3).hash a.hash.wont_equal z.load(:x => 4).hash a.hash.wont_equal y.load(:x => 3).hash end it "should be the same only for objects with the same class and values if there is no primary key" do z = Class.new(Sequel::Model) z.columns :id, :x z.no_primary_key y = Class.new(Sequel::Model) y.columns :id, :x y.no_primary_key a = z.new(:x => 3) a.hash.wont_equal z.new(:x => 4).hash a.hash.must_equal z.new(:x => 3).hash a.hash.wont_equal y.new(:x => 3).hash end end describe Sequel::Model, "#initialize" do before do @c = Class.new(Sequel::Model) do columns :id, :x end @c.strict_param_setting = false end it "should accept values" do m = @c.new(:x => 2) m.values.must_equal(:x => 2) end it "should not modify the primary key" do m = @c.new(:id => 1, :x => 2) m.values.must_equal(:x => 2) end it "should accept no values" do m = @c.new m.values.must_equal({}) end it "should accept a block to execute" do m = @c.new {|o| o[:id] = 1234} m.id.must_equal 1234 end it "should accept virtual attributes" do @c.send(:define_method, :blah=){|x| @blah = x} @c.send(:define_method, :blah){@blah} m = @c.new(:x => 2, :blah => 3) m.values.must_equal(:x => 2) m.blah.must_equal 3 end it "should convert string keys into symbol keys" do m = @c.new('x' => 2) m.values.must_equal(:x => 2) end end describe Sequel::Model, "#initialize_set" do before do @c = Class.new(Sequel::Model){columns :id, :x, :y} end it "should be called by initialize to set the column values" do @c.send(:define_method,
:initialize_set){|h| set(:y => 3)} @c.new(:x => 2).values.must_equal(:y => 3) end it "should be called with the hash given to initialize " do x = nil @c.send(:define_method, :initialize_set){|y| x = y} @c.new(:x => 2) x.must_equal(:x => 2) end it "should not cause columns modified by the method to be considered as changed" do @c.send(:define_method, :initialize_set){|h| set(:y => 3)} @c.new(:x => 2).changed_columns.must_equal [] end end describe Sequel::Model, ".create" do before do DB.reset @c = Class.new(Sequel::Model(:items)) do unrestrict_primary_key columns :x end end it "should be able to create rows in the associated table" do o = @c.create(:x => 1) o.class.must_equal @c DB.sqls.must_equal ['INSERT INTO items (x) VALUES (1)', "SELECT * FROM items WHERE id = 10"] end it "should be able to create rows without any values specified" do o = @c.create o.class.must_equal @c DB.sqls.must_equal ["INSERT INTO items DEFAULT VALUES", "SELECT * FROM items WHERE id = 10"] end it "should accept a block and call it" do o1, o2, o3 = nil, nil, nil o = @c.create {|o4| o1 = o4; o3 = o4; o2 = :blah; o3.x = 333} o.class.must_equal @c o1.must_be :===, o o3.must_be :===, o o2.must_equal :blah DB.sqls.must_equal ["INSERT INTO items (x) VALUES (333)", "SELECT * FROM items WHERE id = 10"] end it "should create a row for a model with custom primary key" do @c.set_primary_key :x o = @c.create(:x => 30) o.class.must_equal @c DB.sqls.must_equal ["INSERT INTO items (x) VALUES (30)", "SELECT * FROM items WHERE x = 30"] end end describe Sequel::Model, "#refresh" do before do @c = Class.new(Sequel::Model(:items)) do unrestrict_primary_key columns :id, :x end DB.reset end it "should reload the instance values from the database" do @m = @c.new(:id => 555) @m[:x] = 'blah' @c.dataset = @c.dataset.with_fetch(:x => 'kaboom', :id => 555) @m.refresh @m[:x].must_equal 'kaboom' DB.sqls.must_equal ["SELECT * FROM items WHERE id = 555"] end it "should raise if the instance is not found" do @m = @c.new(:id => 555) @c.dataset = @c.dataset.with_fetch([]) proc {@m.refresh}.must_raise(Sequel::NoExistingObject) DB.sqls.must_equal ["SELECT * FROM items WHERE id = 555"] end it "should be aliased by #reload" do @m = @c.new(:id => 555) @c.dataset = @c.dataset.with_fetch(:x => 'kaboom', :id => 555) @m.reload @m[:x].must_equal 'kaboom' DB.sqls.must_equal ["SELECT * FROM items WHERE id = 555"] end end describe Sequel::Model, "typecasting" do before do @c = Class.new(Sequel::Model(:items)) do columns :x end @c.db_schema = {:x=>{:type=>:integer}} @c.raise_on_typecast_failure = true DB.reset end after do Sequel.datetime_class = Time end it "should not convert if typecasting is turned off" do @c.typecast_on_assignment = false m = @c.new m.x = '1' m.x.must_equal '1' end it "should convert to integer for an integer field" do @c.db_schema = {:x=>{:type=>:integer}} m = @c.new m.x = '1' m.x.must_equal 1 m.x = 1 m.x.must_equal 1 m.x = 1.3 m.x.must_equal 1 end it "should typecast '' to nil unless type is string or blob" do [:integer, :float, :decimal, :boolean, :date, :time, :datetime].each do |x| @c.db_schema = {:x=>{:type=>x}} m = @c.new m.x = '' m.x.must_be_nil end [:string, :blob].each do |x| @c.db_schema = {:x=>{:type=>x}} m = @c.new m.x = '' m.x.must_equal '' end end it "should not typecast '' to nil if typecast_empty_string_to_nil is false" do m = @c.new m.typecast_empty_string_to_nil = false proc{m.x = ''}.must_raise Sequel::InvalidValue @c.typecast_empty_string_to_nil = false proc{@c.new.x = ''}.must_raise Sequel::InvalidValue end it "should 
handle typecasting where == raises an error on the object" do m = @c.new o = Object.new def o.==(v) raise ArgumentError end def o.to_i() 4 end m.x = o m.x.must_equal 4 end it "should not typecast nil if NULLs are allowed" do @c.db_schema[:x][:allow_null] = true m = @c.new m.x = nil m.x.must_be_nil end it "should raise an error if attempting to typecast nil and NULLs are not allowed" do @c.db_schema[:x][:allow_null] = false proc{@c.new.x = nil}.must_raise(Sequel::InvalidValue) proc{@c.new.x = ''}.must_raise(Sequel::InvalidValue) end it "should not raise an error if NULLs are not allowed and typecasting is turned off" do @c.typecast_on_assignment = false @c.db_schema[:x][:allow_null] = false m = @c.new m.x = nil m.x.must_be_nil end it "should not raise when typecasting nil to NOT NULL column but raise_on_typecast_failure is off" do @c.raise_on_typecast_failure = false @c.typecast_on_assignment = true m = @c.new m.x = '' m.x.must_be_nil m.x = nil m.x.must_be_nil end it "should raise an error if invalid data is used in an integer field" do proc{@c.new.x = 'a'}.must_raise(Sequel::InvalidValue) end it "should assign value if raise_on_typecast_failure is off and assigning invalid integer" do @c.raise_on_typecast_failure = false model = @c.new model.x = '1d' model.x.must_equal '1d' end it "should convert to float for a float field" do @c.db_schema = {:x=>{:type=>:float}} m = @c.new m.x = '1.3' m.x.must_equal 1.3 m.x = 1 m.x.must_equal 1.0 m.x = 1.3 m.x.must_equal 1.3 end it "should raise an error if invalid data is used in a float field" do @c.db_schema = {:x=>{:type=>:float}} proc{@c.new.x = 'a'}.must_raise(Sequel::InvalidValue) end it "should assign value if raise_on_typecast_failure is off and assigning invalid float" do @c.raise_on_typecast_failure = false @c.db_schema = {:x=>{:type=>:float}} model = @c.new model.x = '1d' model.x.must_equal '1d' end it "should convert to BigDecimal for a decimal field" do @c.db_schema = {:x=>{:type=>:decimal}} m = @c.new bd = BigDecimal('1.0') m.x = '1.0' m.x.must_equal bd m.x = 1.0 m.x.must_equal bd m.x = 1 m.x.must_equal bd m.x = bd m.x.must_equal bd m.x = '0' m.x.must_equal 0 end it "should raise an error if invalid data is used in a decimal field" do @c.db_schema = {:x=>{:type=>:decimal}} proc{@c.new.x = Date.today}.must_raise(Sequel::InvalidValue) proc{@c.new.x = 'foo'}.must_raise(Sequel::InvalidValue) end it "should assign value if raise_on_typecast_failure is off and assigning invalid decimal" do @c.raise_on_typecast_failure = false @c.db_schema = {:x=>{:type=>:decimal}} model = @c.new time = Time.now model.x = time model.x.must_equal time end it "should convert to string for a string field" do @c.db_schema = {:x=>{:type=>:string}} m = @c.new m.x = '1.3' m.x.must_equal '1.3' m.x = 1 m.x.must_equal '1' m.x = 1.3 m.x.must_equal '1.3' end it "should convert to boolean for a boolean field" do @c.db_schema = {:x=>{:type=>:boolean}} m = @c.new m.x = '1.3' m.x.must_equal true m.x = 1 m.x.must_equal true m.x = 1.3 m.x.must_equal true m.x = 't' m.x.must_equal true m.x = 'T' m.x.must_equal true m.x = 'y' m.x.must_equal true m.x = 'Y' m.x.must_equal true m.x = true m.x.must_equal true m.x = nil m.x.must_be_nil m.x = '' m.x.must_be_nil m.x = [] m.x.must_be_nil m.x = 'f' m.x.must_equal false m.x = 'F' m.x.must_equal false m.x = 'false' m.x.must_equal false m.x = 'FALSE' m.x.must_equal false m.x = 'n' m.x.must_equal false m.x = 'N' m.x.must_equal false m.x = 'no' m.x.must_equal false m.x = 'NO' m.x.must_equal false m.x = '0' m.x.must_equal false m.x = 0
m.x.must_equal false m.x = false m.x.must_equal false end it "should convert to date for a date field" do @c.db_schema = {:x=>{:type=>:date}} m = @c.new y = Date.new(2007,10,21) m.x = '2007-10-21' m.x.must_equal y m.x = Date.parse('2007-10-21') m.x.must_equal y m.x = Time.parse('2007-10-21') m.x.must_equal y m.x = DateTime.parse('2007-10-21') m.x.must_equal y end it "should accept a hash with symbol or string keys for a date field" do @c.db_schema = {:x=>{:type=>:date}} m = @c.new y = Date.new(2007,10,21) m.x = {:year=>2007, :month=>10, :day=>21} m.x.must_equal y m.x = {'year'=>'2007', 'month'=>'10', 'day'=>'21'} m.x.must_equal y end it "should raise an error if invalid data is used in a date field" do @c.db_schema = {:x=>{:type=>:date}} proc{@c.new.x = 'a'}.must_raise(Sequel::InvalidValue) proc{@c.new.x = 100}.must_raise(Sequel::InvalidValue) end it "should assign value if raise_on_typecast_failure is off and assigning invalid date" do @c.raise_on_typecast_failure = false @c.db_schema = {:x=>{:type=>:date}} model = @c.new model.x = 4 model.x.must_equal 4 end it "should convert to Sequel::SQLTime for a time field" do @c.db_schema = {:x=>{:type=>:time}} m = @c.new x = '10:20:30' y = Sequel::SQLTime.parse(x) m.x = x m.x.must_equal y m.x = y m.x.must_equal y m.x.must_be_kind_of(Sequel::SQLTime) end it "should accept a hash with symbol or string keys for a time field" do @c.db_schema = {:x=>{:type=>:time}} m = @c.new y = Time.parse('10:20:30') m.x = {:hour=>10, :minute=>20, :second=>30} m.x.must_equal y m.x = {'hour'=>'10', 'minute'=>'20', 'second'=>'30'} m.x.must_equal y end it "should raise an error if invalid data is used in a time field" do @c.db_schema = {:x=>{:type=>:time}} proc{@c.new.x = '0000'}.must_raise(Sequel::InvalidValue) proc{@c.new.x = Date.parse('2008-10-21')}.must_raise(Sequel::InvalidValue) proc{@c.new.x = DateTime.parse('2008-10-21')}.must_raise(Sequel::InvalidValue) end it "should assign value if raise_on_typecast_failure is off and assigning invalid time" do @c.raise_on_typecast_failure = false @c.db_schema = {:x=>{:type=>:time}} model = @c.new model.x = '0000' model.x.must_equal '0000' end it "should convert to the Sequel.datetime_class for a datetime field" do @c.db_schema = {:x=>{:type=>:datetime}} m = @c.new x = '2007-10-21T10:20:30-07:00' y = Time.parse(x) m.x = x m.x.must_equal y m.x = DateTime.parse(x) m.x.must_equal y m.x = Time.parse(x) m.x.must_equal y m.x = Date.parse('2007-10-21') m.x.must_equal Time.parse('2007-10-21') Sequel.datetime_class = DateTime y = DateTime.parse(x) m.x = x m.x.must_equal y m.x = DateTime.parse(x) m.x.must_equal y m.x = Time.parse(x) m.x.must_equal y m.x = Date.parse('2007-10-21') m.x.must_equal DateTime.parse('2007-10-21') end it "should accept a hash with symbol or string keys for a datetime field" do @c.db_schema = {:x=>{:type=>:datetime}} m = @c.new y = Time.parse('2007-10-21 10:20:30') m.x = {:year=>2007, :month=>10, :day=>21, :hour=>10, :minute=>20, :second=>30} m.x.must_equal y m.x = {'year'=>'2007', 'month'=>'10', 'day'=>'21', 'hour'=>'10', 'minute'=>'20', 'second'=>'30'} m.x.must_equal y Sequel.datetime_class = DateTime y = DateTime.parse('2007-10-21 10:20:30') m.x = {:year=>2007, :month=>10, :day=>21, :hour=>10, :minute=>20, :second=>30} m.x.must_equal y m.x = {'year'=>'2007', 'month'=>'10', 'day'=>'21', 'hour'=>'10', 'minute'=>'20', 'second'=>'30'} m.x.must_equal y end it "should raise an error if invalid data is used in a datetime field" do @c.db_schema = {:x=>{:type=>:datetime}} proc{@c.new.x = 
'0000'}.must_raise(Sequel::InvalidValue) Sequel.datetime_class = DateTime proc{@c.new.x = '0000'}.must_raise(Sequel::InvalidValue) proc{@c.new.x = 'a'}.must_raise(Sequel::InvalidValue) end it "should assign value if raise_on_typecast_failure is off and assigning invalid datetime" do @c.raise_on_typecast_failure = false @c.db_schema = {:x=>{:type=>:datetime}} model = @c.new model.x = '0000' model.x.must_equal '0000' Sequel.datetime_class = DateTime model = @c.new model.x = '0000' model.x.must_equal '0000' model.x = 'a' model.x.must_equal 'a' end end describe "Model#lock!" do before do @c = Class.new(Sequel::Model(:items)) do columns :id end @c.dataset = @c.dataset.with_fetch(:id=>1) DB.reset end it "should do nothing if the record is a new record" do o = @c.new def o._refresh(x) raise Sequel::Error; super(x) end x = o.lock! x.must_equal o DB.sqls.must_equal [] end it "should refresh the record using for_update if it is not a new record" do o = @c.load(:id => 1) def o._refresh(x) instance_variable_set(:@a, 1); super(x) end x = o.lock! x.must_equal o o.instance_variable_get(:@a).must_equal 1 DB.sqls.must_equal ["SELECT * FROM items WHERE (id = 1) LIMIT 1 FOR UPDATE"] end it "should refresh the record using the specified lock when it is not a new record and a style is given" do o = @c.load(:id => 1) def o._refresh(x) instance_variable_set(:@a, 1); super(x) end x = o.lock!('FOR NO KEY UPDATE') x.must_equal o o.instance_variable_get(:@a).must_equal 1 DB.sqls.must_equal ["SELECT * FROM items WHERE (id = 1) LIMIT 1 FOR NO KEY UPDATE"] end end describe "Model#schema_type_class" do it "should return the class or array of classes for the given type symbol" do @c = Class.new(Sequel::Model(:items)) @c.class_eval{@db_schema = {:id=>{:type=>:integer}}} @c.new.send(:schema_type_class, :id).must_equal Integer end it "should return nil for a missing column or column type" do @c = Class.new(Sequel::Model(:items)) @c.class_eval{@db_schema = {:id=>{:type=>:integer}, :bar=>{}}} @c.new.send(:schema_type_class, :c).must_be_nil @c.new.send(:schema_type_class, :bar).must_be_nil end end
sequel-5.63.0/spec/model/spec_helper.rb
$:.unshift(File.join(File.dirname(File.expand_path(__FILE__)), "../../lib/")) require_relative "../../lib/sequel" Sequel::Deprecation.backtrace_filter = lambda{|line, lineno| lineno < 4 || line =~ /_spec\.rb/}
ENV['MT_NO_PLUGINS'] = '1' # Work around stupid autoloading of plugins
gem 'minitest' require 'minitest/global_expectations/autorun' require 'minitest/hooks/default' require_relative '../deprecation_helper' class << Sequel::Model attr_writer :db_schema alias orig_columns columns def columns(*cols) return super if cols.empty?
define_method(:columns){cols} alias_method(:columns, :columns) @dataset.send(:columns=, cols) if @dataset def_column_accessor(*cols) @columns = cols @db_schema = {} cols.each{|c| @db_schema[c] = {}} end end Sequel::DB = nil Sequel::Model.use_transactions = false Sequel::Model.cache_anonymous_models = false db = Sequel.mock(:fetch=>{:id => 1, :x => 1}, :numrows=>1, :autoid=>proc{|sql| 10}) def db.schema(*) [[:id, {:primary_key=>true}]] end def db.reset() sqls end def db.supports_schema_parsing?() true end Sequel::Model.db = DB = db if ENV['SEQUEL_COLUMNS_INTROSPECTION'] Sequel.extension :columns_introspection Sequel::Database.extension :columns_introspection Sequel::Mock::Dataset.send(:include, Sequel::ColumnsIntrospection) end if ENV['SEQUEL_NO_CACHE_ASSOCIATIONS'] Sequel::Model.cache_associations = false end
sequel-5.63.0/spec/model/validations_spec.rb
require_relative "spec_helper" describe Sequel::Model::Errors do before do @errors = Sequel::Model::Errors.new end it "should be clearable using #clear" do @errors.add(:a, 'b') @errors.must_equal(:a=>['b']) @errors.clear @errors.must_equal({}) end it "should be empty if there are no errors" do @errors.must_be :empty? end it "should not be empty if there are errors" do @errors.add(:blah, "blah") @errors.wont_be :empty?
end it "should return an array of errors for a specific attribute using #on if there are errors" do @errors.add(:blah, 'blah') @errors.on(:blah).must_equal ['blah'] end it "should return nil using #on if there are no errors for that attribute" do @errors.on(:blah).must_be_nil end it "should accept errors using #add" do @errors.add :blah, 'zzzz' @errors[:blah].must_equal ['zzzz'] end it "should return full messages using #full_messages" do @errors.full_messages.must_equal [] @errors.add(:blow, 'blieuh') @errors.add(:blow, 'blich') @errors.add(:blay, 'bliu') msgs = @errors.full_messages msgs.sort.must_equal ['blay bliu', 'blow blich', 'blow blieuh'] end it "should not add column names for LiteralStrings" do @errors.full_messages.must_equal [] @errors.add(:blow, 'blieuh') @errors.add(:blow, Sequel.lit('blich')) @errors.add(:blay, 'bliu') msgs = @errors.full_messages msgs.sort.must_equal ['blay bliu', 'blich', 'blow blieuh'] end it "should allow for customizing #full_messages via #full_message if not a LiteralString" do @errors.full_messages.must_equal [] def @errors.full_message(attribute, msg) "#{attribute.class}-#{msg}-#{attribute.inspect}" end @errors.add(:blow, 'blieuh') @errors.add(:blow, Sequel.lit('blich')) @errors.add(:blay, 'bliu') msgs = @errors.full_messages msgs.sort.must_equal ['Symbol-blieuh-:blow', 'Symbol-bliu-:blay', 'blich'] end it "should return the number of error messages using #count" do @errors.count.must_equal 0 @errors.add(:a, 'b') @errors.count.must_equal 1 @errors.add(:a, 'c') @errors.count.must_equal 2 @errors.add(:b, 'c') @errors.count.must_equal 3 end it "should return the array of error messages for a given attribute using #on" do @errors.add(:a, 'b') @errors.on(:a).must_equal ['b'] @errors.add(:a, 'c') @errors.on(:a).must_equal ['b', 'c'] @errors.add(:b, 'c') @errors.on(:a).must_equal ['b', 'c'] end it "should return nil if there are no error messages for a given attribute using #on" do @errors.on(:a).must_be_nil @errors.add(:b, 'b') @errors.on(:a).must_be_nil end end describe Sequel::Model do before do @c = Class.new(Sequel::Model) do columns :score def validate errors.add(:score, 'too low') if score < 87 end end @o = @c.new end it "should supply a #valid? method that returns true if validations pass" do @o.score = 50 @o.wont_be :valid? @o.score = 100 @o.must_be :valid? end it "should provide an errors object" do @o.score = 100 @o.must_be :valid? @o.errors.must_be :empty? @o.score = 86 @o.wont_be :valid? 
@o.errors[:score].must_equal ['too low'] @o.errors.on(:blah).must_be_nil end it "should allow raising of ValidationFailed with a Model instance with errors" do @o.errors.add(:score, 'is too low') begin raise Sequel::ValidationFailed, @o rescue Sequel::ValidationFailed => e end e.model.must_be_same_as(@o) e.errors.must_be_same_as(@o.errors) e.message.must_equal 'score is too low' end it "should allow raising of ValidationFailed with an Errors instance" do @o.errors.add(:score, 'is too low') begin raise Sequel::ValidationFailed, @o.errors rescue Sequel::ValidationFailed => e end e.model.must_be_nil e.errors.must_be_same_as(@o.errors) e.message.must_equal 'score is too low' end it "should allow raising of ValidationFailed with a string" do proc{raise Sequel::ValidationFailed, "no reason"}.must_raise(Sequel::ValidationFailed, "no reason") end end describe "Model#save" do before do @c = Class.new(Sequel::Model(:people)) do columns :id, :x def validate errors.add(:id, 'blah') unless x == 7 end end @m = @c.load(:id => 4, :x=>6) DB.reset end it "should save only if validations pass" do @m.raise_on_save_failure = false @m.wont_be :valid? @m.save DB.sqls.must_be :empty? @m.x = 7 @m.must_be :valid? @m.save.wont_equal false DB.sqls.must_equal ['UPDATE people SET x = 7 WHERE (id = 4)'] end it "should skip validations if the :validate=>false option is used" do @m.raise_on_save_failure = false @m.wont_be :valid? @m.save(:validate=>false) DB.sqls.must_equal ['UPDATE people SET x = 6 WHERE (id = 4)'] end it "should skip validations if the skip_validation_on_save! method is used" do @m.raise_on_save_failure = false @m.wont_be :valid? @m.skip_validation_on_next_save! @m.save DB.sqls.must_equal ['UPDATE people SET x = 6 WHERE (id = 4)'] end it "should not skip future validations if the skip_validation_on_save! method is used" do @m.wont_be :valid? @m.skip_validation_on_next_save! @m.save DB.sqls.must_equal ['UPDATE people SET x = 6 WHERE (id = 4)'] proc{@m.save}.must_raise Sequel::ValidationFailed @m.skip_validation_on_next_save! @m.save DB.sqls.must_equal ['UPDATE people SET x = 6 WHERE (id = 4)'] end it "should skip validations if the skip_validation_on_save! method is used and :validate=>true option is used" do @m.wont_be :valid? @m.skip_validation_on_next_save! 
@m.save(:validate=>true) DB.sqls.must_equal ['UPDATE people SET x = 6 WHERE (id = 4)'] end it "should raise error if validations fail and raise_on_save_failure is true" do begin @m.save rescue Sequel::ValidationFailed => e e.model.must_be_same_as(@m) e.errors.must_be_same_as(@m.errors) else raise end end it "should raise error if validations fail and :raise_on_failure option is true" do @m.raise_on_save_failure = false proc{@m.save(:raise_on_failure => true)}.must_raise(Sequel::ValidationFailed) end it "should return nil if validations fail and raise_on_save_failure is false" do @m.raise_on_save_failure = false @m.save.must_be_nil end end
sequel-5.63.0/spec/model_no_assoc_spec.rb
# Runner that loads all model specs except the association/eager loading ones.
Dir['./spec/model/*_spec.rb'].delete_if{|f| f =~ /association|eager_loading/}.each{|f| require f}
sequel-5.63.0/spec/model_spec.rb
# Runner that loads all model specs in a single process.
Dir['./spec/model/*_spec.rb'].each{|f| require f}
sequel-5.63.0/spec/plugin_spec.rb
# Runner that loads all plugin/extension specs in a single process.
Dir['./spec/extensions/*_spec.rb'].each{|f| require f}
sequel-5.63.0/spec/sequel_coverage.rb
# Helper for enabling SimpleCov coverage tracking, including merged and subprocess coverage.
require 'simplecov' def SimpleCov.sequel_coverage(opts = {}) start do enable_coverage :branch command_name SEQUEL_COVERAGE unless SEQUEL_COVERAGE == "1" add_filter "/spec/" if ENV['SEQUEL_MERGE_COVERAGE'] filter = %r{bin/sequel\z|lib/sequel/(\w+\.rb|(dataset|database|model|connection_pool|extensions|plugins)/\w+\.rb|adapters/(mock|(shared/)?postgres)\.rb)\z} add_filter{|src| src.filename !~ filter} elsif opts[:filter] add_filter{|src| src.filename !~ opts[:filter]} end if opts[:subprocesses] enable_for_subprocesses true ENV['COVERAGE'] = 'subprocess' ENV['RUBYOPT'] = "#{ENV['RUBYOPT']} -r ./spec/sequel_coverage" elsif SEQUEL_COVERAGE == 'subprocess' command_name "bin-#{$$}" self.print_error_status = false formatter SimpleCov::Formatter::SimpleFormatter end end end SEQUEL_COVERAGE = ENV.delete('COVERAGE') if SEQUEL_COVERAGE == 'subprocess' SimpleCov.sequel_coverage end
sequel-5.63.0/spec/spec_config.rb.example
# Custom setup for the adapter/integration specs
# ENV['SEQUEL_INTEGRATION_URL'] = 'sqlite:/'
# ENV['SEQUEL_POSTGRES_URL'] = 'postgres://localhost/test'
sequel-5.63.0/spec/visibility_checking.rb
require 'visibility_checker' changes = VISIBILITY_CHANGES = [] Minitest.after_run do if defined?(DB) [DB.singleton_class,
DB.dataset.singleton_class].each do |c| VISIBILITY_CHANGES.concat(VisibilityChecker.visibility_changes(c).map{|v| [v, c.inspect]}) end end changes.uniq!{|v,| v} changes.map! do |v, caller| "#{caller}: #{v.new_visibility} method #{v.overridden_by}##{v.method} overrides #{v.original_visibility} method in #{v.defined_in}" end changes.sort! if changes.empty? puts "No visibility changes" else puts "Visibility changes:" puts(*changes) end end
sequel-5.63.0/spec/visibility_checking_after_hook.rb
require_relative "visibility_checking" model_subclasses = [] [Sequel::Database, Sequel::Dataset, Sequel::Model, Sequel::Model.singleton_class].each do |c| VISIBILITY_CHANGES.concat(VisibilityChecker.visibility_changes(c).map{|v| [v, c.inspect]}) end Sequel::Model.singleton_class.class_eval do prepend(Module.new do private define_method(:inherited) do |sc| model_subclasses << sc super(sc) end end) end Minitest::HooksSpec.class_eval do after do path, lineno = method(@NAME).source_location check = [] Sequel::DATABASES.each do |db| check.push(db.singleton_class) check.push(db.dataset.singleton_class) end Sequel::DATABASES.clear subclasses = model_subclasses.dup model_subclasses.clear check.concat(subclasses) check.concat(subclasses.map(&:singleton_class)) check.concat(subclasses.map{|c| c.dataset.singleton_class if c.instance_variable_get(:@dataset)}) check.each do |c| next unless c VISIBILITY_CHANGES.concat(VisibilityChecker.visibility_changes(c).map{|v| [v, "#{path}:#{lineno}"]}) end end end
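# A minimal, self-contained sketch of what the visibility_checker gem used by
# the two helpers above detects. Base and Child are hypothetical classes; the
# visibility_changes call and the fields read from each change object are the
# ones the helpers above already rely on, so only the scenario is assumed.
require 'visibility_checker'

class Base
  private
  def helper; end
end

class Child < Base
  def helper; end # silently becomes public when redefined without `private`
end

VisibilityChecker.visibility_changes(Child).each do |v|
  # Prints something like:
  # public method Child#helper overrides private method in Base
  puts "#{v.new_visibility} method #{v.overridden_by}##{v.method} overrides #{v.original_visibility} method in #{v.defined_in}"
end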
sequel-5.63.0/www/layout.html.erb

<!DOCTYPE html>
<html lang="en">
<head>
<title><%= "#{title.capitalize} - " unless title == 'index' %>Sequel: The Database Toolkit for Ruby</title>

<%= content %>
sequel-5.63.0/www/make_www.rb

#!/usr/bin/env ruby
require 'erb'
$: << File.join(File.dirname(__FILE__), '..','lib', 'sequel')
require 'version'

Dir.chdir(File.dirname(__FILE__))
erb = ERB.new(File.read('layout.html.erb'))

Dir['pages/*.html.erb'].each do |page|
  public_loc = "#{page.gsub(/\Apages\//, 'public/').sub('.erb', '')}"
  # The self-assignments silence "assigned but unused variable" warnings;
  # content and title are used by layout.html.erb via binding.
  content = content = ERB.new(File.read(page)).result(binding)
  title = title = File.basename(page.sub('.html.erb', ''))
  File.open(public_loc, 'wb'){|f| f.write(erb.result(binding))}
end

sequel-5.63.0/www/pages/development.html.erb

Development


Sequel is being actively developed. New versions of Sequel are generally released monthly on the first of the month. You can join the discussion, ask questions, and suggest features in GitHub Discussions or on our Google Group: Sequel Talk.


Reporting Bugs

To report a bug in Sequel, use GitHub Issues. If you aren't sure if something is a bug, post a question on GitHub Discussions or the Google Group.

Note that GitHub Issues should not be used to ask questions about how to use Sequel; use GitHub Discussions or the Google Group for that.

Contributing

The easiest way to contribute is to use git, post the changes to a public repository, and send a pull request, either via GitHub or the Google Group. Posting patches to the bug tracker or the Google Group works fine as well.

You can find the contributing guidelines on GitHub.

Source Code

The master source code repository is jeremyevans/sequel on GitHub.

License

Sequel is distributed under the MIT License. Patches are assumed to be submitted under the same license as Sequel.

sequel-5.63.0/www/pages/documentation.html.erb

Documentation for Sequel (v<%= Sequel.version %>)


General Info, Guides, Examples, and Tutorials


RDoc


Release Notes

<% %w'5 4 3'.each do |i| %>
  • Sequel <%= i %>
    <% lines = []
       Dir["../doc/release_notes/#{i}.*.txt"].map{|f| File.basename(f)}.each do |f|
         (lines[f.split('.')[1].to_i/10] ||= []) << f
       end
       lines.reverse.each do |fs| %>
      • <% fs.sort_by{|f| f.split('.').map{|x| x.to_i}}.reverse.each do |f| %><%= f.sub(/\.txt$/, '').sub(/(..)\.0$/, '\\1') %> | <% end %>
    <% end %>
<% end %>
<% %w'2 1'.each do |i| %>
  • Sequel <%= i %>
<% end %>

sequel-5.63.0/www/pages/index.html.erb

Sequel: The Database Toolkit for Ruby

	• Thread safety, connection pooling, and a concise DSL for constructing SQL queries and table schemas.
  • Comprehensive ORM layer for mapping records to Ruby objects and handling associated records.
  • Advanced database features such as prepared statements, bound variables, stored procedures, savepoints, two-phase commit, transaction isolation, primary/replica configurations, and database sharding.
  • With adapters for ADO, Amalgalite, IBM_DB, JDBC, MySQL, Mysql2, ODBC, Oracle, PostgreSQL, SQLAnywhere, SQLite3, and TinyTDS.
require "sequel"

# connect to an in-memory database
DB = Sequel.sqlite

# create an items table
DB.create_table :items do
  primary_key :id
  String :name, unique: true, null: false
  Float :price, null: false
end

# create a dataset from the items table
items = DB[:items]

# populate the table
items.insert(name: 'abc', price: rand * 100)
items.insert(name: 'def', price: rand * 100)
items.insert(name: 'ghi', price: rand * 100)

# print out the number of records
puts "Item count: #{items.count}"

# print out the average price
puts "The average price is: #{items.avg(:price)}"
sequel-5.63.0/www/pages/links.html.erb

sequel-5.63.0/www/pages/plugins.html.erb

Sequel::Model Plugins for v<%= Sequel.version %>

Sequel::Model has a standardized and very flexible plugin architecture; see the RDoc. Here is a list of plugins that members of the Sequel community have developed.
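
A quick sketch of the two ways a plugin is typically loaded, globally or per model class (the Album class and its position column are illustrative):

Sequel::Model.plugin :timestamps   # affects all model classes

class Album < Sequel::Model
  plugin :list, field: :position   # affects only Album
end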

Plugins that ship with Sequel

Associations

  • association_dependencies Allows easy deleting, destroying, or nullifying associated objects when destroying a model object.
	• association_lazy_eager_option Supports an :eager option when calling association methods, for per-call eager loading when the association is not cached.
  • association_multi_add_remove Allows adding/removing multiple associated objects in a single method call.
  • association_pks Adds the association_pks and association_pks= to *_to_many associations.
  • association_proxies Changes the *_to_many association method to return a proxy instead of an array of objects.
	• auto_restrict_eager_graph Automatically disallows the use of eager_graph for associations with blocks but without :graph_* conditions.
  • concurrent_eager_loading Allows you to concurrently eagerly load multiple associations using the async_thread_pool Database extension.
  • dataset_associations Adds association methods to datasets that return datasets of associated objects.
	• delay_add_association Delays add_association calls on new objects until after the object is saved.
  • eager_each Makes each on an eagerly loaded dataset do eager loading.
  • eager_graph_eager Allows chaining eager loads to associations loaded by eager_graph.
  • forbid_lazy_load Forbids lazy loading of associations in cases where they could cause N+1 query issues.
	• instance_specific_default Finds associations that would benefit from having the :instance_specific option specified.
  • many_through_many Allows you to create an association through multiple join tables.
	• nested_attributes Allows you to modify associated objects directly through a model object, similar to ActiveRecord's Nested Attributes.
	• pg_array_associations Adds association types to handle the case where foreign keys are stored in a PostgreSQL array in one of the tables.
  • rcte_tree Supports retrieving all ancestors and descendants for tree structured data using recursive common table expressions.
	• tactical_eager_loading Allows you to eagerly load an association for all objects retrieved from the same dataset when calling the association method on any of the objects in the dataset.
  • tree Allows you to treat model objects as being part of a tree, finding ancestors, descendants, siblings, and tree roots.
  • unused_associations Determines which associations and association methods you can skip defining to save on memory.
  • validate_associated Supports validating associated objects at the same time as the current object.
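
For example, the nested_attributes plugin listed above allows saving an object graph in a single call (the Artist/Album classes are illustrative, following the plugin's RDoc):

class Artist < Sequel::Model
  one_to_many :albums
  plugin :nested_attributes
  nested_attributes :albums
end

# Creates the artist and an associated album in one save
Artist.create(name: 'YJM', albums_attributes: [{name: 'RF'}])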

Attributes

  • accessed_columns Records which columns have been accessed for a given model instance.
  • blacklist_security Adds blacklist-based model mass-assignment protection.
  • boolean_readers Adds attribute? methods for all boolean columns.
  • column_conflicts Automatically handles column names that conflict with Ruby/Sequel method names.
  • column_encryption Encrypt column values stored in the database.
	• dirty Allows you to get the initial values of columns after changing the values.
	• defaults_setter Gets default values for new models before saving.
	• enum Allows for treating a column as an enum.
	• force_encoding Forces all model column string values to a given encoding.
  • input_transformer Automatically transform input to model column setters.
  • lazy_attributes Allows you to set some attributes that should not be loaded by default, but only loaded when an object requests them.
  • modification_detection Automatically detect in-place changes to column values.
  • split_values Splits noncolumn entries from values hash into separate hash.
  • string_stripper Strips strings assigned to model attributes.
  • uuid Automatically sets UUID attribute when creating model objects.
  • whitelist_security Adds whitelist-based model mass-assignment protection.

Caching

  • caching Supports caching primary key lookups of model objects to any object that supports the Ruby-Memcache API.
  • static_cache Caches all model instances, improving performance for static models.
	• static_cache_cache Supports caching rows for static_cache models to a file to avoid database queries during model initialization.
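
A sketch of the static_cache plugin from the list above; it assumes the underlying table (status_types here, an illustrative name) is essentially read-only:

class StatusType < Sequel::Model
  plugin :static_cache
end

# All rows were loaded when the plugin was loaded; neither call queries the database
StatusType.all
StatusType[1]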

Hooks

  • after_initialize Adds an after_initialize hook to model objects, called for both new objects and those loaded from the database.
	• hook_class_methods Adds backwards compatibility for the legacy class-level hook methods (e.g. before_save :do_something).
  • instance_hooks Allows you to add hooks to specific model instances.

Inheritance

  • class_table_inheritance Supports inheritance in the database by using a single database table for each class in a class hierarchy.
  • single_table_inheritance Supports inheritance in the database by using a single table for all classes in a class hierarchy.
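
A minimal sketch of single_table_inheritance (the Employee/Manager classes and the kind column are illustrative):

class Employee < Sequel::Model
  plugin :single_table_inheritance, :kind
end

class Manager < Employee
end

# Manager.create stores 'Manager' in the kind column, and
# Employee.all instantiates each row as the class named in its kind column.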

Prepared Statements

  • prepared_statements Makes models use prepared statements for deletes, inserts, updates, and lookups by primary key.
  • prepared_statements_safe Makes use of prepared_statements plugin more safe by setting explicit defaults for model columns when inserting and saving whole rows instead of changed columns.

Saving

  • columns_updated Makes columns hash used for updates available in after_update and after_save hooks.
  • insert_conflict Allows handling unique constraint conflicts when saving new model instances on PostgreSQL 9.5+ and SQLite 3.24.0+.
  • instance_filters Allows you to add per instance filters that are used when updating or destroying the instance.
  • mssql_optimistic_locking Uses a timestamp/rowversion column on Microsoft SQL Server to prevent concurrent updates overwriting changes.
  • optimistic_locking Adds a database-independent locking mechanism to models to prevent concurrent updates overwriting changes.
  • sharding Additional model support for Sequel's sharding support.
  • skip_create_refresh Allows you to skip the refresh when saving new model objects.
  • skip_saving_columns Allows for marking columns to skip when saving model objects, and skips the saving of generated columns by default.
  • timestamps Creates hooks for automatically setting create and update timestamps.
  • touch Allows easily updating timestamps via Model#touch, as well as touching associations when model instances are updated or destroyed.
  • unlimited_update Works around MySQL warnings when using replication due to LIMIT clause use when updating model instances.
  • update_or_create Adds helper methods for updating an object if it exists, or creating such an object if it does not.
  • update_primary_key Allows you to safely update the primary key of a model object.
  • update_refresh Refreshes a model object when updating it.
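
As an example, the optimistic_locking plugin above raises an error instead of silently overwriting concurrent changes (this sketch assumes a people table with the default lock_version column, per the plugin's RDoc):

class Person < Sequel::Model
  plugin :optimistic_locking
end

p1 = Person[1]
p2 = Person[1]
p1.update(name: 'Jim')   # increments lock_version
p2.update(name: 'Bob')   # raises Sequel::Plugins::OptimisticLocking::Error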

Selection

  • column_select Selects explicitly qualified columns (table.column1, table.column2, ...) instead of just * for model datasets.
  • insert_returning_select Automatically sets RETURNING clause for INSERT queries for models that use an explicit column selection.
  • table_select Selects table.* instead of just * for model datasets.

Serialization

Subsets

  • boolean_subsets Automatically creates a subset method for each boolean column.
  • def_dataset_method Adds Model.def_dataset_method and Model.subset for backwards compatibility.
  • inverted_subsets Creates an additional method for each subset, that inverts the subset's conditions.
  • subset_conditions Creates an additional method for each subset, returning the filter conditions used for the subset.
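
A sketch combining def_dataset_method's subset with inverted_subsets (the Album class and published column are illustrative):

class Album < Sequel::Model
  plugin :def_dataset_method
  plugin :inverted_subsets
  subset :published, published: true
end

Album.published       # WHERE published IS TRUE
Album.not_published   # WHERE published IS NOT TRUE (from inverted_subsets)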

Validations

Other

  • active_model Makes Sequel::Model objects compliant to the ActiveModel::Lint specs, so they should work correctly in Rails 3+.
  • async_thread_pool Adds better support for model classes to use the async_thread_pool Database extension.
  • finder Adds Model.finder and Model.prepared_finder methods for defining optimized lookup methods.
  • list Allows you to treat model objects as being part of a list, so you can move them up/down and get next/previous entries.
  • pg_row Allows Sequel::Model classes to implement PostgreSQL row-valued/composite types.
  • primary_key_lookup_check_values Typecasts and checks values in allowed range during lookups by primary key.
  • require_valid_schema Requires that model classes selecting from simple tables have valid schema.
  • singular_table_names Makes Sequel default to not pluralizing table names.
  • sql_comments Supports automatically adding comments to queries to show which class, instance, dataset, or association method triggered the query.
  • subclasses Allows easy access all model subclasses and descendent classes, without using ObjectSpace.
  • typecast_on_load Fixes bad database typecasting when loading model objects.

External Plugins


Extensions

Sequel Extensions are modifications or additions to Sequel that affect either Sequel::Database objects (database extensions), Sequel::Dataset objects (dataset extensions), or the general Ruby environment (global extensions).

Database extensions that ship with Sequel

Database extensions can be loaded into a single Sequel::Database object via Sequel::Database#extension, or to all databases by using Sequel::Database.extension.
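
A short sketch of both forms, before the list (the connection URL is illustrative):

DB = Sequel.connect('postgres://localhost/sequel_test')
DB.extension :pg_array, :pg_json        # loaded into this Database object only

Sequel::Database.extension :error_sql   # loaded into all future Database objects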

	• arbitrary_servers Adds the ability to connect to arbitrary servers (instead of just preconfigured ones) in the sharding support.
  • async_thread_pool Adds support for running queries asynchronously using a thread pool.
  • caller_logging Include caller information when logging queries.
  • connection_expiration Automatically removes connections from the pool after they have been open longer than a timeout.
  • connection_validator Automatically validates connections on pool checkout and handles disconnections transparently.
  • constant_sql_override Override generated SQL for Sequel constants with configurable strings.
  • constraint_validations Creates database constraints when creating/altering tables, with metadata for automatic model validations via the constraint_validations plugin.
  • error_sql Makes DatabaseError#sql return the SQL query that caused the underlying exception.
  • identifier_mangling Support modification for the default identifier mangling for the database.
  • index_caching Speeds up loading index information by saving/loading database index metadata to a file.
  • integer64 Treats the Integer class as a 64-bit integer instead of a 32-bit integer when used as a generic database type.
  • looser_typecasting Uses .to_f and .to_i instead of Kernel.Float and Kernel.Integer when typecasting floats and integers.
  • pg_array Adds support for PostgreSQL arrays.
  • pg_auto_parameterize Automatically parameterizes queries when using the postgres adapter with the pg driver.
  • pg_enum Adds support for PostgreSQL enums.
  • pg_extended_date_support Adds support for PostgreSQL infinite and BC date/timestamp support.
  • pg_extended_integer_support Adds support for handling Ruby integers outside PostgreSQL bigint range.
  • pg_hstore Adds support for the PostgreSQL hstore type.
  • pg_inet Adds support for the PostgreSQL inet and cidr types.
  • pg_interval Adds support for the PostgreSQL interval type.
  • pg_json Adds support for the PostgreSQL json type.
  • pg_loose_count Adds Database#loose_count for fast approximate counts of whole tables on PostgreSQL.
  • pg_multirange Adds support for PostgreSQL multirange types.
  • pg_range Adds support for PostgreSQL range types.
  • pg_row Adds support for PostgreSQL row-valued/composite types.
  • pg_static_cache_updater Listens for changes to underlying tables in order to automatically update models using the static_cache plugin.
  • pg_timestamptz Uses timestamptz (timestamp with time zone) as the generic timestamp type used for Time and DateTime classes.
  • run_transaction_hooks Support running after_commit and after_rollback transaction hooks before transaction commit/rollback, designed for transactional testing.
  • schema_caching Speeds up loading a large number of models by caching database schema and loading it from a file.
  • schema_dumper Adds Database#dump_schema_migration and related methods for dumping the schema of the database as a migration that can be restored on other databases.
  • server_block Adds Database#with_server method that makes access inside the passed block use the specified shard by default.
  • server_logging Include server/shard information when logging queries.
	• sql_comments Adds Database#with_comments method, for automatically using comments for queries executed inside a block.
  • sql_log_normalizer Normalizes SQL before logging, helpful for analytics and sensitive data.

Dataset extensions that ship with Sequel

Dataset extensions can be loaded into a single Sequel::Dataset object via Sequel::Dataset#extension, or to all datasets for a given database via Sequel::Database#extension, or all datasets for all databases by using Sequel::Database.extension.
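
A short sketch of the per-dataset and per-database forms (the items table is illustrative):

ds = DB[:items].extension(:columns_introspection)   # returns a modified copy of this dataset

DB.extension(:columns_introspection)                # affects datasets created by DB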

	• any_not_empty Makes Dataset#any? without a block mean !empty?.
  • auto_literal_strings Automatically treats string arguments passed to filter methods as literal SQL.
	• columns_introspection Attempts to skip database queries by introspecting the selected columns if possible.
  • current_datetime_timestamp Creates current Time/DateTime objects that are literalized as CURRENT_TIMESTAMP.
  • dataset_source_alias Automatically aliases datasets to their source instead of using t1, t2, etc.
  • date_arithmetic Allows for database-independent date calculations (adding/subtracting an interval to/from a date/timestamp).
  • duplicate_columns_handler Allows you to raise or warn if the dataset returns multiple columns with the same name when returning the results.
  • empty_array_consider_nulls Makes Sequel's handling of IN/NOT IN with an empty array use a NULL expression for NULL column values.
	• exclude_or_null Adds Dataset#exclude_or_null for only including rows where the condition is false or NULL.
  • graph_each Makes Dataset#each split returned results by table when using Dataset#graph.
  • implicit_subquery Makes dataset methods that return datasets with modified SQL use a subquery implicitly if the current dataset uses raw SQL.
  • is_distinct_from Allows for using or emulating the IS DISTINCT FROM operator.
  • null_dataset Adds Dataset#nullify to get a dataset that will never issue a query.
  • mssql_emulate_lateral_with_apply Emulates LATERAL queries using CROSS/OUTER APPLY on Microsoft SQL Server.
  • pagination Adds Dataset#paginate for easier pagination of datasets.
  • pretty_table Adds Dataset#print for printing a dataset as a simple plain-text table.
  • query Adds Dataset#query for a different interface to creating queries that doesn't use method chaining.
	• round_timestamps Automatically rounds timestamp values to database-supported precision before literalizing them.
  • select_remove Adds Dataset#select_remove to remove selected columns from a dataset.
  • sequel_4_dataset_methods Adds #and, #exclude_where, #interval, and #range dataset methods.
  • split_array_nil Splits nils out of IN/NOT IN arrays into separate OR IS NULL or AND IS NOT NULL clauses.
  • sql_comments Adds Dataset#comment method, for setting an SQL comment in the queries created by the dataset.
	• string_agg Adds generic database support for aggregate string concatenation.
  • synchronize_sql Checks out a connection while generating SQL strings, improving performance in some cases.
  • to_dot Adds Dataset#to_dot method, which returns a string suitable for processing by graphviz's dot program to get a visualization of the dataset's abstract syntax tree.

Global extensions that ship with Sequel

Global extensions can affect other parts of Sequel or the general Ruby environment, and are loaded with Sequel.extension.
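
For instance, loading two of the extensions listed below:

Sequel.extension :migration   # adds the Migration/Migrator API
Sequel.extension :blank       # adds Object#blank?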

  • blank Adds blank? instance methods to all objects.
  • core_extensions Extends the Array, Hash, String, and Symbol classes with methods that return Sequel expression objects.
  • core_refinements Adds refinement versions of Sequel's core extensions.
  • eval_inspect Makes inspect on Sequel's expression objects attempt to return a string suitable for eval.
  • escaped_like Support creation of LIKE expressions with placeholders in the pattern without access to a dataset.
  • date_parse_input_handler Allows for custom handling of input strings to the date parsing methods.
  • datetime_parse_to_time Uses DateTime.parse.to_time to parse timestamp strings without offset information where such timestamps are assumed to be in UTC.
  • fiber_concurrency Uses Fiber.current instead of Thread.current as the key for checking out connections.
  • inflector Adds instance-level inflection methods to String.
  • migration Adds Migration and Migrator classes for easily migrating the database schema forward or reverting to a previous version.
  • named_timezones Allows you to use named timezones instead of just :local and :utc (requires TZInfo).
  • pg_array_ops Adds DSL support for calling PostgreSQL array operators and functions.
  • pg_hstore_ops Adds DSL support for calling PostgreSQL hstore operators and functions.
  • pg_inet_ops Adds DSL support for calling PostgreSQL inet/cidr operators and functions.
  • pg_json_ops Adds DSL support for calling PostgreSQL json operators and functions.
  • pg_range_ops Adds DSL support for calling PostgreSQL range and multirange operators and functions.
  • pg_row_ops Adds DSL support for dealing with PostgreSQL row-valued/composite types.
  • s Adds Sequel::S module with #S private method for easily calling Sequel.expr, including use as a refinement.
  • sql_expr Adds sql_expr method to all objects, allowing easy use of Sequel's DSL.
  • sqlite_json_ops Adds DSL support for calling SQLite json operators and functions.
  • string_date_time Adds instance methods to String for converting the string into a Date/Time/DateTime.
  • symbol_aref Extends Symbol#[] to create a qualified identifier if given a symbol, identifier, or qualified identifier.
  • symbol_aref_refinement Adds refinement version of symbol_aref extension.
  • symbol_as Adds Symbol#as to create aliased expressions.
  • symbol_as_refinement Adds refinement version of symbol_as extension.
  • thread_local_timezones Allows for per-thread overrides of the global timezone settings.
  • virtual_row_method_block Supports passing blocks to virtual row method for terser SQL function creation.

External Extensions

  • annotate-sequel Generates model annotations for Sequel models.
  • fixture_dependencies YAML fixture loader that handles dependencies/associated objects, respecting foreign key constraints.
  • i18n_backend_sequel Allows Sequel to be a backend for i18n translations.
  • miguel Allows management of database schemas, including auto generation/application of migrations.
  • rails_sequel Rails 2 plugin that allows you to use Sequel instead of ActiveRecord.
  • refile-sequel Provides an extension for using Refile with Sequel.
  • rspec_sequel_matchers RSpec matchers for Sequel validations, associations, and columns.
  • sequel-activerecord_connection Allows Sequel to use ActiveRecord's connection for database interaction.
  • sequel-annotate Generates model annotations for Sequel models, including constraint and trigger information on PostgreSQL.
  • sequel_extjs Generates JSON from datasets that is consumable by the ExtJS JsonStore.
  • sequel-location Easy setup and syntax for doing geolocation search on PostgreSQL.
  • sequel_pg Faster SELECTs when using Sequel with pg.
	• sequel-pg_advisory_locking Adds PostgreSQL advisory locking support.
  • sequel-pg-comment Document your schema by setting comments on all your PgSQL objects.
  • sequel_plus Collection of sequel extensions.
  • sequel_postgresql_triggers Database enforced timestamps, immutable columns, and counter/sum caches.
  • sequel-rails Rails 3 plugin that allows you to use Sequel instead of ActiveRecord.
  • sequel_rails3 Another Rails 3 plugin that allows you to use Sequel instead of ActiveRecord.
  • sequel-force-hooks Have after_commit/after_rollback hooks apply to current savepoint instead of transaction.
  • sequel-seed A Sequel extension to make seeds/fixtures manageable like migrations.
  • sequel_tools Tools to help with managing database operations with Sequel through Rake tasks.
	• sequel_vectorized Allows Sequel::Dataset to be exported as a Hash of Arrays and NArrays.

External Adapters

Adapters allow Sequel to communicate with databases. Sequel ships with many adapters, but here are links to external adapters:


Submitting Your Plugin/Extension/Adapter

If you have created a Sequel plugin/extension/adapter and would like to list it here, please submit a request to code AT jeremyevans DOT net, or send a pull request via GitHub.

sequel-5.63.0/www/public/css/application.css

:root { --font-sans-serif: "Helvetica Neue", Helvetica, Arial, sans-serif; --font-monospace: "Monaco", monospace; --ivory: #FFFFF0; --graphite: #333; --red-primary: #B33D34; --red-secondary: #771608; --border: #e2e2e2; --gap: 24px; }
@media (max-width: 960px) { :root { --gap: 20px; } }
@media (max-width: 640px) { :root { --gap: 16px; } }
body.body, html.html { margin: 0; padding: 0; scroll-behavior: smooth; }
body.body { font-family: var(--font-sans-serif); font-size: 14px; font-weight: 400; background-color: var(--ivory); color: var(--graphite); }
header.header, main.main, footer.footer { padding: 16px 16px 0 16px; }
@media (max-width: 960px) { header.header, main.main, footer.footer { padding-left: 64px; padding-right: 64px; } }
@media (max-width: 640px) { header.header, main.main, footer.footer { padding-left: 32px; padding-right: 32px; } }
main.main { padding-bottom: 64px; }
div.main__container, nav.nav { max-width: 1200px; margin-left: auto; margin-right: auto; }
nav.nav { display: flex; flex-direction: row; justify-content: space-between; gap: 16px; }
@media (max-width: 1200px) { nav.nav { flex-direction: column; } }
nav.nav--header { align-items: center; }
img.nav__img { margin: 0; transition: filter 100ms; }
img.nav__img--header:hover { filter: saturate(1.25); opacity: 1; }
img.nav__img--footer { height: 40px; opacity: .25; }
img.nav__img--footer:hover { opacity: .9 }
ul.nav__ul { display: flex; flex-direction: row; justify-content: space-between; gap: var(--gap); list-style: none; padding: 0; margin: 0; }
@media (max-width: 960px) { ul.nav__ul { flex-direction: column; gap: calc(var(--gap) * 2); } }
ul.nav__ul--header { align-items: center; }
@media (max-width: 960px) { ul.nav__ul--header { display: none; } }
ul.nav__ul--column { flex-direction: column; }
li.nav__li { display: inline-flex; flex-direction: column; gap: 16px; }
a.a { color: var(--red-primary); }
a.nav__a { color: var(--graphite); font-weight: 600; }
a.a, a.nav__a { text-decoration: none; transition: filter 100ms; }
a.a:hover, a.nav__a:hover { color: var(--red-primary); filter: saturate(1.5); }
a.a:visited { color: var(--red-secondary); }
a.a--dead { text-decoration: line-through; }
input.nav__input { font-family: var(--font-sans-serif); padding: 16px; background-color: rgba(0, 0, 0, 0.05); border-radius: 6px; border: 2px solid transparent; min-width: 256px; }
@media (max-width: 1200px) { form.nav__form, input.nav__input { display: block; width: 100%; min-width: 0px; } }
@media (max-width: 1200px) { form.nav__form--header { display: none; } }
input.nav__input:focus { outline: none; border: 2px solid var(--red-primary); background-color: #FFF; }
div.main__container { border-radius: 6px; }
footer.footer { border-top: 1px solid var(--border); padding-bottom: 64px; }
hr.hr { height: 1px; border: none; background-color: var(--border); margin-top: var(--gap); margin-bottom: var(--gap); box-sizing: border-box; }
h1.h1 { font-size: 32px; padding: var(--gap) var(--gap) 0 var(--gap); font-weight: 300; margin: 0; }
@media (max-width: 960px) { h1.h1 { font-size: 24px; } }
h2.h2 { font-size: 24px; margin: var(--gap); letter-spacing: -0.025em; line-height: 1.35em; }
@media (max-width: 960px) { h2.h2 { font-size: 20px; } }
h2.h2:first-child { margin-top: var(--gap); }
h2.h2:last-child { margin-bottom: var(--gap); }
span.h2__span { font-weight: 300; letter-spacing: 0.025em; }
h3.h3 { font-size: 18px; font-weight: 400; margin: var(--gap) 0 0 0; }
h3.h3:first-of-type { margin-top: 0; }
h4.h4 { margin-top: var(--gap); }
p.p { line-height: 1.35em; }
ul.ul { display: flex; flex-direction: column; row-gap: 12px; padding: 0; margin: 12px 0 0 0; list-style: none; }
ul.ul--grid { padding-top: 16px; border-top: 1px solid var(--border); }
li.ul__li { display: flex; column-gap: 16px; }
@media (max-width: 960px) { li.ul__li { display: block; } }
li.ul__li--grid { display: grid; grid-template-columns: 1fr 2fr; }
@media (max-width: 960px) { li.ul__li--grid { grid-template-columns: 1fr; } }
div.row { display: grid; grid-auto-flow: row; gap: var(--gap); margin-top: calc(var(--gap) / 2); margin-bottom: calc(var(--gap) / 2); margin-left: var(--gap); margin-right: var(--gap); }
div.row:first-child { margin-top: var(--gap); }
div.row:last-child { margin-bottom: var(--gap); }
div.row--grid { display: grid; grid-template-columns: repeat(auto-fit, minmax(280px, 1fr)); }
div.row--grid:last-child { padding-bottom: var(--gap); }

sequel-5.63.0/www/public/css/development.css

section.development { background-color: #FFF; border-radius: 12px; box-shadow: 0 24px 64px rgba(0,0,0,.15); }

sequel-5.63.0/www/public/css/documentation.css

section.documentation { background-color: #FFF; border-radius: 12px; box-shadow: 0 24px 64px rgba(0,0,0,.15); }
ul.release-notes { font-family: var(--font-monospace); display: flex; flex-direction: column; gap: 8px; font-size: 14px; color: #ccc; list-style: none; margin-top: 8px; margin-bottom: 16px; padding: 0; }
ul.release-notes__li { list-style: none; }

sequel-5.63.0/www/public/css/index.css

section.index { color: #FFF; background-color: var(--red-primary); border-radius: 12px; margin-bottom: 64px; }
div.index__hero { display: flex; flex-direction: column; text-align: center; justify-content: center; row-gap: 8px; padding-top: 16px; padding-bottom: 16px; }
h1.index__h1 { font-size: 64px; padding-left: var(--gap); padding-right: var(--gap); font-weight: 300; margin: 0; letter-spacing: -0.03em; }
@media (max-width: 960px) { h1.index__h1 { padding-top: var(--gap); padding-bottom: var(--gap); font-size: 48px; } }
@media (max-width: 640px) { h1.index__h1 { font-size: 40px; } }
a.index__a { display: inline-block; width: max-content; margin-left: auto; margin-right: auto; color: #FFF; opacity: 0.55; font-size: 20px; text-decoration: none; border-bottom: 1px solid #FFF; }
a.index__a:hover { opacity: 1; }
nav.index__nav { display: flex; align-items: center; gap: calc(var(--gap) / 4); margin-top: 16px; margin-left: auto; margin-right: auto; }
@media (max-width: 640px) { nav.index__nav { flex-direction: column; } }
a.index__button { display: block; padding: 16px 32px; text-decoration: none; font-weight: 600; border-radius: 6px; line-height: 1em; color: var(--ivory); background-color: var(--graphite); border: 2px solid transparent; }
a.index__button:hover { background-color: #222; }
a.index__button--secondary { color: var(--ivory); background-color: transparent; border-color: var(--ivory); }
a.index__button--secondary:hover { color: var(--ivory); background-color: rgba(255,255,255,.05) }
ul.index__ul { margin: 0 calc(var(--gap) * 2); padding: 0; list-style: none; display: grid; grid-template-columns: repeat(4, 1fr); gap: var(--gap); }
@media (max-width: 1200px) { ul.index__ul { grid-template-columns: 1fr; grid-template-rows: auto; } ul.index__ul { margin: 0 32px; } }
li.index__li { padding-left: var(--gap); line-height: 1.55em; border-left: 1px solid rgba(255,255,255,.25); }
@media (max-width: 1200px) { li.index__li { padding-left: 0px; padding-top: var(--gap); border-left: none; border-top: 1px solid rgba(255,255,255,.25); } }
li.index__li:first-child { border-top: 0px; padding-left: 0px; border-left: none; border-bottom: none; }
pre.pre { padding: var(--gap); margin: 0; }
code { font-family: var(--font-monospace); font-style: italic; color: #ffdead; background: #222; display: block; font-size: 14px; font-style: normal; overflow: auto; border-radius: 6px; box-shadow: 0 24px 64px rgba(0,0,0,.45); padding: var(--gap); margin: 0; margin-bottom: calc(var(--gap) * -2); }
.ruby .keyword { color: #faa; }
.ruby .punct { color: #faa; }
.ruby .class { color: #7fffd4; }
.ruby .constant { color: #34db9e; }
.ruby .ident { color: var(--ivory); }
.ruby .comment { color: #666; }
.ruby .symbol { color: #7fffd4; }
.ruby .string { color: #ffa07a; }
.ruby .regex { color: #bf8060; }
.ruby .number { color: #23acce; }

sequel-5.63.0/www/public/css/links.css

section.links { background-color: #FFF; border-radius: 12px; box-shadow: 0 24px 64px rgba(0,0,0,.15); }
ul.assets { display: grid; grid-template-columns: repeat(auto-fit, minmax(320px, 1fr)); gap: calc(var(--gap) / 2); list-style: none; padding: 0; margin: calc(var(--gap) / 2) var(--gap); }
@media (max-width: 640px) { ul.assets { grid-template-columns: 1fr; } }
li.assets__item { display: grid; grid-template-rows: auto max-content; border-radius: 8px; border: 1px solid var(--border); }
a.assets__container { display: flex; align-items: center; justify-content: center; padding: var(--gap) 32px; box-sizing: border-box; background-image: url("../images/transparent-background.svg"); background-size: 32px; }
@media (max-width: 480px) { img.assets__img { width: 80%; } }
p.assets__description { border-top: 1px solid var(--border); background-color: #f4f4f4; margin: 0; padding: 12px 16px; font-size: 12px; text-align: center; border-radius: 0 0 8px 8px; color: #666; }
a.parens:before { content: "("; }
a.parens:after { content: ")"; }

sequel-5.63.0/www/public/css/plugins.css

section.plugins { background-color: #FFF; border-radius: 12px; box-shadow: 0 24px 64px rgba(0,0,0,.15); }
code.plugins__code { background-color: #eaeaea; border-radius: 4px; padding: 2px 4px; }
div.plugins__sidebar { display: grid; grid-template-columns: 1fr 4fr; margin-top: var(--gap); border-top: 1px solid var(--border); }
@media (max-width: 1200px) { div.plugins__sidebar { grid-template-columns: 1fr; } }
aside.plugins__sidebar-aside { position: sticky; top: 0; max-height: 100vh; border-right: 1px solid var(--border); padding-bottom: var(--gap); padding-top: var(--gap); padding-left: var(--gap); padding-right: var(--gap); }
@media (max-width: 1200px) { aside.plugins__sidebar-aside { position: inherit; border-right: none; border-bottom: 1px solid var(--border); } }
sequel-5.63.0/www/public/images/favicon.ico
[binary icon data omitted]

sequel-5.63.0/www/public/images/ruby-sequel.svg
[SVG image data omitted]

sequel-5.63.0/www/public/images/sequel-button.png
[binary PNG data omitted]