DBIx-Class-Schema-Loader-0.07045/0000755000175000017500000000000012650450355015445 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/MANIFEST0000644000175000017500000001164312650450352016600 0ustar ilmariilmariChanges inc/Module/AutoInstall.pm inc/Module/Install.pm inc/Module/Install/AutoInstall.pm inc/Module/Install/Base.pm inc/Module/Install/Can.pm inc/Module/Install/Fetch.pm inc/Module/Install/Include.pm inc/Module/Install/Makefile.pm inc/Module/Install/Metadata.pm inc/Module/Install/Scripts.pm inc/Module/Install/Win32.pm inc/Module/Install/WriteAll.pm lib/DBIx/Class/Schema/Loader.pm lib/DBIx/Class/Schema/Loader/Base.pm lib/DBIx/Class/Schema/Loader/Column.pm lib/DBIx/Class/Schema/Loader/DBI.pm lib/DBIx/Class/Schema/Loader/DBI/ADO.pm lib/DBIx/Class/Schema/Loader/DBI/ADO/Microsoft_SQL_Server.pm lib/DBIx/Class/Schema/Loader/DBI/ADO/MS_Jet.pm lib/DBIx/Class/Schema/Loader/DBI/Component/QuotedDefault.pm lib/DBIx/Class/Schema/Loader/DBI/DB2.pm lib/DBIx/Class/Schema/Loader/DBI/Firebird.pm lib/DBIx/Class/Schema/Loader/DBI/Informix.pm lib/DBIx/Class/Schema/Loader/DBI/InterBase.pm lib/DBIx/Class/Schema/Loader/DBI/MSSQL.pm lib/DBIx/Class/Schema/Loader/DBI/mysql.pm lib/DBIx/Class/Schema/Loader/DBI/ODBC.pm lib/DBIx/Class/Schema/Loader/DBI/ODBC/ACCESS.pm lib/DBIx/Class/Schema/Loader/DBI/ODBC/Firebird.pm lib/DBIx/Class/Schema/Loader/DBI/ODBC/Microsoft_SQL_Server.pm lib/DBIx/Class/Schema/Loader/DBI/ODBC/SQL_Anywhere.pm lib/DBIx/Class/Schema/Loader/DBI/Oracle.pm lib/DBIx/Class/Schema/Loader/DBI/Pg.pm lib/DBIx/Class/Schema/Loader/DBI/SQLAnywhere.pm lib/DBIx/Class/Schema/Loader/DBI/SQLite.pm lib/DBIx/Class/Schema/Loader/DBI/Sybase.pm lib/DBIx/Class/Schema/Loader/DBI/Sybase/Common.pm lib/DBIx/Class/Schema/Loader/DBI/Sybase/Microsoft_SQL_Server.pm lib/DBIx/Class/Schema/Loader/DBI/Writing.pm lib/DBIx/Class/Schema/Loader/DBObject.pm lib/DBIx/Class/Schema/Loader/DBObject/Informix.pm lib/DBIx/Class/Schema/Loader/DBObject/Sybase.pm lib/DBIx/Class/Schema/Loader/Manual/UpgradingFromV4.pod 
lib/DBIx/Class/Schema/Loader/Optional/Dependencies.pm lib/DBIx/Class/Schema/Loader/RelBuilder.pm lib/DBIx/Class/Schema/Loader/RelBuilder/Compat/v0_040.pm lib/DBIx/Class/Schema/Loader/RelBuilder/Compat/v0_05.pm lib/DBIx/Class/Schema/Loader/RelBuilder/Compat/v0_06.pm lib/DBIx/Class/Schema/Loader/RelBuilder/Compat/v0_07.pm lib/DBIx/Class/Schema/Loader/Table.pm lib/DBIx/Class/Schema/Loader/Table/Informix.pm lib/DBIx/Class/Schema/Loader/Table/Sybase.pm lib/DBIx/Class/Schema/Loader/Utils.pm Makefile.PL MANIFEST This list of files META.yml script/dbicdump t/01use.t t/10_01sqlite_common.t t/10_02mysql_common.t t/10_03pg_common.t t/10_04db2_common.t t/10_05ora_common.t t/10_06sybase_common.t t/10_07mssql_common.t t/10_08sqlanywhere_common.t t/10_09firebird_common.t t/10_10informix_common.t t/10_11msaccess_common.t t/20invocations.t t/21misc_fatal.t t/22dump.t t/23dumpmore.t t/24loader_subclass.t t/25backcompat.t t/26dump_use_moose.t t/27filter_generated.t t/30_01comments.t t/30_02bad_comment_table.t t/30_03no_comment_table.t t/40overwrite_modifications.t t/45relationships.t t/46relationships_multi_m2m.t t/50rt59849.t t/60dbicdump_config.t t/65dbicdump_invocations.t t/70schema_base_dispatched.t t/80split_name.t t/90bug_58_mro.t t/backcompat/0.04006/10sqlite_common.t t/backcompat/0.04006/11mysql_common.t t/backcompat/0.04006/12pg_common.t t/backcompat/0.04006/13db2_common.t t/backcompat/0.04006/14ora_common.t t/backcompat/0.04006/20invocations.t t/backcompat/0.04006/21misc_fatal.t t/backcompat/0.04006/22dump.t t/backcompat/0.04006/23dumpmore.t t/backcompat/0.04006/lib/DBIx/Class/TestComponent.pm t/backcompat/0.04006/lib/dbixcsl_common_tests.pm t/backcompat/0.04006/lib/DBIXCSL_Test/Schema/LoaderTest1.pm t/backcompat/0.04006/lib/dbixcsl_test_dir.pm t/backcompat/0.04006/lib/make_dbictest_db.pm t/backcompat/0.04006/lib/My/ResultBaseClass.pm t/backcompat/0.04006/lib/My/SchemaBaseClass.pm t/backcompat/0.04006/lib/TestAdditional.pm t/backcompat/0.04006/lib/TestAdditionalBase.pm 
t/backcompat/0.04006/lib/TestLeftBase.pm t/bin/simple_filter t/lib/DBICTest/Schema/_no_skip_load_external/Foo.pm t/lib/DBICTest/Schema/_skip_load_external/Foo.pm t/lib/DBICTestMethods/Backcompat/Schema/Foo.pm t/lib/DBICTestMethods/Namespaces/Schema/Result/Foo.pm t/lib/DBIx/Class/TestComponent.pm t/lib/DBIx/Class/TestComponentForMap.pm t/lib/DBIx/Class/TestSchemaComponent.pm t/lib/dbixcsl_common_tests.pm t/lib/dbixcsl_dumper_tests.pm t/lib/DBIXCSL_Test/Schema/MyResult/LoaderTest1.pm t/lib/dbixcsl_test_dir.pm t/lib/make_dbictest_db.pm t/lib/make_dbictest_db_bad_comment_tables.pm t/lib/make_dbictest_db_clashing_monikers.pm t/lib/make_dbictest_db_comments.pm t/lib/make_dbictest_db_multi_m2m.pm t/lib/make_dbictest_db_multi_unique.pm t/lib/make_dbictest_db_plural_tables.pm t/lib/make_dbictest_db_with_unique.pm t/lib/My/ResultBaseClass.pm t/lib/My/SchemaBaseClass.pm t/lib/TestAdditional.pm t/lib/TestAdditionalBase.pm t/lib/TestComponentForMapFQN.pm t/lib/TestComponentFQN.pm t/lib/TestLeftBase.pm t/lib/TestLoaderSubclass.pm t/lib/TestLoaderSubclass_NoRebless.pm t/lib/TestRole.pm t/lib/TestRole2.pm t/lib/TestRoleForMap.pm t/lib/TestSchemaBaseClass.pm t/lib/TestSchemaComponentFQN.pm xt/pod_validity.t xt/strictures.t xt/whitespace.t DBIx-Class-Schema-Loader-0.07045/Makefile.PL0000644000175000017500000001020312542756321017415 0ustar ilmariilmariuse warnings; use strict; use 5.008001; use inc::Module::Install 1.00; use Getopt::Long(); my $getopt = Getopt::Long::Parser->new( config => [qw/gnu_getopt bundling_override no_ignore_case pass_through/] ); my $args = { skip_author_deps => undef, }; $getopt->getoptions($args, 'skip_author_deps'); if (@ARGV) { warn "\nIgnoring unrecognized option(s): @ARGV\n\n"; } use FindBin; use lib "$FindBin::Bin/lib"; perl_version '5.008001'; name 'DBIx-Class-Schema-Loader'; all_from 'lib/DBIx/Class/Schema/Loader.pm'; # temporary, needs to be thrown out test_requires 'DBIx::Class::IntrospectableM2M' => 0; # core, but specific versions not available 
on older perls test_requires 'File::Temp' => '0.16'; test_requires 'File::Path' => '2.07'; test_requires 'DBD::SQLite' => '1.29'; test_requires 'Test::Exception' => '0.31'; test_requires 'Test::More' => '0.94'; test_requires 'Test::Warn' => '0.21'; test_requires 'Test::Deep' => '0.107'; test_requires 'Test::Differences' => '0.60'; requires 'Carp::Clan' => 0; requires 'Class::Accessor::Grouped' => '0.10008'; requires 'Class::C3::Componentised' => '1.0008'; requires 'Class::Inspector' => '1.27'; requires 'Class::Unload' => '0.07'; requires 'Data::Dump' => '1.06'; requires 'DBIx::Class' => '0.08127'; requires 'Hash::Merge' => '0.12'; requires 'Lingua::EN::Inflect::Number' => '1.1'; requires 'Lingua::EN::Tagger' => '0.23'; requires 'Lingua::EN::Inflect::Phrase' => '0.15'; requires 'List::Util' => '1.33'; requires 'MRO::Compat' => '0.09'; requires 'namespace::clean' => '0.23'; requires 'Scope::Guard' => '0.20'; requires 'String::ToIdentifier::EN' => '0.05'; requires 'String::CamelCase' => '0.02'; requires 'Sub::Util' => '1.40'; requires 'Try::Tiny' => 0; # core, but specific versions not available on older perls requires 'Digest::MD5' => '2.36'; requires 'Exporter' => '5.63'; print <<"EOF"; ******************* DBIx::Class::Schema::Loader WARNING *********************** The default attributes for belongs_to relationships for foreign keys with no rules has been changed for most databases, and is soon changing for the rest, as ON DELETE/UPDATE and DEFERRABLE clauses for foreign keys are now being introspected. THIS MAY AFFECT YOUR DDL DIFFS WHEN DEPLOYING YOUR GENERATED CODE WILL ALMOST CERTAINLY CHANGE Read more about the changes in "relationship_attrs" in: perldoc DBIx::Class::Schema::Loader::Base https://metacpan.org/module/DBIx::Class::Schema::Loader::Base#relationship_attrs See also the "Changes" file for the last few revisions. ******************************************************************************* EOF if ($Module::Install::AUTHOR && ! 
$args->{skip_author_deps}) { eval { require Module::Install::ReadmeFromPod } or die "\nYou need Module::Install::ReadmeFromPod installed to run this Makefile.PL in author mode:\n\n$@\n"; warn "\n*** AUTHOR MODE: some optional dependencies converted to hard requires.\n\n"; require DBIx::Class::Schema::Loader::Optional::Dependencies; test_requires %{DBIx::Class::Schema::Loader::Optional::Dependencies ->modreq_list_for([ grep { !/rdbms/ } keys %{ DBIx::Class::Schema::Loader::Optional::Dependencies ->req_group_list } ])}; DBIx::Class::Schema::Loader::Optional::Dependencies->_gen_pod(undef, 'lib'); author_tests( 'xt' ); readme_from( 'lib/DBIx/Class/Schema/Loader.pm' ); realclean_files( qw[README MANIFEST lib/DBIx/Class/Schema/Loader/Optional/Dependencies.pod] ); } tests_recursive; install_script 'script/dbicdump'; resources 'IRC' => 'irc://irc.perl.org/#dbix-class'; resources 'license' => 'http://dev.perl.org/licenses/'; resources 'repository' => 'https://github.com/dbsrgits/dbix-class-schema-loader'; resources 'MailingList' => 'http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class'; auto_install; WriteAll; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/xt/0000755000175000017500000000000012650450355016100 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/xt/pod_validity.t0000644000175000017500000000045712542756321020764 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_pod'; use warnings; use strict; use Test::More; use lib qw(t/lib); # this has already been required but leave it here for CPANTS static analysis require Test::Pod; Test::Pod::all_pod_files_ok( 'lib', 'script' ); DBIx-Class-Schema-Loader-0.07045/xt/strictures.t0000644000175000017500000000207512542756321020502 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_strictures'; use warnings; use strict; use Test::More; use File::Find; use lib 't/lib'; # The rationale is - if we can load all 
our optdeps # that are related to lib/ - then we should be able to run # perl -c checks (via syntax_ok), and all should just work my $missing_groupdeps_present = grep { DBIx::Class::Schema::Loader::Optional::Dependencies->req_ok_for($_) } grep { $_ !~ /^ (?: test | rdbms | dist ) _ /x } keys %{DBIx::Class::Schema::Loader::Optional::Dependencies->req_group_list} ; find({ wanted => sub { -f $_ or return; m/\.(?: pm | pl | t )$ /ix or return; return if m{^(?: lib/DBIx/Class/Schema/Loader/Optional/Dependencies.pm # no stictures by design (load speed sensitive) )$}x; my $f = $_; Test::Strict::strict_ok($f); Test::Strict::warnings_ok($f); Test::Strict::syntax_ok($f) if ! $missing_groupdeps_present and $f =~ /^ (?: lib )/x; }, no_chdir => 1, }, (qw(lib t script maint)) ); done_testing; DBIx-Class-Schema-Loader-0.07045/xt/whitespace.t0000644000175000017500000000270212542756321020424 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_whitespace'; use warnings; use strict; use Test::More; use File::Glob 'bsd_glob'; use lib 't/lib'; # FIXME - temporary workaround for RT#82032, RT#82033 # also add all scripts (no extension) and some extra extensions # we want to check { no warnings 'redefine'; my $is_pm = sub { $_[0] !~ /\./ || $_[0] =~ /\.(?:pm|pod|skip|bash|sql|json|proto)$/i || $_[0] =~ /::/; }; *Test::EOL::_is_perl_module = $is_pm; *Test::NoTabs::_is_perl_module = $is_pm; } my @pl_targets = qw/t xt lib script maint/; Test::EOL::all_perl_files_ok({ trailing_whitespace => 1 }, @pl_targets); Test::NoTabs::all_perl_files_ok(@pl_targets); # check some non-"perl files" in the root separately # use .gitignore as a guide of what to skip # (or do not test at all if no .gitignore is found) if (open(my $gi, '<', '.gitignore')) { my $skipnames; while (my $ln = <$gi>) { next if $ln =~ /^\s*$/; chomp $ln; $ln =~ s{^/}{}; $skipnames->{$_}++ for bsd_glob($ln); } # that we want to check anyway delete $skipnames->{'META.yml'}; for my $fn 
(bsd_glob('*')) { next if $skipnames->{$fn}; next unless -f $fn; Test::EOL::eol_unix_ok($fn, { trailing_whitespace => 1 }); Test::NoTabs::notabs_ok($fn); } } # FIXME - Test::NoTabs and Test::EOL declare 'no_plan' which conflicts with done_testing # https://github.com/schwern/test-more/issues/14 #done_testing; DBIx-Class-Schema-Loader-0.07045/META.yml0000644000175000017500000000276712650450343016727 0ustar ilmariilmari--- abstract: 'Create a DBIx::Class::Schema based on a database' author: - 'Caelum: Rafael Kitover ' build_requires: DBD::SQLite: '1.29' DBIx::Class::IntrospectableM2M: 0 ExtUtils::MakeMaker: 6.59 File::Path: '2.07' File::Temp: '0.16' Test::Deep: '0.107' Test::Differences: '0.60' Test::Exception: '0.31' Test::More: '0.94' Test::Warn: '0.21' configure_requires: ExtUtils::MakeMaker: 6.59 distribution_type: module dynamic_config: 1 generated_by: 'Module::Install version 1.16' license: perl meta-spec: url: http://module-build.sourceforge.net/META-spec-v1.4.html version: 1.4 name: DBIx-Class-Schema-Loader no_index: directory: - inc - t - xt requires: Carp::Clan: 0 Class::Accessor::Grouped: '0.10008' Class::C3::Componentised: '1.0008' Class::Inspector: '1.27' Class::Unload: '0.07' DBIx::Class: '0.08127' Data::Dump: '1.06' Digest::MD5: '2.36' Exporter: '5.63' Hash::Merge: '0.12' Lingua::EN::Inflect::Number: '1.1' Lingua::EN::Inflect::Phrase: '0.15' Lingua::EN::Tagger: '0.23' List::Util: '1.33' MRO::Compat: '0.09' Scope::Guard: '0.20' String::CamelCase: '0.02' String::ToIdentifier::EN: '0.05' Sub::Util: '1.40' Try::Tiny: 0 namespace::clean: '0.23' perl: 5.8.1 resources: IRC: irc://irc.perl.org/#dbix-class MailingList: http://lists.scsys.co.uk/cgi-bin/mailman/listinfo/dbix-class license: http://dev.perl.org/licenses/ repository: https://github.com/dbsrgits/dbix-class-schema-loader version: '0.07045' DBIx-Class-Schema-Loader-0.07045/inc/0000755000175000017500000000000012650450355016216 5ustar 
ilmariilmariDBIx-Class-Schema-Loader-0.07045/inc/Module/0000755000175000017500000000000012650450355017443 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/inc/Module/Install.pm0000644000175000017500000003021712650450342021406 0ustar ilmariilmari#line 1 package Module::Install; # For any maintainers: # The load order for Module::Install is a bit magic. # It goes something like this... # # IF ( host has Module::Install installed, creating author mode ) { # 1. Makefile.PL calls "use inc::Module::Install" # 2. $INC{inc/Module/Install.pm} set to installed version of inc::Module::Install # 3. The installed version of inc::Module::Install loads # 4. inc::Module::Install calls "require Module::Install" # 5. The ./inc/ version of Module::Install loads # } ELSE { # 1. Makefile.PL calls "use inc::Module::Install" # 2. $INC{inc/Module/Install.pm} set to ./inc/ version of Module::Install # 3. The ./inc/ version of Module::Install loads # } use 5.006; use strict 'vars'; use Cwd (); use File::Find (); use File::Path (); use vars qw{$VERSION $MAIN}; BEGIN { # All Module::Install core packages now require synchronised versions. # This will be used to ensure we don't accidentally load old or # different versions of modules. # This is not enforced yet, but will be some time in the next few # releases once we can make sure it won't clash with custom # Module::Install extensions. $VERSION = '1.16'; # Storage for the pseudo-singleton $MAIN = undef; *inc::Module::Install::VERSION = *VERSION; @inc::Module::Install::ISA = __PACKAGE__; } sub import { my $class = shift; my $self = $class->new(@_); my $who = $self->_caller; #------------------------------------------------------------- # all of the following checks should be included in import(), # to allow "eval 'require Module::Install; 1' to test # installation of Module::Install. 
(RT #51267) #------------------------------------------------------------- # Whether or not inc::Module::Install is actually loaded, the # $INC{inc/Module/Install.pm} is what will still get set as long as # the caller loaded module this in the documented manner. # If not set, the caller may NOT have loaded the bundled version, and thus # they may not have a MI version that works with the Makefile.PL. This would # result in false errors or unexpected behaviour. And we don't want that. my $file = join( '/', 'inc', split /::/, __PACKAGE__ ) . '.pm'; unless ( $INC{$file} ) { die <<"END_DIE" } Please invoke ${\__PACKAGE__} with: use inc::${\__PACKAGE__}; not: use ${\__PACKAGE__}; END_DIE # This reportedly fixes a rare Win32 UTC file time issue, but # as this is a non-cross-platform XS module not in the core, # we shouldn't really depend on it. See RT #24194 for detail. # (Also, this module only supports Perl 5.6 and above). eval "use Win32::UTCFileTime" if $^O eq 'MSWin32' && $] >= 5.006; # If the script that is loading Module::Install is from the future, # then make will detect this and cause it to re-run over and over # again. This is bad. Rather than taking action to touch it (which # is unreliable on some platforms and requires write permissions) # for now we should catch this and refuse to run. if ( -f $0 ) { my $s = (stat($0))[9]; # If the modification time is only slightly in the future, # sleep briefly to remove the problem. my $a = $s - time; if ( $a > 0 and $a < 5 ) { sleep 5 } # Too far in the future, throw an error. my $t = time; if ( $s > $t ) { die <<"END_DIE" } Your installer $0 has a modification time in the future ($s > $t). This is known to create infinite loops in make. Please correct this, then run $0 again. END_DIE } # Build.PL was formerly supported, but no longer is due to excessive # difficulty in implementing every single feature twice. if ( $0 =~ /Build.PL$/i ) { die <<"END_DIE" } Module::Install no longer supports Build.PL. 
It was impossible to maintain duel backends, and has been deprecated. Please remove all Build.PL files and only use the Makefile.PL installer. END_DIE #------------------------------------------------------------- # To save some more typing in Module::Install installers, every... # use inc::Module::Install # ...also acts as an implicit use strict. $^H |= strict::bits(qw(refs subs vars)); #------------------------------------------------------------- unless ( -f $self->{file} ) { foreach my $key (keys %INC) { delete $INC{$key} if $key =~ /Module\/Install/; } local $^W; require "$self->{path}/$self->{dispatch}.pm"; File::Path::mkpath("$self->{prefix}/$self->{author}"); $self->{admin} = "$self->{name}::$self->{dispatch}"->new( _top => $self ); $self->{admin}->init; @_ = ($class, _self => $self); goto &{"$self->{name}::import"}; } local $^W; *{"${who}::AUTOLOAD"} = $self->autoload; $self->preload; # Unregister loader and worker packages so subdirs can use them again delete $INC{'inc/Module/Install.pm'}; delete $INC{'Module/Install.pm'}; # Save to the singleton $MAIN = $self; return 1; } sub autoload { my $self = shift; my $who = $self->_caller; my $cwd = Cwd::getcwd(); my $sym = "${who}::AUTOLOAD"; $sym->{$cwd} = sub { my $pwd = Cwd::getcwd(); if ( my $code = $sym->{$pwd} ) { # Delegate back to parent dirs goto &$code unless $cwd eq $pwd; } unless ($$sym =~ s/([^:]+)$//) { # XXX: it looks like we can't retrieve the missing function # via $$sym (usually $main::AUTOLOAD) in this case. # I'm still wondering if we should slurp Makefile.PL to # get some context or not ... my ($package, $file, $line) = caller; die <<"EOT"; Unknown function is found at $file line $line. Execution of $file aborted due to runtime errors. If you're a contributor to a project, you may need to install some Module::Install extensions from CPAN (or other repository). If you're a user of a module, please contact the author. 
EOT } my $method = $1; if ( uc($method) eq $method ) { # Do nothing return; } elsif ( $method =~ /^_/ and $self->can($method) ) { # Dispatch to the root M:I class return $self->$method(@_); } # Dispatch to the appropriate plugin unshift @_, ( $self, $1 ); goto &{$self->can('call')}; }; } sub preload { my $self = shift; unless ( $self->{extensions} ) { $self->load_extensions( "$self->{prefix}/$self->{path}", $self ); } my @exts = @{$self->{extensions}}; unless ( @exts ) { @exts = $self->{admin}->load_all_extensions; } my %seen; foreach my $obj ( @exts ) { while (my ($method, $glob) = each %{ref($obj) . '::'}) { next unless $obj->can($method); next if $method =~ /^_/; next if $method eq uc($method); $seen{$method}++; } } my $who = $self->_caller; foreach my $name ( sort keys %seen ) { local $^W; *{"${who}::$name"} = sub { ${"${who}::AUTOLOAD"} = "${who}::$name"; goto &{"${who}::AUTOLOAD"}; }; } } sub new { my ($class, %args) = @_; delete $INC{'FindBin.pm'}; { # to suppress the redefine warning local $SIG{__WARN__} = sub {}; require FindBin; } # ignore the prefix on extension modules built from top level. my $base_path = Cwd::abs_path($FindBin::Bin); unless ( Cwd::abs_path(Cwd::getcwd()) eq $base_path ) { delete $args{prefix}; } return $args{_self} if $args{_self}; $args{dispatch} ||= 'Admin'; $args{prefix} ||= 'inc'; $args{author} ||= ($^O eq 'VMS' ? 
'_author' : '.author'); $args{bundle} ||= 'inc/BUNDLES'; $args{base} ||= $base_path; $class =~ s/^\Q$args{prefix}\E:://; $args{name} ||= $class; $args{version} ||= $class->VERSION; unless ( $args{path} ) { $args{path} = $args{name}; $args{path} =~ s!::!/!g; } $args{file} ||= "$args{base}/$args{prefix}/$args{path}.pm"; $args{wrote} = 0; bless( \%args, $class ); } sub call { my ($self, $method) = @_; my $obj = $self->load($method) or return; splice(@_, 0, 2, $obj); goto &{$obj->can($method)}; } sub load { my ($self, $method) = @_; $self->load_extensions( "$self->{prefix}/$self->{path}", $self ) unless $self->{extensions}; foreach my $obj (@{$self->{extensions}}) { return $obj if $obj->can($method); } my $admin = $self->{admin} or die <<"END_DIE"; The '$method' method does not exist in the '$self->{prefix}' path! Please remove the '$self->{prefix}' directory and run $0 again to load it. END_DIE my $obj = $admin->load($method, 1); push @{$self->{extensions}}, $obj; $obj; } sub load_extensions { my ($self, $path, $top) = @_; my $should_reload = 0; unless ( grep { ! ref $_ and lc $_ eq lc $self->{prefix} } @INC ) { unshift @INC, $self->{prefix}; $should_reload = 1; } foreach my $rv ( $self->find_extensions($path) ) { my ($file, $pkg) = @{$rv}; next if $self->{pathnames}{$pkg}; local $@; my $new = eval { local $^W; require $file; $pkg->can('new') }; unless ( $new ) { warn $@ if $@; next; } $self->{pathnames}{$pkg} = $should_reload ? delete $INC{$file} : $INC{$file}; push @{$self->{extensions}}, &{$new}($pkg, _top => $top ); } $self->{extensions} ||= []; } sub find_extensions { my ($self, $path) = @_; my @found; File::Find::find( sub { my $file = $File::Find::name; return unless $file =~ m!^\Q$path\E/(.+)\.pm\Z!is; my $subpath = $1; return if lc($subpath) eq lc($self->{dispatch}); $file = "$self->{path}/$subpath.pm"; my $pkg = "$self->{name}::$subpath"; $pkg =~ s!/!::!g; # If we have a mixed-case package name, assume case has been preserved # correctly. 
Otherwise, root through the file to locate the case-preserved # version of the package name. if ( $subpath eq lc($subpath) || $subpath eq uc($subpath) ) { my $content = Module::Install::_read($subpath . '.pm'); my $in_pod = 0; foreach ( split /\n/, $content ) { $in_pod = 1 if /^=\w/; $in_pod = 0 if /^=cut/; next if ($in_pod || /^=cut/); # skip pod text next if /^\s*#/; # and comments if ( m/^\s*package\s+($pkg)\s*;/i ) { $pkg = $1; last; } } } push @found, [ $file, $pkg ]; }, $path ) if -d $path; @found; } ##################################################################### # Common Utility Functions sub _caller { my $depth = 0; my $call = caller($depth); while ( $call eq __PACKAGE__ ) { $depth++; $call = caller($depth); } return $call; } # Done in evals to avoid confusing Perl::MinimumVersion eval( $] >= 5.006 ? <<'END_NEW' : <<'END_OLD' ); die $@ if $@; sub _read { local *FH; open( FH, '<', $_[0] ) or die "open($_[0]): $!"; binmode FH; my $string = do { local $/; }; close FH or die "close($_[0]): $!"; return $string; } END_NEW sub _read { local *FH; open( FH, "< $_[0]" ) or die "open($_[0]): $!"; binmode FH; my $string = do { local $/; }; close FH or die "close($_[0]): $!"; return $string; } END_OLD sub _readperl { my $string = Module::Install::_read($_[0]); $string =~ s/(?:\015{1,2}\012|\015|\012)/\n/sg; $string =~ s/(\n)\n*__(?:DATA|END)__\b.*\z/$1/s; $string =~ s/\n\n=\w+.+?\n\n=cut\b.+?\n+/\n\n/sg; return $string; } sub _readpod { my $string = Module::Install::_read($_[0]); $string =~ s/(?:\015{1,2}\012|\015|\012)/\n/sg; return $string if $_[0] =~ /\.pod\z/; $string =~ s/(^|\n=cut\b.+?\n+)[^=\s].+?\n(\n=\w+|\z)/$1$2/sg; $string =~ s/\n*=pod\b[^\n]*\n+/\n\n/sg; $string =~ s/\n*=cut\b[^\n]*\n+/\n\n/sg; $string =~ s/^\n+//s; return $string; } # Done in evals to avoid confusing Perl::MinimumVersion eval( $] >= 5.006 ? <<'END_NEW' : <<'END_OLD' ); die $@ if $@; sub _write { local *FH; open( FH, '>', $_[0] ) or die "open($_[0]): $!"; binmode FH; foreach ( 1 .. 
$#_ ) { print FH $_[$_] or die "print($_[0]): $!"; } close FH or die "close($_[0]): $!"; } END_NEW sub _write { local *FH; open( FH, "> $_[0]" ) or die "open($_[0]): $!"; binmode FH; foreach ( 1 .. $#_ ) { print FH $_[$_] or die "print($_[0]): $!"; } close FH or die "close($_[0]): $!"; } END_OLD # _version is for processing module versions (eg, 1.03_05) not # Perl versions (eg, 5.8.1). sub _version { my $s = shift || 0; my $d =()= $s =~ /(\.)/g; if ( $d >= 2 ) { # Normalise multipart versions $s =~ s/(\.)(\d{1,3})/sprintf("$1%03d",$2)/eg; } $s =~ s/^(\d+)\.?//; my $l = $1 || 0; my @v = map { $_ . '0' x (3 - length $_) } $s =~ /(\d{1,3})\D?/g; $l = $l . '.' . join '', @v if @v; return $l + 0; } sub _cmp { _version($_[1]) <=> _version($_[2]); } # Cloned from Params::Util::_CLASS sub _CLASS { ( defined $_[0] and ! ref $_[0] and $_[0] =~ m/^[^\W\d]\w*(?:::\w+)*\z/s ) ? $_[0] : undef; } 1; # Copyright 2008 - 2012 Adam Kennedy. DBIx-Class-Schema-Loader-0.07045/inc/Module/Install/0000755000175000017500000000000012650450355021051 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/inc/Module/Install/Can.pm0000644000175000017500000000615712650450343022116 0ustar ilmariilmari#line 1 package Module::Install::Can; use strict; use Config (); use ExtUtils::MakeMaker (); use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.16'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } # check if we can load some module ### Upgrade this to not have to load the module if possible sub can_use { my ($self, $mod, $ver) = @_; $mod =~ s{::|\\}{/}g; $mod .= '.pm' unless $mod =~ /\.pm$/i; my $pkg = $mod; $pkg =~ s{/}{::}g; $pkg =~ s{\.pm$}{}i; local $@; eval { require $mod; $pkg->VERSION($ver || 0); 1 }; } # Check if we can run some command sub can_run { my ($self, $cmd) = @_; my $_cmd = $cmd; return $_cmd if (-x $_cmd or $_cmd = MM->maybe_command($_cmd)); for my $dir ((split /$Config::Config{path_sep}/, $ENV{PATH}), '.') { next if $dir eq ''; require File::Spec; 
my $abs = File::Spec->catfile($dir, $cmd); return $abs if (-x $abs or $abs = MM->maybe_command($abs)); } return; } # Can our C compiler environment build XS files sub can_xs { my $self = shift; # Ensure we have the CBuilder module $self->configure_requires( 'ExtUtils::CBuilder' => 0.27 ); # Do we have the configure_requires checker? local $@; eval "require ExtUtils::CBuilder;"; if ( $@ ) { # They don't obey configure_requires, so it is # someone old and delicate. Try to avoid hurting # them by falling back to an older simpler test. return $self->can_cc(); } # Do we have a working C compiler my $builder = ExtUtils::CBuilder->new( quiet => 1, ); unless ( $builder->have_compiler ) { # No working C compiler return 0; } # Write a C file representative of what XS becomes require File::Temp; my ( $FH, $tmpfile ) = File::Temp::tempfile( "compilexs-XXXXX", SUFFIX => '.c', ); binmode $FH; print $FH <<'END_C'; #include "EXTERN.h" #include "perl.h" #include "XSUB.h" int main(int argc, char **argv) { return 0; } int boot_sanexs() { return 1; } END_C close $FH; # Can the C compiler access the same headers XS does my @libs = (); my $object = undef; eval { local $^W = 0; $object = $builder->compile( source => $tmpfile, ); @libs = $builder->link( objects => $object, module_name => 'sanexs', ); }; my $result = $@ ? 0 : 1; # Clean up all the build files foreach ( $tmpfile, $object, @libs ) { next unless defined $_; 1 while unlink; } return $result; } # Can we locate a (the) C compiler sub can_cc { my $self = shift; my @chunks = split(/ /, $Config::Config{cc}) or return; # $Config{cc} may contain args; try to find out the program part while (@chunks) { return $self->can_run("@chunks") || (pop(@chunks), next); } return; } # Fix Cygwin bug on maybe_command(); if ( $^O eq 'cygwin' ) { require ExtUtils::MM_Cygwin; require ExtUtils::MM_Win32; if ( ! 
defined(&ExtUtils::MM_Cygwin::maybe_command) ) { *ExtUtils::MM_Cygwin::maybe_command = sub { my ($self, $file) = @_; if ($file =~ m{^/cygdrive/}i and ExtUtils::MM_Win32->can('maybe_command')) { ExtUtils::MM_Win32->maybe_command($file); } else { ExtUtils::MM_Unix->maybe_command($file); } } } } 1; __END__ #line 236 DBIx-Class-Schema-Loader-0.07045/inc/Module/Install/Metadata.pm0000644000175000017500000004330212650450342023125 0ustar ilmariilmari#line 1 package Module::Install::Metadata; use strict 'vars'; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.16'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } my @boolean_keys = qw{ sign }; my @scalar_keys = qw{ name module_name abstract version distribution_type tests installdirs }; my @tuple_keys = qw{ configure_requires build_requires requires recommends bundles resources }; my @resource_keys = qw{ homepage bugtracker repository }; my @array_keys = qw{ keywords author }; *authors = \&author; sub Meta { shift } sub Meta_BooleanKeys { @boolean_keys } sub Meta_ScalarKeys { @scalar_keys } sub Meta_TupleKeys { @tuple_keys } sub Meta_ResourceKeys { @resource_keys } sub Meta_ArrayKeys { @array_keys } foreach my $key ( @boolean_keys ) { *$key = sub { my $self = shift; if ( defined wantarray and not @_ ) { return $self->{values}->{$key}; } $self->{values}->{$key} = ( @_ ? 
$_[0] : 1 ); return $self; }; } foreach my $key ( @scalar_keys ) { *$key = sub { my $self = shift; return $self->{values}->{$key} if defined wantarray and !@_; $self->{values}->{$key} = shift; return $self; }; } foreach my $key ( @array_keys ) { *$key = sub { my $self = shift; return $self->{values}->{$key} if defined wantarray and !@_; $self->{values}->{$key} ||= []; push @{$self->{values}->{$key}}, @_; return $self; }; } foreach my $key ( @resource_keys ) { *$key = sub { my $self = shift; unless ( @_ ) { return () unless $self->{values}->{resources}; return map { $_->[1] } grep { $_->[0] eq $key } @{ $self->{values}->{resources} }; } return $self->{values}->{resources}->{$key} unless @_; my $uri = shift or die( "Did not provide a value to $key()" ); $self->resources( $key => $uri ); return 1; }; } foreach my $key ( grep { $_ ne "resources" } @tuple_keys) { *$key = sub { my $self = shift; return $self->{values}->{$key} unless @_; my @added; while ( @_ ) { my $module = shift or last; my $version = shift || 0; push @added, [ $module, $version ]; } push @{ $self->{values}->{$key} }, @added; return map {@$_} @added; }; } # Resource handling my %lc_resource = map { $_ => 1 } qw{ homepage license bugtracker repository }; sub resources { my $self = shift; while ( @_ ) { my $name = shift or last; my $value = shift or next; if ( $name eq lc $name and ! $lc_resource{$name} ) { die("Unsupported reserved lowercase resource '$name'"); } $self->{values}->{resources} ||= []; push @{ $self->{values}->{resources} }, [ $name, $value ]; } $self->{values}->{resources}; } # Aliases for build_requires that will have alternative # meanings in some future version of META.yml. 
sub test_requires { shift->build_requires(@_) } sub install_requires { shift->build_requires(@_) } # Aliases for installdirs options sub install_as_core { $_[0]->installdirs('perl') } sub install_as_cpan { $_[0]->installdirs('site') } sub install_as_site { $_[0]->installdirs('site') } sub install_as_vendor { $_[0]->installdirs('vendor') } sub dynamic_config { my $self = shift; my $value = @_ ? shift : 1; if ( $self->{values}->{dynamic_config} ) { # Once dynamic we never change to static, for safety return 0; } $self->{values}->{dynamic_config} = $value ? 1 : 0; return 1; } # Convenience command sub static_config { shift->dynamic_config(0); } sub perl_version { my $self = shift; return $self->{values}->{perl_version} unless @_; my $version = shift or die( "Did not provide a value to perl_version()" ); # Normalize the version $version = $self->_perl_version($version); # We don't support the really old versions unless ( $version >= 5.005 ) { die "Module::Install only supports 5.005 or newer (use ExtUtils::MakeMaker)\n"; } $self->{values}->{perl_version} = $version; } sub all_from { my ( $self, $file ) = @_; unless ( defined($file) ) { my $name = $self->name or die( "all_from called with no args without setting name() first" ); $file = join('/', 'lib', split(/-/, $name)) . '.pm'; $file =~ s{.*/}{} unless -e $file; unless ( -e $file ) { die("all_from cannot find $file from $name"); } } unless ( -f $file ) { die("The path '$file' does not exist, or is not a file"); } $self->{values}{all_from} = $file; # Some methods pull from POD instead of code. 
# If there is a matching .pod, use that instead my $pod = $file; $pod =~ s/\.pm$/.pod/i; $pod = $file unless -e $pod; # Pull the different values $self->name_from($file) unless $self->name; $self->version_from($file) unless $self->version; $self->perl_version_from($file) unless $self->perl_version; $self->author_from($pod) unless @{$self->author || []}; $self->license_from($pod) unless $self->license; $self->abstract_from($pod) unless $self->abstract; return 1; } sub provides { my $self = shift; my $provides = ( $self->{values}->{provides} ||= {} ); %$provides = (%$provides, @_) if @_; return $provides; } sub auto_provides { my $self = shift; return $self unless $self->is_admin; unless (-e 'MANIFEST') { warn "Cannot deduce auto_provides without a MANIFEST, skipping\n"; return $self; } # Avoid spurious warnings as we are not checking manifest here. local $SIG{__WARN__} = sub {1}; require ExtUtils::Manifest; local *ExtUtils::Manifest::manicheck = sub { return }; require Module::Build; my $build = Module::Build->new( dist_name => $self->name, dist_version => $self->version, license => $self->license, ); $self->provides( %{ $build->find_dist_packages || {} } ); } sub feature { my $self = shift; my $name = shift; my $features = ( $self->{values}->{features} ||= [] ); my $mods; if ( @_ == 1 and ref( $_[0] ) ) { # The user used ->feature like ->features by passing in the second # argument as a reference. Accomodate for that. $mods = $_[0]; } else { $mods = \@_; } my $count = 0; push @$features, ( $name => [ map { ref($_) ? ( ref($_) eq 'HASH' ) ? %$_ : @$_ : $_ } @$mods ] ); return @$features; } sub features { my $self = shift; while ( my ( $name, $mods ) = splice( @_, 0, 2 ) ) { $self->feature( $name, @$mods ); } return $self->{values}->{features} ? 
@{ $self->{values}->{features} } : (); } sub no_index { my $self = shift; my $type = shift; push @{ $self->{values}->{no_index}->{$type} }, @_ if $type; return $self->{values}->{no_index}; } sub read { my $self = shift; $self->include_deps( 'YAML::Tiny', 0 ); require YAML::Tiny; my $data = YAML::Tiny::LoadFile('META.yml'); # Call methods explicitly in case user has already set some values. while ( my ( $key, $value ) = each %$data ) { next unless $self->can($key); if ( ref $value eq 'HASH' ) { while ( my ( $module, $version ) = each %$value ) { $self->can($key)->($self, $module => $version ); } } else { $self->can($key)->($self, $value); } } return $self; } sub write { my $self = shift; return $self unless $self->is_admin; $self->admin->write_meta; return $self; } sub version_from { require ExtUtils::MM_Unix; my ( $self, $file ) = @_; $self->version( ExtUtils::MM_Unix->parse_version($file) ); # for version integrity check $self->makemaker_args( VERSION_FROM => $file ); } sub abstract_from { require ExtUtils::MM_Unix; my ( $self, $file ) = @_; $self->abstract( bless( { DISTNAME => $self->name }, 'ExtUtils::MM_Unix' )->parse_abstract($file) ); } # Add both distribution and module name sub name_from { my ($self, $file) = @_; if ( Module::Install::_read($file) =~ m/ ^ \s* package \s* ([\w:]+) [\s|;]* /ixms ) { my ($name, $module_name) = ($1, $1); $name =~ s{::}{-}g; $self->name($name); unless ( $self->module_name ) { $self->module_name($module_name); } } else { die("Cannot determine name from $file\n"); } } sub _extract_perl_version { if ( $_[0] =~ m/ ^\s* (?:use|require) \s* v? 
([\d_\.]+) \s* ; /ixms ) { my $perl_version = $1; $perl_version =~ s{_}{}g; return $perl_version; } else { return; } } sub perl_version_from { my $self = shift; my $perl_version=_extract_perl_version(Module::Install::_read($_[0])); if ($perl_version) { $self->perl_version($perl_version); } else { warn "Cannot determine perl version info from $_[0]\n"; return; } } sub author_from { my $self = shift; my $content = Module::Install::_read($_[0]); if ($content =~ m/ =head \d \s+ (?:authors?)\b \s* ([^\n]*) | =head \d \s+ (?:licen[cs]e|licensing|copyright|legal)\b \s* .*? copyright .*? \d\d\d[\d.]+ \s* (?:\bby\b)? \s* ([^\n]*) /ixms) { my $author = $1 || $2; # XXX: ugly but should work anyway... if (eval "require Pod::Escapes; 1") { # Pod::Escapes has a mapping table. # It's in core of perl >= 5.9.3, and should be installed # as one of the Pod::Simple's prereqs, which is a prereq # of Pod::Text 3.x (see also below). $author =~ s{ E<( (\d+) | ([A-Za-z]+) )> } { defined $2 ? chr($2) : defined $Pod::Escapes::Name2character_number{$1} ? chr($Pod::Escapes::Name2character_number{$1}) : do { warn "Unknown escape: E<$1>"; "E<$1>"; }; }gex; } elsif (eval "require Pod::Text; 1" && $Pod::Text::VERSION < 3) { # Pod::Text < 3.0 has yet another mapping table, # though the table name of 2.x and 1.x are different. # (1.x is in core of Perl < 5.6, 2.x is in core of # Perl < 5.9.3) my $mapping = ($Pod::Text::VERSION < 2) ? \%Pod::Text::HTML_Escapes : \%Pod::Text::ESCAPES; $author =~ s{ E<( (\d+) | ([A-Za-z]+) )> } { defined $2 ? chr($2) : defined $mapping->{$1} ? 
$mapping->{$1} : do { warn "Unknown escape: E<$1>"; "E<$1>"; }; }gex; } else { $author =~ s{E}{<}g; $author =~ s{E}{>}g; } $self->author($author); } else { warn "Cannot determine author info from $_[0]\n"; } } #Stolen from M::B my %license_urls = ( perl => 'http://dev.perl.org/licenses/', apache => 'http://apache.org/licenses/LICENSE-2.0', apache_1_1 => 'http://apache.org/licenses/LICENSE-1.1', artistic => 'http://opensource.org/licenses/artistic-license.php', artistic_2 => 'http://opensource.org/licenses/artistic-license-2.0.php', lgpl => 'http://opensource.org/licenses/lgpl-license.php', lgpl2 => 'http://opensource.org/licenses/lgpl-2.1.php', lgpl3 => 'http://opensource.org/licenses/lgpl-3.0.html', bsd => 'http://opensource.org/licenses/bsd-license.php', gpl => 'http://opensource.org/licenses/gpl-license.php', gpl2 => 'http://opensource.org/licenses/gpl-2.0.php', gpl3 => 'http://opensource.org/licenses/gpl-3.0.html', mit => 'http://opensource.org/licenses/mit-license.php', mozilla => 'http://opensource.org/licenses/mozilla1.1.php', open_source => undef, unrestricted => undef, restrictive => undef, unknown => undef, ); sub license { my $self = shift; return $self->{values}->{license} unless @_; my $license = shift or die( 'Did not provide a value to license()' ); $license = __extract_license($license) || lc $license; $self->{values}->{license} = $license; # Automatically fill in license URLs if ( $license_urls{$license} ) { $self->resources( license => $license_urls{$license} ); } return 1; } sub _extract_license { my $pod = shift; my $matched; return __extract_license( ($matched) = $pod =~ m/ (=head \d \s+ L(?i:ICEN[CS]E|ICENSING)\b.*?) (=head \d.*|=cut.*|)\z /xms ) || __extract_license( ($matched) = $pod =~ m/ (=head \d \s+ (?:C(?i:OPYRIGHTS?)|L(?i:EGAL))\b.*?) 
(=head \d.*|=cut.*|)\z /xms ); } sub __extract_license { my $license_text = shift or return; my @phrases = ( '(?:under )?the same (?:terms|license) as (?:perl|the perl (?:\d )?programming language)' => 'perl', 1, '(?:under )?the terms of (?:perl|the perl programming language) itself' => 'perl', 1, 'Artistic and GPL' => 'perl', 1, 'GNU general public license' => 'gpl', 1, 'GNU public license' => 'gpl', 1, 'GNU lesser general public license' => 'lgpl', 1, 'GNU lesser public license' => 'lgpl', 1, 'GNU library general public license' => 'lgpl', 1, 'GNU library public license' => 'lgpl', 1, 'GNU Free Documentation license' => 'unrestricted', 1, 'GNU Affero General Public License' => 'open_source', 1, '(?:Free)?BSD license' => 'bsd', 1, 'Artistic license 2\.0' => 'artistic_2', 1, 'Artistic license' => 'artistic', 1, 'Apache (?:Software )?license' => 'apache', 1, 'GPL' => 'gpl', 1, 'LGPL' => 'lgpl', 1, 'BSD' => 'bsd', 1, 'Artistic' => 'artistic', 1, 'MIT' => 'mit', 1, 'Mozilla Public License' => 'mozilla', 1, 'Q Public License' => 'open_source', 1, 'OpenSSL License' => 'unrestricted', 1, 'SSLeay License' => 'unrestricted', 1, 'zlib License' => 'open_source', 1, 'proprietary' => 'proprietary', 0, ); while ( my ($pattern, $license, $osi) = splice(@phrases, 0, 3) ) { $pattern =~ s#\s+#\\s+#gs; if ( $license_text =~ /\b$pattern\b/i ) { return $license; } } return ''; } sub license_from { my $self = shift; if (my $license=_extract_license(Module::Install::_read($_[0]))) { $self->license($license); } else { warn "Cannot determine license info from $_[0]\n"; return 'unknown'; } } sub _extract_bugtracker { my @links = $_[0] =~ m#L<( https?\Q://rt.cpan.org/\E[^>]+| https?\Q://github.com/\E[\w_]+/[\w_]+/issues| https?\Q://code.google.com/p/\E[\w_\-]+/issues/list )>#gx; my %links; @links{@links}=(); @links=keys %links; return @links; } sub bugtracker_from { my $self = shift; my $content = Module::Install::_read($_[0]); my @links = _extract_bugtracker($content); unless ( @links ) { 
warn "Cannot determine bugtracker info from $_[0]\n"; return 0; } if ( @links > 1 ) { warn "Found more than one bugtracker link in $_[0]\n"; return 0; } # Set the bugtracker bugtracker( $links[0] ); return 1; } sub requires_from { my $self = shift; my $content = Module::Install::_readperl($_[0]); my @requires = $content =~ m/^use\s+([^\W\d]\w*(?:::\w+)*)\s+(v?[\d\.]+)/mg; while ( @requires ) { my $module = shift @requires; my $version = shift @requires; $self->requires( $module => $version ); } } sub test_requires_from { my $self = shift; my $content = Module::Install::_readperl($_[0]); my @requires = $content =~ m/^use\s+([^\W\d]\w*(?:::\w+)*)\s+([\d\.]+)/mg; while ( @requires ) { my $module = shift @requires; my $version = shift @requires; $self->test_requires( $module => $version ); } } # Convert triple-part versions (eg, 5.6.1 or 5.8.9) to # numbers (eg, 5.006001 or 5.008009). # Also, convert double-part versions (eg, 5.8) sub _perl_version { my $v = $_[-1]; $v =~ s/^([1-9])\.([1-9]\d?\d?)$/sprintf("%d.%03d",$1,$2)/e; $v =~ s/^([1-9])\.([1-9]\d?\d?)\.(0|[1-9]\d?\d?)$/sprintf("%d.%03d%03d",$1,$2,$3 || 0)/e; $v =~ s/(\.\d\d\d)000$/$1/; $v =~ s/_.+$//; if ( ref($v) ) { # Numify $v = $v + 0; } return $v; } sub add_metadata { my $self = shift; my %hash = @_; for my $key (keys %hash) { warn "add_metadata: $key is not prefixed with 'x_'.\n" . 
"Use appopriate function to add non-private metadata.\n" unless $key =~ /^x_/; $self->{values}->{$key} = $hash{$key}; } } ###################################################################### # MYMETA Support sub WriteMyMeta { die "WriteMyMeta has been deprecated"; } sub write_mymeta_yaml { my $self = shift; # We need YAML::Tiny to write the MYMETA.yml file unless ( eval { require YAML::Tiny; 1; } ) { return 1; } # Generate the data my $meta = $self->_write_mymeta_data or return 1; # Save as the MYMETA.yml file print "Writing MYMETA.yml\n"; YAML::Tiny::DumpFile('MYMETA.yml', $meta); } sub write_mymeta_json { my $self = shift; # We need JSON to write the MYMETA.json file unless ( eval { require JSON; 1; } ) { return 1; } # Generate the data my $meta = $self->_write_mymeta_data or return 1; # Save as the MYMETA.yml file print "Writing MYMETA.json\n"; Module::Install::_write( 'MYMETA.json', JSON->new->pretty(1)->canonical->encode($meta), ); } sub _write_mymeta_data { my $self = shift; # If there's no existing META.yml there is nothing we can do return undef unless -f 'META.yml'; # We need Parse::CPAN::Meta to load the file unless ( eval { require Parse::CPAN::Meta; 1; } ) { return undef; } # Merge the perl version into the dependencies my $val = $self->Meta->{values}; my $perl = delete $val->{perl_version}; if ( $perl ) { $val->{requires} ||= []; my $requires = $val->{requires}; # Canonize to three-dot version after Perl 5.6 if ( $perl >= 5.006 ) { $perl =~ s{^(\d+)\.(\d\d\d)(\d*)}{join('.', $1, int($2||0), int($3||0))}e } unshift @$requires, [ perl => $perl ]; } # Load the advisory META.yml file my @yaml = Parse::CPAN::Meta::LoadFile('META.yml'); my $meta = $yaml[0]; # Overwrite the non-configure dependency hashes delete $meta->{requires}; delete $meta->{build_requires}; delete $meta->{recommends}; if ( exists $val->{requires} ) { $meta->{requires} = { map { @$_ } @{ $val->{requires} } }; } if ( exists $val->{build_requires} ) { $meta->{build_requires} = { map { @$_ 
} @{ $val->{build_requires} } }; } return $meta; } 1; DBIx-Class-Schema-Loader-0.07045/inc/Module/Install/Base.pm0000644000175000017500000000214712650450342022261 0ustar ilmariilmari#line 1 package Module::Install::Base; use strict 'vars'; use vars qw{$VERSION}; BEGIN { $VERSION = '1.16'; } # Suspend handler for "redefined" warnings BEGIN { my $w = $SIG{__WARN__}; $SIG{__WARN__} = sub { $w }; } #line 42 sub new { my $class = shift; unless ( defined &{"${class}::call"} ) { *{"${class}::call"} = sub { shift->_top->call(@_) }; } unless ( defined &{"${class}::load"} ) { *{"${class}::load"} = sub { shift->_top->load(@_) }; } bless { @_ }, $class; } #line 61 sub AUTOLOAD { local $@; my $func = eval { shift->_top->autoload } or return; goto &$func; } #line 75 sub _top { $_[0]->{_top}; } #line 90 sub admin { $_[0]->_top->{admin} or Module::Install::Base::FakeAdmin->new; } #line 106 sub is_admin { ! $_[0]->admin->isa('Module::Install::Base::FakeAdmin'); } sub DESTROY {} package Module::Install::Base::FakeAdmin; use vars qw{$VERSION}; BEGIN { $VERSION = $Module::Install::Base::VERSION; } my $fake; sub new { $fake ||= bless(\@_, $_[0]); } sub AUTOLOAD {} sub DESTROY {} # Restore warning handler BEGIN { $SIG{__WARN__} = $SIG{__WARN__}->(); } 1; #line 159 DBIx-Class-Schema-Loader-0.07045/inc/Module/Install/Include.pm0000644000175000017500000000101512650450342022763 0ustar ilmariilmari#line 1 package Module::Install::Include; use strict; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.16'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } sub include { shift()->admin->include(@_); } sub include_deps { shift()->admin->include_deps(@_); } sub auto_include { shift()->admin->auto_include(@_); } sub auto_include_deps { shift()->admin->auto_include_deps(@_); } sub auto_include_dependent_dists { shift()->admin->auto_include_dependent_dists(@_); } 1; 
DBIx-Class-Schema-Loader-0.07045/inc/Module/Install/Makefile.pm0000644000175000017500000002743712650450342023135 0ustar ilmariilmari#line 1 package Module::Install::Makefile; use strict 'vars'; use ExtUtils::MakeMaker (); use Module::Install::Base (); use Fcntl qw/:flock :seek/; use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.16'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } sub Makefile { $_[0] } my %seen = (); sub prompt { shift; # Infinite loop protection my @c = caller(); if ( ++$seen{"$c[1]|$c[2]|$_[0]"} > 3 ) { die "Caught an potential prompt infinite loop ($c[1]|$c[2]|$_[0])"; } # In automated testing or non-interactive session, always use defaults if ( ($ENV{AUTOMATED_TESTING} or -! -t STDIN) and ! $ENV{PERL_MM_USE_DEFAULT} ) { local $ENV{PERL_MM_USE_DEFAULT} = 1; goto &ExtUtils::MakeMaker::prompt; } else { goto &ExtUtils::MakeMaker::prompt; } } # Store a cleaned up version of the MakeMaker version, # since we need to behave differently in a variety of # ways based on the MM version. my $makemaker = eval $ExtUtils::MakeMaker::VERSION; # If we are passed a param, do a "newer than" comparison. # Otherwise, just return the MakeMaker version. sub makemaker { ( @_ < 2 or $makemaker >= eval($_[1]) ) ? $makemaker : 0 } # Ripped from ExtUtils::MakeMaker 6.56, and slightly modified # as we only need to know here whether the attribute is an array # or a hash or something else (which may or may not be appendable). 
my %makemaker_argtype = ( C => 'ARRAY', CONFIG => 'ARRAY', # CONFIGURE => 'CODE', # ignore DIR => 'ARRAY', DL_FUNCS => 'HASH', DL_VARS => 'ARRAY', EXCLUDE_EXT => 'ARRAY', EXE_FILES => 'ARRAY', FUNCLIST => 'ARRAY', H => 'ARRAY', IMPORTS => 'HASH', INCLUDE_EXT => 'ARRAY', LIBS => 'ARRAY', # ignore '' MAN1PODS => 'HASH', MAN3PODS => 'HASH', META_ADD => 'HASH', META_MERGE => 'HASH', PL_FILES => 'HASH', PM => 'HASH', PMLIBDIRS => 'ARRAY', PMLIBPARENTDIRS => 'ARRAY', PREREQ_PM => 'HASH', CONFIGURE_REQUIRES => 'HASH', SKIP => 'ARRAY', TYPEMAPS => 'ARRAY', XS => 'HASH', # VERSION => ['version',''], # ignore # _KEEP_AFTER_FLUSH => '', clean => 'HASH', depend => 'HASH', dist => 'HASH', dynamic_lib=> 'HASH', linkext => 'HASH', macro => 'HASH', postamble => 'HASH', realclean => 'HASH', test => 'HASH', tool_autosplit => 'HASH', # special cases where you can use makemaker_append CCFLAGS => 'APPENDABLE', DEFINE => 'APPENDABLE', INC => 'APPENDABLE', LDDLFLAGS => 'APPENDABLE', LDFROM => 'APPENDABLE', ); sub makemaker_args { my ($self, %new_args) = @_; my $args = ( $self->{makemaker_args} ||= {} ); foreach my $key (keys %new_args) { if ($makemaker_argtype{$key}) { if ($makemaker_argtype{$key} eq 'ARRAY') { $args->{$key} = [] unless defined $args->{$key}; unless (ref $args->{$key} eq 'ARRAY') { $args->{$key} = [$args->{$key}] } push @{$args->{$key}}, ref $new_args{$key} eq 'ARRAY' ? 
@{$new_args{$key}} : $new_args{$key}; } elsif ($makemaker_argtype{$key} eq 'HASH') { $args->{$key} = {} unless defined $args->{$key}; foreach my $skey (keys %{ $new_args{$key} }) { $args->{$key}{$skey} = $new_args{$key}{$skey}; } } elsif ($makemaker_argtype{$key} eq 'APPENDABLE') { $self->makemaker_append($key => $new_args{$key}); } } else { if (defined $args->{$key}) { warn qq{MakeMaker attribute "$key" is overriden; use "makemaker_append" to append values\n}; } $args->{$key} = $new_args{$key}; } } return $args; } # For mm args that take multiple space-separated args, # append an argument to the current list. sub makemaker_append { my $self = shift; my $name = shift; my $args = $self->makemaker_args; $args->{$name} = defined $args->{$name} ? join( ' ', $args->{$name}, @_ ) : join( ' ', @_ ); } sub build_subdirs { my $self = shift; my $subdirs = $self->makemaker_args->{DIR} ||= []; for my $subdir (@_) { push @$subdirs, $subdir; } } sub clean_files { my $self = shift; my $clean = $self->makemaker_args->{clean} ||= {}; %$clean = ( %$clean, FILES => join ' ', grep { length $_ } ($clean->{FILES} || (), @_), ); } sub realclean_files { my $self = shift; my $realclean = $self->makemaker_args->{realclean} ||= {}; %$realclean = ( %$realclean, FILES => join ' ', grep { length $_ } ($realclean->{FILES} || (), @_), ); } sub libs { my $self = shift; my $libs = ref $_[0] ? 
shift : [ shift ]; $self->makemaker_args( LIBS => $libs ); } sub inc { my $self = shift; $self->makemaker_args( INC => shift ); } sub _wanted_t { } sub tests_recursive { my $self = shift; my $dir = shift || 't'; unless ( -d $dir ) { die "tests_recursive dir '$dir' does not exist"; } my %tests = map { $_ => 1 } split / /, ($self->tests || ''); require File::Find; File::Find::find( sub { /\.t$/ and -f $_ and $tests{"$File::Find::dir/*.t"} = 1 }, $dir ); $self->tests( join ' ', sort keys %tests ); } sub write { my $self = shift; die "&Makefile->write() takes no arguments\n" if @_; # Check the current Perl version my $perl_version = $self->perl_version; if ( $perl_version ) { eval "use $perl_version; 1" or die "ERROR: perl: Version $] is installed, " . "but we need version >= $perl_version"; } # Make sure we have a new enough MakeMaker require ExtUtils::MakeMaker; if ( $perl_version and $self->_cmp($perl_version, '5.006') >= 0 ) { # This previous attempted to inherit the version of # ExtUtils::MakeMaker in use by the module author, but this # was found to be untenable as some authors build releases # using future dev versions of EU:MM that nobody else has. # Instead, #toolchain suggests we use 6.59 which is the most # stable version on CPAN at time of writing and is, to quote # ribasushi, "not terminally fucked, > and tested enough". # TODO: We will now need to maintain this over time to push # the version up as new versions are released. $self->build_requires( 'ExtUtils::MakeMaker' => 6.59 ); $self->configure_requires( 'ExtUtils::MakeMaker' => 6.59 ); } else { # Allow legacy-compatibility with 5.005 by depending on the # most recent EU:MM that supported 5.005. 
$self->build_requires( 'ExtUtils::MakeMaker' => 6.36 ); $self->configure_requires( 'ExtUtils::MakeMaker' => 6.36 ); } # Generate the MakeMaker params my $args = $self->makemaker_args; $args->{DISTNAME} = $self->name; $args->{NAME} = $self->module_name || $self->name; $args->{NAME} =~ s/-/::/g; $args->{VERSION} = $self->version or die <<'EOT'; ERROR: Can't determine distribution version. Please specify it explicitly via 'version' in Makefile.PL, or set a valid $VERSION in a module, and provide its file path via 'version_from' (or 'all_from' if you prefer) in Makefile.PL. EOT if ( $self->tests ) { my @tests = split ' ', $self->tests; my %seen; $args->{test} = { TESTS => (join ' ', grep {!$seen{$_}++} @tests), }; } elsif ( $Module::Install::ExtraTests::use_extratests ) { # Module::Install::ExtraTests doesn't set $self->tests and does its own tests via harness. # So, just ignore our xt tests here. } elsif ( -d 'xt' and ($Module::Install::AUTHOR or $ENV{RELEASE_TESTING}) ) { $args->{test} = { TESTS => join( ' ', map { "$_/*.t" } grep { -d $_ } qw{ t xt } ), }; } if ( $] >= 5.005 ) { $args->{ABSTRACT} = $self->abstract; $args->{AUTHOR} = join ', ', @{$self->author || []}; } if ( $self->makemaker(6.10) ) { $args->{NO_META} = 1; #$args->{NO_MYMETA} = 1; } if ( $self->makemaker(6.17) and $self->sign ) { $args->{SIGN} = 1; } unless ( $self->is_admin ) { delete $args->{SIGN}; } if ( $self->makemaker(6.31) and $self->license ) { $args->{LICENSE} = $self->license; } my $prereq = ($args->{PREREQ_PM} ||= {}); %$prereq = ( %$prereq, map { @$_ } # flatten [module => version] map { @$_ } grep $_, ($self->requires) ); # Remove any reference to perl, PREREQ_PM doesn't support it delete $args->{PREREQ_PM}->{perl}; # Merge both kinds of requires into BUILD_REQUIRES my $build_prereq = ($args->{BUILD_REQUIRES} ||= {}); %$build_prereq = ( %$build_prereq, map { @$_ } # flatten [module => version] map { @$_ } grep $_, ($self->configure_requires, $self->build_requires) ); # Remove any 
reference to perl, BUILD_REQUIRES doesn't support it delete $args->{BUILD_REQUIRES}->{perl}; # Delete bundled dists from prereq_pm, add it to Makefile DIR my $subdirs = ($args->{DIR} || []); if ($self->bundles) { my %processed; foreach my $bundle (@{ $self->bundles }) { my ($mod_name, $dist_dir) = @$bundle; delete $prereq->{$mod_name}; $dist_dir = File::Basename::basename($dist_dir); # dir for building this module if (not exists $processed{$dist_dir}) { if (-d $dist_dir) { # List as sub-directory to be processed by make push @$subdirs, $dist_dir; } # Else do nothing: the module is already present on the system $processed{$dist_dir} = undef; } } } unless ( $self->makemaker('6.55_03') ) { %$prereq = (%$prereq,%$build_prereq); delete $args->{BUILD_REQUIRES}; } if ( my $perl_version = $self->perl_version ) { eval "use $perl_version; 1" or die "ERROR: perl: Version $] is installed, " . "but we need version >= $perl_version"; if ( $self->makemaker(6.48) ) { $args->{MIN_PERL_VERSION} = $perl_version; } } if ($self->installdirs) { warn qq{old INSTALLDIRS (probably set by makemaker_args) is overriden by installdirs\n} if $args->{INSTALLDIRS}; $args->{INSTALLDIRS} = $self->installdirs; } my %args = map { ( $_ => $args->{$_} ) } grep {defined($args->{$_} ) } keys %$args; my $user_preop = delete $args{dist}->{PREOP}; if ( my $preop = $self->admin->preop($user_preop) ) { foreach my $key ( keys %$preop ) { $args{dist}->{$key} = $preop->{$key}; } } my $mm = ExtUtils::MakeMaker::WriteMakefile(%args); $self->fix_up_makefile($mm->{FIRST_MAKEFILE} || 'Makefile'); } sub fix_up_makefile { my $self = shift; my $makefile_name = shift; my $top_class = ref($self->_top) || ''; my $top_version = $self->_top->VERSION || ''; my $preamble = $self->preamble ? "# Preamble by $top_class $top_version\n" . $self->preamble : ''; my $postamble = "# Postamble by $top_class $top_version\n" . 
($self->postamble || ''); local *MAKEFILE; open MAKEFILE, "+< $makefile_name" or die "fix_up_makefile: Couldn't open $makefile_name: $!"; eval { flock MAKEFILE, LOCK_EX }; my $makefile = do { local $/; }; $makefile =~ s/\b(test_harness\(\$\(TEST_VERBOSE\), )/$1'inc', /; $makefile =~ s/( -I\$\(INST_ARCHLIB\))/ -Iinc$1/g; $makefile =~ s/( "-I\$\(INST_LIB\)")/ "-Iinc"$1/g; $makefile =~ s/^(FULLPERL = .*)/$1 "-Iinc"/m; $makefile =~ s/^(PERL = .*)/$1 "-Iinc"/m; # Module::Install will never be used to build the Core Perl # Sometimes PERL_LIB and PERL_ARCHLIB get written anyway, which breaks # PREFIX/PERL5LIB, and thus, install_share. Blank them if they exist $makefile =~ s/^PERL_LIB = .+/PERL_LIB =/m; #$makefile =~ s/^PERL_ARCHLIB = .+/PERL_ARCHLIB =/m; # Perl 5.005 mentions PERL_LIB explicitly, so we have to remove that as well. $makefile =~ s/(\"?)-I\$\(PERL_LIB\)\1//g; # XXX - This is currently unused; not sure if it breaks other MM-users # $makefile =~ s/^pm_to_blib\s+:\s+/pm_to_blib :: /mg; seek MAKEFILE, 0, SEEK_SET; truncate MAKEFILE, 0; print MAKEFILE "$preamble$makefile$postamble" or die $!; close MAKEFILE or die $!; 1; } sub preamble { my ($self, $text) = @_; $self->{preamble} = $text . 
$self->{preamble} if defined $text; $self->{preamble}; } sub postamble { my ($self, $text) = @_; $self->{postamble} ||= $self->admin->postamble; $self->{postamble} .= $text if defined $text; $self->{postamble} } 1; __END__ #line 544 DBIx-Class-Schema-Loader-0.07045/inc/Module/Install/WriteAll.pm0000644000175000017500000000237612650450343023137 0ustar ilmariilmari#line 1 package Module::Install::WriteAll; use strict; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.16'; @ISA = qw{Module::Install::Base}; $ISCORE = 1; } sub WriteAll { my $self = shift; my %args = ( meta => 1, sign => 0, inline => 0, check_nmake => 1, @_, ); $self->sign(1) if $args{sign}; $self->admin->WriteAll(%args) if $self->is_admin; $self->check_nmake if $args{check_nmake}; unless ( $self->makemaker_args->{PL_FILES} ) { # XXX: This still may be a bit over-defensive... unless ($self->makemaker(6.25)) { $self->makemaker_args( PL_FILES => {} ) if -f 'Build.PL'; } } # Until ExtUtils::MakeMaker support MYMETA.yml, make sure # we clean it up properly ourself. $self->realclean_files('MYMETA.yml'); if ( $args{inline} ) { $self->Inline->write; } else { $self->Makefile->write; } # The Makefile write process adds a couple of dependencies, # so write the META.yml files after the Makefile. 
if ( $args{meta} ) { $self->Meta->write; } # Experimental support for MYMETA if ( $ENV{X_MYMETA} ) { if ( $ENV{X_MYMETA} eq 'JSON' ) { $self->Meta->write_mymeta_json; } else { $self->Meta->write_mymeta_yaml; } } return 1; } 1; DBIx-Class-Schema-Loader-0.07045/inc/Module/Install/Fetch.pm0000644000175000017500000000462712650450343022446 0ustar ilmariilmari#line 1 package Module::Install::Fetch; use strict; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.16'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } sub get_file { my ($self, %args) = @_; my ($scheme, $host, $path, $file) = $args{url} =~ m|^(\w+)://([^/]+)(.+)/(.+)| or return; if ( $scheme eq 'http' and ! eval { require LWP::Simple; 1 } ) { $args{url} = $args{ftp_url} or (warn("LWP support unavailable!\n"), return); ($scheme, $host, $path, $file) = $args{url} =~ m|^(\w+)://([^/]+)(.+)/(.+)| or return; } $|++; print "Fetching '$file' from $host... "; unless (eval { require Socket; Socket::inet_aton($host) }) { warn "'$host' resolve failed!\n"; return; } return unless $scheme eq 'ftp' or $scheme eq 'http'; require Cwd; my $dir = Cwd::getcwd(); chdir $args{local_dir} or return if exists $args{local_dir}; if (eval { require LWP::Simple; 1 }) { LWP::Simple::mirror($args{url}, $file); } elsif (eval { require Net::FTP; 1 }) { eval { # use Net::FTP to get past firewall my $ftp = Net::FTP->new($host, Passive => 1, Timeout => 600); $ftp->login("anonymous", 'anonymous@example.com'); $ftp->cwd($path); $ftp->binary; $ftp->get($file) or (warn("$!\n"), return); $ftp->quit; } } elsif (my $ftp = $self->can_run('ftp')) { eval { # no Net::FTP, fallback to ftp.exe require FileHandle; my $fh = FileHandle->new; local $SIG{CHLD} = 'IGNORE'; unless ($fh->open("|$ftp -n")) { warn "Couldn't open ftp: $!\n"; chdir $dir; return; } my @dialog = split(/\n/, <<"END_FTP"); open $host user anonymous anonymous\@example.com cd $path binary get $file $file quit END_FTP foreach (@dialog) { $fh->print("$_\n") } 
$fh->close; } } else { warn "No working 'ftp' program available!\n"; chdir $dir; return; } unless (-f $file) { warn "Fetching failed: $@\n"; chdir $dir; return; } return if exists $args{size} and -s $file != $args{size}; system($args{run}) if exists $args{run}; unlink($file) if $args{remove}; print(((!exists $args{check_for} or -e $args{check_for}) ? "done!" : "failed! ($!)"), "\n"); chdir $dir; return !$?; } 1; DBIx-Class-Schema-Loader-0.07045/inc/Module/Install/Win32.pm0000644000175000017500000000340312650450343022306 0ustar ilmariilmari#line 1 package Module::Install::Win32; use strict; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.16'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } # determine if the user needs nmake, and download it if needed sub check_nmake { my $self = shift; $self->load('can_run'); $self->load('get_file'); require Config; return unless ( $^O eq 'MSWin32' and $Config::Config{make} and $Config::Config{make} =~ /^nmake\b/i and ! $self->can_run('nmake') ); print "The required 'nmake' executable not found, fetching it...\n"; require File::Basename; my $rv = $self->get_file( url => 'http://download.microsoft.com/download/vc15/Patch/1.52/W95/EN-US/Nmake15.exe', ftp_url => 'ftp://ftp.microsoft.com/Softlib/MSLFILES/Nmake15.exe', local_dir => File::Basename::dirname($^X), size => 51928, run => 'Nmake15.exe /o > nul', check_for => 'Nmake.exe', remove => 1, ); die <<'END_MESSAGE' unless $rv; ------------------------------------------------------------------------------- Since you are using Microsoft Windows, you will need the 'nmake' utility before installation. It's available at: http://download.microsoft.com/download/vc15/Patch/1.52/W95/EN-US/Nmake15.exe or ftp://ftp.microsoft.com/Softlib/MSLFILES/Nmake15.exe Please download the file manually, save it to a directory in %PATH% (e.g. 
C:\WINDOWS\COMMAND\), then launch the MS-DOS command line shell, "cd" to that directory, and run "Nmake15.exe" from there; that will create the 'nmake.exe' file needed by this module. You may then resume the installation process described in README. ------------------------------------------------------------------------------- END_MESSAGE } 1; DBIx-Class-Schema-Loader-0.07045/inc/Module/Install/Scripts.pm0000644000175000017500000000101112650450342023023 0ustar ilmariilmari#line 1 package Module::Install::Scripts; use strict 'vars'; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.16'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } sub install_script { my $self = shift; my $args = $self->makemaker_args; my $exe = $args->{EXE_FILES} ||= []; foreach ( @_ ) { if ( -f $_ ) { push @$exe, $_; } elsif ( -d 'script' and -f "script/$_" ) { push @$exe, "script/$_"; } else { die("Cannot find script '$_'"); } } } 1; DBIx-Class-Schema-Loader-0.07045/inc/Module/Install/AutoInstall.pm0000644000175000017500000000416212650450342023645 0ustar ilmariilmari#line 1 package Module::Install::AutoInstall; use strict; use Module::Install::Base (); use vars qw{$VERSION @ISA $ISCORE}; BEGIN { $VERSION = '1.16'; @ISA = 'Module::Install::Base'; $ISCORE = 1; } sub AutoInstall { $_[0] } sub run { my $self = shift; $self->auto_install_now(@_); } sub write { my $self = shift; $self->auto_install(@_); } sub auto_install { my $self = shift; return if $self->{done}++; # Flatten array of arrays into a single array my @core = map @$_, map @$_, grep ref, $self->build_requires, $self->requires; my @config = @_; # We'll need Module::AutoInstall $self->include('Module::AutoInstall'); require Module::AutoInstall; my @features_require = Module::AutoInstall->import( (@config ? (-config => \@config) : ()), (@core ? 
(-core => \@core) : ()), $self->features, ); my %seen; my @requires = map @$_, map @$_, grep ref, $self->requires; while (my ($mod, $ver) = splice(@requires, 0, 2)) { $seen{$mod}{$ver}++; } my @build_requires = map @$_, map @$_, grep ref, $self->build_requires; while (my ($mod, $ver) = splice(@build_requires, 0, 2)) { $seen{$mod}{$ver}++; } my @configure_requires = map @$_, map @$_, grep ref, $self->configure_requires; while (my ($mod, $ver) = splice(@configure_requires, 0, 2)) { $seen{$mod}{$ver}++; } my @deduped; while (my ($mod, $ver) = splice(@features_require, 0, 2)) { push @deduped, $mod => $ver unless $seen{$mod}{$ver}++; } $self->requires(@deduped); $self->makemaker_args( Module::AutoInstall::_make_args() ); my $class = ref($self); $self->postamble( "# --- $class section:\n" . Module::AutoInstall::postamble() ); } sub installdeps_target { my ($self, @args) = @_; $self->include('Module::AutoInstall'); require Module::AutoInstall; Module::AutoInstall::_installdeps_target(1); $self->auto_install(@args); } sub auto_install_now { my $self = shift; $self->auto_install(@_); Module::AutoInstall::do_install(); } 1; DBIx-Class-Schema-Loader-0.07045/inc/Module/AutoInstall.pm0000644000175000017500000006231112650450342022237 0ustar ilmariilmari#line 1 package Module::AutoInstall; use strict; use Cwd (); use File::Spec (); use ExtUtils::MakeMaker (); use vars qw{$VERSION}; BEGIN { $VERSION = '1.16'; } # special map on pre-defined feature sets my %FeatureMap = ( '' => 'Core Features', # XXX: deprecated '-core' => 'Core Features', ); # various lexical flags my ( @Missing, @Existing, %DisabledTests, $UnderCPAN, $InstallDepsTarget, $HasCPANPLUS ); my ( $Config, $CheckOnly, $SkipInstall, $AcceptDefault, $TestOnly, $AllDeps, $UpgradeDeps ); my ( $PostambleActions, $PostambleActionsNoTest, $PostambleActionsUpgradeDeps, $PostambleActionsUpgradeDepsNoTest, $PostambleActionsListDeps, $PostambleActionsListAllDeps, $PostambleUsed, $NoTest); # See if it's a testing or non-interactive 
session _accept_default( $ENV{AUTOMATED_TESTING} or ! -t STDIN ); _init(); sub _accept_default { $AcceptDefault = shift; } sub _installdeps_target { $InstallDepsTarget = shift; } sub missing_modules { return @Missing; } sub do_install { __PACKAGE__->install( [ $Config ? ( UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} ) : () ], @Missing, ); } # initialize various flags, and/or perform install sub _init { foreach my $arg ( @ARGV, split( /[\s\t]+/, $ENV{PERL_AUTOINSTALL} || $ENV{PERL_EXTUTILS_AUTOINSTALL} || '' ) ) { if ( $arg =~ /^--config=(.*)$/ ) { $Config = [ split( ',', $1 ) ]; } elsif ( $arg =~ /^--installdeps=(.*)$/ ) { __PACKAGE__->install( $Config, @Missing = split( /,/, $1 ) ); exit 0; } elsif ( $arg =~ /^--upgradedeps=(.*)$/ ) { $UpgradeDeps = 1; __PACKAGE__->install( $Config, @Missing = split( /,/, $1 ) ); exit 0; } elsif ( $arg =~ /^--default(?:deps)?$/ ) { $AcceptDefault = 1; } elsif ( $arg =~ /^--check(?:deps)?$/ ) { $CheckOnly = 1; } elsif ( $arg =~ /^--skip(?:deps)?$/ ) { $SkipInstall = 1; } elsif ( $arg =~ /^--test(?:only)?$/ ) { $TestOnly = 1; } elsif ( $arg =~ /^--all(?:deps)?$/ ) { $AllDeps = 1; } } } # overrides MakeMaker's prompt() to automatically accept the default choice sub _prompt { goto &ExtUtils::MakeMaker::prompt unless $AcceptDefault; my ( $prompt, $default ) = @_; my $y = ( $default =~ /^[Yy]/ ); print $prompt, ' [', ( $y ? 'Y' : 'y' ), '/', ( $y ? 'n' : 'N' ), '] '; print "$default\n"; return $default; } # the workhorse sub import { my $class = shift; my @args = @_ or return; my $core_all; print "*** $class version " . $class->VERSION . "\n"; print "*** Checking for Perl dependencies...\n"; my $cwd = Cwd::getcwd(); $Config = []; my $maxlen = length( ( sort { length($b) <=> length($a) } grep { /^[^\-]/ } map { ref($_) ? ( ( ref($_) eq 'HASH' ) ? 
keys(%$_) : @{$_} ) : '' } map { +{@args}->{$_} } grep { /^[^\-]/ or /^-core$/i } keys %{ +{@args} } )[0] ); # We want to know if we're under CPAN early to avoid prompting, but # if we aren't going to try and install anything anyway then skip the # check entirely since we don't want to have to load (and configure) # an old CPAN just for a cosmetic message $UnderCPAN = _check_lock(1) unless $SkipInstall || $InstallDepsTarget; while ( my ( $feature, $modules ) = splice( @args, 0, 2 ) ) { my ( @required, @tests, @skiptests ); my $default = 1; my $conflict = 0; if ( $feature =~ m/^-(\w+)$/ ) { my $option = lc($1); # check for a newer version of myself _update_to( $modules, @_ ) and return if $option eq 'version'; # sets CPAN configuration options $Config = $modules if $option eq 'config'; # promote every features to core status $core_all = ( $modules =~ /^all$/i ) and next if $option eq 'core'; next unless $option eq 'core'; } print "[" . ( $FeatureMap{ lc($feature) } || $feature ) . "]\n"; $modules = [ %{$modules} ] if UNIVERSAL::isa( $modules, 'HASH' ); unshift @$modules, -default => &{ shift(@$modules) } if ( ref( $modules->[0] ) eq 'CODE' ); # XXX: bugward compatibility while ( my ( $mod, $arg ) = splice( @$modules, 0, 2 ) ) { if ( $mod =~ m/^-(\w+)$/ ) { my $option = lc($1); $default = $arg if ( $option eq 'default' ); $conflict = $arg if ( $option eq 'conflict' ); @tests = @{$arg} if ( $option eq 'tests' ); @skiptests = @{$arg} if ( $option eq 'skiptests' ); next; } printf( "- %-${maxlen}s ...", $mod ); if ( $arg and $arg =~ /^\D/ ) { unshift @$modules, $arg; $arg = 0; } # XXX: check for conflicts and uninstalls(!) them. my $cur = _version_of($mod); if (_version_cmp ($cur, $arg) >= 0) { print "loaded. ($cur" . ( $arg ? " >= $arg" : '' ) . ")\n"; push @Existing, $mod => $arg; $DisabledTests{$_} = 1 for map { glob($_) } @skiptests; } else { if (not defined $cur) # indeed missing { print "missing." . ( $arg ? " (would need $arg)" : '' ) . 
"\n"; } else { # no need to check $arg as _version_cmp ($cur, undef) would satisfy >= above print "too old. ($cur < $arg)\n"; } push @required, $mod => $arg; } } next unless @required; my $mandatory = ( $feature eq '-core' or $core_all ); if ( !$SkipInstall and ( $CheckOnly or ($mandatory and $UnderCPAN) or $AllDeps or $InstallDepsTarget or _prompt( qq{==> Auto-install the } . ( @required / 2 ) . ( $mandatory ? ' mandatory' : ' optional' ) . qq{ module(s) from CPAN?}, $default ? 'y' : 'n', ) =~ /^[Yy]/ ) ) { push( @Missing, @required ); $DisabledTests{$_} = 1 for map { glob($_) } @skiptests; } elsif ( !$SkipInstall and $default and $mandatory and _prompt( qq{==> The module(s) are mandatory! Really skip?}, 'n', ) =~ /^[Nn]/ ) { push( @Missing, @required ); $DisabledTests{$_} = 1 for map { glob($_) } @skiptests; } else { $DisabledTests{$_} = 1 for map { glob($_) } @tests; } } if ( @Missing and not( $CheckOnly or $UnderCPAN) ) { require Config; my $make = $Config::Config{make}; if ($InstallDepsTarget) { print "*** To install dependencies type '$make installdeps' or '$make installdeps_notest'.\n"; } else { print "*** Dependencies will be installed the next time you type '$make'.\n"; } # make an educated guess of whether we'll need root permission. print " (You may need to do that as the 'root' user.)\n" if eval '$>'; } print "*** $class configuration finished.\n"; chdir $cwd; # import to main:: no strict 'refs'; *{'main::WriteMakefile'} = \&Write if caller(0) eq 'main'; return (@Existing, @Missing); } sub _running_under { my $thing = shift; print <<"END_MESSAGE"; *** Since we're running under ${thing}, I'll just let it take care of the dependency's installation later. 
END_MESSAGE
    return 1;
}

# Check to see if we are currently running under CPAN.pm and/or CPANPLUS;
# if we are, then we simply let it take care of our dependencies.
sub _check_lock {
    return unless @Missing or @_;

    # cpanminus advertises itself through the environment; no lock file needed
    if ($ENV{PERL5_CPANM_IS_RUNNING}) {
        return _running_under('cpanminus');
    }

    my $cpan_env = $ENV{PERL5_CPAN_IS_RUNNING};

    if ($ENV{PERL5_CPANPLUS_IS_RUNNING}) {
        return _running_under($cpan_env ? 'CPAN' : 'CPANPLUS');
    }

    require CPAN;

    if ($CPAN::VERSION > '1.89') {
        if ($cpan_env) {
            return _running_under('CPAN');
        }
        return;    # CPAN.pm new enough, don't need to check further
    }

    # last ditch attempt, this -will- configure CPAN, very sorry
    _load_cpan(1);    # force initialize even though it's already loaded

    # Find the CPAN lock-file
    my $lock = MM->catfile( $CPAN::Config->{cpan_home}, ".lock" );
    return unless -f $lock;

    # Check the lock
    local *LOCK;
    return unless open(LOCK, $lock);

    # FIX(review): the readline of the lock file had been stripped from this
    # copy (": == getppid()" is a syntax error).  Restored <LOCK> per upstream
    # Module::AutoInstall, so the PID recorded in CPAN's lock file is actually
    # compared against our parent process.
    if (
        ( $^O eq 'MSWin32' ? _under_cpan() : <LOCK> == getppid() )
        and ( $CPAN::Config->{prerequisites_policy} || '' ) ne 'ignore'
      )
    {
        print <<'END_MESSAGE';

*** Since we're running under CPAN, I'll just let it take care of the
    dependency's installation later.
END_MESSAGE
        return 1;
    }

    close LOCK;
    return;
}

# Install the modules passed as (module => version) pairs, skipping any that
# are already present at a sufficient version (unless --upgradedeps was
# given).  Returns the list of modules that ended up installed.
sub install {
    my $class = shift;

    my $i;    # used below to strip leading '-' from config keys
    my @config = ( map { s/^-// if ++$i; $_ } @{ +shift } );

    my ( @modules, @installed, @modules_to_upgrade );
    while ( my ( $pkg, $ver ) = splice( @_, 0, 2 ) ) {

        # grep out those already installed
        if ( _version_cmp( _version_of($pkg), $ver ) >= 0 ) {
            push @installed, $pkg;

            if ($UpgradeDeps) {
                push @modules_to_upgrade, $pkg, $ver;
            }
        }
        else {
            push @modules, $pkg, $ver;
        }
    }

    if ($UpgradeDeps) {
        push @modules, @modules_to_upgrade;
        @installed          = ();
        @modules_to_upgrade = ();
    }

    return @installed unless @modules;     # nothing to do
    return @installed if _check_lock();    # defer to the CPAN shell

    print "*** Installing dependencies...\n";

    return unless _connected_to('cpan.org');

    my %args = @config;
    my %failed;
    local *FAILED;
    if ( $args{do_once} and open( FAILED, '.#autoinstall.failed' ) ) {
        # FIX(review): restored the stripped readline (<FAILED>) -- the bare
        # "while ()" never read the failure log, so do_once could not skip
        # previously failed modules.
        while (<FAILED>) { chomp; $failed{$_}++ }
        close FAILED;

        my @newmod;
        while ( my ( $k, $v ) = splice( @modules, 0, 2 ) ) {
            push @newmod, ( $k => $v ) unless $failed{$k};
        }
        @modules = @newmod;
    }

    if ( _has_cpanplus() and not $ENV{PERL_AUTOINSTALL_PREFER_CPAN} ) {
        _install_cpanplus( \@modules, \@config );
    }
    else {
        _install_cpan( \@modules, \@config );
    }

    print "*** $class installation finished.\n";

    # see if we have successfully installed them
    while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) {
        if ( _version_cmp( _version_of($pkg), $ver ) >= 0 ) {
            push @installed, $pkg;
        }
        elsif ( $args{do_once} and open( FAILED, '>> .#autoinstall.failed' ) ) {
            print FAILED "$pkg\n";
        }
    }

    close FAILED if $args{do_once};

    return @installed;
}

# Drive an installation through CPANPLUS::Backend (0.04x and newer).
sub _install_cpanplus {
    my @modules   = @{ +shift };
    my @config    = _cpanplus_config( @{ +shift } );
    my $installed = 0;

    require CPANPLUS::Backend;
    my $cp   = CPANPLUS::Backend->new;
    my $conf = $cp->configure_object;

    return unless $conf->can('conf')    # 0.05x+ with "sudo" support
      or _can_write( $conf->_get_build('base') );    # 0.04x

    # if we're root, set UNINST=1 to avoid trouble
unless user asked for it. my $makeflags = $conf->get_conf('makeflags') || ''; if ( UNIVERSAL::isa( $makeflags, 'HASH' ) ) { # 0.03+ uses a hashref here $makeflags->{UNINST} = 1 unless exists $makeflags->{UNINST}; } else { # 0.02 and below uses a scalar $makeflags = join( ' ', split( ' ', $makeflags ), 'UNINST=1' ) if ( $makeflags !~ /\bUNINST\b/ and eval qq{ $> eq '0' } ); } $conf->set_conf( makeflags => $makeflags ); $conf->set_conf( prereqs => 1 ); while ( my ( $key, $val ) = splice( @config, 0, 2 ) ) { $conf->set_conf( $key, $val ); } my $modtree = $cp->module_tree; while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) { print "*** Installing $pkg...\n"; MY::preinstall( $pkg, $ver ) or next if defined &MY::preinstall; my $success; my $obj = $modtree->{$pkg}; if ( $obj and _version_cmp( $obj->{version}, $ver ) >= 0 ) { my $pathname = $pkg; $pathname =~ s/::/\\W/; foreach my $inc ( grep { m/$pathname.pm/i } keys(%INC) ) { delete $INC{$inc}; } my $rv = $cp->install( modules => [ $obj->{module} ] ); if ( $rv and ( $rv->{ $obj->{module} } or $rv->{ok} ) ) { print "*** $pkg successfully installed.\n"; $success = 1; } else { print "*** $pkg installation cancelled.\n"; $success = 0; } $installed += $success; } else { print << "."; *** Could not find a version $ver or above for $pkg; skipping. . 
} MY::postinstall( $pkg, $ver, $success ) if defined &MY::postinstall; } return $installed; } sub _cpanplus_config { my @config = (); while ( @_ ) { my ($key, $value) = (shift(), shift()); if ( $key eq 'prerequisites_policy' ) { if ( $value eq 'follow' ) { $value = CPANPLUS::Internals::Constants::PREREQ_INSTALL(); } elsif ( $value eq 'ask' ) { $value = CPANPLUS::Internals::Constants::PREREQ_ASK(); } elsif ( $value eq 'ignore' ) { $value = CPANPLUS::Internals::Constants::PREREQ_IGNORE(); } else { die "*** Cannot convert option $key = '$value' to CPANPLUS version.\n"; } push @config, 'prereqs', $value; } elsif ( $key eq 'force' ) { push @config, $key, $value; } elsif ( $key eq 'notest' ) { push @config, 'skiptest', $value; } else { die "*** Cannot convert option $key to CPANPLUS version.\n"; } } return @config; } sub _install_cpan { my @modules = @{ +shift }; my @config = @{ +shift }; my $installed = 0; my %args; _load_cpan(); require Config; if (CPAN->VERSION < 1.80) { # no "sudo" support, probe for writableness return unless _can_write( MM->catfile( $CPAN::Config->{cpan_home}, 'sources' ) ) and _can_write( $Config::Config{sitelib} ); } # if we're root, set UNINST=1 to avoid trouble unless user asked for it. my $makeflags = $CPAN::Config->{make_install_arg} || ''; $CPAN::Config->{make_install_arg} = join( ' ', split( ' ', $makeflags ), 'UNINST=1' ) if ( $makeflags !~ /\bUNINST\b/ and eval qq{ $> eq '0' } ); # don't show start-up info $CPAN::Config->{inhibit_startup_message} = 1; # set additional options while ( my ( $opt, $arg ) = splice( @config, 0, 2 ) ) { ( $args{$opt} = $arg, next ) if $opt =~ /^(?:force|notest)$/; # pseudo-option $CPAN::Config->{$opt} = $opt eq 'urllist' ? 
[$arg] : $arg; } if ($args{notest} && (not CPAN::Shell->can('notest'))) { die "Your version of CPAN is too old to support the 'notest' pragma"; } local $CPAN::Config->{prerequisites_policy} = 'follow'; while ( my ( $pkg, $ver ) = splice( @modules, 0, 2 ) ) { MY::preinstall( $pkg, $ver ) or next if defined &MY::preinstall; print "*** Installing $pkg...\n"; my $obj = CPAN::Shell->expand( Module => $pkg ); my $success = 0; if ( $obj and _version_cmp( $obj->cpan_version, $ver ) >= 0 ) { my $pathname = $pkg; $pathname =~ s/::/\\W/; foreach my $inc ( grep { m/$pathname.pm/i } keys(%INC) ) { delete $INC{$inc}; } my $rv = do { if ($args{force}) { CPAN::Shell->force( install => $pkg ) } elsif ($args{notest}) { CPAN::Shell->notest( install => $pkg ) } else { CPAN::Shell->install($pkg) } }; $rv ||= eval { $CPAN::META->instance( 'CPAN::Distribution', $obj->cpan_file, ) ->{install} if $CPAN::META; }; if ( $rv eq 'YES' ) { print "*** $pkg successfully installed.\n"; $success = 1; } else { print "*** $pkg installation failed.\n"; $success = 0; } $installed += $success; } else { print << "."; *** Could not find a version $ver or above for $pkg; skipping. . } MY::postinstall( $pkg, $ver, $success ) if defined &MY::postinstall; } return $installed; } sub _has_cpanplus { return ( $HasCPANPLUS = ( $INC{'CPANPLUS/Config.pm'} or _load('CPANPLUS::Shell::Default') ) ); } # make guesses on whether we're under the CPAN installation directory sub _under_cpan { require Cwd; require File::Spec; my $cwd = File::Spec->canonpath( Cwd::getcwd() ); my $cpan = File::Spec->canonpath( $CPAN::Config->{cpan_home} ); return ( index( $cwd, $cpan ) > -1 ); } sub _update_to { my $class = __PACKAGE__; my $ver = shift; return if _version_cmp( _version_of($class), $ver ) >= 0; # no need to upgrade if ( _prompt( "==> A newer version of $class ($ver) is required. Install?", 'y' ) =~ /^[Nn]/ ) { die "*** Please install $class $ver manually.\n"; } print << "."; *** Trying to fetch it from CPAN... . 
# install ourselves _load($class) and return $class->import(@_) if $class->install( [], $class, $ver ); print << '.'; exit 1; *** Cannot bootstrap myself. :-( Installation terminated. . } # check if we're connected to some host, using inet_aton sub _connected_to { my $site = shift; return ( ( _load('Socket') and Socket::inet_aton($site) ) or _prompt( qq( *** Your host cannot resolve the domain name '$site', which probably means the Internet connections are unavailable. ==> Should we try to install the required module(s) anyway?), 'n' ) =~ /^[Yy]/ ); } # check if a directory is writable; may create it on demand sub _can_write { my $path = shift; mkdir( $path, 0755 ) unless -e $path; return 1 if -w $path; print << "."; *** You are not allowed to write to the directory '$path'; the installation may fail due to insufficient permissions. . if ( eval '$>' and lc(`sudo -V`) =~ /version/ and _prompt( qq( ==> Should we try to re-execute the autoinstall process with 'sudo'?), ((-t STDIN) ? 'y' : 'n') ) =~ /^[Yy]/ ) { # try to bootstrap ourselves from sudo print << "."; *** Trying to re-execute the autoinstall process with 'sudo'... . my $missing = join( ',', @Missing ); my $config = join( ',', UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} ) if $Config; return unless system( 'sudo', $^X, $0, "--config=$config", "--installdeps=$missing" ); print << "."; *** The 'sudo' command exited with error! Resuming... . } return _prompt( qq( ==> Should we try to install the required module(s) anyway?), 'n' ) =~ /^[Yy]/; } # load a module and return the version it reports sub _load { my $mod = pop; # method/function doesn't matter my $file = $mod; $file =~ s|::|/|g; $file .= '.pm'; local $@; return eval { require $file; $mod->VERSION } || ( $@ ? 
undef: 0 ); } # report version without loading a module sub _version_of { my $mod = pop; # method/function doesn't matter my $file = $mod; $file =~ s|::|/|g; $file .= '.pm'; foreach my $dir ( @INC ) { next if ref $dir; my $path = File::Spec->catfile($dir, $file); next unless -e $path; require ExtUtils::MM_Unix; return ExtUtils::MM_Unix->parse_version($path); } return undef; } # Load CPAN.pm and it's configuration sub _load_cpan { return if $CPAN::VERSION and $CPAN::Config and not @_; require CPAN; # CPAN-1.82+ adds CPAN::Config::AUTOLOAD to redirect to # CPAN::HandleConfig->load. CPAN reports that the redirection # is deprecated in a warning printed at the user. # CPAN-1.81 expects CPAN::HandleConfig->load, does not have # $CPAN::HandleConfig::VERSION but cannot handle # CPAN::Config->load # Which "versions expect CPAN::Config->load? if ( $CPAN::HandleConfig::VERSION || CPAN::HandleConfig->can('load') ) { # Newer versions of CPAN have a HandleConfig module CPAN::HandleConfig->load; } else { # Older versions had the load method in Config directly CPAN::Config->load; } } # compare two versions, either use Sort::Versions or plain comparison # return values same as <=> sub _version_cmp { my ( $cur, $min ) = @_; return -1 unless defined $cur; # if 0 keep comparing return 1 unless $min; $cur =~ s/\s+$//; # check for version numbers that are not in decimal format if ( ref($cur) or ref($min) or $cur =~ /v|\..*\./ or $min =~ /v|\..*\./ ) { if ( ( $version::VERSION or defined( _load('version') )) and version->can('new') ) { # use version.pm if it is installed. return version->new($cur) <=> version->new($min); } elsif ( $Sort::Versions::VERSION or defined( _load('Sort::Versions') ) ) { # use Sort::Versions as the sorting algorithm for a.b.c versions return Sort::Versions::versioncmp( $cur, $min ); } warn "Cannot reliably compare non-decimal formatted versions.\n" . 
"Please install version.pm or Sort::Versions.\n"; } # plain comparison local $^W = 0; # shuts off 'not numeric' bugs return $cur <=> $min; } # nothing; this usage is deprecated. sub main::PREREQ_PM { return {}; } sub _make_args { my %args = @_; $args{PREREQ_PM} = { %{ $args{PREREQ_PM} || {} }, @Existing, @Missing } if $UnderCPAN or $TestOnly; if ( $args{EXE_FILES} and -e 'MANIFEST' ) { require ExtUtils::Manifest; my $manifest = ExtUtils::Manifest::maniread('MANIFEST'); $args{EXE_FILES} = [ grep { exists $manifest->{$_} } @{ $args{EXE_FILES} } ]; } $args{test}{TESTS} ||= 't/*.t'; $args{test}{TESTS} = join( ' ', grep { !exists( $DisabledTests{$_} ) } map { glob($_) } split( /\s+/, $args{test}{TESTS} ) ); my $missing = join( ',', @Missing ); my $config = join( ',', UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config} ) if $Config; $PostambleActions = ( ($missing and not $UnderCPAN) ? "\$(PERL) $0 --config=$config --installdeps=$missing" : "\$(NOECHO) \$(NOOP)" ); my $deps_list = join( ',', @Missing, @Existing ); $PostambleActionsUpgradeDeps = "\$(PERL) $0 --config=$config --upgradedeps=$deps_list"; my $config_notest = join( ',', (UNIVERSAL::isa( $Config, 'HASH' ) ? %{$Config} : @{$Config}), 'notest', 1 ) if $Config; $PostambleActionsNoTest = ( ($missing and not $UnderCPAN) ? "\$(PERL) $0 --config=$config_notest --installdeps=$missing" : "\$(NOECHO) \$(NOOP)" ); $PostambleActionsUpgradeDepsNoTest = "\$(PERL) $0 --config=$config_notest --upgradedeps=$deps_list"; $PostambleActionsListDeps = '@$(PERL) -le "print for @ARGV" ' . join(' ', map $Missing[$_], grep $_ % 2 == 0, 0..$#Missing); my @all = (@Missing, @Existing); $PostambleActionsListAllDeps = '@$(PERL) -le "print for @ARGV" ' . 
join(' ', map $all[$_], grep $_ % 2 == 0, 0..$#all); return %args; } # a wrapper to ExtUtils::MakeMaker::WriteMakefile sub Write { require Carp; Carp::croak "WriteMakefile: Need even number of args" if @_ % 2; if ($CheckOnly) { print << "."; *** Makefile not written in check-only mode. . return; } my %args = _make_args(@_); no strict 'refs'; $PostambleUsed = 0; local *MY::postamble = \&postamble unless defined &MY::postamble; ExtUtils::MakeMaker::WriteMakefile(%args); print << "." unless $PostambleUsed; *** WARNING: Makefile written with customized MY::postamble() without including contents from Module::AutoInstall::postamble() -- auto installation features disabled. Please contact the author. . return 1; } sub postamble { $PostambleUsed = 1; my $fragment; $fragment .= <<"AUTO_INSTALL" if !$InstallDepsTarget; config :: installdeps \t\$(NOECHO) \$(NOOP) AUTO_INSTALL $fragment .= <<"END_MAKE"; checkdeps :: \t\$(PERL) $0 --checkdeps installdeps :: \t$PostambleActions installdeps_notest :: \t$PostambleActionsNoTest upgradedeps :: \t$PostambleActionsUpgradeDeps upgradedeps_notest :: \t$PostambleActionsUpgradeDepsNoTest listdeps :: \t$PostambleActionsListDeps listalldeps :: \t$PostambleActionsListAllDeps END_MAKE return $fragment; } 1; __END__ #line 1197 DBIx-Class-Schema-Loader-0.07045/t/0000755000175000017500000000000012650450355015710 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/24loader_subclass.t0000644000175000017500000000302012476065213021404 0ustar ilmariilmariuse strict; use warnings; use Test::More; use lib qw(t/lib); use make_dbictest_db; my %loader_class = ( 'TestLoaderSubclass' => 'TestLoaderSubclass', 'TestLoaderSubclass_NoRebless' => 'TestLoaderSubclass_NoRebless', '::DBI::SQLite' => 'DBIx::Class::Schema::Loader::DBI::SQLite' ); my %invocations = ( loader_class => sub { package DBICTest::Schema::1; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->_loader_invoked(0); __PACKAGE__->naming('current'); __PACKAGE__->loader_class(shift); 
__PACKAGE__->connect($make_dbictest_db::dsn); }, connect_info => sub { package DBICTeset::Schema::2; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->_loader_invoked(0); __PACKAGE__->naming('current'); __PACKAGE__->connect($make_dbictest_db::dsn, { loader_class => shift }); }, make_schema_at => sub { use DBIx::Class::Schema::Loader qw/ make_schema_at /; make_schema_at( 'DBICTeset::Schema::3', { naming => 'current' }, [ $make_dbictest_db::dsn, { loader_class => shift } ] ); } ); # one test per invocation/class combo plan tests => keys(%invocations) * keys(%loader_class); while (my ($style,$subref) = each %invocations) { while (my ($arg, $class) = each %loader_class) { my $schema = $subref->($arg); $schema = $schema->clone unless ref $schema; isa_ok($schema->loader, $class, "$style($arg)"); } } DBIx-Class-Schema-Loader-0.07045/t/bin/0000755000175000017500000000000012650450355016460 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/bin/simple_filter0000755000175000017500000000016012131533457021240 0ustar ilmariilmari#!perl use strict; use warnings; while () { print; } print q{my $foo = "Kilroy was here";}, "\n"; DBIx-Class-Schema-Loader-0.07045/t/23dumpmore.t0000644000175000017500000004605412542756321020105 0ustar ilmariilmariuse strict; use warnings; use Test::More; use DBIx::Class::Schema::Loader::Utils qw/slurp_file write_file/; use namespace::clean; use File::Temp (); use lib qw(t/lib); use dbixcsl_dumper_tests; my $t = 'dbixcsl_dumper_tests'; $t->cleanup; # test loading external content $t->dump_test( classname => 'DBICTest::Schema::_no_skip_load_external', regexes => { Foo => [ qr/package DBICTest::Schema::_no_skip_load_external::Foo;.*\nour \$skip_me = "bad mojo";\n1;/s ], }, ); # test skipping external content $t->dump_test( classname => 'DBICTest::Schema::_skip_load_external', options => { skip_load_external => 1, }, neg_regexes => { Foo => [ qr/package DBICTest::Schema::_skip_load_external::Foo;.*\nour \$skip_me = "bad mojo";\n1;/s ], }, ); 
$t->cleanup; # test config_file { my $config_file = File::Temp->new (UNLINK => 1); print $config_file "{ skip_relationships => 1 }\n"; close $config_file; $t->dump_test( classname => 'DBICTest::Schema::_config_file', options => { config_file => "$config_file" }, neg_regexes => { Foo => [ qr/has_many/, ], }, ); } # proper exception $t->dump_test( classname => 'DBICTest::Schema::_clashing_monikers', test_db_class => 'make_dbictest_db_clashing_monikers', error => qr/tables (?:"bar", "bars"|"bars", "bar") reduced to the same source moniker 'Bar'/, ); $t->cleanup; # test naming => { column_accessors => 'preserve' } # also test POD for unique constraint $t->dump_test( classname => 'DBICTest::Schema::_preserve_column_accessors', test_db_class => 'make_dbictest_db_with_unique', options => { naming => { column_accessors => 'preserve' } }, neg_regexes => { RouteChange => [ qr/\baccessor\b/, ], }, regexes => { Baz => [ qr/\n\n=head1 UNIQUE CONSTRAINTS\n\n=head2 C\n\n=over 4\n\n=item \* L<\/baz_num>\n\n=back\n\n=cut\n\n__PACKAGE__->add_unique_constraint\("baz_num_unique"\, \["baz_num"\]\);\n\n/, ], } ); $t->cleanup; # test that rels are sorted $t->dump_test( classname => 'DBICTest::Schema::_sorted_rels', test_db_class => 'make_dbictest_db_with_unique', regexes => { Baz => [ qr/->might_have\(\n "quux".*->belongs_to\(\n "station_visited"/s, ], } ); $t->cleanup; $t->dump_test( classname => 'DBICTest::Schema::_sorted_uniqs', test_db_class => 'make_dbictest_db_multi_unique', regexes => { Bar => [ qr/->add_unique_constraint\("uniq1_unique".*->add_unique_constraint\("uniq2_unique"/s, ], }, ); $t->cleanup; # test naming => { monikers => 'plural' } $t->dump_test( classname => 'DBICTest::Schema::_plural_monikers', options => { naming => { monikers => 'plural' } }, regexes => { Foos => [ qr/\n=head1 NAME\n\nDBICTest::Schema::_plural_monikers::Foos\n\n=cut\n\n/, ], Bars => [ qr/\n=head1 NAME\n\nDBICTest::Schema::_plural_monikers::Bars\n\n=cut\n\n/, ], }, ); $t->cleanup; # test naming => { 
monikers => 'singular' } $t->dump_test( classname => 'DBICTest::Schema::_singular_monikers', test_db_class => 'make_dbictest_db_plural_tables', options => { naming => { monikers => 'singular' } }, regexes => { Foo => [ qr/\n=head1 NAME\n\nDBICTest::Schema::_singular_monikers::Foo\n\n=cut\n\n/, ], Bar => [ qr/\n=head1 NAME\n\nDBICTest::Schema::_singular_monikers::Bar\n\n=cut\n\n/, ], }, ); $t->cleanup; # test naming => { monikers => 'preserve' } $t->dump_test( classname => 'DBICTest::Schema::_preserve_monikers', test_db_class => 'make_dbictest_db_plural_tables', options => { naming => { monikers => 'preserve' } }, regexes => { Foos => [ qr/\n=head1 NAME\n\nDBICTest::Schema::_preserve_monikers::Foos\n\n=cut\n\n/, ], Bars => [ qr/\n=head1 NAME\n\nDBICTest::Schema::_preserve_monikers::Bars\n\n=cut\n\n/, ], }, ); $t->cleanup; # test out the POD and "use utf8;" $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { custom_column_info => sub { my ($table, $col, $info) = @_; return +{ extra => { is_footext => 1 } } if $col eq 'footext'; }, result_base_class => 'My::ResultBaseClass', additional_classes => 'TestAdditional', additional_base_classes => 'TestAdditionalBase', left_base_classes => 'TestLeftBase', components => [ 'TestComponent', '+TestComponentFQN' ], }, regexes => { schema => [ qr/^use utf8;\n/, qr/package DBICTest::DumpMore::1;/, qr/->load_classes/, ], Foo => [ qr/^use utf8;\n/, qr/package DBICTest::DumpMore::1::Foo;/, qr/\n=head1 NAME\n\nDBICTest::DumpMore::1::Foo\n\n=cut\n\nuse strict;\nuse warnings;\n\n/, qr/\n=head1 BASE CLASS: L\n\n=cut\n\nuse base 'My::ResultBaseClass';\n\n/, qr/\n=head1 ADDITIONAL CLASSES USED\n\n=over 4\n\n=item \* L\n\n=back\n\n=cut\n\n/, qr/\n=head1 ADDITIONAL BASE CLASSES\n\n=over 4\n\n=item \* L\n\n=back\n\n=cut\n\n/, qr/\n=head1 LEFT BASE CLASSES\n\n=over 4\n\n=item \* L\n\n=back\n\n=cut\n\n/, qr/\n=head1 COMPONENTS LOADED\n\n=over 4\n\n=item \* L\n\n=item \* L\n\n=back\n\n=cut\n\n/, qr/\n=head1 TABLE: 
C\n\n=cut\n\n__PACKAGE__->table\("foo"\);\n\n/, qr/\n=head1 ACCESSORS\n\n/, qr/\n=head2 fooid\n\n data_type: 'integer'\n is_auto_increment: 1\n is_nullable: 0\n\n/, qr/\n=head2 footext\n\n data_type: 'text'\n default_value: 'footext'\n extra: \{is_footext => 1\}\n is_nullable: 1\n\n/, qr/\n=head1 PRIMARY KEY\n\n=over 4\n\n=item \* L<\/fooid>\n\n=back\n\n=cut\n\n__PACKAGE__->set_primary_key\("fooid"\);\n/, qr/\n=head1 RELATIONS\n\n/, qr/\n=head2 bars\n\nType: has_many\n\nRelated object: L\n\n=cut\n\n/, qr/1;\n$/, ], Bar => [ qr/^use utf8;\n/, qr/package DBICTest::DumpMore::1::Bar;/, qr/\n=head1 NAME\n\nDBICTest::DumpMore::1::Bar\n\n=cut\n\nuse strict;\nuse warnings;\n\n/, qr/\n=head1 BASE CLASS: L\n\n=cut\n\nuse base 'My::ResultBaseClass';\n\n/, qr/\n=head1 ADDITIONAL CLASSES USED\n\n=over 4\n\n=item \* L\n\n=back\n\n=cut\n\n/, qr/\n=head1 ADDITIONAL BASE CLASSES\n\n=over 4\n\n=item \* L\n\n=back\n\n=cut\n\n/, qr/\n=head1 LEFT BASE CLASSES\n\n=over 4\n\n=item \* L\n\n=back\n\n=cut\n\n/, qr/\n=head1 COMPONENTS LOADED\n\n=over 4\n\n=item \* L\n\n=item \* L\n\n=back\n\n=cut\n\n/, qr/\n=head1 TABLE: C\n\n=cut\n\n__PACKAGE__->table\("bar"\);\n\n/, qr/\n=head1 ACCESSORS\n\n/, qr/\n=head2 barid\n\n data_type: 'integer'\n is_auto_increment: 1\n is_nullable: 0\n\n/, qr/\n=head2 fooref\n\n data_type: 'integer'\n is_foreign_key: 1\n is_nullable: 1\n\n/, qr/\n=head1 PRIMARY KEY\n\n=over 4\n\n=item \* L<\/barid>\n\n=back\n\n=cut\n\n__PACKAGE__->set_primary_key\("barid"\);\n/, qr/\n=head1 RELATIONS\n\n/, qr/\n=head2 fooref\n\nType: belongs_to\n\nRelated object: L\n\n=cut\n\n/, qr/\n1;\n$/, ], }, ); $t->append_to_class('DBICTest::DumpMore::1::Foo',q{# XXX This is my custom content XXX}); $t->dump_test( classname => 'DBICTest::DumpMore::1', regexes => { schema => [ qr/package DBICTest::DumpMore::1;/, qr/->load_classes/, ], Foo => [ qr/package DBICTest::DumpMore::1::Foo;/, qr/->set_primary_key/, qr/1;\n# XXX This is my custom content XXX/, ], Bar => [ qr/package 
DBICTest::DumpMore::1::Bar;/, qr/->set_primary_key/, qr/1;\n$/, ], }, ); $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { really_erase_my_files => 1 }, regexes => { schema => [ qr/package DBICTest::DumpMore::1;/, qr/->load_classes/, ], Foo => [ qr/package DBICTest::DumpMore::1::Foo;/, qr/->set_primary_key/, qr/1;\n$/, ], Bar => [ qr/package DBICTest::DumpMore::1::Bar;/, qr/->set_primary_key/, qr/1;\n$/, ], }, neg_regexes => { Foo => [ qr/# XXX This is my custom content XXX/, ], }, ); $t->cleanup; # test namespaces $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { use_namespaces => 1, generate_pod => 0 }, neg_regexes => { 'Result/Foo' => [ qr/^=/m, ], }, ); $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { db_schema => 'foo_schema', qualify_objects => 1, use_namespaces => 1 }, warnings => [ qr/^db_schema is not supported on SQLite/, ], regexes => { 'Result/Foo' => [ qr/^\Q__PACKAGE__->table("foo_schema.foo");\E/m, # the has_many relname should not have the schema in it! 
qr/^__PACKAGE__->has_many\(\n "bars"/m, ], }, ); # test qualify_objects $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { db_schema => [ 'foo_schema', 'bar_schema' ], qualify_objects => 0, use_namespaces => 1, }, warnings => [ qr/^db_schema is not supported on SQLite/, ], regexes => { 'Result/Foo' => [ # the table name should not include the db schema qr/^\Q__PACKAGE__->table("foo");\E/m, ], 'Result/Bar' => [ # the table name should not include the db schema qr/^\Q__PACKAGE__->table("bar");\E/m, ], }, ); # test moniker_parts $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { db_schema => 'my_schema', moniker_parts => ['_schema', 'name'], qualify_objects => 1, use_namespaces => 1, }, warnings => [ qr/^db_schema is not supported on SQLite/, ], regexes => { 'Result/MySchemaFoo' => [ qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m, # the has_many relname should not have the schema in it, but the class should qr/^__PACKAGE__->has_many\(\n "bars",\n "DBICTest::DumpMore::1::Result::MySchemaBar"/m, ], }, ); # test moniker_part_separator $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { db_schema => 'my_schema', moniker_parts => ['_schema', 'name'], moniker_part_separator => '::', qualify_objects => 1, use_namespaces => 1, }, warnings => [ qr/^db_schema is not supported on SQLite/, ], regexes => { 'Result/MySchema/Foo' => [ qr/^package DBICTest::DumpMore::1::Result::MySchema::Foo;/m, qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m, # the has_many relname should not have the schema in it, but the class should qr/^__PACKAGE__->has_many\(\n "bars",\n "DBICTest::DumpMore::1::Result::MySchema::Bar"/m, ], }, ); # test moniker_part_separator + moniker_map + recursive constraints $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { db_schema => 'my_schema', moniker_parts => ['_schema', 'name'], moniker_part_separator => '::', qualify_objects => 1, use_namespaces => 1, moniker_map => { my_schema => { foo => "MySchema::Floop" 
}, }, constraint => [ [ qr/my_schema/ => qr/foo|bar/ ] ], exclude => [ [ qr/my_schema/ => qr/bar/ ] ], }, generated_results => [qw(MySchema::Floop)], warnings => [ qr/^db_schema is not supported on SQLite/, ], regexes => { 'Result/MySchema/Floop' => [ qr/^package DBICTest::DumpMore::1::Result::MySchema::Floop;/m, qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m, ], }, neg_regexes => { 'Result/MySchema/Floop' => [ # the bar table should not be loaded, so no relationship should exist qr/^__PACKAGE__->has_many\(\n "bars"/m, ], }, ); # test moniker_map + moniker_part_map $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { db_schema => 'my_schema', moniker_parts => ['_schema', 'name'], moniker_part_separator => '::', moniker_part_map => { _schema => { my_schema => 'OtherSchema', }, }, moniker_map => { my_schema => { foo => 'MySchema::Floop', }, }, qualify_objects => 1, use_namespaces => 1, }, warnings => [ qr/^db_schema is not supported on SQLite/, ], regexes => { 'Result/MySchema/Floop' => [ qr/^package DBICTest::DumpMore::1::Result::MySchema::Floop;/m, qr/^\Q__PACKAGE__->table("my_schema.foo");\E/m, # the has_many relname should not have the schema in it, but the class should qr/^__PACKAGE__->has_many\(\n "bars",\n "DBICTest::DumpMore::1::Result::OtherSchema::Bar"/m, ], 'Result/OtherSchema/Bar' => [ qr/^package DBICTest::DumpMore::1::Result::OtherSchema::Bar;/m, qr/^\Q__PACKAGE__->table("my_schema.bar");\E/m, # the has_many relname should not have the schema in it, but the class should qr/^__PACKAGE__->belongs_to\(\n "fooref",\n "DBICTest::DumpMore::1::Result::MySchema::Floop"/m, ], }, ); $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { use_namespaces => 1 }, regexes => { schema => [ qr/package DBICTest::DumpMore::1;/, qr/->load_namespaces/, ], 'Result/Foo' => [ qr/package DBICTest::DumpMore::1::Result::Foo;/, qr/->set_primary_key/, qr/1;\n$/, ], 'Result/Bar' => [ qr/package DBICTest::DumpMore::1::Result::Bar;/, qr/->set_primary_key/, 
qr/1;\n$/,
        ],
    },
);

$t->dump_test(
    classname => 'DBICTest::DumpMore::1',
    options => {
        use_namespaces => 1,
        result_namespace => 'Res',
        resultset_namespace => 'RSet',
        default_resultset_class => 'RSetBase',
    },
    regexes => {
        schema => [
            qr/package DBICTest::DumpMore::1;/,
            qr/->load_namespaces/,
            qr/result_namespace => "Res"/,
            qr/resultset_namespace => "RSet"/,
            qr/default_resultset_class => "RSetBase"/,
        ],
        'Res/Foo' => [
            qr/package DBICTest::DumpMore::1::Res::Foo;/,
            qr/->set_primary_key/,
            qr/1;\n$/,
        ],
        'Res/Bar' => [
            qr/package DBICTest::DumpMore::1::Res::Bar;/,
            qr/->set_primary_key/,
            qr/1;\n$/,
        ],
    },
);

# fully-qualified (+-prefixed) result_namespace plus custom base classes
$t->dump_test(
    classname => 'DBICTest::DumpMore::1',
    options => {
        use_namespaces => 1,
        result_namespace => '+DBICTest::DumpMore::1::Res',
        resultset_namespace => 'RSet',
        default_resultset_class => 'RSetBase',
        result_base_class => 'My::ResultBaseClass',
        schema_base_class => 'My::SchemaBaseClass',
    },
    regexes => {
        schema => [
            qr/package DBICTest::DumpMore::1;/,
            qr/->load_namespaces/,
            qr/result_namespace => "\+DBICTest::DumpMore::1::Res"/,
            qr/resultset_namespace => "RSet"/,
            qr/default_resultset_class => "RSetBase"/,
            qr/use base 'My::SchemaBaseClass'/,
        ],
        'Res/Foo' => [
            qr/package DBICTest::DumpMore::1::Res::Foo;/,
            qr/use base 'My::ResultBaseClass'/,
            qr/->set_primary_key/,
            qr/1;\n$/,
        ],
        'Res/Bar' => [
            qr/package DBICTest::DumpMore::1::Res::Bar;/,
            qr/use base 'My::ResultBaseClass'/,
            qr/->set_primary_key/,
            qr/1;\n$/,
        ],
    },
);

# a missing result_base_class must be reported as an error
$t->dump_test(
    classname => 'DBICTest::DumpMore::1',
    options => {
        use_namespaces => 1,
        result_base_class => 'My::MissingResultBaseClass',
    },
    error => qr/My::MissingResultBaseClass.*is not installed/,
);

# test quote_char in connect_info for dbicdump
$t->dump_test(
    classname => 'DBICTest::DumpMore::1',
    extra_connect_info => [
        '',
        '',
        { quote_char => '"' },
    ],
);

# test fix for RT#70507 (end comment and 1; gets lost if left with actual
# custom content)
$t->dump_test(
    classname => 'DBICTest::DumpMore::Upgrade',
    options => { use_namespaces => 0, },
);

my $file
= $t->class_file('DBICTest::DumpMore::Upgrade::Foo'); my $code = slurp_file $file; $code =~ s/(?=# You can replace)/sub custom_method { 'custom_method works' }\n0;\n\n/; write_file $file, $code; $t->dump_test( classname => 'DBICTest::DumpMore::Upgrade', options => { use_namespaces => 1, }, generated_results => [qw(Foo Bar)], regexes => { 'Result/Foo' => [ qr/sub custom_method \{ 'custom_method works' \}\n0;\n\n# You can replace.*\n1;\n\z/, ], }, ); # test dry-run mode $t->dump_test( classname => 'DBICTest::DumpMore::DryRun', options => { dry_run => 1, }, generated_results => [qw(Foo Bar)], ); my $schema_file = $t->class_file('DBICTest::DumpMore::DryRun'); ok( !-e $schema_file, "dry-run doesn't create file for schema class" ); (my $schema_dir = $schema_file) =~ s/\.pm\z//; ok( !-e $schema_dir, "dry-run doesn't create subdirectory for schema namespace" ); # test omit_version (RT#92300) $t->dump_test( classname => 'DBICTest::DumpMore::omit_version', options => { omit_version => 1, }, regexes => { Foo => [ qr/^\# Created by DBIx::Class::Schema::Loader @ \d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d$/m, ], }, ); # test omit_timestamp (RT#92300) $t->dump_test( classname => 'DBICTest::DumpMore::omit_timestamp', options => { omit_timestamp => 1, }, regexes => { Foo => [ qr/^\# Created by DBIx::Class::Schema::Loader v[\d.]+$/m, ], }, ); # test omit_version and omit_timestamp simultaneously (RT#92300) $t->dump_test( classname => 'DBICTest::DumpMore::omit_both', options => { omit_version => 1, omit_timestamp => 1, }, # A positive regex here would match the top comment neg_regexes => { Foo => [ qr/^\# Created by DBIx::Class::Schema::Loader.+$/m, ], }, ); done_testing; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/25backcompat.t0000644000175000017500000012451712542756321020364 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Test::Exception; use File::Path qw/rmtree make_path/; use Class::Unload; use File::Temp qw/tempfile tempdir/; use IO::File; use 
DBIx::Class::Schema::Loader ();
use DBIx::Class::Schema::Loader::Utils 'slurp_file';
use Lingua::EN::Inflect::Number ();
use lib qw(t/lib);
use make_dbictest_db_with_unique;
use dbixcsl_test_dir qw/$tdir/;

# Shared state for the whole test script: a scratch dump directory, the name
# of the schema class the loader generates, and the expected Result count.
my $DUMP_DIR = "$tdir/common_dump";
rmtree $DUMP_DIR;
my $SCHEMA_CLASS = 'DBIXCSL_Test::Schema';
my $RESULT_COUNT = 7;

# Forward declarations for test helpers defined later in this file.
sub class_content_contains;
sub contains;

# test dynamic schema in 0.04006 mode
{
    my $res = run_loader();
    my $warning = $res->{warnings}[0];

    contains $warning, 'Dynamic schema',
        'dynamic schema in backcompat mode detected';
    contains $warning, 'run in 0.04006 mode',
        'dynamic schema in 0.04006 mode warning';
    contains $warning, 'DBIx::Class::Schema::Loader::Manual::UpgradingFromV4',
        'warning refers to upgrading doc';

    run_v4_tests($res);
}

# setting naming accessor on dynamic schema should disable warning (even when
# we're setting it to 'v4' .)
{
    my $res = run_loader(naming => 'v4');
    is_deeply $res->{warnings}, [], 'no warnings with naming attribute set';
    run_v4_tests($res);
}

# test upgraded dynamic schema
{
    my $res = run_loader(naming => 'current');
    is_deeply $res->{warnings}, [], 'no warnings with naming attribute set';
    run_v7_tests($res);
}

# test upgraded dynamic schema with external content loaded
{
    my $temp_dir = setup_load_external({
        Quuxs => 'Bazs',
        Bar => 'Foos',
    });

    my $res = run_loader(naming => 'current', use_namespaces => 0);
    my $schema = $res->{schema};

    # NOTE(review): 'nummber' typo below is preserved byte-for-byte; it is a
    # runtime test description, fixing it would only change TAP output.
    is scalar @{ $res->{warnings} }, 1,
        'correct nummber of warnings for upgraded dynamic schema with external ' .
        'content for unsingularized Result.';

    my $warning = $res->{warnings}[0];
    contains $warning, 'Detected external content',
        'detected external content warning';

    lives_and { is $schema->resultset('Quux')->find(1)->a_method, 'hlagh' }
        'external custom content for unsingularized Result was loaded by upgraded ' .
        'dynamic Schema';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel,
        $res->{classes}{bazs} }
        'unsingularized class names in external content are translated';

    lives_and { is $schema->resultset('Bar')->find(1)->a_method, 'hlagh' }
        'external content from unchanged Result class';

    lives_and { isa_ok $schema->resultset('Bar')->find(1)->foorel,
        $res->{classes}{foos} }
        'unsingularized class names in external content from unchanged Result class ' .
        'names are translated';

    run_v7_tests($res);
}

# test upgraded dynamic schema with use_namespaces with external content loaded
{
    my $temp_dir = setup_load_external({
        Quuxs => 'Bazs',
        Bar => 'Foos',
    });

    my $res = run_loader(naming => 'current', use_namespaces => 1);
    my $schema = $res->{schema};

    is scalar @{ $res->{warnings} }, 2,
        'correct nummber of warnings for upgraded dynamic schema with external ' .
        'content for unsingularized Result with use_namespaces.';

    my $warning = $res->{warnings}[0];
    contains $warning, "Detected external content",
        'detected external content warning';

    lives_and { is $schema->resultset('Quux')->find(1)->a_method, 'hlagh' }
        'external custom content for unsingularized Result was loaded by upgraded ' .
        'dynamic Schema';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel,
        $res->{classes}{bazs} }
        'unsingularized class names in external content are translated';

    lives_and { isa_ok $schema->resultset('Bar')->find(1)->foorel,
        $res->{classes}{foos} }
        'unsingularized class names in external content from unchanged Result class ' .
        'names are translated';

    run_v7_tests($res);
}

# test upgraded static schema with external content loaded
{
    clean_dumpdir();

    my $temp_dir = setup_load_external({
        Quuxs => 'Bazs',
        Bar => 'Foos',
    });

    write_v4_schema_pm();
    my $res = run_loader(static => 1, naming => 'current');
    my $schema = $res->{schema};
    run_v7_tests($res);

    lives_and { is $schema->resultset('Quux')->find(1)->a_method, 'hlagh' }
        'external custom content for unsingularized Result was loaded by upgraded ' .
        'static Schema';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel,
        $res->{classes}{bazs} }
        'unsingularized class names in external content are translated';

    lives_and { isa_ok $schema->resultset('Bar')->find(1)->foorel,
        $res->{classes}{foos} }
        'unsingularized class names in external content from unchanged Result class ' .
        'names are translated in static schema';

    class_content_contains $schema, $res->{classes}{quuxs},
        "package ${SCHEMA_CLASS}::Quux;",
        'package line translated correctly from external custom content in static dump';

    class_content_contains $schema, $res->{classes}{quuxs},
        "sub a_method { 'hlagh' }",
        'external custom content loaded into static dump correctly';
}

# test running against v4 schema without upgrade, twice, then upgrade
{
    clean_dumpdir();
    write_v4_schema_pm();
    my $res = run_loader(static => 1);
    my $warning = $res->{warnings}[1];

    contains $warning, "static schema",
        'static schema in backcompat mode detected';
    contains $warning, "0.04006", 'correct version detected';
    contains $warning, "DBIx::Class::Schema::Loader::Manual::UpgradingFromV4",
        'refers to upgrading doc';

    is scalar @{ $res->{warnings} }, 4,
        'correct number of warnings for static schema in backcompat mode';

    run_v4_tests($res);

    add_custom_content($res->{schema}, { Quuxs => 'Bazs' });

    # Rerun the loader in backcompat mode to make sure it's still in backcompat
    # mode.
    $res = run_loader(static => 1);
    run_v4_tests($res);

    # now upgrade the schema
    $res = run_loader(
        static => 1,
        naming => 'current',
        use_namespaces => 1
    );
    my $schema = $res->{schema};

    contains $res->{warnings}[0], "Dumping manual schema",
        'correct warnings on upgrading static schema (with "naming" set)';

    contains $res->{warnings}[1], "dump completed",
        'correct warnings on upgrading static schema (with "naming" set)';

    is scalar @{ $res->{warnings} }, 2,
        'correct number of warnings on upgrading static schema (with "naming" set)'
        or diag @{ $res->{warnings} };

    run_v7_tests($res);

    is result_count('Result'), $RESULT_COUNT,
        'un-singularized results were replaced during upgrade';

    # check that custom content was preserved
    lives_and { is $schema->resultset('Quux')->find(1)->b_method, 'dongs' }
        'custom content was carried over from un-singularized Result';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel,
        $res->{classes}{bazs} }
        'unsingularized class names in custom content are translated';

    class_content_contains $schema, $res->{classes}{quuxs},
        "sub b_method { 'dongs' }",
        'custom content from unsingularized Result loaded into static dump correctly';
}

# test running against v4 schema without upgrade, then upgrade with
# use_namespaces not explicitly set
{
    clean_dumpdir();
    write_v4_schema_pm();
    my $res = run_loader(static => 1);
    my $warning = $res->{warnings}[1];

    contains $warning, "static schema",
        'static schema in backcompat mode detected';
    contains $warning, "0.04006", 'correct version detected';
    contains $warning, "DBIx::Class::Schema::Loader::Manual::UpgradingFromV4",
        'refers to upgrading doc';

    is scalar @{ $res->{warnings} }, 4,
        'correct number of warnings for static schema in backcompat mode';

    run_v4_tests($res);

    add_custom_content($res->{schema}, { Quuxs => 'Bazs' });

    # now upgrade the schema
    $res = run_loader(
        static => 1,
        naming => 'current'
    );
    my $schema = $res->{schema};

    contains $res->{warnings}[0], "load_classes",
        'correct warnings on upgrading static schema (with "naming" set and ' .
        'use_namespaces not set)';

    contains $res->{warnings}[1], "Dumping manual schema",
        'correct warnings on upgrading static schema (with "naming" set and ' .
        'use_namespaces not set)';

    contains $res->{warnings}[2], "dump completed",
        'correct warnings on upgrading static schema (with "naming" set and ' .
        'use_namespaces not set)';

    is scalar @{ $res->{warnings} }, 3,
        'correct number of warnings on upgrading static schema (with "naming" set)'
        or diag @{ $res->{warnings} };

    run_v7_tests($res);

    is result_count(), $RESULT_COUNT,
        'un-singularized results were replaced during upgrade';

    # check that custom content was preserved
    lives_and { is $schema->resultset('Quux')->find(1)->b_method, 'dongs' }
        'custom content was carried over from un-singularized Result';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel,
        $res->{classes}{bazs} }
        'unsingularized class names in custom content are translated';

    class_content_contains $schema, $res->{classes}{quuxs},
        "sub b_method { 'dongs' }",
        'custom content from unsingularized Result loaded into static dump correctly';
}

# test running against v4 schema with load_namespaces, upgrade to current but
# downgrade to load_classes, with external content
{
    clean_dumpdir();

    my $temp_dir = setup_load_external({
        Quuxs => 'Bazs',
        Bar => 'Foos',
    }, { result_namespace => 'Result' });

    write_v4_schema_pm(use_namespaces => 1);
    my $res = run_loader(static => 1);
    my $warning = $res->{warnings}[0];

    contains $warning, "static schema",
        'static schema in backcompat mode detected';
    contains $warning, "0.04006", 'correct version detected';
    contains $warning, "DBIx::Class::Schema::Loader::Manual::UpgradingFromV4",
        'refers to upgrading doc';

    is scalar @{ $res->{warnings} }, 3,
        'correct number of warnings for static schema in backcompat mode';

    run_v4_tests($res);

    is $res->{classes}{quuxs}, 'DBIXCSL_Test::Schema::Result::Quuxs',
        'use_namespaces in backcompat mode';

    add_custom_content($res->{schema}, { Quuxs => 'Bazs', }, {
        result_namespace => 'Result',
        rel_name_map => { QuuxBaz => 'bazrel2' },
    });

    # now upgrade the schema to current but downgrade to load_classes
    $res = run_loader(
        static => 1,
        naming => 'current',
        use_namespaces => 0,
    );
    my $schema = $res->{schema};

    contains $res->{warnings}[0], "Dumping manual schema",
        'correct warnings on upgrading static schema (with "naming" set and ' .
        'use_namespaces => 0)';

    contains $res->{warnings}[1], "dump completed",
        'correct warnings on upgrading static schema (with "naming" set and ' .
        'use_namespaces => 0)';

    is scalar @{ $res->{warnings} }, 2,
        'correct number of warnings on upgrading static schema (with "naming" set)'
        or diag @{ $res->{warnings} };

    run_v7_tests($res);

    is result_count(), $RESULT_COUNT,
        'un-singularized results were replaced during upgrade and Result dir removed';

    ok ((not -d result_dir('Result')),
        'Result dir was removed for load_classes downgrade');

    is $res->{classes}{quuxs}, 'DBIXCSL_Test::Schema::Quux',
        'load_classes in upgraded mode';

    # check that custom and external content was preserved
    lives_and { is $schema->resultset('Quux')->find(1)->b_method, 'dongs' }
        'custom content was carried over from un-singularized Result';

    lives_and { is $schema->resultset('Quux')->find(1)->a_method, 'hlagh' }
        'external content was carried over from un-singularized Result';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel2,
        $res->{classes}{bazs} }
        'unsingularized class names in custom content are translated';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel,
        $res->{classes}{bazs} }
        'unsingularized class names in external content are translated';

    lives_and { isa_ok $schema->resultset('Bar')->find(1)->foorel,
        $res->{classes}{foos} }
        'unsingularized class names in external content from unchanged Result class ' .
        'names are translated in static schema';

    class_content_contains $schema, $res->{classes}{quuxs},
        "sub a_method { 'hlagh' }",
        'external content from unsingularized Result loaded into static dump correctly';

    class_content_contains $schema, $res->{classes}{quuxs},
        "sub b_method { 'dongs' }",
        'custom content from unsingularized Result loaded into static dump correctly';
}

# test a regular schema with use_namespaces => 0 upgraded to
# use_namespaces => 1
{
    my $res = run_loader(
        clean_dumpdir => 1,
        static => 1,
        use_namespaces => 0,
        naming => 'current',
    );

    contains $res->{warnings}[0], "Dumping manual schema",
        'correct warnings on dumping static schema with use_namespaces => 0';

    contains $res->{warnings}[1], "dump completed",
        'correct warnings on dumping static schema with use_namespaces => 0';

    is scalar @{ $res->{warnings} }, 2,
        'correct number of warnings on dumping static schema with use_namespaces => 0'
        or diag @{ $res->{warnings} };

    run_v7_tests($res);

    my $schema = $res->{schema};
    add_custom_content($res->{schema}, { Quux => 'Baz' });

    # test that with no use_namespaces option, there is a warning and
    # load_classes is preserved
    $res = run_loader(static => 1, naming => 'current');

    contains $res->{warnings}[0], "load_classes",
        'correct warnings on re-dumping static schema with load_classes';

    contains $res->{warnings}[1], "Dumping manual schema",
        'correct warnings on re-dumping static schema with load_classes';

    contains $res->{warnings}[2], "dump completed",
        'correct warnings on re-dumping static schema with load_classes';

    is scalar @{ $res->{warnings} }, 3,
        'correct number of warnings on re-dumping static schema with load_classes'
        or diag @{ $res->{warnings} };

    is $res->{classes}{quuxs}, 'DBIXCSL_Test::Schema::Quux',
        'load_classes preserved on re-dump';

    run_v7_tests($res);

    # now upgrade the schema to use_namespaces
    $res = run_loader(
        static => 1,
        use_namespaces => 1,
        naming => 'current',
    );
    $schema = $res->{schema};

    contains $res->{warnings}[0], "Dumping manual schema",
        'correct warnings on upgrading to use_namespaces';

    contains $res->{warnings}[1], "dump completed",
        'correct warnings on upgrading to use_namespaces';

    is scalar @{ $res->{warnings} }, 2,
        'correct number of warnings on upgrading to use_namespaces'
        or diag @{ $res->{warnings} };

    run_v7_tests($res);

    my @schema_files = schema_files();

    # NOTE(review): arguments to is() below are in (expected, got) order,
    # reversed from Test::More's (got, expected) convention; the test still
    # passes but failure diagnostics would be inverted.
    is 1, (scalar @schema_files),
        "schema dir contains only 1 entry";

    like $schema_files[0], qr{/Result\z},
        "schema dir contains only a Result/ directory";

    # check that custom content was preserved
    lives_and { is $schema->resultset('Quux')->find(1)->b_method, 'dongs' }
        'custom content was carried over during use_namespaces upgrade';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel,
        $res->{classes}{bazs} }
        'un-namespaced class names in custom content are translated';

    class_content_contains $schema, $res->{classes}{quuxs},
        "sub b_method { 'dongs' }",
        'custom content from un-namespaced Result loaded into static dump correctly';
}

# test a regular schema with default use_namespaces => 1, redump, and downgrade
# to load_classes
{
    my $res = run_loader(clean_dumpdir => 1, static => 1, naming => 'current');

    contains $res->{warnings}[0], "Dumping manual schema",
        'correct warnings on dumping static schema';

    contains $res->{warnings}[1], "dump completed",
        'correct warnings on dumping static schema';

    is scalar @{ $res->{warnings} }, 2,
        'correct number of warnings on dumping static schema'
        or diag @{ $res->{warnings} };

    run_v7_tests($res);

    is $res->{classes}{quuxs}, 'DBIXCSL_Test::Schema::Result::Quux',
        'defaults to use_namespaces on regular dump';

    add_custom_content($res->{schema}, { Quux => 'Baz' }, { result_namespace => 'Result' });

    # test that with no use_namespaces option, use_namespaces is preserved
    $res = run_loader(static => 1, naming => 'current');

    contains $res->{warnings}[0], "Dumping manual schema",
        'correct warnings on re-dumping static schema';

    contains $res->{warnings}[1], "dump completed",
        'correct warnings on re-dumping static schema';

    is scalar @{ $res->{warnings} }, 2,
        'correct number of warnings on re-dumping static schema'
        or diag @{ $res->{warnings} };

    is $res->{classes}{quuxs}, 'DBIXCSL_Test::Schema::Result::Quux',
        'use_namespaces preserved on re-dump';

    run_v7_tests($res);

    # now downgrade the schema to load_classes
    $res = run_loader(
        static => 1,
        use_namespaces => 0,
        naming => 'current',
    );
    my $schema = $res->{schema};

    contains $res->{warnings}[0], "Dumping manual schema",
        'correct warnings on downgrading to load_classes';

    contains $res->{warnings}[1], "dump completed",
        'correct warnings on downgrading to load_classes';

    is scalar @{ $res->{warnings} }, 2,
        'correct number of warnings on downgrading to load_classes'
        or diag @{ $res->{warnings} };

    run_v7_tests($res);

    is $res->{classes}{quuxs}, 'DBIXCSL_Test::Schema::Quux',
        'load_classes downgrade correct';

    is result_count(), $RESULT_COUNT,
        'correct number of Results after upgrade and Result dir removed';

    ok ((not -d result_dir('Result')),
        'Result dir was removed for load_classes downgrade');

    # check that custom content was preserved
    lives_and { is $schema->resultset('Quux')->find(1)->b_method, 'dongs' }
        'custom content was carried over during load_classes downgrade';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel,
        $res->{classes}{bazs} }
        'namespaced class names in custom content are translated during load_classes '.
        'downgrade';

    class_content_contains $schema, $res->{classes}{quuxs},
        "sub b_method { 'dongs' }",
        'custom content from namespaced Result loaded into static dump correctly '.
'during load_classes downgrade';
}

# test a regular schema with use_namespaces => 1 and a custom result_namespace
# downgraded to load_classes
{
    my $res = run_loader(
        clean_dumpdir => 1,
        static => 1,
        result_namespace => 'MyResult',
        naming => 'current',
    );

    contains $res->{warnings}[0], "Dumping manual schema",
        'correct warnings on dumping static schema';

    contains $res->{warnings}[1], "dump completed",
        'correct warnings on dumping static schema';

    is scalar @{ $res->{warnings} }, 2,
        'correct number of warnings on dumping static schema'
        or diag @{ $res->{warnings} };

    run_v7_tests($res);

    is $res->{classes}{quuxs}, 'DBIXCSL_Test::Schema::MyResult::Quux',
        'defaults to use_namespaces and uses custom result_namespace';

    add_custom_content($res->{schema}, { Quux => 'Baz' }, { result_namespace => 'MyResult' });

    # test that with no use_namespaces option, use_namespaces is preserved, and
    # the custom result_namespace is preserved
    $res = run_loader(static => 1, naming => 'current');

    contains $res->{warnings}[0], "Dumping manual schema",
        'correct warnings on re-dumping static schema';

    contains $res->{warnings}[1], "dump completed",
        'correct warnings on re-dumping static schema';

    is scalar @{ $res->{warnings} }, 2,
        'correct number of warnings on re-dumping static schema'
        or diag @{ $res->{warnings} };

    is $res->{classes}{quuxs}, 'DBIXCSL_Test::Schema::MyResult::Quux',
        'use_namespaces and custom result_namespace preserved on re-dump';

    run_v7_tests($res);

    # now downgrade the schema to load_classes
    $res = run_loader(
        static => 1,
        use_namespaces => 0,
        naming => 'current',
    );
    my $schema = $res->{schema};

    contains $res->{warnings}[0], "Dumping manual schema",
        'correct warnings on downgrading to load_classes';

    contains $res->{warnings}[1], "dump completed",
        'correct warnings on downgrading to load_classes';

    is scalar @{ $res->{warnings} }, 2,
        'correct number of warnings on downgrading to load_classes'
        or diag @{ $res->{warnings} };

    run_v7_tests($res);

    is $res->{classes}{quuxs}, 'DBIXCSL_Test::Schema::Quux',
        'load_classes downgrade correct';

    is result_count(), $RESULT_COUNT,
        'correct number of Results after upgrade and Result dir removed';

    ok ((not -d result_dir('MyResult')),
        'Result dir was removed for load_classes downgrade');

    # check that custom content was preserved
    lives_and { is $schema->resultset('Quux')->find(1)->b_method, 'dongs' }
        'custom content was carried over during load_classes downgrade';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel,
        $res->{classes}{bazs} }
        'namespaced class names in custom content are translated during load_classes '.
        'downgrade';

    class_content_contains $schema, $res->{classes}{quuxs},
        "sub b_method { 'dongs' }",
        'custom content from namespaced Result loaded into static dump correctly '.
        'during load_classes downgrade';
}

# rewrite from one result_namespace to another, with external content
{
    clean_dumpdir();

    my $temp_dir = setup_load_external({ Quux => 'Baz', Bar => 'Foo' },
        { result_namespace => 'Result' });

    my $res = run_loader(static => 1, naming => 'current');

    # add some custom content to a Result that will be replaced
    add_custom_content($res->{schema}, { Quux => 'Baz' },
        { result_namespace => 'Result', rel_name_map => { QuuxBaz => 'bazrel2' } });

    # Rewrite implicit 'Result' to 'MyResult'
    $res = run_loader(
        static => 1,
        result_namespace => 'MyResult',
        naming => 'current',
    );
    my $schema = $res->{schema};

    is $res->{classes}{quuxs}, 'DBIXCSL_Test::Schema::MyResult::Quux',
        'using new result_namespace';

    is result_count('MyResult'), $RESULT_COUNT,
        'correct number of Results after rewritten result_namespace';

    ok ((not -d schema_dir('Result')),
        'original Result dir was removed when rewriting result_namespace');

    # check that custom content was preserved
    lives_and { is $schema->resultset('Quux')->find(1)->b_method, 'dongs' }
        'custom content was carried over when rewriting result_namespace';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel2,
        $res->{classes}{bazs} }
        'class names in custom content are translated when rewriting result_namespace';

    class_content_contains $schema, $res->{classes}{quuxs},
        "sub b_method { 'dongs' }",
        'custom content from namespaced Result loaded into static dump correctly '.
        'when rewriting result_namespace';

    # Now rewrite 'MyResult' to 'Mtfnpy'
    $res = run_loader(
        static => 1,
        result_namespace => 'Mtfnpy',
        naming => 'current',
    );
    $schema = $res->{schema};

    is $res->{classes}{quuxs}, 'DBIXCSL_Test::Schema::Mtfnpy::Quux',
        'using new result_namespace';

    is result_count('Mtfnpy'), $RESULT_COUNT,
        'correct number of Results after rewritten result_namespace';

    ok ((not -d result_dir('MyResult')),
        'original Result dir was removed when rewriting result_namespace');

    # check that custom and external content was preserved
    lives_and { is $schema->resultset('Quux')->find(1)->a_method, 'hlagh' }
        'external content was carried over when rewriting result_namespace';

    lives_and { is $schema->resultset('Quux')->find(1)->b_method, 'dongs' }
        'custom content was carried over when rewriting result_namespace';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel2,
        $res->{classes}{bazs} }
        'class names in custom content are translated when rewriting result_namespace';

    lives_and { isa_ok $schema->resultset('Quux')->find(1)->bazrel,
        $res->{classes}{bazs} }
        'class names in external content are translated when rewriting '.
        'result_namespace';

    lives_and { isa_ok $schema->resultset('Bar')->find(1)->foorel,
        $res->{classes}{foos} }
        'class names in external content are translated when rewriting '.
        'result_namespace';

    class_content_contains $schema, $res->{classes}{quuxs},
        "sub b_method { 'dongs' }",
        'custom content from namespaced Result loaded into static dump correctly '.
        'when rewriting result_namespace';

    class_content_contains $schema, $res->{classes}{quuxs},
        "sub a_method { 'hlagh' }",
        'external content from unsingularized Result loaded into static dump correctly';
}

# test upgrading a v4 schema, then check that the version string is correct
{
    clean_dumpdir();
    write_v4_schema_pm();
    run_loader(static => 1);
    my $res = run_loader(static => 1, naming => 'current');
    my $schema = $res->{schema};

    my $file = $schema->loader->get_dump_filename($SCHEMA_CLASS);
    my $code = slurp_file $file;

    my ($dumped_ver) =
        $code =~ /^# Created by DBIx::Class::Schema::Loader v(\S+)/m;

    is $dumped_ver, $DBIx::Class::Schema::Loader::VERSION,
        'correct version dumped after upgrade of v4 static schema';
}

# Test upgrading an already singular result with custom content that refers to
# old class names.
{
    clean_dumpdir();
    write_v4_schema_pm();
    my $res = run_loader(static => 1);
    my $schema = $res->{schema};
    run_v4_tests($res);

    # add some custom content to a Result that will be replaced
    add_custom_content($schema, { Bar => 'Foos' });

    # now upgrade the schema
    $res = run_loader(static => 1, naming => 'current');
    $schema = $res->{schema};
    run_v7_tests($res);

    # check that custom content was preserved
    lives_and { is $schema->resultset('Bar')->find(1)->b_method, 'dongs' }
        'custom content was preserved from Result pre-upgrade';

    lives_and { isa_ok $schema->resultset('Bar')->find(1)->foorel,
        $res->{classes}{foos} }
        'unsingularized class names in custom content from Result with unchanged ' .
        'name are translated';

    class_content_contains $schema, $res->{classes}{bar},
        "sub b_method { 'dongs' }",
        'custom content from Result with unchanged name loaded into static dump ' .
'correctly'; } # test creating static schema in v5 mode then upgrade to current with external # content loaded { clean_dumpdir(); write_v5_schema_pm(); my $res = run_loader(static => 1); contains $res->{warnings}[0], "0.05003 static schema", 'backcompat warning'; run_v5_tests($res); my $temp_dir = setup_load_external({ Baz => 'StationsVisited', StationsVisited => 'Quux', }, { result_namespace => 'Result' }); add_custom_content($res->{schema}, { Baz => 'StationsVisited', }, { result_namespace => 'Result', rel_name_map => { BazStationsvisited => 'custom_content_rel' }, }); $res = run_loader(static => 1, naming => 'current'); my $schema = $res->{schema}; run_v7_tests($res); lives_and { is $schema->resultset('Baz')->find(1)->a_method, 'hlagh' } 'external custom content loaded for v5 -> v6'; lives_and { isa_ok $schema->resultset('Baz')->find(1)->stationsvisitedrel, $res->{classes}{stations_visited} } 'external content rewritten for v5 -> v6'; lives_and { isa_ok $schema->resultset('Baz')->find(1)->custom_content_rel, $res->{classes}{stations_visited} } 'custom content rewritten for v5 -> v6'; lives_and { isa_ok $schema->resultset('StationVisited')->find(1)->quuxrel, $res->{classes}{quuxs} } 'external content rewritten for v5 -> v6 for upgraded Result class names'; } # test creating static schema in v6 mode then upgrade to current with external # content loaded { clean_dumpdir(); write_v6_schema_pm(); my $res = run_loader(static => 1); contains $res->{warnings}[0], "0.06001 static schema", 'backcompat warning'; run_v6_tests($res); my $temp_dir = setup_load_external({ Routechange => 'Quux', }, { result_namespace => 'Result' }); add_custom_content($res->{schema}, { Routechange => 'Quux', }, { result_namespace => 'Result', rel_name_map => { RoutechangeQuux => 'custom_content_rel' }, }); $res = run_loader(static => 1, naming => 'current'); my $schema = $res->{schema}; run_v7_tests($res); lives_and { is $schema->resultset('RouteChange')->find(1)->a_method, 'hlagh' } 'external 
custom content loaded for v6 -> v7'; lives_and { isa_ok $schema->resultset('RouteChange')->find(1)->quuxrel, $res->{classes}{quuxs} } 'external content rewritten for v6 -> v7'; lives_and { isa_ok $schema->resultset('RouteChange')->find(1)->custom_content_rel, $res->{classes}{quuxs} } 'custom content rewritten for v6 -> v7'; } done_testing; END { rmtree $DUMP_DIR unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; } sub clean_dumpdir { rmtree $DUMP_DIR; make_path $DUMP_DIR; } sub run_loader { my %loader_opts = @_; $loader_opts{dump_directory} = $DUMP_DIR if delete $loader_opts{static}; $loader_opts{preserve_case} = 1 if $loader_opts{naming} && $loader_opts{naming} eq 'current'; clean_dumpdir() if delete $loader_opts{clean_dumpdir}; eval { foreach my $source_name ($SCHEMA_CLASS->clone->sources) { Class::Unload->unload("${SCHEMA_CLASS}::${source_name}"); } Class::Unload->unload($SCHEMA_CLASS); }; undef $@; my @connect_info = $make_dbictest_db_with_unique::dsn; my @loader_warnings; local $SIG{__WARN__} = sub { push(@loader_warnings, @_); }; eval qq{ package $SCHEMA_CLASS; use base qw/DBIx::Class::Schema::Loader/; __PACKAGE__->loader_options(\%loader_opts); __PACKAGE__->connection(\@connect_info); }; ok(!$@, "Loader initialization") or diag $@; my $schema = $SCHEMA_CLASS->clone; my (%monikers, %classes); foreach my $source_name ($schema->sources) { my $table_name = $schema->source($source_name)->from; $monikers{$table_name} = $source_name; $classes{$table_name} = $schema->source($source_name)->result_class; } return { schema => $schema, warnings => \@loader_warnings, monikers => \%monikers, classes => \%classes, }; } sub write_v4_schema_pm { my %opts = @_; (my $schema_dir = "$DUMP_DIR/$SCHEMA_CLASS") =~ s/::[^:]+\z//; rmtree $schema_dir; make_path $schema_dir; my $schema_pm = "$schema_dir/Schema.pm"; open my $fh, '>', $schema_pm or die $!; if (not $opts{use_namespaces}) { print $fh <<'EOF'; package DBIXCSL_Test::Schema; use strict; use warnings; use base 'DBIx::Class::Schema'; 
__PACKAGE__->load_classes; # Created by DBIx::Class::Schema::Loader v0.04006 @ 2009-12-25 01:49:25 # DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:ibIJTbfM1ji4pyD/lgSEog # You can replace this text with custom content, and it will be preserved on regeneration 1; EOF } else { print $fh <<'EOF'; package DBIXCSL_Test::Schema; use strict; use warnings; use base 'DBIx::Class::Schema'; __PACKAGE__->load_namespaces; # Created by DBIx::Class::Schema::Loader v0.04006 @ 2010-01-12 16:04:12 # DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:d3wRVsHBNisyhxeaWJZcZQ # You can replace this text with custom content, and it will be preserved on # regeneration 1; EOF } } sub write_v5_schema_pm { my %opts = @_; (my $schema_dir = "$DUMP_DIR/$SCHEMA_CLASS") =~ s/::[^:]+\z//; rmtree $schema_dir; make_path $schema_dir; my $schema_pm = "$schema_dir/Schema.pm"; open my $fh, '>', $schema_pm or die $!; if (exists $opts{use_namespaces} && $opts{use_namespaces} == 0) { print $fh <<'EOF'; package DBIXCSL_Test::Schema; # Created by DBIx::Class::Schema::Loader # DO NOT MODIFY THE FIRST PART OF THIS FILE use strict; use warnings; use base 'DBIx::Class::Schema'; __PACKAGE__->load_classes; # Created by DBIx::Class::Schema::Loader v0.05003 @ 2010-03-27 17:07:37 # DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:LIzC/LT5IYvWpgusfbqMrg # You can replace this text with custom content, and it will be preserved on regeneration 1; EOF } else { print $fh <<'EOF'; package DBIXCSL_Test::Schema; # Created by DBIx::Class::Schema::Loader # DO NOT MODIFY THE FIRST PART OF THIS FILE use strict; use warnings; use base 'DBIx::Class::Schema'; __PACKAGE__->load_namespaces; # Created by DBIx::Class::Schema::Loader v0.05003 @ 2010-03-29 19:44:52 # DO NOT MODIFY THIS OR ANYTHING ABOVE! 
md5sum:D+MYxtGxz97Ghvido5DTEg # You can replace this text with custom content, and it will be preserved on regeneration 1; EOF } } sub write_v6_schema_pm { my %opts = @_; (my $schema_dir = "$DUMP_DIR/$SCHEMA_CLASS") =~ s/::[^:]+\z//; rmtree $schema_dir; make_path $schema_dir; my $schema_pm = "$schema_dir/Schema.pm"; open my $fh, '>', $schema_pm or die $!; if (exists $opts{use_namespaces} && $opts{use_namespaces} == 0) { print $fh <<'EOF'; package DBIXCSL_Test::Schema; # Created by DBIx::Class::Schema::Loader # DO NOT MODIFY THE FIRST PART OF THIS FILE use strict; use warnings; use base 'DBIx::Class::Schema'; __PACKAGE__->load_classes; # Created by DBIx::Class::Schema::Loader v0.06001 @ 2010-04-21 19:56:03 # DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:/fqZCb95hsGIe1g5qyQQZg # You can replace this text with custom content, and it will be preserved on regeneration 1; EOF } else { print $fh <<'EOF'; package DBIXCSL_Test::Schema; # Created by DBIx::Class::Schema::Loader # DO NOT MODIFY THE FIRST PART OF THIS FILE use strict; use warnings; use base 'DBIx::Class::Schema'; __PACKAGE__->load_namespaces; # Created by DBIx::Class::Schema::Loader v0.06001 @ 2010-04-21 19:54:31 # DO NOT MODIFY THIS OR ANYTHING ABOVE! 
md5sum:nwO5Vi47kl0X9SpEoiVO5w # You can replace this text with custom content, and it will be preserved on regeneration 1; EOF } } sub run_v4_tests { my $res = shift; my $schema = $res->{schema}; is_deeply [ @{ $res->{monikers} }{qw/foos bar bazs quuxs stations_visited RouteChange email/} ], [qw/Foos Bar Bazs Quuxs StationsVisited Routechange Email/], 'correct monikers in 0.04006 mode'; isa_ok ((my $bar = eval { $schema->resultset('Bar')->find(1) }), $res->{classes}{bar}, 'found a bar'); isa_ok eval { $bar->foo_id }, $res->{classes}{foos}, 'correct rel name in 0.04006 mode'; ok my $baz = eval { $schema->resultset('Bazs')->find(1) }; isa_ok eval { $baz->quux }, 'DBIx::Class::ResultSet', 'correct rel type and name for UNIQUE FK in 0.04006 mode'; ok my $foo = eval { $schema->resultset('Foos')->find(1) }; isa_ok eval { $foo->email_to_ids }, 'DBIx::Class::ResultSet', 'correct rel name inflection in 0.04006 mode'; ok (($schema->resultset('Routechange')->find(1)->can('quuxsid')), 'correct column accessor in 0.04006 mode'); is $schema->resultset('Routechange')->find(1)->foo2bar, 3, 'correct column accessor for column with word ending with digit in v4 mode'; } sub run_v5_tests { my $res = shift; my $schema = $res->{schema}; is_deeply [ @{ $res->{monikers} }{qw/foos bar bazs quuxs stations_visited RouteChange email/} ], [qw/Foo Bar Baz Quux StationsVisited Routechange Email/], 'correct monikers in v5 mode'; ok my $bar = eval { $schema->resultset('Bar')->find(1) }; isa_ok eval { $bar->foo }, $res->{classes}{foos}, 'correct rel name in v5 mode'; ok my $baz = eval { $schema->resultset('Baz')->find(1) }; isa_ok eval { $baz->quux }, $res->{classes}{quuxs}, 'correct rel type and name for UNIQUE FK in v5 mode'; ok my $foo = eval { $schema->resultset('Foo')->find(1) }; isa_ok eval { $foo->email_to_ids }, 'DBIx::Class::ResultSet', 'correct rel name inflection in v5 mode'; ok (($schema->resultset('Routechange')->find(1)->can('quuxsid')), 'correct column accessor in v5 mode'); is 
$schema->resultset('Routechange')->find(1)->foo2bar, 3, 'correct column accessor for column with word ending with digit in v5 mode'; } sub run_v6_tests { my $res = shift; my $schema = $res->{schema}; is_deeply [ @{ $res->{monikers} }{qw/foos bar bazs quuxs stations_visited RouteChange email/} ], [qw/Foo Bar Baz Quux StationVisited Routechange Email/], 'correct monikers in v6 mode'; ok my $bar = eval { $schema->resultset('Bar')->find(1) }; isa_ok eval { $bar->foo }, $res->{classes}{foos}, 'correct rel name in v6 mode'; ok my $baz = eval { $schema->resultset('Baz')->find(1) }; isa_ok eval { $baz->quux }, $res->{classes}{quuxs}, 'correct rel type and name for UNIQUE FK in v6 mode'; ok my $foo = eval { $schema->resultset('Foo')->find(1) }; isa_ok eval { $foo->emails_to }, 'DBIx::Class::ResultSet', 'correct rel name inflection in v6 mode'; ok my $route_change = eval { $schema->resultset('Routechange')->find(1) }; isa_ok eval { $route_change->quuxsid }, $res->{classes}{quuxs}, 'correct rel name in v6 mode'; ok (($schema->resultset('Routechange')->find(1)->can('quuxsid')), 'correct column accessor in v6 mode'); is $schema->resultset('Routechange')->find(1)->foo2bar, 3, 'correct column accessor for column with word ending with digit in v6 mode'; } sub run_v7_tests { my $res = shift; my $schema = $res->{schema}; is_deeply [ @{ $res->{monikers} }{qw/foos bar bazs quuxs stations_visited RouteChange email/} ], [qw/Foo Bar Baz Quux StationVisited RouteChange Email/], 'correct monikers in current mode'; ok my $bar = eval { $schema->resultset('Bar')->find(1) }; isa_ok eval { $bar->foo }, $res->{classes}{foos}, 'correct rel name in current mode'; ok my $baz = eval { $schema->resultset('Baz')->find(1) }; isa_ok eval { $baz->quux }, $res->{classes}{quuxs}, 'correct rel type and name for UNIQUE FK in current mode'; ok my $foo = eval { $schema->resultset('Foo')->find(1) }; isa_ok eval { $foo->emails_to }, 'DBIx::Class::ResultSet', 'correct rel name inflection in current mode'; ok my 
$route_change = eval { $schema->resultset('RouteChange')->find(1) }; isa_ok eval { $route_change->quux }, $res->{classes}{quuxs}, 'correct rel name based on mixed-case column name in current mode'; ok (($schema->resultset('RouteChange')->find(1)->can('quuxs_id')), 'correct column accessor in current mode'); is $schema->resultset('RouteChange')->find(1)->foo2_bar, 3, 'correct column accessor for column with word ending with digit in current mode'; } { package DBICSL::Test::TempExtDir; use overload '""' => sub { ${$_[0]} }; sub DESTROY { pop @INC; File::Path::rmtree ${$_[0]}; } } sub setup_load_external { my ($rels, $opts) = @_; my $temp_dir = tempdir(CLEANUP => 1); push @INC, $temp_dir; my $external_result_dir = join '/', $temp_dir, (split /::/, $SCHEMA_CLASS), ($opts->{result_namespace} || ()); make_path $external_result_dir; while (my ($from, $to) = each %$rels) { write_ext_result($external_result_dir, $from, $to, $opts); } my $guard = bless \$temp_dir, 'DBICSL::Test::TempExtDir'; return $guard; } sub write_ext_result { my ($result_dir, $from, $to, $opts) = @_; my $relname = $opts->{rel_name_map}{_rel_key($from, $to)} || _relname($to); my $from_class = _qualify_class($from, $opts->{result_namespace}); my $to_class = _qualify_class($to, $opts->{result_namespace}); my $condition = _rel_condition($from, $to); IO::File->new(">$result_dir/${from}.pm")->print(<<"EOF"); package ${from_class}; sub a_method { 'hlagh' } __PACKAGE__->has_one('$relname', '$to_class', { $condition }); 1; EOF return $relname; } sub _relname { my $to = shift; return Lingua::EN::Inflect::Number::to_S(lc $to) . 'rel'; } sub _qualify_class { my ($class, $result_namespace) = @_; return $SCHEMA_CLASS . '::' . ($result_namespace ? $result_namespace . '::' : '') . 
$class; } sub _rel_key { my ($from, $to) = @_; return join '', map ucfirst(Lingua::EN::Inflect::Number::to_S(lc($_))), $from, $to; } sub _rel_condition { my ($from, $to) = @_; return +{ QuuxBaz => q{'foreign.baz_num' => 'self.baz_id'}, BarFoo => q{'foreign.fooid' => 'self.foo_id'}, BazStationsvisited => q{'foreign.id' => 'self.stations_visited_id'}, StationsvisitedQuux => q{'foreign.quuxid' => 'self.quuxs_id'}, RoutechangeQuux => q{'foreign.quuxid' => 'self.QuuxsId'}, }->{_rel_key($from, $to)}; } sub class_content_contains { my ($schema, $class, $substr, $test_name) = @_; my $file = $schema->loader->get_dump_filename($class); my $code = slurp_file $file; local $Test::Builder::Level = $Test::Builder::Level + 1; contains $code, $substr, $test_name; } sub contains { my ($haystack, $needle, $test_name) = @_; local $Test::Builder::Level = $Test::Builder::Level + 1; like $haystack, qr/\Q$needle\E/, $test_name; } sub add_custom_content { my ($schema, $rels, $opts) = @_; while (my ($from, $to) = each %$rels) { my $relname = $opts->{rel_name_map}{_rel_key($from, $to)} || _relname($to); my $from_class = _qualify_class($from, $opts->{result_namespace}); my $to_class = _qualify_class($to, $opts->{result_namespace}); my $condition = _rel_condition($from, $to); my $content = <<"EOF"; package ${from_class}; sub b_method { 'dongs' } __PACKAGE__->has_one('$relname', '$to_class', { $condition }); 1; EOF _write_custom_content($schema, $from_class, $content); } } sub _write_custom_content { my ($schema, $class, $content) = @_; my $pm = $schema->loader->get_dump_filename($class); { local ($^I, @ARGV) = ('.bak', $pm); while (<>) { if (/DO NOT MODIFY THIS OR ANYTHING ABOVE/) { print; print $content; } else { print; } } close ARGV; unlink "${pm}.bak" or die $^E; } } sub result_count { my $path = shift || ''; my $dir = result_dir($path); my $file_count =()= glob "$dir/*"; return $file_count; } sub result_files { my $path = shift || ''; my $dir = result_dir($path); return glob "$dir/*"; } 
sub schema_files { result_files(@_) } sub result_dir { my $path = shift || ''; (my $dir = "$DUMP_DIR/$SCHEMA_CLASS/$path") =~ s{::}{/}g; $dir =~ s{/+\z}{}; return $dir; } sub schema_dir { result_dir(@_) } # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/26dump_use_moose.t0000644000175000017500000001415712542756321021302 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'use_moose'; use strict; use warnings; use Test::More; use lib qw(t/lib); use dbixcsl_dumper_tests; my $t = 'dbixcsl_dumper_tests'; $t->cleanup; # first dump a fresh use_moose=1 schema $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { use_moose => 1, result_base_class => 'My::ResultBaseClass', schema_base_class => 'My::SchemaBaseClass', result_roles => ['TestRole', 'TestRole2'], }, regexes => { schema => [ qr/\nuse Moose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::SchemaBaseClass';\n\n/, qr/\n__PACKAGE__->meta->make_immutable\(inline_constructor => 0\);\n1;(?!\n1;\n)\n.*/, ], Foo => [ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/, qr/=head1 L ROLES APPLIED\n\n=over 4\n\n=item \* L\n\n=item \* L\n\n=back\n\n=cut\n\n/, qr/\nwith 'TestRole', 'TestRole2';\n\n/, qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/, ], Bar => [ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/, qr/=head1 L ROLES APPLIED\n\n=over 4\n\n=item \* L\n\n=item \* L\n\n=back\n\n=cut\n\n/, qr/\nwith 'TestRole', 'TestRole2';\n\n/, qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/, ], }, ); $t->cleanup; # check protect_overloads works as expected $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { use_moose => 1, only_autoclean => 1, }, regexes => { schema => [ qr/\nuse namespace::autoclean;\n/, ], Foo => [ qr/\nuse namespace::autoclean;\n/, ], }, ); $t->cleanup; # now upgrade a 
fresh non-moose schema to use_moose=1 $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { use_moose => 0, result_base_class => 'My::ResultBaseClass', schema_base_class => 'My::SchemaBaseClass', }, regexes => { schema => [ qr/\nuse base 'My::SchemaBaseClass';\n/, ], Foo => [ qr/\nuse base 'My::ResultBaseClass';\n/, ], Bar => [ qr/\nuse base 'My::ResultBaseClass';\n/, ], }, ); # check that changed custom content is upgraded for Moose bits $t->append_to_class('DBICTest::DumpMore::1::Foo', q{# XXX This is my custom content XXX}); $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { use_moose => 1, result_base_class => 'My::ResultBaseClass', schema_base_class => 'My::SchemaBaseClass', }, regexes => { schema => [ qr/\nuse Moose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::SchemaBaseClass';\n\n/, qr/\n__PACKAGE__->meta->make_immutable\(inline_constructor => 0\);\n1;(?!\n1;\n)\n.*/, ], Foo => [ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/, qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/, qr/# XXX This is my custom content XXX/, ], Bar => [ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/, qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/, ], }, ); $t->cleanup; # check with a fresh non-moose schema that Moose custom content added to a use_moose=0 schema is not repeated $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { result_base_class => 'My::ResultBaseClass', schema_base_class => 'My::SchemaBaseClass', }, regexes => { schema => [ qr/\nuse base 'My::SchemaBaseClass';\n/, ], Foo => [ qr/\nuse base 'My::ResultBaseClass';\n/, ], Bar => [ qr/\nuse base 'My::ResultBaseClass';\n/, ], }, ); # add Moose custom content then check it is not repeated # after that regen again *without* the use_moose flag, make # sure moose isn't stripped away 
$t->append_to_class('DBICTest::DumpMore::1::Foo', qq{use Moose;\n__PACKAGE__->meta->make_immutable;\n1;\n}); for my $supply_use_moose (1, 0) { $t->dump_test( classname => 'DBICTest::DumpMore::1', options => { $supply_use_moose ? (use_moose => 1) : (), result_base_class => 'My::ResultBaseClass', schema_base_class => 'My::SchemaBaseClass', }, regexes => { schema => [ qr/\nuse Moose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::SchemaBaseClass';\n\n/, qr/\n__PACKAGE__->meta->make_immutable\(inline_constructor => 0\);\n1;(?!\n1;\n)\n.*/, ], Foo => [ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/, qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/, ], Bar => [ qr/\nuse Moose;\nuse MooseX::NonMoose;\nuse MooseX::MarkAsMethods autoclean => 1;\nextends 'My::ResultBaseClass';\n\n/, qr/\n__PACKAGE__->meta->make_immutable;\n1;(?!\n1;\n)\n.*/, ], }, neg_regexes => { Foo => [ # qr/\nuse Moose;\n.*\nuse Moose;/s, # TODO qr/\n__PACKAGE__->meta->make_immutable;\n.*\n__PACKAGE__->meta->make_immutable;/s, ], }, ); } # check that a moose schema can *not* be downgraded $t->dump_test ( classname => 'DBICTest::DumpMore::1', options => { use_moose => 0, result_base_class => 'My::ResultBaseClass', schema_base_class => 'My::SchemaBaseClass', }, error => qr/\QIt is not possible to "downgrade" a schema that was loaded with use_moose => 1\E/, ); done_testing; DBIx-Class-Schema-Loader-0.07045/t/30_03no_comment_table.t0000644000175000017500000000136112131533457022047 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Test::Exception; use Test::Warn; use DBIx::Class::Schema::Loader::Utils 'slurp_file'; use File::Path; use lib qw(t/lib); use make_dbictest_db; use dbixcsl_test_dir qw/$tdir/; my $dump_path = "$tdir/dump"; { package DBICTest::Schema::1; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options( dump_directory => $dump_path, quiet => 1, ); } 
DBICTest::Schema::1->connect($make_dbictest_db::dsn); plan tests => 1; my $foo = slurp_file("$dump_path/DBICTest/Schema/1/Result/Foo.pm"); my $bar = slurp_file("$dump_path/DBICTest/Schema/1/Result/Bar.pm"); like($foo, qr/Result::Foo\n/, 'No error from lack of comment tables'); END { rmtree($dump_path, 1, 1); } DBIx-Class-Schema-Loader-0.07045/t/10_07mssql_common.t0000644000175000017500000007620212542756321021264 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Test::Exception; use DBIx::Class::Optional::Dependencies; use DBIx::Class::Schema::Loader::Utils qw/warnings_exist_silent sigwarn_silencer/; use Try::Tiny; use File::Path 'rmtree'; use DBIx::Class::Schema::Loader 'make_schema_at'; use namespace::clean; use Scope::Guard (); # use this if you keep a copy of DBD::Sybase linked to FreeTDS somewhere else BEGIN { if (my $lib_dirs = $ENV{DBICTEST_MSSQL_PERL5LIB}) { unshift @INC, $_ for split /:/, $lib_dirs; } } use lib qw(t/lib); use dbixcsl_common_tests (); use dbixcsl_test_dir '$tdir'; use constant EXTRA_DUMP_DIR => "$tdir/mssql_extra_dump"; # for extra tests cleanup my $schema; my (%dsns, $common_version); for (qw/MSSQL MSSQL_ODBC MSSQL_ADO/) { next unless $ENV{"DBICTEST_${_}_DSN"}; (my $dep_group = lc "rdbms_$_") =~ s/mssql$/mssql_sybase/; if (!DBIx::Class::Optional::Dependencies->req_ok_for($dep_group)) { diag 'You need to install ' . DBIx::Class::Optional::Dependencies->req_missing_for($dep_group) . " to test with $_"; next; } $dsns{$_}{dsn} = $ENV{"DBICTEST_${_}_DSN"}; $dsns{$_}{user} = $ENV{"DBICTEST_${_}_USER"}; $dsns{$_}{password} = $ENV{"DBICTEST_${_}_PASS"}; require DBI; my $dbh = DBI->connect (@{$dsns{$_}}{qw/dsn user password/}, { RaiseError => 1, PrintError => 0} ); my $srv_ver = eval { $dbh->get_info(18) || $dbh->selectrow_hashref('master.dbo.xp_msver ProductVersion')->{Character_Value} } || 0; my ($maj_srv_ver) = $srv_ver =~ /^(\d+)/; if (! 
defined $common_version or $common_version > $maj_srv_ver ) { $common_version = $maj_srv_ver; } } plan skip_all => 'You need to set the DBICTEST_MSSQL_DSN, _USER and _PASS and/or the DBICTEST_MSSQL_ODBC_DSN, _USER and _PASS environment variables' unless %dsns; my $mssql_2008_new_data_types = { date => { data_type => 'date' }, time => { data_type => 'time' }, 'time(0)'=> { data_type => 'time', size => 0 }, 'time(1)'=> { data_type => 'time', size => 1 }, 'time(2)'=> { data_type => 'time', size => 2 }, 'time(3)'=> { data_type => 'time', size => 3 }, 'time(4)'=> { data_type => 'time', size => 4 }, 'time(5)'=> { data_type => 'time', size => 5 }, 'time(6)'=> { data_type => 'time', size => 6 }, 'time(7)'=> { data_type => 'time' }, datetimeoffset => { data_type => 'datetimeoffset' }, 'datetimeoffset(0)' => { data_type => 'datetimeoffset', size => 0 }, 'datetimeoffset(1)' => { data_type => 'datetimeoffset', size => 1 }, 'datetimeoffset(2)' => { data_type => 'datetimeoffset', size => 2 }, 'datetimeoffset(3)' => { data_type => 'datetimeoffset', size => 3 }, 'datetimeoffset(4)' => { data_type => 'datetimeoffset', size => 4 }, 'datetimeoffset(5)' => { data_type => 'datetimeoffset', size => 5 }, 'datetimeoffset(6)' => { data_type => 'datetimeoffset', size => 6 }, 'datetimeoffset(7)' => { data_type => 'datetimeoffset' }, datetime2 => { data_type => 'datetime2' }, 'datetime2(0)' => { data_type => 'datetime2', size => 0 }, 'datetime2(1)' => { data_type => 'datetime2', size => 1 }, 'datetime2(2)' => { data_type => 'datetime2', size => 2 }, 'datetime2(3)' => { data_type => 'datetime2', size => 3 }, 'datetime2(4)' => { data_type => 'datetime2', size => 4 }, 'datetime2(5)' => { data_type => 'datetime2', size => 5 }, 'datetime2(6)' => { data_type => 'datetime2', size => 6 }, 'datetime2(7)' => { data_type => 'datetime2' }, hierarchyid => { data_type => 'hierarchyid' }, }; my $tester = dbixcsl_common_tests->new( vendor => 'mssql', auto_inc_pk => 'INTEGER IDENTITY NOT NULL PRIMARY KEY', 
default_function_def => 'DATETIME DEFAULT getdate()', connect_info => [ map { $dsns{$_} } sort keys %dsns ], preserve_case_mode_is_exclusive => 1, quote_char => [ qw/[ ]/ ], basic_date_datatype => ($common_version >= 10) ? 'DATE' : 'SMALLDATETIME', default_on_clause => 'NO ACTION', data_types => { # http://msdn.microsoft.com/en-us/library/ms187752.aspx # numeric types 'int identity' => { data_type => 'integer', is_auto_increment => 1 }, bigint => { data_type => 'bigint' }, int => { data_type => 'integer' }, integer => { data_type => 'integer' }, smallint => { data_type => 'smallint' }, tinyint => { data_type => 'tinyint' }, money => { data_type => 'money' }, smallmoney => { data_type => 'smallmoney' }, bit => { data_type => 'bit' }, real => { data_type => 'real' }, 'float(14)' => { data_type => 'real' }, 'float(24)' => { data_type => 'real' }, 'float(25)' => { data_type => 'double precision' }, 'float(53)' => { data_type => 'double precision' }, float => { data_type => 'double precision' }, 'double precision' => { data_type => 'double precision' }, 'numeric(6)' => { data_type => 'numeric', size => [6,0] }, 'numeric(6,3)' => { data_type => 'numeric', size => [6,3] }, 'decimal(6)' => { data_type => 'decimal', size => [6,0] }, 'decimal(6,3)' => { data_type => 'decimal', size => [6,3] }, 'dec(6,3)' => { data_type => 'decimal', size => [6,3] }, numeric => { data_type => 'numeric' }, decimal => { data_type => 'decimal' }, dec => { data_type => 'decimal' }, # datetime types datetime => { data_type => 'datetime' }, # test rewriting getdate() to current_timestamp 'datetime default getdate()' => { data_type => 'datetime', default_value => \'current_timestamp', original => { default_value => \'getdate()' } }, smalldatetime => { data_type => 'smalldatetime' }, ($common_version >= 10) ? 
%$mssql_2008_new_data_types : (), # string types char => { data_type => 'char', size => 1 }, 'char(2)' => { data_type => 'char', size => 2 }, character => { data_type => 'char', size => 1 }, 'character(2)' => { data_type => 'char', size => 2 }, 'varchar(2)' => { data_type => 'varchar', size => 2 }, nchar => { data_type => 'nchar', size => 1 }, 'nchar(2)' => { data_type => 'nchar', size => 2 }, 'nvarchar(2)' => { data_type => 'nvarchar', size => 2 }, # binary types 'binary' => { data_type => 'binary', size => 1 }, 'binary(2)' => { data_type => 'binary', size => 2 }, 'varbinary(2)' => { data_type => 'varbinary', size => 2 }, # blob types 'varchar(max)' => { data_type => 'text' }, text => { data_type => 'text' }, 'nvarchar(max)' => { data_type => 'ntext' }, ntext => { data_type => 'ntext' }, 'varbinary(max)' => { data_type => 'image' }, image => { data_type => 'image' }, # other types timestamp => { data_type => 'timestamp', inflate_datetime => 0 }, rowversion => { data_type => 'rowversion' }, uniqueidentifier => { data_type => 'uniqueidentifier' }, sql_variant => { data_type => 'sql_variant' }, xml => { data_type => 'xml' }, }, extra => { create => [ q{ CREATE TABLE [mssql_loader_test1.dot] ( id INT IDENTITY NOT NULL PRIMARY KEY, dat VARCHAR(8) ) }, q{ CREATE TABLE mssql_loader_test3 ( id INT IDENTITY NOT NULL PRIMARY KEY ) }, q{ CREATE VIEW mssql_loader_test4 AS SELECT * FROM mssql_loader_test3 }, # test capitalization of cols in unique constraints and rels q{ SET QUOTED_IDENTIFIER ON }, q{ SET ANSI_NULLS ON }, q{ CREATE TABLE [MSSQL_Loader_Test5] ( [Id] INT IDENTITY NOT NULL PRIMARY KEY, [FooCol] INT NOT NULL, [BarCol] INT NOT NULL, UNIQUE ([FooCol], [BarCol]) ) }, q{ CREATE TABLE [MSSQL_Loader_Test6] ( [Five_Id] INT REFERENCES [MSSQL_Loader_Test5] ([Id]) ) }, # 8 through 12 are used for the multi-schema tests and 13 through 16 are used for multi-db tests q{ create table mssql_loader_test17 ( id int identity primary key ) }, q{ create table mssql_loader_test18 ( id 
int identity primary key, seventeen_id int, foreign key (seventeen_id) references mssql_loader_test17(id) on delete set default on update set null ) }, ], pre_drop_ddl => [ 'CREATE TABLE mssql_loader_test3 (id INT IDENTITY NOT NULL PRIMARY KEY)', 'DROP VIEW mssql_loader_test4', ], drop => [ '[mssql_loader_test1.dot]', 'mssql_loader_test3', 'MSSQL_Loader_Test6', 'MSSQL_Loader_Test5', 'mssql_loader_test17', 'mssql_loader_test18', ], count => 14 + 30 * 2 + 26 * 2, # extra + multi-schema + mutli-db run => sub { my ($monikers, $classes, $self); ($schema, $monikers, $classes, $self) = @_; my $connect_info = [@$self{qw/dsn user password/}]; # Test that the table above (with '.' in name) gets loaded correctly. ok((my $rs = eval { $schema->resultset('MssqlLoaderTest1Dot') }), 'got a resultset for table with dot in name'); ok((my $from = eval { $rs->result_source->from }), 'got an $rsrc->from for table with dot in name'); is ref($from), 'SCALAR', '->table with dot in name is a scalar ref'; is eval { $$from }, "[mssql_loader_test1.dot]", '->table with dot in name has correct name'; # Test capitalization of columns and unique constraints ok ((my $rsrc = $schema->resultset($monikers->{mssql_loader_test5})->result_source), 'got result_source'); if ($schema->loader->preserve_case) { is_deeply [ $rsrc->columns ], [qw/Id FooCol BarCol/], 'column name case is preserved with case-sensitive collation'; my %uniqs = $rsrc->unique_constraints; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], [qw/FooCol BarCol/], 'column name case is preserved in unique constraint with case-sensitive collation'); } else { is_deeply [ $rsrc->columns ], [qw/id foocol barcol/], 'column names are lowercased for case-insensitive collation'; my %uniqs = $rsrc->unique_constraints; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], [qw/foocol barcol/], 'columns in unique constraint lowercased for case-insensitive collation'); } lives_and { my $five_row = 
$schema->resultset($monikers->{mssql_loader_test5})->new_result({}); if ($schema->loader->preserve_case) { $five_row->foo_col(1); $five_row->bar_col(2); } else { $five_row->foocol(1); $five_row->barcol(2); } $five_row->insert; my $six_row = $five_row->create_related('mssql_loader_test6s', {}); is $six_row->five->id, 1; } 'relationships for mixed-case tables/columns detected'; # Test that a bad view (where underlying table is gone) is ignored. my $dbh = $schema->storage->dbh; $dbh->do("DROP TABLE mssql_loader_test3"); warnings_exist_silent { $schema->rescan } qr/^Bad table or view 'mssql_loader_test4'/, 'bad view ignored'; throws_ok { $schema->resultset($monikers->{mssql_loader_test4}) } qr/Can't find source/, 'no source registered for bad view'; # test on delete/update fk clause introspection ok ((my $rel_info = $schema->source('MssqlLoaderTest18')->relationship_info('seventeen')), 'got rel info'); is $rel_info->{attrs}{on_delete}, 'SET DEFAULT', 'ON DELETE clause introspected correctly'; is $rel_info->{attrs}{on_update}, 'SET NULL', 'ON UPDATE clause introspected correctly'; is $rel_info->{attrs}{is_deferrable}, 1, 'is_deferrable defaults to 1'; SKIP: { my $dbh = $schema->storage->dbh; try { $dbh->do('CREATE SCHEMA [dbicsl-test]'); } catch { skip "no CREATE SCHEMA privileges", 30 * 2; }; $dbh->do(<<"EOF"); CREATE TABLE [dbicsl-test].mssql_loader_test8 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100) ) EOF $dbh->do(<<"EOF"); CREATE TABLE [dbicsl-test].mssql_loader_test9 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100), eight_id INTEGER NOT NULL, CONSTRAINT loader_test9_uniq UNIQUE (eight_id), FOREIGN KEY (eight_id) REFERENCES [dbicsl-test].mssql_loader_test8 (id) ) EOF $dbh->do('CREATE SCHEMA [dbicsl.test]'); $dbh->do(<<"EOF"); CREATE TABLE [dbicsl.test].mssql_loader_test9 ( pk INT IDENTITY PRIMARY KEY, value VARCHAR(100), eight_id INTEGER NOT NULL, CONSTRAINT loader_test9_uniq UNIQUE (eight_id), FOREIGN KEY (eight_id) REFERENCES [dbicsl-test].mssql_loader_test8 
(id) ) EOF $dbh->do(<<"EOF"); CREATE TABLE [dbicsl.test].mssql_loader_test10 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100), mssql_loader_test8_id INTEGER, FOREIGN KEY (mssql_loader_test8_id) REFERENCES [dbicsl-test].mssql_loader_test8 (id) ) EOF $dbh->do(<<"EOF"); CREATE TABLE [dbicsl.test].mssql_loader_test11 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100), ten_id INTEGER NOT NULL UNIQUE, FOREIGN KEY (ten_id) REFERENCES [dbicsl.test].mssql_loader_test10 (id) ) EOF $dbh->do(<<"EOF"); CREATE TABLE [dbicsl-test].mssql_loader_test12 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100), mssql_loader_test11_id INTEGER, FOREIGN KEY (mssql_loader_test11_id) REFERENCES [dbicsl.test].mssql_loader_test11 (id) ) EOF my $guard = Scope::Guard->new(\&cleanup_schemas); foreach my $db_schema (['dbicsl-test', 'dbicsl.test'], '%') { lives_and { rmtree EXTRA_DUMP_DIR; my @warns; local $SIG{__WARN__} = sub { push @warns, $_[0] unless $_[0] =~ /\bcollides\b/; }; make_schema_at( 'MSSQLMultiSchema', { naming => 'current', db_schema => $db_schema, dump_directory => EXTRA_DUMP_DIR, quiet => 1, }, $connect_info, ); diag join "\n", @warns if @warns; is @warns, 0; } 'dumped schema for "dbicsl-test" and "dbicsl.test" schemas with no warnings'; my ($test_schema, $rsrc, $rs, $row, %uniqs, $rel_info); lives_and { ok $test_schema = MSSQLMultiSchema->connect(@$connect_info); } 'connected test schema'; lives_and { ok $rsrc = $test_schema->source('MssqlLoaderTest8'); } 'got source for table in schema name with dash'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in schema name with dash'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in schema name with dash'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in schema name with dash'; lives_and { ok $rs = $test_schema->resultset('MssqlLoaderTest8'); } 'got resultset for table in schema name with dash'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in 
schema name with dash'; $rel_info = try { $rsrc->relationship_info('dbicsl_dash_test_mssql_loader_test9') }; is_deeply $rel_info->{cond}, { 'foreign.eight_id' => 'self.id' }, 'relationship in schema name with dash'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in schema name with dash'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in schema name with dash'; lives_and { ok $rsrc = $test_schema->source('DbicslDashTestMssqlLoaderTest9'); } 'got source for table in schema name with dash'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in schema name with dash'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['eight_id'], 'correct unique constraint in schema name with dash'); lives_and { ok $rsrc = $test_schema->source('MssqlLoaderTest10'); } 'got source for table in schema name with dot'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in schema name with dot introspected correctly'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in schema name with dot introspected correctly'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in schema name with dot introspected correctly'; lives_and { ok $rs = $test_schema->resultset('MssqlLoaderTest10'); } 'got resultset for table in schema name with dot'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in schema name with dot'; $rel_info = try { $rsrc->relationship_info('mssql_loader_test11') }; is_deeply $rel_info->{cond}, { 'foreign.ten_id' => 'self.id' }, 'relationship in schema name with dot'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in schema name with dot'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in schema name with dot'; lives_and { ok $rsrc = $test_schema->source('MssqlLoaderTest11'); } 'got source for table in schema name with dot'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary 
constraint in schema name with dot'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['ten_id'], 'correct unique constraint in schema name with dot'); lives_and { ok $test_schema->source('MssqlLoaderTest10') ->has_relationship('mssql_loader_test8'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('MssqlLoaderTest8') ->has_relationship('mssql_loader_test10s'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('MssqlLoaderTest12') ->has_relationship('mssql_loader_test11'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('MssqlLoaderTest11') ->has_relationship('mssql_loader_test12s'); } 'cross-schema relationship in multi-db_schema'; } } SKIP: { # for ADO local $SIG{__WARN__} = sigwarn_silencer( qr/Changed database context/ ); my $dbh = $schema->storage->dbh; try { $dbh->do('USE master'); $dbh->do('CREATE DATABASE dbicsl_test1'); } catch { diag "no CREATE DATABASE privileges: '$_'"; skip "no CREATE DATABASE privileges", 26 * 2; }; $dbh->do('CREATE DATABASE dbicsl_test2'); $dbh->do('USE dbicsl_test1'); $dbh->do(<<'EOF'); CREATE TABLE mssql_loader_test13 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100) ) EOF $dbh->do(<<'EOF'); CREATE TABLE mssql_loader_test14 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100), thirteen_id INTEGER REFERENCES mssql_loader_test13 (id), CONSTRAINT loader_test14_uniq UNIQUE (thirteen_id) ) EOF $dbh->do('USE dbicsl_test2'); $dbh->do(<<'EOF'); CREATE TABLE mssql_loader_test14 ( pk INT IDENTITY PRIMARY KEY, value VARCHAR(100), thirteen_id INTEGER, CONSTRAINT loader_test14_uniq UNIQUE (thirteen_id) ) EOF $dbh->do(<<"EOF"); CREATE TABLE mssql_loader_test15 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100) ) EOF $dbh->do(<<"EOF"); CREATE TABLE mssql_loader_test16 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100), fifteen_id INTEGER UNIQUE REFERENCES mssql_loader_test15 (id) ) EOF my $guard = Scope::Guard->new(\&cleanup_databases); 
foreach my $db_schema ({ dbicsl_test1 => '%', dbicsl_test2 => '%' }, { '%' => '%' }) { lives_and { my @warns; local $SIG{__WARN__} = sub { push @warns, $_[0] unless $_[0] =~ /\bcollides\b/; }; make_schema_at( 'MSSQLMultiDatabase', { naming => 'current', db_schema => $db_schema, dump_directory => EXTRA_DUMP_DIR, quiet => 1, }, $connect_info, ); diag join "\n", @warns if @warns; is @warns, 0; } "dumped schema for databases 'dbicsl_test1' and 'dbicsl_test2' with no warnings"; my $test_schema; lives_and { ok $test_schema = MSSQLMultiDatabase->connect(@$connect_info); } 'connected test schema'; my ($rsrc, $rs, $row, $rel_info, %uniqs); lives_and { ok $rsrc = $test_schema->source('MssqlLoaderTest13'); } 'got source for table in database one'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in database one'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in database one'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in database one'; lives_and { ok $rs = $test_schema->resultset('MssqlLoaderTest13'); } 'got resultset for table in database one'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in database one'; $rel_info = try { $rsrc->relationship_info('mssql_loader_test14') }; is_deeply $rel_info->{cond}, { 'foreign.thirteen_id' => 'self.id' }, 'relationship in database one'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in database one'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in database one'; lives_and { ok $rsrc = $test_schema->source('DbicslTest1MssqlLoaderTest14'); } 'got source for table in database one'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in database one'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['thirteen_id'], 'correct unique constraint in database one'); lives_and { ok $rsrc = $test_schema->source('MssqlLoaderTest15'); } 'got source for table in database two'; is try 
{ $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in database two introspected correctly'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in database two introspected correctly'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in database two introspected correctly'; lives_and { ok $rs = $test_schema->resultset('MssqlLoaderTest15'); } 'got resultset for table in database two'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in database two'; $rel_info = try { $rsrc->relationship_info('mssql_loader_test16') }; is_deeply $rel_info->{cond}, { 'foreign.fifteen_id' => 'self.id' }, 'relationship in database two'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in database two'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in database two'; lives_and { ok $rsrc = $test_schema->source('MssqlLoaderTest16'); } 'got source for table in database two'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in database two'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['fifteen_id'], 'correct unique constraint in database two'); } } }, }, ); $tester->run_tests(); sub cleanup_schemas { return if $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; # switch back to default database $schema->storage->disconnect; my $dbh = $schema->storage->dbh; foreach my $table ('[dbicsl-test].mssql_loader_test12', '[dbicsl.test].mssql_loader_test11', '[dbicsl.test].mssql_loader_test10', '[dbicsl.test].mssql_loader_test9', '[dbicsl-test].mssql_loader_test9', '[dbicsl-test].mssql_loader_test8') { try { $dbh->do("DROP TABLE $table"); } catch { diag "Error dropping table: $_"; }; } foreach my $db_schema (qw/dbicsl-test dbicsl.test/) { try { $dbh->do(qq{DROP SCHEMA [$db_schema]}); } catch { diag "Error dropping test schema $db_schema: $_"; }; } rmtree EXTRA_DUMP_DIR; } sub cleanup_databases { return if $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; # for ADO local 
$SIG{__WARN__} = sigwarn_silencer( qr/Changed database context/ ); my $dbh = $schema->storage->dbh; $dbh->do('USE dbicsl_test1'); foreach my $table ('mssql_loader_test14', 'mssql_loader_test13') { try { $dbh->do("DROP TABLE $table"); } catch { diag "Error dropping table: $_"; }; } $dbh->do('USE dbicsl_test2'); foreach my $table ('mssql_loader_test16', 'mssql_loader_test15', 'mssql_loader_test14') { try { $dbh->do("DROP TABLE $table"); } catch { diag "Error dropping table: $_"; }; } $dbh->do('USE master'); foreach my $database (qw/dbicsl_test1 dbicsl_test2/) { try { $dbh->do(qq{DROP DATABASE $database}); } catch { diag "Error dropping test database '$database': $_"; }; } rmtree EXTRA_DUMP_DIR; } # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/22dump.t0000644000175000017500000000534712542756321017221 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Test::Exception; use Test::Warn; use lib qw(t/lib); use File::Path; use make_dbictest_db; use dbixcsl_test_dir qw/$tdir/; my $dump_path = "$tdir/dump"; { package DBICTest::Schema::1; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options( dump_directory => $dump_path, ); } { package DBICTest::Schema::2; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options( dump_directory => $dump_path, really_erase_my_files => 1, ); } rmtree($dump_path, 1, 1); lives_ok { warnings_exist { DBICTest::Schema::1->connect($make_dbictest_db::dsn) } [ qr|^Dumping manual schema|, qr|^Schema dump completed| ]; } 'no death with dump_directory set' or diag "Dump failed: $@"; is_deeply( [ sort @{ DBICTest::Schema::1->loader->generated_classes } ], [ sort 'DBICTest::Schema::1', map "DBICTest::Schema::1::Result::$_", qw(Foo Bar) ], 'generated_classes has schema and result classes' ); DBICTest::Schema::1->_loader_invoked(undef); SKIP: { skip "ActiveState perl produces additional warnings", 1 if ($^O eq 'MSWin32'); warnings_exist { DBICTest::Schema::1->connect($make_dbictest_db::dsn) } [ 
qr|^Dumping manual schema|, qr|^Schema dump completed| ]; is_deeply( [ sort @{ DBICTest::Schema::1->loader->generated_classes } ], [ ], 'no classes generated on second dump' ); rmtree($dump_path, 1, 1); } lives_ok { warnings_exist { DBICTest::Schema::2->connect($make_dbictest_db::dsn) } [ qr|^Dumping manual schema|, qr|^Schema dump completed| ]; } 'no death with dump_directory set (overwrite1)' or diag "Dump failed: $@"; is_deeply( [ sort @{ DBICTest::Schema::2->loader->generated_classes } ], [ sort 'DBICTest::Schema::2', map "DBICTest::Schema::2::Result::$_", qw(Foo Bar) ], 'generated_classes has schema and result classes' ); DBICTest::Schema::2->_loader_invoked(undef); lives_ok { warnings_exist { DBICTest::Schema::2->connect($make_dbictest_db::dsn) } [ qr/^Dumping manual schema/, qr|^Deleting .+Schema/2.+ due to 'really_erase_my_files'|, qr|^Deleting .+Schema/2/Result/Foo.+ due to 'really_erase_my_files'|, qr|^Deleting .+Schema/2/Result/Bar.+ due to 'really_erase_my_files'|, qr/^Schema dump completed/ ]; } 'no death with dump_directory set (overwrite2)' or diag "Dump failed: $@"; is_deeply( [ sort @{ DBICTest::Schema::2->loader->generated_classes } ], [ sort 'DBICTest::Schema::2', map "DBICTest::Schema::2::Result::$_", qw(Foo Bar) ], 'all classes regenerated with really_erase_my_files', ); done_testing(); END { rmtree($dump_path, 1, 1); } DBIx-Class-Schema-Loader-0.07045/t/10_10informix_common.t0000644000175000017500000003532112542756321021747 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_rdbms_informix'; use strict; use warnings; use Test::More; use Test::Exception; use Try::Tiny; use File::Path 'rmtree'; use DBIx::Class::Schema::Loader 'make_schema_at'; use DBIx::Class::Schema::Loader::Utils 'split_name'; use String::ToIdentifier::EN::Unicode 'to_identifier'; use namespace::clean; use lib qw(t/lib); use dbixcsl_common_tests (); use dbixcsl_test_dir '$tdir'; use constant EXTRA_DUMP_DIR => 
"$tdir/informix_extra_dump"; # to support " quoted identifiers BEGIN { $ENV{DELIMIDENT} = 'y' } # This test doesn't run over a shared memory connection, because of the single connection limit. my $dsn = $ENV{DBICTEST_INFORMIX_DSN} || ''; my $user = $ENV{DBICTEST_INFORMIX_USER} || ''; my $password = $ENV{DBICTEST_INFORMIX_PASS} || ''; my ($schema, $extra_schema); # for cleanup in END for extra tests dbixcsl_common_tests->new( vendor => 'Informix', auto_inc_pk => 'serial primary key', null => '', default_function => 'current year to fraction(5)', default_function_def => 'datetime year to fraction(5) default current year to fraction(5)', dsn => $dsn, user => $user, password => $password, loader_options => { preserve_case => 1 }, quote_char => '"', data_types => { # http://publib.boulder.ibm.com/infocenter/idshelp/v115/index.jsp?topic=/com.ibm.sqlr.doc/ids_sqr_094.htm # Numeric Types 'int' => { data_type => 'integer' }, integer => { data_type => 'integer' }, int8 => { data_type => 'bigint' }, bigint => { data_type => 'bigint' }, serial => { data_type => 'integer', is_auto_increment => 1 }, bigserial => { data_type => 'bigint', is_auto_increment => 1 }, serial8 => { data_type => 'bigint', is_auto_increment => 1 }, smallint => { data_type => 'smallint' }, real => { data_type => 'real' }, smallfloat => { data_type => 'real' }, # just 'double' is a syntax error 'double precision' => { data_type => 'double precision' }, float => { data_type => 'double precision' }, 'float(1)' => { data_type => 'double precision' }, 'float(5)' => { data_type => 'double precision' }, 'float(10)' => { data_type => 'double precision' }, 'float(15)' => { data_type => 'double precision' }, 'float(16)' => { data_type => 'double precision' }, numeric => { data_type => 'numeric' }, decimal => { data_type => 'numeric' }, dec => { data_type => 'numeric' }, 'numeric(6,3)' => { data_type => 'numeric', size => [6,3] }, 'decimal(6,3)' => { data_type => 'numeric', size => [6,3] }, 'dec(6,3)' => { data_type 
=> 'numeric', size => [6,3] }, # Boolean Type # XXX this should map to 'boolean' boolean => { data_type => 'smallint' }, # Money Type money => { data_type => 'money' }, 'money(3,3)' => { data_type => 'numeric', size => [3,3] }, # Byte Type byte => { data_type => 'bytea', original => { data_type => 'byte' } }, # Character String Types char => { data_type => 'char', size => 1 }, 'char(3)' => { data_type => 'char', size => 3 }, character => { data_type => 'char', size => 1 }, 'character(3)' => { data_type => 'char', size => 3 }, 'varchar(3)' => { data_type => 'varchar', size => 3 }, 'character varying(3)' => { data_type => 'varchar', size => 3 }, # XXX min size not supported, colmin from syscolumns is NULL 'varchar(3,2)' => { data_type => 'varchar', size => 3 }, 'character varying(3,2)' => { data_type => 'varchar', size => 3 }, nchar => { data_type => 'nchar', size => 1 }, 'nchar(3)' => { data_type => 'nchar', size => 3 }, 'nvarchar(3)' => { data_type => 'nvarchar', size => 3 }, 'nvarchar(3,2)' => { data_type => 'nvarchar', size => 3 }, 'lvarchar(3)' => { data_type => 'lvarchar', size => 3 }, 'lvarchar(33)' => { data_type => 'lvarchar', size => 33 }, text => { data_type => 'text' }, # DateTime Types date => { data_type => 'date' }, 'date default today' => { data_type => 'date', default_value => \'today' }, # XXX support all precisions 'datetime year to fraction(5)', => { data_type => 'datetime year to fraction(5)' }, 'datetime year to fraction(5) default current year to fraction(5)', => { data_type => 'datetime year to fraction(5)', default_value => \'current year to fraction(5)' }, # XXX do interval # Blob Types # XXX no way to distinguish opaque types boolean, blob and clob blob => { data_type => 'blob' }, clob => { data_type => 'blob' }, # IDSSECURITYLABEL Type # # This requires the DBSECADM privilege and a security policy on the # table, things I know nothing about. 
# idssecuritylabel => { data_type => 'idssecuritylabel' }, # List Types # XXX need to introspect element type too 'list(varchar(20) not null)' => { data_type => 'list' }, 'multiset(varchar(20) not null)' => { data_type => 'multiset' }, 'set(varchar(20) not null)' => { data_type => 'set' }, }, extra => { count => 26 * 2, run => sub { ($schema) = @_; SKIP: { skip 'Set the DBICTEST_INFORMIX_EXTRADB_DSN, _USER and _PASS environment variables to run the multi-database tests', 26 * 2 unless $ENV{DBICTEST_INFORMIX_EXTRADB_DSN}; $extra_schema = $schema->clone; $extra_schema->connection(@ENV{map "DBICTEST_INFORMIX_EXTRADB_$_", qw/DSN USER PASS/ }); my $dbh1 = $schema->storage->dbh; $dbh1->do(<<'EOF'); CREATE TABLE informix_loader_test4 ( id SERIAL PRIMARY KEY, value VARCHAR(100) ) EOF $dbh1->do(<<'EOF'); CREATE TABLE informix_loader_test5 ( id SERIAL PRIMARY KEY, value VARCHAR(100), four_id INTEGER REFERENCES informix_loader_test4 (id) ) EOF $dbh1->do(<<'EOF'); ALTER TABLE informix_loader_test5 ADD CONSTRAINT UNIQUE (four_id) CONSTRAINT loader_test5_uniq EOF my $db1 = db_name($schema); $dbh1->disconnect; my $dbh2 = $extra_schema->storage->dbh; $dbh2->do(<<'EOF'); CREATE TABLE informix_loader_test5 ( pk SERIAL PRIMARY KEY, value VARCHAR(100), four_id INTEGER ) EOF $dbh2->do(<<'EOF'); ALTER TABLE informix_loader_test5 ADD CONSTRAINT UNIQUE (four_id) CONSTRAINT loader_test5_uniq EOF $dbh2->do(<<"EOF"); CREATE TABLE informix_loader_test6 ( id SERIAL PRIMARY KEY, value VARCHAR(100) ) EOF $dbh2->do(<<"EOF"); CREATE TABLE informix_loader_test7 ( id SERIAL PRIMARY KEY, value VARCHAR(100), six_id INTEGER UNIQUE REFERENCES informix_loader_test6 (id) ) EOF my $db2 = db_name($extra_schema); $dbh2->disconnect; my $db1_moniker = join '', map ucfirst lc, split_name to_identifier $db1; my $db2_moniker = join '', map ucfirst lc, split_name to_identifier $db2; foreach my $db_schema ({ $db1 => '%', $db2 => '%' }, { '%' => '%' }) { lives_and { my @warns; local $SIG{__WARN__} = sub { push 
@warns, $_[0] unless $_[0] =~ /\bcollides\b/ || $_[0] =~ /unreferencable/; }; make_schema_at( 'InformixMultiDatabase', { naming => 'current', db_schema => $db_schema, dump_directory => EXTRA_DUMP_DIR, quiet => 1, }, [ $dsn, $user, $password ], ); InformixMultiDatabase->storage->disconnect; diag join "\n", @warns if @warns; is @warns, 0; } "dumped schema for databases $db1 and $db2 with no warnings"; my $test_schema; lives_and { ok $test_schema = InformixMultiDatabase->connect($dsn, $user, $password); } 'connected test schema'; my ($rsrc, $rs, $row, $rel_info, %uniqs); lives_and { ok $rsrc = $test_schema->source("InformixLoaderTest4"); } 'got source for table in database one'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in database one'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in database one'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in database one'; lives_and { ok $rs = $test_schema->resultset("InformixLoaderTest4"); } 'got resultset for table in database one'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in database one'; $rel_info = try { $rsrc->relationship_info("informix_loader_test5") }; is_deeply $rel_info->{cond}, { 'foreign.four_id' => 'self.id' }, 'relationship in database one'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in database one'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in database one'; lives_and { ok $rsrc = $test_schema->source("${db1_moniker}InformixLoaderTest5"); } 'got source for table in database one'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in database one'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['four_id'], 'correct unique constraint in database one'); lives_and { ok $rsrc = $test_schema->source("InformixLoaderTest6"); } 'got source for table in database two'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 
'column in database two introspected correctly'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in database two introspected correctly'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in database two introspected correctly'; lives_and { ok $rs = $test_schema->resultset("InformixLoaderTest6"); } 'got resultset for table in database two'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in database two'; $rel_info = try { $rsrc->relationship_info('informix_loader_test7') }; is_deeply $rel_info->{cond}, { 'foreign.six_id' => 'self.id' }, 'relationship in database two'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in database two'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in database two'; lives_and { ok $rsrc = $test_schema->source("InformixLoaderTest7"); } 'got source for table in database two'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in database two'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['six_id'], 'correct unique constraint in database two'); } } }, }, )->run_tests(); sub db_name { my $schema = shift; # When we clone the schema, it still references the original loader, which # references the original schema. 
local $schema->loader->{schema} = $schema; return $schema->loader->_current_db; $schema->storage->disconnect; } END { if (not $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}) { if (my $dbh2 = try { $extra_schema->storage->dbh }) { try { $dbh2->do('DROP TABLE informix_loader_test7'); $dbh2->do('DROP TABLE informix_loader_test6'); $dbh2->do('DROP TABLE informix_loader_test5'); } catch { die "Error dropping test tables: $_"; }; $dbh2->disconnect; } if (my $dbh1 = try { $schema->storage->dbh }) { try { $dbh1->do('DROP TABLE informix_loader_test5'); $dbh1->do('DROP TABLE informix_loader_test4'); } catch { die "Error dropping test tables: $_"; }; $dbh1->disconnect; } rmtree EXTRA_DUMP_DIR; } } # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/27filter_generated.t0000644000175000017500000000514512542756321021560 0ustar ilmariilmariuse strict; use warnings; use DBIx::Class::Schema::Loader; use DBIx::Class::Schema::Loader::Utils 'slurp_file'; use File::Path; use Test::More tests => 19; use Test::Exception; use lib qw(t/lib); use make_dbictest_db; use dbixcsl_test_dir qw/$tdir/; my $dump_path = "$tdir/dump"; my %original_class_data; my ($schema_file_count, $result_file_count); { package DBICTest::Schema::1; use Test::More; use base 'DBIx::Class::Schema::Loader'; __PACKAGE__->loader_options( dump_directory => $dump_path, quiet => 1, filter_generated_code => sub { my ($type, $class, $text) = @_; like $type, qr/^(?:schema|result)\z/, 'got correct file type'; if ($type eq 'schema') { $schema_file_count++; is $class, 'DBICTest::Schema::1', 'correct class for schema type file passed to filter'; } elsif ($type eq 'result') { $result_file_count++; like $class, qr/^DBICTest::Schema::1::Result::(?:Foo|Bar)\z/, 'correct class for result type file passed to filter'; } else { die 'invalid file type passed to filter'; } $original_class_data{$class} = $text; if ($class =~ /::1$/) { $text = "No Gotcha!"; } else { $text .= q{my $foo = "Kilroy was here";}; } return $text; }, ); } { package 
DBICTest::Schema::2; use base 'DBIx::Class::Schema::Loader'; __PACKAGE__->loader_options( dump_directory => $dump_path, quiet => 1, filter_generated_code => "$^X t/bin/simple_filter", ); } DBICTest::Schema::1->connect($make_dbictest_db::dsn); # schema is generated in 2 passes is $schema_file_count, 2, 'correct number of schema files passed to filter'; is $result_file_count, 4, 'correct number of result files passed to filter'; my $foo = slurp_file "$dump_path/DBICTest/Schema/1/Result/Foo.pm"; ok ! -e "$dump_path/DBICTest/Schema/1.pm", "No package means no file written"; ok $original_class_data{"DBICTest::Schema::1"}, "Even though we processed the missing class"; like $foo, qr/# Created by .* THE FIRST PART/s, "We get the whole autogenerated text"; like $foo, qr/Kilroy was here/, "Can insert text"; DBICTest::Schema::2->connect($make_dbictest_db::dsn); $foo = slurp_file "$dump_path/DBICTest/Schema/2/Result/Foo.pm"; like $foo, qr/Kilroy was here/, "Can insert text via command filter"; END { rmtree($dump_path, 1, 1); } DBIx-Class-Schema-Loader-0.07045/t/10_02mysql_common.t0000644000175000017500000006245012542756321021265 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_rdbms_mysql'; use strict; use warnings; use Test::More; use Test::Exception; use Try::Tiny; use File::Path 'rmtree'; use DBIx::Class::Schema::Loader::Utils 'slurp_file'; use DBIx::Class::Schema::Loader 'make_schema_at'; use lib qw(t/lib); use dbixcsl_common_tests; use dbixcsl_test_dir '$tdir'; use constant EXTRA_DUMP_DIR => "$tdir/mysql_extra_dump"; my $dsn = $ENV{DBICTEST_MYSQL_DSN} || ''; my $user = $ENV{DBICTEST_MYSQL_USER} || ''; my $password = $ENV{DBICTEST_MYSQL_PASS} || ''; my $test_innodb = $ENV{DBICTEST_MYSQL_INNODB} || 0; my $skip_rels_msg = 'You need to set the environment variable DBICTEST_MYSQL_INNODB=1 to test relationships.'; my $innodb = $test_innodb ? 
q{Engine=InnoDB} : ''; my ($schema, $databases_created); # for cleanup in END for extra tests diag $skip_rels_msg if not $test_innodb; dbixcsl_common_tests->new( vendor => 'Mysql', auto_inc_pk => 'INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT', innodb => $innodb, dsn => $dsn, user => $user, password => $password, connect_info_opts => { on_connect_call => 'set_strict_mode' }, loader_options => { preserve_case => 1 }, skip_rels => $test_innodb ? 0 : $skip_rels_msg, quote_char => '`', no_inline_rels => 1, no_implicit_rels => 1, default_on_clause => 'RESTRICT', data_types => { # http://dev.mysql.com/doc/refman/5.5/en/data-type-overview.html # Numeric Types 'bit' => { data_type => 'bit', size => 1 }, 'bit(11)' => { data_type => 'bit', size => 11 }, 'bool' => { data_type => 'tinyint' }, 'boolean' => { data_type => 'tinyint' }, 'tinyint' => { data_type => 'tinyint' }, 'tinyint unsigned' => { data_type => 'tinyint', extra => { unsigned => 1 } }, 'smallint' => { data_type => 'smallint' }, 'smallint unsigned' => { data_type => 'smallint', extra => { unsigned => 1 } }, 'mediumint' => { data_type => 'mediumint' }, 'mediumint unsigned' => { data_type => 'mediumint', extra => { unsigned => 1 } }, 'int' => { data_type => 'integer' }, 'int unsigned' => { data_type => 'integer', extra => { unsigned => 1 } }, 'integer' => { data_type => 'integer' }, 'integer unsigned' => { data_type => 'integer', extra => { unsigned => 1 } }, 'integer not null' => { data_type => 'integer' }, 'bigint' => { data_type => 'bigint' }, 'bigint unsigned' => { data_type => 'bigint', extra => { unsigned => 1 } }, 'serial' => { data_type => 'bigint', is_auto_increment => 1, extra => { unsigned => 1 } }, 'float' => { data_type => 'float' }, 'float unsigned' => { data_type => 'float', extra => { unsigned => 1 } }, 'double' => { data_type => 'double precision' }, 'double unsigned' => { data_type => 'double precision', extra => { unsigned => 1 } }, 'double precision' => { data_type => 'double precision' }, 'double 
precision unsigned' => { data_type => 'double precision', extra => { unsigned => 1 } }, # we skip 'real' because its alias depends on the 'REAL AS FLOAT' setting 'float(2)' => { data_type => 'float' }, 'float(24)' => { data_type => 'float' }, 'float(25)' => { data_type => 'double precision' }, 'float(3,3)' => { data_type => 'float', size => [3,3] }, 'double(3,3)' => { data_type => 'double precision', size => [3,3] }, 'double precision(3,3)' => { data_type => 'double precision', size => [3,3] }, 'decimal' => { data_type => 'decimal' }, 'decimal unsigned' => { data_type => 'decimal', extra => { unsigned => 1 } }, 'dec' => { data_type => 'decimal' }, 'numeric' => { data_type => 'decimal' }, 'fixed' => { data_type => 'decimal' }, 'decimal(3)' => { data_type => 'decimal', size => [3,0] }, 'decimal(3,3)' => { data_type => 'decimal', size => [3,3] }, 'dec(3,3)' => { data_type => 'decimal', size => [3,3] }, 'numeric(3,3)' => { data_type => 'decimal', size => [3,3] }, 'fixed(3,3)' => { data_type => 'decimal', size => [3,3] }, # Date and Time Types 'date' => { data_type => 'date', datetime_undef_if_invalid => 1 }, 'datetime' => { data_type => 'datetime', datetime_undef_if_invalid => 1 }, 'timestamp default current_timestamp' => { data_type => 'timestamp', default_value => \'current_timestamp', datetime_undef_if_invalid => 1 }, 'time' => { data_type => 'time' }, 'year' => { data_type => 'year' }, 'year(4)' => { data_type => 'year' }, # String Types 'char' => { data_type => 'char', size => 1 }, 'char(11)' => { data_type => 'char', size => 11 }, 'varchar(20)' => { data_type => 'varchar', size => 20 }, 'binary' => { data_type => 'binary', size => 1 }, 'binary(11)' => { data_type => 'binary', size => 11 }, 'varbinary(20)'=> { data_type => 'varbinary', size => 20 }, 'tinyblob' => { data_type => 'tinyblob' }, 'tinytext' => { data_type => 'tinytext' }, 'blob' => { data_type => 'blob' }, # text(M) types will map to the appropriate type, length is not stored 'text' => { data_type => 
'text' }, 'mediumblob' => { data_type => 'mediumblob' }, 'mediumtext' => { data_type => 'mediumtext' }, 'longblob' => { data_type => 'longblob' }, 'longtext' => { data_type => 'longtext' }, ( map { "$_('','foo','bar','baz')" => { data_type => $_, extra => { list => ['', qw/foo bar baz/] } }, "$_('foo \\'bar\\' baz', 'foo ''bar'' quux')" => { data_type => $_, extra => { list => [q{foo 'bar' baz}, q{foo 'bar' quux}] } }, "$_('''', '''foo', 'bar''')" => { data_type => $_, extra => { list => [qw(' 'foo bar')] } }, "$_('\\'', '\\'foo', 'bar\\'')", => { data_type => $_, extra => { list => [qw(' 'foo bar')] } }, } qw(set enum) ), # RT#68717 "enum('11,10 (<500)/0 DUN','4,90 (<120)/0 EUR') NOT NULL default '11,10 (<500)/0 DUN'" => { data_type => 'enum', extra => { list => ['11,10 (<500)/0 DUN', '4,90 (<120)/0 EUR'] }, default_value => '11,10 (<500)/0 DUN' }, "set('11_10 (<500)/0 DUN','4_90 (<120)/0 EUR') NOT NULL default '11_10 (<500)/0 DUN'" => { data_type => 'set', extra => { list => ['11_10 (<500)/0 DUN', '4_90 (<120)/0 EUR'] }, default_value => '11_10 (<500)/0 DUN' }, "enum('19,90 (<500)/0 EUR','4,90 (<120)/0 EUR','7,90 (<200)/0 CHF','300 (<6000)/0 CZK','4,90 (<100)/0 EUR','39 (<900)/0 DKK','299 (<5000)/0 EEK','9,90 (<250)/0 EUR','3,90 (<100)/0 GBP','3000 (<70000)/0 HUF','4000 (<70000)/0 JPY','13,90 (<200)/0 LVL','99 (<2500)/0 NOK','39 (<1000)/0 PLN','1000 (<20000)/0 RUB','49 (<2500)/0 SEK','29 (<600)/0 USD','19,90 (<600)/0 EUR','0 EUR','0 CHF') NOT NULL default '19,90 (<500)/0 EUR'" => { data_type => 'enum', extra => { list => ['19,90 (<500)/0 EUR','4,90 (<120)/0 EUR','7,90 (<200)/0 CHF','300 (<6000)/0 CZK','4,90 (<100)/0 EUR','39 (<900)/0 DKK','299 (<5000)/0 EEK','9,90 (<250)/0 EUR','3,90 (<100)/0 GBP','3000 (<70000)/0 HUF','4000 (<70000)/0 JPY','13,90 (<200)/0 LVL','99 (<2500)/0 NOK','39 (<1000)/0 PLN','1000 (<20000)/0 RUB','49 (<2500)/0 SEK','29 (<600)/0 USD','19,90 (<600)/0 EUR','0 EUR','0 CHF'] }, default_value => '19,90 (<500)/0 EUR' }, }, extra => { create => [ 
qq{ CREATE TABLE `mysql_loader-test1` ( id INT AUTO_INCREMENT PRIMARY KEY COMMENT 'The\15\12Column', value varchar(100) ) $innodb COMMENT 'The\15\12Table' }, q{ CREATE VIEW mysql_loader_test2 AS SELECT * FROM `mysql_loader-test1` }, # RT#68717 qq{ CREATE TABLE `mysql_loader_test3` ( `ISO3_code` char(3) NOT NULL default '', `lang_pref` enum('de','en','fr','nl','dk','es','se') NOT NULL, `vat` decimal(4,2) default '16.00', `price_group` enum('EUR_DEFAULT','GBP_GBR','EUR_AUT_BEL_FRA_IRL_NLD','EUR_DNK_SWE','EUR_AUT','EUR_BEL','EUR_FIN','EUR_FRA','EUR_IRL','EUR_NLD','EUR_DNK','EUR_POL','EUR_PRT','EUR_SWE','CHF_CHE','DKK_DNK','SEK_SWE','NOK_NOR','USD_USA','CZK_CZE','PLN_POL','RUB_RUS','HUF_HUN','SKK_SVK','JPY_JPN','LVL_LVA','ROL_ROU','EEK_EST') NOT NULL default 'EUR_DEFAULT', `del_group` enum('19,90 (<500)/0 EUR','4,90 (<120)/0 EUR','7,90 (<200)/0 CHF','300 (<6000)/0 CZK','4,90 (<100)/0 EUR','39 (<900)/0 DKK','299 (<5000)/0 EEK','9,90 (<250)/0 EUR','3,90 (<100)/0 GBP','3000 (<70000)/0 HUF','4000 (<70000)/0 JPY','13,90 (<200)/0 LVL','99 (<2500)/0 NOK','39 (<1000)/0 PLN','1000 (<20000)/0 RUB','49 (<2500)/0 SEK','29 (<600)/0 USD','19,90 (<600)/0 EUR','0 EUR','0 CHF') NOT NULL default '19,90 (<500)/0 EUR', `express_del_group` enum('NO','39 EUR (EXPRESS)','59 EUR (EXPRESS)','79 CHF (EXPRESS)','49 EUR (EXPRESS)','990 CZK (EXPRESS)','19,9 EUR (EXPRESS)','290 DKK (EXPRESS)','990 EEK (EXPRESS)','39 GBP (EXPRESS)','14000 HUF (EXPRESS)','49 LVL (EXPRESS)','590 NOK (EXPRESS)','250 PLN (EXPRESS)','490 SEK (EXPRESS)') NOT NULL default 'NO', `pmethod` varchar(255) NOT NULL default 'VISA,MASTER', `delivery_time` varchar(5) default NULL, `express_delivery_time` varchar(5) default NULL, `eu` int(1) default '0', `cod_costs` varchar(12) default NULL, PRIMARY KEY (`ISO3_code`) ) $innodb }, # 4 through 10 are used for the multi-schema tests qq{ create table mysql_loader_test11 ( id int auto_increment primary key ) $innodb }, qq{ create table mysql_loader_test12 ( id int auto_increment primary 
key, eleven_id int, foreign key (eleven_id) references mysql_loader_test11(id) on delete restrict on update set null ) $innodb }, ], pre_drop_ddl => [ 'DROP VIEW mysql_loader_test2', ], drop => [ 'mysql_loader-test1', 'mysql_loader_test3', 'mysql_loader_test11', 'mysql_loader_test12' ], count => 9 + 30 * 2, run => sub { my ($monikers, $classes); ($schema, $monikers, $classes) = @_; is $monikers->{'mysql_loader-test1'}, 'MysqlLoaderTest1', 'table with dash correctly monikerized'; my $rsrc = $schema->source('MysqlLoaderTest2'); is $rsrc->column_info('value')->{data_type}, 'varchar', 'view introspected successfully'; # test that views are marked as such isa_ok $schema->resultset($monikers->{mysql_loader_test2})->result_source, 'DBIx::Class::ResultSource::View', 'views have table_class set correctly'; $rsrc = $schema->source('MysqlLoaderTest3'); is_deeply $rsrc->column_info('del_group')->{extra}{list}, ['19,90 (<500)/0 EUR','4,90 (<120)/0 EUR','7,90 (<200)/0 CHF','300 (<6000)/0 CZK','4,90 (<100)/0 EUR','39 (<900)/0 DKK','299 (<5000)/0 EEK','9,90 (<250)/0 EUR','3,90 (<100)/0 GBP','3000 (<70000)/0 HUF','4000 (<70000)/0 JPY','13,90 (<200)/0 LVL','99 (<2500)/0 NOK','39 (<1000)/0 PLN','1000 (<20000)/0 RUB','49 (<2500)/0 SEK','29 (<600)/0 USD','19,90 (<600)/0 EUR','0 EUR','0 CHF'], 'hairy enum introspected correctly'; my $class = $classes->{'mysql_loader-test1'}; my $filename = $schema->loader->get_dump_filename($class); my $code = slurp_file $filename; like $code, qr/^=head1 NAME\n\n^$class - The\nTable\n\n^=cut\n/m, 'table comment'; like $code, qr/^=head2 id\n\n(.+:.+\n)+\nThe\nColumn\n\n/m, 'column comment and attrs'; # test on delete/update fk clause introspection ok ((my $rel_info = $schema->source('MysqlLoaderTest12')->relationship_info('eleven')), 'got rel info'); is $rel_info->{attrs}{on_delete}, 'RESTRICT', 'ON DELETE clause introspected correctly'; is $rel_info->{attrs}{on_update}, 'SET NULL', 'ON UPDATE clause introspected correctly'; # multischema tests follow 
SKIP: { my $dbh = $schema->storage->dbh; try { $dbh->do('CREATE DATABASE `dbicsl-test`'); } catch { note "CREATE DATABASE returned error: '$_'"; skip "no CREATE DATABASE privileges", 30 * 2; }; $dbh->do(<<"EOF"); CREATE TABLE `dbicsl-test`.mysql_loader_test4 ( id INT AUTO_INCREMENT PRIMARY KEY, value VARCHAR(100) ) $innodb EOF $dbh->do(<<"EOF"); CREATE TABLE `dbicsl-test`.mysql_loader_test5 ( id INT AUTO_INCREMENT PRIMARY KEY, value VARCHAR(100), four_id INTEGER, CONSTRAINT loader_test5_uniq UNIQUE (four_id), FOREIGN KEY (four_id) REFERENCES `dbicsl-test`.mysql_loader_test4 (id) ) $innodb EOF $dbh->do('CREATE DATABASE `dbicsl.test`'); # Test that keys are correctly cached by naming the primary and # unique keys in this table with the same name as a table in # the `dbicsl-test` schema differently. $dbh->do(<<"EOF"); CREATE TABLE `dbicsl.test`.mysql_loader_test5 ( pk INT AUTO_INCREMENT PRIMARY KEY, value VARCHAR(100), four_id INTEGER, CONSTRAINT loader_test5_uniq UNIQUE (four_id), FOREIGN KEY (four_id) REFERENCES `dbicsl-test`.mysql_loader_test4 (id) ) $innodb EOF $dbh->do(<<"EOF"); CREATE TABLE `dbicsl.test`.mysql_loader_test6 ( id INT AUTO_INCREMENT PRIMARY KEY, value VARCHAR(100), mysql_loader_test4_id INTEGER, FOREIGN KEY (mysql_loader_test4_id) REFERENCES `dbicsl-test`.mysql_loader_test4 (id) ) $innodb EOF $dbh->do(<<"EOF"); CREATE TABLE `dbicsl.test`.mysql_loader_test7 ( id INT AUTO_INCREMENT PRIMARY KEY, value VARCHAR(100), six_id INTEGER UNIQUE, FOREIGN KEY (six_id) REFERENCES `dbicsl.test`.mysql_loader_test6 (id) ) $innodb EOF $dbh->do(<<"EOF"); CREATE TABLE `dbicsl-test`.mysql_loader_test8 ( id INT AUTO_INCREMENT PRIMARY KEY, value VARCHAR(100), mysql_loader_test7_id INTEGER, FOREIGN KEY (mysql_loader_test7_id) REFERENCES `dbicsl.test`.mysql_loader_test7 (id) ) $innodb EOF # Test dumping a rel to a table that's not part of the dump. 
$dbh->do('CREATE DATABASE `dbicsl_test_ignored`'); $dbh->do(<<"EOF"); CREATE TABLE `dbicsl_test_ignored`.mysql_loader_test9 ( id INT AUTO_INCREMENT PRIMARY KEY, value VARCHAR(100) ) $innodb EOF $dbh->do(<<"EOF"); CREATE TABLE `dbicsl-test`.mysql_loader_test10 ( id INT AUTO_INCREMENT PRIMARY KEY, value VARCHAR(100), mysql_loader_test9_id INTEGER, FOREIGN KEY (mysql_loader_test9_id) REFERENCES `dbicsl_test_ignored`.mysql_loader_test9 (id) ) $innodb EOF $databases_created = 1; SKIP: foreach my $db_schema (['dbicsl-test', 'dbicsl.test'], '%') { if ($db_schema eq '%') { try { $dbh->selectall_arrayref('SHOW DATABASES'); } catch { skip 'no SHOW DATABASES privileges', 30; } } lives_and { rmtree EXTRA_DUMP_DIR; my @warns; local $SIG{__WARN__} = sub { push @warns, $_[0] unless $_[0] =~ /\bcollides\b/; }; make_schema_at( 'MySQLMultiSchema', { naming => 'current', db_schema => $db_schema, dump_directory => EXTRA_DUMP_DIR, quiet => 1, }, [ $dsn, $user, $password ], ); diag join "\n", @warns if @warns; is @warns, 0; } 'dumped schema for "dbicsl-test" and "dbicsl.test" databases with no warnings'; my ($test_schema, $rsrc, $rs, $row, %uniqs, $rel_info); lives_and { ok $test_schema = MySQLMultiSchema->connect($dsn, $user, $password); } 'connected test schema'; lives_and { ok $rsrc = $test_schema->source('MysqlLoaderTest4'); } 'got source for table in database name with dash'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in database name with dash'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in database name with dash'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in database name with dash'; lives_and { ok $rs = $test_schema->resultset('MysqlLoaderTest4'); } 'got resultset for table in database name with dash'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in database name with dash'; SKIP: { skip 'set the environment variable DBICTEST_MYSQL_INNODB=1 to test relationships', 3 
unless $test_innodb; $rel_info = try { $rsrc->relationship_info('dbicsl_dash_test_mysql_loader_test5') }; is_deeply $rel_info->{cond}, { 'foreign.four_id' => 'self.id' }, 'relationship in database name with dash'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in database name with dash'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in database name with dash'; } lives_and { ok $rsrc = $test_schema->source('DbicslDashTestMysqlLoaderTest5'); } 'got source for table in database name with dash'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in database name with dash'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['four_id'], 'unique constraint is correct in database name with dash'); lives_and { ok $rsrc = $test_schema->source('MysqlLoaderTest6'); } 'got source for table in database name with dot'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in database name with dot introspected correctly'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in database name with dot introspected correctly'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in database name with dot introspected correctly'; lives_and { ok $rs = $test_schema->resultset('MysqlLoaderTest6'); } 'got resultset for table in database name with dot'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in database name with dot'; SKIP: { skip 'set the environment variable DBICTEST_MYSQL_INNODB=1 to test relationships', 3 unless $test_innodb; $rel_info = try { $rsrc->relationship_info('mysql_loader_test7') }; is_deeply $rel_info->{cond}, { 'foreign.six_id' => 'self.id' }, 'relationship in database name with dot'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in database name with dot'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in database name with dot'; } lives_and { ok $rsrc = $test_schema->source('MysqlLoaderTest7'); 
} 'got source for table in database name with dot'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in database name with dot'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['six_id'], 'unique constraint is correct in database name with dot'); SKIP: { skip 'set the environment variable DBICTEST_MYSQL_INNODB=1 to test relationships', 4 unless $test_innodb; lives_and { ok $test_schema->source('MysqlLoaderTest6') ->has_relationship('mysql_loader_test4'); } 'cross-database relationship in multi-db_schema'; lives_and { ok $test_schema->source('MysqlLoaderTest4') ->has_relationship('mysql_loader_test6s'); } 'cross-database relationship in multi-db_schema'; lives_and { ok $test_schema->source('MysqlLoaderTest8') ->has_relationship('mysql_loader_test7'); } 'cross-database relationship in multi-db_schema'; lives_and { ok $test_schema->source('MysqlLoaderTest7') ->has_relationship('mysql_loader_test8s'); } 'cross-database relationship in multi-db_schema'; } } } }, }, )->run_tests; END { if (not $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}) { if ($databases_created && (my $dbh = try { $schema->storage->dbh })) { foreach my $table ('`dbicsl-test`.mysql_loader_test10', 'dbicsl_test_ignored.mysql_loader_test9', '`dbicsl-test`.mysql_loader_test8', '`dbicsl.test`.mysql_loader_test7', '`dbicsl.test`.mysql_loader_test6', '`dbicsl.test`.mysql_loader_test5', '`dbicsl-test`.mysql_loader_test5', '`dbicsl-test`.mysql_loader_test4') { try { $dbh->do("DROP TABLE $table"); } catch { diag "Error dropping table: $_"; }; } foreach my $db (qw/dbicsl-test dbicsl.test dbicsl_test_ignored/) { try { $dbh->do("DROP DATABASE `$db`"); } catch { diag "Error dropping test database $db: $_"; }; } } rmtree EXTRA_DUMP_DIR; } } # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/20invocations.t0000644000175000017500000001511612542756321020601 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Test::Warn; use 
DBIx::Class::Schema::Loader::Optional::Dependencies; use DBIx::Class::Schema::Loader::Utils qw/sigwarn_silencer/; use lib qw(t/lib); use make_dbictest_db; # Takes a $schema as input, runs 4 basic tests sub test_schema { my ($testname, $schema) = @_; warnings_are { $schema = $schema->clone if !ref $schema; isa_ok($schema, 'DBIx::Class::Schema', $testname); my $rel_foo_rs = $schema->resultset('Bar')->search({ barid => 3})->search_related('fooref'); isa_ok($rel_foo_rs, 'DBIx::Class::ResultSet', $testname); my $rel_foo = $rel_foo_rs->next; isa_ok($rel_foo, "DBICTest::Schema::_${testname}::Foo", $testname); is($rel_foo->footext, 'Foo record associated with the Bar with barid 3', "$testname correct object"); my $foo_rs = $schema->resultset('Foo'); my $foo_new = $foo_rs->create({footext => "${testname}_foo"}); is ($foo_rs->search({footext => "${testname}_foo"})->count, 1, "$testname object created") || die; } [], "No warnings during $testname invocations"; } my @invocations = ( 'hardcode' => sub { package DBICTest::Schema::_hardcode; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->naming('current'); __PACKAGE__->use_namespaces(0); __PACKAGE__->connection($make_dbictest_db::dsn); __PACKAGE__; }, 'normal' => sub { package DBICTest::Schema::_normal; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options(); __PACKAGE__->naming('current'); __PACKAGE__->use_namespaces(0); __PACKAGE__->connect($make_dbictest_db::dsn); }, 'make_schema_at' => sub { use DBIx::Class::Schema::Loader qw/ make_schema_at /; make_schema_at( 'DBICTest::Schema::_make_schema_at', { really_erase_my_files => 1, naming => 'current', use_namespaces => 0 }, [ $make_dbictest_db::dsn ], ); DBICTest::Schema::_make_schema_at->clone; }, 'embedded_options' => sub { package DBICTest::Schema::_embedded_options; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->naming('current'); __PACKAGE__->use_namespaces(0); __PACKAGE__->connect( $make_dbictest_db::dsn, { loader_options => { 
really_erase_my_files => 1 } } ); }, 'embedded_options_in_attrs' => sub { package DBICTest::Schema::_embedded_options_in_attrs; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->naming('current'); __PACKAGE__->use_namespaces(0); __PACKAGE__->connect( $make_dbictest_db::dsn, undef, undef, { AutoCommit => 1, loader_options => { really_erase_my_files => 1 } } ); }, 'embedded_options_make_schema_at' => sub { use DBIx::Class::Schema::Loader qw/ make_schema_at /; make_schema_at( 'DBICTest::Schema::_embedded_options_make_schema_at', { }, [ $make_dbictest_db::dsn, { loader_options => { really_erase_my_files => 1, naming => 'current', use_namespaces => 0, } }, ], ); "DBICTest::Schema::_embedded_options_make_schema_at"; }, 'almost_embedded' => sub { package DBICTest::Schema::_almost_embedded; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options( really_erase_my_files => 1, naming => 'current', use_namespaces => 0, ); __PACKAGE__->connect( $make_dbictest_db::dsn, undef, undef, { AutoCommit => 1 } ); }, 'make_schema_at_explicit' => sub { use DBIx::Class::Schema::Loader; DBIx::Class::Schema::Loader::make_schema_at( 'DBICTest::Schema::_make_schema_at_explicit', { really_erase_my_files => 1, naming => 'current', use_namespaces => 0, }, [ $make_dbictest_db::dsn ], ); DBICTest::Schema::_make_schema_at_explicit->clone; }, 'no_skip_load_external' => sub { # By default we should pull in t/lib/DBICTest/Schema/_no_skip_load_external/Foo.pm $skip_me since t/lib is in @INC use DBIx::Class::Schema::Loader; DBIx::Class::Schema::Loader::make_schema_at( 'DBICTest::Schema::_no_skip_load_external', { really_erase_my_files => 1, naming => 'current', use_namespaces => 0, }, [ $make_dbictest_db::dsn ], ); DBICTest::Schema::_no_skip_load_external->clone; }, 'skip_load_external' => sub { # When we explicitly skip_load_external t/lib/DBICTest/Schema/_skip_load_external/Foo.pm should be ignored use DBIx::Class::Schema::Loader; DBIx::Class::Schema::Loader::make_schema_at( 
'DBICTest::Schema::_skip_load_external', { really_erase_my_files => 1, naming => 'current', use_namespaces => 0, skip_load_external => 1, }, [ $make_dbictest_db::dsn ], ); DBICTest::Schema::_skip_load_external->clone; }, (DBIx::Class::Schema::Loader::Optional::Dependencies->req_ok_for('use_moose') ? ('use_moose' => sub { package DBICTest::Schema::_use_moose; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->naming('current'); __PACKAGE__->use_namespaces(0); __PACKAGE__->connect( $make_dbictest_db::dsn, { loader_options => { use_moose => 1 } } ); }) : () ), ); # 6 tests per k/v pair plan tests => 6 * (@invocations/2) + 2; # + 2 more manual ones below. while(@invocations) { my $style = shift @invocations; my $cref = shift @invocations; my $schema = do { local $SIG{__WARN__} = sigwarn_silencer( qr/Deleting existing file .+ due to 'really_erase_my_files' setting/ ); $cref->(); }; test_schema($style, $schema); } { no warnings 'once'; is($DBICTest::Schema::_no_skip_load_external::Foo::skip_me, "bad mojo", "external content loaded"); is($DBICTest::Schema::_skip_load_external::Foo::skip_me, undef, "external content not loaded with skip_load_external => 1"); } DBIx-Class-Schema-Loader-0.07045/t/21misc_fatal.t0000644000175000017500000000123712542756321020347 0ustar ilmariilmariuse strict; use warnings; use Test::More; use lib qw(t/lib); use make_dbictest_db; { $INC{'DBIx/Class/Storage/xyzzy.pm'} = 1; package DBIx::Class::Storage::xyzzy; use base qw/ DBIx::Class::Storage /; sub new { bless {}, shift } sub connect_info { @_ } package DBICTest::Schema; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options( really_erase_my_files => 1 ); __PACKAGE__->storage_type( '::xyzzy' ); } plan tests => 1; eval { DBICTest::Schema->connect($make_dbictest_db::dsn) }; like( $@, qr/Could not load loader_class "DBIx::Class::Schema::Loader::xyzzy": /, 'Bad storage type dies correctly' ); 
DBIx-Class-Schema-Loader-0.07045/t/90bug_58_mro.t0000644000175000017500000000163212542756321020220 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Test::Exception; use DBIx::Class::Schema::Loader; # use this if you keep a copy of DBD::Sybase linked to FreeTDS somewhere else BEGIN { if (my $lib_dirs = $ENV{DBICTEST_MSSQL_PERL5LIB}) { unshift @INC, $_ for split /:/, $lib_dirs; } } my ($dsn, $user, $pass); for (qw/MSSQL_ODBC MSSQL_ADO MSSQL/) { next unless $ENV{"DBICTEST_${_}_DSN"}; $dsn = $ENV{"DBICTEST_${_}_DSN"}; $user = $ENV{"DBICTEST_${_}_USER"}; $pass = $ENV{"DBICTEST_${_}_PASS"}; last; } plan skip_all => 'perl 5.8 required for this test' if $] >= 5.009005; plan ($dsn ? (tests => 1) : (skip_all => 'MSSQL required for this test')); lives_ok { DBIx::Class::Schema::Loader::make_schema_at( 'DBICTest::Schema', { naming => 'current' }, [ $dsn, $user, $pass ], ); } 'dynamic MSSQL schema created using make_schema_at'; done_testing; DBIx-Class-Schema-Loader-0.07045/t/10_09firebird_common.t0000644000175000017500000001763112542756317021723 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Scope::Guard (); use DBIx::Class::Optional::Dependencies; use DBIx::Class::Schema::Loader::Utils qw/sigwarn_silencer/; use lib qw(t/lib); use dbixcsl_common_tests; my %dsns; for (qw(FIREBIRD FIREBIRD_ODBC FIREBIRD_INTERBASE)) { next unless $ENV{"DBICTEST_${_}_DSN"}; my $dep_group = lc "rdbms_$_"; if (!DBIx::Class::Optional::Dependencies->req_ok_for($dep_group)) { diag 'You need to install ' . DBIx::Class::Optional::Dependencies->req_missing_for($dep_group) . 
" to test with $_"; next; } $dsns{$_}{dsn} = $ENV{"DBICTEST_${_}_DSN"}; $dsns{$_}{user} = $ENV{"DBICTEST_${_}_USER"}; $dsns{$_}{password} = $ENV{"DBICTEST_${_}_PASS"}; $dsns{$_}{connect_info_opts} = { on_connect_call => 'use_softcommit' } if /\AFIREBIRD(?:_INTERBASE)?\z/; }; plan skip_all => 'You need to set the DBICTEST_FIREBIRD_DSN, _USER and _PASS and/or the DBICTEST_FIREBIRD_ODBC_DSN, _USER and _PASS and/or the DBICTEST_FIREBIRD_INTERBASE_DSN, _USER and _PASS environment variables' unless %dsns; my $schema; my $tester = dbixcsl_common_tests->new( vendor => 'Firebird', auto_inc_pk => 'INTEGER NOT NULL PRIMARY KEY', auto_inc_cb => sub { my ($table, $col) = @_; return ( qq{ CREATE GENERATOR gen_${table}_${col} }, qq{ CREATE TRIGGER ${table}_bi FOR $table ACTIVE BEFORE INSERT POSITION 0 AS BEGIN IF (NEW.$col IS NULL) THEN NEW.$col = GEN_ID(gen_${table}_${col},1); END } ); }, auto_inc_drop_cb => sub { my ($table, $col) = @_; return ( qq{ DROP TRIGGER ${table}_bi }, qq{ DROP GENERATOR gen_${table}_${col} }, ); }, null => '', preserve_case_mode_is_exclusive => 1, quote_char => '"', connect_info => [ map { $dsns{$_} } sort keys %dsns ], data_types => { # based on the Interbase Data Definition Guide # http://www.ibphoenix.com/downloads/60DataDef.zip # # Numeric types 'smallint' => { data_type => 'smallint' }, 'int' => { data_type => 'integer' }, 'integer' => { data_type => 'integer' }, 'bigint' => { data_type => 'bigint' }, 'float' => { data_type => 'real' }, 'double precision' => { data_type => 'double precision' }, 'real' => { data_type => 'real' }, 'float(2)' => { data_type => 'real' }, 'float(7)' => { data_type => 'real' }, 'float(8)' => { data_type => 'double precision' }, 'decimal' => { data_type => 'decimal' }, 'dec' => { data_type => 'decimal' }, 'numeric' => { data_type => 'numeric' }, 'decimal(3)' => { data_type => 'decimal', size => [3,0] }, 'decimal(3,3)' => { data_type => 'decimal', size => [3,3] }, 'dec(3,3)' => { data_type => 'decimal', size => [3,3] }, 
'numeric(3,3)' => { data_type => 'numeric', size => [3,3] }, 'decimal(6,3)' => { data_type => 'decimal', size => [6,3] }, 'numeric(6,3)' => { data_type => 'numeric', size => [6,3] }, 'decimal(12,3)' => { data_type => 'decimal', size => [12,3] }, 'numeric(12,3)' => { data_type => 'numeric', size => [12,3] }, 'decimal(18,18)' => { data_type => 'decimal', size => [18,18] }, 'dec(18,18)' => { data_type => 'decimal', size => [18,18] }, 'numeric(18,18)' => { data_type => 'numeric', size => [18,18] }, # Date and Time Types 'date' => { data_type => 'date' }, 'timestamp default current_timestamp' => { data_type => 'timestamp', default_value => \'current_timestamp' }, 'time' => { data_type => 'time' }, # String Types 'char' => { data_type => 'char', size => 1 }, 'char(11)' => { data_type => 'char', size => 11 }, 'varchar(20)' => { data_type => 'varchar', size => 20 }, 'char(22) character set unicode_fss' => => { data_type => 'char(x) character set unicode_fss', size => 22 }, 'varchar(33) character set unicode_fss' => => { data_type => 'varchar(x) character set unicode_fss', size => 33 }, # Blob types 'blob' => { data_type => 'blob' }, 'blob sub_type text' => { data_type => 'blob sub_type text' }, 'blob sub_type text character set unicode_fss' => { data_type => 'blob sub_type text character set unicode_fss' }, }, extra => { count => 9, run => sub { $schema = shift; my ($monikers, $classes, $self) = @_; cleanup_extra(); my $dbh = $schema->storage->dbh; # create a mixed case table $dbh->do($_) for ( q{ CREATE TABLE "Firebird_Loader_Test1" ( "Id" INTEGER NOT NULL PRIMARY KEY, "Foo" INTEGER DEFAULT 42 ) }, q{ CREATE GENERATOR "Gen_Firebird_Loader_Test1_Id" }, q{ CREATE TRIGGER "Firebird_Loader_Test1_BI" for "Firebird_Loader_Test1" ACTIVE BEFORE INSERT POSITION 0 AS BEGIN IF (NEW."Id" IS NULL) THEN NEW."Id" = GEN_ID("Gen_Firebird_Loader_Test1_Id",1); END }, ); my $guard = Scope::Guard->new(\&cleanup_extra); local $schema->loader->{preserve_case} = 1; $schema->loader->_setup; 
$self->rescan_without_warnings($schema); ok ((my $rsrc = eval { $schema->resultset('FirebirdLoaderTest1')->result_source }), 'got rsrc for mixed case table'); ok ((my $col_info = eval { $rsrc->column_info('Id') }), 'got column_info for column Id'); is $col_info->{accessor}, 'id', 'column Id has lowercase accessor "id"'; is $col_info->{is_auto_increment}, 1, 'is_auto_increment detected for mixed case trigger'; is $col_info->{sequence}, 'Gen_Firebird_Loader_Test1_Id', 'correct mixed case sequence name'; is eval { $rsrc->column_info('Foo')->{default_value} }, 42, 'default_value detected for mixed case column'; # test the fixed up ->_dbh_type_info_type_name for the ODBC driver if ($schema->storage->_dbi_connect_info->[0] =~ /:ODBC:/i) { my %truncated_types = ( 4 => 'INTEGER', -9 => 'VARCHAR(x) CHARACTER SET UNICODE_FSS', -10 => 'BLOB SUB_TYPE TEXT CHARACTER SET UNICODE_FSS', ); for my $type_num (keys %truncated_types) { is $schema->loader->_dbh_type_info_type_name($type_num), $truncated_types{$type_num}, "ODBC ->_dbh_type_info_type_name correct for '$truncated_types{$type_num}'"; } } else { my $tb = Test::More->builder; $tb->skip('not testing _dbh_type_info_type_name on DBD::InterBase') for 1..3; } }, }, ); { # get rid of stupid warning from InterBase/GetInfo.pm if ($dsns{FIREBIRD_INTERBASE}) { local $SIG{__WARN__} = sigwarn_silencer( qr{^(?:Use of uninitialized value|Argument "[0-9_]+" isn't numeric|Missing argument) in sprintf at \S+DBD/InterBase/GetInfo.pm line \d+\.$} ); require DBD::InterBase; require DBD::InterBase::GetInfo; } $tester->run_tests(); } sub cleanup_extra { $schema->storage->disconnect; my $dbh = $schema->storage->dbh; foreach my $stmt ( 'DROP TRIGGER "Firebird_Loader_Test1_BI"', 'DROP GENERATOR "Gen_Firebird_Loader_Test1_Id"', 'DROP TABLE "Firebird_Loader_Test1"', ) { eval { $dbh->do($stmt) }; } } # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/60dbicdump_config.t0000644000175000017500000000240712542756321021364 0ustar ilmariilmariuse 
DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_dbicdump_config'; use strict; use warnings; use Test::More; use File::Path qw/make_path rmtree/; use DBIx::Class::Schema::Loader::Utils 'slurp_file'; use Try::Tiny; use namespace::clean; use lib 't/lib'; use make_dbictest_db (); use dbixcsl_test_dir '$tdir'; plan tests => 2; my $config_dir = "$tdir/dbicdump_config"; make_path $config_dir; my $config_file = "$config_dir/my.conf"; my $dump_path = "$tdir/dbicdump_config_dump"; open my $fh, '>', $config_file or die "Could not write to $config_file: $!"; print $fh <<"EOF"; schema_class DBICTest::Schema lib t/lib dsn $make_dbictest_db::dsn dump_directory $dump_path components InflateColumn::DateTime schema_base_class TestSchemaBaseClass quiet 1 EOF close $fh; system $^X, 'script/dbicdump', $config_file; is $? >> 8, 0, 'dbicdump executed successfully'; my $foo = try { slurp_file "$dump_path/DBICTest/Schema/Result/Foo.pm" } || ''; like $foo, qr/InflateColumn::DateTime/, 'loader options read correctly from config_file'; done_testing; END { rmtree($config_dir, 1, 1); rmtree($dump_path, 1, 1); } DBIx-Class-Schema-Loader-0.07045/t/01use.t0000644000175000017500000000110312542756321017027 0ustar ilmariilmariuse strict; use warnings; use Test::More tests => 10; BEGIN { use_ok 'DBIx::Class::Schema::Loader'; use_ok 'DBIx::Class::Schema::Loader::Base'; use_ok 'DBIx::Class::Schema::Loader::DBI'; use_ok 'DBIx::Class::Schema::Loader::RelBuilder'; use_ok 'DBIx::Class::Schema::Loader::DBI::SQLite'; use_ok 'DBIx::Class::Schema::Loader::DBI::mysql'; use_ok 'DBIx::Class::Schema::Loader::DBI::Pg'; use_ok 'DBIx::Class::Schema::Loader::DBI::DB2'; use_ok 'DBIx::Class::Schema::Loader::DBI::Oracle'; use_ok 'DBIx::Class::Schema::Loader::DBI::Writing'; } DBIx-Class-Schema-Loader-0.07045/t/40overwrite_modifications.t0000644000175000017500000000330012542756321023175 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Test::Exception; use Test::Warn; use lib 
qw(t/lib); use make_dbictest_db; use File::Copy; use File::Spec; use File::Temp qw/ tempdir tempfile /; use DBIx::Class::Schema::Loader; use DBIx::Class::Schema::Loader::Utils qw/ slurp_file /; my $tempdir = tempdir( CLEANUP => 1 ); my $foopm = File::Spec->catfile( $tempdir, qw| DBICTest Schema Overwrite_modifications Result Foo.pm |); dump_schema(); # check that we dumped ok( -f $foopm, 'looks like it dumped' ); # now modify one of the files { open my $in, '<', $foopm or die "$! reading $foopm"; my ($tfh,$temp) = tempfile( UNLINK => 1); while(<$in>) { s/"bars"/"somethingelse"/; print $tfh $_; } close $tfh; copy( $temp, $foopm ); } # and dump again without overwrites throws_ok { dump_schema(); } qr/mismatch/, 'throws error dumping without overwrite_modifications'; # and then dump with overwrite lives_ok { dump_schema( overwrite_modifications => 1 ); } 'does not throw when dumping with overwrite_modifications'; unlike slurp_file $foopm, qr/"somethingelse"/, "Modifications actually overwritten"; sub dump_schema { # need to poke _loader_invoked in order to be able to rerun the # loader multiple times. 
DBICTest::Schema::Overwrite_modifications->_loader_invoked(0) if @DBICTest::Schema::Overwrite_modifications::ISA; my $args = \@_; warnings_exist { DBIx::Class::Schema::Loader::make_schema_at( 'DBICTest::Schema::Overwrite_modifications', { dump_directory => $tempdir, @$args }, [ $make_dbictest_db::dsn ], ); } [qr/^Dumping manual schema/, qr/^Schema dump completed/ ]; } done_testing(); DBIx-Class-Schema-Loader-0.07045/t/80split_name.t0000644000175000017500000000370112131533457020400 0ustar ilmariilmariuse strict; use warnings; use Test::More tests => 18; use DBIx::Class::Schema::Loader::Utils 'split_name'; is_deeply [split_name('foo_bar_baz')], [qw/foo bar baz/], 'by underscore'; is_deeply [split_name('foo__bar__baz')], [qw/foo bar baz/], 'by double underscore'; is_deeply [split_name('Foo_Bar_Baz')], [qw/Foo Bar Baz/], 'by underscore with full capitalization'; is_deeply [split_name('foo_Bar_Baz')], [qw/foo Bar Baz/], 'by underscore with lcfirst capitalization'; is_deeply [split_name('fooBarBaz')], [qw/foo Bar Baz/], 'lcfirst camelCase identifier'; is_deeply [split_name('FooBarBaz')], [qw/Foo Bar Baz/], 'ucfirst camelCase identifier'; is_deeply [split_name('VLANValidID')], [qw/VLAN Valid ID/], 'CAMELCase identifier (word with all caps)'; is_deeply [split_name('VlanVALIDId')], [qw/Vlan VALID Id/], 'CamelCASE identifier (second word with all caps)'; is_deeply [split_name('foo..bar/baz')], [qw/foo bar baz/], 'by non-alphanum chars'; # naming=v7 is_deeply [split_name('foo_bar_baz', 7)], [qw/foo bar baz/], 'by underscore for v=7'; is_deeply [split_name('foo__bar__baz', 7)], [qw/foo bar baz/], 'by double underscore for v=7'; is_deeply [split_name('Foo_Bar_Baz', 7)], [qw/Foo Bar Baz/], 'by underscore with full capitalization for v=7'; is_deeply [split_name('foo_Bar_Baz', 7)], [qw/foo Bar Baz/], 'by underscore with lcfirst capitalization for v=7'; is_deeply [split_name('fooBarBaz', 7)], [qw/foo Bar Baz/], 'lcfirst camelCase identifier for v=7'; is_deeply 
[split_name('FooBarBaz', 7)], [qw/Foo Bar Baz/], 'ucfirst camelCase identifier for v=7'; is_deeply [split_name('VLANValidID', 7)], [qw/VLANValid ID/], 'CAMELCase identifier (word with all caps) for v=7'; is_deeply [split_name('VlanVALIDId', 7)], [qw/Vlan VALIDId/], 'CamelCASE identifier (second word with all caps) for v=7'; is_deeply [split_name('foo..bar/baz', 7)], [qw/foo bar baz/], 'by non-alphanum chars for v=7'; DBIx-Class-Schema-Loader-0.07045/t/65dbicdump_invocations.t0000644000175000017500000000153112476065214022455 0ustar ilmariilmari#!perl use strict; use warnings; use Test::More; use DBIx::Class::Schema::Loader::Utils 'slurp_file'; use File::Path 'rmtree'; use namespace::clean; use lib 't/lib'; use make_dbictest_db (); use dbixcsl_test_dir '$tdir'; plan tests => 3; # Test the -I option dbicdump( '-I', 't/lib', '-o', 'schema_base_class=TestSchemaBaseClass', 'DBICTest::Schema', $make_dbictest_db::dsn ); dbicdump( '-It/lib', '-o', 'schema_base_class=TestSchemaBaseClass', 'DBICTest::Schema', $make_dbictest_db::dsn ); dbicdump( '-I/dummy', '-It/lib', '-o', 'schema_base_class=TestSchemaBaseClass', 'DBICTest::Schema', $make_dbictest_db::dsn ); done_testing; sub dbicdump { system $^X, 'script/dbicdump', '-o', "dump_directory=$tdir", '-o', 'quiet=1', @_; is $? 
>> 8, 0, 'dbicdump executed successfully'; } DBIx-Class-Schema-Loader-0.07045/t/46relationships_multi_m2m.t0000644000175000017500000000364212476065214023127 0ustar ilmariilmariuse strict; use warnings; use Test::More; use lib qw(t/lib); use make_dbictest_db_multi_m2m; use Devel::Dwarn; use DBIx::Class::Schema::Loader; my $schema_counter = 0; { my $hashmap = schema_with( rel_name_map => { foos_2s => "other_foos", bars_2s => "other_bars", }, ); foreach ([qw(Foo bars)], [qw(Bar foos)]) { my ($source, $rel) = @{$_}; my $row = $hashmap->resultset($source)->find(1); foreach my $link ("", "other_") { can_ok $row, "${link}${rel}"; } } } { my $submap = schema_with( rel_name_map => sub { my ($args) = @_; if ($args->{type} eq "many_to_many") { like $args->{link_class}, qr/\ADBICTest::Schema::${schema_counter}::Result::FooBar(?:One|Two)\z/, "link_class"; like $args->{link_moniker}, qr/\AFooBar(?:One|Two)\z/, "link_moniker"; like $args->{link_rel_name}, qr/\Afoo_bar_(?:ones|twos)\z/, "link_rel_name"; return $args->{name}."_".(split /_/, $args->{link_rel_name})[-1]; } }, ); foreach ([qw(Foo bars)], [qw(Bar foos)]) { my ($source, $rel) = @{$_}; my $row = $submap->resultset($source)->find(1); foreach ([ones => 1], [twos => 2]) { my ($link, $count) = @{$_}; my $m2m = "${rel}_${link}"; can_ok $row, $m2m; is $row->$m2m->count, $count, "$m2m count"; } } } done_testing; #### generates a new schema with the given opts every time it's called sub schema_with { $schema_counter++; DBIx::Class::Schema::Loader::make_schema_at( 'DBICTest::Schema::'.$schema_counter, { naming => 'current', @_ }, [ $make_dbictest_db_multi_m2m::dsn ], ); "DBICTest::Schema::$schema_counter"->clone; } DBIx-Class-Schema-Loader-0.07045/t/10_03pg_common.t0000644000175000017500000005151612576771405020537 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_rdbms_pg'; use strict; use warnings; use utf8; use DBIx::Class::Schema::Loader 'make_schema_at'; use 
DBIx::Class::Schema::Loader::Utils qw/no_warnings slurp_file/; use Test::More; use Test::Exception; use Try::Tiny; use File::Path 'rmtree'; use namespace::clean; use lib qw(t/lib); use dbixcsl_common_tests (); use dbixcsl_test_dir '$tdir'; use constant EXTRA_DUMP_DIR => "$tdir/pg_extra_dump"; my $dsn = $ENV{DBICTEST_PG_DSN} || ''; my $user = $ENV{DBICTEST_PG_USER} || ''; my $password = $ENV{DBICTEST_PG_PASS} || ''; dbixcsl_common_tests->new( vendor => 'Pg', auto_inc_pk => 'SERIAL NOT NULL PRIMARY KEY', dsn => $dsn, user => $user, password => $password, loader_options => { preserve_case => 1 }, connect_info_opts => { pg_enable_utf8 => 1, on_connect_do => [ 'SET client_min_messages=WARNING' ], }, quote_char => '"', default_is_deferrable => 0, default_on_clause => 'NO ACTION', data_types => { # http://www.postgresql.org/docs/7.4/interactive/datatype.html # # Numeric Types boolean => { data_type => 'boolean' }, bool => { data_type => 'boolean' }, 'bool default false' => { data_type => 'boolean', default_value => \'false' }, 'bool default true' => { data_type => 'boolean', default_value => \'true' }, 'bool default 0::bool' => { data_type => 'boolean', default_value => \'false' }, 'bool default 1::bool' => { data_type => 'boolean', default_value => \'true' }, bigint => { data_type => 'bigint' }, int8 => { data_type => 'bigint' }, bigserial => { data_type => 'bigint', is_auto_increment => 1 }, serial8 => { data_type => 'bigint', is_auto_increment => 1 }, integer => { data_type => 'integer' }, int => { data_type => 'integer' }, int4 => { data_type => 'integer' }, serial => { data_type => 'integer', is_auto_increment => 1 }, serial4 => { data_type => 'integer', is_auto_increment => 1 }, smallint => { data_type => 'smallint' }, int2 => { data_type => 'smallint' }, money => { data_type => 'money' }, 'double precision' => { data_type => 'double precision' }, float8 => { data_type => 'double precision' }, real => { data_type => 'real' }, float4 => { data_type => 'real' }, 
'float(24)' => { data_type => 'real' }, 'float(25)' => { data_type => 'double precision' }, 'float(53)' => { data_type => 'double precision' }, float => { data_type => 'double precision' }, numeric => { data_type => 'numeric' }, decimal => { data_type => 'numeric' }, 'numeric(6,3)' => { data_type => 'numeric', size => [6,3] }, 'decimal(6,3)' => { data_type => 'numeric', size => [6,3] }, # Bit String Types 'bit varying(2)' => { data_type => 'varbit', size => 2 }, 'varbit(2)' => { data_type => 'varbit', size => 2 }, 'bit varying' => { data_type => 'varbit' }, 'varbit' => { data_type => 'varbit' }, bit => { data_type => 'bit', size => 1 }, 'bit(3)' => { data_type => 'bit', size => 3 }, # Network Types inet => { data_type => 'inet' }, cidr => { data_type => 'cidr' }, macaddr => { data_type => 'macaddr' }, # Geometric Types point => { data_type => 'point' }, line => { data_type => 'line' }, lseg => { data_type => 'lseg' }, box => { data_type => 'box' }, path => { data_type => 'path' }, polygon => { data_type => 'polygon' }, circle => { data_type => 'circle' }, # Character Types 'character varying(2)' => { data_type => 'varchar', size => 2 }, 'varchar(2)' => { data_type => 'varchar', size => 2 }, 'character(2)' => { data_type => 'char', size => 2 }, 'char(2)' => { data_type => 'char', size => 2 }, # check that default null is correctly rewritten 'char(3) default null' => { data_type => 'char', size => 3, default_value => \'null' }, 'character' => { data_type => 'char', size => 1 }, 'char' => { data_type => 'char', size => 1 }, text => { data_type => 'text' }, # varchar with no size has unlimited size, we rewrite to 'text' varchar => { data_type => 'text', original => { data_type => 'varchar' } }, # check default null again (to make sure ref is safe) 'varchar(3) default null' => { data_type => 'varchar', size => 3, default_value => \'null' }, # Datetime Types date => { data_type => 'date' }, interval => { data_type => 'interval' }, 'interval(0)' => { data_type => 
'interval', size => 0 }, 'interval(2)' => { data_type => 'interval', size => 2 }, time => { data_type => 'time' }, 'time(0)' => { data_type => 'time', size => 0 }, 'time(2)' => { data_type => 'time', size => 2 }, 'time without time zone' => { data_type => 'time' }, 'time(0) without time zone' => { data_type => 'time', size => 0 }, 'time with time zone' => { data_type => 'time with time zone' }, 'time(0) with time zone' => { data_type => 'time with time zone', size => 0 }, 'time(2) with time zone' => { data_type => 'time with time zone', size => 2 }, timestamp => { data_type => 'timestamp' }, 'timestamp default now()' => { data_type => 'timestamp', default_value => \'current_timestamp', original => { default_value => \'now()' } }, 'timestamp(0)' => { data_type => 'timestamp', size => 0 }, 'timestamp(2)' => { data_type => 'timestamp', size => 2 }, 'timestamp without time zone' => { data_type => 'timestamp' }, 'timestamp(0) without time zone' => { data_type => 'timestamp', size => 0 }, 'timestamp(2) without time zone' => { data_type => 'timestamp', size => 2 }, 'timestamp with time zone' => { data_type => 'timestamp with time zone' }, 'timestamp(0) with time zone' => { data_type => 'timestamp with time zone', size => 0 }, 'timestamp(2) with time zone' => { data_type => 'timestamp with time zone', size => 2 }, # Blob Types bytea => { data_type => 'bytea' }, # Enum Types pg_loader_test_enum => { data_type => 'enum', extra => { custom_type_name => 'pg_loader_test_enum', list => [ qw/foo bar baz/] } }, }, pre_create => [ q{ CREATE TYPE pg_loader_test_enum AS ENUM ( 'foo', 'bar', 'baz' ) }, ], extra => { create => [ q{ CREATE SCHEMA dbicsl_test }, q{ CREATE SEQUENCE dbicsl_test.myseq }, q{ CREATE TABLE pg_loader_test1 ( id INTEGER NOT NULL DEFAULT nextval('dbicsl_test.myseq') PRIMARY KEY, value VARCHAR(100) ) }, qq{ COMMENT ON TABLE pg_loader_test1 IS 'The\15\12Table ∑' }, qq{ COMMENT ON COLUMN pg_loader_test1.value IS 'The\15\12Column' }, q{ CREATE TABLE pg_loader_test2 ( 
id SERIAL PRIMARY KEY, value VARCHAR(100) ) }, q{ COMMENT ON TABLE pg_loader_test2 IS 'very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very long comment' }, q{ CREATE SCHEMA "dbicsl-test" }, q{ CREATE TABLE "dbicsl-test".pg_loader_test4 ( id SERIAL PRIMARY KEY, value VARCHAR(100) ) }, q{ CREATE TABLE "dbicsl-test".pg_loader_test5 ( id SERIAL PRIMARY KEY, value VARCHAR(100), four_id INTEGER REFERENCES "dbicsl-test".pg_loader_test4 (id), CONSTRAINT loader_test5_uniq UNIQUE (four_id) ) }, q{ CREATE SCHEMA "dbicsl.test" }, q{ CREATE TABLE "dbicsl.test".pg_loader_test5 ( pk SERIAL PRIMARY KEY, value VARCHAR(100), four_id INTEGER REFERENCES "dbicsl-test".pg_loader_test4 (id), CONSTRAINT loader_test5_uniq UNIQUE (four_id) ) }, q{ CREATE TABLE "dbicsl.test".pg_loader_test6 ( id SERIAL PRIMARY KEY, value VARCHAR(100), pg_loader_test4_id INTEGER REFERENCES "dbicsl-test".pg_loader_test4 (id) ) }, q{ CREATE TABLE "dbicsl.test".pg_loader_test7 ( id SERIAL PRIMARY KEY, value VARCHAR(100), six_id INTEGER UNIQUE REFERENCES "dbicsl.test".pg_loader_test6 (id) ) }, q{ CREATE TABLE "dbicsl-test".pg_loader_test8 ( id SERIAL PRIMARY KEY, value VARCHAR(100), pg_loader_test7_id INTEGER REFERENCES "dbicsl.test".pg_loader_test7 (id) ) }, # 4 through 8 are used for the multi-schema tests q{ create table pg_loader_test9 ( id bigserial primary key ) }, q{ create table pg_loader_test10 ( id bigserial primary key, nine_id int, foreign key (nine_id) references pg_loader_test9(id) on delete restrict on update set null deferrable ) }, q{ create view pg_loader_test11 as select * from pg_loader_test1 }, q{ create table pg_loader_test12 ( id integer not null, value integer, active boolean, name text ) }, q{ create unique index uniq_id_lc_name on pg_loader_test12 ( id, lower(name) ) }, q{ create unique index uniq_uc_name_id on pg_loader_test12 ( 
upper(name), id ) }, q{ create unique index pg_loader_test12_value on pg_loader_test12 ( value ) }, q{ create unique index pg_loader_test12_name_active on pg_loader_test12 ( name ) where active }, ], pre_drop_ddl => [ 'DROP SCHEMA dbicsl_test CASCADE', 'DROP SCHEMA "dbicsl-test" CASCADE', 'DROP SCHEMA "dbicsl.test" CASCADE', 'DROP TYPE pg_loader_test_enum', 'DROP VIEW pg_loader_test11', ], drop => [ qw/pg_loader_test1 pg_loader_test2 pg_loader_test9 pg_loader_test10 pg_loader_test12/ ], count => 10 + 30 * 2, run => sub { my ($schema, $monikers, $classes) = @_; is $schema->source($monikers->{pg_loader_test1})->column_info('id')->{sequence}, 'dbicsl_test.myseq', 'qualified sequence detected'; my $class = $classes->{pg_loader_test1}; my $filename = $schema->loader->get_dump_filename($class); my $code = slurp_file $filename; like $code, qr/^=head1 NAME\n\n^$class - The\nTable ∑\n\n^=cut\n/m, 'table comment'; like $code, qr/^=head2 value\n\n(.+:.+\n)+\nThe\nColumn\n\n/m, 'column comment and attrs'; $class = $classes->{pg_loader_test2}; $filename = $schema->loader->get_dump_filename($class); $code = slurp_file $filename; like $code, qr/^=head1 NAME\n\n^$class\n\n=head1 DESCRIPTION\n\n^very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very long comment\n\n^=cut\n/m, 'long table comment is in DESCRIPTION'; # test on delete/update fk clause introspection ok ((my $rel_info = $schema->source('PgLoaderTest10')->relationship_info('nine')), 'got rel info'); is $rel_info->{attrs}{on_delete}, 'RESTRICT', 'ON DELETE clause introspected correctly'; is $rel_info->{attrs}{on_update}, 'SET NULL', 'ON UPDATE clause introspected correctly'; is $rel_info->{attrs}{is_deferrable}, 1, 'DEFERRABLE clause introspected correctly'; foreach my $db_schema (['dbicsl-test', 'dbicsl.test'], '%') { lives_and { rmtree EXTRA_DUMP_DIR; my @warns; local 
$SIG{__WARN__} = sub { push @warns, $_[0] unless $_[0] =~ /\bcollides\b/; }; make_schema_at( 'PGMultiSchema', { naming => 'current', db_schema => $db_schema, preserve_case => 1, dump_directory => EXTRA_DUMP_DIR, quiet => 1, }, [ $dsn, $user, $password, { on_connect_do => [ 'SET client_min_messages=WARNING' ], } ], ); diag join "\n", @warns if @warns; is @warns, 0; } 'dumped schema for "dbicsl-test" and "dbicsl.test" schemas with no warnings'; my ($test_schema, $rsrc, $rs, $row, %uniqs, $rel_info); lives_and { ok $test_schema = PGMultiSchema->connect($dsn, $user, $password, { on_connect_do => [ 'SET client_min_messages=WARNING' ], }); } 'connected test schema'; lives_and { ok $rsrc = $test_schema->source('PgLoaderTest4'); } 'got source for table in schema name with dash'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in schema name with dash'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in schema name with dash'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in schema name with dash'; lives_and { ok $rs = $test_schema->resultset('PgLoaderTest4'); } 'got resultset for table in schema name with dash'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in schema name with dash'; $rel_info = try { $rsrc->relationship_info('dbicsl_dash_test_pg_loader_test5') }; is_deeply $rel_info->{cond}, { 'foreign.four_id' => 'self.id' }, 'relationship in schema name with dash'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in schema name with dash'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in schema name with dash'; lives_and { ok $rsrc = $test_schema->source('DbicslDashTestPgLoaderTest5'); } 'got source for table in schema name with dash'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in schema name with dash'; delete $uniqs{primary}; is_deeply( (values %uniqs)[0], ['four_id'], 'unique constraint is correct in 
schema name with dash' ); lives_and { ok $rsrc = $test_schema->source('PgLoaderTest6'); } 'got source for table in schema name with dot'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in schema name with dot introspected correctly'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in schema name with dot introspected correctly'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in schema name with dot introspected correctly'; lives_and { ok $rs = $test_schema->resultset('PgLoaderTest6'); } 'got resultset for table in schema name with dot'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in schema name with dot'; $rel_info = try { $rsrc->relationship_info('pg_loader_test7') }; is_deeply $rel_info->{cond}, { 'foreign.six_id' => 'self.id' }, 'relationship in schema name with dot'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in schema name with dot'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in schema name with dot'; lives_and { ok $rsrc = $test_schema->source('PgLoaderTest7'); } 'got source for table in schema name with dot'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in schema name with dot'; delete $uniqs{primary}; is_deeply( (values %uniqs)[0], ['six_id'], 'unique constraint is correct in schema name with dot' ); lives_and { ok $test_schema->source('PgLoaderTest6') ->has_relationship('pg_loader_test4'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('PgLoaderTest4') ->has_relationship('pg_loader_test6s'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('PgLoaderTest8') ->has_relationship('pg_loader_test7'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('PgLoaderTest7') ->has_relationship('pg_loader_test8s'); } 'cross-schema relationship in multi-db_schema'; } # test that views are 
marked as such isa_ok $schema->resultset($monikers->{pg_loader_test11})->result_source, 'DBIx::Class::ResultSource::View', 'views have table_class set correctly'; is_deeply { $schema->source($monikers->{pg_loader_test12})->unique_constraints }, { pg_loader_test12_value => ['value'] }, 'unique indexes are dumped correctly'; }, }, )->run_tests(); END { rmtree EXTRA_DUMP_DIR unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; } # vim:et sw=4 sts=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/30_02bad_comment_table.t0000644000175000017500000000150312131533457022156 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Test::Exception; use Test::Warn; use DBIx::Class::Schema::Loader::Utils 'slurp_file'; use File::Path; use Try::Tiny; use lib qw(t/lib); use make_dbictest_db_bad_comment_tables; use dbixcsl_test_dir qw/$tdir/; my $dump_path = "$tdir/dump"; { package DBICTest::Schema::1; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options( dump_directory => $dump_path, quiet => 1, ); } try { DBICTest::Schema::1->connect($make_dbictest_db_bad_comment_tables::dsn); }; plan tests => 1; my $foo = try { slurp_file("$dump_path/DBICTest/Schema/1/Result/Foo.pm") }; my $bar = try { slurp_file("$dump_path/DBICTest/Schema/1/Result/Bar.pm") }; like($foo, qr/Result::Foo\n/, 'No error from invalid comment tables'); END { rmtree($dump_path, 1, 1); } DBIx-Class-Schema-Loader-0.07045/t/70schema_base_dispatched.t0000644000175000017500000000445312542756321022676 0ustar ilmariilmariuse strict; use warnings; use Test::More tests => 10; use DBIx::Class::Schema::Loader 'make_schema_at'; use lib 't/lib'; use make_dbictest_db; make_schema_at( 'DBICTest::Schema::_test_schema_base', { naming => 'current', schema_base_class => 'TestSchemaBaseClass', schema_components => ['TestSchemaComponent'], }, [ $make_dbictest_db::dsn ], ); is $TestSchemaBaseClass::test_ok, 1, 'connected using schema_base_class'; is $DBIx::Class::TestSchemaComponent::test_component_ok, 1, 'connected using 
schema_components'; # try an explicit dynamic schema { package DBICTest::Schema::_test_schema_base_dynamic; use base 'DBIx::Class::Schema::Loader'; our $ran_connection = 0; __PACKAGE__->loader_options({ naming => 'current', schema_base_class => 'TestSchemaBaseClass', schema_components => ['TestSchemaComponent'], }); # check that connection doesn't cause an infinite loop sub connection { my $self = shift; $ran_connection++; return $self->next::method(@_) } } $TestSchemaBaseClass::test_ok = 0; $DBIx::Class::TestSchemaComponent::test_component_ok = 0; ok(my $schema = DBICTest::Schema::_test_schema_base_dynamic->connect($make_dbictest_db::dsn), 'connected dynamic schema'); is $DBICTest::Schema::_test_schema_base_dynamic::ran_connection, 1, 'schema class connection method ran only once'; is $TestSchemaBaseClass::test_ok, 1, 'connected using schema_base_class in dynamic schema'; is $DBIx::Class::TestSchemaComponent::test_component_ok, 1, 'connected using schema_components in dynamic schema'; # connect a second time $TestSchemaBaseClass::test_ok = 0; $DBIx::Class::TestSchemaComponent::test_component_ok = 0; $DBICTest::Schema::_test_schema_base_dynamic::ran_connection = 0; ok($schema = DBICTest::Schema::_test_schema_base_dynamic->connect($make_dbictest_db::dsn), 'connected dynamic schema a second time'); is $DBICTest::Schema::_test_schema_base_dynamic::ran_connection, 1, 'schema class connection method ran only once when connecting a second time'; is $TestSchemaBaseClass::test_ok, 1, 'connected using schema_base_class in dynamic schema a second time'; is $DBIx::Class::TestSchemaComponent::test_component_ok, 1, 'connected using schema_components in dynamic schema a second time'; DBIx-Class-Schema-Loader-0.07045/t/10_08sqlanywhere_common.t0000644000175000017500000004332212542756317022472 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Test::Exception; use Try::Tiny; use File::Path 'rmtree'; use DBIx::Class::Optional::Dependencies; use 
DBIx::Class::Schema::Loader 'make_schema_at'; use Scope::Guard (); use lib qw(t/lib); use dbixcsl_common_tests; use dbixcsl_test_dir '$tdir'; use constant EXTRA_DUMP_DIR => "$tdir/sqlanywhere_extra_dump"; # The default max_cursor_count and max_statement_count settings of 50 are too # low to run this test. # # Setting them to zero is preferred. my %dsns; for (qw(SQLANYWHERE SQLANYWHERE_ODBC)) { next unless $ENV{"DBICTEST_${_}_DSN"}; my $dep_group = lc "rdbms_$_"; if (!DBIx::Class::Optional::Dependencies->req_ok_for($dep_group)) { diag 'You need to install ' . DBIx::Class::Optional::Dependencies->req_missing_for($dep_group) . " to test with $_"; next; } $dsns{$_}{dsn} = $ENV{"DBICTEST_${_}_DSN"}; $dsns{$_}{user} = $ENV{"DBICTEST_${_}_USER"}; $dsns{$_}{password} = $ENV{"DBICTEST_${_}_PASS"}; }; plan skip_all => 'You need to set the DBICTEST_SQLANYWHERE_DSN, _USER and _PASS and/or the DBICTEST_SQLANYWHERE_ODBC_DSN, _USER and _PASS environment variables' unless %dsns; my ($schema, $schemas_created); # for cleanup in END for extra tests my $tester = dbixcsl_common_tests->new( vendor => 'SQLAnywhere', auto_inc_pk => 'INTEGER IDENTITY NOT NULL PRIMARY KEY', connect_info => [ map { $dsns{$_} } sort keys %dsns ], loader_options => { preserve_case => 1 }, default_is_deferrable => 1, default_on_clause => 'RESTRICT', data_types => { # http://infocenter.sybase.com/help/topic/com.sybase.help.sqlanywhere.11.0.1/dbreference_en11/rf-datatypes.html # # Numeric types 'bit' => { data_type => 'bit' }, 'tinyint' => { data_type => 'tinyint' }, 'smallint' => { data_type => 'smallint' }, 'int' => { data_type => 'integer' }, 'integer' => { data_type => 'integer' }, 'bigint' => { data_type => 'bigint' }, 'float' => { data_type => 'real' }, 'real' => { data_type => 'real' }, 'double' => { data_type => 'double precision' }, 'double precision' => { data_type => 'double precision' }, 'float(2)' => { data_type => 'real' }, 'float(24)' => { data_type => 'real' }, 'float(25)' => { data_type => 
'double precision' }, 'float(53)' => { data_type => 'double precision' }, # This test only works with the default precision and scale options. # # They are preserved even for the default values, because the defaults # can be changed. 'decimal' => { data_type => 'decimal', size => [30,6] }, 'dec' => { data_type => 'decimal', size => [30,6] }, 'numeric' => { data_type => 'numeric', size => [30,6] }, 'decimal(3)' => { data_type => 'decimal', size => [3,0] }, 'dec(3)' => { data_type => 'decimal', size => [3,0] }, 'numeric(3)' => { data_type => 'numeric', size => [3,0] }, 'decimal(3,3)' => { data_type => 'decimal', size => [3,3] }, 'dec(3,3)' => { data_type => 'decimal', size => [3,3] }, 'numeric(3,3)' => { data_type => 'numeric', size => [3,3] }, 'decimal(18,18)' => { data_type => 'decimal', size => [18,18] }, 'dec(18,18)' => { data_type => 'decimal', size => [18,18] }, 'numeric(18,18)' => { data_type => 'numeric', size => [18,18] }, # money types 'money' => { data_type => 'money' }, 'smallmoney' => { data_type => 'smallmoney' }, # bit arrays 'long varbit' => { data_type => 'long varbit' }, 'long bit varying' => { data_type => 'long varbit' }, 'varbit' => { data_type => 'varbit', size => 1 }, 'varbit(20)' => { data_type => 'varbit', size => 20 }, 'bit varying' => { data_type => 'varbit', size => 1 }, 'bit varying(20)' => { data_type => 'varbit', size => 20 }, # Date and Time Types 'date' => { data_type => 'date' }, 'datetime' => { data_type => 'datetime' }, 'smalldatetime' => { data_type => 'smalldatetime' }, 'timestamp' => { data_type => 'timestamp' }, # rewrite 'current timestamp' as 'current_timestamp' 'timestamp default current timestamp' => { data_type => 'timestamp', default_value => \'current_timestamp', original => { default_value => \'current timestamp' } }, 'time' => { data_type => 'time' }, # String Types 'char' => { data_type => 'char', size => 1 }, 'char(11)' => { data_type => 'char', size => 11 }, 'nchar' => { data_type => 'nchar', size => 1 }, 
'nchar(11)' => { data_type => 'nchar', size => 11 }, 'varchar' => { data_type => 'varchar', size => 1 }, 'varchar(20)' => { data_type => 'varchar', size => 20 }, 'char varying(20)' => { data_type => 'varchar', size => 20 }, 'character varying(20)' => { data_type => 'varchar', size => 20 }, 'nvarchar(20)' => { data_type => 'nvarchar', size => 20 }, 'xml' => { data_type => 'xml' }, 'uniqueidentifierstr' => { data_type => 'uniqueidentifierstr' }, # Binary types 'binary' => { data_type => 'binary', size => 1 }, 'binary(20)' => { data_type => 'binary', size => 20 }, 'varbinary' => { data_type => 'varbinary', size => 1 }, 'varbinary(20)'=> { data_type => 'varbinary', size => 20 }, 'uniqueidentifier' => { data_type => 'uniqueidentifier' }, # Blob types 'long binary' => { data_type => 'long binary' }, 'image' => { data_type => 'image' }, 'long varchar' => { data_type => 'long varchar' }, 'text' => { data_type => 'text' }, 'long nvarchar'=> { data_type => 'long nvarchar' }, 'ntext' => { data_type => 'ntext' }, }, extra => { create => [ # 4 through 8 are used for the multi-schema tests q{ create table sqlanywhere_loader_test9 ( id int identity not null primary key ) }, q{ create table sqlanywhere_loader_test10 ( id int identity not null primary key, nine_id int, foreign key (nine_id) references sqlanywhere_loader_test9(id) on delete cascade on update set null ) }, ], drop => [ qw/sqlanywhere_loader_test9 sqlanywhere_loader_test10/ ], count => 4 + 30 * 2, run => sub { SKIP: { $schema = $_[0]; my $self = $_[3]; # test on delete/update fk clause introspection ok ((my $rel_info = $schema->source('SqlanywhereLoaderTest10')->relationship_info('nine')), 'got rel info'); is $rel_info->{attrs}{on_delete}, 'CASCADE', 'ON DELETE clause introspected correctly'; is $rel_info->{attrs}{on_update}, 'SET NULL', 'ON UPDATE clause introspected correctly'; is $rel_info->{attrs}{is_deferrable}, 1, 'is_deferrable defaults to 1'; my $connect_info = [@$self{qw/dsn user password/}]; my $dbh = 
$schema->storage->dbh; try { $dbh->do("CREATE USER dbicsl_test1 identified by 'dbicsl'"); } catch { $schemas_created = 0; skip "no CREATE USER privileges", 30 * 2; }; $dbh->do(<<"EOF"); CREATE TABLE dbicsl_test1.sqlanywhere_loader_test4 ( id INT IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100) ) EOF $dbh->do(<<"EOF"); CREATE TABLE dbicsl_test1.sqlanywhere_loader_test5 ( id INT IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100), four_id INTEGER NOT NULL, CONSTRAINT loader_test5_uniq UNIQUE (four_id), FOREIGN KEY (four_id) REFERENCES dbicsl_test1.sqlanywhere_loader_test4 (id) ) EOF $dbh->do("CREATE USER dbicsl_test2 identified by 'dbicsl'"); $dbh->do(<<"EOF"); CREATE TABLE dbicsl_test2.sqlanywhere_loader_test5 ( pk INT IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100), four_id INTEGER NOT NULL, CONSTRAINT loader_test5_uniq UNIQUE (four_id), FOREIGN KEY (four_id) REFERENCES dbicsl_test1.sqlanywhere_loader_test4 (id) ) EOF $dbh->do(<<"EOF"); CREATE TABLE dbicsl_test2.sqlanywhere_loader_test6 ( id INT IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100), sqlanywhere_loader_test4_id INTEGER, FOREIGN KEY (sqlanywhere_loader_test4_id) REFERENCES dbicsl_test1.sqlanywhere_loader_test4 (id) ) EOF $dbh->do(<<"EOF"); CREATE TABLE dbicsl_test2.sqlanywhere_loader_test7 ( id INT IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100), six_id INTEGER NOT NULL UNIQUE, FOREIGN KEY (six_id) REFERENCES dbicsl_test2.sqlanywhere_loader_test6 (id) ) EOF $dbh->do(<<"EOF"); CREATE TABLE dbicsl_test1.sqlanywhere_loader_test8 ( id INT IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100), sqlanywhere_loader_test7_id INTEGER, FOREIGN KEY (sqlanywhere_loader_test7_id) REFERENCES dbicsl_test2.sqlanywhere_loader_test7 (id) ) EOF $schemas_created = 1; my $guard = Scope::Guard->new(\&extra_cleanup); foreach my $db_schema (['dbicsl_test1', 'dbicsl_test2'], '%') { lives_and { rmtree EXTRA_DUMP_DIR; my @warns; local $SIG{__WARN__} = sub { push @warns, $_[0] unless $_[0] =~ /\bcollides\b/; }; make_schema_at( 
'SQLAnywhereMultiSchema', { naming => 'current', db_schema => $db_schema, dump_directory => EXTRA_DUMP_DIR, quiet => 1, }, $connect_info, ); diag join "\n", @warns if @warns; is @warns, 0; } 'dumped schema for dbicsl_test1 and dbicsl_test2 schemas with no warnings'; my ($test_schema, $rsrc, $rs, $row, %uniqs, $rel_info); lives_and { ok $test_schema = SQLAnywhereMultiSchema->connect(@$connect_info); } 'connected test schema'; lives_and { ok $rsrc = $test_schema->source('SqlanywhereLoaderTest4'); } 'got source for table in schema one'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in schema one'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in schema one'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in schema one'; lives_and { ok $rs = $test_schema->resultset('SqlanywhereLoaderTest4'); } 'got resultset for table in schema one'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in schema one'; $rel_info = try { $rsrc->relationship_info('dbicsl_test1_sqlanywhere_loader_test5') }; is_deeply $rel_info->{cond}, { 'foreign.four_id' => 'self.id' }, 'relationship in schema one'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in schema one'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in schema one'; lives_and { ok $rsrc = $test_schema->source('DbicslTest1SqlanywhereLoaderTest5'); } 'got source for table in schema one'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in schema one'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['four_id'], 'correct unique constraint in schema one'); lives_and { ok $rsrc = $test_schema->source('SqlanywhereLoaderTest6'); } 'got source for table in schema two'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in schema two introspected correctly'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in schema two introspected 
correctly'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in schema two introspected correctly'; lives_and { ok $rs = $test_schema->resultset('SqlanywhereLoaderTest6'); } 'got resultset for table in schema two'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in schema two'; $rel_info = try { $rsrc->relationship_info('sqlanywhere_loader_test7') }; is_deeply $rel_info->{cond}, { 'foreign.six_id' => 'self.id' }, 'relationship in schema two'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in schema two'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in schema two'; lives_and { ok $rsrc = $test_schema->source('SqlanywhereLoaderTest7'); } 'got source for table in schema two'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in schema two'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['six_id'], 'correct unique constraint in schema two'); lives_and { ok $test_schema->source('SqlanywhereLoaderTest6') ->has_relationship('sqlanywhere_loader_test4'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('SqlanywhereLoaderTest4') ->has_relationship('sqlanywhere_loader_test6s'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('SqlanywhereLoaderTest8') ->has_relationship('sqlanywhere_loader_test7'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('SqlanywhereLoaderTest7') ->has_relationship('sqlanywhere_loader_test8s'); } 'cross-schema relationship in multi-db_schema'; } } }, }, ); $tester->run_tests(); sub extra_cleanup { if (not $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}) { if ($schemas_created && (my $dbh = try { $schema->storage->dbh })) { foreach my $table ('dbicsl_test1.sqlanywhere_loader_test8', 'dbicsl_test2.sqlanywhere_loader_test7', 'dbicsl_test2.sqlanywhere_loader_test6', 'dbicsl_test2.sqlanywhere_loader_test5', 
'dbicsl_test1.sqlanywhere_loader_test5', 'dbicsl_test1.sqlanywhere_loader_test4') { try { $dbh->do("DROP TABLE $table"); } catch { diag "Error dropping table: $_"; }; } foreach my $db_schema (qw/dbicsl_test1 dbicsl_test2/) { try { $dbh->do("DROP USER $db_schema"); } catch { diag "Error dropping test user $db_schema: $_"; }; } } rmtree EXTRA_DUMP_DIR; } } # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/45relationships.t0000644000175000017500000001723712542756321021146 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Test::Exception; use Try::Tiny; use lib qw(t/lib); use make_dbictest_db; use DBIx::Class::Schema::Loader; my $schema_counter = 0; # test skip_relationships my $regular = schema_with(); is( ref($regular->source('Bar')->relationship_info('fooref')), 'HASH', 'regularly-made schema has fooref rel', ); my $skip_rel = schema_with( skip_relationships => 1 ); is_deeply( $skip_rel->source('Bar')->relationship_info('fooref'), undef, 'skip_relationships blocks generation of fooref rel', ); # test hashref as rel_name_map my $hash_relationship = schema_with( rel_name_map => { fooref => "got_fooref", bars => "ignored", Foo => { bars => "got_bars", fooref => "ignored", }, } ); is( ref($hash_relationship->source('Foo')->relationship_info('got_bars')), 'HASH', 'single level hash in rel_name_map picked up correctly' ); is( ref($hash_relationship->source('Bar')->relationship_info('got_fooref')), 'HASH', 'double level hash in rel_name_map picked up correctly' ); # test coderef as rel_name_map my $code_relationship = schema_with( rel_name_map => sub { my ($args, $orig) = @_; if ($args->{local_moniker} eq 'Foo') { is_deeply( $args, { name => 'bars', type => 'has_many', local_class => "DBICTest::Schema::${schema_counter}::Result::Foo", local_moniker => 'Foo', local_columns => ['fooid'], remote_class => "DBICTest::Schema::${schema_counter}::Result::Bar", remote_moniker => 'Bar', remote_columns => ['fooref'], }, 'correct args for Foo passed' ); } 
elsif ($args->{local_moniker} eq 'Bar') { is_deeply( $args, { name => 'fooref', type => 'belongs_to', local_class => "DBICTest::Schema::${schema_counter}::Result::Bar", local_moniker => 'Bar', local_columns => ['fooref'], remote_class => "DBICTest::Schema::${schema_counter}::Result::Foo", remote_moniker => 'Foo', remote_columns => ['fooid'], }, 'correct args for Foo passed' ); } else { fail( 'correct args passed to rel_name_map' ); diag "args were: ", explain $args; } return $orig->({ Bar => { fooref => 'fooref_caught' }, Foo => { bars => 'bars_caught' }, }); } ); is( ref($code_relationship->source('Foo')->relationship_info('bars_caught')), 'HASH', 'rel_name_map overrode local_info correctly' ); is( ref($code_relationship->source('Bar')->relationship_info('fooref_caught')), 'HASH', 'rel_name_map overrode remote_info correctly' ); throws_ok { schema_with( rel_name_map => sub { $_[-1]->(sub{}) } ), } qr/reentered rel_name_map must be a hashref/, 'throws error for invalid (code) rel_name_map callback map'; # test relationship_attrs throws_ok { schema_with( relationship_attrs => 'laughably invalid!!!' 
); } qr/relationship_attrs/, 'throws error for invalid (scalar) relationship_attrs'; throws_ok { schema_with( relationship_attrs => [qw/laughably invalid/] ); } qr/relationship_attrs/, 'throws error for invalid (arrayref) relationship_attrs'; { my $nodelete = schema_with( relationship_attrs => { all => { cascade_delete => 0 }, belongs_to => { cascade_delete => 1 }, }); my $bars_info = $nodelete->source('Foo')->relationship_info('bars'); #use Data::Dumper; #die Dumper([ $nodelete->source('Foo')->relationships() ]); my $fooref_info = $nodelete->source('Bar')->relationship_info('fooref'); is( ref($fooref_info), 'HASH', 'fooref rel is present', ); is( $bars_info->{attrs}->{cascade_delete}, 0, 'relationship_attrs settings seem to be getting through to the generated rels', ); is( $fooref_info->{attrs}->{cascade_delete}, 1, 'belongs_to in relationship_attrs overrides all def', ); } # test relationship_attrs coderef { my $relationship_attrs_coderef_invoked = 0; my $schema; lives_ok { $schema = schema_with(relationship_attrs => sub { my %p = @_; $relationship_attrs_coderef_invoked++; if ($p{rel_name} eq 'bars') { is $p{rel_type}, 'has_many', 'correct rel_type'; is $p{local_table}, 'foo', 'correct local_table'; is_deeply $p{local_cols}, [ 'fooid' ], 'correct local_cols'; is $p{remote_table}, 'bar', 'correct remote_table'; is_deeply $p{remote_cols}, [ 'fooref' ], 'correct remote_cols'; is_deeply $p{attrs}, { cascade_delete => 0, cascade_copy => 0, }, "got default rel attrs for $p{rel_name} in $p{local_table}"; like $p{local_source}->result_class, qr/^DBICTest::Schema::\d+::Result::Foo\z/, 'correct local source'; like $p{remote_source}->result_class, qr/^DBICTest::Schema::\d+::Result::Bar\z/, 'correct remote source'; $p{attrs}{snoopy} = 1; return $p{attrs}; } elsif ($p{rel_name} eq 'fooref') { is $p{rel_type}, 'belongs_to', 'correct rel_type'; is $p{local_table}, 'bar', 'correct local_table'; is_deeply $p{local_cols}, [ 'fooref' ], 'correct local_cols'; is $p{remote_table}, 
'foo', 'correct remote_table'; is_deeply $p{remote_cols}, [ 'fooid' ], 'correct remote_cols'; is_deeply $p{attrs}, { on_delete => 'NO ACTION', on_update => 'NO ACTION', is_deferrable => 0, }, "got correct rel attrs for $p{rel_name} in $p{local_table}"; like $p{local_source}->result_class, qr/^DBICTest::Schema::\d+::Result::Bar\z/, 'correct local source'; like $p{remote_source}->result_class, qr/^DBICTest::Schema::\d+::Result::Foo\z/, 'correct remote source'; $p{attrs}{scooby} = 1; return $p{attrs}; } else { fail "unknown rel $p{rel_name} in $p{local_table}"; } }); } 'dumping schema with coderef relationship_attrs survived'; is $relationship_attrs_coderef_invoked, 2, 'relationship_attrs coderef was invoked correct number of times'; is ((try { $schema->source('Foo')->relationship_info('bars')->{attrs}{snoopy} }) || undef, 1, "correct relationship attributes for 'bars' in 'Foo'"); is ((try { $schema->source('Bar')->relationship_info('fooref')->{attrs}{scooby} }) || undef, 1, "correct relationship attributes for 'fooref' in 'Bar'"); } done_testing; #### generates a new schema with the given opts every time it's called sub schema_with { $schema_counter++; DBIx::Class::Schema::Loader::make_schema_at( 'DBICTest::Schema::'.$schema_counter, { naming => 'current', @_ }, [ $make_dbictest_db::dsn ], ); "DBICTest::Schema::$schema_counter"->clone; } DBIx-Class-Schema-Loader-0.07045/t/10_11msaccess_common.t0000644000175000017500000001443012542756317021721 0ustar ilmariilmariuse strict; use warnings; use Test::More; use DBIx::Class::Optional::Dependencies; use DBIx::Class::Schema::Loader::DBI::ODBC::ACCESS (); use lib qw(t/lib); use dbixcsl_common_tests; my %dsns; for (qw(MSACCESS_ODBC MSACCESS_ADO)) { next unless $ENV{"DBICTEST_${_}_DSN"}; my $dep_group = lc "rdbms_$_"; if (!DBIx::Class::Optional::Dependencies->req_ok_for($dep_group)) { diag 'You need to install ' . DBIx::Class::Optional::Dependencies->req_missing_for($dep_group) . 
" to test with $_"; next; } $dsns{$_}{dsn} = $ENV{"DBICTEST_${_}_DSN"}; $dsns{$_}{user} = $ENV{"DBICTEST_${_}_USER"}; $dsns{$_}{password} = $ENV{"DBICTEST_${_}_PASS"}; }; plan skip_all => 'You need to set the DBICTEST_MSACCESS_ODBC_DSN, _USER and _PASS and/or the DBICTEST_MSACCESS_ADO_DSN, _USER and _PASS environment variables' unless %dsns; my %ado_extra_types = ( 'tinyint' => { data_type => 'tinyint', original => { data_type => 'byte' } }, 'smallmoney' => { data_type => 'money', original => { data_type => 'currency' } }, 'decimal' => { data_type => 'decimal' }, 'decimal(3)' => { data_type => 'decimal', size => [3, 0] }, 'decimal(3,3)'=> { data_type => 'decimal', size => [3, 3] }, 'dec(5,5)' => { data_type => 'decimal', size => [5, 5] }, 'numeric(2,2)'=> { data_type => 'decimal', size => [2, 2] }, 'character' => { data_type => 'char', size => 255 }, 'character varying(5)' => { data_type => 'varchar', size => 5 }, 'nchar(5)' => { data_type => 'char', size => 5 }, 'national character(5)' => { data_type => 'char', size => 5 }, 'nvarchar(5)' => { data_type => 'varchar', size => 5 }, 'national character varying(5)' => { data_type => 'varchar', size => 5 }, 'national char varying(5)' => { data_type => 'varchar', size => 5 }, 'smalldatetime' => { data_type => 'datetime' }, 'uniqueidentifier' => { data_type => 'uniqueidentifier', original => { data_type => 'guid' } }, 'text' => { data_type => 'text', original => { data_type => 'longchar' } }, 'ntext' => { data_type => 'text', original => { data_type => 'longchar' } }, ); my $tester = dbixcsl_common_tests->new( vendor => 'Access', auto_inc_pk => 'AUTOINCREMENT PRIMARY KEY', quote_char => [qw/[ ]/], connect_info => [ map { $dsns{$_} } sort keys %dsns ], data_types => { # http://msdn.microsoft.com/en-us/library/bb208866(v=office.12).aspx # # Numeric types 'autoincrement'=>{ data_type => 'integer', is_auto_increment => 1 }, 'int' => { data_type => 'integer' }, 'integer' => { data_type => 'integer' }, 'long' => { data_type => 
'integer' }, 'integer4' => { data_type => 'integer' }, 'smallint' => { data_type => 'smallint' }, 'short' => { data_type => 'smallint' }, 'integer2' => { data_type => 'smallint' }, 'integer1' => { data_type => 'tinyint', original => { data_type => 'byte' } }, 'byte' => { data_type => 'tinyint', original => { data_type => 'byte' } }, 'bit' => { data_type => 'bit' }, 'logical' => { data_type => 'bit' }, 'logical1' => { data_type => 'bit' }, 'yesno' => { data_type => 'bit' }, 'money' => { data_type => 'money', original => { data_type => 'currency' } }, 'currency' => { data_type => 'money', original => { data_type => 'currency' } }, 'real' => { data_type => 'real' }, 'single' => { data_type => 'real' }, 'ieeesingle' => { data_type => 'real' }, 'float4' => { data_type => 'real' }, 'float' => { data_type => 'double precision', original => { data_type => 'double' } }, 'float' => { data_type => 'double precision', original => { data_type => 'double' } }, 'float8' => { data_type => 'double precision', original => { data_type => 'double' } }, 'double' => { data_type => 'double precision', original => { data_type => 'double' } }, 'ieeedouble' => { data_type => 'double precision', original => { data_type => 'double' } }, 'number' => { data_type => 'double precision', original => { data_type => 'double' } }, # # character types 'text(25)' => { data_type => 'varchar', size => 25 }, 'char' => { data_type => 'char', size => 255 }, 'char(5)' => { data_type => 'char', size => 5 }, 'string(5)' => { data_type => 'varchar', size => 5 }, 'varchar(5)' => { data_type => 'varchar', size => 5 }, # binary types 'binary(10)' => { data_type => 'binary', size => 10 }, 'varbinary(11)' => { data_type => 'varbinary', size => 11 }, # datetime types 'datetime' => { data_type => 'datetime' }, 'time' => { data_type => 'datetime' }, 'timestamp' => { data_type => 'datetime' }, # misc types 'guid' => { data_type => 'uniqueidentifier', original => { data_type => 'guid' } }, # blob types 'longchar' => { 
data_type => 'text', original => { data_type => 'longchar' } }, 'longtext' => { data_type => 'text', original => { data_type => 'longchar' } }, 'memo' => { data_type => 'text', original => { data_type => 'longchar' } }, 'image' => { data_type => 'image', original => { data_type => 'longbinary' } }, 'longbinary' => { data_type => 'image', original => { data_type => 'longbinary' } }, %ado_extra_types, }, data_types_ddl_cb => sub { my $ddl = shift; { package DBIXCSL_Test::DummySchema; use base 'DBIx::Class::Schema'; } my @connect_info = @{$dsns{MSACCESS_ODBC} || $dsns{MSACCESS_ADO}}; my $schema = DBIXCSL_Test::DummySchema->connect(@connect_info); my $loader = DBIx::Class::Schema::Loader::DBI::ODBC::ACCESS->new( schema => $schema, naming => 'current', ); my $conn = $loader->_ado_connection; require Win32::OLE; my $comm = Win32::OLE->new('ADODB.Command'); $comm->{ActiveConnection} = $conn; $comm->{CommandText} = $ddl; $comm->Execute; }, ); $tester->run_tests(); # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/10_05ora_common.t0000644000175000017500000005663312542756321020712 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_rdbms_oracle'; use strict; use warnings; use Test::More; use Test::Exception; use DBIx::Class::Schema::Loader 'make_schema_at'; use DBIx::Class::Schema::Loader::Utils qw/slurp_file split_name/; use Try::Tiny; use File::Path 'rmtree'; use String::ToIdentifier::EN::Unicode 'to_identifier'; use namespace::clean; use lib qw(t/lib); use dbixcsl_common_tests (); use dbixcsl_test_dir '$tdir'; use constant EXTRA_DUMP_DIR => "$tdir/ora_extra_dump"; my $dsn = $ENV{DBICTEST_ORA_DSN} || ''; my $user = $ENV{DBICTEST_ORA_USER} || ''; my $password = $ENV{DBICTEST_ORA_PASS} || ''; my ($schema, $extra_schema); # for cleanup in END for extra tests my $auto_inc_cb = sub { my ($table, $col) = @_; return ( qq{ CREATE SEQUENCE ${table}_${col}_seq START WITH 1 INCREMENT BY 1}, qq{ CREATE OR REPLACE TRIGGER 
${table}_${col}_trg BEFORE INSERT ON ${table} FOR EACH ROW BEGIN SELECT ${table}_${col}_seq.nextval INTO :NEW.${col} FROM dual; END; } ); }; my $auto_inc_drop_cb = sub { my ($table, $col) = @_; return qq{ DROP SEQUENCE ${table}_${col}_seq }; }; dbixcsl_common_tests->new( vendor => 'Oracle', auto_inc_pk => 'INTEGER NOT NULL PRIMARY KEY', auto_inc_cb => $auto_inc_cb, auto_inc_drop_cb => $auto_inc_drop_cb, preserve_case_mode_is_exclusive => 1, quote_char => '"', default_is_deferrable => 0, default_on_delete_clause => 'NO ACTION', default_on_update_clause => 'NO ACTION', dsn => $dsn, user => $user, password => $password, data_types => { # From: # http://download.oracle.com/docs/cd/B19306_01/server.102/b14200/sql_elements001.htm#i54330 # # These tests require at least Oracle 9.2, because of the VARCHAR to # VARCHAR2 casting. # # Character Types 'char' => { data_type => 'char', size => 1 }, 'char(11)' => { data_type => 'char', size => 11 }, 'nchar' => { data_type => 'nchar', size => 1 }, 'national character' => { data_type => 'nchar', size => 1 }, 'nchar(11)' => { data_type => 'nchar', size => 11 }, 'national character(11)' => { data_type => 'nchar', size => 11 }, 'varchar(20)' => { data_type => 'varchar2', size => 20 }, 'varchar2(20)' => { data_type => 'varchar2', size => 20 }, 'nvarchar2(20)'=> { data_type => 'nvarchar2', size => 20 }, 'national character varying(20)' => { data_type => 'nvarchar2', size => 20 }, # Numeric Types # # integer/decimal/numeric is alised to NUMBER # 'integer' => { data_type => 'integer', original => { data_type => 'number', size => [38,0] } }, 'int' => { data_type => 'integer', original => { data_type => 'number', size => [38,0] } }, 'smallint' => { data_type => 'integer', original => { data_type => 'number', size => [38,0] } }, # very long DEFAULT throws an ORA-24345 "number(15) DEFAULT to_number(decode(substrb(userenv('CLIENT_INFO'),1,1),' ',null,substrb(userenv('CLIENT_INFO'),1,10)))" => { data_type => 'numeric', size => [15,0], original 
=> { data_type => 'number' }, default_value => \"to_number(decode(substrb(userenv('CLIENT_INFO'),1,1),' ',null,substrb(userenv('CLIENT_INFO'),1,10)))" }, 'decimal' => { data_type => 'integer', original => { data_type => 'number', size => [38,0] } }, 'dec' => { data_type => 'integer', original => { data_type => 'number', size => [38,0] } }, 'numeric' => { data_type => 'integer', original => { data_type => 'number', size => [38,0] } }, 'decimal(3)' => { data_type => 'numeric', size => [3,0], original => { data_type => 'number' } }, 'dec(3)' => { data_type => 'numeric', size => [3,0], original => { data_type => 'number' } }, 'numeric(3)' => { data_type => 'numeric', size => [3,0], original => { data_type => 'number' } }, 'decimal(3,3)' => { data_type => 'numeric', size => [3,3], original => { data_type => 'number' } }, 'dec(3,3)' => { data_type => 'numeric', size => [3,3], original => { data_type => 'number' } }, 'numeric(3,3)' => { data_type => 'numeric', size => [3,3], original => { data_type => 'number' } }, 'binary_float' => { data_type => 'real', original => { data_type => 'binary_float' } }, 'binary_double' => { data_type => 'double precision', original => { data_type => 'binary_double' } }, # these are not mentioned in the summary chart, must be aliased real => { data_type => 'real', original => { data_type => 'float', size => 63 } }, 'float(63)' => { data_type => 'real', original => { data_type => 'float', size => 63 } }, 'float(64)' => { data_type => 'double precision', original => { data_type => 'float', size => 64 } }, 'float(126)' => { data_type => 'double precision', original => { data_type => 'float', size => 126 } }, float => { data_type => 'double precision', original => { data_type => 'float', size => 126 } }, # Blob Types 'raw(50)' => { data_type => 'raw', size => 50 }, 'clob' => { data_type => 'clob' }, 'nclob' => { data_type => 'nclob' }, 'blob' => { data_type => 'blob' }, 'bfile' => { data_type => 'bfile' }, 'long' => { data_type => 'long' }, 
'long raw' => { data_type => 'long raw' }, # Datetime Types 'date' => { data_type => 'datetime', original => { data_type => 'date' } }, 'date default sysdate' => { data_type => 'datetime', default_value => \'current_timestamp', original => { data_type => 'date', default_value => \'sysdate' } }, 'timestamp' => { data_type => 'timestamp' }, 'timestamp default current_timestamp' => { data_type => 'timestamp', default_value => \'current_timestamp' }, 'timestamp(3)' => { data_type => 'timestamp', size => 3 }, 'timestamp with time zone' => { data_type => 'timestamp with time zone' }, 'timestamp(3) with time zone' => { data_type => 'timestamp with time zone', size => 3 }, 'timestamp with local time zone' => { data_type => 'timestamp with local time zone' }, 'timestamp(3) with local time zone' => { data_type => 'timestamp with local time zone', size => 3 }, 'interval year to month' => { data_type => 'interval year to month' }, 'interval year(3) to month' => { data_type => 'interval year to month', size => 3 }, 'interval day to second' => { data_type => 'interval day to second' }, 'interval day(3) to second' => { data_type => 'interval day to second', size => [3,6] }, 'interval day to second(3)' => { data_type => 'interval day to second', size => [2,3] }, 'interval day(3) to second(3)' => { data_type => 'interval day to second', size => [3,3] }, # Other Types 'rowid' => { data_type => 'rowid' }, 'urowid' => { data_type => 'urowid' }, 'urowid(3333)' => { data_type => 'urowid', size => 3333 }, }, extra => { create => [ q{ CREATE TABLE oracle_loader_test1 ( id NUMBER(11), value VARCHAR2(100) ) }, q{ COMMENT ON TABLE oracle_loader_test1 IS 'oracle_loader_test1 table comment' }, q{ COMMENT ON COLUMN oracle_loader_test1.value IS 'oracle_loader_test1.value column comment' }, # 4 through 8 are used for the multi-schema tests q{ create table oracle_loader_test9 ( id int primary key ) }, q{ create table oracle_loader_test10 ( id int primary key, nine_id int, foreign key (nine_id) 
references oracle_loader_test9(id) on delete set null deferrable ) }, q{ create table oracle_loader_test11 ( id int primary key disable, ten_id int unique disable, foreign key (ten_id) references oracle_loader_test10(id) disable ) }, $auto_inc_cb->('oracle_loader_test11', 'id'), 'alter trigger oracle_loader_test11_id_trg disable', ], drop => [qw/oracle_loader_test1 oracle_loader_test9 oracle_loader_test10 oracle_loader_test11/], count => 10 + 31 * 2, # basic + cross-schema * 2 run => sub { my ($monikers, $classes); ($schema, $monikers, $classes) = @_; SKIP: { if (my $source = $monikers->{loader_test1s}) { is $schema->source($source)->column_info('id')->{sequence}, 'loader_test1s_id_seq', 'Oracle sequence detection'; } else { skip 'not running common tests', 1; } } my $class = $classes->{oracle_loader_test1}; my $filename = $schema->loader->get_dump_filename($class); my $code = slurp_file $filename; like $code, qr/^=head1 NAME\n\n^$class - oracle_loader_test1 table comment\n\n^=cut\n/m, 'table comment'; like $code, qr/^=head2 value\n\n(.+:.+\n)+\noracle_loader_test1\.value column comment\n\n/m, 'column comment and attrs'; # test on delete/update fk clause introspection ok ((my $rel_info = $schema->source('OracleLoaderTest10')->relationship_info('nine')), 'got rel info'); is $rel_info->{attrs}{on_delete}, 'SET NULL', 'ON DELETE clause introspected correctly'; is $rel_info->{attrs}{on_update}, 'NO ACTION', 'ON UPDATE clause set to NO ACTION by default'; is $rel_info->{attrs}{is_deferrable}, 1, 'DEFERRABLE clause introspected correctly'; my $source11 = $schema->source('OracleLoaderTest11'); # DBD::Oracle < 1.76 doesn't filter out disabled primary keys my $uniqs = eval { DBD::Oracle->VERSION('1.76') } ? 
[] : ['primary']; is_deeply [keys %{{$source11->unique_constraints}}], $uniqs, 'Disabled unique constraints not loaded'; ok !$source11->relationship_info('ten'), 'Disabled FK not loaded'; ok !$source11->column_info('id')->{is_auto_increment}, 'Disabled autoinc trigger not loaded'; SKIP: { skip 'Set the DBICTEST_ORA_EXTRAUSER_DSN, _USER and _PASS environment variables to run the cross-schema relationship tests', 31 * 2 unless $ENV{DBICTEST_ORA_EXTRAUSER_DSN}; $extra_schema = $schema->clone; $extra_schema->connection(@ENV{map "DBICTEST_ORA_EXTRAUSER_$_", qw/DSN USER PASS/ }); my $dbh1 = $schema->storage->dbh; my $dbh2 = $extra_schema->storage->dbh; my ($schema1) = $dbh1->selectrow_array('SELECT USER FROM DUAL'); my ($schema2) = $dbh2->selectrow_array('SELECT USER FROM DUAL'); $dbh1->do(<<'EOF'); CREATE TABLE oracle_loader_test4 ( id INT NOT NULL PRIMARY KEY, value VARCHAR(100) ) EOF $dbh1->do($_) for $auto_inc_cb->(lc "${schema1}.oracle_loader_test4", 'id'); $dbh1->do("GRANT ALL ON oracle_loader_test4 TO $schema2"); $dbh1->do("GRANT ALL ON oracle_loader_test4_id_seq TO $schema2"); $dbh1->do(<<"EOF"); CREATE TABLE oracle_loader_test5 ( id INT NOT NULL PRIMARY KEY, value VARCHAR(100), four_id INT REFERENCES ${schema1}.oracle_loader_test4 (id), CONSTRAINT ora_loader5_uniq UNIQUE (four_id) ) EOF $dbh1->do($_) for $auto_inc_cb->('oracle_loader_test5', 'id'); $dbh1->do("GRANT ALL ON oracle_loader_test5 TO $schema2"); $dbh1->do("GRANT ALL ON oracle_loader_test5_id_seq TO $schema2"); $dbh2->do(<<"EOF"); CREATE TABLE oracle_loader_test5 ( pk INT NOT NULL PRIMARY KEY, value VARCHAR(100), four_id INT REFERENCES ${schema1}.oracle_loader_test4 (id), CONSTRAINT ora_loader5_uniq UNIQUE (four_id) ) EOF $dbh2->do($_) for $auto_inc_cb->('oracle_loader_test5', 'pk'); $dbh2->do("GRANT ALL ON oracle_loader_test5 TO $schema1"); $dbh2->do("GRANT ALL ON oracle_loader_test5_pk_seq TO $schema1"); $dbh2->do(<<"EOF"); CREATE TABLE oracle_loader_test6 ( id INT NOT NULL PRIMARY KEY, value 
VARCHAR(100), oracle_loader_test4_id INT REFERENCES ${schema1}.oracle_loader_test4 (id) ) EOF $dbh2->do($_) for $auto_inc_cb->('oracle_loader_test6', 'id'); $dbh2->do("GRANT ALL ON oracle_loader_test6 to $schema1"); $dbh2->do("GRANT ALL ON oracle_loader_test6_id_seq TO $schema1"); $dbh2->do(<<"EOF"); CREATE TABLE oracle_loader_test7 ( id INT NOT NULL PRIMARY KEY, value VARCHAR(100), six_id INT UNIQUE REFERENCES ${schema2}.oracle_loader_test6 (id) ) EOF $dbh2->do($_) for $auto_inc_cb->('oracle_loader_test7', 'id'); $dbh2->do("GRANT ALL ON oracle_loader_test7 to $schema1"); $dbh2->do("GRANT ALL ON oracle_loader_test7_id_seq TO $schema1"); $dbh1->do(<<"EOF"); CREATE TABLE oracle_loader_test8 ( id INT NOT NULL PRIMARY KEY, value VARCHAR(100), oracle_loader_test7_id INT REFERENCES ${schema2}.oracle_loader_test7 (id) ) EOF $dbh1->do($_) for $auto_inc_cb->('oracle_loader_test8', 'id'); $dbh1->do("GRANT ALL ON oracle_loader_test8 to $schema2"); $dbh1->do("GRANT ALL ON oracle_loader_test8_id_seq TO $schema2"); # We add schema to moniker_parts, so make a monikers hash for # the tests, of the form schemanum.tablenum my $schema1_moniker = join '', map ucfirst lc, split_name to_identifier $schema1; my $schema2_moniker = join '', map ucfirst lc, split_name to_identifier $schema2; my %monikers; $monikers{'1.5'} = $schema1_moniker . 'OracleLoaderTest5'; $monikers{'2.5'} = $schema2_moniker . 
'OracleLoaderTest5'; foreach my $db_schema ([$schema1, $schema2], '%') { lives_and { rmtree EXTRA_DUMP_DIR; my @warns; local $SIG{__WARN__} = sub { push @warns, $_[0] unless $_[0] =~ /\bcollides\b/; }; make_schema_at( 'OracleMultiSchema', { naming => 'current', db_schema => $db_schema, dump_directory => EXTRA_DUMP_DIR, quiet => 1, }, [ $dsn, $user, $password ], ); diag join "\n", @warns if @warns; is @warns, 0; } qq{dumped schema for "$schema1" and "$schema2" schemas with no warnings}; my ($test_schema, $rsrc, $rs, $row, %uniqs, $rel_info); lives_and { ok $test_schema = OracleMultiSchema->connect($dsn, $user, $password); } 'connected test schema'; lives_and { ok $rsrc = $test_schema->source('OracleLoaderTest4'); } 'got source for table in schema1'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in schema1'; is try { $rsrc->column_info('id')->{sequence} }, lc "${schema1}.oracle_loader_test4_id_seq", 'sequence in schema1'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar2', 'column in schema1'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in schema1'; lives_and { ok $rs = $test_schema->resultset('OracleLoaderTest4'); } 'got resultset for table in schema1'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in schema1'; my $schema1_identifier = join '_', map lc, split_name to_identifier $schema1; $rel_info = try { $rsrc->relationship_info( $schema1_identifier . 
'_oracle_loader_test5' ) }; is_deeply $rel_info->{cond}, { 'foreign.four_id' => 'self.id' }, 'relationship in schema1'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in schema1'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in schema1'; lives_and { ok $rsrc = $test_schema->source($monikers{'1.5'}); } 'got source for table in schema1'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in schema1'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['four_id'], 'correct unique constraint in schema1'); lives_and { ok $rsrc = $test_schema->source('OracleLoaderTest6'); } 'got source for table in schema2'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in schema2 introspected correctly'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar2', 'column in schema2 introspected correctly'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in schema2 introspected correctly'; lives_and { ok $rs = $test_schema->resultset('OracleLoaderTest6'); } 'got resultset for table in schema2'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in schema2'; $rel_info = try { $rsrc->relationship_info('oracle_loader_test7') }; is_deeply $rel_info->{cond}, { 'foreign.six_id' => 'self.id' }, 'relationship in schema2'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in schema2'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in schema2'; lives_and { ok $rsrc = $test_schema->source('OracleLoaderTest7'); } 'got source for table in schema2'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in schema2'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['six_id'], 'correct unique constraint in schema2'); lives_and { ok $test_schema->source('OracleLoaderTest6') ->has_relationship('oracle_loader_test4'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok 
$test_schema->source('OracleLoaderTest4') ->has_relationship('oracle_loader_test6s'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('OracleLoaderTest8') ->has_relationship('oracle_loader_test7'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('OracleLoaderTest7') ->has_relationship('oracle_loader_test8s'); } 'cross-schema relationship in multi-db_schema'; } } }, }, )->run_tests(); END { if (not $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}) { if (my $dbh1 = try { $schema->storage->dbh }) { $dbh1->do($_) for $auto_inc_drop_cb->('oracle_loader_test11','id'); if (my $dbh2 = try { $extra_schema->storage->dbh }) { try { $dbh1->do($_) for $auto_inc_drop_cb->('oracle_loader_test8', 'id'); $dbh2->do($_) for $auto_inc_drop_cb->('oracle_loader_test7', 'id'); $dbh2->do($_) for $auto_inc_drop_cb->('oracle_loader_test6', 'id'); $dbh2->do($_) for $auto_inc_drop_cb->('oracle_loader_test5', 'pk'); $dbh1->do($_) for $auto_inc_drop_cb->('oracle_loader_test5', 'id'); $dbh1->do($_) for $auto_inc_drop_cb->('oracle_loader_test4', 'id'); } catch { die "Error dropping sequences for cross-schema test tables: $_"; }; try { $dbh1->do('DROP TABLE oracle_loader_test8'); $dbh2->do('DROP TABLE oracle_loader_test7'); $dbh2->do('DROP TABLE oracle_loader_test6'); $dbh2->do('DROP TABLE oracle_loader_test5'); $dbh1->do('DROP TABLE oracle_loader_test5'); $dbh1->do('DROP TABLE oracle_loader_test4'); } catch { die "Error dropping cross-schema test tables: $_"; }; } } rmtree EXTRA_DUMP_DIR; } } # vim:et sw=4 sts=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/10_06sybase_common.t0000644000175000017500000004423212542756321021410 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_rdbms_ase'; use strict; use warnings; use Test::More; use Test::Exception; use Try::Tiny; use File::Path 'rmtree'; use DBIx::Class::Schema::Loader 'make_schema_at'; use DBIx::Class::Schema::Loader::Utils 
qw/sigwarn_silencer/; use namespace::clean; use DBI (); use lib qw(t/lib); use dbixcsl_common_tests (); use dbixcsl_test_dir '$tdir'; use constant EXTRA_DUMP_DIR => "$tdir/sybase_extra_dump"; my $dsn = $ENV{DBICTEST_SYBASE_DSN} || ''; my $user = $ENV{DBICTEST_SYBASE_USER} || ''; my $password = $ENV{DBICTEST_SYBASE_PASS} || ''; BEGIN { $ENV{DBIC_SYBASE_FREETDS_NOWARN} = 1 } my ($schema, $databases_created); # for cleanup in END for extra tests dbixcsl_common_tests->new( vendor => 'sybase', auto_inc_pk => 'INTEGER IDENTITY NOT NULL PRIMARY KEY', default_function => 'getdate()', default_function_def => 'AS getdate()', dsn => $dsn, user => $user, password => $password, data_types => { # http://ispirer.com/wiki/sqlways/sybase/data-types # # Numeric Types 'integer identity' => { data_type => 'integer', is_auto_increment => 1 }, int => { data_type => 'integer' }, integer => { data_type => 'integer' }, bigint => { data_type => 'bigint' }, smallint => { data_type => 'smallint' }, tinyint => { data_type => 'tinyint' }, 'double precision' => { data_type => 'double precision' }, real => { data_type => 'real' }, float => { data_type => 'double precision' }, 'float(14)' => { data_type => 'real' }, 'float(15)' => { data_type => 'real' }, 'float(16)' => { data_type => 'double precision' }, 'float(48)' => { data_type => 'double precision' }, 'numeric(6,3)' => { data_type => 'numeric', size => [6,3] }, 'decimal(6,3)' => { data_type => 'numeric', size => [6,3] }, numeric => { data_type => 'numeric' }, decimal => { data_type => 'numeric' }, bit => { data_type => 'bit' }, # Money Types money => { data_type => 'money' }, smallmoney => { data_type => 'smallmoney' }, # Computed Column 'AS getdate()' => { data_type => undef, inflate_datetime => 1, default_value => \'getdate()' }, # Blob Types text => { data_type => 'text' }, unitext => { data_type => 'unitext' }, image => { data_type => 'image' }, # DateTime Types date => { data_type => 'date' }, time => { data_type => 'time' }, datetime 
=> { data_type => 'datetime' }, smalldatetime => { data_type => 'smalldatetime' }, # Timestamp column timestamp => { data_type => 'timestamp', inflate_datetime => 0 }, # String Types 'char' => { data_type => 'char', size => 1 }, 'char(2)' => { data_type => 'char', size => 2 }, 'nchar' => { data_type => 'nchar', size => 1 }, 'nchar(2)' => { data_type => 'nchar', size => 2 }, 'unichar(2)' => { data_type => 'unichar', size => 2 }, 'varchar(2)' => { data_type => 'varchar', size => 2 }, 'nvarchar(2)' => { data_type => 'nvarchar', size => 2 }, 'univarchar(2)' => { data_type => 'univarchar', size => 2 }, # Binary Types 'binary' => { data_type => 'binary', size => 1 }, 'binary(2)' => { data_type => 'binary', size => 2 }, 'varbinary(2)' => { data_type => 'varbinary', size => 2 }, }, # test that named constraints aren't picked up as tables (I can't reproduce this on my machine) failtrigger_warnings => [ qr/^Bad table or view 'sybase_loader_test2_ref_slt1'/ ], extra => { create => [ q{ CREATE TABLE sybase_loader_test1 ( id int identity primary key ) }, q{ CREATE TABLE sybase_loader_test2 ( id int identity primary key, sybase_loader_test1_id int, CONSTRAINT sybase_loader_test2_ref_slt1 FOREIGN KEY (sybase_loader_test1_id) REFERENCES sybase_loader_test1 (id) ) }, ], drop => [ qw/sybase_loader_test1 sybase_loader_test2/ ], count => 30 * 4, run => sub { $schema = shift; SKIP: { my $dbh = $schema->storage->dbh; try { $dbh->do('USE master'); } catch { skip "these tests require the sysadmin role", 30 * 4; }; try { $dbh->do('CREATE DATABASE [dbicsl_test1]'); $dbh->do('CREATE DATABASE [dbicsl_test2]'); } catch { skip "cannot create databases: $_", 30 * 4; }; try { local $SIG{__WARN__} = sigwarn_silencer( qr/^Password correctly set\.$|^Account unlocked\.$|^New login created\.$|^New user added\.$/ ); $dbh->do("sp_addlogin dbicsl_user1, dbicsl, [dbicsl_test1]"); $dbh->do("sp_addlogin dbicsl_user2, dbicsl, [dbicsl_test2]"); $dbh->do("USE [dbicsl_test1]"); $dbh->do("sp_adduser 
dbicsl_user1"); $dbh->do("sp_adduser dbicsl_user2"); $dbh->do("GRANT ALL TO dbicsl_user1"); $dbh->do("GRANT ALL TO dbicsl_user2"); $dbh->do("USE [dbicsl_test2]"); $dbh->do("sp_adduser dbicsl_user2"); $dbh->do("sp_adduser dbicsl_user1"); $dbh->do("GRANT ALL TO dbicsl_user2"); $dbh->do("GRANT ALL TO dbicsl_user1"); } catch { skip "cannot add logins: $_", 30 * 4; }; my ($dbh1, $dbh2); { local $SIG{__WARN__} = sigwarn_silencer( qr/can't change context/ ); $dbh1 = DBI->connect($dsn, 'dbicsl_user1', 'dbicsl', { RaiseError => 1, PrintError => 0, }); $dbh1->do('USE [dbicsl_test1]'); $dbh2 = DBI->connect($dsn, 'dbicsl_user2', 'dbicsl', { RaiseError => 1, PrintError => 0, }); $dbh2->do('USE [dbicsl_test2]'); } $dbh1->do(<<"EOF"); CREATE TABLE sybase_loader_test4 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100) NULL ) EOF $dbh1->do('GRANT ALL ON sybase_loader_test4 TO dbicsl_user2'); $dbh1->do(<<"EOF"); CREATE TABLE sybase_loader_test5 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100) NULL, four_id INTEGER, CONSTRAINT loader_test5_uniq UNIQUE (four_id), FOREIGN KEY (four_id) REFERENCES sybase_loader_test4 (id) ) EOF $dbh2->do(<<"EOF"); CREATE TABLE sybase_loader_test5 ( pk INT IDENTITY PRIMARY KEY, value VARCHAR(100) NULL, four_id INTEGER, CONSTRAINT loader_test5_uniq UNIQUE (four_id), FOREIGN KEY (four_id) REFERENCES [dbicsl_test1].dbicsl_user1.sybase_loader_test4 (id) ) EOF $dbh2->do(<<"EOF"); CREATE TABLE sybase_loader_test6 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100) NULL, sybase_loader_test4_id INTEGER NULL, FOREIGN KEY (sybase_loader_test4_id) REFERENCES [dbicsl_test1].dbicsl_user1.sybase_loader_test4 (id) ) EOF $dbh2->do(<<"EOF"); CREATE TABLE sybase_loader_test7 ( id INT IDENTITY PRIMARY KEY, value VARCHAR(100) NULL, six_id INTEGER UNIQUE, FOREIGN KEY (six_id) REFERENCES sybase_loader_test6 (id) ) EOF $dbh2->do('GRANT ALL ON sybase_loader_test7 TO dbicsl_user1'); $dbh1->do(<<"EOF"); CREATE TABLE sybase_loader_test8 ( id INT IDENTITY PRIMARY KEY, value 
VARCHAR(100) NULL, sybase_loader_test7_id INTEGER, FOREIGN KEY (sybase_loader_test7_id) REFERENCES [dbicsl_test2].dbicsl_user2.sybase_loader_test7 (id) ) EOF $databases_created = 1; foreach my $databases (['dbicsl_test1', 'dbicsl_test2'], '%') { foreach my $owners ([qw/dbicsl_user1 dbicsl_user2/], '%') { lives_and { rmtree EXTRA_DUMP_DIR; my @warns; local $SIG{__WARN__} = sub { push @warns, $_[0] unless $_[0] =~ /\bcollides\b/ || $_[0] =~ /can't change context/; }; my $database = $databases; $database = [ $database ] unless ref $database; my $db_schema = {}; foreach my $db (@$database) { $db_schema->{$db} = $owners; } make_schema_at( 'SybaseMultiSchema', { naming => 'current', db_schema => $db_schema, dump_directory => EXTRA_DUMP_DIR, quiet => 1, }, [ $dsn, $user, $password ], ); SybaseMultiSchema->storage->disconnect; diag join "\n", @warns if @warns; is @warns, 0; } 'dumped schema for "dbicsl_test1" and "dbicsl_test2" databases with no warnings'; my ($test_schema, $rsrc, $rs, $row, %uniqs, $rel_info); lives_and { ok $test_schema = SybaseMultiSchema->connect($dsn, $user, $password); } 'connected test schema'; lives_and { ok $rsrc = $test_schema->source('SybaseLoaderTest4'); } 'got source for table in database one'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in database one'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in database one'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in database one'; lives_and { ok $rs = $test_schema->resultset('SybaseLoaderTest4'); } 'got resultset for table in database one'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in database one'; $rel_info = try { $rsrc->relationship_info('dbicsl_test1_sybase_loader_test5') }; is_deeply $rel_info->{cond}, { 'foreign.four_id' => 'self.id' }, 'relationship in database one'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in database one'; is $rel_info->{attrs}{join_type}, 'LEFT', 
'relationship in database one'; lives_and { ok $rsrc = $test_schema->source('DbicslTest1SybaseLoaderTest5'); } 'got source for table in database one'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in database one'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['four_id'], 'correct unique constraint in database one'); lives_and { ok $rsrc = $test_schema->source('SybaseLoaderTest6'); } 'got source for table in database two'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in database two introspected correctly'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in database two introspected correctly'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in database two introspected correctly'; lives_and { ok $rs = $test_schema->resultset('SybaseLoaderTest6'); } 'got resultset for table in database two'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in database two'; $rel_info = try { $rsrc->relationship_info('sybase_loader_test7') }; is_deeply $rel_info->{cond}, { 'foreign.six_id' => 'self.id' }, 'relationship in database two'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in database two'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in database two'; lives_and { ok $rsrc = $test_schema->source('SybaseLoaderTest7'); } 'got source for table in database two'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in database two'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['six_id'], 'correct unique constraint in database two'); lives_and { ok $test_schema->source('SybaseLoaderTest6') ->has_relationship('sybase_loader_test4'); } 'cross-database relationship in multi database schema'; lives_and { ok $test_schema->source('SybaseLoaderTest4') ->has_relationship('sybase_loader_test6s'); } 'cross-database relationship in multi database schema'; 
lives_and { ok $test_schema->source('SybaseLoaderTest8') ->has_relationship('sybase_loader_test7'); } 'cross-database relationship in multi database schema'; lives_and { ok $test_schema->source('SybaseLoaderTest7') ->has_relationship('sybase_loader_test8s'); } 'cross-database relationship in multi database schema'; } } } }, }, )->run_tests(); END { if (not $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}) { rmtree EXTRA_DUMP_DIR; if ($databases_created) { my $dbh = $schema->storage->dbh; $dbh->do('USE master'); local $dbh->{FetchHashKeyName} = 'NAME_lc'; my $sth = $dbh->prepare('sp_who'); $sth->execute; while (my $row = $sth->fetchrow_hashref) { if ($row->{dbname} =~ /^dbicsl_test[12]\z/) { $dbh->do("kill $row->{spid}"); } } foreach my $table ('[dbicsl_test1].dbicsl_user1.sybase_loader_test8', '[dbicsl_test2].dbicsl_user2.sybase_loader_test7', '[dbicsl_test2].dbicsl_user2.sybase_loader_test6', '[dbicsl_test2].dbicsl_user2.sybase_loader_test5', '[dbicsl_test1].dbicsl_user1.sybase_loader_test5', '[dbicsl_test1].dbicsl_user1.sybase_loader_test4') { try { $dbh->do("DROP TABLE $table"); } catch { diag "Error dropping table $table: $_"; }; } foreach my $db (qw/dbicsl_test1 dbicsl_test2/) { try { $dbh->do("DROP DATABASE [$db]"); } catch { diag "Error dropping test database $db: $_"; }; } foreach my $login (qw/dbicsl_user1 dbicsl_user2/) { try { local $SIG{__WARN__} = sigwarn_silencer( qr/^Account locked\.$|^Login dropped\.$/ ); $dbh->do("sp_droplogin $login"); } catch { diag "Error dropping login $login: $_" unless /Incorrect syntax/; }; } } } } # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/lib/0000755000175000017500000000000012650450355016456 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/TestAdditionalBase.pm0000644000175000017500000000042512542756321022522 0ustar ilmariilmaripackage TestAdditionalBase; use strict; use warnings; sub test_additional_base { return "test_additional_base"; } sub test_additional_base_override { return 
"test_additional_base_override"; } sub test_additional_base_additional { return TestAdditional->test_additional; } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/dbixcsl_common_tests.pm0000644000175000017500000026025012620647716023251 0ustar ilmariilmaripackage dbixcsl_common_tests; use strict; use warnings; use Test::More; use Test::Deep; use Test::Exception; use Test::Differences; use DBIx::Class::Schema::Loader; use Class::Unload; use File::Path 'rmtree'; use DBI; use File::Find 'find'; use Class::Unload (); use DBIx::Class::Schema::Loader::Utils qw/dumper_squashed slurp_file sigwarn_silencer apply/; use DBIx::Class::Schema::Loader::Optional::Dependencies (); use Try::Tiny; use File::Spec::Functions 'catfile'; use File::Basename 'basename'; use namespace::clean; use dbixcsl_test_dir '$tdir'; use constant DUMP_DIR => "$tdir/common_dump"; rmtree DUMP_DIR; use constant RESCAN_WARNINGS => qr/(?i:loader_test|LoaderTest)\d+s? has no primary key|^Dumping manual schema|^Schema dump completed|collides with an inherited method|invalidates \d+ active statement|^Bad table or view/; # skip schema-qualified tables in the Pg tests use constant SOURCE_DDL => qr/CREATE (?:TABLE|VIEW) (?!"dbicsl[.-]test")/i; use constant SCHEMA_CLASS => 'DBIXCSL_Test::Schema'; use constant RESULT_NAMESPACE => [ 'MyResult', 'MyResultTwo' ]; use constant RESULTSET_NAMESPACE => [ 'MyResultSet', 'MyResultSetTwo' ]; sub new { my $class = shift; my $self; if( ref($_[0]) eq 'HASH') { my $args = shift; $self = { (%$args) }; } else { $self = { @_ }; } # Only MySQL uses this $self->{innodb} ||= ''; # DB2 and Firebird don't support 'field type NULL' $self->{null} = 'NULL' unless defined $self->{null}; $self->{verbose} = $ENV{TEST_VERBOSE} || 0; # Optional extra tables and tests $self->{extra} ||= {}; $self->{basic_date_datatype} ||= 'DATE'; # Not all DBS do SQL-standard CURRENT_TIMESTAMP $self->{default_function} ||= "current_timestamp"; $self->{default_function_def} ||= "timestamp default 
$self->{default_function}"; $self = bless $self, $class; $self->{preserve_case_tests_table_names} = [qw/LoaderTest40 LoaderTest41/]; if (lc($self->{vendor}) eq 'mysql' && $^O =~ /^(?:MSWin32|cygwin)\z/) { $self->{preserve_case_tests_table_names} = [qw/Loader_Test40 Loader_Test41/]; } $self->setup_data_type_tests; return $self; } sub skip_tests { my ($self, $why) = @_; plan skip_all => $why; } sub _monikerize { my $name = shift; my $orig = pop; return $orig->({ loader_test2 => 'LoaderTest2X', LOADER_TEST2 => 'LoaderTest2X', }); } sub run_tests { my $self = shift; my @connect_info; if ($self->{dsn}) { push @connect_info, [ @{$self}{qw/dsn user password connect_info_opts/ } ]; } else { foreach my $info (@{ $self->{connect_info} || [] }) { push @connect_info, [ @{$info}{qw/dsn user password connect_info_opts/ } ]; } } if ($ENV{SCHEMA_LOADER_TESTS_EXTRA_ONLY}) { $self->run_only_extra_tests(\@connect_info); return; } my $extra_count = $self->{extra}{count} || 0; my $col_accessor_map_tests = 6; my $num_rescans = 6; $num_rescans++ if $self->{vendor} eq 'mssql'; $num_rescans++ if $self->{vendor} eq 'Firebird'; plan tests => @connect_info * (233 + $num_rescans * $col_accessor_map_tests + $extra_count + ($self->{data_type_tests}{test_count} || 0)); foreach my $info_idx (0..$#connect_info) { my $info = $connect_info[$info_idx]; @{$self}{qw/dsn user password connect_info_opts/} = @$info; $self->create(); my $schema_class = $self->setup_schema($info); $self->test_schema($schema_class); rmtree DUMP_DIR unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP} && $info_idx == $#connect_info; } } sub run_only_extra_tests { my ($self, $connect_info) = @_; plan tests => @$connect_info * (3 + ($self->{extra}{count} || 0) + ($self->{data_type_tests}{test_count} || 0)); rmtree DUMP_DIR; foreach my $info_idx (0..$#$connect_info) { my $info = $connect_info->[$info_idx]; @{$self}{qw/dsn user password connect_info_opts/} = @$info; $self->drop_extra_tables_only; my $dbh = $self->dbconnect(1); $dbh->do($_) 
for @{ $self->{pre_create} || [] }; $dbh->do($_) for @{ $self->{extra}{create} || [] }; if (not ($self->{vendor} eq 'mssql' && $dbh->{Driver}{Name} eq 'Sybase')) { foreach my $ddl (@{ $self->{data_type_tests}{ddl} || []}) { if (my $cb = $self->{data_types_ddl_cb}) { $cb->($ddl); } else { $dbh->do($ddl); } } } $self->{_created} = 1; my $file_count = grep $_ =~ SOURCE_DDL, @{ $self->{extra}{create} || [] }; $file_count++; # schema if (not ($self->{vendor} eq 'mssql' && $dbh->{Driver}{Name} eq 'Sybase')) { $file_count++ for @{ $self->{data_type_tests}{table_names} || [] }; } my $schema_class = $self->setup_schema($info, $file_count); my ($monikers, $classes) = $self->monikers_and_classes($schema_class); my $conn = $schema_class->clone; $self->test_data_types($conn); $self->{extra}{run}->($conn, $monikers, $classes, $self) if $self->{extra}{run}; if (not ($ENV{SCHEMA_LOADER_TESTS_NOCLEANUP} && $info_idx == $#$connect_info)) { $self->drop_extra_tables_only; rmtree DUMP_DIR; } } } sub drop_extra_tables_only { my $self = shift; my $dbh = $self->dbconnect(0); local $^W = 0; # for ADO $dbh->do($_) for @{ $self->{extra}{pre_drop_ddl} || [] }; $self->drop_table($dbh, $_) for @{ $self->{extra}{drop} || [] }; if (not ($self->{vendor} eq 'mssql' && $dbh->{Driver}{Name} eq 'Sybase')) { foreach my $data_type_table (@{ $self->{data_type_tests}{table_names} || [] }) { $self->drop_table($dbh, $data_type_table); } } } # defined in sub create my (@statements, @statements_reltests, @statements_advanced, @statements_advanced_sqlite, @statements_inline_rels, @statements_implicit_rels); sub CONSTRAINT { my $self = shift; return qr/^(?:(?:$self->{vendor}|extra)[_-]?)?loader[_-]?test[0-9]+(?!.*_)/i; } sub setup_schema { my ($self, $connect_info, $expected_count) = @_; my $debug = ($self->{verbose} > 1) ? 
1 : 0; if ($ENV{SCHEMA_LOADER_TESTS_USE_MOOSE}) { if (not DBIx::Class::Schema::Loader::Optional::Dependencies->req_ok_for('use_moose')) { die sprintf ("Missing dependencies for SCHEMA_LOADER_TESTS_USE_MOOSE: %s\n", DBIx::Class::Schema::Loader::Optional::Dependencies->req_missing_for('use_moose')); } $self->{use_moose} = 1; } my %loader_opts = ( constraint => $self->CONSTRAINT, result_namespace => RESULT_NAMESPACE, resultset_namespace => RESULTSET_NAMESPACE, schema_base_class => 'TestSchemaBaseClass', schema_components => [ 'TestSchemaComponent', '+TestSchemaComponentFQN' ], additional_classes => 'TestAdditional', additional_base_classes => 'TestAdditionalBase', left_base_classes => [ qw/TestLeftBase/ ], components => [ qw/TestComponent +TestComponentFQN IntrospectableM2M/ ], inflect_plural => { loader_test4_fkid => 'loader_test4zes' }, inflect_singular => { fkid => 'fkid_singular' }, moniker_map => \&_monikerize, custom_column_info => \&_custom_column_info, debug => $debug, dump_directory => DUMP_DIR, datetime_timezone => 'Europe/Berlin', datetime_locale => 'de_DE', $self->{use_moose} ? 
( use_moose => 1, result_roles => 'TestRole', result_roles_map => { LoaderTest2X => 'TestRoleForMap' }, ) : (), col_collision_map => { '^(can)\z' => 'caught_collision_%s' }, rel_collision_map => { '^(set_primary_key)\z' => 'caught_rel_collision_%s' }, relationship_attrs => { many_to_many => { order_by => 'me.id' } }, col_accessor_map => \&test_col_accessor_map, result_components_map => { LoaderTest2X => 'TestComponentForMap', LoaderTest1 => '+TestComponentForMapFQN' }, uniq_to_primary => 1, %{ $self->{loader_options} || {} }, ); $loader_opts{db_schema} = $self->{db_schema} if $self->{db_schema}; Class::Unload->unload(SCHEMA_CLASS); my $file_count; { my @loader_warnings; local $SIG{__WARN__} = sub { push(@loader_warnings, @_); }; eval qq{ package @{[SCHEMA_CLASS]}; use base qw/DBIx::Class::Schema::Loader/; __PACKAGE__->loader_options(\%loader_opts); __PACKAGE__->connection(\@\$connect_info); }; ok(!$@, "Loader initialization") or diag $@; find sub { return if -d; $file_count++ }, DUMP_DIR; my $standard_sources = not defined $expected_count; if ($standard_sources) { $expected_count = 41; if (not ($self->{vendor} eq 'mssql' && $connect_info->[0] =~ /Sybase/)) { $expected_count++ for @{ $self->{data_type_tests}{table_names} || [] }; } $expected_count += grep $_ =~ SOURCE_DDL, @{ $self->{extra}{create} || [] }; $expected_count -= grep /CREATE TABLE/i, @statements_inline_rels if $self->{skip_rels} || $self->{no_inline_rels}; $expected_count -= grep /CREATE TABLE/i, @statements_implicit_rels if $self->{skip_rels} || $self->{no_implicit_rels}; $expected_count -= grep /CREATE TABLE/i, ($self->{vendor} =~ /sqlite/ ? 
@statements_advanced_sqlite : @statements_advanced), @statements_reltests if $self->{skip_rels}; } is $file_count, $expected_count, 'correct number of files generated'; my $warn_count = 2; $warn_count++ for grep /^Bad table or view/, @loader_warnings; $warn_count++ for grep /renaming \S+ relation/, @loader_warnings; $warn_count++ for grep /\b(?!loader_test9)\w+ has no primary key/i, @loader_warnings; $warn_count++ for grep /^Column '\w+' in table '\w+' collides with an inherited method\./, @loader_warnings; $warn_count++ for grep /^Relationship '\w+' in source '\w+' for columns '[^']+' collides with an inherited method\./, @loader_warnings; $warn_count++ for grep { my $w = $_; grep $w =~ $_, @{ $self->{warnings} || [] } } @loader_warnings; $warn_count-- for grep { my $w = $_; grep $w =~ $_, @{ $self->{failtrigger_warnings} || [] } } @loader_warnings; is scalar(@loader_warnings), $warn_count, 'Correct number of warnings' or diag @loader_warnings; } exit if ($file_count||0) != $expected_count; return SCHEMA_CLASS; } sub test_schema { my $self = shift; my $schema_class = shift; my $conn = $schema_class->clone; ($self->{before_tests_run} || sub {})->($conn); my ($monikers, $classes) = $self->monikers_and_classes($schema_class); my $moniker1 = $monikers->{loader_test1s}; my $class1 = $classes->{loader_test1s}; my $rsobj1 = $conn->resultset($moniker1); check_no_duplicate_unique_constraints($class1); my $moniker2 = $monikers->{loader_test2}; my $class2 = $classes->{loader_test2}; my $rsobj2 = $conn->resultset($moniker2); check_no_duplicate_unique_constraints($class2); my $moniker23 = $monikers->{LOADER_test23} || $monikers->{loader_test23}; my $class23 = $classes->{LOADER_test23} || $classes->{loader_test23}; my $rsobj23 = $conn->resultset($moniker1); my $moniker24 = $monikers->{LoAdEr_test24} || $monikers->{loader_test24}; my $class24 = $classes->{LoAdEr_test24} || $classes->{loader_test24}; my $rsobj24 = $conn->resultset($moniker2); my $moniker35 = 
$monikers->{loader_test35}; my $class35 = $classes->{loader_test35}; my $rsobj35 = $conn->resultset($moniker35); my $moniker50 = $monikers->{loader_test50}; my $class50 = $classes->{loader_test50}; my $rsobj50 = $conn->resultset($moniker50); isa_ok( $rsobj1, "DBIx::Class::ResultSet" ); isa_ok( $rsobj2, "DBIx::Class::ResultSet" ); isa_ok( $rsobj23, "DBIx::Class::ResultSet" ); isa_ok( $rsobj24, "DBIx::Class::ResultSet" ); isa_ok( $rsobj35, "DBIx::Class::ResultSet" ); isa_ok( $rsobj50, "DBIx::Class::ResultSet" ); # check result_namespace my @schema_dir = split /::/, SCHEMA_CLASS; my $result_dir = ref RESULT_NAMESPACE ? ${RESULT_NAMESPACE()}[0] : RESULT_NAMESPACE; my $schema_files = [ sort map basename($_), glob catfile(DUMP_DIR, @schema_dir, '*') ]; is_deeply $schema_files, [ $result_dir ], 'first entry in result_namespace exists as a directory'; my $result_file_count =()= glob catfile(DUMP_DIR, @schema_dir, $result_dir, '*.pm'); ok $result_file_count, 'Result files dumped to first entry in result_namespace'; # parse out the resultset_namespace my $schema_code = slurp_file $conn->_loader->get_dump_filename(SCHEMA_CLASS); my ($schema_resultset_namespace) = $schema_code =~ /\bresultset_namespace => (.*)/; $schema_resultset_namespace = eval $schema_resultset_namespace; die $@ if $@; is_deeply $schema_resultset_namespace, RESULTSET_NAMESPACE, 'resultset_namespace set correctly on Schema'; like $schema_code, qr/\nuse base 'TestSchemaBaseClass';\n\n|\nextends 'TestSchemaBaseClass';\n\n/, 'schema_base_class works'; is $conn->testschemabaseclass, 'TestSchemaBaseClass works', 'schema base class works'; like $schema_code, qr/\n__PACKAGE__->load_components\("TestSchemaComponent", "\+TestSchemaComponentFQN"\);\n\n__PACKAGE__->load_namespaces/, 'schema_components works'; is $conn->dbix_class_testschemacomponent, 'dbix_class_testschemacomponent works', 'schema component works'; is $conn->testschemacomponent_fqn, 'TestSchemaComponentFQN works', 'fully qualified schema component 
works'; my @columns_lt2 = $class2->columns; is_deeply( \@columns_lt2, [ qw/id dat dat2 set_primary_key can dbix_class_testcomponent dbix_class_testcomponentmap testcomponent_fqn meta test_role_method test_role_for_map_method crumb_crisp_coating sticky_filling/ ], "Column Ordering" ); is $class2->column_info('can')->{accessor}, 'caught_collision_can', 'accessor for column name that conflicts with a UNIVERSAL method renamed based on col_collision_map'; ok (exists $class2->column_info('set_primary_key')->{accessor} && (not defined $class2->column_info('set_primary_key')->{accessor}), 'accessor for column name that conflicts with a result base class method removed'); ok (exists $class2->column_info('dbix_class_testcomponent')->{accessor} && (not defined $class2->column_info('dbix_class_testcomponent')->{accessor}), 'accessor for column name that conflicts with a component class method removed'); ok (exists $class2->column_info('dbix_class_testcomponentmap')->{accessor} && (not defined $class2->column_info('dbix_class_testcomponentmap')->{accessor}), 'accessor for column name that conflicts with a component class method removed'); ok (exists $class2->column_info('testcomponent_fqn')->{accessor} && (not defined $class2->column_info('testcomponent_fqn')->{accessor}), 'accessor for column name that conflicts with a fully qualified component class method removed'); if ($self->{use_moose}) { ok (exists $class2->column_info('meta')->{accessor} && (not defined $class2->column_info('meta')->{accessor}), 'accessor for column name that conflicts with Moose removed'); ok (exists $class2->column_info('test_role_for_map_method')->{accessor} && (not defined $class2->column_info('test_role_for_map_method')->{accessor}), 'accessor for column name that conflicts with a Result role removed'); ok (exists $class2->column_info('test_role_method')->{accessor} && (not defined $class2->column_info('test_role_method')->{accessor}), 'accessor for column name that conflicts with a Result role 
removed'); } else { ok ((not exists $class2->column_info('meta')->{accessor}), "not removing 'meta' accessor with use_moose disabled"); ok ((not exists $class2->column_info('test_role_for_map_method')->{accessor}), 'no role method conflicts with use_moose disabled'); ok ((not exists $class2->column_info('test_role_method')->{accessor}), 'no role method conflicts with use_moose disabled'); } my %uniq1 = $class1->unique_constraints; my $uniq1_test = 0; foreach my $ucname (keys %uniq1) { my $cols_arrayref = $uniq1{$ucname}; if(@$cols_arrayref == 1 && $cols_arrayref->[0] eq 'dat') { $uniq1_test = 1; last; } } ok($uniq1_test, "Unique constraint"); is($moniker1, 'LoaderTest1', 'moniker singularisation'); my %uniq2 = $class2->unique_constraints; my $uniq2_test = 0; foreach my $ucname (keys %uniq2) { my $cols_arrayref = $uniq2{$ucname}; if (@$cols_arrayref == 2 && $cols_arrayref->[0] eq 'dat2' && $cols_arrayref->[1] eq 'dat' ) { $uniq2_test = 2; last; } } ok($uniq2_test, "Multi-col unique constraint"); my %uniq3 = $class50->unique_constraints; is_deeply $uniq3{primary}, ['id1', 'id2'], 'unique constraint promoted to primary key with uniq_to_primary'; is($moniker2, 'LoaderTest2X', "moniker_map testing"); SKIP: { can_ok( $class1, 'test_additional_base' ) or skip "Pre-requisite test failed", 1; is( $class1->test_additional_base, "test_additional_base", "Additional Base method" ); } SKIP: { can_ok( $class1, 'test_additional_base_override' ) or skip "Pre-requisite test failed", 1; is( $class1->test_additional_base_override, "test_left_base_override", "Left Base overrides Additional Base method" ); } SKIP: { can_ok( $class1, 'test_additional_base_additional' ) or skip "Pre-requisite test failed", 1; is( $class1->test_additional_base_additional, "test_additional", "Additional Base can use Additional package method" ); } SKIP: { can_ok( $class1, 'dbix_class_testcomponent' ) or skip "Pre-requisite test failed", 1; is( $class1->dbix_class_testcomponent, 'dbix_class_testcomponent 
works', 'Additional Component' ); } is try { $class2->dbix_class_testcomponentmap }, 'dbix_class_testcomponentmap works', 'component from result_component_map'; isnt try { $class1->dbix_class_testcomponentmap }, 'dbix_class_testcomponentmap works', 'component from result_component_map not added to not mapped Result'; is try { $class1->testcomponent_fqn }, 'TestComponentFQN works', 'fully qualified component class'; is try { $class1->testcomponentformap_fqn }, 'TestComponentForMapFQN works', 'fully qualified component class from result_component_map'; isnt try { $class2->testcomponentformap_fqn }, 'TestComponentForMapFQN works', 'fully qualified component class from result_component_map not added to not mapped Result'; SKIP: { skip 'not testing role methods with use_moose disabled', 2 unless $self->{use_moose}; is try { $class1->test_role_method }, 'test_role_method works', 'role from result_roles applied'; is try { $class2->test_role_for_map_method }, 'test_role_for_map_method works', 'role from result_roles_map applied'; } SKIP: { can_ok( $class1, 'loader_test1_classmeth' ) or skip "Pre-requisite test failed", 1; is( $class1->loader_test1_classmeth, 'all is well', 'Class method' ); } ok( $class1->column_info('id')->{is_auto_increment}, 'is_auto_increment detection' ); my $obj = try { $rsobj1->find(1) }; is( try { $obj->id }, 1, "Find got the right row" ); is( try { $obj->dat }, "foo", "Column value" ); is( $rsobj2->count, 4, "Count" ); my $saved_id; eval { my $new_obj1 = $rsobj1->create({ dat => 'newthing' }); $saved_id = $new_obj1->id; }; ok(!$@, "Inserting new record using a PK::Auto key didn't die") or diag $@; ok($saved_id, "Got PK::Auto-generated id"); my $new_obj1 = $rsobj1->search({ dat => 'newthing' })->single; ok($new_obj1, "Found newly inserted PK::Auto record"); is($new_obj1->id, $saved_id, "Correct PK::Auto-generated id"); my ($obj2) = $rsobj2->search({ dat => 'bbb' })->single; is( $obj2->id, 2 ); SKIP: { skip 'no DEFAULT on Access', 7 if 
$self->{vendor} eq 'Access'; is( $class35->column_info('a_varchar')->{default_value}, 'foo', 'constant character default', ); is( $class35->column_info('an_int')->{default_value}, 42, 'constant integer default', ); is( $class35->column_info('a_negative_int')->{default_value}, -42, 'constant negative integer default', ); is( sprintf("%.3f", $class35->column_info('a_double')->{default_value}||0), '10.555', 'constant numeric default', ); is( sprintf("%.3f", $class35->column_info('a_negative_double')->{default_value}||0), -10.555, 'constant negative numeric default', ); my $function_default = $class35->column_info('a_function')->{default_value}; isa_ok( $function_default, 'SCALAR', 'default_value for function default' ); is_deeply( $function_default, \$self->{default_function}, 'default_value for function default is correct' ); } is( $class2->column_info('crumb_crisp_coating')->{accessor}, 'trivet', 'col_accessor_map is being run' ); is( $class2->column_info('sticky_filling')->{accessor}, 'goo', 'multi-level hash col_accessor_map works' ); is $class1->column_info('dat')->{is_nullable}, 0, 'is_nullable=0 detection'; is $class2->column_info('set_primary_key')->{is_nullable}, 1, 'is_nullable=1 detection'; SKIP: { skip $self->{skip_rels}, 149 if $self->{skip_rels}; my $moniker3 = $monikers->{loader_test3}; my $class3 = $classes->{loader_test3}; my $rsobj3 = $conn->resultset($moniker3); my $moniker4 = $monikers->{loader_test4}; my $class4 = $classes->{loader_test4}; my $rsobj4 = $conn->resultset($moniker4); my $moniker5 = $monikers->{loader_test5}; my $class5 = $classes->{loader_test5}; my $rsobj5 = $conn->resultset($moniker5); my $moniker6 = $monikers->{loader_test6}; my $class6 = $classes->{loader_test6}; my $rsobj6 = $conn->resultset($moniker6); my $moniker7 = $monikers->{loader_test7}; my $class7 = $classes->{loader_test7}; my $rsobj7 = $conn->resultset($moniker7); my $moniker8 = $monikers->{loader_test8}; my $class8 = $classes->{loader_test8}; my $rsobj8 = 
$conn->resultset($moniker8); my $moniker9 = $monikers->{loader_test9}; my $class9 = $classes->{loader_test9}; my $rsobj9 = $conn->resultset($moniker9); my $moniker16 = $monikers->{loader_test16}; my $class16 = $classes->{loader_test16}; my $rsobj16 = $conn->resultset($moniker16); my $moniker17 = $monikers->{loader_test17}; my $class17 = $classes->{loader_test17}; my $rsobj17 = $conn->resultset($moniker17); my $moniker18 = $monikers->{loader_test18}; my $class18 = $classes->{loader_test18}; my $rsobj18 = $conn->resultset($moniker18); my $moniker19 = $monikers->{loader_test19}; my $class19 = $classes->{loader_test19}; my $rsobj19 = $conn->resultset($moniker19); my $moniker20 = $monikers->{loader_test20}; my $class20 = $classes->{loader_test20}; my $rsobj20 = $conn->resultset($moniker20); my $moniker21 = $monikers->{loader_test21}; my $class21 = $classes->{loader_test21}; my $rsobj21 = $conn->resultset($moniker21); my $moniker22 = $monikers->{loader_test22}; my $class22 = $classes->{loader_test22}; my $rsobj22 = $conn->resultset($moniker22); my $moniker25 = $monikers->{loader_test25}; my $class25 = $classes->{loader_test25}; my $rsobj25 = $conn->resultset($moniker25); my $moniker26 = $monikers->{loader_test26}; my $class26 = $classes->{loader_test26}; my $rsobj26 = $conn->resultset($moniker26); my $moniker27 = $monikers->{loader_test27}; my $class27 = $classes->{loader_test27}; my $rsobj27 = $conn->resultset($moniker27); my $moniker28 = $monikers->{loader_test28}; my $class28 = $classes->{loader_test28}; my $rsobj28 = $conn->resultset($moniker28); my $moniker29 = $monikers->{loader_test29}; my $class29 = $classes->{loader_test29}; my $rsobj29 = $conn->resultset($moniker29); my $moniker31 = $monikers->{loader_test31}; my $class31 = $classes->{loader_test31}; my $rsobj31 = $conn->resultset($moniker31); my $moniker32 = $monikers->{loader_test32}; my $class32 = $classes->{loader_test32}; my $rsobj32 = $conn->resultset($moniker32); my $moniker33 = 
$monikers->{loader_test33}; my $class33 = $classes->{loader_test33}; my $rsobj33 = $conn->resultset($moniker33); my $moniker34 = $monikers->{loader_test34}; my $class34 = $classes->{loader_test34}; my $rsobj34 = $conn->resultset($moniker34); my $moniker36 = $monikers->{loader_test36}; my $class36 = $classes->{loader_test36}; my $rsobj36 = $conn->resultset($moniker36); my $moniker37 = $monikers->{loader_test37}; my $class37 = $classes->{loader_test37}; my $rsobj37 = $conn->resultset($moniker37); my $moniker42 = $monikers->{loader_test42}; my $class42 = $classes->{loader_test42}; my $rsobj42 = $conn->resultset($moniker42); my $moniker43 = $monikers->{loader_test43}; my $class43 = $classes->{loader_test43}; my $rsobj43 = $conn->resultset($moniker43); my $moniker44 = $monikers->{loader_test44}; my $class44 = $classes->{loader_test44}; my $rsobj44 = $conn->resultset($moniker44); isa_ok( $rsobj3, "DBIx::Class::ResultSet" ); isa_ok( $rsobj4, "DBIx::Class::ResultSet" ); isa_ok( $rsobj5, "DBIx::Class::ResultSet" ); isa_ok( $rsobj6, "DBIx::Class::ResultSet" ); isa_ok( $rsobj7, "DBIx::Class::ResultSet" ); isa_ok( $rsobj8, "DBIx::Class::ResultSet" ); isa_ok( $rsobj9, "DBIx::Class::ResultSet" ); isa_ok( $rsobj16, "DBIx::Class::ResultSet" ); isa_ok( $rsobj17, "DBIx::Class::ResultSet" ); isa_ok( $rsobj18, "DBIx::Class::ResultSet" ); isa_ok( $rsobj19, "DBIx::Class::ResultSet" ); isa_ok( $rsobj20, "DBIx::Class::ResultSet" ); isa_ok( $rsobj21, "DBIx::Class::ResultSet" ); isa_ok( $rsobj22, "DBIx::Class::ResultSet" ); isa_ok( $rsobj25, "DBIx::Class::ResultSet" ); isa_ok( $rsobj26, "DBIx::Class::ResultSet" ); isa_ok( $rsobj27, "DBIx::Class::ResultSet" ); isa_ok( $rsobj28, "DBIx::Class::ResultSet" ); isa_ok( $rsobj29, "DBIx::Class::ResultSet" ); isa_ok( $rsobj31, "DBIx::Class::ResultSet" ); isa_ok( $rsobj32, "DBIx::Class::ResultSet" ); isa_ok( $rsobj33, "DBIx::Class::ResultSet" ); isa_ok( $rsobj34, "DBIx::Class::ResultSet" ); isa_ok( $rsobj36, "DBIx::Class::ResultSet" ); isa_ok( 
$rsobj37, "DBIx::Class::ResultSet" ); isa_ok( $rsobj42, "DBIx::Class::ResultSet" ); isa_ok( $rsobj43, "DBIx::Class::ResultSet" ); isa_ok( $rsobj44, "DBIx::Class::ResultSet" ); # basic rel test my $obj4 = try { $rsobj4->find(123) } || $rsobj4->search({ id => 123 })->single; isa_ok( try { $obj4->fkid_singular }, $class3); # test renaming rel that conflicts with a class method ok ($obj4->has_relationship('belongs_to_rel'), 'relationship name that conflicts with a method renamed'); isa_ok( try { $obj4->belongs_to_rel }, $class3); ok ($obj4->has_relationship('caught_rel_collision_set_primary_key'), 'relationship name that conflicts with a method renamed based on rel_collision_map'); isa_ok( try { $obj4->caught_rel_collision_set_primary_key }, $class3); ok($class4->column_info('fkid')->{is_foreign_key}, 'Foreign key detected'); my $obj3 = try { $rsobj3->find(1) } || $rsobj3->search({ id => 1 })->single; my $rs_rel4 = try { $obj3->search_related('loader_test4zes') }; isa_ok( try { $rs_rel4->single }, $class4); # check rel naming with prepositions ok ($rsobj4->result_source->has_relationship('loader_test5s_to'), "rel with preposition 'to' pluralized correctly"); ok ($rsobj4->result_source->has_relationship('loader_test5s_from'), "rel with preposition 'from' pluralized correctly"); # check default relationship attributes is try { $rsobj3->result_source->relationship_info('loader_test4zes')->{attrs}{cascade_delete} }, 0, 'cascade_delete => 0 on has_many by default'; is try { $rsobj3->result_source->relationship_info('loader_test4zes')->{attrs}{cascade_copy} }, 0, 'cascade_copy => 0 on has_many by default'; ok ((not try { exists $rsobj3->result_source->relationship_info('loader_test4zes')->{attrs}{on_delete} }), 'has_many does not have on_delete'); ok ((not try { exists $rsobj3->result_source->relationship_info('loader_test4zes')->{attrs}{on_update} }), 'has_many does not have on_update'); ok ((not try { exists 
$rsobj3->result_source->relationship_info('loader_test4zes')->{attrs}{is_deferrable} }), 'has_many does not have is_deferrable'); my $default_on_clause = $self->{default_on_clause} || 'CASCADE'; my $default_on_delete_clause = $self->{default_on_delete_clause} || $default_on_clause; is try { $rsobj4->result_source->relationship_info('fkid_singular')->{attrs}{on_delete} }, $default_on_delete_clause, "on_delete is $default_on_delete_clause on belongs_to by default"; my $default_on_update_clause = $self->{default_on_update_clause} || $default_on_clause; is try { $rsobj4->result_source->relationship_info('fkid_singular')->{attrs}{on_update} }, $default_on_update_clause, "on_update is $default_on_update_clause on belongs_to by default"; my $default_is_deferrable = $self->{default_is_deferrable}; $default_is_deferrable = 1 if not defined $default_is_deferrable; is try { $rsobj4->result_source->relationship_info('fkid_singular')->{attrs}{is_deferrable} }, $default_is_deferrable, "is_deferrable => $default_is_deferrable on belongs_to by default"; ok ((not try { exists $rsobj4->result_source->relationship_info('fkid_singular')->{attrs}{cascade_delete} }), 'belongs_to does not have cascade_delete'); ok ((not try { exists $rsobj4->result_source->relationship_info('fkid_singular')->{attrs}{cascade_copy} }), 'belongs_to does not have cascade_copy'); is try { $rsobj27->result_source->relationship_info('loader_test28')->{attrs}{cascade_delete} }, 0, 'cascade_delete => 0 on might_have by default'; is try { $rsobj27->result_source->relationship_info('loader_test28')->{attrs}{cascade_copy} }, 0, 'cascade_copy => 0 on might_have by default'; ok ((not try { exists $rsobj27->result_source->relationship_info('loader_test28')->{attrs}{on_delete} }), 'might_have does not have on_delete'); ok ((not try { exists $rsobj27->result_source->relationship_info('loader_test28')->{attrs}{on_update} }), 'might_have does not have on_update'); ok ((not try { exists 
$rsobj27->result_source->relationship_info('loader_test28')->{attrs}{is_deferrable} }), 'might_have does not have is_deferrable'); # find on multi-col pk if ($conn->loader->preserve_case) { my $obj5 = $rsobj5->find({id1 => 1, iD2 => 1}); is $obj5->i_d2, 1, 'Find on multi-col PK'; } else { my $obj5 = $rsobj5->find({id1 => 1, id2 => 1}); is $obj5->id2, 1, 'Find on multi-col PK'; } # mulit-col fk def my $obj6 = try { $rsobj6->find(1) } || $rsobj6->search({ id => 1 })->single; isa_ok( try { $obj6->loader_test2 }, $class2); isa_ok( try { $obj6->loader_test5 }, $class5); ok($class6->column_info('loader_test2_id')->{is_foreign_key}, 'Foreign key detected'); ok($class6->column_info('id')->{is_foreign_key}, 'Foreign key detected'); my $id2_info = try { $class6->column_info('id2') } || $class6->column_info('Id2'); ok($id2_info->{is_foreign_key}, 'Foreign key detected'); unlike slurp_file $conn->_loader->get_dump_filename($class6), qr{ \n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\( \s+ "(\w+?)" .*? \n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\( \s+ "\1" }xs, 'did not create two relationships with the same name'; unlike slurp_file $conn->_loader->get_dump_filename($class8), qr{ \n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\( \s+ "(\w+?)" .*? 
\n__PACKAGE__->(?:belongs_to|has_many|might_have|has_one|many_to_many)\( \s+ "\1" }xs, 'did not create two relationships with the same name'; # check naming of ambiguous relationships my $rel_info = $class6->relationship_info('lovely_loader_test7') || {}; ok (($class6->has_relationship('lovely_loader_test7') && $rel_info->{cond}{'foreign.lovely_loader_test6'} eq 'self.id' && $rel_info->{class} eq $class7 && $rel_info->{attrs}{accessor} eq 'single'), 'ambiguous relationship named correctly'); $rel_info = $class8->relationship_info('active_loader_test16') || {}; ok (($class8->has_relationship('active_loader_test16') && $rel_info->{cond}{'foreign.loader_test8_id'} eq 'self.id' && $rel_info->{class} eq $class16 && $rel_info->{attrs}{accessor} eq 'single'), 'ambiguous relationship named correctly'); # fk that references a non-pk key (UNIQUE) my $obj8 = try { $rsobj8->find(1) } || $rsobj8->search({ id => 1 })->single; isa_ok( try { $obj8->loader_test7 }, $class7); ok($class8->column_info('loader_test7')->{is_foreign_key}, 'Foreign key detected'); # test double-fk 17 ->-> 16 my $obj17 = try { $rsobj17->find(33) } || $rsobj17->search({ id => 33 })->single; my $rs_rel16_one = try { $obj17->loader16_one }; isa_ok($rs_rel16_one, $class16); is(try { $rs_rel16_one->dat }, 'y16', "Multiple FKs to same table"); ok($class17->column_info('loader16_one')->{is_foreign_key}, 'Foreign key detected'); my $rs_rel16_two = try { $obj17->loader16_two }; isa_ok($rs_rel16_two, $class16); is(try { $rs_rel16_two->dat }, 'z16', "Multiple FKs to same table"); ok($class17->column_info('loader16_two')->{is_foreign_key}, 'Foreign key detected'); my $obj16 = try { $rsobj16->find(2) } || $rsobj16->search({ id => 2 })->single; my $rs_rel17 = try { $obj16->search_related('loader_test17_loader16_ones') }; isa_ok(try { $rs_rel17->single }, $class17); is(try { $rs_rel17->single->id }, 3, "search_related with multiple FKs from same table"); # test many_to_many detection 18 -> 20 -> 19 and 19 -> 20 -> 18 
ok($class20->column_info('parent')->{is_foreign_key}, 'Foreign key detected'); ok($class20->column_info('child')->{is_foreign_key}, 'Foreign key detected'); cmp_deeply( $class18->_m2m_metadata->{children}, superhashof({ relation => 'loader_test20s', foreign_relation => 'child', attrs => superhashof({ order_by => 'me.id' }) }), 'children m2m correct with ordering' ); cmp_deeply( $class19->_m2m_metadata->{parents}, superhashof({ relation => 'loader_test20s', foreign_relation => 'parent', attrs => superhashof({ order_by => 'me.id' }) }), 'parents m2m correct with ordering' ); # test double-fk m:m 21 <- 22 -> 21 ok($class22->column_info('parent')->{is_foreign_key}, 'Foreign key detected'); ok($class22->column_info('child')->{is_foreign_key}, 'Foreign key detected'); is_deeply( $class21->relationship_info("loader_test22_parents")->{cond}, { 'foreign.parent' => 'self.id' }, 'rel to foreign.parent correct' ); is_deeply( $class21->relationship_info("loader_test22_children")->{cond}, { 'foreign.child' => 'self.id' }, 'rel to foreign.child correct' ); cmp_deeply( $class21->_m2m_metadata, { parents => superhashof({ accessor => 'parents', relation => 'loader_test22_children', foreign_relation => 'parent', }), children => superhashof({ accessor => 'children', relation => 'loader_test22_parents', foreign_relation => 'child', }), }, 'self-m2m correct' ); ok( $class37->relationship_info('parent'), 'parents rel created' ); ok( $class37->relationship_info('child'), 'child rel created' ); is_deeply($class32->_m2m_metadata, {}, 'many_to_many not created for might_have'); is_deeply($class34->_m2m_metadata, {}, 'many_to_many not created for might_have'); # test m2m with overlapping compound keys is_deeply( $class44->relationship_info('loader_test42')->{cond}, { 'foreign.id1' => 'self.id42', 'foreign.id2' => 'self.id2', }, 'compound belongs_to key detected for overlapping m2m', ); is_deeply( $class44->relationship_info('loader_test43')->{cond}, { 'foreign.id1' => 'self.id43', 
'foreign.id2' => 'self.id2', }, 'compound belongs_to key detected for overlapping m2m', ); cmp_deeply( $class42->_m2m_metadata, { loader_test43s => superhashof({ accessor => "loader_test43s", foreign_relation => "loader_test43", }), }, 'm2m created for overlapping multi-column foreign keys' ); cmp_deeply( $class43->_m2m_metadata, { loader_test42s => superhashof({ accessor => "loader_test42s", foreign_relation => "loader_test42", }), }, 'm2m created for overlapping multi-column foreign keys' ); # test double multi-col fk 26 -> 25 my $obj26 = try { $rsobj26->find(33) } || $rsobj26->search({ id => 33 })->single; my $rs_rel25_one = try { $obj26->loader_test25_id_rel1 }; isa_ok($rs_rel25_one, $class25); is(try { $rs_rel25_one->dat }, 'x25', "Multiple multi-col FKs to same table"); ok($class26->column_info('id')->{is_foreign_key}, 'Foreign key detected'); ok($class26->column_info('rel1')->{is_foreign_key}, 'Foreign key detected'); ok($class26->column_info('rel2')->{is_foreign_key}, 'Foreign key detected'); my $rs_rel25_two = try { $obj26->loader_test25_id_rel2 }; isa_ok($rs_rel25_two, $class25); is(try { $rs_rel25_two->dat }, 'y25', "Multiple multi-col FKs to same table"); my $obj25 = try { $rsobj25->find(3,42) } || $rsobj25->search({ id1 => 3, id2 => 42 })->single; my $rs_rel26 = try { $obj25->search_related('loader_test26_id_rel1s') }; isa_ok(try { $rs_rel26->single }, $class26); is(try { $rs_rel26->single->id }, 3, "search_related with multiple multi-col FKs from same table"); # test one-to-one rels my $obj27 = try { $rsobj27->find(1) } || $rsobj27->search({ id => 1 })->single; my $obj28 = try { $obj27->loader_test28 }; isa_ok($obj28, $class28); is(try { $obj28->get_column('id') }, 1, "One-to-one relationship with PRIMARY FK"); ok($class28->column_info('id')->{is_foreign_key}, 'Foreign key detected'); my $obj29 = try { $obj27->loader_test29 }; isa_ok($obj29, $class29); is(try { $obj29->id }, 1, "One-to-one relationship with UNIQUE FK"); 
ok($class29->column_info('fk')->{is_foreign_key}, 'Foreign key detected'); $obj27 = try { $rsobj27->find(2) } || $rsobj27->search({ id => 2 })->single; is(try { $obj27->loader_test28 }, undef, "Undef for missing one-to-one row"); is(try { $obj27->loader_test29 }, undef, "Undef for missing one-to-one row"); # test outer join for nullable referring columns: is $class32->column_info('rel2')->{is_nullable}, 1, 'is_nullable detection'; ok($class32->column_info('rel1')->{is_foreign_key}, 'Foreign key detected'); ok($class32->column_info('rel2')->{is_foreign_key}, 'Foreign key detected'); my $obj32 = try { $rsobj32->find(1, { prefetch => [qw/rel1 rel2/] }) } || try { $rsobj32->search({ id => 1 }, { prefetch => [qw/rel1 rel2/] })->single } || $rsobj32->search({ id => 1 })->single; my $obj34 = eval { $rsobj34->find(1, { prefetch => [qw/loader_test33_id_rel1 loader_test33_id_rel2/] }) } || eval { $rsobj34->search({ id => 1 }, { prefetch => [qw/loader_test33_id_rel1 loader_test33_id_rel2/] })->single } || $rsobj34->search({ id => 1 })->single; diag $@ if $@; isa_ok($obj32,$class32); isa_ok($obj34,$class34); ok($class34->column_info('id')->{is_foreign_key}, 'Foreign key detected'); ok($class34->column_info('rel1')->{is_foreign_key}, 'Foreign key detected'); ok($class34->column_info('rel2')->{is_foreign_key}, 'Foreign key detected'); my $rs_rel31_one = try { $obj32->rel1 }; my $rs_rel31_two = try { $obj32->rel2 }; isa_ok($rs_rel31_one, $class31); is($rs_rel31_two, undef); my $rs_rel33_one = try { $obj34->loader_test33_id_rel1 }; my $rs_rel33_two = try { $obj34->loader_test33_id_rel2 }; isa_ok($rs_rel33_one, $class33); isa_ok($rs_rel33_two, $class33); # from Chisel's tests... 
my $moniker10 = $monikers->{loader_test10}; my $class10 = $classes->{loader_test10}; my $rsobj10 = $conn->resultset($moniker10); my $moniker11 = $monikers->{loader_test11}; my $class11 = $classes->{loader_test11}; my $rsobj11 = $conn->resultset($moniker11); isa_ok( $rsobj10, "DBIx::Class::ResultSet" ); isa_ok( $rsobj11, "DBIx::Class::ResultSet" ); ok($class10->column_info('loader_test11')->{is_foreign_key}, 'Foreign key detected'); ok($class11->column_info('loader_test10')->{is_foreign_key}, 'Foreign key detected'); my $obj10 = $rsobj10->create({ subject => 'xyzzy' }); $obj10->update(); ok( defined $obj10, 'Create row' ); my $obj11 = $rsobj11->create({ loader_test10 => (try { $obj10->id() } || $obj10->id10) }); $obj11->update(); ok( defined $obj11, 'Create related row' ); eval { my $obj10_2 = $obj11->loader_test10; $obj10_2->update({ loader_test11 => $obj11->id11 }); }; diag $@ if $@; ok(!$@, "Setting up circular relationship"); SKIP: { skip 'Previous eval block failed', 3 if $@; my $results = $rsobj10->search({ subject => 'xyzzy' }); is( $results->count(), 1, 'No duplicate row created' ); my $obj10_3 = $results->single(); isa_ok( $obj10_3, $class10 ); is( $obj10_3->loader_test11()->id(), $obj11->id(), 'Circular rel leads back to same row' ); } SKIP: { skip 'This vendor cannot do inline relationship definitions', 9 if $self->{no_inline_rels}; my $moniker12 = $monikers->{loader_test12}; my $class12 = $classes->{loader_test12}; my $rsobj12 = $conn->resultset($moniker12); my $moniker13 = $monikers->{loader_test13}; my $class13 = $classes->{loader_test13}; my $rsobj13 = $conn->resultset($moniker13); isa_ok( $rsobj12, "DBIx::Class::ResultSet" ); isa_ok( $rsobj13, "DBIx::Class::ResultSet" ); ok($class13->column_info('id')->{is_foreign_key}, 'Foreign key detected'); ok($class13->column_info('loader_test12')->{is_foreign_key}, 'Foreign key detected'); ok($class13->column_info('dat')->{is_foreign_key}, 'Foreign key detected'); my $obj13 = try { $rsobj13->find(1) } || 
$rsobj13->search({ id => 1 })->single; isa_ok( $obj13->id, $class12 ); isa_ok( $obj13->loader_test12, $class12); isa_ok( $obj13->dat, $class12); my $obj12 = try { $rsobj12->find(1) } || $rsobj12->search({ id => 1 })->single; isa_ok( try { $obj12->loader_test13 }, $class13 ); } # relname is preserved when another fk is added { local $SIG{__WARN__} = sigwarn_silencer(qr/invalidates \d+ active statement/); $conn->storage->disconnect; # for mssql and access } isa_ok try { $rsobj3->find(1)->loader_test4zes }, 'DBIx::Class::ResultSet'; $conn->storage->disconnect; # for access if (lc($self->{vendor}) !~ /^(?:sybase|mysql)\z/) { $conn->storage->dbh->do('ALTER TABLE loader_test4 ADD fkid2 INTEGER REFERENCES loader_test3 (id)'); } else { $conn->storage->dbh->do(<<"EOF"); ALTER TABLE loader_test4 ADD fkid2 INTEGER $self->{null} EOF $conn->storage->dbh->do(<<"EOF"); ALTER TABLE loader_test4 ADD CONSTRAINT loader_test4_to_3_fk FOREIGN KEY (fkid2) REFERENCES loader_test3 (id) EOF } $conn->storage->disconnect; # for firebird $self->rescan_without_warnings($conn); isa_ok try { $rsobj3->find(1)->loader_test4zes }, 'DBIx::Class::ResultSet', 'relationship name preserved when another foreign key is added in remote table'; SKIP: { skip 'This vendor cannot do out-of-line implicit rel defs', 4 if $self->{no_implicit_rels}; my $moniker14 = $monikers->{loader_test14}; my $class14 = $classes->{loader_test14}; my $rsobj14 = $conn->resultset($moniker14); my $moniker15 = $monikers->{loader_test15}; my $class15 = $classes->{loader_test15}; my $rsobj15 = $conn->resultset($moniker15); isa_ok( $rsobj14, "DBIx::Class::ResultSet" ); isa_ok( $rsobj15, "DBIx::Class::ResultSet" ); ok($class15->column_info('loader_test14')->{is_foreign_key}, 'Foreign key detected'); my $obj15 = try { $rsobj15->find(1) } || $rsobj15->search({ id => 1 })->single; isa_ok( $obj15->loader_test14, $class14 ); } } # test custom_column_info and datetime_timezone/datetime_locale { my $class35 = $classes->{loader_test35}; my 
$class36 = $classes->{loader_test36}; ok($class35->column_info('an_int')->{is_numeric}, 'custom_column_info'); is($class36->column_info('a_date')->{locale},'de_DE','datetime_locale'); is($class36->column_info('a_date')->{timezone},'Europe/Berlin','datetime_timezone'); ok($class36->column_info('b_char_as_data')->{inflate_datetime},'custom_column_info'); is($class36->column_info('b_char_as_data')->{locale},'de_DE','datetime_locale'); is($class36->column_info('b_char_as_data')->{timezone},'Europe/Berlin','datetime_timezone'); ok($class36->column_info('c_char_as_data')->{inflate_date},'custom_column_info'); is($class36->column_info('c_char_as_data')->{locale},'de_DE','datetime_locale'); is($class36->column_info('c_char_as_data')->{timezone},'Europe/Berlin','datetime_timezone'); } # rescan and norewrite test { my @statements_rescan = ( qq{ CREATE TABLE loader_test30 ( id INTEGER NOT NULL PRIMARY KEY, loader_test2 INTEGER NOT NULL, FOREIGN KEY (loader_test2) REFERENCES loader_test2 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test30 (id,loader_test2) VALUES(123, 1) }, q{ INSERT INTO loader_test30 (id,loader_test2) VALUES(321, 2) }, ); # get contents my %contents; my $find_cb = sub { return if -d; return if /^(?:LoaderTest30|LoaderTest1|LoaderTest2X)\.pm\z/; open my $fh, '<', $_ or die "Could not open $_ for reading: $!"; binmode $fh; local $/; $contents{$File::Find::name} = <$fh>; }; find $find_cb, DUMP_DIR; my %contents_before = %contents; # system "rm -rf /tmp/before_rescan /tmp/after_rescan"; # system "mkdir /tmp/before_rescan"; # system "mkdir /tmp/after_rescan"; # system "cp -a @{[DUMP_DIR]} /tmp/before_rescan"; $conn->storage->disconnect; # needed for Firebird and Informix my $dbh = $self->dbconnect(1); $dbh->do($_) for @statements_rescan; $dbh->disconnect; sleep 1; my @new = $self->rescan_without_warnings($conn); is_deeply(\@new, [ qw/LoaderTest30/ ], "Rescan"); # system "cp -a @{[DUMP_DIR]} /tmp/after_rescan"; undef %contents; find $find_cb, DUMP_DIR; my 
%contents_after = %contents; subtest 'dumped files are not rewritten when there is no modification' => sub { plan tests => 1 + scalar keys %contents_before; is_deeply [sort keys %contents_before], [sort keys %contents_after], 'same files dumped'; for my $file (sort keys %contents_before) { eq_or_diff $contents_before{$file}, $contents_after{$file}, "$file not rewritten"; } }; my $rsobj30 = $conn->resultset('LoaderTest30'); isa_ok($rsobj30, 'DBIx::Class::ResultSet'); SKIP: { skip 'no rels', 2 if $self->{skip_rels}; my $obj30 = try { $rsobj30->find(123) } || $rsobj30->search({ id => 123 })->single; isa_ok( $obj30->loader_test2, $class2); ok $rsobj30->result_source->column_info('loader_test2')->{is_foreign_key}, 'Foreign key detected'; } $conn->storage->disconnect; # for Firebird $self->drop_table($conn->storage->dbh, 'loader_test30'); @new = $self->rescan_without_warnings($conn); is_deeply(\@new, [], 'no new tables on rescan'); throws_ok { $conn->resultset('LoaderTest30') } qr/Can't find source/, 'source unregistered for dropped table after rescan'; } $self->test_data_types($conn); $self->test_preserve_case($conn); # run extra tests $self->{extra}{run}->($conn, $monikers, $classes, $self) if $self->{extra}{run}; ## Create a dump from an existing $dbh in a transaction TODO: { local $TODO = 'dumping in a txn is experimental and Pg-only right now' unless $self->{vendor} eq 'Pg'; ok eval { my %opts = ( naming => 'current', constraint => $self->CONSTRAINT, dump_directory => DUMP_DIR, debug => ($ENV{SCHEMA_LOADER_TESTS_DEBUG}||0) ); my $guard = $conn->txn_scope_guard; my $rescan_warnings = RESCAN_WARNINGS; local $SIG{__WARN__} = sigwarn_silencer( qr/$rescan_warnings|commit ineffective with AutoCommit enabled/ # FIXME ); my $schema_from = DBIx::Class::Schema::Loader::make_schema_at( "TestSchemaFromAnother", \%opts, [ sub { $conn->storage->dbh } ] ); $guard->commit; 1; }, 'Making a schema from another schema inside a transaction worked'; diag $@ if $@ && (not $TODO); } 
# End of the main test body: unless the SCHEMA_LOADER_TESTS_NOCLEANUP
# environment variable asks us to leave the tables for debugging, drop
# everything and close the connection.
$self->drop_tables unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP};
$conn->storage->disconnect;
}

# Compare the loaded column_info of every data-type test table against the
# expected definitions recorded in $self->{data_type_tests} (presumably
# filled in by setup_data_type_tests — confirm against the rest of the
# file).  The whole group is skipped when there are no data-type tests, or
# when talking to SQL Server through DBD::Sybase.
sub test_data_types {
    my ($self, $conn) = @_;

    SKIP: {
        if (my $test_count = $self->{data_type_tests}{test_count}) {
            if ($self->{vendor} eq 'mssql' && $conn->storage->dbh->{Driver}{Name} eq 'Sybase') {
                skip 'DBD::Sybase does not work with the data_type tests on latest SQL Server', $test_count;
            }

            my $data_type_tests = $self->{data_type_tests};

            foreach my $moniker (@{ $data_type_tests->{table_monikers} }) {
                my $columns = $data_type_tests->{columns}{$moniker};
                my $rsrc = $conn->resultset($moniker)->result_source;

                while (my ($col_name, $expected_info) = each %$columns) {
                    my %info = %{ $rsrc->column_info($col_name) };
                    # these keys vary by backend/session and are not part of
                    # the data-type comparison
                    delete @info{qw/is_nullable timezone locale sequence/};

                    my $text_col_def = dumper_squashed \%info;
                    my $text_expected_info = dumper_squashed $expected_info;

                    is_deeply \%info, $expected_info,
                        "test column $col_name has definition: $text_col_def expecting: $text_expected_info";
                }
            }
        }
    }
}

# Create two mixed-case tables (names taken from
# $self->{preserve_case_tests_table_names}), rescan with preserve_case
# enabled, and verify that mixed-case table and column names produce
# working sources and accessors.  preserve_case is restored to its
# original value (with another rescan) before returning.
sub test_preserve_case {
    my ($self, $conn) = @_;

    my ($oqt, $cqt) = $self->get_oqt_cqt(always => 1); # open quote, close quote

    my $dbh = $conn->storage->dbh;

    my ($table40_name, $table41_name) = @{ $self->{preserve_case_tests_table_names} };

    $dbh->do($_) for (
        qq| CREATE TABLE ${oqt}${table40_name}${cqt} ( ${oqt}Id${cqt} INTEGER NOT NULL PRIMARY KEY, ${oqt}Foo3Bar${cqt} VARCHAR(100) NOT NULL ) $self->{innodb} |,
        qq| CREATE TABLE ${oqt}${table41_name}${cqt} ( ${oqt}Id${cqt} INTEGER NOT NULL PRIMARY KEY, ${oqt}LoaderTest40Id${cqt} INTEGER, FOREIGN KEY (${oqt}LoaderTest40Id${cqt}) REFERENCES ${oqt}${table40_name}${cqt} (${oqt}Id${cqt}) ) $self->{innodb} |,
        qq| INSERT INTO ${oqt}${table40_name}${cqt} VALUES (1, 'foo') |,
        qq| INSERT INTO ${oqt}${table41_name}${cqt} VALUES (1, 1) |,
    );

    $conn->storage->disconnect;

    my $orig_preserve_case = $conn->loader->preserve_case;

    $conn->loader->preserve_case(1);
    $conn->loader->_setup;
    $self->rescan_without_warnings($conn);

    if (not $self->{skip_rels}) {
        ok my $row = try { $conn->resultset('LoaderTest41')->find(1) },
            'row in mixed-case table';
        ok my $related_row = try { $row->loader_test40 },
            'rel in mixed-case table';
        is try { $related_row->foo3_bar }, 'foo',
            'accessor for mixed-case column name in mixed case table';
    }
    else {
        # keep the test count identical when rels are disabled
        SKIP: { skip 'not testing mixed-case rels with skip_rels', 2 }

        is try { $conn->resultset('LoaderTest40')->find(1)->foo3_bar }, 'foo',
            'accessor for mixed-case column name in mixed case table';
    }

    # Further tests may expect preserve_case to be unset, so reset it to the
    # original value and rescan again.
    $conn->loader->preserve_case($orig_preserve_case);
    $conn->loader->_setup;
    $self->rescan_without_warnings($conn);
}

# Build two lookup hashes from a loaded schema class: table name to
# moniker and table name to result class.  Each table is also keyed by
# its lowercased name so callers work against DBs that uppercase
# identifiers.  Returns ($monikers, $classes) as hashrefs.
sub monikers_and_classes {
    my ($self, $schema_class) = @_;
    my ($monikers, $classes);

    foreach my $source_name ($schema_class->sources) {
        my $table_name = $schema_class->loader->moniker_to_table->{$source_name};
        my $result_class = $schema_class->source($source_name)->result_class;

        $monikers->{$table_name} = $source_name;
        $classes->{$table_name} = $result_class;

        # some DBs (Firebird, Oracle) uppercase everything
        $monikers->{lc $table_name} = $source_name;
        $classes->{lc $table_name} = $result_class;
    }

    return ($monikers, $classes);
}

# Assert that a result class does not declare the same column list in
# more than one unique constraint.
sub check_no_duplicate_unique_constraints {
    my ($class) = @_;

    # unique_constraints() automatically includes the PK, if any
    my %uc_cols;
    ++$uc_cols{ join ", ", @$_ }
        for values %{ { $class->unique_constraints } };
    my $dup_uc = grep { $_ > 1 } values %uc_cols;

    is($dup_uc, 0, "duplicate unique constraints ($class)")
        or diag "uc_cols: @{[ %uc_cols ]}";
}

# Connect to the test database described by @{$self}{qw/dsn user password/}
# via DBIx::Class::Storage::DBI and return the raw $dbh.  When $complain
# is true (the default), RaiseError/ShowErrorStatement are enabled.
sub dbconnect {
    my ($self, $complain) = @_;

    require DBIx::Class::Storage::DBI;
    my $storage = DBIx::Class::Storage::DBI->new;

    # default to complaining loudly when no flag was passed
    $complain = defined $complain ?
$complain : 1;

$storage->connect_info([
    @{ $self }{qw/dsn user password/},
    {
        unsafe => 1,
        RaiseError => $complain,
        ShowErrorStatement => $complain,
        PrintError => 0,
        %{ $self->{connect_info_opts} || {} },
    },
]);

my $dbh = $storage->dbh;
die "Failed to connect to database: $@" if !$dbh;

$self->{storage} = $storage; # storage DESTROY disconnects

return $dbh;
}

# Return the (open, close) identifier-quoting characters configured for
# this test run.  Unless the caller passes (always => 1), returns empty
# strings when preserve_case mode is exclusive (i.e. quoting must not be
# used outside preserve_case tests).
sub get_oqt_cqt {
    my $self = shift;
    my %opts = @_;

    if ((not $opts{always}) && $self->{preserve_case_mode_is_exclusive}) {
        return ('', '');
    }

    # XXX should get quote_char from the storage of an initialized loader.
    my ($oqt, $cqt); # open quote, close quote
    if (ref $self->{quote_char}) {
        ($oqt, $cqt) = @{ $self->{quote_char} };
    }
    else {
        $oqt = $cqt = $self->{quote_char} || '';
    }

    return ($oqt, $cqt);
}

# Create and populate all of the test tables.  The DDL is collected into
# file-scoped statement lists so optional groups (relationship tests,
# inline rels, implicit rels) can be applied conditionally later in this
# sub.  Vendor differences are injected via $self->{auto_inc_pk},
# $self->{null}, $self->{innodb} and the auto_inc_cb callback.
sub create {
    my $self = shift;

    $self->{_created} = 1;
    $self->drop_tables;

    # callback producing any extra statements needed to make a column
    # auto-incrementing on this vendor (may be a no-op)
    my $make_auto_inc = $self->{auto_inc_cb} || sub { return () };

    @statements = (
        qq{ CREATE TABLE loader_test1s ( id $self->{auto_inc_pk}, dat VARCHAR(32) NOT NULL UNIQUE ) $self->{innodb} },
        $make_auto_inc->(qw/loader_test1s id/),
        q{ INSERT INTO loader_test1s (dat) VALUES('foo') },
        q{ INSERT INTO loader_test1s (dat) VALUES('bar') },
        q{ INSERT INTO loader_test1s (dat) VALUES('baz') },

        # also test method collision
        # crumb_crisp_coating and sticky_filling are for col_accessor_map tests
        qq{ CREATE TABLE loader_test2 ( id $self->{auto_inc_pk}, dat VARCHAR(32) NOT NULL, dat2 VARCHAR(32) NOT NULL, set_primary_key INTEGER $self->{null}, can INTEGER $self->{null}, dbix_class_testcomponent INTEGER $self->{null}, dbix_class_testcomponentmap INTEGER $self->{null}, testcomponent_fqn INTEGER $self->{null}, meta INTEGER $self->{null}, test_role_method INTEGER $self->{null}, test_role_for_map_method INTEGER $self->{null}, crumb_crisp_coating VARCHAR(32) $self->{null}, sticky_filling VARCHAR(32) $self->{null}, UNIQUE (dat2, dat) ) $self->{innodb} },
        $make_auto_inc->(qw/loader_test2 id/),
        q{ INSERT INTO loader_test2 (dat, dat2) VALUES('aaa', 'zzz') },
        q{ INSERT
INTO loader_test2 (dat, dat2) VALUES('bbb', 'yyy') }, q{ INSERT INTO loader_test2 (dat, dat2) VALUES('ccc', 'xxx') }, q{ INSERT INTO loader_test2 (dat, dat2) VALUES('ddd', 'www') }, qq{ CREATE TABLE LOADER_test23 ( ID INTEGER NOT NULL PRIMARY KEY, DAT VARCHAR(32) NOT NULL UNIQUE ) $self->{innodb} }, qq{ CREATE TABLE LoAdEr_test24 ( iD INTEGER NOT NULL PRIMARY KEY, DaT VARCHAR(32) NOT NULL UNIQUE ) $self->{innodb} }, # Access does not support DEFAULT $self->{vendor} ne 'Access' ? qq{ CREATE TABLE loader_test35 ( id INTEGER NOT NULL PRIMARY KEY, a_varchar VARCHAR(100) DEFAULT 'foo', an_int INTEGER DEFAULT 42, a_negative_int INTEGER DEFAULT -42, a_double DOUBLE PRECISION DEFAULT 10.555, a_negative_double DOUBLE PRECISION DEFAULT -10.555, a_function $self->{default_function_def} ) $self->{innodb} } : qq{ CREATE TABLE loader_test35 ( id INTEGER NOT NULL PRIMARY KEY, a_varchar VARCHAR(100), an_int INTEGER, a_negative_int INTEGER, a_double DOUBLE, a_negative_double DOUBLE, a_function DATETIME ) }, qq{ CREATE TABLE loader_test36 ( id INTEGER NOT NULL PRIMARY KEY, a_date $self->{basic_date_datatype}, b_char_as_data VARCHAR(100), c_char_as_data VARCHAR(100) ) $self->{innodb} }, # DB2 does not allow nullable uniq components, SQLAnywhere automatically # converts nullable uniq components to NOT NULL qq{ CREATE TABLE loader_test50 ( id INTEGER NOT NULL UNIQUE, id1 INTEGER NOT NULL, id2 INTEGER NOT NULL, @{[ $self->{vendor} !~ /^(?:DB2|SQLAnywhere)\z/i ? 
" id3 INTEGER $self->{null}, id4 INTEGER NOT NULL, UNIQUE (id3, id4), " : '' ]} UNIQUE (id1, id2) ) $self->{innodb} }, ); # some DBs require mixed case identifiers to be quoted my ($oqt, $cqt) = $self->get_oqt_cqt; @statements_reltests = ( qq{ CREATE TABLE loader_test3 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(32) ) $self->{innodb} }, q{ INSERT INTO loader_test3 (id,dat) VALUES(1,'aaa') }, q{ INSERT INTO loader_test3 (id,dat) VALUES(2,'bbb') }, q{ INSERT INTO loader_test3 (id,dat) VALUES(3,'ccc') }, q{ INSERT INTO loader_test3 (id,dat) VALUES(4,'ddd') }, qq{ CREATE TABLE loader_test4 ( id INTEGER NOT NULL PRIMARY KEY, fkid INTEGER NOT NULL, dat VARCHAR(32), belongs_to INTEGER $self->{null}, set_primary_key INTEGER $self->{null}, FOREIGN KEY( fkid ) REFERENCES loader_test3 (id), FOREIGN KEY( belongs_to ) REFERENCES loader_test3 (id), FOREIGN KEY( set_primary_key ) REFERENCES loader_test3 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test4 (id,fkid,dat,belongs_to,set_primary_key) VALUES(123,1,'aaa',1,1) }, q{ INSERT INTO loader_test4 (id,fkid,dat,belongs_to,set_primary_key) VALUES(124,2,'bbb',2,2) }, q{ INSERT INTO loader_test4 (id,fkid,dat,belongs_to,set_primary_key) VALUES(125,3,'ccc',3,3) }, q{ INSERT INTO loader_test4 (id,fkid,dat,belongs_to,set_primary_key) VALUES(126,4,'ddd',4,4) }, qq| CREATE TABLE loader_test5 ( id1 INTEGER NOT NULL, ${oqt}iD2${cqt} INTEGER NOT NULL, dat VARCHAR(8), from_id INTEGER $self->{null}, to_id INTEGER $self->{null}, PRIMARY KEY (id1,${oqt}iD2${cqt}), FOREIGN KEY (from_id) REFERENCES loader_test4 (id), FOREIGN KEY (to_id) REFERENCES loader_test4 (id) ) $self->{innodb} |, qq| INSERT INTO loader_test5 (id1,${oqt}iD2${cqt},dat) VALUES (1,1,'aaa') |, qq| CREATE TABLE loader_test6 ( id INTEGER NOT NULL PRIMARY KEY, ${oqt}Id2${cqt} INTEGER, loader_test2_id INTEGER, dat VARCHAR(8), FOREIGN KEY (loader_test2_id) REFERENCES loader_test2 (id), FOREIGN KEY(id,${oqt}Id2${cqt}) REFERENCES loader_test5 (id1,${oqt}iD2${cqt}) ) $self->{innodb} 
|, (qq| INSERT INTO loader_test6 (id, ${oqt}Id2${cqt},loader_test2_id,dat) | . q{ VALUES (1, 1,1,'aaa') }), # here we are testing adjective detection qq{ CREATE TABLE loader_test7 ( id INTEGER NOT NULL PRIMARY KEY, id2 VARCHAR(8) NOT NULL UNIQUE, dat VARCHAR(8), lovely_loader_test6 INTEGER NOT NULL UNIQUE, FOREIGN KEY (lovely_loader_test6) REFERENCES loader_test6 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test7 (id,id2,dat,lovely_loader_test6) VALUES (1,'aaa','bbb',1) }, # for some DBs we need a named FK to drop later ($self->{vendor} =~ /^(mssql|sybase|access|mysql)\z/i ? ( (q{ ALTER TABLE loader_test6 ADD } . qq{ loader_test7_id INTEGER $self->{null} }), (q{ ALTER TABLE loader_test6 ADD CONSTRAINT loader_test6_to_7_fk } . q{ FOREIGN KEY (loader_test7_id) } . q{ REFERENCES loader_test7 (id) }) ) : ( (q{ ALTER TABLE loader_test6 ADD } . qq{ loader_test7_id INTEGER $self->{null} REFERENCES loader_test7 (id) }), )), qq{ CREATE TABLE loader_test8 ( id INTEGER NOT NULL PRIMARY KEY, loader_test7 VARCHAR(8) NOT NULL, dat VARCHAR(8), FOREIGN KEY (loader_test7) REFERENCES loader_test7 (id2) ) $self->{innodb} }, (q{ INSERT INTO loader_test8 (id,loader_test7,dat) VALUES (1,'aaa','bbb') }), (q{ INSERT INTO loader_test8 (id,loader_test7,dat) VALUES (2,'aaa','bbb') }), (q{ INSERT INTO loader_test8 (id,loader_test7,dat) VALUES (3,'aaa','bbb') }), qq{ CREATE TABLE loader_test9 ( loader_test9 VARCHAR(8) NOT NULL ) $self->{innodb} }, qq{ CREATE TABLE loader_test16 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(8), loader_test8_id INTEGER NOT NULL UNIQUE, FOREIGN KEY (loader_test8_id) REFERENCES loader_test8 (id) ) $self->{innodb} }, qq{ INSERT INTO loader_test16 (id,dat,loader_test8_id) VALUES (2,'x16',1) }, qq{ INSERT INTO loader_test16 (id,dat,loader_test8_id) VALUES (4,'y16',2) }, qq{ INSERT INTO loader_test16 (id,dat,loader_test8_id) VALUES (6,'z16',3) }, # for some DBs we need a named FK to drop later ($self->{vendor} =~ /^(mssql|sybase|access|mysql)\z/i ? 
( (q{ ALTER TABLE loader_test8 ADD } . qq{ loader_test16_id INTEGER $self->{null} }), (q{ ALTER TABLE loader_test8 ADD CONSTRAINT loader_test8_to_16_fk } . q{ FOREIGN KEY (loader_test16_id) } . q{ REFERENCES loader_test16 (id) }) ) : ( (q{ ALTER TABLE loader_test8 ADD } . qq{ loader_test16_id INTEGER $self->{null} REFERENCES loader_test16 (id) }), )), qq{ CREATE TABLE loader_test17 ( id INTEGER NOT NULL PRIMARY KEY, loader16_one INTEGER, loader16_two INTEGER, FOREIGN KEY (loader16_one) REFERENCES loader_test16 (id), FOREIGN KEY (loader16_two) REFERENCES loader_test16 (id) ) $self->{innodb} }, qq{ INSERT INTO loader_test17 (id, loader16_one, loader16_two) VALUES (3, 2, 4) }, qq{ INSERT INTO loader_test17 (id, loader16_one, loader16_two) VALUES (33, 4, 6) }, qq{ CREATE TABLE loader_test18 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(8) ) $self->{innodb} }, qq{ INSERT INTO loader_test18 (id,dat) VALUES (1,'x18') }, qq{ INSERT INTO loader_test18 (id,dat) VALUES (2,'y18') }, qq{ INSERT INTO loader_test18 (id,dat) VALUES (3,'z18') }, qq{ CREATE TABLE loader_test19 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(8) ) $self->{innodb} }, qq{ INSERT INTO loader_test19 (id,dat) VALUES (4,'x19') }, qq{ INSERT INTO loader_test19 (id,dat) VALUES (5,'y19') }, qq{ INSERT INTO loader_test19 (id,dat) VALUES (6,'z19') }, qq{ CREATE TABLE loader_test20 ( parent INTEGER NOT NULL, child INTEGER NOT NULL, PRIMARY KEY (parent, child), FOREIGN KEY (parent) REFERENCES loader_test18 (id), FOREIGN KEY (child) REFERENCES loader_test19 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test20 (parent, child) VALUES (1,4) }, q{ INSERT INTO loader_test20 (parent, child) VALUES (2,5) }, q{ INSERT INTO loader_test20 (parent, child) VALUES (3,6) }, qq{ CREATE TABLE loader_test21 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(8) ) $self->{innodb} }, q{ INSERT INTO loader_test21 (id,dat) VALUES (7,'a21')}, q{ INSERT INTO loader_test21 (id,dat) VALUES (11,'b21')}, q{ INSERT INTO loader_test21 (id,dat) 
VALUES (13,'c21')}, q{ INSERT INTO loader_test21 (id,dat) VALUES (17,'d21')}, qq{ CREATE TABLE loader_test22 ( parent INTEGER NOT NULL, child INTEGER NOT NULL, PRIMARY KEY (parent, child), FOREIGN KEY (parent) REFERENCES loader_test21 (id), FOREIGN KEY (child) REFERENCES loader_test21 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test22 (parent, child) VALUES (7,11)}, q{ INSERT INTO loader_test22 (parent, child) VALUES (11,13)}, q{ INSERT INTO loader_test22 (parent, child) VALUES (13,17)}, qq{ CREATE TABLE loader_test25 ( id1 INTEGER NOT NULL, id2 INTEGER NOT NULL, dat VARCHAR(8), PRIMARY KEY (id1,id2) ) $self->{innodb} }, q{ INSERT INTO loader_test25 (id1,id2,dat) VALUES (33,5,'x25') }, q{ INSERT INTO loader_test25 (id1,id2,dat) VALUES (33,7,'y25') }, q{ INSERT INTO loader_test25 (id1,id2,dat) VALUES (3,42,'z25') }, qq{ CREATE TABLE loader_test26 ( id INTEGER NOT NULL PRIMARY KEY, rel1 INTEGER NOT NULL, rel2 INTEGER NOT NULL, FOREIGN KEY (id, rel1) REFERENCES loader_test25 (id1, id2), FOREIGN KEY (id, rel2) REFERENCES loader_test25 (id1, id2) ) $self->{innodb} }, q{ INSERT INTO loader_test26 (id,rel1,rel2) VALUES (33,5,7) }, q{ INSERT INTO loader_test26 (id,rel1,rel2) VALUES (3,42,42) }, qq{ CREATE TABLE loader_test27 ( id INTEGER NOT NULL PRIMARY KEY ) $self->{innodb} }, q{ INSERT INTO loader_test27 (id) VALUES (1) }, q{ INSERT INTO loader_test27 (id) VALUES (2) }, qq{ CREATE TABLE loader_test28 ( id INTEGER NOT NULL PRIMARY KEY, FOREIGN KEY (id) REFERENCES loader_test27 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test28 (id) VALUES (1) }, qq{ CREATE TABLE loader_test29 ( id INTEGER NOT NULL PRIMARY KEY, fk INTEGER NOT NULL UNIQUE, FOREIGN KEY (fk) REFERENCES loader_test27 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test29 (id,fk) VALUES (1,1) }, qq{ CREATE TABLE loader_test31 ( id INTEGER NOT NULL PRIMARY KEY ) $self->{innodb} }, q{ INSERT INTO loader_test31 (id) VALUES (1) }, qq{ CREATE TABLE loader_test32 ( id INTEGER NOT NULL PRIMARY KEY, rel1 
INTEGER NOT NULL, rel2 INTEGER $self->{null}, FOREIGN KEY (rel1) REFERENCES loader_test31(id), FOREIGN KEY (rel2) REFERENCES loader_test31(id) ) $self->{innodb} }, q{ INSERT INTO loader_test32 (id,rel1) VALUES (1,1) }, qq{ CREATE TABLE loader_test33 ( id1 INTEGER NOT NULL, id2 INTEGER NOT NULL, PRIMARY KEY (id1,id2) ) $self->{innodb} }, q{ INSERT INTO loader_test33 (id1,id2) VALUES (1,2) }, qq{ CREATE TABLE loader_test34 ( id INTEGER NOT NULL PRIMARY KEY, rel1 INTEGER NOT NULL, rel2 INTEGER $self->{null}, FOREIGN KEY (id,rel1) REFERENCES loader_test33(id1,id2), FOREIGN KEY (id,rel2) REFERENCES loader_test33(id1,id2) ) $self->{innodb} }, q{ INSERT INTO loader_test34 (id,rel1,rel2) VALUES (1,2,2) }, qq{ CREATE TABLE loader_test37 ( parent INTEGER NOT NULL, child INTEGER NOT NULL UNIQUE, PRIMARY KEY (parent, child), FOREIGN KEY (parent) REFERENCES loader_test32 (id), FOREIGN KEY (child) REFERENCES loader_test34 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test37 (parent, child) VALUES (1,1) }, qq{ CREATE TABLE loader_test42 ( id1 INTEGER NOT NULL, id2 INTEGER NOT NULL, PRIMARY KEY (id1, id2) ) $self->{innodb} }, qq{ CREATE TABLE loader_test43 ( id1 INTEGER NOT NULL, id2 INTEGER NOT NULL, PRIMARY KEY (id1, id2) ) $self->{innodb} }, qq{ CREATE TABLE loader_test44 ( id42 INTEGER NOT NULL, id43 INTEGER NOT NULL, id2 INTEGER NOT NULL, PRIMARY KEY (id42, id43, id2), FOREIGN KEY (id42, id2) REFERENCES loader_test42 (id1, id2), FOREIGN KEY (id43, id2) REFERENCES loader_test43 (id1, id2) ) $self->{innodb} }, ); @statements_advanced = ( qq{ CREATE TABLE loader_test10 ( id10 $self->{auto_inc_pk}, subject VARCHAR(8), loader_test11 INTEGER $self->{null} ) $self->{innodb} }, $make_auto_inc->(qw/loader_test10 id10/), # Access does not support DEFAULT. qq{ CREATE TABLE loader_test11 ( id11 $self->{auto_inc_pk}, a_message VARCHAR(8) @{[ $self->{vendor} ne 'Access' ? 
"DEFAULT 'foo'" : '' ]}, loader_test10 INTEGER $self->{null}, FOREIGN KEY (loader_test10) REFERENCES loader_test10 (id10) ) $self->{innodb} }, $make_auto_inc->(qw/loader_test11 id11/), (lc($self->{vendor}) ne 'informix' ? (q{ ALTER TABLE loader_test10 ADD CONSTRAINT loader_test11_fk } . q{ FOREIGN KEY (loader_test11) } . q{ REFERENCES loader_test11 (id11) }) : (q{ ALTER TABLE loader_test10 ADD CONSTRAINT } . q{ FOREIGN KEY (loader_test11) } . q{ REFERENCES loader_test11 (id11) } . q{ CONSTRAINT loader_test11_fk }) ), ); @statements_advanced_sqlite = ( qq{ CREATE TABLE loader_test10 ( id10 $self->{auto_inc_pk}, subject VARCHAR(8) ) $self->{innodb} }, $make_auto_inc->(qw/loader_test10 id10/), qq{ CREATE TABLE loader_test11 ( id11 $self->{auto_inc_pk}, a_message VARCHAR(8) DEFAULT 'foo', loader_test10 INTEGER $self->{null}, FOREIGN KEY (loader_test10) REFERENCES loader_test10 (id10) ) $self->{innodb} }, $make_auto_inc->(qw/loader_test11 id11/), (q{ ALTER TABLE loader_test10 ADD COLUMN } . q{ loader_test11 INTEGER REFERENCES loader_test11 (id11) }), ); @statements_inline_rels = ( qq{ CREATE TABLE loader_test12 ( id INTEGER NOT NULL PRIMARY KEY, id2 VARCHAR(8) NOT NULL UNIQUE, dat VARCHAR(8) NOT NULL UNIQUE ) $self->{innodb} }, q{ INSERT INTO loader_test12 (id,id2,dat) VALUES (1,'aaa','bbb') }, qq{ CREATE TABLE loader_test13 ( id INTEGER NOT NULL PRIMARY KEY REFERENCES loader_test12, loader_test12 VARCHAR(8) NOT NULL REFERENCES loader_test12 (id2), dat VARCHAR(8) REFERENCES loader_test12 (dat) ) $self->{innodb} }, (q{ INSERT INTO loader_test13 (id,loader_test12,dat) } . 
q{ VALUES (1,'aaa','bbb') }), ); @statements_implicit_rels = ( qq{ CREATE TABLE loader_test14 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(8) ) $self->{innodb} }, q{ INSERT INTO loader_test14 (id,dat) VALUES (123,'aaa') }, qq{ CREATE TABLE loader_test15 ( id INTEGER NOT NULL PRIMARY KEY, loader_test14 INTEGER NOT NULL, FOREIGN KEY (loader_test14) REFERENCES loader_test14 ) $self->{innodb} }, q{ INSERT INTO loader_test15 (id,loader_test14) VALUES (1,123) }, ); $self->drop_tables; my $dbh = $self->dbconnect(1); $dbh->do($_) for @{ $self->{pre_create} || [] }; $dbh->do($_) foreach (@statements); if (not ($self->{vendor} eq 'mssql' && $dbh->{Driver}{Name} eq 'Sybase')) { foreach my $ddl (@{ $self->{data_type_tests}{ddl} || [] }) { if (my $cb = $self->{data_types_ddl_cb}) { $cb->($ddl); } else { $dbh->do($ddl); } } } unless ($self->{skip_rels}) { # hack for now, since DB2 doesn't like inline comments, and we need # to test one for mysql, which works on everyone else... # this all needs to be refactored anyways. 
# Apply the relationship DDL one statement at a time so that a failure
# reports exactly which statement broke.
for my $stmt (@statements_reltests) {
    try {
        $dbh->do($stmt);
    }
    catch {
        die "Error executing '$stmt': $_\n";
    };
}

# SQLite needs its own variant of the "advanced" statements
if($self->{vendor} =~ /sqlite/i) {
    $dbh->do($_) for (@statements_advanced_sqlite);
}
else {
    $dbh->do($_) for (@statements_advanced);
}

unless($self->{no_inline_rels}) {
    $dbh->do($_) for (@statements_inline_rels);
}
unless($self->{no_implicit_rels}) {
    $dbh->do($_) for (@statements_implicit_rels);
}
}

# vendor-specific extra setup supplied by the individual test driver
$dbh->do($_) for @{ $self->{extra}->{create} || [] };
$dbh->disconnect();
}

# Drop every table, column, and constraint the test suite may have
# created.  Circular FK columns/constraints are removed first using the
# several vendor-specific ALTER TABLE spellings; drop errors are
# deliberately ignored since most of these objects only exist for some
# configurations.
sub drop_tables {
    my $self = shift;

    my @tables = qw/
        loader_test1
        loader_test1s
        loader_test2
        LOADER_test23
        LoAdEr_test24
        loader_test35
        loader_test36
        loader_test50
    /;

    my @tables_auto_inc = (
        [ qw/loader_test1s id/ ],
        [ qw/loader_test2 id/ ],
    );

    # listed in FK-dependency order (children before parents)
    my @tables_reltests = qw/
        loader_test4
        loader_test3
        loader_test6
        loader_test5
        loader_test8
        loader_test7
        loader_test9
        loader_test17
        loader_test16
        loader_test20
        loader_test19
        loader_test18
        loader_test22
        loader_test21
        loader_test26
        loader_test25
        loader_test28
        loader_test29
        loader_test27
        loader_test37
        loader_test32
        loader_test31
        loader_test34
        loader_test33
        loader_test44
        loader_test43
        loader_test42
    /;

    my @tables_advanced = qw/
        loader_test11
        loader_test10
    /;

    my @tables_advanced_auto_inc = (
        [ qw/loader_test10 id10/ ],
        [ qw/loader_test11 id11/ ],
    );

    my @tables_inline_rels = qw/
        loader_test13
        loader_test12
    /;

    my @tables_implicit_rels = qw/
        loader_test15
        loader_test14
    /;

    my @tables_rescan = qw/ loader_test30 /;

    my @tables_preserve_case_tests = @{ $self->{preserve_case_tests_table_names} };

    # circular-rel columns that must be dropped before their tables
    my %drop_columns = (
        loader_test6  => 'loader_test7_id',
        loader_test7  => 'lovely_loader_test6',
        loader_test8  => 'loader_test16_id',
        loader_test16 => 'loader_test8_id',
    );

    # named FK constraints that must be dropped before their tables
    my %drop_constraints = (
        loader_test10 => 'loader_test11_fk',
        loader_test6  => 'loader_test6_to_7_fk',
        loader_test8  => 'loader_test8_to_16_fk',
    );

    # For some reason some tests do this twice (I guess dependency issues?)
    # do it twice for all drops
    for (1,2) {
        local $^W = 0; # for ADO

        my $dbh = $self->dbconnect(0);

        $dbh->do($_) for @{ $self->{extra}{pre_drop_ddl} || [] };

        $self->drop_table($dbh, $_) for @{ $self->{extra}{drop} || [] };

        my $drop_auto_inc = $self->{auto_inc_drop_cb} || sub {};

        unless ($self->{skip_rels}) {
            # drop the circular rel columns if possible, this
            # doesn't work on all DBs
            foreach my $table (keys %drop_columns) {
                $dbh->do("ALTER TABLE $table DROP $drop_columns{$table}");
                $dbh->do("ALTER TABLE $table DROP COLUMN $drop_columns{$table}");
            }

            foreach my $table (keys %drop_constraints) {
                # for MSSQL
                $dbh->do("ALTER TABLE $table DROP $drop_constraints{$table}");
                # for Sybase and Access
                $dbh->do("ALTER TABLE $table DROP CONSTRAINT $drop_constraints{$table}");
                # for MySQL
                $dbh->do("ALTER TABLE $table DROP FOREIGN KEY $drop_constraints{$table}");
            }

            $self->drop_table($dbh, $_) for (@tables_reltests);
            $self->drop_table($dbh, $_) for (@tables_reltests);

            $dbh->do($_) for map { $drop_auto_inc->(@$_) } @tables_advanced_auto_inc;

            $self->drop_table($dbh, $_) for (@tables_advanced);

            unless($self->{no_inline_rels}) {
                $self->drop_table($dbh, $_) for (@tables_inline_rels);
            }
            unless($self->{no_implicit_rels}) {
                $self->drop_table($dbh, $_) for (@tables_implicit_rels);
            }
        }

        $dbh->do($_) for map { $drop_auto_inc->(@$_) } @tables_auto_inc;

        $self->drop_table($dbh, $_) for (@tables, @tables_rescan);

        if (not ($self->{vendor} eq 'mssql' && $dbh->{Driver}{Name} eq 'Sybase')) {
            foreach my $data_type_table (@{ $self->{data_type_tests}{table_names} || [] }) {
                $self->drop_table($dbh, $data_type_table);
            }
        }

        $self->drop_table($dbh, $_) for @tables_preserve_case_tests;

        $dbh->disconnect;
    }
}

# Try every vendor-specific spelling of DROP TABLE until one works;
# failures are silently ignored (the table may simply not exist).
sub drop_table {
    my ($self, $dbh, $table) = @_;

    local $^W = 0; # for ADO

    try { $dbh->do("DROP TABLE $table CASCADE CONSTRAINTS") }; # oracle
    try { $dbh->do("DROP TABLE $table CASCADE") }; # postgres and ?
try { $dbh->do("DROP TABLE $table") }; # if table name is case sensitive my ($oqt, $cqt) = $self->get_oqt_cqt(always => 1); try { $dbh->do("DROP TABLE ${oqt}${table}${cqt}") }; } sub _custom_column_info { my ( $table_name, $column_name, $column_info ) = @_; $table_name = lc ( $table_name ); $column_name = lc ( $column_name ); if ( $table_name eq 'loader_test35' and $column_name eq 'an_int' ){ return { is_numeric => 1 } } # Set inflate_datetime or inflate_date to check # datetime_timezone and datetime_locale if ( $table_name eq 'loader_test36' ){ return { inflate_datetime => 1 } if ( $column_name eq 'b_char_as_data' ); return { inflate_date => 1 } if ( $column_name eq 'c_char_as_data' ); } return; } my %DATA_TYPE_MULTI_TABLE_OVERRIDES = ( oracle => qr/\blong\b/i, mssql => qr/\b(?:timestamp|rowversion)\b/i, informix => qr/\b(?:bigserial|serial8)\b/i, ); sub setup_data_type_tests { my $self = shift; return unless my $types = $self->{data_types}; my $tests = $self->{data_type_tests} = {}; # split types into tables based on overrides my (@types, @split_off_types, @first_table_types); { my $split_off_re = $DATA_TYPE_MULTI_TABLE_OVERRIDES{lc($self->{vendor})} || qr/(?!)/; @types = keys %$types; @split_off_types = grep /$split_off_re/, @types; @first_table_types = grep !/$split_off_re/, @types; } @types = ( +{ map +($_, $types->{$_}), @first_table_types }, map +{ $_, $types->{$_} }, @split_off_types, ); my $test_count = 0; my $table_num = 10000; foreach my $types (@types) { my $table_name = "loader_test$table_num"; push @{ $tests->{table_names} }, $table_name; my $table_moniker = "LoaderTest$table_num"; push @{ $tests->{table_monikers} }, $table_moniker; $table_num++; my $cols = $tests->{columns}{$table_moniker} = {}; my $ddl = "CREATE TABLE $table_name (\n id INTEGER NOT NULL PRIMARY KEY,\n"; my %seen_col_names; while (my ($col_def, $expected_info) = each %$types) { (my $type_alias = $col_def) =~ s/\( (.+) \)(?=(?:[^()]* '(?:[^']* (?:''|\\')* [^']*)* [^\\']' 
[^()]*)*\z)//xg; my $size = $1; $size = '' unless defined $size; $size = '' unless $size =~ /^[\d, ]+\z/; $size =~ s/\s+//g; my @size = split /,/, $size; # some DBs don't like very long column names if ($self->{vendor} =~ /^(?:Firebird|SQLAnywhere|Oracle|DB2)\z/i) { my ($col_def, $default) = $type_alias =~ /^(.*)(default.*)?\z/i; $type_alias = substr $col_def, 0, 15; $type_alias .= '_with_dflt' if $default; } $type_alias =~ s/\s/_/g; $type_alias =~ s/\W//g; my $col_name = 'col_' . $type_alias; if (@size) { my $size_name = join '_', apply { s/\W//g } @size; $col_name .= "_sz_$size_name"; } # XXX would be better to check loader->preserve_case $col_name = lc $col_name; $col_name .= '_' . $seen_col_names{$col_name} if $seen_col_names{$col_name}++; $ddl .= " $col_name $col_def,\n"; $cols->{$col_name} = $expected_info; $test_count++; } $ddl =~ s/,\n\z/\n)/; push @{ $tests->{ddl} }, $ddl; } $tests->{test_count} = $test_count; return $test_count; } sub rescan_without_warnings { my ($self, $conn) = @_; local $SIG{__WARN__} = sigwarn_silencer(RESCAN_WARNINGS); return $conn->rescan; } sub test_col_accessor_map { my ( $column_name, $default_name, $context, $default_map ) = @_; if( lc($column_name) eq 'crumb_crisp_coating' ) { is( $default_name, 'crumb_crisp_coating', 'col_accessor_map was passed the default name' ); ok( $context->{$_}, "col_accessor_map func was passed the $_" ) for qw( table table_name table_class table_moniker schema_class ); return 'trivet'; } else { return $default_map->({ LOADER_TEST2 => { sticky_filling => 'goo', }, loader_test2 => { sticky_filling => 'goo', }, }); } } sub DESTROY { my $self = shift; unless ($ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}) { $self->drop_tables if $self->{_created}; rmtree DUMP_DIR } } 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/lib/DBIx/0000755000175000017500000000000012650450355017244 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBIx/Class/0000755000175000017500000000000012650450355020311 5ustar 
ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBIx/Class/TestComponent.pm0000644000175000017500000000020512542756321023450 0ustar ilmariilmaripackage DBIx::Class::TestComponent; use strict; use warnings; sub dbix_class_testcomponent { 'dbix_class_testcomponent works' } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/DBIx/Class/TestComponentForMap.pm0000644000175000017500000000022112542756321024553 0ustar ilmariilmaripackage DBIx::Class::TestComponentForMap; use strict; use warnings; sub dbix_class_testcomponentmap { 'dbix_class_testcomponentmap works' } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/DBIx/Class/TestSchemaComponent.pm0000644000175000017500000000045012131533457024570 0ustar ilmariilmaripackage DBIx::Class::TestSchemaComponent; use strict; use warnings; our $test_component_ok = 0; sub connection { my ($self, @info) = @_; $test_component_ok++; return $self->next::method(@info); } sub dbix_class_testschemacomponent { 'dbix_class_testschemacomponent works' } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/TestAdditional.pm0000644000175000017500000000015112542756321021723 0ustar ilmariilmaripackage TestAdditional; use strict; use warnings; sub test_additional { return "test_additional"; } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/TestComponentForMapFQN.pm0000644000175000017500000000017612542756321023276 0ustar ilmariilmaripackage TestComponentForMapFQN; use strict; use warnings; sub testcomponentformap_fqn { 'TestComponentForMapFQN works' } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/dbixcsl_test_dir.pm0000644000175000017500000000204612542756321022345 0ustar ilmariilmaripackage dbixcsl_test_dir; use strict; use warnings; use File::Path 'rmtree'; use File::Temp 'tempdir'; use Scalar::Util 'weaken'; use namespace::clean; use DBI (); use base qw/Exporter/; our @EXPORT_OK = '$tdir'; die "/t does not exist, this can't be right...\n" unless -d 't'; my $tbdir = 't/var'; unless (-d $tbdir) { mkdir $tbdir or die "Unable to create $tbdir: $!\n"; } our $tdir = tempdir(DIR => 
$tbdir); # We need to disconnect all active DBI handles before deleting the directory, # otherwise the SQLite .db files cannot be deleted on Win32 (file in use) since # END does not run in any sort of order. no warnings 'redefine'; my $connect = \&DBI::connect; my @handles; *DBI::connect = sub { my $dbh = $connect->(@_); push @handles, $dbh; weaken $handles[-1]; return $dbh; }; END { if (not $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}) { foreach my $dbh (@handles) { $dbh->disconnect if $dbh; } rmtree($tdir, 1, 1); rmdir($tbdir); # remove if empty, ignore otherwise } } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/make_dbictest_db_comments.pm0000644000175000017500000000372712131533457024174 0ustar ilmariilmaripackage make_dbictest_db_comments; use strict; use warnings; use DBI; use dbixcsl_test_dir qw/$tdir/; eval { require DBD::SQLite }; my $class = $@ ? 'SQLite2' : 'SQLite'; my $fn = "$tdir/dbictest.db"; unlink($fn); our $dsn = "dbi:$class:dbname=$fn"; my $dbh = DBI->connect($dsn); $dbh->do('PRAGMA SYNCHRONOUS = OFF'); $dbh->do($_) for ( q|CREATE TABLE table_comments ( id INTEGER PRIMARY KEY, table_name TEXT, comment_text TEXT )|, q|CREATE TABLE column_comments ( id INTEGER PRIMARY KEY, table_name TEXT, column_name TEXT, comment_text TEXT )|, q|CREATE TABLE foo ( fooid INTEGER PRIMARY KEY, footext TEXT DEFAULT 'footext', foodt TIMESTAMP DEFAULT CURRENT_TIMESTAMP )|, q|CREATE TABLE bar ( barid INTEGER PRIMARY KEY, fooref INTEGER REFERENCES foo(fooid) )|, q|INSERT INTO table_comments (id, table_name, comment_text) VALUES (1, 'foo', 'a short comment') |, q|INSERT INTO table_comments (id, table_name, comment_text) VALUES (2, 'bar', 'a | . ('very ' x 80) . q|long comment') |, q|INSERT INTO column_comments (id, table_name, column_name, comment_text) VALUES (1, 'foo', 'fooid', 'a short comment') |, q|INSERT INTO column_comments (id, table_name, column_name, comment_text) VALUES (2, 'foo', 'footext', 'a | . ('very ' x 80) . 
q|long comment') |, q|INSERT INTO foo (fooid, footext) VALUES (1,'Foo text for number 1')|, q|INSERT INTO foo (fooid, footext) VALUES (2,'Foo record associated with the Bar with barid 3')|, q|INSERT INTO foo (fooid, footext) VALUES (3,'Foo text for number 3')|, q|INSERT INTO foo (fooid, footext) VALUES (4,'Foo text for number 4')|, q|INSERT INTO bar VALUES (1,4)|, q|INSERT INTO bar VALUES (2,3)|, q|INSERT INTO bar VALUES (3,2)|, q|INSERT INTO bar VALUES (4,1)|, ); END { unlink($fn) unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; } 1 DBIx-Class-Schema-Loader-0.07045/t/lib/make_dbictest_db_with_unique.pm0000644000175000017500000000410712131533457024701 0ustar ilmariilmaripackage make_dbictest_db_with_unique; use strict; use warnings; use DBI; use dbixcsl_test_dir qw/$tdir/; eval { require DBD::SQLite }; my $class = $@ ? 'SQLite2' : 'SQLite'; my $fn = "$tdir/dbictest_with_unique.db"; unlink($fn); our $dsn = "dbi:$class:dbname=$fn"; my $dbh = DBI->connect($dsn); $dbh->do('PRAGMA SYNCHRONOUS = OFF'); $dbh->do($_) for ( q|CREATE TABLE foos ( fooid INTEGER PRIMARY KEY, footext TEXT )|, q|CREATE TABLE bar ( barid INTEGER PRIMARY KEY, foo_id INTEGER NOT NULL REFERENCES foos (fooid) )|, q|CREATE TABLE bazs ( bazid INTEGER PRIMARY KEY, baz_num INTEGER NOT NULL UNIQUE, stations_visited_id INTEGER REFERENCES stations_visited (id) )|, q|CREATE TABLE quuxs ( quuxid INTEGER PRIMARY KEY, baz_id INTEGER NOT NULL UNIQUE, FOREIGN KEY (baz_id) REFERENCES bazs (baz_num) )|, q|CREATE TABLE stations_visited ( id INTEGER PRIMARY KEY, quuxs_id INTEGER REFERENCES quuxs (quuxid) )|, q|CREATE TABLE RouteChange ( id INTEGER PRIMARY KEY, QuuxsId INTEGER REFERENCES quuxs (quuxid), Foo2Bar INTEGER )|, q|CREATE TABLE email ( id INTEGER PRIMARY KEY, to_id INTEGER REFERENCES foos (fooid), from_id INTEGER REFERENCES foos (fooid) )|, q|INSERT INTO foos VALUES (1,'Foos text for number 1')|, q|INSERT INTO foos VALUES (2,'Foos record associated with the Bar with barid 3')|, q|INSERT INTO foos VALUES (3,'Foos 
text for number 3')|, q|INSERT INTO foos VALUES (4,'Foos text for number 4')|, q|INSERT INTO bar VALUES (1,4)|, q|INSERT INTO bar VALUES (2,3)|, q|INSERT INTO bar VALUES (3,2)|, q|INSERT INTO bar VALUES (4,1)|, q|INSERT INTO bazs VALUES (1,20,1)|, q|INSERT INTO bazs VALUES (2,19,1)|, q|INSERT INTO quuxs VALUES (1,20)|, q|INSERT INTO quuxs VALUES (2,19)|, q|INSERT INTO stations_visited VALUES (1,1)|, q|INSERT INTO RouteChange VALUES (1,1,3)|, ); END { unlink($fn) unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/make_dbictest_db_bad_comment_tables.pm0000644000175000017500000000245512131533457026146 0ustar ilmariilmaripackage make_dbictest_db_bad_comment_tables; use strict; use warnings; use DBI; use dbixcsl_test_dir qw/$tdir/; eval { require DBD::SQLite }; my $class = $@ ? 'SQLite2' : 'SQLite'; my $fn = "$tdir/dbictest.db"; unlink($fn); our $dsn = "dbi:$class:dbname=$fn"; my $dbh = DBI->connect($dsn); $dbh->do('PRAGMA SYNCHRONOUS = OFF'); $dbh->do($_) for ( q|CREATE TABLE table_comments( id INTEGER PRIMARY KEY )|, q|CREATE TABLE column_comments( id INTEGER PRIMARY KEY )|, q|CREATE TABLE foo ( fooid INTEGER PRIMARY KEY, footext TEXT DEFAULT 'footext', foodt TIMESTAMP DEFAULT CURRENT_TIMESTAMP )|, q|CREATE TABLE bar ( barid INTEGER PRIMARY KEY, fooref INTEGER REFERENCES foo(fooid) )|, q|INSERT INTO foo (fooid, footext) VALUES (1,'Foo text for number 1')|, q|INSERT INTO foo (fooid, footext) VALUES (2,'Foo record associated with the Bar with barid 3')|, q|INSERT INTO foo (fooid, footext) VALUES (3,'Foo text for number 3')|, q|INSERT INTO foo (fooid, footext) VALUES (4,'Foo text for number 4')|, q|INSERT INTO bar VALUES (1,4)|, q|INSERT INTO bar VALUES (2,3)|, q|INSERT INTO bar VALUES (3,2)|, q|INSERT INTO bar VALUES (4,1)|, ); END { unlink($fn) unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; } 1 DBIx-Class-Schema-Loader-0.07045/t/lib/make_dbictest_db_multi_unique.pm0000644000175000017500000000234412131533457025061 0ustar 
ilmariilmaripackage make_dbictest_db_multi_unique; use strict; use warnings; use DBI; use dbixcsl_test_dir qw/$tdir/; eval { require DBD::SQLite }; my $class = $@ ? 'SQLite2' : 'SQLite'; my $fn = "$tdir/dbictest_multi_unique.db"; unlink($fn); our $dsn = "dbi:$class:dbname=$fn"; my $dbh = DBI->connect($dsn); $dbh->do('PRAGMA SYNCHRONOUS = OFF'); $dbh->do($_) for ( q|CREATE TABLE foo ( fooid INTEGER PRIMARY KEY, footext TEXT DEFAULT 'footext', foodt TIMESTAMP DEFAULT CURRENT_TIMESTAMP )|, q|CREATE TABLE bar ( barid INTEGER PRIMARY KEY, uniq1 INT UNIQUE, uniq2 INT UNIQUE, fooref INTEGER REFERENCES foo(fooid) )|, q|INSERT INTO foo (fooid, footext) VALUES (1,'Foo text for number 1')|, q|INSERT INTO foo (fooid, footext) VALUES (2,'Foo record associated with the Bar with barid 3')|, q|INSERT INTO foo (fooid, footext) VALUES (3,'Foo text for number 3')|, q|INSERT INTO foo (fooid, footext) VALUES (4,'Foo text for number 4')|, q|INSERT INTO bar VALUES (1,1,1,4)|, q|INSERT INTO bar VALUES (2,2,2,3)|, q|INSERT INTO bar VALUES (3,3,3,2)|, q|INSERT INTO bar VALUES (4,4,4,1)|, ); END { unlink($fn) unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/TestComponentFQN.pm0000644000175000017500000000015412542756321022165 0ustar ilmariilmaripackage TestComponentFQN; use strict; use warnings; sub testcomponent_fqn { 'TestComponentFQN works' } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/dbixcsl_dumper_tests.pm0000644000175000017500000001324212542756321023246 0ustar ilmariilmaripackage dbixcsl_dumper_tests; use strict; use warnings; use Test::More; use File::Path; use IPC::Open3; use IO::Handle; use List::Util 'any'; use DBIx::Class::Schema::Loader::Utils 'dumper_squashed'; use DBIx::Class::Schema::Loader (); use Class::Unload (); use namespace::clean; use dbixcsl_test_dir '$tdir'; my $DUMP_PATH = "$tdir/dump"; sub cleanup { rmtree($DUMP_PATH, 1, 1); } sub class_file { my ($self, $class) = @_; $class =~ s{::}{/}g; $class = $DUMP_PATH . '/' . $class . 
'.pm'; return $class; } sub append_to_class { my ($self, $class, $string) = @_; $class = $self->class_file($class); open(my $appendfh, '>>', $class) or die "Failed to open '$class' for append: $!"; print $appendfh $string; close($appendfh); } sub dump_test { my ($self, %tdata) = @_; $tdata{options}{dump_directory} = $DUMP_PATH; $tdata{options}{use_namespaces} ||= 0; SKIP: for my $dumper (\&_dump_directly, \&_dump_dbicdump) { skip 'skipping dbicdump tests on Win32', 1, if $dumper == \&_dump_dbicdump && $^O eq 'MSWin32'; _test_dumps(\%tdata, $dumper->(%tdata)); } } sub _dump_directly { my %tdata = @_; my $schema_class = $tdata{classname}; no strict 'refs'; @{$schema_class . '::ISA'} = ('DBIx::Class::Schema::Loader'); $schema_class->loader_options( quiet => 1, %{$tdata{options}}, ); my @warns; eval { local $SIG{__WARN__} = sub { push(@warns, @_) }; $schema_class->connect(_get_connect_info(\%tdata)); }; my $err = $@; my $classes = !$err && $schema_class->loader->generated_classes; Class::Unload->unload($schema_class); _check_error($err, $tdata{error}); return \@warns, $classes; } sub _dump_dbicdump { my %tdata = @_; # use $^X so we execute ./script/dbicdump with the same perl binary that the tests were executed with my @cmd = ($^X, qw(script/dbicdump)); $tdata{options}{quiet} = 1 unless exists $tdata{options}{quiet}; while (my ($opt, $val) = each(%{ $tdata{options} })) { $val = dumper_squashed $val if ref $val; my $param = "$opt=$val"; if ($^O eq 'MSWin32') { $param = q{"} . $param . q{"}; # that's not nearly enough... 
} push @cmd, '-o', $param; } my @connect_info = _get_connect_info(\%tdata); for my $info (@connect_info) { $info = dumper_squashed $info if ref $info; } push @cmd, $tdata{classname}, @connect_info; # make sure our current @INC gets used by dbicdump use Config; local $ENV{PERL5LIB} = join $Config{path_sep}, @INC, ($ENV{PERL5LIB} || ''); my $std = { map { $_ => IO::Handle->new } (qw/in out err/) }; my $pid = open3(@{$std}{qw/in out err/}, @cmd); waitpid($pid, 0); my @stdout = $std->{out}->getlines; ok (!scalar @stdout, 'Silence on STDOUT'); my @warnings = $std->{err}->getlines; if ($? >> 8 != 0) { my $exception = pop @warnings; _check_error($exception, $tdata{error}); } return \@warnings; } sub _get_connect_info { my $opts = shift; my $test_db_class = $opts->{test_db_class} || 'make_dbictest_db'; eval "require $test_db_class;"; die $@ if $@; my $dsn = do { no strict 'refs'; ${$test_db_class . '::dsn'}; }; return ($dsn, @{ $opts->{extra_connect_info} || [] }); } sub _check_error { my ($got, $expected) = @_; return unless $got; if (not $expected) { fail "Unexpected error in " . ((caller(1))[3]) . ": $got"; return; } if (ref $expected eq 'Regexp') { like $got, $expected, 'error matches expected pattern'; return; } is $got, $expected, 'error matches'; } sub _test_dumps { my ($tdata, $warns, $classes) = @_; my %tdata = %{$tdata}; my $schema_class = $tdata{classname}; my $check_warns = $tdata{warnings}; is(@$warns, @$check_warns, "$schema_class warning count") or diag @$warns; for(my $i = 0; $i <= $#$check_warns; $i++) { like(($warns->[$i] || ''), $check_warns->[$i], "$schema_class warning $i"); } if ($classes && (my $results = $tdata{generated_results})) { my $ns = $tdata{options}{use_namespaces} ? 
("::".($tdata{result_namespace} || "Result")) : ""; is_deeply( [ sort grep { $_ ne $schema_class } @$classes ], [ sort map { "${schema_class}${ns}::$_" } @$results ], "$schema_class generated_classes set correctly", ); } my $file_regexes = $tdata{regexes}; my $file_neg_regexes = $tdata{neg_regexes} || {}; my $schema_regexes = delete $file_regexes->{schema}; my $schema_path = $DUMP_PATH . '/' . $schema_class; $schema_path =~ s{::}{/}g; _dump_file_like($schema_path . '.pm', @$schema_regexes) if $schema_regexes; foreach my $src (keys %$file_regexes) { my $src_file = $schema_path . '/' . $src . '.pm'; _dump_file_like($src_file, @{$file_regexes->{$src}}); } foreach my $src (keys %$file_neg_regexes) { my $src_file = $schema_path . '/' . $src . '.pm'; _dump_file_not_like($src_file, @{$file_neg_regexes->{$src}}); } } sub _slurp { my $path = shift; open(my $dumpfh, '<:raw', $path) or die "Failed to open '$path': $!"; my $contents = do { local $/; <$dumpfh>; }; close($dumpfh); return ($path, $contents); } sub _dump_file_like { my ($path, $contents) = _slurp shift; like($contents, $_, "$path matches $_") for @_; } sub _dump_file_not_like { my ($path, $contents) = _slurp shift; unlike($contents, $_, "$path does not match $_") for @_; } END { __PACKAGE__->cleanup unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP} } DBIx-Class-Schema-Loader-0.07045/t/lib/TestLoaderSubclass_NoRebless.pm0000644000175000017500000000016412131533457024536 0ustar ilmariilmaripackage TestLoaderSubclass_NoRebless; use strict; use warnings; use base qw/DBIx::Class::Schema::Loader::DBI/; 1; DBIx-Class-Schema-Loader-0.07045/t/lib/TestRole.pm0000644000175000017500000000013312131533457020551 0ustar ilmariilmaripackage TestRole; use Moose::Role; sub test_role_method { 'test_role_method works' } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/TestLoaderSubclass.pm0000644000175000017500000000016212131533457022560 0ustar ilmariilmaripackage TestLoaderSubclass; use strict; use warnings; use base 
qw/DBIx::Class::Schema::Loader::DBI::SQLite/; 1; DBIx-Class-Schema-Loader-0.07045/t/lib/DBICTestMethods/0000755000175000017500000000000012650450355021343 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBICTestMethods/Namespaces/0000755000175000017500000000000012650450355023422 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBICTestMethods/Namespaces/Schema/0000755000175000017500000000000012650450355024622 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBICTestMethods/Namespaces/Schema/Result/0000755000175000017500000000000012650450355026100 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBICTestMethods/Namespaces/Schema/Result/Foo.pm0000644000175000017500000000044612131533457027164 0ustar ilmariilmaripackage DBICTestMethods::Namespaces::Schema::Result::Foo; use strict; use warnings FATAL => 'all'; use English '-no_match_vars'; sub biz { my ($self) = @_; return 'foo bar biz baz boz noz schnozz'; } sub boz { my ($self) = @_; return 'foo bar biz baz boz noz schnozz'; } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/DBICTestMethods/Backcompat/0000755000175000017500000000000012650450355023407 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBICTestMethods/Backcompat/Schema/0000755000175000017500000000000012650450355024607 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBICTestMethods/Backcompat/Schema/Foo.pm0000644000175000017500000000031612131533457025667 0ustar ilmariilmaripackage DBICTestMethods::Backcompat::Schema::Foo; use strict; use warnings FATAL => 'all'; use English '-no_match_vars'; sub biz { my ($self) = @_; return 'foo bar biz baz boz noz schnozz'; } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/make_dbictest_db_clashing_monikers.pm0000644000175000017500000000251312131533457026036 0ustar ilmariilmaripackage make_dbictest_db_clashing_monikers; use strict; use warnings; use DBI; use dbixcsl_test_dir qw/$tdir/; eval { require DBD::SQLite }; my $class = $@ ? 
'SQLite2' : 'SQLite'; my $fn = "$tdir/dbictest_clashing_tables.db"; unlink($fn); our $dsn = "dbi:$class:dbname=$fn"; my $dbh = DBI->connect($dsn); $dbh->do('PRAGMA SYNCHRONOUS = OFF'); $dbh->do($_) for ( q|CREATE TABLE foo ( fooid INTEGER PRIMARY KEY, footext TEXT DEFAULT 'footext', foodt TIMESTAMP DEFAULT CURRENT_TIMESTAMP )|, q|CREATE TABLE bar ( barid INTEGER PRIMARY KEY, fooref INTEGER REFERENCES foo(fooid) )|, # this will cause a singularized moniker clash q|CREATE TABLE bars ( barid INTEGER PRIMARY KEY, fooref INTEGER REFERENCES foo(fooid) )|, q|INSERT INTO foo (fooid, footext) VALUES (1,'Foo text for number 1')|, q|INSERT INTO foo (fooid, footext) VALUES (2,'Foo record associated with the Bar with barid 3')|, q|INSERT INTO foo (fooid, footext) VALUES (3,'Foo text for number 3')|, q|INSERT INTO foo (fooid, footext) VALUES (4,'Foo text for number 4')|, q|INSERT INTO bar VALUES (1,4)|, q|INSERT INTO bar VALUES (2,3)|, q|INSERT INTO bar VALUES (3,2)|, q|INSERT INTO bar VALUES (4,1)|, ); END { unlink($fn) unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/make_dbictest_db.pm0000644000175000017500000000220612131533457022256 0ustar ilmariilmaripackage make_dbictest_db; use strict; use warnings; use DBI; use dbixcsl_test_dir qw/$tdir/; eval { require DBD::SQLite }; my $class = $@ ? 
'SQLite2' : 'SQLite'; my $fn = "$tdir/dbictest.db"; unlink($fn); our $dsn = "dbi:$class:dbname=$fn"; my $dbh = DBI->connect($dsn); $dbh->do('PRAGMA SYNCHRONOUS = OFF'); $dbh->do($_) for ( q|CREATE TABLE foo ( fooid INTEGER PRIMARY KEY, footext TEXT DEFAULT 'footext', foodt TIMESTAMP DEFAULT CURRENT_TIMESTAMP )|, q|CREATE TABLE bar ( barid INTEGER PRIMARY KEY, fooref INTEGER REFERENCES foo(fooid) )|, q|INSERT INTO foo (fooid, footext) VALUES (1,'Foo text for number 1')|, q|INSERT INTO foo (fooid, footext) VALUES (2,'Foo record associated with the Bar with barid 3')|, q|INSERT INTO foo (fooid, footext) VALUES (3,'Foo text for number 3')|, q|INSERT INTO foo (fooid, footext) VALUES (4,'Foo text for number 4')|, q|INSERT INTO bar VALUES (1,4)|, q|INSERT INTO bar VALUES (2,3)|, q|INSERT INTO bar VALUES (3,2)|, q|INSERT INTO bar VALUES (4,1)|, ); END { unlink($fn) unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/make_dbictest_db_multi_m2m.pm0000644000175000017500000000273012542756321024250 0ustar ilmariilmaripackage make_dbictest_db_multi_m2m; use strict; use warnings; use DBI; use dbixcsl_test_dir qw/$tdir/; eval { require DBD::SQLite }; my $class = $@ ? 
'SQLite2' : 'SQLite'; my $fn = "$tdir/dbictest_multi_m2m.db"; unlink($fn); our $dsn = "dbi:$class:dbname=$fn"; my $dbh = DBI->connect($dsn); $dbh->do('PRAGMA SYNCHRONOUS = OFF'); $dbh->do($_) for ( q|CREATE TABLE foo ( foo_id INTEGER PRIMARY KEY )|, q|CREATE TABLE bar ( bar_id INTEGER PRIMARY KEY )|, q|CREATE TABLE foo_bar_one ( foo_id INTEGER NOT NULL REFERENCES foo(foo_id), bar_id INTEGER NOT NULL REFERENCES bar(bar_id), PRIMARY KEY (foo_id, bar_id) )|, q|CREATE TABLE foo_bar_two ( foo_id INTEGER NOT NULL REFERENCES foo(foo_id), bar_id INTEGER NOT NULL REFERENCES bar(bar_id), PRIMARY KEY (foo_id, bar_id) )|, q|INSERT INTO foo (foo_id) VALUES (1)|, q|INSERT INTO foo (foo_id) VALUES (2)|, q|INSERT INTO bar (bar_id) VALUES (1)|, q|INSERT INTO bar (bar_id) VALUES (2)|, q|INSERT INTO foo_bar_one (foo_id, bar_id) VALUES (1,1)|, q|INSERT INTO foo_bar_one (foo_id, bar_id) VALUES (2,2)|, q|INSERT INTO foo_bar_two (foo_id, bar_id) VALUES (1,1)|, q|INSERT INTO foo_bar_two (foo_id, bar_id) VALUES (1,2)|, q|INSERT INTO foo_bar_two (foo_id, bar_id) VALUES (2,1)|, q|INSERT INTO foo_bar_two (foo_id, bar_id) VALUES (2,2)|, ); END { unlink($fn) unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/DBICTest/0000755000175000017500000000000012650450355020017 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBICTest/Schema/0000755000175000017500000000000012650450355021217 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBICTest/Schema/_no_skip_load_external/0000755000175000017500000000000012650450355025721 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBICTest/Schema/_no_skip_load_external/Foo.pm0000644000175000017500000000016012542756321027001 0ustar ilmariilmaripackage DBICTest::Schema::_no_skip_load_external::Foo; use strict; use warnings; our $skip_me = "bad mojo"; 1; DBIx-Class-Schema-Loader-0.07045/t/lib/DBICTest/Schema/_skip_load_external/0000755000175000017500000000000012650450355025225 5ustar 
ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBICTest/Schema/_skip_load_external/Foo.pm0000644000175000017500000000015512542756321026311 0ustar ilmariilmaripackage DBICTest::Schema::_skip_load_external::Foo; use strict; use warnings; our $skip_me = "bad mojo"; 1; DBIx-Class-Schema-Loader-0.07045/t/lib/DBIXCSL_Test/0000755000175000017500000000000012650450355020545 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBIXCSL_Test/Schema/0000755000175000017500000000000012650450355021745 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBIXCSL_Test/Schema/MyResult/0000755000175000017500000000000012650450355023531 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/DBIXCSL_Test/Schema/MyResult/LoaderTest1.pm0000644000175000017500000000020112542756321026211 0ustar ilmariilmaripackage DBIXCSL_Test::Schema::MyResult::LoaderTest1; use strict; use warnings; sub loader_test1_classmeth { 'all is well' } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/make_dbictest_db_plural_tables.pm0000644000175000017500000000225512131533457025173 0ustar ilmariilmaripackage make_dbictest_db_plural_tables; use strict; use warnings; use DBI; use dbixcsl_test_dir qw/$tdir/; eval { require DBD::SQLite }; my $class = $@ ? 
'SQLite2' : 'SQLite'; my $fn = "$tdir/dbictest_plural_tables.db"; unlink($fn); our $dsn = "dbi:$class:dbname=$fn"; my $dbh = DBI->connect($dsn); $dbh->do('PRAGMA SYNCHRONOUS = OFF'); $dbh->do($_) for ( q|CREATE TABLE foos ( fooid INTEGER PRIMARY KEY, footext TEXT DEFAULT 'footext', foodt TIMESTAMP DEFAULT CURRENT_TIMESTAMP )|, q|CREATE TABLE bars ( barid INTEGER PRIMARY KEY, fooref INTEGER REFERENCES foos(fooid) )|, q|INSERT INTO foos (fooid, footext) VALUES (1,'Foo text for number 1')|, q|INSERT INTO foos (fooid, footext) VALUES (2,'Foo record associated with the Bar with barid 3')|, q|INSERT INTO foos (fooid, footext) VALUES (3,'Foo text for number 3')|, q|INSERT INTO foos (fooid, footext) VALUES (4,'Foo text for number 4')|, q|INSERT INTO bars VALUES (1,4)|, q|INSERT INTO bars VALUES (2,3)|, q|INSERT INTO bars VALUES (3,2)|, q|INSERT INTO bars VALUES (4,1)|, ); END { unlink($fn) unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/TestRoleForMap.pm0000644000175000017500000000023612131533457021662 0ustar ilmariilmaripackage TestRoleForMap; use Moose::Role; requires qw/id dat meta/; # in loader_test2 sub test_role_for_map_method { 'test_role_for_map_method works' } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/TestLeftBase.pm0000644000175000017500000000017512542756321021346 0ustar ilmariilmaripackage TestLeftBase; use strict; use warnings; sub test_additional_base_override { return "test_left_base_override"; } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/TestSchemaComponentFQN.pm0000644000175000017500000000017612542756321023312 0ustar ilmariilmaripackage TestSchemaComponentFQN; use strict; use warnings; sub testschemacomponent_fqn { 'TestSchemaComponentFQN works' } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/TestSchemaBaseClass.pm0000644000175000017500000000047112542756321022641 0ustar ilmariilmaripackage TestSchemaBaseClass; use strict; use warnings; use base 'DBIx::Class::Schema'; our $test_ok = 0; sub connection { my ($self, @info) = 
@_; if ($info[0] =~ /^dbi/) { $test_ok++; } return $self->next::method(@info); } sub testschemabaseclass { 'TestSchemaBaseClass works' } 1; DBIx-Class-Schema-Loader-0.07045/t/lib/My/0000755000175000017500000000000012650450355017043 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/lib/My/SchemaBaseClass.pm0000644000175000017500000000013312542756321022361 0ustar ilmariilmaripackage My::SchemaBaseClass; use strict; use warnings; use base 'DBIx::Class::Schema'; 1; DBIx-Class-Schema-Loader-0.07045/t/lib/My/ResultBaseClass.pm0000644000175000017500000000013112542756321022435 0ustar ilmariilmaripackage My::ResultBaseClass; use strict; use warnings; use base 'DBIx::Class::Core'; 1; DBIx-Class-Schema-Loader-0.07045/t/lib/TestRole2.pm0000644000175000017500000000013612131533457020636 0ustar ilmariilmaripackage TestRole2; use Moose::Role; sub test_role2_method { 'test_role2_method works' } 1; DBIx-Class-Schema-Loader-0.07045/t/10_01sqlite_common.t0000644000175000017500000002026712542756321021420 0ustar ilmariilmariuse strict; use warnings; use Test::More; use lib qw(t/lib); use dbixcsl_common_tests; use dbixcsl_test_dir qw/$tdir/; eval { require DBD::SQLite }; my $class = $@ ? 'SQLite2' : 'SQLite'; my $tester = dbixcsl_common_tests->new( vendor => 'SQLite', auto_inc_pk => 'INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT', dsn => "dbi:$class:dbname=$tdir/sqlite_test.db", user => '', password => '', connect_info_opts => { on_connect_do => [ 'PRAGMA foreign_keys = ON', 'PRAGMA synchronous = OFF', ] }, loader_options => { preserve_case => 1 }, default_is_deferrable => 0, default_on_clause => 'NO ACTION', data_types => { # SQLite ignores data types aside from INTEGER pks. # We just test that they roundtrip sanely. 
# # Numeric types 'smallint' => { data_type => 'smallint' }, 'int' => { data_type => 'int' }, 'integer' => { data_type => 'integer' }, # test that type name is lowercased 'INTEGER' => { data_type => 'integer' }, 'bigint' => { data_type => 'bigint' }, 'float' => { data_type => 'float' }, 'double precision' => { data_type => 'double precision' }, 'real' => { data_type => 'real' }, 'float(2)' => { data_type => 'float', size => 2 }, 'float(7)' => { data_type => 'float', size => 7 }, 'decimal' => { data_type => 'decimal' }, 'dec' => { data_type => 'dec' }, 'numeric' => { data_type => 'numeric' }, 'decimal(3)' => { data_type => 'decimal', size => 3 }, 'numeric(3)' => { data_type => 'numeric', size => 3 }, 'decimal(3,3)' => { data_type => 'decimal', size => [3,3] }, 'dec(3,3)' => { data_type => 'dec', size => [3,3] }, 'numeric(3,3)' => { data_type => 'numeric', size => [3,3] }, # Date and Time Types 'date' => { data_type => 'date' }, 'timestamp DEFAULT CURRENT_TIMESTAMP' => { data_type => 'timestamp', default_value => \'current_timestamp' }, 'time' => { data_type => 'time' }, # String Types 'char' => { data_type => 'char' }, 'char(11)' => { data_type => 'char', size => 11 }, 'varchar(20)' => { data_type => 'varchar', size => 20 }, }, extra => { create => [ # 'sqlite_' is reserved, so we use 'extra_' q{ CREATE TABLE "extra_loader_test1" ( "id" NOT NULL PRIMARY KEY, "value" TEXT UNIQUE NOT NULL ) }, q{ CREATE TABLE extra_loader_test2 ( event_id INTEGER PRIMARY KEY ) }, q{ CREATE TABLE extra_loader_test3 ( person_id INTEGER PRIMARY KEY ) }, # Wordy, newline-heavy SQL q{ CREATE TABLE extra_loader_test4 ( event_id INTEGER NOT NULL CONSTRAINT fk_event_id REFERENCES extra_loader_test2(event_id), person_id INTEGER NOT NULL CONSTRAINT fk_person_id REFERENCES extra_loader_test3 (person_id), PRIMARY KEY (event_id, person_id) ) }, # make sure views are picked up q{ CREATE VIEW extra_loader_test5 AS SELECT * FROM extra_loader_test4 }, # Compound primary keys can't be autoinc in the 
DBIC sense q{ CREATE TABLE extra_loader_test6 ( id1 INTEGER, id2 INTEGER, value INTEGER, PRIMARY KEY (id1, id2) ) }, q{ CREATE TABLE extra_loader_test7 ( id1 INTEGER, id2 TEXT, value DECIMAL, PRIMARY KEY (id1, id2) ) }, q{ create table extra_loader_test8 ( id integer primary key ) }, q{ create table extra_loader_test9 ( id integer primary key, eight_id int, foreign key (eight_id) references extra_loader_test8(id) on delete restrict on update set null deferrable ) }, # test inline constraint q{ create table extra_loader_test10 ( id integer primary key, eight_id int references extra_loader_test8(id) on delete restrict on update set null deferrable ) }, ], pre_drop_ddl => [ 'DROP VIEW extra_loader_test5' ], drop => [ qw/extra_loader_test1 extra_loader_test2 extra_loader_test3 extra_loader_test4 extra_loader_test6 extra_loader_test7 extra_loader_test8 extra_loader_test9 extra_loader_test10 / ], count => 20, run => sub { my ($schema, $monikers, $classes) = @_; ok ((my $rs = $schema->resultset($monikers->{extra_loader_test1})), 'resultset for quoted table'); ok ((my $source = $rs->result_source), 'source'); is_deeply [ $source->columns ], [ qw/id value/ ], 'retrieved quoted column names from quoted table'; ok ((exists $source->column_info('value')->{is_nullable}), 'is_nullable exists'); is $source->column_info('value')->{is_nullable}, 0, 'is_nullable is set correctly'; ok (($source = $schema->source($monikers->{extra_loader_test4})), 'verbose table'); is_deeply [ $source->primary_columns ], [ qw/event_id person_id/ ], 'composite primary key'; is ($source->relationships, 2, '2 foreign key constraints found'); # test that columns for views are picked up is $schema->resultset($monikers->{extra_loader_test5})->result_source->column_info('person_id')->{data_type}, 'integer', 'columns for views are introspected'; # test that views are marked as such isa_ok $schema->resultset($monikers->{extra_loader_test5})->result_source, 'DBIx::Class::ResultSource::View', 'views have 
table_class set correctly'; isnt $schema->resultset($monikers->{extra_loader_test6})->result_source->column_info('id1')->{is_auto_increment}, 1, q{two integer PKs don't get marked autoinc}; isnt $schema->resultset($monikers->{extra_loader_test7})->result_source->column_info('id1')->{is_auto_increment}, 1, q{composite integer PK with non-integer PK doesn't get marked autoinc}; # test on delete/update fk clause introspection ok ((my $rel_info = $schema->source('ExtraLoaderTest9')->relationship_info('eight')), 'got rel info'); is $rel_info->{attrs}{on_delete}, 'RESTRICT', 'ON DELETE clause introspected correctly'; is $rel_info->{attrs}{on_update}, 'SET NULL', 'ON UPDATE clause introspected correctly'; is $rel_info->{attrs}{is_deferrable}, 1, 'DEFERRABLE clause introspected correctly'; ok (($rel_info = $schema->source('ExtraLoaderTest10')->relationship_info('eight')), 'got rel info'); is $rel_info->{attrs}{on_delete}, 'RESTRICT', 'ON DELETE clause introspected correctly for inline FK'; is $rel_info->{attrs}{on_update}, 'SET NULL', 'ON UPDATE clause introspected correctly for inline FK'; is $rel_info->{attrs}{is_deferrable}, 1, 'DEFERRABLE clause introspected correctly for inline FK'; }, }, ); $tester->run_tests(); END { unlink "$tdir/sqlite_test.db" unless $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}; } DBIx-Class-Schema-Loader-0.07045/t/30_01comments.t0000644000175000017500000000211612131533457020364 0ustar ilmariilmariuse strict; use warnings; use Test::More; use Test::Exception; use Test::Warn; use DBIx::Class::Schema::Loader::Utils 'slurp_file'; use File::Path; use lib qw(t/lib); use make_dbictest_db_comments; use dbixcsl_test_dir qw/$tdir/; my $dump_path = "$tdir/dump"; { package DBICTest::Schema::1; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options( dump_directory => $dump_path, quiet => 1, ); } DBICTest::Schema::1->connect($make_dbictest_db_comments::dsn); plan tests => 4; my $foo = slurp_file("$dump_path/DBICTest/Schema/1/Result/Foo.pm"); my $bar = 
slurp_file("$dump_path/DBICTest/Schema/1/Result/Bar.pm"); like($foo, qr/Result::Foo - a short comment/, 'Short table comment inline'); like($bar, qr/Result::Bar\n\n=head1 DESCRIPTION\n\na (very ){80}long comment/, 'Long table comment in DESCRIPTION'); like($foo, qr/=head2 fooid\n\n( .*\n)+\na short comment/, 'Short column comment recorded'); like($foo, qr/=head2 footext\n\n( .*\n)+\na (very ){80}long comment/, 'Long column comment recorded'); END { rmtree($dump_path, 1, 1); } DBIx-Class-Schema-Loader-0.07045/t/10_04db2_common.t0000644000175000017500000004003112542756321020560 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_rdbms_db2'; use strict; use warnings; use Test::More; use Test::Exception; use Try::Tiny; use File::Path 'rmtree'; use DBIx::Class::Schema::Loader 'make_schema_at'; use lib qw(t/lib); use dbixcsl_common_tests (); use dbixcsl_test_dir '$tdir'; use constant EXTRA_DUMP_DIR => "$tdir/db2_extra_dump"; my $dsn = $ENV{DBICTEST_DB2_DSN} || ''; my $user = $ENV{DBICTEST_DB2_USER} || ''; my $password = $ENV{DBICTEST_DB2_PASS} || ''; my ($schema, $schemas_created); # for cleanup in END for extra tests my $srv_ver = do { require DBI; my $dbh = DBI->connect ($dsn, $user, $password, { RaiseError => 1, PrintError => 0} ); eval { $dbh->get_info(18) } || 0; }; my ($maj_srv_ver) = $srv_ver =~ /^(\d+)/; my $extra_graphics_data_types = { graphic => { data_type => 'graphic', size => 1 }, 'graphic(3)' => { data_type => 'graphic', size => 3 }, 'vargraphic(3)' => { data_type => 'vargraphic', size => 3 }, 'long vargraphic' => { data_type => 'long vargraphic' }, 'dbclob' => { data_type => 'dbclob' }, }; dbixcsl_common_tests->new( vendor => 'DB2', auto_inc_pk => 'INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY', dsn => $dsn, user => $user, password => $password, null => '', preserve_case_mode_is_exclusive => 1, quote_char => '"', default_is_deferrable => 1, default_on_clause => 'NO ACTION', data_types => { 
# http://publib.boulder.ibm.com/infocenter/db2luw/v8/index.jsp?topic=/com.ibm.db2.udb.doc/admin/r0008483.htm # # Numeric Types smallint => { data_type => 'smallint' }, integer => { data_type => 'integer' }, 'int' => { data_type => 'integer' }, real => { data_type => 'real' }, 'double precision' => { data_type => 'double precision' }, double => { data_type => 'double precision' }, float => { data_type => 'double precision' }, 'float(24)' => { data_type => 'real' }, 'float(25)' => { data_type => 'double precision' }, 'float(53)' => { data_type => 'double precision' }, numeric => { data_type => 'numeric' }, decimal => { data_type => 'numeric' }, 'numeric(6,3)' => { data_type => 'numeric', size => [6,3] }, 'decimal(6,3)' => { data_type => 'numeric', size => [6,3] }, # Character String Types char => { data_type => 'char', size => 1 }, 'char(3)' => { data_type => 'char', size => 3 }, 'varchar(3)' => { data_type => 'varchar', size => 3 }, 'long varchar' => { data_type => 'long varchar' }, 'clob' => { data_type => 'clob' }, # Graphic String Types (double-byte strings) ($maj_srv_ver >= 9) ? 
(%$extra_graphics_data_types) : (), # Binary String Types 'char for bit data'=> { data_type => 'binary', size => 1, original => { data_type => 'char for bit data' } }, 'char(3) for bit data' => { data_type => 'binary', size => 3, original => { data_type => 'char for bit data' } }, 'varchar(3) for bit data' => { data_type => 'varbinary', size => 3, original => { data_type => 'varchar for bit data' } }, 'long varchar for bit data' => { data_type => 'blob', original => { data_type => 'long varchar for bit data' } }, blob => { data_type => 'blob' }, # DateTime Types 'date' => { data_type => 'date' }, 'date default current date' => { data_type => 'date', default_value => \'current_timestamp', original => { default_value => \'current date' } }, 'time' => { data_type => 'time' }, 'time default current time' => { data_type => 'time', default_value => \'current_timestamp', original => { default_value => \'current time' } }, timestamp => { data_type => 'timestamp' }, 'timestamp default current timestamp' => { data_type => 'timestamp', default_value => \'current_timestamp', original => { default_value => \'current timestamp' } }, # DATALINK Type # XXX I don't know how to make these # datalink => { data_type => 'datalink' }, }, extra => { create => [ # 4 through 8 are used for the multi-schema tests q{ create table db2_loader_test9 ( id int generated by default as identity not null primary key ) }, q{ create table db2_loader_test10 ( id int generated by default as identity not null primary key, nine_id int, foreign key (nine_id) references db2_loader_test9(id) on delete set null on update restrict ) }, ], drop => [ qw/db2_loader_test9 db2_loader_test10/ ], count => 4 + 30 * 2, run => sub { $schema = shift; # test on delete/update fk clause introspection ok ((my $rel_info = $schema->source('Db2LoaderTest10')->relationship_info('nine')), 'got rel info'); is $rel_info->{attrs}{on_delete}, 'SET NULL', 'ON DELETE clause introspected correctly'; is $rel_info->{attrs}{on_update}, 
'RESTRICT', 'ON UPDATE clause introspected correctly'; is $rel_info->{attrs}{is_deferrable}, 1, 'DEFERRABLE defaults to 1'; SKIP: { my $dbh = $schema->storage->dbh; try { $dbh->do('CREATE SCHEMA "dbicsl-test"'); } catch { $schemas_created = 0; skip "no CREATE SCHEMA privileges", 30 * 2; }; $dbh->do(<<"EOF"); CREATE TABLE "dbicsl-test".db2_loader_test4 ( id INT GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100) ) EOF $dbh->do(<<"EOF"); CREATE TABLE "dbicsl-test".db2_loader_test5 ( id INT GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100), four_id INTEGER NOT NULL, CONSTRAINT loader_test5_uniq UNIQUE (four_id), FOREIGN KEY (four_id) REFERENCES "dbicsl-test".db2_loader_test4 (id) ) EOF $dbh->do('CREATE SCHEMA "dbicsl.test"'); $dbh->do(<<"EOF"); CREATE TABLE "dbicsl.test".db2_loader_test5 ( pk INT GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100), four_id INTEGER NOT NULL, CONSTRAINT loader_test5_uniq UNIQUE (four_id), FOREIGN KEY (four_id) REFERENCES "dbicsl-test".db2_loader_test4 (id) ) EOF $dbh->do(<<"EOF"); CREATE TABLE "dbicsl.test".db2_loader_test6 ( id INT GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100), db2_loader_test4_id INTEGER, FOREIGN KEY (db2_loader_test4_id) REFERENCES "dbicsl-test".db2_loader_test4 (id) ) EOF $dbh->do(<<"EOF"); CREATE TABLE "dbicsl.test".db2_loader_test7 ( id INT GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100), six_id INTEGER NOT NULL UNIQUE, FOREIGN KEY (six_id) REFERENCES "dbicsl.test".db2_loader_test6 (id) ) EOF $dbh->do(<<"EOF"); CREATE TABLE "dbicsl-test".db2_loader_test8 ( id INT GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY, value VARCHAR(100), db2_loader_test7_id INTEGER, FOREIGN KEY (db2_loader_test7_id) REFERENCES "dbicsl.test".db2_loader_test7 (id) ) EOF $schemas_created = 1; foreach my $db_schema (['dbicsl-test', 'dbicsl.test'], '%') { lives_and { rmtree EXTRA_DUMP_DIR; my @warns; local 
$SIG{__WARN__} = sub { push @warns, $_[0] unless $_[0] =~ /\bcollides\b/; }; make_schema_at( 'DB2MultiSchema', { naming => 'current', db_schema => $db_schema, dump_directory => EXTRA_DUMP_DIR, quiet => 1, }, [ $dsn, $user, $password ], ); diag join "\n", @warns if @warns; is @warns, 0; } 'dumped schema for "dbicsl-test" and "dbicsl.test" schemas with no warnings'; my ($test_schema, $rsrc, $rs, $row, %uniqs, $rel_info); lives_and { ok $test_schema = DB2MultiSchema->connect($dsn, $user, $password); } 'connected test schema'; lives_and { ok $rsrc = $test_schema->source('Db2LoaderTest4'); } 'got source for table in schema name with dash'; is try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in schema name with dash'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in schema name with dash'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in schema name with dash'; lives_and { ok $rs = $test_schema->resultset('Db2LoaderTest4'); } 'got resultset for table in schema name with dash'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in schema name with dash'; $rel_info = try { $rsrc->relationship_info('dbicsl_dash_test_db2_loader_test5') }; is_deeply $rel_info->{cond}, { 'foreign.four_id' => 'self.id' }, 'relationship in schema name with dash'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in schema name with dash'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in schema name with dash'; lives_and { ok $rsrc = $test_schema->source('DbicslDashTestDb2LoaderTest5'); } 'got source for table in schema name with dash'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in schema name with dash'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['four_id'], 'correct unique constraint in schema name with dash'); lives_and { ok $rsrc = $test_schema->source('Db2LoaderTest6'); } 'got source for table in schema name with dot'; is 
try { $rsrc->column_info('id')->{is_auto_increment} }, 1, 'column in schema name with dot introspected correctly'; is try { $rsrc->column_info('value')->{data_type} }, 'varchar', 'column in schema name with dot introspected correctly'; is try { $rsrc->column_info('value')->{size} }, 100, 'column in schema name with dot introspected correctly'; lives_and { ok $rs = $test_schema->resultset('Db2LoaderTest6'); } 'got resultset for table in schema name with dot'; lives_and { ok $row = $rs->create({ value => 'foo' }); } 'executed SQL on table in schema name with dot'; $rel_info = try { $rsrc->relationship_info('db2_loader_test7') }; is_deeply $rel_info->{cond}, { 'foreign.six_id' => 'self.id' }, 'relationship in schema name with dot'; is $rel_info->{attrs}{accessor}, 'single', 'relationship in schema name with dot'; is $rel_info->{attrs}{join_type}, 'LEFT', 'relationship in schema name with dot'; lives_and { ok $rsrc = $test_schema->source('Db2LoaderTest7'); } 'got source for table in schema name with dot'; %uniqs = try { $rsrc->unique_constraints }; is keys %uniqs, 2, 'got unique and primary constraint in schema name with dot'; delete $uniqs{primary}; is_deeply ((values %uniqs)[0], ['six_id'], 'correct unique constraint in schema name with dot'); lives_and { ok $test_schema->source('Db2LoaderTest6') ->has_relationship('db2_loader_test4'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('Db2LoaderTest4') ->has_relationship('db2_loader_test6s'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('Db2LoaderTest8') ->has_relationship('db2_loader_test7'); } 'cross-schema relationship in multi-db_schema'; lives_and { ok $test_schema->source('Db2LoaderTest7') ->has_relationship('db2_loader_test8s'); } 'cross-schema relationship in multi-db_schema'; } } }, }, )->run_tests(); END { if (not $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}) { if ($schemas_created && (my $dbh = try { $schema->storage->dbh })) { foreach my 
$table ('"dbicsl-test".db2_loader_test8', '"dbicsl.test".db2_loader_test7', '"dbicsl.test".db2_loader_test6', '"dbicsl-test".db2_loader_test5', '"dbicsl.test".db2_loader_test5', '"dbicsl-test".db2_loader_test4') { try { $dbh->do("DROP TABLE $table"); } catch { diag "Error dropping table: $_"; }; } foreach my $db_schema (qw/dbicsl-test dbicsl.test/) { try { $dbh->do(qq{DROP SCHEMA "$db_schema" RESTRICT}); } catch { diag "Error dropping test schema $db_schema: $_"; }; } } rmtree EXTRA_DUMP_DIR; } } # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/t/50rt59849.t0000644000175000017500000000741412542756321017322 0ustar ilmariilmari# test for loading additional methods from file-defined packages # by Mark Hedges ( hedges -at| scriptdolphin.com ) use strict; use warnings; use Test::More tests => 7 * 5; use Test::Exception; use lib 't/lib'; use make_dbictest_db; use DBIx::Class::Schema::Loader; $ENV{SCHEMA_LOADER_BACKCOMPAT} = 1; # In the first test run, then, Foo should be a DBICTestMethods::Namespaces::Schema::Result::Foo run_test_sequence( testname => "naming => 'current'", schema_class => 'DBICTestMethods::Namespaces::Schema', foo_class => 'DBICTestMethods::Namespaces::Schema::Result::Foo', schema_opts => { naming => 'current', }, ); # In the second test run with use_namespaces => 0 (backcompat), Foo should be a DBICTestMethods::Backcompat::Schema run_test_sequence( testname => "naming => 'current', use_namespaces => 0", schema_class => 'DBICTestMethods::Backcompat::Schema', foo_class => 'DBICTestMethods::Backcompat::Schema::Foo', schema_opts => { naming => 'current', use_namespaces => 0, }, ); # In the third test, with use_namespaces => 1, Foo gets the explicit Result class again run_test_sequence( testname => "naming => 'current', use_namespaces => 1", schema_class => 'DBICTestMethods::Namespaces::Schema', foo_class => 'DBICTestMethods::Namespaces::Schema::Result::Foo', schema_opts => { naming => 'current', use_namespaces => 1, }, ); # try it in full 
backcompat 0.04006 mode with no schema options run_test_sequence( testname => "no naming or namespaces options (0.04006 mode)", schema_class => 'DBICTestMethods::Backcompat::Schema', foo_class => 'DBICTestMethods::Backcompat::Schema::Foo', schema_opts => { }, ); # try it in backcompat mode (no naming option) but with use_namespaces => 1 run_test_sequence( testname => "no naming, but with use_namespaces options (0.04006 mode)", schema_class => 'DBICTestMethods::Namespaces::Schema', foo_class => 'DBICTestMethods::Namespaces::Schema::Result::Foo', schema_opts => { use_namespaces => 1, }, ); sub run_test_sequence { my %p = @_; die "specify a $_ test param" for grep !$p{$_}, qw( testname schema_opts schema_class foo_class ); my $schema; lives_ok { $schema = make_schema_with(%p) } "($p{testname}) get schema"; SKIP: { skip 'no point in checking if schema could not be connected', 6 unless defined $schema; # well, if that worked, try to get a ResultSet my $foo_rs; lives_ok { $foo_rs = $schema->resultset('Foo')->search(); } "($p{testname}) get a ResultSet for Foo"; # get a foo my $foo; lives_ok { $foo = $foo_rs->first(); } "($p{testname}) get the first foo"; ok(defined $foo, "($p{testname}) \$foo is defined"); SKIP: { skip 'foo is not defined', 3 unless defined $foo; isa_ok $foo, $p{foo_class}; # call the file-defined method my $biz; lives_ok { $biz = $foo->biz(); } "($p{testname}) call the file-defined Foo->biz method"; SKIP: { skip 'no point in checking value if method was not found', 1 unless defined $biz; ok( $biz eq 'foo bar biz baz boz noz schnozz', "($p{testname}) biz() method returns correct string" ); } } } } sub make_schema_with { my %p = @_; return DBIx::Class::Schema::Loader::make_schema_at( $p{schema_class}, $p{schema_opts}, [ $make_dbictest_db::dsn ], ); } DBIx-Class-Schema-Loader-0.07045/t/backcompat/0000755000175000017500000000000012650450355020014 5ustar 
ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/0000755000175000017500000000000012650450355020623 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/23dumpmore.t0000644000175000017500000001724212542756321023015 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_backcompat'; use strict; use warnings; use Test::More; use lib qw(t/backcompat/0.04006/lib); use File::Path; use make_dbictest_db; use dbixcsl_test_dir qw/$tdir/; use Class::Unload (); require DBIx::Class::Schema::Loader; my $DUMP_PATH = "$tdir/dump"; sub do_dump_test { my %tdata = @_; my $schema_class = $tdata{classname}; no strict 'refs'; @{$schema_class . '::ISA'} = ('DBIx::Class::Schema::Loader'); $tdata{options}{use_namespaces} ||= 0; $schema_class->loader_options(dump_directory => $DUMP_PATH, %{$tdata{options}}); my @warns; eval { local $SIG{__WARN__} = sub { push(@warns, @_) }; $schema_class->connect($make_dbictest_db::dsn); }; my $err = $@; Class::Unload->unload($schema_class); is($err, $tdata{error}); my $check_warns = $tdata{warnings}; is(@warns, @$check_warns); for(my $i = 0; $i <= $#$check_warns; $i++) { like($warns[$i], $check_warns->[$i]); } my $file_regexes = $tdata{regexes}; my $file_neg_regexes = $tdata{neg_regexes} || {}; my $schema_regexes = delete $file_regexes->{schema}; my $schema_path = $DUMP_PATH . '/' . $schema_class; $schema_path =~ s{::}{/}g; dump_file_like($schema_path . '.pm', @$schema_regexes); foreach my $src (keys %$file_regexes) { my $src_file = $schema_path . '/' . $src . '.pm'; dump_file_like($src_file, @{$file_regexes->{$src}}); } foreach my $src (keys %$file_neg_regexes) { my $src_file = $schema_path . '/' . $src . 
'.pm'; dump_file_not_like($src_file, @{$file_neg_regexes->{$src}}); } } sub dump_file_like { my $path = shift; open(my $dumpfh, '<', $path) or die "Failed to open '$path': $!"; my $contents = do { local $/; <$dumpfh>; }; close($dumpfh); like($contents, $_) for @_; } sub dump_file_not_like { my $path = shift; open(my $dumpfh, '<', $path) or die "Failed to open '$path': $!"; my $contents = do { local $/; <$dumpfh>; }; close($dumpfh); unlike($contents, $_) for @_; } sub append_to_class { my ($class, $string) = @_; $class =~ s{::}{/}g; $class = $DUMP_PATH . '/' . $class . '.pm'; open(my $appendfh, '>>', $class) or die "Failed to open '$class' for append: $!"; print $appendfh $string; close($appendfh); } rmtree($DUMP_PATH, 1, 1); do_dump_test( classname => 'DBICTest::DumpMore::1', options => { }, error => '', warnings => [ qr/Dumping manual schema for DBICTest::DumpMore::1 to directory /, qr/Schema dump completed/, ], regexes => { schema => [ qr/package DBICTest::DumpMore::1;/, qr/->load_classes/, ], Foo => [ qr/package DBICTest::DumpMore::1::Foo;/, qr/->set_primary_key/, qr/1;\n$/, ], Bar => [ qr/package DBICTest::DumpMore::1::Bar;/, qr/->set_primary_key/, qr/1;\n$/, ], }, ); append_to_class('DBICTest::DumpMore::1::Foo',q{# XXX This is my custom content XXX}); do_dump_test( classname => 'DBICTest::DumpMore::1', options => { }, error => '', warnings => [ qr/Dumping manual schema for DBICTest::DumpMore::1 to directory /, qr/Schema dump completed/, ], regexes => { schema => [ qr/package DBICTest::DumpMore::1;/, qr/->load_classes/, ], Foo => [ qr/package DBICTest::DumpMore::1::Foo;/, qr/->set_primary_key/, qr/1;\n# XXX This is my custom content XXX/, ], Bar => [ qr/package DBICTest::DumpMore::1::Bar;/, qr/->set_primary_key/, qr/1;\n$/, ], }, ); do_dump_test( classname => 'DBICTest::DumpMore::1', options => { really_erase_my_files => 1 }, error => '', warnings => [ qr/Dumping manual schema for DBICTest::DumpMore::1 to directory /, qr/Deleting existing file /, qr/Deleting 
existing file /, qr/Deleting existing file /, qr/Schema dump completed/, ], regexes => { schema => [ qr/package DBICTest::DumpMore::1;/, qr/->load_classes/, ], Foo => [ qr/package DBICTest::DumpMore::1::Foo;/, qr/->set_primary_key/, qr/1;\n$/, ], Bar => [ qr/package DBICTest::DumpMore::1::Bar;/, qr/->set_primary_key/, qr/1;\n$/, ], }, neg_regexes => { Foo => [ qr/# XXX This is my custom content XXX/, ], }, ); do_dump_test( classname => 'DBICTest::DumpMore::1', options => { use_namespaces => 1 }, error => '', warnings => [ qr/Dumping manual schema for DBICTest::DumpMore::1 to directory /, qr/Schema dump completed/, ], regexes => { schema => [ qr/package DBICTest::DumpMore::1;/, qr/->load_namespaces/, ], 'Result/Foo' => [ qr/package DBICTest::DumpMore::1::Result::Foo;/, qr/->set_primary_key/, qr/1;\n$/, ], 'Result/Bar' => [ qr/package DBICTest::DumpMore::1::Result::Bar;/, qr/->set_primary_key/, qr/1;\n$/, ], }, ); do_dump_test( classname => 'DBICTest::DumpMore::1', options => { use_namespaces => 1, result_namespace => 'Res', resultset_namespace => 'RSet', default_resultset_class => 'RSetBase', }, error => '', warnings => [ qr/Dumping manual schema for DBICTest::DumpMore::1 to directory /, qr/Schema dump completed/, ], regexes => { schema => [ qr/package DBICTest::DumpMore::1;/, qr/->load_namespaces/, qr/result_namespace => "Res"/, qr/resultset_namespace => "RSet"/, qr/default_resultset_class => "RSetBase"/, ], 'Res/Foo' => [ qr/package DBICTest::DumpMore::1::Res::Foo;/, qr/->set_primary_key/, qr/1;\n$/, ], 'Res/Bar' => [ qr/package DBICTest::DumpMore::1::Res::Bar;/, qr/->set_primary_key/, qr/1;\n$/, ], }, ); do_dump_test( classname => 'DBICTest::DumpMore::1', options => { use_namespaces => 1, result_namespace => '+DBICTest::DumpMore::1::Res', resultset_namespace => 'RSet', default_resultset_class => 'RSetBase', result_base_class => 'My::ResultBaseClass', schema_base_class => 'My::SchemaBaseClass', }, error => '', warnings => [ qr/Dumping manual schema for 
DBICTest::DumpMore::1 to directory /, qr/Schema dump completed/, ], regexes => { schema => [ qr/package DBICTest::DumpMore::1;/, qr/->load_namespaces/, qr/result_namespace => "\+DBICTest::DumpMore::1::Res"/, qr/resultset_namespace => "RSet"/, qr/default_resultset_class => "RSetBase"/, qr/use base 'My::SchemaBaseClass'/, ], 'Res/Foo' => [ qr/package DBICTest::DumpMore::1::Res::Foo;/, qr/use base 'My::ResultBaseClass'/, qr/->set_primary_key/, qr/1;\n$/, ], 'Res/Bar' => [ qr/package DBICTest::DumpMore::1::Res::Bar;/, qr/use base 'My::ResultBaseClass'/, qr/->set_primary_key/, qr/1;\n$/, ], }, ); done_testing; END { rmtree($DUMP_PATH, 1, 1) if $ENV{SCHEMA_LOADER_TESTS_BACKCOMPAT}; } DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/10sqlite_common.t0000644000175000017500000000122712542756321024026 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_backcompat'; use strict; use warnings; use lib qw(t/backcompat/0.04006/lib); use dbixcsl_common_tests; use dbixcsl_test_dir qw/$tdir/; use Test::More; eval { require DBD::SQLite }; my $class = $@ ? 
'SQLite2' : 'SQLite'; dbixcsl_common_tests->new( vendor => 'SQLite', auto_inc_pk => 'INTEGER NOT NULL PRIMARY KEY', dsn => "dbi:$class:dbname=$tdir/sqlite_test.db", user => '', password => '', )->run_tests; END { unlink "$tdir/sqlite_test.db" if $ENV{SCHEMA_LOADER_TESTS_BACKCOMPAT}; } DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/22dump.t0000644000175000017500000000360012542756321022122 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_backcompat'; use strict; use warnings; use Test::More; use lib qw(t/backcompat/0.04006/lib); use File::Path; use make_dbictest_db; use dbixcsl_test_dir qw/$tdir/; my $dump_path = "$tdir/dump"; local $SIG{__WARN__} = sub { warn @_ unless $_[0] =~ /^Dumping manual schema|really_erase_my_files|^Schema dump complete/; }; { package DBICTest::Schema::1; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options( dump_directory => $dump_path, ); } { package DBICTest::Schema::2; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options( dump_directory => $dump_path, really_erase_my_files => 1, ); } plan tests => 5; rmtree($dump_path, 1, 1); eval { DBICTest::Schema::1->connect($make_dbictest_db::dsn) }; ok(!$@, 'no death with dump_directory set') or diag "Dump failed: $@"; DBICTest::Schema::1->_loader_invoked(undef); SKIP: { my @warnings_regexes = ( qr|Dumping manual schema|, qr|Schema dump completed|, ); skip "ActiveState perl produces additional warnings", scalar @warnings_regexes if ($^O eq 'MSWin32'); my @warn_output; { local $SIG{__WARN__} = sub { push(@warn_output, @_) }; DBICTest::Schema::1->connect($make_dbictest_db::dsn); } like(shift @warn_output, $_) foreach (@warnings_regexes); rmtree($dump_path, 1, 1); } eval { DBICTest::Schema::2->connect($make_dbictest_db::dsn) }; ok(!$@, 'no death with dump_directory set (overwrite1)') or diag "Dump failed: $@"; DBICTest::Schema::2->_loader_invoked(undef); eval { 
DBICTest::Schema::2->connect($make_dbictest_db::dsn) }; ok(!$@, 'no death with dump_directory set (overwrite2)') or diag "Dump failed: $@"; END { rmtree($dump_path, 1, 1) if $ENV{SCHEMA_LOADER_TESTS_BACKCOMPAT}; } DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/14ora_common.t0000644000175000017500000000221212542756321023305 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => qw(test_backcompat test_rdbms_oracle); use strict; use warnings; use lib qw(t/backcompat/0.04006/lib); use dbixcsl_common_tests; use Test::More; my $dsn = $ENV{DBICTEST_ORA_DSN} || ''; my $user = $ENV{DBICTEST_ORA_USER} || ''; my $password = $ENV{DBICTEST_ORA_PASS} || ''; dbixcsl_common_tests->new( vendor => 'Oracle', auto_inc_pk => 'INTEGER NOT NULL PRIMARY KEY', auto_inc_cb => sub { my ($table, $col) = @_; return ( qq{ CREATE SEQUENCE ${table}_${col}_seq START WITH 1 INCREMENT BY 1}, qq{ CREATE OR REPLACE TRIGGER ${table}_${col}_trigger BEFORE INSERT ON ${table} FOR EACH ROW BEGIN SELECT ${table}_${col}_seq.nextval INTO :NEW.${col} FROM dual; END; } ); }, auto_inc_drop_cb => sub { my ($table, $col) = @_; return qq{ DROP SEQUENCE ${table}_${col}_seq }; }, dsn => $dsn, user => $user, password => $password, )->run_tests(); DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/13db2_common.t0000644000175000017500000000120412542756321023172 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => qw(test_backcompat test_rdbms_db2); use strict; use warnings; use lib qw(t/backcompat/0.04006/lib); use dbixcsl_common_tests; use Test::More; my $dsn = $ENV{DBICTEST_DB2_DSN} || ''; my $user = $ENV{DBICTEST_DB2_USER} || ''; my $password = $ENV{DBICTEST_DB2_PASS} || ''; dbixcsl_common_tests->new( vendor => 'DB2', auto_inc_pk => 'INTEGER GENERATED BY DEFAULT AS IDENTITY NOT NULL PRIMARY KEY', dsn => $dsn, user => $user, password => $password, db_schema => uc $user, )->run_tests(); 
DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/20invocations.t0000644000175000017500000000652112542756321023514 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_backcompat'; use strict; use warnings; use Test::More; use lib qw(t/backcompat/0.04006/lib); use make_dbictest_db; local $SIG{__WARN__} = sub { warn @_ unless $_[0] =~ /Dynamic schema|really_erase_my_files/; }; # Takes a $schema as input, runs 4 basic tests sub test_schema { my ($testname, $schema) = @_; $schema = $schema->clone if !ref $schema; isa_ok($schema, 'DBIx::Class::Schema', $testname); my $foo_rs = $schema->resultset('Bar')->search({ barid => 3})->search_related('fooref'); isa_ok($foo_rs, 'DBIx::Class::ResultSet', $testname); my $foo_first = $foo_rs->first; like(ref $foo_first, qr/DBICTest::Schema::\d+::Foo/, $testname); my $foo_first_text = $foo_first->footext; is($foo_first_text, 'Foo record associated with the Bar with barid 3'); } my @invocations = ( 'hardcode' => sub { package DBICTest::Schema::5; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->connection($make_dbictest_db::dsn); __PACKAGE__; }, 'normal' => sub { package DBICTest::Schema::6; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options(); __PACKAGE__->connect($make_dbictest_db::dsn); }, 'make_schema_at' => sub { use DBIx::Class::Schema::Loader qw/ make_schema_at /; make_schema_at( 'DBICTest::Schema::7', { really_erase_my_files => 1 }, [ $make_dbictest_db::dsn ], ); DBICTest::Schema::7->clone; }, 'embedded_options' => sub { package DBICTest::Schema::8; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->connect( $make_dbictest_db::dsn, { loader_options => { really_erase_my_files => 1 } } ); }, 'embedded_options_in_attrs' => sub { package DBICTest::Schema::9; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->connect( $make_dbictest_db::dsn, undef, undef, { AutoCommit => 1, loader_options => { really_erase_my_files => 1 } } ); }, 
'embedded_options_make_schema_at' => sub { use DBIx::Class::Schema::Loader qw/ make_schema_at /; make_schema_at( 'DBICTest::Schema::10', { }, [ $make_dbictest_db::dsn, { loader_options => { really_erase_my_files => 1 } }, ], ); "DBICTest::Schema::10"; }, 'almost_embedded' => sub { package DBICTest::Schema::11; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options( really_erase_my_files => 1 ); __PACKAGE__->connect( $make_dbictest_db::dsn, undef, undef, { AutoCommit => 1 } ); }, 'make_schema_at_explicit' => sub { use DBIx::Class::Schema::Loader; DBIx::Class::Schema::Loader::make_schema_at( 'DBICTest::Schema::12', { really_erase_my_files => 1 }, [ $make_dbictest_db::dsn ], ); DBICTest::Schema::12->clone; } ); # 4 tests per k/v pair plan tests => 2 * @invocations; while(@invocations >= 2) { my $style = shift @invocations; my $subref = shift @invocations; test_schema($style, &$subref); } DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/21misc_fatal.t0000644000175000017500000000142712542756321023263 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => 'test_backcompat'; use strict; use warnings; use Test::More; use lib qw(t/backcompat/0.04006/lib); use make_dbictest_db; { $INC{'DBIx/Class/Storage/xyzzy.pm'} = 1; package DBIx::Class::Storage::xyzzy; use base qw/ DBIx::Class::Storage /; sub new { bless {}, shift } sub connect_info { @_ } package DBICTest::Schema; use base qw/ DBIx::Class::Schema::Loader /; __PACKAGE__->loader_options( really_erase_my_files => 1 ); __PACKAGE__->storage_type( '::xyzzy' ); } plan tests => 1; eval { DBICTest::Schema->connect($make_dbictest_db::dsn) }; like( $@, qr/Could not load loader_class "DBIx::Class::Schema::Loader::xyzzy": /, 'Bad storage type dies correctly' ); DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/11mysql_common.t0000644000175000017500000000167612542756321023703 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without 
=> qw(test_backcompat test_rdbms_mysql); use strict; use warnings; use lib qw(t/backcompat/0.04006/lib); use dbixcsl_common_tests; use Test::More; my $dsn = $ENV{DBICTEST_MYSQL_DSN} || ''; my $user = $ENV{DBICTEST_MYSQL_USER} || ''; my $password = $ENV{DBICTEST_MYSQL_PASS} || ''; my $test_innodb = $ENV{DBICTEST_MYSQL_INNODB} || 0; my $skip_rels_msg = 'You need to set the DBICTEST_MYSQL_INNODB environment variable to test relationships'; dbixcsl_common_tests->new( vendor => 'Mysql', auto_inc_pk => 'INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT', innodb => $test_innodb ? q{Engine=InnoDB} : 0, dsn => $dsn, user => $user, password => $password, skip_rels => $test_innodb ? 0 : $skip_rels_msg, no_inline_rels => 1, no_implicit_rels => 1, )->run_tests(); DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/0000755000175000017500000000000012650450355021371 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/TestAdditionalBase.pm0000644000175000017500000000042512542756321025435 0ustar ilmariilmaripackage TestAdditionalBase; use strict; use warnings; sub test_additional_base { return "test_additional_base"; } sub test_additional_base_override { return "test_additional_base_override"; } sub test_additional_base_additional { return TestAdditional->test_additional; } 1; DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/dbixcsl_common_tests.pm0000644000175000017500000010155512542756321026162 0ustar ilmariilmaripackage dbixcsl_common_tests; use strict; use warnings; use Test::More; use DBIx::Class::Schema::Loader; use DBI; sub new { my $class = shift; my $self; if( ref($_[0]) eq 'HASH') { my $args = shift; $self = { (%$args) }; } else { $self = { @_ }; } # Only MySQL uses this $self->{innodb} ||= ''; $self->{verbose} = $ENV{TEST_VERBOSE} || 0; return bless $self => $class; } sub skip_tests { my ($self, $why) = @_; plan skip_all => $why; } sub _monikerize { my $name = shift; return 'LoaderTest2X' if $name =~ /^loader_test2$/i; return undef; } sub 
run_tests { my $self = shift; plan tests => 97; $self->create(); my $schema_class = 'DBIXCSL_Test::Schema'; my $debug = ($self->{verbose} > 1) ? 1 : 0; my @connect_info = ( $self->{dsn}, $self->{user}, $self->{password} ); my %loader_opts = ( constraint => qr/^(?:\S+\.)?loader_test[0-9]+$/i, relationships => 1, additional_classes => 'TestAdditional', additional_base_classes => 'TestAdditionalBase', left_base_classes => [ qw/TestLeftBase/ ], components => [ qw/TestComponent/ ], inflect_plural => { loader_test4 => 'loader_test4zes' }, inflect_singular => { fkid => 'fkid_singular' }, moniker_map => \&_monikerize, debug => $debug, ); $loader_opts{db_schema} = $self->{db_schema} if $self->{db_schema}; { my @loader_warnings; local $SIG{__WARN__} = sub { push(@loader_warnings, $_[0]); }; eval qq{ package $schema_class; use base qw/DBIx::Class::Schema::Loader/; __PACKAGE__->loader_options(\%loader_opts); __PACKAGE__->connection(\@connect_info); }; ok(!$@, "Loader initialization") or diag $@; my $warn_count = 0; $warn_count++ if grep /ResultSetManager/, @loader_warnings; $warn_count++ if grep /Dynamic schema detected/, @loader_warnings; $warn_count++ for grep /^Bad table or view/, @loader_warnings; is(scalar(@loader_warnings), $warn_count) or diag "Did not get the expected 0 warnings. Warnings are: " . join('',@loader_warnings); } my $conn = $schema_class->clone; my $monikers = {}; my $classes = {}; foreach my $source_name ($schema_class->sources) { my $table_name = $schema_class->loader->moniker_to_table->{$source_name}; my $result_class = $schema_class->source($source_name)->result_class; $monikers->{$table_name} = $source_name; $classes->{$table_name} = $result_class; # some DBs (Firebird, Oracle) uppercase everything $monikers->{lc $table_name} = $source_name; $classes->{lc $table_name} = $result_class; } # for debugging... 
# { # mkdir '/tmp/HLAGH'; # $conn->_loader->{dump_directory} = '/tmp/HLAGH'; # $conn->_loader->_dump_to_dir(values %$classes); # } my $moniker1 = $monikers->{loader_test1}; my $class1 = $classes->{loader_test1}; my $rsobj1 = $conn->resultset($moniker1); my $moniker2 = $monikers->{loader_test2}; my $class2 = $classes->{loader_test2}; my $rsobj2 = $conn->resultset($moniker2); my $moniker23 = $monikers->{LOADER_TEST23}; my $class23 = $classes->{LOADER_TEST23}; my $rsobj23 = $conn->resultset($moniker1); my $moniker24 = $monikers->{LoAdEr_test24}; my $class24 = $classes->{LoAdEr_test24}; my $rsobj24 = $conn->resultset($moniker2); isa_ok( $rsobj1, "DBIx::Class::ResultSet" ); isa_ok( $rsobj2, "DBIx::Class::ResultSet" ); isa_ok( $rsobj23, "DBIx::Class::ResultSet" ); isa_ok( $rsobj24, "DBIx::Class::ResultSet" ); my @columns_lt2 = $class2->columns; is($columns_lt2[0], 'id', "Column Ordering 0"); is($columns_lt2[1], 'dat', "Column Ordering 1"); is($columns_lt2[2], 'dat2', "Column Ordering 2"); my %uniq1 = $class1->unique_constraints; my $uniq1_test = 0; foreach my $ucname (keys %uniq1) { my $cols_arrayref = $uniq1{$ucname}; if(@$cols_arrayref == 1 && $cols_arrayref->[0] eq 'dat') { $uniq1_test = 1; last; } } ok($uniq1_test) or diag "Unique constraints not working"; my %uniq2 = $class2->unique_constraints; my $uniq2_test = 0; foreach my $ucname (keys %uniq2) { my $cols_arrayref = $uniq2{$ucname}; if(@$cols_arrayref == 2 && $cols_arrayref->[0] eq 'dat2' && $cols_arrayref->[1] eq 'dat') { $uniq2_test = 2; last; } } ok($uniq2_test) or diag "Multi-col unique constraints not working"; is($moniker2, 'LoaderTest2X', "moniker_map testing"); { my ($skip_tab, $skip_tabo, $skip_taba, $skip_cmeth, $skip_tcomp, $skip_trscomp); can_ok( $class1, 'test_additional_base' ) or $skip_tab = 1; can_ok( $class1, 'test_additional_base_override' ) or $skip_tabo = 1; can_ok( $class1, 'test_additional_base_additional' ) or $skip_taba = 1; can_ok( $class1, 'dbix_class_testcomponent' ) or $skip_tcomp = 1; 
can_ok( $class1, 'loader_test1_classmeth' ) or $skip_cmeth = 1; SKIP: { skip "Pre-requisite test failed", 1 if $skip_tab; is( $class1->test_additional_base, "test_additional_base", "Additional Base method" ); } SKIP: { skip "Pre-requisite test failed", 1 if $skip_tabo; is( $class1->test_additional_base_override, "test_left_base_override", "Left Base overrides Additional Base method" ); } SKIP: { skip "Pre-requisite test failed", 1 if $skip_taba; is( $class1->test_additional_base_additional, "test_additional", "Additional Base can use Additional package method" ); } SKIP: { skip "Pre-requisite test failed", 1 if $skip_tcomp; is( $class1->dbix_class_testcomponent, 'dbix_class_testcomponent works' ); } SKIP: { skip "Pre-requisite test failed", 1 if $skip_cmeth; is( $class1->loader_test1_classmeth, 'all is well' ); } } my $obj = $rsobj1->find(1); is( $obj->id, 1 ); is( $obj->dat, "foo" ); is( $rsobj2->count, 4 ); my $saved_id; eval { my $new_obj1 = $rsobj1->create({ dat => 'newthing' }); $saved_id = $new_obj1->id; }; ok(!$@) or diag "Died during create new record using a PK::Auto key: $@"; ok($saved_id) or diag "Failed to get PK::Auto-generated id"; my $new_obj1 = $rsobj1->search({ dat => 'newthing' })->first; ok($new_obj1) or diag "Cannot find newly inserted PK::Auto record"; is($new_obj1->id, $saved_id); my ($obj2) = $rsobj2->search({ dat => 'bbb' })->first; is( $obj2->id, 2 ); SKIP: { skip $self->{skip_rels}, 63 if $self->{skip_rels}; my $moniker3 = $monikers->{loader_test3}; my $class3 = $classes->{loader_test3}; my $rsobj3 = $conn->resultset($moniker3); my $moniker4 = $monikers->{loader_test4}; my $class4 = $classes->{loader_test4}; my $rsobj4 = $conn->resultset($moniker4); my $moniker5 = $monikers->{loader_test5}; my $class5 = $classes->{loader_test5}; my $rsobj5 = $conn->resultset($moniker5); my $moniker6 = $monikers->{loader_test6}; my $class6 = $classes->{loader_test6}; my $rsobj6 = $conn->resultset($moniker6); my $moniker7 = $monikers->{loader_test7}; my 
$class7 = $classes->{loader_test7}; my $rsobj7 = $conn->resultset($moniker7); my $moniker8 = $monikers->{loader_test8}; my $class8 = $classes->{loader_test8}; my $rsobj8 = $conn->resultset($moniker8); my $moniker9 = $monikers->{loader_test9}; my $class9 = $classes->{loader_test9}; my $rsobj9 = $conn->resultset($moniker9); my $moniker16 = $monikers->{loader_test16}; my $class16 = $classes->{loader_test16}; my $rsobj16 = $conn->resultset($moniker16); my $moniker17 = $monikers->{loader_test17}; my $class17 = $classes->{loader_test17}; my $rsobj17 = $conn->resultset($moniker17); my $moniker18 = $monikers->{loader_test18}; my $class18 = $classes->{loader_test18}; my $rsobj18 = $conn->resultset($moniker18); my $moniker19 = $monikers->{loader_test19}; my $class19 = $classes->{loader_test19}; my $rsobj19 = $conn->resultset($moniker19); my $moniker20 = $monikers->{loader_test20}; my $class20 = $classes->{loader_test20}; my $rsobj20 = $conn->resultset($moniker20); my $moniker21 = $monikers->{loader_test21}; my $class21 = $classes->{loader_test21}; my $rsobj21 = $conn->resultset($moniker21); my $moniker22 = $monikers->{loader_test22}; my $class22 = $classes->{loader_test22}; my $rsobj22 = $conn->resultset($moniker22); my $moniker25 = $monikers->{loader_test25}; my $class25 = $classes->{loader_test25}; my $rsobj25 = $conn->resultset($moniker25); my $moniker26 = $monikers->{loader_test26}; my $class26 = $classes->{loader_test26}; my $rsobj26 = $conn->resultset($moniker26); isa_ok( $rsobj3, "DBIx::Class::ResultSet" ); isa_ok( $rsobj4, "DBIx::Class::ResultSet" ); isa_ok( $rsobj5, "DBIx::Class::ResultSet" ); isa_ok( $rsobj6, "DBIx::Class::ResultSet" ); isa_ok( $rsobj7, "DBIx::Class::ResultSet" ); isa_ok( $rsobj8, "DBIx::Class::ResultSet" ); isa_ok( $rsobj9, "DBIx::Class::ResultSet" ); isa_ok( $rsobj16, "DBIx::Class::ResultSet" ); isa_ok( $rsobj17, "DBIx::Class::ResultSet" ); isa_ok( $rsobj18, "DBIx::Class::ResultSet" ); isa_ok( $rsobj19, "DBIx::Class::ResultSet" ); isa_ok( 
$rsobj20, "DBIx::Class::ResultSet" ); isa_ok( $rsobj21, "DBIx::Class::ResultSet" ); isa_ok( $rsobj22, "DBIx::Class::ResultSet" ); isa_ok( $rsobj25, "DBIx::Class::ResultSet" ); isa_ok( $rsobj26, "DBIx::Class::ResultSet" ); # basic rel test my $obj4 = $rsobj4->find(123); isa_ok( $obj4->fkid_singular, $class3); my $obj3 = $rsobj3->find(1); my $rs_rel4 = $obj3->search_related('loader_test4zes'); isa_ok( $rs_rel4->first, $class4); # test that _id is not stripped and prepositions in rel names are # ignored ok ($rsobj4->result_source->has_relationship('loader_test5_to_ids'), "rel with preposition 'to' and _id pluralized backward-compatibly"); ok ($rsobj4->result_source->has_relationship('loader_test5_from_ids'), "rel with preposition 'from' and _id pluralized backward-compatibly"); # check that default relationship attributes are not applied in 0.04006 mode is $rsobj3->result_source->relationship_info('loader_test4zes')->{attrs}{cascade_delete}, 1, 'cascade_delete => 1 on has_many by default'; is $rsobj3->result_source->relationship_info('loader_test4zes')->{attrs}{cascade_copy}, 1, 'cascade_copy => 1 on has_many by default'; ok ((not exists $rsobj3->result_source->relationship_info('loader_test4zes')->{attrs}{on_delete}), 'has_many does not have on_delete'); ok ((not exists $rsobj3->result_source->relationship_info('loader_test4zes')->{attrs}{on_update}), 'has_many does not have on_update'); ok ((not exists $rsobj3->result_source->relationship_info('loader_test4zes')->{attrs}{is_deferrable}), 'has_many does not have is_deferrable'); isnt $rsobj4->result_source->relationship_info('fkid_singular')->{attrs}{on_delete}, 'CASCADE', "on_delete => 'CASCADE' not on belongs_to by default"; isnt $rsobj4->result_source->relationship_info('fkid_singular')->{attrs}{on_update}, 'CASCADE', "on_update => 'CASCADE' not on belongs_to by default"; isnt $rsobj4->result_source->relationship_info('fkid_singular')->{attrs}{is_deferrable}, 1, "is_deferrable => 1 not on belongs_to by default"; 
ok ((not exists $rsobj4->result_source->relationship_info('fkid_singular')->{attrs}{cascade_delete}), 'belongs_to does not have cascade_delete'); ok ((not exists $rsobj4->result_source->relationship_info('fkid_singular')->{attrs}{cascade_copy}), 'belongs_to does not have cascade_copy'); # find on multi-col pk my $obj5 = $rsobj5->find({id1 => 1, id2 => 1}); is( $obj5->id2, 1 ); # mulit-col fk def my $obj6 = $rsobj6->find(1); isa_ok( $obj6->loader_test2, $class2); isa_ok( $obj6->loader_test5, $class5); # fk that references a non-pk key (UNIQUE) my $obj8 = $rsobj8->find(1); isa_ok( $obj8->loader_test7, $class7); # test double-fk 17 ->-> 16 my $obj17 = $rsobj17->find(33); my $rs_rel16_one = $obj17->loader16_one; isa_ok($rs_rel16_one, $class16); is($rs_rel16_one->dat, 'y16'); my $rs_rel16_two = $obj17->loader16_two; isa_ok($rs_rel16_two, $class16); is($rs_rel16_two->dat, 'z16'); my $obj16 = $rsobj16->find(2); my $rs_rel17 = $obj16->search_related('loader_test17_loader16_ones'); isa_ok($rs_rel17->first, $class17); is($rs_rel17->first->id, 3); # XXX test m:m 18 <- 20 -> 19 # XXX test double-fk m:m 21 <- 22 -> 21 # test double multi-col fk 26 -> 25 my $obj26 = $rsobj26->find(33); my $rs_rel25_one = $obj26->loader_test25_id_rel1; isa_ok($rs_rel25_one, $class25); is($rs_rel25_one->dat, 'x25'); my $rs_rel25_two = $obj26->loader_test25_id_rel2; isa_ok($rs_rel25_two, $class25); is($rs_rel25_two->dat, 'y25'); my $obj25 = $rsobj25->find(3,42); my $rs_rel26 = $obj25->search_related('loader_test26_id_rel1s'); isa_ok($rs_rel26->first, $class26); is($rs_rel26->first->id, 3); # from Chisel's tests... 
SKIP: { if($self->{vendor} =~ /sqlite/i) { skip 'SQLite cannot do the advanced tests', 8; } my $moniker10 = $monikers->{loader_test10}; my $class10 = $classes->{loader_test10}; my $rsobj10 = $conn->resultset($moniker10); my $moniker11 = $monikers->{loader_test11}; my $class11 = $classes->{loader_test11}; my $rsobj11 = $conn->resultset($moniker11); isa_ok( $rsobj10, "DBIx::Class::ResultSet" ); isa_ok( $rsobj11, "DBIx::Class::ResultSet" ); my $obj10 = $rsobj10->create({ subject => 'xyzzy' }); $obj10->update(); ok( defined $obj10, '$obj10 is defined' ); my $obj11 = $rsobj11->create({ loader_test10 => $obj10->id() }); $obj11->update(); ok( defined $obj11, '$obj11 is defined' ); eval { my $obj10_2 = $obj11->loader_test10; $obj10_2->loader_test11( $obj11->id11() ); $obj10_2->update(); }; is($@, '', 'No errors after eval{}') or do { diag explain $rsobj10->result_source->relationship_info('loader_test11'); diag explain $rsobj11->result_source->relationship_info('loader_test10'); }; SKIP: { skip 'Previous eval block failed', 3 unless ($@ eq ''); my $results = $rsobj10->search({ subject => 'xyzzy' }); is( $results->count(), 1, 'One $rsobj10 returned from search' ); my $obj10_3 = $results->first(); isa_ok( $obj10_3, $class10 ); is( $obj10_3->loader_test11()->id(), $obj11->id(), 'found same $rsobj11 object we expected' ); } } SKIP: { skip 'This vendor cannot do inline relationship definitions', 6 if $self->{no_inline_rels}; my $moniker12 = $monikers->{loader_test12}; my $class12 = $classes->{loader_test12}; my $rsobj12 = $conn->resultset($moniker12); my $moniker13 = $monikers->{loader_test13}; my $class13 = $classes->{loader_test13}; my $rsobj13 = $conn->resultset($moniker13); isa_ok( $rsobj12, "DBIx::Class::ResultSet" ); isa_ok( $rsobj13, "DBIx::Class::ResultSet" ); my $obj13 = $rsobj13->find(1); isa_ok( $obj13->id, $class12 ); isa_ok( $obj13->loader_test12, $class12); isa_ok( $obj13->dat, $class12); my $obj12 = $rsobj12->find(1); isa_ok( $obj12->loader_test13_ids, 
"DBIx::Class::ResultSet" ); } SKIP: { skip 'This vendor cannot do out-of-line implicit rel defs', 3 if $self->{no_implicit_rels}; my $moniker14 = $monikers->{loader_test14}; my $class14 = $classes->{loader_test14}; my $rsobj14 = $conn->resultset($moniker14); my $moniker15 = $monikers->{loader_test15}; my $class15 = $classes->{loader_test15}; my $rsobj15 = $conn->resultset($moniker15); isa_ok( $rsobj14, "DBIx::Class::ResultSet" ); isa_ok( $rsobj15, "DBIx::Class::ResultSet" ); my $obj15 = $rsobj15->find(1); isa_ok( $obj15->loader_test14, $class14 ); } } # rescan test SKIP: { skip $self->{skip_rels}, 4 if $self->{skip_rels}; my @statements_rescan = ( qq{ CREATE TABLE loader_test30 ( id INTEGER NOT NULL PRIMARY KEY, loader_test2 INTEGER NOT NULL, FOREIGN KEY (loader_test2) REFERENCES loader_test2 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test30 (id,loader_test2) VALUES(123, 1) }, q{ INSERT INTO loader_test30 (id,loader_test2) VALUES(321, 2) }, ); { my $dbh = $self->dbconnect(1); $dbh->do($_) for @statements_rescan; $dbh->disconnect; } my @new = do { local $SIG{__WARN__} = sub {}; $conn->rescan; }; is(scalar(@new), 1); is($new[0], 'LoaderTest30'); my $rsobj30 = $conn->resultset('LoaderTest30'); isa_ok($rsobj30, 'DBIx::Class::ResultSet'); my $obj30 = $rsobj30->find(123); isa_ok( $obj30->loader_test2, $class2); } } sub dbconnect { my ($self, $complain) = @_; my $dbh = DBI->connect( $self->{dsn}, $self->{user}, $self->{password}, { RaiseError => $complain, PrintError => $complain, AutoCommit => 1, } ); if ($self->{dsn} =~ /^[^:]+:SQLite:/) { $dbh->do ('PRAGMA synchronous = OFF'); } elsif ($self->{dsn} =~ /^[^:]+:Pg:/) { $dbh->do ('SET client_min_messages=WARNING'); } die "Failed to connect to database: $DBI::errstr" if !$dbh; return $dbh; } sub create { my $self = shift; $self->{_created} = 1; my $make_auto_inc = $self->{auto_inc_cb} || sub {}; my @statements = ( qq{ CREATE TABLE loader_test1 ( id $self->{auto_inc_pk}, dat VARCHAR(32) NOT NULL UNIQUE ) 
$self->{innodb} }, $make_auto_inc->(qw/loader_test1 id/), q{ INSERT INTO loader_test1 (dat) VALUES('foo') }, q{ INSERT INTO loader_test1 (dat) VALUES('bar') }, q{ INSERT INTO loader_test1 (dat) VALUES('baz') }, qq{ CREATE TABLE loader_test2 ( id $self->{auto_inc_pk}, dat VARCHAR(32) NOT NULL, dat2 VARCHAR(32) NOT NULL, UNIQUE (dat2, dat) ) $self->{innodb} }, $make_auto_inc->(qw/loader_test2 id/), q{ INSERT INTO loader_test2 (dat, dat2) VALUES('aaa', 'zzz') }, q{ INSERT INTO loader_test2 (dat, dat2) VALUES('bbb', 'yyy') }, q{ INSERT INTO loader_test2 (dat, dat2) VALUES('ccc', 'xxx') }, q{ INSERT INTO loader_test2 (dat, dat2) VALUES('ddd', 'www') }, qq{ CREATE TABLE LOADER_TEST23 ( ID INTEGER NOT NULL PRIMARY KEY, DAT VARCHAR(32) NOT NULL UNIQUE ) $self->{innodb} }, qq{ CREATE TABLE LoAdEr_test24 ( iD INTEGER NOT NULL PRIMARY KEY, DaT VARCHAR(32) NOT NULL UNIQUE ) $self->{innodb} }, ); my @statements_reltests = ( qq{ CREATE TABLE loader_test3 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(32) ) $self->{innodb} }, q{ INSERT INTO loader_test3 (id,dat) VALUES(1,'aaa') }, q{ INSERT INTO loader_test3 (id,dat) VALUES(2,'bbb') }, q{ INSERT INTO loader_test3 (id,dat) VALUES(3,'ccc') }, q{ INSERT INTO loader_test3 (id,dat) VALUES(4,'ddd') }, qq{ CREATE TABLE loader_test4 ( id INTEGER NOT NULL PRIMARY KEY, fkid INTEGER NOT NULL, dat VARCHAR(32), FOREIGN KEY( fkid ) REFERENCES loader_test3 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test4 (id,fkid,dat) VALUES(123,1,'aaa') }, q{ INSERT INTO loader_test4 (id,fkid,dat) VALUES(124,2,'bbb') }, q{ INSERT INTO loader_test4 (id,fkid,dat) VALUES(125,3,'ccc') }, q{ INSERT INTO loader_test4 (id,fkid,dat) VALUES(126,4,'ddd') }, qq{ CREATE TABLE loader_test5 ( id1 INTEGER NOT NULL, iD2 INTEGER NOT NULL, dat VARCHAR(8), from_id INTEGER, to_id INTEGER, PRIMARY KEY (id1,id2), FOREIGN KEY (from_id) REFERENCES loader_test4 (id), FOREIGN KEY (to_id) REFERENCES loader_test4 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test5 (id1,id2,dat) 
VALUES (1,1,'aaa') }, qq{ CREATE TABLE loader_test6 ( id INTEGER NOT NULL PRIMARY KEY, Id2 INTEGER, loader_test2 INTEGER, dat VARCHAR(8), FOREIGN KEY (loader_test2) REFERENCES loader_test2 (id), FOREIGN KEY(id,Id2) REFERENCES loader_test5 (id1,iD2) ) $self->{innodb} }, (q{ INSERT INTO loader_test6 (id, id2,loader_test2,dat) } . q{ VALUES (1, 1,1,'aaa') }), qq{ CREATE TABLE loader_test7 ( id INTEGER NOT NULL PRIMARY KEY, id2 VARCHAR(8) NOT NULL UNIQUE, dat VARCHAR(8) ) $self->{innodb} }, q{ INSERT INTO loader_test7 (id,id2,dat) VALUES (1,'aaa','bbb') }, qq{ CREATE TABLE loader_test8 ( id INTEGER NOT NULL PRIMARY KEY, loader_test7 VARCHAR(8) NOT NULL, dat VARCHAR(8), FOREIGN KEY (loader_test7) REFERENCES loader_test7 (id2) ) $self->{innodb} }, (q{ INSERT INTO loader_test8 (id,loader_test7,dat) } . q{ VALUES (1,'aaa','bbb') }), qq{ CREATE TABLE loader_test9 ( loader_test9 VARCHAR(8) NOT NULL ) $self->{innodb} }, qq{ CREATE TABLE loader_test16 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(8) ) $self->{innodb} }, qq{ INSERT INTO loader_test16 (id,dat) VALUES (2,'x16') }, qq{ INSERT INTO loader_test16 (id,dat) VALUES (4,'y16') }, qq{ INSERT INTO loader_test16 (id,dat) VALUES (6,'z16') }, qq{ CREATE TABLE loader_test17 ( id INTEGER NOT NULL PRIMARY KEY, loader16_one INTEGER, loader16_two INTEGER, FOREIGN KEY (loader16_one) REFERENCES loader_test16 (id), FOREIGN KEY (loader16_two) REFERENCES loader_test16 (id) ) $self->{innodb} }, qq{ INSERT INTO loader_test17 (id, loader16_one, loader16_two) VALUES (3, 2, 4) }, qq{ INSERT INTO loader_test17 (id, loader16_one, loader16_two) VALUES (33, 4, 6) }, qq{ CREATE TABLE loader_test18 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(8) ) $self->{innodb} }, qq{ INSERT INTO loader_test18 (id,dat) VALUES (1,'x18') }, qq{ INSERT INTO loader_test18 (id,dat) VALUES (2,'y18') }, qq{ INSERT INTO loader_test18 (id,dat) VALUES (3,'z18') }, qq{ CREATE TABLE loader_test19 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(8) ) $self->{innodb} }, qq{ 
INSERT INTO loader_test19 (id,dat) VALUES (4,'x19') }, qq{ INSERT INTO loader_test19 (id,dat) VALUES (5,'y19') }, qq{ INSERT INTO loader_test19 (id,dat) VALUES (6,'z19') }, qq{ CREATE TABLE loader_test20 ( parent INTEGER NOT NULL, child INTEGER NOT NULL, PRIMARY KEY (parent, child), FOREIGN KEY (parent) REFERENCES loader_test18 (id), FOREIGN KEY (child) REFERENCES loader_test19 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test20 (parent, child) VALUES (1,4) }, q{ INSERT INTO loader_test20 (parent, child) VALUES (2,5) }, q{ INSERT INTO loader_test20 (parent, child) VALUES (3,6) }, qq{ CREATE TABLE loader_test21 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(8) ) $self->{innodb} }, q{ INSERT INTO loader_test21 (id,dat) VALUES (7,'a21')}, q{ INSERT INTO loader_test21 (id,dat) VALUES (11,'b21')}, q{ INSERT INTO loader_test21 (id,dat) VALUES (13,'c21')}, q{ INSERT INTO loader_test21 (id,dat) VALUES (17,'d21')}, qq{ CREATE TABLE loader_test22 ( parent INTEGER NOT NULL, child INTEGER NOT NULL, PRIMARY KEY (parent, child), FOREIGN KEY (parent) REFERENCES loader_test21 (id), FOREIGN KEY (child) REFERENCES loader_test21 (id) ) $self->{innodb} }, q{ INSERT INTO loader_test22 (parent, child) VALUES (7,11)}, q{ INSERT INTO loader_test22 (parent, child) VALUES (11,13)}, q{ INSERT INTO loader_test22 (parent, child) VALUES (13,17)}, qq{ CREATE TABLE loader_test25 ( id1 INTEGER NOT NULL, id2 INTEGER NOT NULL, dat VARCHAR(8), PRIMARY KEY (id1,id2) ) $self->{innodb} }, q{ INSERT INTO loader_test25 (id1,id2,dat) VALUES (33,5,'x25') }, q{ INSERT INTO loader_test25 (id1,id2,dat) VALUES (33,7,'y25') }, q{ INSERT INTO loader_test25 (id1,id2,dat) VALUES (3,42,'z25') }, qq{ CREATE TABLE loader_test26 ( id INTEGER NOT NULL PRIMARY KEY, rel1 INTEGER NOT NULL, rel2 INTEGER NOT NULL, FOREIGN KEY (id, rel1) REFERENCES loader_test25 (id1, id2), FOREIGN KEY (id, rel2) REFERENCES loader_test25 (id1, id2) ) $self->{innodb} }, q{ INSERT INTO loader_test26 (id,rel1,rel2) VALUES (33,5,7) }, q{ 
INSERT INTO loader_test26 (id,rel1,rel2) VALUES (3,42,42) }, ); my @statements_advanced = ( qq{ CREATE TABLE loader_test10 ( id10 $self->{auto_inc_pk}, subject VARCHAR(8), loader_test11 INTEGER ) $self->{innodb} }, $make_auto_inc->(qw/loader_test10 id10/), qq{ CREATE TABLE loader_test11 ( id11 $self->{auto_inc_pk}, message VARCHAR(8) DEFAULT 'foo', loader_test10 INTEGER, FOREIGN KEY (loader_test10) REFERENCES loader_test10 (id10) ) $self->{innodb} }, $make_auto_inc->(qw/loader_test11 id11/), (q{ ALTER TABLE loader_test10 ADD CONSTRAINT } . q{ loader_test11_fk FOREIGN KEY (loader_test11) } . q{ REFERENCES loader_test11 (id11) }), ); my @statements_inline_rels = ( qq{ CREATE TABLE loader_test12 ( id INTEGER NOT NULL PRIMARY KEY, id2 VARCHAR(8) NOT NULL UNIQUE, dat VARCHAR(8) NOT NULL UNIQUE ) $self->{innodb} }, q{ INSERT INTO loader_test12 (id,id2,dat) VALUES (1,'aaa','bbb') }, qq{ CREATE TABLE loader_test13 ( id INTEGER NOT NULL PRIMARY KEY REFERENCES loader_test12, loader_test12 VARCHAR(8) NOT NULL REFERENCES loader_test12 (id2), dat VARCHAR(8) REFERENCES loader_test12 (dat) ) $self->{innodb} }, (q{ INSERT INTO loader_test13 (id,loader_test12,dat) } . q{ VALUES (1,'aaa','bbb') }), ); my @statements_implicit_rels = ( qq{ CREATE TABLE loader_test14 ( id INTEGER NOT NULL PRIMARY KEY, dat VARCHAR(8) ) $self->{innodb} }, q{ INSERT INTO loader_test14 (id,dat) VALUES (123,'aaa') }, qq{ CREATE TABLE loader_test15 ( id INTEGER NOT NULL PRIMARY KEY, loader_test14 INTEGER NOT NULL, FOREIGN KEY (loader_test14) REFERENCES loader_test14 ) $self->{innodb} }, q{ INSERT INTO loader_test15 (id,loader_test14) VALUES (1,123) }, ); $self->drop_tables; my $dbh = $self->dbconnect(1); $dbh->do($_) for (@statements); unless($self->{skip_rels}) { # hack for now, since DB2 doesn't like inline comments, and we need # to test one for mysql, which works on everyone else... # this all needs to be refactored anyways. 
$dbh->do($_) for (@statements_reltests); unless($self->{vendor} =~ /sqlite/i) { $dbh->do($_) for (@statements_advanced); } unless($self->{no_inline_rels}) { $dbh->do($_) for (@statements_inline_rels); } unless($self->{no_implicit_rels}) { $dbh->do($_) for (@statements_implicit_rels); } } $dbh->disconnect(); } sub drop_tables { my $self = shift; my @tables = qw/ loader_test1 loader_test2 LOADER_TEST23 LoAdEr_test24 /; my @tables_auto_inc = ( [ qw/loader_test1 id/ ], [ qw/loader_test2 id/ ], ); my @tables_reltests = qw/ loader_test4 loader_test3 loader_test6 loader_test5 loader_test8 loader_test7 loader_test9 loader_test17 loader_test16 loader_test20 loader_test19 loader_test18 loader_test22 loader_test21 loader_test26 loader_test25 /; my @tables_advanced = qw/ loader_test11 loader_test10 /; my @tables_advanced_auto_inc = ( [ qw/loader_test10 id10/ ], [ qw/loader_test11 id11/ ], ); my @tables_inline_rels = qw/ loader_test13 loader_test12 /; my @tables_implicit_rels = qw/ loader_test15 loader_test14 /; my @tables_rescan = qw/ loader_test30 /; my $drop_fk_mysql = q{ALTER TABLE loader_test10 DROP FOREIGN KEY loader_test11_fk}; my $drop_fk = q{ALTER TABLE loader_test10 DROP CONSTRAINT loader_test11_fk}; my $dbh = $self->dbconnect(0); my $drop_auto_inc = $self->{auto_inc_drop_cb} || sub {}; unless($self->{skip_rels}) { $dbh->do("DROP TABLE $_") for (@tables_reltests); unless($self->{vendor} =~ /sqlite/i) { if($self->{vendor} =~ /mysql/i) { $dbh->do($drop_fk_mysql); } else { $dbh->do($drop_fk); } $dbh->do("DROP TABLE $_") for (@tables_advanced); $dbh->do($_) for map { $drop_auto_inc->(@$_) } @tables_advanced_auto_inc; } unless($self->{no_inline_rels}) { $dbh->do("DROP TABLE $_") for (@tables_inline_rels); } unless($self->{no_implicit_rels}) { $dbh->do("DROP TABLE $_") for (@tables_implicit_rels); } $dbh->do("DROP TABLE $_") for (@tables_rescan); } $dbh->do("DROP TABLE $_") for (@tables); $dbh->do($_) for map { $drop_auto_inc->(@$_) } @tables_auto_inc; $dbh->disconnect; } 
sub DESTROY { my $self = shift; $self->drop_tables if $self->{_created}; } 1; DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/DBIx/0000755000175000017500000000000012650450355022157 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/DBIx/Class/0000755000175000017500000000000012650450355023224 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/DBIx/Class/TestComponent.pm0000644000175000017500000000020512542756321026363 0ustar ilmariilmaripackage DBIx::Class::TestComponent; use strict; use warnings; sub dbix_class_testcomponent { 'dbix_class_testcomponent works' } 1; DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/TestAdditional.pm0000644000175000017500000000015112542756321024636 0ustar ilmariilmaripackage TestAdditional; use strict; use warnings; sub test_additional { return "test_additional"; } 1; DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/dbixcsl_test_dir.pm0000644000175000017500000000204612542756321025260 0ustar ilmariilmaripackage dbixcsl_test_dir; use strict; use warnings; use File::Path 'rmtree'; use File::Temp 'tempdir'; use Scalar::Util 'weaken'; use namespace::clean; use DBI (); use base qw/Exporter/; our @EXPORT_OK = '$tdir'; die "/t does not exist, this can't be right...\n" unless -d 't'; my $tbdir = 't/var'; unless (-d $tbdir) { mkdir $tbdir or die "Unable to create $tbdir: $!\n"; } our $tdir = tempdir(DIR => $tbdir); # We need to disconnect all active DBI handles before deleting the directory, # otherwise the SQLite .db files cannot be deleted on Win32 (file in use) since # END does not run in any sort of order. 
no warnings 'redefine'; my $connect = \&DBI::connect; my @handles; *DBI::connect = sub { my $dbh = $connect->(@_); push @handles, $dbh; weaken $handles[-1]; return $dbh; }; END { if (not $ENV{SCHEMA_LOADER_TESTS_NOCLEANUP}) { foreach my $dbh (@handles) { $dbh->disconnect if $dbh; } rmtree($tdir, 1, 1); rmdir($tbdir); # remove if empty, ignore otherwise } } 1; DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/make_dbictest_db.pm0000644000175000017500000000172312131533457025174 0ustar ilmariilmaripackage make_dbictest_db; use strict; use warnings; use DBI; use dbixcsl_test_dir qw/$tdir/; eval { require DBD::SQLite }; my $class = $@ ? 'SQLite2' : 'SQLite'; my $fn = "$tdir/dbictest.db"; unlink($fn); our $dsn = "dbi:$class:dbname=$fn"; my $dbh = DBI->connect($dsn); $dbh->do ('PRAGMA SYNCHRONOUS = OFF'); $dbh->do($_) for ( q|CREATE TABLE foo ( fooid INTEGER PRIMARY KEY, footext TEXT )|, q|CREATE TABLE bar ( barid INTEGER PRIMARY KEY, fooref INTEGER REFERENCES foo(fooid) )|, q|INSERT INTO foo VALUES (1,'Foo text for number 1')|, q|INSERT INTO foo VALUES (2,'Foo record associated with the Bar with barid 3')|, q|INSERT INTO foo VALUES (3,'Foo text for number 3')|, q|INSERT INTO foo VALUES (4,'Foo text for number 4')|, q|INSERT INTO bar VALUES (1,4)|, q|INSERT INTO bar VALUES (2,3)|, q|INSERT INTO bar VALUES (3,2)|, q|INSERT INTO bar VALUES (4,1)|, ); END { unlink($fn); } 1; DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/DBIXCSL_Test/0000755000175000017500000000000012650450355023460 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/DBIXCSL_Test/Schema/0000755000175000017500000000000012650450355024660 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/DBIXCSL_Test/Schema/LoaderTest1.pm0000644000175000017500000000026412542756321027351 0ustar ilmariilmaripackage DBIXCSL_Test::Schema::LoaderTest1; use strict; use warnings; sub loader_test1_classmeth { 'all is well' } sub loader_test1_rsmeth : ResultSet { 
'all is still well' } 1; DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/TestLeftBase.pm0000644000175000017500000000017512542756321024261 0ustar ilmariilmaripackage TestLeftBase; use strict; use warnings; sub test_additional_base_override { return "test_left_base_override"; } 1; DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/My/0000755000175000017500000000000012650450355021756 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/My/SchemaBaseClass.pm0000644000175000017500000000013412542756321025275 0ustar ilmariilmaripackage My::SchemaBaseClass; use strict; use warnings; use base 'DBIx::Class::Schema'; 1; DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/lib/My/ResultBaseClass.pm0000644000175000017500000000013212542756321025351 0ustar ilmariilmaripackage My::ResultBaseClass; use strict; use warnings; use base 'DBIx::Class::Core'; 1; DBIx-Class-Schema-Loader-0.07045/t/backcompat/0.04006/12pg_common.t0000644000175000017500000000106012542756321023130 0ustar ilmariilmariuse DBIx::Class::Schema::Loader::Optional::Dependencies -skip_all_without => qw(test_backcompat test_rdbms_pg); use strict; use warnings; use lib qw(t/backcompat/0.04006/lib); use dbixcsl_common_tests; use Test::More; my $dsn = $ENV{DBICTEST_PG_DSN} || ''; my $user = $ENV{DBICTEST_PG_USER} || ''; my $password = $ENV{DBICTEST_PG_PASS} || ''; dbixcsl_common_tests->new( vendor => 'Pg', auto_inc_pk => 'SERIAL NOT NULL PRIMARY KEY', dsn => $dsn, user => $user, password => $password, )->run_tests(); DBIx-Class-Schema-Loader-0.07045/Changes0000644000175000017500000011135512650450260016741 0ustar ilmariilmariRevision history for Perl extension DBIx::Class::Schema::Loader 0.07045 2016-01-22 - Regenerate tarball without author-mode test dependencies 0.07044 2016-01-22 - Fix Pg date/time types with zero fractional second digits - Add support for nested hashref form of col_accessor_map 0.07043 2015-05-13 - Fix many_to_many bridges with overlapping foreign keys - Add 
option to allow extra columns in many_to_many link tables - Document how to add perltidy markers via filter_generated_code - Fix DB2 foreign-key introspection - Remove dependency on List::MoreUtils and Sub::Name - Ensure schema files are generated as binary files on Windows - Fix overwrite_modifications not overwriting if the table hasn't changed - Filter out disabled constraints and triggers for Oracle (GH#5) 0.07042 2014-08-20 - Fix unescaped left braces in regexes in tests - Use undef instead of '%' for the table and type arguments to $dbh->tables (fixes breakage with DBD::Pg 3.4.0) 0.07041 2014-08-09 - Fix many_to_many bridges going back to the same table - Don't disconnect after ->load in static mode (fixes Pg test failure with the upcoming DBIC 0.082800) - Set up Travis smoking (mostly stolen from DBIC) 0.07040 2014-05-27 - Add options to omit the version and timestamp from the generated code (RT#92300) - Skip dumping unique indexes with expressions (RT#93613) - Fix dumping unique indexes with DBD::Pg < 1.50 - Fix inconsistent naming of duplicate unique constraints - Avoid clobbering caller's $_ (RT#96010) 0.07039 2014-01-06 - Fix table listing with DBD::DB2 >= 1.85 (RT#91764) - Add accessor for the list of (re)generated classes - Add dry-run mode for static schema creation 0.07038 2013-11-20 - Allow coderef maps to call back into the hashref mapping code - Fix MySQL column info detection with multiple schemas (RT#82358) - Fix skip count for Oracle multi-schema tests - Actually test data types that require separate tables - Fix national character type sizes on DBD::Oracle >= 1.52 - Fix detection of qualified sequence names for Oracle (RT#90341) 0.07037 2013-10-30 - Allow overriding individual moniker parts 0.07036_04 2013-10-24 - Set table_class to DBIx::Class::ResultSource::View for views, in supported backends (SQLite, MySQL, and Pg) (arc@cpan.org) 0.07036_03 2013-10-22 - Restore support for PostgreSQL 8.3 (RT#87291) - Fix t/23dumpmore on perl 5.8.8 and 
earlier - Silence warnings from pure-perl Cwd::abs_path() 0.07036_02 2013-09-25 - Skip many_to_many bridges involving might_have relationships 0.07036_01 2013-08-11 - Fix typos in POD and comments (RT#87644) - Don't ship MYMETA.* files (RT#87713) - Fix many_to_many bridges involving might_have relationships - Allow specifying custom attributes for many_to_many bridges - Allow specifying the separator when joining database, schema and table names to form a moniker - Allow using all the moniker parts in hashref moniker_map - Allow matching all the moniker parts in constraint/exclude 0.07036 2013-07-08 - Fix stray comma in Pg on_delete/on_update => CASCADE (RT#84706) - Fix MySQL enums with empty strings and leading/trailing quotes (RT#86091) - Fix "table" parameter in col_accessor_map callback (RT#84050) - Fix ordering issues in Pg loader 0.07035 2013-02-26 - Release 0.07034_01 with a stable version number. 0.07034 is skipped due to the improper dev release versioning. 0.07034_01 2013-01-21 - Fix fixture generation helper to work with older DBD::SQLite versions 0.07034_01 2013-01-16 - MSSQL: on > 2000 use schema_name() instead of user_name() to detect current schema and query sys.schemas instead of sysusers. - SQL Anywhere: introspect ON DELETE/UPDATE rules, default is now RESTRICT. 
is_deferrable still defaults to 1 - rewrite pg fk introspection to use catalog views instead of information_schema as information_schema does not work for readonly users - add rel_type param for relationship_attrs coderef - pass link table details to rel_name_map for many_to_many bridges (RT#81091) 0.07033 2012-09-09 16:11:47 - more thoroughly document the new behavior for relationship attributes under "relationship_attrs" in ::Base POD - add a loud WARNING to Makefile.PL about the new behavior for relationship attributes 0.07032 2012-09-09 13:17:20 - SQLite: detect is_deferrable for inline FKs - support coderefs for relationship_attrs 0.07031 2012-09-06 15:07:08 - fix 02pod.t failure due to lack of =encoding utf8 statement (patch by Marcel Gruenauer) (RT#79481) 0.07030 2012-09-06 03:27:09 - allow user to set qualify_objects=0 in multischema configurations (andrewalker) 0.07029 2012-09-05 16:41:56 - Oracle: introspect ON DELETE and DEFERRABLE FK clauses - Oracle WARNING: on_delete is now 'NO ACTION' by default, not 'CASCADE'. on_update is now 'NO ACTION' by default (Oracle does not have update rules, this was done to preserve the behavior of the schema when cross-deploying to SQLite.) is_deferrable is now 0 by default, not 1. - DB2: introspect ON DELETE/UPDATE FK clauses - DB2 WARNING: the default for on_delete/on_update is now 'NO ACTION' not 'CASCADE', the default for is_deferrable is still 1 because DB2 does not have deferrable constraints. - SQLite: introspect ON DELETE/UPDATE and DEFERRABLE FK clauses - SQLite WARNING: the default for on_delete/on_update is now 'NO ACTION' not 'CASCADE', and the default for is_deferrable is now 0 not 1. 0.07028 2012-08-30 05:32:42 - MSSQL: introspect ON DELETE/UPDATE clauses for foreign keys - MSSQL WARNING: the default for on_delete/on_update is now 'NO ACTION' not 'CASCADE'. 0.07027 2012-08-26 22:39:45 - PostgreSQL: introspect ON DELETE/UPDATE clauses for foreign keys and the DEFERRABLE clause. 
- PostgreSQL WARNING: the default for on_delete/on_update attributes for belongs_to relationships is now 'NO ACTION' not 'CASCADE! The default for is_deferrable is now 0 not 1. 0.07026 2012-08-26 01:01:26 - MySQL: introspect ON DELETE/UPDATE clauses for foreign keys. - MySQL WARNING: the default on_delete/on_update attributes for belongs_to relationships is now RESTRICT, *NOT* CASCADE! This is overridable via the relationship_attrs option. 0.07025 2012-06-08 22:48:05 - support SQL Server 2000 again (broken in 0.07011) - some slight optimization for SQL Server driver 0.07024 2012-05-08 15:35:16 - work around broken keyseq in DBD::Pg foreign_key_info (RT#77062) 0.07023 2012-05-05 11:44:15 - properly order FK columns when using base ::DBI loader (SineSwiper) - bump Class::Inspector dep to 1.27 due to test failures with earlier versions on perl >= 5.15.7 (RT#74236) 0.07022 2012-04-08 12:11:00 - do separate queries for default_value on Sybase ASE as some servers can't join to that table (pcmantz) (RT#74170) - set correct size for nchar/nvarchar columns for Sybase ASE, depending on @@ncharsize 0.07021 2012-04-04 23:47:34 - use ::Schema::connect instead of ::Schema::connection in make_schema_at (RT#74175) - register sources on the schema class, never the instance, regardless of how the connection is made for dynamic schemas 0.07020 2012-03-31 21:34:06 - fix some mro issues under perl 5.8 0.07019 2012-03-28 17:23:09 - fix some errors due to case issues (RT#75805) 0.07018 2012-03-27 05:55:10 - skip dbicdump tests on Win32 due to test fails (RT#75732) - fix undefined warnings for DBDs without schemas - work around ORA-24345 from $dbh->column_info - fix spelling mistake in Base POD (RT#74796) 0.07017 2012-02-07 07:23:48 - *EXPERIMENTAL* support for dumping PostgreSQL schemas inside of a transaction - use DBI table_info/column_info REMARKS field if/where available for table/column comments (SineSwiper) - better compatibility with more DBDs (SineSwiper) 0.07015 2011-12-09 
10:36:17 - generate many_to_many bridges for targets of link tables 0.07014 2011-11-18 17:06:34 - fix a bug in the automatic multischema clashing moniker disambiguation code that overwrote $loader->moniker_parts 0.07013 2011-11-17 23:12:47 - automatically prefix database/schema to clashing monikers for the same table name in multischema configurations 0.07012 2011-11-09 15:16:29 - as of 0.07011 all callbacks receive a ::Loader::Table or interface-compatible object instead of the table name, this object stringifies to the table name (RT#72260) - fix a bug in dynamic schema_base_class/schema_components implementation that ran the connection method twice on subsequent connects - use a temp file for filter_generated_code with a string program name instead of IPC::Open2, which hangs on Win32 (RT#72226) - previous version referred to the wrong RT# for the uniq_to_primary change, it is actually (RT#51696) 0.07011 2011-11-01 09:00:00 - add -I option to dbicdump - do not delete default custom content comment and ending 1; from custom content in files that are being renamed (RT#70507) - use MooseX::MarkAsMethods instead of namespace::autoclean for the use_moose option, this protects operator overloads, only_autoclean option added for the old behavior - add experimental naming=v8 mode with better CamelCase identifier support, relationship naming and conversion of non-identifier chars (RT#71945) - add naming => { force_ascii => 1 } option for Unicode database names - implement schema_base_class and schema_components for dynamic and working schemas - remove dependency on File::Slurp - allow the constraint and exclude options to be used simultaneously (bphillips) - fix Oracle multi-db_schema unique detection (RT#70851) - fix Oracle common tests fail with multi_schema due to not resetting the preserve_case option after the preserve_case tests (RT#70829) - handle DEFAULT NULL for Pg - handle boolean DEFAULT 0::boolean for Pg - config file support for dbicdump script (alnewkirk) - 
added filter_generated_code option (RT#53841) - generic table and column comments support - MySQL table and column comments support - support DOS line endings on *nix and *nix line ending on Win32 - add quiet option - $schema->loader is now a public method - add schema_components option - sort relationships so they always come out in the same order - also sort unique constraints so they always come out in the same order - multi db_schema support with cross-schema rels (RT#39478) - added moniker_parts option for name clashes in multi db_schema setups - add rel_name_map option - fix the decimal data type for MS Access over ODBC - fix enum/set detection for MySQL (RT#68717) - fix is_nullable detection on MS Access - remove '$table has no primary key' warning - added uniq_to_primary option to promote unique keys to primary keys (RT#25944) - support arrayrefs for result_namespace and resultset_namespace (RT#40214) - add naming => { monikers => 'preserve' } or 'singular'/'plural' to control moniker inflection (RT#44935) - add naming => { column_accessors => 'preserve' } to not normalize CamelCase column names to lower case for accessors (RT#64668) - support quoted PostgreSQL schema names with special chars (RT#64766) - automatically turn on quoting for MySQL (RT#60469) - become utf8-aware (RT#67920) - handle duplicate relationship names (RT#64041) - fix a bug in Sybase ASE foreign key detection - generate POD for result_base_class, additional_classes, additional_base_classes, left_base_classes, components, result_components_map, result_roles, result_roles_map, unique constraints, set_primary_key and table - rename result_component_map to result_components_map (old name still works) - fix accessor collision detection for methods from result_components_map components - add result_roles and result_roles_map options - fix for mysql rel detection in mixed-case tables on mixed-case filesystems (OSX and Windows) - support for DBD::Firebird - support for unicode Firebird data 
types - handle "use warnings FATAL => 'all';" in custom/external content (RT#59849) - for dynamic schemas, if the naming option is set, will automatically turn on use_namespaces=1 as well. Set use_namespaces=0 to disable this behavior (RT#59849) 0.07010 2011-03-04 08:26:31 - add result_component_map option 0.07009 2011-02-25 11:06:51 - fix a syntax error in MS Access ADO driver 0.07008 2011-02-25 01:54:43 - rename column_accessor_map to col_accessor_map, the old alias still works - support MSSQL over DBD::ADO - support for MS Access over DBD::ODBC and DBD::ADO 0.07007 2011-02-15 10:00:07 - bump DBIx::Class dep to 0.08127 - fix MSSQL data types for native client and EasySoft driver 0.07006 2011-02-01 02:18:32 - turn unloading of RelBuilder temp classes back on, now with proper check for class existance using Class::Inspector->loaded - bump up dep on namespace::clean to avoid breakage with earlier versions (RT#65149) 0.07005 2011-01-25 23:07:55 - support extra connect_info options like quote_char for dbicdump - fix breakage on perl 5.8.x related to unloading temporary classes 0.07004 2011-01-24 03:43:05 - fix bug with result class methods being cached on in a closure instead of the object, which breaks for multiple dynamic schemas in a single perl instance 0.07003 2011-01-21 06:43:05 - fix relname/method collisions (RT#62648) - fix fully qualified component classes (RT#62624) - improve sybase/mssql db_schema detection - remove MooseX::NonMoose from Schema files under use_moose=1 - better _tables_list for Sybase ASE - add datetime_undef_if_invalid => 1 for MySQL datetime data types (RT#64820) This behavior can be turned off by passing datetime_undef_if_invalid=0 as a loader option - added column_accessor_map option - Preserve relationship names when redumping and another FK is added (RT#62424) - Remove resultset_components as ResultSetManager is deprecated - Fix a fail when very old Moose/CMOP is installed - Added warning for column-accessor collisions, doc section in 
::Base ("COLUMN ACCESSOR COLLISIONS") and the col_collision_map option. - Handle column accessor collisions with UNIVERSAL methods - Generate custom_type_name hint for PostgreSQL enums, as used by very recent SQL::Translator - Added support for PostgreSQL enum types - Added table/column comment support for Oracle - Fix missing require (RT#62072) 0.07002 2010-09-11 01:48:00 - Properly detect a schema loaded with use_moose on subsequent reloads - Die with a sensible message when a schema loaded with use_moose => 1 is reloaded with use_moose => 0 - Switch to MRO::Compat - Fix oracle common tests failure / lc(undef) warnings - Bump Moose/Moosex::NonMoose optional dependencies to fixed-up versions - Fix mssql common tests failures with MSSQL 2005 (skip test of datatypes found only on MSSQL 2008) - Fix DB2 v8 test failures (skip tests of graphics types found only on DB2 v9) - Fix dangerous invocation of ->meta on classes during upgrade (may be *non* moosified and contain a user-defined meta() ) - Multiple test cleanups and refactorings 0.07001 2010-07-24 21:28:08 - put is_deferrable => 1 back into default attributes for belongs_to - fix Postgres sequence detection for qualified sequences - detect DOS line ends in table/column comments and convert to \n - added use_moose option - always mark pk columns is_nullable=0 - fix unique constraint names for SQLite (actual names break ->deploy) - fix bug in qualify_objects that would add schema to relnames - better type info for Informix, except for DATETIME precision and INTERVAL support - better type info for DB2 - fix some newly-introduced test bugs - fix composite PKs getting marked is_auto_increment on SQLite 0.07000 2010-05-22 23:40:15 - added qualify_objects option to prepend db_schema to table names - fix for negative numeric default values - sequence is detected for Oracle - fix for SQLite is_auto_increment detection when table is empty (hobbs) - rescan now reloads all tables - minor type info improvements for all DBs - 
fix erroneous default_value for MySQL NOT NULL columns (RT#57225) - remove is_deferrable => 1 from default for belongs_to rels - better type info for Oracle - preliminary Informix support - unregister dropped sources on rescan - added 'preserve_case' option with support for all DBs where it makes sense; removed the MSSQL 'case_sensitive_collation' and the Firebird/InterBase 'unquoted_ddl' options in favor of it. - support CamelCase table names and column names (in case-preserving mode) at the v7 naming level - rewrite datetime default functions as \'current_timestamp' where possible (except for Sybase ASE) to ease cross-deployment - use column_info instead of select to get Oracle column list (RT#42281) - match quotes in MySQL parser in more places (RT#42101) - fix unique detection in DB2 for multiple schemas (RT#39622) - fix column name collisions with methods (RT#49443) - fix loading MySQL views on older MySQL versions (RT#47399) 0.06001 2010-04-10 01:31:12 - fix type info for MSSQL - fix MSSQL collation detection on freetds tds version 8.0 0.06000 2010-04-06 01:12:25 - better type info for MySQL - initial MySQL data type tests (jhannah) - don't set result_namespace if it's 'Result' - support for MSSQL databases with case sensitive collation, manually overridable with 'case_sensitive_collation' option - do not try to detect driver and rebless when used with a custom 'loader_class' - suppress 'bad table or view' warnings for filtered tables/views - croak if several tables reduce to an identical moniker (ribasushi) - better type info for Sybase ASE - better type info for Pg: sets sequence for serials, handles numerics without precision - better _tables_list for MSSQL - pick up views in SQLite too - better rel inflection using Lingua::EN::Inflect::Phrase - cascade_delete and cascade_copy are turned off for has_many/might_have by default, and belongs_to has on_delete => 'CASCADE', on_update => 'CASCADE' and is_deferrable => 1 by default, overridable via 
relationship_attrs - added config_file option for loading loader options from a file - set inflate_datetime => 1 for 'AS getdate()' computed columns in Sybase - Firebird support - use introspection pragmas instead of regexes to introspect SQLite (hobbs) - generate POD for refs correctly from column_info - fix tables list, fk introspection and type info for SQL Anywhere 0.05003 2010-02-20 05:19:51 - support for custom_column_info, datetime_timezone and datetime_locale (rbo) - improve parsing of SQLite tables when a column definition spans multiple lines (hobbs) - fix missing trailing _id stripping for some relationship names (rbuels) - fixed accessor POD bug, was not dereferencing scalar refs before printing (rbuels) 0.05002 2010-02-15 10:17:47 - support for SQLAnywhere via DBD::SQLAnywhere and ODBC - fix picking up quoted tables for SQLite (RT#54538) patch from schwern - validate class/component loader_options to make sure classes are available before generating the schema, patch from bphillips 0.05001 2010-02-05 14:29:27 - correct default_value for all backends with common tests - fix bug with quoted Pg tables from $dbh->tables (RT#54338) - add inflate_datetime => 0 to 'timestamp' types for Sybase 0.05000 2010-02-01 09:24:24 - better data_type, default_value and size for Sybase - added 'generate_pod' option, defaults to on - added 'pod_comment_mode' and 'pod_comment_spillover_length' to control table comment generation (waawaamilk) 0.04999_14 2010-01-14 06:47:07 - use_namespaces now default, with upgrade/downgrade support - filter out un-selectable tables/views - fix NUMERIC/DECIMAL size column_info for postgres - now mentions skip_load_external feature in comments (jhannah) - moniker_map POD correction (jhannah) 0.04999_13 2010-01-03 12:32:25 - exclude 'size' column_info for postgres when unnecessary, and use the correct precision for varying types (except NUMERIC) - 'naming' attribute and backward compatibility with 0.04006 - added relationship_attrs option for 
setting attributes in generated relationships - added overwrite_modifications option that ignores md5sums on generated code - added skip_load_external (jhannah) - remove Class::Data::Accessor and Class::Accessor::Fast and switch everything to Class::Accessor::Grouped (jhannah) - better handling of db_schema for Oracle, based on (RT#35732) 0.04999_12 2009-11-30 23:36:14 - fix MySQL rel introspection with on_connect_call => 'set_strict_mode' (RT#52087) - now using base 'DBIx::Class::Core' for Results (RT#52141) 0.04999_11 2009-11-29 18:08:46 - added patch to generate POD from postgres by Andrey Kostenko (GUGU) - added test for norewrite feature - fix default_value for MSSQL 0.04999_10 2009-10-31 12:28:53 - patch from Robert Bohne to make _table_uniq_info more correct for Oracle - fix data_type for identity columns with MSSQL 0.04999_09 2009-10-08 - Only redump the files when something has actually changed - Place a warning at the top of the files saying 'do not modify' to match the one at the bottom of the auto-gen'd section 0.04999_08 2009-08-28 - Replace UNIVERSAL::require with Class::C3::Componentised - Add Sybase/MSSQL support through DBD::Sybase - use $dbh->get_info(29/41) for qote_car/name_sep if available (semifor) - add MSSQL support through DBD::ODBC - support MSSQL table names with a '.' in the name - support MySQL CURRENT_TIMESTAMP() 0.04999_07 2009-04-18 - Add result_base_class and schema_base_class options (RT #43977) - Ignore duplicate uniq indices (including duplicates of the PK). 
- Fix for DBD::SQLite 1.20 - Fix for DBIx::Class 0.08100 0.04999_06 Tue Nov 11, 2008 - Singularise table monikers by default - Strip trailing _id from single-column belongs_to relationships - Add "dbicdump" script for easy commandline dumping - Throw out the in-memory class generation, just dump to a temporary directory if the user didn't specify one - Fix Oracle constraint and auto-increment detection for non-owned schemas (RT #35732) - Handle ResultSetManager deprecation warning in common tests 0.04999_05 Mon Apr 14, 2008 - Fix limiting table list to the specified schema for DB2 - Default db_schema to the username for DB2 - Allow specifying a custom loader_class, overriding the storage_type-based detection - Cosmetic fixes to dumping of externally defined classes - Make ResultSetManager notice externally defined :ResultSet methods - Fix test failure for non-InnoDB MySQL due to wrong skip count - Fix base class ordering in dumped classes - Run the common tests against both dynamic and dumped versions of the schema 0.04999_04 Wed Mar 12, 2008 - Add is_auto_increment detecton for DB2 0.04999_03 Wed Mar 12, 2008 - Fix DB2 support 0.04999_02 Tue Feb 12, 2008 - Add is_auto_increment detection for Oracle - Unnhide the Oracle module now that the CPAN perms are sorted out. Thanks to Tsunoda Kazuya for the quick response. 0.04999_01 Tue Feb 5, 2008 - Mark foreign key columns with is_foreign_key => 1 - Add support for vendor-specific extra column attributes. - Add support for extra => { unsigned => 1 } for MySQL. 
- Add support for enum value lists for MySQL - Set join_type => 'LEFT OUTER' for nullable foreign keys (patch from Bernhard Weißhuhn) - Set is_auto_increment for auto-increment columns (RT #31473) (Only SQLite, MySQL and PostgreSQL are currently supported) - Generate one-to-one accessors for unique foreign keys (ilmari) - Add support for load_namespaces-style class layout - Fix test skip count for main skip_rels block - Fix auto-inc column creation for the Oracle tests - Fix column ordering in unique constraints for Oracle - Fix Win32 test skip counts for good (RT #30568, Kenichi Ishigaki) - Default Oracle db_schema to db username (patch from Johannes Plunien) 0.04003 Wed Oct 4, 2007 - Prevent users from running Kwalitee test automatically - Fix extra whitespace being added to output on regeneration (from ilmari) 0.04002 Tue Jul 24, 2007 - rescan method now returns the actual list of new tables loaded (previously, the return value wasn't taking constraint/exclude into account, even though the meat of the operation was). - Hid the Oracle module so that search.cpan.org will stop ignoring this package, temporary fix until perms are sorted out - Fix Win32 test skip counts (RT #27715, Alexandr Ciornii) - Fix a small output quoting bug (RT #28073, Tokuhiro Matsuno) 0.04001 Tue Jun 26, 2007 - Deprecated dump_overwrite. The changed behavior from 0.03xxx was confusing. - Added new option really_erase_my_files, which does what dump_overwrite did in 0.04000, which is not what it did in 0.03xxx. 0.04000 Thu Jun 7, 2007 - Added some env vars for controlling the Makefile.PL feature questions, to make automation easier. 
0.03999_02 Tue May 22, 2007 - Converted to Module::Install 0.03012 Tue May 22, 2007 - Relationship names for multiple multi-col rels between the same table fixed by ilmari - Fix from Marc Espie for CREATE TABLE 'foo' for SQLite - skip ^sqlite_ tables in SQLite (thanks chromatic) 0.03999_01 Sat Apr 14 19:57:40 GMT 2007 - Added *experimental* Oracle support from work done by Tsunoda Kazuya some months ago. Not well tested. - Added "rescan" schema (and loader) method, which picks up newly created tables at runtime - Made dump_to_dir / dump_overwrite much more intelligent (they now preserve customizations by default) - Added support for DBI's new standard "statistics_info" method to gather unique key info (only supported by DBD::Pg trunk afaik) - columns_info_for imported from DBIx::Class - relationships are now on by default, use skip_relationships to disable them - Removed previously deprecated methods/options - Added $VERSION to all packages in this dist 0.03011 Sat Apr 14 19:03:07 UTC 2007 - fix case-sensitivity in UNIQUE parsing for SQLite 0.03010 Thu Mar 29 12:36:19 UTC 2007 - Workaround for new incompatible changes in DBD::mysql's "tables" method, which was causing us to find no tables w/ DBD::mysql 4.002+ - Fixed quoting problem in _table_columns (could cause crash when dumping/doing a static create) (from ash) 0.03009 Wed Nov 15 14:03:37 UTC 2006 - fix for rt.cpan.org #22425 (use File::Spec where appropriate) - use full instead of short classnames in relationships (from victori) 0.03008 Fri Oct 20 18:08:20 UTC 2006 - fix for rt.cpan.org #21084 (dump_overwrite pathological output recursion) - fix for rt.cpan.org #21758 (mysql reserved words as table names) - fix for rt.cpan.org #21025 (SQLite FK parsing) - workaround for rt.cpan.org #21746 ($Class::Accessor::Fast::VERSION issues) 0.03007 Thu Jul 27 16:19:59 UTC 2006 - Kill erroneous warning about connect/loader_options order (the real case is warned about elsewhere) - Fix t/22dump to work around ActiveState 
issues 0.03006 Wed Jul 26 00:14:58 UTC 2006 - Fixed column-case issue w/ columns_info_for 0.03005 Wed Jul 19 15:09:30 UTC 2006 [ Pretty much everything in this release originates from nilsonsfj patches ] - bugfix: mysql unique constraint code had an obvious but longstanding error - bugfix: columns were being specified out-of-order, bug was introduced in the 0.03004 column metadata dumping feature - dump code now skips+warns instead of dies when dump_overwrite not set 0.03004 Tue Jul 11 04:38:09 UTC 2006 - make_schema_at efficiency improvements - improved debugging output - column metadata now included in dumped schemas - Carp::Clan added, and some dies converted to croaks - no longer overwrites files when dumping, unless asked to do so via the dump_overwrite option - loader_options can now be embedded in the connection info - Documentation improvements - Deprecation notices updated, most things that became "deprecated" in 0.03 are now marked for death in 0.04000 - All deprecated usage patterns should now generate annoying warnings (most did before). - Somewhat improved test coverage 0.03003 Tue Jun 6 02:22:49 UTC 2006 - Fix inclusion of external add-on class definitions in dump_to_dir output. 0.03002 Tue Jun 6 01:27:25 UTC 2006 - rethrow exceptions that occur during make_schema_at 0.03001 Mon Jun 5 23:17:57 UTC 2006 - load_from_connection deprecation notice now mentions upgrading Catalyst::Model::DBIC::Schema if that module seems to be in use. - DBIx::Class required version number fixed - Loader statement caching for better load-time performance - Improved Pg unique index loader, based on RDBO 0.03000 Tue May 23 12:56:05 UTC 2006 - weakened the circular schema reference 0.02999_10 Mon May 22 18:58:20 UTC 2006 - a few more small bugfixes - more dump/debug improvements - new exportable function "make_schema_at" 0.02999_09 Sun May 21 23:26:58 UTC 2006 - More docs improvements - default uniq_info just warns and returns nothing now, instead of dying. 
In theory, this allows unsupported DBD drivers to potentially work with this module, if the generic methods happen to work for that vendor. - New tests for the various current and legacy/deprecated methods of connecting a Schema::Loader class/object. - Bugfix to the new runtime object connect/load code. 0.02999_08 Sat May 20 22:36:45 UTC 2006 - support for dumping to a directory for conversion to manual DBIx::Class::Schema - improved debugging output - more documentation updates - more backwards compatibility fixes - runtime connection definitions (and cloning) work fine now. - A couple of bugfixes related to db vendor "schemas", including a fix for http://rt.cpan.org/Public/Bug/Display.html?id=19164 0.02999_06 Thu May 18 16:32:41 UTC 2006 - backwards compat with all earlier versions - no longer requires schema class to have a connection - correctly determine source class names in the rel code generator - fixed mysql testing w/o InnoDB - Writing guide updated - docs updated - various trivial updates / fixes 0.02999_05 Sun Mar 26 06:46:09 UTC 2006 - bugfixes to constraint/exclude code - friendly warnings if we don't find any tables - inflect_map becomes inflect_plural and inflect_singular - Singularize relationship names where appropriate - Test updates - Supports multiple rels between the same pair of tables 0.02007 Wed Mar 22 06:03:53 UTC 2006 - Backported Class::C3::reinitialize changes from -refactor branch, resulting in significantly reduced load time 0.02006 Fri Mar 17 04:55:55 UTC 2006 - Fix long-standing table/col-name case bugs 0.02999_04 Fri Mar 17 03:55:09 UTC 2006 - Fixed case-sensitivity issues for table/col names - Punt columns_info_for to ->storage - Large loading speedup (get rid of redundant C3 reinits) - Removed TEST_POD checks - Removed unneccesary storage->disconnect 0.02999_03 Mon Mar 13 15:01:11 UTC 2006 - Added EXAMPLE section to pod [Kieren Diment] - Invasive heavy changes to the DBI- and vendor-specific code (expect some breakage in some 
cases until this settles down) - Support for loading UNIQUE constraints - Tests cleaned up a bit - Relationship building seperated out into it's own file for the changes that are coming, but still does basically what it did before (this work is the next step). 0.02999_02 Sat Mar 4 16:53:21 UTC 2006 - Merged in relevant changes from trunk since the split 0.02005 Mon Feb 27 23:53:17 UTC 2006 - Move the external file loading to after everything else loader does, in case people want to define, override, or build on top of the rels. 0.02004 Mon Feb 27 23:53:17 UTC 2006 - Minor fix to debugging message for loading external files 0.02999_01 Sun Feb 28 00:24:00 UTC 2006 - Shuffle the modules around - Make ourselves theoretically storage_type-agnostic - Remove the _db_classes stuff, bump PK::Auto to Base - Change default inflections to Lingua::EN::Inflect::Number::to_PL() 0.02003 Sun Feb 19 20:42:01 UTC 2006 - Deprecated arguments: dsn, user, password, options - New argument: connect_info 0.02002 Sat Feb 18 19:53:12 UTC 2006 - Added moniker_map and inflect_map 0.02001 Fri Feb 17 20:25:40 UTC 2006 - tests fixed up a bit - auto-loading of on-disk class definitions layered on top of the generated definitions (create Foo::Schema::Bar, then also try to ->require it if it exists on disk). - new parameters components and resultset_components, which do the obvious for the generated table classes. - DBIx::Class pre-req bumped to 0.05006, since Schema::Loader is virtually gauranteed to cause subtle mod_perl problems without those fixes. 0.02000 Sun Feb 12 22:43:47 UTC 2006 - Just docs/version update, 0.01004 code released as 0.02000 0.01004 Tue Feb 7 03:58:01 UTC 2006 - No longer tries to parse out the table name from the dsn for mysql, was unneccesary vestigial code from previous method. 
0.01003 Mon Feb 6 14:57:56 UTC 2006 - Fixed the has_many side of _make_cond_rel 0.01002 Fri Feb 3 23:14:38 UTC 2006 - Email address typo :( 0.01001 Fri Feb 3 05:15:41 UTC 2006 - Fixed up some documentation issues - Load C3 at the right time 0.01 Fri Feb 3 01:53:46 UTC 2006 - original release - created from DBIx::Class::Loader 0.14 DBIx-Class-Schema-Loader-0.07045/lib/0000755000175000017500000000000012650450355016213 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/0000755000175000017500000000000012650450355017001 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/0000755000175000017500000000000012650450355020046 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/0000755000175000017500000000000012650450355021246 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader.pm0000644000175000017500000004530012650450246023013 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader; use strict; use warnings; use base qw/DBIx::Class::Schema Class::Accessor::Grouped/; use MRO::Compat; use mro 'c3'; use Carp::Clan qw/^DBIx::Class/; use Scalar::Util 'weaken'; use Sub::Util 'set_subname'; use DBIx::Class::Schema::Loader::Utils 'array_eq'; use Try::Tiny; use Hash::Merge 'merge'; use namespace::clean; # Always remember to do all digits for the version even if they're 0 # i.e. first release of 0.XX *must* be 0.XX000. 
This avoids fBSD ports # brain damage and presumably various other packaging systems too our $VERSION = '0.07045'; __PACKAGE__->mk_group_accessors('inherited', qw/ _loader_args dump_to_dir _loader_invoked _loader loader_class naming use_namespaces /); __PACKAGE__->_loader_args({}); =encoding UTF-8 =head1 NAME DBIx::Class::Schema::Loader - Create a DBIx::Class::Schema based on a database =head1 SYNOPSIS ### use this module to generate a set of class files # in a script use DBIx::Class::Schema::Loader qw/ make_schema_at /; make_schema_at( 'My::Schema', { debug => 1, dump_directory => './lib', }, [ 'dbi:Pg:dbname="foo"', 'myuser', 'mypassword', { loader_class => 'MyLoader' } # optionally ], ); # from the command line or a shell script with dbicdump (distributed # with this module). Do `perldoc dbicdump` for usage. dbicdump -o dump_directory=./lib \ -o components='["InflateColumn::DateTime"]' \ -o debug=1 \ My::Schema \ 'dbi:Pg:dbname=foo' \ myuser \ mypassword ### or generate and load classes at runtime # note: this technique is not recommended # for use in production code package My::Schema; use base qw/DBIx::Class::Schema::Loader/; __PACKAGE__->loader_options( constraint => '^foo.*', # debug => 1, ); #### in application code elsewhere: use My::Schema; my $schema1 = My::Schema->connect( $dsn, $user, $password, $attrs); # -or- my $schema1 = "My::Schema"; $schema1->connection(as above); =head1 DESCRIPTION DBIx::Class::Schema::Loader automates the definition of a L by scanning database table definitions and setting up the columns, primary keys, unique constraints and relationships. See L for the C utility. DBIx::Class::Schema::Loader currently supports only the DBI storage type. It has explicit support for L, L, L, L, L, L, L, L, L (for Sybase ASE and MSSSQL), L (for MSSQL, MSAccess, Firebird and SQL Anywhere) L (for MSSQL and MSAccess) and L. 
Other DBI drivers may function to a greater or lesser degree with this loader, depending on how much of the DBI spec they implement, and how standard their implementation is. Patches to make other DBDs work correctly welcome. See L for notes on writing your own vendor-specific subclass for an unsupported DBD driver. This module requires L 0.08127 or later, and obsoletes the older L. See L for available options. =head1 METHODS =head2 loader The loader object, as class data on your Schema. For methods available see L and L. =cut sub loader { my $self = shift; $self->_loader(@_); } =head2 loader_class =over 4 =item Argument: $loader_class =back Set the loader class to be instantiated when L is called. If the classname starts with "::", "DBIx::Class::Schema::Loader" is prepended. Defaults to L (which must start with "::" when using L). This is mostly useful for subclassing existing loaders or in conjunction with L. =head2 loader_options =over 4 =item Argument: \%loader_options =back Example in Synopsis above demonstrates a few common arguments. For detailed information on all of the arguments, most of which are only useful in fairly complex scenarios, see the L documentation. If you intend to use C, you must call C before any connection is made, or embed the C in the connection information itself as shown below. Setting C after the connection has already been made is useless. =cut sub loader_options { my $self = shift; my %args = (ref $_[0] eq 'HASH') ? 
%{$_[0]} : @_; $self->_loader_args(\%args); $self; } sub _invoke_loader { my $self = shift; my $class = ref $self || $self; my $args = $self->_loader_args; # temporarily copy $self's storage to class my $class_storage = $class->storage; if (ref $self) { $class->storage($self->storage); $class->storage->set_schema($class); } $args->{schema} = $class; $args->{schema_class} = $class; $args->{dump_directory} ||= $self->dump_to_dir; $args->{naming} = $self->naming if $self->naming; $args->{use_namespaces} = $self->use_namespaces if defined $self->use_namespaces; # XXX this only works for relative storage_type, like ::DBI ... my $loader_class = $self->loader_class; if ($loader_class) { $loader_class = "DBIx::Class::Schema::Loader${loader_class}" if $loader_class =~ /^::/; $args->{loader_class} = $loader_class; }; my $impl = $loader_class || "DBIx::Class::Schema::Loader" . $self->storage_type; try { $self->ensure_class_loaded($impl) } catch { croak qq/Could not load loader_class "$impl": "$_"/; }; $class->loader($impl->new(%$args)); $class->loader->load; $class->_loader_invoked(1); # copy to $self if (ref $self) { $self->loader($class->loader); $self->_loader_invoked(1); $self->_merge_state_from($class); } # restore $class's storage $class->storage($class_storage); return $self; } # FIXME This needs to be moved into DBIC at some point, otherwise we are # maintaining things to do with DBIC guts, which we have no business of # maintaining. But at the moment it would be just dead code in DBIC, so we'll # maintain it here. 
sub _merge_state_from { my ($self, $from) = @_; my $orig_class_mappings = $self->class_mappings; my $orig_source_registrations = $self->source_registrations; $self->_copy_state_from($from); $self->class_mappings(merge($orig_class_mappings, $self->class_mappings)) if $orig_class_mappings; $self->source_registrations(merge($orig_source_registrations, $self->source_registrations)) if $orig_source_registrations; } sub _copy_state_from { my $self = shift; my ($from) = @_; # older DBIC's do not have this method if (try { DBIx::Class->VERSION('0.08197'); 1 }) { return $self->next::method(@_); } else { # this is a copy from DBIC git master pre 0.08197 $self->class_mappings({ %{$from->class_mappings} }); $self->source_registrations({ %{$from->source_registrations} }); foreach my $moniker ($from->sources) { my $source = $from->source($moniker); my $new = $source->new($source); # we use extra here as we want to leave the class_mappings as they are # but overwrite the source_registrations entry with the new source $self->register_extra_source($moniker => $new); } if ($from->storage) { $self->storage($from->storage); $self->storage->set_schema($self); } } } =head2 connection =over 4 =item Arguments: @args =item Return Value: $new_schema =back See L for basic usage. If the final argument is a hashref, and it contains the keys C or C, those keys will be deleted, and their values value will be used for the loader options or class, respectively, just as if set via the L or L methods above. The actual auto-loading operation (the heart of this module) will be invoked as soon as the connection information is defined. =cut sub connection { my $self = shift; my $class = ref $self || $self; if($_[-1] && ref $_[-1] eq 'HASH') { for my $option (qw/loader_class loader_options/) { if(my $value = delete $_[-1]->{$option}) { $self->$option($value); } } pop @_ if !keys %{$_[-1]}; } # Make sure we inherit from schema_base_class and load schema_components # before connecting. 
require DBIx::Class::Schema::Loader::Base; my $temp_loader = DBIx::Class::Schema::Loader::Base->new( %{ $self->_loader_args }, schema => $self, naming => 'current', use_namespaces => 1, ); my $modify_isa = 0; my @components; if ($temp_loader->schema_base_class || $temp_loader->schema_components) { @components = @{ $temp_loader->schema_components } if $temp_loader->schema_components; push @components, ('+'.$temp_loader->schema_base_class) if $temp_loader->schema_base_class; my $class_isa = do { no strict 'refs'; \@{"${class}::ISA"}; }; my @component_classes = map { /^\+/ ? substr($_, 1, length($_) - 1) : "DBIx::Class::$_" } @components; $modify_isa++ if not array_eq([ @$class_isa[0..(@components-1)] ], \@component_classes) } if ($modify_isa) { $class->load_components(@components); # This hack is necessary because we changed @ISA of $self through # ->load_components and we are now in a different place in the mro. no warnings 'redefine'; local *connection = set_subname __PACKAGE__.'::connection' => sub { my $self = shift; $self->next::method(@_); }; my @linear_isa = @{ mro::get_linear_isa($class) }; my $next_method; foreach my $i (1..$#linear_isa) { no strict 'refs'; $next_method = *{$linear_isa[$i].'::connection'}{CODE}; last if $next_method; } $self = $self->$next_method(@_); } else { $self = $self->next::method(@_); } if(!$class->_loader_invoked) { $self->_invoke_loader } return $self; } =head2 clone See L. =cut sub clone { my $self = shift; my $clone = $self->next::method(@_); if($clone->_loader_args) { $clone->_loader_args->{schema} = $clone; weaken($clone->_loader_args->{schema}); } $clone; } =head2 dump_to_dir =over 4 =item Argument: $directory =back Calling this as a class method on either L or any derived schema class will cause all schemas to dump manual versions of themselves to the named directory when they are loaded. 
In order to be effective, this must be set before defining a connection on this schema class or any derived object (as the loading happens as soon as both a connection and loader_options are set, and only once per class). See L for more details on the dumping mechanism. This can also be set at module import time via the import option C to L, where C is the target directory. Examples: # My::Schema isa DBIx::Class::Schema::Loader, and has connection info # hardcoded in the class itself: perl -MDBIx::Class::Schema::Loader=dump_to_dir:/foo/bar -MMy::Schema -e1 # Same, but no hard-coded connection, so we must provide one: perl -MDBIx::Class::Schema::Loader=dump_to_dir:/foo/bar -MMy::Schema -e 'My::Schema->connection("dbi:Pg:dbname=foo", ...)' # Or as a class method, as long as you get it done *before* defining a # connection on this schema class or any derived object: use My::Schema; My::Schema->dump_to_dir('/foo/bar'); My::Schema->connection(........); # Or as a class method on the DBIx::Class::Schema::Loader itself, which affects all # derived schemas use My::Schema; use My::OtherSchema; DBIx::Class::Schema::Loader->dump_to_dir('/foo/bar'); My::Schema->connection(.......); My::OtherSchema->connection(.......); # Another alternative to the above: use DBIx::Class::Schema::Loader qw| dump_to_dir:/foo/bar |; use My::Schema; use My::OtherSchema; My::Schema->connection(.......); My::OtherSchema->connection(.......); =cut sub import { my $self = shift; return if !@_; my $cpkg = (caller)[0]; foreach my $opt (@_) { if($opt =~ m{^dump_to_dir:(.*)$}) { $self->dump_to_dir($1) } elsif($opt eq 'make_schema_at') { no strict 'refs'; *{"${cpkg}::make_schema_at"} = \&make_schema_at; } elsif($opt eq 'naming') { no strict 'refs'; *{"${cpkg}::naming"} = sub { $self->naming(@_) }; } elsif($opt eq 'use_namespaces') { no strict 'refs'; *{"${cpkg}::use_namespaces"} = sub { $self->use_namespaces(@_) }; } } } =head2 make_schema_at =over 4 =item Arguments: $schema_class_name, \%loader_options, 
\@connect_info =item Return Value: $schema_class_name =back This function creates a DBIx::Class schema from an existing RDBMS schema. With the C option, generates a set of DBIx::Class classes from an existing database schema read from the given dsn. Without a C, creates schema classes in memory at runtime without generating on-disk class files. For a complete list of supported loader_options, see L The last hashref in the C<\@connect_info> can specify the L. This function can be imported in the usual way, as illustrated in these Examples: # Simple example, creates as a new class 'New::Schema::Name' in # memory in the running perl interpreter. use DBIx::Class::Schema::Loader qw/ make_schema_at /; make_schema_at( 'New::Schema::Name', { debug => 1 }, [ 'dbi:Pg:dbname="foo"','postgres','', { loader_class => 'MyLoader' } # optionally ], ); # Inside a script, specifying a dump directory in which to write # class files use DBIx::Class::Schema::Loader qw/ make_schema_at /; make_schema_at( 'New::Schema::Name', { debug => 1, dump_directory => './lib' }, [ 'dbi:Pg:dbname="foo"','postgres','', { loader_class => 'MyLoader' } # optionally ], ); The last hashref in the C<\@connect_info> is checked for loader arguments such as C and C, see L for more details. =cut sub make_schema_at { my ($target, $opts, $connect_info) = @_; { no strict 'refs'; @{$target . '::ISA'} = qw/DBIx::Class::Schema::Loader/; } $target->_loader_invoked(0); $target->loader_options($opts); my $temp_schema = $target->connect(@$connect_info); $target->storage($temp_schema->storage); $target->storage->set_schema($target); return $target; } =head2 rescan =over 4 =item Return Value: @new_monikers =back Re-scans the database for newly added tables since the initial load, and adds them to the schema at runtime, including relationships, etc. Does not process drops or changes. Returns a list of the new monikers added. 
=cut sub rescan { my $self = shift; $self->loader->rescan($self) } =head2 naming =over 4 =item Arguments: \%opts | $ver =back Controls the naming options for backward compatibility, see L for details. To upgrade a dynamic schema, use: __PACKAGE__->naming('current'); Can be imported into your dump script and called as a function as well: naming('v4'); =head2 use_namespaces =over 4 =item Arguments: 1|0 =back Controls the use_namespaces options for backward compatibility, see L for details. To upgrade a dynamic schema, use: __PACKAGE__->use_namespaces(1); Can be imported into your dump script and called as a function as well: use_namespaces(1); =head1 KNOWN ISSUES =head2 Multiple Database Schemas See L. =head1 ACKNOWLEDGEMENTS Matt S Trout, all of the #dbix-class folks, and everyone who's ever sent in a bug report or suggestion. Based on L by Sebastian Riedel Based upon the work of IKEBE Tomohiro =head1 AUTHORS Caelum: Rafael Kitover Dag-Erling Smørgrav Matias E. Fernandez SineSwiper: Brendan Byrd TSUNODA Kazuya acmoore: Andrew Moore alnewkirk: Al Newkirk andrewalker: André Walker angelixd: Paul C. Mantz arcanez: Justin Hunter ash: Ash Berlin blblack: Brandon Black bphillips: Brian Phillips btilly: Ben Tilly domm: Thomas Klausner gugu: Andrey Kostenko hobbs: Andrew Rodland ilmari: Dagfinn Ilmari MannsEker jhannah: Jay Hannah jnap: John Napiorkowski kane: Jos Boumans mattp: Matt Phillips mephinet: Philipp Gortan moritz: Moritz Lenz mst: Matt S. Trout mstratman: Mark A. Stratman oalders: Olaf Alders rbo: Robert Bohne rbuels: Robert Buels ribasushi: Peter Rabbitson schwern: Michael G. Schwern spb: Stephen Bennett timbunce: Tim Bunce waawaamilk: Nigel McNie ... and lots of other folks. If we forgot you, please write the current maintainer or RT. =head1 COPYRIGHT & LICENSE Copyright (c) 2006 - 2015 by the aforementioned L. This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=head1 SEE ALSO L, L, L, L =cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/0000755000175000017500000000000012650450355022454 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/0000755000175000017500000000000012650450355023052 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/Writing.pm0000644000175000017500000000400212650450246025026 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::Writing; use strict; use warnings; our $VERSION = '0.07045'; # Empty. POD only. =head1 NAME DBIx::Class::Schema::Loader::DBI::Writing - Loader subclass writing guide for DBI =head1 SYNOPSIS package DBIx::Class::Schema::Loader::DBI::Foo; # THIS IS JUST A TEMPLATE TO GET YOU STARTED. use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI'; use mro 'c3'; sub _table_uniq_info { my ($self, $table) = @_; # ... get UNIQUE info for $table somehow # and return a data structure that looks like this: return [ [ 'keyname' => [ 'colname' ] ], [ 'keyname2' => [ 'col1name', 'col2name' ] ], [ 'keyname3' => [ 'colname' ] ], ]; # Where the "keyname"'s are just unique identifiers, such as the # name of the unique constraint, or the names of the columns involved # concatenated if you wish. } sub _table_comment { my ( $self, $table ) = @_; return 'Comment'; } sub _column_comment { my ( $self, $table, $column_number ) = @_; return 'Col. comment'; } 1; =head1 DETAILS The only required method for new subclasses is C<_table_uniq_info>, as there is not (yet) any standardized, DBD-agnostic way for obtaining this information from DBI. The base DBI Loader contains generic methods that *should* work for everything else in theory, although in practice some DBDs need to override one or more of the other methods. The other methods one might likely want to override are: C<_table_pk_info>, C<_table_fk_info>, C<_tables_list> and C<_extra_column_info>. 
See the included DBD drivers for examples of these. To import comments from the database you need to implement C<_table_comment>, C<_column_comment> =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/DB2.pm0000644000175000017500000001641112650450246023761 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::DB2; use strict; use warnings; use base qw/ DBIx::Class::Schema::Loader::DBI::Component::QuotedDefault DBIx::Class::Schema::Loader::DBI /; use mro 'c3'; use List::Util 'any'; use namespace::clean; use DBIx::Class::Schema::Loader::Table (); our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::DB2 - DBIx::Class::Schema::Loader::DBI DB2 Implementation. =head1 DESCRIPTION See L and L. =cut sub _system_schemas { my $self = shift; return ($self->next::method(@_), qw/ SYSCAT SYSIBM SYSIBMADM SYSPUBLIC SYSSTAT SYSTOOLS /); } sub _setup { my $self = shift; $self->next::method(@_); my $ns = $self->name_sep; $self->db_schema([ $self->dbh->selectrow_array(<<"EOF", {}) ]) unless $self->db_schema; SELECT CURRENT_SCHEMA FROM sysibm${ns}sysdummy1 EOF if (not defined $self->preserve_case) { $self->preserve_case(0); } elsif ($self->preserve_case) { $self->schema->storage->sql_maker->quote_char('"'); $self->schema->storage->sql_maker->name_sep($ns); } } sub _table_uniq_info { my ($self, $table) = @_; my @uniqs; my $sth = $self->{_cache}->{db2_uniq} ||= $self->dbh->prepare(<<'EOF'); SELECT kcu.colname, kcu.constname, kcu.colseq FROM syscat.tabconst as tc JOIN syscat.keycoluse as kcu ON tc.constname = kcu.constname AND tc.tabschema = kcu.tabschema AND tc.tabname = kcu.tabname WHERE tc.tabschema = ? and tc.tabname = ? 
and tc.type = 'U' EOF $sth->execute($table->schema, $table->name); my %keydata; while(my $row = $sth->fetchrow_arrayref) { my ($col, $constname, $seq) = @$row; push(@{$keydata{$constname}}, [ $seq, $self->_lc($col) ]); } foreach my $keyname (sort keys %keydata) { my @ordered_cols = map { $_->[1] } sort { $a->[0] <=> $b->[0] } @{$keydata{$keyname}}; push(@uniqs, [ $keyname => \@ordered_cols ]); } $sth->finish; return \@uniqs; } sub _table_fk_info { my ($self, $table) = @_; my $sth = $self->{_cache}->{db2_fk} ||= $self->dbh->prepare(<<'EOF'); SELECT tc.constname, sr.reftabschema, sr.reftabname, kcu.colname, rkcu.colname, kcu.colseq, sr.deleterule, sr.updaterule FROM syscat.tabconst tc JOIN syscat.keycoluse kcu ON tc.constname = kcu.constname AND tc.tabschema = kcu.tabschema AND tc.tabname = kcu.tabname JOIN syscat.references sr ON tc.constname = sr.constname AND tc.tabschema = sr.tabschema AND tc.tabname = sr.tabname JOIN syscat.keycoluse rkcu ON sr.refkeyname = rkcu.constname AND sr.reftabschema = rkcu.tabschema AND sr.reftabname = rkcu.tabname AND kcu.colseq = rkcu.colseq WHERE tc.tabschema = ? AND tc.tabname = ? 
AND tc.type = 'F'; EOF $sth->execute($table->schema, $table->name); my %rels; my %rules = ( A => 'NO ACTION', C => 'CASCADE', N => 'SET NULL', R => 'RESTRICT', ); COLS: while (my @row = $sth->fetchrow_array) { my ($fk, $remote_schema, $remote_table, $local_col, $remote_col, $colseq, $delete_rule, $update_rule) = @row; if (not exists $rels{$fk}) { if ($self->db_schema && $self->db_schema->[0] ne '%' && (not any { $_ eq $remote_schema } @{ $self->db_schema })) { next COLS; } $rels{$fk}{remote_table} = DBIx::Class::Schema::Loader::Table->new( loader => $self, name => $remote_table, schema => $remote_schema, ); } $rels{$fk}{local_columns}[$colseq-1] = $self->_lc($local_col); $rels{$fk}{remote_columns}[$colseq-1] = $self->_lc($remote_col); $rels{$fk}{attrs} ||= { on_delete => $rules{$delete_rule}, on_update => $rules{$update_rule}, is_deferrable => 1, # DB2 has no deferrable constraints }; } return [ values %rels ]; } # DBD::DB2 doesn't follow the DBI API for ->tables (pre 1.85), but since its # backwards compatible we don't change it. # DBD::DB2 1.85 and beyond default TABLE_NAME to '', previously defaulted to # '%'. so we supply it. sub _dbh_tables { my ($self, $schema) = @_; return $self->dbh->tables($schema ? { TABLE_SCHEM => $schema, TABLE_NAME => '%' } : undef); } sub _dbh_table_info { my $self = shift; local $^W = 0; # shut up undef warning from DBD::DB2 $self->next::method(@_); } sub _columns_info_for { my $self = shift; my ($table) = @_; my $result = $self->next::method(@_); while (my ($col, $info) = each %$result) { # check for identities my $sth = $self->dbh->prepare_cached( q{ SELECT COUNT(*) FROM syscat.columns WHERE tabschema = ? AND tabname = ? AND colname = ? 
AND identity = 'Y' AND generated != '' }, {}, 1); $sth->execute($table->schema, $table->name, $self->_uc($col)); if ($sth->fetchrow_array) { $info->{is_auto_increment} = 1; } my $data_type = $info->{data_type}; if ($data_type !~ /^(?:(?:var)?(?:char|graphic)|decimal)\z/i) { delete $info->{size}; } if ($data_type eq 'double') { $info->{data_type} = 'double precision'; } elsif ($data_type eq 'decimal') { no warnings 'uninitialized'; $info->{data_type} = 'numeric'; my @size = @{ $info->{size} || [] }; if ($size[0] == 5 && $size[1] == 0) { delete $info->{size}; } } elsif ($data_type =~ /^(?:((?:var)?char) \(\) for bit data|(long varchar) for bit data)\z/i) { my $base_type = lc($1 || $2); (my $original_type = $data_type) =~ s/[()]+ //; $info->{original}{data_type} = $original_type; if ($base_type eq 'long varchar') { $info->{data_type} = 'blob'; } else { if ($base_type eq 'char') { $info->{data_type} = 'binary'; } elsif ($base_type eq 'varchar') { $info->{data_type} = 'varbinary'; } my ($size) = $self->dbh->selectrow_array(<<'EOF', {}, $table->schema, $table->name, $self->_uc($col)); SELECT length FROM syscat.columns WHERE tabschema = ? AND tabname = ? AND colname = ? EOF $info->{size} = $size if $size; } } if ((eval { lc ${ $info->{default_value} } }||'') =~ /^current (date|time(?:stamp)?)\z/i) { my $type = lc($1); ${ $info->{default_value} } = 'current_timestamp'; my $orig_deflt = "current $type"; $info->{original}{default_value} = \$orig_deflt; } } return $result; } =head1 SEE ALSO L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/SQLAnywhere.pm0000644000175000017500000002165312650450246025560 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::SQLAnywhere; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI::Component::QuotedDefault'; use mro 'c3'; use List::Util 'any'; use namespace::clean; use DBIx::Class::Schema::Loader::Table (); our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::SQLAnywhere - DBIx::Class::Schema::Loader::DBI SQL Anywhere Implementation. =head1 DESCRIPTION See L and L. =cut sub _system_schemas { return (qw/dbo SYS diagnostics rs_systabgroup SA_DEBUG/); } sub _setup { my $self = shift; $self->next::method(@_); $self->preserve_case(1) unless defined $self->preserve_case; $self->schema->storage->sql_maker->quote_char('"'); $self->schema->storage->sql_maker->name_sep('.'); $self->db_schema([($self->dbh->selectrow_array('select user'))[0]]) unless $self->db_schema; if (ref $self->db_schema eq 'ARRAY' && $self->db_schema->[0] eq '%') { my @users = grep { my $uname = $_; not any { $_ eq $uname } $self->_system_schemas } @{ $self->dbh->selectcol_arrayref('select user_name from sysuser') }; $self->db_schema(\@users); } } sub _tables_list { my ($self) = @_; my @tables; foreach my $schema (@{ $self->db_schema }) { my $sth = $self->dbh->prepare(<<'EOF'); SELECT t.table_name name FROM systab t JOIN sysuser u ON t.creator = u.user_id WHERE u.user_name = ? 
EOF $sth->execute($schema); my @table_names = map @$_, @{ $sth->fetchall_arrayref }; foreach my $table_name (@table_names) { push @tables, DBIx::Class::Schema::Loader::Table->new( loader => $self, name => $table_name, schema => $schema, ); } } return $self->_filter_tables(\@tables); } sub _columns_info_for { my $self = shift; my ($table) = @_; my $result = $self->next::method(@_); my $dbh = $self->schema->storage->dbh; while (my ($col, $info) = each %$result) { my $def = $info->{default_value}; if (ref $def eq 'SCALAR' && $$def eq 'autoincrement') { delete $info->{default_value}; $info->{is_auto_increment} = 1; } my ($user_type) = $dbh->selectrow_array(<<'EOF', {}, $table->schema, $table->name, lc($col)); SELECT ut.type_name FROM systabcol tc JOIN systab t ON tc.table_id = t.table_id JOIN sysuser u ON t.creator = u.user_id JOIN sysusertype ut ON tc.user_type = ut.type_id WHERE u.user_name = ? AND t.table_name = ? AND lower(tc.column_name) = ? EOF $info->{data_type} = $user_type if defined $user_type; if ($info->{data_type} eq 'double') { $info->{data_type} = 'double precision'; } if ($info->{data_type} =~ /^(?:char|varchar|binary|varbinary)\z/ && ref($info->{size}) eq 'ARRAY') { $info->{size} = $info->{size}[0]; } elsif ($info->{data_type} !~ /^(?:char|varchar|binary|varbinary|numeric|decimal)\z/) { delete $info->{size}; } my $sth = $dbh->prepare(<<'EOF'); SELECT tc.width, tc.scale FROM systabcol tc JOIN systab t ON t.table_id = tc.table_id JOIN sysuser u ON t.creator = u.user_id WHERE u.user_name = ? AND t.table_name = ? AND lower(tc.column_name) = ? EOF $sth->execute($table->schema, $table->name, lc($col)); my ($width, $scale) = $sth->fetchrow_array; $sth->finish; if ($info->{data_type} =~ /^(?:numeric|decimal)\z/) { # We do not check for the default precision/scale, because they can be changed as PUBLIC database options. 
$info->{size} = [$width, $scale]; } elsif ($info->{data_type} =~ /^(?:n(?:varchar|char) | varbit)\z/x) { $info->{size} = $width; } elsif ($info->{data_type} eq 'float') { $info->{data_type} = 'real'; } if ((eval { lc ${ $info->{default_value} } }||'') eq 'current timestamp') { ${ $info->{default_value} } = 'current_timestamp'; my $orig_deflt = 'current timestamp'; $info->{original}{default_value} = \$orig_deflt; } } return $result; } sub _table_pk_info { my ($self, $table) = @_; local $self->dbh->{FetchHashKeyName} = 'NAME_lc'; my $sth = $self->dbh->prepare(qq{sp_pkeys ?, ?}); $sth->execute($table->name, $table->schema); my @keydata; while (my $row = $sth->fetchrow_hashref) { push @keydata, $self->_lc($row->{column_name}); } return \@keydata; } my %sqlany_rules = ( C => 'CASCADE', D => 'SET DEFAULT', N => 'SET NULL', R => 'RESTRICT', ); sub _table_fk_info { my ($self, $table) = @_; my ($local_cols, $remote_cols, $remote_table, $attrs, @rels); my $sth = $self->dbh->prepare(<<'EOF'); SELECT fki.index_name fk_name, fktc.column_name local_column, pku.user_name remote_schema, pkt.table_name remote_table, pktc.column_name remote_column, on_delete.referential_action, on_update.referential_action FROM sysfkey fk JOIN ( select foreign_table_id, foreign_index_id, row_number() over (partition by foreign_table_id order by foreign_index_id) foreign_key_num from sysfkey ) fkid ON fkid.foreign_table_id = fk.foreign_table_id and fkid.foreign_index_id = fk.foreign_index_id JOIN systab pkt ON fk.primary_table_id = pkt.table_id JOIN sysuser pku ON pkt.creator = pku.user_id JOIN systab fkt ON fk.foreign_table_id = fkt.table_id JOIN sysuser fku ON fkt.creator = fku.user_id JOIN sysidx pki ON fk.primary_table_id = pki.table_id AND fk.primary_index_id = pki.index_id JOIN sysidx fki ON fk.foreign_table_id = fki.table_id AND fk.foreign_index_id = fki.index_id JOIN sysidxcol fkic ON fkt.table_id = fkic.table_id AND fki.index_id = fkic.index_id JOIN systabcol pktc ON pkt.table_id = 
pktc.table_id AND fkic.primary_column_id = pktc.column_id JOIN systabcol fktc ON fkt.table_id = fktc.table_id AND fkic.column_id = fktc.column_id LEFT JOIN systrigger on_delete ON on_delete.foreign_table_id = fkt.table_id AND on_delete.foreign_key_id = fkid.foreign_key_num AND on_delete.event = 'D' LEFT JOIN systrigger on_update ON on_update.foreign_table_id = fkt.table_id AND on_update.foreign_key_id = fkid.foreign_key_num AND on_update.event = 'C' WHERE fku.user_name = ? AND fkt.table_name = ? ORDER BY fk.primary_table_id, pktc.column_id EOF $sth->execute($table->schema, $table->name); while (my ($fk, $local_col, $remote_schema, $remote_tab, $remote_col, $on_delete, $on_update) = $sth->fetchrow_array) { push @{$local_cols->{$fk}}, $self->_lc($local_col); push @{$remote_cols->{$fk}}, $self->_lc($remote_col); $remote_table->{$fk} = DBIx::Class::Schema::Loader::Table->new( loader => $self, name => $remote_tab, schema => $remote_schema, ); $attrs->{$fk} ||= { on_delete => $sqlany_rules{$on_delete||''} || 'RESTRICT', on_update => $sqlany_rules{$on_update||''} || 'RESTRICT', # We may be able to use the value of the 'CHECK ON COMMIT' option, as it seems # to be some sort of workaround for lack of deferred constraints. Unclear on # how good of a substitute it is, and it requires the 'RESTRICT' rule. Also it # only works for INSERT and UPDATE, not DELETE. Will get back to this. 
is_deferrable => 1, }; } foreach my $fk (keys %$remote_table) { push @rels, { local_columns => $local_cols->{$fk}, remote_columns => $remote_cols->{$fk}, remote_table => $remote_table->{$fk}, attrs => $attrs->{$fk}, }; } return \@rels; } sub _table_uniq_info { my ($self, $table) = @_; my $sth = $self->dbh->prepare(<<'EOF'); SELECT c.constraint_name, tc.column_name FROM sysconstraint c JOIN systab t ON c.table_object_id = t.object_id JOIN sysuser u ON t.creator = u.user_id JOIN sysidx i ON c.ref_object_id = i.object_id JOIN sysidxcol ic ON i.table_id = ic.table_id AND i.index_id = ic.index_id JOIN systabcol tc ON ic.table_id = tc.table_id AND ic.column_id = tc.column_id WHERE c.constraint_type = 'U' AND u.user_name = ? AND t.table_name = ? EOF $sth->execute($table->schema, $table->name); my $constraints; while (my ($constraint_name, $column) = $sth->fetchrow_array) { push @{$constraints->{$constraint_name}}, $self->_lc($column); } return [ map { [ $_ => $constraints->{$_} ] } sort keys %$constraints ]; } =head1 SEE ALSO L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sw=4 sts=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/Pg.pm0000644000175000017500000002664312650450246023770 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::Pg; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI::Component::QuotedDefault'; use mro 'c3'; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::Pg - DBIx::Class::Schema::Loader::DBI PostgreSQL Implementation. =head1 DESCRIPTION See L and L. 
=cut

# Driver setup: default to the 'public' schema and to lowercased identifiers
# (preserve_case => 0) unless the user asked otherwise; with preserve_case on,
# enable identifier quoting in the SQL maker.
sub _setup {
    my $self = shift;

    $self->next::method(@_);

    $self->{db_schema} ||= ['public'];

    if (not defined $self->preserve_case) {
        $self->preserve_case(0);
    }
    elsif ($self->preserve_case) {
        $self->schema->storage->sql_maker->quote_char('"');
        $self->schema->storage->sql_maker->name_sep('.');
    }
}

# Add pg_catalog to the list of schemas that are never introspected.
sub _system_schemas {
    my $self = shift;

    return ($self->next::method(@_), 'pg_catalog');
}

# Map pg_constraint.confdeltype / confupdtype single-character codes to the
# SQL standard referential action names.
my %pg_rules = (
    a => 'NO ACTION',
    r => 'RESTRICT',
    c => 'CASCADE',
    n => 'SET NULL',
    d => 'SET DEFAULT',
);

# Return foreign key metadata for $table as an arrayref of hashrefs with
# local_columns / remote_columns / remote_table / attrs keys, queried straight
# from the pg_catalog tables (one row per constraint column).
sub _table_fk_info {
    my ($self, $table) = @_;

    my $sth = $self->dbh->prepare_cached(<<"EOF");
select constr.conname, to_ns.nspname, to_class.relname, from_col.attname, to_col.attname,
       constr.confdeltype, constr.confupdtype, constr.condeferrable
from pg_catalog.pg_constraint constr
join pg_catalog.pg_namespace from_ns on constr.connamespace = from_ns.oid
join pg_catalog.pg_class from_class on constr.conrelid = from_class.oid and from_class.relnamespace = from_ns.oid
join pg_catalog.pg_class to_class on constr.confrelid = to_class.oid
join pg_catalog.pg_namespace to_ns on to_class.relnamespace = to_ns.oid
-- can't do unnest() until 8.4, so join against a series table instead
join pg_catalog.generate_series(1, pg_catalog.current_setting('max_index_keys')::integer) colnum(i)
     on colnum.i <= pg_catalog.array_upper(constr.conkey,1)
join pg_catalog.pg_attribute to_col
     on to_col.attrelid = constr.confrelid
     and to_col.attnum = constr.confkey[colnum.i]
join pg_catalog.pg_attribute from_col
     on from_col.attrelid = constr.conrelid
     and from_col.attnum = constr.conkey[colnum.i]
where from_ns.nspname = ?
    and from_class.relname = ?
    and from_class.relkind = 'r'
    and constr.contype = 'f'
order by constr.conname, colnum.i
EOF

    $sth->execute($table->schema, $table->name);

    my %rels;

    while (my ($fk, $remote_schema, $remote_table, $col, $remote_col,
               $delete_rule, $update_rule, $is_deferrable) = $sth->fetchrow_array) {
        push @{ $rels{$fk}{local_columns}  }, $self->_lc($col);
        push @{ $rels{$fk}{remote_columns} }, $self->_lc($remote_col);

        $rels{$fk}{remote_table} = DBIx::Class::Schema::Loader::Table->new(
            loader => $self,
            name   => $remote_table,
            schema => $remote_schema,
        ) unless exists $rels{$fk}{remote_table};

        $rels{$fk}{attrs} ||= {
            on_delete     => $pg_rules{$delete_rule},
            on_update     => $pg_rules{$update_rule},
            is_deferrable => $is_deferrable,
        };
    }

    return [ map { $rels{$_} } sort keys %rels ];
}

# Return unique constraint info as [ [ $index_name => \@columns ], ... ].
# Falls back to a hand-rolled pg_index query for ancient DBD::Pg versions.
sub _table_uniq_info {
    my ($self, $table) = @_;

    # Use the default support if available
    return $self->next::method($table)
        if $DBD::Pg::VERSION >= 1.50;

    my @uniqs;

    # Most of the SQL here is mostly based on
    # Rose::DB::Object::Metadata::Auto::Pg, after some prodding from
    # John Siracusa to use his superior SQL code :)
    my $attr_sth = $self->{_cache}->{pg_attr_sth} ||= $self->dbh->prepare(
        q{SELECT attname FROM pg_catalog.pg_attribute
          WHERE attrelid = ? AND attnum = ?}
    );

    my $uniq_sth = $self->{_cache}->{pg_uniq_sth} ||= $self->dbh->prepare(
        q{SELECT x.indrelid, i.relname, x.indkey
          FROM pg_catalog.pg_index x
          JOIN pg_catalog.pg_class c ON c.oid = x.indrelid
          JOIN pg_catalog.pg_class i ON i.oid = x.indexrelid
          JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
          WHERE x.indisunique = 't'
            AND x.indpred IS NULL
            AND c.relkind = 'r'
            AND i.relkind = 'i'
            AND n.nspname = ?
            AND c.relname = ?
          ORDER BY i.relname}
    );

    $uniq_sth->execute($table->schema, $table->name);
    while (my $row = $uniq_sth->fetchrow_arrayref) {
        my ($tableid, $indexname, $col_nums) = @$row;
        $col_nums =~ s/^\s+//;
        my @col_nums = split(/\s+/, $col_nums);
        my @col_names;

        # indkey holds attribute numbers; resolve each to a column name
        foreach (@col_nums) {
            $attr_sth->execute($tableid, $_);
            my $name_aref = $attr_sth->fetchrow_arrayref;
            push(@col_names, $self->_lc($name_aref->[0])) if $name_aref;
        }

        # skip indexes with missing column names (e.g. expression indexes)
        if (@col_names == @col_nums) {
            push(@uniqs, [ $indexname => \@col_names ]);
        }
    }

    return \@uniqs;
}

# Table comment: try the generic loader mechanism first, then fall back to
# the comment stored in the Pg catalog via COMMENT ON TABLE.
sub _table_comment {
    my $self = shift;
    my ($table) = @_;

    my $table_comment = $self->next::method(@_);
    return $table_comment if $table_comment;

    ($table_comment) = $self->dbh->selectrow_array(<<'EOF', {}, $table->name, $table->schema);
SELECT pg_catalog.obj_description(oid)
FROM pg_catalog.pg_class
WHERE relname=? AND relnamespace=(SELECT oid FROM pg_catalog.pg_namespace WHERE nspname=?)
EOF

    return $table_comment;
}

# Column comment: generic mechanism first, then COMMENT ON COLUMN from the
# catalog (col_description takes the table oid plus the column's ordinal).
sub _column_comment {
    my $self = shift;
    my ($table, $column_number, $column_name) = @_;

    my $column_comment = $self->next::method(@_);
    return $column_comment if $column_comment;

    return $self->dbh->selectrow_array(<<'EOF', {}, $column_number, $table->name, $table->schema);
SELECT pg_catalog.col_description(oid, ?)
FROM pg_catalog.pg_class
WHERE relname=? AND relnamespace=(SELECT oid FROM pg_catalog.pg_namespace WHERE nspname=?)
EOF
}

# Make sure data_type's that don't need it don't have a 'size' column_info, and
# set the correct precision for datetime and varbit types.
sub _columns_info_for {
    my $self = shift;
    my ($table) = @_;

    my $result = $self->next::method(@_);

    while (my ($col, $info) = each %$result) {
        my $data_type = $info->{data_type};

        # these types are fixed size
        # XXX should this be a negative match?
        if ($data_type =~ /^(?:bigint|int8|bigserial|serial8|bool(?:ean)?|box|bytea|cidr|circle|date|double precision|float8|inet|integer|int|int4|line|lseg|macaddr|money|path|point|polygon|real|float4|smallint|int2|serial|serial4|text)\z/i) {
            delete $info->{size};
        }
        # for datetime types, check if it has a precision or not
        elsif ($data_type =~ /^(?:interval|time|timestamp)\b/i) {
            if (lc($data_type) eq 'timestamp without time zone') {
                $info->{data_type} = 'timestamp';
            }
            elsif (lc($data_type) eq 'time without time zone') {
                $info->{data_type} = 'time';
            }

            # NOTE(review): this heredoc call was garbled in the archived
            # text and has been reconstructed from the surviving SQL body.
            my ($precision) = $self->schema->storage->dbh
                ->selectrow_array(<<'EOF', {}, $table->name, $col);
SELECT datetime_precision
FROM information_schema.columns
WHERE table_name = ? and column_name = ?
EOF

            if ($data_type =~ /^time\b/i) {
                if ((not defined $precision) || $precision !~ /^\d/) {
                    delete $info->{size};
                }
                else {
                    my ($integer_datetimes) = $self->dbh
                        ->selectrow_array('show integer_datetimes');

                    # with integer datetimes the server maxes out at
                    # microsecond (6) precision, otherwise 10
                    my $max_precision = $integer_datetimes =~ /^on\z/i ? 6 : 10;

                    if ($precision == $max_precision) {
                        delete $info->{size};
                    }
                    else {
                        $info->{size} = $precision;
                    }
                }
            }
            elsif ((not defined $precision) || $precision !~ /^\d/ || $precision == 6) {
                delete $info->{size};
            }
            else {
                $info->{size} = $precision;
            }
        }
        elsif ($data_type =~ /^(?:bit(?: varying)?|varbit)\z/i) {
            $info->{data_type} = 'varbit' if $data_type =~ /var/i;

            # NOTE(review): reconstructed heredoc call (archive corruption).
            my ($precision) = $self->dbh->selectrow_array(<<'EOF', {}, $table->name, $col);
SELECT character_maximum_length
FROM information_schema.columns
WHERE table_name = ? and column_name = ?
EOF

            $info->{size} = $precision if $precision;

            $info->{size} = 1 if (not $precision) && lc($data_type) eq 'bit';
        }
        elsif ($data_type =~ /^(?:numeric|decimal)\z/i && (my $size = $info->{size})) {
            $size =~ s/\s*//g;

            my ($scale, $precision) = split /,/, $size;

            $info->{size} = [ $precision, $scale ];
        }
        elsif (lc($data_type) eq 'character varying') {
            $info->{data_type} = 'varchar';

            # unsized varchar is effectively unlimited: represent as text
            if (not $info->{size}) {
                $info->{data_type}           = 'text';
                $info->{original}{data_type} = 'varchar';
            }
        }
        elsif (lc($data_type) eq 'character') {
            $info->{data_type} = 'char';
        }
        else {
            # unknown type name: check whether it is a user-defined enum
            # NOTE(review): reconstructed heredoc call (archive corruption);
            # the typname lookup should be confirmed against upstream.
            my ($typetype) = $self->schema->storage->dbh
                ->selectrow_array(<<'EOF', {}, $data_type);
SELECT typtype
FROM pg_catalog.pg_type
WHERE typname = ?
EOF
            if ($typetype && $typetype eq 'e') {
                # enumsortorder only exists from 9.1 onwards
                my $order_column = $self->dbh->{pg_server_version} >= 90100 ? 'enumsortorder' : 'oid';

                $info->{extra}{list} = $self->dbh
                    ->selectcol_arrayref(<<"EOF", {}, $info->{data_type});
SELECT e.enumlabel
FROM pg_catalog.pg_enum e
JOIN pg_catalog.pg_type t ON t.oid = e.enumtypid
WHERE t.typname = ?
ORDER BY e.$order_column
EOF

                # Store its original name in extra for SQLT to pick up.
                $info->{extra}{custom_type_name} = $info->{data_type};

                $info->{data_type} = 'enum';

                delete $info->{size};
            }
        }

        # process SERIAL columns
        if (ref($info->{default_value}) eq 'SCALAR'
                && ${ $info->{default_value} } =~ /\bnextval\('([^:]+)'/i) {
            $info->{is_auto_increment} = 1;
            $info->{sequence}          = $1;
            delete $info->{default_value};
        }

        # alias now() to current_timestamp for deploying to other DBs
        if ((eval { lc ${ $info->{default_value} } }||'') eq 'now()') {
            # do not use a ref to a constant, that breaks Data::Dump output
            ${$info->{default_value}} = 'current_timestamp';

            my $now = 'now()';
            $info->{original}{default_value} = \$now;
        }

        # detect 0/1 for booleans and rewrite
        if ($data_type =~ /^bool/i && exists $info->{default_value}) {
            if ($info->{default_value} eq '0') {
                my $false = 'false';
                $info->{default_value} = \$false;
            }
            elsif ($info->{default_value} eq '1') {
                my $true = 'true';
                $info->{default_value} = \$true;
            }
        }
    }

    return $result;
}

=head1 SEE ALSO

L<DBIx::Class::Schema::Loader>, L<DBIx::Class::Schema::Loader::Base>,
L<DBIx::Class::Schema::Loader::DBI>

=head1 AUTHORS

See L<DBIx::Class::Schema::Loader/AUTHORS>.
=head1 LICENSE

This library is free software; you can redistribute it and/or modify it under
the same terms as Perl itself.

=cut

1;
DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/ADO.pm0000644000175000017500000000270212650450246024013 0ustar  ilmariilmari
package DBIx::Class::Schema::Loader::DBI::ADO;

use strict;
use warnings;
use base 'DBIx::Class::Schema::Loader::DBI';
use mro 'c3';

our $VERSION = '0.07045';

=head1 NAME

DBIx::Class::Schema::Loader::DBI::ADO - L<DBD::ADO> proxy

=head1 DESCRIPTION

Reblesses into an C<::ADO::> class when connecting via L<DBD::ADO>.

See L<DBIx::Class::Schema::Loader::Base> for usage information.

=cut

# Rebless this generic ADO loader into the backend-specific ::ADO:: subclass
# (if one exists) based on the DBMS name reported by the driver.
sub _rebless {
    my $self = shift;

    # only rebless the base class itself, never an already-reblessed subclass
    return if ref $self ne __PACKAGE__;

    my $dbh    = $self->schema->storage->dbh;
    my $dbtype = eval { $dbh->get_info(17) };    # SQL_DBMS_NAME

    unless ($@) {
        # Translate the backend name into a perl identifier
        $dbtype =~ s/\W/_/gi;

        my $class = "DBIx::Class::Schema::Loader::DBI::ADO::${dbtype}";
        if ($self->load_optional_class($class) && !$self->isa($class)) {
            bless $self, $class;
            $self->_rebless;
        }
    }
}

# Wrap the generic table filter, suppressing warnings for the duration.
sub _filter_tables {
    my $self = shift;

    local $^W = 0;    # turn off exception printing from Win32::OLE

    $self->next::method(@_);
}

=head1 SEE ALSO

L<DBIx::Class::Schema::Loader::DBI::ADO::Microsoft_SQL_Server>,
L<DBIx::Class::Schema::Loader::DBI::ADO::MS_Jet>,
L<DBIx::Class::Schema::Loader>, L<DBIx::Class::Schema::Loader::Base>,
L<DBIx::Class::Schema::Loader::DBI>

=head1 AUTHORS

See L<DBIx::Class::Schema::Loader/AUTHORS>.

=head1 LICENSE

This library is free software; you can redistribute it and/or modify it under
the same terms as Perl itself.

=cut

1;
DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/InterBase.pm0000644000175000017500000002517512650450246025275 0ustar  ilmariilmari
package DBIx::Class::Schema::Loader::DBI::InterBase;

use strict;
use warnings;
use base qw/DBIx::Class::Schema::Loader::DBI/;
use mro 'c3';
use Carp::Clan qw/^DBIx::Class/;
use List::Util 'first';
use namespace::clean;
use DBIx::Class::Schema::Loader::Table ();

our $VERSION = '0.07045';

sub _supports_db_schema { 0 }

=head1 NAME

DBIx::Class::Schema::Loader::DBI::InterBase - DBIx::Class::Schema::Loader::DBI
Firebird Implementation.

=head1 DESCRIPTION

See L<DBIx::Class::Schema::Loader> and L<DBIx::Class::Schema::Loader::Base>.
=head1 COLUMN NAME CASE ISSUES By default column names from unquoted DDL will be generated in lowercase, for consistency with other backends. Set the L option to true if you would like to have column names in the internal case, which is uppercase for DDL that uses unquoted identifiers. Do not use quoting (the L option in L when in the default C<< preserve_case => 0 >> mode. Be careful to also not use any SQL reserved words in your DDL. This will generate lowercase column names (as opposed to the actual uppercase names) in your Result classes that will only work with quoting off. Mixed-case table and column names will be ignored when this option is on and will not work with quoting turned off. =cut sub _setup { my $self = shift; $self->next::method(@_); if (not defined $self->preserve_case) { $self->preserve_case(0); } elsif ($self->preserve_case) { $self->schema->storage->sql_maker->quote_char('"'); $self->schema->storage->sql_maker->name_sep('.'); } if ($self->db_schema) { carp "db_schema is not supported on Firebird"; if ($self->db_schema->[0] eq '%') { $self->db_schema(undef); } } } sub _table_pk_info { my ($self, $table) = @_; my $sth = $self->dbh->prepare(<<'EOF'); SELECT iseg.rdb$field_name FROM rdb$relation_constraints rc JOIN rdb$index_segments iseg ON rc.rdb$index_name = iseg.rdb$index_name WHERE rc.rdb$constraint_type = 'PRIMARY KEY' and rc.rdb$relation_name = ? 
ORDER BY iseg.rdb$field_position EOF $sth->execute($table->name); my @keydata; while (my ($col) = $sth->fetchrow_array) { s/^\s+//, s/\s+\z// for $col; push @keydata, $self->_lc($col); } return \@keydata; } sub _table_fk_info { my ($self, $table) = @_; my ($local_cols, $remote_cols, $remote_table, @rels); my $sth = $self->dbh->prepare(<<'EOF'); SELECT rc.rdb$constraint_name fk, iseg.rdb$field_name local_col, ri.rdb$relation_name remote_tab, riseg.rdb$field_name remote_col FROM rdb$relation_constraints rc JOIN rdb$index_segments iseg ON rc.rdb$index_name = iseg.rdb$index_name JOIN rdb$indices li ON rc.rdb$index_name = li.rdb$index_name JOIN rdb$indices ri ON li.rdb$foreign_key = ri.rdb$index_name JOIN rdb$index_segments riseg ON iseg.rdb$field_position = riseg.rdb$field_position and ri.rdb$index_name = riseg.rdb$index_name WHERE rc.rdb$constraint_type = 'FOREIGN KEY' and rc.rdb$relation_name = ? ORDER BY iseg.rdb$field_position EOF $sth->execute($table->name); while (my ($fk, $local_col, $remote_tab, $remote_col) = $sth->fetchrow_array) { s/^\s+//, s/\s+\z// for $fk, $local_col, $remote_tab, $remote_col; push @{$local_cols->{$fk}}, $self->_lc($local_col); push @{$remote_cols->{$fk}}, $self->_lc($remote_col); $remote_table->{$fk} = DBIx::Class::Schema::Loader::Table->new( loader => $self, name => $remote_tab, ($self->db_schema ? ( schema => $self->db_schema->[0], ignore_schema => 1, ) : ()), ); } foreach my $fk (sort keys %$remote_table) { push @rels, { local_columns => $local_cols->{$fk}, remote_columns => $remote_cols->{$fk}, remote_table => $remote_table->{$fk}, }; } return \@rels; } sub _table_uniq_info { my ($self, $table) = @_; my $sth = $self->dbh->prepare(<<'EOF'); SELECT rc.rdb$constraint_name, iseg.rdb$field_name FROM rdb$relation_constraints rc JOIN rdb$index_segments iseg ON rc.rdb$index_name = iseg.rdb$index_name WHERE rc.rdb$constraint_type = 'UNIQUE' and rc.rdb$relation_name = ? 
ORDER BY iseg.rdb$field_position EOF $sth->execute($table->name); my $constraints; while (my ($constraint_name, $column) = $sth->fetchrow_array) { s/^\s+//, s/\s+\z// for $constraint_name, $column; push @{$constraints->{$constraint_name}}, $self->_lc($column); } return [ map { [ $_ => $constraints->{$_} ] } sort keys %$constraints ]; } sub _columns_info_for { my $self = shift; my ($table) = @_; my $result = $self->next::method(@_); local $self->dbh->{LongReadLen} = 100000; local $self->dbh->{LongTruncOk} = 1; while (my ($column, $info) = each %$result) { my $data_type = $info->{data_type}; my $sth = $self->dbh->prepare(<<'EOF'); SELECT t.rdb$trigger_source FROM rdb$triggers t WHERE t.rdb$relation_name = ? AND t.rdb$system_flag = 0 -- user defined AND t.rdb$trigger_type = 1 -- BEFORE INSERT EOF $sth->execute($table->name); while (my ($trigger) = $sth->fetchrow_array) { my @trig_cols = map { /^"([^"]+)/ ? $1 : uc($_) } $trigger =~ /new\.("?\w+"?)/ig; my ($quoted, $generator) = $trigger =~ /(?:gen_id\s* \( \s* |next \s* value \s* for \s*)(")?(\w+)/ix; if ($generator) { $generator = uc $generator unless $quoted; if (first { $self->_uc($_) eq $self->_uc($column) } @trig_cols) { $info->{is_auto_increment} = 1; $info->{sequence} = $generator; last; } } } # fix up types $sth = $self->dbh->prepare(<<'EOF'); SELECT f.rdb$field_precision, f.rdb$field_scale, f.rdb$field_type, f.rdb$field_sub_type, f.rdb$character_set_id, f.rdb$character_length, t.rdb$type_name, st.rdb$type_name FROM rdb$fields f JOIN rdb$relation_fields rf ON rf.rdb$field_source = f.rdb$field_name LEFT JOIN rdb$types t ON f.rdb$field_type = t.rdb$type AND t.rdb$field_name = 'RDB$FIELD_TYPE' LEFT JOIN rdb$types st ON f.rdb$field_sub_type = st.rdb$type AND st.rdb$field_name = 'RDB$FIELD_SUB_TYPE' WHERE rf.rdb$relation_name = ? AND rf.rdb$field_name = ? 
EOF $sth->execute($table->name, $self->_uc($column)); my ($precision, $scale, $type_num, $sub_type_num, $char_set_id, $char_length, $type_name, $sub_type_name) = $sth->fetchrow_array; $scale = -$scale if $scale && $scale < 0; if ($type_name && $sub_type_name) { s/\s+\z// for $type_name, $sub_type_name; # fixups primarily for DBD::InterBase if ($data_type =~ /^(?:integer|int|smallint|bigint|-9581)\z/) { if ($precision && $type_name =~ /^(?:LONG|INT64)\z/ && $sub_type_name eq 'BLR') { $info->{data_type} = 'decimal'; } elsif ($precision && $type_name =~ /^(?:LONG|SHORT|INT64)\z/ && $sub_type_name eq 'TEXT') { $info->{data_type} = 'numeric'; } elsif ((not $precision) && $type_name eq 'INT64' && $sub_type_name eq 'BINARY') { $info->{data_type} = 'bigint'; } } # ODBC makes regular blobs sub_type blr elsif ($type_name eq 'BLOB') { if ($sub_type_name eq 'BINARY') { $info->{data_type} = 'blob'; } elsif ($sub_type_name eq 'TEXT') { if (defined $char_set_id && $char_set_id == 3) { $info->{data_type} = 'blob sub_type text character set unicode_fss'; } else { $info->{data_type} = 'blob sub_type text'; } } } } $data_type = $info->{data_type}; if ($data_type =~ /^(?:decimal|numeric)\z/ && defined $precision && defined $scale) { if ($precision == 9 && $scale == 0) { delete $info->{size}; } else { $info->{size} = [$precision, $scale]; } } if ($data_type eq '11') { $info->{data_type} = 'timestamp'; } elsif ($data_type eq '10') { $info->{data_type} = 'time'; } elsif ($data_type eq '9') { $info->{data_type} = 'date'; } elsif ($data_type eq 'character varying') { $info->{data_type} = 'varchar'; } elsif ($data_type eq 'character') { $info->{data_type} = 'char'; } elsif ($data_type eq 'float') { $info->{data_type} = 'real'; } elsif ($data_type eq 'int64' || $data_type eq '-9581') { # the constant is just in case, the query should pick up the type $info->{data_type} = 'bigint'; } $data_type = $info->{data_type}; if ($data_type =~ /^(?:char|varchar)\z/) { $info->{size} = $char_length; if 
(defined $char_set_id && $char_set_id == 3) { $info->{data_type} .= '(x) character set unicode_fss'; } } elsif ($data_type !~ /^(?:numeric|decimal)\z/) { delete $info->{size}; } # get default delete $info->{default_value} if $info->{default_value} && $info->{default_value} eq 'NULL'; $sth = $self->dbh->prepare(<<'EOF'); SELECT rf.rdb$default_source FROM rdb$relation_fields rf WHERE rf.rdb$relation_name = ? AND rf.rdb$field_name = ? EOF $sth->execute($table->name, $self->_uc($column)); my ($default_src) = $sth->fetchrow_array; if ($default_src && (my ($def) = $default_src =~ /^DEFAULT \s+ (\S+)/ix)) { if (my ($quoted) = $def =~ /^'(.*?)'\z/) { $info->{default_value} = $quoted; } else { $info->{default_value} = $def =~ /^-?\d/ ? $def : \$def; } } ${ $info->{default_value} } = 'current_timestamp' if ref $info->{default_value} && ${ $info->{default_value} } eq 'CURRENT_TIMESTAMP'; } return $result; } =head1 SEE ALSO L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sw=4 sts=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/Sybase.pm0000644000175000017500000003361712650450246024647 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::Sybase; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI::Sybase::Common'; use mro 'c3'; use List::Util 'any'; use namespace::clean; use DBIx::Class::Schema::Loader::Table::Sybase (); our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::Sybase - DBIx::Class::Schema::Loader::DBI Sybase ASE Implementation. =head1 DESCRIPTION See L and L. This class reblesses into the L class for connections to MSSQL. 
=cut sub _rebless { my $self = shift; my $dbh = $self->schema->storage->dbh; my $DBMS_VERSION = @{$dbh->selectrow_arrayref(qq{sp_server_info \@attribute_id=1})}[2]; if ($DBMS_VERSION =~ /^Microsoft /i) { $DBMS_VERSION =~ s/\s/_/g; my $subclass = "DBIx::Class::Schema::Loader::DBI::Sybase::$DBMS_VERSION"; if ($self->load_optional_class($subclass) && !$self->isa($subclass)) { bless $self, $subclass; $self->_rebless; } } } sub _system_databases { return (qw/ master model sybsystemdb sybsystemprocs tempdb /); } sub _system_tables { return (qw/ sysquerymetrics /); } sub _setup { my $self = shift; $self->next::method(@_); $self->preserve_case(1); my ($current_db) = $self->dbh->selectrow_array('SELECT db_name()'); if (ref $self->db_schema eq 'HASH') { if (keys %{ $self->db_schema } < 2) { my ($db) = keys %{ $self->db_schema }; $db ||= $current_db; if ($db eq '%') { my $owners = $self->db_schema->{$db}; my $db_names = $self->dbh->selectcol_arrayref(<<'EOF'); SELECT name FROM master.dbo.sysdatabases EOF my @dbs; foreach my $db_name (@$db_names) { push @dbs, $db_name unless any { $_ eq $db_name } $self->_system_databases; } $self->db_schema({}); DB: foreach my $db (@dbs) { if (not ((ref $owners eq 'ARRAY' && $owners->[0] eq '%') || $owners eq '%')) { my @owners; foreach my $owner (@$owners) { push @owners, $owner if defined $self->_uid($db, $owner); } next DB unless @owners; $self->db_schema->{$db} = \@owners; } else { # for post-processing below $self->db_schema->{$db} = '%'; } } $self->qualify_objects(1); } else { if ($db ne $current_db) { $self->dbh->do("USE [$db]"); $self->qualify_objects(1); } } } else { $self->qualify_objects(1); } } elsif (ref $self->db_schema eq 'ARRAY' || (not defined $self->db_schema)) { my $owners = $self->db_schema; $owners ||= [ $self->dbh->selectrow_array('SELECT user_name()') ]; $self->qualify_objects(1) if @$owners > 1; $self->db_schema({ $current_db => $owners }); } foreach my $db (keys %{ $self->db_schema }) { if ($self->db_schema->{$db} eq 
'%') { my $owners = $self->dbh->selectcol_arrayref(<<"EOF"); SELECT name FROM [$db].dbo.sysusers WHERE uid <> gid EOF $self->db_schema->{$db} = $owners; $self->qualify_objects(1); } } } sub _tables_list { my ($self) = @_; my @tables; while (my ($db, $owners) = each %{ $self->db_schema }) { foreach my $owner (@$owners) { my ($uid) = $self->_uid($db, $owner); my $table_names = $self->dbh->selectcol_arrayref(<<"EOF"); SELECT name FROM [$db].dbo.sysobjects WHERE uid = $uid AND type IN ('U', 'V') EOF TABLE: foreach my $table_name (@$table_names) { next TABLE if any { $_ eq $table_name } $self->_system_tables; push @tables, DBIx::Class::Schema::Loader::Table::Sybase->new( loader => $self, name => $table_name, database => $db, schema => $owner, ); } } } return $self->_filter_tables(\@tables); } sub _uid { my ($self, $db, $owner) = @_; my ($uid) = $self->dbh->selectrow_array(<<"EOF"); SELECT uid FROM [$db].dbo.sysusers WHERE name = @{[ $self->dbh->quote($owner) ]} EOF return $uid; } sub _table_columns { my ($self, $table) = @_; my $db = $table->database; my $owner = $table->schema; my $columns = $self->dbh->selectcol_arrayref(<<"EOF"); SELECT c.name FROM [$db].dbo.syscolumns c JOIN [$db].dbo.sysobjects o ON c.id = o.id WHERE o.name = @{[ $self->dbh->quote($table->name) ]} AND o.type IN ('U', 'V') AND o.uid = @{[ $self->_uid($db, $owner) ]} ORDER BY c.colid ASC EOF return $columns; } sub _table_pk_info { my ($self, $table) = @_; my ($current_db) = $self->dbh->selectrow_array('SELECT db_name()'); my $db = $table->database; $self->dbh->do("USE [$db]"); local $self->dbh->{FetchHashKeyName} = 'NAME_lc'; my $sth = $self->dbh->prepare(<<"EOF"); sp_pkeys @{[ $self->dbh->quote($table->name) ]}, @{[ $self->dbh->quote($table->schema) ]}, @{[ $self->dbh->quote($db) ]} EOF $sth->execute; my @keydata; while (my $row = $sth->fetchrow_hashref) { push @keydata, $row->{column_name}; } $self->dbh->do("USE [$current_db]"); return \@keydata; } sub _table_fk_info { my ($self, $table) = @_; my 
$db = $table->database; my $owner = $table->schema; my $sth = $self->dbh->prepare(<<"EOF"); SELECT sr.reftabid, sd2.name, sr.keycnt, fokey1, fokey2, fokey3, fokey4, fokey5, fokey6, fokey7, fokey8, fokey9, fokey10, fokey11, fokey12, fokey13, fokey14, fokey15, fokey16, refkey1, refkey2, refkey3, refkey4, refkey5, refkey6, refkey7, refkey8, refkey9, refkey10, refkey11, refkey12, refkey13, refkey14, refkey15, refkey16 FROM [$db].dbo.sysreferences sr JOIN [$db].dbo.sysobjects so1 ON sr.tableid = so1.id JOIN [$db].dbo.sysusers su1 ON so1.uid = su1.uid JOIN master.dbo.sysdatabases sd2 ON sr.pmrydbid = sd2.dbid WHERE so1.name = @{[ $self->dbh->quote($table->name) ]} AND su1.name = @{[ $self->dbh->quote($table->schema) ]} EOF $sth->execute; my @rels; REL: while (my @rel = $sth->fetchrow_array) { my ($remote_tab_id, $remote_db, $key_cnt) = splice @rel, 0, 3; my ($remote_tab_owner, $remote_tab_name) = $self->dbh->selectrow_array(<<"EOF"); SELECT su.name, so.name FROM [$remote_db].dbo.sysusers su JOIN [$remote_db].dbo.sysobjects so ON su.uid = so.uid WHERE so.id = $remote_tab_id EOF next REL unless any { $_ eq $remote_tab_owner } @{ $self->db_schema->{$remote_db} || [] }; my @local_col_ids = splice @rel, 0, 16; my @remote_col_ids = splice @rel, 0, 16; @local_col_ids = splice @local_col_ids, 0, $key_cnt; @remote_col_ids = splice @remote_col_ids, 0, $key_cnt; my $remote_table = DBIx::Class::Schema::Loader::Table::Sybase->new( loader => $self, name => $remote_tab_name, database => $remote_db, schema => $remote_tab_owner, ); my $all_local_cols = $self->_table_columns($table); my $all_remote_cols = $self->_table_columns($remote_table); my @local_cols = map $all_local_cols->[$_-1], @local_col_ids; my @remote_cols = map $all_remote_cols->[$_-1], @remote_col_ids; next REL if (any { not defined $_ } @local_cols) || (any { not defined $_ } @remote_cols); push @rels, { local_columns => \@local_cols, remote_table => $remote_table, remote_columns => \@remote_cols, }; }; return \@rels; } 
sub _table_uniq_info { my ($self, $table) = @_; my $db = $table->database; my $owner = $table->schema; my $uid = $self->_uid($db, $owner); my ($current_db) = $self->dbh->selectrow_array('SELECT db_name()'); $self->dbh->do("USE [$db]"); my $sth = $self->dbh->prepare(<<"EOF"); SELECT si.name, si.indid, si.keycnt FROM [$db].dbo.sysindexes si JOIN [$db].dbo.sysobjects so ON si.id = so.id WHERE so.name = @{[ $self->dbh->quote($table->name) ]} AND so.uid = $uid AND si.indid > 0 AND si.status & 2048 <> 2048 AND si.status2 & 2 = 2 EOF $sth->execute; my %uniqs; while (my ($ind_name, $ind_id, $key_cnt) = $sth->fetchrow_array) { COLS: foreach my $col_idx (1 .. ($key_cnt+1)) { my ($next_col) = $self->dbh->selectrow_array(<<"EOF"); SELECT index_col( @{[ $self->dbh->quote($table->name) ]}, $ind_id, $col_idx, $uid ) EOF last COLS unless defined $next_col; push @{ $uniqs{$ind_name} }, $next_col; } } $self->dbh->do("USE [$current_db]"); return [ map { [ $_ => $uniqs{$_} ] } sort keys %uniqs ]; } sub _columns_info_for { my $self = shift; my ($table) = @_; my $result = $self->next::method(@_); my $db = $table->database; my $owner = $table->schema; my $uid = $self->_uid($db, $owner); local $self->dbh->{FetchHashKeyName} = 'NAME_lc'; my $sth = $self->dbh->prepare(<<"EOF"); SELECT c.name, bt.name base_type, ut.name user_type, c.prec prec, c.scale scale, c.length len, c.cdefault dflt_id, c.computedcol comp_id, (c.status & 0x80) is_id FROM [$db].dbo.syscolumns c LEFT JOIN [$db].dbo.sysobjects o ON c.id = o.id LEFT JOIN [$db].dbo.systypes bt ON c.type = bt.type LEFT JOIN [$db].dbo.systypes ut ON c.usertype = ut.usertype WHERE o.name = @{[ $self->dbh->quote($table) ]} AND o.uid = $uid AND o.type IN ('U', 'V') EOF $sth->execute; my $info = $sth->fetchall_hashref('name'); while (my ($col, $res) = each %$result) { $res->{data_type} = $info->{$col}{user_type} || $info->{$col}{base_type}; if ($info->{$col}{is_id}) { $res->{is_auto_increment} = 1; } $sth->finish; # column has default value if (my 
$default_id = $info->{$col}{dflt_id}) { my $sth = $self->dbh->prepare(<<"EOF"); SELECT cm.id, cm.text FROM [$db].dbo.syscomments cm WHERE cm.id = $default_id EOF $sth->execute; if (my ($d_id, $default) = $sth->fetchrow_array) { my $constant_default = ($default =~ /^DEFAULT \s+ (\S.*\S)/ix) ? $1 : $default; $constant_default = substr($constant_default, 1, length($constant_default) - 2) if ( substr($constant_default, 0, 1) =~ m{['"\[]} && substr($constant_default, -1) =~ m{['"\]]}); $res->{default_value} = $constant_default; } } # column is a computed value if (my $comp_id = $info->{$col}{comp_id}) { my $sth = $self->dbh->prepare(<<"EOF"); SELECT cm.id, cm.text FROM [$db].dbo.syscomments cm WHERE cm.id = $comp_id EOF $sth->execute; if (my ($c_id, $comp) = $sth->fetchrow_array) { my $function = ($comp =~ /^AS \s+ (\S+)/ix) ? $1 : $comp; $res->{default_value} = \$function; if ($function =~ /^getdate\b/) { $res->{inflate_datetime} = 1; } delete $res->{size}; $res->{data_type} = undef; } } if (my $data_type = $res->{data_type}) { if ($data_type eq 'int') { $data_type = $res->{data_type} = 'integer'; } elsif ($data_type eq 'decimal') { $data_type = $res->{data_type} = 'numeric'; } elsif ($data_type eq 'float') { $data_type = $res->{data_type} = ($info->{$col}{len} <= 4 ? 
'real' : 'double precision'); } if ($data_type eq 'timestamp') { $res->{inflate_datetime} = 0; } if ($data_type =~ /^(?:text|unitext|image|bigint|integer|smallint|tinyint|real|double|double precision|float|date|time|datetime|smalldatetime|money|smallmoney|timestamp|bit)\z/i) { delete $res->{size}; } elsif ($data_type eq 'numeric') { my ($prec, $scale) = @{$info->{$col}}{qw/prec scale/}; if (!defined $prec && !defined $scale) { $data_type = $res->{data_type} = 'integer'; delete $res->{size}; } elsif ($prec == 18 && $scale == 0) { delete $res->{size}; } else { $res->{size} = [ $prec, $scale ]; } } elsif ($data_type =~ /char/) { $res->{size} = $info->{$col}{len}; if ($data_type =~ /^(?:unichar|univarchar)\z/i) { $res->{size} /= 2; } elsif ($data_type =~ /^n(?:var)?char\z/i) { my ($nchar_size) = $self->dbh->selectrow_array('SELECT @@ncharsize'); $res->{size} /= $nchar_size; } } } } return $result; } =head1 SEE ALSO L, L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/ODBC.pm0000644000175000017500000000271612650450246024124 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::ODBC; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI'; use mro 'c3'; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::ODBC - L proxy =head1 DESCRIPTION Reblesses into an C<::ODBC::> class when connecting via L. Code stolen from the L ODBC storage. See L for usage information. 
=cut

# Rebless this generic ODBC loader into the backend-specific ::ODBC:: subclass
# (if one exists) based on the DBMS name reported by the driver.
sub _rebless {
    my $self = shift;

    # only rebless the base class itself, never an already-reblessed subclass
    return if ref $self ne __PACKAGE__;

    # stolen from DBIC ODBC storage
    my $dbh    = $self->schema->storage->dbh;
    my $dbtype = eval { $dbh->get_info(17) };    # SQL_DBMS_NAME

    unless ($@) {
        # Translate the backend name into a perl identifier
        $dbtype =~ s/\W/_/gi;

        my $class = "DBIx::Class::Schema::Loader::DBI::ODBC::${dbtype}";
        if ($self->load_optional_class($class) && !$self->isa($class)) {
            bless $self, $class;
            $self->_rebless;
        }
    }
}

=head1 SEE ALSO

L<DBIx::Class::Schema::Loader::DBI::ODBC::ACCESS>,
L<DBIx::Class::Schema::Loader::DBI::ODBC::Firebird>,
L<DBIx::Class::Schema::Loader::DBI::ODBC::Microsoft_SQL_Server>,
L<DBIx::Class::Schema::Loader::DBI::ODBC::SQL_Anywhere>,
L<DBIx::Class::Schema::Loader>, L<DBIx::Class::Schema::Loader::Base>

=head1 AUTHORS

See L<DBIx::Class::Schema::Loader/AUTHORS>.

=head1 LICENSE

This library is free software; you can redistribute it and/or modify it under
the same terms as Perl itself.

=cut

1;
DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/MSSQL.pm0000644000175000017500000003711212650450246024312 0ustar  ilmariilmari
package DBIx::Class::Schema::Loader::DBI::MSSQL;

use strict;
use warnings;
use base 'DBIx::Class::Schema::Loader::DBI::Sybase::Common';
use mro 'c3';
use Try::Tiny;
use List::Util 'any';
use namespace::clean;
use DBIx::Class::Schema::Loader::Table::Sybase ();

our $VERSION = '0.07045';

=head1 NAME

DBIx::Class::Schema::Loader::DBI::MSSQL - DBIx::Class::Schema::Loader::DBI
MSSQL Implementation.

=head1 DESCRIPTION

Base driver for Microsoft SQL Server, used by
L<DBIx::Class::Schema::Loader::DBI::Sybase::Microsoft_SQL_Server> for support
via L<DBD::Sybase> and
L<DBIx::Class::Schema::Loader::DBI::ODBC::Microsoft_SQL_Server> for support via
L<DBD::ODBC>.

See L<DBIx::Class::Schema::Loader> and L<DBIx::Class::Schema::Loader::Base> for
usage information.

=head1 CASE SENSITIVITY

Most MSSQL databases use C<CI> (case-insensitive) collation, for this reason
generated column names are lower-cased as this makes them easier to work with
in L<DBIx::Class>.

We attempt to detect the database collation at startup for any database
included in L<db_schema|DBIx::Class::Schema::Loader::Base/db_schema>, and set
the column lowercasing behavior accordingly, as lower-cased column names do not
work on case-sensitive databases.

To manually control case-sensitive mode, put:

    preserve_case => 1|0

in your Loader options.

See L<preserve_case|DBIx::Class::Schema::Loader::Base/preserve_case>.

B<NOTE:> this option used to be called C<case_sensitive_collation>, but has
been renamed to a more generic option.
=cut

# SQL Server 2000: Ancient as time itself, but still out in the wild

# True when the connected server reports a normalized DBMS version below 9,
# i.e. SQL Server 2000 or older (2005 reports version 9).
sub _is_2k {
    return shift->schema->storage->_server_info->{normalized_dbms_version} < 9;
}

# Databases that ship with SQL Server and are never introspected.
sub _system_databases {
    return (qw/
        master model tempdb msdb
    /);
}

# Server-supplied tables filtered out of table listings.
sub _system_tables {
    return (qw/
        spt_fallback_db spt_fallback_dev spt_fallback_usg spt_monitor
        spt_values MSreplication_options
    /);
}

# List non-system schema (owner) names of database $db: dbo.sysusers on
# SQL Server 2000, sys.schemas on 2005 and later.
sub _schemas {
    my ($self, $db) = @_;

    my $owners = $self->dbh->selectcol_arrayref($self->_is_2k ? <<"EOF2K" : <<"EOF");
SELECT name
FROM [$db].dbo.sysusers
WHERE uid <> gid
EOF2K
SELECT name
FROM [$db].sys.schemas
EOF

    # Drop built-in principals and temporary (#-prefixed) owners.
    return grep !/^(?:#|guest|INFORMATION_SCHEMA|sys)/, @$owners;
}

# Schema that the current session resolves unqualified object names against.
sub _current_schema {
    my $self = shift;

    if ($self->_is_2k) {
        return ($self->dbh->selectrow_array('SELECT user_name()'))[0];
    }

    return ($self->dbh->selectrow_array('SELECT schema_name()'))[0];
}

# Database the session is currently attached to.
sub _current_db {
    my $self = shift;
    return ($self->dbh->selectrow_array('SELECT db_name()'))[0];
}

# Switch the session's current database (T-SQL USE).
sub _switch_db {
    my ($self, $db) = @_;
    $self->dbh->do("use [$db]");
}

# Normalize the user-supplied db_schema option into the canonical
# { database => \@owners } form, expanding '%' wildcards for both databases
# and owners, and — when the user did not set preserve_case — detect it from
# each database's collation.
sub _setup {
    my $self = shift;

    $self->next::method(@_);

    my $current_db = $self->_current_db;

    if (ref $self->db_schema eq 'HASH') {
        if (keys %{ $self->db_schema } < 2) {
            my ($db) = keys %{ $self->db_schema };

            $db ||= $current_db;

            if ($db eq '%') {
                my $owners = $self->db_schema->{$db};

                my $db_names = $self->dbh->selectcol_arrayref(<<'EOF');
SELECT name
FROM master.dbo.sysdatabases
EOF

                my @dbs;

                foreach my $db_name (@$db_names) {
                    push @dbs, $db_name
                        unless any { $_ eq $db_name } $self->_system_databases;
                }

                $self->db_schema({});

                DB: foreach my $db (@dbs) {
                    if (not ((ref $owners eq 'ARRAY' && $owners->[0] eq '%') || $owners eq '%')) {
                        # Explicit owner list: keep only databases that
                        # actually contain at least one requested owner.
                        my @owners;

                        foreach my $owner (@$owners) {
                            push @owners, $owner
                                if $self->dbh->selectrow_array(<<"EOF");
SELECT name
FROM [$db].dbo.sysusers
WHERE name = @{[ $self->dbh->quote($owner) ]}
EOF
                        }

                        next DB unless @owners;

                        $self->db_schema->{$db} = \@owners;
                    }
                    else {
                        # for post-processing below
                        $self->db_schema->{$db} = '%';
                    }
                }

                $self->qualify_objects(1);
            }
            else {
                if ($db ne $current_db) {
                    $self->_switch_db($db);

                    $self->qualify_objects(1);
                }
            }
        }
        else {
            $self->qualify_objects(1);
        }
    }
    elsif (ref $self->db_schema eq 'ARRAY' || (not defined $self->db_schema)) {
        my $owners = $self->db_schema;
        $owners ||= [ $self->_current_schema ];

        $self->qualify_objects(1) if @$owners > 1;

        $self->db_schema({ $current_db => $owners });
    }

    # Expand any remaining '%' owner wildcard into the database's schemas.
    foreach my $db (keys %{ $self->db_schema }) {
        if ($self->db_schema->{$db} eq '%') {
            $self->db_schema->{$db} = [ $self->_schemas($db) ];

            $self->qualify_objects(1);
        }
    }

    if (not defined $self->preserve_case) {
        foreach my $db (keys %{ $self->db_schema }) {
            # We use the sys.databases query for the general case, and fallback to
            # databasepropertyex() if for some reason sys.databases is not available,
            # which does not work over DBD::ODBC with unixODBC+FreeTDS.
            #
            # XXX why does databasepropertyex() not work over DBD::ODBC ?
            #
            # more on collations here: http://msdn.microsoft.com/en-us/library/ms143515.aspx
            my $current_db = $self->_current_db;

            $self->_switch_db($db);

            my $collation_name =
                   (eval { $self->dbh->selectrow_array("SELECT collation_name FROM [$db].sys.databases WHERE name = @{[ $self->dbh->quote($db) ]}") })[0]
                || (eval { $self->dbh->selectrow_array("SELECT CAST(databasepropertyex(@{[ $self->dbh->quote($db) ]}, 'Collation') AS VARCHAR)") })[0];

            $self->_switch_db($current_db);

            if (not $collation_name) {
                warn <<"EOF";
WARNING: MSSQL Collation detection failed for database '$db'. Defaulting to
case-insensitive mode. Override the 'preserve_case' attribute in your Loader
options if needed.
See 'preserve_case' in
perldoc DBIx::Class::Schema::Loader::Base
EOF
                $self->preserve_case(0) unless $self->preserve_case;
            }
            else {
                # _CS (case-sensitive) and _BIN/_BIN2 (binary) collation
                # suffixes require identifier case to be preserved.
                my $case_sensitive = $collation_name =~ /_(?:CS|BIN2?)(?:_|\z)/;

                if ($case_sensitive && (not $self->preserve_case)) {
                    $self->preserve_case(1);
                }
                else {
                    $self->preserve_case(0);
                }
            }
        }
    }
}

# Enumerate user tables across every configured database/owner pair,
# returning loader Table objects filtered through _filter_tables.
sub _tables_list {
    my ($self) = @_;

    my @tables;

    while (my ($db, $owners) = each %{ $self->db_schema }) {
        foreach my $owner (@$owners) {
            my $table_names = $self->dbh->selectcol_arrayref(<<"EOF");
SELECT table_name
FROM [$db].INFORMATION_SCHEMA.TABLES
WHERE table_schema = @{[ $self->dbh->quote($owner) ]}
EOF

            TABLE: foreach my $table_name (@$table_names) {
                next TABLE if any { $_ eq $table_name } $self->_system_tables;

                push @tables, DBIx::Class::Schema::Loader::Table::Sybase->new(
                    loader   => $self,
                    name     => $table_name,
                    database => $db,
                    schema   => $owner,
                );
            }
        }
    }

    return $self->_filter_tables(\@tables);
}

# Primary-key column names of $table, in ordinal order, case-folded via _lc.
sub _table_pk_info {
    my ($self, $table) = @_;

    my $db = $table->database;

    my $pk = $self->dbh->selectcol_arrayref(<<"EOF");
SELECT kcu.column_name
FROM [$db].INFORMATION_SCHEMA.TABLE_CONSTRAINTS tc
JOIN [$db].INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu
    ON kcu.table_name = tc.table_name
        AND kcu.table_schema = tc.table_schema
        AND kcu.constraint_name = tc.constraint_name
WHERE tc.table_name = @{[ $self->dbh->quote($table->name) ]}
    AND tc.table_schema = @{[ $self->dbh->quote($table->schema) ]}
    AND tc.constraint_type = 'PRIMARY KEY'
ORDER BY kcu.ordinal_position
EOF

    $pk = [ map $self->_lc($_), @$pk ];

    return $pk;
}

# Foreign-key constraints of $table: pairs local/remote columns by ordinal
# position and records delete/update rules per constraint.
sub _table_fk_info {
    my ($self, $table) = @_;

    my $db = $table->database;

    my $sth = $self->dbh->prepare(<<"EOF");
SELECT rc.constraint_name, rc.unique_constraint_schema, uk_tc.table_name,
       fk_kcu.column_name, uk_kcu.column_name, rc.delete_rule, rc.update_rule
FROM [$db].INFORMATION_SCHEMA.TABLE_CONSTRAINTS fk_tc
JOIN [$db].INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS rc
    ON rc.constraint_name = fk_tc.constraint_name
        AND rc.constraint_schema = fk_tc.table_schema
JOIN [$db].INFORMATION_SCHEMA.KEY_COLUMN_USAGE fk_kcu
    ON fk_kcu.constraint_name = fk_tc.constraint_name
        AND fk_kcu.table_name = fk_tc.table_name
        AND fk_kcu.table_schema = fk_tc.table_schema
JOIN [$db].INFORMATION_SCHEMA.TABLE_CONSTRAINTS uk_tc
    ON uk_tc.constraint_name = rc.unique_constraint_name
        AND uk_tc.table_schema = rc.unique_constraint_schema
JOIN [$db].INFORMATION_SCHEMA.KEY_COLUMN_USAGE uk_kcu
    ON uk_kcu.constraint_name = rc.unique_constraint_name
        AND uk_kcu.ordinal_position = fk_kcu.ordinal_position
        AND uk_kcu.table_name = uk_tc.table_name
        AND uk_kcu.table_schema = rc.unique_constraint_schema
WHERE fk_tc.table_name = @{[ $self->dbh->quote($table->name) ]}
    AND fk_tc.table_schema = @{[ $self->dbh->quote($table->schema) ]}
ORDER BY fk_kcu.ordinal_position
EOF

    $sth->execute;

    my %rels;

    while (my ($fk, $remote_schema, $remote_table, $col, $remote_col,
               $delete_rule, $update_rule) = $sth->fetchrow_array) {
        push @{ $rels{$fk}{local_columns} }, $self->_lc($col);
        push @{ $rels{$fk}{remote_columns} }, $self->_lc($remote_col);

        $rels{$fk}{remote_table} = DBIx::Class::Schema::Loader::Table::Sybase->new(
            loader   => $self,
            name     => $remote_table,
            database => $db,
            schema   => $remote_schema,
        ) unless exists $rels{$fk}{remote_table};

        $rels{$fk}{attrs} ||= {
            on_delete     => uc $delete_rule,
            on_update     => uc $update_rule,
            is_deferrable => 1 # constraints can be temporarily disabled, but DEFERRABLE is not supported
        };
    }

    return [ values %rels ];
}

# Unique constraints of $table as [ name => \@columns ] pairs, sorted by
# constraint name for stable output.
sub _table_uniq_info {
    my ($self, $table) = @_;

    my $db = $table->database;

    # NOTE(review): 'wHERE' below is a typo in the original SQL; harmless
    # since T-SQL keywords are case-insensitive, so it is left untouched.
    my $sth = $self->dbh->prepare(<<"EOF");
SELECT tc.constraint_name, kcu.column_name
FROM [$db].INFORMATION_SCHEMA.TABLE_CONSTRAINTS tc
JOIN [$db].INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu
    ON kcu.constraint_name = tc.constraint_name
        AND kcu.table_name = tc.table_name
        AND kcu.table_schema = tc.table_schema
wHERE tc.table_name = @{[ $self->dbh->quote($table->name) ]}
    AND tc.table_schema = @{[ $self->dbh->quote($table->schema) ]}
    AND tc.constraint_type = 'UNIQUE'
ORDER BY kcu.ordinal_position
EOF

    $sth->execute;

    my %uniq;

    while (my ($constr, $col) = $sth->fetchrow_array) {
        push @{ $uniq{$constr} }, $self->_lc($col);
    }

    return [ map [ $_ => $uniq{$_} ], sort keys %uniq ];
}

# Augment generic DBI column info with MSSQL type, identity and default-value
# details, normalizing vendor types to their DBIC-friendly equivalents.
sub _columns_info_for {
    my $self = shift;
    my ($table) = @_;

    my $db = $table->database;

    my $result = $self->next::method(@_);

    # get type info (and identity)
    my $rows = $self->dbh->selectall_arrayref($self->_is_2k ? <<"EOF2K" : <<"EOF");
SELECT c.column_name, c.character_maximum_length, c.data_type,
       c.datetime_precision, c.column_default, (sc.status & 0x80) is_identity
FROM [$db].INFORMATION_SCHEMA.COLUMNS c
JOIN [$db].dbo.sysusers ss ON c.table_schema = ss.name
JOIN [$db].dbo.sysobjects so ON c.table_name = so.name AND so.uid = ss.uid
JOIN [$db].dbo.syscolumns sc ON c.column_name = sc.name AND sc.id = so.Id
WHERE c.table_schema = @{[ $self->dbh->quote($table->schema) ]}
    AND c.table_name = @{[ $self->dbh->quote($table->name) ]}
EOF2K
SELECT c.column_name, c.character_maximum_length, c.data_type,
       c.datetime_precision, c.column_default, sc.is_identity
FROM [$db].INFORMATION_SCHEMA.COLUMNS c
JOIN [$db].sys.schemas ss ON c.table_schema = ss.name
JOIN [$db].sys.objects so ON c.table_name = so.name AND so.schema_id = ss.schema_id
JOIN [$db].sys.columns sc ON c.column_name = sc.name AND sc.object_id = so.object_id
WHERE c.table_schema = @{[ $self->dbh->quote($table->schema) ]}
    AND c.table_name = @{[ $self->dbh->quote($table->name) ]}
EOF

    foreach my $row (@$rows) {
        my ($col, $char_max_length, $data_type, $datetime_precision,
            $default, $is_identity) = @$row;

        $col = lc $col unless $self->preserve_case;

        my $info = $result->{$col} || next;

        $info->{data_type} = $data_type;

        if (defined $char_max_length) {
            $info->{size} = $char_max_length;
            # negative length (presumably the (max) types) is normalized to 0
            # and mapped to text/ntext/image below — TODO confirm
            $info->{size} = 0 if $char_max_length < 0;
        }

        if ($is_identity) {
            $info->{is_auto_increment} = 1;
            $info->{data_type} =~ s/\s*identity//i;
            delete $info->{size};
        }

        # fix types
        if ($data_type eq 'int') {
            $info->{data_type} = 'integer';
        }
        elsif ($data_type eq 'timestamp') {
            # MSSQL 'timestamp' is not a date/time value; suppress inflation
            $info->{inflate_datetime} = 0;
        }
        elsif ($data_type =~ /^(?:numeric|decimal)\z/) {
            # (18,0) is treated as the default precision/scale and elided
            if (ref($info->{size}) && $info->{size}[0] == 18 && $info->{size}[1] == 0) {
                delete $info->{size};
            }
        }
        elsif ($data_type eq 'float') {
            $info->{data_type} = 'double precision';
            delete $info->{size};
        }
        elsif ($data_type =~ /^(?:small)?datetime\z/) {
            # fixup for DBD::Sybase
            if ($info->{default_value} && $info->{default_value} eq '3') {
                delete $info->{default_value};
            }
        }
        elsif ($data_type =~ /^(?:datetime(?:2|offset)|time)\z/) {
            $info->{size} = $datetime_precision;

            # precision 7 is treated as the default and elided
            delete $info->{size} if $info->{size} == 7;
        }
        elsif ($data_type eq 'varchar' && $info->{size} == 0) {
            $info->{data_type} = 'text';
            delete $info->{size};
        }
        elsif ($data_type eq 'nvarchar' && $info->{size} == 0) {
            $info->{data_type} = 'ntext';
            delete $info->{size};
        }
        elsif ($data_type eq 'varbinary' && $info->{size} == 0) {
            $info->{data_type} = 'image';
            delete $info->{size};
        }

        # size is only meaningful for these types
        if ($data_type !~ /^(?:n?char|n?varchar|binary|varbinary|numeric|decimal|float|datetime(?:2|offset)|time)\z/) {
            delete $info->{size};
        }

        if (defined $default) {
            # strip parens
            $default =~ s/^\( (.*) \)\z/$1/x;

            # Literal strings are in ''s, numbers are in ()s (in some versions of
            # MSSQL, in others they are unquoted) everything else is a function.
            $info->{default_value} =
                $default =~ /^['(] (.*) [)']\z/x ? $1 :
                    $default =~ /^\d/ ? $default : \$default;

            if ((eval { lc ${ $info->{default_value} } }||'') eq 'getdate()') {
                ${ $info->{default_value} } = 'current_timestamp';

                my $getdate = 'getdate()';
                $info->{original}{default_value} = \$getdate;
            }
        }
    }

    return $result;
}

=head1 SEE ALSO

L, L,
L, L,
L

=head1 AUTHORS

See L.

=head1 LICENSE

This library is free software; you can redistribute it and/or modify it under
the same terms as Perl itself.
=cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/Oracle.pm0000644000175000017500000003261112650450246024617 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::Oracle; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI::Component::QuotedDefault'; use mro 'c3'; use Try::Tiny; use DBIx::Class::Schema::Loader::Utils qw/sigwarn_silencer/; use namespace::clean; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::Oracle - DBIx::Class::Schema::Loader::DBI Oracle Implementation. =head1 DESCRIPTION See L and L. =cut sub _setup { my $self = shift; $self->next::method(@_); my ($current_schema) = $self->dbh->selectrow_array('SELECT USER FROM DUAL'); $self->db_schema([ $current_schema ]) unless $self->db_schema; if (@{ $self->db_schema } == 1 && $self->db_schema->[0] ne '%' && lc($self->db_schema->[0]) ne lc($current_schema)) { $self->dbh->do('ALTER SESSION SET current_schema=' . $self->db_schema->[0]); } if (not defined $self->preserve_case) { $self->preserve_case(0); } elsif ($self->preserve_case) { $self->schema->storage->sql_maker->quote_char('"'); $self->schema->storage->sql_maker->name_sep('.'); } } sub _build_name_sep { '.' 
} sub _system_schemas { my $self = shift; # From http://www.adp-gmbh.ch/ora/misc/known_schemas.html return ($self->next::method(@_), qw/ANONYMOUS APEX_PUBLIC_USER APEX_030200 APPQOSSYS CTXSYS DBSNMP DIP DMSYS EXFSYS LBACSYS MDDATA MDSYS MGMT_VIEW OLAPSYS ORACLE_OCM ORDDATA ORDPLUGINS ORDSYS OUTLN SI_INFORMTN_SCHEMA SPATIAL_CSW_ADMIN_USR SPATIAL_WFS_ADMIN_USR SYS SYSMAN SYSTEM TRACESRV MTSSYS OASPUBLIC OWBSYS OWBSYS_AUDIT WEBSYS WK_PROXY WKSYS WK_TEST WMSYS XDB OSE$HTTP$ADMIN AURORA$JIS$UTILITY$ AURORA$ORB$UNAUTHENTICATED/, qr/^FLOWS_\d\d\d\d\d\d\z/); } sub _system_tables { my $self = shift; return ($self->next::method(@_), 'PLAN_TABLE'); } sub _dbh_tables { my ($self, $schema) = @_; return $self->dbh->tables(undef, $schema, '%', 'TABLE,VIEW'); } sub _filter_tables { my $self = shift; # silence a warning from older DBD::Oracles in tests local $SIG{__WARN__} = sigwarn_silencer( qr/^Field \d+ has an Oracle type \(\d+\) which is not explicitly supported/ ); return $self->next::method(@_); } sub _table_fk_info { my $self = shift; my ($table) = @_; my $rels = $self->next::method(@_); my $deferrable_sth = $self->dbh->prepare_cached(<<'EOF'); select deferrable from all_constraints where owner = ? and table_name = ? and constraint_name = ? and status = 'ENABLED' EOF my @enabled_rels; foreach my $rel (@$rels) { # Oracle does not have update rules $rel->{attrs}{on_update} = 'NO ACTION';; # DBD::Oracle's foreign_key_info does not return DEFERRABILITY, so we get it ourselves # Also use this to filter out disabled foreign keys, which are returned by DBD::Oracle < 1.76 my $deferrable = $self->dbh->selectrow_array( $deferrable_sth, undef, $table->schema, $table->name, $rel->{_constraint_name} ) or next; $rel->{attrs}{is_deferrable} = $deferrable =~ /^DEFERRABLE/i ? 
1 : 0; push @enabled_rels, $rel; } return \@enabled_rels; } sub _table_uniq_info { my ($self, $table) = @_; my $sth = $self->dbh->prepare_cached(<<'EOF', {}, 1); SELECT ac.constraint_name, acc.column_name FROM all_constraints ac, all_cons_columns acc WHERE acc.table_name=? AND acc.owner = ? AND ac.table_name = acc.table_name AND ac.owner = acc.owner AND acc.constraint_name = ac.constraint_name AND ac.constraint_type = 'U' AND ac.status = 'ENABLED' ORDER BY acc.position EOF $sth->execute($table->name, $table->schema); my %constr_names; while(my $constr = $sth->fetchrow_arrayref) { my $constr_name = $self->_lc($constr->[0]); my $constr_col = $self->_lc($constr->[1]); push @{$constr_names{$constr_name}}, $constr_col; } return [ map { [ $_ => $constr_names{$_} ] } sort keys %constr_names ]; } sub _table_comment { my $self = shift; my ($table) = @_; my $table_comment = $self->next::method(@_); return $table_comment if $table_comment; ($table_comment) = $self->dbh->selectrow_array(<<'EOF', {}, $table->schema, $table->name); SELECT comments FROM all_tab_comments WHERE owner = ? AND table_name = ? AND (table_type = 'TABLE' OR table_type = 'VIEW') EOF return $table_comment } sub _column_comment { my $self = shift; my ($table, $column_number, $column_name) = @_; my $column_comment = $self->next::method(@_); return $column_comment if $column_comment; ($column_comment) = $self->dbh->selectrow_array(<<'EOF', {}, $table->schema, $table->name, $self->_uc($column_name)); SELECT comments FROM all_col_comments WHERE owner = ? AND table_name = ? AND column_name = ? EOF return $column_comment } sub _columns_info_for { my $self = shift; my ($table) = @_; my $result = $self->next::method(@_); local $self->dbh->{LongReadLen} = 1_000_000; local $self->dbh->{LongTruncOk} = 1; my $sth = $self->dbh->prepare_cached(<<'EOF', {}, 1); SELECT trigger_body FROM all_triggers WHERE table_name = ? AND table_owner = ? 
AND status = 'ENABLED' AND upper(trigger_type) LIKE '%BEFORE EACH ROW%' AND lower(triggering_event) LIKE '%insert%' EOF $sth->execute($table->name, $table->schema); while (my ($trigger_body) = $sth->fetchrow_array) { if (my ($seq_schema, $seq_name) = $trigger_body =~ /(?:"?(\w+)"?\.)?"?(\w+)"?\.nextval/i) { if (my ($col_name) = $trigger_body =~ /:new\.(\w+)/i) { $col_name = $self->_lc($col_name); $result->{$col_name}{is_auto_increment} = 1; $seq_schema = $self->_lc($seq_schema || $table->schema); $seq_name = $self->_lc($seq_name); $result->{$col_name}{sequence} = ($self->qualify_objects ? ($seq_schema . '.') : '') . $seq_name; } } } # Old DBD::Oracle report the size in (UTF-16) bytes, not characters my $nchar_size_factor = $DBD::Oracle::VERSION >= 1.52 ? 1 : 2; while (my ($col, $info) = each %$result) { no warnings 'uninitialized'; my $sth = $self->dbh->prepare_cached(<<'EOF', {}, 1); SELECT data_type, data_length FROM all_tab_columns WHERE column_name = ? AND table_name = ? AND owner = ? EOF $sth->execute($self->_uc($col), $table->name, $table->schema); my ($data_type, $data_length) = $sth->fetchrow_array; $sth->finish; $data_type = lc $data_type; if ($data_type =~ /^(?:n(?:var)?char2?|u?rowid|nclob|timestamp\(\d+\)(?: with(?: local)? 
time zone)?|binary_(?:float|double))\z/i) { $info->{data_type} = $data_type; if ($data_type =~ /^u?rowid\z/i) { $info->{size} = $data_length; } } if ($info->{data_type} =~ /^(?:n?[cb]lob|long(?: raw)?|bfile|date|binary_(?:float|double)|rowid)\z/i) { delete $info->{size}; } if ($info->{data_type} =~ /^n(?:var)?char2?\z/i) { if (ref $info->{size}) { $info->{size} = $info->{size}[0] / 8; } else { $info->{size} = $info->{size} / $nchar_size_factor; } } elsif ($info->{data_type} =~ /^(?:var)?char2?\z/i) { if (ref $info->{size}) { $info->{size} = $info->{size}[0]; } } elsif (lc($info->{data_type}) =~ /^(?:number|decimal)\z/i) { $info->{original}{data_type} = 'number'; $info->{data_type} = 'numeric'; if (try { $info->{size}[0] == 38 && $info->{size}[1] == 0 }) { $info->{original}{size} = $info->{size}; $info->{data_type} = 'integer'; delete $info->{size}; } } elsif (my ($precision) = $info->{data_type} =~ /^timestamp\((\d+)\)(?: with (?:local )?time zone)?\z/i) { $info->{data_type} = join ' ', $info->{data_type} =~ /[a-z]+/ig; if ($precision == 6) { delete $info->{size}; } else { $info->{size} = $precision; } } elsif ($info->{data_type} =~ /timestamp/i && ref $info->{size} && $info->{size}[0] == 0) { my $size = $info->{size}[1]; delete $info->{size}; $info->{size} = $size unless $size == 6; } elsif (($precision) = $info->{data_type} =~ /^interval year\((\d+)\) to month\z/i) { $info->{data_type} = join ' ', $info->{data_type} =~ /[a-z]+/ig; if ($precision == 2) { delete $info->{size}; } else { $info->{size} = $precision; } } elsif (my ($day_precision, $second_precision) = $info->{data_type} =~ /^interval day\((\d+)\) to second\((\d+)\)\z/i) { $info->{data_type} = join ' ', $info->{data_type} =~ /[a-z]+/ig; if ($day_precision == 2 && $second_precision == 6) { delete $info->{size}; } else { $info->{size} = [ $day_precision, $second_precision ]; } } elsif ($info->{data_type} =~ /^interval year to month\z/i && ref $info->{size}) { my $precision = $info->{size}[0]; if 
($precision == 2) { delete $info->{size}; } else { $info->{size} = $precision; } } elsif ($info->{data_type} =~ /^interval day to second\z/i && ref $info->{size}) { if ($info->{size}[0] == 2 && $info->{size}[1] == 6) { delete $info->{size}; } } elsif (lc($info->{data_type}) eq 'float') { $info->{original}{data_type} = 'float'; $info->{original}{size} = $info->{size}; if ($info->{size} <= 63) { $info->{data_type} = 'real'; } else { $info->{data_type} = 'double precision'; } delete $info->{size}; } elsif (lc($info->{data_type}) eq 'double precision') { $info->{original}{data_type} = 'float'; my $size = try { $info->{size}[0] }; $info->{original}{size} = $size; if ($size <= 63) { $info->{data_type} = 'real'; } delete $info->{size}; } elsif (lc($info->{data_type}) eq 'urowid' && $info->{size} == 4000) { delete $info->{size}; } elsif ($info->{data_type} eq '-9104') { $info->{data_type} = 'rowid'; delete $info->{size}; } elsif ($info->{data_type} eq '-2') { $info->{data_type} = 'raw'; $info->{size} = try { $info->{size}[0] / 2 }; } elsif (lc($info->{data_type}) eq 'date') { $info->{data_type} = 'datetime'; $info->{original}{data_type} = 'date'; } elsif (lc($info->{data_type}) eq 'binary_float') { $info->{data_type} = 'real'; $info->{original}{data_type} = 'binary_float'; } elsif (lc($info->{data_type}) eq 'binary_double') { $info->{data_type} = 'double precision'; $info->{original}{data_type} = 'binary_double'; } # DEFAULT could be missed by ::DBI because of ORA-24345 if (not defined $info->{default_value}) { local $self->dbh->{LongReadLen} = 1_000_000; local $self->dbh->{LongTruncOk} = 1; my $sth = $self->dbh->prepare_cached(<<'EOF', {}, 1); SELECT data_default FROM all_tab_columns WHERE column_name = ? AND table_name = ? AND owner = ? 
EOF $sth->execute($self->_uc($col), $table->name, $table->schema); my ($default) = $sth->fetchrow_array; $sth->finish; # this is mostly copied from ::DBI::QuotedDefault if (defined $default) { s/^\s+//, s/\s+\z// for $default; if ($default =~ /^'(.*?)'\z/) { $info->{default_value} = $1; } elsif ($default =~ /^(-?\d.*?)\z/) { $info->{default_value} = $1; } elsif ($default =~ /^NULL\z/i) { my $null = 'null'; $info->{default_value} = \$null; } elsif ($default ne '') { my $val = $default; $info->{default_value} = \$val; } } } if ((try { lc(${ $info->{default_value} }) }||'') eq 'sysdate') { my $current_timestamp = 'current_timestamp'; $info->{default_value} = \$current_timestamp; my $sysdate = 'sysdate'; $info->{original}{default_value} = \$sysdate; } } return $result; } sub _dbh_column_info { my $self = shift; my ($dbh) = @_; # try to avoid ORA-24345 local $dbh->{LongReadLen} = 1_000_000; local $dbh->{LongTruncOk} = 1; return $self->next::method(@_); } =head1 SEE ALSO L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/SQLite.pm0000644000175000017500000002064012650450246024552 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::SQLite; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI::Component::QuotedDefault'; use mro 'c3'; use DBIx::Class::Schema::Loader::Table (); our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::SQLite - DBIx::Class::Schema::Loader::DBI SQLite Implementation. =head1 DESCRIPTION See L and L. =head1 METHODS =head2 rescan SQLite will fail all further commands on a connection if the underlying schema has been modified. Therefore, any runtime changes requiring C also require us to re-connect to the database. 
The C method here handles that reconnection for you, but beware that this must occur for any other open sqlite connections as well. =cut sub _setup { my $self = shift; $self->next::method(@_); if (not defined $self->preserve_case) { $self->preserve_case(0); } if ($self->db_schema) { warn <<'EOF'; db_schema is not supported on SQLite, the option is implemented only for qualify_objects testing. EOF if ($self->db_schema->[0] eq '%') { $self->db_schema(undef); } } } sub rescan { my ($self, $schema) = @_; $schema->storage->disconnect if $schema->storage; $self->next::method($schema); } sub _columns_info_for { my $self = shift; my ($table) = @_; my $result = $self->next::method(@_); local $self->dbh->{FetchHashKeyName} = 'NAME_lc'; my $sth = $self->dbh->prepare( "pragma table_info(" . $self->dbh->quote_identifier($table) . ")" ); $sth->execute; my $cols = $sth->fetchall_hashref('name'); # copy and case according to preserve_case mode # no need to check for collisions, SQLite does not allow them my %cols; while (my ($col, $info) = each %$cols) { $cols{ $self->_lc($col) } = $info; } my ($num_pk, $pk_col) = (0); # SQLite doesn't give us the info we need to do this nicely :( # If there is exactly one column marked PK, and its type is integer, # set it is_auto_increment. This isn't 100%, but it's better than the # alternatives. while (my ($col_name, $info) = each %$result) { if ($cols{$col_name}{pk}) { $num_pk++; if (lc($cols{$col_name}{type}) eq 'integer') { $pk_col = $col_name; } } } while (my ($col, $info) = each %$result) { if ((eval { ${ $info->{default_value} } }||'') eq 'CURRENT_TIMESTAMP') { ${ $info->{default_value} } = 'current_timestamp'; } if ($num_pk == 1 and defined $pk_col and $pk_col eq $col) { $info->{is_auto_increment} = 1; } } return $result; } sub _table_fk_info { my ($self, $table) = @_; my $sth = $self->dbh->prepare( "pragma foreign_key_list(" . $self->dbh->quote_identifier($table) . 
")" ); $sth->execute; my @rels; while (my $fk = $sth->fetchrow_hashref) { my $rel = $rels[ $fk->{id} ] ||= { local_columns => [], remote_columns => undef, remote_table => DBIx::Class::Schema::Loader::Table->new( loader => $self, name => $fk->{table}, ($self->db_schema ? ( schema => $self->db_schema->[0], ignore_schema => 1, ) : ()), ), }; push @{ $rel->{local_columns} }, $self->_lc($fk->{from}); push @{ $rel->{remote_columns} }, $self->_lc($fk->{to}) if defined $fk->{to}; $rel->{attrs} ||= { on_delete => uc $fk->{on_delete}, on_update => uc $fk->{on_update}, }; warn "This is supposed to be the same rel but remote_table changed from ", $rel->{remote_table}->name, " to ", $fk->{table} if $rel->{remote_table}->name ne $fk->{table}; } $sth->finish; # now we need to determine whether each FK is DEFERRABLE, this can only be # done by parsing the DDL from sqlite_master my $ddl = $self->dbh->selectcol_arrayref(<<"EOF", undef, $table->name, $table->name)->[0]; select sql from sqlite_master where name = ? and tbl_name = ? EOF foreach my $fk (@rels) { my $local_cols = '"?' . (join '"? \s* , \s* "?', map quotemeta, @{ $fk->{local_columns} }) . '"?'; my $remote_cols = '"?' . (join '"? \s* , \s* "?', map quotemeta, @{ $fk->{remote_columns} || [] }) . '"?'; my ($deferrable_clause) = $ddl =~ / foreign \s+ key \s* \( \s* $local_cols \s* \) \s* references \s* (?:\S+|".+?(?{attrs}{is_deferrable} = $deferrable_clause =~ /not/i ? 0 : 1; } else { # check for inline constraint if 1 local column if (@{ $fk->{local_columns} } == 1) { my ($local_col) = @{ $fk->{local_columns} }; my ($remote_col) = @{ $fk->{remote_columns} || [] }; $remote_col ||= ''; my ($deferrable_clause) = $ddl =~ / "?\Q$local_col\E"? \s* (?:\w+\s*)* (?: \( \s* \d\+ (?:\s*,\s*\d+)* \s* \) )? \s* references \s+ (?:\S+|".+?(?{attrs}{is_deferrable} = $deferrable_clause =~ /not/i ? 
0 : 1; } else { $fk->{attrs}{is_deferrable} = 0; } } else { $fk->{attrs}{is_deferrable} = 0; } } } return \@rels; } sub _table_uniq_info { my ($self, $table) = @_; my $sth = $self->dbh->prepare( "pragma index_list(" . $self->dbh->quote($table) . ")" ); $sth->execute; my @uniqs; while (my $idx = $sth->fetchrow_hashref) { next unless $idx->{unique}; my $name = $idx->{name}; my $get_idx_sth = $self->dbh->prepare("pragma index_info(" . $self->dbh->quote($name) . ")"); $get_idx_sth->execute; my @cols; while (my $idx_row = $get_idx_sth->fetchrow_hashref) { push @cols, $self->_lc($idx_row->{name}); } $get_idx_sth->finish; # Rename because SQLite complains about sqlite_ prefixes on identifiers # and ignores constraint names in DDL. $name = (join '_', @cols) . '_unique'; push @uniqs, [ $name => \@cols ]; } $sth->finish; return [ sort { $a->[0] cmp $b->[0] } @uniqs ]; } sub _tables_list { my ($self) = @_; my $sth = $self->dbh->prepare("SELECT * FROM sqlite_master"); $sth->execute; my @tables; while ( my $row = $sth->fetchrow_hashref ) { next unless $row->{type} =~ /^(?:table|view)\z/i; next if $row->{tbl_name} =~ /^sqlite_/; push @tables, DBIx::Class::Schema::Loader::Table->new( loader => $self, name => $row->{tbl_name}, ($self->db_schema ? ( schema => $self->db_schema->[0], ignore_schema => 1, # for qualify_objects tests ) : ()), ); } $sth->finish; return $self->_filter_tables(\@tables); } sub _table_info_matches { my ($self, $table, $info) = @_; my $table_schema = $table->schema; $table_schema = 'main' if !defined $table_schema; return $info->{TABLE_SCHEM} eq $table_schema && $info->{TABLE_NAME} eq $table->name; } =head1 SEE ALSO L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/ODBC/0000755000175000017500000000000012650450355023561 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/ODBC/ACCESS.pm0000644000175000017500000002154212650450246025063 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::ODBC::ACCESS; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI::ODBC'; use mro 'c3'; use Try::Tiny; use namespace::clean; use DBIx::Class::Schema::Loader::Table (); our $VERSION = '0.07045'; __PACKAGE__->mk_group_accessors('simple', qw/ __ado_connection __adox_catalog /); =head1 NAME DBIx::Class::Schema::Loader::DBI::ODBC::ACCESS - Microsoft Access driver for DBIx::Class::Schema::Loader =head1 DESCRIPTION See L for usage information. =cut sub _supports_db_schema { 0 } sub _db_path { my $self = shift; $self->schema->storage->dbh->get_info(16); } sub _open_ado_connection { my ($self, $conn, $user, $pass) = @_; my @info = ({ provider => 'Microsoft.ACE.OLEDB.12.0', dsn_extra => 'Persist Security Info=False', }, { provider => 'Microsoft.Jet.OLEDB.4.0', }); my $opened = 0; my $exception; for my $info (@info) { $conn->{Provider} = $info->{provider}; my $dsn = 'Data Source='.($self->_db_path); $dsn .= ";$info->{dsn_extra}" if exists $info->{dsn_extra}; try { $conn->Open($dsn, $user, $pass); undef $exception; } catch { $exception = $_; }; next if $exception; $opened = 1; last; } return ($opened, $exception); } sub _ado_connection { my $self = shift; return $self->__ado_connection if $self->__ado_connection; my ($dsn, $user, $pass) = @{ $self->schema->storage->_dbi_connect_info }; my $have_pass = 1; if (ref $dsn eq 'CODE') { ($dsn, $user, $pass) = $self->_try_infer_connect_info_from_coderef($dsn); if (not $dsn) { my $dbh = $self->schema->storage->dbh; $dsn = $dbh->{Name}; $user = $dbh->{Username}; $have_pass = 0; } } require Win32::OLE; my $conn = Win32::OLE->new('ADODB.Connection'); $user = '' 
unless defined $user; if ((not $have_pass) && exists $self->_passwords->{$dsn}{$user}) { $pass = $self->_passwords->{$dsn}{$user}; $have_pass = 1; } $pass = '' unless defined $pass; my ($opened, $exception) = $self->_open_ado_connection($conn, $user, $pass); if ((not $opened) && (not $have_pass)) { if (exists $ENV{DBI_PASS}) { $pass = $ENV{DBI_PASS}; ($opened, $exception) = $self->_open_ado_connection($conn, $user, $pass); if ($opened) { $self->_passwords->{$dsn}{$user} = $pass; } else { print "Enter database password for $user ($dsn): "; chomp($pass = ); ($opened, $exception) = $self->_open_ado_connection($conn, $user, $pass); if ($opened) { $self->_passwords->{$dsn}{$user} = $pass; } } } else { print "Enter database password for $user ($dsn): "; chomp($pass = ); ($opened, $exception) = $self->_open_ado_connection($conn, $user, $pass); if ($opened) { $self->_passwords->{$dsn}{$user} = $pass; } } } if (not $opened) { die "Failed to open ADO connection: $exception"; } $self->__ado_connection($conn); return $conn; } sub _adox_catalog { my $self = shift; return $self->__adox_catalog if $self->__adox_catalog; require Win32::OLE; my $cat = Win32::OLE->new('ADOX.Catalog'); $cat->{ActiveConnection} = $self->_ado_connection; $self->__adox_catalog($cat); return $cat; } sub _adox_column { my ($self, $table, $col) = @_; my $col_obj; my $cols = $self->_adox_catalog->Tables->Item($table->name)->Columns; for my $col_idx (0..$cols->Count-1) { $col_obj = $cols->Item($col_idx); if ($self->preserve_case) { last if $col_obj->Name eq $col; } else { last if lc($col_obj->Name) eq lc($col); } } return $col_obj; } sub rescan { my $self = shift; if ($self->__adox_catalog) { $self->__ado_connection(undef); $self->__adox_catalog(undef); } return $self->next::method(@_); } sub _table_pk_info { my ($self, $table) = @_; return [] if $self->_disable_pk_detection; my @keydata; my $indexes = try { $self->_adox_catalog->Tables->Item($table->name)->Indexes } catch { warn "Could not retrieve indexes 
in table '$table', disabling primary key detection: $_\n"; return undef; }; if (not $indexes) { $self->_disable_pk_detection(1); return []; } for my $idx_num (0..($indexes->Count-1)) { my $idx = $indexes->Item($idx_num); if ($idx->PrimaryKey) { my $cols = $idx->Columns; for my $col_idx (0..$cols->Count-1) { push @keydata, $self->_lc($cols->Item($col_idx)->Name); } } } return \@keydata; } sub _table_fk_info { my ($self, $table) = @_; return [] if $self->_disable_fk_detection; my $keys = try { $self->_adox_catalog->Tables->Item($table->name)->Keys; } catch { warn "Could not retrieve keys in table '$table', disabling relationship detection: $_\n"; return undef; }; if (not $keys) { $self->_disable_fk_detection(1); return []; } my @rels; for my $key_idx (0..($keys->Count-1)) { my $key = $keys->Item($key_idx); next unless $key->Type == 2; my $local_cols = $key->Columns; my $remote_table = $key->RelatedTable; my (@local_cols, @remote_cols); for my $col_idx (0..$local_cols->Count-1) { my $col = $local_cols->Item($col_idx); push @local_cols, $self->_lc($col->Name); push @remote_cols, $self->_lc($col->RelatedColumn); } push @rels, { local_columns => \@local_cols, remote_columns => \@remote_cols, remote_table => DBIx::Class::Schema::Loader::Table->new( loader => $self, name => $remote_table, ($self->db_schema ? ( schema => $self->db_schema->[0], ignore_schema => 1, ) : ()), ), }; } return \@rels; } sub _columns_info_for { my $self = shift; my ($table) = @_; my $result = $self->next::method(@_); while (my ($col, $info) = each %$result) { my $data_type = $info->{data_type}; my $col_obj = $self->_adox_column($table, $col); $info->{is_nullable} = ($col_obj->Attributes & 2) == 2 ? 
1 : 0; if ($data_type eq 'counter') { $info->{data_type} = 'integer'; $info->{is_auto_increment} = 1; delete $info->{size}; } elsif ($data_type eq 'longbinary') { $info->{data_type} = 'image'; $info->{original}{data_type} = 'longbinary'; } elsif ($data_type eq 'longchar') { $info->{data_type} = 'text'; $info->{original}{data_type} = 'longchar'; } elsif ($data_type eq 'double') { $info->{data_type} = 'double precision'; $info->{original}{data_type} = 'double'; } elsif ($data_type eq 'guid') { $info->{data_type} = 'uniqueidentifier'; $info->{original}{data_type} = 'guid'; } elsif ($data_type eq 'byte') { $info->{data_type} = 'tinyint'; $info->{original}{data_type} = 'byte'; } elsif ($data_type eq 'currency') { $info->{data_type} = 'money'; $info->{original}{data_type} = 'currency'; if (ref $info->{size} eq 'ARRAY' && $info->{size}[0] == 19 && $info->{size}[1] == 4) { # Actual money column via ODBC, otherwise we pass the sizes on to the ADO driver for # decimal columns (which masquerade as money columns...) delete $info->{size}; } } elsif ($data_type eq 'decimal') { if (ref $info->{size} eq 'ARRAY' && $info->{size}[0] == 18 && $info->{size}[1] == 0) { delete $info->{size}; } } # Pass through currency (which can be decimal for ADO.) if ($data_type !~ /^(?:(?:var)?(?:char|binary)|decimal)\z/ && $data_type ne 'currency') { delete $info->{size}; } } return $result; } =head1 SEE ALSO L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/ODBC/Microsoft_SQL_Server.pm0000644000175000017500000000161312650450246030131 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::ODBC::Microsoft_SQL_Server; use strict; use warnings; use base qw/ DBIx::Class::Schema::Loader::DBI::ODBC DBIx::Class::Schema::Loader::DBI::MSSQL /; use mro 'c3'; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::ODBC::Microsoft_SQL_Server - ODBC wrapper for L =head1 DESCRIPTION Proxy for L when using L. See L for usage information. =head1 SEE ALSO L, L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/ODBC/Firebird.pm0000644000175000017500000000324412650450246025647 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::ODBC::Firebird; use strict; use warnings; use base qw/ DBIx::Class::Schema::Loader::DBI::ODBC DBIx::Class::Schema::Loader::DBI::InterBase /; use mro 'c3'; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::ODBC::Firebird - ODBC wrapper for L =head1 DESCRIPTION Proxy for L when using L. See L for usage information. =cut # Some (current) versions of the ODBC driver have a bug where ->type_info breaks # with "data truncated". This "fixes" it, but some type names are truncated. 
sub _dbh_type_info_type_name { my ($self, $type_num) = @_; my $dbh = $self->schema->storage->dbh; local $dbh->{LongReadLen} = 100_000; local $dbh->{LongTruncOk} = 1; my $type_info = $dbh->type_info($type_num); return undef if not $type_info; my $type_name = $type_info->{TYPE_NAME}; # fix up truncated type names if ($type_name eq "VARCHAR(x) CHARACTER SET UNICODE_\0") { return 'VARCHAR(x) CHARACTER SET UNICODE_FSS'; } elsif ($type_name eq "BLOB SUB_TYPE TEXT CHARACTER SET \0") { return 'BLOB SUB_TYPE TEXT CHARACTER SET UNICODE_FSS'; } return $type_name; } =head1 SEE ALSO L, L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/ODBC/SQL_Anywhere.pm0000644000175000017500000000247312650450246026425 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::ODBC::SQL_Anywhere; use strict; use warnings; use base qw/ DBIx::Class::Schema::Loader::DBI::ODBC DBIx::Class::Schema::Loader::DBI::SQLAnywhere /; use mro 'c3'; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::ODBC::SQL_Anywhere - ODBC wrapper for L =head1 DESCRIPTION Proxy for L when using L. See L for usage information. =cut sub _columns_info_for { my $self = shift; my $result = $self->next::method(@_); while (my ($col, $info) = each %$result) { # The ODBC driver sets the default value to NULL even when it was not specified. if (ref $info->{default_value} && ${ $info->{default_value} } eq 'null') { delete $info->{default_value}; } } return $result; } =head1 SEE ALSO L, L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut 1; # vim:et sw=4 sts=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/Informix.pm0000644000175000017500000003410212650450246025202 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::Informix; use strict; use warnings; use base qw/DBIx::Class::Schema::Loader::DBI/; use mro 'c3'; use Scalar::Util 'looks_like_number'; use List::Util 'any'; use Try::Tiny; use namespace::clean; use DBIx::Class::Schema::Loader::Table::Informix (); our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::Informix - DBIx::Class::Schema::Loader::DBI Informix Implementation. =head1 DESCRIPTION See L and L. =cut sub _build_name_sep { '.' } sub _system_databases { return (qw/ sysmaster sysutils sysuser sysadmin /); } sub _current_db { my $self = shift; my ($current_db) = $self->dbh->selectrow_array(<<'EOF'); SELECT rtrim(ODB_DBName) FROM sysmaster:informix.SysOpenDB WHERE ODB_SessionID = ( SELECT DBINFO('sessionid') FROM informix.SysTables WHERE TabID = 1 ) and ODB_IsCurrent = 'Y' EOF return $current_db; } sub _owners { my ($self, $db) = @_; my ($owners) = $self->dbh->selectcol_arrayref(<<"EOF"); SELECT distinct(rtrim(owner)) FROM ${db}:informix.systables EOF my @owners = grep $_ && $_ ne 'informix' && !/^\d/, @$owners; return @owners; } sub _setup { my $self = shift; $self->next::method(@_); if (not defined $self->preserve_case) { $self->preserve_case(0); } elsif ($self->preserve_case) { $self->schema->storage->sql_maker->quote_char('"'); $self->schema->storage->sql_maker->name_sep('.'); } my $current_db = $self->_current_db; if (ref $self->db_schema eq 'HASH') { if (keys %{ $self->db_schema } < 2) { my ($db) = keys %{ $self->db_schema }; $db ||= $current_db; if ($db eq '%') { my $owners = $self->db_schema->{$db}; my $db_names = $self->dbh->selectcol_arrayref(<<'EOF'); SELECT rtrim(name) FROM sysmaster:sysdatabases EOF my @dbs; foreach my $db_name (@$db_names) { push @dbs, $db_name unless any { $_ eq $db_name } $self->_system_databases; } 
$self->db_schema({}); DB: foreach my $db (@dbs) { if (not ((ref $owners eq 'ARRAY' && $owners->[0] eq '%') || $owners eq '%')) { my @owners; my @db_owners = try { $self->_owners($db); } catch { if (/without logging/) { warn "Database '$db' is unreferencable due to lack of logging.\n"; } return (); }; foreach my $owner (@$owners) { push @owners, $owner if any { $_ eq $owner } @db_owners; } next DB unless @owners; $self->db_schema->{$db} = \@owners; } else { # for post-processing below $self->db_schema->{$db} = '%'; } } $self->qualify_objects(1); } else { if ($db ne $current_db) { $self->qualify_objects(1); } } } else { $self->qualify_objects(1); } } elsif (ref $self->db_schema eq 'ARRAY' || (not defined $self->db_schema)) { my $owners = $self->db_schema; $owners ||= [ $self->dbh->selectrow_array(<<'EOF') ]; SELECT rtrim(username) FROM sysmaster:syssessions WHERE sid = DBINFO('sessionid') EOF $self->qualify_objects(1) if @$owners > 1; $self->db_schema({ $current_db => $owners }); } DB: foreach my $db (keys %{ $self->db_schema }) { if ($self->db_schema->{$db} eq '%') { my @db_owners = try { $self->_owners($db); } catch { if (/without logging/) { warn "Database '$db' is unreferencable due to lack of logging.\n"; } return (); }; if (not @db_owners) { delete $self->db_schema->{$db}; next DB; } $self->db_schema->{$db} = \@db_owners; $self->qualify_objects(1); } } } sub _tables_list { my ($self) = @_; my @tables; while (my ($db, $owners) = each %{ $self->db_schema }) { foreach my $owner (@$owners) { my $table_names = $self->dbh->selectcol_arrayref(<<"EOF", {}, $owner); select tabname FROM ${db}:informix.systables WHERE rtrim(owner) = ? 
EOF TABLE: foreach my $table_name (@$table_names) { next if $table_name =~ /^\s/; push @tables, DBIx::Class::Schema::Loader::Table::Informix->new( loader => $self, name => $table_name, database => $db, schema => $owner, ); } } } return $self->_filter_tables(\@tables); } sub _constraints_for { my ($self, $table, $type) = @_; local $self->dbh->{FetchHashKeyName} = 'NAME_lc'; my $db = $table->database; my $sth = $self->dbh->prepare(<<"EOF"); SELECT c.constrname, i.* FROM ${db}:informix.sysconstraints c JOIN ${db}:informix.systables t ON t.tabid = c.tabid JOIN ${db}:informix.sysindexes i ON c.idxname = i.idxname WHERE t.tabname = ? and c.constrtype = ? EOF $sth->execute($table, $type); my $indexes = $sth->fetchall_hashref('constrname'); $sth->finish; my $cols = $self->_colnames_by_colno($table); my $constraints; while (my ($constr_name, $idx_def) = each %$indexes) { $constraints->{$constr_name} = $self->_idx_colnames($idx_def, $cols); } return $constraints; } sub _idx_colnames { my ($self, $idx_info, $table_cols_by_colno) = @_; return [ map $table_cols_by_colno->{$_}, grep $_, map $idx_info->{$_}, map "part$_", (1..16) ]; } sub _colnames_by_colno { my ($self, $table) = @_; local $self->dbh->{FetchHashKeyName} = 'NAME_lc'; my $db = $table->database; my $sth = $self->dbh->prepare(<<"EOF"); SELECT c.colname, c.colno FROM ${db}:informix.syscolumns c JOIN ${db}:informix.systables t ON c.tabid = t.tabid WHERE t.tabname = ? 
EOF $sth->execute($table); my $cols = $sth->fetchall_hashref('colno'); $cols = { map +($_, $self->_lc($cols->{$_}{colname})), keys %$cols }; return $cols; } sub _table_pk_info { my ($self, $table) = @_; my $pk = (values %{ $self->_constraints_for($table, 'P') || {} })[0]; return $pk; } sub _table_uniq_info { my ($self, $table) = @_; my $constraints = $self->_constraints_for($table, 'U'); return [ map { [ $_ => $constraints->{$_} ] } sort keys %$constraints ]; } sub _table_fk_info { my ($self, $table) = @_; my $local_columns = $self->_constraints_for($table, 'R'); local $self->dbh->{FetchHashKeyName} = 'NAME_lc'; my $db = $table->database; my $sth = $self->dbh->prepare(<<"EOF"); SELECT c.constrname local_constraint, rt.tabname remote_table, rtrim(rt.owner) remote_owner, rc.constrname remote_constraint, ri.* FROM ${db}:informix.sysconstraints c JOIN ${db}:informix.systables t ON c.tabid = t.tabid JOIN ${db}:informix.sysreferences r ON c.constrid = r.constrid JOIN ${db}:informix.sysconstraints rc ON rc.constrid = r.primary JOIN ${db}:informix.systables rt ON r.ptabid = rt.tabid JOIN ${db}:informix.sysindexes ri ON rc.idxname = ri.idxname WHERE t.tabname = ? 
and c.constrtype = 'R' EOF $sth->execute($table); my $remotes = $sth->fetchall_hashref('local_constraint'); $sth->finish; my @rels; while (my ($local_constraint, $remote_info) = each %$remotes) { my $remote_table = DBIx::Class::Schema::Loader::Table::Informix->new( loader => $self, name => $remote_info->{remote_table}, database => $db, schema => $remote_info->{remote_owner}, ); push @rels, { local_columns => $local_columns->{$local_constraint}, remote_columns => $self->_idx_colnames($remote_info, $self->_colnames_by_colno($remote_table)), remote_table => $remote_table, }; } return \@rels; } # This is directly from http://www.ibm.com/developerworks/data/zones/informix/library/techarticle/0305parker/0305parker.html # it doesn't work at all sub _informix_datetime_precision { my @date_type = qw/DUMMY year month day hour minute second fraction(1) fraction(2) fraction(3) fraction(4) fraction(5)/; my @start_end = ( [], [1,5],[5,7],[7,9],[9,11],[11,13],[13,15],[15,16], [16,17], [17,18], [18,19], [19,20] ); my ($self, $collength) = @_; my $i = ($collength % 16) + 1; my $j = int(($collength % 256) / 16) + 1; my $k = int($collength / 256); my $len = $start_end[$i][1] - $start_end[$j][0]; $len = $k - $len; if ($len == 0 || $j > 11) { return $date_type[$j] . ' to ' . $date_type[$i]; } $k = $start_end[$j][1] - $start_end[$j][0]; $k += $len; return $date_type[$j] . "($k) to " . $date_type[$i]; } sub _columns_info_for { my $self = shift; my ($table) = @_; my $result = $self->next::method(@_); my $db = $table->database; my $sth = $self->dbh->prepare(<<"EOF"); SELECT c.colname, c.coltype, c.collength, c.colmin, d.type deflt_type, d.default deflt FROM ${db}:informix.syscolumns c JOIN ${db}:informix.systables t ON c.tabid = t.tabid LEFT JOIN ${db}:informix.sysdefaults d ON t.tabid = d.tabid AND c.colno = d.colno WHERE t.tabname = ? 
EOF $sth->execute($table); my $cols = $sth->fetchall_hashref('colname'); $sth->finish; while (my ($col, $info) = each %$cols) { $col = $self->_lc($col); my $type = $info->{coltype} % 256; if ($type == 6) { # SERIAL $result->{$col}{is_auto_increment} = 1; } elsif ($type == 7) { $result->{$col}{data_type} = 'date'; } elsif ($type == 10) { $result->{$col}{data_type} = 'datetime year to fraction(5)'; # this doesn't work yet # $result->{$col}{data_type} = 'datetime ' . $self->_informix_datetime_precision($info->{collength}); } elsif ($type == 17 || $type == 52) { $result->{$col}{data_type} = 'bigint'; } elsif ($type == 40) { $result->{$col}{data_type} = 'lvarchar'; $result->{$col}{size} = $info->{collength}; } elsif ($type == 12) { $result->{$col}{data_type} = 'text'; } elsif ($type == 11) { $result->{$col}{data_type} = 'bytea'; $result->{$col}{original}{data_type} = 'byte'; } elsif ($type == 41) { # XXX no way to distinguish opaque types boolean, blob and clob $result->{$col}{data_type} = 'blob' unless $result->{$col}{data_type} eq 'smallint'; } elsif ($type == 21) { $result->{$col}{data_type} = 'list'; } elsif ($type == 20) { $result->{$col}{data_type} = 'multiset'; } elsif ($type == 19) { $result->{$col}{data_type} = 'set'; } elsif ($type == 15) { $result->{$col}{data_type} = 'nchar'; } elsif ($type == 16) { $result->{$col}{data_type} = 'nvarchar'; } # XXX untested! 
elsif ($info->{coltype} == 2061) { $result->{$col}{data_type} = 'idssecuritylabel'; } my $data_type = $result->{$col}{data_type}; if ($data_type !~ /^(?:[nl]?(?:var)?char|decimal)\z/i) { delete $result->{$col}{size}; } if (lc($data_type) eq 'decimal') { no warnings 'uninitialized'; $result->{$col}{data_type} = 'numeric'; my @size = @{ $result->{$col}{size} || [] }; if ($size[0] == 16 && $size[1] == -4) { delete $result->{$col}{size}; } elsif ($size[0] == 16 && $size[1] == 2) { $result->{$col}{data_type} = 'money'; delete $result->{$col}{size}; } } elsif (lc($data_type) eq 'smallfloat') { $result->{$col}{data_type} = 'real'; } elsif (lc($data_type) eq 'float') { $result->{$col}{data_type} = 'double precision'; } elsif ($data_type =~ /^n?(?:var)?char\z/i) { $result->{$col}{size} = $result->{$col}{size}[0]; } # XXX colmin doesn't work for min size of varchar columns, it's NULL # if (lc($data_type) eq 'varchar') { # $result->{$col}{size}[1] = $info->{colmin}; # } my ($default_type, $default) = @{$info}{qw/deflt_type deflt/}; next unless $default_type; if ($default_type eq 'C') { my $current = 'current year to fraction(5)'; $result->{$col}{default_value} = \$current; } elsif ($default_type eq 'T') { my $today = 'today'; $result->{$col}{default_value} = \$today; } else { $default = (split ' ', $default, 2)[-1]; $default =~ s/\s+\z// if looks_like_number $default; # remove trailing 0s in floating point defaults # disabled, this is unsafe since it might be a varchar default #$default =~ s/0+\z// if $default =~ /^\d+\.\d+\z/; $result->{$col}{default_value} = $default; } } return $result; } =head1 SEE ALSO L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut 1; # vim:et sw=4 sts=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/ADO/0000755000175000017500000000000012650450355023455 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/ADO/Microsoft_SQL_Server.pm0000644000175000017500000000232512650450246030026 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::ADO::Microsoft_SQL_Server; use strict; use warnings; use base qw/ DBIx::Class::Schema::Loader::DBI::ADO DBIx::Class::Schema::Loader::DBI::MSSQL /; use mro 'c3'; use DBIx::Class::Schema::Loader::Utils qw/sigwarn_silencer/; use namespace::clean; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::ADO::Microsoft_SQL_Server - ADO wrapper for L =head1 DESCRIPTION Proxy for L when using L. See L for usage information. =cut # Silence ADO "Changed database context" warnings sub _switch_db { my $self = shift; local $SIG{__WARN__} = sigwarn_silencer(qr/Changed database context/); return $self->next::method(@_); } =head1 SEE ALSO L, L, L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/ADO/MS_Jet.pm0000644000175000017500000001305312650450246025135 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::ADO::MS_Jet; use strict; use warnings; use base qw/ DBIx::Class::Schema::Loader::DBI::ADO DBIx::Class::Schema::Loader::DBI::ODBC::ACCESS /; use mro 'c3'; use Try::Tiny; use namespace::clean; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::ADO::MS_Jet - ADO wrapper for L =head1 DESCRIPTION Proxy for L when using L. See L for usage information. 
=cut sub _db_path { my $self = shift; $self->schema->storage->dbh->get_info(2); } sub _ado_connection { my $self = shift; return $self->__ado_connection if $self->__ado_connection; my ($dsn, $user, $pass) = @{ $self->schema->storage->_dbi_connect_info }; my $have_pass = 1; if (ref $dsn eq 'CODE') { ($dsn, $user, $pass) = $self->_try_infer_connect_info_from_coderef($dsn); if (not $dsn) { my $dbh = $self->schema->storage->dbh; $dsn = $dbh->{Name}; $user = $dbh->{Username}; $have_pass = 0; } } require Win32::OLE; my $conn = Win32::OLE->new('ADODB.Connection'); $dsn =~ s/^dbi:[^:]+://i; local $Win32::OLE::Warn = 0; my @dsn; for my $s (split /;/, $dsn) { my ($k, $v) = split /=/, $s, 2; if (defined $conn->{$k}) { $conn->{$k} = $v; next; } push @dsn, $s; } $dsn = join ';', @dsn; $user = '' unless defined $user; if ((not $have_pass) && exists $self->_passwords->{$dsn}{$user}) { $pass = $self->_passwords->{$dsn}{$user}; $have_pass = 1; } $pass = '' unless defined $pass; try { $conn->Open($dsn, $user, $pass); } catch { if (not $have_pass) { if (exists $ENV{DBI_PASS}) { $pass = $ENV{DBI_PASS}; try { $conn->Open($dsn, $user, $pass); $self->_passwords->{$dsn}{$user} = $pass; } catch { print "Enter database password for $user ($dsn): "; chomp($pass = ); $conn->Open($dsn, $user, $pass); $self->_passwords->{$dsn}{$user} = $pass; }; } else { print "Enter database password for $user ($dsn): "; chomp($pass = ); $conn->Open($dsn, $user, $pass); $self->_passwords->{$dsn}{$user} = $pass; } } else { die $_; } }; $self->__ado_connection($conn); return $conn; } sub _columns_info_for { my $self = shift; my ($table) = @_; my $result = $self->next::method(@_); while (my ($col, $info) = each %$result) { my $data_type = $info->{data_type}; my $col_obj = $self->_adox_column($table, $col); if ($data_type eq 'long') { $info->{data_type} = 'integer'; delete $info->{size}; my $props = $col_obj->Properties; for my $prop_idx (0..$props->Count-1) { my $prop = $props->Item($prop_idx); if ($prop->Name eq 
'Autoincrement' && $prop->Value == 1) { $info->{is_auto_increment} = 1; last; } } } elsif ($data_type eq 'short') { $info->{data_type} = 'smallint'; delete $info->{size}; } elsif ($data_type eq 'single') { $info->{data_type} = 'real'; delete $info->{size}; } elsif ($data_type eq 'money') { if (ref $info->{size} eq 'ARRAY') { if ($info->{size}[0] == 19 && $info->{size}[1] == 255) { delete $info->{size}; } else { # it's really a decimal $info->{data_type} = 'decimal'; if ($info->{size}[0] == 18 && $info->{size}[1] == 0) { # default size delete $info->{size}; } delete $info->{original}; } } } elsif ($data_type eq 'varchar') { $info->{data_type} = 'char' if $col_obj->Type == 130; $info->{size} = $col_obj->DefinedSize; } elsif ($data_type eq 'bigbinary') { $info->{data_type} = 'varbinary'; my $props = $col_obj->Properties; for my $prop_idx (0..$props->Count-1) { my $prop = $props->Item($prop_idx); if ($prop->Name eq 'Fixed Length' && $prop->Value == 1) { $info->{data_type} = 'binary'; last; } } $info->{size} = $col_obj->DefinedSize; } elsif ($data_type eq 'longtext') { $info->{data_type} = 'text'; $info->{original}{data_type} = 'longchar'; delete $info->{size}; } } return $result; } =head1 SEE ALSO L, L, L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/Sybase/0000755000175000017500000000000012650450355024300 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/Sybase/Common.pm0000644000175000017500000000271112650450246026066 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::Sybase::Common; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI'; use mro 'c3'; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::Sybase::Common - Common methods for Sybase and MSSQL =head1 DESCRIPTION See L and L. 
=cut # DBD::Sybase doesn't implement get_info properly sub _build_quote_char { '[]' } sub _build_name_sep { '.' } sub _setup { my $self = shift; $self->next::method(@_); $self->schema->storage->sql_maker->quote_char([qw/[ ]/]); $self->schema->storage->sql_maker->name_sep('.'); } # remove 'IDENTITY' from column data_type sub _columns_info_for { my $self = shift; my $result = $self->next::method(@_); foreach my $col (keys %$result) { $result->{$col}->{data_type} =~ s/\s* identity \s*//ix; } return $result; } =head1 SEE ALSO L, L, L, L, L L, L, =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/Sybase/Microsoft_SQL_Server.pm0000644000175000017500000000170612650450246030653 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::Sybase::Microsoft_SQL_Server; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI::MSSQL'; use mro 'c3'; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::Sybase::Microsoft_SQL_Server - Driver for using Microsoft SQL Server through DBD::Sybase =head1 DESCRIPTION Subclasses L. See L and L. =head1 SEE ALSO L, L, L, L L, L, =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/Firebird.pm0000644000175000017500000000170512650450246025140 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::Firebird; use strict; use warnings; use base qw/DBIx::Class::Schema::Loader::DBI::InterBase/; use mro 'c3'; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::Firebird - DBIx::Class::Schema::Loader::DBI L subclass =head1 DESCRIPTION This is an empty subclass of L for use with L, see that driver for details. See L and L for general Schema::Loader information. 
=head1 SEE ALSO L, L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sw=4 sts=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/Component/0000755000175000017500000000000012650450355025014 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/Component/QuotedDefault.pm0000644000175000017500000000356012650450246030123 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::Component::QuotedDefault; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI'; use mro 'c3'; our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::Component::QuotedDefault -- Loader::DBI Component to parse quoted default constants and functions =head1 DESCRIPTION If C from L returns character constants quoted, then we need to remove the quotes. This also allows distinguishing between default functions without information schema introspection. =cut sub _columns_info_for { my $self = shift; my ($table) = @_; my $result = $self->next::method(@_); while (my ($col, $info) = each %$result) { if (my $def = $info->{default_value}) { $def =~ s/^\s+//; $def =~ s/\s+\z//; # remove Pg typecasts (e.g. 'foo'::character varying) too if ($def =~ /^["'](.*?)['"](?:::[\w\s]+)?\z/) { $info->{default_value} = $1; } # Some DBs (eg. Pg) put parenthesis around negative number defaults elsif ($def =~ /^\((-?\d.*?)\)(?:::[\w\s]+)?\z/) { $info->{default_value} = $1; } elsif ($def =~ /^(-?\d.*?)(?:::[\w\s]+)?\z/) { $info->{default_value} = $1; } elsif ($def =~ /^NULL:?/i) { my $null = 'null'; $info->{default_value} = \$null; } else { $info->{default_value} = \$def; } } } return $result; } 1; =head1 SEE ALSO L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI/mysql.pm0000644000175000017500000002664612650450246024572 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI::mysql; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBI'; use mro 'c3'; use Carp::Clan qw/^DBIx::Class/; use List::Util qw/any first/; use Try::Tiny; use Scalar::Util 'blessed'; use DBIx::Class::Schema::Loader::Utils qw/sigwarn_silencer/; use namespace::clean; use DBIx::Class::Schema::Loader::Table (); our $VERSION = '0.07045'; =head1 NAME DBIx::Class::Schema::Loader::DBI::mysql - DBIx::Class::Schema::Loader::DBI mysql Implementation. =head1 DESCRIPTION See L and L. =cut sub _setup { my $self = shift; $self->schema->storage->sql_maker->quote_char("`"); $self->schema->storage->sql_maker->name_sep("."); $self->next::method(@_); if (not defined $self->preserve_case) { $self->preserve_case(0); } if ($self->db_schema && $self->db_schema->[0] eq '%') { my @schemas = try { $self->_show_databases; } catch { croak "no SHOW DATABASES privileges: $_"; }; @schemas = grep { my $schema = $_; not any { lc($schema) eq lc($_) } $self->_system_schemas } @schemas; $self->db_schema(\@schemas); } } sub _show_databases { my $self = shift; return map $_->[0], @{ $self->dbh->selectall_arrayref('SHOW DATABASES') }; } sub _system_schemas { my $self = shift; return ($self->next::method(@_), 'mysql'); } sub _table_fk_info { my ($self, $table) = @_; my $table_def_ref = eval { $self->dbh->selectrow_arrayref("SHOW CREATE TABLE ".$table->sql_name) }; my $table_def = $table_def_ref->[1]; return [] if not $table_def; my $qt = qr/["`]/; my $nqt = qr/[^"`]/; my (@reldata) = ($table_def =~ /CONSTRAINT ${qt}${nqt}+${qt} FOREIGN KEY \($qt(.*)$qt\) REFERENCES (?:$qt($nqt+)$qt\.)?$qt($nqt+)$qt \($qt(.+)$qt\)\s*(.*)/ig ); my @rels; while (scalar @reldata > 0) { my ($cols, $f_schema, $f_table, $f_cols, $rest) = splice @reldata, 0, 5; my @cols = map { s/$qt//g; $self->_lc($_) } 
split(/$qt?\s*$qt?,$qt?\s*$qt?/, $cols); my @f_cols = map { s/$qt//g; $self->_lc($_) } split(/$qt?\s*$qt?,$qt?\s*$qt?/, $f_cols); # Match case of remote schema to that in SHOW DATABASES, if it's there # and we have permissions to run SHOW DATABASES. if ($f_schema) { my $matched = first { lc($_) eq lc($f_schema) } try { $self->_show_databases }; $f_schema = $matched if $matched; } my $remote_table = do { # Get ->tables_list to return tables from the remote schema, in case it is not in the db_schema list. local $self->{db_schema} = [ $f_schema ] if $f_schema; first { lc($_->name) eq lc($f_table) && ((not $f_schema) || lc($_->schema) eq lc($f_schema)) } $self->_tables_list; }; # The table may not be in any database, or it may not have been found by the previous code block for whatever reason. if (not $remote_table) { my $remote_schema = $f_schema || $self->db_schema && @{ $self->db_schema } == 1 && $self->db_schema->[0]; $remote_table = DBIx::Class::Schema::Loader::Table->new( loader => $self, name => $f_table, ($remote_schema ? ( schema => $remote_schema, ) : ()), ); } my %attrs; if ($rest) { my @on_clauses = $rest =~ /(ON DELETE|ON UPDATE) (RESTRICT|CASCADE|SET NULL|NO ACTION) ?/ig; while (my ($clause, $value) = splice @on_clauses, 0, 2) { $clause = lc $clause; $clause =~ s/ /_/; $value = uc $value; $attrs{$clause} = $value; } } # The default behavior is RESTRICT. Specifying RESTRICT explicitly just removes # that ON clause from the SHOW CREATE TABLE output. For this reason, even # though the default for these clauses everywhere else in Schema::Loader is # CASCADE, we change the default here to RESTRICT in order to reproduce the # schema faithfully. $attrs{on_delete} ||= 'RESTRICT'; $attrs{on_update} ||= 'RESTRICT'; # MySQL does not have a DEFERRABLE attribute, but there is a way to defer FKs. 
$attrs{is_deferrable} = 1; push(@rels, { local_columns => \@cols, remote_columns => \@f_cols, remote_table => $remote_table, attrs => \%attrs, }); } return \@rels; } # primary and unique info comes from the same sql statement, # so cache it here for both routines to use sub _mysql_table_get_keys { my ($self, $table) = @_; if(!exists($self->{_cache}->{_mysql_keys}->{$table->sql_name})) { my %keydata; my $sth = $self->dbh->prepare('SHOW INDEX FROM '.$table->sql_name); $sth->execute; while(my $row = $sth->fetchrow_hashref) { next if $row->{Non_unique}; push(@{$keydata{$row->{Key_name}}}, [ $row->{Seq_in_index}, $self->_lc($row->{Column_name}) ] ); } foreach my $keyname (keys %keydata) { my @ordered_cols = map { $_->[1] } sort { $a->[0] <=> $b->[0] } @{$keydata{$keyname}}; $keydata{$keyname} = \@ordered_cols; } $self->{_cache}->{_mysql_keys}->{$table->sql_name} = \%keydata; } return $self->{_cache}->{_mysql_keys}->{$table->sql_name}; } sub _table_pk_info { my ( $self, $table ) = @_; return $self->_mysql_table_get_keys($table)->{PRIMARY}; } sub _table_uniq_info { my ( $self, $table ) = @_; my @uniqs; my $keydata = $self->_mysql_table_get_keys($table); foreach my $keyname (sort keys %$keydata) { next if $keyname eq 'PRIMARY'; push(@uniqs, [ $keyname => $keydata->{$keyname} ]); } return \@uniqs; } sub _columns_info_for { my $self = shift; my ($table) = @_; my $result = $self->next::method(@_); while (my ($col, $info) = each %$result) { if ($info->{data_type} eq 'int') { $info->{data_type} = 'integer'; } elsif ($info->{data_type} eq 'double') { $info->{data_type} = 'double precision'; } my $data_type = $info->{data_type}; delete $info->{size} if $data_type !~ /^(?: (?:var)?(?:char(?:acter)?|binary) | bit | year)\z/ix; # information_schema is available in 5.0+ my ($precision, $scale, $column_type, $default) = eval { $self->dbh->selectrow_array(<<'EOF', {}, $table->name, lc($col)) }; SELECT numeric_precision, numeric_scale, column_type, column_default FROM 
information_schema.columns WHERE table_schema = schema() AND table_name = ? AND lower(column_name) = ? EOF my $has_information_schema = not $@; $column_type = '' if not defined $column_type; if ($data_type eq 'bit' && (not exists $info->{size})) { $info->{size} = $precision if defined $precision; } elsif ($data_type =~ /^(?:float|double precision|decimal)\z/i) { if (defined $precision && defined $scale) { if ($precision == 10 && $scale == 0) { delete $info->{size}; } else { $info->{size} = [$precision,$scale]; } } } elsif ($data_type eq 'year') { if ($column_type =~ /\(2\)/) { $info->{size} = 2; } elsif ($column_type =~ /\(4\)/ || $info->{size} == 4) { delete $info->{size}; } } elsif ($data_type =~ /^(?:date(?:time)?|timestamp)\z/) { if (not (defined $self->datetime_undef_if_invalid && $self->datetime_undef_if_invalid == 0)) { $info->{datetime_undef_if_invalid} = 1; } } elsif ($data_type =~ /^(?:enum|set)\z/ && $has_information_schema && $column_type =~ /^(?:enum|set)\(/) { delete $info->{extra}{list}; while ($column_type =~ /'((?:[^']* (?:''|\\')* [^']*)* [^\\']?)',?/xg) { my $el = $1; $el =~ s/''/'/g; push @{ $info->{extra}{list} }, $el; } } # Sometimes apparently there's a bug where default_value gets set to '' # for things that don't actually have or support that default (like ints.) 
if (exists $info->{default_value} && $info->{default_value} eq '') { if ($has_information_schema) { if (not defined $default) { delete $info->{default_value}; } } else { # just check if it's a char/text type, otherwise remove delete $info->{default_value} unless $data_type =~ /char|text/i; } } } return $result; } sub _extra_column_info { no warnings 'uninitialized'; my ($self, $table, $col, $info, $dbi_info) = @_; my %extra_info; if ($dbi_info->{mysql_is_auto_increment}) { $extra_info{is_auto_increment} = 1 } if ($dbi_info->{mysql_type_name} =~ /\bunsigned\b/i) { $extra_info{extra}{unsigned} = 1; } if ($dbi_info->{mysql_values}) { $extra_info{extra}{list} = $dbi_info->{mysql_values}; } if ((not blessed $dbi_info) # isa $sth && lc($dbi_info->{COLUMN_DEF}) eq 'current_timestamp' && lc($dbi_info->{mysql_type_name}) eq 'timestamp') { my $current_timestamp = 'current_timestamp'; $extra_info{default_value} = \$current_timestamp; } return \%extra_info; } sub _dbh_column_info { my $self = shift; local $SIG{__WARN__} = sigwarn_silencer( qr/^column_info: unrecognized column type/ ); $self->next::method(@_); } sub _table_comment { my ( $self, $table ) = @_; my $comment = $self->next::method($table); if (not $comment) { ($comment) = try { $self->schema->storage->dbh->selectrow_array( qq{SELECT table_comment FROM information_schema.tables WHERE table_schema = schema() AND table_name = ? }, undef, $table->name); }; # InnoDB likes to auto-append crap. if (not $comment) { # Do nothing. } elsif ($comment =~ /^InnoDB free:/) { $comment = undef; } else { $comment =~ s/; InnoDB.*//; } } return $comment; } sub _column_comment { my ( $self, $table, $column_number, $column_name ) = @_; my $comment = $self->next::method($table, $column_number, $column_name); if (not $comment) { ($comment) = try { $self->schema->storage->dbh->selectrow_array( qq{SELECT column_comment FROM information_schema.columns WHERE table_schema = schema() AND table_name = ? AND lower(column_name) = ? 
}, undef, $table->name, lc($column_name)); }; } return $comment; } =head1 SEE ALSO L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sw=4 sts=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/Table/0000755000175000017500000000000012650450355023503 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/Table/Sybase.pm0000644000175000017500000000150512542756321025272 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::Table::Sybase; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBObject::Sybase'; use mro 'c3'; =head1 NAME DBIx::Class::Schema::Loader::Table::Sybase - Class for Sybase ASE and MSSQL Tables in L =head1 DESCRIPTION Inherits from L, see that module for details. =head1 SEE ALSO L, L, L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/Table/Informix.pm0000644000175000017500000000150312542756321025635 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::Table::Informix; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBObject::Informix'; use mro 'c3'; =head1 NAME DBIx::Class::Schema::Loader::Table::Informix - Class for Informix Tables in L =head1 DESCRIPTION Inherits from L, see that module for details. =head1 SEE ALSO L, L, L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBObject/0000755000175000017500000000000012650450355024070 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBObject/Sybase.pm0000644000175000017500000000475512542756321025671 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBObject::Sybase; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBObject'; use mro 'c3'; use namespace::clean; =head1 NAME DBIx::Class::Schema::Loader::DBObject::Sybase - Class for Database Objects for Sybase ASE and MSSQL Such as Tables and Views in L =head1 DESCRIPTION This is a subclass of L that adds support for fully qualified objects in Sybase ASE and MSSQL including both L and L of the form: database.owner.object_name =head1 METHODS =head2 database The database name this object belongs to. Returns undef if L is set. =cut __PACKAGE__->mk_group_accessors(simple => qw/ _database /); sub new { my $class = shift; my $self = $class->next::method(@_); $self->{_database} = delete $self->{database}; return $self; } sub database { my $self = shift; return $self->_database(@_) unless $self->ignore_schema; return undef; } =head1 sql_name Returns the properly quoted full identifier with L, L and L. =cut sub sql_name { my $self = shift; my $name_sep = $self->loader->name_sep; if ($self->database) { return $self->_quote($self->database) . $name_sep . $self->_quote($self->schema) . $name_sep . $self->_quote($self->name); } return $self->next::method(@_); } sub dbic_name { my $self = shift; my $name_sep = $self->loader->name_sep; if ($self->loader->qualify_objects && $self->_database) { if ($self->_database =~ /\W/ || $self->_schema =~ /\W/ || $self->name =~ /\W/) { return \ $self->sql_name; } return $self->_database . $name_sep . $self->_schema . $name_sep . $self->name; } return $self->next::method(@_); } =head1 SEE ALSO L, L, L, L, L =head1 AUTHORS See L. 
=head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBObject/Informix.pm0000644000175000017500000000471712542756321026234 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBObject::Informix; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBObject'; use mro 'c3'; use namespace::clean; =head1 NAME DBIx::Class::Schema::Loader::DBObject::Informix - Class for Database Objects for Informix Such as Tables and Views in L =head1 DESCRIPTION This is a subclass of L that adds support for fully qualified objects in Informix including both L and L of the form: database:owner.object_name =head1 METHODS =head2 database The database name this object belongs to. Returns undef if L is set. =cut __PACKAGE__->mk_group_accessors(simple => qw/ _database /); sub new { my $class = shift; my $self = $class->next::method(@_); $self->{_database} = delete $self->{database}; return $self; } sub database { my $self = shift; return $self->_database(@_) unless $self->ignore_schema; return undef; } =head1 sql_name Returns the properly quoted full identifier with L, L and L. =cut sub sql_name { my $self = shift; my $name_sep = $self->loader->name_sep; if ($self->database) { return $self->_quote($self->database) . ':' . $self->_quote($self->schema) . $name_sep . $self->_quote($self->name); } return $self->next::method(@_); } sub dbic_name { my $self = shift; my $name_sep = $self->loader->name_sep; if ($self->loader->qualify_objects && $self->_database) { if ($self->_database =~ /\W/ || $self->_schema =~ /\W/ || $self->name =~ /\W/) { return \ $self->sql_name; } return $self->_database . ':' . $self->_schema . $name_sep . $self->name; } return $self->next::method(@_); } =head1 SEE ALSO L, L, L, L, L =head1 AUTHORS See L. 
=head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBI.pm0000644000175000017500000004570012650450246023415 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBI; use strict; use warnings; use base qw/DBIx::Class::Schema::Loader::Base/; use mro 'c3'; use Try::Tiny; use List::Util 'any'; use Carp::Clan qw/^DBIx::Class/; use namespace::clean; use DBIx::Class::Schema::Loader::Table (); our $VERSION = '0.07045'; __PACKAGE__->mk_group_accessors('simple', qw/ _disable_pk_detection _disable_uniq_detection _disable_fk_detection _passwords quote_char name_sep /); =head1 NAME DBIx::Class::Schema::Loader::DBI - DBIx::Class::Schema::Loader DBI Implementation. =head1 SYNOPSIS See L =head1 DESCRIPTION This is the base class for L classes for DBI-based storage backends, and implements the common functionality between them. See L for the available options. =head1 METHODS =head2 new Overlays L to do some DBI-specific things. =cut sub new { my $self = shift->next::method(@_); # rebless to vendor-specific class if it exists and loads and we're not in a # custom class. if (not $self->loader_class) { my $driver = $self->dbh->{Driver}->{Name}; my $subclass = 'DBIx::Class::Schema::Loader::DBI::' . $driver; if ((not $self->isa($subclass)) && $self->load_optional_class($subclass)) { bless $self, $subclass; $self->_rebless; Class::C3::reinitialize() if $] < 5.009005; } } # Set up the default quoting character and name separators $self->quote_char($self->_build_quote_char); $self->name_sep($self->_build_name_sep); $self->_setup; return $self; } sub _build_quote_char { my $self = shift; my $quote_char = $self->dbh->get_info(29) || $self->schema->storage->sql_maker->quote_char || q{"}; # For our usage as regex matches, concatenating multiple quote_char # values works fine (e.g. 
s/[\Q<>\E]// if quote_char was [ '<', '>' ]) if (ref $quote_char eq 'ARRAY') { $quote_char = join '', @$quote_char; } return $quote_char; } sub _build_name_sep { my $self = shift; return $self->dbh->get_info(41) || $self->schema->storage->sql_maker->name_sep || '.'; } # Override this in vendor modules to do things at the end of ->new() sub _setup { } # Override this in vendor module to load a subclass if necessary sub _rebless { } sub _system_schemas { return ('information_schema'); } sub _system_tables { return (); } sub _dbh_tables { my $self = shift; return $self->dbh->tables(undef, @_); } # default to be overridden in subclasses if necessary sub _supports_db_schema { 1 } # Returns an array of table objects sub _tables_list { my ($self) = @_; my @tables; my $qt = qr/[\Q$self->{quote_char}\E"'`\[\]]/; my $nqt = qr/[^\Q$self->{quote_char}\E"'`\[\]]/; my $ns = qr/[\Q$self->{name_sep}\E]/; my $nns = qr/[^\Q$self->{name_sep}\E]/; foreach my $schema (@{ $self->db_schema || [undef] }) { my @raw_table_names = $self->_dbh_tables($schema); TABLE: foreach my $raw_table_name (@raw_table_names) { my $quoted = $raw_table_name =~ /^$qt/; # These regexes are not entirely correct, but hopefully they will work # in most cases. RT reports welcome. my ($schema_name, $table_name1, $table_name2) = $quoted ? 
$raw_table_name =~ /^(?:${qt}(${nqt}+?)${qt}${ns})?(?:${qt}(.+?)${qt}|(${nns}+))\z/ : $raw_table_name =~ /^(?:(${nns}+?)${ns})?(?:${qt}(.+?)${qt}|(${nns}+))\z/; my $table_name = $table_name1 || $table_name2; foreach my $system_schema ($self->_system_schemas) { if ($schema_name) { my $matches = 0; if (ref $system_schema) { $matches = 1 if $schema_name =~ $system_schema && $schema !~ $system_schema; } else { $matches = 1 if $schema_name eq $system_schema && $schema ne $system_schema; } next TABLE if $matches; } } foreach my $system_table ($self->_system_tables) { my $matches = 0; if (ref $system_table) { $matches = 1 if $table_name =~ $system_table; } else { $matches = 1 if $table_name eq $system_table } next TABLE if $matches; } $schema_name ||= $schema; my $table = DBIx::Class::Schema::Loader::Table->new( loader => $self, name => $table_name, schema => $schema_name, ($self->_supports_db_schema ? () : ( ignore_schema => 1 )), ); push @tables, $table; } } return $self->_filter_tables(\@tables); } sub _recurse_constraint { my ($constraint, @parts) = @_; my $name = shift @parts; # If there are any parts left, the constraint must be an arrayref croak "depth of constraint/exclude array does not match length of moniker_parts" unless !!@parts == !!(ref $constraint eq 'ARRAY'); # if ths is the last part, use the constraint directly return $name =~ $constraint unless @parts; # recurse into the first matching subconstraint foreach (@{$constraint}) { my ($re, $sub) = @{$_}; return _recurse_constraint($sub, @parts) if $name =~ $re; } return 0; } sub _check_constraint { my ($include, $constraint, @tables) = @_; return @tables unless defined $constraint; return grep { !$include xor _recurse_constraint($constraint, @{$_}) } @tables if ref $constraint eq 'ARRAY'; return grep { !$include xor /$constraint/ } @tables; } # apply constraint/exclude and ignore bad tables and views sub _filter_tables { my ($self, $tables) = @_; my @tables = @$tables; my @filtered_tables; @tables = 
_check_constraint(1, $self->constraint, @tables); @tables = _check_constraint(0, $self->exclude, @tables); TABLE: for my $table (@tables) { try { local $^W = 0; # for ADO my $sth = $self->_sth_for($table, undef, \'1 = 0'); $sth->execute; 1; } catch { warn "Bad table or view '$table', ignoring: $_\n"; 0; } or next TABLE; push @filtered_tables, $table; } return @filtered_tables; } =head2 load We override L here to hook in our localized settings for C<$dbh> error handling. =cut sub load { my $self = shift; local $self->dbh->{RaiseError} = 1; local $self->dbh->{PrintError} = 0; $self->next::method(@_); } sub _sth_for { my ($self, $table, $fields, $where) = @_; my $sth = $self->dbh->prepare($self->schema->storage->sql_maker ->select(\$table->sql_name, $fields, $where)); return $sth; } # Returns an arrayref of column names sub _table_columns { my ($self, $table) = @_; my $sth = $self->_sth_for($table, undef, \'1 = 0'); $sth->execute; my $retval = [ map $self->_lc($_), @{$sth->{NAME}} ]; $sth->finish; return $retval; } # Returns arrayref of pk col names sub _table_pk_info { my ($self, $table) = @_; return [] if $self->_disable_pk_detection; my @primary = try { $self->dbh->primary_key('', $table->schema, $table->name); } catch { warn "Cannot find primary keys for this driver: $_"; $self->_disable_pk_detection(1); return (); }; return [] if not @primary; @primary = map { $self->_lc($_) } @primary; s/[\Q$self->{quote_char}\E]//g for @primary; return \@primary; } # Override this for vendor-specific uniq info sub _table_uniq_info { my ($self, $table) = @_; return [] if $self->_disable_uniq_detection; if (not $self->dbh->can('statistics_info')) { warn "No UNIQUE constraint information can be gathered for this driver"; $self->_disable_uniq_detection(1); return []; } my %indices; my $sth = $self->dbh->statistics_info(undef, $table->schema, $table->name, 1, 1); while(my $row = $sth->fetchrow_hashref) { # skip table-level stats, conditional indexes, and any index missing # critical 
fields next if $row->{TYPE} eq 'table' || defined $row->{FILTER_CONDITION} || !$row->{INDEX_NAME} || !defined $row->{ORDINAL_POSITION}; $indices{$row->{INDEX_NAME}}[$row->{ORDINAL_POSITION}] = $self->_lc($row->{COLUMN_NAME} || ''); } $sth->finish; my @retval; foreach my $index_name (sort keys %indices) { my (undef, @cols) = @{$indices{$index_name}}; # skip indexes with missing column names (e.g. expression indexes) next unless @cols == grep $_, @cols; push(@retval, [ $index_name => \@cols ]); } return \@retval; } sub _table_comment { my ($self, $table) = @_; my $dbh = $self->dbh; my $comments_table = $table->clone; $comments_table->name($self->table_comments_table); my ($comment) = (exists $self->_tables->{$comments_table->sql_name} || undef) && try { $dbh->selectrow_array(<<"EOF") }; SELECT comment_text FROM @{[ $comments_table->sql_name ]} WHERE table_name = @{[ $dbh->quote($table->name) ]} EOF # Failback: try the REMARKS column on table_info if (!$comment) { my $info = $self->_dbh_table_info( $dbh, $table ); $comment = $info->{REMARKS} if $info; } return $comment; } sub _column_comment { my ($self, $table, $column_number, $column_name) = @_; my $dbh = $self->dbh; my $comments_table = $table->clone; $comments_table->name($self->column_comments_table); my ($comment) = (exists $self->_tables->{$comments_table->sql_name} || undef) && try { $dbh->selectrow_array(<<"EOF") }; SELECT comment_text FROM @{[ $comments_table->sql_name ]} WHERE table_name = @{[ $dbh->quote($table->name) ]} AND column_name = @{[ $dbh->quote($column_name) ]} EOF # Failback: try the REMARKS column on column_info if (!$comment && $dbh->can('column_info')) { if (my $sth = try { $self->_dbh_column_info( $dbh, undef, $table->schema, $table->name, $column_name ) }) { my $info = $sth->fetchrow_hashref(); $comment = $info->{REMARKS}; } } return $comment; } # Find relationships sub _table_fk_info { my ($self, $table) = @_; return [] if $self->_disable_fk_detection; my $sth = try { 
$self->dbh->foreign_key_info( '', '', '', '', ($table->schema || ''), $table->name ); } catch { warn "Cannot introspect relationships for this driver: $_"; $self->_disable_fk_detection(1); return undef; }; return [] if !$sth; my %rels; my @rules = ( 'CASCADE', 'RESTRICT', 'SET NULL', 'NO ACTION', 'SET DEFAULT', ); my $i = 1; # for unnamed rels, which hopefully have only 1 column ... REL: while(my $raw_rel = $sth->fetchrow_arrayref) { my $uk_scm = $raw_rel->[1]; my $uk_tbl = $raw_rel->[2]; my $uk_col = $self->_lc($raw_rel->[3]); my $fk_scm = $raw_rel->[5]; my $fk_col = $self->_lc($raw_rel->[7]); my $key_seq = $raw_rel->[8] - 1; my $relid = ($raw_rel->[11] || ( "__dcsld__" . $i++ )); my $update_rule = $raw_rel->[9]; my $delete_rule = $raw_rel->[10]; $update_rule = $rules[$update_rule] if defined $update_rule; $delete_rule = $rules[$delete_rule] if defined $delete_rule; my $is_deferrable = $raw_rel->[13]; ($is_deferrable = $is_deferrable == 7 ? 0 : 1) if defined $is_deferrable; foreach my $var ($uk_scm, $uk_tbl, $uk_col, $fk_scm, $fk_col, $relid) { $var =~ s/[\Q$self->{quote_char}\E]//g if defined $var; } if ($self->db_schema && $self->db_schema->[0] ne '%' && (not any { $_ eq $uk_scm } @{ $self->db_schema })) { next REL; } $rels{$relid}{tbl} ||= DBIx::Class::Schema::Loader::Table->new( loader => $self, name => $uk_tbl, schema => $uk_scm, ($self->_supports_db_schema ? 
() : ( ignore_schema => 1 )), ); $rels{$relid}{attrs}{on_delete} = $delete_rule if $delete_rule; $rels{$relid}{attrs}{on_update} = $update_rule if $update_rule; $rels{$relid}{attrs}{is_deferrable} = $is_deferrable if defined $is_deferrable; # Add this data IN ORDER $rels{$relid}{rcols}[$key_seq] = $uk_col; $rels{$relid}{lcols}[$key_seq] = $fk_col; } $sth->finish; my @rels; foreach my $relid (keys %rels) { push(@rels, { remote_columns => [ grep defined, @{ $rels{$relid}{rcols} } ], local_columns => [ grep defined, @{ $rels{$relid}{lcols} } ], remote_table => $rels{$relid}->{tbl}, (exists $rels{$relid}{attrs} ? (attrs => $rels{$relid}{attrs}) : () ), _constraint_name => $relid, }); } return \@rels; } # ported in from DBIx::Class::Storage::DBI: sub _columns_info_for { my ($self, $table) = @_; my $dbh = $self->schema->storage->dbh; my %result; if (my $sth = try { $self->_dbh_column_info($dbh, undef, $table->schema, $table->name, '%' ) }) { COL_INFO: while (my $info = try { $sth->fetchrow_hashref } catch { +{} }) { next COL_INFO unless %$info; my $column_info = {}; $column_info->{data_type} = lc $info->{TYPE_NAME}; my $size = $info->{COLUMN_SIZE}; if (defined $size && defined $info->{DECIMAL_DIGITS}) { $column_info->{size} = [$size, $info->{DECIMAL_DIGITS}]; } elsif (defined $size) { $column_info->{size} = $size; } $column_info->{is_nullable} = $info->{NULLABLE} ? 1 : 0; $column_info->{default_value} = $info->{COLUMN_DEF} if defined $info->{COLUMN_DEF}; my $col_name = $info->{COLUMN_NAME}; $col_name =~ s/^\"(.*)\"$/$1/; my $extra_info = $self->_extra_column_info( $table, $col_name, $column_info, $info ) || {}; $column_info = { %$column_info, %$extra_info }; $result{$col_name} = $column_info; } $sth->finish; } my $sth = $self->_sth_for($table, undef, \'1 = 0'); $sth->execute; my @columns = @{ $sth->{NAME} }; COL: for my $i (0 .. 
$#columns) { next COL if %{ $result{ $columns[$i] }||{} }; my $column_info = {}; $column_info->{data_type} = lc $sth->{TYPE}[$i]; my $size = $sth->{PRECISION}[$i]; if (defined $size && defined $sth->{SCALE}[$i]) { $column_info->{size} = [$size, $sth->{SCALE}[$i]]; } elsif (defined $size) { $column_info->{size} = $size; } $column_info->{is_nullable} = $sth->{NULLABLE}[$i] ? 1 : 0; if ($column_info->{data_type} =~ m/^(.*?)\((.*?)\)$/) { $column_info->{data_type} = $1; $column_info->{size} = $2; } my $extra_info = $self->_extra_column_info($table, $columns[$i], $column_info, $sth) || {}; $column_info = { %$column_info, %$extra_info }; $result{ $columns[$i] } = $column_info; } $sth->finish; foreach my $col (keys %result) { my $colinfo = $result{$col}; my $type_num = $colinfo->{data_type}; my $type_name; if (defined $type_num && $type_num =~ /^-?\d+\z/ && $dbh->can('type_info')) { my $type_name = $self->_dbh_type_info_type_name($type_num); $colinfo->{data_type} = lc $type_name if $type_name; } } # check for instances of the same column name with different case in preserve_case=0 mode if (not $self->preserve_case) { my %lc_colnames; foreach my $col (keys %result) { push @{ $lc_colnames{lc $col} }, $col; } if (keys %lc_colnames != keys %result) { my @offending_colnames = map @$_, grep @$_ > 1, values %lc_colnames; my $offending_colnames = join ", ", map "'$_'", @offending_colnames; croak "columns $offending_colnames in table @{[ $table->sql_name ]} collide in preserve_case=0 mode. preserve_case=1 mode required"; } # apply lowercasing my %lc_result; while (my ($col, $info) = each %result) { $lc_result{ $self->_lc($col) } = $info; } %result = %lc_result; } return \%result; } # Need to override this for the buggy Firebird ODBC driver. sub _dbh_type_info_type_name { my ($self, $type_num) = @_; # We wrap it in a try block for MSSQL+DBD::Sybase, which can have issues. # TODO investigate further my $type_info = try { $self->dbh->type_info($type_num) }; return $type_info ? 
$type_info->{TYPE_NAME} : undef; } # do not use this, override _columns_info_for instead sub _extra_column_info {} # override to mask warnings if needed sub _dbh_table_info { my ($self, $dbh, $table) = (shift, shift, shift); return undef if !$dbh->can('table_info'); my $sth = $dbh->table_info(undef, $table->schema, $table->name); while (my $info = $sth->fetchrow_hashref) { next if !$self->_table_info_matches($table, $info); return $info; } return undef; } sub _table_info_matches { my ($self, $table, $info) = @_; no warnings 'uninitialized'; return $info->{TABLE_SCHEM} eq $table->schema && $info->{TABLE_NAME} eq $table->name; } # override to mask warnings if needed (see mysql) sub _dbh_column_info { my ($self, $dbh) = (shift, shift); return $dbh->column_info(@_); } # If a coderef uses DBI->connect, this should get its connect info. sub _try_infer_connect_info_from_coderef { my ($self, $code) = @_; my ($dsn, $user, $pass, $params); no warnings 'redefine'; local *DBI::connect = sub { (undef, $dsn, $user, $pass, $params) = @_; }; $code->(); return ($dsn, $user, $pass, $params); } sub dbh { my $self = shift; return $self->schema->storage->dbh; } sub _table_is_view { my ($self, $table) = @_; my $info = $self->_dbh_table_info($self->dbh, $table) or return 0; return $info->{TABLE_TYPE} eq 'VIEW'; } =head1 SEE ALSO L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/Base.pm0000644000175000017500000031030212650450246023662 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::Base; use strict; use warnings; use base qw/Class::Accessor::Grouped Class::C3::Componentised/; use MRO::Compat; use mro 'c3'; use Carp::Clan qw/^DBIx::Class/; use DBIx::Class::Schema::Loader::RelBuilder (); use Data::Dump 'dump'; use POSIX (); use File::Spec (); use Cwd (); use Digest::MD5 (); use Lingua::EN::Inflect::Number (); use Lingua::EN::Inflect::Phrase (); use String::ToIdentifier::EN (); use String::ToIdentifier::EN::Unicode (); use File::Temp (); use Class::Unload; use Class::Inspector (); use Scalar::Util 'looks_like_number'; use DBIx::Class::Schema::Loader::Column; use DBIx::Class::Schema::Loader::Utils qw/split_name dumper_squashed eval_package_without_redefine_warnings class_path slurp_file sigwarn_silencer firstidx uniq/; use DBIx::Class::Schema::Loader::Optional::Dependencies (); use Try::Tiny; use DBIx::Class (); use Encode qw/encode decode/; use List::Util qw/all any none/; use File::Temp 'tempfile'; use namespace::clean; our $VERSION = '0.07045'; __PACKAGE__->mk_group_ro_accessors('simple', qw/ schema schema_class exclude constraint additional_classes additional_base_classes left_base_classes components schema_components skip_relationships skip_load_external moniker_map col_accessor_map custom_column_info inflect_singular inflect_plural debug dump_directory dump_overwrite really_erase_my_files resultset_namespace default_resultset_class schema_base_class result_base_class result_roles use_moose only_autoclean overwrite_modifications dry_run generated_classes omit_version omit_timestamp relationship_attrs _tables classes _upgrading_classes monikers dynamic naming datetime_timezone datetime_locale config_file loader_class table_comments_table column_comments_table class_to_table moniker_to_table uniq_to_primary quiet allow_extra_m2m_cols /); 
__PACKAGE__->mk_group_accessors('simple', qw/ version_to_dump schema_version_to_dump _upgrading_from _upgrading_from_load_classes _downgrading_to_load_classes _rewriting_result_namespace use_namespaces result_namespace generate_pod pod_comment_mode pod_comment_spillover_length preserve_case col_collision_map rel_collision_map rel_name_map real_dump_directory result_components_map result_roles_map datetime_undef_if_invalid _result_class_methods naming_set filter_generated_code db_schema qualify_objects moniker_parts moniker_part_separator moniker_part_map /); my $CURRENT_V = 'v7'; my @CLASS_ARGS = qw( schema_components schema_base_class result_base_class additional_base_classes left_base_classes additional_classes components result_roles ); my $CR = "\x0d"; my $LF = "\x0a"; my $CRLF = "\x0d\x0a"; =head1 NAME DBIx::Class::Schema::Loader::Base - Base DBIx::Class::Schema::Loader Implementation. =head1 SYNOPSIS See L. =head1 DESCRIPTION This is the base class for the storage-specific C classes, and implements the common functionality between them. =head1 CONSTRUCTOR OPTIONS These constructor options are the base options for L. Available constructor options are: =head2 skip_relationships Skip setting up relationships. The default is to attempt the loading of relationships. =head2 skip_load_external Skip loading of other classes in @INC. The default is to merge all other classes with the same name found in @INC into the schema file we are creating. =head2 naming Static schemas (ones dumped to disk) will, by default, use the new-style relationship names and singularized Results, unless you're overwriting an existing dump made by an older version of L, in which case the backward compatible RelBuilder will be activated, and the appropriate monikerization used. Specifying naming => 'current' will disable the backward-compatible RelBuilder and use the new-style relationship names along with singularized Results, even when overwriting a dump made with an earlier version. 
The option also takes a hashref: naming => { relationships => 'v8', monikers => 'v8', column_accessors => 'v8', force_ascii => 1, } or naming => { ALL => 'v8', force_ascii => 1 } The keys are: =over 4 =item ALL Set L, L and L to the specified value. =item relationships How to name relationship accessors. =item monikers How to name Result classes. =item column_accessors How to name column accessors in Result classes. =item force_ascii For L mode and later, uses L instead of L to force monikers and other identifiers to ASCII. =back The values can be: =over 4 =item current Latest style, whatever that happens to be. =item v4 Unsingularlized monikers, C only relationships with no _id stripping. =item v5 Monikers singularized as whole words, C relationships for FKs on C constraints, C<_id> stripping for belongs_to relationships. Some of the C<_id> stripping edge cases in C<0.05003> have been reverted for the v5 RelBuilder. =item v6 All monikers and relationships are inflected using L, and there is more aggressive C<_id> stripping from relationship names. In general, there is very little difference between v5 and v6 schemas. =item v7 This mode is identical to C mode, except that monikerization of CamelCase table names is also done better (but best in v8.) CamelCase column names in case-preserving mode will also be handled better for relationship name inflection (but best in v8.) See L. In this mode, CamelCase L are normalized based on case transition instead of just being lowercased, so C becomes C. =item v8 (EXPERIMENTAL) The default mode is L, to get L mode, you have to specify it in L explicitly until C<0.08> comes out. L and L are created using L or L if L is set; this is only significant for names with non-C<\w> characters such as C<.>. CamelCase identifiers with words in all caps, e.g. C are supported correctly in this mode. 
For relationships, belongs_to accessors are made from column names by stripping postfixes other than C<_id> as well, for example just C, C<_?ref>, C<_?cd>, C<_?code> and C<_?num>, case insensitively. =item preserve For L, this option does not inflect the table names but makes monikers based on the actual name. For L this option does not normalize CamelCase column names to lowercase column accessors, but makes accessors that are the same names as the columns (with any non-\w chars replaced with underscores.) =item singular For L, singularizes the names using the most current inflector. This is the same as setting the option to L. =item plural For L, pluralizes the names, using the most current inflector. =back Dynamic schemas will always default to the 0.04XXX relationship names and won't singularize Results for backward compatibility, to activate the new RelBuilder and singularization put this in your C file: __PACKAGE__->naming('current'); Or if you prefer to use 0.07XXX features but insure that nothing breaks in the next major version upgrade: __PACKAGE__->naming('v7'); =head2 quiet If true, will not print the usual C messages. Does not affect warnings (except for warnings related to L.) =head2 dry_run If true, don't actually write out the generated files. This can only be used with static schema generation. =head2 generate_pod By default POD will be generated for columns and relationships, using database metadata for the text if available and supported. Comment metadata can be stored in two ways. The first is that you can create two tables named C and C respectively. These tables must exist in the same database and schema as the tables they describe. They both need to have columns named C and C. The second one needs to have a column named C. Then data stored in these tables will be used as a source of metadata about tables and comments. (If you wish you can change the name of these tables with the parameters L and L.) 
As a fallback you can use built-in commenting mechanisms. Currently this is only supported for PostgreSQL, Oracle and MySQL. To create comments in PostgreSQL you add statements of the form C, the same syntax is used in Oracle. To create comments in MySQL you add C to the end of the column or table definition. Note that MySQL restricts the length of comments, and also does not handle complex Unicode characters properly. Set this to C<0> to turn off all POD generation. =head2 pod_comment_mode Controls where table comments appear in the generated POD. Smaller table comments are appended to the C section of the documentation, and larger ones are inserted into C instead. You can force a C section to be generated with the comment always, only use C, or choose the length threshold at which the comment is forced into the description. =over 4 =item name Use C section only. =item description Force C always. =item auto Use C if length > L, this is the default. =back =head2 pod_comment_spillover_length When pod_comment_mode is set to C, this is the length of the comment at which it will be forced into a separate description section. The default is C<60> =head2 table_comments_table The table to look for comments about tables in. By default C. See L for details. This must not be a fully qualified name, the table will be looked for in the same database and schema as the table whose comment is being retrieved. =head2 column_comments_table The table to look for comments about columns in. By default C. See L for details. This must not be a fully qualified name, the table will be looked for in the same database and schema as the table/column whose comment is being retrieved. =head2 relationship_attrs Hashref of attributes to pass to each generated relationship, listed by type. Also supports relationship type 'all', containing options to pass to all generated relationships. 
Attributes set for more specific relationship types override those set in 'all', and any attributes specified by this option override the introspected attributes of the foreign key if any. For example: relationship_attrs => { has_many => { cascade_delete => 1, cascade_copy => 1 }, might_have => { cascade_delete => 1, cascade_copy => 1 }, }, use this to turn L cascades to on on your L and L relationships, they default to off. Can also be a coderef, for more precise control, in which case the coderef gets this hash of parameters (as a list:) rel_name # the name of the relationship rel_type # the type of the relationship: 'belongs_to', 'has_many' or 'might_have' local_source # the DBIx::Class::ResultSource object for the source the rel is *from* remote_source # the DBIx::Class::ResultSource object for the source the rel is *to* local_table # a DBIx::Class::Schema::Loader::Table object for the table of the source the rel is from local_cols # an arrayref of column names of columns used in the rel in the source it is from remote_table # a DBIx::Class::Schema::Loader::Table object for the table of the source the rel is to remote_cols # an arrayref of column names of columns used in the rel in the source it is to attrs # the attributes that would be set it should return the new hashref of attributes, or nothing for no changes. 
For example:

    relationship_attrs => sub {
        my %p = @_;

        say "the relationship name is: $p{rel_name}";
        say "the relationship is a: $p{rel_type}";
        say "the local class is: ",  $p{local_source}->result_class;
        say "the remote class is: ", $p{remote_source}->result_class;
        say "the local table is: ",  $p{local_table}->sql_name;
        say "the rel columns in the local table are: ", (join ", ", @{$p{local_cols}});
        say "the remote table is: ", $p{remote_table}->sql_name;
        say "the rel columns in the remote table are: ", (join ", ", @{$p{remote_cols}});

        if ($p{local_table} eq 'dogs' && @{$p{local_cols}} == 1 && $p{local_cols}[0] eq 'name') {
            $p{attrs}{could_be_snoopy} = 1;

            return $p{attrs};
        }
    },

These are the default attributes:

    has_many => {
        cascade_delete => 0,
        cascade_copy   => 0,
    },
    might_have => {
        cascade_delete => 0,
        cascade_copy   => 0,
    },
    belongs_to => {
        on_delete     => 'CASCADE',
        on_update     => 'CASCADE',
        is_deferrable => 1,
    },

For L relationships, these defaults are overridden by the attributes
introspected from the foreign key in the database, if this information is
available (and the driver is capable of retrieving it.)

This information overrides the defaults mentioned above, and is then itself
overridden by the user's L for C if any are specified.

In general, for most databases, for a plain foreign key with no rules, the
values for a L relationship will be:

    on_delete     => 'NO ACTION',
    on_update     => 'NO ACTION',
    is_deferrable => 0,

In the cases where an attribute is not supported by the DB, a value matching
the actual behavior is used, for example Oracle does not support C rules, so
C is set to C. This is done so that the behavior of the schema is preserved
when cross deploying to a different RDBMS such as SQLite for testing.

In the cases where the DB does not support C foreign keys, the value is set
to C<1> if L has a working C<< $storage->with_deferred_fk_checks >>. This is
done so that the same L code can be used, and cross deployed from and to
such databases.
=head2 debug

If set to true, each constructive L statement the loader decides to execute
will be C-ed before execution.

=head2 db_schema

Set the name of the schema to load (schema in the sense that your database
vendor means it).

Can be set to an arrayref of schema names for multiple schemas, or the
special value C<%> for all schemas.

For MSSQL, Sybase ASE, and Informix can be set to a hashref of databases as
keys and arrays of owners as values, set to the value:

    { '%' => '%' }

for all owners in all databases.

Name clashes resulting from the same table name in different
databases/schemas will be resolved automatically by prefixing the moniker
with the database and/or schema.

To prefix/suffix all monikers with the database and/or schema, see L.

=head2 moniker_parts

The database table names are represented by the L class in the loader, the
L class for Sybase ASE and L for Informix.

Monikers are created normally based on just the L property, corresponding
to the table name, but can consist of other parts of the fully qualified
name of the table.

The L option is an arrayref of methods on the table class corresponding to
parts of the fully qualified table name, defaulting to C<['name']>, in the
order those parts are used to create the moniker name. The parts are joined
together using L.

The C<'name'> entry B be present.

Below is a table of supported databases and possible L.

=over 4

=item * DB2, Firebird, mysql, Oracle, Pg, SQLAnywhere, SQLite, MS Access

C, C

=item * Informix, MSSQL, Sybase ASE

C, C, C

=back

=head2 moniker_part_separator

String used to join L when creating the moniker. Defaults to the empty
string. Use C<::> to get a separate namespace per database and/or schema.

=head2 constraint

Only load matching tables.

=head2 exclude

Exclude matching tables.

These can be specified either as a regex (preferably on the C form), or as
an arrayref of arrayrefs. Regexes are matched against the (unqualified)
table name, while arrayrefs are matched according to L.
For example:

    db_schema => [qw(some_schema other_schema)],
    moniker_parts => [qw(schema name)],
    constraint => [
        [ qr/\Asome_schema\z/ => qr/\A(?:foo|bar)\z/ ],
        [ qr/\Aother_schema\z/ => qr/\Abaz\z/ ],
    ],

In this case only the tables C and C in C and C in C will be dumped.

=head2 moniker_map

Overrides the default table name to moniker translation. Either

=over

=item *

a nested hashref, which will be traversed according to L

For example:

    moniker_parts => [qw(schema name)],
    moniker_map => {
        foo => {
            bar => "FooishBar",
        },
    },

In which case the table C in the C schema would get the moniker C.

=item *

a hashref of unqualified table name keys and moniker values

=item *

a coderef for a translator function taking a L argument (which stringifies
to the unqualified table name) and returning a scalar moniker

The function is also passed a coderef that can be called with either of the
hashref forms to get the moniker mapped accordingly. This is useful if you
need to handle some monikers specially, but want to use the hashref form
for the rest.

=back

If the hash entry does not exist, or the function returns a false value,
the code falls back to default behavior for that table name.

The default behavior is to split on case transition and non-alphanumeric
boundaries, singularize the resulting phrase, then join the titlecased
words together. Examples:

    Table Name       | Moniker Name
    ---------------------------------
    luser            | Luser
    luser_group      | LuserGroup
    luser-opts       | LuserOpt
    stations_visited | StationVisited
    routeChange      | RouteChange

=head2 moniker_part_map

Map for overriding the monikerization of individual L. The keys are the
moniker part to override, the value is either a hashref or coderef for
mapping the corresponding part of the moniker. If a coderef is used, it
gets called with the moniker part and the hash key the code ref was found
under.

For example:

    moniker_part_map => {
        schema => sub { ...
}, }, Given the table C, the code ref would be called with the arguments C and C, plus a coderef similar to the one described in L. L takes precedence over this. =head2 col_accessor_map Same as moniker_map, but for column accessor names. The nested hashref form is traversed according to L, with an extra level at the bottom for the column name. If a coderef is passed, the code is called with arguments of the DBIx::Class::Schema::Loader::Column object for the column, default accessor name that DBICSL would ordinarily give this column, { table_class => name of the DBIC class we are building, table_moniker => calculated moniker for this table (after moniker_map if present), table => table object of interface DBIx::Class::Schema::Loader::Table, full_table_name => schema-qualified name of the database table (RDBMS specific), schema_class => name of the schema class we are building, column_info => hashref of column info (data_type, is_nullable, etc), } coderef ref that can be called with a hashref map The L and L objects stringify to their unqualified names. =head2 rel_name_map Similar in idea to moniker_map, but different in the details. It can be a hashref or a code ref. If it is a hashref, keys can be either the default relationship name, or the moniker. The keys that are the default relationship name should map to the name you want to change the relationship to. Keys that are monikers should map to hashes mapping relationship names to their translation. You can do both at once, and the more specific moniker version will be picked up first. So, for instance, you could have { bar => "baz", Foo => { bar => "blat", }, } and relationships that would have been named C will now be named C except that in the table whose moniker is C it will be named C. 
If it is a coderef, it will be passed a hashref of this form: { name => default relationship name, type => the relationship type eg: C, local_class => name of the DBIC class we are building, local_moniker => moniker of the DBIC class we are building, local_columns => columns in this table in the relationship, remote_class => name of the DBIC class we are related to, remote_moniker => moniker of the DBIC class we are related to, remote_columns => columns in the other table in the relationship, # for type => "many_to_many" only: link_class => name of the DBIC class for the link table link_moniker => moniker of the DBIC class for the link table link_rel_name => name of the relationship to the link table } In addition it is passed a coderef that can be called with a hashref map. DBICSL will try to use the value returned as the relationship name. =head2 inflect_plural Just like L above (can be hash/code-ref, falls back to default if hash key does not exist or coderef returns false), but acts as a map for pluralizing relationship names. The default behavior is to utilize L. =head2 inflect_singular As L above, but for singularizing relationship names. Default behavior is to utilize L. =head2 schema_base_class Base class for your schema classes. Defaults to 'DBIx::Class::Schema'. =head2 schema_components List of components to load into the Schema class. =head2 result_base_class Base class for your table classes (aka result classes). Defaults to 'DBIx::Class::Core'. =head2 additional_base_classes List of additional base classes all of your table classes will use. =head2 left_base_classes List of additional base classes all of your table classes will use that need to be leftmost. =head2 additional_classes List of additional classes which all of your table classes will use. =head2 components List of additional components to be loaded into all of your Result classes. A good example would be L =head2 result_components_map A hashref of moniker keys and component values. 
Unlike L, which loads the given components into every Result class, this option allows you to load certain components for specified Result classes. For example: result_components_map => { StationVisited => '+YourApp::Schema::Component::StationVisited', RouteChange => [ '+YourApp::Schema::Component::RouteChange', 'InflateColumn::DateTime', ], } You may use this in conjunction with L. =head2 result_roles List of L roles to be applied to all of your Result classes. =head2 result_roles_map A hashref of moniker keys and role values. Unlike L, which applies the given roles to every Result class, this option allows you to apply certain roles for specified Result classes. For example: result_roles_map => { StationVisited => [ 'YourApp::Role::Building', 'YourApp::Role::Destination', ], RouteChange => 'YourApp::Role::TripEvent', } You may use this in conjunction with L. =head2 use_namespaces This is now the default, to go back to L pass a C<0>. Generate result class names suitable for L and call that instead of L. When using this option you can also specify any of the options for C (i.e. C, C, C), and they will be added to the call (and the generated result class names adjusted appropriately). =head2 dump_directory The value of this option is a perl libdir pathname. Within that directory this module will create a baseline manual L module set, based on what it creates at runtime. The created schema class will have the same classname as the one on which you are setting this option (and the ResultSource classes will be based on this name as well). Normally you wouldn't hard-code this setting in your schema class, as it is meant for one-time manual usage. See L for examples of the recommended way to access this functionality. =head2 dump_overwrite Deprecated. See L below, which does *not* mean the same thing as the old C setting from previous releases. =head2 really_erase_my_files Default false. 
If true, Loader will unconditionally delete any existing files before creating the new ones from scratch when dumping a schema to disk. The default behavior is instead to only replace the top portion of the file, up to and including the final stanza which contains C<# DO NOT MODIFY THE FIRST PART OF THIS FILE> leaving any customizations you placed after that as they were. When C is not set, if the output file already exists, but the aforementioned final stanza is not found, or the checksum contained there does not match the generated contents, Loader will croak and not touch the file. You should really be using version control on your schema classes (and all of the rest of your code for that matter). Don't blame me if a bug in this code wipes something out when it shouldn't have, you've been warned. =head2 overwrite_modifications Default false. If false, when updating existing files, Loader will refuse to modify any Loader-generated code that has been modified since its last run (as determined by the checksum Loader put in its comment lines). If true, Loader will discard any manual modifications that have been made to Loader-generated code. Again, you should be using version control on your schema classes. Be careful with this option. =head2 omit_version Omit the package version from the signature comment. =head2 omit_timestamp Omit the creation timestamp from the signature comment. =head2 custom_column_info Hook for adding extra attributes to the L for a column. Must be a coderef that returns a hashref with the extra attributes. Receives the L
(which stringifies to the unqualified table name), column name and column_info. For example: custom_column_info => sub { my ($table, $column_name, $column_info) = @_; if ($column_name eq 'dog' && $column_info->{default_value} eq 'snoopy') { return { is_snoopy => 1 }; } }, This attribute can also be used to set C on a non-datetime column so it also receives the L and/or L. =head2 datetime_timezone Sets the timezone attribute for L for all columns with the DATE/DATETIME/TIMESTAMP data_types. =head2 datetime_locale Sets the locale attribute for L for all columns with the DATE/DATETIME/TIMESTAMP data_types. =head2 datetime_undef_if_invalid Pass a C<0> for this option when using MySQL if you B want C<< datetime_undef_if_invalid => 1 >> in your column info for DATE, DATETIME and TIMESTAMP columns. The default is recommended to deal with data such as C<00/00/00> which sometimes ends up in such columns in MySQL. =head2 config_file File in Perl format, which should return a HASH reference, from which to read loader options. =head2 preserve_case Normally database names are lowercased and split by underscore, use this option if you have CamelCase database names. Drivers for case sensitive databases like Sybase ASE or MSSQL with a case-sensitive collation will turn this option on unconditionally. B L = C is highly recommended with this option as the semantics of this mode are much improved for CamelCase database names. L = C or greater is required with this option. =head2 qualify_objects Set to true to prepend the L to table names for C<< __PACKAGE__->table >> calls, and to some other things like Oracle sequences. This attribute is automatically set to true for multi db_schema configurations, unless explicitly set to false by the user. =head2 use_moose Creates Schema and Result classes that use L, L and L (or L, see below). The default content after the md5 sum also makes the classes immutable. It is safe to upgrade your existing Schema to this option. 
=head2 only_autoclean By default, we use L to remove imported functions from your generated classes. It uses L to do this, after telling your object's metaclass that any operator Ls in your class are methods, which will cause namespace::autoclean to spare them from removal. This prevents the "Hey, where'd my overloads go?!" effect. If you don't care about operator overloads, enabling this option falls back to just using L itself. If none of the above made any sense, or you don't have some pressing need to only use L, leaving this set to the default is recommended. =head2 col_collision_map This option controls how accessors for column names which collide with perl methods are named. See L for more information. This option takes either a single L format or a hashref of strings which are compiled to regular expressions that map to L formats. Examples: col_collision_map => 'column_%s' col_collision_map => { '(.*)' => 'column_%s' } col_collision_map => { '(foo).*(bar)' => 'column_%s_%s' } =head2 rel_collision_map Works just like L, but for relationship names/accessors rather than column names/accessors. The default is to just append C<_rel> to the relationship name, see L. =head2 uniq_to_primary Automatically promotes the largest unique constraints with non-nullable columns on tables to primary keys, assuming there is only one largest unique constraint. =head2 allow_extra_m2m_cols Generate C relationship bridges even if the link table has extra columns other than the foreign keys. The primary key must still equal the union of the foreign keys. =head2 filter_generated_code An optional hook that lets you filter the generated text for various classes through a function that change it in any way that you want. The function will receive the type of file, C or C, class and code; and returns the new code to use instead. For instance you could add custom comments, or do anything else that you want. 
The option can also be set to a string, which is then used as a filter
program, e.g. C. If this exists but fails to return text matching C, no
file will be generated.

    filter_generated_code => sub {
        my ($type, $class, $text) = @_;
        ...
        return $new_code;
    }

You can also use this option to set L in your generated classes. This will
leave the generated code in the default format, but will allow you to tidy
your classes at any point in future, without worrying about changing the
portions of the file which are checksummed, since C will just ignore all
text between the markers.

    filter_generated_code => sub {
        return "#<<<\n$_[2]\n#>>>";
    }

=head1 METHODS

None of these methods are intended for direct invocation by regular users
of L. Some are proxied via L.

=cut

# Normalize each named option slot on the object to an arrayref: a
# missing/false value becomes a fresh empty arrayref, and any other
# non-arrayref value is wrapped in a one-element arrayref.  Existing
# arrayrefs are left untouched.
sub _ensure_arrayref {
    my ($self, @fields) = @_;

    for my $field (@fields) {
        my $value = $self->{$field} || [];
        $self->{$field} = ref $value eq 'ARRAY' ? $value : [ $value ];
    }
}

=head2 new

Constructor for L, used internally by L.
=cut

sub new {
    my ( $class, %args ) = @_;

    # Back-compat alias: column_accessor_map was renamed col_accessor_map.
    if (exists $args{column_accessor_map}) {
        $args{col_accessor_map} = delete $args{column_accessor_map};
    }

    my $self = { %args };

    # don't lose undef options
    for (values %$self) {
        $_ = 0 unless defined $_;
    }

    bless $self => $class;

    # Options from a config_file only fill in keys the caller did not pass
    # explicitly.
    if (my $config_file = $self->config_file) {
        my $config_opts = do $config_file;

        croak "Error reading config from $config_file: $@" if $@;

        croak "Config file $config_file must be a hashref" unless ref($config_opts) eq 'HASH';

        while (my ($k, $v) = each %$config_opts) {
            $self->{$k} = $v unless exists $self->{$k};
        }
    }

    # Back-compat aliases for the *_map options; giving both the old and
    # the new spelling is an error.
    if (defined $self->{result_component_map}) {
        if (defined $self->result_components_map) {
            croak "Specify only one of result_components_map or result_component_map";
        }
        $self->result_components_map($self->{result_component_map})
    }

    if (defined $self->{result_role_map}) {
        if (defined $self->result_roles_map) {
            croak "Specify only one of result_roles_map or result_role_map";
        }
        $self->result_roles_map($self->{result_role_map})
    }

    croak "the result_roles and result_roles_map options may only be used in conjunction with use_moose=1"
        if ((not defined $self->use_moose) || (not $self->use_moose))
            && ((defined $self->result_roles) || (defined $self->result_roles_map));

    # Normalize list-valued options to arrayrefs.
    $self->_ensure_arrayref(qw/schema_components
                               additional_classes
                               additional_base_classes
                               left_base_classes
                               components
                               result_roles
                              /);

    $self->_validate_class_args;

    # result_components_map: values may be a single component or an
    # arrayref of components; normalize to arrayrefs.
    croak "result_components_map must be a hash"
        if defined $self->result_components_map
            && ref $self->result_components_map ne 'HASH';

    if ($self->result_components_map) {
        my %rc_map = %{ $self->result_components_map };
        foreach my $moniker (keys %rc_map) {
            $rc_map{$moniker} = [ $rc_map{$moniker} ] unless ref $rc_map{$moniker};
        }
        $self->result_components_map(\%rc_map);
    }
    else {
        $self->result_components_map({});
    }
    $self->_validate_result_components_map;

    # result_roles_map: same normalization as result_components_map.
    croak "result_roles_map must be a hash"
        if defined $self->result_roles_map
            && ref $self->result_roles_map ne 'HASH';

    if ($self->result_roles_map) {
        my %rr_map = %{ $self->result_roles_map };
        foreach my $moniker (keys %rr_map) {
            $rr_map{$moniker} = [ $rr_map{$moniker} ] unless ref $rr_map{$moniker};
        }
        $self->result_roles_map(\%rr_map);
    } else {
        $self->result_roles_map({});
    }
    $self->_validate_result_roles_map;

    if ($self->use_moose) {
        if (not DBIx::Class::Schema::Loader::Optional::Dependencies->req_ok_for('use_moose')) {
            die sprintf "You must install the following CPAN modules to enable the use_moose option: %s.\n",
                DBIx::Class::Schema::Loader::Optional::Dependencies->req_missing_for('use_moose');
        }
    }

    # Internal bookkeeping structures.
    $self->{_tables} = {};
    $self->{monikers} = {};
    $self->{moniker_to_table} = {};
    $self->{class_to_table} = {};
    $self->{classes} = {};
    $self->{_upgrading_classes} = {};
    $self->{generated_classes} = [];

    $self->{schema_class} ||= ( ref $self->{schema} || $self->{schema} );
    $self->{schema} ||= $self->{schema_class};
    $self->{table_comments_table} ||= 'table_comments';
    $self->{column_comments_table} ||= 'column_comments';

    croak "dump_overwrite is deprecated. Please read the"
        . " DBIx::Class::Schema::Loader::Base documentation"
        if $self->{dump_overwrite};

    # A schema is "dynamic" when no dump_directory was given.
    $self->{dynamic} = ! $self->{dump_directory};

    croak "dry_run can only be used with static schema generation"
        if $self->dynamic and $self->dry_run;

    $self->{temp_directory} ||= File::Temp::tempdir( 'dbicXXXX',
                                                     TMPDIR => 1,
                                                     CLEANUP => 1,
                                                   );

    $self->{dump_directory} ||= $self->{temp_directory};

    $self->real_dump_directory($self->{dump_directory});
    $self->version_to_dump($DBIx::Class::Schema::Loader::VERSION);
    $self->schema_version_to_dump($DBIx::Class::Schema::Loader::VERSION);

    # Record whether the user set the naming option at all, then expand the
    # scalar and ALL forms into the per-aspect hashref form.
    if (not defined $self->naming) {
        $self->naming_set(0);
    }
    else {
        $self->naming_set(1);
    }

    if ((not ref $self->naming) && defined $self->naming) {
        my $naming_ver = $self->naming;
        $self->{naming} = {
            relationships => $naming_ver,
            monikers => $naming_ver,
            column_accessors => $naming_ver,
        };
    }
    elsif (ref $self->naming eq 'HASH' && exists $self->naming->{ALL}) {
        my $val = delete $self->naming->{ALL};

        $self->naming->{$_} = $val foreach qw/relationships monikers column_accessors/;
    }

    # Resolve the symbolic 'current' value to the concrete version.
    if ($self->naming) {
        foreach my $key (qw/relationships monikers column_accessors/) {
            $self->naming->{$key} = $CURRENT_V if ($self->naming->{$key}||'') eq 'current';
        }
    }
    $self->{naming} ||= {};

    if ($self->custom_column_info && ref $self->custom_column_info ne 'CODE') {
        croak 'custom_column_info must be a CODE ref';
    }

    $self->_check_back_compat;

    # Defaults for options not set by the user or by backcompat handling.
    $self->use_namespaces(1) unless defined $self->use_namespaces;
    $self->generate_pod(1) unless defined $self->generate_pod;
    $self->pod_comment_mode('auto') unless defined $self->pod_comment_mode;
    $self->pod_comment_spillover_length(60) unless defined $self->pod_comment_spillover_length;

    # A plain string for *_collision_map is shorthand for a catch-all
    # pattern mapping to that sprintf format.
    if (my $col_collision_map = $self->col_collision_map) {
        if (my $reftype = ref $col_collision_map) {
            if ($reftype ne 'HASH') {
                croak "Invalid type $reftype for option 'col_collision_map'";
            }
        }
        else {
            $self->col_collision_map({ '(.*)' => $col_collision_map });
        }
    }

    if (my $rel_collision_map = $self->rel_collision_map) {
        if (my $reftype = ref $rel_collision_map) {
            if ($reftype ne 'HASH') {
                croak "Invalid type $reftype for option 'rel_collision_map'";
            }
        }
        else {
            $self->rel_collision_map({ '(.*)' => $rel_collision_map });
        }
    }

    if (defined(my $rel_name_map = $self->rel_name_map)) {
        my $reftype = ref $rel_name_map;
        if ($reftype ne 'HASH' && $reftype ne 'CODE') {
            croak "Invalid type $reftype for option 'rel_name_map', must be HASH or CODE";
        }
    }

    if (defined(my $filter = $self->filter_generated_code)) {
        my $reftype = ref $filter;
        if ($reftype && $reftype ne 'CODE') {
            # NOTE(review): the message below has an unbalanced quote --
            # "'filter_generated_code, ..." is missing its closing ' .
            croak "Invalid type $reftype for option 'filter_generated_code, must be a scalar or a CODE reference";
        }
    }

    # db_schema: normalize to an arrayref, and turn on qualify_objects for
    # multi-schema (or wildcard) configurations unless the user said
    # otherwise.
    if (defined $self->db_schema) {
        if (ref $self->db_schema eq 'ARRAY') {
            if (@{ $self->db_schema } > 1 && not defined $self->{qualify_objects}) {
                $self->{qualify_objects} = 1;
            }
            elsif (@{ $self->db_schema } == 0) {
                $self->{db_schema} = undef;
            }
        }
        elsif (not ref $self->db_schema) {
            if ($self->db_schema eq '%' && not defined $self->{qualify_objects}) {
                $self->{qualify_objects} = 1;
            }

            $self->{db_schema} = [ $self->db_schema ];
        }
    }

    # moniker_parts: default to ['name'], normalize a plain scalar, and
    # require the mandatory 'name' entry.
    if (not $self->moniker_parts) {
        $self->moniker_parts(['name']);
    }
    else {
        if (not ref $self->moniker_parts) {
            $self->moniker_parts([ $self->moniker_parts ]);
        }
        if (ref $self->moniker_parts ne 'ARRAY') {
            croak 'moniker_parts must be an arrayref';
        }
        if (none { $_ eq 'name' } @{ $self->moniker_parts }) {
            croak "moniker_parts option *must* contain 'name'";
        }
    }

    if (not defined $self->moniker_part_separator) {
        $self->moniker_part_separator('');
    }
    if (not defined $self->moniker_part_map) {
        $self->moniker_part_map({}),
    }

    return $self;
}

# Decide whether (and how) to run in a backward-compatibility mode, based
# on the naming/use_namespaces options and, for static schemas, on what an
# existing dump file was generated with.
sub _check_back_compat {
    my ($self) = @_;

    # dynamic schemas will always be in 0.04006 mode, unless overridden
    if ($self->dynamic) {
        # just in case, though no one is likely to dump a dynamic schema
        $self->schema_version_to_dump('0.04006');

        if (not $self->naming_set) {
            # NOTE(review): this statement is corrupted in the extracted
            # source -- presumably it was "warn <<EOF unless ..." with a
            # heredoc whose text has been lost; reproduced as found.
            warn <_upgrading_from('v4');
        }

        if ((not defined $self->use_namespaces) && ($self->naming_set)) {
            $self->use_namespaces(1);
        }

        $self->naming->{relationships} ||= 'v4';
        $self->naming->{monikers} ||= 'v4';

        if ($self->use_namespaces) {
            $self->_upgrading_from_load_classes(1);
        }
        else {
            $self->use_namespaces(0);
        }

        return;
    }

    # otherwise check if we need backcompat mode for a static schema
    my $filename = $self->get_dump_filename($self->schema_class);
    return unless -e $filename;

    my ($old_gen, $old_md5, $old_ver, $old_ts, $old_custom) =
        $self->_parse_generated_file($filename);

    return unless $old_ver;

    # determine if the existing schema was dumped with use_moose => 1
    if (! defined $self->use_moose) {
        $self->{use_moose} = 1 if $old_gen =~ /^ (?!\s*\#) use \s+ Moose/xm;
    }

    my $load_classes = ($old_gen =~ /^__PACKAGE__->load_classes;/m) ? 1 : 0;

    my $result_namespace = do { ($old_gen =~ /result_namespace => (.+)/) ? $1 : '' };
    my $ds = eval $result_namespace;
    die <<"EOF" if $@;
Could not eval expression '$result_namespace' for result_namespace from $filename: $@
EOF
    $result_namespace = $ds || '';

    if ($load_classes && (not defined $self->use_namespaces)) {
        warn <<"EOF" unless $ENV{SCHEMA_LOADER_BACKCOMPAT};
'load_classes;' static schema detected, turning off 'use_namespaces'. Set the 'use_namespaces' attribute or the SCHEMA_LOADER_BACKCOMPAT environment variable to disable this warning. See perldoc DBIx::Class::Schema::Loader::Manual::UpgradingFromV4 for more details.
EOF
        $self->use_namespaces(0);
    }
    elsif ($load_classes && $self->use_namespaces) {
        $self->_upgrading_from_load_classes(1);
    }
    elsif ((not $load_classes) && defined $self->use_namespaces && ! $self->use_namespaces) {
        $self->_downgrading_to_load_classes( $result_namespace || 'Result' );
    }
    elsif ((not defined $self->use_namespaces) || $self->use_namespaces) {
        if (not $self->result_namespace) {
            $self->result_namespace($result_namespace || 'Result');
        }
        elsif ($result_namespace ne $self->result_namespace) {
            $self->_rewriting_result_namespace( $result_namespace || 'Result' );
        }
    }

    # XXX when we go past .0 this will need fixing
    my ($v) = $old_ver =~ /([1-9])/;
    $v = "v$v";

    return if ($v eq $CURRENT_V || $old_ver =~ /^0\.\d\d999/);

    if (not %{ $self->naming }) {
        warn <<"EOF" unless $ENV{SCHEMA_LOADER_BACKCOMPAT};
Version $old_ver static schema detected, turning on backcompat mode. Set the 'naming' attribute or the SCHEMA_LOADER_BACKCOMPAT environment variable to disable this warning. See: 'naming' in perldoc DBIx::Class::Schema::Loader::Base . See perldoc DBIx::Class::Schema::Loader::Manual::UpgradingFromV4 if upgrading from version 0.04006.
EOF
        $self->naming->{relationships} ||= $v;
        $self->naming->{monikers} ||= $v;
        $self->naming->{column_accessors} ||= $v;

        $self->schema_version_to_dump($old_ver);
    }
    else {
        $self->_upgrading_from($v);
    }
}

# Validate every class-list option named in @CLASS_ARGS.
sub _validate_class_args {
    my $self = shift;

    foreach my $k (@CLASS_ARGS) {
        next unless $self->$k;

        my @classes = ref $self->$k eq 'ARRAY' ? @{ $self->$k } : $self->$k;
        $self->_validate_classes($k, \@classes);
    }
}

# Validate the component classes in result_components_map.
sub _validate_result_components_map {
    my $self = shift;

    foreach my $classes (values %{ $self->result_components_map }) {
        $self->_validate_classes('result_components_map', $classes);
    }
}

# Validate the role classes in result_roles_map.
sub _validate_result_roles_map {
    my $self = shift;

    foreach my $classes (values %{ $self->result_roles_map }) {
        $self->_validate_classes('result_roles_map', $classes);
    }
}

# Croak unless every class named in $classes (taken from loader option
# $key) is a valid, installed class name.
sub _validate_classes {
    my $self = shift;
    my $key = shift;
    my $classes = shift;

    # make a copy to not destroy original
    my @classes = @$classes;

    foreach my $c (@classes) {
        # components default to being under the DBIx::Class namespace unless they
        # are preceded with a '+'
        if ( $key =~ m/component/ && $c !~ s/^\+// ) {
            $c = 'DBIx::Class::' . $c;
        }

        # 1 == installed, 0 == not installed, undef == invalid classname
        my $installed = Class::Inspector->installed($c);
        if ( defined($installed) ) {
            if ( $installed == 0 ) {
                croak qq/$c, as specified in the loader option "$key", is not installed/;
            }
        } else {
            croak qq/$c, as specified in the loader option "$key", is an invalid class name/;
        }
    }
}

# Search @INC for $file, skipping the copy (if any) that lives inside our
# own dump_directory; returns the full path, or nothing if not found.
sub _find_file_in_inc {
    my ($self, $file) = @_;

    foreach my $prefix (@INC) {
        my $fullpath = File::Spec->catfile($prefix, $file);
        # abs_path pure-perl fallback warns for non-existent files
        local $SIG{__WARN__} = sigwarn_silencer(qr/^stat\(.*\Q$file\E\)/);
        return $fullpath if -f $fullpath
            # abs_path throws on Windows for nonexistent files
            and (try { Cwd::abs_path($fullpath) }) ne
                ((try { Cwd::abs_path(File::Spec->catfile($self->dump_directory, $file)) }) || '');
    }

    return;
}

# As _find_file_in_inc, but takes a class name.
sub _find_class_in_inc {
    my ($self, $class) = @_;

    return $self->_find_file_in_inc(class_path($class));
}

# True when any class-name rewriting mode is active for this run.
sub _rewriting {
    my $self = shift;

    return $self->_upgrading_from
        || $self->_upgrading_from_load_classes
        || $self->_downgrading_to_load_classes
        || $self->_rewriting_result_namespace
    ;
}

# Rewrite old class names embedded in $code to their current names, using
# the _upgrading_classes map.  (Statement continues on the next source line.)
sub _rewrite_old_classnames {
    my ($self, $code) = @_;

    return $code unless $self->_rewriting;

    my %old_classes =
reverse %{ $self->_upgrading_classes }; my $re = join '|', keys %old_classes; $re = qr/\b($re)\b/; $code =~ s/$re/$old_classes{$1} || $1/eg; return $code; } sub _load_external { my ($self, $class) = @_; return if $self->{skip_load_external}; # so that we don't load our own classes, under any circumstances local *INC = [ grep $_ ne $self->dump_directory, @INC ]; my $real_inc_path = $self->_find_class_in_inc($class); my $old_class = $self->_upgrading_classes->{$class} if $self->_rewriting; my $old_real_inc_path = $self->_find_class_in_inc($old_class) if $old_class && $old_class ne $class; return unless $real_inc_path || $old_real_inc_path; if ($real_inc_path) { # If we make it to here, we loaded an external definition warn qq/# Loaded external class definition for '$class'\n/ if $self->debug; my $code = $self->_rewrite_old_classnames(slurp_file $real_inc_path); if ($self->dynamic) { # load the class too eval_package_without_redefine_warnings($class, $code); } $self->_ext_stmt($class, qq|# These lines were loaded from '$real_inc_path' found in \@INC.\n| .qq|# They are now part of the custom portion of this file\n| .qq|# for you to hand-edit. If you do not either delete\n| .qq|# this section or remove that file from \@INC, this section\n| .qq|# will be repeated redundantly when you re-create this\n| .qq|# file again via Loader! See skip_load_external to disable\n| .qq|# this feature.\n| ); chomp $code; $self->_ext_stmt($class, $code); $self->_ext_stmt($class, qq|# End of lines loaded from '$real_inc_path'| ); } if ($old_real_inc_path) { my $code = slurp_file $old_real_inc_path; $self->_ext_stmt($class, <<"EOF"); # These lines were loaded from '$old_real_inc_path', # based on the Result class name that would have been created by an older # version of the Loader. For a static schema, this happens only once during # upgrade. See skip_load_external to disable this feature. 
EOF
        $code = $self->_rewrite_old_classnames($code);

        if ($self->dynamic) {
            warn <<"EOF";
Detected external content in '$old_real_inc_path', a class name that would
have been used by an older version of the Loader.

* PLEASE RENAME THIS CLASS: from '$old_class' to '$class', as that is the
new name of the Result.
EOF
            eval_package_without_redefine_warnings($class, $code);
        }

        chomp $code;
        $self->_ext_stmt($class, $code);
        $self->_ext_stmt($class,
            qq|# End of lines loaded from '$old_real_inc_path'|
        );
    }
}

=head2 load

Does the actual schema-construction work.

=cut

sub load {
    my $self = shift;

    $self->_load_tables($self->_tables_list);
}

=head2 rescan

Arguments: schema

Rescan the database for changes. Returns a list of the newly added table
monikers.

The schema argument should be the schema class or object to be affected.  It
should probably be derived from the original schema_class used during L.

=cut

sub rescan {
    my ($self, $schema) = @_;

    $self->{schema} = $schema;
    $self->_relbuilder->{schema} = $schema;

    # Find tables that exist in the database but not yet in our registry.
    my @created;
    my @current = $self->_tables_list;

    foreach my $table (@current) {
        if(!exists $self->_tables->{$table->sql_name}) {
            push(@created, $table);
        }
    }

    # Drop registered tables that no longer exist in the database.
    my %current;
    @current{map $_->sql_name, @current} = ();
    foreach my $table (values %{ $self->_tables }) {
        if (not exists $current{$table->sql_name}) {
            $self->_remove_table($table);
        }
    }

    # Reset per-run dump state so the reload starts clean.
    delete @$self{qw/_dump_storage _relations_started _uniqs_started/};

    # Previously the return value was captured in an unused lexical
    # ($loaded); call in void context instead -- _load_tables does not
    # inspect wantarray, so this is behavior-neutral.
    $self->_load_tables(@current);

    foreach my $table (@created) {
        $self->monikers->{$table->sql_name} = $self->_table2moniker($table);
    }

    return map { $self->monikers->{$_->sql_name} } @created;
}

# Lazily construct (and cache) the RelBuilder instance appropriate for the
# configured relationship naming version; returns nothing when relationship
# loading is disabled.
sub _relbuilder {
    my ($self) = @_;

    return if $self->{skip_relationships};

    return $self->{relbuilder} ||= do {
        # Map backcompat naming versions to their RelBuilder compat subclass
        # suffix; the current version uses the plain RelBuilder.
        my $relbuilder_suff = {qw{
            v4  ::Compat::v0_040
            v5  ::Compat::v0_05
            v6  ::Compat::v0_06
            v7  ::Compat::v0_07
        }}->{$self->naming->{relationships}||$CURRENT_V} || '';

        my $relbuilder_class = 'DBIx::Class::Schema::Loader::RelBuilder'.$relbuilder_suff;
$self->ensure_class_loaded($relbuilder_class); $relbuilder_class->new($self); }; } sub _load_tables { my ($self, @tables) = @_; # Save the new tables to the tables list and compute monikers foreach (@tables) { $self->_tables->{$_->sql_name} = $_; $self->monikers->{$_->sql_name} = $self->_table2moniker($_); } # check for moniker clashes my $inverse_moniker_idx; foreach my $imtable (values %{ $self->_tables }) { push @{ $inverse_moniker_idx->{$self->monikers->{$imtable->sql_name}} }, $imtable; } my @clashes; foreach my $moniker (keys %$inverse_moniker_idx) { my $imtables = $inverse_moniker_idx->{$moniker}; if (@$imtables > 1) { my $different_databases = $imtables->[0]->can('database') && (uniq map $_->database||'', @$imtables) > 1; my $different_schemas = (uniq map $_->schema||'', @$imtables) > 1; if ($different_databases || $different_schemas) { my ($use_schema, $use_database) = (1, 0); if ($different_databases) { $use_database = 1; # If any monikers are in the same database, we have to distinguish by # both schema and database. 
my %db_counts; $db_counts{$_}++ for map $_->database, @$imtables; $use_schema = any { $_ > 1 } values %db_counts; } foreach my $tbl (@$imtables) { delete $self->monikers->{$tbl->sql_name}; } my $moniker_parts = [ @{ $self->moniker_parts } ]; my $have_schema = any { $_ eq 'schema' } @{ $self->moniker_parts }; my $have_database = any { $_ eq 'database' } @{ $self->moniker_parts }; unshift @$moniker_parts, 'schema' if $use_schema && !$have_schema; unshift @$moniker_parts, 'database' if $use_database && !$have_database; local $self->{moniker_parts} = $moniker_parts; my %new_monikers; foreach my $tbl (@$imtables) { $new_monikers{$tbl->sql_name} = $self->_table2moniker($tbl); } foreach my $name (map $_->sql_name, @$imtables) { $self->monikers->{$name} = $new_monikers{$name}; } # check if there are still clashes my %by_moniker; while (my ($t, $m) = each %new_monikers) { push @{ $by_moniker{$m} }, $t; } foreach my $m (grep @{ $by_moniker{$_} } > 1, keys %by_moniker) { push @clashes, sprintf ("tried disambiguating by moniker_parts, but tables %s still reduced to the same source moniker '%s'", join (', ', @{ $by_moniker{$m} }), $m, ); } } else { push @clashes, sprintf ("tables %s reduced to the same source moniker '%s'", join (', ', map $_->sql_name, @$imtables), $moniker, ); } } } if (@clashes) { die 'Unable to load schema - chosen moniker/class naming style results in moniker clashes. ' . 'Change the naming style, or supply an explicit moniker_map: ' . join ('; ', @clashes) . 
"\n" ; } foreach my $tbl (@tables) { $self->_make_src_class($tbl); } foreach my $tbl (@tables) { $self->_setup_src_meta($tbl); } if(!$self->skip_relationships) { # The relationship loader needs a working schema local $self->{quiet} = 1; local $self->{dump_directory} = $self->{temp_directory}; local $self->{generated_classes} = []; local $self->{dry_run} = 0; $self->_reload_classes(\@tables); $self->_load_relationships(\@tables); # Remove that temp dir from INC so it doesn't get reloaded @INC = grep $_ ne $self->dump_directory, @INC; } foreach my $tbl (@tables) { $self->_load_roles($tbl); } foreach my $tbl (map { $self->classes->{$_->sql_name} } @tables) { $self->_load_external($tbl); } # Reload without unloading first to preserve any symbols from external # packages. $self->_reload_classes(\@tables, { unload => 0 }); # Drop temporary cache delete $self->{_cache}; return \@tables; } sub _reload_classes { my ($self, $tables, $opts) = @_; my @tables = @$tables; my $unload = $opts->{unload}; $unload = 1 unless defined $unload; # so that we don't repeat custom sections @INC = grep $_ ne $self->dump_directory, @INC; $self->_dump_to_dir(map { $self->classes->{$_->sql_name} } @tables); unshift @INC, $self->dump_directory; return if $self->dry_run; my @to_register; my %have_source = map { $_ => $self->schema->source($_) } $self->schema->sources; for my $table (@tables) { my $moniker = $self->monikers->{$table->sql_name}; my $class = $self->classes->{$table->sql_name}; { no warnings 'redefine'; local *Class::C3::reinitialize = sub {}; # to speed things up, reinitialized below use warnings; if (my $mc = $self->_moose_metaclass($class)) { $mc->make_mutable; } Class::Unload->unload($class) if $unload; my ($source, $resultset_class); if ( ($source = $have_source{$moniker}) && ($resultset_class = $source->resultset_class) && ($resultset_class ne 'DBIx::Class::ResultSet') ) { my $has_file = Class::Inspector->loaded_filename($resultset_class); if (my $mc = 
$self->_moose_metaclass($resultset_class)) {
                    $mc->make_mutable;
                }
                Class::Unload->unload($resultset_class) if $unload;
                $self->_reload_class($resultset_class) if $has_file;
            }
            $self->_reload_class($class);
        }
        push @to_register, [$moniker, $class];
    }

    # Reinitialize the C3 MRO once after all classes have been (re)loaded,
    # rather than on every unload/reload (reinitialize was locally stubbed
    # out above for speed).
    Class::C3->reinitialize;
    for (@to_register) {
        $self->schema->register_class(@$_);
    }
}

# Return the Moose metaclass of $_[1] if it has one, undef otherwise.
# Never loads Class::MOP itself.
sub _moose_metaclass {
    return undef unless $INC{'Class/MOP.pm'};   # if CMOP is not loaded the class could not have loaded in the 1st place

    my $class = $_[1];

    my $mc = try { Class::MOP::class_of($class) }
        or return undef;

    return $mc->isa('Moose::Meta::Class') ? $mc : undef;
}

# We use this instead of ensure_class_loaded when there are package symbols we
# want to preserve.
sub _reload_class {
    my ($self, $class) = @_;

    # Forget the cached require() entry so the freshly dumped file is read.
    delete $INC{ +class_path($class) };

    try {
        eval_package_without_redefine_warnings ($class, "require $class");
    }
    catch {
        # Include the dumped source in the error message to ease debugging.
        my $source = slurp_file $self->_get_dump_filename($class);
        die "Failed to reload class $class: $_.\n\nCLASS SOURCE:\n\n$source";
    };
}

# Map a class name to its .pm path under the *current* dump_directory
# (which may be a temp dir for a dynamic schema -- see get_dump_filename).
sub _get_dump_filename {
    my ($self, $class) = (@_);

    $class =~ s{::}{/}g;
    return $self->dump_directory . q{/} . $class . q{.pm};
}

=head2 get_dump_filename

Arguments: class

Returns the full path to the file for a class that the class has been or will
be dumped to. This is a file in a temp dir for a dynamic schema.
=cut

sub get_dump_filename {
    my ($self, $class) = (@_);

    # Always resolve against the real (user-visible) dump directory, even
    # when a temp directory is currently in effect for a dynamic schema.
    local $self->{dump_directory} = $self->real_dump_directory;

    return $self->_get_dump_filename($class);
}

# Create every missing directory level needed to hold the .pm file for
# $class under dump_directory. No-op in dry-run mode.
sub _ensure_dump_subdirs {
    my ($self, $class) = (@_);

    return if $self->dry_run;

    my @name_parts = split(/::/, $class);
    pop @name_parts; # we don't care about the very last element,
                     # which is a filename

    my $dir = $self->dump_directory;
    while (1) {
        if(!-d $dir) {
            mkdir($dir) or croak "mkdir('$dir') failed: $!";
        }
        last if !@name_parts;
        $dir = File::Spec->catdir($dir, shift @name_parts);
    }
}

# Write the schema class plus all given result classes out to dump_directory.
sub _dump_to_dir {
    my ($self, @classes) = @_;

    my $schema_class = $self->schema_class;
    my $schema_base_class = $self->schema_base_class || 'DBIx::Class::Schema';

    my $target_dir = $self->dump_directory;
    warn "Dumping manual schema for $schema_class to directory $target_dir ...\n"
        unless $self->dynamic or $self->quiet;

    # Auto-generated header of the schema class; the user-owned "custom
    # content" tail is appended later by _write_classfile.
    my $schema_text =
          qq|use utf8;\n|
        . qq|package $schema_class;\n\n|
        . qq|# Created by DBIx::Class::Schema::Loader\n|
        . qq|# DO NOT MODIFY THE FIRST PART OF THIS FILE\n\n|;

    my $autoclean = $self->only_autoclean ?
'namespace::autoclean' : 'MooseX::MarkAsMethods autoclean => 1' ; if ($self->use_moose) { $schema_text.= qq|use Moose;\nuse $autoclean;\nextends '$schema_base_class';\n\n|; } else { $schema_text .= qq|use strict;\nuse warnings;\n\nuse base '$schema_base_class';\n\n|; } my @schema_components = @{ $self->schema_components || [] }; if (@schema_components) { my $schema_components = dump @schema_components; $schema_components = "($schema_components)" if @schema_components == 1; $schema_text .= "__PACKAGE__->load_components${schema_components};\n\n"; } if ($self->use_namespaces) { $schema_text .= qq|__PACKAGE__->load_namespaces|; my $namespace_options; my @attr = qw/resultset_namespace default_resultset_class/; unshift @attr, 'result_namespace' unless (not $self->result_namespace) || $self->result_namespace eq 'Result'; for my $attr (@attr) { if ($self->$attr) { my $code = dumper_squashed $self->$attr; $namespace_options .= qq| $attr => $code,\n| } } $schema_text .= qq|(\n$namespace_options)| if $namespace_options; $schema_text .= qq|;\n|; } else { $schema_text .= qq|__PACKAGE__->load_classes;\n|; } { local $self->{version_to_dump} = $self->schema_version_to_dump; $self->_write_classfile($schema_class, $schema_text, 1); } my $result_base_class = $self->result_base_class || 'DBIx::Class::Core'; foreach my $src_class (@classes) { my $src_text = qq|use utf8;\n| . qq|package $src_class;\n\n| . qq|# Created by DBIx::Class::Schema::Loader\n| . 
qq|# DO NOT MODIFY THE FIRST PART OF THIS FILE\n\n|; $src_text .= $self->_make_pod_heading($src_class); $src_text .= qq|use strict;\nuse warnings;\n\n|; $src_text .= $self->_base_class_pod($result_base_class) unless $result_base_class eq 'DBIx::Class::Core'; if ($self->use_moose) { $src_text.= qq|use Moose;\nuse MooseX::NonMoose;\nuse $autoclean;|; # these options 'use base' which is compile time if (@{ $self->left_base_classes } || @{ $self->additional_base_classes }) { $src_text .= qq|\nBEGIN { extends '$result_base_class' }\n|; } else { $src_text .= qq|\nextends '$result_base_class';\n|; } } else { $src_text .= qq|use base '$result_base_class';\n|; } $self->_write_classfile($src_class, $src_text); } # remove Result dir if downgrading from use_namespaces, and there are no # files left. if (my $result_ns = $self->_downgrading_to_load_classes || $self->_rewriting_result_namespace) { my $result_namespace = $self->_result_namespace( $schema_class, $result_ns, ); (my $result_dir = $result_namespace) =~ s{::}{/}g; $result_dir = $self->dump_directory . '/' . $result_dir; unless (my @files = glob "$result_dir/*") { rmdir $result_dir; } } warn "Schema dump completed.\n" unless $self->dynamic or $self->quiet; } sub _sig_comment { my ($self, $version, $ts) = @_; return qq|\n\n# Created by DBIx::Class::Schema::Loader| . (defined($version) ? q| v| . $version : '') . (defined($ts) ? q| @ | . $ts : '') . qq|\n# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:|; } sub _write_classfile { my ($self, $class, $text, $is_schema) = @_; my $filename = $self->_get_dump_filename($class); $self->_ensure_dump_subdirs($class); if (-f $filename && $self->really_erase_my_files && !$self->dry_run) { warn "Deleting existing file '$filename' due to " . "'really_erase_my_files' setting\n" unless $self->quiet; unlink($filename); } my ($old_gen, $old_md5, $old_ver, $old_ts, $old_custom) = $self->_parse_generated_file($filename); if (! 
$old_gen && -f $filename) { croak "Cannot overwrite '$filename' without 'really_erase_my_files'," . " it does not appear to have been generated by Loader" } my $custom_content = $old_custom || ''; # Use custom content from a renamed class, the class names in it are # rewritten below. if (my $renamed_class = $self->_upgrading_classes->{$class}) { my $old_filename = $self->_get_dump_filename($renamed_class); if (-f $old_filename) { $custom_content = ($self->_parse_generated_file ($old_filename))[4]; unlink $old_filename unless $self->dry_run; } } $custom_content ||= $self->_default_custom_content($is_schema); # If upgrading to use_moose=1 replace default custom content with default Moose custom content. # If there is already custom content, which does not have the Moose content, add it. if ($self->use_moose) { my $non_moose_custom_content = do { local $self->{use_moose} = 0; $self->_default_custom_content; }; if ($custom_content eq $non_moose_custom_content) { $custom_content = $self->_default_custom_content($is_schema); } elsif ($custom_content !~ /\Q@{[$self->_default_moose_custom_content($is_schema)]}\E/) { $custom_content .= $self->_default_custom_content($is_schema); } } elsif (defined $self->use_moose && $old_gen) { croak 'It is not possible to "downgrade" a schema that was loaded with use_moose => 1 to use_moose => 0, due to differing custom content' if $old_gen =~ /use \s+ MooseX?\b/x; } $custom_content = $self->_rewrite_old_classnames($custom_content); $text .= qq|$_\n| for @{$self->{_dump_storage}->{$class} || []}; if ($self->filter_generated_code) { my $filter = $self->filter_generated_code; if (ref $filter eq 'CODE') { $text = $filter->( ($is_schema ? 
'schema' : 'result'), $class, $text ); } else { my ($fh, $temp_file) = tempfile(); binmode $fh, ':encoding(UTF-8)'; print $fh $text; close $fh; open my $out, qq{$filter < "$temp_file"|} or croak "Could not open pipe to $filter: $!"; $text = decode('UTF-8', do { local $/; <$out> }); $text =~ s/$CR?$LF/\n/g; close $out; my $exit_code = $? >> 8; unlink $temp_file or croak "Could not remove temporary file '$temp_file': $!"; if ($exit_code != 0) { croak "filter '$filter' exited non-zero: $exit_code"; } } if (not $text or not $text =~ /\bpackage\b/) { warn("$class skipped due to filter") if $self->debug; return; } } # Check and see if the dump is in fact different my $compare_to; if ($old_md5) { $compare_to = $text . $self->_sig_comment($old_ver, $old_ts); if (Digest::MD5::md5_base64(encode 'UTF-8', $compare_to) eq $old_md5) { return unless $self->_upgrading_from && $is_schema; } } push @{$self->generated_classes}, $class; return if $self->dry_run; $text .= $self->_sig_comment( $self->omit_version ? undef : $self->version_to_dump, $self->omit_timestamp ? undef : POSIX::strftime('%Y-%m-%d %H:%M:%S', localtime) ); open(my $fh, '>:raw:encoding(UTF-8)', $filename) or croak "Cannot open '$filename' for writing: $!"; # Write the top half and its MD5 sum print $fh $text . Digest::MD5::md5_base64(encode 'UTF-8', $text) . "\n"; # Write out anything loaded via external partial class file in @INC print $fh qq|$_\n| for @{$self->{_ext_storage}->{$class} || []}; # Write out any custom content the user has added print $fh $custom_content; close($fh) or croak "Error closing '$filename': $!"; } sub _default_moose_custom_content { my ($self, $is_schema) = @_; if (not $is_schema) { return qq|\n__PACKAGE__->meta->make_immutable;|; } return qq|\n__PACKAGE__->meta->make_immutable(inline_constructor => 0);|; } sub _default_custom_content { my ($self, $is_schema) = @_; my $default = qq|\n\n# You can replace this text with custom| . 
qq| code or comments, and it will be preserved on regeneration|;
    if ($self->use_moose) {
        $default .= $self->_default_moose_custom_content($is_schema);
    }
    $default .= qq|\n1;\n|;
    return $default;
}

# Split a previously dumped file into its parts.
#
# Returns the 5-tuple ($gen, $real_md5, $ver, $ts, $custom): $gen is the
# auto-generated portion up to and including the md5sum marker prefix,
# $real_md5 the checksum recomputed over $gen, $ver/$ts the Loader version
# and timestamp parsed from the signature comment, and $custom the
# user-owned tail. Returns the empty list when $fn does not exist. Croaks
# on checksum mismatch unless 'overwrite_modifications' is set.
sub _parse_generated_file {
    my ($self, $fn) = @_;

    return unless -f $fn;

    open(my $fh, '<:encoding(UTF-8)', $fn)
        or croak "Cannot open '$fn' for reading: $!";

    my $mark_re = qr{^(# DO NOT MODIFY THIS OR ANYTHING ABOVE! md5sum:)([A-Za-z0-9/+]{22})\r?\n};

    my ($real_md5, $ts, $ver, $gen);
    local $_;
    while(<$fh>) {
        if(/$mark_re/) {
            my $pre_md5 = $1;
            my $mark_md5 = $2;

            # Pull out the version and timestamp from the line above
            ($ver, $ts) = $gen =~ m/^# Created by DBIx::Class::Schema::Loader( v[\d.]+)?( @ [\d-]+ [\d:]+)?\r?\Z/m;
            $ver =~ s/^ v// if $ver;
            $ts =~ s/^ @ // if $ts;

            $gen .= $pre_md5;
            $real_md5 = Digest::MD5::md5_base64(encode 'UTF-8', $gen);
            croak "Checksum mismatch in '$fn', the auto-generated part of the file has been modified outside of this loader. Aborting.\nIf you want to overwrite these modifications, set the 'overwrite_modifications' loader option.\n"
                if !$self->overwrite_modifications && $real_md5 ne $mark_md5;

            last;
        }
        else {
            $gen .= $_;
        }
    }

    # FIX: the original used 'my $custom = do {...} if $real_md5;' -- the
    # 'my EXPR if COND' construct has explicitly undefined behavior (see
    # perlsyn). Declare unconditionally and only slurp the custom tail when
    # the md5 marker was actually found; the result is identical.
    my $custom = $real_md5 ? do { local $/; <$fh> } : undef;
    $custom ||= '';
    $custom =~ s/$CRLF|$LF/\n/g;

    close $fh;

    return ($gen, $real_md5, $ver, $ts, $custom);
}

# Queue "use <module>;" statements for $target, logging them in debug mode.
sub _use {
    my $self = shift;
    my $target = shift;

    foreach (@_) {
        warn "$target: use $_;" if $self->debug;
        $self->_raw_stmt($target, "use $_;");
    }
}

# Queue a single "use base qw/.../;" statement carrying all given base
# classes for $target; no-op when the list is empty.
sub _inject {
    my $self = shift;
    my $target = shift;

    my $blist = join(q{ }, @_);

    return unless $blist;

    warn "$target: use base qw/$blist/;" if $self->debug;
    $self->_raw_stmt($target, "use base qw/$blist/;");
}

# Queue a Moose::Role "with '...', '...';" statement for $target; no-op when
# the role list is empty.
sub _with {
    my $self = shift;
    my $target = shift;

    my $rlist = join(q{, }, map { qq{'$_'} } @_);

    return unless $rlist;

    warn "$target: with $rlist;" if $self->debug;
    $self->_raw_stmt($target, "\nwith $rlist;");
}

# Resolve a result_namespace spec ('+Fully::Qualified' or a name relative to
# $schema_class); returns the parts as a list, or joined with '::' in scalar
# context.
sub _result_namespace {
    my ($self, $schema_class, $ns) = @_;
    my @result_namespace;

    $ns = $ns->[0] if ref $ns;

    if ($ns
=~ /^\+(.*)/) { # Fully qualified namespace @result_namespace = ($1) } else { # Relative namespace @result_namespace = ($schema_class, $ns); } return wantarray ? @result_namespace : join '::', @result_namespace; } # Create class with applicable bases, setup monikers, etc sub _make_src_class { my ($self, $table) = @_; my $schema = $self->schema; my $schema_class = $self->schema_class; my $table_moniker = $self->monikers->{$table->sql_name}; my @result_namespace = ($schema_class); if ($self->use_namespaces) { my $result_namespace = $self->result_namespace || 'Result'; @result_namespace = $self->_result_namespace( $schema_class, $result_namespace, ); } my $table_class = join(q{::}, @result_namespace, $table_moniker); if ((my $upgrading_v = $self->_upgrading_from) || $self->_rewriting) { local $self->naming->{monikers} = $upgrading_v if $upgrading_v; my @result_namespace = @result_namespace; if ($self->_upgrading_from_load_classes) { @result_namespace = ($schema_class); } elsif (my $ns = $self->_downgrading_to_load_classes) { @result_namespace = $self->_result_namespace( $schema_class, $ns, ); } elsif ($ns = $self->_rewriting_result_namespace) { @result_namespace = $self->_result_namespace( $schema_class, $ns, ); } my $old_table_moniker = do { local $self->naming->{monikers} = $upgrading_v; $self->_table2moniker($table); }; my $old_class = join(q{::}, @result_namespace, $old_table_moniker); $self->_upgrading_classes->{$table_class} = $old_class unless $table_class eq $old_class; } $self->classes->{$table->sql_name} = $table_class; $self->moniker_to_table->{$table_moniker} = $table; $self->class_to_table->{$table_class} = $table; $self->_pod_class_list($table_class, 'ADDITIONAL CLASSES USED', @{$self->additional_classes}); $self->_use ($table_class, @{$self->additional_classes}); $self->_pod_class_list($table_class, 'LEFT BASE CLASSES', @{$self->left_base_classes}); $self->_inject($table_class, @{$self->left_base_classes}); my @components = @{ $self->components || [] }; 
push @components, @{ $self->result_components_map->{$table_moniker} } if exists $self->result_components_map->{$table_moniker}; my @fq_components = @components; foreach my $component (@fq_components) { if ($component !~ s/^\+//) { $component = "DBIx::Class::$component"; } } $self->_pod_class_list($table_class, 'COMPONENTS LOADED', @fq_components); $self->_dbic_stmt($table_class, 'load_components', @components) if @components; $self->_pod_class_list($table_class, 'ADDITIONAL BASE CLASSES', @{$self->additional_base_classes}); $self->_inject($table_class, @{$self->additional_base_classes}); } sub _is_result_class_method { my ($self, $name, $table) = @_; my $table_moniker = $table ? $self->monikers->{$table->sql_name} : ''; $self->_result_class_methods({}) if not defined $self->_result_class_methods; if (not exists $self->_result_class_methods->{$table_moniker}) { my (@methods, %methods); my $base = $self->result_base_class || 'DBIx::Class::Core'; my @components = @{ $self->components || [] }; push @components, @{ $self->result_components_map->{$table_moniker} } if exists $self->result_components_map->{$table_moniker}; for my $c (@components) { $c = $c =~ /^\+/ ? substr($c,1) : "DBIx::Class::$c"; } my @roles = @{ $self->result_roles || [] }; push @roles, @{ $self->result_roles_map->{$table_moniker} } if exists $self->result_roles_map->{$table_moniker}; for my $class ( $base, @components, @roles, ($self->use_moose ? 
'Moose::Object' : ()), ) { $self->ensure_class_loaded($class); push @methods, @{ Class::Inspector->methods($class) || [] }; } push @methods, @{ Class::Inspector->methods('UNIVERSAL') }; @methods{@methods} = (); $self->_result_class_methods->{$table_moniker} = \%methods; } my $result_methods = $self->_result_class_methods->{$table_moniker}; return exists $result_methods->{$name}; } sub _resolve_col_accessor_collisions { my ($self, $table, $col_info) = @_; while (my ($col, $info) = each %$col_info) { my $accessor = $info->{accessor} || $col; next if $accessor eq 'id'; # special case (very common column) if ($self->_is_result_class_method($accessor, $table)) { my $mapped = 0; if (my $map = $self->col_collision_map) { for my $re (keys %$map) { if (my @matches = $col =~ /$re/) { $info->{accessor} = sprintf $map->{$re}, @matches; $mapped = 1; } } } if (not $mapped) { warn <<"EOF"; Column '$col' in table '$table' collides with an inherited method. See "COLUMN ACCESSOR COLLISIONS" in perldoc DBIx::Class::Schema::Loader::Base . 
EOF $info->{accessor} = undef; } } } } # use the same logic to run moniker_map, col_accessor_map sub _run_user_map { my ( $self, $map, $default_code, $ident, @extra ) = @_; my $default_ident = $default_code->( $ident, @extra ); my $new_ident; if( $map && ref $map eq 'HASH' ) { if (my @parts = try { @{ $ident } }) { my $part_map = $map; while (@parts) { my $part = shift @parts; last unless exists $part_map->{ $part }; if ( !ref $part_map->{ $part } && !@parts ) { $new_ident = $part_map->{ $part }; last; } elsif ( ref $part_map->{ $part } eq 'HASH' ) { $part_map = $part_map->{ $part }; } } } if( !$new_ident && !ref $map->{ $ident } ) { $new_ident = $map->{ $ident }; } } elsif( $map && ref $map eq 'CODE' ) { my $cb = sub { my ($cb_map) = @_; croak "reentered map must be a hashref" unless 'HASH' eq ref($cb_map); return $self->_run_user_map($cb_map, $default_code, $ident, @extra); }; $new_ident = $map->( $ident, $default_ident, @extra, $cb ); } $new_ident ||= $default_ident; return $new_ident; } sub _default_column_accessor_name { my ( $self, $column_name ) = @_; my $preserve = ($self->naming->{column_accessors}||'') eq 'preserve'; my $v = $self->_get_naming_v('column_accessors'); my $accessor_name = $preserve ? $self->_to_identifier('column_accessors', $column_name) # assume CamelCase : $self->_to_identifier('column_accessors', $column_name, '_'); $accessor_name =~ s/\W+/_/g; # only if naming < v8, otherwise to_identifier # takes care of it if ($preserve) { return $accessor_name; } elsif ($v < 7 || (not $self->preserve_case)) { # older naming just lc'd the col accessor and that's all. 
return lc $accessor_name; } return join '_', map lc, split_name $column_name, $v; } sub _make_column_accessor_name { my ($self, $column_name, $column_context_info ) = @_; my $accessor = $self->_run_user_map( $self->col_accessor_map, sub { $self->_default_column_accessor_name( shift ) }, $column_name, $column_context_info, ); return $accessor; } sub _table_is_view { #my ($self, $table) = @_; return 0; } # Set up metadata (cols, pks, etc) sub _setup_src_meta { my ($self, $table) = @_; my $schema = $self->schema; my $schema_class = $self->schema_class; my $table_class = $self->classes->{$table->sql_name}; my $table_moniker = $self->monikers->{$table->sql_name}; $self->_dbic_stmt($table_class, 'table_class', 'DBIx::Class::ResultSource::View') if $self->_table_is_view($table); $self->_dbic_stmt($table_class, 'table', $table->dbic_name); my $cols = $self->_table_columns($table); my $col_info = $self->__columns_info_for($table); ### generate all the column accessor names while (my ($col, $info) = each %$col_info) { # hashref of other info that could be used by # user-defined accessor map functions my $context = { table_class => $table_class, table_moniker => $table_moniker, table_name => $table, # bugwards compatibility, RT#84050 table => $table, full_table_name => $table->dbic_name, schema_class => $schema_class, column_info => $info, }; my $col_obj = DBIx::Class::Schema::Loader::Column->new( table => $table, name => $col, ); $info->{accessor} = $self->_make_column_accessor_name( $col_obj, $context ); } $self->_resolve_col_accessor_collisions($table, $col_info); # prune any redundant accessor names while (my ($col, $info) = each %$col_info) { no warnings 'uninitialized'; delete $info->{accessor} if $info->{accessor} eq $col; } my $fks = $self->_table_fk_info($table); foreach my $fkdef (@$fks) { for my $col (@{ $fkdef->{local_columns} }) { $col_info->{$col}{is_foreign_key} = 1; } } my $pks = $self->_table_pk_info($table) || []; my %uniq_tag; # used to eliminate duplicate 
uniqs $uniq_tag{ join("\0", @$pks) }++ if @$pks; # pk is a uniq my $uniqs = $self->_table_uniq_info($table) || []; my @uniqs; foreach my $uniq (@$uniqs) { my ($name, $cols) = @$uniq; next if $uniq_tag{ join("\0", @$cols) }++; # skip duplicates push @uniqs, [$name, $cols]; } my @non_nullable_uniqs = grep { all { $col_info->{$_}{is_nullable} == 0 } @{ $_->[1] } } @uniqs; if ($self->uniq_to_primary && (not @$pks) && @non_nullable_uniqs) { my @by_colnum = sort { $b->[0] <=> $a->[0] } map [ scalar @{ $_->[1] }, $_ ], @non_nullable_uniqs; if (not (@by_colnum > 1 && $by_colnum[0][0] == $by_colnum[1][0])) { my @keys = map $_->[1], @by_colnum; my $pk = $keys[0]; # remove the uniq from list @uniqs = grep { $_->[0] ne $pk->[0] } @uniqs; $pks = $pk->[1]; } } foreach my $pkcol (@$pks) { $col_info->{$pkcol}{is_nullable} = 0; } $self->_dbic_stmt( $table_class, 'add_columns', map { $_, ($col_info->{$_}||{}) } @$cols ); $self->_dbic_stmt($table_class, 'set_primary_key', @$pks) if @$pks; # Sort unique constraints by constraint name for repeatable results (rels # are sorted as well elsewhere.) @uniqs = sort { $a->[0] cmp $b->[0] } @uniqs; foreach my $uniq (@uniqs) { my ($name, $cols) = @$uniq; $self->_dbic_stmt($table_class,'add_unique_constraint', $name, $cols); } } sub __columns_info_for { my ($self, $table) = @_; my $result = $self->_columns_info_for($table); while (my ($col, $info) = each %$result) { $info = { %$info, %{ $self->_custom_column_info ($table, $col, $info) } }; $info = { %$info, %{ $self->_datetime_column_info($table, $col, $info) } }; $result->{$col} = $info; } return $result; } =head2 tables Returns a sorted list of loaded tables, using the original database table names. 
=cut

sub tables {
    my $self = shift;

    # NOTE(review): the POD above promises a *sorted* list, but hash values
    # come back in unspecified order -- confirm whether callers rely on the
    # documented ordering.
    return values %{$self->_tables};
}

# Extract the numeric naming version (e.g. 7 for 'v7') configured for the
# given naming key, falling back to the current default $CURRENT_V.
sub _get_naming_v {
    my ($self, $naming_key) = @_;

    my $v;

    if (($self->naming->{$naming_key}||'') =~ /^v(\d+)\z/) {
        $v = $1;
    }
    else {
        ($v) = $CURRENT_V =~ /^v(\d+)\z/;
    }

    return $v;
}

# Sanitize $name into a valid identifier via String::ToIdentifier::EN
# (ASCII or Unicode flavor per the force_ascii naming option), but only
# under naming v8+ semantics or when $force is set; older naming versions
# pass the name through unchanged.
sub _to_identifier {
    my ($self, $naming_key, $name, $sep_char, $force) = @_;

    my $v = $self->_get_naming_v($naming_key);

    my $to_identifier = $self->naming->{force_ascii} ?
        \&String::ToIdentifier::EN::to_identifier
        : \&String::ToIdentifier::EN::Unicode::to_identifier;

    return $v >= 8 || $force ? $to_identifier->($name, $sep_char) : $name;
}

# Make a moniker from a table
sub _default_table2moniker {
    my ($self, $table) = @_;

    my $v = $self->_get_naming_v('monikers');

    my @moniker_parts = @{ $self->moniker_parts };
    my @name_parts = map $table->$_, @moniker_parts;

    my $name_idx = firstidx { $_ eq 'name' } @{ $self->moniker_parts };

    my @all_parts;

    foreach my $i (0 .. $#name_parts) {
        my $part = $name_parts[$i];

        # A user-supplied moniker_part_map entry wins outright for this part.
        my $moniker_part = $self->_run_user_map(
            $self->moniker_part_map->{$moniker_parts[$i]},
            sub { '' }, $part, $moniker_parts[$i],
        );

        if (length $moniker_part) {
            push @all_parts, $moniker_part;
            next;
        }

        if ($i != $name_idx || $v >= 8) {
            $part = $self->_to_identifier('monikers', $part, '_', 1);
        }

        # v5 naming singularized the raw name part before splitting.
        if ($i == $name_idx && $v == 5) {
            $part = Lingua::EN::Inflect::Number::to_S($part);
        }

        my @part_parts = map lc, $v > 6 ?
            # use v8 semantics for all moniker parts except name
            ($i == $name_idx ? split_name $part, $v : split_name $part)
            : split /[\W_]+/, $part;

        # v6+ inflects the name part as an English phrase, honoring the
        # 'plural' / 'preserve' naming options.
        if ($i == $name_idx && $v >= 6) {
            my $as_phrase = join ' ', @part_parts;

            my $inflected = ($self->naming->{monikers}||'') eq 'plural' ?
                Lingua::EN::Inflect::Phrase::to_PL($as_phrase)
                :
                ($self->naming->{monikers}||'') eq 'preserve' ?
$as_phrase : Lingua::EN::Inflect::Phrase::to_S($as_phrase); @part_parts = split /\s+/, $inflected; } push @all_parts, join '', map ucfirst, @part_parts; } return join $self->moniker_part_separator, @all_parts; } sub _table2moniker { my ( $self, $table ) = @_; $self->_run_user_map( $self->moniker_map, sub { $self->_default_table2moniker( shift ) }, $table ); } sub _load_relationships { my ($self, $tables) = @_; my @tables; foreach my $table (@$tables) { my $local_moniker = $self->monikers->{$table->sql_name}; my $tbl_fk_info = $self->_table_fk_info($table); foreach my $fkdef (@$tbl_fk_info) { $fkdef->{local_table} = $table; $fkdef->{local_moniker} = $local_moniker; $fkdef->{remote_source} = $self->monikers->{$fkdef->{remote_table}->sql_name}; } my $tbl_uniq_info = $self->_table_uniq_info($table); push @tables, [ $local_moniker, $tbl_fk_info, $tbl_uniq_info ]; } my $rel_stmts = $self->_relbuilder->generate_code(\@tables); foreach my $src_class (sort keys %$rel_stmts) { # sort by rel name my @src_stmts = map $_->[2], sort { $a->[0] <=> $b->[0] || $a->[1] cmp $b->[1] } map [ ($_->{method} eq 'many_to_many' ? 1 : 0), $_->{args}[0], $_, ], @{ $rel_stmts->{$src_class} }; foreach my $stmt (@src_stmts) { $self->_dbic_stmt($src_class,$stmt->{method}, @{$stmt->{args}}); } } } sub _load_roles { my ($self, $table) = @_; my $table_moniker = $self->monikers->{$table->sql_name}; my $table_class = $self->classes->{$table->sql_name}; my @roles = @{ $self->result_roles || [] }; push @roles, @{ $self->result_roles_map->{$table_moniker} } if exists $self->result_roles_map->{$table_moniker}; if (@roles) { $self->_pod_class_list($table_class, 'L ROLES APPLIED', @roles); $self->_with($table_class, @roles); } } # Overload these in driver class: # Returns an arrayref of column names sub _table_columns { croak "ABSTRACT METHOD" } # Returns arrayref of pk col names sub _table_pk_info { croak "ABSTRACT METHOD" } # Returns an arrayref of uniqs [ [ foo => [ col1, col2 ] ], [ bar => [ ... 
] ] ] sub _table_uniq_info { croak "ABSTRACT METHOD" } # Returns an arrayref of foreign key constraints, each # being a hashref with 3 keys: # local_columns (arrayref), remote_columns (arrayref), remote_table sub _table_fk_info { croak "ABSTRACT METHOD" } # Returns an array of lower case table names sub _tables_list { croak "ABSTRACT METHOD" } # Execute a constructive DBIC class method, with debug/dump_to_dir hooks. sub _dbic_stmt { my $self = shift; my $class = shift; my $method = shift; # generate the pod for this statement, storing it with $self->_pod $self->_make_pod( $class, $method, @_ ) if $self->generate_pod; my $args = dump(@_); $args = '(' . $args . ')' if @_ < 2; my $stmt = $method . $args . q{;}; warn qq|$class\->$stmt\n| if $self->debug; $self->_raw_stmt($class, '__PACKAGE__->' . $stmt); return; } sub _make_pod_heading { my ($self, $class) = @_; return '' if not $self->generate_pod; my $table = $self->class_to_table->{$class}; my $pod; my $pcm = $self->pod_comment_mode; my ($comment, $comment_overflows, $comment_in_name, $comment_in_desc); $comment = $self->__table_comment($table); $comment_overflows = ($comment and length $comment > $self->pod_comment_spillover_length); $comment_in_name = ($pcm eq 'name' or ($pcm eq 'auto' and !$comment_overflows)); $comment_in_desc = ($pcm eq 'description' or ($pcm eq 'auto' and $comment_overflows)); $pod .= "=head1 NAME\n\n"; my $table_descr = $class; $table_descr .= " - " . 
$comment if $comment and $comment_in_name; $pod .= "$table_descr\n\n"; if ($comment and $comment_in_desc) { $pod .= "=head1 DESCRIPTION\n\n${comment}\n\n"; } $pod .= "=cut\n\n"; return $pod; } # generates the accompanying pod for a DBIC class method statement, # storing it with $self->_pod sub _make_pod { my $self = shift; my $class = shift; my $method = shift; if ($method eq 'table') { my $table = $_[0]; $table = $$table if ref $table eq 'SCALAR'; $self->_pod($class, "=head1 TABLE: C<$table>"); $self->_pod_cut($class); } elsif ( $method eq 'add_columns' ) { $self->_pod( $class, "=head1 ACCESSORS" ); my $col_counter = 0; my @cols = @_; while( my ($name,$attrs) = splice @cols,0,2 ) { $col_counter++; $self->_pod( $class, '=head2 ' . $name ); $self->_pod( $class, join "\n", map { my $s = $attrs->{$_}; $s = !defined $s ? 'undef' : length($s) == 0 ? '(empty string)' : ref($s) eq 'SCALAR' ? $$s : ref($s) ? dumper_squashed $s : looks_like_number($s) ? $s : qq{'$s'}; " $_: $s" } sort keys %$attrs, ); if (my $comment = $self->__column_comment($self->class_to_table->{$class}, $col_counter, $name)) { $self->_pod( $class, $comment ); } } $self->_pod_cut( $class ); } elsif ( $method =~ /^(?:belongs_to|has_many|might_have)\z/ ) { $self->_pod( $class, "=head1 RELATIONS" ) unless $self->{_relations_started} { $class } ; my ( $accessor, $rel_class ) = @_; $self->_pod( $class, "=head2 $accessor" ); $self->_pod( $class, 'Type: ' . 
$method ); $self->_pod( $class, "Related object: L<$rel_class>" ); $self->_pod_cut( $class ); $self->{_relations_started} { $class } = 1; } elsif ( $method eq 'many_to_many' ) { $self->_pod( $class, "=head1 RELATIONS" ) unless $self->{_relations_started} { $class } ; my ( $accessor, $rel1, $rel2 ) = @_; $self->_pod( $class, "=head2 $accessor" ); $self->_pod( $class, 'Type: many_to_many' ); $self->_pod( $class, "Composing rels: L -> $rel2" ); $self->_pod_cut( $class ); $self->{_relations_started} { $class } = 1; } elsif ($method eq 'add_unique_constraint') { $self->_pod($class, '=head1 UNIQUE CONSTRAINTS') unless $self->{_uniqs_started}{$class}; my ($name, $cols) = @_; $self->_pod($class, "=head2 C<$name>"); $self->_pod($class, '=over 4'); foreach my $col (@$cols) { $self->_pod($class, "=item \* L"); } $self->_pod($class, '=back'); $self->_pod_cut($class); $self->{_uniqs_started}{$class} = 1; } elsif ($method eq 'set_primary_key') { $self->_pod($class, "=head1 PRIMARY KEY"); $self->_pod($class, '=over 4'); foreach my $col (@_) { $self->_pod($class, "=item \* L"); } $self->_pod($class, '=back'); $self->_pod_cut($class); } } sub _pod_class_list { my ($self, $class, $title, @classes) = @_; return unless @classes && $self->generate_pod; $self->_pod($class, "=head1 $title"); $self->_pod($class, '=over 4'); foreach my $link (@classes) { $self->_pod($class, "=item * L<$link>"); } $self->_pod($class, '=back'); $self->_pod_cut($class); } sub _base_class_pod { my ($self, $base_class) = @_; return '' unless $self->generate_pod; return "\n=head1 BASE CLASS: L<$base_class>\n\n=cut\n\n"; } sub _filter_comment { my ($self, $txt) = @_; $txt = '' if not defined $txt; $txt =~ s/(?:\015?\012|\015\012?)/\n/g; return $txt; } sub __table_comment { my $self = shift; if (my $code = $self->can('_table_comment')) { return $self->_filter_comment($self->$code(@_)); } return ''; } sub __column_comment { my $self = shift; if (my $code = $self->can('_column_comment')) { return 
$self->_filter_comment($self->$code(@_)); } return ''; } # Stores a POD documentation sub _pod { my ($self, $class, $stmt) = @_; $self->_raw_stmt( $class, "\n" . $stmt ); } sub _pod_cut { my ($self, $class ) = @_; $self->_raw_stmt( $class, "\n=cut\n" ); } # Store a raw source line for a class (for dumping purposes) sub _raw_stmt { my ($self, $class, $stmt) = @_; push(@{$self->{_dump_storage}->{$class}}, $stmt); } # Like above, but separately for the externally loaded stuff sub _ext_stmt { my ($self, $class, $stmt) = @_; push(@{$self->{_ext_storage}->{$class}}, $stmt); } sub _custom_column_info { my ( $self, $table_name, $column_name, $column_info ) = @_; if (my $code = $self->custom_column_info) { return $code->($table_name, $column_name, $column_info) || {}; } return {}; } sub _datetime_column_info { my ( $self, $table_name, $column_name, $column_info ) = @_; my $result = {}; my $type = $column_info->{data_type} || ''; if ((grep $_, @{ $column_info }{map "inflate_$_", qw/date datetime timestamp/}) or ($type =~ /date|timestamp/i)) { $result->{timezone} = $self->datetime_timezone if $self->datetime_timezone; $result->{locale} = $self->datetime_locale if $self->datetime_locale; } return $result; } sub _lc { my ($self, $name) = @_; return $self->preserve_case ? $name : lc($name); } sub _uc { my ($self, $name) = @_; return $self->preserve_case ? $name : uc($name); } sub _remove_table { my ($self, $table) = @_; try { my $schema = $self->schema; # in older DBIC it's a private method my $unregister = $schema->can('unregister_source') || $schema->can('_unregister_source'); $schema->$unregister(delete $self->monikers->{$table->sql_name}); delete $self->_upgrading_classes->{delete $self->classes->{$table->sql_name}}; delete $self->_tables->{$table->sql_name}; }; } # remove the dump dir from @INC on destruction sub DESTROY { my $self = shift; @INC = grep $_ ne $self->dump_directory, @INC; } =head2 monikers Returns a hashref of loaded table to moniker mappings. 
There will be two entries for each table, the original name and the "normalized" name, in the case that the two are different (such as databases that like uppercase table names, or preserve your original mixed-case definitions, or what-have-you). =head2 classes Returns a hashref of table to class mappings. In some cases it will contain multiple entries per table for the original and normalized table names, as above in L. =head2 generated_classes Returns an arrayref of classes that were actually generated (i.e. not skipped because there were no changes). =head1 NON-ENGLISH DATABASES If you use the loader on a database with table and column names in a language other than English, you will want to turn off the English language specific heuristics. To do so, use something like this in your loader options: naming => { monikers => 'v4' }, inflect_singular => sub { "$_[0]_rel" }, inflect_plural => sub { "$_[0]_rel" }, =head1 COLUMN ACCESSOR COLLISIONS Occasionally you may have a column name that collides with a perl method, such as C. In such cases, the default action is to set the C of the column spec to C. You can then name the accessor yourself by placing code such as the following below the md5: __PACKAGE__->add_column('+can' => { accessor => 'my_can' }); Another option is to use the L option. =head1 RELATIONSHIP NAME COLLISIONS In very rare cases, you may get a collision between a generated relationship name and a method in your Result class, for example if you have a foreign key called C. This is a problem because relationship names are also relationship accessor methods in L. The default behavior is to append C<_rel> to the relationship name and print out a warning that refers to this text. You can also control the renaming with the L option. =head1 SEE ALSO L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/Column.pm0000644000175000017500000000233712625643053024255 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::Column; use strict; use warnings; use base 'Class::Accessor::Grouped'; use mro 'c3'; use Carp::Clan qw/^DBIx::Class/; use Scalar::Util 'weaken'; use namespace::clean; =head1 NAME DBIx::Class::Schema::Loader::Column - Class for Columns in L =head1 DESCRIPTION Used for representing columns in L. Stringifies to L, and arrayrefifies to the L of L
plus L
. =cut __PACKAGE__->mk_group_accessors(simple => qw/ table name /); use overload '""' => sub { $_[0]->name }, '@{}' => sub { [ @{$_[0]->table->name_parts}, $_[0]->name ] }, fallback => 1; =head1 METHODS =head2 new The constructor. Takes L and L
key-value parameters. =cut sub new { my $class = shift; my $self = { @_ }; croak "table is required" unless ref $self->{table}; weaken $self->{table}; return bless $self, $class; } =head2 table The L
object this column belongs to. Required parameter for L =head2 name The name of the column. Required parameter for L. =cut 1; DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/Optional/0000755000175000017500000000000012650450355024241 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/Optional/Dependencies.pm0000644000175000017500000010770112542756321027175 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::Optional::Dependencies; ### This may look crazy, but it in fact tangibly ( by 50(!)% ) shortens # the skip-test time when everything requested is unavailable BEGIN { if ( $ENV{RELEASE_TESTING} ) { require warnings and warnings->import; require strict and strict->import; } } sub croak { require Carp; Carp::croak(@_); }; ### # NO EXTERNAL NON-5.8.1 CORE DEPENDENCIES EVER (e.g. C::A::G) # This module is to be loaded by Makefile.PM on a pristine system # POD is generated automatically by calling _gen_pod from the # Makefile.PL in $AUTHOR mode my $dbic_reqs = { use_moose => { req => { 'Moose' => '1.12', 'MooseX::NonMoose' => '0.25', 'namespace::autoclean' => '0.09', 'MooseX::MarkAsMethods' => '0.13', }, pod => { title => 'use_moose', desc => 'Modules required for the use_moose option', }, }, dbicdump_config => { req => { 'Config::Any' => '0', }, pod => { title => 'dbicdump config file', desc => 'Modules required for using a config file with dbicdump', }, }, test_dbicdump_config => { include => 'dbicdump_config', req => { 'Config::General' => '0', }, pod => { title => 'dbicdump config file testing', desc => 'Modules required for using testing using a config file with dbicdump', }, }, _rdbms_generic_odbc => { req => { 'DBD::ODBC' => 0, } }, _rdbms_generic_ado => { req => { 'DBD::ADO' => 0, } }, # must list any dep used by adhoc testing # this prevents the "skips due to forgotten deps" issue test_adhoc => { req => { }, }, id_shortener => { req => { 'Math::BigInt' => '1.80', 'Math::Base36' => '0.07', }, }, test_pod => { req 
=> { 'Test::Pod' => '1.14', 'Pod::Simple' => '3.22', }, pod => { title => 'POD testing', desc => 'Modules required for testing POD in this distribution', }, release_testing_mandatory => 1, }, test_podcoverage => { req => { 'Test::Pod::Coverage' => '1.08', 'Pod::Coverage' => '0.20', }, release_testing_mandatory => 1, }, test_whitespace => { req => { 'Test::EOL' => '1.0', 'Test::NoTabs' => '0.9', }, release_testing_mandatory => 1, }, test_strictures => { req => { 'Test::Strict' => '0.20', }, release_testing_mandatory => 1, }, test_backcompat => { env => [ SCHEMA_LOADER_TESTS_BACKCOMPAT => 1, ], }, # this is just for completeness as SQLite # is a core dep of DBIC for testing rdbms_sqlite => { req => { 'DBD::SQLite' => 0, }, pod => { title => 'SQLite support', desc => 'Modules required to connect to SQLite', }, augment => { }, }, rdbms_pg => { req => { # when changing this list make sure to adjust xt/optional_deps.t 'DBD::Pg' => 0, }, pod => { title => 'PostgreSQL support', desc => 'Modules required to connect to PostgreSQL', }, }, _rdbms_mssql_common => { }, rdbms_mssql_odbc => { include => [qw( _rdbms_generic_odbc _rdbms_mssql_common )], pod => { title => 'MSSQL support via DBD::ODBC', desc => 'Modules required to connect to MSSQL via DBD::ODBC', }, }, rdbms_mssql_sybase => { include => '_rdbms_mssql_common', req => { 'DBD::Sybase' => 0, }, pod => { title => 'MSSQL support via DBD::Sybase', desc => 'Modules required to connect to MSSQL via DBD::Sybase', }, }, rdbms_mssql_ado => { include => [qw( _rdbms_generic_ado _rdbms_mssql_common )], pod => { title => 'MSSQL support via DBD::ADO (Windows only)', desc => 'Modules required to connect to MSSQL via DBD::ADO. 
This particular DBD is available on Windows only', }, }, _rdbms_msaccess_common => { }, rdbms_msaccess_odbc => { include => [qw( _rdbms_generic_odbc _rdbms_msaccess_common )], pod => { title => 'MS Access support via DBD::ODBC', desc => 'Modules required to connect to MS Access via DBD::ODBC', }, }, rdbms_msaccess_ado => { include => [qw( _rdbms_generic_ado _rdbms_msaccess_common )], pod => { title => 'MS Access support via DBD::ADO (Windows only)', desc => 'Modules required to connect to MS Access via DBD::ADO. This particular DBD is available on Windows only', }, }, rdbms_mysql => { req => { 'DBD::mysql' => 0, }, pod => { title => 'MySQL support', desc => 'Modules required to connect to MySQL', }, }, rdbms_oracle => { include => 'id_shortener', req => { 'DBD::Oracle' => 0, }, pod => { title => 'Oracle support', desc => 'Modules required to connect to Oracle', }, augment => { }, }, rdbms_ase => { req => { 'DBD::Sybase' => 0, }, pod => { title => 'Sybase ASE support', desc => 'Modules required to connect to Sybase ASE', }, }, rdbms_db2 => { req => { 'DBD::DB2' => 0, }, pod => { title => 'DB2 support', desc => 'Modules required to connect to DB2', }, }, rdbms_informix => { req => { 'DBD::Informix' => 0, }, pod => { title => 'Informix support', desc => 'Modules required to connect to Informix', }, }, _rdbms_sqlanywhere_common => { }, rdbms_sqlanywhere => { include => '_rdbms_sqlanywhere_common', req => { 'DBD::SQLAnywhere' => 0, }, pod => { title => 'SQLAnywhere support', desc => 'Modules required to connect to SQLAnywhere', }, }, rdbms_sqlanywhere_odbc => { include => [qw( _rdbms_generic_odbc _rdbms_sqlanywhere_common )], pod => { title => 'SQLAnywhere support via DBD::ODBC', desc => 'Modules required to connect to SQLAnywhere via DBD::ODBC', }, }, _rdbms_firebird_common => { }, rdbms_firebird => { include => '_rdbms_firebird_common', req => { 'DBD::Firebird' => 0, }, pod => { title => 'Firebird support', desc => 'Modules required to connect to Firebird', }, }, 
rdbms_firebird_interbase => { include => '_rdbms_firebird_common', req => { 'DBD::InterBase' => 0, }, pod => { title => 'Firebird support via DBD::InterBase', desc => 'Modules required to connect to Firebird via DBD::InterBase', }, }, rdbms_firebird_odbc => { include => [qw( _rdbms_generic_odbc _rdbms_firebird_common )], pod => { title => 'Firebird support via DBD::ODBC', desc => 'Modules required to connect to Firebird via DBD::ODBC', }, }, test_rdbms_sqlite => { include => 'rdbms_sqlite', req => { ### ### IMPORTANT - do not raise this dependency ### even though many bugfixes are present in newer versions, the general DBIC ### rule is to bend over backwards for available DBDs (given upgrading them is ### often *not* easy or even possible) ### 'DBD::SQLite' => '1.29', }, }, test_rdbms_pg => { include => 'rdbms_pg', env => [ DBICTEST_PG_DSN => 1, DBICTEST_PG_USER => 0, DBICTEST_PG_PASS => 0, ], req => { # the order does matter because the rdbms support group might require # a different version that the test group # # when changing this list make sure to adjust xt/optional_deps.t 'DBD::Pg' => '2.009002', # specific version to test bytea }, }, test_rdbms_mssql_odbc => { include => 'rdbms_mssql_odbc', env => [ DBICTEST_MSSQL_ODBC_DSN => 1, DBICTEST_MSSQL_ODBC_USER => 0, DBICTEST_MSSQL_ODBC_PASS => 0, ], }, test_rdbms_mssql_ado => { include => 'rdbms_mssql_ado', env => [ DBICTEST_MSSQL_ADO_DSN => 1, DBICTEST_MSSQL_ADO_USER => 0, DBICTEST_MSSQL_ADO_PASS => 0, ], }, test_rdbms_mssql_sybase => { include => 'rdbms_mssql_sybase', env => [ DBICTEST_MSSQL_DSN => 1, DBICTEST_MSSQL_USER => 0, DBICTEST_MSSQL_PASS => 0, ], }, test_rdbms_msaccess_odbc => { include => 'rdbms_msaccess_odbc', env => [ DBICTEST_MSACCESS_ODBC_DSN => 1, DBICTEST_MSACCESS_ODBC_USER => 0, DBICTEST_MSACCESS_ODBC_PASS => 0, ], req => { 'Data::GUID' => '0', }, }, test_rdbms_msaccess_ado => { include => 'rdbms_msaccess_ado', env => [ DBICTEST_MSACCESS_ADO_DSN => 1, DBICTEST_MSACCESS_ADO_USER => 0, 
DBICTEST_MSACCESS_ADO_PASS => 0, ], req => { 'Data::GUID' => 0, }, }, test_rdbms_mysql => { include => 'rdbms_mysql', env => [ DBICTEST_MYSQL_DSN => 1, DBICTEST_MYSQL_USER => 0, DBICTEST_MYSQL_PASS => 0, ], }, test_rdbms_oracle => { include => 'rdbms_oracle', env => [ DBICTEST_ORA_DSN => 1, DBICTEST_ORA_USER => 0, DBICTEST_ORA_PASS => 0, ], req => { 'DBD::Oracle' => '1.24', }, }, test_rdbms_ase => { include => 'rdbms_ase', env => [ DBICTEST_SYBASE_DSN => 1, DBICTEST_SYBASE_USER => 0, DBICTEST_SYBASE_PASS => 0, ], }, test_rdbms_db2 => { include => 'rdbms_db2', env => [ DBICTEST_DB2_DSN => 1, DBICTEST_DB2_USER => 0, DBICTEST_DB2_PASS => 0, ], }, test_rdbms_informix => { include => 'rdbms_informix', env => [ DBICTEST_INFORMIX_DSN => 1, DBICTEST_INFORMIX_USER => 0, DBICTEST_INFORMIX_PASS => 0, ], }, test_rdbms_sqlanywhere => { include => 'rdbms_sqlanywhere', env => [ DBICTEST_SQLANYWHERE_DSN => 1, DBICTEST_SQLANYWHERE_USER => 0, DBICTEST_SQLANYWHERE_PASS => 0, ], }, test_rdbms_sqlanywhere_odbc => { include => 'rdbms_sqlanywhere_odbc', env => [ DBICTEST_SQLANYWHERE_ODBC_DSN => 1, DBICTEST_SQLANYWHERE_ODBC_USER => 0, DBICTEST_SQLANYWHERE_ODBC_PASS => 0, ], }, test_rdbms_firebird => { include => 'rdbms_firebird', env => [ DBICTEST_FIREBIRD_DSN => 1, DBICTEST_FIREBIRD_USER => 0, DBICTEST_FIREBIRD_PASS => 0, ], }, test_rdbms_firebird_interbase => { include => 'rdbms_firebird_interbase', env => [ DBICTEST_FIREBIRD_INTERBASE_DSN => 1, DBICTEST_FIREBIRD_INTERBASE_USER => 0, DBICTEST_FIREBIRD_INTERBASE_PASS => 0, ], }, test_rdbms_firebird_odbc => { include => 'rdbms_firebird_odbc', env => [ DBICTEST_FIREBIRD_ODBC_DSN => 1, DBICTEST_FIREBIRD_ODBC_USER => 0, DBICTEST_FIREBIRD_ODBC_PASS => 0, ], }, }; ### Public API sub import { my $class = shift; if (@_) { my $action = shift; if ($action eq '-die_without') { my $err; { local $@; eval { $class->die_unless_req_ok_for(\@_); 1 } or $err = $@; } die "\n$err\n" if $err; } elsif ($action eq '-list_missing') { print 
$class->modreq_missing_for(\@_); print "\n"; exit 0; } elsif ($action eq '-skip_all_without') { # sanity check - make sure ->current_test is 0 and no plan has been declared do { local $@; defined eval { Test::Builder->new->current_test or Test::Builder->new->has_plan }; } and croak("Unable to invoke -skip_all_without after testing has started"); if ( my $missing = $class->req_missing_for(\@_) ) { die ("\nMandatory requirements not satisfied during release-testing: $missing\n\n") if $ENV{RELEASE_TESTING} and $class->_groups_to_reqs(\@_)->{release_testing_mandatory}; print "1..0 # SKIP requirements not satisfied: $missing\n"; exit 0; } } elsif ($action =~ /^-/) { croak "Unknown import-time action '$action'"; } else { croak "$class is not an exporter, unable to import '$action'"; } } 1; } sub unimport { croak( __PACKAGE__ . " does not implement unimport" ); } # OO for (mistakenly considered) ease of extensibility, not due to any need to # carry state of any sort. This API is currently used outside, so leave as-is. # FIXME - make sure to not propagate this further if module is extracted as a # standalone library - keep the stupidity to a DBIC-secific shim! # sub req_list_for { shift->_groups_to_reqs(shift)->{effective_modreqs}; } sub modreq_list_for { shift->_groups_to_reqs(shift)->{modreqs}; } sub req_group_list { +{ map { $_ => $_[0]->_groups_to_reqs($_) } grep { $_ !~ /^_/ } keys %$dbic_reqs } } sub req_errorlist_for { shift->modreq_errorlist_for(shift) } # deprecated sub modreq_errorlist_for { my ($self, $groups) = @_; $self->_errorlist_for_modreqs( $self->_groups_to_reqs($groups)->{modreqs} ); } sub req_ok_for { shift->req_missing_for(shift) ? 0 : 1; } sub req_missing_for { my ($self, $groups) = @_; my $reqs = $self->_groups_to_reqs($groups); my $mods_missing = $reqs->{missing_envvars} ? $self->_list_physically_missing_modules( $reqs->{modreqs} ) : $self->modreq_missing_for($groups) ; return '' if ! $mods_missing and ! 
$reqs->{missing_envvars} ; my @res = $mods_missing || (); push @res, 'the following group(s) of environment variables: ' . join ' and ', sort map { __envvar_group_desc($_) } @{$reqs->{missing_envvars}} if $reqs->{missing_envvars}; return ( ( join ' as well as ', @res ) . ( $reqs->{modreqs_fully_documented} ? " (see @{[ ref $self || $self ]} documentation for details)" : '' ), ); } sub modreq_missing_for { my ($self, $groups) = @_; my $reqs = $self->_groups_to_reqs($groups); my $modreq_errors = $self->_errorlist_for_modreqs($reqs->{modreqs}) or return ''; join ' ', map { $reqs->{modreqs}{$_} ? "$_~$reqs->{modreqs}{$_}" : $_ } sort { lc($a) cmp lc($b) } keys %$modreq_errors ; } my $tb; sub skip_without { my ($self, $groups) = @_; $tb ||= do { local $@; eval { Test::Builder->new } } or croak "Calling skip_without() before loading Test::Builder makes no sense"; if ( my $err = $self->req_missing_for($groups) ) { my ($fn, $ln) = (caller(0))[1,2]; $tb->skip("block in $fn around line $ln requires $err"); local $^W = 0; last SKIP; } 1; } sub die_unless_req_ok_for { if (my $err = shift->req_missing_for(shift) ) { die "Unable to continue due to missing requirements: $err\n"; } } ### Private functions # potentially shorten group desc sub __envvar_group_desc { my @envs = @{$_[0]}; my (@res, $last_prefix); while (my $ev = shift @envs) { my ($pref, $sep, $suff) = split / ([\_\-]) (?= [^\_\-]+ \z )/x, $ev; if ( defined $sep and ($last_prefix||'') eq $pref ) { push @res, "...${sep}${suff}" } else { push @res, $ev; } $last_prefix = $pref if $sep; } join '/', @res; } my $groupname_re = qr/ [a-z_] [0-9_a-z]* /x; my $modname_re = qr/ [A-Z_a-z] [0-9A-Z_a-z]* (?:::[0-9A-Z_a-z]+)* /x; my $modver_re = qr/ [0-9]+ (?: \. [0-9]+ )? /x; # Expand includes from a random group in a specific order: # nonvariable groups first, then their includes, then the variable groups, # then their includes. 
# This allows reliably marking the rest of the mod reqs as variable (this is # also why variable includes are currently not allowed) sub __expand_includes { my ($groups, $seen) = @_; # !! DIFFERENT !! behavior and return depending on invocation mode # (easier to recurse this way) my $is_toplevel = $seen ? 0 : !! ($seen = {}) ; my ($res_per_type, $missing_envvars); # breadth-first evaluation, with non-variable includes on top for my $g (@$groups) { croak "Invalid requirement group name '$g': only ascii alphanumerics and _ are allowed" if $g !~ qr/ \A $groupname_re \z/x; my $r = $dbic_reqs->{$g} or croak "Requirement group '$g' is not defined"; # always do this check *before* the $seen check croak "Group '$g' with variable effective_modreqs can not be specified as an 'include'" if ( $r->{env} and ! $is_toplevel ); next if $seen->{$g}++; my $req_type = 'static'; if ( my @e = @{$r->{env}||[]} ) { croak "Unexpected 'env' attribute under group '$g' (only allowed in test_* groups)" unless $g =~ /^test_/; croak "Unexpected *odd* list in 'env' under group '$g'" if @e % 2; # deconstruct the whole thing my (@group_envnames_list, $some_envs_required, $some_required_missing); while (@e) { push @group_envnames_list, my $envname = shift @e; # env required or not next unless shift @e; $some_envs_required ||= 1; $some_required_missing ||= ( ! defined $ENV{$envname} or ! 
length $ENV{$envname} ); } croak "None of the envvars in group '$g' declared as required, making the requirement moot" unless $some_envs_required; if ($some_required_missing) { push @{$missing_envvars->{$g}}, \@group_envnames_list; $req_type = 'variable'; } } push @{$res_per_type->{"base_${req_type}"}}, $g; if (my $i = $dbic_reqs->{$g}{include}) { $i = [ $i ] unless ref $i eq 'ARRAY'; croak "Malformed 'include' for group '$g': must be another existing group name or arrayref of existing group names" unless @$i; push @{$res_per_type->{"incs_${req_type}"}}, @$i; } } my @ret = map { @{ $res_per_type->{"base_${_}"} || [] }, ( $res_per_type->{"incs_${_}"} ? __expand_includes( $res_per_type->{"incs_${_}"}, $seen ) : () ), } qw(static variable); return ! $is_toplevel ? @ret : do { my $rv = {}; $rv->{$_} = { idx => 1 + keys %$rv, missing_envvars => $missing_envvars->{$_}, } for @ret; $rv->{$_}{user_requested} = 1 for @$groups; $rv; }; } ### Private OO API our %req_unavailability_cache; # this method is just a lister and envvar/metadata checker - it does not try to load anything sub _groups_to_reqs { my ($self, $want) = @_; $want = [ $want || () ] unless ref $want eq 'ARRAY'; croak "@{[ (caller(1))[3] ]}() expects a requirement group name or arrayref of group names" unless @$want; my $ret = { modreqs => {}, modreqs_fully_documented => 1, }; my $groups; for my $piece (@$want) { if ($piece =~ qr/ \A $groupname_re \z /x) { push @$groups, $piece; } elsif ( my ($mod, $ver) = $piece =~ qr/ \A ($modname_re) \>\= ($modver_re) \z /x ) { croak "Ad hoc module specification lists '$mod' twice" if exists $ret->{modreqs}{$mod}; croak "Ad hoc module specification '${mod} >= $ver' (or greater) not listed in the test_adhoc optdep group" if ( ! 
defined $dbic_reqs->{test_adhoc}{req}{$mod} or $dbic_reqs->{test_adhoc}{req}{$mod} < $ver ); $ret->{modreqs}{$mod} = $ver; $ret->{modreqs_fully_documented} = 0; } else { croak "Unsupported argument '$piece' supplied to @{[ (caller(1))[3] ]}()" } } my $all_groups = __expand_includes($groups); # pre-assemble list of augmentations, perform basic sanity checks # Note that below we *DO NOT* respect the source/target reationship, but # instead always default to augment the "later" group # This is done so that the "stable/variable" boundary keeps working as # expected my $augmentations; for my $requesting_group (keys %$all_groups) { if (my $ag = $dbic_reqs->{$requesting_group}{augment}) { for my $target_group (keys %$ag) { croak "Group '$requesting_group' claims to augment a non-existent group '$target_group'" unless $dbic_reqs->{$target_group}; croak "Augmentation combined with variable effective_modreqs currently unsupported for group '$requesting_group'" if $dbic_reqs->{$requesting_group}{env}; croak "Augmentation of group '$target_group' with variable effective_modreqs unsupported (requested by '$requesting_group')" if $dbic_reqs->{$target_group}{env}; if (my @foreign = grep { $_ ne 'req' } keys %{$ag->{$target_group}} ) { croak "Only 'req' augmentations are currently supported (group '$requesting_group' attempts to alter '$foreign[0]' of group '$target_group'"; } $ret->{augments}{$target_group} = 1; # no augmentation for stuff that hasn't been selected if ( $all_groups->{$target_group} and my $ar = $ag->{$target_group}{req} ) { push @{$augmentations->{ ( $all_groups->{$requesting_group}{idx} < $all_groups->{$target_group}{idx} ) ? 
$target_group : $requesting_group }}, $ar; } } } } for my $group (sort { $all_groups->{$a}{idx} <=> $all_groups->{$b}{idx} } keys %$all_groups ) { my $group_reqs = $dbic_reqs->{$group}{req}; # sanity-check for my $req_bag ($group_reqs, @{ $augmentations->{$group} || [] } ) { for (keys %$req_bag) { $_ =~ / \A $modname_re \z /x or croak "Requirement '$_' in group '$group' is not a valid module name"; # !!!DO NOT CHANGE!!! # remember - version.pm may not be available on the system croak "Requirement '$_' in group '$group' specifies an invalid version '$req_bag->{$_}' (only plain non-underscored floating point decimals are supported)" if ( ($req_bag->{$_}||0) !~ qr/ \A $modver_re \z /x ); } } if (my $e = $all_groups->{$group}{missing_envvars}) { push @{$ret->{missing_envvars}}, @$e; } # assemble into the final ret for my $type ( 'modreqs', ( $ret->{missing_envvars} ? () : 'effective_modreqs' ), ) { for my $req_bag ($group_reqs, @{ $augmentations->{$group} || [] } ) { for my $mod (keys %$req_bag) { $ret->{$type}{$mod} = $req_bag->{$mod}||0 if ( ! exists $ret->{$type}{$mod} or # we sanitized the version to be numeric above - we can just -gt it ($req_bag->{$mod}||0) > $ret->{$type}{$mod} ); } } } $ret->{modreqs_fully_documented} &&= !!$dbic_reqs->{$group}{pod} if $all_groups->{$group}{user_requested}; $ret->{release_testing_mandatory} ||= !!$dbic_reqs->{$group}{release_testing_mandatory}; } return $ret; } # this method tries to find/load specified modreqs and returns a hashref of # module/loaderror pairs for anything that failed sub _errorlist_for_modreqs { # args supposedly already went through _groups_to_reqs and are therefore sanitized # safe to eval at will my ($self, $reqs) = @_; my $ret; for my $m ( keys %$reqs ) { my $v = $reqs->{$m}; if (! exists $req_unavailability_cache{$m}{$v} ) { local $@; eval( "require $m;" . ( $v ? 
"$m->VERSION(q($v))" : '' ) ); $req_unavailability_cache{$m}{$v} = $@; } $ret->{$m} = $req_unavailability_cache{$m}{$v} if $req_unavailability_cache{$m}{$v}; } $ret; } # Unlike the above DO NOT try to load anything # This is executed when some needed envvars are not available # which in turn means a module load will never be reached anyway # This is important because some modules (especially DBDs) can be # *really* fickle when a require() is attempted, with pretty confusing # side-effects (especially on windows) sub _list_physically_missing_modules { my ($self, $modreqs) = @_; # in case there is a coderef in @INC there is nothing we can definitively prove # so short circuit directly return '' if grep { length ref $_ } @INC; my @definitely_missing; for my $mod (keys %$modreqs) { (my $fn = $mod . '.pm') =~ s|::|/|g; push @definitely_missing, $mod unless grep # this should work on any combination of slashes { $_ and -d $_ and -f "$_/$fn" and -r "$_/$fn" } @INC ; } join ' ', map { $modreqs->{$_} ? "$_~$modreqs->{$_}" : $_ } sort { lc($a) cmp lc($b) } @definitely_missing ; } # This is to be called by the author only (automatically in Makefile.PL) sub _gen_pod { my ($class, $distver, $pod_dir) = @_; die "No POD root dir supplied" unless $pod_dir; $distver ||= eval { require DBIx::Class::Schema::Loader; DBIx::Class::Schema::Loader->VERSION; } || die "\n\n---------------------------------------------------------------------\n" . 'Unable to load core DBIx::Class::Schema::Loader module to determine current version, '. 'possibly due to missing dependencies. Author-mode autodocumentation ' . "halted\n\n" . $@ . "\n\n---------------------------------------------------------------------\n" ; # do not ask for a recent version, use 1.x API calls # this *may* execute on a smoker with old perl or whatnot require File::Path; (my $modfn = __PACKAGE__ . 
'.pm') =~ s|::|/|g; (my $podfn = "$pod_dir/$modfn") =~ s/\.pm$/\.pod/; (my $dir = $podfn) =~ s|/[^/]+$||; File::Path::mkpath([$dir]); my $moosever = $class->req_list_for('use_moose')->{'Moose'} or die "Hrmm? No Moose dep?"; my @chunks; #@@ #@@ HEADER #@@ push @chunks, <<"EOC"; ######################################################################### ##################### A U T O G E N E R A T E D ######################## ######################################################################### # # The contents of this POD file are auto-generated. Any changes you make # will be lost. If you need to change the generated text edit _gen_pod() # at the end of $modfn # =head1 NAME $class - Optional module dependency specifications (for module authors) EOC #@@ #@@ SYNOPSIS HEADING #@@ push @chunks, <<"EOC"; =head1 SYNOPSIS Somewhere in your build-file (e.g. L's F): ... \$EUMM_ARGS{CONFIGURE_REQUIRES} = { \%{ \$EUMM_ARGS{CONFIGURE_REQUIRES} || {} }, 'DBIx::Class::Schema::Loader' => '$distver', }; ... my %DBIC_CONFIG_AND_ORACLE_DEPS = %{ eval { require $class; $class->req_list_for([qw( dbicdump_config rdbms_oracle )]); } || {} }; \$EUMM_ARGS{PREREQ_PM} = { \%DBIC_CONFIG_AND_ORACLE_DEPS, \%{ \$EUMM_ARGS{PREREQ_PM} || {} }, }; ... ExtUtils::MakeMaker::WriteMakefile(\%EUMM_ARGS); B: The C protection within the example is due to support for requirements during L build phase|CPAN::Meta::Spec/Phases> not being available on a sufficient portion of production installations of Perl. Robust support for such dependency requirements is available in the L installer only since version C<1.94_56> first made available for production with perl version C<5.12>. It is the belief of the current maintainer that support for requirements during the C build phase will not be sufficiently ubiquitous until the B at the earliest, hence the extra care demonstrated above. It should also be noted that some 3rd party installers (e.g. 
L) do the right thing with configure requirements independent from the versions of perl and CPAN available. EOC #@@ #@@ DESCRIPTION HEADING #@@ push @chunks, <<'EOC'; =head1 DESCRIPTION Some of the less-frequently used features of L have external module dependencies on their own. In order not to burden the average user with modules they will never use, these optional dependencies are not included in the base Makefile.PL. Instead an exception with a descriptive message is thrown when a specific feature can't find one or several modules required for its operation. This module is the central holding place for the current list of such dependencies, for DBIx::Class::Schema::Loader core authors, and DBIx::Class::Schema::Loader extension authors alike. Dependencies are organized in L where each group can list one or more required modules, with an optional minimum version (or 0 for any version). In addition groups prefixed with C can specify a set of environment variables, some (or all) of which are marked as required for the group to be considered by L Each group name (or a combination thereof) can be used in the L as described below. EOC #@@ #@@ REQUIREMENT GROUPLIST HEADING #@@ push @chunks, '=head1 CURRENT REQUIREMENT GROUPS'; my $standalone_info; for my $group (sort keys %$dbic_reqs) { my $info = $standalone_info->{$group} ||= $class->_groups_to_reqs($group); next unless ( $info->{modreqs_fully_documented} and ( $info->{augments} or $info->{modreqs} ) ); my $p = $dbic_reqs->{$group}{pod}; push @chunks, ( "=head2 $p->{title}", "=head3 $group", $p->{desc}, '=over', ); if ( keys %{ $info->{modreqs}||{} } ) { push @chunks, map { "=item * $_" . ($info->{modreqs}{$_} ? 
" >= $info->{modreqs}{$_}" : '') } ( sort keys %{ $info->{modreqs} } ) ; } else { push @chunks, '=item * No standalone requirements', } push @chunks, '=back'; for my $ag ( sort keys %{ $info->{augments} || {} } ) { my $ag_info = $standalone_info->{$ag} ||= $class->_groups_to_reqs($ag); my $newreqs = $class->modreq_list_for([ $group, $ag ]); for (keys %$newreqs) { delete $newreqs->{$_} if ( ( defined $info->{modreqs}{$_} and $info->{modreqs}{$_} == $newreqs->{$_} ) or ( defined $ag_info->{modreqs}{$_} and $ag_info->{modreqs}{$_} == $newreqs->{$_} ) ); } if (keys %$newreqs) { push @chunks, ( "Combined with L additionally requires:", '=over', ( map { "=item * $_" . ($newreqs->{$_} ? " >= $newreqs->{$_}" : '') } ( sort keys %$newreqs ) ), '=back', ); } } } #@@ #@@ API DOCUMENTATION HEADING #@@ push @chunks, <<'EOC'; =head1 IMPORT-LIKE ACTIONS Even though this module is not an L, it recognizes several C supplied to its C method. =head2 -skip_all_without =over =item Arguments: @group_names =back A convenience wrapper for use during testing: EOC push @chunks, " use $class -skip_all_without => qw(admin test_rdbms_mysql);"; push @chunks, 'Roughly equivalent to the following code:'; push @chunks, sprintf <<'EOS', ($class) x 2; BEGIN { require %s; if ( my $missing = %s->req_missing_for(\@group_names_) ) { print "1..0 # SKIP requirements not satisfied: $missing\n"; exit 0; } } EOS push @chunks, <<'EOC'; It also takes into account the C environment variable and behaves like L for any requirement groups marked as C. 
=head2 -die_without =over =item Arguments: @group_names =back A convenience wrapper around L: EOC push @chunks, " use $class -die_without => qw(deploy admin);"; push @chunks, <<'EOC'; =head2 -list_missing =over =item Arguments: @group_names =back A convenience wrapper around L: perl -Ilib -MDBIx::Class::Schema::Loader::Optional::Dependencies=-list_missing,dbicdump_config,rdbms_oracle | cpanm =head1 METHODS =head2 req_group_list =over =item Arguments: none =item Return Value: \%list_of_requirement_groups =back This method should be used by DBIx::Class::Schema::Loader packagers, to get a hashref of all dependencies B by dependency group. Each key (group name), or a combination thereof (as an arrayref) can be supplied to the methods below. The B of the returned hash are currently a set of options B. If you have use for any of the contents - contact the maintainers, instead of treating this as public (left alone stable) API. =head2 req_list_for =over =item Arguments: $group_name | \@group_names =item Return Value: \%set_of_module_version_pairs =back This method should be used by DBIx::Class::Schema::Loader extension authors, to determine the version of modules a specific set of features requires for this version of DBIx::Class::Schema::Loader (regardless of their availability on the system). See the L for a real-world example. When handling C groups this method behaves B from L below (and is the only such inconsistency among the C methods). If a particular group declares as requirements some C and these requirements are not satisfied (the envvars are unset) - then the C of this group are not included in the returned list. =head2 modreq_list_for =over =item Arguments: $group_name | \@group_names =item Return Value: \%set_of_module_version_pairs =back Same as L but does not take into consideration any C - returns just the list of required modules. 
=head2 req_ok_for =over =item Arguments: $group_name | \@group_names =item Return Value: 1|0 =back Returns true or false depending on whether all modules/envvars required by the group(s) are loadable/set on the system. =head2 req_missing_for =over =item Arguments: $group_name | \@group_names =item Return Value: $error_message_string =back Returns a single-line string suitable for inclusion in larger error messages. This method would normally be used by DBIx::Class::Schema::Loader core features, to indicate to the user that they need to install specific modules and/or set specific environment variables before being able to use a specific feature set. For example if some of the requirements for C are not available, the returned string could look like: EOC push @chunks, qq{ "Moose~$moosever" (see $class documentation for details)}; push @chunks, <<'EOC'; The author is expected to prepend the necessary text to this message before returning the actual error seen by the user. See also L =head2 modreq_missing_for =over =item Arguments: $group_name | \@group_names =item Return Value: $error_message_string =back Same as L except that the error string is guaranteed to be either empty, or contain a set of module requirement specifications suitable for piping to e.g. L. The method explicitly does not attempt to validate the state of required environment variables (if any). For instance if some of the requirements for C are not available, the returned string could look like: EOC push @chunks, qq{ "Moose~$moosever"}; push @chunks, <<'EOC'; See also L. =head2 skip_without =over =item Arguments: $group_name | \@group_names =back A convenience wrapper around L. It does not take neither a reason (it is generated by L) nor an amount of skipped tests (it is always C<1>, thus mandating unconditional use of L). Most useful in combination with ad hoc requirement specifications: EOC push @chunks, <skip_without([ deploy YAML>=0.90 ]); ... 
} EOC push @chunks, <<'EOC'; =head2 die_unless_req_ok_for =over =item Arguments: $group_name | \@group_names =back Checks if L passes for the supplied group(s), and in case of failure throws an exception including the information from L. See also L. =head2 modreq_errorlist_for =over =item Arguments: $group_name | \@group_names =item Return Value: \%set_of_loaderrors_per_module =back Returns a hashref containing the actual errors that occurred while attempting to load each module in the requirement group(s). =head2 req_errorlist_for Deprecated method name, equivalent (via proxy) to L. EOC #@@ #@@ FOOTER #@@ push @chunks, <<'EOC'; =head1 FURTHER QUESTIONS? Check the list of L. =head1 COPYRIGHT AND LICENSE This module is free software L by the L. You can redistribute it and/or modify it under the same terms as the L. EOC eval { open (my $fh, '>', $podfn) or die; print $fh join ("\n\n", @chunks) or die; print $fh "\n" or die; close ($fh) or die; } or croak( "Unable to write $podfn: " . ( $! || $@ || 'unknown error') ); } 1; DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/Manual/0000755000175000017500000000000012650450355023671 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/Manual/UpgradingFromV4.pod0000644000175000017500000000523212542756321027357 0ustar ilmariilmari=pod =head1 NAME DBIx::Class::Schema::Loader::Manual::UpgradingFromV4 - Important Information Related to Upgrading from Version 0.04006 =head1 What Changed =over 4 =item * add_column The new Loader detects much more information about columns and sets flags like C that it didn't set before. =item * RelBuilder The new RelBuilder will give you nicer accessor names for relationships, so you will no longer have conflicts between a foreign key column and the relationship accessor itself (if the FK is named C<_id>.) It will also more correctly infer the relationship type, e.g. 
some relationships that were previously detected as a C will now be a C (when it detects a unique constraint on the foreign key column.) Also C and C are turned off for by default for C and C relationships, while C relationships are created with C<< on_delete => 'CASCADE' >> and C<< on_update => 'CASCADE' >> by default. This is overridable via L. =item * moniker_map Table names are now singularized when determining the C class names. So the table C would have become C in C<0.04006> but now becomes C instead. =item * use_namespaces Now defaults to on. See L and L. =item * Support for more databases We now support Microsoft SQL Server and Sybase, and there are also many improvements to the other backends. =back =head1 Backward Compatibility In backward compatibility mode, the Loader will use the old relationship names and types, will not singularize monikers for tables, and C will be off. To control this behavior see L and L. =head2 Static Schemas When reading a C from a static schema generated with an C<0.04> version of Loader, backward compatibility mode will default to on, unless overridden with the C and/or C attributes. =head2 Dynamic Schemas Dynamic schemas will always by default use C<0.04006> mode and have C off. To upgrade a dynamic schema, set the C and C attributes (which is proxied to the loader) in your C: __PACKAGE__->naming('current'); __PACKAGE__->use_namespaces(1); =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/DBObject.pm0000644000175000017500000000701712625642777024450 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::DBObject; use strict; use warnings; use base 'Class::Accessor::Grouped'; use mro 'c3'; use Carp::Clan qw/^DBIx::Class/; use Scalar::Util 'weaken'; use namespace::clean; =head1 NAME DBIx::Class::Schema::Loader::DBObject - Base Class for Database Objects Such as Tables and Views in L =head1 METHODS =head2 loader The loader object this object is associated with, this is a required parameter to L. =head2 name Name of the object. The object stringifies to this value. =cut __PACKAGE__->mk_group_accessors(simple => qw/ loader name _schema ignore_schema /); use overload '""' => sub { $_[0]->name }, '@{}' => sub { $_[0]->name_parts }, fallback => 1; =head2 new The constructor, takes L, L
, L, and L as key-value parameters. =cut sub new { my $class = shift; my $self = { @_ }; croak "loader is required" unless ref $self->{loader}; weaken $self->{loader}; $self->{_schema} = delete $self->{schema}; return bless $self, $class; } =head2 clone Make a shallow copy of the object. =cut sub clone { my $self = shift; return bless { %$self }, ref $self; } =head2 schema The schema (or owner) of the object. Returns nothing if L is true. =head2 ignore_schema Set to true to make L and L not use the defined L. Does not affect L (for L testing on SQLite.) =cut sub schema { my $self = shift; return $self->_schema(@_) unless $self->ignore_schema; return undef; } sub _quote { my ($self, $identifier) = @_; $identifier = '' if not defined $identifier; my $qt = $self->loader->quote_char || ''; if (length $qt > 1) { my @qt = split //, $qt; return $qt[0] . $identifier . $qt[1]; } return "${qt}${identifier}${qt}"; } =head1 sql_name Returns the properly quoted full identifier with L and L
. =cut sub sql_name { my $self = shift; my $name_sep = $self->loader->name_sep; if ($self->schema) { return $self->_quote($self->schema) . $name_sep . $self->_quote($self->name); } return $self->_quote($self->name); } =head1 dbic_name Returns a value suitable for the C<< __PACKAGE__->table >> call in L Result files. =cut sub dbic_name { my $self = shift; my $name_sep = $self->loader->name_sep; if ($self->loader->qualify_objects && $self->_schema) { if ($self->_schema =~ /\W/ || $self->name =~ /\W/) { return \ $self->sql_name; } return $self->_schema . $name_sep . $self->name; } if ($self->name =~ /\W/) { return \ $self->_quote($self->name); } return $self->name; } =head2 name_parts Returns an arrayref of the values returned by the methods specified in the L of the L object. The object arrayrefifies to this value. =cut sub name_parts { my ($self) = shift; return [ map { $self->$_ } @{$self->loader->moniker_parts} ]; } =head1 SEE ALSO L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/RelBuilder.pm0000644000175000017500000007600112650450246025046 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::RelBuilder; use strict; use warnings; use base 'Class::Accessor::Grouped'; use mro 'c3'; use Carp::Clan qw/^DBIx::Class/; use Scalar::Util 'weaken'; use DBIx::Class::Schema::Loader::Utils qw/split_name slurp_file array_eq apply uniq/; use Try::Tiny; use List::Util qw/all any first/; use namespace::clean; use Lingua::EN::Inflect::Phrase (); use Lingua::EN::Tagger (); use String::ToIdentifier::EN (); use String::ToIdentifier::EN::Unicode (); use Class::Unload (); use Class::Inspector (); our $VERSION = '0.07045'; # Glossary: # # local_relname -- name of relationship from the local table referring to the remote table # remote_relname -- name of relationship from the remote table referring to the local table # remote_method -- relationship type from remote table to local table, usually has_many =head1 NAME DBIx::Class::Schema::Loader::RelBuilder - Builds relationships for DBIx::Class::Schema::Loader =head1 SYNOPSIS See L and L. =head1 DESCRIPTION This class builds relationships for L. This is module is not (yet) for external use. =head1 METHODS =head2 new Arguments: $loader object =head2 generate_code Arguments: [ [ local_moniker1 (scalar), fk_info1 (arrayref), uniq_info1 (arrayref) ] [ local_moniker2 (scalar), fk_info2 (arrayref), uniq_info2 (arrayref) ] ... ] This generates the code for the relationships of each table. C is the moniker name of the table which had the REFERENCES statements. 
The fk_info arrayref's contents should take the form: [ { local_table => 'some_table', local_moniker => 'SomeTable', local_columns => [ 'col2', 'col3' ], remote_table => 'another_table_moniker', remote_moniker => 'AnotherTableMoniker', remote_columns => [ 'col5', 'col7' ], }, { local_table => 'some_other_table', local_moniker => 'SomeOtherTable', local_columns => [ 'col1', 'col4' ], remote_table => 'yet_another_table_moniker', remote_moniker => 'YetAnotherTableMoniker', remote_columns => [ 'col1', 'col2' ], }, # ... ], The uniq_info arrayref's contents should take the form: [ [ uniq_constraint_name => [ 'col1', 'col2' ], ], [ another_uniq_constraint_name => [ 'col1', 'col2' ], ], ], This method will return the generated relationships as a hashref keyed on the class names. The values are arrayrefs of hashes containing method name and arguments, like so: { 'Some::Source::Class' => [ { method => 'belongs_to', arguments => [ 'col1', 'Another::Source::Class' ] }, { method => 'has_many', arguments => [ 'anothers', 'Yet::Another::Source::Class', 'col15' ] }, ], 'Another::Source::Class' => [ # ... ], # ... } =cut __PACKAGE__->mk_group_accessors('simple', qw/ loader schema inflect_plural inflect_singular relationship_attrs rel_collision_map rel_name_map allow_extra_m2m_cols _temp_classes __tagger /); sub new { my ($class, $loader) = @_; # from old POD about this constructor: # C<$schema_class> should be a schema class name, where the source # classes have already been set up and registered. Column info, # primary key, and unique constraints will be drawn from this # schema for all of the existing source monikers. # Options inflect_plural and inflect_singular are optional, and # are better documented in L. 
my $self = { loader => $loader, (map { $_ => $loader->$_ } qw( schema inflect_plural inflect_singular relationship_attrs rel_collision_map rel_name_map allow_extra_m2m_cols )), _temp_classes => [], }; weaken $self->{loader}; #< don't leak bless $self => $class; # validate the relationship_attrs arg if( defined $self->relationship_attrs ) { (ref $self->relationship_attrs eq 'HASH' || ref $self->relationship_attrs eq 'CODE') or croak "relationship_attrs must be a hashref or coderef"; } return $self; } # pluralize a relationship name sub _inflect_plural { my ($self, $relname) = @_; return '' if !defined $relname || $relname eq ''; my $result; my $mapped = 0; if( ref $self->inflect_plural eq 'HASH' ) { if (exists $self->inflect_plural->{$relname}) { $result = $self->inflect_plural->{$relname}; $mapped = 1; } } elsif( ref $self->inflect_plural eq 'CODE' ) { my $inflected = $self->inflect_plural->($relname); if ($inflected) { $result = $inflected; $mapped = 1; } } return ($result, $mapped) if $mapped; return ($self->_to_PL($relname), 0); } # Singularize a relationship name sub _inflect_singular { my ($self, $relname) = @_; return '' if !defined $relname || $relname eq ''; my $result; my $mapped = 0; if( ref $self->inflect_singular eq 'HASH' ) { if (exists $self->inflect_singular->{$relname}) { $result = $self->inflect_singular->{$relname}; $mapped = 1; } } elsif( ref $self->inflect_singular eq 'CODE' ) { my $inflected = $self->inflect_singular->($relname); if ($inflected) { $result = $inflected; $mapped = 1; } } return ($result, $mapped) if $mapped; return ($self->_to_S($relname), 0); } sub _to_PL { my ($self, $name) = @_; $name =~ s/_/ /g; my $plural = Lingua::EN::Inflect::Phrase::to_PL($name); $plural =~ s/ /_/g; return $plural; } sub _to_S { my ($self, $name) = @_; $name =~ s/_/ /g; my $singular = Lingua::EN::Inflect::Phrase::to_S($name); $singular =~ s/ /_/g; return $singular; } sub _default_relationship_attrs { +{ has_many => { cascade_delete => 0, cascade_copy => 
0, }, might_have => { cascade_delete => 0, cascade_copy => 0, }, belongs_to => { on_delete => 'CASCADE', on_update => 'CASCADE', is_deferrable => 1, }, } } # Accessor for options to be passed to each generated relationship type. takes # the relationship type name and optionally any attributes from the database # (such as FK ON DELETE/UPDATE and DEFERRABLE clauses), and returns a # hashref or undef if nothing is set. # # The attributes from the database override the default attributes, which in # turn are overridden by user supplied attributes. sub _relationship_attrs { my ( $self, $reltype, $db_attrs, $params ) = @_; my $r = $self->relationship_attrs; my %composite = ( %{ $self->_default_relationship_attrs->{$reltype} || {} }, %{ $db_attrs || {} }, ( ref $r eq 'HASH' ? ( %{ $r->{all} || {} }, %{ $r->{$reltype} || {} }, ) : () ), ); if (ref $r eq 'CODE') { $params->{attrs} = \%composite; my %ret = %{ $r->(%$params) || {} }; %composite = %ret if %ret; } return %composite ? \%composite : undef; } sub _strip_id_postfix { my ($self, $name) = @_; $name =~ s/_?(?:id|ref|cd|code|num)\z//i; return $name; } sub _remote_attrs { my ($self, $local_moniker, $local_cols, $fk_attrs, $params) = @_; # get our set of attrs from _relationship_attrs, which uses the FK attrs if available my $attrs = $self->_relationship_attrs('belongs_to', $fk_attrs, $params) || {}; # If any referring column is nullable, make 'belongs_to' an # outer join, unless explicitly set by relationship_attrs my $nullable = first { $self->schema->source($local_moniker)->column_info($_)->{is_nullable} } @$local_cols; $attrs->{join_type} = 'LEFT' if $nullable && !defined $attrs->{join_type}; return $attrs; } sub _sanitize_name { my ($self, $name) = @_; $name = $self->loader->_to_identifier('relationships', $name, '_'); $name =~ s/\W+/_/g; # if naming >= 8 to_identifier takes care of it return $name; } sub _normalize_name { my ($self, $name) = @_; $name = $self->_sanitize_name($name); my @words = split_name $name, 
$self->loader->_get_naming_v('relationships'); return join '_', map lc, @words; } sub _local_relname { my ($self, $remote_table, $cond) = @_; my $local_relname; # for single-column case, set the remote relname to the column # name, to make filter accessors work, but strip trailing _id if(scalar keys %{$cond} == 1) { my ($col) = values %{$cond}; $col = $self->_strip_id_postfix($self->_normalize_name($col)); ($local_relname) = $self->_inflect_singular($col); } else { ($local_relname) = $self->_inflect_singular($self->_normalize_name($remote_table)); } return $local_relname; } sub _resolve_relname_collision { my ($self, $moniker, $cols, $relname) = @_; return $relname if $relname eq 'id'; # this shouldn't happen, but just in case my $table = $self->loader->moniker_to_table->{$moniker}; if ($self->loader->_is_result_class_method($relname, $table)) { if (my $map = $self->rel_collision_map) { for my $re (keys %$map) { if (my @matches = $relname =~ /$re/) { return sprintf $map->{$re}, @matches; } } } my $new_relname = $relname; while ($self->loader->_is_result_class_method($new_relname, $table)) { $new_relname .= '_rel' } warn <<"EOF"; Relationship '$relname' in source '$moniker' for columns '@{[ join ',', @$cols ]}' collides with an inherited method. Renaming to '$new_relname'. See "RELATIONSHIP NAME COLLISIONS" in perldoc DBIx::Class::Schema::Loader::Base . 
EOF return $new_relname; } return $relname; } sub generate_code { my ($self, $tables) = @_; # make a copy to destroy my @tables = @$tables; my $all_code = {}; while (my ($local_moniker, $rels, $uniqs) = @{ shift @tables || [] }) { my $local_class = $self->schema->class($local_moniker); my %counters; foreach my $rel (@$rels) { next if !$rel->{remote_source}; $counters{$rel->{remote_source}}++; } foreach my $rel (@$rels) { my $remote_moniker = $rel->{remote_source} or next; my $remote_class = $self->schema->class($remote_moniker); my $remote_obj = $self->schema->source($remote_moniker); my $remote_cols = $rel->{remote_columns} || [ $remote_obj->primary_columns ]; my $local_cols = $rel->{local_columns}; if($#$local_cols != $#$remote_cols) { croak "Column count mismatch: $local_moniker (@$local_cols) " . "$remote_moniker (@$remote_cols)"; } my %cond; @cond{@$remote_cols} = @$local_cols; my ( $local_relname, $remote_relname, $remote_method ) = $self->_relnames_and_method( $local_moniker, $rel, \%cond, $uniqs, \%counters ); my $local_method = 'belongs_to'; ($local_relname) = $self->_rel_name_map( $local_relname, $local_method, $local_class, $local_moniker, $local_cols, $remote_class, $remote_moniker, $remote_cols, ); ($remote_relname) = $self->_rel_name_map( $remote_relname, $remote_method, $remote_class, $remote_moniker, $remote_cols, $local_class, $local_moniker, $local_cols, ); $local_relname = $self->_resolve_relname_collision( $local_moniker, $local_cols, $local_relname, ); $remote_relname = $self->_resolve_relname_collision( $remote_moniker, $remote_cols, $remote_relname, ); my $rel_attrs_params = { rel_name => $local_relname, rel_type => $local_method, local_source => $self->schema->source($local_moniker), remote_source => $self->schema->source($remote_moniker), local_table => $rel->{local_table}, local_cols => $local_cols, remote_table => $rel->{remote_table}, remote_cols => $remote_cols, }; push @{$all_code->{$local_class}}, { method => $local_method, args => [ 
$local_relname, $remote_class, \%cond, $self->_remote_attrs($local_moniker, $local_cols, $rel->{attrs}, $rel_attrs_params), ], extra => { local_class => $local_class, local_moniker => $local_moniker, remote_moniker => $remote_moniker, }, }; my %rev_cond = reverse %cond; for (keys %rev_cond) { $rev_cond{"foreign.$_"} = "self.".$rev_cond{$_}; delete $rev_cond{$_}; } $rel_attrs_params = { rel_name => $remote_relname, rel_type => $remote_method, local_source => $self->schema->source($remote_moniker), remote_source => $self->schema->source($local_moniker), local_table => $rel->{remote_table}, local_cols => $remote_cols, remote_table => $rel->{local_table}, remote_cols => $local_cols, }; push @{$all_code->{$remote_class}}, { method => $remote_method, args => [ $remote_relname, $local_class, \%rev_cond, $self->_relationship_attrs($remote_method, {}, $rel_attrs_params), ], extra => { local_class => $remote_class, local_moniker => $remote_moniker, remote_moniker => $local_moniker, }, }; } } $self->_generate_m2ms($all_code); # disambiguate rels with the same name foreach my $class (keys %$all_code) { my $dups = $self->_duplicates($all_code->{$class}); $self->_disambiguate($all_code, $class, $dups) if $dups; } $self->_cleanup; return $all_code; } # Find classes with only 2 FKs which are the PK and make many_to_many bridges for them. sub _generate_m2ms { my ($self, $all_code) = @_; LINK_CLASS: foreach my $link_class (sort keys %$all_code) { my @rels = grep $_->{method} eq 'belongs_to', @{$all_code->{$link_class}}; next unless @rels == 2; my @class; foreach my $this (0, 1) { my $that = $this ? 
0 : 1; my %class; $class[$this] = \%class; $class{local_moniker} = $rels[$this]{extra}{remote_moniker}; $class{remote_moniker} = $rels[$that]{extra}{remote_moniker}; $class{class} = $rels[$this]{args}[1]; my %link_cols = map { $_ => 1 } apply { s/^self\.//i } values %{ $rels[$this]{args}[2] }; $class{link_table_rel} = first { $_->{method} eq 'has_many' and $_->{args}[1] eq $link_class and all { $link_cols{$_} } apply { s/^foreign\.//i } keys %{$_->{args}[2]} } @{ $all_code->{$class{class}} }; next LINK_CLASS unless $class{link_table_rel}; $class{link_table_rel_name} = $class{link_table_rel}{args}[0]; $class{link_rel} = $rels[$that]{args}[0]; $class{from_cols} = [ apply { s/^self\.//i } values %{ $class{link_table_rel}->{args}[2] } ]; $class{to_cols} = [ apply { s/^foreign\.//i } keys %{ $rels[$that]{args}[2] } ]; $class{from_link_cols} = [ apply { s/^self\.//i } values %{ $rels[$this]{args}[2] } ]; } my $link_moniker = $rels[0]{extra}{local_moniker}; my @link_table_cols = @{[ $self->schema->source($link_moniker)->columns ]}; my @link_table_primary_cols = @{[ $self->schema->source($link_moniker)->primary_columns ]}; next unless array_eq( [ sort +uniq @{$class[0]{from_link_cols}}, @{$class[1]{from_link_cols}} ], [ sort @link_table_primary_cols ], ) && ($self->allow_extra_m2m_cols || @link_table_cols == @link_table_primary_cols); foreach my $this (0, 1) { my $that = $this ? 0 : 1; ($class[$this]{m2m_relname}) = $self->_rel_name_map( ($self->_inflect_plural($class[$this]{link_rel}))[0], 'many_to_many', @{$class[$this]}{qw(class local_moniker from_cols)}, $class[$that]{class}, @{$class[$this]}{qw(remote_moniker to_cols)}, { link_class => $link_class, link_moniker => $link_moniker, link_rel_name => $class[$this]{link_table_rel_name}, }, ); $class[$this]{m2m_relname} = $self->_resolve_relname_collision( @{$class[$this]}{qw(local_moniker from_cols m2m_relname)}, ); } for my $this (0, 1) { my $that = $this ? 
0 : 1; push @{$all_code->{$class[$this]{class}}}, { method => 'many_to_many', args => [ @{$class[$this]}{qw(m2m_relname link_table_rel_name link_rel)}, $self->_relationship_attrs('many_to_many', {}, { rel_type => 'many_to_many', rel_name => $class[$this]{class2_relname}, local_source => $self->schema->source($class[$this]{local_moniker}), remote_source => $self->schema->source($class[$this]{remote_moniker}), local_table => $self->loader->class_to_table->{$class[$this]{class}}, local_cols => $class[$this]{from_cols}, remote_table => $self->loader->class_to_table->{$class[$that]{class}}, remote_cols => $class[$that]{from_cols}, }) || (), ], extra => { local_class => $class[$this]{class}, link_class => $link_class, local_moniker => $class[$this]{local_moniker}, remote_moniker => $class[$this]{remote_moniker}, }, }; } } } sub _duplicates { my ($self, $rels) = @_; my @rels = map [ $_->{args}[0] => $_ ], @$rels; my %rel_names; $rel_names{$_}++ foreach map $_->[0], @rels; my @dups = grep $rel_names{$_} > 1, keys %rel_names; my %dups; foreach my $dup (@dups) { $dups{$dup} = [ map $_->[1], grep { $_->[0] eq $dup } @rels ]; } return if not %dups; return \%dups; } sub _tagger { my $self = shift; $self->__tagger(Lingua::EN::Tagger->new) unless $self->__tagger; return $self->__tagger; } sub _adjectives { my ($self, @cols) = @_; my @adjectives; foreach my $col (@cols) { my @words = split_name $col; my $tagged = $self->_tagger->get_readable(join ' ', @words); push @adjectives, $tagged =~ m{\G(\w+)/JJ\s+}g; } return @adjectives; } sub _name_to_identifier { my ($self, $name) = @_; my $to_identifier = $self->loader->naming->{force_ascii} ? 
\&String::ToIdentifier::EN::to_identifier : \&String::ToIdentifier::EN::Unicode::to_identifier; return join '_', map lc, split_name $to_identifier->($name, '_'); } sub _disambiguate { my ($self, $all_code, $in_class, $dups) = @_; DUP: foreach my $dup (keys %$dups) { my @rels = @{ $dups->{$dup} }; # Check if there are rels to the same table name in different # schemas/databases, if so qualify them. my @tables = map $self->loader->moniker_to_table->{$_->{extra}{remote_moniker}}, @rels; # databases are different, prepend database if ($tables[0]->can('database') && (uniq map $_->database||'', @tables) > 1) { # If any rels are in the same database, we have to distinguish by # both schema and database. my %db_counts; $db_counts{$_}++ for map $_->database, @tables; my $use_schema = any { $_ > 1 } values %db_counts; foreach my $i (0..$#rels) { my $rel = $rels[$i]; my $table = $tables[$i]; $rel->{args}[0] = $self->_name_to_identifier($table->database) . ($use_schema ? ('_' . $self->name_to_identifier($table->schema)) : '') . '_' . $rel->{args}[0]; } next DUP; } # schemas are different, prepend schema elsif ((uniq map $_->schema||'', @tables) > 1) { foreach my $i (0..$#rels) { my $rel = $rels[$i]; my $table = $tables[$i]; $rel->{args}[0] = $self->_name_to_identifier($table->schema) . '_' . $rel->{args}[0]; } next DUP; } foreach my $rel (@rels) { next if $rel->{method} =~ /^(?:belongs_to|many_to_many)\z/; my @to_cols = apply { s/^foreign\.//i } keys %{ $rel->{args}[2] }; my @adjectives = $self->_adjectives(@to_cols); # If there are no adjectives, and there is only one might_have # rel to that class, we hardcode 'active'. my $to_class = $rel->{args}[1]; if ((not @adjectives) && (grep { $_->{method} eq 'might_have' && $_->{args}[1] eq $to_class } @{ $all_code->{$in_class} }) == 1) { @adjectives = 'active'; } if (@adjectives) { my $rel_name = join '_', sort(@adjectives), $rel->{args}[0]; ($rel_name) = $rel->{method} eq 'might_have' ? 
$self->_inflect_singular($rel_name) : $self->_inflect_plural($rel_name); my ($local_class, $local_moniker, $remote_moniker) = @{ $rel->{extra} } {qw/local_class local_moniker remote_moniker/}; my @from_cols = apply { s/^self\.//i } values %{ $rel->{args}[2] }; ($rel_name) = $self->_rel_name_map($rel_name, $rel->{method}, $local_class, $local_moniker, \@from_cols, $to_class, $remote_moniker, \@to_cols); $rel_name = $self->_resolve_relname_collision($local_moniker, \@from_cols, $rel_name); $rel->{args}[0] = $rel_name; } } } # Check again for duplicates, since the heuristics above may not have resolved them all. if ($dups = $self->_duplicates($all_code->{$in_class})) { foreach my $dup (keys %$dups) { # sort by method my @rels = map $_->[1], sort { $a->[0] <=> $b->[0] } map [ { belongs_to => 3, has_many => 2, might_have => 1, many_to_many => 0, }->{$_->{method}}, $_ ], @{ $dups->{$dup} }; my $rel_num = 2; foreach my $rel (@rels[1 .. $#rels]) { my $inflect_type = $rel->{method} =~ /^(?:many_to_many|has_many)\z/ ? 'inflect_plural' : 'inflect_singular'; my $inflect_method = "_$inflect_type"; my $relname_new_uninflected = $rel->{args}[0] . 
"_$rel_num"; $rel_num++; my ($local_class, $local_moniker, $remote_moniker) = @{ $rel->{extra} } {qw/local_class local_moniker remote_moniker/}; my (@from_cols, @to_cols, $to_class); if ($rel->{method} eq 'many_to_many') { @from_cols = apply { s/^self\.//i } values %{ (first { $_->{args}[0] eq $rel->{args}[1] } @{ $all_code->{$local_class} }) ->{args}[2] }; @to_cols = apply { s/^foreign\.//i } keys %{ (first { $_->{args}[0] eq $rel->{args}[2] } @{ $all_code->{ $rel->{extra}{link_class} } }) ->{args}[2] }; $to_class = $self->schema->source($remote_moniker)->result_class; } else { @from_cols = apply { s/^self\.//i } values %{ $rel->{args}[2] }; @to_cols = apply { s/^foreign\.//i } keys %{ $rel->{args}[2] }; $to_class = $rel->{args}[1]; } my ($relname_new, $inflect_mapped) = $self->$inflect_method($relname_new_uninflected); my $rel_name_mapped; ($relname_new, $rel_name_mapped) = $self->_rel_name_map($relname_new, $rel->{method}, $local_class, $local_moniker, \@from_cols, $to_class, $remote_moniker, \@to_cols); my $mapped = $inflect_mapped || $rel_name_mapped; warn <<"EOF" unless $mapped; Could not find a proper name for relationship '$relname_new' in source '$local_moniker' for columns '@{[ join ',', @from_cols ]}'. Supply a value in '$inflect_type' for '$relname_new_uninflected' or 'rel_name_map' for '$relname_new' to name this relationship. 
EOF $relname_new = $self->_resolve_relname_collision($local_moniker, \@from_cols, $relname_new); $rel->{args}[0] = $relname_new; } } } } sub _relnames_and_method { my ( $self, $local_moniker, $rel, $cond, $uniqs, $counters ) = @_; my $remote_moniker = $rel->{remote_source}; my $remote_obj = $self->schema->source( $remote_moniker ); my $remote_class = $self->schema->class( $remote_moniker ); my $local_relname = $self->_local_relname( $rel->{remote_table}, $cond); my $local_cols = $rel->{local_columns}; my $local_table = $rel->{local_table}; my $local_class = $self->schema->class($local_moniker); my $local_source = $self->schema->source($local_moniker); my $remote_relname_uninflected = $self->_normalize_name($local_table); my ($remote_relname) = $self->_inflect_plural($self->_normalize_name($local_table)); my $remote_method = 'has_many'; # If the local columns have a UNIQUE constraint, this is a one-to-one rel if (array_eq([ $local_source->primary_columns ], $local_cols) || first { array_eq($_->[1], $local_cols) } @$uniqs) { $remote_method = 'might_have'; ($remote_relname) = $self->_inflect_singular($remote_relname_uninflected); } # If more than one rel between this pair of tables, use the local # col names to distinguish, unless the rel was created previously. 
if ($counters->{$remote_moniker} > 1) { my $relationship_exists = 0; if (-f (my $existing_remote_file = $self->loader->get_dump_filename($remote_class))) { my $class = "${remote_class}Temporary"; if (not Class::Inspector->loaded($class)) { my $code = slurp_file $existing_remote_file; $code =~ s/(?<=package $remote_class)/Temporary/g; $code =~ s/__PACKAGE__->meta->make_immutable[^;]*;//g; eval $code; die $@ if $@; push @{ $self->_temp_classes }, $class; } if ($class->has_relationship($remote_relname)) { my $rel_cols = [ sort { $a cmp $b } apply { s/^foreign\.//i } (keys %{ $class->relationship_info($remote_relname)->{cond} }) ]; $relationship_exists = 1 if array_eq([ sort @$local_cols ], $rel_cols); } } if (not $relationship_exists) { my $colnames = q{_} . $self->_normalize_name(join '_', @$local_cols); $local_relname .= $colnames if keys %$cond > 1; $remote_relname = $self->_strip_id_postfix($self->_normalize_name($local_table . $colnames)); $remote_relname_uninflected = $remote_relname; ($remote_relname) = $self->_inflect_plural($remote_relname); # if colnames were added and this is a might_have, re-inflect if ($remote_method eq 'might_have') { ($remote_relname) = $self->_inflect_singular($remote_relname_uninflected); } } } return ($local_relname, $remote_relname, $remote_method); } sub _rel_name_map { my ($self, $relname, $method, $local_class, $local_moniker, $local_cols, $remote_class, $remote_moniker, $remote_cols, $extra) = @_; my $info = { %{$extra || {}}, name => $relname, type => $method, local_class => $local_class, local_moniker => $local_moniker, local_columns => $local_cols, remote_class => $remote_class, remote_moniker => $remote_moniker, remote_columns => $remote_cols, }; $self->_run_user_map($self->rel_name_map, $info); } sub _run_user_map { my ($self, $map, $info) = @_; my $new_name = $info->{name}; my $mapped = 0; if ('HASH' eq ref($map)) { my $name = $info->{name}; my $moniker = $info->{local_moniker}; if ($map->{$moniker} and 'HASH' eq 
ref($map->{$moniker}) and $map->{$moniker}{$name} ) { $new_name = $map->{$moniker}{$name}; $mapped = 1; } elsif ($map->{$name} and not 'HASH' eq ref($map->{$name})) { $new_name = $map->{$name}; $mapped = 1; } } elsif ('CODE' eq ref($map)) { my $cb = sub { my ($cb_map) = @_; croak "reentered rel_name_map must be a hashref" unless 'HASH' eq ref($cb_map); my ($cb_name, $cb_mapped) = $self->_run_user_map($cb_map, $info); return $cb_mapped && $cb_name; }; my $name = $map->($info, $cb); if ($name) { $new_name = $name; $mapped = 1; } } return ($new_name, $mapped); } sub _cleanup { my $self = shift; for my $class (@{ $self->_temp_classes }) { Class::Unload->unload($class); } $self->_temp_classes([]); } =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/Utils.pm0000644000175000017500000001065712517670775024137 0ustar ilmariilmaripackage # hide from PAUSE DBIx::Class::Schema::Loader::Utils; use strict; use warnings; use Test::More; use String::CamelCase 'wordsplit'; use Carp::Clan qw/^DBIx::Class/; use List::Util 'all'; use namespace::clean; use Exporter 'import'; use Data::Dumper (); our @EXPORT_OK = qw/split_name dumper dumper_squashed eval_package_without_redefine_warnings class_path no_warnings warnings_exist warnings_exist_silent slurp_file write_file array_eq sigwarn_silencer apply firstidx uniq/; use constant BY_CASE_TRANSITION_V7 => qr/(?<=[[:lower:]\d])[\W_]*(?=[[:upper:]])|[\W_]+/; use constant BY_NON_ALPHANUM => qr/[\W_]+/; my $LF = "\x0a"; my $CRLF = "\x0d\x0a"; sub split_name($;$) { my ($name, $v) = @_; my $is_camel_case = $name =~ /[[:upper:]]/ && $name =~ /[[:lower:]]/; if ((not $v) || $v >= 8) { return map split(BY_NON_ALPHANUM, $_), wordsplit($name); } return split $is_camel_case ? 
BY_CASE_TRANSITION_V7 : BY_NON_ALPHANUM, $name; } sub dumper($) { my $val = shift; my $dd = Data::Dumper->new([]); $dd->Terse(1)->Indent(1)->Useqq(1)->Deparse(1)->Quotekeys(0)->Sortkeys(1); return $dd->Values([ $val ])->Dump; } sub dumper_squashed($) { my $val = shift; my $dd = Data::Dumper->new([]); $dd->Terse(1)->Indent(1)->Useqq(1)->Deparse(1)->Quotekeys(0)->Sortkeys(1)->Indent(0); return $dd->Values([ $val ])->Dump; } # copied from DBIx::Class::_Util, import from there once it's released sub sigwarn_silencer { my $pattern = shift; croak "Expecting a regexp" if ref $pattern ne 'Regexp'; my $orig_sig_warn = $SIG{__WARN__} || sub { CORE::warn(@_) }; return sub { &$orig_sig_warn unless $_[0] =~ $pattern }; } # Copied with stylistic adjustments from List::MoreUtils::PP sub firstidx (&@) { my $f = shift; foreach my $i (0..$#_) { local *_ = \$_[$i]; return $i if $f->(); } return -1; } sub uniq (@) { my %seen = (); grep { not $seen{$_}++ } @_; } sub apply (&@) { my $action = shift; $action->() foreach my @values = @_; wantarray ? @values : $values[-1]; } sub eval_package_without_redefine_warnings { my ($pkg, $code) = @_; local $SIG{__WARN__} = sigwarn_silencer(qr/^Subroutine \S+ redefined/); # This hairiness is to handle people using "use warnings FATAL => 'all';" # in their custom or external content. 
my @delete_syms; my $try_again = 1; while ($try_again) { eval $code; if (my ($sym) = $@ =~ /^Subroutine (\S+) redefined/) { delete $INC{ +class_path($pkg) }; push @delete_syms, $sym; foreach my $sym (@delete_syms) { no strict 'refs'; undef *{"${pkg}::${sym}"}; } } elsif ($@) { die $@ if $@; } else { $try_again = 0; } } } sub class_path { my $class = shift; my $class_path = $class; $class_path =~ s{::}{/}g; $class_path .= '.pm'; return $class_path; } sub no_warnings(&;$) { my ($code, $test_name) = @_; my $failed = 0; my $warn_handler = $SIG{__WARN__} || sub { warn @_ }; local $SIG{__WARN__} = sub { $failed = 1; $warn_handler->(@_); }; $code->(); ok ((not $failed), $test_name); } sub warnings_exist(&$$) { my ($code, $re, $test_name) = @_; my $matched = 0; my $warn_handler = $SIG{__WARN__} || sub { warn @_ }; local $SIG{__WARN__} = sub { if ($_[0] =~ $re) { $matched = 1; } else { $warn_handler->(@_) } }; $code->(); ok $matched, $test_name; } sub warnings_exist_silent(&$$) { my ($code, $re, $test_name) = @_; my $matched = 0; local $SIG{__WARN__} = sub { $matched = 1 if $_[0] =~ $re; }; $code->(); ok $matched, $test_name; } sub slurp_file($) { my $file_name = shift; open my $fh, '<:encoding(UTF-8)', $file_name, or croak "Can't open '$file_name' for reading: $!"; my $data = do { local $/; <$fh> }; close $fh; $data =~ s/$CRLF|$LF/\n/g; return $data; } sub write_file($$) { my $file_name = shift; open my $fh, '>:encoding(UTF-8)', $file_name, or croak "Can't open '$file_name' for writing: $!"; print $fh shift; close $fh; } sub array_eq($$) { no warnings 'uninitialized'; my ($l, $r) = @_; return @$l == @$r && all { $l->[$_] eq $r->[$_] } 0..$#$l; } 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/Table.pm0000644000175000017500000000127612542756321024051 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::Table; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::DBObject'; use mro 'c3'; =head1 NAME 
DBIx::Class::Schema::Loader::Table - Class for Tables in L =head1 DESCRIPTION Inherits from L. Stringifies to C<< $table->name >>. =head1 SEE ALSO L, L, L =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/RelBuilder/0000755000175000017500000000000012650450355024505 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/RelBuilder/Compat/0000755000175000017500000000000012650450355025730 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/RelBuilder/Compat/v0_05.pm0000644000175000017500000000510112650450246027113 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::RelBuilder::Compat::v0_05; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::RelBuilder::Compat::v0_06'; use mro 'c3'; use DBIx::Class::Schema::Loader::Utils 'array_eq'; use namespace::clean; use Lingua::EN::Inflect::Number (); our $VERSION = '0.07045'; sub _to_PL { my ($self, $name) = @_; return Lingua::EN::Inflect::Number::to_PL($name); } sub _to_S { my ($self, $name) = @_; return Lingua::EN::Inflect::Number::to_S($name); } sub _default_relationship_attrs { +{} } sub _relnames_and_method { my ( $self, $local_moniker, $rel, $cond, $uniqs, $counters ) = @_; my $remote_moniker = $rel->{remote_source}; my $remote_obj = $self->{schema}->source( $remote_moniker ); my $remote_class = $self->{schema}->class( $remote_moniker ); my $local_relname = $self->_local_relname( $rel->{remote_table}, $cond); my $local_cols = $rel->{local_columns}; my $local_table = $rel->{local_table}; # If more than one rel between this pair of tables, use the local # col names to distinguish my ($remote_relname, $remote_relname_uninflected); if ( $counters->{$remote_moniker} > 1) { my $colnames = lc(q{_} . 
join(q{_}, map lc($_), @$local_cols)); $local_relname .= $colnames if keys %$cond > 1; $remote_relname = lc($local_table) . $colnames; $remote_relname_uninflected = $remote_relname; ($remote_relname) = $self->_inflect_plural( $remote_relname ); } else { $remote_relname_uninflected = lc $local_table; ($remote_relname) = $self->_inflect_plural(lc $local_table); } my $remote_method = 'has_many'; # If the local columns have a UNIQUE constraint, this is a one-to-one rel my $local_source = $self->{schema}->source($local_moniker); if (array_eq([ $local_source->primary_columns ], $local_cols) || grep { array_eq($_->[1], $local_cols) } @$uniqs) { $remote_method = 'might_have'; ($remote_relname) = $self->_inflect_singular($remote_relname_uninflected); } return ( $local_relname, $remote_relname, $remote_method ); } =head1 NAME DBIx::Class::Schema::Loader::RelBuilder::Compat::v0_05 - RelBuilder for compatibility with DBIx::Class::Schema::Loader version 0.05003 =head1 DESCRIPTION See L and L. =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/RelBuilder/Compat/v0_040.pm0000644000175000017500000000331712650450246027201 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::RelBuilder::Compat::v0_040; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::RelBuilder::Compat::v0_05'; use mro 'c3'; our $VERSION = '0.07045'; sub _relnames_and_method { my ( $self, $local_moniker, $rel, $cond, $uniqs, $counters ) = @_; my $remote_moniker = $rel->{remote_source}; my $remote_table = $rel->{remote_table}; my $local_table = $rel->{local_table}; my $local_cols = $rel->{local_columns}; # for single-column case, set the remote relname to just the column name my ($local_relname) = scalar keys %{$cond} == 1 ? 
$self->_inflect_singular( values %$cond ) : $self->_inflect_singular( lc $remote_table ); # If more than one rel between this pair of tables, use the local # col names to distinguish my $remote_relname; if ($counters->{$remote_moniker} > 1) { my $colnames = '_' . join( '_', @$local_cols ); $local_relname .= $colnames if keys %$cond > 1; ($remote_relname) = $self->_inflect_plural( lc($local_table) . $colnames ); } else { ($remote_relname) = $self->_inflect_plural(lc $local_table); } return ( $local_relname, $remote_relname, 'has_many' ); } sub _remote_attrs { } =head1 NAME DBIx::Class::Schema::Loader::RelBuilder::Compat::v0_040 - RelBuilder for compatibility with DBIx::Class::Schema::Loader version 0.04006 =head1 DESCRIPTION See L and L. =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/RelBuilder/Compat/v0_06.pm0000644000175000017500000000144612650450246027124 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::RelBuilder::Compat::v0_06; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::RelBuilder::Compat::v0_07'; use mro 'c3'; our $VERSION = '0.07045'; sub _normalize_name { my ($self, $name) = @_; $name = $self->_sanitize_name($name); return lc $name; } =head1 NAME DBIx::Class::Schema::Loader::RelBuilder::Compat::v0_06 - RelBuilder for compatibility with DBIx::Class::Schema::Loader version 0.06000 =head1 DESCRIPTION See L and L. =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/lib/DBIx/Class/Schema/Loader/RelBuilder/Compat/v0_07.pm0000644000175000017500000000141212650450246027116 0ustar ilmariilmaripackage DBIx::Class::Schema::Loader::RelBuilder::Compat::v0_07; use strict; use warnings; use base 'DBIx::Class::Schema::Loader::RelBuilder'; use mro 'c3'; =head1 NAME DBIx::Class::Schema::Loader::RelBuilder::Compat::v0_07 - RelBuilder for compatibility with DBIx::Class::Schema::Loader version 0.07000 =head1 DESCRIPTION See L and L. =cut our $VERSION = '0.07045'; sub _strip_id_postfix { my ($self, $name) = @_; $name =~ s/_id\z//; return $name; } =head1 AUTHORS See L. =head1 LICENSE This library is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; # vim:et sts=4 sw=4 tw=0: DBIx-Class-Schema-Loader-0.07045/script/0000755000175000017500000000000012650450355016751 5ustar ilmariilmariDBIx-Class-Schema-Loader-0.07045/script/dbicdump0000644000175000017500000001204412542756321020466 0ustar ilmariilmari#!/usr/bin/perl =encoding UTF-8 =head1 NAME dbicdump - Dump a schema using DBIx::Class::Schema::Loader =head1 SYNOPSIS dbicdump dbicdump [-I ] [-o = ] \ Examples: $ dbicdump schema.conf $ dbicdump -o dump_directory=./lib \ -o components='["InflateColumn::DateTime"]' \ MyApp::Schema dbi:SQLite:./foo.db $ dbicdump -o dump_directory=./lib \ -o components='["InflateColumn::DateTime"]' \ MyApp::Schema dbi:SQLite:./foo.db '{ quote_char => "\"" }' $ dbicdump -Ilib -o dump_directory=./lib \ -o components='["InflateColumn::DateTime"]' \ -o preserve_case=1 \ MyApp::Schema dbi:mysql:database=foo user pass \ '{ quote_char => "`" }' $ dbicdump -o dump_directory=./lib \ -o components='["InflateColumn::DateTime"]' \ MyApp::Schema 'dbi:mysql:database=foo;host=domain.tld;port=3306' \ user pass On Windows that would be: $ dbicdump -o dump_directory=.\lib ^ -o components="[q{InflateColumn::DateTime}]" ^ -o preserve_case=1 ^ MyApp::Schema 
dbi:mysql:database=foo user pass ^ "{ quote_char => q{`} }" Configuration files must have schema_class and connect_info sections, an example of a general config file is as follows: schema_class MyApp::Schema lib /extra/perl/libs # connection string dsn dbi:mysql:example user root pass secret # dbic loader options dump_directory ./lib components InflateColumn::DateTime components TimeStamp Using a config file requires L installed. The optional C key is equivalent to the C<-I> option. =head1 DESCRIPTION Dbicdump generates a L schema using L and dumps it to disk. You can pass any L constructor option using C<< -o