Wiki-Toolkit-0.83/0000755000175000017500000000000012243760534013666 5ustar vagrantvagrantWiki-Toolkit-0.83/META.json0000664000175000017500000000205512243760523015311 0ustar vagrantvagrant{ "abstract" : "unknown", "author" : [ "unknown" ], "dynamic_config" : 1, "generated_by" : "ExtUtils::MakeMaker version 6.82, CPAN::Meta::Converter version 2.120351", "license" : [ "unknown" ], "meta-spec" : { "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec", "version" : "2" }, "name" : "Wiki-Toolkit", "no_index" : { "directory" : [ "t", "inc" ] }, "prereqs" : { "build" : { "requires" : { "ExtUtils::MakeMaker" : "0" } }, "configure" : { "requires" : { "ExtUtils::MakeMaker" : "0" } }, "runtime" : { "requires" : { "DBD::SQLite" : "0.25", "DBI" : "0", "Digest::MD5" : "0", "HTML::PullParser" : "0", "Plucene" : "1.19", "Test::More" : "0", "Text::WikiFormat" : "0.78", "Time::Piece" : "0" } } }, "release_status" : "stable", "version" : "0.83" } Wiki-Toolkit-0.83/bin/0000755000175000017500000000000012243760523014434 5ustar vagrantvagrantWiki-Toolkit-0.83/bin/wiki-toolkit-revert-to-date0000755000175000017500000001260112243757607021661 0ustar vagrantvagrant#!/usr/bin/perl -w use strict; use Getopt::Long; my ($dbtype, $dbname, $dbuser, $dbpass, $dbhost, $dbport, $help, $date, $time ); GetOptions( "type=s" => \$dbtype, "name=s" => \$dbname, "user=s" => \$dbuser, "pass=s" => \$dbpass, "host=s" => \$dbhost, "port=s" => \$dbport, "help" => \$help, "date=s" => \$date, "time=s" => \$time, ); unless (defined($dbtype)) { print "You must supply a database type with the --type option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } unless (defined($dbname)) { print "You must supply a database name with the --name option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } unless (defined($date)) { print "You must supply the date with the --date option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } unless ($date =~ /^\d{4}\-\d{2}\-\d{2}$/) { print "You must supply the date with --date in the format YYYY-MM-DD.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } unless (!$time || $time =~ /^\d{2}:\d{2}:\d{2}$/) { print "You must supply either no time, or the time in the format HH:MM:SS.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } if ($help) { print "Help can be found by typing 'perldoc $0'\n"; exit 0; } my %setup_modules = ( postgres => "Wiki::Toolkit::Store::Pg", mysql => "Wiki::Toolkit::Store::MySQL", sqlite => "Wiki::Toolkit::Store::SQLite" ); unless ( defined($setup_modules{$dbtype}) ) { print "dbtype must be one of 'postgres', 'mysql', and 'sqlite'\n"; print "further help can be found by typing 'perldoc $0'\n"; exit 1; } # Load classes require Wiki::Toolkit; my $class = $setup_modules{$dbtype}; eval "require $class"; if ( $@ ) { print "Couldn't 'use' $class: $@\n"; exit 1; } # Create a store instance my $store; my $args = "dbname=>'$dbname', dbuser=>'$dbuser'"; if($dbpass) { $args .= ", dbpass=>'$dbpass'"; } if($dbhost) { $args .= ", dbhost=>'$dbhost'"; } if($dbport) { $args .= ", dbport=>'$dbport'"; } eval "\$store = $class->new($args);"; # Create a Wiki instance my $wiki = Wiki::Toolkit->new(store=>$store); # Grab the state as of then if($time) { $date .= " ".$time; } print "Reverting to the state as of $date\n"; my @nodes = $wiki->list_last_version_before($date); foreach my $node (@nodes) { my %newnode = $wiki->retrieve_node($node->{name}); my $thenver = $node->{version}; if($thenver) { 
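# Zero-pad the version this node had at the target date for display, e.g. 'v02'.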
$thenver = sprintf("v%02d", $thenver); } else { $thenver = "(d)"; } print sprintf(' %03d - %s (now v%02d) - %s', $node->{id}, $thenver, $newnode{version}, $node->{name})."\n"; } print "\nProceed? (y/n)\n"; my $ok = <>; chomp $ok; unless($ok eq "y") { die("Aborting revert\n"); } # Revert each node foreach my $node (@nodes) { if($node->{version}) { # Delete versions between now and then my %newnode = $wiki->retrieve_node($node->{name}); for (my $ver=$newnode{version}; $ver>$node->{version}; $ver--) { $wiki->delete_node( name=>$node->{name}, version=>$ver, wiki=>$wiki ); print sprintf('Deleted node v%02d of %03d - %s',$ver, $node->{id},$node->{name})."\n"; } } else { # No version then, delete $wiki->delete_node( name=>$node->{name}, wiki=>$wiki ); print sprintf('Deleted node %03d - %s',$node->{id},$node->{name})."\n"; } } # All done print "\nDone revert to $date\n"; =head1 NAME wiki-toolkit-revert-to-date - Revert the state of a Wiki::Toolkit instance to an earlier point in time. =head1 SYNOPSIS # Removes any changes made to a Wiki::Toolkit instance since a given date # (and optionally time), restoring it to the state at that point. wiki-toolkit-revert-to-date --type postgres --name mywiki \ --user wiki \ --pass wiki \ --host 'db.example.com' \ --port 1234 \ --date 2007-01-05 \ --time 11:23:21 =head1 DESCRIPTION Takes three mandatory arguments: =over 4 =item type The database type. Should be one of 'postgres', 'mysql' and 'sqlite'. =item name The database name. =item date The date to revert the state back to, in the format YYYY-MM-DD =back five optional arguments: =over 4 =item time The time (on the specified date) to revert the state back to, in the format hh:mm:ss. If not specified, will use midnight. =item user The user that connects to the database. It must have permission to create and drop tables in the database. =item pass The user's database password. =item host The hostname of the machine the database server is running on (omit for local databases). =item port The port number that the database server is expecting connections to. =back =head1 AUTHOR Nick Burch =head1 COPYRIGHT Copyright (C) 2006 Nick Burch. All Rights Reserved. This code is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=head1 SEE ALSO L =cut 1; Wiki-Toolkit-0.83/bin/wiki-toolkit-setupdb0000755000175000017500000000645412243757607020476 0ustar vagrantvagrant#!/usr/bin/perl -w use strict; use Getopt::Long; my ($dbtype, $dbname, $dbuser, $dbpass, $dbhost, $help, $preclear); GetOptions( "type=s" => \$dbtype, "name=s" => \$dbname, "user=s" => \$dbuser, "pass=s" => \$dbpass, "host=s" => \$dbhost, "help" => \$help, "force-preclear" => \$preclear ); unless (defined($dbtype)) { print "You must supply a database type with the --type option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } unless (defined($dbname)) { print "You must supply a database name with the --name option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } if ($help) { print "Help can be found by typing 'perldoc $0'\n"; exit 0; } my %setup_modules = ( postgres => "Wiki::Toolkit::Setup::Pg", mysql => "Wiki::Toolkit::Setup::MySQL", sqlite => "Wiki::Toolkit::Setup::SQLite" ); unless ( defined($setup_modules{$dbtype}) ) { print "dbtype must be one of 'postgres', 'mysql', and 'sqlite'\n"; print "further help can be found by typing 'perldoc $0'\n"; exit 1; } my $class = $setup_modules{$dbtype}; eval "require $class"; if ( $@ ) { print "Couldn't 'use' $class: $@\n"; exit 1; } if ($preclear) { no strict 'refs'; &{$class."::cleardb"}($dbname, $dbuser, $dbpass, $dbhost); } { no strict 'refs'; &{$class."::setup"}($dbname, $dbuser, $dbpass, $dbhost); } =head1 NAME wiki-toolkit-setupdb - Set up a database storage backend for Wiki::Toolkit. =head1 SYNOPSIS # Set up or update the storage backend, leaving any existing data # intact. Useful for upgrading from old versions of Wiki::Toolkit to # newer ones with more backend features. wiki-toolkit-setupdb --type postgres --name mywiki \ --user wiki \ --pass wiki \ --host 'db.example.com' # Clear out any existing data and set up a fresh backend from scratch. wiki-toolkit-setupdb --type postgres --name mywiki \ --user wiki \ --pass wiki \ --force-preclear =head1 DESCRIPTION Takes three mandatory arguments: =over 4 =item type The database type. Should be one of 'postgres', 'mysql' and 'sqlite'. =item name The database name. =item user The user that connects to the database. It must have permission to create and drop tables in the database. =back two optional arguments: =over 4 =item pass The user's database password. =item host The hostname of the machine the database server is running on (omit for local databases). =back and one optional flag: =over 4 =item force-preclear By default, this script will leave any existing data alone. To force that to be cleared out first, pass the C<--force-preclear> flag. =back =head1 AUTHOR Kake Pugh (kake@earth.li). =head1 COPYRIGHT Copyright (C) 2002-2003 Kake Pugh. All Rights Reserved. Copyright (C) 2006 the Wiki::Toolkit team. All Rights Reserved. This code is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=head1 SEE ALSO L<Wiki::Toolkit> =cut 1; Wiki-Toolkit-0.83/bin/wiki-toolkit-rename-node0000755000175000017500000000733712243757607021213 0ustar vagrantvagrant#!/usr/bin/perl -w use strict; use Getopt::Long; my ($dbtype, $dbname, $dbuser, $dbpass, $dbhost, $dbport, $help, $oldname, $newname); GetOptions( "type=s" => \$dbtype, "name=s" => \$dbname, "user=s" => \$dbuser, "pass=s" => \$dbpass, "host=s" => \$dbhost, "port=s" => \$dbport, "help" => \$help, "oldname=s" => \$oldname, "newname=s" => \$newname, ); unless (defined($dbtype)) { print "You must supply a database type with the --type option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } unless (defined($dbname)) { print "You must supply a database name with the --name option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } unless (defined($oldname)) { print "You must supply the old node name with the --oldname option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } unless (defined($newname)) { print "You must supply the new node name with the --newname option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } if ($help) { print "Help can be found by typing 'perldoc $0'\n"; exit 0; } my %setup_modules = ( postgres => "Wiki::Toolkit::Store::Pg", mysql => "Wiki::Toolkit::Store::MySQL", sqlite => "Wiki::Toolkit::Store::SQLite" ); unless ( defined($setup_modules{$dbtype}) ) { print "dbtype must be one of 'postgres', 'mysql', and 'sqlite'\n"; print "further help can be found by typing 'perldoc $0'\n"; exit 1; } # Load classes require Wiki::Toolkit; my $class = $setup_modules{$dbtype}; eval "require $class"; if ( $@ ) { print "Couldn't 'use' $class: $@\n"; exit 1; } # Create a store instance my $store; my $args = "dbname=>'$dbname', dbuser=>'$dbuser'"; if($dbpass) { $args .= ", dbpass=>'$dbpass'"; } if($dbhost) { $args .= ", dbhost=>'$dbhost'"; } if($dbport) { $args .= ", dbport=>'$dbport'"; } eval "\$store = $class->new($args);"; # Create a Wiki instance my $wiki = Wiki::Toolkit->new(store=>$store); # Do the rename $wiki->rename_node(old_name=>$oldname, new_name=>$newname); # All done print "Renamed '$oldname' to '$newname'\n"; =head1 NAME wiki-toolkit-rename-node - Rename a node stored in a Wiki::Toolkit instance. =head1 SYNOPSIS # Rename a node in a Wiki::Toolkit instance, updating internal links # and references if the formatter supports link re-writing. wiki-toolkit-rename-node --type postgres --name mywiki \ --user wiki \ --pass wiki \ --host 'db.example.com' \ --port 1234 \ --oldname MyOldNodeName \ --newname FancyNewNodeName =head1 DESCRIPTION Takes four mandatory arguments: =over 4 =item type The database type. Should be one of 'postgres', 'mysql' and 'sqlite'. =item name The database name. =item oldname The name of the node to be renamed. =item newname The new name for the node. =back four optional arguments: =over 4 =item user The user that connects to the database. It must have permission to create and drop tables in the database. =item pass The user's database password. =item host The hostname of the machine the database server is running on (omit for local databases). =item port The port number that the database server is expecting connections on. =back =head1 AUTHOR Nick Burch =head1 COPYRIGHT Copyright (C) 2006 Nick Burch. All Rights Reserved. This code is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=head1 SEE ALSO L =cut 1; Wiki-Toolkit-0.83/bin/user-setup-mysql-dbixfts.pl0000755000175000017500000000257112243757607021732 0ustar vagrantvagrant#!/usr/bin/perl -w use strict; use Getopt::Long; use Wiki::Toolkit::Setup::DBIxFTSMySQL; my ($dbname, $dbuser, $dbpass, $help); GetOptions("name=s" => \$dbname, "user=s" => \$dbuser, "pass=s" => \$dbpass, "help" => \$help,); unless (defined($dbname)) { print "You must supply a database name with the --name option\n"; print "further help can be found by typing 'perldoc $0'\n"; exit 1; } if ($help) { print "Help can be found by typing 'perldoc $0'\n"; exit 0; } Wiki::Toolkit::Setup::DBIxFTSMySQL::setup($dbname, $dbuser, $dbpass); =head1 NAME user-setup-mysql-dbixfts - set up a DBIx::FullTextSearch backend for Wiki::Toolkit =head1 SYNOPSIS user-setup-mysql-dbixfts --name mywiki \ --user wiki \ --pass wiki \ =head1 DESCRIPTION Takes three arguments: =over 4 =item name The database name. =item user The user that connects to the database. It must have permission to create and drop tables in the database. =item pass The user's database password. =back =head1 AUTHOR Kake Pugh (kake@earth.li). =head1 COPYRIGHT Copyright (C) 2002 Kake Pugh. All Rights Reserved. This code is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =head1 SEE ALSO L, L =cut 1; Wiki-Toolkit-0.83/bin/wiki-toolkit-delete-node0000755000175000017500000001054212243757607021206 0ustar vagrantvagrant#!/usr/bin/perl -w use strict; use Getopt::Long; my ($dbtype, $dbname, $dbuser, $dbpass, $dbhost, $dbport, $help, $id, $node_name, $version); GetOptions( "type=s" => \$dbtype, "name=s" => \$dbname, "user=s" => \$dbuser, "pass=s" => \$dbpass, "host=s" => \$dbhost, "port=s" => \$dbport, "help" => \$help, "id=s" => \$id, "nodename=s" => \$node_name, "version=s" => \$version, ); unless (defined($dbtype)) { print "You must supply a database type with the --type option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } unless (defined($dbname)) { print "You must supply a database name with the --name option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } if(defined($id) and defined($node_name)) { print "You should supply either a node name, or an id, but not both.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } if(not defined($id) and not defined($node_name)) { print "You must supply the id of the node with the --id option,\n"; print " or the node name of the node with the --nodename option.\n"; print "Further help can be found by typing 'perldoc $0'\n"; exit 1; } if ($help) { print "Help can be found by typing 'perldoc $0'\n"; exit 0; } my %setup_modules = ( postgres => "Wiki::Toolkit::Store::Pg", mysql => "Wiki::Toolkit::Store::MySQL", sqlite => "Wiki::Toolkit::Store::SQLite" ); unless ( defined($setup_modules{$dbtype}) ) { print "dbtype must be one of 'postgres', 'mysql', and 'sqlite'\n"; print "further help can be found by typing 'perldoc $0'\n"; exit 1; } # Load classes require Wiki::Toolkit; my $class = $setup_modules{$dbtype}; eval "require $class"; if ( $@ ) { print "Couldn't 'use' $class: $@\n"; exit 1; } # Create a store instance my $store; my $args = "dbname=>'$dbname', dbuser=>'$dbuser'"; if($dbpass) { $args .= ", dbpass=>'$dbpass'"; } if($dbhost) { $args .= ", dbhost=>'$dbhost'"; } if($dbport) { $args .= ", dbport=>'$dbport'"; } eval "\$store = $class->new($args);"; # Create a Wiki instance my $wiki = Wiki::Toolkit->new(store=>$store); # If they gave the ID, get the 
name if($id) { $node_name = $wiki->store->node_name_for_id($id); unless($node_name) { die("No node found with id '$id'\n"); } } # Report what we're going to do print "Deleting node with name '$node_name'"; if($id) { print " (id $id)"; } if($version) { print " at version $version"; } print "\n"; # Do the delete $wiki->delete_node(name => $node_name, version => $version); # All done print "done.\n"; =head1 NAME wiki-toolkit-delete-node - Delete a node stored in a Wiki::Toolkit instance. =head1 SYNOPSIS # Delete a node in a Wiki::Toolkit instance wiki-toolkit-delete-node --type postgres --name mywiki \ --user wiki \ --pass wiki \ --host 'db.example.com' \ --port 1234 \ --nodename MyNodeName wiki-toolkit-delete-node --type postgres --name mywiki \ --user wiki \ --pass wiki \ --host 'db.example.com' \ --port 1234 \ --id 2 \ --version 7 =head1 DESCRIPTION Takes two mandatory arguments, plus exactly one of nodename and id: =over 4 =item type The database type. Should be one of 'postgres', 'mysql' and 'sqlite'. =item name The database name. =item nodename The name of the node to be deleted. =item id The id of the node to be deleted. =back five optional arguments: =over 4 =item user The user that connects to the database. It must have permission to create and drop tables in the database. =item pass The user's database password. =item host The hostname of the machine the database server is running on (omit for local databases). =item port The port number that the database server is expecting connections on. =item version The version number of the node to delete. =back =head1 AUTHOR Nick Burch =head1 COPYRIGHT Copyright (C) 2006 Nick Burch. All Rights Reserved. This code is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =head1 SEE ALSO L<Wiki::Toolkit> =cut 1; Wiki-Toolkit-0.83/INSTALL0000644000175000017500000001427112243757607014731 0ustar vagrantvagrantNOTES FOR UPGRADERS: I added an index to the metadata table in the postgres setup in version 0.31 and in the MySQL setup in version 0.40 - this really speeds up RecentChanges on larger wikis. See the 'Changes' file for details on applying the index to existing databases. I've not done any benchmarks on SQLite yet, so I'm leaving that alone for now. HOW TO INSTALL THIS: This module can be installed just like any other standard Perl module: perl Makefile.PL make make test make install You may wish to use your local version of 'make' (e.g. 'nmake' on Windows). On most systems 'make install' requires you to have root privileges. 'perl Makefile.PL' will ask a few questions about which databases it can use in the 'make test' stage - the default is to only run those tests that don't need external test databases. It is recommended that you test with as many backend combinations as your system will allow. ***************************************************** **** **** **** THESE TESTS ARE DESTRUCTIVE. **** **** DO NOT RUN THEM ON A DATABASE THAT CONTAINS **** **** OR EVER WILL CONTAIN LIVE DATA. **** **** **** ***************************************************** ***************************************************** **** **** **** THE DATABASE VALUES YOU GIVE HERE WILL BE **** **** STORED IN Wiki::Toolkit::TestConfig AND **** **** WILL BE USED FOR FUTURE INSTALLS OF THIS **** **** AND RELATED MODULES. **** **** **** ***************************************************** For noninteractive installation, or to provide defaults for interactive installation, set the following environment variables to suitable values. 
Leave them blank or explicitly set them to 'undef' if you don't want to test that particular backend. WIKI_TOOLKIT_MYSQL_DBNAME # If 'undef', following two variables are ignored WIKI_TOOLKIT_MYSQL_DBUSER WIKI_TOOLKIT_MYSQL_DBPASS WIKI_TOOLKIT_MYSQL_DBHOST # Leave blank if database is local WIKI_TOOLKIT_PG_DBNAME # If 'undef', following two variables are ignored WIKI_TOOLKIT_PG_DBUSER WIKI_TOOLKIT_PG_DBPASS WIKI_TOOLKIT_PG_DBHOST # Leave blank if database is local WIKI_TOOLKIT_DBIXFTS_MYSQL # NOTE: Any non-blank value other than 'undef' # or '0' signifies 'test this' Second and subsequent successful runs of 'perl Makefile.PL' will use the testing setup specified in the first run and hence will not ask questions or look for options in environment variables. To over-ride this behaviour and be asked the questions again, do 'perl Makefile.PL -s' or set the environment variable WIKI_TOOLKIT_RERUN_CONFIG to something non-blank. If you do this then your previous answers will be offered as defaults. When re-running configuration (ie with the '-s' flag or WIKI_TOOLKIT_RERUN_CONFIG set), explicit settings in environment variables will over-ride settings specified during previous runs. If you provide settings for a backend that you don't have the drivers installed for (DBD::mysql, DBD::Pg, DBIx::FullTextSearch) then they will be ignored. POST INSTALL: You'll want to run the wiki-toolkit-setupdb script to set up the initial database tables. This script will have been installed with the rest of the distribution. There is a hole in this documentation here. If you are upgrading from an earlier version of Wiki::Toolkit, and there has been no change to the database schema in the newer releases, wiki-toolkit-setupdb will do nothing. If there has been a change to the schema, it will leave your existing data alone but update the table schema. user-setup-mysql-dbixfts.pl This final script will, if used, set up DBIx::FullTextSearch indexes and attempt to index any existing data. Because of this, it must be run *after* user-setup-mysql.pl Note that installing Wiki::Toolkit *does* install wiki-toolkit-setupdb for you, but does *not* install user-setup-mysql-dbixfts.pl (this is mainly because the name is really ugly). TESTING NOTES: 'perl Makefile.PL' will ask some questions about which backends to run the tests for. You will need to create at least one test database before you run the tests, and your test user will need to be able to create and drop tables in that database. Postgres notes: You can create a database from the shell: shell$ createdb wiki_toolkit_test And give a user password access by editing pg_hba.conf (which lives somewhere like /var/lib/postgres/data/pg_hba.conf) to contain the line: local wiki_toolkit_test md5 Then in psql run something like: postgres=# create user wiki with password 'wiki'; You can give a database to a user by using psql to update the pg_database table -- the datdba UID comes from the pg_user table.) MySQL notes: Something like "grant all privileges on wiki_toolkit_test.* to wiki@localhost identified by 'wiki'" will create a suitable user and give it password access to, and all required privileges on the test database. 
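Before handing credentials to the (destructive) test suite, you may want to confirm that they actually let you connect. The following is only a sketch: it assumes DBI plus the relevant DBD driver are installed, and uses the example database/user/password names from the notes above - substitute your own values.

    #!/usr/bin/perl
    use strict;
    use warnings;
    use DBI;

    # Example values only - swap in your own test database details.
    my $dsn = "dbi:mysql:database=wiki_toolkit_test"; # or "dbi:Pg:dbname=wiki_toolkit_test"
    my $dbh = DBI->connect( $dsn, "wiki", "wiki", { RaiseError => 1 } )
        or die "Connection failed: $DBI::errstr";
    print "Connected OK - this database is reachable with these credentials.\n";
    $dbh->disconnect;
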
PREREQUISITES: The following modules *must* be installed before you try to build this: * DBI * Digest::MD5 * HTML::PullParser * Test::More * Test::Warn * Test::MockObject (version 0.07 or later) * Text::WikiFormat (version 0.45 or later) * Time::Piece You will also need the relevant DBD database driver module for whichever storage backend you plan to use, for example one of: * DBD::mysql * DBD::Pg * DBD::SQLite (version 0.21 or later) In addition, if you want to use the DBIx::FullTextSearch backend, you will need: * DBIx::FullTextSearch (version 0.71 or later) * Lingua::Stem For the Search::InvertedIndex backend you need: * Search::InvertedIndex And finally, the Postgres and SQLite backends require the following for testing purposes: * Hook::LexWrap Wiki-Toolkit-0.83/SIGNATURE0000664000175000017500000001710012243760534015153 0ustar vagrantvagrantThis file contains message digests of all files listed in MANIFEST, signed via the Module::Signature module, version 0.73. To verify the content in this distribution, first make sure you have Module::Signature installed, then type: % cpansign -v It will check each file's integrity, as well as the signature's validity. If "==> Signature verified OK! <==" is not displayed, the distribution may already have been compromised, and you should not run its Makefile.PL or Build.PL. -----BEGIN PGP SIGNED MESSAGE----- Hash: SHA1 SHA1 01c798449c7fe70eb92708f8661edd7cc68b3ee2 Changes SHA1 eef58741b82a1f029e62cc854755dfc2f4ada7b5 INSTALL SHA1 e61eb1cb36dce85df76485098efec4fdfcdcf5e6 MANIFEST SHA1 9c30128790059b6ba5b5d004520f830252a81da0 META.json SHA1 eeb85b085057ff9a29258d139497a29bef730c75 META.yml SHA1 a1187153911d49c7d72656d050eed633ee220cff Makefile.PL SHA1 0c94c05497c37c21a91b89eb94ba2ae2b6c9199d README SHA1 b609af99f3e2315da9a65c50f0b6744d61bcd0bc bin/user-setup-mysql-dbixfts.pl SHA1 e57ba0b4eaa405246ff6768507c2dff380e4988f bin/wiki-toolkit-delete-node SHA1 b0e09c8a7380cf28e0745f5067d199bf600297d8 bin/wiki-toolkit-rename-node SHA1 8c77175cffbc7941daa59e3d932d8dbca494a8e3 bin/wiki-toolkit-revert-to-date SHA1 53224377b164bfd46034f6e3749c96d9c7fe97ad bin/wiki-toolkit-setupdb SHA1 b9ca9b5aaea01cb13d167588a5106c1d45a2f80c lib/Wiki/Toolkit.pm SHA1 d94d5a76e824fa260e726fbcd256d7b8709d8f00 lib/Wiki/Toolkit/Extending.pod SHA1 c1fca2977999ad797368106c96aa7257b71e86b4 lib/Wiki/Toolkit/Feed/Atom.pm SHA1 9705573a60cb8ebc17dc7939c67207b52e44c722 lib/Wiki/Toolkit/Feed/Listing.pm SHA1 7e1f2d10851ecd7a5067d6361cdd0a39e3bc06f8 lib/Wiki/Toolkit/Feed/RSS.pm SHA1 58be3b135711f9abfea1c3715713730e1c8cddc8 lib/Wiki/Toolkit/Formatter/Default.pm SHA1 841470aa33e8287e436c6f4680b23a395bad2d43 lib/Wiki/Toolkit/Formatter/Multiple.pm SHA1 0a52d967a3065ce98dd7350d03f6407b19a6f1bc lib/Wiki/Toolkit/Formatter/WikiLinkFormatterParent.pm SHA1 977a1604f01d3ea74e95b90b3235fd6b25a3eae6 lib/Wiki/Toolkit/Plugin.pm SHA1 e76d7723c0826e2df9d3d218f1734346a01d8d43 lib/Wiki/Toolkit/Search/Base.pm SHA1 d66d0d743a74b2b5a86c251d6fd5a3d4c9777a3b lib/Wiki/Toolkit/Search/DBIxFTS.pm SHA1 1fae5553755f5b3d2186e94a26220c12c5d78d6d lib/Wiki/Toolkit/Search/Lucy.pm SHA1 0bcc09a90349b723f5d6c4a945ecf05c7b0f3cbc lib/Wiki/Toolkit/Search/Plucene.pm SHA1 9e4b253033b5e5546d1fc2ad208893beffe4b11f lib/Wiki/Toolkit/Search/SII.pm SHA1 1ca6436c912d89042e7c4fb6e91c2f5e782e1384 lib/Wiki/Toolkit/Setup/DBIxFTSMySQL.pm SHA1 84c0431f366bec41a136de78c54841400ff98f50 lib/Wiki/Toolkit/Setup/Database.pm SHA1 1e3bd15f46bfc7ff7ae4ca3f8c3cea6d3e8e6a8b lib/Wiki/Toolkit/Setup/MySQL.pm SHA1 9e0ddd7269cce1d5a5178724bd95968d0208f16a 
lib/Wiki/Toolkit/Setup/Pg.pm SHA1 bab05bcb78464ddd3fc5f3a115c379bc77891e78 lib/Wiki/Toolkit/Setup/SII.pm SHA1 fa38885ff7746c1b83864d05aeead2d4f28be2df lib/Wiki/Toolkit/Setup/SQLite.pm SHA1 a2464bd48b3c7e867337bd1d76fac063d601ec47 lib/Wiki/Toolkit/Store/Database.pm SHA1 24c27460c4e406b0c0152b9524f59cabaa7fdbfc lib/Wiki/Toolkit/Store/MySQL.pm SHA1 930fd48491c98860d76b8a54f07226ea0e601b35 lib/Wiki/Toolkit/Store/Pg.pm SHA1 87f87e00d4505653acffb0c60c2e547c4e496add lib/Wiki/Toolkit/Store/SQLite.pm SHA1 ffb913278dfdf3dcd218df1b8eef34a3b49bec8a lib/Wiki/Toolkit/TestConfig/Utilities.pm SHA1 728709ea39831912e55dc14829e470476a605a71 lib/Wiki/Toolkit/TestLib.pm SHA1 42e299ea6ea7e8196bed2a27b9c2ff1bedfdcfdd t/001_load.t SHA1 2a3dcdf422275c94003924b96528fd6cd36d9c32 t/002_datastore_setup.t SHA1 2297741bce71cd8019f073fa6300a378b7355764 t/003_instantiate.t SHA1 4aca7bc185dd9807958a02e77df3c3e562b953d3 t/004_write_and_retrieve.t SHA1 0b0ddffa885e2bdfa0dbdb34bbab0ccd724fd200 t/005_delete.t SHA1 762cb2b5dd64d47a7ad35506dbc0f2e82dd4bad7 t/006_list_all.t SHA1 0c88d41b9dd148ade30629506384796be15ac0db t/007_search.t SHA1 d58fbfdfaead5ff0b21eb9707d526559a5a46974 t/008_write_and_rewrite.t SHA1 cc04461a34b0da8cffe318f6cbd4da1a75d77ecc t/009_backlinks.t SHA1 db911060b3a3bb76800586b2af1ce1bb59e1f820 t/010_metadata.t SHA1 1f701b47ea49d98c01fc3330e65ec7128f94c97e t/011_recent_changes.t SHA1 9eaf665441379797837578ecc79e695b38047a49 t/012_blank_data.t SHA1 acc6c003e6cdfd847b80bbb47923ceebde79240d t/013_fuzzy_title_match.t SHA1 38a1f4e33bf54e5a27566fe334fcfd517b5aba1a t/014_advanced_metadata.t SHA1 ec866646127ffba2cf319d261cc27dd0e58b148d t/015_dangling_links.t SHA1 2b0dc6641c6e1b8290043f4c81cbb1cd90ff92b0 t/016_recent_changes_between.t SHA1 6d8b292281a044030eaab63c52d215b774a55a45 t/017_delete_version.t SHA1 056d69169f5d755695fbbd140db63ae1d043150b t/018_rc_multiple_metadata_criteria.t SHA1 a66ec66d0d6160a08ea8d07f516ebbe3e77f8bdc t/019_recent_changes_case.t SHA1 e78dffe59869cb7d3034a685d0505b0ff14e849b t/020_node_exists_case_insensitive.t SHA1 172804f543f304afe6599fbf341598ecd2a8ac48 t/021_moderation.t SHA1 a0a08d58894b033b3a05fc1401fdb60251e4931f t/022_list_unmoderated.t SHA1 26ed2cc986e5a250e9e4d844a9806076d1aeedac t/025_list_node_all_versions.t SHA1 e509921acbbc81ef9854a10d844e218fa733742d t/026_schema_current.t SHA1 50d38bca1b5181b797b3236247d4e571ae9d760a t/027_list_last_version_before.t SHA1 6af75767d3799b05020128a4b8102c2fc0113abc t/040_rename.t SHA1 7535dd0d99fe1061e83ecf0b6073bac3543741bf t/050_mysql_store.t SHA1 ece618af3160f836af18d9045dc90c2875f6210c t/051_pg_store.t SHA1 fef0ca79290c643f60f6635c28a5767e1338129c t/052_sqlite_store.t SHA1 bd2b42bf5dbc133b315d3490773506b76e1cf2a4 t/060_recent_changes_new_only.t SHA1 d39ec155e3e210f99823217390b8060e6aa429ed t/061_list_metadata_by_type.t SHA1 6a12f95b5ff292c842b918d014a3109cb9f9a3a9 t/062_recent_changes_metadata_was.t SHA1 6adec606eae81ca262ff2b04c70f03b55566bbc6 t/100_formatting.t SHA1 e3a21e8441b1b6b92942cc3346f091da410116f7 t/101_default_formatter.t SHA1 77269781bbea9c82a2def6977fb4389d3f599d50 t/102_multiple_formatter.t SHA1 2afd8e2873f64323796e22404898e58f539906ca t/150_plugins.t SHA1 11f06774a1c282551424d43b1c5f61b217a6008b t/151_plugin_methods.t SHA1 a2abb23523645b1a4db6792b0bc301512bc3e7e9 t/152_pre_plugin_approval.t SHA1 f1fd13ed7ec7058a6a7710593163eb80282e04ce t/297_feed_rss_recentchanges.t SHA1 baae4956bdcdee9dea01a996ecf60f6be3e9836a t/298_feed_rss_node_all_versions.t SHA1 69f2b5827cdf152af29a999aa82dcc68a307a095 t/299_feed_rss_gen_node_dist_feed.t 
SHA1 52d17b764873682eb3698d422fcea96754e03798 t/300_feed_atom_setup.t SHA1 bda92f8eded61520352737b906a4c581401d16c3 t/301_feed_atom_add_test_data.t SHA1 92eaf1aa4de5f882f886711606b287d496c969ba t/302_feed_atom_recentchanges.t SHA1 66752df9042f2031826f41774179a07d7e98202a t/303_feed_atom_node_all_versions.t SHA1 44a9dd3d35623b6814f73d8b9ae3d28a008685d7 t/304_feed_atom_gen_node_dist_feed.t SHA1 677cefac48fec94b52c8374065f12d6571cbc8a4 t/400_upgrade.t SHA1 c41e905ec9c1f9b421cc3fa517084f883a67dd3c t/401_null_change.t SHA1 61e28ddd05ab0f3722f2a8cdce80a8f314b86391 t/500_pod.t SHA1 380e8a31c59fdff6fdfb520dbac077e5a19fd660 t/700_lucy_bug.t SHA1 f9e3379a88e74fadd003718a775cb124d4f01797 t/701_lucy_setup.t SHA1 c20f21c6b2fe3329a1a4db7c42d202cf6c2ceb39 t/702_metadata_search.t SHA1 86b5cd9b1fc5bf513b7005ec13958d107945951c t/703_search_weightings.t SHA1 0944b423c21abacdd290e2c035b58b9a3f72f686 t/lib/Wiki/Toolkit/Plugin/Bar.pm SHA1 5ba3e2fa69ad7ca30aa726289275e1e2f7220fc6 t/lib/Wiki/Toolkit/Plugin/Foo.pm -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.4.12 (GNU/Linux) iQEcBAEBAgAGBQJSj+FTAAoJEJsOndGQ5lgDMHoH/2yTCmkiyZBJCVW5/uFQ1zvC dfLUWPd0R6TYPqHH0ysPd0PO+PDL0a2emqQ6+cBBpBPuCfB5EIGxH0WnmyxQ16o5 uHX2mGYZBtsjYR35PgfEyo2XpA3ZR42ehc/3nsFozrN05owDIRHc3GVdgFio/kzR J8FJWxxzuLjla0fvjFkW7wNP1CSCoKTi5/TofZEzgUclgfuXwVv4rZYIjtt/msRS X6S73oeXX+Mn42aYntm1U/zyM3VGCMUAj+A/JnOVFnWWaKa5kWUfX8m1/Nbjh5/a GhQrxIH9Zf5sdJH621fRM3y3+ysg4VkeBks5AtwdXdcXYegSfGElB3vUwDQOqzA= =Uagh -----END PGP SIGNATURE----- Wiki-Toolkit-0.83/META.yml0000664000175000017500000000106712243760523015143 0ustar vagrantvagrant--- abstract: unknown author: - unknown build_requires: ExtUtils::MakeMaker: 0 configure_requires: ExtUtils::MakeMaker: 0 dynamic_config: 1 generated_by: 'ExtUtils::MakeMaker version 6.82, CPAN::Meta::Converter version 2.120351' license: unknown meta-spec: url: http://module-build.sourceforge.net/META-spec-v1.4.html version: 1.4 name: Wiki-Toolkit no_index: directory: - t - inc requires: DBD::SQLite: 0.25 DBI: 0 Digest::MD5: 0 HTML::PullParser: 0 Plucene: 1.19 Test::More: 0 Text::WikiFormat: 0.78 Time::Piece: 0 version: 0.83 Wiki-Toolkit-0.83/MANIFEST0000644000175000017500000000516212243760523015021 0ustar vagrantvagrantChanges INSTALL MANIFEST Makefile.PL README bin/wiki-toolkit-rename-node bin/wiki-toolkit-delete-node bin/wiki-toolkit-setupdb bin/wiki-toolkit-revert-to-date bin/user-setup-mysql-dbixfts.pl lib/Wiki/Toolkit/Extending.pod lib/Wiki/Toolkit/Feed/Atom.pm lib/Wiki/Toolkit/Feed/Listing.pm lib/Wiki/Toolkit/Feed/RSS.pm lib/Wiki/Toolkit/Formatter/Default.pm lib/Wiki/Toolkit/Formatter/Multiple.pm lib/Wiki/Toolkit/Formatter/WikiLinkFormatterParent.pm lib/Wiki/Toolkit/Plugin.pm lib/Wiki/Toolkit/Search/Base.pm lib/Wiki/Toolkit/Search/DBIxFTS.pm lib/Wiki/Toolkit/Search/Lucy.pm lib/Wiki/Toolkit/Search/Plucene.pm lib/Wiki/Toolkit/Search/SII.pm lib/Wiki/Toolkit/Setup/Database.pm lib/Wiki/Toolkit/Setup/DBIxFTSMySQL.pm lib/Wiki/Toolkit/Setup/MySQL.pm lib/Wiki/Toolkit/Setup/Pg.pm lib/Wiki/Toolkit/Setup/SII.pm lib/Wiki/Toolkit/Setup/SQLite.pm lib/Wiki/Toolkit/Store/Database.pm lib/Wiki/Toolkit/Store/MySQL.pm lib/Wiki/Toolkit/Store/Pg.pm lib/Wiki/Toolkit/Store/SQLite.pm lib/Wiki/Toolkit/TestConfig/Utilities.pm lib/Wiki/Toolkit/TestLib.pm lib/Wiki/Toolkit.pm t/001_load.t t/002_datastore_setup.t t/003_instantiate.t t/004_write_and_retrieve.t t/005_delete.t t/006_list_all.t t/007_search.t t/008_write_and_rewrite.t t/009_backlinks.t t/010_metadata.t t/011_recent_changes.t t/012_blank_data.t t/013_fuzzy_title_match.t 
t/014_advanced_metadata.t t/015_dangling_links.t t/016_recent_changes_between.t t/017_delete_version.t t/018_rc_multiple_metadata_criteria.t t/019_recent_changes_case.t t/020_node_exists_case_insensitive.t t/021_moderation.t t/022_list_unmoderated.t t/025_list_node_all_versions.t t/026_schema_current.t t/027_list_last_version_before.t t/040_rename.t t/050_mysql_store.t t/051_pg_store.t t/052_sqlite_store.t t/060_recent_changes_new_only.t t/061_list_metadata_by_type.t t/062_recent_changes_metadata_was.t t/100_formatting.t t/101_default_formatter.t t/102_multiple_formatter.t t/150_plugins.t t/151_plugin_methods.t t/152_pre_plugin_approval.t t/297_feed_rss_recentchanges.t t/298_feed_rss_node_all_versions.t t/299_feed_rss_gen_node_dist_feed.t t/300_feed_atom_setup.t t/301_feed_atom_add_test_data.t t/302_feed_atom_recentchanges.t t/303_feed_atom_node_all_versions.t t/304_feed_atom_gen_node_dist_feed.t t/400_upgrade.t t/401_null_change.t t/500_pod.t t/700_lucy_bug.t t/701_lucy_setup.t t/702_metadata_search.t t/703_search_weightings.t t/lib/Wiki/Toolkit/Plugin/Bar.pm t/lib/Wiki/Toolkit/Plugin/Foo.pm META.yml Module YAML meta-data (added by MakeMaker) META.json Module JSON meta-data (added by MakeMaker) SIGNATURE Public-key signature (added by MakeMaker) Wiki-Toolkit-0.83/Makefile.PL0000644000175000017500000002336412243757607015657 0ustar vagrantvagrantuse strict; use 5.006; #by perlver use ExtUtils::MakeMaker; use Data::Dumper; # Large chunks of this were inspired by the Makefile.PL supplied with # DBIx::FullTextSearch -- thanks! # See if we already have some config variables set. use lib "lib"; eval "use Wiki::Toolkit::TestConfig"; # Even if we do have a previous configuration saved, we can over-ride and # be asked all the questions again by specifying the -s flag or setting # the appropriate environment variable. if ($Wiki::Toolkit::TestConfig::configured and not (@ARGV and $ARGV[0] eq '-s') and not $ENV{WIKI_TOOLKIT_RERUN_CONFIG}) { print "\nFor the test suite, we use the database and user info\n" . "specified during the previous run. If you want to change\n" . "some or all of the values, run 'perl Makefile.PL -s'.\n\n" . "**** REMEMBER THAT THESE TESTS ARE DESTRUCTIVE. ****\n" . "**** DO NOT RUN THEM ON A DATABASE THAT CONTAINS ****\n" . "**** OR EVER WILL CONTAIN LIVE DATA. ****\n" . "**** ****\n" . "**** THE DATABASE VALUES YOU GAVE PREVIOUSLY ARE ****\n" . "**** STORED IN Wiki::Toolkit::TestConfig AND ****\n" . "**** WILL BE USED FOR FUTURE INSTALLS OF THIS ****\n" . "**** AND RELATED MODULES. ****\n\n"; } else { print "\nYou should supply at least one set of options for testing,\n" . "preferably relevant to the backend(s) you intend to use live.\n" . "Running the tests under every possible backend combination is\n" . "recommended. To enter an undefined value, accept the empty\n" . "string or explicitly enter 'undef'.\n\n" . "**** THESE TESTS ARE DESTRUCTIVE. ****\n" . "**** DO NOT RUN THEM ON A DATABASE THAT CONTAINS ****\n" . "**** OR EVER WILL CONTAIN LIVE DATA. ****\n" . "**** ****\n" . "**** THE DATABASE VALUES YOU GIVE HERE WILL BE ****\n" . "**** STORED IN Wiki::Toolkit::TestConfig AND ****\n" . "**** WILL BE USED FOR FUTURE INSTALLS OF THIS ****\n" . "**** AND RELATED MODULES. ****\n\n"; my %config; # Grab information from previous runs. if ($Wiki::Toolkit::TestConfig::configured) { %config = %Wiki::Toolkit::TestConfig::config; } # Grab information from environment variables. 
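# For example, a noninteractive configuration run might look like this
# (example values only - the full list of variables is in the INSTALL file):
#
#   WIKI_TOOLKIT_MYSQL_DBNAME=wiki_toolkit_test \
#   WIKI_TOOLKIT_MYSQL_DBUSER=wiki WIKI_TOOLKIT_MYSQL_DBPASS=wiki perl Makefile.PL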
foreach my $store (qw(MySQL Pg)) { my $dbname = $ENV{"WIKI_TOOLKIT_".uc($store)."_DBNAME"}; if ($dbname and $dbname ne "undef") { $config{$store}{dbname} = $dbname; foreach my $var (qw(dbuser dbpass dbhost)) { my $value = $ENV{"WIKI_TOOLKIT_".uc($store)."_".uc($var)}; if ($value and $value ne "undef") { $config{$store}{$var} = $value; } elsif ($value eq "undef") { $config{$store}{$var} = undef; } } } elsif ($dbname eq "undef") { $config{$store}{dbname} = undef; } } my $dbixfts = $ENV{WIKI_TOOLKIT_DBIXFTS_MYSQL}; if ($dbixfts and $dbixfts ne "undef") { $config{dbixfts} = 1; } elsif ($dbixfts eq "undef") { $config{dbixfts} = 0; } # Finally ask questions; then check the settings work. my %driver = ( MySQL => "DBD::mysql", Pg => "DBD::Pg" ); foreach my $store_type (qw(MySQL Pg)) { # See whether we have the driver installed. eval "require " . $driver{$store_type}; if ($@) { print "\n$driver{$store_type} not installed... skipping...\n"; $config{$store_type}{dbname} = undef; next; } # Prompt for the options. my ($dbname, $dbuser, $dbpass, $dbhost); my $pad = ' ' x (7-length $store_type); $dbname = prompt "\n${pad}Database name for $store_type: ", $config{$store_type}{dbname}; undef $dbname unless ($dbname and $dbname ne "undef"); if ($dbname and $dbname ne "undef") { $dbuser = prompt " Database user: ", $config{$store_type}{dbuser}; undef $dbuser unless ($dbuser and $dbuser ne "undef"); $dbpass = prompt " Database password: ", $config{$store_type}{dbpass}; undef $dbpass unless ($dbpass and $dbpass ne "undef"); $dbhost = prompt "Database host (if needed): ", $config{$store_type}{dbhost}; undef $dbhost unless ($dbhost and $dbhost ne "undef"); $config{$store_type}{dbname} = $dbname; $config{$store_type}{dbuser} = $dbuser; $config{$store_type}{dbpass} = $dbpass; $config{$store_type}{dbhost} = $dbhost; } else { print "\nNo database name supplied... skipping...\n"; $config{$store_type}{dbname} = undef; } } print "\n"; # Copy the config hash to the right namespace. %Wiki::Toolkit::TestConfig::config = %config; } # If we have a MySQL store configured, we can test the DBIx::FullTextSearch # search backend. eval { require DBIx::FullTextSearch; require Lingua::Stem; }; my $fts_inst = $@ ? 0 : 1; if ($Wiki::Toolkit::TestConfig::config{MySQL}{dbname} and $fts_inst) { print "You have DBIx::FullTextSearch and Lingua::Stem installed,\n"; print " and a MySQL store configured... configuring for test...\n\n"; $Wiki::Toolkit::TestConfig::config{dbixfts} = 1; } else { print "Either DBIx::FullTextSearch or Lingua::Stem not installed,\n"; print "or no MySQL store configured... so won't test that...\n\n"; $Wiki::Toolkit::TestConfig::config{dbixfts} = undef; } # We can test the SQLite backend without asking questions, if it's installed. eval { require DBD::SQLite; }; if ($@) { print "DBD::SQLite not installed... so won't test that...\n\n"; $Wiki::Toolkit::TestConfig::config{SQLite} = { dbname => undef }; } else { print "You have DBD::SQLite... configuring test SQLite database...\n\n"; $Wiki::Toolkit::TestConfig::config{SQLite} = { dbname => "t/sqlite-test.db" }; } # If we have Search::InvertedIndex installed, we can test that without # asking questions. eval { require Search::InvertedIndex; }; my $sii_inst = $@ ? 
0 : 1; if ( $sii_inst ) { print "You have Search::InvertedIndex installed, so will test the S:II\n"; print "search backend...\n\n"; $Wiki::Toolkit::TestConfig::config{search_invertedindex} = 1; } else { print "You do not have Search::InvertedIndex installed; skipping test\n"; print "of S:II search backend...\n\n"; $Wiki::Toolkit::TestConfig::config{search_invertedindex} = undef; } # If we have Plucene installed, we can test that without asking questions. eval { require Plucene; require File::Spec::Functions }; my $plucene_inst = $@ ? 0 : 1; if ( $plucene_inst ) { print "You have Plucene installed, so will test with that...\n\n"; $Wiki::Toolkit::TestConfig::config{plucene} = 1; } else { print "Either Plucene or File::Spec::Functions not installed; skipping test...\n\n"; $Wiki::Toolkit::TestConfig::config{plucene} = undef; } # If we have Lucy installed, we can test that without asking questions. eval { require Lucy; require File::Path }; my $lucy_inst = $@ ? 0 : 1; if ( $lucy_inst ) { print "You have Lucy installed, so will test with that...\n\n"; $Wiki::Toolkit::TestConfig::config{lucy} = 1; } else { print "Either Lucy or File::Path not installed; skipping test...\n\n"; $Wiki::Toolkit::TestConfig::config{lucy} = undef; } # Write out the config for next run. open OUT, ">lib/Wiki/Toolkit/TestConfig.pm" or die "Couldn't open lib/Wiki/Toolkit/TestConfig.pm for writing: $!"; # warning - blind copy and paste follows. FIXME. print OUT Data::Dumper->new([ \%Wiki::Toolkit::TestConfig::config ], [ '*Wiki::Toolkit::TestConfig::config' ] )->Dump, "\$Wiki::Toolkit::TestConfig::configured = 1;\n1;\n"; close OUT; # Some modules are only prerequisites if we intend to test a # particular backend. my %config = %Wiki::Toolkit::TestConfig::config; my %extras; if ( $config{MySQL}{dbname} ) { $extras{'DBD::mysql'} = 0; } if ( $config{Pg}{dbname} ) { $extras{'DBD::Pg'} = 0; } if ( $config{SQLite}{dbname} ) { $extras{'DBD::SQLite'} = '0.25'; # some of my tests fail on earlier ones } if ( $config{dbixfts} ) { $extras{'DBIx::FullTextSearch'} = '0.71'; # earlier ones buggy } if ( $config{plucene} ) { $extras{'Plucene'} = '1.19'; # earlier has trouble with delete } # Write the Makefile. WriteMakefile( (MM->can('signature_target') ? (SIGN => 1) : ()), NAME => "Wiki::Toolkit", VERSION_FROM => "lib/Wiki/Toolkit.pm", PREREQ_PM => { 'Text::WikiFormat' => '0.78', #earlier's buggy 'HTML::PullParser' => 0, 'Digest::MD5' => 0, 'Test::More' => 0, 'Time::Piece' => 0, 'DBI' => 0, %extras }, EXE_FILES => [ "bin/wiki-toolkit-setupdb", "bin/wiki-toolkit-rename-node", "bin/wiki-toolkit-delete-node", "bin/wiki-toolkit-revert-to-date" ], clean => { FILES => "Config lib/Wiki/Toolkit/TestConfig.pm " . "t/sqlite-test.db t/sii-db-file-test.db " . "t/node.db t/plucene t/lucy" } ); Wiki-Toolkit-0.83/README0000644000175000017500000000541412243757607014561 0ustar vagrantvagrantNAME Wiki::Toolkit - A toolkit for building Wikis. DESCRIPTION Helps you develop Wikis quickly by taking care of the boring bits for you. You will still need to write some code - this isn't an instant Wiki. Wiki::Toolkit used to be called CGI::Wiki. SEE ALSO Tom Insam's Wiki::Toolkit::Kwiki is however an instant wiki, running on a Wiki::Toolkit backend. SYNOPSIS # Set up a wiki object with an SQLite storage backend, and an # inverted index/DB_File search backend. This store/search # combination can be used on systems with no access to an actual # database server. 
my $store = Wiki::Toolkit::Store::SQLite->new( dbname => "/home/wiki/store.db" ); my $indexdb = Search::InvertedIndex::DB::DB_File_SplitHash->new( -map_name => "/home/wiki/indexes.db", -lock_mode => "EX" ); my $search = Wiki::Toolkit::Search::SII->new( indexdb => $indexdb ); my $wiki = Wiki::Toolkit->new( store => $store, search => $search ); MAJOR METHODS write_node $wiki->write_node($node_name, $content, $checksum); $wiki->write_node( "Calthorpe Arms", "A rather nice pub on Gray's Inn Road", $checksum, { category => [ "Pub", "Pub Food", "Bloomsbury" ] } ); format my $cooked = $wiki->format($raw); delete_node $wiki->delete_node($node_name); list_all_nodes my @node_names = $wiki->list_all_nodes; list_backlinks my @links_to_me = $wiki->list_backlinks($node_name); list_nodes_by_metadata my @pubs = $wiki->list_nodes_by_metadata( metadata_type => "category", metadata_value => "Pub" ); list_recent_changes my @last_week_edits = $wiki->list_recent_changes( days => 7 ); my @last_ten_changes = $wiki->list_recent_changes( last_n_changes => 10 ); node_exists print "Got wombats" if $wiki->node_exists("Wombats"); retrieve_node my $homepage_content = $wiki->retrieve_node("Home Page"); my %node_data = $wiki->retrieve_node( $node_name ); print "Last Modified: $node_data{last_modified}\n"; print "Current Version: $node_data{version}\n"; print "Current Checksum: $node_data{checksum}\n"; print "Current Content: $node_data{content}\n"; print "Categories: " . join(", ", @{$node_data{metadata}{category}}) . "\n"; verify_checksum my $as_i_left_it = $wiki->verify_checksum( $node_name, $checksum ); search_nodes my @nodes = $search->nodes( "camel" ); Wiki-Toolkit-0.83/lib/0000755000175000017500000000000012243760523014432 5ustar vagrantvagrantWiki-Toolkit-0.83/lib/Wiki/0000755000175000017500000000000012243760523015335 5ustar vagrantvagrantWiki-Toolkit-0.83/lib/Wiki/Toolkit.pm0000644000175000017500000007634212243757607017345 0ustar vagrantvagrantpackage Wiki::Toolkit; use strict; use vars qw( $VERSION ); $VERSION = '0.83'; use Carp qw(croak carp); use Digest::MD5 "md5_hex"; # first, detect if Encode is available - it's not under 5.6. If we _are_ # under 5.6, give up - we'll just have to hope that nothing explodes. This # is the current 0.54 behaviour, so that's ok. my $CAN_USE_ENCODE; BEGIN { eval " use Encode "; $CAN_USE_ENCODE = $@ ? 0 : 1; } =head1 NAME Wiki::Toolkit - A toolkit for building Wikis. =head1 DESCRIPTION Helps you develop Wikis quickly by taking care of the boring bits for you. You will still need to write some code - this isn't an instant Wiki. =head1 SYNOPSIS # Set up a wiki object with an SQLite storage backend, and an # inverted index/DB_File search backend. This store/search # combination can be used on systems with no access to an actual # database server. my $store = Wiki::Toolkit::Store::SQLite->new( dbname => "/home/wiki/store.db" ); my $indexdb = Search::InvertedIndex::DB::DB_File_SplitHash->new( -map_name => "/home/wiki/indexes.db", -lock_mode => "EX" ); my $search = Wiki::Toolkit::Search::SII->new( indexdb => $indexdb ); my $wiki = Wiki::Toolkit->new( store => $store, search => $search ); # Do all the CGI stuff. 
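    # Read the requested action and node name, then dispatch on the action.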
my $q = CGI->new; my $action = $q->param("action"); my $node = $q->param("node"); if ($action eq 'display') { my $raw = $wiki->retrieve_node($node); my $cooked = $wiki->format($raw); print_page(node => $node, content => $cooked); } elsif ($action eq 'preview') { my $submitted_content = $q->param("content"); my $preview_html = $wiki->format($submitted_content); print_editform(node => $node, content => $submitted_content, preview => $preview_html); } elsif ($action eq 'commit') { my $submitted_content = $q->param("content"); my $cksum = $q->param("checksum"); my $written = $wiki->write_node($node, $submitted_content, $cksum); if ($written) { print_success($node); } else { handle_conflict($node, $submitted_content); } } =head1 METHODS =over 4 =item B # Set up store, search and formatter objects. my $store = Wiki::Toolkit::Store::SQLite->new( dbname => "/home/wiki/store.db" ); my $indexdb = Search::InvertedIndex::DB::DB_File_SplitHash->new( -map_name => "/home/wiki/indexes.db", -lock_mode => "EX" ); my $search = Wiki::Toolkit::Search::SII->new( indexdb => $indexdb ); my $formatter = My::HomeMade::Formatter->new; my $wiki = Wiki::Toolkit->new( store => $store, # mandatory search => $search, # defaults to undef formatter => $formatter # defaults to something suitable ); C must be an object of type C and C if supplied must be of type C (though this isn't checked yet - FIXME). If C isn't supplied, it defaults to an object of class L. You can get a searchable Wiki up and running on a system without an actual database server by using the SQLite storage backend with the SII/DB_File search backend - cut and paste the lines above for a quick start, and see L, L, and L when you want to learn the details. C can be any object that behaves in the right way; this essentially means that it needs to provide a C method which takes in raw text and returns the formatted version. See L for a simple example. Note that you can create a suitable object from a sub very quickly by using L like so: my $formatter = Test::MockObject->new(); $formatter->mock( 'format', sub { my ($self, $raw) = @_; return uc( $raw ); } ); I'm not sure whether to put this in the module or not - it'd let you just supply a sub instead of an object as the formatter, but it feels wrong to be using a Test::* module in actual code. =cut sub new { my ($class, @args) = @_; my $self = {}; bless $self, $class; $self->_init(@args) or return undef; return $self; } sub _init { my ($self, %args) = @_; # Check for scripts written with old versions of Wiki::Toolkit foreach my $obsolete_param ( qw( storage_backend search_backend ) ) { carp "You seem to be using a script written for a pre-0.10 version " . "of Wiki::Toolkit - the $obsolete_param parameter is no longer used. " . "Please read the documentation with 'perldoc Wiki::Toolkit'" if $args{$obsolete_param}; } croak "No store supplied" unless $args{store}; foreach my $k ( qw( store search formatter ) ) { $self->{"_".$k} = $args{$k}; } # Make a default formatter object if none was actually supplied. unless ( $args{formatter} ) { require Wiki::Toolkit::Formatter::Default; # Ensure backwards compatibility - versions prior to 0.11 allowed the # following options to alter the default behaviour of Text::WikiFormat. my %config; foreach ( qw( extended_links implicit_links allowed_tags macros node_prefix ) ) { $config{$_} = $args{$_} if defined $args{$_}; } $self->{_formatter} = Wiki::Toolkit::Formatter::Default->new( %config ); } # Make a place to store plugins. 
$self->{_registered_plugins} = [ ]; return $self; } =item B my $content = $wiki->retrieve_node($node); # Or get additional data about the node as well. my %node = $wiki->retrieve_node("HomePage"); print "Current Version: " . $node{version}; # Maybe we stored some of our own custom metadata too. my $categories = $node{metadata}{category}; print "Categories: " . join(", ", @$categories); print "Postcode: $node{metadata}{postcode}[0]"; # Or get an earlier version: my %node = $wiki->retrieve_node( name => "HomePage", version => 2, ); print $node{content}; In scalar context, returns the current (raw Wiki language) contents of the specified node. In list context, returns a hash containing the contents of the node plus additional data: =over 4 =item B =item B =item B =item B - a reference to a hash containing any caller-supplied metadata sent along the last time the node was written =back The C parameter is mandatory. The C parameter is optional and defaults to the newest version. If the node hasn't been created yet, it is considered to exist but be empty (this behaviour might change). B on metadata - each hash value is returned as an array ref, even if that type of metadata only has one value. =cut sub retrieve_node { my ($self, @rawargs) = @_; my %args = scalar @rawargs == 1 ? ( name => $rawargs[0] ) : @rawargs; my @plugins = $self->get_registered_plugins; $args{plugins} = \@plugins if scalar @plugins; $self->store->retrieve_node( %args ); } =item B my $ok = $wiki->moderate_node(name => $node, version => $version); Marks the given version of the node as moderated. If this is the highest moderated version, then update the node's contents to hold this version. =cut sub moderate_node { my ($self, %args) = @_; my @plugins = $self->get_registered_plugins; $args{plugins} = \@plugins if scalar @plugins; my $ret = $self->store->moderate_node( %args ); if($ret == -1) { return $ret; } return 1; } =item B my $ok = $wiki->set_node_moderation(name => $node, required => $required); Sets if a node requires moderation or not. (Moderation is required when $required is true). When moderation is required, new versions of a node will sit about until they're tagged as moderated, when they will become the new node. =cut sub set_node_moderation { my ($self, @args) = @_; return $self->store->set_node_moderation( @args ); } =item B my $ok = $wiki->rename_node(old_name => $old_name, new_name => $new_name, create_new_versions => $create_new_versions ); Renames a node, updating any references to it as required. Uses the internal_links table to identify the nodes that link to this one, and re-writes any wiki links in these to point to the new name. If required, it can mark these updates to other pages as a new version. =cut sub rename_node { my ($self, @argsarray) = @_; my %args = @argsarray; if ((scalar @argsarray) == 2 || (scalar @argsarray) == 3) { # Missing keys %args = ( old_name => $argsarray[0], new_name => $argsarray[1], create_new_versions => $argsarray[2] ); } my @plugins = $self->get_registered_plugins; $args{plugins} = \@plugins if scalar @plugins; $args{wiki} = $self; my $ret = $self->store->rename_node( %args ); if ($ret && $ret == -1) { return $ret; } return 1; } =item B my $ok = $wiki->verify_checksum($node, $checksum); Sees whether your checksum is current for the given node. Returns true if so, false if not. 
B Be aware that when called directly and without locking, this might not be accurate, since there is a small window between the checking and the returning where the node might be changed, so B rely on it for safe commits; use C for that. It can however be useful when previewing edits, for example. =cut sub verify_checksum { my ($self, @args) = @_; $self->store->verify_checksum( @args ); } =item B # List all nodes that link to the Home Page. my @links = $wiki->list_backlinks( node => "Home Page" ); =cut sub list_backlinks { my ($self, @args) = @_; $self->store->list_backlinks( @args ); } =item B # List all nodes that have been linked to from other nodes but don't # yet exist. my @links = $wiki->list_dangling_links; Each node is returned once only, regardless of how many other nodes link to it. =cut sub list_dangling_links { my ($self, @args) = @_; $self->store->list_dangling_links( @args ); } =item B my @nodes = $wiki->list_all_nodes; Returns a list containing the name of every existing node. The list won't be in any kind of order; do any sorting in your calling script. =cut sub list_all_nodes { my ($self, @args) = @_; $self->store->list_all_nodes( @args ); } =item B # All documentation nodes. my @nodes = $wiki->list_nodes_by_metadata( metadata_type => "category", metadata_value => "documentation", ignore_case => 1, # optional but recommended (see below) ); # All pubs in Hammersmith. my @pubs = $wiki->list_nodes_by_metadata( metadata_type => "category", metadata_value => "Pub", ); my @hsm = $wiki->list_nodes_by_metadata( metadata_type => "category", metadata_value => "Hammersmith", ); my @results = my_l33t_method_for_ANDing_arrays( \@pubs, \@hsm ); Returns a list containing the name of every node whose caller-supplied metadata matches the criteria given in the parameters. By default, the case-sensitivity of both C and C depends on your database - if it will return rows with an attribute value of "Pubs" when you asked for "pubs", or not. If you supply a true value to the C parameter, then you can be sure of its being case-insensitive. This is recommended. If you don't supply any criteria then you'll get an empty list. This is a really really really simple way of finding things; if you want to be more complicated then you'll need to call the method multiple times and combine the results yourself, or write a plugin. =cut sub list_nodes_by_metadata { my ($self, @args) = @_; $self->store->list_nodes_by_metadata( @args ); } =item B Returns nodes where either the metadata doesn't exist, or is blank Unlike list_nodes_by_metadata(), the metadata value is optional (the metadata type is required). # All nodes missing documentation my @nodes = $store->list_nodes_by_missing_metadata( metadata_type => "category", metadata_value => "documentation", ignore_case => 1, # optional but recommended (see below) ); # All nodes which don't have a latitude defined my @nodes = $store->list_nodes_by_missing_metadata( metadata_type => "latitude" ); =cut sub list_nodes_by_missing_metadata { my ($self, @args) = @_; $self->store->list_nodes_by_missing_metadata( @args ); } =item B This is documented in L; see there for parameters and return values. 
All parameters are passed through directly to the store object, so, for example, my @nodes = $wiki->list_recent_changes( days => 7 ); does exactly the same thing as my @nodes = $wiki->store->list_recent_changes( days => 7 ); =cut sub list_recent_changes { my ($self, @args) = @_; $self->store->list_recent_changes( @args ); } =item B my @nodes = $wiki->list_unmoderated_nodes(); my @nodes = $wiki->list_unmoderated_nodes( only_where_latest => 1 ); $nodes[0]->{'name'} # The name of the node $nodes[0]->{'node_id'} # The id of the node $nodes[0]->{'version'} # The version in need of moderation $nodes[0]->{'moderated_version'} # The newest moderated version Fetches details of all the node versions that require moderation (id, name, version, and latest moderated version). If only_where_latest is set, then only the latest version of nodes where the latest version needs moderating are returned. Otherwise, all node versions (including old ones, and possibly multiple per node) are returned. =cut sub list_unmoderated_nodes { my ($self, @args) = @_; $self->store->list_unmoderated_nodes( @args ); } =item B my @versions = $wiki->list_node_all_versions("HomePage"); my @versions = $wiki->list_node_all_versions( name => 'HomePage', with_content => 1, with_metadata => 0 ); Returns all the versions of a node, optionally including the content and metadata, as an array of hashes (newest versions first). =cut sub list_node_all_versions { my ($self,@argsarray) = @_; my %args; if(scalar @argsarray == 1) { $args{'name'} = $argsarray[0]; } else { %args = @argsarray; } return $self->store->list_node_all_versions(%args); } =item B List the last version of every node before a given date. If no version existed before that date, will return undef for version. Returns a hash of id, name, version and date my @nv = $wiki->list_last_version_before('2007-01-02 10:34:11') foreach my $data (@nv) { } =cut sub list_last_version_before { my ($self,@argsarray) = @_; return $self->store->list_last_version_before(@argsarray); } =item B my $ok = $wiki->node_exists( "Wombat Defenestration" ); # or ignore case - optional but recommended my $ok = $wiki->node_exists( name => "monkey brains", ignore_case => 1, ); Returns true if the node has ever been created (even if it is currently empty), and false otherwise. By default, the case-sensitivity of C depends on your store backend. If you supply a true value to the C parameter, then you can be sure of its being case-insensitive. This is recommended. =cut sub node_exists { my ($self, @args) = @_; $self->store->node_exists( @args ); } =item B my $needs = $wiki->node_required_moderation( "Wombat Defenestration" ); Returns true if the node exists and requires moderation, and false otherwise. =cut sub node_required_moderation { my ($self, @args) = @_; my %node = $self->retrieve_node(@args); # Return false if it doesn't exist unless(%node) { return 0; } unless($node{node_requires_moderation}) { return 0; } # Otherwise return the state of the flag return $node{node_requires_moderation}; } =item B $wiki->delete_node( name => "Home Page", version => 15 ); C is optional. If it is supplied then only that version of the node will be deleted. Otherwise the node and all its history will be completely deleted. Doesn't do any locking though - to fix? You probably don't want to let anyone except Wiki admins call this. You may not want to use it at all. Croaks on error, silently does nothing if the node or version doesn't exist, returns true if no error. 
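For example, to discard just the most recent revision of a node while leaving its earlier history in place (a sketch - "Sandbox" is an invented node name):

    my %data = $wiki->retrieve_node( "Sandbox" );
    # Passing an explicit version deletes only that version.
    $wiki->delete_node( name => "Sandbox", version => $data{version} );
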
=cut
sub delete_node { my $self = shift; # Backwards compatibility.
my %args = ( scalar @_ == 1 ) ? ( name => $_[0] ) : @_; my @plugins = $self->get_registered_plugins; my $plugins_ref = \@plugins if scalar @plugins; return 1 unless $self->node_exists( $args{name} ); $self->store->delete_node( name => $args{name}, version => $args{version}, wiki => $self, plugins => $plugins_ref, ); if ( my $search = $self->search_obj ) { # Remove old data.
$search->delete_node( $args{name} ); # If we have any versions left, index the new latest version.
my %new_current_data = $self->retrieve_node( $args{name} ); # Nonexistent nodes will return blank content.
if ( $new_current_data{content} ) { $search->index_node( $args{name}, $new_current_data{content}, $new_current_data{metadata} ); } } return 1; }
=item B<search_nodes>
  # Find all the nodes which contain the word 'expert'.
  my %results = $wiki->search_nodes('expert');
Returns a (possibly empty) hash whose keys are the node names and whose values are the scores in some kind of relevance-scoring system I haven't entirely come up with yet. For OR searches, this could initially be the number of terms that appear in the node, perhaps. Defaults to AND searches (if $and_or is not supplied, or is anything other than C<OR> or C<or>). Searches are case-insensitive. Croaks if you haven't defined a search backend.
=cut
sub search_nodes { my ($self, @args) = @_; my @terms = map { $self->store->charset_encode($_) } @args; if ( $self->search_obj ) { $self->search_obj->search_nodes( @terms ); } else { croak "No search backend defined."; } }
=item B<supports_phrase_searches>
  if ( $wiki->supports_phrase_searches ) {
      return $wiki->search_nodes( '"fox in socks"' );
  }
Returns true if your chosen search backend supports phrase searching, and false otherwise.
=cut
sub supports_phrase_searches { my ($self, @args) = @_; $self->search_obj->supports_phrase_searches( @args ) if $self->search_obj; }
=item B<supports_fuzzy_searches>
  if ( $wiki->supports_fuzzy_searches ) {
      return $wiki->fuzzy_title_match( 'Kings Cross, St Pancreas' );
  }
Returns true if your chosen search backend supports fuzzy title searching, and false otherwise.
=cut
sub supports_fuzzy_searches { my ($self, @args) = @_; $self->search_obj->supports_fuzzy_searches( @args ) if $self->search_obj; }
=item B<fuzzy_title_match>
B<WARNING:> This section of the documentation assumes you are using a search engine which supports fuzzy matching. (See above.) The L<DBIx::FullTextSearch|Wiki::Toolkit::Search::DBIxFTS> backend in particular does not.
  $wiki->write_node( "King's Cross St Pancras", "A station." );
  my %matches = $wiki->fuzzy_title_match( "Kings Cross St. Pancras" );
Returns a (possibly empty) hash whose keys are the node names and whose values are the scores in some kind of relevance-scoring system I haven't entirely come up with yet. Note that even if an exact match is found, any other similar enough matches will also be returned. However, any exact match is guaranteed to have the highest relevance score. The matching is done against "canonicalised" forms of the search string and the node titles in the database: stripping vowels, repeated letters and non-word characters, and lowercasing. Croaks if you haven't defined a search backend.
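A typical use is offering "did you mean?" suggestions when an exact page lookup fails (node name hypothetical):

    my %matches = $wiki->fuzzy_title_match( "Kings Cross St. Pancras" );
    # Sort so the highest-scoring (most relevant) suggestion comes first.
    my @suggestions = sort { $matches{$b} <=> $matches{$a} } keys %matches;
    print "Did you mean '$suggestions[0]'?\n" if @suggestions;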
=cut
sub fuzzy_title_match { my ($self, @args) = @_; if ( $self->search_obj ) { if ($self->search_obj->supports_fuzzy_searches) { $self->search_obj->fuzzy_title_match( @args ); } else { croak "Search backend doesn't support fuzzy searches"; } } else { croak "No search backend defined."; } }
=item B<register_plugin>
  my $plugin = Wiki::Toolkit::Plugin::Foo->new;
  $wiki->register_plugin( plugin => $plugin );
Registers the plugin with the wiki as one that needs to be informed when we write a node. If the plugin C<isa> L<Wiki::Toolkit::Plugin>, calls the methods set up by that parent class to let it know about the backend store, search and formatter objects. Finally, calls the plugin class's C<on_register> method, which should be used to check tables are set up etc. Note that because of the order these things are done in, C<on_register> for L<Wiki::Toolkit::Plugin> subclasses can use the C<datastore>, C<indexer> and C<formatter> methods as it needs to.
=cut
sub register_plugin { my ($self, %args) = @_; my $plugin = $args{plugin} || ""; croak "no plugin supplied" unless $plugin; if ( $plugin->isa( "Wiki::Toolkit::Plugin" ) ) { $plugin->wiki( $self ); $plugin->datastore( $self->store ); $plugin->indexer( $self->search_obj ); $plugin->formatter( $self->formatter ); } if ( $plugin->can( "on_register" ) ) { $plugin->on_register; } push @{ $self->{_registered_plugins} }, $plugin; }
=item B<get_registered_plugins>
  my @plugins = $wiki->get_registered_plugins;
Returns an array of plugin objects (or a reference to that array in scalar context).
=cut
sub get_registered_plugins { my $self = shift; my $ref = $self->{_registered_plugins}; return wantarray ? @$ref : $ref; }
=item B<write_node>
  my $written = $wiki->write_node($node, $content, $checksum, \%metadata, $requires_moderation);
  if ($written) { display_node($node); } else { handle_conflict(); }
Writes the specified content into the specified node in the backend storage; and indexes/reindexes the node in the search indexes (if a search is set up); calls C<post_write> on any registered plugins. Note that you can blank out a node without deleting it by passing the empty string as $content, if you want to. If you expect the node to already exist, you must supply a checksum, and the node is write-locked until either your checksum has been proved old, or your checksum has been accepted and your change committed. If no checksum is supplied, and the node is found to already exist and be nonempty, a conflict will be raised. The first two parameters are mandatory, the others optional. If you want to supply metadata but have no checksum (for a newly-created node), supply a checksum of C<undef>. The final parameter, $requires_moderation (which defaults to false), is ignored except on new nodes. For existing nodes, use $wiki->toggle_node_moderation to change the node moderation flag. Returns the version of the updated node on success, 0 on conflict, croaks on error. B<Note> on the metadata hashref: Any data in here that you wish to access directly later must be a key-value pair in which the value is either a scalar or a reference to an array of scalars. For example:
  $wiki->write_node( "Calthorpe Arms", "nice pub", $checksum,
                     { category => [ "Pubs", "Bloomsbury" ],
                       postcode => "WC1X 8JR" } );
  # and later
  my @nodes = $wiki->list_nodes_by_metadata(
      metadata_type  => "category",
      metadata_value => "Pubs" );
For more advanced usage (passing data through to registered plugins) you may if you wish pass key-value pairs in which the value is a hashref or an array of hashrefs. The data in the hashrefs will not be stored as metadata; it will be checksummed and the checksum will be stored instead. Such data can I<only> be accessed via plugins.
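A typical read-modify-write cycle with conflict detection (node name hypothetical):

    my %node    = $wiki->retrieve_node( "Calthorpe Arms" );
    my $written = $wiki->write_node( "Calthorpe Arms",
                                     $node{content} . "\nNow serves food.",
                                     $node{checksum},
                                     $node{metadata} );
    unless ( $written ) {
        # Someone else committed a change between our read and our write;
        # re-read the node and retry, or show the user a conflict form.
    }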
=cut
sub write_node { my ($self, $node, $content, $checksum, $metadata, $requires_moderation) = @_; croak "No valid node name supplied for writing" unless $node; croak "No content parameter supplied for writing" unless defined $content; $checksum = md5_hex("") unless defined $checksum; my $formatter = $self->{_formatter}; my @links_to; if ( $formatter->can( "find_internal_links" ) ) { # Supply $metadata to formatter in case it's needed to alter the
# behaviour of the formatter, eg for Wiki::Toolkit::Formatter::Multiple.
my @all_links_to = $formatter->find_internal_links($content,$metadata); my %unique = map { $_ => 1 } @all_links_to; @links_to = keys %unique; } my %data = ( node => $node, content => $content, checksum => $checksum, metadata => $metadata, requires_moderation => $requires_moderation ); $data{links_to} = \@links_to if scalar @links_to; my @plugins = $self->get_registered_plugins; $data{plugins} = \@plugins if scalar @plugins; my $store = $self->store; my $ret = $store->check_and_write_node( %data ) or return 0; if($ret == -1) { return -1; } my $search = $self->{_search}; if ($search and $content) { $search->index_node( $node, $store->charset_encode( $content ), $metadata ); } return $ret; }
=item B<format>
  my $cooked = $wiki->format($raw, $metadata);
Passed straight through to your chosen formatter object. You do not I<have> to supply the C<$metadata> hashref, but if your formatter allows node metadata to affect the rendering of the node then you will want to.
=cut
sub format { my ( $self, $raw, $metadata ) = @_; my $formatter = $self->{_formatter}; # Add on $self to the call so the formatter can access things like whether
# a linked-to node exists, etc.
my $result = $formatter->format( $raw, $self, $metadata ); # Nasty hack to work around an HTML::Parser deficiency
# see http://rt.cpan.org/NoAuth/Bug.html?id=7014
if ($CAN_USE_ENCODE) { if (Encode::is_utf8($raw)) { Encode::_utf8_on( $result ); } } return $result; }
=item B<store>
  my $store  = $wiki->store;
  my $dbname = eval { $wiki->store->dbname; } or warn "Not a DB backend";
Returns the storage backend object.
=cut
sub store { my $self = shift; return $self->{_store}; }
=item B<search_obj>
  my $search_obj = $wiki->search_obj;
Returns the search backend object.
=cut
sub search_obj { my $self = shift; return $self->{_search}; }
=item B<formatter>
  my $formatter = $wiki->formatter;
Returns the formatter backend object.
=cut
sub formatter { my $self = shift; return $self->{_formatter}; }
=back
=head1 SEE ALSO
For a very quick Wiki startup without any of that icky programming stuff, see Tom Insam's L<Wiki::Toolkit::Kwiki>, an instant wiki based on Wiki::Toolkit. Or for the specialised application of a wiki about a city, see the L<OpenGuides> distribution. L<Wiki::Toolkit::Formatter::Multiple> allows you to use different formatting modules. L<Wiki::Toolkit::Formatter::Default> might be useful for anyone wanting to write a custom formatter. Existing formatters include:
=over 4
=item * L<Wiki::Toolkit::Formatter::Default> (in this distro)
=item * L<Wiki::Toolkit::Formatter::Pod>
=item * L<Wiki::Toolkit::Formatter::UseMod>
=back
There's currently a choice of three storage backends - all database-backed.
=over 4
=item * L<Wiki::Toolkit::Store::MySQL> (in this distro)
=item * L<Wiki::Toolkit::Store::Pg> (in this distro)
=item * L<Wiki::Toolkit::Store::SQLite> (in this distro)
=item * L<Wiki::Toolkit::Store::Database> (parent class for the above - in this distro)
=back
A search backend is optional:
=over 4
=item * L<Wiki::Toolkit::Search::DBIxFTS> (in this distro, uses L<DBIx::FullTextSearch>)
=item * L<Wiki::Toolkit::Search::SII> (in this distro, uses L<Search::InvertedIndex>)
=back
Standalone plugins can also be written - currently they should only read from the backend storage, but write access guidelines are coming soon; a minimal read-only plugin sketch follows below.
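A minimal sketch of such a read-only plugin (the class name is hypothetical; the C<datastore> accessor is populated by C<register_plugin> before C<on_register> is called):

    package Wiki::Toolkit::Plugin::NodeCounter;
    use strict;
    use base 'Wiki::Toolkit::Plugin';

    sub on_register {
        my $self = shift;
        # Read-only access to the backend store.
        my @nodes = $self->datastore->list_all_nodes;
        warn "Wiki currently has " . scalar(@nodes) . " nodes\n";
    }

    1;

It would then be registered in the usual way, with C<< $wiki->register_plugin( plugin => Wiki::Toolkit::Plugin::NodeCounter->new ) >>.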
Plugins written so far and available from CPAN: =over 4 =item * L =item * L =item * L =item * L =back If writing a plugin you might want an easy way to run tests for it on all possible backends: =over 4 =item * L (in this distro) =back Other ways to implement Wikis in Perl include: =over 4 =item * L (an instant wiki) =item * L =item * L =item * L =item * UseModWiki L =item * Chiq Chaq L =back =head1 AUTHOR Kake Pugh (kake@earth.li) and the Wiki::Toolkit team (including Nick Burch and Dominic Hargreaves) =head1 SUPPORT Questions should go to cgi-wiki-dev@earth.li. =head1 COPYRIGHT Copyright (C) 2002-2004 Kake Pugh. All Rights Reserved. Copyright (C) 2006-2013 the Wiki::Toolkit team. All Rights Reserved. This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =head1 FEEDBACK The developer web site and bug tracker is at http://www.wiki-toolkit.org/ - please file bugs there as appropriate. You could also subscribe to the dev list at http://www.earth.li/cgi-bin/mailman/listinfo/cgi-wiki-dev =head1 BUGS Versions between 0.75 and 0.79 inclusive contain a bug which prevents Recent Changes routines from working correctly if minor changes are excluded . You may wish to avoid upgrading to this version until it is fixed if this is important to you; the fix is however not trivial so noone has been able to step up yet. Other minor bugs are documented at =head1 CREDITS Various London.pm types helped out with code review, encouragement, JFDI, style advice, code snippets, module recommendations, and so on; far too many to name individually, but particularly Richard Clamp, Tony Fisher, Mark Fowler, and Chris Ball. blair christensen sent patches and gave me some good ideas. chromatic continues to patiently apply my patches to L and help me get it working in just the way I need. Paul Makepeace helped me add support for connecting to non-local databases. Shevek has been prodding me a lot lately. The L team keep me well-supplied with encouragement and bug reports. Nick Burch has been leading the way with development leading up to the release under the Wiki::Toolkit name. =head1 GRATUITOUS PLUG I'm only obsessed with Wikis because of the Open Guide to London -- L =cut 1; Wiki-Toolkit-0.83/lib/Wiki/Toolkit/0000755000175000017500000000000012243760523016762 5ustar vagrantvagrantWiki-Toolkit-0.83/lib/Wiki/Toolkit/Setup/0000755000175000017500000000000012243760523020062 5ustar vagrantvagrantWiki-Toolkit-0.83/lib/Wiki/Toolkit/Setup/Database.pm0000644000175000017500000002543712243757607022150 0ustar vagrantvagrantpackage Wiki::Toolkit::Setup::Database; use strict; use vars qw( $VERSION @SUPPORTED_SCHEMAS); $VERSION = 0.09; @SUPPORTED_SCHEMAS = qw(8 9 10); =head1 NAME Wiki::Toolkit::Setup::Database - parent class for database storage setup classes for Wiki::Toolkit =cut sub fetch_upgrade_old_to_8 { # Compatible with old_to_10 fetch_upgrade_old_to_10(@_); } sub fetch_upgrade_old_to_9 { # Compatible with old_to_10 fetch_upgrade_old_to_10(@_); } # Fetch from the old style database, ready for an upgrade to db version 10 sub fetch_upgrade_old_to_10 { my $dbh = shift; my %nodes; my %metadatas; my %contents; my @internal_links; my %ids; print "Grabbing and upgrading old data... 
"; # Grab all the nodes, and give them an ID my $sth = $dbh->prepare("SELECT name,version,text,modified FROM node"); $sth->execute; my $id = 0; while( my($name,$version,$text,$modified) = $sth->fetchrow_array) { my %node; $id++; $node{'name'} = $name; $node{'version'} = $version; $node{'text'} = $text; $node{'modified'} = $modified; $node{'id'} = $id; $node{'moderate'} = 0; $nodes{$name} = \%node; $ids{$name} = $id; } print " read $id nodes... "; # Grab all the content, and upgrade to ID from name $sth = $dbh->prepare("SELECT name,version,text,modified,comment FROM content"); $sth->execute; while ( my($name,$version,$text,$modified,$comment) = $sth->fetchrow_array) { my $id = $ids{$name}; if($id) { my %content; $content{'node_id'} = $id; $content{'version'} = $version; $content{'text'} = $text; $content{'modified'} = $modified; $content{'comment'} = $comment; $content{'moderated'} = 1; $contents{$id."-".$version} = \%content; } else { warn("There was no node entry for content with name '$name', unable to migrate it!"); } } print " read ".(scalar keys %contents)." contents... "; # Grab all the metadata, and upgrade to ID from node $sth = $dbh->prepare("SELECT node,version,metadata_type,metadata_value FROM metadata"); $sth->execute; my $i = 0; while( my($node,$version,$metadata_type,$metadata_value) = $sth->fetchrow_array) { my $id = $ids{$node}; if($id) { my %metadata; $metadata{'node_id'} = $id; $metadata{'version'} = $version; $metadata{'metadata_type'} = $metadata_type; $metadata{'metadata_value'} = $metadata_value; $metadatas{$id."-".($i++)} = \%metadata; } else { warn("There was no node entry for metadata with name (node) '$node', unable to migrate it!"); } } # Grab all the internal links $sth = $dbh->prepare("SELECT link_from,link_to FROM internal_links"); $sth->execute; while( my($link_from,$link_to) = $sth->fetchrow_array) { my %il; $il{'link_from'} = $link_from; $il{'link_to'} = $link_to; push @internal_links, \%il; } print "done\n"; # Return it all return (\%nodes,\%contents,\%metadatas,\@internal_links,\%ids); } sub fetch_upgrade_8_to_9 { # Compatible with 8_to_10 fetch_upgrade_8_to_10(@_); } # Fetch from schema version 8, and upgrade to version 10 sub fetch_upgrade_8_to_10 { my $dbh = shift; my %nodes; my %metadatas; my %contents; my @internal_links; print "Grabbing and upgrading old data... 
"; # Grab all the nodes my $sth = $dbh->prepare("SELECT id,name,version,text,modified FROM node"); $sth->execute; while( my($id,$name,$version,$text,$modified) = $sth->fetchrow_array) { my %node; $node{'name'} = $name; $node{'version'} = $version; $node{'text'} = $text; $node{'modified'} = $modified; $node{'id'} = $id; $node{'moderate'} = 0; $nodes{$name} = \%node; } # Grab all the content $sth = $dbh->prepare("SELECT node_id,version,text,modified,comment FROM content"); $sth->execute; while ( my($node_id,$version,$text,$modified,$comment) = $sth->fetchrow_array) { my %content; $content{'node_id'} = $node_id; $content{'version'} = $version; $content{'text'} = $text; $content{'modified'} = $modified; $content{'comment'} = $comment; $content{'moderated'} = 1; $contents{$node_id."-".$version} = \%content; } # Grab all the metadata $sth = $dbh->prepare("SELECT node_id,version,metadata_type,metadata_value FROM metadata"); $sth->execute; my $i = 0; while( my($node_id,$version,$metadata_type,$metadata_value) = $sth->fetchrow_array) { my %metadata; $metadata{'node_id'} = $node_id; $metadata{'version'} = $version; $metadata{'metadata_type'} = $metadata_type; $metadata{'metadata_value'} = $metadata_value; $metadatas{$node_id."-".($i++)} = \%metadata; } # Grab all the internal links $sth = $dbh->prepare("SELECT link_from,link_to FROM internal_links"); $sth->execute; while( my($link_from,$link_to) = $sth->fetchrow_array) { my %il; $il{'link_from'} = $link_from; $il{'link_to'} = $link_to; push @internal_links, \%il; } print "done\n"; # Return it all return (\%nodes,\%contents,\%metadatas,\@internal_links); } # Fetch from schema version 9, and upgrade to version 10 sub fetch_upgrade_9_to_10 { my $dbh = shift; my %nodes; my %metadatas; my %contents; my @internal_links; print "Grabbing and upgrading old data... 
"; # Grab all the nodes my $sth = $dbh->prepare("SELECT id,name,version,text,modified,moderate FROM node"); $sth->execute; while( my($id,$name,$version,$text,$modified,$moderate) = $sth->fetchrow_array) { my %node; $node{'name'} = $name; $node{'version'} = $version; $node{'text'} = $text; $node{'modified'} = $modified; $node{'id'} = $id; $node{'moderate'} = $moderate; $nodes{$name} = \%node; } # Grab all the content $sth = $dbh->prepare("SELECT node_id,version,text,modified,comment,moderated FROM content"); $sth->execute; while ( my($node_id,$version,$text,$modified,$comment,$moderated) = $sth->fetchrow_array) { my %content; $content{'node_id'} = $node_id; $content{'version'} = $version; $content{'text'} = $text; $content{'modified'} = $modified; $content{'comment'} = $comment; $content{'moderated'} = $moderated; $contents{$node_id."-".$version} = \%content; } # Grab all the metadata $sth = $dbh->prepare("SELECT node_id,version,metadata_type,metadata_value FROM metadata"); $sth->execute; my $i = 0; while( my($node_id,$version,$metadata_type,$metadata_value) = $sth->fetchrow_array) { my %metadata; $metadata{'node_id'} = $node_id; $metadata{'version'} = $version; $metadata{'metadata_type'} = $metadata_type; $metadata{'metadata_value'} = $metadata_value; $metadatas{$node_id."-".($i++)} = \%metadata; } # Grab all the internal links $sth = $dbh->prepare("SELECT link_from,link_to FROM internal_links"); $sth->execute; while( my($link_from,$link_to) = $sth->fetchrow_array) { my %il; $il{'link_from'} = $link_from; $il{'link_to'} = $link_to; push @internal_links, \%il; } print "done\n"; # Return it all return (\%nodes,\%contents,\%metadatas,\@internal_links); } # Get the version of the database schema sub get_database_version { my $dbh = shift; my $sql = "SELECT version FROM schema_info"; my $sth; eval{ $sth = $dbh->prepare($sql) }; if($@) { return "old"; } eval{ $sth->execute }; if($@) { return "old"; } my ($cur_schema) = $sth->fetchrow_array; unless($cur_schema) { return "old"; } return $cur_schema; } # Is an upgrade to the database required? sub get_database_upgrade_required { my ($dbh,$new_version) = @_; # Get the schema version my $schema_version = get_database_version($dbh); # Compare it if($schema_version eq $new_version) { # At latest version return undef; } elsif ($schema_version eq 'old' or $schema_version < $new_version) { return $schema_version."_to_".$new_version; } else { die "Aiee! We seem to be trying to downgrade the database schema from $schema_version to $new_version. Aborting.\n"; } } # Put the latest data into the latest database structure sub bulk_data_insert { my ($dbh, $nodesref, $contentsref, $metadataref, $internallinksref) = @_; print "Bulk inserting upgraded data... "; # Add nodes my $sth = $dbh->prepare("INSERT INTO node (id,name,version,text,modified,moderate) VALUES (?,?,?,?,?,?)"); foreach my $name (keys %$nodesref) { my %node = %{$nodesref->{$name}}; $sth->execute($node{'id'}, $node{'name'}, $node{'version'}, $node{'text'}, $node{'modified'}, $node{'moderate'}); } print "added ".(scalar keys %$nodesref)." nodes... 
"; # Add content $sth = $dbh->prepare("INSERT INTO content (node_id,version,text,modified,comment,moderated) VALUES (?,?,?,?,?,?)"); foreach my $key (keys %$contentsref) { my %content = %{$contentsref->{$key}}; $sth->execute($content{'node_id'}, $content{'version'}, $content{'text'}, $content{'modified'}, $content{'comment'}, $content{'moderated'}); } # Add metadata $sth = $dbh->prepare("INSERT INTO metadata (node_id,version,metadata_type,metadata_value) VALUES (?,?,?,?)"); foreach my $key (keys %$metadataref) { my %metadata = %{$metadataref->{$key}}; $sth->execute($metadata{'node_id'}, $metadata{'version'}, $metadata{'metadata_type'}, $metadata{'metadata_value'}); } # Add internal links $sth = $dbh->prepare("INSERT INTO internal_links (link_from,link_to) VALUES (?,?)"); foreach my $ilr (@$internallinksref) { my %il = %{$ilr}; $sth->execute($il{'link_from'}, $il{'link_to'}); } print "done\n"; } sub perm_check { my $dbh = shift; # If we can do all this, we'll be able to do a bulk upgrade too eval { my $sth = $dbh->prepare("CREATE TABLE dbtest (test int)"); $sth->execute; $sth = $dbh->prepare("CREATE INDEX dbtest_index ON dbtest (test)"); $sth->execute; $sth = $dbh->prepare("DROP TABLE dbtest"); $sth->execute; }; return $@; } Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Setup/SII.pm0000644000175000017500000000424412243757607021061 0ustar vagrantvagrantpackage Wiki::Toolkit::Setup::SII; use strict; use vars qw( $VERSION ); $VERSION = '0.03'; use DBI; use Search::InvertedIndex; use Wiki::Toolkit::Search::SII; use Carp; =head1 NAME Wiki::Toolkit::Setup::SII - Set up Search::InvertedIndex indexes for Wiki::Toolkit =head1 SYNOPSIS use Wiki::Toolkit::Setup::SII; my $indexdb = Search::InvertedIndex::DB::Mysql->new( -db_name => $dbname, -username => $dbuser, -password => $dbpass, -hostname => '', -table_name => 'siindex', -lock_mode => 'EX' ); Wiki::Toolkit::Setup::SII::setup( indexdb => $indexdb ); =head1 DESCRIPTION Set up L indexes for use with L Has only one function, C, which takes one mandatory argument, C, the C object to use as the backend, and one optional argument, C, a C corresponding to existing data that you wish to (re-)index. Note that any pre-existing L indexes stored in C will be I by this function, so if you have existing data you probably want to use the C parameter to get it re-indexed. =cut sub setup { my %args = @_; my $indexdb = $args{indexdb}; croak "Must supply indexdb" unless $indexdb; # Drop indexes if they already exist. $indexdb->open; $indexdb->clear; $indexdb->close; # If we've been passed a store, index all its data. my $store = $args{store}; if ( $store ) { my @nodes = $store->list_all_nodes; my $search = Wiki::Toolkit::Search::SII->new( indexdb => $indexdb ); foreach my $node ( @nodes ) { my $content = $store->retrieve_node( $node ); $search->index_node( $node, $content ); } } } =head1 AUTHOR Kake Pugh (kake@earth.li). =head1 COPYRIGHT Copyright (C) 2002 Kake Pugh. All Rights Reserved. This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. 
=head1 SEE ALSO L, L, L =cut 1; Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Setup/MySQL.pm0000644000175000017500000003254512243757607021407 0ustar vagrantvagrantpackage Wiki::Toolkit::Setup::MySQL; use strict; use vars qw( @ISA $VERSION $SCHEMA_VERSION ); use Wiki::Toolkit::Setup::Database; @ISA = qw( Wiki::Toolkit::Setup::Database ); $VERSION = '0.10'; use DBI; use Carp; $SCHEMA_VERSION = $VERSION*100; my $create_sql = { 8 => { schema_info => [ qq| CREATE TABLE schema_info ( version int(10) NOT NULL default 0 ) |, qq| INSERT INTO schema_info VALUES (8) | ], node => [ qq| CREATE TABLE node ( id integer NOT NULL AUTO_INCREMENT, name varchar(200) NOT NULL DEFAULT '', version int(10) NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL, PRIMARY KEY (id) ) | ], content => [ qq| CREATE TABLE content ( node_id integer NOT NULL, version int(10) NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL, comment mediumtext NOT NULL default '', PRIMARY KEY (node_id, version) ) | ], internal_links => [ qq| CREATE TABLE internal_links ( link_from varchar(200) NOT NULL default '', link_to varchar(200) NOT NULL default '', PRIMARY KEY (link_from, link_to) ) | ], metadata => [ qq| CREATE TABLE metadata ( node_id integer NOT NULL, version int(10) NOT NULL default 0, metadata_type varchar(200) NOT NULL DEFAULT '', metadata_value mediumtext NOT NULL DEFAULT '' ) |, qq| CREATE INDEX metadata_index ON metadata(node_id, version, metadata_type, metadata_value(10)) | ] }, 9 => { schema_info => [ qq| CREATE TABLE schema_info ( version int(10) NOT NULL default 0 ) |, qq| INSERT INTO schema_info VALUES (9) | ], node => [ qq| CREATE TABLE node ( id integer NOT NULL AUTO_INCREMENT, name varchar(200) NOT NULL DEFAULT '', version int(10) NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL, moderate bool NOT NULL default '0', PRIMARY KEY (id) ) | ], content => [ qq| CREATE TABLE content ( node_id integer NOT NULL, version int(10) NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL, comment mediumtext NOT NULL default '', moderated bool NOT NULL default '1', PRIMARY KEY (node_id, version) ) | ], internal_links => [ qq| CREATE TABLE internal_links ( link_from varchar(200) NOT NULL default '', link_to varchar(200) NOT NULL default '', PRIMARY KEY (link_from, link_to) ) | ], metadata => [ qq| CREATE TABLE metadata ( node_id integer NOT NULL, version int(10) NOT NULL default 0, metadata_type varchar(200) NOT NULL DEFAULT '', metadata_value mediumtext NOT NULL DEFAULT '' ) |, qq| CREATE INDEX metadata_index ON metadata(node_id, version, metadata_type, metadata_value(10)) | ] }, 10 => { schema_info => [ qq| CREATE TABLE schema_info ( version int(10) NOT NULL default 0 ) |, qq| INSERT INTO schema_info VALUES (10) | ], node => [ qq| CREATE TABLE node ( id integer NOT NULL AUTO_INCREMENT, name varchar(200) NOT NULL DEFAULT '', version int(10) NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL, moderate bool NOT NULL default '0', PRIMARY KEY (id) ) |, qq| CREATE UNIQUE INDEX node_name ON node (name) | ], content => [ qq| CREATE TABLE content ( node_id integer NOT NULL, version int(10) NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL, comment mediumtext NOT NULL default '', moderated bool NOT NULL default '1', verified datetime default NULL, verified_info mediumtext NOT NULL default '', PRIMARY KEY (node_id, version) ) | ], 
internal_links => [ qq| CREATE TABLE internal_links ( link_from varchar(200) NOT NULL default '', link_to varchar(200) NOT NULL default '', PRIMARY KEY (link_from, link_to) ) | ], metadata => [ qq| CREATE TABLE metadata ( node_id integer NOT NULL, version int(10) NOT NULL default 0, metadata_type varchar(200) NOT NULL DEFAULT '', metadata_value mediumtext NOT NULL DEFAULT '' ) |, qq| CREATE INDEX metadata_index ON metadata(node_id, version, metadata_type, metadata_value(10)) | ] }, }; my %fetch_upgrades = ( old_to_8 => 1, old_to_9 => 1, old_to_10 => 1, '8_to_9' => 1, '8_to_10' => 1, ); my %upgrades = ( '9_to_10' => [ sub { my $dbh = shift; my $sth = $dbh->prepare('SHOW INDEX FROM node WHERE key_name="node_name"'); $sth->execute(); unless ( $sth->rows ) { $dbh->do('CREATE UNIQUE INDEX node_name ON node (name)') or croak $dbh->errstr; } }, qq| ALTER TABLE content ADD COLUMN verified datetime default NULL |, qq| ALTER TABLE content ADD COLUMN verified_info mediumtext NOT NULL default '' |, qq| UPDATE schema_info SET version = 10 | ] ); =head1 NAME Wiki::Toolkit::Setup::MySQL - Set up tables for a Wiki::Toolkit store in a MySQL database. =head1 SYNOPSIS use Wiki::Toolkit::Setup::MySQL; Wiki::Toolkit::Setup::MySQL::setup($dbname, $dbuser, $dbpass, $dbhost); Omit $dbhost if the database is local. =head1 DESCRIPTION Set up a MySQL database for use as a Wiki::Toolkit store. =head1 FUNCTIONS =over 4 =item B use Wiki::Toolkit::Setup::MySQL; Wiki::Toolkit::Setup::MySQL::setup($dbname, $dbuser, $dbpass, $dbhost); or Wiki::Toolkit::Setup::Mysql::setup( $dbh ); You can either provide an active database handle C<$dbh> or connection parameters. If you provide connection parameters the following arguments are mandatory -- the database name, the username and the password. The username must be able to create and drop tables in the database. The $dbhost argument is optional -- omit it if the database is local. B If a table that the module wants to create already exists, C will leave it alone. This means that you can safely run this on an existing L database to bring the schema up to date with the current L version. If you wish to completely start again with a fresh database, run C first. =cut sub setup { my @args = @_; my $dbh = _get_dbh( @args ); my $disconnect_required = _disconnect_required( @args ); my $wanted_schema = _get_wanted_schema( @args ) || $SCHEMA_VERSION; die "No schema information for requested schema version $wanted_schema\n" unless $create_sql->{$wanted_schema}; # Check whether tables exist my %tables = fetch_tables_listing($dbh, $wanted_schema); # Do we need to upgrade the schema of existing tables? 
# (Don't check if no tables currently exist) my $upgrade_schema; my @cur_data; if(scalar keys %tables > 0) { $upgrade_schema = Wiki::Toolkit::Setup::Database::get_database_upgrade_required($dbh,$wanted_schema); } if($upgrade_schema) { if ($fetch_upgrades{$upgrade_schema}) { # Grab current data print "Upgrading: $upgrade_schema\n"; @cur_data = eval("&Wiki::Toolkit::Setup::Database::fetch_upgrade_".$upgrade_schema."(\$dbh)"); if($@) { warn $@; } # Check to make sure we can create, index and drop tables # before doing any more my $perm_check = Wiki::Toolkit::Setup::Database::perm_check($dbh); if ($perm_check) { die "Unable to create/drop database tables as required by upgrade: $perm_check"; } # Drop the current tables cleardb($dbh); # Grab new list of tables %tables = fetch_tables_listing($dbh, $wanted_schema); } } # Set up tables if not found foreach my $required ( keys %{$create_sql->{$wanted_schema}} ) { if ( $tables{$required} ) { print "Table $required already exists... skipping...\n"; } else { print "Creating table $required... done\n"; foreach my $sql ( @{$create_sql->{$wanted_schema}->{$required}} ) { $dbh->do($sql) or croak $dbh->errstr; } } } # If upgrading, load in the new data if($upgrade_schema) { if ($fetch_upgrades{$upgrade_schema}) { Wiki::Toolkit::Setup::Database::bulk_data_insert($dbh,@cur_data); } else { print "Upgrading schema: $upgrade_schema\n"; my @updates = @{$upgrades{$upgrade_schema}}; foreach my $update (@updates) { if(ref($update) eq "CODE") { &$update($dbh); } elsif(ref($update) eq "ARRAY") { foreach my $nupdate (@$update) { $dbh->do($nupdate); } } else { $dbh->do($update); } } } } # Clean up if we made our own dbh. $dbh->disconnect if $disconnect_required; } # Internal method - what Wiki::Toolkit tables are defined? sub fetch_tables_listing { my $dbh = shift; my $wanted_schema = shift; # Check what tables exist my $sth = $dbh->prepare("SHOW TABLES") or croak $dbh->errstr; $sth->execute; my %tables; while ( my $table = $sth->fetchrow_array ) { exists $create_sql->{$wanted_schema}->{$table} and $tables{$table} = 1; } return %tables; } =item B use Wiki::Toolkit::Setup::MySQL; # Clear out all Wiki::Toolkit tables from the database. Wiki::Toolkit::Setup::MySQL::cleardb($dbname, $dbuser, $dbpass, $dbhost); or Wiki::Toolkit::Setup::Mysql::cleardb( $dbh ); You can either provide an active database handle C<$dbh> or connection parameters. If you provide connection parameters the following arguments are mandatory -- the database name, the username and the password. The username must be able to drop tables in the database. The $dbhost argument is optional -- omit if the database is local. Clears out all L store tables from the database. B that this will lose all your data; you probably only want to use this for testing purposes or if you really screwed up somewhere. Note also that it doesn't touch any L search backend tables; if you have any of those in the same or a different database see either L or L, depending on which search backend you're using. =cut sub cleardb { my @args = @_; my $dbh = _get_dbh( @args ); my $disconnect_required = _disconnect_required( @args ); print "Dropping tables... "; $dbh->do("DROP TABLE IF EXISTS " . join( ",", keys %{$create_sql->{$SCHEMA_VERSION}} ) ) or croak $dbh->errstr; print "done\n"; # Clean up if we made our own dbh. $dbh->disconnect if $disconnect_required; } sub _get_dbh { # Database handle passed in. if ( ref $_[0] and ref $_[0] eq 'DBI::db' ) { return $_[0]; } # Args passed as hashref. 
if ( ref $_[0] and ref $_[0] eq 'HASH' ) { my %args = %{$_[0]}; if ( $args{dbh} ) { return $args{dbh}; } else { return _make_dbh( %args ); } } # Args passed as list of connection details. return _make_dbh( dbname => $_[0], dbuser => $_[1], dbpass => $_[2], dbhost => $_[3], ); } sub _get_wanted_schema { # Database handle passed in. if ( ref $_[0] and ref $_[0] eq 'DBI::db' ) { return undef; } # Args passed as hashref. if ( ref $_[0] and ref $_[0] eq 'HASH' ) { my %args = %{$_[0]}; return $args{wanted_schema}; } } sub _disconnect_required { # Database handle passed in. if ( ref $_[0] and ref $_[0] eq 'DBI::db' ) { return 0; } # Args passed as hashref. if ( ref $_[0] and ref $_[0] eq 'HASH' ) { my %args = %{$_[0]}; if ( $args{dbh} ) { return 0; } else { return 1; } } # Args passed as list of connection details. return 1; } sub _make_dbh { my %args = @_; my $dsn = "dbi:mysql:$args{dbname}"; $dsn .= ";host=$args{dbhost}" if $args{dbhost}; my $dbh = DBI->connect($dsn, $args{dbuser}, $args{dbpass}, { PrintError => 1, RaiseError => 1, AutoCommit => 1 } ) or croak DBI::errstr; return $dbh; } =back =head1 ALTERNATIVE CALLING SYNTAX As requested by Podmaster. Instead of passing arguments to the methods as ($dbname, $dbuser, $dbpass, $dbhost) you can pass them as ( { dbname => $dbname, dbuser => $dbuser, dbpass => $dbpass, dbhost => $dbhost } ) or indeed as ( { dbh => $dbh } ) Note that's a hashref, not a hash. =head1 AUTHOR Kake Pugh (kake@earth.li). =head1 COPYRIGHT Copyright (C) 2002-2004 Kake Pugh. All Rights Reserved. Copyright (C) 2006-2008 the Wiki::Toolkit team. All Rights Reserved. This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =head1 SEE ALSO L, L, L =cut 1; Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Setup/Pg.pm0000644000175000017500000003661512243757607021012 0ustar vagrantvagrantpackage Wiki::Toolkit::Setup::Pg; use strict; use vars qw( @ISA $VERSION $SCHEMA_VERSION ); use Wiki::Toolkit::Setup::Database; @ISA = qw( Wiki::Toolkit::Setup::Database ); $VERSION = '0.10'; use DBI; use Carp; $SCHEMA_VERSION = $VERSION*100; my $create_sql = { 8 => { schema_info => [ qq| CREATE TABLE schema_info ( version integer NOT NULL default 0 ) |, qq| INSERT INTO schema_info VALUES (8) | ], node => [ qq| CREATE SEQUENCE node_seq |, qq| CREATE TABLE node ( id integer NOT NULL DEFAULT NEXTVAL('node_seq'), name varchar(200) NOT NULL DEFAULT '', version integer NOT NULL default 0, text text NOT NULL default '', modified timestamp without time zone default NULL, CONSTRAINT pk_id PRIMARY KEY (id) ) |, qq| CREATE UNIQUE INDEX node_name ON node (name) | ], content => [ qq| CREATE TABLE content ( node_id integer NOT NULL, version integer NOT NULL default 0, text text NOT NULL default '', modified timestamp without time zone default NULL, comment text NOT NULL default '', CONSTRAINT pk_node_id PRIMARY KEY (node_id,version), CONSTRAINT fk_node_id FOREIGN KEY (node_id) REFERENCES node (id) ) | ], internal_links => [ qq| CREATE TABLE internal_links ( link_from varchar(200) NOT NULL default '', link_to varchar(200) NOT NULL default '' ) |, qq| CREATE UNIQUE INDEX internal_links_pkey ON internal_links (link_from, link_to) | ], metadata => [ qq| CREATE TABLE metadata ( node_id integer NOT NULL, version integer NOT NULL default 0, metadata_type varchar(200) NOT NULL DEFAULT '', metadata_value text NOT NULL DEFAULT '', CONSTRAINT fk_node_id FOREIGN KEY (node_id) REFERENCES node (id) ) |, qq| CREATE INDEX metadata_index ON metadata (node_id, version, metadata_type, 
metadata_value) | ] }, 9 => { schema_info => [ qq| CREATE TABLE schema_info ( version integer NOT NULL default 0 ) |, qq| INSERT INTO schema_info VALUES (9) | ], node => [ qq| CREATE SEQUENCE node_seq |, qq| CREATE TABLE node ( id integer NOT NULL DEFAULT NEXTVAL('node_seq'), name varchar(200) NOT NULL DEFAULT '', version integer NOT NULL default 0, text text NOT NULL default '', modified timestamp without time zone default NULL, moderate boolean NOT NULL default '0', CONSTRAINT pk_id PRIMARY KEY (id) ) |, qq| CREATE UNIQUE INDEX node_name ON node (name) | ], content => [ qq| CREATE TABLE content ( node_id integer NOT NULL, version integer NOT NULL default 0, text text NOT NULL default '', modified timestamp without time zone default NULL, comment text NOT NULL default '', moderated boolean NOT NULL default '1', CONSTRAINT pk_node_id PRIMARY KEY (node_id,version), CONSTRAINT fk_node_id FOREIGN KEY (node_id) REFERENCES node (id) ) | ], internal_links => [ qq| CREATE TABLE internal_links ( link_from varchar(200) NOT NULL default '', link_to varchar(200) NOT NULL default '' ) |, qq| CREATE UNIQUE INDEX internal_links_pkey ON internal_links (link_from, link_to) | ], metadata => [ qq| CREATE TABLE metadata ( node_id integer NOT NULL, version integer NOT NULL default 0, metadata_type varchar(200) NOT NULL DEFAULT '', metadata_value text NOT NULL DEFAULT '', CONSTRAINT fk_node_id FOREIGN KEY (node_id) REFERENCES node (id) ) |, qq| CREATE INDEX metadata_index ON metadata (node_id, version, metadata_type, metadata_value) | ] }, 10 => { schema_info => [ qq| CREATE TABLE schema_info ( version integer NOT NULL default 0 ) |, qq| INSERT INTO schema_info VALUES (10) | ], node => [ qq| CREATE SEQUENCE node_seq |, qq| CREATE TABLE node ( id integer NOT NULL DEFAULT NEXTVAL('node_seq'), name varchar(200) NOT NULL DEFAULT '', version integer NOT NULL default 0, text text NOT NULL default '', modified timestamp without time zone default NULL, moderate boolean NOT NULL default '0', CONSTRAINT pk_id PRIMARY KEY (id) ) |, qq| CREATE UNIQUE INDEX node_name ON node (name) | ], content => [ qq| CREATE TABLE content ( node_id integer NOT NULL, version integer NOT NULL default 0, text text NOT NULL default '', modified timestamp without time zone default NULL, comment text NOT NULL default '', moderated boolean NOT NULL default '1', verified timestamp without time zone default NULL, verified_info text NOT NULL default '', CONSTRAINT pk_node_id PRIMARY KEY (node_id,version), CONSTRAINT fk_node_id FOREIGN KEY (node_id) REFERENCES node (id) ) | ], internal_links => [ qq| CREATE TABLE internal_links ( link_from varchar(200) NOT NULL default '', link_to varchar(200) NOT NULL default '' ) |, qq| CREATE UNIQUE INDEX internal_links_pkey ON internal_links (link_from, link_to) | ], metadata => [ qq| CREATE TABLE metadata ( node_id integer NOT NULL, version integer NOT NULL default 0, metadata_type varchar(200) NOT NULL DEFAULT '', metadata_value text NOT NULL DEFAULT '', CONSTRAINT fk_node_id FOREIGN KEY (node_id) REFERENCES node (id) ) |, qq| CREATE INDEX metadata_index ON metadata (node_id, version, metadata_type, metadata_value) | ] }, }; my %upgrades = ( old_to_8 => [ qq| CREATE SEQUENCE node_seq; ALTER TABLE node ADD COLUMN id INTEGER; UPDATE node SET id = NEXTVAL('node_seq'); |, qq| ALTER TABLE node ALTER COLUMN id SET NOT NULL; ALTER TABLE node ALTER COLUMN id SET DEFAULT NEXTVAL('node_seq'); |, qq| DROP INDEX node_pkey; ALTER TABLE node ADD CONSTRAINT pk_id PRIMARY KEY (id); CREATE UNIQUE INDEX node_name ON node 
(name) |, qq| ALTER TABLE content ADD COLUMN node_id INTEGER; UPDATE content SET node_id = (SELECT id FROM node where node.name = content.name) |, qq| DELETE FROM content WHERE node_id IS NULL; ALTER TABLE content ALTER COLUMN node_id SET NOT NULL; ALTER TABLE content DROP COLUMN name; ALTER TABLE content ADD CONSTRAINT pk_node_id PRIMARY KEY (node_id,version); ALTER TABLE content ADD CONSTRAINT fk_node_id FOREIGN KEY (node_id) REFERENCES node (id) |, qq| ALTER TABLE metadata ADD COLUMN node_id INTEGER; UPDATE metadata SET node_id = (SELECT id FROM node where node.name = metadata.node) |, qq| DELETE FROM metadata WHERE node_id IS NULL; ALTER TABLE metadata ALTER COLUMN node_id SET NOT NULL; ALTER TABLE metadata DROP COLUMN node; ALTER TABLE metadata ADD CONSTRAINT fk_node_id FOREIGN KEY (node_id) REFERENCES node (id); CREATE INDEX metadata_index ON metadata (node_id, version, metadata_type, metadata_value) |, qq| CREATE TABLE schema_info (version integer NOT NULL default 0); INSERT INTO schema_info VALUES (8) | ], '8_to_9' => [ qq| ALTER TABLE node ADD COLUMN moderate boolean; UPDATE node SET moderate = '0'; ALTER TABLE node ALTER COLUMN moderate SET DEFAULT '0'; ALTER TABLE node ALTER COLUMN moderate SET NOT NULL; |, qq| ALTER TABLE content ADD COLUMN moderated boolean; UPDATE content SET moderated = '1'; ALTER TABLE content ALTER COLUMN moderated SET DEFAULT '1'; ALTER TABLE content ALTER COLUMN moderated SET NOT NULL; UPDATE schema_info SET version = 9; | ], '9_to_10' => [ qq| ALTER TABLE content ADD COLUMN verified timestamp without time zone default NULL; ALTER TABLE content ADD COLUMN verified_info text NOT NULL default ''; |, qq| UPDATE schema_info SET version = 10; | ], ); my @old_to_10 = ($upgrades{'old_to_8'},$upgrades{'8_to_9'},$upgrades{'9_to_10'}); my @eight_to_10 = ($upgrades{'8_to_9'},$upgrades{'9_to_10'}); $upgrades{'old_to_10'} = \@old_to_10; $upgrades{'8_to_10'} = \@eight_to_10; =head1 NAME Wiki::Toolkit::Setup::Pg - Set up tables for a Wiki::Toolkit store in a Postgres database. =head1 SYNOPSIS use Wiki::Toolkit::Setup::Pg; Wiki::Toolkit::Setup::Pg::setup($dbname, $dbuser, $dbpass, $dbhost); Omit $dbhost if the database is local. =head1 DESCRIPTION Set up a Postgres database for use as a Wiki::Toolkit store. =head1 FUNCTIONS =over 4 =item B use Wiki::Toolkit::Setup::Pg; Wiki::Toolkit::Setup::Pg::setup($dbname, $dbuser, $dbpass, $dbhost); or Wiki::Toolkit::Setup::Pg::setup( $dbh ); You can either provide an active database handle C<$dbh> or connection parameters. If you provide connection parameters the following arguments are mandatory -- the database name, the username and the password. The username must be able to create and drop tables in the database. The $dbhost argument is optional -- omit it if the database is local. B If a table that the module wants to create already exists, C will leave it alone. This means that you can safely run this on an existing L database to bring the schema up to date with the current L version. If you wish to completely start again with a fresh database, run C first. =cut sub setup { my @args = @_; my $dbh = _get_dbh( @args ); my $disconnect_required = _disconnect_required( @args ); my $wanted_schema = _get_wanted_schema( @args ) || $SCHEMA_VERSION; die "No schema information for requested schema version $wanted_schema\n" unless $create_sql->{$wanted_schema}; # Check whether tables exist my $sql = "SELECT tablename FROM pg_tables WHERE tablename in (" . join( ",", map { $dbh->quote($_) } keys %{$create_sql->{$wanted_schema}} ) . 
")"; my $sth = $dbh->prepare($sql) or croak $dbh->errstr; $sth->execute; my %tables; while ( my $table = $sth->fetchrow_array ) { exists $create_sql->{$wanted_schema}->{$table} and $tables{$table} = 1; } # Do we need to upgrade the schema of existing tables? # (Don't check if no tables currently exist) my $upgrade_schema; if(scalar keys %tables > 0) { $upgrade_schema = Wiki::Toolkit::Setup::Database::get_database_upgrade_required($dbh,$wanted_schema); } else { print "Skipping schema upgrade check - no tables found\n"; } # Set up tables if not found foreach my $required ( reverse sort keys %{$create_sql->{$wanted_schema}} ) { if ( $tables{$required} ) { print "Table $required already exists... skipping...\n"; } else { print "Creating table $required... done\n"; foreach my $sql ( @{ $create_sql->{$wanted_schema}->{$required} } ) { $dbh->do($sql) or croak $dbh->errstr; } } } # Do the upgrade if required if($upgrade_schema) { print "Upgrading schema: $upgrade_schema\n"; my @updates = @{$upgrades{$upgrade_schema}}; foreach my $update (@updates) { if(ref($update) eq "CODE") { &$update($dbh); } elsif(ref($update) eq "ARRAY") { foreach my $nupdate (@$update) { $dbh->do($nupdate); } } else { $dbh->do($update); } } } # Clean up if we made our own dbh. $dbh->disconnect if $disconnect_required; } =item B use Wiki::Toolkit::Setup::Pg; # Clear out all Wiki::Toolkit tables from the database. Wiki::Toolkit::Setup::Pg::cleardb($dbname, $dbuser, $dbpass, $dbhost); or Wiki::Toolkit::Setup::Pg::cleardb( $dbh ); You can either provide an active database handle C<$dbh> or connection parameters. If you provide connection parameters the following arguments are mandatory -- the database name, the username and the password. The username must be able to drop tables in the database. The $dbhost argument is optional -- omit it if the database is local. Clears out all L store tables from the database. B that this will lose all your data; you probably only want to use this for testing purposes or if you really screwed up somewhere. Note also that it doesn't touch any L search backend tables; if you have any of those in the same or a different database see L or L, depending on which search backend you're using. =cut sub cleardb { my @args = @_; my $dbh = _get_dbh( @args ); my $disconnect_required = _disconnect_required( @args ); print "Dropping tables... "; my $sql = "SELECT tablename FROM pg_tables WHERE tablename in (" . join( ",", map { $dbh->quote($_) } keys %{$create_sql->{$SCHEMA_VERSION}} ) . ")"; foreach my $tableref (@{$dbh->selectall_arrayref($sql)}) { $dbh->do("DROP TABLE $tableref->[0] CASCADE") or croak $dbh->errstr; } $sql = "SELECT relname FROM pg_statio_all_sequences WHERE relname = 'node_seq'"; foreach my $seqref (@{$dbh->selectall_arrayref($sql)}) { $dbh->do("DROP SEQUENCE $seqref->[0]") or croak $dbh->errstr; } print "done\n"; # Clean up if we made our own dbh. $dbh->disconnect if $disconnect_required; } sub _get_dbh { # Database handle passed in. if ( ref $_[0] and ref $_[0] eq 'DBI::db' ) { return $_[0]; } # Args passed as hashref. if ( ref $_[0] and ref $_[0] eq 'HASH' ) { my %args = %{$_[0]}; if ( $args{dbh} ) { return $args{dbh}; } else { return _make_dbh( %args ); } } # Args passed as list of connection details. return _make_dbh( dbname => $_[0], dbuser => $_[1], dbpass => $_[2], dbhost => $_[3], ); } sub _get_wanted_schema { # Database handle passed in. if ( ref $_[0] and ref $_[0] eq 'DBI::db' ) { return undef; } # Args passed as hashref. 
if ( ref $_[0] and ref $_[0] eq 'HASH' ) { my %args = %{$_[0]}; return $args{wanted_schema}; } } sub _disconnect_required { # Database handle passed in. if ( ref $_[0] and ref $_[0] eq 'DBI::db' ) { return 0; } # Args passed as hashref. if ( ref $_[0] and ref $_[0] eq 'HASH' ) { my %args = %{$_[0]}; if ( $args{dbh} ) { return 0; } else { return 1; } } # Args passed as list of connection details. return 1; } sub _make_dbh { my %args = @_; my $dsn = "dbi:Pg:dbname=$args{dbname}"; $dsn .= ";host=$args{dbhost}" if $args{dbhost}; my $dbh = DBI->connect($dsn, $args{dbuser}, $args{dbpass}, { PrintError => 1, RaiseError => 1, AutoCommit => 1 } ) or croak DBI::errstr; return $dbh; } =back =head1 ALTERNATIVE CALLING SYNTAX As requested by Podmaster. Instead of passing arguments to the methods as ($dbname, $dbuser, $dbpass, $dbhost) you can pass them as ( { dbname => $dbname, dbuser => $dbuser, dbpass => $dbpass, dbhost => $dbhost } ) or indeed as ( { dbh => $dbh } ) Note that's a hashref, not a hash. =head1 AUTHOR Kake Pugh (kake@earth.li). =head1 COPYRIGHT Copyright (C) 2002-2004 Kake Pugh. All Rights Reserved. Copyright (C) 2006-2008 the Wiki::Toolkit team. All Rights Reserved. This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =head1 SEE ALSO L, L, L =cut 1; Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Setup/DBIxFTSMySQL.pm0000644000175000017500000000605112243757607022464 0ustar vagrantvagrantpackage Wiki::Toolkit::Setup::DBIxFTSMySQL; use strict; use vars qw( $VERSION ); $VERSION = 0.04; use DBI; use DBIx::FullTextSearch; use Carp; =head1 NAME Wiki::Toolkit::Setup::DBIxFTSMySQL - set up fulltext indexes for Wiki::Toolkit =head1 SYNOPSIS use Wiki::Toolkit::Setup::DBIxFTSMySQL; Wiki::Toolkit::Setup::DBIxFTSMySQL::setup($dbname, $dbuser, $dbpass, $dbhost); Omit $dbhost if the database is local. =head1 DESCRIPTION Set up DBIx::FullTextSearch indexes for use with Wiki::Toolkit. Has only one function, C, which takes as arguments B the database name, the username and the password B a database handle . The username must be able to create and drop tables in the database. The $dbhost argument is optional -- omit it if the database is local. Note that any pre-existing L indexes stored in the database will be I by this function, so if you have existing data you probably want to use the C parameter to get it re-indexed. =cut sub setup { my $dbh = _get_dbh( @_ ); # Drop FTS indexes if they already exist. my $fts = DBIx::FullTextSearch->open($dbh, "_content_and_title_fts"); $fts->drop if $fts; $fts = DBIx::FullTextSearch->open($dbh, "_title_fts"); $fts->drop if $fts; # Set up FullText indexes and index anything already extant. 
my $fts_all = DBIx::FullTextSearch->create($dbh, "_content_and_title_fts", frontend => "table", backend => "phrase", table_name => "node", column_name => ["name","text"], column_id_name => "name", stemmer => "en-uk"); my $fts_title = DBIx::FullTextSearch->create($dbh, "_title_fts", frontend => "table", backend => "phrase", table_name => "node", column_name => "name", column_id_name => "name", stemmer => "en-uk"); my $sql = "SELECT name FROM node"; my $sth = $dbh->prepare($sql); $sth->execute(); while (my ($name, $version) = $sth->fetchrow_array) { $fts_title->index_document($name); $fts_all->index_document($name); } $sth->finish; } sub _get_dbh { return $_[0] if ( ref $_[0] and ref $_[0] eq 'DBI::db' ); my ($dbname, $dbuser, $dbpass, $dbhost) = @_; my $dsn = "dbi:mysql:$dbname"; $dsn .= ";host=$dbhost" if $dbhost; my $dbh = DBI->connect($dsn, $dbuser, $dbpass, { PrintError => 1, RaiseError => 1, AutoCommit => 1 } ) or croak DBI::errstr; return $dbh; } =head1 AUTHOR Kake Pugh (kake@earth.li). =head1 COPYRIGHT Copyright (C) 2002-2004 Kake Pugh. All Rights Reserved. This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =head1 SEE ALSO L, L, L =cut 1; Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Setup/SQLite.pm0000644000175000017500000002731512243757607021602 0ustar vagrantvagrantpackage Wiki::Toolkit::Setup::SQLite; use strict; use vars qw( @ISA $VERSION $SCHEMA_VERSION ); use Wiki::Toolkit::Setup::Database; @ISA = qw( Wiki::Toolkit::Setup::Database ); $VERSION = '0.10'; use DBI; use Carp; $SCHEMA_VERSION = $VERSION*100; my $create_sql = { 8 => { schema_info => [ qq| CREATE TABLE schema_info ( version integer NOT NULL default 0 ); |, qq| INSERT INTO schema_info VALUES (8) | ], node => [ qq| CREATE TABLE node ( id integer NOT NULL PRIMARY KEY AUTOINCREMENT, name varchar(200) NOT NULL DEFAULT '', version integer NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL ) | ], content => [ qq| CREATE TABLE content ( node_id integer NOT NULL, version integer NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL, comment mediumtext NOT NULL default '', PRIMARY KEY (node_id, version) ) | ], internal_links => [ qq| CREATE TABLE internal_links ( link_from varchar(200) NOT NULL default '', link_to varchar(200) NOT NULL default '', PRIMARY KEY (link_from, link_to) ) | ], metadata => [ qq| CREATE TABLE metadata ( node_id integer NOT NULL, version integer NOT NULL default 0, metadata_type varchar(200) NOT NULL DEFAULT '', metadata_value mediumtext NOT NULL DEFAULT '' ) | ] }, 9 => { schema_info => [ qq| CREATE TABLE schema_info ( version integer NOT NULL default 0 ); |, qq| INSERT INTO schema_info VALUES (9) | ], node => [ qq| CREATE TABLE node ( id integer NOT NULL PRIMARY KEY AUTOINCREMENT, name varchar(200) NOT NULL DEFAULT '', version integer NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL, moderate boolean NOT NULL default '0' ) | ], content => [ qq| CREATE TABLE content ( node_id integer NOT NULL, version integer NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL, comment mediumtext NOT NULL default '', moderated boolean NOT NULL default '1', PRIMARY KEY (node_id, version) ) | ], internal_links => [ qq| CREATE TABLE internal_links ( link_from varchar(200) NOT NULL default '', link_to varchar(200) NOT NULL default '', PRIMARY KEY (link_from, link_to) ) | ], metadata => [ qq| CREATE TABLE metadata ( node_id integer NOT 
NULL, version integer NOT NULL default 0, metadata_type varchar(200) NOT NULL DEFAULT '', metadata_value mediumtext NOT NULL DEFAULT '' ) | ] }, 10 => { schema_info => [ qq| CREATE TABLE schema_info ( version integer NOT NULL default 0 ); |, qq| INSERT INTO schema_info VALUES (10) | ], node => [ qq| CREATE TABLE node ( id integer NOT NULL PRIMARY KEY AUTOINCREMENT, name varchar(200) NOT NULL DEFAULT '', version integer NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL, moderate boolean NOT NULL default '0' ) |, qq| CREATE UNIQUE INDEX node_name ON node (name) | ], content => [ qq| CREATE TABLE content ( node_id integer NOT NULL, version integer NOT NULL default 0, text mediumtext NOT NULL default '', modified datetime default NULL, comment mediumtext NOT NULL default '', moderated boolean NOT NULL default '1', verified datetime default NULL, verified_info mediumtext NOT NULL default '', PRIMARY KEY (node_id, version) ) | ], internal_links => [ qq| CREATE TABLE internal_links ( link_from varchar(200) NOT NULL default '', link_to varchar(200) NOT NULL default '', PRIMARY KEY (link_from, link_to) ) | ], metadata => [ qq| CREATE TABLE metadata ( node_id integer NOT NULL, version integer NOT NULL default 0, metadata_type varchar(200) NOT NULL DEFAULT '', metadata_value mediumtext NOT NULL DEFAULT '' ) | ] }, }; my %fetch_upgrades = ( old_to_8 => 1, old_to_9 => 1, old_to_10 => 1, '8_to_9' => 1, '8_to_10' => 1, '9_to_10' => 1, ); my %upgrades = (); =head1 NAME Wiki::Toolkit::Setup::SQLite - Set up tables for a Wiki::Toolkit store in a SQLite database. =head1 SYNOPSIS use Wiki::Toolkit::Setup::SQLite; Wiki::Toolkit::Setup::SQLite::setup( $dbfile ); =head1 DESCRIPTION Set up a SQLite database for use as a Wiki::Toolkit store. =head1 FUNCTIONS =over 4 =item B use Wiki::Toolkit::Setup::SQLite; Wiki::Toolkit::Setup::SQLite::setup( $filename ); or Wiki::Toolkit::Setup::SQLite::setup( $dbh ); Takes one argument - B the name of the file that the SQLite database is stored in B an active database handle. B If a table that the module wants to create already exists, C will leave it alone. This means that you can safely run this on an existing L database to bring the schema up to date with the current L version. If you wish to completely start again with a fresh database, run C first. An optional second argument may be passed specifying the schema version to use; this is B intended to be used during unit testing and should not normally be specified. =cut sub setup { my @args = @_; my $dbh = _get_dbh( @args ); my $disconnect_required = _disconnect_required( @args ); my $wanted_schema = _get_wanted_schema( @args ) || $SCHEMA_VERSION; die "No schema information for requested schema version $wanted_schema\n" unless $create_sql->{$wanted_schema}; # Check whether tables exist, set them up if not. my %tables = fetch_tables_listing($dbh, $wanted_schema); # Do we need to upgrade the schema? 
# (Don't check if no tables currently exist) my $upgrade_schema; my @cur_data; if(scalar keys %tables > 0) { $upgrade_schema = Wiki::Toolkit::Setup::Database::get_database_upgrade_required($dbh,$wanted_schema); } if($upgrade_schema) { if ($fetch_upgrades{$upgrade_schema}) { # Grab current data print "Upgrading: $upgrade_schema\n"; @cur_data = eval("&Wiki::Toolkit::Setup::Database::fetch_upgrade_".$upgrade_schema."(\$dbh)"); # Drop the current tables cleardb($dbh); # Grab new list of tables %tables = fetch_tables_listing($dbh, $wanted_schema); } } # Set up tables if not found foreach my $required ( keys %{$create_sql->{$wanted_schema}} ) { if ( $tables{$required} ) { print "Table $required already exists... skipping...\n"; } else { print "Creating table $required... done\n"; foreach my $sql (@{$create_sql->{$wanted_schema}->{$required}} ) { $dbh->do($sql) or croak $dbh->errstr; } } } # If upgrading, load in the new data if($upgrade_schema) { if ($fetch_upgrades{$upgrade_schema}) { Wiki::Toolkit::Setup::Database::bulk_data_insert($dbh,@cur_data); } else { print "Upgrading schema: $upgrade_schema\n"; my @updates = @{$upgrades{$upgrade_schema}}; foreach my $update (@updates) { if(ref($update) eq "CODE") { &$update($dbh); } elsif(ref($update) eq "ARRAY") { foreach my $nupdate (@$update) { $dbh->do($nupdate); } } else { $dbh->do($update); } } } } # Clean up if we made our own dbh. $dbh->disconnect if $disconnect_required; } # Internal method - what tables are defined? sub fetch_tables_listing { my $dbh = shift; my $wanted_schema = shift; # Check whether tables exist, set them up if not. my $sql = "SELECT name FROM sqlite_master WHERE type='table' AND name in (" . join( ",", map { $dbh->quote($_) } keys %{$create_sql->{$wanted_schema}} ) . ")"; my $sth = $dbh->prepare($sql) or croak $dbh->errstr; $sth->execute; my %tables; while ( my $table = $sth->fetchrow_array ) { $tables{$table} = 1; } return %tables; } =item B use Wiki::Toolkit::Setup::SQLite; # Clear out all Wiki::Toolkit tables from the database. Wiki::Toolkit::Setup::SQLite::cleardb( $filename ); or Wiki::Toolkit::Setup::SQLite::cleardb( $dbh ); Takes one argument - B the name of the file that the SQLite database is stored in B an active database handle. Clears out all L store tables from the database. B that this will lose all your data; you probably only want to use this for testing purposes or if you really screwed up somewhere. Note also that it doesn't touch any L search backend tables; if you have any of those in the same or a different database see L or L, depending on which search backend you're using. =cut sub cleardb { my @args = @_; my $dbh = _get_dbh( @args ); my $disconnect_required = _disconnect_required( @args ); print "Dropping tables... "; my $sql = "SELECT name FROM sqlite_master WHERE type='table' AND name in (" . join( ",", map { $dbh->quote($_) } keys %{$create_sql->{$SCHEMA_VERSION}} ) . ")"; foreach my $tableref (@{$dbh->selectall_arrayref($sql)}) { $dbh->do("DROP TABLE $tableref->[0]") or croak $dbh->errstr; } print "done\n"; # Clean up if we made our own dbh. $dbh->disconnect if $disconnect_required; } sub _get_dbh { # Database handle passed in. if ( ref $_[0] and ref $_[0] eq 'DBI::db' ) { return $_[0]; } # Args passed as hashref. if ( ref $_[0] and ref $_[0] eq 'HASH' ) { my %args = %{$_[0]}; if ( $args{dbh} ) { return $args{dbh}; } else { return _make_dbh( %args ); } } # Args passed as list of connection details. return _make_dbh( dbname => $_[0] ); } sub _get_wanted_schema { # Database handle passed in. 
    if ( ref $_[0] and ref $_[0] eq 'DBI::db' ) {
        return undef;
    }

    # Args passed as hashref.
    if ( ref $_[0] and ref $_[0] eq 'HASH' ) {
        my %args = %{$_[0]};
        return $args{wanted_schema};
    }
}

sub _disconnect_required {
    # Database handle passed in.
    if ( ref $_[0] and ref $_[0] eq 'DBI::db' ) {
        return 0;
    }

    # Args passed as hashref.
    if ( ref $_[0] and ref $_[0] eq 'HASH' ) {
        my %args = %{$_[0]};
        if ( $args{dbh} ) {
            return 0;
        } else {
            return 1;
        }
    }

    # Args passed as list of connection details.
    return 1;
}

sub _make_dbh {
    my %args = @_;
    my $dbh = DBI->connect("dbi:SQLite:dbname=$args{dbname}", "", "",
                           { PrintError => 1, RaiseError => 1, AutoCommit => 1 } )
        or croak DBI::errstr;
    return $dbh;
}

=back

=head1 ALTERNATIVE CALLING SYNTAX

As requested by Podmaster. Instead of passing arguments to the methods as

  ($filename)

you can pass them as

  ( { dbname => $filename } )

or indeed

  ( { dbh => $dbh } )

Note that's a hashref, not a hash.

=head1 AUTHOR

Kake Pugh (kake@earth.li).

=head1 COPYRIGHT

Copyright (C) 2002-2004 Kake Pugh. All Rights Reserved.
Copyright (C) 2006-2009 the Wiki::Toolkit team. All Rights Reserved.

This module is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.

=head1 SEE ALSO

L, L, L

=cut

1;
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Search/0000755000175000017500000000000012243760523020167 5ustar vagrantvagrantWiki-Toolkit-0.83/lib/Wiki/Toolkit/Search/Lucy.pm0000644000175000017500000001634712243757607021455 0ustar vagrantvagrantpackage Wiki::Toolkit::Search::Lucy;

use strict;
use Lucy::Analysis::PolyAnalyzer;
use Lucy::Index::Indexer;
use Lucy::Index::PolyReader;
use Lucy::Plan::FullTextType;
use Lucy::Plan::Schema;
use Lucy::Search::IndexSearcher;
use Lucy::Search::QueryParser;

use vars qw( @ISA $VERSION );
$VERSION = '0.03';

use base 'Wiki::Toolkit::Search::Base';

=head1 NAME

Wiki::Toolkit::Search::Lucy - Use Lucy to search your Wiki::Toolkit wiki.

=head1 SYNOPSIS

  my $search = Wiki::Toolkit::Search::Lucy->new( path => "/var/lucy/wiki" );
  my %wombat_nodes = $search->search_nodes( "wombat" );

Provides L<Lucy>-based search methods for L<Wiki::Toolkit>.

=cut

=head1 METHODS

=over 4

=item B<new>

  my $search = Wiki::Toolkit::Search::Lucy->new(
      path => "/var/lucy/wiki",
      metadata_fields => [ "category", "locale", "address" ],
      boost => { title => 2.5 } );

The C<path> parameter is mandatory. C<path> must be a directory for
storing the indexed data. It should exist and be writeable.

The C<metadata_fields> parameter is optional. It should be a reference
to an array of metadata field names.

The C<boost> parameter is also optional. It should be a reference to a
hash in which the keys are fields and the values are numbers - see L
for more info. Only C<title> is currently supported as a field value.

=cut

sub _init {
    my ( $self, %args ) = @_;

    # Set up the Lucy schema. Content and fuzzy title will be indexed but
    # not stored (since we don't need to retrieve them).
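    # (Descriptive note, not in the original: a Lucy field defined with
    # stored => 0 can be matched by queries but not read back from a
    # search hit; only "title" and "key" below are stored for retrieval.)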
    my $schema = Lucy::Plan::Schema->new;
    my $polyanalyzer = Lucy::Analysis::PolyAnalyzer->new( language => "en" );
    my $stored_type = Lucy::Plan::FullTextType->new( analyzer => $polyanalyzer );
    my $unstored_type = Lucy::Plan::FullTextType->new( analyzer => $polyanalyzer,
                                                       stored => 0 );
    my %title_args = ( analyzer => $polyanalyzer, stored => 1 );
    if ( $args{boost}{title} ) {
        $title_args{boost} = $args{boost}{title};
    }
    my $title_type = Lucy::Plan::FullTextType->new( %title_args );
    $schema->spec_field( name => "content", type => $unstored_type );
    $schema->spec_field( name => "fuzzy",   type => $unstored_type );
    $schema->spec_field( name => "title",   type => $title_type );
    $schema->spec_field( name => "key",     type => $stored_type );
    foreach my $md_field ( @{$args{metadata_fields}} ) {
        $schema->spec_field( name => $md_field, type => $unstored_type );
    }
    $self->{_schema} = $schema;
    $self->{_dir} = $args{path};
    $self->{_metadata_fields} = $args{metadata_fields};
    return $self;
}

sub _dir { shift->{_dir} }
sub _schema { shift->{_schema} }

=item B<index_node>

  $search->index_node( $node, $content, $metadata );

Indexes or reindexes the given node in the search engine indexes.
You must supply both the node name and its content, but metadata is
optional.

If you do supply metadata, it should be a reference to a hash where
the keys are the names of the metadata fields and the values are
either scalars or references to arrays of scalars. For example:

  $search->index_node( "Calthorpe Arms",
                       "Nice pub in Bloomsbury.",
                       { category => [ "Pubs", "Bloomsbury" ],
                         postcode => "WC1X 8JR" } );

Only those metadata fields which were supplied to ->new will be taken
notice of - others will be silently ignored.

=cut

sub index_node {
    my ( $self, $node, $content, $metadata ) = @_;

    # Delete the old version.
    $self->_delete_node( $node );

    my $indexer = Lucy::Index::Indexer->new(
        index    => $self->_dir,
        schema   => $self->_schema,
        create   => 1,
        truncate => 0,
    );

    my $key = $self->_make_key( $node );
    my $fuzzy = $self->canonicalise_title( $node );
    my %data = (
        content => join( " ", $node, $content ),
        fuzzy   => $fuzzy,
        title   => $node,
        key     => $key,
    );

    my @fields = @{$self->{_metadata_fields}};
    foreach my $field ( @fields ) {
        my $value = $metadata->{$field};
        if ( $value ) {
            if ( ref $value ) {
                $data{$field} = join( " ", @$value );
            } else {
                $data{$field} = $value;
            }
        }
    }

    $indexer->add_doc( \%data );
    $indexer->commit;
}

sub _delete_node {
    my ( $self, $node ) = @_;
    my $indexer = Lucy::Index::Indexer->new(
        index    => $self->_dir,
        schema   => $self->_schema,
        create   => 1,
        truncate => 0,
    );
    my $key = $self->_make_key( $node );
    $indexer->delete_by_term( field => "key", term => $key );
    $indexer->commit;
}

# We need to make a unique key for when we come to delete a doc - can't just
# delete on title as it does a search rather than an exact match on the field.
sub _make_key {
    my ( $self, $node ) = @_;
    $node =~ s/\s//g;
    return lc( $node );
}

=item B<search_nodes>

  # Find all the nodes which contain the word 'expert'.
  my %results = $search->search_nodes( "expert" );

Returns a (possibly empty) hash whose keys are the node names and whose
values are the scores.

Defaults to AND searches (if C<$and_or> is not supplied, or is anything
other than C<OR> or C<or>).

Searches are case-insensitive.

=cut

sub search_nodes {
    my ( $self, $searchstring, $and_or ) = @_;

    # Bail and return empty list if nothing stored.
    return () unless $self->_index_exists;

    $and_or = uc( $and_or || "" );
    $and_or = "AND" unless $and_or eq "OR";
    my $queryparser = Lucy::Search::QueryParser->new(
        schema         => $self->_schema,
        default_boolop => $and_or,
    );
    my $query = $queryparser->parse( $searchstring );
    my $searcher = Lucy::Search::IndexSearcher->new( index => $self->_dir );
    my $num_docs = $searcher->doc_max();
    my $hits = $searcher->hits( query => $query, num_wanted => $num_docs );
    my %results;
    while ( my $hit = $hits->next ) {
        $results{ $hit->{title} } = $hit->get_score;
    }
    return %results;
}

# Fuzzy title search - exact match has highest score.
sub _fuzzy_match {
    my ( $self, $string, $canonical ) = @_;

    # Bail and return empty list if nothing stored.
    return () unless $self->_index_exists;

    my $queryparser = Lucy::Search::QueryParser->new(
        schema         => $self->_schema,
        default_boolop => "AND",
    );
    my $query = $queryparser->parse( $canonical );
    my $searcher = Lucy::Search::IndexSearcher->new( index => $self->_dir );
    my $num_docs = $searcher->doc_max();
    my $hits = $searcher->hits( query => $query, num_wanted => $num_docs );
    my %results;
    while ( my $hit = $hits->next ) {
        $results{ $hit->{title} } = $hit->get_score;
    }
    return map { $_ => ($_ eq $string ? 2 : 1) } keys %results;
}

# Returns true if and only if we have data stored.
sub _index_exists {
    my $self = shift;
    my $reader = Lucy::Index::PolyReader->open( index => $self->_dir );
    return @{ $reader->seg_readers };
}

sub supports_fuzzy_searches { 1; }

sub supports_phrase_searches { 1; }

sub supports_metadata_indexing { 1; }

=back

=head1 SEE ALSO

L<Wiki::Toolkit>, L<Wiki::Toolkit::Search::Base>.

=cut

1;
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Search/SII.pm0000644000175000017500000000771212243757607021165 0ustar vagrantvagrantpackage Wiki::Toolkit::Search::SII;

use strict;
use Search::InvertedIndex;
use Carp "croak";

use base 'Wiki::Toolkit::Search::Base';
use vars qw( @ISA $VERSION );
$VERSION = 0.09;

=head1 NAME

Wiki::Toolkit::Search::SII - Search::InvertedIndex plugin for Wiki::Toolkit.

=head1 SYNOPSIS

  my $indexdb = Search::InvertedIndex::DB::Mysql->new( ... );
  my $search = Wiki::Toolkit::Search::SII->new( indexdb => $indexdb );
  my %wombat_nodes = $search->search_nodes("wombat");

Provides search-related methods for L<Wiki::Toolkit>.

See also L<Wiki::Toolkit::Search::Base>, for methods not documented here.

=cut

=head1 METHODS

=over 4

=item B<new>

  # EITHER

  my $indexdb = Search::InvertedIndex::DB::Mysql->new(
                   -db_name    => $dbname,
                   -username   => $dbuser,
                   -password   => $dbpass,
                   -hostname   => '',
                   -table_name => 'siindex',
                   -lock_mode  => 'EX' );

  # OR

  my $indexdb = Search::InvertedIndex::DB::DB_File_SplitHash->new(
                   -map_name  => "/home/wiki/indexes.db",
                   -lock_mode => "EX" );

  # THEN

  my $search = Wiki::Toolkit::Search::SII->new( indexdb => $indexdb );

Takes only one parameter, which is mandatory.
C<indexdb> must be a C<Search::InvertedIndex::DB::*> object.

=cut

sub _init {
    my ($self, %args) = @_;
    my $indexdb = $args{indexdb};

    my $map = Search::InvertedIndex->new( -database => $indexdb )
        or croak "Couldn't set up Search::InvertedIndex map";
    $map->add_group( -group => "nodes" );
    $map->add_group( -group => "fuzzy_titles" );
    $self->{_map} = $map;

    return $self;
}

sub _do_search {
    my ($self, $and_or, $terms) = @_;

    # Create a leaf for each search term.
    my @leaves;
    foreach my $term ( @$terms ) {
        my $leaf = Search::InvertedIndex::Query::Leaf->new(-key   => $term,
                                                           -group => "nodes" );
        push @leaves, $leaf;
    }

    # Collate the leaves.
    my $query = Search::InvertedIndex::Query->new( -logic => $and_or,
                                                   -leafs => \@leaves );

    # Perform the search and extract the results.
    my $result = $self->{_map}->search( -query => $query );

    my $num_results = $result->number_of_index_entries || 0;
    my %results;
    for my $i ( 1 .. $num_results ) {
        my ($index, $data, $ranking) = $result->entry( -number => $i - 1 );
        $results{$index} = $ranking;
    }
    return %results;
}

sub _fuzzy_match {
    my ($self, $string, $canonical) = @_;

    my $leaf = Search::InvertedIndex::Query::Leaf->new( -key   => $canonical,
                                                        -group => "fuzzy_titles" );
    my $query = Search::InvertedIndex::Query->new( -leafs => [ $leaf ] );

    my $result = $self->{_map}->search( -query => $query );

    my $num_results = $result->number_of_index_entries || 0;
    my %results;
    for my $i ( 1 .. $num_results ) {
        my ($index, $data) = $result->entry( -number => $i - 1 );
        $results{$data} = $data eq $string ? 2 : 1;
    }
    return %results;
}

sub _index_node {
    my ($self, $node, $content, $keys) = @_;

    my $update = Search::InvertedIndex::Update->new(
        -group => "nodes",
        -index => $node,
        -data  => $content,
        -keys  => { map { $_ => 1 } @$keys }
    );
    $self->{_map}->update( -update => $update );
}

sub _index_fuzzy {
    my ($self, $node, $canonical) = @_;

    my $update = Search::InvertedIndex::Update->new(
        -group => "fuzzy_titles",
        -index => $node . "_fuzzy_title",
        -data  => $node,
        -keys  => { $canonical => 1 }
    );
    $self->{_map}->update( -update => $update );
}

sub _delete_node {
    my ($self, $node) = @_;
    $self->{_map}->remove_index_from_all({ -index => $node });
}

sub supports_phrase_searches { return 0; }

sub supports_fuzzy_searches { return 1; }

=back

=head1 SEE ALSO

L<Wiki::Toolkit>, L<Wiki::Toolkit::Search::Base>.

=cut

1;
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Search/DBIxFTS.pm0000644000175000017500000000634512243757607021705 0ustar vagrantvagrantpackage Wiki::Toolkit::Search::DBIxFTS;

use strict;
use DBIx::FullTextSearch;
use Carp "croak";

use base 'Wiki::Toolkit::Search::Base';
use vars qw( @ISA $VERSION );
$VERSION = 0.05;

=head1 NAME

Wiki::Toolkit::Search::DBIxFTS - DBIx::FullTextSearch search plugin for Wiki::Toolkit.

=head1 REQUIRES

DBIx::FullTextSearch

=head1 SYNOPSIS

  my $store = Wiki::Toolkit::Store::MySQL->new( dbname => "wiki",
                                                dbpass => "wiki" );
  my $search = Wiki::Toolkit::Search::DBIxFTS->new( dbh => $store->dbh );
  my %wombat_nodes = $search->search_nodes("wombat");

Provides search-related methods for Wiki::Toolkit.
See also L<Wiki::Toolkit::Search::Base>, for methods not documented here.

=cut

=head1 METHODS

=over 4

=item B<new>

  my $search = Wiki::Toolkit::Search::DBIxFTS->new( dbh => $dbh );

You must supply a handle to a database that has the
DBIx::FullTextSearch indexes already set up. (Currently though there's
no checking that what you supply is even a database handle at all, let
alone one that is compatible with DBIx::FullTextSearch.)

=cut

sub _init {
    my ($self, %args) = @_;
    croak "Must supply a database handle" unless $args{dbh};
    $self->{_dbh} = $args{dbh};
    return $self;
}

# We can't use the base version, since we're doing the analysis
# differently between searching and indexing
sub search_nodes {
    my ($self, $termstr, $and_or) = @_;

    $and_or = uc($and_or);
    unless ( defined $and_or and $and_or eq "OR" ) {
        $and_or = "AND";
    }

    # Extract individual search terms - first phrases (between double quotes).
    my @terms = ($termstr =~ m/"([^"]+)"/g);
    $termstr =~ s/"[^"]*"//g;
    # And now the phrases are gone, just split on whitespace.
    push @terms, split(/\s+/, $termstr);

    # If this is an AND search, tell DBIx::FTS we want every term.
    @terms = map { "+$_" } @terms if $and_or eq "AND";

    # Open and perform the FTS.
    my $dbh = $self->{_dbh};
    my $fts = DBIx::FullTextSearch->open($dbh, "_content_and_title_fts");
    my @finds = $fts->econtains(@terms);

    # Well, no scoring yet, you see.
    return map { $_ => 1 } @finds;
}

sub index_node {
    my ($self, $node) = @_;
    my $dbh = $self->{_dbh};
    my $fts_all = DBIx::FullTextSearch->open($dbh, "_content_and_title_fts");
    my $fts_titles = DBIx::FullTextSearch->open($dbh, "_title_fts");
    $fts_all->index_document($node);
    $fts_titles->index_document($node);
    delete $fts_all->{db_backend};    # hack around buglet in DBIx::FTS
    delete $fts_titles->{db_backend}; # ditto
}

sub delete_node {
    my ($self, $node) = @_;
    my $dbh = $self->{_dbh};
    my $fts_all = DBIx::FullTextSearch->open($dbh, "_content_and_title_fts")
        or croak "Can't open _content_and_title_fts";
    my $fts_titles = DBIx::FullTextSearch->open($dbh, "_title_fts")
        or croak "Can't open _title_fts";
    eval { $fts_all->delete_document($node); };
    croak "Couldn't delete from full index: $@" if $@;
    eval { $fts_titles->delete_document($node); };
    croak "Couldn't delete from title-only index: $@" if $@;
    return 1;
}

sub supports_phrase_searches { return 1; }

sub supports_fuzzy_searches { return 0; }

=back

=head1 SEE ALSO

L<Wiki::Toolkit>, L<Wiki::Toolkit::Search::Base>.

=cut

1;
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Search/Base.pm0000644000175000017500000001461012243757607021412 0ustar vagrantvagrantpackage Wiki::Toolkit::Search::Base;

use strict;
use Carp "croak";

use vars qw( @ISA $VERSION );

sub _abstract {
    my $who = (caller(1))[3];
    croak "$who is an abstract method which the ".(ref shift).
          " class has not provided";
}

$VERSION = 0.01;

=head1 NAME

Wiki::Toolkit::Search::Base - Base class for Wiki::Toolkit search plugins.

=head1 SYNOPSIS

  my $search = Wiki::Toolkit::Search::XXX->new( @args );
  my %wombat_nodes = $search->search_nodes("wombat");

This class details the methods that need to be overridden by search
plugins.

=cut

=head1 METHODS

=head2 C<new>

  my $search = Wiki::Toolkit::Search::XXX->new( @args );

Creates a new searcher. By default the arguments are just passed to
C<_init>, so you may wish to override that instead.

=cut

sub new {
    my ($class, @args) = @_;
    my $self = {};
    bless $self, $class;
    return $self->_init(@args);
}

sub _init {
    my ($self, %args) = @_;
    @{$self}{keys %args} = values %args;
    return $self;
}

=head2 C<search_nodes>

  # Find all the nodes which contain the word 'expert'.
  my %results = $search->search_nodes('expert');

Returns a (possibly empty) hash whose keys are the node names and
whose values are the scores in some kind of relevance-scoring system I
haven't entirely come up with yet. For OR searches, this could
initially be the number of terms that appear in the node, perhaps.

Defaults to AND searches (if $and_or is not supplied, or is anything
other than C<OR> or C<or>).

Searches are case-insensitive.

=cut

sub search_nodes {
    my ($self, $termstr, $and_or) = @_;

    $and_or = lc($and_or);
    unless ( defined $and_or and $and_or eq "or" ) {
        $and_or = "and";
    }

    # Extract individual search terms.
    my @terms = $self->analyze($termstr);

    return $self->_do_search($and_or, \@terms);
}

sub _do_search { shift->_abstract };

=head2 C<analyze>

  @terms = $self->analyze($string)

Splits a string into a set of terms for indexing and searching.
Typically this is done case-insensitively, splitting at word
boundaries, and extracting words that contain at least two word
characters.

=cut

sub analyze {
    my ($self, $string) = @_;
    return grep { length > 1       # ignore single characters
                  and ! /^\W*$/    # and things composed entirely
                                   # of non-word characters
           }
           split( /\b/,            # split at word boundaries
                  lc($string)      # be case-insensitive
                );
}

=head2 C<fuzzy_title_match>

  $wiki->write_node( "King's Cross St Pancras", "A station." );
  my %matches = $search->fuzzy_title_match( "Kings Cross St. Pancras" );

Returns a (possibly empty) hash whose keys are the node names and
whose values are the scores in some kind of relevance-scoring system I
haven't entirely come up with yet.

Note that even if an exact match is found, any other similar enough
matches will also be returned. However, any exact match is guaranteed
to have the highest relevance score.

The matching is done against "canonicalised" forms of the search
string and the node titles in the database: stripping vowels, repeated
letters and non-word characters, and lowercasing.

=cut

sub fuzzy_title_match {
    my ($self, $string) = @_;
    my $canonical = $self->canonicalise_title( $string );
    $self->_fuzzy_match($string, $canonical);
}

sub _fuzzy_match { shift->_abstract };

=head2 C<index_node>

  $search->index_node( $node, $content, $metadata );

Indexes or reindexes the given node in the search engine indexes.
You must supply both the node name and its content, but metadata is
optional.

If you do supply metadata, it will be used if and only if your chosen
search backend supports metadata indexing (see
C<supports_metadata_indexing>). It should be a reference to a hash
where the keys are the names of the metadata fields and the values are
either scalars or references to arrays of scalars.
For example:

  $search->index_node( "Calthorpe Arms",
                       "Nice pub in Bloomsbury.",
                       { category => [ "Pubs", "Bloomsbury" ],
                         postcode => "WC1X 8JR" } );

=cut

sub index_node {
    my ($self, $node, $content) = @_;
    croak "Must supply a node name" unless $node;
    croak "Must supply node content" unless defined $content;

    # Index the individual words in the node content and title.
    my @keys = $self->analyze("$content $node");
    $self->_index_node($node, $content, \@keys);
    $self->_index_fuzzy($node, $self->canonicalise_title( $node ));
}

sub _index_node { shift->_abstract };
sub _index_fuzzy { shift->_abstract };

=head2 B<canonicalise_title>

  $fuzzy = $self->canonicalise_title( $node );

Returns the node title as suitable for fuzzy searching: with
punctuation and spaces removed, vowels removed, and double letters
squashed.

=cut

sub canonicalise_title {
    my ($self, $title) = @_;
    return "" unless $title;
    my $canonical = lc($title);
    $canonical =~ s/\W//g;       # remove non-word characters
    $canonical =~ s/[aeiouy]//g; # remove vowels and 'y'
    $canonical =~ tr/a-z//s;     # collapse doubled (or tripled, etc) letters
    return $canonical;
}

=head2 C<delete_node>

  $search->delete_node($node);

Removes the given node from the search indexes. NOTE: It's up to you to
make sure the node is removed from the backend store. Croaks on error.

=cut

sub delete_node {
    my ($self, $node) = @_;
    croak "Must supply a node name" unless $node;
    $self->_delete_node($node);
}

sub _delete_node { shift->_abstract };

=head2 C<supports_phrase_searches>

  if ( $search->supports_phrase_searches ) {
      return $search->search_nodes( '"fox in socks"' );
  }

Returns true if this search backend supports phrase searching, and
false otherwise.

=cut

sub supports_phrase_searches { shift->_abstract };

=head2 C<supports_fuzzy_searches>

  if ( $search->supports_fuzzy_searches ) {
      return $search->fuzzy_title_match("Kings Cross St Pancreas");
  }

Returns true if this search backend supports fuzzy title matching, and
false otherwise.

=cut

sub supports_fuzzy_searches { shift->_abstract };

=head2 C<supports_metadata_indexing>

  if ( $search->supports_metadata_indexing ) {
      print "This search backend indexes metadata as well as content.";
  }

Returns true if this search backend supports metadata indexing, and
false otherwise.

=cut

sub supports_metadata_indexing { 0; };

=head1 SEE ALSO

L<Wiki::Toolkit>

=cut

1;
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Search/Plucene.pm0000644000175000017500000001075712243757607022137 0ustar vagrantvagrantpackage Wiki::Toolkit::Search::Plucene;

use strict;
use Carp "croak";
use File::Spec::Functions qw(catfile);
use Plucene::Document;
use Plucene::Document::Field;
use Plucene::Index::Writer;
use Plucene::Analysis::SimpleAnalyzer;
use Plucene::QueryParser;
use Plucene::Search::IndexSearcher;
use Plucene::Search::HitCollector;

use vars qw( @ISA $VERSION );
$VERSION = '0.02';

use base 'Wiki::Toolkit::Search::Base';

=head1 NAME

Wiki::Toolkit::Search::Plucene - Use Plucene to search your Wiki::Toolkit wiki.

=head1 SYNOPSIS

  my $search = Wiki::Toolkit::Search::Plucene->new( path => "/var/plucene/wiki" );
  my %wombat_nodes = $search->search_nodes("wombat");

Provides search-related methods for L<Wiki::Toolkit>.

=cut

=head1 METHODS

=over 4

=item B<new>

  my $search = Wiki::Toolkit::Search::Plucene->new( path => "/var/plucene/wiki" );

Takes only one parameter, which is mandatory. C<path> must be a
directory for storing the indexed data. It should exist and be
writeable.

=cut

sub _init {
    my ($self, %args) = @_;
    $self->{_dir} = $args{path};
    return $self;
}

sub _dir { shift->{_dir} }

sub _parsed_query {
    my ($self, $query, $default) = @_;
    my $parser = Plucene::QueryParser->new({
        analyzer => Plucene::Analysis::SimpleAnalyzer->new(),
        default  => $default
    });
    $parser->parse($query);
}

# Make new searchers, readers and writers each time we're asked for
# one - if we made them in _init then they would always think the index
# has the same stuff in as it had when they were made.
sub _searcher {
    my $self = shift;
    return Plucene::Search::IndexSearcher->new( $self->_dir );
}

sub _reader {
    my $self = shift;
    return Plucene::Index::Reader->open( $self->_dir );
}

sub _writer {
    my $self = shift;
    return Plucene::Index::Writer->new(
        $self->_dir,
        Plucene::Analysis::SimpleAnalyzer->new,
        -e catfile($self->_dir, "segments") ? 0 : 1
    );
}

sub _index_exists {
    my $self = shift;
    if ( -e catfile($self->_dir, "segments") ) {
        return 1;
    }
}

sub _search_nodes {
    my ($self, $query, $and_or) = @_;

    # Bail and return empty list if nothing stored in directory.
    return () unless $self->_index_exists;

    local $Plucene::QueryParser::DefaultOperator = "AND"
        unless ( $and_or and lc($and_or) eq "or" );

    my @docs;
    my $searcher = $self->_searcher;
    my $hc = Plucene::Search::HitCollector->new( collect => sub {
        my ($self, $doc, $score) = @_;
        my $res = eval { $searcher->doc($doc) };
        push @docs, [ $res, $score ] if $res;
    });
    my $parsed_query = $self->_parsed_query( $query, 'text' );
    $searcher->search_hc($parsed_query, $hc);
    @docs = map $_->[0]->get("id")->string,
            sort { $b->[1] <=> $a->[1] } @docs;
    return @docs;
}

sub search_nodes {
    my ($self, @args) = @_;
    my @docs = $self->_search_nodes( @args );
    my $i = 1;
    return map { $_ => $i++ } @docs;
}

sub _fuzzy_match {
    my ($self, $string, $canonical) = @_;
    return map { $_ => ($_ eq $string ? 2 : 1) }
           $self->_search_nodes("fuzzy:$canonical");
}

sub index_node {
    my ($self, $node, $content) = @_;

    # Delete the old version.
    $self->delete_node( $node );

    my $writer = $self->_writer;
    my $doc = Plucene::Document->new;
    my $fuzzy = $self->canonicalise_title( $node );
    $doc->add( Plucene::Document::Field->Text( "content",
                   join( " ", $node, $content ) ) );
    $doc->add( Plucene::Document::Field->Text( "fuzzy", $fuzzy ) );
    $doc->add( Plucene::Document::Field->Text( "title", $node ) );
    $doc->add(Plucene::Document::Field->Keyword(id => $node));
    $doc->add(Plucene::Document::Field->UnStored('text' => join( " ", $node, $content )));
    $writer->add_document($doc);
}

sub optimize { shift->_writer->optimize() }

sub indexed {
    my ($self, $id) = @_;
    my $term = Plucene::Index::Term->new({ field => 'id', text => $id });
    return $self->_reader->doc_freq($term);
}

sub delete_node {
    my ($self, $id) = @_;
    return unless $self->_index_exists;
    my $reader = $self->_reader;
    $reader->delete_term(
        Plucene::Index::Term->new({ field => "id", text => $id }));
    $reader->close;
}

sub supports_phrase_searches { return 1; }

sub supports_fuzzy_searches { return 1; }

=back

=head1 SEE ALSO

L<Wiki::Toolkit>, L<Wiki::Toolkit::Search::Base>.

=cut

1;
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Formatter/0000755000175000017500000000000012243760523020725 5ustar vagrantvagrantWiki-Toolkit-0.83/lib/Wiki/Toolkit/Formatter/Default.pm0000644000175000017500000001043112243757607022657 0ustar vagrantvagrantpackage Wiki::Toolkit::Formatter::Default;

use strict;
use vars qw( $VERSION @ISA @_links_found );
$VERSION = '0.02';

use Wiki::Toolkit::Formatter::WikiLinkFormatterParent;
use CGI ":standard";
use Carp qw(croak carp);
use Text::WikiFormat as => 'wikiformat';
use HTML::PullParser;

@ISA = qw( Wiki::Toolkit::Formatter::WikiLinkFormatterParent );

=head1 NAME

Wiki::Toolkit::Formatter::Default - A formatter for Wiki::Toolkit.

=head1 DESCRIPTION

A formatter backend for L<Wiki::Toolkit>.

=head1 SYNOPSIS

  my $store = Wiki::Toolkit::Store::SQLite->new( ... );

  # See below for parameter details.
  my $formatter = Wiki::Toolkit::Formatter::Default->new( %config );

  my $wiki = Wiki::Toolkit->new( store => $store, formatter => $formatter );

=head1 METHODS

=over 4

=item B<new>

  my $formatter = Wiki::Toolkit::Formatter::Default->new(
                 extended_links => 0,
                 implicit_links => 1,
                 allowed_tags   => [qw(b i)], # defaults to none
                 macros         => {},
                 node_prefix    => 'wiki.cgi?node=' );

Parameters will default to the values shown above (apart from
C<allowed_tags>, which defaults to allowing no tags).

=over 4

=item * macros - be aware that macros are processed I<after> filtering
out disallowed HTML tags. Currently macros are just strings, maybe
later we can add in subs if we think it might be useful.

=back

Macro example:

  macros => { qr/(^|\b)\@SEARCHBOX(\b|$)/ =>
              qq(<form action="wiki.cgi" method="get">
                 <input type="hidden" name="action" value="search">
                 <input type="text" size="20" name="terms">
                 <input type="submit"></form>) }

=cut

sub new {
    my ($class, @args) = @_;
    my $self = {};
    bless $self, $class;
    $self->_init(@args) or return undef;
    return $self;
}

sub _init {
    my ($self, %args) = @_;

    # Store the parameters or their defaults.
    my %defs = ( extended_links => 0,
                 implicit_links => 1,
                 allowed_tags   => [],
                 macros         => {},
                 node_prefix    => 'wiki.cgi?node=',
               );

    my %collated = (%defs, %args);
    foreach my $k (keys %defs) {
        $self->{"_".$k} = $collated{$k};
    }

    return $self;
}

=item B<format>

  my $html = $formatter->format( $content );

Escapes any tags which weren't specified as allowed on creation, then
interpolates any macros, then calls Text::WikiFormat::format (with the
config set up when B<new> was called) to translate the raw Wiki
language supplied into HTML.

=cut

sub format {
    my ($self, $raw) = @_;
    my $safe = "";

    my %allowed = map {lc($_) => 1, "/".lc($_) => 1} @{$self->{_allowed_tags}};

    if (scalar keys %allowed) {
        # If we are allowing some HTML, parse and get rid of the nasties.
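        # (Descriptive note, not in the original: with the argspecs below,
        # HTML::PullParser returns each token as [ "TAG" or "TEXT", $tag,
        # $original_text ], so any tag not in %allowed falls through to
        # CGI::escapeHTML.)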
        my $parser = HTML::PullParser->new(doc   => $raw,
                                           start => '"TAG", tag, text',
                                           end   => '"TAG", tag, text',
                                           text  => '"TEXT", tag, text');

        while (my $token = $parser->get_token) {
            my ($flag, $tag, $text) = @$token;
            if ($flag eq "TAG" and !defined $allowed{lc($tag)}) {
                $safe .= CGI::escapeHTML($text);
            } else {
                $safe .= $text;
            }
        }
    } else {
        # Else just escape everything.
        $safe = CGI::escapeHTML($raw);
    }

    # Now process any macros.
    my %macros = %{$self->{_macros}};
    foreach my $regexp (keys %macros) {
        $safe =~ s/$regexp/$macros{$regexp}/g;
    }

    return wikiformat($safe, {},
                      { extended       => $self->{_extended_links},
                        prefix         => $self->{_node_prefix},
                        implicit_links => $self->{_implicit_links} } );
}

=back

=head1 SEE ALSO

L<Wiki::Toolkit::Formatter::WikiLinkFormatterParent>

L<Wiki::Toolkit>

=head1 AUTHOR

Kake Pugh (kake@earth.li).

=head1 COPYRIGHT

Copyright (C) 2002-2003 Kake Pugh. All Rights Reserved.
Copyright (C) 2006 the Wiki::Toolkit team. All Rights Reserved.

This module is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.

=cut

1;
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Formatter/WikiLinkFormatterParent.pm0000644000175000017500000000543412243757607026055 0ustar vagrantvagrantpackage Wiki::Toolkit::Formatter::WikiLinkFormatterParent;

use strict;
use vars qw( $VERSION @_links_found );
$VERSION = '0.01';

use Text::WikiFormat as => 'wikiformat';

=head1 NAME

Wiki::Toolkit::Formatter::WikiLinkFormatterParent - The parent of Wiki::Toolkit formatters that work with Wiki Links.

=head1 DESCRIPTION

A provider of common formatter methods for L<Wiki::Toolkit> formatters
that deal with Wiki Links.

=cut

sub new {
    my ($class, @args) = @_;
    my $self = {};
    bless $self, $class;
    $self->_init(@args) or return undef;
    return $self;
}

=head1 METHODS

=head2 C<rename_links>

  $formatter->rename_links( $from, $to, $content );

Renames all the links to a certain page in the supplied content.
(Obviously this is dependent on object properties such as
C<extended_links> and C<implicit_links>.)

=cut

sub rename_links {
    my ($self, $from, $to, $content) = @_;

    # If we support extended (square bracket) links, update those
    if($self->{_extended_links}) {
        $content =~ s/\[$from\]/\[$to\]/g;
        $content =~ s/\[$from(\s*|.*?)\]/\[$to$1\]/g;
    }

    # If we support implicit (camelcase) links, update those
    if($self->{_implicit_links}) {
        $content =~ s/\b$from\b/$to/g;
        $content =~ s/^$from\b/$to/gm;
        $content =~ s/\b$from$/$to/gm;
    }

    return $content;
}

=head2 C<find_internal_links>

  my @links_to = $formatter->find_internal_links( $content );

Returns a list of all nodes that the supplied content links to.
(Obviously this is dependent on object properties such as
C<extended_links> and C<implicit_links>.)
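For instance (a hypothetical content string; exactly which links are
found depends on the C<extended_links> and C<implicit_links> settings):

  my @links = $formatter->find_internal_links(
      "We also stock [Apples] and [Pears]." );
  # With extended (square bracket) links enabled, this would
  # return ( "Apples", "Pears" ).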
=cut

sub find_internal_links {
    my ($self, $raw) = @_;

    @_links_found = ();

    my $foo = wikiformat($raw,
        { link => sub {
            my ($link, $opts) = @_;
            $opts ||= {};
            my $title;
            ($link, $title) = split(/\|/, $link, 2) if $opts->{extended};
            push @Wiki::Toolkit::Formatter::WikiLinkFormatterParent::_links_found,
                 $link;
            return ""; # don't care about output
          }
        },
        { extended       => $self->{_extended_links},
          prefix         => $self->{_node_prefix},
          implicit_links => $self->{_implicit_links} } );

    my @links = @_links_found;
    @_links_found = ();
    return @links;
}

=head1 SEE ALSO

L<Wiki::Toolkit::Formatter::Default>

=head1 AUTHOR

Kake Pugh (kake@earth.li).

=head1 COPYRIGHT

Copyright (C) 2002-2003 Kake Pugh. All Rights Reserved.
Copyright (C) 2006-2009 the Wiki::Toolkit team. All Rights Reserved.

This module is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.

=cut

1;
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Formatter/Multiple.pm0000644000175000017500000001010212243757607023061 0ustar vagrantvagrantpackage Wiki::Toolkit::Formatter::Multiple;

use strict;
use vars qw( $VERSION );
$VERSION = '0.02';

=head1 NAME

Wiki::Toolkit::Formatter::Multiple - Allows a Wiki::Toolkit wiki to use more than one formatter.

=head1 DESCRIPTION

A "dummy" formatter for L<Wiki::Toolkit>. Passes methods through to
other Wiki::Toolkit formatters, depending on supplied metadata.

=head1 SYNOPSIS

  use Wiki::Toolkit::Formatter::Multiple;
  use Wiki::Toolkit::Formatter::Pod;
  use Wiki::Toolkit::Formatter::UseMod;

  my $pod_fmtr = Wiki::Toolkit::Formatter::Pod->new(
      node_prefix => "wiki.cgi?node=",
  );

  my $usemod_fmtr = Wiki::Toolkit::Formatter::UseMod->new(
      node_prefix    => "wiki.cgi?node=",
      extended_links => 1,
      allowed_tags   => [ qw( p b i div br ) ],
  );

  my $formatter = Wiki::Toolkit::Formatter::Multiple->new(
      documentation => $pod_fmtr,
      discussion    => $usemod_fmtr,
      _DEFAULT      => $usemod_fmtr,
  );

  my $wiki = Wiki::Toolkit->new( store => ..., formatter => $formatter );
  my $output = $wiki->format( "This is some discussion.",
                              { formatter => "discussion" } );

=head1 METHODS

=over

=item B<new>

  my $formatter = Wiki::Toolkit::Formatter::Multiple->new(
      label_1  => Formatter1->new( ... ),
      label_2  => Formatter2->new( ... ),
      _DEFAULT => Wiki::Toolkit::Formatter::Default->new,
  );

You may supply as many formatter objects as you wish. They don't have
to be of different classes; you may just wish to, for example, permit
different HTML tags to be used on different types of pages.

The "labels" supplied as the keys of the parameter hash should be
unique. When you write a node, you should store a key-value pair in
its metadata where the key is C<formatter> and the value is the label
of the formatter that should be used to render that node.

The C<_DEFAULT> label is special - it defines the formatter that will
be used for any node that does not have a C<formatter> stored in its
metadata.
The C<_DEFAULT> formatter, if not supplied to C<< ->new >>, will
default to the very basic L<Wiki::Toolkit::Formatter::Default>.

=cut

sub new {
    my ($class, %args) = @_;
    my $self = bless {}, $class;
    unless ( $args{_DEFAULT} ) {
        require Wiki::Toolkit::Formatter::Default;
        $args{_DEFAULT} = Wiki::Toolkit::Formatter::Default->new;
    }
    $self->{formatters} = \%args;
    return $self;
}

=item B<format( $raw, \%metadata )>

  my $output = $formatter->format( "Here is some text.", undef,
                                   { formatter => "discussion" } );

Uses the value of C<formatter> given in the metadata to decide which
of the formatter objects passed on instantiation to use, then uses it
to format the provided raw wikitext.

The C<undef> second element of the parameter array in the example is
there because when this is called from a L<Wiki::Toolkit> object, the
wiki object passes itself in as the second parameter.

=cut

sub format {
    my ($self, $raw, $wiki, $metadata) = @_;
    return $self->_formatter($metadata)->format($raw, $wiki);
}

=item B<find_internal_links( $raw, $metadata )>

=cut

sub find_internal_links {
    my ($self, $raw, $metadata) = @_;
    return () unless $self->_formatter($metadata);
    return () unless $self->_formatter($metadata)->can("find_internal_links");
    return $self->_formatter($metadata)->find_internal_links($raw, $metadata);
}

# internal method to return the correct formatter for the current
# page.
sub _formatter {
    my $self = shift;
    my $metadata = shift;
    my $label = $metadata->{formatter} || "_DEFAULT";
    $label = $label->[0] if ref($label);
    return $self->{formatters}{$label} || $self->{formatters}{_DEFAULT};
}

=back

=head1 SEE ALSO

L<Wiki::Toolkit>

=head1 AUTHOR

Kake Pugh <kake@earth.li>

=head1 SUPPORT

Bug reports, questions and feature requests should go to cgi-wiki-dev@earth.li

=head1 COPYRIGHT

Copyright (C) 2003-4 Kake Pugh. All Rights Reserved.

This module is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.

=cut

1;
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/TestLib.pm0000644000175000017500000002762312243757607020705 0ustar vagrantvagrantpackage Wiki::Toolkit::TestLib;

use 5.006; # by perlver
use strict;
use Carp "croak";
use Wiki::Toolkit;
use Wiki::Toolkit::TestConfig;
use DBI;

use vars qw( $VERSION @wiki_info );
$VERSION = '0.05';

=head1 NAME

Wiki::Toolkit::TestLib - Utilities for writing Wiki::Toolkit tests.

=head1 DESCRIPTION

When 'perl Makefile.PL' is run on a Wiki::Toolkit distribution,
information will be gathered about test databases etc that can be used
for running tests. Wiki::Toolkit::TestLib gives convenient access to
this information.

=head1 SYNOPSIS

  use strict;
  use Wiki::Toolkit::TestLib;
  use Test::More;

  my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;
  plan tests => ( $iterator->number * 6 );

  while ( my $wiki = $iterator->new_wiki ) {
      # put some test data in
      # run six tests
  }

Each time you call C<< ->new_wiki >> on your iterator, you will get a
fresh blank wiki object. The iterator will iterate over all configured
search and storage backends.

The Lucy search backend will be configured to index three metadata
fields: address, category, and locale.

=cut

my %configured = %Wiki::Toolkit::TestConfig::config;

my %datastore_info;
foreach my $dbtype (qw( MySQL Pg SQLite )) {
    if ( $configured{$dbtype}{dbname} ) {
        my %config = %{ $configured{$dbtype} };
        my $store_class = "Wiki::Toolkit::Store::$dbtype";
        my $setup_class = "Wiki::Toolkit::Setup::$dbtype";
        eval "require $store_class";
        if ( $@ ) {
            warn "Couldn't require $store_class: $@\n";
            warn "Will skip $dbtype tests.\n";
            next;
        }
        my $dsn = $store_class->_dsn( @config{ qw( dbname dbhost dbport ) } );
        my $err;
        if ( $err = _test_dsn( $dsn, $config{dbuser}, $config{dbpass} ) ) {
            warn "connecting to test $dbtype database failed: $err\n";
            warn "will skip $dbtype tests\n";
            next;
        }
        $datastore_info{$dbtype} = {
            class       => $store_class,
            setup_class => $setup_class,
            params      => {
                dbname => $config{dbname},
                dbuser => $config{dbuser},
                dbpass => $config{dbpass},
                dbhost => $config{dbhost},
            },
            dsn => $dsn
        };
    }
}

my %dbixfts_info;
# DBIxFTS only works with MySQL.
if ( $configured{dbixfts} && $configured{MySQL}{dbname} ) {
    my %config = %{ $configured{MySQL} };
    $dbixfts_info{MySQL} = {
        db_params => {
            dbname => $config{dbname},
            dbuser => $config{dbuser},
            dbpass => $config{dbpass},
            dbhost => $config{dbhost},
        },
    };
}

my %sii_info;
# Test the MySQL SII backend, if we can.
if ( $configured{search_invertedindex} && $configured{MySQL}{dbname} ) {
    my %config = %{ $configured{MySQL} };
    $sii_info{MySQL} = {
        db_class  => "Search::InvertedIndex::DB::Mysql",
        db_params => {
            -db_name    => $config{dbname},
            -username   => $config{dbuser},
            -password   => $config{dbpass},
            -hostname   => $config{dbhost} || "",
            -table_name => 'siindex',
            -lock_mode  => 'EX',
        },
    };
}

# Test the Pg SII backend, if we can. It's not in the main S::II package.
eval { require Search::InvertedIndex::DB::Pg; };
my $sii_pg = $@ ? 0 : 1;
if ( $configured{search_invertedindex} && $configured{Pg}{dbname} && $sii_pg ) {
    my %config = %{ $configured{Pg} };
    $sii_info{Pg} = {
        db_class  => "Search::InvertedIndex::DB::Pg",
        db_params => {
            -db_name    => $config{dbname},
            -username   => $config{dbuser},
            -password   => $config{dbpass},
            -hostname   => $config{dbhost},
            -table_name => 'siindex',
            -lock_mode  => 'EX',
        },
    };
}

# Also test the default DB_File backend, if we have S::II installed at all.
if ( $configured{search_invertedindex} ) {
    $sii_info{DB_File} = {
        db_class  => "Search::InvertedIndex::DB::DB_File_SplitHash",
        db_params => {
            -map_name  => 't/sii-db-file-test.db',
            -lock_mode => 'EX',
        },
    };
}

my ( $plucene_path, $lucy_path );
# Test with Plucene and Lucy if possible.
if ( $configured{plucene} ) {
    $plucene_path = "t/plucene";
}
if ( $configured{lucy} ) {
    $lucy_path = "t/lucy";
}

# @wiki_info describes which searches work with which stores.

# Database-specific searchers.
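# (Descriptive note, not in the original: each entry pushed onto @wiki_info
# below is a hashref with a datastore_info key plus at most one search
# backend key - dbixfts_info, sii_info, plucene_path or lucy_path - so each
# entry yields one store/search combination for new_wiki() to build.)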
push @wiki_info, { datastore_info => $datastore_info{MySQL},
                   dbixfts_info   => $dbixfts_info{MySQL} }
    if ( $datastore_info{MySQL} and $dbixfts_info{MySQL} );
push @wiki_info, { datastore_info => $datastore_info{MySQL},
                   sii_info       => $sii_info{MySQL} }
    if ( $datastore_info{MySQL} and $sii_info{MySQL} );
push @wiki_info, { datastore_info => $datastore_info{Pg},
                   sii_info       => $sii_info{Pg} }
    if ( $datastore_info{Pg} and $sii_info{Pg} );

# All stores are compatible with the default S::II search, and with Plucene,
# and with Lucy, and with no search.
foreach my $dbtype ( qw( MySQL Pg SQLite ) ) {
    push @wiki_info, { datastore_info => $datastore_info{$dbtype},
                       sii_info       => $sii_info{DB_File} }
        if ( $datastore_info{$dbtype} and $sii_info{DB_File} );
    push @wiki_info, { datastore_info => $datastore_info{$dbtype},
                       plucene_path   => $plucene_path }
        if ( $datastore_info{$dbtype} and $plucene_path );
    push @wiki_info, { datastore_info => $datastore_info{$dbtype},
                       lucy_path      => $lucy_path }
        if ( $datastore_info{$dbtype} and $lucy_path );
    push @wiki_info, { datastore_info => $datastore_info{$dbtype} }
        if $datastore_info{$dbtype};
}

=head1 METHODS

=over 4

=item B<new_wiki_maker>

  my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;

=cut

sub new_wiki_maker {
    my $class = shift;
    my $count = 0;
    my $iterator = \$count;
    bless $iterator, $class;
    return $iterator;
}

=item B<number>

  use Test::More;
  my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;
  plan tests => ( $iterator->number * 6 );

Returns the number of new wikis that your iterator will be able to
give you.

=cut

sub number {
    return scalar @wiki_info;
}

=item B<new_wiki>

  my $wiki = $iterator->new_wiki;

Returns a fresh blank wiki object, or false if you've used up all the
configured search and storage backends.

=cut

sub new_wiki {
    my $self = shift;
    return undef if $$self > $#wiki_info;

    my $details = $wiki_info[$$self];
    my %wiki_config;

    # Set up and clear datastore.
    my %datastore_info = %{ $details->{datastore_info } };
    my $setup_class = $datastore_info{setup_class};
    eval "require $setup_class";
    {
        no strict "refs";
        &{"$setup_class\:\:cleardb"}( $datastore_info{params} );
        &{"$setup_class\:\:setup"}( $datastore_info{params} );
    }
    my $class = $datastore_info{class};
    eval "require $class";
    $wiki_config{store} = $class->new( %{ $datastore_info{params} } );

    # Set up and clear search object (if required).
    if ( $details->{dbixfts_info} ) {
        my %fts_info = %{ $details->{dbixfts_info} };
        require Wiki::Toolkit::Store::MySQL;
        my %dbconfig = %{ $fts_info{db_params} };
        my $dsn = Wiki::Toolkit::Store::MySQL->_dsn( $dbconfig{dbname},
                                                     $dbconfig{dbhost} );
        my $dbh = DBI->connect( $dsn, $dbconfig{dbuser}, $dbconfig{dbpass},
                                { PrintError => 0, RaiseError => 1,
                                  AutoCommit => 1 } )
            or croak "Can't connect to $dbconfig{dbname} using $dsn: " .
                     DBI->errstr;
        require Wiki::Toolkit::Setup::DBIxFTSMySQL;
        Wiki::Toolkit::Setup::DBIxFTSMySQL::setup(
            @dbconfig{ qw( dbname dbuser dbpass dbhost ) } );
        require Wiki::Toolkit::Search::DBIxFTS;
        $wiki_config{search} = Wiki::Toolkit::Search::DBIxFTS->new( dbh => $dbh );
    } elsif ( $details->{sii_info} ) {
        my %sii_info = %{ $details->{sii_info} };
        my $db_class = $sii_info{db_class};
        eval "use $db_class";
        my %db_params = %{ $sii_info{db_params} };
        my $indexdb = $db_class->new( %db_params );
        require Wiki::Toolkit::Setup::SII;
        Wiki::Toolkit::Setup::SII::setup( indexdb => $indexdb );
        $wiki_config{search} = Wiki::Toolkit::Search::SII->new(indexdb =>$indexdb);
    } elsif ( $details->{plucene_path} ) {
        require Wiki::Toolkit::Search::Plucene;
        my $dir = $details->{plucene_path};
        unlink <$dir/*>; # don't die if false since there may be no files
        if ( -d $dir ) {
            rmdir $dir or die $!;
        }
        mkdir $dir or die $!;
        $wiki_config{search} = Wiki::Toolkit::Search::Plucene->new( path => $dir );
    } elsif ( $details->{lucy_path} ) {
        require Wiki::Toolkit::Search::Lucy;
        require File::Path;
        my $dir = $details->{lucy_path};
        File::Path::rmtree( $dir, 0, 1 ); # 0 = verbose, 1 = safe
        mkdir $dir or die $!;
        $wiki_config{search} = Wiki::Toolkit::Search::Lucy->new(
            path => $dir,
            metadata_fields => [ "address", "category", "locale" ] );
    }

    # Make a wiki.
    my $wiki = Wiki::Toolkit->new( %wiki_config );
    $$self++;
    return $wiki;
}

=item B<configured_databases>

  my @configured_databases = $iterator->configured_databases;

Returns the @configured_databases array detailing configured test
databases. Useful for very low-level testing only.

=cut

sub configured_databases {
    my @configured_databases;
    foreach my $dbtype (qw( MySQL Pg SQLite )) {
        push @configured_databases, $datastore_info{$dbtype}
            if $datastore_info{$dbtype};
    }
    return @configured_databases;
}

sub _test_dsn {
    my ( $dsn, $dbuser, $dbpass ) = @_;
    my $dbh = eval {
        DBI->connect($dsn, $dbuser, $dbpass, {RaiseError => 1});
    };
    return $@;
}

=back

=head1 SEE ALSO

L<Wiki::Toolkit>

=head1 AUTHOR

Kake Pugh (kake@earth.li).

=head1 COPYRIGHT

Copyright (C) 2003-2004 Kake Pugh. All Rights Reserved.
Copyright (C) 2008 the Wiki::Toolkit team. All Rights Reserved.

This module is free software; you can redistribute it and/or modify it
under the same terms as Perl itself.

=head1 CAVEATS

If you have the L<Search::InvertedIndex> backend configured (see
L<Wiki::Toolkit::Search::SII>) then your tests will raise warnings like

    (in cleanup) Search::InvertedIndex::DB::Mysql::lock() -
    testdb is not open. Can't lock.
    at /usr/local/share/perl/5.6.1/Search/InvertedIndex.pm line 1348

or

    (in cleanup) Can't call method "sync" on an undefined value
    at /usr/local/share/perl/5.6.1/Tie/DB_File/SplitHash.pm line 331
    during global destruction.

in unexpected places. I don't know whether this is a bug in me or in
L<Search::InvertedIndex>.

=cut

1;
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Extending.pod0000644000175000017500000001250612243757607021430 0ustar vagrantvagrant=head1 NAME

Extending.pod - How to extend Wiki::Toolkit with your own plugins.

=head1 LIMITATIONS

The extension mechanism is currently only defined for database-backed
setups, but since nobody has written any other kind of backend I think
we're fine for now.

=head1 THE SIMPLEST WAY

You can extend L<Wiki::Toolkit> in a fairly simplified way without the
use of plugins, by supplying a hash of metadata when you write a node.
For example:

  $wiki->write_node( $node, $content, $checksum,
                     { postcode => $postcode } );

and on node retrieval you'll get it back again:

  my %node = $wiki->retrieve_node( $node );
  my $postcode = $node{metadata}{postcode}[0];

You can supply more than one value for each type of metadata:

  $wiki->write_node( $node, $content, $checksum,
                     { postcode => "W6 9PL",
                       category => [ "Thai Food", "Restaurant",
                                     "Hammersmith" ] } );

And get back a list of nodes which have a given value for a given
metadata type:

  my @nodes = $wiki->list_nodes_by_metadata(
      metadata_type  => "category",
      metadata_value => "Hammersmith" );

For anything more complicated you will need to write a plugin.

=head1 PLUGIN BASE CLASS

Plugins should inherit from C<Wiki::Toolkit::Plugin>. This base class
provides the following methods to give access to a C<Wiki::Toolkit>
object's backends:

=over 4

=item * C<datastore> - returns the store object

=item * C<indexer> - returns the search object

=item * C<formatter> - returns the formatter object

=back

If you want these methods to return anything useful then call

  $wiki->register_plugin( plugin => $plugin );

before calling say

  my %node_data = $plugin->datastore->retrieve_node( "Foo" );

=head1 CALLING API

  my $plugin = Wiki::Toolkit::Plugin::Foo->new( ...any required args... );
  $wiki->register_plugin( plugin => $plugin );
  $wiki->write_node( "Test Node", "Test", $checksum,
                     { foo_data => { a => "apple", b => "banana" } } );
  my $bee = $plugin->get_word( node => "Test Node", letter => "b" );

or

  my $plugin = OpenGuides::London::Underground->new;
  $wiki->register_plugin( plugin => $plugin );
  $wiki->write_node( "Hammersmith Station", "a station", $checksum,
      { tube_data => [ { line => "Piccadilly",
                         direction => "Eastbound",
                         next_station => "Baron's Court Station" },
                       { line => "Piccadilly",
                         direction => "Westbound",
                         next_station => "Acton Town Station" } ] } );
  # Put more data in, then
  my @route = $plugin->find_route( from => "Holborn Station",
                                   to   => "Acton Town Station" );

=head1 STORE ACCESS

A plugin named Wiki::Toolkit::Plugin::Foo may access the backend
database directly like so:

=over 4

=item * Read-only access to any table

=item * Read-write access to any table whose name begins with
C<"p_" . $Wiki::Toolkit::Plugin::Foo::plugin_key . "_">

C<$Wiki::Toolkit::Plugin::Foo::plugin_key> should be different from the
keys of all other plugins. No, I haven't set anything up to ensure
this.

=back

=head1 REQUIREMENTS FOR PLUGIN AUTHORS

Either be database-agnostic, or state clearly in your docs which
databases you support, and handle errors nicely. Be aware that
non-database backends may exist in the future.

Be aware of whether you need to check for locks explicitly in different
databases (see code of Wiki::Toolkit::Store::* to find out).

=head1 REQUIRED METHODS

=over 4

=item B<on_register>

Check that any tables you require are set up, and set them up if not.
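A minimal sketch (the plugin class, table name and SQL here are
hypothetical, and assume a database-backed store whose handle is
available via C<< $self->datastore->dbh >>):

  package Wiki::Toolkit::Plugin::Foo;
  use base 'Wiki::Toolkit::Plugin';

  our $plugin_key = "foo";

  sub on_register {
      my $self = shift;
      my $dbh  = $self->datastore->dbh;
      # Create our private table unless it already exists; plugin
      # tables must be named "p_" . $plugin_key . "_..." as above.
      my %tables = map { $_ => 1 } $dbh->tables;
      unless ( grep { /p_foo_words/ } keys %tables ) {
          $dbh->do( "CREATE TABLE p_foo_words
                     ( node varchar(200), word varchar(200) )" )
              or die $dbh->errstr;
      }
      return 1;
  }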
=back

=head1 OPTIONAL METHODS

=over 4

=item B<post_write>

This will be called every time a node is written, with the arguments
like so:

  $plugin->post_write( node     => $node_name,
                       version  => $version_number,
                       content  => $content,
                       metadata => \%user_defined_metadata );

This will happen after the node data is all written, but before any
lock is released. We could probably reimplement the searches as
plugins like this if we want to, but this will require writing extra
backends for Search::InvertedIndex so it can work within the same
database.

The user-defined metadata will already have been stored in the backend
but it is available here for you to do what you will with it.

Its return value should be true on success and false on error.

=item B<post_read>

B<THIS IS NOT YET IMPLEMENTED.>

This will be called every time a node is read, with the arguments like
so:

  $plugin->post_read( node     => $node_name,
                      version  => $version_number,
                      content  => $content,
                      metadata => \%user_defined_metadata );

It cannot affect the data returned to the caller. It should be used
for its side-effects, for example tracking the number of times a given
node is accessed.

Its return value should be true on success and false on error.

=back

=head1 PLUGIN CONFLICTS

What if we have more than one plugin registered? What if we change the
mechanism to allow the plugins to change the data stored in the
database/returned to the caller?
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Feed/0000755000175000017500000000000012243760523017625 5ustar vagrantvagrantWiki-Toolkit-0.83/lib/Wiki/Toolkit/Feed/Listing.pm0000644000175000017500000001526212243757607021613 0ustar vagrantvagrantpackage Wiki::Toolkit::Feed::Listing;

use strict;
use Carp qw( croak );

=head1 NAME

Wiki::Toolkit::Feed::Listing - parent class for Feeds from Wiki::Toolkit.

=head1 DESCRIPTION

Handles common data fetching tasks, so that child classes need only
worry about formatting the feeds.

Also enforces some common methods that must be implemented.

=head1 METHODS

=head2 C<fetch_recently_changed_nodes>

Based on the supplied criteria, fetch a list of the recently changed
nodes

=cut

sub fetch_recently_changed_nodes {
    my ($self, %args) = @_;

    my $wiki = $self->{wiki};

    my %criteria = (
        ignore_case => 1,
    );

    # If we're not passed any parameters to limit the items returned,
    # default to 15.
    $args{days} ?
        $criteria{days} = $args{days};
    } else {
        $criteria{last_n_changes} = $args{items} || 15;
    }

    my %was_filter;
    if ( $args{filter_on_metadata} ) {
        %was_filter = %{ $args{filter_on_metadata} };
    }
    if ( $args{ignore_minor_edits} ) {
        %was_filter = ( %was_filter, major_change => 1 );
    }
    $criteria{metadata_was} = \%was_filter;

    my @changes = $wiki->list_recent_changes(%criteria);

    return @changes;
}

=head2 C<fetch_newest_for_recently_changed>

Based on the supplied criteria (though not all of the criteria used by
B<fetch_recently_changed_nodes> apply), find the newest node from the
set of recently changed nodes. Normally used for dating the feed as a
whole.

=cut

sub fetch_newest_for_recently_changed {
    my ($self, %args) = @_;

    my @changes = $self->fetch_recently_changed_nodes( %args );

    return $changes[0];
}

=head2 C<fetch_node_all_versions>

For a given node (name or ID), return all the versions there have been,
including all metadata required for it to go into a "recent changes"
style listing.

=cut

sub fetch_node_all_versions {
    my ($self, %args) = @_;

    # Check we got the right options
    unless($args{'name'}) {
        return ();
    }

    # Do the fetch
    my @nodes = $self->{wiki}->list_node_all_versions(
                        name          => $args{'name'},
                        with_content  => 0,
                        with_metadata => 1,
    );

    # Ensure that all the metadata fields are arrays and not strings
    foreach my $node (@nodes) {
        foreach my $mdk (keys %{$node->{'metadata'}}) {
            unless(ref($node->{'metadata'}->{$mdk}) eq "ARRAY") {
                $node->{'metadata'}->{$mdk} = [ $node->{'metadata'}->{$mdk} ];
            }
        }
    }

    return @nodes;
}

=head2 C<recent_changes>

Build a feed of the recent changes to the Wiki::Toolkit instance, using
any supplied parameters to narrow the results. (The feed format - Atom,
RSS and so on - is determined by the child class.)

If the argument "also_return_timestamp" is supplied, it will return a
list of the feed and the feed timestamp. Otherwise it just returns the
feed.

=cut

sub recent_changes {
    my ($self, %args) = @_;

    my @changes = $self->fetch_recently_changed_nodes(%args);
    my $feed_timestamp = $self->feed_timestamp(
                              $self->fetch_newest_for_recently_changed(%args)
    );

    my $feed = $self->generate_node_list_feed($feed_timestamp, @changes);

    if ($args{'also_return_timestamp'}) {
        return ($feed,$feed_timestamp);
    } else {
        return $feed;
    }
}

=head2 C<node_all_versions>

Build a feed of all the different versions of a given node.

If the argument "also_return_timestamp" is supplied, it will return a
list of the feed and the feed timestamp. Otherwise it just returns the
feed.

=cut

sub node_all_versions {
    my ($self, %args) = @_;

    my @all_versions   = $self->fetch_node_all_versions(%args);
    my $feed_timestamp = $self->feed_timestamp( $all_versions[0] );

    my $feed = $self->generate_node_list_feed($feed_timestamp, @all_versions);

    if($args{'also_return_timestamp'}) {
        return ($feed,$feed_timestamp);
    } else {
        return $feed;
    }
}

=head2 C<format_geo>

Using the geo and space XML namespaces, format the supplied node
metadata into geo: and space: tags, suitable for inclusion in a feed
with those namespaces imported.
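For instance, a call along these lines (illustrative values; the keys
shown are the ones C<format_geo> knows how to map):

  my $geo_xml = $feed->format_geo(
      latitude  => 51.494,
      longitude => -0.223,
      distance  => 150,
  );

  # $geo_xml now contains something like (tag order may vary):
  #   <geo:lat>51.494</geo:lat>
  #   <geo:long>-0.223</geo:long>
  #   <space:distance>150</space:distance>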
=cut

sub format_geo {
    my ($self, @args) = @_;

    my %metadata;
    if(ref($args[0]) eq "HASH") {
        %metadata = %{ $args[0] };
    } else {
        %metadata = @args;
    }

    my %mapping = (
            "os_x"      => "space:os_x",
            "os_y"      => "space:os_y",
            "latitude"  => "geo:lat",
            "longitude" => "geo:long",
            "distance"  => "space:distance",
    );

    my $feed = "";
    foreach my $geo (keys %metadata) {
        my $geo_val = $metadata{$geo};
        if(ref($geo_val) eq "ARRAY") {
            $geo_val = $geo_val->[0];
        }

        if($mapping{$geo}) {
            my $tag = $mapping{$geo};
            $feed .= "  <$tag>$geo_val</$tag>\n";
        }
    }

    return $feed;
}

# Utility method, to help with argument passing where one of a list of
# arguments must be supplied
sub handle_supply_one_of {
    my ($self,$mref,$aref) = @_;
    my %mustoneof = %{$mref};
    my %args = %{$aref};

    foreach my $oneof (keys %mustoneof) {
        my $val = undef;
        foreach my $poss (@{$mustoneof{$oneof}}) {
            unless($val) {
                if($args{$poss}) { $val = $args{$poss}; }
            }
        }
        if($val) {
            $self->{$oneof} = $val;
        } else {
            croak "No $oneof supplied, or one of its equivalents ("
                  . join(",", @{$mustoneof{$oneof}}) . ")";
        }
    }
}

=pod

The following are methods that any feed renderer must provide:

=head2 C<feed_timestamp>

All implementing feed renderers must implement a method to produce a
feed-specific timestamp, based on the supplied node.

=cut

sub feed_timestamp {
    die("Not implemented by feed renderer!");
}

=head2 C<generate_node_list_feed>

All implementing feed renderers must implement a method to produce a
feed from the supplied list of nodes.

=cut

sub generate_node_list_feed {
    die("Not implemented by feed renderer!");
}

=head2 C<generate_node_name_distance_feed>

All implementing feed renderers must implement a method to produce a
stripped-down feed from the supplied list of node names, and optionally
locations and distance from a reference point.

=cut

sub generate_node_name_distance_feed {
    die("Not implemented by feed renderer!");
}

=head2 C<parse_feed_timestamp>

Take a feed_timestamp and return a Time::Piece object.

=cut

sub parse_feed_timestamp {
    die("Not implemented by feed renderer!");
}

1;

__END__

=head1 MAINTAINER

The Wiki::Toolkit team, http://www.wiki-toolkit.org/.

=head1 COPYRIGHT AND LICENSE

Copyright 2006-2009 the Wiki::Toolkit team. This module is free
software; you can redistribute it and/or modify it under the same terms
as Perl itself.

=cut
Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Feed/Atom.pm0000755000175000017500000003221112243757607021076 0ustar vagrantvagrantpackage Wiki::Toolkit::Feed::Atom;

use strict;

use vars qw( @ISA $VERSION );
$VERSION = '0.03';

use POSIX 'strftime';
use Time::Piece;
use URI::Escape;
use Carp qw( croak );

use Wiki::Toolkit::Feed::Listing;
@ISA = qw( Wiki::Toolkit::Feed::Listing );

=head1 NAME

Wiki::Toolkit::Feed::Atom - A Wiki::Toolkit plugin to output RecentChanges Atom.
=head1 DESCRIPTION

This is an alternative access to the recent changes of a Wiki::Toolkit
wiki. It outputs the Atom Syndication Format as described at
L<http://www.atomenabled.org/developers/syndication/>.

This module is a straight port of L<Wiki::Toolkit::Feed::RSS>.

=head1 SYNOPSIS

  use Wiki::Toolkit;
  use Wiki::Toolkit::Feed::Atom;

  my $wiki = Wiki::Toolkit->new( ... );  # See perldoc Wiki::Toolkit

  # Set up the Atom feeder with the mandatory arguments - see
  # C<new()> below for more, optional, arguments.
  my $atom = Wiki::Toolkit::Feed::Atom->new(
    wiki      => $wiki,
    site_name => 'My Wiki',
    site_url  => 'http://example.com/',
    make_node_url => sub {
      my ($node_name, $version) = @_;
      return 'http://example.com/?id=' . uri_escape($node_name)
           . ';version=' . uri_escape($version);
    },
    html_equiv_link => 'http://example.com/?RecentChanges',
    atom_link => 'http://example.com/?action=rc;format=atom',
  );

  print "Content-type: application/atom+xml\n\n";
  print $atom->recent_changes;

=head1 METHODS

=head2 C<new()>

  my $atom = Wiki::Toolkit::Feed::Atom->new(
    # Mandatory arguments:
    wiki      => $wiki,
    site_name => 'My Wiki',
    site_url  => 'http://example.com/',
    make_node_url => sub {
      my ($node_name, $version) = @_;
      return 'http://example.com/?id=' . uri_escape($node_name)
           . ';version=' . uri_escape($version);
    },
    html_equiv_link => 'http://example.com/?RecentChanges',
    atom_link => 'http://example.com/?action=rc;format=atom',

    # Optional arguments:
    site_description => 'My wiki about my stuff',
    software_name     => $your_software_name,     # e.g. "Wiki::Toolkit"
    software_version  => $your_software_version,  # e.g. "0.73"
    software_homepage => $your_software_homepage, # e.g. "http://search.cpan.org/dist/CGI-Wiki/"
    encoding => 'UTF-8'
  );

C<wiki> must be a L<Wiki::Toolkit> object. C<make_node_url>, if
supplied, must be a coderef.

The mandatory arguments are:

=over 4

=item * wiki

=item * site_name

=item * site_url

=item * make_node_url

=item * html_equiv_link or recent_changes_link

=item * atom_link

=back

The three optional arguments

=over 4

=item * software_name

=item * software_version

=item * software_homepage

=back

are used to generate the C<generator> part of the feed.

The optional argument

=over 4

=item * encoding

=back

will be used to specify the character encoding in the feed. If not set,
it will default to the wiki store's encoding.

=head2 C<recent_changes()>

  $wiki->write_node(
                     'About This Wiki',
                     'blah blah blah',
                     $checksum,
                     {
                       comment  => 'Stub page, please update!',
                       username => 'Fred',
                     }
  );

  print "Content-type: application/atom+xml\n\n";
  print $atom->recent_changes;

  # Or get something other than the default of the latest 15 changes.
  print $atom->recent_changes( items => 50 );
  print $atom->recent_changes( days => 7 );

  # Or ignore minor edits.
  print $atom->recent_changes( ignore_minor_edits => 1 );

  # Personalise your feed further - consider only changes
  # made by Fred to pages about bookshops.
  print $atom->recent_changes(
             filter_on_metadata => {
                         username => 'Fred',
                         category => 'Bookshops',
                       },
              );

If using C<filter_on_metadata>, note that only changes satisfying
I<all> criteria will be returned.

B<Note:> Many of the fields emitted by the Atom generator are taken
from the node metadata. The form of this metadata is I<not> mandated by
L<Wiki::Toolkit>. Your wiki application should make sure to store some
or all of the following metadata when calling C<write_node>:

=over 4

=item B<comment> - a brief comment summarising the edit that has just
been made; will be used in the summary for this item. Defaults to
'No description given for node'.
=item B<username> - an identifier for the person who made the edit;
will be used as the author for this item.

=item B<host> - the hostname or IP address of the computer used to make
the edit; if no username is supplied then this will be used as the
author for this item instead. If neither is supplied, the author
defaults to 'Anonymous'.

=back

=cut

sub new {
    my $class = shift;
    my $self  = {};
    bless $self, $class;

    my %args = @_;
    my $wiki = $args{wiki};

    unless ($wiki && UNIVERSAL::isa($wiki, 'Wiki::Toolkit')) {
        croak 'No Wiki::Toolkit object supplied';
    }
    $self->{wiki} = $wiki;

    # Mandatory arguments.
    foreach my $arg (qw/site_name site_url make_node_url atom_link/) {
        croak "No $arg supplied" unless $args{$arg};
        $self->{$arg} = $args{$arg};
    }

    # Must-supply-one-of arguments
    my %mustoneof = ( 'html_equiv_link' => ['html_equiv_link','recent_changes_link'] );
    $self->handle_supply_one_of(\%mustoneof,\%args);

    # Optional arguments.
    foreach my $arg (qw/site_description software_name software_version
                        software_homepage encoding/) {
        $self->{$arg} = $args{$arg} || '';
    }

    # Supply some defaults, if a blank string isn't what we want
    unless($self->{encoding}) {
        $self->{encoding} = $self->{wiki}->store->{_charset};
    }

    $self->{timestamp_fmt} = $Wiki::Toolkit::Store::Database::timestamp_fmt;
    $self->{utc_offset} = strftime "%z", localtime;
    $self->{utc_offset} =~ s/(..)(..)$/$1:$2/;

    # Escape any &'s in the urls
    foreach my $key (qw(site_url atom_link)) {
        my @ands = ($self->{$key} =~ /(\&.{1,6})/g);
        foreach my $and (@ands) {
            if($and ne "&amp;") {
                my $new_and = $and;
                $new_and =~ s/\&/\&amp;/;
                $self->{$key} =~ s/$and/$new_and/;
            }
        }
    }

    $self;
}

# Internal method, to build all the stuff that will go at the start of a feed.
# Outputs the feed header, and initial feed info.
sub build_feed_start {
    my ($self,$atom_timestamp) = @_;

    my $generator = '';

    if ($self->{software_name}) {
        $generator  = '  <generator';
        $generator .= ' uri="' . $self->{software_homepage} . '"'
          if $self->{software_homepage};
        $generator .= ' version="' . $self->{software_version} . '"'
          if $self->{software_version};
        $generator .= ">\n";
        $generator .= $self->{software_name} . "</generator>\n";
    }

    my $subtitle = $self->{site_description}
      ? '<subtitle>' . $self->{site_description} . "</subtitle>\n"
      : '';

    $atom_timestamp ||= '';

    my $atom = qq{<?xml version="1.0" encoding="} . $self->{encoding} . qq{"?>

<feed
 xmlns       = "http://www.w3.org/2005/Atom"
 xmlns:geo   = "http://www.w3.org/2003/01/geo/wgs84_pos#"
 xmlns:space = "http://frot.org/space/0.1/"
>

  <link href="} . $self->{site_url} . qq{" />
  <title>} . $self->{site_name} . qq{</title>
  <updated>} . $atom_timestamp . qq{</updated>
  <id>} . $self->{site_url} . qq{</id>
  $subtitle};

    return $atom;
}

# Internal method, to build all the stuff that will go at the end of a feed.
sub build_feed_end {
    my ($self,$feed_timestamp) = @_;

    return "</feed>\n";
}

=head2 C<generate_node_list_feed>

Generate and return an Atom feed for a list of nodes.

=cut

sub generate_node_list_feed {
    my ($self,$atom_timestamp,@nodes) = @_;

    my $atom = $self->build_feed_start($atom_timestamp);

    my (@urls, @items);

    foreach my $node (@nodes) {
        my $node_name = $node->{name};

        my $item_timestamp = $node->{last_modified};

        # Make a Time::Piece object.
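        # The store hands timestamps back in $self->{timestamp_fmt}
        # (e.g. "2013-11-22 12:34:56"); re-emit them in the RFC 3339
        # form Atom expects, e.g. "2013-11-22T12:34:56+00:00".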
        my $time = Time::Piece->strptime($item_timestamp, $self->{timestamp_fmt});

        my $utc_offset = $self->{utc_offset};

        $item_timestamp = $time->strftime( "%Y-%m-%dT%H:%M:%S$utc_offset" );

        my $author = $node->{metadata}{username}[0]
                  || $node->{metadata}{host}[0]
                  || 'Anonymous';
        my $description = $node->{metadata}{comment}[0]
                       || 'No description given for node';
        $description .= " [$author]" if $author;

        my $version = $node->{version};
        my $status  = (1 == $version) ? 'new' : 'updated';

        my $major_change = $node->{metadata}{major_change}[0];
        $major_change = 1 unless defined $major_change;
        my $importance = $major_change ? 'major' : 'minor';

        my $url = $self->{make_node_url}->($node_name, $version);

        # make XML-clean
        my $title = $node_name;
        $title =~ s/&/&amp;/g;
        $title =~ s/</&lt;/g;
        $title =~ s/>/&gt;/g;

        # Pop the categories into atom:category elements (4.2.2)
        # We can do this because the spec says:
        #   "This specification assigns no meaning to the content (if any)
        #    of this element."
        # TODO: Decide if we should include the "all categories listing" url
        #       as the scheme (URI) attribute?
        my $category_atom = "";
        if ($node->{metadata}->{category}) {
            foreach my $cat (@{ $node->{metadata}->{category} }) {
                $category_atom .= "    <category term=\"$cat\" />\n";
            }
        }

        # Include geospacial data, if we have it
        my $geo_atom = $self->format_geo($node->{metadata});

        # TODO: Find an Atom equivalent of ModWiki, so we can include more info

        push @items, qq{
  <entry>
    <title>$title</title>
    <link href="$url" />
    <summary>$description</summary>
    <updated>$item_timestamp</updated>
    <author><name>$author</name></author>
    <id>$url</id>
$category_atom$geo_atom  </entry>
};
    }

    $atom .= join('', @items) . "\n";
    $atom .= $self->build_feed_end($atom_timestamp);

    return $atom;
}

=head2 C<generate_node_name_distance_feed>

Generate a very cut-down Atom feed, based just on the nodes, their
locations (if given), and their distance from a reference location (if
given).

Typically used on search feeds.

=cut

sub generate_node_name_distance_feed {
    my ($self,$atom_timestamp,@nodes) = @_;

    my $atom = $self->build_feed_start($atom_timestamp);

    my (@urls, @items);

    foreach my $node (@nodes) {
        my $node_name = $node->{name};

        my $url = $self->{make_node_url}->($node_name);

        # make XML-clean
        my $title = $node_name;
        $title =~ s/&/&amp;/g;
        $title =~ s/</&lt;/g;
        $title =~ s/>/&gt;/g;

        # What location stuff do we have?
        my $geo_atom = $self->format_geo($node);

        push @items, qq{
  <entry>
    <title>$title</title>
    <link href="$url" />
    <id>$url</id>
$geo_atom  </entry>
};
    }

    $atom .= join('', @items) . "\n";
    $atom .= $self->build_feed_end($atom_timestamp);

    return $atom;
}

=head2 C<feed_timestamp()>

  print $atom->feed_timestamp();

Returns the timestamp of the feed - equivalent to the timestamp of the
most recent item in the feed - formatted with POSIX::strftime as
"%Y-%m-%dT%H:%M:%S" plus the local UTC offset (e.g.
"2000-02-29T12:34:56+00:00"). Takes the newest node (as returned by
C<fetch_newest_for_recently_changed>) as its argument.

You will most likely need this to print a Last-Modified HTTP header so
user-agents can determine whether they need to reload the feed or not.

=cut

sub feed_timestamp {
    my ($self, $newest_node) = @_;

    my $time;
    if ($newest_node->{last_modified}) {
        $time = Time::Piece->strptime( $newest_node->{last_modified},
                                       $self->{timestamp_fmt} );
    } else {
        $time = localtime;
    }

    my $utc_offset = $self->{utc_offset};

    return $time->strftime( "%Y-%m-%dT%H:%M:%S$utc_offset" );
}

=head2 C<parse_feed_timestamp>

Take a feed_timestamp and return a Time::Piece object.

=cut

sub parse_feed_timestamp {
    my ($self, $feed_timestamp) = @_;

    $feed_timestamp = substr($feed_timestamp, 0,
                             -length( $self->{utc_offset}));
    return Time::Piece->strptime( $feed_timestamp, '%Y-%m-%dT%H:%M:%S' );
}

1;

__END__

=head1 SEE ALSO

=over 4

=item * L<Wiki::Toolkit>

=item * L<Wiki::Toolkit::Feed::RSS>

=back

=head1 MAINTAINER

The Wiki::Toolkit team, http://www.wiki-toolkit.org/.

=head1 COPYRIGHT AND LICENSE

Copyright 2006-2009 Earle Martin and the Wiki::Toolkit team.
Copyright 2012 the Wiki::Toolkit team. This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =head1 THANKS Kake Pugh for originally writing Wiki::Toolkit::Feed::RSS and indeed Wiki::Toolkit itself. =cut Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Feed/RSS.pm0000755000175000017500000004031312243757607020647 0ustar vagrantvagrantpackage Wiki::Toolkit::Feed::RSS; use strict; use vars qw( @ISA $VERSION ); $VERSION = '0.11'; use POSIX 'strftime'; use Time::Piece; use URI::Escape; use Carp qw( croak ); use Wiki::Toolkit::Feed::Listing; @ISA = qw( Wiki::Toolkit::Feed::Listing ); =head1 NAME Wiki::Toolkit::Feed::RSS - Output RecentChanges RSS for Wiki::Toolkit. =head1 DESCRIPTION This is an alternative access to the recent changes of a Wiki::Toolkit wiki. It outputs RSS as described by the ModWiki proposal at L =head1 SYNOPSIS use Wiki::Toolkit; use Wiki::Toolkit::Feed::RSS; my $wiki = CGI::Wiki->new( ... ); # See perldoc Wiki::Toolkit # Set up the RSS feeder with the mandatory arguments - see # C below for more, optional, arguments. my $rss = Wiki::Toolkit::Feed::RSS->new( wiki => $wiki, site_name => 'My Wiki', site_url => 'http://example.com/', make_node_url => sub { my ($node_name, $version) = @_; return 'http://example.com/?id=' . uri_escape($node_name) . ';version=' . uri_escape($version); }, html_equiv_link => 'http://example.com/?RecentChanges', encoding => 'UTF-8' ); print "Content-type: application/xml\n\n"; print $rss->recent_changes; =head1 METHODS =head2 C my $rss = Wiki::Toolkit::Feed::RSS->new( # Mandatory arguments: wiki => $wiki, site_name => 'My Wiki', site_url => 'http://example.com/', make_node_url => sub { my ($node_name, $version) = @_; return 'http://example.com/?id=' . uri_escape($node_name) . ';version=' . uri_escape($version); }, html_equiv_link => 'http://example.com/?RecentChanges', # Optional arguments: site_description => 'My wiki about my stuff', interwiki_identifier => 'MyWiki', make_diff_url => sub { my $node_name = shift; return 'http://example.com/?diff=' . uri_escape($node_name) }, make_history_url => sub { my $node_name = shift; return 'http://example.com/?hist=' . uri_escape($node_name) }, software_name => $your_software_name, # e.g. "CGI::Wiki" software_version => $your_software_version, # e.g. "0.73" software_homepage => $your_software_homepage, # e.g. "http://search.cpan.org/dist/Wiki-Toolkit/" ); C must be a L object. C, and C and C, if supplied, must be coderefs. The mandatory arguments are: =over 4 =item * wiki =item * site_name =item * site_url =item * make_node_url =item * html_equiv_link or recent_changes_link =back The three optional arguments =over 4 =item * software_name =item * software_version =item * software_homepage =back are used to generate DOAP (Description Of A Project - see L) metadata for the feed to show what generated it. The optional argument =over 4 =item * encoding =back will be used to specify the character encoding in the feed. If not set, will default to the wiki store's encoding. =head2 C $wiki->write_node( 'About This Wiki', 'blah blah blah', $checksum, { comment => 'Stub page, please update!', username => 'Fred', } ); print "Content-type: application/xml\n\n"; print $rss->recent_changes; # Or get something other than the default of the latest 15 changes. print $rss->recent_changes( items => 50 ); print $rss->recent_changes( days => 7 ); # Or ignore minor edits. 
print $rss->recent_changes( ignore_minor_edits => 1 ); # Personalise your feed further - consider only changes # made by Fred to pages about bookshops. print $rss->recent_changes( filter_on_metadata => { username => 'Fred', category => 'Bookshops', }, ); If using C, note that only changes satisfying I criteria will be returned. B Many of the fields emitted by the RSS generator are taken from the node metadata. The form of this metadata is I mandated by L. Your wiki application should make sure to store some or all of the following metadata when calling C: =over 4 =item B - a brief comment summarising the edit that has just been made; will be used in the RDF description for this item. Defaults to the empty string. =item B - an identifier for the person who made the edit; will be used as the Dublin Core contributor for this item, and also in the RDF description. Defaults to the empty string. =item B - the hostname or IP address of the computer used to make the edit; if no username is supplied then this will be used as the Dublin Core contributor for this item. Defaults to the empty string. =item B - true if the edit was a major edit and false if it was a minor edit; used for the importance of the item. Defaults to true (ie if C was not defined or was explicitly stored as C). =back =head2 C print $rss->feed_timestamp(); Returns the timestamp of the feed in POSIX::strftime style ("Tue, 29 Feb 2000 12:34:56 GMT"), which is equivalent to the timestamp of the most recent item in the feed. Takes the same arguments as recent_changes(). You will most likely need this to print a Last-Modified HTTP header so user-agents can determine whether they need to reload the feed or not. =cut sub new { my $class = shift; my $self = {}; bless $self, $class; my %args = @_; my $wiki = $args{wiki}; unless ($wiki && UNIVERSAL::isa($wiki, 'Wiki::Toolkit')) { croak 'No Wiki::Toolkit object supplied'; } $self->{wiki} = $wiki; # Mandatory arguments. foreach my $arg (qw/site_name site_url make_node_url/) { croak "No $arg supplied" unless $args{$arg}; $self->{$arg} = $args{$arg}; } # Must-supply-one-of arguments my %mustoneof = ( 'html_equiv_link' => ['html_equiv_link','recent_changes_link'] ); $self->handle_supply_one_of(\%mustoneof,\%args); # Optional arguments. foreach my $arg (qw/site_description interwiki_identifier make_diff_url make_history_url encoding software_name software_version software_homepage/) { $self->{$arg} = $args{$arg} || ''; } # Supply some defaults, if a blank string isn't what we want unless($self->{encoding}) { $self->{encoding} = $self->{wiki}->store->{_charset}; } $self->{timestamp_fmt} = $Wiki::Toolkit::Store::Database::timestamp_fmt; $self->{utc_offset} = strftime "%z", localtime; $self->{utc_offset} =~ s/(..)(..)$/$1:$2/; $self; } # Internal method, to build all the stuff that will go at the start of a feed. # Generally will output namespaces, headers and so on. sub build_feed_start { my ($self,$feed_timestamp) = @_; #"http://purl.org/rss/1.0/modules/wiki/" return qq{{encoding} .qq{"?> }; } # Internal method, to build all the stuff (except items) to go inside the channel sub build_feed_mid { my ($self,$feed_timestamp) = @_; my $rss .= qq{} . $self->{site_url} . qq{\n}; if ($self->{software_name}) { $rss .= qq{ } . $self->{software_name} . qq{\n}; } if ($self->{software_name} && $self->{software_homepage}) { $rss .= qq{ \n}; } if ($self->{software_name} && $self->{software_version}) { $rss .= qq{ } . $self->{software_version} . 
qq{ \n}; } if ($self->{software_name}) { $rss .= qq{ \n}; } $feed_timestamp ||= ''; $rss .= qq{} . $self->{site_name} . qq{ } . $self->{html_equiv_link} . qq{ } . $self->{site_description} . qq{ } . $feed_timestamp . qq{ } . $self->{interwiki_identifier} . qq{}; return $rss; } # Internal method, to build all the stuff that will go at the end of a feed sub build_feed_end { my ($self,$feed_timestamp) = @_; return "\n"; } =head2 C Generate and return an RSS feed for a list of nodes =cut sub generate_node_list_feed { my ($self,$feed_timestamp,@nodes) = @_; # Start our feed my $rss = $self->build_feed_start($feed_timestamp); $rss .= qq{ }; $rss .= $self->build_feed_mid($feed_timestamp); # Generate the items list, and the individiual item entries my (@urls, @items); foreach my $node (@nodes) { my $node_name = $node->{name}; my $timestamp = $node->{last_modified}; # Make a Time::Piece object. my $time = Time::Piece->strptime($timestamp, $self->{timestamp_fmt}); my $utc_offset = $self->{utc_offset}; $timestamp = $time->strftime( "%Y-%m-%dT%H:%M:%S$utc_offset" ); my $author = $node->{metadata}{username}[0] || $node->{metadata}{host}[0] || ''; my $description = $node->{metadata}{comment}[0] || ''; $description .= " [$author]" if $author; my $version = $node->{version}; my $status = (1 == $version) ? 'new' : 'updated'; my $major_change = $node->{metadata}{major_change}[0]; $major_change = 1 unless defined $major_change; my $importance = $major_change ? 'major' : 'minor'; my $url = $self->{make_node_url}->($node_name, $version); push @urls, qq{ \n}; my $diff_url = ''; if ($self->{make_diff_url}) { $diff_url = $self->{make_diff_url}->($node_name); } my $history_url = ''; if ($self->{make_history_url}) { $history_url = $self->{make_history_url}->($node_name); } my $node_url = $self->{make_node_url}->($node_name); my $rdf_url = $node_url; $rdf_url =~ s/\?/\?id=/; $rdf_url .= ';format=rdf'; # make XML-clean my $title = $node_name; $title =~ s/&/&/g; $title =~ s//>/g; # Pop the categories into dublin core subject elements # (http://dublincore.org/usage/terms/history/#subject-004) # TODO: Decide if we should include the "all categories listing" url # as the scheme (URI) attribute? my $category_rss = ""; if($node->{metadata}->{category}) { foreach my $cat (@{ $node->{metadata}->{category} }) { $category_rss .= " $cat\n"; } } # Include geospacial data, if we have it my $geo_rss = $self->format_geo($node->{metadata}); push @items, qq{ $title $url $description $timestamp $author $status $importance $diff_url $version $history_url $category_rss $geo_rss }; } # Output the items list $rss .= qq{ } . join('', @urls) . qq{ }; # Output the individual item entries $rss .= join('', @items) . "\n"; # Finish up $rss .= $self->build_feed_end($feed_timestamp); return $rss; } =head2 C Generate a very cut down rss feed, based just on the nodes, their locations (if given), and their distance from a reference location (if given). Typically used on search feeds. 
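For example - an illustrative call, with C<$rss> as constructed in the
SYNOPSIS above and C<@nodes> as might come back from a location search
(hashes with a C<name> key and optional C<latitude>, C<longitude> and
C<distance> keys):

  my @nodes = (
      { name => "Blue Anchor", latitude  => 51.490,
        longitude => -0.229,   distance  => 120 },
      { name => "The Dove",    distance  => 450 },
  );
  # An empty node hash makes feed_timestamp fall back to "now".
  my $timestamp = $rss->feed_timestamp( {} );
  print $rss->generate_node_name_distance_feed( $timestamp, @nodes );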
=cut sub generate_node_name_distance_feed { my ($self,$feed_timestamp,@nodes) = @_; # Start our feed my $rss = $self->build_feed_start($feed_timestamp); $rss .= qq{ }; $rss .= $self->build_feed_mid($feed_timestamp); # Generate the items list, and the individiual item entries my (@urls, @items); foreach my $node (@nodes) { my $node_name = $node->{name}; my $url = $self->{make_node_url}->($node_name); push @urls, qq{ \n}; my $rdf_url = $url; $rdf_url =~ s/\?/\?id=/; $rdf_url .= ';format=rdf'; # make XML-clean my $title = $node_name; $title =~ s/&/&/g; $title =~ s//>/g; # What location stuff do we have? my $geo_rss = $self->format_geo($node); push @items, qq{ $title $url $geo_rss }; } # Output the items list $rss .= qq{ } . join('', @urls) . qq{ }; # Output the individual item entries $rss .= join('', @items) . "\n"; # Finish up $rss .= $self->build_feed_end($feed_timestamp); return $rss; } =head2 C Generate the timestamp for the RSS, based on the newest node (if available). Will return a timestamp for now if no node dates are available =cut sub feed_timestamp { my ($self, $newest_node) = @_; my $time; if ($newest_node->{last_modified}) { $time = Time::Piece->strptime( $newest_node->{last_modified}, $self->{timestamp_fmt} ); } else { $time = localtime; } my $utc_offset = $self->{utc_offset}; return $time->strftime( "%Y-%m-%dT%H:%M:%S$utc_offset" ); } # Compatibility method - use feed_timestamp with a node instead sub rss_timestamp { my ($self, %args) = @_; warn("Old style method used - please convert to calling feed_timestamp with a node!"); my $feed_timestamp = $self->feed_timestamp( $self->fetch_newest_for_recently_changed(%args) ); return $feed_timestamp; } =head2 C Take a feed_timestamp and return a Time::Piece object. =cut sub parse_feed_timestamp { my ($self, $feed_timestamp) = @_; $feed_timestamp = substr($feed_timestamp, 0, -length( $self->{utc_offset})); return Time::Piece->strptime( $feed_timestamp, '%Y-%m-%dT%H:%M:%S' ); } 1; __END__ =head1 SEE ALSO =over 4 =item * L =item * L =item * L =back =head1 MAINTAINER The Wiki::Toolkit project. Originally by Kake Pugh . =head1 COPYRIGHT AND LICENSE Copyright 2003-4 Kake Pugh. Copyright 2005 Earle Martin. Copyright 2006-2009 the Wiki::Toolkit team This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =head1 THANKS The members of the Semantic Web Interest Group channel on irc.freenode.net, #swig, were very useful in the development of this module. =cut Wiki-Toolkit-0.83/lib/Wiki/Toolkit/TestConfig/0000755000175000017500000000000012243760523021027 5ustar vagrantvagrantWiki-Toolkit-0.83/lib/Wiki/Toolkit/TestConfig/Utilities.pm0000644000175000017500000001372212243757607023356 0ustar vagrantvagrantpackage Wiki::Toolkit::TestConfig::Utilities; use strict; use Wiki::Toolkit::TestConfig; use vars qw( $num_stores $num_combinations $VERSION ); $VERSION = '0.06'; =head1 NAME Wiki::Toolkit::TestConfig::Utilities - Utilities for testing Wiki::Toolkit things (deprecated). =head1 DESCRIPTION Deprecated - use L instead. 
=cut my %stores; foreach my $dbtype (qw( MySQL Pg SQLite )) { if ($Wiki::Toolkit::TestConfig::config{$dbtype}->{dbname}) { my %config = %{$Wiki::Toolkit::TestConfig::config{$dbtype}}; my $store_class = "Wiki::Toolkit::Store::$dbtype"; eval "require $store_class"; my $store = $store_class->new( dbname => $config{dbname}, dbuser => $config{dbuser}, dbpass => $config{dbpass}, dbhost => $config{dbhost} ); $stores{$dbtype} = $store; } else { $stores{$dbtype} = undef; } } $num_stores = scalar keys %stores; my %searches; # DBIxFTS only works with MySQL. if ( $Wiki::Toolkit::TestConfig::config{dbixfts} && $stores{MySQL} ) { require Wiki::Toolkit::Search::DBIxFTS; my $dbh = $stores{MySQL}->dbh; $searches{DBIxFTSMySQL} = Wiki::Toolkit::Search::DBIxFTS->new( dbh => $dbh ); } else { $searches{DBIxFTSMySQL} = undef; } # Test the MySQL SII backend, if we can. if ( $Wiki::Toolkit::TestConfig::config{search_invertedindex} && $stores{MySQL} ) { require Search::InvertedIndex::DB::Mysql; require Wiki::Toolkit::Search::SII; my %dbconfig = %{$Wiki::Toolkit::TestConfig::config{MySQL}}; my $indexdb = Search::InvertedIndex::DB::Mysql->new( -db_name => $dbconfig{dbname}, -username => $dbconfig{dbuser}, -password => $dbconfig{dbpass}, -hostname => $dbconfig{dbhost} || "", -table_name => 'siindex', -lock_mode => 'EX' ); $searches{SIIMySQL} = Wiki::Toolkit::Search::SII->new( indexdb => $indexdb ); } else { $searches{SIIMySQL} = undef; } # Test the Pg SII backend, if we can. eval { require Search::InvertedIndex::DB::Pg; }; my $sii_pg = $@ ? 0 : 1; if ( $Wiki::Toolkit::TestConfig::config{search_invertedindex} && $stores{Pg} && $sii_pg ) { require Search::InvertedIndex::DB::Pg; require Wiki::Toolkit::Search::SII; my %dbconfig = %{$Wiki::Toolkit::TestConfig::config{Pg}}; my $indexdb = Search::InvertedIndex::DB::Pg->new( -db_name => $dbconfig{dbname}, -username => $dbconfig{dbuser}, -password => $dbconfig{dbpass}, -hostname => $dbconfig{dbhost}, -table_name => 'siindex', -lock_mode => 'EX' ); $searches{SIIPg} = Wiki::Toolkit::Search::SII->new( indexdb => $indexdb ); } else { $searches{SIIPg} = undef; } # Also test the default DB_File backend, if we have S::II installed at all. if ( $Wiki::Toolkit::TestConfig::config{search_invertedindex} ) { require Search::InvertedIndex; require Wiki::Toolkit::Search::SII; my $indexdb = Search::InvertedIndex::DB::DB_File_SplitHash->new( -map_name => 't/sii-db-file-test.db', -lock_mode => 'EX' ); $searches{SII} = Wiki::Toolkit::Search::SII->new( indexdb => $indexdb ); } else { $searches{SII} = undef; } my @combinations; # which searches work with which stores. push @combinations, { store_name => "MySQL", store => $stores{MySQL}, search_name => "DBIxFTSMySQL", search => $searches{DBIxFTSMySQL} }; push @combinations, { store_name => "MySQL", store => $stores{MySQL}, search_name => "SIIMySQL", search => $searches{SIIMySQL} }; push @combinations, { store_name => "Pg", store => $stores{Pg}, search_name => "SIIPg", search => $searches{SIIPg} }; # All stores are compatible with the default S::II search, and with no search. foreach my $store_name ( keys %stores ) { push @combinations, { store_name => $store_name, store => $stores{$store_name}, search_name => "SII", search => $searches{SII} }; push @combinations, { store_name => $store_name, store => $stores{$store_name}, search_name => "undef", search => undef }; } foreach my $comb ( @combinations ) { # There must be a store configured for us to test, but a search is optional $comb->{configured} = $comb->{store} ? 
1 : 0; } $num_combinations = scalar @combinations; sub reinitialise_stores { my $class = shift; my %stores = $class->stores; my ($store_name, $store); while ( ($store_name, $store) = each %stores ) { next unless $store; my $dbname = $store->dbname; my $dbuser = $store->dbuser; my $dbpass = $store->dbpass; my $dbhost = $store->dbhost; # Clear out the test database, then set up tables afresh. my $setup_class = "Wiki::Toolkit::Setup::$store_name"; eval "require $setup_class"; { no strict "refs"; &{"$setup_class\:\:cleardb"}($dbname, $dbuser, $dbpass, $dbhost); &{"$setup_class\:\:setup"}($dbname, $dbuser, $dbpass, $dbhost); } } } sub stores { return %stores; } sub combinations { return @combinations; } =head1 SEE ALSO L, the replacement for this module. =head1 AUTHOR Kake Pugh (kake@earth.li). =head1 COPYRIGHT Copyright (C) 2003 Kake Pugh. All Rights Reserved. This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Plugin.pm0000644000175000017500000001263612243757607020577 0ustar vagrantvagrantpackage Wiki::Toolkit::Plugin; use strict; use vars qw( $VERSION ); $VERSION = '0.04'; =head1 NAME Wiki::Toolkit::Plugin - A base class for Wiki::Toolkit plugins. =head1 DESCRIPTION Provides methods for accessing the backend store, search and formatter objects of the L object that a plugin instance is registered with. =head1 SYNOPSIS package Wiki::Toolkit::Plugin::Foo; use base qw( Wiki::Toolkit::Plugin); # And then in your script: my $wiki = Wiki::Toolkit->new( ... ); my $plugin = Wiki::Toolkit::Plugin::Foo->new; $wiki->register_plugin( plugin => $plugin ); my $node = $plugin->datastore->retrieve_node( "Home" ); =head1 POSSIBLE METHODS =over 4 =item B Called before moderation is performed. Allows changes to the parameters used in moderation. my %args = @_; my ($name_ref,$version_ref) = @args{ qw( node version ) }; $$name_ref =~ s/\s/_/g; return 0; =item B Called after moderation has been performed. Allows additional actions to occur after node moderation. my %args = @_; my ($node,$node_id,$version) = @args{ qw( node node_id version ) }; &update_pending_list($node,$version); =item B Called before a rename is performed. Allows changes to the parameters used by rename. my %args = @_; my ($old_name_ref,$new_name_ref,$create_new_versions_ref) = @args{ qw( old_name new_name create_new_versions ) }; $$old_name_ref =~ s/\s/_/g; $$new_name_ref =~ s/\s/_/g; return 0; =item B Called after a rename has been performed. Allows additional actions to occur after node renames. my %args = @_; my ($old_name,$new_name,$node_id) = @args{ qw( old_name new_name node_id ) }; &recalculate_category_listings(); =item B Called before a retrieve is performed. Allows changes to the parameters used by retrieve. my %args = @_; my ($name_ref,$version_ref) = @args{ qw( node version ) }; return &check_retrive_allowed($$name_ref); TODO: Allow declining of the read. =item B Called before a write is performed. Allows changes to the parameters used by the write; my %args = @_; my ($node_ref,$content_ref,$metadata_ref) = @args{ qw( node content metadata ) }; $$content_ref =~ s/\bpub\b/Pub/g; return 1; =item B Called after a write has been performed. Allows additional actions to occur after node writes. my %args = @_; my ($node,$node_id,$version,$content,$metadata) = @args{ qw( node node_id version content metadata ) }; &log_node_write($node,gmtime); =item B Called after a delete has been performed. 
Allows additional actions to occur after node deletions. my %args = @_; my ($node,$node_id,$version) = @args{ qw( node node_id version ) }; &log_node_delete($node,gmtime); =back =head1 DECLINING ACTIONS FROM PRE_ METHODS Note: This functionality is missing for pre_retrieve It is possible for the pre_ methods (eg C) to decline the action. This could be due to an authentication check done by the plugin, due to the content, or whatever else the plugin fancies. There are three possible return values from a pre_ plugin: C<-1> - Deny this action C<0> or C - I have no opinion C<1> - Allow this action If you have only zeros, the action will be allowed. If you have ones and zeros, it will also be allowed. If you have minus ones and zeros, it will be denied. If you have minus ones, ones and zeros, the sum will be used to decide. For default deny, have one plugin return -1, and another only return 1 if the action is explicity allowed) =head1 METHODS =over 4 =item B sub new { my $class = shift; my $self = bless {}, $class; $self->_init if $self->can("_init"); return $self; } Generic contructor, just returns a blessed object. =cut sub new { my $class = shift; my $self = bless {}, $class; $self->_init if $self->can("_init"); return $self; } =item B Returns the Wiki::Toolkit object, or C if the C method hasn't been called on a L object yet. =cut sub wiki { my $self = shift; $self->{_wiki} = $_[0] if $_[0]; return $self->{_wiki}; } =item B Returns the backend store object, or C if the C method hasn't been called on a L object yet. =cut sub datastore { my $self = shift; $self->{_datastore} = $_[0] if $_[0]; return $self->{_datastore}; } =item B Returns the backend search object, or C if the C method hasn't been called on a L object yet, or if the wiki object had no search object defined. =cut sub indexer { my $self = shift; $self->{_indexer} = $_[0] if $_[0]; return $self->{_indexer}; } =item B Returns the backend formatter object, or C if the C method hasn't been called on a L object yet. =cut sub formatter { my $self = shift; $self->{_formatter} = $_[0] if $_[0]; return $self->{_formatter}; } =back =head1 SEE ALSO L =head1 AUTHOR Kake Pugh (kake@earth.li). =head1 COPYRIGHT Copyright (C) 2003-4 Kake Pugh. All Rights Reserved. Copyright (C) 2006 the Wiki::Toolkit team. All Rights Reserved. This module is free software; you can redistribute it and/or modify it under the same terms as Perl itself. =cut 1; Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Store/0000755000175000017500000000000012243760523020056 5ustar vagrantvagrantWiki-Toolkit-0.83/lib/Wiki/Toolkit/Store/Database.pm0000644000175000017500000021633312243757607022141 0ustar vagrantvagrantpackage Wiki::Toolkit::Store::Database; use strict; use vars qw( $VERSION $timestamp_fmt ); $timestamp_fmt = "%Y-%m-%d %H:%M:%S"; use DBI; use Time::Piece; use Time::Seconds; use Carp qw( carp croak ); use Digest::MD5 qw( md5_hex ); $VERSION = '0.31'; my $SCHEMA_VER = 10; # first, detect if Encode is available - it's not under 5.6. If we _are_ # under 5.6, give up - we'll just have to hope that nothing explodes. This # is the current 0.54 behaviour, so that's ok. my $CAN_USE_ENCODE; BEGIN { eval " use Encode "; $CAN_USE_ENCODE = $@ ? 0 : 1; } =head1 NAME Wiki::Toolkit::Store::Database - parent class for database storage backends for Wiki::Toolkit =head1 SYNOPSIS This is probably only useful for Wiki::Toolkit developers. # See below for parameter details. 
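Typically the store is then handed to a L<Wiki::Toolkit> object rather
than used directly; for instance (illustrative):

  my $wiki = Wiki::Toolkit->new( store => $store );

  # Or, in tests and maintenance tools, call the store directly:
  my %node = $store->retrieve_node( name => "HomePage" );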
my $store = Wiki::Toolkit::Store::MySQL->new( %config ); =head1 METHODS =over 4 =item B my $store = Wiki::Toolkit::Store::MySQL->new( dbname => "wiki", dbuser => "wiki", dbpass => "wiki", dbhost => "db.example.com", dbport => 1234, charset => "iso-8859-1" ); or my $store = Wiki::Toolkit::Store::MySQL->new( dbh => $dbh ); C is optional, defaults to C, and does nothing unless you're using perl 5.8 or newer. If you do not provide an active database handle in C, then C is mandatory. C, C, C and C are optional, but you'll want to supply them unless your database's connection method doesn't require them. If you do provide C then it must have the following parameters set; otherwise you should just provide the connection information and let us create our own handle: =over 4 =item * C = 1 =item * C = 0 =item * C = 1 =back =cut sub new { my ($class, @args) = @_; my $self = {}; bless $self, $class; return $self->_init(@args); } sub _init { my ($self, %args) = @_; if ( $args{dbh} ) { $self->{_dbh} = $args{dbh}; $self->{_external_dbh} = 1; # don't disconnect at DESTROY time $self->{_charset} = $args{charset} || "iso-8859-1"; } else { die "Must supply a dbname" unless defined $args{dbname}; $self->{_dbname} = $args{dbname}; $self->{_dbuser} = $args{dbuser} || ""; $self->{_dbpass} = $args{dbpass} || ""; $self->{_dbhost} = $args{dbhost} || ""; $self->{_dbport} = $args{dbport} || ""; $self->{_charset} = $args{charset} || "iso-8859-1"; # Connect to database and store the database handle. my ($dbname, $dbuser, $dbpass, $dbhost, $dbport) = @$self{qw(_dbname _dbuser _dbpass _dbhost _dbport)}; my $dsn = $self->_dsn($dbname, $dbhost, $dbport) or croak "No data source string provided by class"; $self->{_dbh} = DBI->connect( $dsn, $dbuser, $dbpass, $self->_get_dbh_connect_attr ) or croak "Can't connect to database $dbname using $dsn: " . DBI->errstr; } my ($cur_ver, $db_ver) = $self->schema_current; if ($db_ver < $cur_ver) { croak "Database schema version $db_ver is too old (need $cur_ver)"; } elsif ($db_ver > $cur_ver) { croak "Database schema version $db_ver is too new (need $cur_ver)"; } return $self; } # Internal method to get attributes for passing to DBI->connect(). # Override in subclasses to add database-dependent attributes. sub _get_dbh_connect_attr { return { PrintError => 0, RaiseError => 1, AutoCommit => 1, }; } # Internal method, used to handle the logic of how to add up return # values from pre_ plugins sub handle_pre_plugin_ret { my ($running_total_ref,$result) = @_; if(($result && $result == 0) || !$result) { # No opinion, no need to change things } elsif($result == -1 || $result == 1) { # Increase or decrease as requested $$running_total_ref += $result; } else { # Invalid return code warn("Pre_ plugin returned invalid accept/deny value of '$result'"); } } =item B my $content = $store->retrieve_node($node); # Or get additional meta-data too. my %node = $store->retrieve_node("HomePage"); print "Current Version: " . $node{version}; # Maybe we stored some metadata too. my $categories = $node{metadata}{category}; print "Categories: " . join(", ", @$categories); print "Postcode: $node{metadata}{postcode}[0]"; # Or get an earlier version: my %node = $store->retrieve_node(name => "HomePage", version => 2 ); print $node{content}; In scalar context, returns the current (raw Wiki language) contents of the specified node. 
In list context, returns a hash containing the contents of the node plus additional data: =over 4 =item B =item B =item B =item B - a reference to a hash containing any caller-supplied metadata sent along the last time the node was written =back The node parameter is mandatory. The version parameter is optional and defaults to the newest version. If the node hasn't been created yet, it is considered to exist but be empty (this behaviour might change). B on metadata - each hash value is returned as an array ref, even if that type of metadata only has one value. =cut sub retrieve_node { my $self = shift; my %args = scalar @_ == 1 ? ( name => $_[0] ) : @_; unless($args{'version'}) { $args{'version'} = undef; } # Call pre_retrieve on any plugins, in case they want to tweak anything my @plugins = @{ $args{plugins} || [ ] }; foreach my $plugin (@plugins) { if ( $plugin->can( "pre_retrieve" ) ) { $plugin->pre_retrieve( node => \$args{'name'}, version => \$args{'version'} ); } } # Note _retrieve_node_data is sensitive to calling context. unless(wantarray) { # Scalar context, will return just the content return $self->_retrieve_node_data( %args ); } my %data = $self->_retrieve_node_data( %args ); $data{'checksum'} = $self->_checksum(%data); return %data; } # Returns hash or scalar depending on calling context. sub _retrieve_node_data { my ($self, %args) = @_; my %data = $self->_retrieve_node_content( %args ); unless(wantarray) { # Scalar context, return just the content return $data{content}; } # If we want additional data then get it. Note that $data{version} # will already have been set by C<_retrieve_node_content>, if it wasn't # specified in the call. my $dbh = $self->dbh; my $sql = "SELECT metadata_type, metadata_value " . "FROM node " . "INNER JOIN metadata ON (node_id = id) " . "WHERE name=? " . "AND metadata.version=?"; my $sth = $dbh->prepare($sql); $sth->execute($args{name},$data{version}) or croak $dbh->errstr; my %metadata; while ( my ($type, $val) = $self->charset_decode( $sth->fetchrow_array ) ) { if ( defined $metadata{$type} ) { push @{$metadata{$type}}, $val; } else { $metadata{$type} = [ $val ]; } } $data{metadata} = \%metadata; return %data; } # $store->_retrieve_node_content( name => $node_name, # version => $node_version ); # Params: 'name' is compulsory, 'version' is optional and defaults to latest. # Returns a hash of data for C - content, version, last modified sub _retrieve_node_content { my ($self, %args) = @_; croak "No valid node name supplied" unless $args{name}; my $dbh = $self->dbh; my $sql; my $version_sql_val; my $text_source; if ( $args{version} ) { # Version given - get that version, and the content for that version $version_sql_val = $dbh->quote($self->charset_encode($args{version})); $text_source = "content"; } else { # No version given, grab latest version (and content for that) $version_sql_val = "node.version"; $text_source = "node"; } $sql = "SELECT " . " $text_source.text, content.version, " . " content.modified, content.moderated, " . " node.moderate " . "FROM node " . "INNER JOIN content ON (id = node_id) " . "WHERE name=" . $dbh->quote($self->charset_encode($args{name})) . " AND content.version=" . 
$version_sql_val; my @results = $self->charset_decode( $dbh->selectrow_array($sql) ); @results = ("", 0, "") unless scalar @results; my %data; @data{ qw( content version last_modified moderated node_requires_moderation ) } = @results; return %data; } # Expects a hash as returned by ->retrieve_node - it's actually slightly lax # in this, in that while ->retrieve_node always wraps up the metadata values in # (refs to) arrays, this method will accept scalar metadata values too. sub _checksum { my ($self, %node_data) = @_; my $string = $node_data{content}; my %metadata = %{ $node_data{metadata} || {} }; foreach my $key ( sort keys %metadata ) { $string .= "\0\0\0" . $key . "\0\0"; my $val = $metadata{$key}; if ( ref $val eq "ARRAY" ) { $string .= join("\0", sort @$val ); } else { $string .= $val; } } return md5_hex($self->charset_encode($string)); } # Expects an array of hashes whose keys and values are scalars. sub _checksum_hashes { my ($self, @hashes) = @_; my @strings = ""; foreach my $hashref ( @hashes ) { my %hash = %$hashref; my $substring = ""; foreach my $key ( sort keys %hash ) { $substring .= "\0\0" . $key . "\0" . $hash{$key}; } push @strings, $substring; } my $string = join("\0\0\0", sort @strings); return md5_hex($string); } =item B my $ok = $store->node_exists( "Wombat Defenestration" ); # or ignore case - optional but recommended my $ok = $store->node_exists( name => "monkey brains", ignore_case => 1, ); Returns true if the node has ever been created (even if it is currently empty), and false otherwise. By default, the case-sensitivity of C depends on your database. If you supply a true value to the C parameter, then you can be sure of its being case-insensitive. This is recommended. =cut sub node_exists { my $self = shift; if ( scalar @_ == 1 ) { my $node = shift; return $self->_do_old_node_exists( $node ); } else { my %args = @_; return $self->_do_old_node_exists( $args{name} ) unless $args{ignore_case}; my $sql = $self->_get_node_exists_ignore_case_sql; my $sth = $self->dbh->prepare( $sql ); $sth->execute( $args{name} ); my $found_name = $sth->fetchrow_array || ""; $sth->finish; return lc($found_name) eq lc($args{name}) ? 1 : 0; } } sub _do_old_node_exists { my ($self, $node) = @_; my %data = $self->retrieve_node($node) or return (); return $data{version}; # will be 0 if node doesn't exist, >=1 otherwise } =item B my $ok = $store->verify_checksum($node, $checksum); Sees whether your checksum is current for the given node. Returns true if so, false if not. B Be aware that when called directly and without locking, this might not be accurate, since there is a small window between the checking and the returning where the node might be changed, so B rely on it for safe commits; use C for that. It can however be useful when previewing edits, for example. =cut sub verify_checksum { my ($self, $node, $checksum) = @_; #warn $self; my %node_data = $self->_retrieve_node_data( name => $node ); return ( $checksum eq $self->_checksum( %node_data ) ); } =item B # List all nodes that link to the Home Page. my @links = $store->list_backlinks( node => "Home Page" ); =cut sub list_backlinks { my ( $self, %args ) = @_; my $node = $args{node}; croak "Must supply a node name" unless $node; my $dbh = $self->dbh; # XXX see comment in list_dangling_links my $sql = "SELECT link_from FROM internal_links INNER JOIN node AS node_from ON node_from.name=internal_links.link_from WHERE link_to=" . 
$dbh->quote($node); my $sth = $dbh->prepare($sql); $sth->execute or croak $dbh->errstr; my @backlinks; while ( my ($backlink) = $self->charset_decode( $sth->fetchrow_array ) ) { push @backlinks, $backlink; } return @backlinks; } =item B # List all nodes that have been linked to from other nodes but don't # yet exist. my @links = $store->list_dangling_links; Each node is returned once only, regardless of how many other nodes link to it. =cut sub list_dangling_links { my $self = shift; my $dbh = $self->dbh; # XXX this is really hiding an inconsistency in the database; # should really fix the constraints so that this inconsistency # cannot be introduced; also rework this table completely so # that it uses IDs, not node names (will simplify rename_node too) my $sql = "SELECT DISTINCT internal_links.link_to FROM internal_links INNER JOIN node AS node_from ON node_from.name=internal_links.link_from LEFT JOIN node AS node_to ON node_to.name=internal_links.link_to WHERE node_to.version IS NULL"; my $sth = $dbh->prepare($sql); $sth->execute or croak $dbh->errstr; my @links; while ( my ($link) = $self->charset_decode( $sth->fetchrow_array ) ) { push @links, $link; } return @links; } =item B $store->write_node_post_locking( node => $node, content => $content, links_to => \@links_to, metadata => \%metadata, requires_moderation => $requires_moderation, plugins => \@plugins ) or handle_error(); Writes the specified content into the specified node, then calls C on all supplied plugins, with arguments C, C, C, C. Making sure that locking/unlocking/transactions happen is left up to you (or your chosen subclass). This method shouldn't really be used directly as it might overwrite someone else's changes. Croaks on error but otherwise returns the version number of the update just made. A return value of -1 indicates that the change was not applied. This may be because the plugins voted against the change, or because the content and metadata in the proposed new version were identical to the current version (a "null" change). Supplying a ref to an array of nodes that this ones links to is optional, but if you do supply it then this node will be returned when calling C on the nodes in C<@links_to>. B that if you don't supply the ref then the store will assume that this node doesn't link to any others, and update itself accordingly. The metadata hashref is also optional, as is requires_moderation. B on the metadata hashref: Any data in here that you wish to access directly later must be a key-value pair in which the value is either a scalar or a reference to an array of scalars. For example: $wiki->write_node( "Calthorpe Arms", "nice pub", $checksum, { category => [ "Pubs", "Bloomsbury" ], postcode => "WC1X 8JR" } ); # and later my @nodes = $wiki->list_nodes_by_metadata( metadata_type => "category", metadata_value => "Pubs" ); For more advanced usage (passing data through to registered plugins) you may if you wish pass key-value pairs in which the value is a hashref or an array of hashrefs. The data in the hashrefs will not be stored as metadata; it will be checksummed and the checksum will be stored instead (as C<__metadatatypename__checksum>). Such data can I be accessed via plugins. 
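For example, a plugin might capture such structured metadata in its
C<post_write> method - a sketch, where C<tube_data> is the illustrative
key used elsewhere in this distribution's docs, and C<_index_tube_data>
is a hypothetical helper method:

  sub post_write {
      my ($self, %args) = @_;

      # The raw user-defined metadata is passed through here, including
      # any hashref values the store itself only kept a checksum of.
      my $tube_data = $args{metadata}{tube_data};
      if ( $tube_data && ref $tube_data eq "ARRAY" ) {
          $self->_index_tube_data( $args{node_id}, $args{version},
                                   @$tube_data );
      }
      return 1;
  }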
=cut sub write_node_post_locking { my ($self, %args) = @_; my ($node, $content, $links_to_ref, $metadata_ref, $requires_moderation) = @args{ qw( node content links_to metadata requires_moderation) }; my $dbh = $self->dbh; my $timestamp = $self->_get_timestamp(); my @links_to = @{ $links_to_ref || [] }; # default to empty array my $version; unless($requires_moderation) { $requires_moderation = 0; } # Call pre_write on any plugins, in case they want to tweak anything my @preplugins = @{ $args{plugins} || [ ] }; my $write_allowed = 1; foreach my $plugin (@preplugins) { if ( $plugin->can( "pre_write" ) ) { handle_pre_plugin_ret( \$write_allowed, $plugin->pre_write( node => \$node, content => \$content, metadata => \$metadata_ref ) ); } } if($write_allowed < 1) { # The plugins didn't want to allow this action return -1; } if ( $self->_checksum( %args ) eq $args{checksum} ) { # Refuse to commit as nothing has changed return -1; } # Either inserting a new page or updating an old one. my $sql = "SELECT count(*) FROM node WHERE name=" . $dbh->quote($node); my $exists = @{ $dbh->selectcol_arrayref($sql) }[0] || 0; # If it doesn't exist, add it right now if(! $exists) { # Add in a new version $version = 1; # Handle initial moderation my $node_content = $content; if($requires_moderation) { $node_content = "=== This page has yet to be moderated. ==="; } # Add the node and content my $add_sql = "INSERT INTO node " ." (name, version, text, modified, moderate) " ."VALUES (?, ?, ?, ?, ?)"; my $add_sth = $dbh->prepare($add_sql); $add_sth->execute( map{ $self->charset_encode($_) } ($node, $version, $node_content, $timestamp, $requires_moderation) ) or croak "Error updating database: " . DBI->errstr; } # Get the ID of the node we've added / we're about to update # Also get the moderation status for it $sql = "SELECT id, moderate FROM node WHERE name=" . $dbh->quote($node); my ($node_id,$node_requires_moderation) = $dbh->selectrow_array($sql); # Only update node if it exists, and moderation isn't enabled on the node # Whatever happens, if it exists, generate a new version number if($exists) { # Get the new version number $sql = "SELECT max(content.version) FROM node INNER JOIN content ON (id = node_id) WHERE name=" . $dbh->quote($node); $version = @{ $dbh->selectcol_arrayref($sql) }[0] || 0; croak "Can't get version number" unless $version; $version++; # Update the node only if node doesn't require moderation if(!$node_requires_moderation) { $sql = "UPDATE node SET version=" . $dbh->quote($version) . ", text=" . $dbh->quote($self->charset_encode($content)) . ", modified=" . $dbh->quote($timestamp) . " WHERE name=" . $dbh->quote($self->charset_encode($node)); $dbh->do($sql) or croak "Error updating database: " . DBI->errstr; } # You can't use this to enable moderation on an existing node if($requires_moderation) { warn("Moderation not added to existing node '$node', use normal moderation methods instead"); } } # Now node is updated (if required), add to the history my $add_sql = "INSERT INTO content " ." (node_id, version, text, modified, moderated) " ."VALUES (?, ?, ?, ?, ?)"; my $add_sth = $dbh->prepare($add_sql); $add_sth->execute( map { $self->charset_encode($_) } ($node_id, $version, $content, $timestamp, (1-$node_requires_moderation)) ) or croak "Error updating database: " . DBI->errstr; # Update the backlinks. $dbh->do("DELETE FROM internal_links WHERE link_from=" . 
$dbh->quote($self->charset_encode($node)) ) or croak $dbh->errstr; foreach my $links_to ( @links_to ) { $sql = "INSERT INTO internal_links (link_from, link_to) VALUES (" . join(", ", map { $dbh->quote($self->charset_encode($_)) } ( $node, $links_to ) ) . ")"; # Better to drop a backlink or two than to lose the whole update. # Shevek wants a case-sensitive wiki, Jerakeen wants a case-insensitive # one, MySQL compares case-sensitively on varchars unless you add # the binary keyword. Case-sensitivity to be revisited. eval { $dbh->do($sql); }; carp "Couldn't index backlink: " . $dbh->errstr if $@; } # And also store any metadata. Note that any entries already in the # metadata table refer to old versions, so we don't need to delete them. my %metadata = %{ $metadata_ref || {} }; # default to no metadata foreach my $type ( keys %metadata ) { my $val = $metadata{$type}; # We might have one or many values; make an array now to merge cases. my @values = (ref $val and ref $val eq 'ARRAY') ? @$val : ( $val ); # Find out whether all values for this type are scalars. my $all_scalars = 1; foreach my $value (@values) { $all_scalars = 0 if ref $value; } # For adding to metadata my $add_sql = "INSERT INTO metadata " ." (node_id, version, metadata_type, metadata_value) " ."VALUES (?, ?, ?, ?)"; my $add_sth = $dbh->prepare($add_sql); # If all values for this type are scalars, strip out any duplicates # and store the data. if ( $all_scalars ) { my %unique = map { $_ => 1 } @values; @values = keys %unique; foreach my $value ( @values ) { $add_sth->execute( map { $self->charset_encode($_) } ( $node_id, $version, $type, $value ) ) or croak $dbh->errstr; } } else { # Otherwise grab a checksum and store that. my $type_to_store = "__" . $type . "__checksum"; my $value_to_store = $self->_checksum_hashes( @values ); $add_sth->execute( map { $self->charset_encode($_) } ( $node_id, $version, $type_to_store, $value_to_store ) ) or croak $dbh->errstr; } } # Finally call post_write on any plugins. my @postplugins = @{ $args{plugins} || [ ] }; foreach my $plugin (@postplugins) { if ( $plugin->can( "post_write" ) ) { $plugin->post_write( node => $node, node_id => $node_id, version => $version, content => $content, metadata => $metadata_ref ); } } return $version; } # Returns the timestamp of now, unless epoch is supplied. sub _get_timestamp { my $self = shift; # I don't care about no steenkin' timezones (yet). my $time = shift || localtime; # Overloaded by Time::Piece. unless( ref $time ) { $time = localtime($time); # Make it into an object for strftime } return $time->strftime($timestamp_fmt); # global } =item B $store->rename_node( old_name => $node, new_name => $new_node, wiki => $wiki, create_new_versions => $create_new_versions, ); Renames a node, updating any references to it as required (assuming your chosen formatter supports rename, that is). Uses the internal_links table to identify the nodes that link to this one, and re-writes any wiki links in these to point to the new name. 
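A typical call, with illustrative node names:

  $store->rename_node(
      old_name => "Calthorpe Arms",
      new_name => "The Calthorpe Arms",
      wiki     => $wiki,
      create_new_versions => 1,
  );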
=cut sub rename_node { my ($self, %args) = @_; my ($old_name,$new_name,$wiki,$create_new_versions) = @args{ qw( old_name new_name wiki create_new_versions ) }; my $dbh = $self->dbh; my $formatter = $wiki->{_formatter}; my $timestamp = $self->_get_timestamp(); # Call pre_rename on any plugins, in case they want to tweak anything my @preplugins = @{ $args{plugins} || [ ] }; my $rename_allowed = 1; foreach my $plugin (@preplugins) { if ( $plugin->can( "pre_rename" ) ) { handle_pre_plugin_ret( \$rename_allowed, $plugin->pre_rename( old_name => \$old_name, new_name => \$new_name, create_new_versions => \$create_new_versions, ) ); } } if($rename_allowed < 1) { # The plugins didn't want to allow this action return -1; } # Get the ID of the node my $sql = "SELECT id FROM node WHERE name=?"; my $sth = $dbh->prepare($sql); $sth->execute($old_name); my ($node_id) = $sth->fetchrow_array; $sth->finish; # If the formatter supports it, get a list of the internal # links to the page, which will have their links re-written # (Do now before we update the name of the node, in case of # self links) my @links; if($formatter->can("rename_links")) { # Get a list of the pages that link to the page $sql = "SELECT id, name, version " ."FROM internal_links " ."INNER JOIN node " ." ON (link_from = name) " ."WHERE link_to = ?"; $sth = $dbh->prepare($sql); $sth->execute($old_name); # Grab them all, then update, so no locking problems while(my @l = $sth->fetchrow_array) { push (@links, \@l); } } # Rename the node $sql = "UPDATE node SET name=? WHERE id=?"; $sth = $dbh->prepare($sql); $sth->execute($new_name,$node_id); # Fix the internal links from this page # (Otherwise write_node will get confused if we rename links later on) $sql = "UPDATE internal_links SET link_from=? WHERE link_from=?"; $sth = $dbh->prepare($sql); $sth->execute($new_name,$old_name); # Update the text of internal links, if the formatter supports it if($formatter->can("rename_links")) { # Update the linked pages (may include renamed page) foreach my $l (@links) { my ($page_id, $page_name, $page_version) = @$l; # Self link special case if($page_name eq $old_name) { $page_name = $new_name; } # Grab the latest version of that page my %page = $self->retrieve_node( name=>$page_name, version=>$page_version ); # Update the content of the page my $new_content = $formatter->rename_links($old_name,$new_name,$page{'content'}); # Did it change? if($new_content ne $page{'content'}) { # Write the updated page out if($create_new_versions) { # Write out as a new version of the node # (This will also fix our internal links) $wiki->write_node( $page_name, $new_content, $page{checksum}, $page{metadata} ); } else { # Just update the content my $update_sql_a = "UPDATE node SET text=? WHERE id=?"; my $update_sql_b = "UPDATE content SET text=? ". "WHERE node_id=? AND version=?"; my $u_sth = $dbh->prepare($update_sql_a); $u_sth->execute($new_content,$page_id); $u_sth = $dbh->prepare($update_sql_b); $u_sth->execute($new_content,$page_id,$page_version); } } } # Fix the internal links if we didn't create new versions of the node if(! $create_new_versions) { $sql = "UPDATE internal_links SET link_to=? 
WHERE link_to=?"; $sth = $dbh->prepare($sql); $sth->execute($new_name,$old_name); } } else { warn("Internal links not updated following node rename - unsupported by formatter"); } # Call post_rename on any plugins, in case they want to do anything my @postplugins = @{ $args{plugins} || [ ] }; foreach my $plugin (@postplugins) { if ( $plugin->can( "post_rename" ) ) { $plugin->post_rename( old_name => $old_name, new_name => $new_name, node_id => $node_id, ); } } } =item B $store->moderate_node( name => $node, version => $version ); Marks the given version of the node as moderated. If this is the highest moderated version, then update the node's contents to hold this version. =cut sub moderate_node { my $self = shift; my %args = scalar @_ == 2 ? ( name => $_[0], version => $_[1] ) : @_; my $dbh = $self->dbh; my ($name,$version) = ($args{name},$args{version}); # Call pre_moderate on any plugins. my @plugins = @{ $args{plugins} || [ ] }; my $moderation_allowed = 1; foreach my $plugin (@plugins) { if ( $plugin->can( "pre_moderate" ) ) { handle_pre_plugin_ret( \$moderation_allowed, $plugin->pre_moderate( node => \$name, version => \$version ) ); } } if($moderation_allowed < 1) { # The plugins didn't want to allow this action return -1; } # Get the ID of this node my $id_sql = "SELECT id FROM node WHERE name=?"; my $id_sth = $dbh->prepare($id_sql); $id_sth->execute($name); my ($node_id) = $id_sth->fetchrow_array; $id_sth->finish; # Check what the current highest moderated version is my $hv_sql = "SELECT max(version) " ."FROM content " ."WHERE node_id = ? " ."AND moderated = ?"; my $hv_sth = $dbh->prepare($hv_sql); $hv_sth->execute($node_id, "1") or croak $dbh->errstr; my ($highest_mod_version) = $hv_sth->fetchrow_array; $hv_sth->finish; unless($highest_mod_version) { $highest_mod_version = 0; } # Mark this version as moderated my $update_sql = "UPDATE content " ."SET moderated = ? " ."WHERE node_id = ? " ."AND version = ?"; my $update_sth = $dbh->prepare($update_sql); $update_sth->execute("1", $node_id, $version) or croak $dbh->errstr; # Are we now the highest moderated version? if(int($version) > int($highest_mod_version)) { # Newly moderated version is newer than previous moderated version # So, make the current version the latest version my %new_data = $self->retrieve_node( name => $name, version => $version ); # Make sure last modified is properly null, if not set unless($new_data{last_modified}) { $new_data{last_modified} = undef; } my $newv_sql = "UPDATE node " ."SET version=?, text=?, modified=? " ."WHERE id = ?"; my $newv_sth = $dbh->prepare($newv_sql); $newv_sth->execute( $version, $self->charset_encode($new_data{content}), $new_data{last_modified}, $node_id ) or croak $dbh->errstr; } else { # A higher version is already moderated, so don't change node } # TODO: Do something about internal links, if required # Finally call post_moderate on any plugins. @plugins = @{ $args{plugins} || [ ] }; foreach my $plugin (@plugins) { if ( $plugin->can( "post_moderate" ) ) { $plugin->post_moderate( node => $name, node_id => $node_id, version => $version ); } } return 1; } =item B $store->set_node_moderation( name => $node, required => $required ); Sets if new node versions will require moderation or not =cut sub set_node_moderation { my $self = shift; my %args = scalar @_ == 2 ? 
( name => $_[0], required => $_[1] ) : @_; my $dbh = $self->dbh; my ($name,$required) = ($args{name},$args{required}); # Get the ID of this node my $id_sql = "SELECT id FROM node WHERE name=?"; my $id_sth = $dbh->prepare($id_sql); $id_sth->execute($name); my ($node_id) = $id_sth->fetchrow_array; $id_sth->finish; # Check we really got an ID unless($node_id) { return 0; } # Mark it as requiring / not requiring moderation my $mod_sql = "UPDATE node " ."SET moderate = ? " ."WHERE id = ? "; my $mod_sth = $dbh->prepare($mod_sql); $mod_sth->execute("$required", $node_id) or croak $dbh->errstr; return 1; } =item B<delete_node> $store->delete_node( name => $node, version => $version, wiki => $wiki ); C<version> is optional. If it is supplied then only that version of the node will be deleted. Otherwise the node and all its history will be completely deleted. C<wiki> is also optional, but if you care about updating the backlinks you want to include it. Again, doesn't do any locking. You probably don't want to let anyone except Wiki admins call this. You may not want to use it at all. Croaks on error, silently does nothing if the node or version doesn't exist, returns true if no error. =cut sub delete_node { my $self = shift; # Backwards compatibility. my %args = ( scalar @_ == 1 ) ? ( name => $_[0] ) : @_; my $dbh = $self->dbh; my ($name, $version, $wiki) = @args{ qw( name version wiki ) }; # Grab the ID of this node # (It will only ever have one entry in node, but might have entries # for other versions in metadata and content) my $id_sql = "SELECT id FROM node WHERE name=?"; my $id_sth = $dbh->prepare($id_sql); $id_sth->execute($name); my ($node_id) = $id_sth->fetchrow_array; $id_sth->finish; # Trivial case - delete the whole node and all its history. unless ( $version ) { my $sql; # Should start a transaction here. FIXME. # Do deletes $sql = "DELETE FROM content WHERE node_id = $node_id"; $dbh->do($sql) or croak "Deletion failed: " . DBI->errstr; $sql = "DELETE FROM internal_links WHERE link_from=".$dbh->quote($name); $dbh->do($sql) or croak $dbh->errstr; $sql = "DELETE FROM metadata WHERE node_id = $node_id"; $dbh->do($sql) or croak $dbh->errstr; $sql = "DELETE FROM node WHERE id = $node_id"; $dbh->do($sql) or croak "Deletion failed: " . DBI->errstr; # And finish it here. post_delete_node($name,$node_id,$version,$args{plugins}); return 1; } # Skip out early if we're trying to delete a nonexistent version. my %verdata = $self->retrieve_node( name => $name, version => $version ); unless($verdata{version}) { warn( "Asked to delete nonexistent version $version of node " . "$node_id ($name)" ); return 1; } # Reduce to trivial case if deleting the only version. my $sql = "SELECT COUNT(*) FROM content WHERE node_id = $node_id"; my $sth = $dbh->prepare( $sql ); $sth->execute() or croak "Deletion failed: " . $dbh->errstr; my ($count) = $sth->fetchrow_array; $sth->finish; if($count == 1) { # Only one version, so can do the non version delete return $self->delete_node( name=>$name, plugins=>$args{plugins} ); } # Check whether we're deleting the latest (moderated) version. my %currdata = $self->retrieve_node( name => $name ); if ( $currdata{version} == $version ) { # Deleting latest version, so need to update the copy in node # (Can't just grab version ($version - 1) since it may have been # deleted itself, or might not be moderated.)
my $try = $version - 1; my %prevdata; until ( $prevdata{version} && $prevdata{moderated} ) { %prevdata = $self->retrieve_node( name => $name, version => $try, ); $try--; } # Move to new (old) version my $sql="UPDATE node SET version=?, text=?, modified=? WHERE name=?"; my $sth = $dbh->prepare( $sql ); $sth->execute( @prevdata{ qw( version content last_modified ) }, $name) or croak "Deletion failed: " . $dbh->errstr; # Remove the current version from content $sql = "DELETE FROM content WHERE node_id = $node_id AND version = $version"; $sth = $dbh->prepare( $sql ); $sth->execute() or croak "Deletion failed: " . $dbh->errstr; # Update the internal links to reflect the new version $sql = "DELETE FROM internal_links WHERE link_from=?"; $sth = $dbh->prepare( $sql ); $sth->execute( $name ) or croak "Deletion failed: " . $dbh->errstr; my @links_to; my $formatter = $wiki->formatter; if ( $formatter->can( "find_internal_links" ) ) { # Supply $metadata to formatter in case it's needed to alter the # behaviour of the formatter, eg for Wiki::Toolkit::Formatter::Multiple my @all = $formatter->find_internal_links( $prevdata{content}, $prevdata{metadata} ); my %unique = map { $_ => 1 } @all; @links_to = keys %unique; } $sql = "INSERT INTO internal_links (link_from, link_to) VALUES (?,?)"; $sth = $dbh->prepare( $sql ); foreach my $link ( @links_to ) { eval { $sth->execute( $name, $link ); }; carp "Couldn't index backlink: " . $dbh->errstr if $@; } # Delete the metadata for the old version $sql = "DELETE FROM metadata WHERE node_id = $node_id AND version = $version"; $sth = $dbh->prepare( $sql ); $sth->execute() or croak "Deletion failed: " . $dbh->errstr; # All done post_delete_node($name,$node_id,$version,$args{plugins}); return 1; } # If we're still here, then we're deleting neither the latest # nor the only version. $sql = "DELETE FROM content WHERE node_id = $node_id AND version=?"; $sth = $dbh->prepare( $sql ); $sth->execute( $version ) or croak "Deletion failed: " . $dbh->errstr; $sql = "DELETE FROM metadata WHERE node_id = $node_id AND version=?"; $sth = $dbh->prepare( $sql ); $sth->execute( $version ) or croak "Deletion failed: " . $dbh->errstr; # All done post_delete_node($name,$node_id,$version,$args{plugins}); return 1; } # Returns the name of the node with the given ID # Not normally used except when doing low-level maintenance sub node_name_for_id { my ($self, $node_id) = @_; my $dbh = $self->dbh; my $name_sql = "SELECT name FROM node WHERE id=?"; my $name_sth = $dbh->prepare($name_sql); $name_sth->execute($node_id); my ($name) = $name_sth->fetchrow_array; $name_sth->finish; return $name; } # Internal Method sub post_delete_node { my ($name,$node_id,$version,$plugins) = @_; # Call post_delete on any plugins, having done the delete my @plugins = @{ $plugins || [ ] }; foreach my $plugin (@plugins) { if ( $plugin->can( "post_delete" ) ) { $plugin->post_delete( node => $name, node_id => $node_id, version => $version ); } } } =item B<list_recent_changes> # Nodes changed in last 7 days - each node listed only once. my @nodes = $store->list_recent_changes( days => 7 ); # Nodes added in the last 7 days. my @nodes = $store->list_recent_changes( days => 7, new_only => 1, ); # All changes in last 7 days - nodes changed more than once will # be listed more than once. my @nodes = $store->list_recent_changes( days => 7, include_all_changes => 1, ); # Nodes changed between 1 and 7 days ago. my @nodes = $store->list_recent_changes( between_days => [ 1, 7 ] ); # Nodes changed since a given time.
my @nodes = $store->list_recent_changes( since => 1036235131 ); # Most recent change and its details. my @nodes = $store->list_recent_changes( last_n_changes => 1 ); print "Node: $nodes[0]{name}"; print "Last modified: $nodes[0]{last_modified}"; print "Comment: $nodes[0]{metadata}{comment}"; # Last 5 restaurant nodes edited. my @nodes = $store->list_recent_changes( last_n_changes => 5, metadata_is => { category => "Restaurants" } ); # Last 5 nodes edited by Kake. my @nodes = $store->list_recent_changes( last_n_changes => 5, metadata_was => { username => "Kake" } ); # All minor edits made by Earle in the last week. my @nodes = $store->list_recent_changes( days => 7, metadata_was => { username => "Earle", edit_type => "Minor tidying." } ); # Last 10 changes that weren't minor edits. my @nodes = $store->list_recent_changes( last_n_changes => 10, metadata_wasnt => { edit_type => "Minor tidying" } ); You I<must> supply one of the following constraints: C<days> (integer), C<since> (epoch), C<last_n_changes> (integer). You I<may> also supply moderation => 1 if you only want to see versions that are moderated. Another optional parameter is C<new_only>, which if set to 1 will only return newly added nodes. You I<may> also supply I<either> C<metadata_is> (and optionally C<metadata_isnt>), I<or> C<metadata_was> (and optionally C<metadata_wasnt>). Each of these should be a ref to a hash with scalar keys and values. If the hash has more than one entry, then only changes satisfying I<all> criteria will be returned when using C<metadata_is> or C<metadata_was>, but all changes which fail to satisfy any one of the criteria will be returned when using C<metadata_isnt> or C<metadata_wasnt>. C<metadata_is> and C<metadata_isnt> look only at the metadata that the node I<currently> has. C<metadata_was> and C<metadata_wasnt> take into account the metadata of previous versions of a node. Don't mix C<metadata_is> with C<metadata_was> - there's no check for this, but the results are undefined. Returns results as an array, in reverse chronological order. Each element of the array is a reference to a hash with the following entries: =over 4 =item * B<name>: the name of the node =item * B<version>: the version number of the node =item * B<last_modified>: timestamp showing when this version was written =item * B<metadata>: a ref to a hash containing any metadata attached to this version of the node =back Unless you supply C<include_all_changes>, C<metadata_was> or C<metadata_wasnt>, each node will only be returned once regardless of how many times it has been changed recently. By default, the case-sensitivity of both C<metadata_type> and C<metadata_value> depends on your database - if it will return rows with an attribute value of "Pubs" when you asked for "pubs", or not. If you supply a true value to the C<ignore_case> parameter, then you can be sure of its being case-insensitive. This is recommended.
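The optional flags described above can be combined; for instance (a sketch - the parameter values are illustrative):

    # Newly added nodes from the last 30 days, looking only at
    # moderated versions.
    my @nodes = $store->list_recent_changes(
        days       => 30,
        new_only   => 1,
        moderation => 1,
    );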
=cut sub list_recent_changes { my $self = shift; my %args = @_; if ($args{since}) { return $self->_find_recent_changes_by_criteria( %args ); } elsif ($args{between_days}) { return $self->_find_recent_changes_by_criteria( %args ); } elsif ( $args{days} ) { my $now = localtime; my $then = $now - ( ONE_DAY * $args{days} ); $args{since} = $then; delete $args{days}; return $self->_find_recent_changes_by_criteria( %args ); } elsif ( $args{last_n_changes} ) { $args{limit} = delete $args{last_n_changes}; return $self->_find_recent_changes_by_criteria( %args ); } else { croak "Need to supply some criteria to list_recent_changes."; } } sub _find_recent_changes_by_criteria { my ($self, %args) = @_; my ($since, $limit, $between_days, $ignore_case, $new_only, $metadata_is, $metadata_isnt, $metadata_was, $metadata_wasnt, $moderation, $include_all_changes ) = @args{ qw( since limit between_days ignore_case new_only metadata_is metadata_isnt metadata_was metadata_wasnt moderation include_all_changes) }; my $dbh = $self->dbh; my @where; my @metadata_joins; my $use_content_table; # some queries won't need this if ( $metadata_is ) { my $main_table = "node"; if ( $include_all_changes ) { $main_table = "content"; $use_content_table = 1; } my $i = 0; foreach my $type ( keys %$metadata_is ) { $i++; my $value = $metadata_is->{$type}; croak "metadata_is must have scalar values" if ref $value; my $mdt = "md_is_$i"; push @metadata_joins, "LEFT JOIN metadata AS $mdt ON $main_table." . ( ($main_table eq "node") ? "id" : "node_id" ) . "=$mdt.node_id AND $main_table.version=$mdt.version\n"; # Why is this inside 'if ( $metadata_is )'? # Shouldn't it apply to all cases? # What's it doing in @metadata_joins? if (defined $moderation) { push @metadata_joins, "AND $main_table.moderate=$moderation"; } push @where, "( " . $self->_get_comparison_sql( thing1 => "$mdt.metadata_type", thing2 => $dbh->quote($type), ignore_case => $ignore_case, ) . " AND " . $self->_get_comparison_sql( thing1 => "$mdt.metadata_value", thing2 => $dbh->quote( $self->charset_encode($value) ), ignore_case => $ignore_case, ) . " )"; } } if ( $metadata_isnt ) { foreach my $type ( keys %$metadata_isnt ) { my $value = $metadata_isnt->{$type}; croak "metadata_isnt must have scalar values" if ref $value; } my @omits = $self->_find_recent_changes_by_criteria( since => $since, between_days => $between_days, metadata_is => $metadata_isnt, ignore_case => $ignore_case, ); foreach my $omit ( @omits ) { push @where, "( node.name != " . $dbh->quote($omit->{name}) . " OR node.version != " . $dbh->quote($omit->{version}) . ")"; } } if ( $metadata_was ) { $use_content_table = 1; my $i = 0; foreach my $type ( keys %$metadata_was ) { $i++; my $value = $metadata_was->{$type}; croak "metadata_was must have scalar values" if ref $value; my $mdt = "md_was_$i"; push @metadata_joins, "LEFT JOIN metadata AS $mdt ON content.node_id=$mdt.node_id AND content.version=$mdt.version\n"; push @where, "( " . $self->_get_comparison_sql( thing1 => "$mdt.metadata_type", thing2 => $dbh->quote($type), ignore_case => $ignore_case, ) . " AND " . $self->_get_comparison_sql( thing1 => "$mdt.metadata_value", thing2 => $dbh->quote( $self->charset_encode($value) ), ignore_case => $ignore_case, ) .
" )"; } } if ( $metadata_wasnt ) { foreach my $type ( keys %$metadata_wasnt ) { my $value = $metadata_was->{$type}; croak "metadata_was must have scalar values" if ref $value; } my @omits = $self->_find_recent_changes_by_criteria( since => $since, between_days => $between_days, metadata_was => $metadata_wasnt, ignore_case => $ignore_case, ); foreach my $omit ( @omits ) { push @where, "( node.name != " . $dbh->quote($omit->{name}) . " OR content.version != " . $dbh->quote($omit->{version}) . ")"; } $use_content_table = 1; } # Figure out which table we should be joining to to check the dates and # versions - node or content. my $date_table = "node"; if ( $include_all_changes || $new_only || $metadata_was || $metadata_wasnt ) { $date_table = "content"; $use_content_table = 1; } if ( $new_only ) { push @where, "content.version=1"; } if ( $since ) { my $timestamp = $self->_get_timestamp( $since ); push @where, "$date_table.modified >= " . $dbh->quote($timestamp); } elsif ( $between_days ) { my $now = localtime; # Start is the larger number of days ago. my ($start, $end) = @$between_days; ($start, $end) = ($end, $start) if $start < $end; my $ts_start = $self->_get_timestamp( $now - (ONE_DAY * $start) ); my $ts_end = $self->_get_timestamp( $now - (ONE_DAY * $end) ); push @where, "$date_table.modified >= " . $dbh->quote($ts_start); push @where, "$date_table.modified <= " . $dbh->quote($ts_end); } my $sql = "SELECT DISTINCT node.name, "; if ( $include_all_changes || $new_only || $use_content_table ) { $sql .= " content.version, content.modified "; } else { $sql .= " node.version, node.modified "; } $sql .= " FROM node "; if ( $use_content_table ) { $sql .= " INNER JOIN content ON (node.id = content.node_id ) "; } $sql .= join("\n", @metadata_joins) . ( scalar @where ? " WHERE " . join(" AND ",@where) : "" ) . " ORDER BY " . ( $use_content_table ? "content" : "node" ) . ".modified DESC"; if ( $limit ) { croak "Bad argument $limit" unless $limit =~ /^\d+$/; $sql .= " LIMIT $limit"; } my $nodesref = $dbh->selectall_arrayref($sql); my @finds = map { { name => $_->[0], version => $_->[1], last_modified => $_->[2] } } @$nodesref; foreach my $find ( @finds ) { my %metadata; my $sth = $dbh->prepare( "SELECT metadata_type, metadata_value FROM node INNER JOIN metadata ON (id = node_id) WHERE name=? AND metadata.version=?" ); $sth->execute( $find->{name}, $find->{version} ); while ( my ($type, $value) = $self->charset_decode( $sth->fetchrow_array ) ) { if ( defined $metadata{$type} ) { push @{$metadata{$type}}, $value; } else { $metadata{$type} = [ $value ]; } } $find->{metadata} = \%metadata; } return @finds; } =item B my @nodes = $store->list_all_nodes(); print "First node is $nodes[0]\n"; my @nodes = $store->list_all_nodes( with_details=> 1 ); print "First node is ".$nodes[0]->{'name'}." at version ".$nodes[0]->{'version'}."\n"; Returns a list containing the name of every existing node. The list won't be in any kind of order; do any sorting in your calling script. Optionally also returns the id, version and moderation flag. 
=cut sub list_all_nodes { my ($self,%args) = @_; my $dbh = $self->dbh; my @nodes; if($args{with_details}) { my $sql = "SELECT id, name, version, moderate FROM node;"; my $sth = $dbh->prepare( $sql ); $sth->execute(); while(my @results = $sth->fetchrow_array) { my %data; @data{ qw( node_id name version moderate ) } = @results; push @nodes, \%data; } } else { my $sql = "SELECT name FROM node;"; my $raw_nodes = $dbh->selectall_arrayref($sql); @nodes = ( map { $self->charset_decode( $_->[0] ) } (@$raw_nodes) ); } return @nodes; } =item B<list_node_all_versions> my @all_versions = $store->list_node_all_versions( name => 'HomePage', with_content => 1, with_metadata => 0 ); Returns all the versions of a node, optionally including the content and metadata, as an array of hashes (newest versions first). =cut sub list_node_all_versions { my ($self, %args) = @_; my ($node_id,$name,$with_content,$with_metadata) = @args{ qw( node_id name with_content with_metadata ) }; my $dbh = $self->dbh; my $sql; # If they only gave us the node name, get the node id unless ($node_id) { $sql = "SELECT id FROM node WHERE name=" . $dbh->quote($name); $node_id = $dbh->selectrow_array($sql); } # If they didn't tell us what they wanted / we couldn't find it, # return an empty array return () unless($node_id); # Build up our SQL $sql = "SELECT id, name, content.version, content.modified "; if ( $with_content ) { $sql .= ", content.text "; } if ( $with_metadata ) { $sql .= ", metadata_type, metadata_value "; } $sql .= " FROM node INNER JOIN content ON (id = content.node_id) "; if ( $with_metadata ) { $sql .= " LEFT OUTER JOIN metadata ON " . "(id = metadata.node_id AND content.version = metadata.version) "; } $sql .= " WHERE id = ? ORDER BY content.version DESC"; # Do the fetch my $sth = $dbh->prepare( $sql ); $sth->execute( $node_id ); # Need to hold onto the last row by hash ref, so we don't trash # it every time my %first_data; my $dataref = \%first_data; # Haul out the data my @versions; while ( my @results = $sth->fetchrow_array ) { my %data = %$dataref; # Is it the same version as last time? if ( %data && $data{'version'} != $results[2] ) { # New version push @versions, $dataref; %data = (); } else { # Same version as last time, must be more metadata } # Grab the core data (will be the same on multi-row for metadata) @data{ qw( node_id name version last_modified ) } = @results; my $i = 4; if ( $with_content ) { $data{'content'} = $results[$i]; $i++; } if ( $with_metadata ) { my ($m_type,$m_value) = @results[$i,($i+1)]; unless ( $data{'metadata'} ) { $data{'metadata'} = {}; } if ( $m_type ) { # If we have existing data, then put it into an array if ( $data{'metadata'}->{$m_type} ) { unless ( ref($data{'metadata'}->{$m_type}) eq "ARRAY" ) { $data{'metadata'}->{$m_type} = [ $data{'metadata'}->{$m_type} ]; } push @{$data{'metadata'}->{$m_type}}, $m_value; } else { # Otherwise, just store it in a normal string $data{'metadata'}->{$m_type} = $m_value; } } } # Save where we've got to $dataref = \%data; } # Handle final row saving if ( $dataref ) { push @versions, $dataref; } # Return return @versions; } =item B<list_nodes_by_metadata> # All documentation nodes. my @nodes = $store->list_nodes_by_metadata( metadata_type => "category", metadata_value => "documentation", ignore_case => 1, # optional but recommended (see below) ); # All pubs in Hammersmith.
my @pubs = $store->list_nodes_by_metadata( metadata_type => "category", metadata_value => "Pub", ); my @hsm = $store->list_nodes_by_metadata( metadata_type => "category", metadata_value => "Hammersmith", ); my @results = my_l33t_method_for_ANDing_arrays( \@pubs, \@hsm ); Returns a list containing the name of every node whose caller-supplied metadata matches the criteria given in the parameters. By default, the case-sensitivity of both C<metadata_type> and C<metadata_value> depends on your database - if it will return rows with an attribute value of "Pubs" when you asked for "pubs", or not. If you supply a true value to the C<ignore_case> parameter, then you can be sure of its being case-insensitive. This is recommended. If you don't supply any criteria then you'll get an empty list. This is a really really really simple way of finding things; if you want to be more complicated then you'll need to call the method multiple times and combine the results yourself, or write a plugin. =cut sub list_nodes_by_metadata { my ($self, %args) = @_; my ( $type, $value ) = @args{ qw( metadata_type metadata_value ) }; return () unless $type; my $dbh = $self->dbh; if ( $args{ignore_case} ) { $type = lc( $type ); $value = lc( $value ); } my $sql = $self->_get_list_by_metadata_sql( ignore_case => $args{ignore_case} ); my $sth = $dbh->prepare( $sql ); $sth->execute( $type, $self->charset_encode($value) ); my @nodes; while ( my ($id, $node) = $sth->fetchrow_array ) { push @nodes, $node; } return @nodes; } =item B<list_nodes_by_missing_metadata> Returns nodes where either the metadata doesn't exist, or is blank. Unlike list_nodes_by_metadata(), the metadata value is optional. # All nodes missing documentation my @nodes = $store->list_nodes_by_missing_metadata( metadata_type => "category", metadata_value => "documentation", ignore_case => 1, # optional but recommended (see below) ); # All nodes which don't have a latitude defined my @nodes = $store->list_nodes_by_missing_metadata( metadata_type => "latitude" ); =cut sub list_nodes_by_missing_metadata { my ($self, %args) = @_; my ( $type, $value ) = @args{ qw( metadata_type metadata_value ) }; return () unless $type; my $dbh = $self->dbh; if ( $args{ignore_case} ) { $type = lc( $type ); $value = lc( $value ); } my @nodes; # If they don't want to match by value, then we can do it with # a LEFT OUTER JOIN, and either NULL or LENGTH() = 0 if( ! $value ) { my $sql = $self->_get_list_by_missing_metadata_sql( ignore_case => $args{ignore_case} ); my $sth = $dbh->prepare( $sql ); $sth->execute( $type ); while ( my ($id, $node) = $sth->fetchrow_array ) { push @nodes, $node; } } else { # To find those without the value in this case would involve # some seriously brain hurting SQL. # So, cheat - find those with, and return everything else my @with = $self->list_nodes_by_metadata(%args); my %with_hash; foreach my $node (@with) { $with_hash{$node} = 1; } my @all_nodes = $self->list_all_nodes(); foreach my $node (@all_nodes) { unless($with_hash{$node}) { push @nodes, $node; } } } return @nodes; } =item B<_get_list_by_metadata_sql> Return the SQL to do a match by metadata. Should expect the metadata type as the first SQL parameter, and the metadata value as the second. If possible, should take account of $args{ignore_case} =cut sub _get_list_by_metadata_sql { # SQL 99 version # Can be over-ridden by database-specific subclasses my ($self, %args) = @_; if ( $args{ignore_case} ) { return "SELECT node.id, node.name " . "FROM node " . "INNER JOIN metadata " . " ON (node.id = metadata.node_id " . " AND node.version=metadata.version) " . "WHERE ".
$self->_get_lowercase_compare_sql("metadata.metadata_type") . " AND ". $self->_get_lowercase_compare_sql("metadata.metadata_value"); } else { return "SELECT node.id, node.name " . "FROM node " . "INNER JOIN metadata " . " ON (node.id = metadata.node_id " . " AND node.version=metadata.version) " . "WHERE ". $self->_get_casesensitive_compare_sql("metadata.metadata_type") . " AND ". $self->_get_casesensitive_compare_sql("metadata.metadata_value"); } } =item B<_get_list_by_missing_metadata_sql> Return the SQL to do a match by missing metadata. Should expect the metadata type as the first SQL parameter. If possible, should take account of $args{ignore_case} =cut sub _get_list_by_missing_metadata_sql { # SQL 99 version # Can be over-ridden by database-specific subclasses my ($self, %args) = @_; my $sql = ""; if ( $args{ignore_case} ) { $sql = "SELECT node.id, node.name " . "FROM node " . "LEFT OUTER JOIN metadata " . " ON (node.id = metadata.node_id " . " AND node.version=metadata.version " . " AND ". $self->_get_lowercase_compare_sql("metadata.metadata_type") . ")"; } else { $sql = "SELECT node.id, node.name " . "FROM node " . "LEFT OUTER JOIN metadata " . " ON (node.id = metadata.node_id " . " AND node.version=metadata.version " . " AND ". $self->_get_casesensitive_compare_sql("metadata.metadata_type") . ")"; } $sql .= "WHERE (metadata.metadata_value IS NULL OR LENGTH(metadata.metadata_value) = 0) "; return $sql; } sub _get_lowercase_compare_sql { my ($self, $column) = @_; # SQL 99 version # Can be over-ridden by database-specific subclasses return "lower($column) = ?"; } sub _get_casesensitive_compare_sql { my ($self, $column) = @_; # SQL 99 version # Can be over-ridden by database-specific subclasses return "$column = ?"; } sub _get_comparison_sql { my ($self, %args) = @_; # SQL 99 version # Can be over-ridden by database-specific subclasses return "$args{thing1} = $args{thing2}"; } sub _get_node_exists_ignore_case_sql { # SQL 99 version # Can be over-ridden by database-specific subclasses return "SELECT name FROM node WHERE name = ? "; } =item B<list_unmoderated_nodes> my @nodes = $wiki->list_unmoderated_nodes(); my @nodes = $wiki->list_unmoderated_nodes( only_where_latest => 1 ); $nodes[0]->{'name'} # The name of the node $nodes[0]->{'node_id'} # The id of the node $nodes[0]->{'version'} # The version in need of moderation $nodes[0]->{'moderated_version'} # The newest moderated version With only_where_latest set, return the id, name and version of all the nodes where the most recent version needs moderation. Otherwise, returns the id, name and version of all node versions that need to be moderated. =cut sub list_unmoderated_nodes { my ($self,%args) = @_; my $only_where_latest = $args{'only_where_latest'}; my $sql = "SELECT " ." id, name, " ." node.version AS last_moderated_version, " ." content.version AS version " ."FROM content " ."INNER JOIN node " ." ON (id = node_id) " ."WHERE moderated = ? " ; if($only_where_latest) { $sql .= "AND node.version = content.version "; } $sql .= "ORDER BY name, content.version "; # Query my $dbh = $self->dbh; my $sth = $dbh->prepare( $sql ); $sth->execute( "0" ); my @nodes; while(my @results = $sth->fetchrow_array) { my %data; @data{ qw( node_id name moderated_version version ) } = @results; push @nodes, \%data; } return @nodes; } =item B<list_last_version_before> List the last version of every node before a given date. If no version existed before that date, will return undef for version.
Returns a hash of id, name, version and date. my @nv = $wiki->list_last_version_before('2007-01-02 10:34:11'); foreach my $data (@nv) { ... } =cut sub list_last_version_before { my ($self, $date) = @_; my $sql = "SELECT " ." id, name, " ."MAX(content.version) AS version, MAX(content.modified) AS modified " ."FROM node " ."LEFT OUTER JOIN content " ." ON (id = node_id " ." AND content.modified <= ?) " ."GROUP BY id, name " ."ORDER BY id " ; # Query my $dbh = $self->dbh; my $sth = $dbh->prepare( $sql ); $sth->execute( $date ); my @nodes; while(my @results = $sth->fetchrow_array) { my %data; @data{ qw( id name version modified ) } = @results; $data{'node_id'} = $data{'id'}; unless($data{'version'}) { $data{'version'} = undef; } push @nodes, \%data; } return @nodes; } # Internal function only, used when querying latest metadata sub _current_node_id_versions { my ($self) = @_; my $dbh = $self->dbh; my $nv_sql = "SELECT node_id, MAX(version) ". "FROM content ". "WHERE moderated ". "GROUP BY node_id "; my $sth = $dbh->prepare( $nv_sql ); $sth->execute(); my @nv_where; while(my @results = $sth->fetchrow_array) { my ($node_id, $version) = @results; my $where = "(node_id=$node_id AND version=$version)"; push @nv_where, $where; } return @nv_where; } =item B<list_metadata_by_type> List all the currently defined values of the given type of metadata. Will only return data from the latest moderated version of each node. # List all of the different metadata values with the type 'category' my @categories = $wiki->list_metadata_by_type('category'); =cut sub list_metadata_by_type { my ($self, $type) = @_; return undef unless $type; my $dbh = $self->dbh; # Ideally we'd do this as one big query # However, this would need a temporary table on many # database engines, so we cheat and do it as two my @nv_where = $self->_current_node_id_versions(); # Now the metadata bit my $sql = "SELECT DISTINCT metadata_value ". "FROM metadata ". "WHERE metadata_type = ? ". "AND (". join(" OR ", @nv_where). ")"; my $sth = $dbh->prepare( $sql ); $sth->execute($type); my $values = $sth->fetchall_arrayref([0]); return ( map { $self->charset_decode( $_->[0] ) } (@$values) ); } =item B<list_metadata_names> List all the currently defined kinds of metadata, eg Locale, Postcode. Will only return data from the latest moderated version of each node. # List all of the different kinds of metadata my @metadata_types = $wiki->list_metadata_names(); =cut sub list_metadata_names { my ($self) = @_; my $dbh = $self->dbh; # Ideally we'd do this as one big query # However, this would need a temporary table on many # database engines, so we cheat and do it as two my @nv_where = $self->_current_node_id_versions(); # Now the metadata bit my $sql = "SELECT DISTINCT metadata_type ". "FROM metadata ". "WHERE (". join(" OR ", @nv_where). ")"; my $sth = $dbh->prepare( $sql ); $sth->execute(); my $types = $sth->fetchall_arrayref([0]); return ( map { $self->charset_decode( $_->[0] ) } (@$types) ); } =item B<schema_current> my ($code_version, $db_version) = $store->schema_current; if ($code_version == $db_version) { # Do stuff } else { # Bail } =cut sub schema_current { my $self = shift; my $dbh = $self->dbh; my $sth; eval { $sth = $dbh->prepare("SELECT version FROM schema_info") }; if ($@) { return ($SCHEMA_VER, 0); } eval { $sth->execute }; if ($@) { return ($SCHEMA_VER, 0); } my $version; eval { $version = $sth->fetchrow_array }; if ($@) { return ($SCHEMA_VER, 0); } else { return ($SCHEMA_VER, $version); } } =item B<dbh> my $dbh = $store->dbh; Returns the database handle belonging to this storage backend instance.
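This can be used for ad-hoc queries against the backend; for example (a sketch - C<node> is one of the core Wiki::Toolkit tables):

    my $dbh = $store->dbh;
    my ($count) = $dbh->selectrow_array("SELECT count(*) FROM node");
    print "The wiki contains $count nodes\n";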
=cut sub dbh { my $self = shift; return $self->{_dbh}; } =item B<dbname> my $dbname = $store->dbname; Returns the name of the database used for backend storage. =cut sub dbname { my $self = shift; return $self->{_dbname}; } =item B<dbuser> my $dbuser = $store->dbuser; Returns the username used to connect to the database used for backend storage. =cut sub dbuser { my $self = shift; return $self->{_dbuser}; } =item B<dbpass> my $dbpass = $store->dbpass; Returns the password used to connect to the database used for backend storage. =cut sub dbpass { my $self = shift; return $self->{_dbpass}; } =item B<dbhost> my $dbhost = $store->dbhost; Returns the optional host used to connect to the database used for backend storage. =cut sub dbhost { my $self = shift; return $self->{_dbhost}; } # Cleanup. sub DESTROY { my $self = shift; return if $self->{_external_dbh}; my $dbh = $self->dbh; $dbh->disconnect if $dbh; } # decode a string of octets into perl's internal encoding, based on the # charset parameter we were passed. Takes a list, returns a list. sub charset_decode { my $self = shift; my @input = @_; if ($CAN_USE_ENCODE) { my @output; for (@input) { push( @output, Encode::decode( $self->{_charset}, $_ ) ); } return @output; } return @input; } # convert a perl string into a series of octets we can put into the database # takes a list, returns a list sub charset_encode { my $self = shift; my @input = @_; if ($CAN_USE_ENCODE) { my @output; for (@input) { push( @output, Encode::encode( $self->{_charset}, $_ ) ); } return @output; } return @input; } =back =cut 1; Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Store/MySQL.pm0000644000175000017500000000512712243757607021377 0ustar vagrantvagrantpackage Wiki::Toolkit::Store::MySQL; use strict; use vars qw( @ISA $VERSION ); use Wiki::Toolkit::Store::Database; use Carp qw/carp croak/; @ISA = qw( Wiki::Toolkit::Store::Database ); $VERSION = 0.05; =head1 NAME Wiki::Toolkit::Store::MySQL - MySQL storage backend for Wiki::Toolkit =head1 REQUIRES Subclasses Wiki::Toolkit::Store::Database. =head1 SYNOPSIS See Wiki::Toolkit::Store::Database =cut # Internal method to return the data source string required by DBI. sub _dsn { my ($self, $dbname, $dbhost, $dbport) = @_; my $dsn = "dbi:mysql:$dbname"; $dsn .= ";host=$dbhost" if $dbhost; $dsn .= ";port=$dbport" if $dbport; return $dsn; } =head1 METHODS =over 4 =item B<check_and_write_node> $store->check_and_write_node( node => $node, checksum => $checksum, %other_args ); Locks the node, verifies the checksum, calls C<write_node_post_locking> with all supplied arguments, unlocks the node. Returns the version of the updated node on successful writing, 0 if checksum doesn't match, -1 if the change was not applied, croaks on error. Note: Uses MySQL's user level locking, so any locks are released when the database handle disconnects. Doing it like this because I can't seem to get it to work properly with transactions. =back =cut sub check_and_write_node { my ($self, %args) = @_; my ($node, $checksum) = @args{qw( node checksum )}; $self->_lock_node($node) or croak "Can't lock node"; my $ok = $self->verify_checksum($node, $checksum); unless ($ok) { $self->_unlock_node($node) or carp "Can't unlock node"; return 0; } $ok = $self->write_node_post_locking( %args ); $self->_unlock_node($node) or carp "Can't unlock node"; return $ok; } # Returns 1 if we can get a lock, 0 if we can't, croaks on error.
sub _lock_node { my ($self, $node) = @_; my $dbh = $self->{_dbh}; $node = $dbh->quote($node); my $sql = "SELECT GET_LOCK($node, 10)"; my $sth = $dbh->prepare($sql); $sth->execute or croak $dbh->errstr; my $locked = $sth->fetchrow_array; $sth->finish; return $locked; } # Returns 1 if we can unlock, 0 if we can't, croaks on error. sub _unlock_node { my ($self, $node) = @_; my $dbh = $self->{_dbh}; $node = $dbh->quote($node); my $sql = "SELECT RELEASE_LOCK($node)"; my $sth = $dbh->prepare($sql); $sth->execute or croak $dbh->errstr; my $unlocked = $sth->fetchrow_array; $sth->finish; return $unlocked; } sub _get_casesensitive_compare_sql { my ($self, $column) = @_; return "BINARY $column = ?"; } 1; Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Store/Pg.pm0000644000175000017500000000435112243757607020776 0ustar vagrantvagrantpackage Wiki::Toolkit::Store::Pg; use strict; use vars qw( @ISA $VERSION ); use Wiki::Toolkit::Store::Database; use Carp qw/carp croak/; @ISA = qw( Wiki::Toolkit::Store::Database ); $VERSION = 0.07; =head1 NAME Wiki::Toolkit::Store::Pg - Postgres storage backend for Wiki::Toolkit =head1 REQUIRES Subclasses Wiki::Toolkit::Store::Database. =head1 SYNOPSIS See Wiki::Toolkit::Store::Database =cut # Internal method to return the data source string required by DBI. sub _dsn { my ($self, $dbname, $dbhost, $dbport) = @_; my $dsn = "dbi:Pg:dbname=$dbname"; $dsn .= ";host=$dbhost" if $dbhost; $dsn .= ";port=$dbport" if $dbport; return $dsn; } =head1 METHODS =over 4 =item B<check_and_write_node> $store->check_and_write_node( node => $node, checksum => $checksum, %other_args ); Locks the node, verifies the checksum, calls C<write_node_post_locking> with all supplied arguments, unlocks the node. Returns the version of the updated node on successful writing, 0 if checksum doesn't match, -1 if the change was not applied, croaks on error. =back =cut sub check_and_write_node { my ($self, %args) = @_; my ($node, $checksum) = @args{qw( node checksum )}; my $dbh = $self->{_dbh}; $dbh->{AutoCommit} = 0; my $ok = eval { $dbh->do("SET TRANSACTION ISOLATION LEVEL SERIALIZABLE"); $self->verify_checksum($node, $checksum) or return 0; $self->write_node_post_locking( %args ); }; if ($@) { my $error = $@; $dbh->rollback; $dbh->{AutoCommit} = 1; if ( $error =~ /can't serialize access due to concurrent update/i or $error =~ /could not serialize access due to concurrent update/i ) { return 0; } else { croak $error; } } else { $dbh->commit; $dbh->{AutoCommit} = 1; return $ok; } } sub _get_comparison_sql { my ($self, %args) = @_; if ( $args{ignore_case} ) { return "lower($args{thing1}) = lower($args{thing2})"; } else { return "$args{thing1} = $args{thing2}"; } } sub _get_node_exists_ignore_case_sql { return "SELECT name FROM node WHERE lower(name) = lower(?) "; } 1; Wiki-Toolkit-0.83/lib/Wiki/Toolkit/Store/SQLite.pm0000644000175000017500000000524212243757607021571 0ustar vagrantvagrantpackage Wiki::Toolkit::Store::SQLite; use strict; use vars qw( @ISA $VERSION ); use Wiki::Toolkit::Store::Database; use Carp qw/carp croak/; @ISA = qw( Wiki::Toolkit::Store::Database ); $VERSION = 0.06; =head1 NAME Wiki::Toolkit::Store::SQLite - SQLite storage backend for Wiki::Toolkit =head1 SYNOPSIS See Wiki::Toolkit::Store::Database =cut # Internal method to return the data source string required by DBI. sub _dsn { my ($self, $dbname) = @_; return "dbi:SQLite:dbname=$dbname"; } =head1 METHODS =over 4 =item B<new> my $store = Wiki::Toolkit::Store::SQLite->new( dbname => "wiki" ); The dbname parameter is mandatory.
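The store is normally handed straight to Wiki::Toolkit once constructed; a sketch, with a hypothetical database file name:

    my $store = Wiki::Toolkit::Store::SQLite->new( dbname => "mywiki.db" );
    my $wiki  = Wiki::Toolkit->new( store => $store );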
=cut sub new { my ($class, %args) = @_; my $self = {}; bless $self, $class; @args{qw(dbuser dbpass)} = ("", ""); # for the parent class _init return $self->_init(%args); } =item B<check_and_write_node> $store->check_and_write_node( node => $node, checksum => $checksum, %other_args ); Locks the node, verifies the checksum, calls C<write_node_post_locking> with all supplied arguments, unlocks the node. Returns the version of the updated node on successful writing, 0 if checksum doesn't match, -1 if the change was not applied, croaks on error. =back =cut sub check_and_write_node { my ($self, %args) = @_; my ($node, $checksum) = @args{qw( node checksum )}; my $dbh = $self->{_dbh}; $dbh->begin_work; my $ok = eval { $self->verify_checksum($node, $checksum) or return 0; $self->write_node_post_locking( %args ); }; if ($@) { my $error = $@; $dbh->rollback; if ( $error =~ /database is locked/ or $error =~ /DBI connect.+failed/ ) { return 0; } else { croak "Unhandled error: [$error]"; } } else { $dbh->commit; return $ok; } } # Get the attributes for the database connection. We set # sqlite_use_immediate_transaction to false because we use database locking # explicitly in check_and_write_node. This is required for DBD::SQLite 1.38. sub _get_dbh_connect_attr { my $self = shift; my $attrs = $self->SUPER::_get_dbh_connect_attr; return { %$attrs, sqlite_use_immediate_transaction => 0 }; } sub _get_lowercase_compare_sql { my ($self, $column) = @_; return "$column LIKE ?"; } sub _get_comparison_sql { my ($self, %args) = @_; if ( $args{ignore_case} ) { return "$args{thing1} LIKE $args{thing2}"; } else { return "$args{thing1} = $args{thing2}"; } } sub _get_node_exists_ignore_case_sql { return "SELECT name FROM node WHERE name LIKE ? "; } 1; Wiki-Toolkit-0.83/Changes0000644000175000017500000005760612243757607015176 0ustar vagrantvagrant0.83 22 November 2013 Fix bug in Lucy backend (if you wrote a node Foo Bar and then one called Foo, it would delete Foo Bar from its index). Make Lucy backend capable of indexing metadata as well. Allow weighting of node title in Lucy search. 0.82 12 October 2013 Fix uninitialised value warnings in Lucy backend 0.81 25 May 2013 Fix compatibility with DBD::SQLite 1.83+ (RT#84680) Added a backend for searching with Lucy - Wiki::Toolkit::Search::Lucy Fix POD errors which are fatal with perl 5.18. 0.80 3 May 2012 Spelling fix in Wiki::Toolkit::Search::Base Fixed deprecated use of foreach ... qw (fixes #53 and #54) Fixed bug with metadata_was in recent changes (#41). 0.79 14 August 2010 Fix Win32 test failure http://www.cpantesters.org/cpan/report/8099980 Declare minimum required Perl version (thanks to Alexandr Ciornii for the patches) 0.78 16 December 2009 Fix various POD errors and add POD testing (#45) More recent changes tests (for #41) Fix transaction handling in SQLite backend (#49) 0.77 24 December 2008 Complete support for store->list_metadata_by_type, which returns a list of all the metadata values Add store->list_metadata_names, which will tell you all the different metadata types (names) write_node: return the version of the node that was just committed, if successful Don't write out a new version of a node if the checksum is the same as the one already stored (#43) t/400_upgrade.t: skip all tests correctly if no backends configured (#44) Configure charset correctly when passing in an existing $dbh (#24) 0.76 13 July 2008 Really add missing prereq of DBI! Add testing of database schema upgrades (#32) Move to new schema version 10, including some missing indexes and support for verified flags.
Note that the code using this column has not yet been written (#25, #34). Changed Wiki::Toolkit::Feed::Listing to use metadata_was instead of metadata_wasnt, for efficiency reasons. list_dangling_links, list_backlinks: don't return internal links that are from a node which does not exist. This works around database inconsistencies (introduced by manual deletion of nodes) which should be fixed by adding constraints to the database (see #38) 0.75 11 May 2008 Added support for selecting versions by moderation status (see OpenGuides ticket #142). Added "new_only" parameter to ->list_recent_changes, so you can ask for e.g. all pages added in the last week. This involved a bit of a rewrite of ->list_recent_changes, so please keep an eye out for any bugs that may have been introduced (I'm not convinced the test suite covered everything it should have). Fix some badly-formatted POD documentation which accidentally included some code Fix uninitialized warnings in feed tests Fix database setup to ignore tables that aren't Wiki::Toolkit tables Fix test suite's incorrect passing of DBIxFTSMySQL connection parameters Add missing prereq of DBI (caught by the CPAN testers) and don't use DBI in Makefile.PL Add new scripts wiki-toolkit-delete-node and wiki-toolkit-revert-to-date 0.74 9 June 2007 Added list_last_version_before method, to get the version of all the nodes at a given point in time Updated version on Text::WikiFormat dependency to avoid long-standing test failure with old version Bump version number of Wiki::Toolkit::Plugin as this was not done the last time it was changed (for CPAN.pm compatibility) 0.73 12 December 2006 Added parse_feed_timestamp method Added encoding parameter to the Feed methods, to allow overriding the store's encoding. 0.72 15 September 2006 Install wiki-toolkit-rename-node 0.71 30 August 2006 More feed code refactoring, to allow "mini" feeds with just name, and optionally distance and location. Typically used in search results, especially with Wiki::Toolkit::Plugin::Locator's Ability to filter by nodes without certain metadata, by type or type+value ("everything without a longitude" or "everything not category=Pub") Fix bug in Listing.pm pod output 0.70 6 June 2006 Feed code has been refactored. Check that the database schema is up-to-date when initializing the store and abort otherwise. In PostgresSQL setup code, delete orphaned content/metadata rows. When we bulk delete and insert data for an upgrade, check to make sure we can create the tables and indexes before dropping the old tables. Other miscellaneous fixes. 0.69_03 13 May 2006 Add Wiki::Toolkit::Feed::RSS (formerly CGI::Wiki::Plugin::RSS::Modwiki) and Wiki::Toolkit::Feed::Atom (formerly CGI::Wiki::Plugin::Atom) 0.69_02 27 April 2006 Add missing new file lib/Wiki/Toolkit/Setup/Database.pm to distribution. 0.69_01 23 April 2006 *** IMPORTANT NOTICE *** This release introduces a new database schema which should be considered beta in nature at this stage. It is not yet suitable for deployment on production data and should only be tested on copies of your production data. If it eats your data, you get to keep the pieces. Additionally, functionality exposed in this beta release is subject to change. Rename to Wiki::Toolkit. Support for moderation on a per-node basis. 
Update to the database schema: added needs_moderation column to node, and moderated column to content You will need to run wiki-toolkit-setupdb to upgrade your database Many more plugin points: pre_write + pre_retrieve (get to change the data), pre_moderate + post_moderate, pre_rename + post_rename, + post_delete. See test 150 for examples, and the perldoc of Wiki::Toolkit::Plugin for a description. Merge Wiki::Toolkit::Formatter::Multiple: 0.02 15 September 2004 Applied patch from Tom Insam to fix bug with find_internal_links. 0.01 22 September 2003 Initial release. 0.69 22 February 2006 Update to database schema: added an ID column to node, and changed the content + metadata tables to FK to the ID field, rather than the node name. Also added rails like schema_info table, to hold the current database schema version number You will need to run cgi-wiki-setupdb to upgrade your database 0.62 26 November 2004 Fixed delete_node bug - now it's OK to delete eg version 2 and then version 3 of a node (ie you no longer have to delete versions in order newest first). 0.61 3 October 2004 Added "ignore_case" option to ->node_exists. 0.60 30 September 2004 Added generic ->new method to CGI::Wiki::Plugin (Tom Insam). 0.59 18 September 2004 Added "ignore_case" option to list_recent_changes for use with metadata filtering. 0.58 18 September 2004 Even more powerful metadata filtering! You can now supply both "metadata_was" and "metadata_wasnt", or both "metadata_is" and "metadata_isnt", and they will be ANDed. (Also added the test I forgot to add to MANIFEST last time.) 0.57 18 September 2004 Extended capabilities of list_recent_changes - more powerful metadata filtering (see docs for details). 0.56 14 September 2004 Added Plucene search backend (with help from Simon Cozens). Applied Tom Insam's encoding patch. 0.55 There wasn't one. Something bizarre happened with PAUSE. 0.54 25 June 2004 Let ->delete_node take an optional version parameter. 0.53 18 June 2004 Fixed CGI::Wiki::TestLib so $iterator->number actually does something. 0.52 10 June 2004 Add include_all_changes and between_days to list_recent_changes. 0.51 8 June 2004 Time for a non-developer release. 0.51_02 6 June 2004 Added support for passing in a database handle instead of connection parameters (Chris Winters). 0.51_01 24 February 2004 Refactoring of search classes in preparation for new search backend. (Simon Cozens) Please test against your applications and let me know of any problems ASAP. 0.50 21 December 2003 First non-developer release with new test system. 0.50_02 21 November 2003 Make tests skip rather than try to run zero tests if no backends configured - thanks to nothingmuch via CPAN testers for the test failure report. 0.50_01 18 November 2003 Huge overhaul of test suite, no change in functionality. Removed dependency on Test::MockObject and Hook::LexWrap (the relevant tests are now skipped if these aren't installed). Removed the examples since they were very out of date. Made a small clarification to the CGI::Wiki::Plugin docs. 0.49 23 September 2003 Added "ignore_case" option to ->list_nodes_by_metadata 0.48 22 September 2003 Couple of small changes so I can write formatters that allow node metadata to affect the rendering of the node. 
A rewrite and renaming of the CGI::Wiki distribution is on the cards - subscribe to cgi-wiki-dev if you're interested: http://www.earth.li/cgi-bin/mailman/listinfo/cgi-wiki-dev 0.47 29 August 2003 Added metadata_was and metadata_wasnt parameters to list_recent_changes 0.46 14 August 2003 Fix for Shevek. ->write_node used to croak if it failed to index the backlinks, but since the MySQL varchar type is case-insensitive by default, we were getting a duplicate key error. As a temporary fix pending a proper think about an explicit way for you to specify whether you want your wiki case-sensitive or not (yes, people have asked me for both), this now just warns if it has trouble writing backlinks. 0.45 11 August 2003 Removed dependency on Class::Delegation. Fixed Makefile.PL to check for Lingua::Stem before trying to test DBIx::FullTextSearch stuff. Fixed DBIx and Search::InvertedIndex tests to take note of database host if one supplied. 0.44 10 July 2003 Fixed bug with metadata_isnt - it wasn't picking up nodes where that metadata type wasn't set. 0.43 10 July 2003 Added metadata_isnt constraint to ->list_recent_changes. 0.42 16 June 2003 Dropped Test::Warn from the dependencies - it's too much installing for too little gain (I was hardly using it). Made CGI::Wiki::TestConfig::Utilities check for the Postgres Search::InvertedIndex backend, and test using that if possible. 0.41 27 May 2003 Changed the order of things when registering a plugin, so the on_register method can access the datastore properly. 0.40 24 May 2003 Added an index to the metadata table in the MySQL setup, to speed up RecentChanges. To apply this index to an existing database you need to do (as the database superuser): CREATE INDEX metadata_index ON metadata (node, version, metadata_type, metadata_value(10)) Also made the warnings in Makefile.PL and INSTALL stronger, since we had another data-eating incident. 0.39 21 May 2003 Amended Makefile.PL and INSTALL to make it absolutely clear that you should not run the tests on databases containing valuable data. Thanks to Rocco for, er, stress-testing this, and sorry to 'know' for eating its brains... 0.38 17 May 2003 Added ->list_dangling_links method; thanks to Simon Cozens for the idea. 0.37 12 May 2003 Oh dear. No real changes, but I moved Extending.pod to stop it getting installed as CGI::Extending. Sorry. 0.36 9 May 2003 Added CGI::Wiki::Plugin as a base class for plugins to inherit from. 0.35 5 May 2003 Fixed bug in CGI::Wiki::Store::SQLite introduced with new calling syntax in version shipped with 0.34. Very basic start at plugin support as described in Extending.pod - see 'perldoc CGI::Wiki' for details. 0.34 5 May 2003 Moving towards adding proper plugin support - the proposed API is in Extending.pod, please take a look and send comments. In preparation for the above, the metadata hash passed to ->write_node can now contain more complex data structures - but these will *not* be stored directly, just checksummed. The way to access them will be via plugins. See 'perldoc CGI::Wiki;' for details. Also added alternative calling syntax for the CGI::Wiki::Setup::* store modules, as requested by Podmaster. 0.33 3 May 2003 Added ->fuzzy_title_match method to the Search::InvertedIndex backend (CGI::Wiki::Search::SII). You will need to re-index all existing nodes in your wiki in order to take advantage of this. 
Take your wiki offline and do something like my $wiki = CGI::Wiki->new( %conf ); # (Where %conf is exactly as you would set this up for your actual # Wiki application, including your store, search and formatter # options exactly as you use them live.) my @nodes = $wiki->list_all_nodes; foreach my $node ( @nodes ) { my %node_data = $wiki->retrieve_node( $node ); $wiki->write_node( $node, $node_data{content}, $node_data{checksum}, $node_data{metadata} ); print "Reindexed $node\n"; } to refresh all the nodes in your database. Note that for wikis of more than a couple of nodes, this can take quite some time. 0.32 2 May 2003 Fixed bug with write_node dying when using Search::InvertedIndex and writing node with blank content (thanks to Bob Walker for the bug report). 0.31 26 April 2003 Added an index to the metadata table in the postgres setup, to speed up RecentChanges. To apply this index to an existing database you need to do (as the database superuser): bench=# create index metadata_nodeindex on metadata (node, version, metadata_type, metadata_value); It will also help speed things up if you run: bench=# analyze; every so often, maybe once a week. 0.30 22 April 2003 Added support for supplying 'host' parameter when connecting to MySQL/Postgres databases (requested and assisted by Paul Makepeace). 0.29 20 April 2003 Got rid of the separate bin/user-setup-* scripts, added bin/cgi-wiki-setupdb and made it be installed when the rest of the distribution is. 0.28 6 April 2003 Added ->formatter accessor to Wiki.pm 0.27 5 April 2003 Added ->reinitialise_stores method to CGI::Wiki::TestConfig::Utilities to make it easier for plugins and so forth to make sure they have nice virginal test stores before they start running their tests. Altered my setup tests to use this. 0.26 3 April 2003 Cleanup: - Removed deprecated method retrieve_node_and_checksum. - Added DBI as a prerequisite (ta Max). - Fixed up the SEE ALSO in the pod. Bugfix: - The SQLite backend was failing tests with DBI 1.34 or up; fixed now (with a kluge, but no worse than the one already there) Thanks to DH for the test failure report. 0.25 29 March 2003 list_recent_changes can now filter on a single metadata criterion - multiple criteria coming soon. 0.24 29 March 2003 list_recent_changes now returns any metadata attached to the node as well - so you can put comments and usernames and things in there and display them on RecentChanges. Note that it no longer pretends to return a comment separately since you can do it this way now. See perldoc CGI::Wiki::Store::Database for the new API. Moved recent changes tests out into their own file. 0.23 17 March 2003 Fixed bug - metadata wasn't getting deleted when ->delete_node was called. 0.22 8 March 2003 Rejig of the way the tests work, in preparation for allowing third-party plugins: When 'perl Makefile.PL' is run on a CGI::Wiki distribution, information will be gathered about test databases etc that can be used for running tests. CGI::Wiki::TestConfig::Utilities gives you convenient access to this information, so you can easily write and run tests for your own CGI::Wiki plugins. No functionality changes. 0.21 6 March 2003 Purely a documentation update; thanks to Alex McLintock for comments. 0.20 22 February 2003 Added simple (and intentionally naive) metadata support. Note that the database schema has changed (additional 'metadata' table), so you will need to re-run the relevant database setup script again as described below for upgrading to 0.15. 
0.16    5 February 2003
        Changed CGI::Wiki::Setup::Pg to use the 'timestamp' data type
        instead of 'datetime', since 'datetime' was deprecated and has
        been removed in Postgres 7.3.
        Fixed bug with supplying a blank database username/password;
        thanks to DH for the bug report.
        Fixed mistake in pod, pointed out by Podmaster.

0.15    5 January 2003
        Amended the store setup modules so their 'setup' functions
        don't wipe pre-existing data; added 'cleardb' functions for
        when you really do want to wipe it. Along with that, amended
        the setup scripts in ./bin/ to take a --force-preclear option.
        Now they leave existing data alone by default.
        Added standalone tests for CGI::Wiki::Formatter::Default.
        Added $formatter->find_internal_links method and tests.
        Implemented backlinks! Thanks to blair christensen for the
        idea, and sorry for taking so long to get around to it.

        *** IMPORTANT NOTE ***

        *After* upgrading, you will need to re-run the relevant
        database setup script (in ./bin/) on any databases created
        using earlier versions of CGI::Wiki, in order that the
        internal_links table gets created. From version 0.15, these
        scripts won't affect data in existing tables, as long as you
        don't supply the --force-preclear option, so this is safe.

   ===> *** IF YOU DON'T DO THIS THEN YOUR CODE WILL FALL OVER AND DIE. ***

        You have been warned.

        (You can do the database munging before you install this new
        version -- the old versions won't mind the extra table -- but

   ===> *** MAKE SURE ***

        to invoke the scripts as something like

          perl -Ilib bin/user-setup-[...]

        so you get the *new* setup modules which *won't* hose your data
        (the old ones did, ugh).)

        The backlink data will also not exist for links *from* a given
        node until you re-write that node. Take your wiki offline, then
        do something like

          my $wiki = CGI::Wiki->new( %conf );
          # (Where %conf is exactly as you would set this up for your
          # actual Wiki application, including your store, search and
          # formatter options exactly as you use them live.)
          my @nodes = $wiki->list_all_nodes;
          foreach my $node ( @nodes ) {
              my %nodedata = $wiki->retrieve_node( $node );
              $wiki->write_node( $node,
                                 $nodedata{content},
                                 $nodedata{checksum} );
          }

        to refresh all the nodes in your database.

0.14    3 January 2003
        Added $store->node_exists method and tests.
        Amended $store->list_recent_changes to take a 'last_n_changes'
        parameter, so you can find the last 10 (or whatever) nodes
        edited.
        Made an internal change to the way ->format is delegated to
        the formatter object, to give said object access to the store.

0.13    2 January 2003
        Minor fix - t/031_formatting.t was being reported as failing
        on systems without support for any of the backends, since I
        forgot to update the SKIP condition when I added six extra
        tests. Thanks to root@ostend.org for the report via
        cpan-testers.

0.12    1 January 2003
        The Search::InvertedIndex backend wasn't indexing the node
        titles - fixed, and added tests.
        Added tests for non-MySQL Search::InvertedIndex backends and
        fixed CGI::Wiki::Search::SII to be case-insensitive all the
        time (instead of just when using MySQL).
        Tweaked the documentation some more - offers to simplify the
        docs and/or write a tutorial would be greatly appreciated.

0.11    31 December 2002
        Changes suggested by blair christensen, to allow alternate
        formatters (sketched just below). See the README for details
        and CGI::Wiki::Formatter::Default for an example.
        Took out some leftover debug stuff from CGI::Wiki::Search::SII.
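        A short sketch of plugging in an alternate formatter, as
        allowed from this release onwards. The constructor options
        shown are the ones CGI::Wiki::Formatter::Default takes (the
        standalone formatter tests later in this distribution exercise
        the same options); the store object is assumed to exist
        already.

          use CGI::Wiki;
          use CGI::Wiki::Formatter::Default;

          my $formatter = CGI::Wiki::Formatter::Default->new(
              extended_links => 1,                # allow [Extended Links]
              implicit_links => 0,                # ignore StudlyCaps
              node_prefix    => "wiki.cgi?node=",
          );
          # $store is assumed to be any CGI::Wiki::Store::* object.
          my $wiki = CGI::Wiki->new( store     => $store,
                                     formatter => $formatter );
          print $wiki->format( "[Another Node|a titled link]" );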
0.10    19 December 2002
        Added a Search::InvertedIndex backend; currently only tested
        with the MySQL version of Search::InvertedIndex. When running
        make test, if the Search::InvertedIndex tests are being run,
        the following tests will warn 'testdb is not open. Can't lock.'
        (but should pass):

          005_setup_mysql_search_invertedindex.t
          011_cgi_wiki.t

        *** NOTE INTERFACE CHANGE ***
        Cleaned up the initialisation of the Wiki object, at the
        expense of a small interface change - you now need to create
        your store and (optional) search objects yourself and pass
        them as arguments to CGI::Wiki->new (a minimal sketch follows
        at the end of this file) - read 'perldoc CGI::Wiki' (once
        installed) for details, or see examples/wiki.cgi in the
        tarball.

0.05    17 November 2002
        I'd uploaded an unfinished version by mistake. One day I will
        figure out how to do this upload thing without screwing up.

0.04    17 November 2002
        Added a DBD::SQLite storage backend (thanks to blair
        christensen for a patch that makes up part of this).
        Added tests and docs for retrieval of old versions of pages
        (bad Kake, should have written those before implementing the
        feature; blair and Richard shamed me into getting it sorted
        now).
        retrieve_node_and_checksum is now deprecated -- retrieve_node
        is more clever and will return either content only or a hash
        with content plus meta-data, according to context. So you
        should get at the checksum via that.
        user-setup-postgres.pl was buggy, fixed now.
        Only two beer rewards now remain.

0.03    9 November 2002
        Forgot to regenerate and add the README (I've put it in the
        MANIFEST now to stop that happening again). Also forgot to
        mention that I've upped the bribe.

0.02    9 November 2002
        Pulled out the database setup stuff into modules (Mark Fowler
        did most of this bit, thanks). Added recent_changes method and
        its tests; changed the example wiki to show how this can be
        used. Renamed some tests, since the order they're run in
        matters now. Added better support for non-interactive
        installation (with help from Mark again).

0.01    28 October 2002
        Initial release.
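        For reference, a minimal sketch of the initialisation style
        introduced by the 0.10 interface change above. The MySQL store
        is just an example backend, and the connection details are
        placeholders.

          use CGI::Wiki;
          use CGI::Wiki::Store::MySQL;

          # Create the store (and, optionally, a search object)
          # yourself...
          my $store = CGI::Wiki::Store::MySQL->new(
              dbname => "wiki",       # placeholder connection details
              dbuser => "wiki",
              dbpass => "secret",
          );

          # ...then hand it to the Wiki constructor.
          my $wiki = CGI::Wiki->new( store => $store );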
$wiki->write_node( "The Old Trout", "A pub", undef, { category => [ "Pub", "Hammersmith" ] } ); my @nodes = $wiki->list_nodes_by_metadata( metadata_type => "category", metadata_value => "Hammersmith" ); is_deeply( [ sort @nodes ], [ "Reun Thai", "The Old Trout" ], "list_nodes_by_metadata returns everything it should" ); $wiki->write_node( "The Three Cups", "Another pub", undef, { category => "Pub" } ); @nodes = $wiki->list_nodes_by_metadata( metadata_type => "category", metadata_value => "Pub" ); is_deeply( [ sort @nodes ], [ "The Old Trout", "The Three Cups" ], "...and not things it shouldn't" ); # Case insensitivity option. @nodes = $wiki->list_nodes_by_metadata( metadata_type => "category", metadata_value => "hammersmith", ignore_case => 0, ); is_deeply( [ sort @nodes ], [ ], "ignore_case => 0 doesn't ignore case of metadata_value" ); @nodes = $wiki->list_nodes_by_metadata( metadata_type => "category", metadata_value => "hammersmith", ignore_case => 1, ); is_deeply( [ sort @nodes ], [ "Reun Thai", "The Old Trout" ], "ignore_case => 1 ignores case of metadata_value" ); @nodes = $wiki->list_nodes_by_metadata( metadata_type => "Category", metadata_value => "Hammersmith", ignore_case => 1, ); is_deeply( [ sort @nodes ], [ "Reun Thai", "The Old Trout" ], "...and case of metadata_type" ); # Test list_nodes_by_missing_metadata # Shouldn't get any if we search on category @nodes = $wiki->list_nodes_by_missing_metadata( metadata_type => "category" ); is( scalar @nodes, 0, "All have metadata category" ); # By latitude, should only get The Old Trout+The Three Cups @nodes = $wiki->list_nodes_by_missing_metadata( metadata_type => "latitude" ); is_deeply( [ sort @nodes ], [ "The Old Trout", "The Three Cups" ], "By lat, not Reun Thai" ); # By longitude, we should get all (Reun Thai has it blank) @nodes = $wiki->list_nodes_by_missing_metadata( metadata_type => "longitude" ); is_deeply( [ sort @nodes ], [ "Reun Thai", "The Old Trout", "The Three Cups" ], "By long, get all" ); # With category=Pub, we should get only the Reun Thai @nodes = $wiki->list_nodes_by_missing_metadata( metadata_type => "category", metadata_value => "Pub" ); is_deeply( [ sort @nodes ], [ "Reun Thai" ], "Reun Thai not a pub" ); # With Category, we should get all @nodes = $wiki->list_nodes_by_missing_metadata( metadata_type => "Category" ); is_deeply( [ sort @nodes ], [ "Reun Thai", "The Old Trout", "The Three Cups" ], "By Category, get all" ); # With category=hammersmith, we should get all @nodes = $wiki->list_nodes_by_missing_metadata( metadata_type => "category", metadata_value => "hammersmith" ); is_deeply( [ sort @nodes ], [ "Reun Thai", "The Old Trout", "The Three Cups" ], "By category=hammersmith (case sensitive), get all" ); # But with category=hammersmith+case insensitive, shouldn't get any @nodes = $wiki->list_nodes_by_missing_metadata( metadata_type => "category", metadata_value => "hammersmith", ignore_case => 1 ); is_deeply( [ sort @nodes ], [ "The Three Cups" ], "By category=hammersmith (ci), get all but the three cups" ); %node = $wiki->retrieve_node("The Three Cups"); $wiki->write_node( "The Three Cups", "Not a pub any more", $node{checksum} ); @nodes = $wiki->list_nodes_by_metadata( metadata_type => "category", metadata_value => "Pub" ); is_deeply( [ sort @nodes ], [ "The Old Trout" ], "removing metadata from a node stops it showing up in list_nodes_by_metadata" ); my $dbh = eval { $wiki->store->dbh; }; my $id_sql = "SELECT id FROM node WHERE name='Reun Thai'"; my $id = @{ $dbh->selectcol_arrayref($id_sql) }[0]; 
$wiki->delete_node("Reun Thai"); @nodes = $wiki->list_nodes_by_metadata( metadata_type => "category", metadata_value => "Hammersmith" ); is_deeply( [ sort @nodes ], [ "The Old Trout" ], "...as does deleting a node" ); # Check that deleting a node really does clear out the metadata. SKIP: { skip "Test only works on database backends", 1 unless $dbh; # White box testing. my $sql = "SELECT metadata_type, metadata_value FROM metadata WHERE node_id = $id"; my $sth = $dbh->prepare($sql); $sth->execute; my ( $type, $value ) = $sth->fetchrow_array; is_deeply( [ $type, $value ], [undef, undef], "deletion of a node removes metadata from database" ); } # Test checksumming. %node = $wiki->retrieve_node("The Three Cups"); ok( $wiki->write_node( "The Three Cups", "Not a pub any more", $node{checksum}, { newdata => "foo" } ), "writing node with metadata succeeds when checksum fresh" ); ok( !$wiki->write_node( "The Three Cups", "Not a pub any more", $node{checksum}, { newdata => "bar" } ), "writing node with identical content but different metadata fails when checksum not updated" ); # Test with duplicate metadata. $wiki->write_node( "Dupe Test", "test", undef, { foo => [ "bar", "bar" ] } ); %node = $wiki->retrieve_node( "Dupe Test" ); is( scalar @{$node{metadata}{foo}}, 1, "duplicate metadata only written once" ); # Test version is updated when metadata is removed. $wiki->write_node( "Dupe Test", "test", $node{checksum} ); %node = $wiki->retrieve_node( "Dupe Test" ); is( $node{version}, 2, "version updated when metadata removed" ); } Wiki-Toolkit-0.83/t/101_default_formatter.t0000644000175000017500000000470512243757607020423 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestConfig; use Test::More tests => 11; # These are standalone tests for the default formatter module, # Wiki::Toolkit::Formatter::Default -- they can be adapted to test any # formatter object without the need for the rest of the distribution. use_ok( "Wiki::Toolkit::Formatter::Default" ); # Test that the implicit_links flag gets passed through right. my $raw = "This paragraph has StudlyCaps in."; my $formatter = Wiki::Toolkit::Formatter::Default->new( implicit_links => 1, node_prefix => "wiki.cgi?node=" ); my $cooked = $formatter->format( $raw ); like( $cooked, qr!StudlyCaps!, "StudlyCaps turned into link when we specify implicit_links=1" ); $formatter = Wiki::Toolkit::Formatter::Default->new( implicit_links => 0, node_prefix => "wiki.cgi?node=" ); $cooked = $formatter->format($raw); unlike( $cooked, qr!StudlyCaps!, "...but not when we specify implicit_links=0" ); $raw = <new( implicit_links => 1, extended_links => 1, node_prefix => "wiki.cgi?node=" ); my @links_to = $formatter->find_internal_links( $raw ); my %links_hash = map { $_ => 1 } @links_to; ok( $links_hash{"Extended Link"}, "find_internal_links finds extended link" ); ok( $links_hash{"Another Node"}, "...and titled extended link" ); ok( $links_hash{"WikiWord"}, "...and implicit link" ); is( scalar @links_to, 3, "...and has found the right number of links" ); $formatter = Wiki::Toolkit::Formatter::Default->new( implicit_links => 1, extended_links => 0, node_prefix => "wiki.cgi?node=" ); @links_to = $formatter->find_internal_links( $raw ); %links_hash = map { $_ => 1 } @links_to; ok( ! $links_hash{"Extended Link"}, "find_internal_links doesn't find extended links when they're turned off" ); ok( ! 
$links_hash{"Another Node"}, "...or titled ones" ); ok( $links_hash{"WikiWord"}, "...but does find implicit links" ); $formatter = Wiki::Toolkit::Formatter::Default->new( implicit_links => 0, node_prefix => "wiki.cgi?node=" ); @links_to = $formatter->find_internal_links( $raw ); %links_hash = map { $_ => 1 } @links_to; ok( ! $links_hash{"WikiWord"}, "find_internal_links doesn't find implicit links when they're turned off" ); Wiki-Toolkit-0.83/t/303_feed_atom_node_all_versions.t0000644000175000017500000000730312243757607022425 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestConfig::Utilities; use Wiki::Toolkit; use URI::Escape; # Note - update the count in the skip block to match the number here # we would put the number in a variable, but that doesn't seem to work use Test::More tests => (3 + 14 * $Wiki::Toolkit::TestConfig::Utilities::num_stores); use_ok( "Wiki::Toolkit::Feed::Atom" ); eval { my $atom = Wiki::Toolkit::Feed::Atom->new; }; ok( $@, "new croaks if no wiki object supplied" ); eval { my $atom = Wiki::Toolkit::Feed::Atom->new( wiki => "foo" ); }; ok( $@, "new croaks if something that isn't a wiki object supplied" ); my %stores = Wiki::Toolkit::TestConfig::Utilities->stores; my ($store_name, $store); while ( ($store_name, $store) = each %stores ) { SKIP: { skip "$store_name storage backend not configured for testing", 14 unless $store; print "#\n##### TEST CONFIG: Store: $store_name\n#\n"; my $wiki = Wiki::Toolkit->new( store => $store ); my %default_config = ( wiki => $wiki, site_name => "Wiki::Toolkit Test Site", make_node_url => sub { my $id = uri_escape($_[0]); my $version = $_[1] || ''; $version = uri_escape($version) if $version; "http://example.com/?id=$id;version=$version"; }, recent_changes_link => "http://example.com/?RecentChanges", atom_link => "http://example.com/?action=rc;format=atom", ); my $atom = eval { Wiki::Toolkit::Feed::Atom->new( %default_config, site_url => "http://example.com/kakeswiki/" ); }; is( $@, "", "'new' doesn't croak if wiki object and mandatory parameters supplied" ); isa_ok( $atom, "Wiki::Toolkit::Feed::Atom" ); my $feed = eval { $atom->node_all_versions; }; is( $@, "", "->node_all_versions doesn't croak" ); # Should be empty to start with unlike($feed, qr||, "no entry items as empty" ); # Now retry with a node name $feed = eval { $atom->node_all_versions(name=>'Test Node 1'); }; is( $@, "", "->node_all_versions doesn't croak with a name" ); # Should only have it once my @items = $feed =~ /(\<\/entry\>)/g; is( scalar @items, 1, "Only found it once" ); # And should have the name like( $feed, qr|Test Node 1|, "Found right node" ); # And the be the first version like( $feed, '//', "And right version" ); #like( $feed, qr|1|, "And right version" ); # Now try again, with a 2 version node $feed = eval { $atom->node_all_versions(name=>'Old Node'); }; is( $@, "", "->node_all_versions doesn't croak with a name" ); # Check we found two versions @items = $feed =~ /(\<\/entry\>)/g; is( scalar @items, 2, "Found it twice" ); # Both the right name @items = $feed =~ /(Old Node<\/title>)/g; is( scalar @items, 2, "Had the right name" ); # And the right version like( $feed, '/<link href=".*?;version=2" \/>/', "And right version" ); like( $feed, '/<link href=".*?;version=1" \/>/', "And right version" ); #like( $feed, qr|<modwiki:version>2</modwiki:version>|, "And right version" ); #like( $feed, qr|<modwiki:version>1</modwiki:version>|, "And right version" ); # And in the right order like( $feed, '/;version=2.*;version=1/s', "Right order" ); #like( $feed, 
        #like( $feed,
        #      '/<modwiki:version>2<\/modwiki:version>.*<modwiki:version>1<\/modwiki:version>/s',
        #      "Right order" );
    }
}
Wiki-Toolkit-0.83/t/401_null_change.t0000644000175000017500000000301612243757607017170 0ustar vagrantvagrantuse strict;
use Wiki::Toolkit::TestLib;
use Test::More;

if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) {
    plan skip_all => "no backends configured";
} else {
    plan tests => ( 4 * scalar @Wiki::Toolkit::TestLib::wiki_info );
}

my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;

my $metadata_orig    = { foo => [ 7 ], bar => [ 9 ] };
my $metadata_changed = { foo => [ "changed" ], bar => [ 9 ] };
my $content_orig     = "Node content.";
my $content_changed  = "Node content -- changed";
my $id = "A Node";

while ( my $wiki = $iterator->new_wiki ) {
    $wiki->write_node($id, $content_orig, undef, $metadata_orig );
    my %node_data = $wiki->retrieve_node($id);
    is( $wiki->write_node( $id, $content_orig, $node_data{checksum},
                           $metadata_orig, ),
        -1,
        "refuses to update if new content and metadata is the same",
    );
    %node_data = $wiki->retrieve_node($id);
    ok( $wiki->write_node( $id, $content_orig, $node_data{checksum},
                           $metadata_changed, ) >= 1,
        "still updates if metadata is different",
    );
    %node_data = $wiki->retrieve_node($id);
    ok( $wiki->write_node( $id, $content_changed, $node_data{checksum},
                           $metadata_changed, ) >= 1,
        "still updates if content is different",
    );
    %node_data = $wiki->retrieve_node($id);
    is( $wiki->write_node( $id, $content_changed, $node_data{checksum},
                           $metadata_changed, ),
        -1,
        "... and refuses again when nothing changed",
    );
}
Wiki-Toolkit-0.83/t/151_plugin_methods.t0000644000175000017500000000213212243757607017732 0ustar vagrantvagrantuse strict;
use Wiki::Toolkit::TestLib;
use Test::More;
use lib "t/lib";
use Wiki::Toolkit::Plugin::Foo;
use Wiki::Toolkit::Plugin::Bar;

if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) {
    plan skip_all => "no backends configured";
} else {
    plan tests => ( 6 * scalar @Wiki::Toolkit::TestLib::wiki_info );
}

my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;

while ( my $wiki = $iterator->new_wiki ) {
    my $plugin = Wiki::Toolkit::Plugin::Foo->new;
    isa_ok( $plugin, "Wiki::Toolkit::Plugin::Foo" );
    isa_ok( $plugin, "Wiki::Toolkit::Plugin" );
    can_ok( $plugin, qw( datastore indexer formatter ) );
    $wiki->register_plugin( plugin => $plugin );
    ok( ref $plugin->datastore,
        "->datastore seems to return an object after registration" );
    is_deeply( $plugin->datastore, $wiki->store, "...the right one" );

    # Check that the datastore etc attrs are set up before on_register
    # is called.
    my $plugin_2 = Wiki::Toolkit::Plugin::Bar->new;
    eval { $wiki->register_plugin( plugin => $plugin_2 ); };
    is( $@, "", "->on_register can access datastore" );
}
Wiki-Toolkit-0.83/t/051_pg_store.t0000644000175000017500000001071612243757607016541 0ustar vagrantvagrantuse strict;
use Wiki::Toolkit;
use Wiki::Toolkit::Store::Pg;
use Wiki::Toolkit::Setup::Pg;
use Wiki::Toolkit::TestConfig;
use Test::More tests => 9;

my $class = "Wiki::Toolkit::Store::Pg";

eval { $class->new; };
ok( $@, "Failed creation dies" );

my %config = %{$Wiki::Toolkit::TestConfig::config{Pg}};
my ($dbname, $dbuser, $dbpass, $dbhost) =
    @config{qw(dbname dbuser dbpass dbhost)};

SKIP: {
    skip "No Postgres database configured for testing", 8 unless $dbname;

    {
        my $store = eval {
            $class->new( dbname => $dbname,
                         dbuser => $dbuser,
                         dbpass => $dbpass,
                         dbhost => $dbhost );
        };
        is( $@, "", "Creation doesn't die when given connection params" );
        isa_ok( $store, $class );
        ok( $store->dbh, "...and has set up a database handle" );
    }

    {
        my $dsn = "dbi:Pg:dbname=$dbname";
        $dsn .= ";host=$dbhost" if $dbhost;
        my $dbh = DBI->connect( $dsn, $dbuser, $dbpass );
        my $store = eval { $class->new( dbh => $dbh ); };
        is( $@, "", "Creation doesn't die when given a dbh" );
        isa_ok( $store, $class );
        ok( $store->dbh, "...and we can retrieve the database handle" );
        $dbh->disconnect;
    }

    Wiki::Toolkit::Setup::Pg::cleardb({ dbname => $dbname,
                                        dbuser => $dbuser,
                                        dbpass => $dbpass,
                                        dbhost => $dbhost });
    Wiki::Toolkit::Setup::Pg::setup({ dbname => $dbname,
                                      dbuser => $dbuser,
                                      dbpass => $dbpass,
                                      dbhost => $dbhost });

    SKIP: {
        eval { require Hook::LexWrap; require Test::MockObject; };
        skip "either Hook::LexWrap or Test::MockObject not installed", 2
            if $@;

        my $store = $class->new( dbname => $dbname, dbuser => $dbuser,
                                 dbpass => $dbpass, dbhost => $dbhost );
        my $wiki = Wiki::Toolkit->new( store => $store );

        # Write some test data.
        $wiki->write_node( "Home", "This is the home node." )
            or die "Couldn't setup";

        # White box testing - override verify_node_checksum to first
        # verify the checksum and then if it's OK set up a new wiki
        # object that sneakily writes to the node before letting us
        # have control back.
        my $temp;
        $temp = Hook::LexWrap::wrap(
            # fully qualify since we're requiring
            'Wiki::Toolkit::Store::Database::verify_checksum',
            post => sub {
                undef $temp; # Don't want to wrap our sneaking-in
                my $node = $_[1];
                my $evil_store = $class->new( dbname => $dbname,
                                              dbuser => $dbuser,
                                              dbpass => $dbpass,
                                              dbhost => $dbhost );
                my $evil_wiki = Wiki::Toolkit->new( store => $evil_store );
                my %node_data = $evil_wiki->retrieve_node($node);
                $evil_wiki->write_node($node, "foo", $node_data{checksum})
                    or die "Evil wiki got conflict on writing";
            }
        );

        # Now try to write to a node -- it should fail.
        my %node_data = $wiki->retrieve_node("Home");
        ok( ! $wiki->write_node("Home", "bar", $node_data{checksum}),
            "write_node handles overlapping write attempts correctly" );

        # Check actual real database errors croak rather than flagging
        # conflict.
        %node_data = $wiki->retrieve_node("Home");
        my $dbh = $store->dbh;
        $dbh->disconnect;

        # Mock a database handle. Need to mock rollback() and
        # disconnect() as well to avoid warnings that an unmocked
        # method has been called (we don't actually care).
        my $fake_dbh = Test::MockObject->new();
        $fake_dbh->mock("do", sub { die "Dave told us to"; });
        $fake_dbh->set_true("rollback");
        $fake_dbh->set_true("disconnect");
        $store->{_dbh} = $fake_dbh;

        eval {
            $store->check_and_write_node(
                node     => "Home",
                content  => "This is a node.",
                checksum => $node_data{checksum} );
        };
        ok( $@ =~ /Dave told us to/, "...and croaks on database error" );
    }
}
Wiki-Toolkit-0.83/t/102_multiple_formatter.t0000644000175000017500000000606212243757607020631 0ustar vagrantvagrantuse strict;
use Wiki::Toolkit;
use Wiki::Toolkit::Setup::SQLite;
use Wiki::Toolkit::Store::SQLite;
use Wiki::Toolkit::Formatter::Default;
use Wiki::Toolkit::Formatter::Multiple;

use vars qw( $num_sqlite_tests );
BEGIN { $num_sqlite_tests = 7; }
use Test::More tests => 1 + $num_sqlite_tests;

my $default_fmtr = Wiki::Toolkit::Formatter::Default->new;
my $uc_fmtr = Local::Test::Formatter::UC->new;
my $append_fmtr = Local::Test::Formatter::Append->new;

my $formatter = Wiki::Toolkit::Formatter::Multiple->new(
    normal   => $default_fmtr,
    uc       => $uc_fmtr,
    _DEFAULT => $append_fmtr,
);
isa_ok( $formatter, "Wiki::Toolkit::Formatter::Multiple" );

eval { require DBD::SQLite };
my $run_tests = $@ ? 0 : 1;
SKIP: {
    skip "DBD::SQLite not installed - can't make test database",
        $num_sqlite_tests unless $run_tests;

    my $db_file = "./t/wiki.db";
    if (-f $db_file) {
        Wiki::Toolkit::Setup::SQLite::cleardb($db_file);
    }
    Wiki::Toolkit::Setup::SQLite::setup($db_file);
    my $store = Wiki::Toolkit::Store::SQLite->new( dbname => "./t/wiki.db" );
    my $wiki = Wiki::Toolkit->new( store => $store, formatter => $formatter );
    isa_ok( $wiki, "Wiki::Toolkit" );

    $wiki->write_node( "Normal Node", "foo bar FooBar", undef,
                       { formatter => "normal" } )
        or die "Can't write node";
    $wiki->write_node( "UC Node", "foo bar", undef, { formatter => "uc" } )
        or die "Can't write node";
    $wiki->write_node( "Other Node", "foo bar" )
        or die "Can't write node";

    my %data1 = $wiki->retrieve_node( "Normal Node" );
    my $output1 = $wiki->format( $data1{content}, $data1{metadata} );
    like( $output1,
          qr|\Q<p>foo bar <a href="wiki.cgi?node=FooBar">FooBar</a></p>|,
          "'normal' node formatted as expected" );

    my %data2 = $wiki->retrieve_node( "UC Node" );
    my $output2 = $wiki->format( $data2{content}, $data2{metadata} );
    like( $output2, qr|FOO BAR|, "'uc' node formatted as expected" );

    my %data3 = $wiki->retrieve_node( "Other Node" );
    my $output3 = $wiki->format( $data3{content}, $data3{metadata} );
    like( $output3, qr|foo bar XXXX|, "default node formatted as expected" );

    # Now test we get a sensible default _DEFAULT.
    $formatter = Wiki::Toolkit::Formatter::Multiple->new( uc => $uc_fmtr );
    $wiki = Wiki::Toolkit->new( store => $store, formatter => $formatter );
    my %data4 = $wiki->retrieve_node( "Other Node" );
    my $output4 = $wiki->format( $data4{content}, $data4{metadata} );
    like( $output4, qr|<p>\s*foo bar\s*</p>|, "default _DEFAULT as expected" );

    ok( $formatter->can("find_internal_links"),
        "formatter can find_internal_links" );
    my @links = $formatter->find_internal_links( $data1{content},
                                                 $data1{metadata} );
    is_deeply(\@links, [ 'FooBar' ], "links are correct");
} # end of SKIP

package Local::Test::Formatter::UC;
sub new { return bless {}, shift; }
sub format { return uc( $_[1] ); }

package Local::Test::Formatter::Append;
sub new { return bless {}, shift; }
sub format { return $_[1] . " XXXX"; }
" XXXX"; } ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������Wiki-Toolkit-0.83/t/299_feed_rss_gen_node_dist_feed.t�����������������������������������������������0000644�0001750�0001750�00000007767�12243757607�022407� 0����������������������������������������������������������������������������������������������������ustar �vagrant�������������������������vagrant����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������use strict; use Wiki::Toolkit::TestConfig::Utilities; use Wiki::Toolkit; use URI::Escape; # Note - update the count in the skip block to match the number here # we would put the number in a variable, but that doesn't seem to work use Test::More tests => (3 + 18 * $Wiki::Toolkit::TestConfig::Utilities::num_stores); use_ok( "Wiki::Toolkit::Feed::RSS" ); eval { my $rss = Wiki::Toolkit::Feed::RSS->new; }; ok( $@, "new croaks if no wiki object supplied" ); eval { my $rss = Wiki::Toolkit::Feed::RSS->new( wiki => "foo" ); }; ok( $@, "new croaks if something that isn't a wiki object supplied" ); my %stores = Wiki::Toolkit::TestConfig::Utilities->stores; my ($store_name, $store); while ( ($store_name, $store) = each %stores ) { SKIP: { skip "$store_name storage backend not configured for testing", 18 unless $store; print "#\n##### TEST CONFIG: Store: $store_name\n#\n"; my $wiki = Wiki::Toolkit->new( store => $store ); my %default_config = ( wiki => $wiki, site_name => "Wiki::Toolkit Test Site", make_node_url => sub { my $id = uri_escape($_[0]); my $version = $_[1] || ''; $version = uri_escape($version) if $version; "http://example.com/?id=$id;version=$version"; }, recent_changes_link => "http://example.com/recentchanges" ); my $rss = eval { Wiki::Toolkit::Feed::RSS->new( %default_config, site_url => "http://example.com/kakeswiki/" ); }; is( $@, "", "'new' doesn't croak if wiki object and mandatory parameters supplied" ); isa_ok( $rss, "Wiki::Toolkit::Feed::RSS" ); my $feed = eval { $rss->generate_node_name_distance_feed; }; is( $@, "", "->generate_node_name_distance_feed doesn't croak" ); # Should be empty to start with is( 1, $feed =~ /\<items\>\s+\<rdf:Seq\>\s*\<\/rdf:Seq\>\s*\<\/items\>/s, "empty list with no node name" ); # Should be RSS though like( $feed, qr/<rdf:RDF/, "Is RSS" ); # Now retry with a single node my @nodes = ( {name=>'Test Node 1'} ); $feed = eval { $rss->generate_node_name_distance_feed('12345',@nodes); }; is( $@, "", "->generate_node_name_distance_feed doesn't croak with a single node" ); # Should only have it once my @items = $feed =~ /(\<\/item\>)/g; is( scalar @items, 1, "Only found it once" ); # And should have the name like( $feed, qr|<title>Test Node 1|, "Found right node" ); # Now try again, with two nodes, one with distances @nodes = ( {name=>'Test Node 1',distance=>'2 miles'}, {name=>'Old Node'} ); $feed = eval { $rss->generate_node_name_distance_feed('12345',@nodes); }; is( $@, "", "->generate_node_name_distance_feed doesn't croak with distances" ); # Check we found two nodes @items = $feed =~ 
        is( scalar @items, 2, "Found two nodes" );

        # Both with the right name.
        my @items_a = $feed =~ /(<title>Test Node 1<\/title>)/g;
        my @items_b = $feed =~ /(<title>Old Node<\/title>)/g;
        is( scalar @items_a, 1, "Had the right name" );
        is( scalar @items_b, 1, "Had the right name" );

        # And only one had the distance.
        @items = $feed =~ /(<space:distance>)/g;
        is( scalar @items, 1, "Only had distance once" );

        # Now with all the geo bits.
        @nodes = ( {name=>'Test Node 1',distance=>'2 miles',
                    latitude=>'1.23',longitude=>'-1.33',
                    os_x=>'2345',os_y=>'5678'},
                   {name=>'Old Node'} );
        $feed = eval { $rss->generate_node_name_distance_feed('12345',@nodes); };
        is( $@, "",
            "->generate_node_name_distance_feed doesn't croak with full geo" );
        like( $feed, qr/space:os_x/, "Had os_x" );
        like( $feed, qr/space:os_y/, "Had os_y" );
        like( $feed, qr/geo:lat/, "Had latitude" );
        like( $feed, qr/geo:long/, "Had longitude" );
    }
}
Wiki-Toolkit-0.83/t/703_search_weightings.t0000644000175000017500000000554612243757607020415 0ustar vagrantvagrantuse strict;
use Wiki::Toolkit::TestLib;
use Test::More;

my $num_tests = 3;
if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) {
    plan skip_all => "no backends configured";
} else {
    plan tests => ( $num_tests * scalar @Wiki::Toolkit::TestLib::wiki_info );
}

my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;

while ( my $wiki = $iterator->new_wiki ) {
    SKIP: {
        print "# Store is: " . ( ref $wiki->store ) . "\n";
        print "# Search is: " . ( ref $wiki->search_obj ) . "\n";
        skip "Tests only relevant to Lucy search backend", $num_tests
            unless ( ref $wiki->search_obj
                 && $wiki->search_obj->isa("Wiki::Toolkit::Search::Lucy"));

        # Write data without title weighting, store the search scores.
        write_data( $wiki );
        my %results = $wiki->search_nodes( "putney tandoori" );
        my $orig_putney = $results{"Putney"};
        my $orig_tandoori = $results{"Putney Tandoori"};
        print "# Putney score: $orig_putney\n";
        print "# Putney Tandoori score: $orig_tandoori\n";

        # Clear database, set up a new search backend with title
        # weighting, write data again and check search scores.
        $wiki->delete_node( "Putney" ) or die "Couldn't delete Putney";
        $wiki->delete_node( "Putney Tandoori" )
            or die "Couldn't delete Putney Tandoori";

        # Copied from Wiki::Toolkit::TestLib.
        require Wiki::Toolkit::Search::Lucy;
        require File::Path;
        my $dir = "t/lucy";
        File::Path::rmtree( $dir, 0, 1 ); # 0 = verbose, 1 = safe
        mkdir $dir or die $!;
        my $new_search = Wiki::Toolkit::Search::Lucy->new(
            path => $dir,
            metadata_fields => [ "address", "category", "locale" ],
            boost => { title => 5 } );
        my $new_wiki = Wiki::Toolkit->new( store  => $wiki->store,
                                           search => $new_search );
        write_data( $new_wiki );
        %results = $new_wiki->search_nodes( "putney tandoori" );
        my $new_putney = $results{"Putney"};
        my $new_tandoori = $results{"Putney Tandoori"};
        print "# New Putney score: $new_putney\n";
        print "# New Putney Tandoori score: $new_tandoori\n";
        ok( $new_putney > $orig_putney,
            "Lucy title score boosting works for single word" );
        ok( $new_tandoori > $orig_tandoori, "...and for two words" );
        ok( $results{"Putney Tandoori"} > $results{"Putney"},
            "We can make sure that words in title score higher" );
    }
}

sub write_data {
    my $wiki = shift;
    $wiki->write_node( "Putney Tandoori", "Indian food", undef,
                       { address => "London Road" } )
        or die "Couldn't write node";
    $wiki->write_node( "Putney", "There is a tandoori restaurant here",
                       undef, { locale => "London" } )
        or die "Couldn't write node";
}
Wiki-Toolkit-0.83/t/702_metadata_search.t0000644000175000017500000000767312243757607020033 0ustar vagrantvagrantuse strict;
use Wiki::Toolkit::TestLib;
use Test::More;

# Test for search backends that support metadata indexing (currently
# just Lucy). Note that Wiki::Toolkit::TestLib sets up three indexed
# metadata fields in the Lucy search - address, category, and locale.

my $num_tests = 11;
if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) {
    plan skip_all => "no backends configured";
} else {
    plan tests => ( $num_tests * scalar @Wiki::Toolkit::TestLib::wiki_info );
}

my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;

while ( my $wiki = $iterator->new_wiki ) {
    SKIP: {
        print "# Store is: " . ( ref $wiki->store ) . "\n";
        print "# Search is: " . ( ref $wiki->search_obj ) . "\n";
        skip "No search configured for this backend", $num_tests
            unless ref $wiki->search_obj;
        skip "Metadata indexing not supported by this backend", $num_tests
            unless $wiki->search_obj->supports_metadata_indexing;

        # Put some test data in.
        $wiki->write_node( "Red Lion", "A drinking establishment", undef,
                           { category    => [ "Pubs", "Real Ale" ],
                             locale      => [ "London", "Soho" ],
                             not_indexed => "wombats" } )
            or die "Couldn't write node: Red Lion";
        $wiki->write_node( "Wiki Etiquette", "Be excellent to each other" )
            or die "Couldn't write node: Wiki Etiquette";

        my %results = $wiki->search_nodes( "etiquette" );
        ok( defined $results{"Wiki Etiquette"},
            "Search with metadata indexing set up can find things with no metadata" );

        %results = $wiki->search_nodes( "real ale" );
        ok( defined $results{"Red Lion"}, "Search finds things in metadata" );

        %results = $wiki->search_nodes( "london pubs" );
        ok( defined $results{"Red Lion"},
            "Search finds things partly in one metadata field and partly in another" );
"metadata field and partly in another" ); %results = $wiki->search_nodes( "soho drinking" ); ok( defined $results{"Red Lion"}, "Search finds things partly in " . "content and partly in metadata" ); %results = $wiki->search_nodes( "red lion london" ); ok( defined $results{"Red Lion"}, "Search finds things partly in " . "title and partly in metadata" ); %results = $wiki->search_nodes( "wombats" ); ok( !defined $results{"Red Lion"}, "Search ignores metadata fields " . "that it's not been told to index" ); # Write a new version with different metadata, make sure the old data # is removed and the new data is picked up. my %data = $wiki->retrieve_node( "Red Lion" ); my $checksum = $data{checksum}; $wiki->write_node( "Red Lion", "A drinking establishment", $data{checksum}, { category => [ "Bars", "Cocktails" ], locale => [ "Oxford", "Cowley Road" ] } ) or die "Couldn't write node"; %results = $wiki->search_nodes( "real ale" ); ok( !defined $results{"Red Lion"}, "Search doesn't look at old versions of metadata" ); %results = $wiki->search_nodes( "cocktails" ); ok( defined $results{"Red Lion"}, "...but it does look at the new versions" ); # Delete the new version, check the old data is now picked up. $wiki->delete_node( name => "Red Lion", version => 2 ); %results = $wiki->search_nodes( "real ale" ); ok( defined $results{"Red Lion"}, "Search picks up most recent metadata when one version deleted" ); %results = $wiki->search_nodes( "cocktails" ); ok( !defined $results{"Red Lion"}, "...and ignores the deleted stuff" ); # Delete node entirely, make sure it doesn't get picked up. $wiki->delete_node( name => "Red Lion" ); %results = $wiki->search_nodes( "real ale" ); ok( !defined $results{"Red Lion"}, "Search ignores metadata of deleted nodes" ); } } ���������������������������������������������������������������������Wiki-Toolkit-0.83/t/400_upgrade.t�������������������������������������������������������������������0000644�0001750�0001750�00000011545�12243757607�016345� 0����������������������������������������������������������������������������������������������������ustar �vagrant�������������������������vagrant����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������use 5.006; use strict; use warnings; use Test::More; use Wiki::Toolkit; use Wiki::Toolkit::TestLib; use Wiki::Toolkit::Setup::Database; # XXX needs to be more exhaustive my $test_sql = { 8 => [ qq| INSERT INTO node VALUES (1, 'Test node 1', 1, 'Some content', 'now')|, qq| INSERT INTO node VALUES (2, 'Test node 2', 1, 'More content', 'now')|, qq| INSERT INTO content VALUES (1, 1, 'Some content', 'now', 'no comment')|, qq| INSERT INTO content VALUES (2, 1, 'More content', 'now', 'no comment')|, qq| INSERT INTO metadata VALUES (1, 1, 'foo', 'bar')|, qq| INSERT INTO metadata VALUES (2, 1, 'baz', 'quux')| ], 9 => [ qq| INSERT INTO node (id, name, version, text, modified) VALUES (1, 'Test node 1', 1, 'Some content', 'now')|, qq| INSERT INTO node (id, name, version, text, modified) VALUES (2, 'Test node 2', 1, 'More content', 'now')|, qq| INSERT INTO content (node_id, version, text, modified, comment) VALUES (1, 1, 'Some content', 'now', 'no comment')|, qq| INSERT INTO content (node_id, version, text, modified, comment) VALUES (2, 1, 'More content', 'now', 'no comment')|, qq| INSERT INTO metadata VALUES (1, 1, 'foo', 'bar')|, qq| INSERT INTO metadata VALUES (2, 1, 'baz', 'quux')| ], }; my 
my @configured_databases = $iterator->configured_databases;
my @schemas_to_test;

use Wiki::Toolkit::Setup::SQLite;

my $num_mysql_only_tests = 0;
my @mysql_databases;
foreach my $db (@configured_databases) {
    my $setup_class = $db->{setup_class};
    eval "require $setup_class";
    my $current_schema;
    {
        no strict 'refs';
        $current_schema = eval ${$setup_class . '::SCHEMA_VERSION'};
    }
    foreach my $schema (@Wiki::Toolkit::Setup::Database::SUPPORTED_SCHEMAS) {
        push @schemas_to_test, $schema if $schema < $current_schema;
    }
    if ( $db->{dsn} =~ /mysql/i ) {
        $num_mysql_only_tests = 2;
        push @mysql_databases, $db;
    }
}

my $num_tests = (scalar @schemas_to_test * scalar @configured_databases * 2)
                + $num_mysql_only_tests;
if ( $num_tests == 0 ) {
    plan skip_all => "no backends configured";
} else {
    plan tests => $num_tests;
}

foreach my $database (@configured_databases) {
    my $setup_class = $database->{setup_class};
    my $current_schema;
    {
        no strict 'refs';
        $current_schema = eval ${$setup_class . '::SCHEMA_VERSION'};
    }

    foreach my $schema (@schemas_to_test) {
        # Set up database with old schema
        my $params = $database->{params};
        $params->{wanted_schema} = $schema;
        {
            no strict 'refs';
            eval &{$setup_class . '::cleardb'} ( $params );
            eval &{$setup_class . '::setup'} ( $params );
        }

        my $class = $database->{class};
        eval "require $class";
        my $dsn = $database->{dsn};
        my $dbh = DBI->connect($dsn, $params->{dbuser}, $params->{dbpass});
        foreach my $sql (@{$test_sql->{$schema}}) {
            $dbh->do($sql);
        }

        # Upgrade to current schema
        delete $params->{wanted_schema};
        {
            no strict 'refs';
            eval &{$setup_class . '::setup'} ( $params );
        }

        # Test the data looks sane
        my $store = $class->new( %{$params} );
        my %wiki_config = ( store => $store );
        my $wiki = Wiki::Toolkit->new( %wiki_config );
        is( $wiki->retrieve_node("Test node 1"), "Some content",
            "can retrieve first test node after $schema to $current_schema" );
        is( $wiki->retrieve_node("Test node 2"), "More content",
            "can retrieve second test node after $schema to $current_schema" );
    }
}

if ( $num_mysql_only_tests ) {
    foreach my $database ( @mysql_databases ) {
        my $setup_class = $database->{setup_class};
        my $current_schema;
        {
            no strict 'refs';
            $current_schema = eval ${$setup_class . '::SCHEMA_VERSION'};
        }

        # Set up database with old schema
        my $params = $database->{params};
        $params->{wanted_schema} = 9;
        {
            no strict 'refs';
            eval &{$setup_class . '::cleardb'} ( $params );
            eval &{$setup_class . '::setup'} ( $params );
        }
        my $class = $database->{class};
        eval "require $class";
        my $dsn = $database->{dsn};
        my $dbh = DBI->connect($dsn, $params->{dbuser}, $params->{dbpass});

        # Manually create index that the upgrade also wants to create
        eval {
            $dbh->do('CREATE UNIQUE INDEX node_name ON node (name);')
                or die $dbh->errstr
        };
        is( $@, '', "Manually creating confusing index didn't die" );

        # Now upgrade
        delete $params->{wanted_schema};
        {
            no strict 'refs';
            eval &{$setup_class . '::setup'} ( $params );
            is( $@, '',
                "Upgrade didn't die even though node_name index had been created manually" );
        }
    }
}
Wiki-Toolkit-0.83/t/152_pre_plugin_approval.t0000644000175000017500000000651712243757607020775 0ustar vagrantvagrantuse strict;
use Wiki::Toolkit::TestLib;
use Test::More;

if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) {
    plan skip_all => "no backends configured";
} else {
    plan tests => ( 12 * scalar @Wiki::Toolkit::TestLib::wiki_info );
}

my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;

while ( my $wiki = $iterator->new_wiki ) {
    SKIP: {
        eval { require Test::MockObject; };
        skip "Test::MockObject not installed", 12 if $@;

        my $null_plugin = Test::MockObject->new;
        my $plugin = Test::MockObject->new;
        $plugin->mock( "on_register",
            sub {
                my $self = shift;
                $self->{__registered} = 1;
                $self->{__pre_moderate_called} = 0;
                $self->{__pre_rename_called} = 0;
                $self->{__pre_write_called} = 0;
            }
        );
        eval { $wiki->register_plugin( plugin => $plugin ); };
        ok( $plugin->{__registered}, "->on_register method called" );

        # ===============================================================
        # Test the pre moderation plugin not allowing moderation
        $plugin->mock( "pre_moderate",
            sub {
                my ($self, %args) = @_;
                $self->{__pre_moderate_called}++;
                return -1;
            }
        );

        # Add a node
        $wiki->write_node( "Test Node 3", "bar", undef, undef, 1 )
            or die "Can't write first version node";

        # Try to Moderate
        my $ok = $wiki->moderate_node( name=>"Test Node 3", version=>1 )
            or die "Can't moderate node";
        is($plugin->{__pre_moderate_called}, 1, "Plugin was called");
        is($ok, -1, "Wasn't allowed to moderate the node");

        # Check it really wasn't
        my %node = $wiki->retrieve_node("Test Node 3");
        is($node{'version'}, 1, "Node correctly retrieved");
        is($node{'moderated'}, 0, "Still not moderated");

        # ===============================================================
        # Test the pre rename plugin not allowing rename
        $plugin->mock( "pre_rename",
            sub {
                my ($self, %args) = @_;
                $self->{__pre_rename_called}++;
                return -1;
            }
        );

        # Add another node
        $wiki->write_node( "Test Node 2", "bar" )
            or die "Can't write first version node";

        # Try to Rename
        $ok = $wiki->rename_node( old_name=>"Test Node 2", new_name=>"ren" )
            or die "Can't rename node";
        is($plugin->{__pre_rename_called}, 1, "Plugin was called");
        is($ok, -1, "Wasn't allowed to rename the node");

        # Check it really wasn't
        %node = $wiki->retrieve_node("Test Node 2");
        is($node{'version'}, 1, "Node correctly retrieved");

        # ===============================================================
        # Test the pre write plugin not allowing write
        $plugin->mock( "pre_write",
            sub {
                my ($self, %args) = @_;
                $self->{__pre_write_called}++;
                return -1;
            }
        );

        # Try to Add
        $ok = $wiki->write_node( "Test Node 4", "bar" )
            or die "Can't add node";
        is($plugin->{__pre_write_called}, 1, "Plugin was called");
        is($ok, -1, "Wasn't allowed to write the node");

        # Check it really wasn't
        %node = $wiki->retrieve_node("Test Node 4");
        is($node{'version'}, 0, "Node wasn't added");
"Node wasn't added"); is($node{'content'}, '', "Node wasn't added"); } } ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������Wiki-Toolkit-0.83/t/018_rc_multiple_metadata_criteria.t���������������������������������������������0000644�0001750�0001750�00000005576�12243757607�022773� 0����������������������������������������������������������������������������������������������������ustar �vagrant�������������������������vagrant����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������use strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 6 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { $wiki->write_node( "Test 1", "test", undef, { username => "Earle", edit_type => "Minor tidying", } ); $wiki->write_node( "Test 2", "test", undef, { username => "Kake", edit_type => "Minor tidying", } ); $wiki->write_node( "Test 3", "test", undef, { username => "Earle", } ); my @nodes = $wiki->list_recent_changes( days => 7, metadata_was => { username => "Earle", edit_type => "Minor tidying" } ); my @names = sort map { $_->{name} } @nodes; is_deeply( \@names, [ "Test 1" ], "can supply multiple criteria to metadata_was" ); @nodes = $wiki->list_recent_changes( days => 7, metadata_wasnt => { username => "Earle", edit_type => "Minor tidying" } ); @names = sort map { $_->{name} } @nodes; is_deeply( \@names, [ "Test 2", "Test 3" ], "can supply multiple criteria to metadata_wasnt" ); @nodes = $wiki->list_recent_changes( days => 7, metadata_is => { username => "Earle", edit_type => "Minor tidying" } ); @names = sort map { $_->{name} } @nodes; is_deeply( \@names, [ "Test 1" ], "can supply multiple criteria to metadata_is" ); @nodes = $wiki->list_recent_changes( days => 7, metadata_isnt => { username => "Earle", edit_type => "Minor tidying" } ); @names = sort map { $_->{name} } @nodes; is_deeply( \@names, [ "Test 2", "Test 3" ], "can supply multiple criteria to metadata_isnt" ); @nodes = $wiki->list_recent_changes( days => 7, metadata_was => { username => "Earle" }, metadata_wasnt => { edit_type => "Minor tidying" }, ); @names = sort map { $_->{name} } @nodes; is_deeply( \@names, [ "Test 3" ], "can supply both metadata_was and metadata_wasnt" ); @nodes = $wiki->list_recent_changes( days => 7, metadata_is => { username => "Earle" }, metadata_isnt => { edit_type => "Minor tidying" }, ); @names = sort map { $_->{name} } @nodes; is_deeply( \@names, [ "Test 3" ], "can supply both metadata_is and metadata_isnt" ); } ����������������������������������������������������������������������������������������������������������������������������������Wiki-Toolkit-0.83/t/701_lucy_setup.t����������������������������������������������������������������0000644�0001750�0001750�00000001103�12243757607�017103� 0����������������������������������������������������������������������������������������������������ustar 
use Test::More tests => 2;

eval { require Lucy; };

SKIP: {
    skip "Lucy not installed", 2 if $@;

    require Wiki::Toolkit::Search::Lucy;
    my $search = Wiki::Toolkit::Search::Lucy->new( path => "t/lucy" );
    isa_ok( $search, "Wiki::Toolkit::Search::Lucy",
            "Lucy search with no metadata indexing" );
    $search = Wiki::Toolkit::Search::Lucy->new(
        path => "t/lucy",
        metadata_fields => [ "category", "locale", "address" ] );
    isa_ok( $search, "Wiki::Toolkit::Search::Lucy",
            "Lucy search with metadata indexing" );
}
Wiki-Toolkit-0.83/t/021_moderation.t0000644000175000017500000002533512243757607017056 0ustar vagrantvagrantuse strict;
use Wiki::Toolkit::TestLib;
use Test::More;
use Time::Piece;

if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) {
    plan skip_all => "no backends configured";
} else {
    plan tests => ( 89 * scalar @Wiki::Toolkit::TestLib::wiki_info );
}

my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;

while ( my $wiki = $iterator->new_wiki ) {
    # Put some test data in.
    $wiki->write_node( "Home", "This is the home node." )
        or die "Couldn't write node";
) or die "Couldn't write node"; # Now add another version my %node_data = $wiki->retrieve_node("Home"); ok( $wiki->write_node("Home", "xx", $node_data{checksum}), "write_node succeeds when node matches checksum" ); # Fetch it with and without specifying a version my %new_node_data = $wiki->retrieve_node("Home"); my %new_node_data_v = $wiki->retrieve_node(name=>"Home", version=>$new_node_data{version}); print "# version now: [$new_node_data{version}]\n"; is( $new_node_data{version}, $node_data{version} + 1, "...and the version number is updated on successful writing" ); is( $new_node_data{version}, $new_node_data_v{version}, "...and the version number is updated on successful writing" ); # Ensure that the moderation required flag isn't set on the node is( $node_data{node_requires_moderation}, '0', "New node correctly doesn't require moderation" ); is( $new_node_data{node_requires_moderation}, '0', "Nor does it require moderation after being updated" ); is( $new_node_data_v{node_requires_moderation}, '0', "Nor does it require moderation after being updated via version" ); # Ensure the moderated flag is set on the two entries in content is( $node_data{moderated}, '1', "No moderation required, so is moderated" ); is( $new_node_data{moderated}, '1', "No moderation required, so is moderated" ); is( $new_node_data_v{moderated}, '1', "No moderation required, so is moderated" ); # Now add a new node requiring moderation $wiki->write_node( "Moderation", "This is the moderated node.", undef, undef, 1); my %mn_data = $wiki->retrieve_node("Moderation"); is( $mn_data{moderated}, '0', "First version shouldn't be moderated" ); is( $mn_data{node_requires_moderation}, '1', "New node needs moderation" ); # Shouldn't have the text if fetched without the version is( $mn_data{content}, "=== This page has yet to be moderated. ===", "First version isn't moderated" ); # If we fetch with a version, we should get the text my %mnv_data = $wiki->retrieve_node(name=>"Moderation", version=>1); is( $mnv_data{content}, "This is the moderated node.", "Should get text if a version is given" ); is( $mnv_data{moderated}, '0', "First version shouldn't be moderated" ); is( $mnv_data{node_requires_moderation}, '1', "New node needs moderation" ); # Update it my $nmn_ver = $wiki->write_node("Moderation", "yy", $mn_data{checksum}); ok( $nmn_ver, "Can update where moderation is enabled" ); my %nmn_data = $wiki->retrieve_node("Moderation"); my %nmnv_data = $wiki->retrieve_node(name=>"Moderation", version=>2); is( $nmn_data{version}, '1', "Latest moderated version" ); is( $nmnv_data{version}, '2', "Latest unmoderated version" ); is( $nmn_ver, '2', "Latest (unmoderated) version returned by write_node" ); # Check content was updated right is( $nmnv_data{content}, "yy", "Version 2 text"); # Should still be the same as before (unmoderated v1) is_deeply(\%mn_data,\%nmn_data, "Should still be the unmod first ver"); is( $nmn_data{content}, "=== This page has yet to be moderated. 
===", "No version is moderated" ); # Check node requires it still is( $nmnv_data{node_requires_moderation}, '1', "New node needs moderation" ); # Check content not moderated is( $nmnv_data{moderated}, '0', "Second version shouldn't be moderated" ); # Add the third entry ok( $wiki->write_node("Moderation", "foo foo", $nmn_data{checksum}), "Can update where moderation is enabled" ); my %mn3_data = $wiki->retrieve_node("Moderation"); is( $mn3_data{node_requires_moderation}, '1', "New node needs moderation" ); is( $mn3_data{moderated}, '0', "Third version shouldn't be moderated" ); is( $mn3_data{content}, "=== This page has yet to be moderated. ===", "No version is moderated" ); # Moderate the second entry ok( $wiki->moderate_node("Moderation", 2), "Can't moderate 2nd version" ); %node_data = $wiki->retrieve_node(name=>"Moderation"); my %mmn2 = $wiki->retrieve_node(name=>"Moderation",version=>2); # Node should now hold 2nd version is( $mmn2{moderated}, '1', "Second version should now be moderated" ); is( $mmn2{node_requires_moderation}, '1', "Still requires moderation" ); is( $node_data{moderated}, '1', "Current version should now be moderated" ); is( $node_data{node_requires_moderation}, '1', "Still requires moderation" ); is( $node_data{content}, "yy", "Node should be second version" ); is( $node_data{version}, "2", "Node should be second version" ); # Moderate the first entry ok( $wiki->moderate_node(name=>"Moderation", version=>1), "Can't moderate 1st version" ); %node_data = $wiki->retrieve_node(name=>"Moderation"); my %mmn1 = $wiki->retrieve_node(name=>"Moderation",version=>1); # First entry should now be moderated, but node should not be changed is( $mmn1{moderated}, '1', "First version should now be moderated" ); is( $mmn1{node_requires_moderation}, '1', "Still requires moderation" ); is( $node_data{moderated}, '1', "Current version should still be moderated" ); is( $node_data{node_requires_moderation}, '1', "Still requires moderation" ); is( $node_data{content}, "yy", "Node should still be second version" ); is( $node_data{version}, "2", "Node should still be second version" ); # Moderate the third entry ok( $wiki->moderate_node(name=>"Moderation", version=>3), "Can't moderate 3rd version" ); %node_data = $wiki->retrieve_node(name=>"Moderation"); my %mmn3 = $wiki->retrieve_node(name=>"Moderation",version=>3); # Third entry should now be moderated, and node should have been changed is( $mmn3{moderated}, '1', "Third version should now be moderated" ); is( $mmn3{node_requires_moderation}, '1', "Still requires moderation" ); is( $node_data{moderated}, '1', "Current version should still be moderated" ); is( $node_data{node_requires_moderation}, '1', "Still requires moderation" ); is( $node_data{content}, "foo foo", "Node should be third version" ); is( $node_data{version}, "3", "Node should be third version" ); # Add a 4th entry ok( $wiki->write_node("Moderation", "bar bar", $node_data{checksum}), "Can update where moderation is enabled" ); %node_data = $wiki->retrieve_node("Moderation"); my %mn4_data = $wiki->retrieve_node(name=>"Moderation", version=>4); # Node should still be third entry, with 4th needing moderation is( $node_data{moderated}, '1', "Current version should still be moderated" ); is( $node_data{node_requires_moderation}, '1', "Still requires moderation" ); is( $node_data{content}, "foo foo", "Node should still be third version" ); is( $node_data{version}, "3", "Node should still be third version" ); is( $mn4_data{moderated}, '0', "New version shouldn't be moderated" ); is( 
    is( $mn4_data{node_requires_moderation}, '1', "Still requires moderation" );
    is( $mn4_data{content}, "bar bar", "Content should have fourth version" );
    is( $mn4_data{version}, "4", "Content should have fourth version" );

    # Add the 5th entry, and moderate it
    ok( $wiki->write_node( "Moderation", "I shall be deleted",
                           $node_data{checksum} ),
        "Can update where moderation is enabled" );
    %node_data = $wiki->retrieve_node("Moderation");

    # Moderate it
    ok( $wiki->moderate_node( name => "Moderation", version => 5 ),
        "Can moderate 5th version" );
    my %mn5_data = $wiki->retrieve_node( name => "Moderation", version => 5 );
    is( $mn5_data{moderated}, '1', "Current version should be moderated" );
    is( $mn5_data{node_requires_moderation}, '1', "Still requires moderation" );
    is( $mn5_data{content}, "I shall be deleted", "Node should be fifth version" );
    is( $mn5_data{version}, "5", "Node should be fifth version" );

    # Delete the 5th entry - should fall back to the 3rd
    is( $wiki->delete_node( name => "Moderation", version => 5 ), 1,
        "Can delete 5th version" );
    %node_data = $wiki->retrieve_node("Moderation");
    is( $node_data{moderated}, '1', "Current version should still be moderated" );
    is( $node_data{node_requires_moderation}, '1', "Still requires moderation" );
    is( $node_data{content}, "foo foo", "Node should now be third version" );
    is( $node_data{version}, "3", "Node should now be third version" );

    # Delete the 4th version; the node should remain at the 3rd version
    # Now mark this node as not needing moderation, and add a new version
    is( $wiki->set_node_moderation( name => "Moderation", required => 0 ), 1,
        "Can set as not needing moderation" );
    %node_data = $wiki->retrieve_node("Moderation");
    is( $node_data{moderated}, '1', "Current version should still be moderated" );
    is( $node_data{node_requires_moderation}, '0', "Doesn't require moderation" );
    is( $node_data{content}, "foo foo", "Node should now be third version" );
    is( $node_data{version}, "3", "Node should now be third version" );

    # Check the new version is now not moderation-gated
    ok( $wiki->write_node( "Moderation", "No moderation", $node_data{checksum} ),
        "Can update where moderation is disabled again" );
    my %mn5b_data = $wiki->retrieve_node( name => "Moderation", version => 5 );
    %node_data = $wiki->retrieve_node("Moderation");
    is_deeply( \%mn5b_data, \%node_data, "Version 5 (again) is the latest" );
    is( $node_data{moderated}, '1', "Current version should be moderated" );
    is( $node_data{node_requires_moderation}, '0', "Doesn't require moderation" );
    is( $node_data{content}, "No moderation", "Node should now be fifth version" );
    is( $node_data{version}, "5", "Node should now be fifth version" );

    # Now turn moderation back on
    is( $wiki->set_node_moderation( name => "Moderation", required => 1 ), 1,
        "Can set as needing moderation" );
    %node_data = $wiki->retrieve_node("Moderation");
    is( $node_data{moderated}, '1', "Current version should be moderated" );
    is( $node_data{node_requires_moderation}, '1', "Now requires moderation" );
    is( $node_data{content}, "No moderation", "Node should now be fifth version" );
    is( $node_data{version}, "5", "Node should now be fifth version" );

    # Test that the shorthand node_required_moderation behaves
    is( $wiki->node_required_moderation("MADE_UP"), 0,
        "node_required_moderation behaves" );
    is( $wiki->node_required_moderation("Home"), 0,
        "node_required_moderation behaves" );
    is( $wiki->node_required_moderation("Moderation"), 1,
        "node_required_moderation behaves" );

    # Check that we get 0, not 1, back when trying to set moderation
    # on a node that doesn't exist
$wiki->set_node_moderation(name=>"NODE THAT DOES NOT EXIST", required=>1), "returns 0 if you set moderation on an unknown node" ); } ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������Wiki-Toolkit-0.83/t/020_node_exists_case_insensitive.t����������������������������������������������0000644�0001750�0001750�00000001654�12243757607�022653� 0����������������������������������������������������������������������������������������������������ustar �vagrant�������������������������vagrant����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������use strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 3 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { print "# Store: " . (ref $wiki->store) . "\n"; # Write data. $wiki->write_node( "Node 1", "foo" ) or die "Can't write node"; # Test old syntax. ok( $wiki->node_exists( "Node 1" ), "old calling syntax for ->node_exists still works" ); # Now test case-insensitivity works on all backends. ok( $wiki->node_exists( name => "node 1", ignore_case => 1 ), "->node_exists OK when ignore_case is true, name lowercase" ); ok( $wiki->node_exists( name => "NODE 1", ignore_case => 1 ), "->node_exists OK when ignore_case is true, name uppercase" ); } ������������������������������������������������������������������������������������Wiki-Toolkit-0.83/t/014_advanced_metadata.t���������������������������������������������������������0000644�0001750�0001750�00000007212�12243757607�020320� 0����������������������������������������������������������������������������������������������������ustar �vagrant�������������������������vagrant����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������use strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 6 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Put some test data in. 
$wiki->write_node( "Hammersmith Station", "a station", undef, { tube_data => { line => "Piccadilly", direction => "Eastbound", next_station => "Baron's Court Station" } } ); my %node_data = $wiki->retrieve_node( "Hammersmith Station" ); my %metadata = %{ $node_data{metadata} || {} }; ok( !defined $metadata{tube_data}, "hashref metadata not stored directly" ); ok( defined $metadata{__tube_data__checksum}, "checksum stored instead" ); ok( $wiki->write_node( "Hammersmith Station", "a station", $node_data{checksum}, { tube_data => [ { line => "Piccadilly", direction => "Eastbound", next_station => "Baron's Court Station" }, { line => "Piccadilly", direction => "Westbound", next_station => "Acton Town Station" } ] } ), "writing node with metadata succeeds when node checksum fresh" ); ok( !$wiki->write_node( "Hammersmith Station", "a station", $node_data{checksum}, { tube_data => [ { line => "Piccadilly", direction => "Eastbound", next_station => "Baron's Court Station" }, { line => "Piccadilly", direction => "Westbound", next_station => "Acton Town Station" } ] } ), "...but fails when node checksum old and hashref metadata changed"); # Make sure that order doesn't matter in the arrayrefs. %node_data = $wiki->retrieve_node( "Hammersmith Station" ); $wiki->write_node( "Hammersmith Station", "a station", $node_data{checksum}, { tube_data => [ { line => "Piccadilly", direction => "Westbound", next_station => "Acton Town Station" }, { line => "Piccadilly", direction => "Eastbound", next_station => "Baron's Court Station" }, ] } ) or die "Couldn't write node"; ok( $wiki->verify_checksum("Hammersmith Station",$node_data{checksum}), "order within arrayrefs doesn't affect checksum" ); my %node_data_check = $wiki->retrieve_node( "Hammersmith Station" ); my %metadata_check = %{ $node_data_check{metadata} || {} }; is( scalar @{ $metadata_check{__tube_data__checksum} }, 1, "metadata checksum only written once even if multiple entries" ); } ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������Wiki-Toolkit-0.83/t/304_feed_atom_gen_node_dist_feed.t����������������������������������������������0000644�0001750�0001750�00000010007�12243757607�022500� 0����������������������������������������������������������������������������������������������������ustar �vagrant�������������������������vagrant����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������use strict; use Wiki::Toolkit::TestConfig::Utilities; use Wiki::Toolkit; use URI::Escape; # Note - update the count in the skip block to match the number here # we would put the number in a variable, but that doesn't seem to work use Test::More tests => (3 + 18 * $Wiki::Toolkit::TestConfig::Utilities::num_stores); use_ok( "Wiki::Toolkit::Feed::Atom" ); eval { my $rss = Wiki::Toolkit::Feed::Atom->new; }; ok( $@, "new croaks if no wiki object supplied" ); eval { my $rss = Wiki::Toolkit::Feed::Atom->new( wiki => "foo" ); }; ok( $@, "new croaks if something that isn't a wiki object supplied" ); my %stores = Wiki::Toolkit::TestConfig::Utilities->stores; my ($store_name, $store); while ( ($store_name, $store) 
Wiki-Toolkit-0.83/t/304_feed_atom_gen_node_dist_feed.t

use strict;
use Wiki::Toolkit::TestConfig::Utilities;
use Wiki::Toolkit;
use URI::Escape;

# Note - update the count in the skip block to match the number here;
# we would put the number in a variable, but that doesn't seem to work.
use Test::More tests =>
    (3 + 18 * $Wiki::Toolkit::TestConfig::Utilities::num_stores);

use_ok( "Wiki::Toolkit::Feed::Atom" );

eval { my $rss = Wiki::Toolkit::Feed::Atom->new; };
ok( $@, "new croaks if no wiki object supplied" );

eval { my $rss = Wiki::Toolkit::Feed::Atom->new( wiki => "foo" ); };
ok( $@, "new croaks if something that isn't a wiki object supplied" );

my %stores = Wiki::Toolkit::TestConfig::Utilities->stores;

my ($store_name, $store);
while ( ($store_name, $store) = each %stores ) {
    SKIP: {
        skip "$store_name storage backend not configured for testing", 18
            unless $store;

        print "#\n##### TEST CONFIG: Store: $store_name\n#\n";

        my $wiki = Wiki::Toolkit->new( store => $store );

        my %default_config = (
            wiki          => $wiki,
            site_name     => "Wiki::Toolkit Test Site",
            make_node_url => sub {
                my $id = uri_escape($_[0]);
                my $version = $_[1] || '';
                $version = uri_escape($version) if $version;
                "http://example.com/?id=$id;version=$version";
            },
            recent_changes_link => "http://example.com/?RecentChanges",
            atom_link => "http://example.com/?action=rc;format=atom",
        );
        my $rss = eval {
            Wiki::Toolkit::Feed::Atom->new( %default_config,
                site_url => "http://example.com/kakeswiki/" );
        };
        is( $@, "",
            "'new' doesn't croak if wiki object and mandatory parameters supplied" );
        isa_ok( $rss, "Wiki::Toolkit::Feed::Atom" );

        my $feed = eval { $rss->generate_node_name_distance_feed; };
        is( $@, "", "->generate_node_name_distance_feed doesn't croak" );
        # Should be empty to start with
        unlike( $feed, qr/<entry>/, "empty list" );
        # Should be Atom though
        like( $feed, qr/<feed/, "Is Atom" );

        # Now retry with a single node
        my @nodes = ( { name => 'Test Node 1' } );
        $feed = eval { $rss->generate_node_name_distance_feed('12345', @nodes); };
        is( $@, "",
            "->generate_node_name_distance_feed doesn't croak with a single node" );
        # Should only have it once
        my @items = $feed =~ /(\<\/entry\>)/g;
        is( scalar @items, 1, "Only found it once" );
        # And should have the name
        like( $feed, qr|<title>Test Node 1|, "Found right node" );

        # Now try again, with two nodes, one with distances
        @nodes = ( { name => 'Test Node 1', distance => '2 miles' },
                   { name => 'Old Node' } );
        $feed = eval { $rss->generate_node_name_distance_feed('12345', @nodes); };
        is( $@, "",
            "->generate_node_name_distance_feed doesn't croak with distances" );
        # Check we found two nodes
        @items = $feed =~ /(\<\/entry\>)/g;
        is( scalar @items, 2, "Found two nodes" );
        # Both the right name
        my @items_a = $feed =~ /(Test Node 1<\/title>)/g;
        my @items_b = $feed =~ /(<title>Old Node<\/title>)/g;
        is( scalar @items_a, 1, "Had the right name" );
        is( scalar @items_b, 1, "Had the right name" );
        # And only one had the distance
        @items = $feed =~ /(<space:distance>)/g;
        is( scalar @items, 1, "Only had distance once" );

        # Now with all the geo bits
        @nodes = ( { name => 'Test Node 1', distance => '2 miles',
                     latitude => '1.23', longitude => '-1.33',
                     os_x => '2345', os_y => '5678' },
                   { name => 'Old Node' } );
        $feed = eval { $rss->generate_node_name_distance_feed('12345', @nodes); };
        is( $@, "",
            "->generate_node_name_distance_feed doesn't croak with full geo" );
        like( $feed, qr/space:os_x/, "Had os_x" );
        like( $feed, qr/space:os_y/, "Had os_y" );
        like( $feed, qr/geo:lat/, "Had latitude" );
        like( $feed, qr/geo:long/, "Had longitude" );
    }
}
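# --- Usage sketch (not part of the distribution) ---
# A plausible way to serve the distance-ordered Atom feed tested above,
# e.g. for a "nodes near here" page. The shape of the node list follows
# the test; the meaning of the first argument ('12345' in the test) is an
# assumption here, treated as an opaque feed identifier. $wiki and all
# URLs are hypothetical.
use URI::Escape;

my $atom = Wiki::Toolkit::Feed::Atom->new(
    wiki          => $wiki,
    site_name     => "My Wiki",
    site_url      => "http://example.com/wiki/",
    make_node_url => sub { "http://example.com/wiki/?id=" . uri_escape($_[0]) },
    recent_changes_link => "http://example.com/wiki/?RecentChanges",
    atom_link     => "http://example.com/wiki/?action=rc;format=atom",
);
my @nearby = ( { name => "Red Lion", distance => "0.3 miles" } );
print "Content-Type: application/atom+xml\n\n",
      $atom->generate_node_name_distance_feed( "feed-id", @nearby );
# --- end sketch ---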
Wiki-Toolkit-0.83/t/013_fuzzy_title_match.t

use strict;
use Wiki::Toolkit::TestLib;
use Test::More;

if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) {
    plan skip_all => "no backends configured";
} else {
    plan tests => ( 5 * scalar @Wiki::Toolkit::TestLib::wiki_info );
}

my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;

while ( my $wiki = $iterator->new_wiki ) {
    SKIP: {
        my $search = $wiki->search_obj;
        skip "No search backend in this combination", 5 unless $search;
        skip "Search backend doesn't support fuzzy searching", 5
            unless $search->supports_fuzzy_searches;

        # Fuzzy match with differing punctuation.
        $wiki->write_node( "King's Cross St Pancras", "station" )
            or die "Can't write node";
        my %finds = $search->fuzzy_title_match("Kings Cross St. Pancras");
        is_deeply( [ keys %finds ], [ "King's Cross St Pancras" ],
                   "fuzzy_title_match works when punctuation differs" );

        # Fuzzy match when we actually got the string right.
        $wiki->write_node( "Potato", "A delicious vegetable" )
            or die "Can't write node";
        $wiki->write_node( "Patty", "A kind of burger type thing" )
            or die "Can't write node";
        %finds = $search->fuzzy_title_match("Potato");
        is_deeply( [ sort keys %finds ], [ "Patty", "Potato" ],
                   "...returns all things found" );
        ok( $finds{Potato} > $finds{Patty},
            "...and exact match has highest relevance score" );

        # Now try matching indirectly, through the wiki object.
        %finds = eval { $wiki->fuzzy_title_match("kings cross st pancras"); };
        is( $@, "", "fuzzy_title_match works when called on wiki object" );
        is_deeply( [ keys %finds ], [ "King's Cross St Pancras" ],
                   "...and returns the right thing" );
    }
}
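# --- Usage sketch (not part of the distribution) ---
# fuzzy_title_match, as exercised above, works well as a "did you mean?"
# fallback when an exact page lookup fails. The rendering step is left as
# a comment; the Wiki::Toolkit calls mirror the test.
my $title = "Kings Cross St. Pancras";   # what the user typed
if ( $wiki->node_exists($title) ) {
    # render the page as normal...
} elsif ( $wiki->search_obj && $wiki->search_obj->supports_fuzzy_searches ) {
    my %suggestions = $wiki->fuzzy_title_match($title);
    # Sort by relevance score, best match first.
    my @best = sort { $suggestions{$b} <=> $suggestions{$a} } keys %suggestions;
    print "Did you mean: @best\n";
}
# --- end sketch ---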
Wiki-Toolkit-0.83/t/500_pod.t

use strict;
use Test::More;

eval "use Test::Pod 1.00";
plan skip_all => "Test::Pod 1.00 required for testing POD" if $@;

my @poddirs = qw( blib blib/script );
all_pod_files_ok( all_pod_files( @poddirs ) );

Wiki-Toolkit-0.83/t/002_datastore_setup.t

use strict;
use Test::More tests => 27;
use Wiki::Toolkit;
use Wiki::Toolkit::TestConfig;
use DBI;

foreach my $dbtype (qw( MySQL Pg SQLite )) {
    SKIP: {
        skip "$dbtype backend not configured", 8
            unless $Wiki::Toolkit::TestConfig::config{$dbtype}->{dbname};

        my %config = %{ $Wiki::Toolkit::TestConfig::config{$dbtype} };
        my $setup_class = "Wiki::Toolkit::Setup::$dbtype";
        eval "require $setup_class";
        my $store_class = "Wiki::Toolkit::Store::$dbtype";
        eval "require $store_class";

        {
            no strict 'refs';
            my $dsn = $store_class->_dsn( $config{dbname}, $config{dbhost} );
            foreach my $method ( qw( cleardb setup ) ) {
                eval {
                    &{$setup_class . "::" . $method}(
                        @config{ qw( dbname dbuser dbpass dbhost ) } );
                };
                is( $@, "", "${setup_class}::$method doesn't die when called with connection details list" );

                eval { &{$setup_class . "::" . $method}( \%config ); };
                is( $@, "", "${setup_class}::$method doesn't die when called with connection details hashref" );

                eval {
                    my $dbh = DBI->connect( $dsn, @config{ qw( dbuser dbpass ) },
                        { PrintError => 0, RaiseError => 1, AutoCommit => 1 } )
                        or die DBI->errstr;
                    &{$setup_class . "::" . $method}( $dbh );
                    $dbh->disconnect;
                };
                is( $@, "", "${setup_class}::$method doesn't die when called with dbh" );

                eval {
                    my $dbh = DBI->connect( $dsn, @config{ qw( dbuser dbpass ) },
                        { PrintError => 0, RaiseError => 1, AutoCommit => 1 } )
                        or die DBI->errstr;
                    &{$setup_class . "::" . $method}( { dbh => $dbh } );
                    $dbh->disconnect;
                };
                is( $@, "", "${setup_class}::$method doesn't die when called with dbh in hashref" );
            }
        }
    }
}

SKIP: {
    skip "SQLite backend not configured", 3
        unless $Wiki::Toolkit::TestConfig::config{SQLite};

    my @mistakes = <HASH*>;
    is( scalar @mistakes, 0,
        "Wiki::Toolkit::Setup::SQLite doesn't create erroneous files called things like 'HASH(0x80fd394)'" );
    @mistakes = <ARRAY*>;
    is( scalar @mistakes, 0,
        "Wiki::Toolkit::Setup::SQLite doesn't create erroneous files called things like 'ARRAY(0x83563fc)'" );
    @mistakes = <4*>;
    is( scalar @mistakes, 0,
        "Wiki::Toolkit::Setup::SQLite doesn't create erroneous files called '4'" );
}
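# --- Usage sketch (not part of the distribution) ---
# The setup functions exercised above accept several call styles; these
# are equivalent ways to create (or, with cleardb, wipe) the schema.
# Connection details are hypothetical.
use Wiki::Toolkit::Setup::SQLite;
use DBI;

Wiki::Toolkit::Setup::SQLite::setup( "wiki.db" );               # details list
Wiki::Toolkit::Setup::SQLite::setup( { dbname => "wiki.db" } ); # config hashref

my $dbh = DBI->connect( "dbi:SQLite:dbname=wiki.db", "", "" );
Wiki::Toolkit::Setup::SQLite::setup( $dbh );                    # existing handle
Wiki::Toolkit::Setup::SQLite::setup( { dbh => $dbh } );         # handle in hashref
# --- end sketch ---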
) or die "Couldn't write node"; $wiki->write_node( "Everyone's Favourite Hobby", "Performing expert wombat defenestration." ) or die "Couldn't write node"; $wiki->write_node( "001 Defenestration", "Expert advice for all your defenestration needs!") or die "Couldn't write node"; %results = eval { local $SIG{__WARN__} = sub { die $_[0] }; $wiki->search_nodes('home'); }; is( $@, "", "search_nodes doesn't throw warning" ); isnt( scalar keys %results, 0, "...and can find a single word" ); is( scalar keys %results, 2, "...the right number of times" ); is_deeply( [sort keys %results], ["Another Node", "Home"], "...and the hash returned has node names as keys" ); %results = $wiki->search_nodes('expert defenestration'); isnt( scalar keys %results, 0, "...and can find two words on an AND search" ); my %and_results = $wiki->search_nodes('wombat home', 'AND'); if ( scalar keys %and_results ) { print "# " . join( "\n# ", map { "$_: " . $and_results{$_} } keys %and_results ) . "\n"; } is( scalar keys %and_results, 0, "...AND search doesn't find nodes with only one term." ); %results = $wiki->search_nodes('wombat home', 'OR'); isnt( scalar keys %results, 0, "...and the OR search seems to work" ); SKIP: { skip "Search backend doesn't support phrase searches", 2 unless $wiki->supports_phrase_searches; %results=$wiki->search_nodes('expert "wombat defenestration"'); isnt( scalar keys %results, 0, "...and can find a phrase" ); ok( ! defined $results{"001 Defenestration"}, "...and ignores nodes that only have part of the phrase" ); } # Test case-insensitivity. %results = $wiki->search_nodes('performing'); ok( defined $results{"Everyone's Favourite Hobby"}, "a lower-case search finds things defined in mixed case" ); %results = $wiki->search_nodes('WoMbAt'); ok( defined $results{"Everyone's Favourite Hobby"}, "a mixed-case search finds things defined in lower case" ); # Check that titles are searched. %results = $wiki->search_nodes('Another'); ok( defined $results{"Another Node"}, "titles are searched" ); ##### Test that newly-created nodes come up in searches, and that ##### once deleted they don't come up any more. %results = $wiki->search_nodes('Sunnydale'); unless ( scalar keys %results == 0 ) { die "'Sunnydale' already in indexes -- rerun init script"; } unless ( ! defined $results{"New Searching Node"} ) { die "'New Searching Node' already in indexes -- rerun init script"; } $wiki->write_node("New Searching Node", "Sunnydale") or die "Can't write 'New Searching Node'"; # will die if node already exists %results = $wiki->search_nodes('Sunnydale'); ok( defined $results{"New Searching Node"}, "new nodes are correctly indexed for searching" ); $wiki->delete_node("New Searching Node") or die "Can't delete 'New Searching Node'"; %results = $wiki->search_nodes('Sunnydale'); ok( ! defined $results{"New Searching Node"}, "...and removed from the indexes on deletion" ); # Make sure that overwritten content doesn't come up in searches. $wiki->write_node( "Overwritten Node", "aubergines" ) or die "Can't write 'Overwritten Node'"; my %node_data = $wiki->retrieve_node( "Overwritten Node" ); $wiki->write_node( "Overwritten Node", "bananas", $node_data{checksum} ) or die "Can't write 'Overwritten Node'"; %results = $wiki->search_nodes( "aubergines" ); ok( ! defined $results{ "Overwritten Node" }, "Overwritten content doesn't show up in searches." 
Wiki-Toolkit-0.83/t/022_list_unmoderated.t

use strict;
use Wiki::Toolkit::TestLib;
use Test::More;
use Time::Piece;

if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) {
    plan skip_all => "no backends configured";
} else {
    plan tests => ( 40 * scalar @Wiki::Toolkit::TestLib::wiki_info );
}

my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;

while ( my $wiki = $iterator->new_wiki ) {
    # Put some test data in.
    $wiki->write_node( "Home", "This is the home node." )
        or die "Couldn't write node";

    # Now add another version
    my %node_data = $wiki->retrieve_node("Home");
    ok( $wiki->write_node( "Home", "xx", $node_data{checksum} ),
        "write_node succeeds when node matches checksum" );

    # Now add a new node requiring moderation
    $wiki->write_node( "Moderation", "This is the moderated node.",
                       undef, undef, 1 );
    my %mn_data = $wiki->retrieve_node("Moderation");
    is( $mn_data{moderated}, '0', "First version shouldn't be moderated" );
    is( $mn_data{node_requires_moderation}, '1', "New node needs moderation" );

    # Update it
    ok( $wiki->write_node( "Moderation", "yy", $mn_data{checksum} ),
        "Can update where moderation is enabled" );

    # And another needing moderation
    $wiki->write_node( "Moderation2", "This is another moderated node.",
                       undef, undef, 1 );

    # And two versions of a third
    $wiki->write_node( "Moderation3", "3 This is another moderated node.",
                       undef, undef, 1 );
    my %mn3_data = $wiki->retrieve_node("Moderation3");
    ok( $wiki->write_node( "Moderation3", "3yy3", $mn3_data{checksum} ),
        "Can update where moderation is enabled" );

    # Now look for all nodes needing moderation
    my @all_mod_nodes = $wiki->list_unmoderated_nodes();
    my @new_mod_nodes = $wiki->list_unmoderated_nodes( only_where_latest => 1 );

    # All should have nodes 2 (2 vers), 3 and 4 (2 vers)
    is( scalar @all_mod_nodes, 5, "Should find 5 needing moderation" );
    # New should have nodes 2, 3 and 4
    is( scalar @new_mod_nodes, 3, "Should find 3 needing moderation" );

    # Check we did get the right data back
    my %m21 = ( name => 'Moderation',  node_id => 2, version => 1,
                moderated_version => 1 );
    my %m22 = ( name => 'Moderation',  node_id => 2, version => 2,
                moderated_version => 1 );
    my %m31 = ( name => 'Moderation2', node_id => 3, version => 1,
                moderated_version => 1 );
    my %m41 = ( name => 'Moderation3', node_id => 4, version => 1,
                moderated_version => 1 );
    my %m42 = ( name => 'Moderation3', node_id => 4, version => 2,
                moderated_version => 1 );
    is_deeply( $all_mod_nodes[0], \%m21, "Should have right data" );
    is_deeply( $all_mod_nodes[1], \%m22, "Should have right data" );
    is_deeply( $all_mod_nodes[2], \%m31, "Should have right data" );
    is_deeply( $all_mod_nodes[3], \%m41, "Should have right data" );
    is_deeply( $all_mod_nodes[4], \%m42, "Should have right data" );
    is_deeply( $new_mod_nodes[0], \%m21, "Should have right data" );
    is_deeply( $new_mod_nodes[1], \%m31, "Should have right data" );
    is_deeply( $new_mod_nodes[2], \%m41, "Should have right data" );

    # Mark the last (only) version of Moderation2 as moderated
    ok( $wiki->moderate_node( "Moderation2", 1 ), "Can moderate 1st version" );

    # Check counts now
    @all_mod_nodes = $wiki->list_unmoderated_nodes();
    @new_mod_nodes = $wiki->list_unmoderated_nodes( only_where_latest => 1 );
    is( scalar @all_mod_nodes, 4, "Should find 4 needing moderation" );
    is( scalar @new_mod_nodes, 2, "Should find 2 needing moderation" );

    # Check data now
    is_deeply( $all_mod_nodes[0], \%m21, "Should have right data" );
    is_deeply( $all_mod_nodes[1], \%m22, "Should have right data" );
    is_deeply( $all_mod_nodes[2], \%m41, "Should have right data" );
    is_deeply( $all_mod_nodes[3], \%m42, "Should have right data" );
    is_deeply( $new_mod_nodes[0], \%m21, "Should have right data" );
    is_deeply( $new_mod_nodes[1], \%m41, "Should have right data" );

    # Mark the last version of Moderation3 as moderated
    ok( $wiki->moderate_node( "Moderation3", 2 ), "Can moderate 2nd version" );

    # Check counts now
    @all_mod_nodes = $wiki->list_unmoderated_nodes();
    @new_mod_nodes = $wiki->list_unmoderated_nodes( only_where_latest => 1 );
    is( scalar @all_mod_nodes, 3, "Should find 3 needing moderation" );
    is( scalar @new_mod_nodes, 1, "Should find 1 needing moderation" );

    # Check data now
    $m41{'moderated_version'} = 2;   # Moderated version now shows 2
    is_deeply( $all_mod_nodes[0], \%m21, "Should have right data" );
    is_deeply( $all_mod_nodes[1], \%m22, "Should have right data" );
    is_deeply( $all_mod_nodes[2], \%m41, "Should have right data" );
    is_deeply( $new_mod_nodes[0], \%m21, "Should have right data" );

    # Check that we can make ->list_recent_changes show us only things
    # that have been moderated.
    my @rc_mod_nodes = $wiki->list_recent_changes( days => 7, moderation => 1 );

    # Sort them by name, since otherwise we get spurious test failures
    # if the initial node-writing takes more than a second.
    @rc_mod_nodes = sort { $a->{name} cmp $b->{name} } @rc_mod_nodes;
    is( scalar(@rc_mod_nodes), 4, "Count of recent changes nodes" );
    is( $rc_mod_nodes[0]{name}, 'Home', "RC node 0 name" );
    is( $rc_mod_nodes[0]{version}, 2, "RC node 0 version" );
    is( $rc_mod_nodes[1]{name}, 'Moderation', "RC node 1 name" );
    is( $rc_mod_nodes[1]{version}, 1, "RC node 1 version" );
    is( $rc_mod_nodes[2]{name}, 'Moderation2', "RC node 2 name" );
    is( $rc_mod_nodes[2]{version}, 1, "RC node 2 version" );
    is( $rc_mod_nodes[3]{name}, 'Moderation3', "RC node 3 name" );
    is( $rc_mod_nodes[3]{version}, 2, "RC node 3 version" );
}
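# --- Usage sketch (not part of the distribution) ---
# A moderation-queue listing built on the calls tested above. Each entry
# is a hashref with name, node_id, version and moderated_version keys;
# only_where_latest trims the list to one pending version per node.
# $wiki is assumed constructed as elsewhere.
for my $pending ( $wiki->list_unmoderated_nodes( only_where_latest => 1 ) ) {
    print "$pending->{name}: version $pending->{version} awaits approval ",
          "(live version: $pending->{moderated_version})\n";
    # A moderator could then call:
    # $wiki->moderate_node( name    => $pending->{name},
    #                       version => $pending->{version} );
}
# --- end sketch ---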
) or die "Couldn't write node"; my %nodeone1 = $wiki->retrieve_node("NodeOne"); $wiki->write_node( "NodeOne", "This is the second version of the first node, which links to NodeTwo, NodeThree, [NodeTwo], [NodeFour|Node Four] and [NodeThree | Node Three].", $nodeone1{checksum} ) or die "Couldn't write node"; my %nodeone2 = $wiki->retrieve_node("NodeOne"); $wiki->write_node( "NodeTwo", "This is the second node, which links to just NodeOne [NodeOne | twice].") or die "Couldn't write node"; my %nodetwo1 = $wiki->retrieve_node("NodeTwo"); $wiki->write_node( "NodeTwo", "This is the second version of the second node, which links to [NodeTwo|itself] and NodeOne", $nodetwo1{checksum}) or die "Couldn't write node"; my %nodetwo2 = $wiki->retrieve_node("NodeTwo"); $wiki->write_node( "NodeThree", "This is the third node, which links to all 3 via NodeOne, NodeTwo and [NodeThree]") or die "Couldn't write node"; my %nodethree1 = $wiki->retrieve_node("NodeThree"); # Rename NodeOne to NodeFoo, without new versions # (Don't pass in the key names) ok( $wiki->rename_node("NodeOne", "NodeFoo"), "Rename node"); # Should be able to find it as NodeFoo, but not NodeOne my %asnode1 = $wiki->retrieve_node("NodeOne"); my %asnodef = $wiki->retrieve_node("NodeFoo"); $nodeone2{checksum} = $asnodef{checksum}; is_deeply( \%asnode1, \%non_existent_node, "Renamed to NodeFoo" ); is_deeply( \%asnodef, \%nodeone2, "Renamed to NodeFoo" ); is( "This is the second version of the first node, which links to NodeTwo, NodeThree, [NodeTwo], [NodeFour|Node Four] and [NodeThree | Node Three].", $asnodef{"content"}, "no change needed to node" ); # Check that the other pages were updated as required # NodeTwo linked implicitly my %anode2 = $wiki->retrieve_node("NodeTwo"); is( "This is the second version of the second node, which links to [NodeTwo|itself] and NodeFoo", $anode2{'content'}, "implicit link was updated" ); is( 2, $anode2{'version'}, "no new version" ); # NodeThree linked implicitly my %anode3 = $wiki->retrieve_node("NodeThree"); is( "This is the third node, which links to all 3 via NodeFoo, NodeTwo and [NodeThree]", $anode3{'content'}, "implicit link was updated" ); is( 1, $anode3{'version'}, "no new version" ); # Rename it back to NodeOne # (Pass in the key names) ok( $wiki->rename_node(new_name=>"NodeOne", old_name=>"NodeFoo"), "Rename node"); # Should be able to find it as NodeOne again, but not NodeFoo %asnode1 = $wiki->retrieve_node("NodeOne"); %asnodef = $wiki->retrieve_node("NodeFoo"); $nodeone2{checksum} = $asnode1{checksum}; is_deeply( \%asnodef, \%non_existent_node, "Renamed to NodeOne" ); is_deeply( \%asnode1, \%nodeone2, "Renamed to NodeFoo" ); is( "This is the second version of the first node, which links to NodeTwo, NodeThree, [NodeTwo], [NodeFour|Node Four] and [NodeThree | Node Three].", $asnode1{"content"}, "no change needed to node" ); # Now check two and three changed back %anode2 = $wiki->retrieve_node("NodeTwo"); is( "This is the second version of the second node, which links to [NodeTwo|itself] and NodeOne", $anode2{'content'}, "implicit link was updated" ); is( 2, $anode2{'version'}, "no new version" ); %anode3 = $wiki->retrieve_node("NodeThree"); is( "This is the third node, which links to all 3 via NodeOne, NodeTwo and [NodeThree]", $anode3{'content'}, "implicit link was updated" ); is( 1, $anode3{'version'}, "no new version" ); # Tweak the formatter - swap to extended links from implicit $wiki->{_formatter} = Wiki::Toolkit::Formatter::Default->new( extended_links=>1, implicit_links=>0 ); ok( 
    ok( $wiki->{_formatter}->can("rename_links"),
        "The formatter must be able to rename links for these tests to work" );

    # Rename NodeTwo to NodeFooBar
    ok( $wiki->rename_node( old_name => "NodeTwo", new_name => "NodeFooBar" ),
        "Rename node" );

    # Check NodeTwo is now as expected
    my %asnode2 = $wiki->retrieve_node("NodeTwo");
    %asnodef = $wiki->retrieve_node("NodeFooBar");
    $nodetwo2{checksum} = $asnodef{checksum};
    $nodetwo2{content} = "This is the second version of the second node, which links to [NodeFooBar|itself] and NodeOne";
    $nodetwo2{last_modified} = $asnodef{last_modified};
    is_deeply( \%asnode2, \%non_existent_node, "Renamed to NodeFooBar" );
    is_deeply( \%asnodef, \%nodetwo2, "Renamed to NodeFooBar" );
    is( $asnodef{"content"}, $nodetwo2{content}, "node was changed" );

    # Check the other two nodes
    my %anode1 = $wiki->retrieve_node("NodeOne");
    is( $anode1{'content'},
        "This is the second version of the first node, which links to NodeTwo, NodeThree, [NodeFooBar], [NodeFour|Node Four] and [NodeThree | Node Three].",
        "explicit link was updated, implicit not" );
    is( $anode1{'version'}, 2, "no new version" );
    %anode3 = $wiki->retrieve_node("NodeThree");
    is( $anode3{'content'},
        "This is the third node, which links to all 3 via NodeOne, NodeTwo and [NodeThree]",
        "no explicit link to update; implicit link untouched" );
    is( $anode3{'version'}, 1, "no new version" );

    # Now rename back, but with the new version stuff
    # (Nodes 1 and 2 should get new versions, but not node 3)
    ok( $wiki->rename_node( new_name => "NodeTwo", old_name => "NodeFooBar",
                            create_new_versions => 1 ),
        "Rename node" );
    %asnode2 = $wiki->retrieve_node("NodeTwo");
    %asnodef = $wiki->retrieve_node("NodeFooBar");
    $nodetwo2{checksum} = $asnode2{checksum};
    $nodetwo2{content} = "This is the second version of the second node, which links to [NodeTwo|itself] and NodeOne";
    $nodetwo2{version} = 3;
    $nodetwo2{last_modified} = $asnode2{last_modified};
    is_deeply( \%asnodef, \%non_existent_node, "Renamed back to NodeTwo" );
    is_deeply( \%asnode2, \%nodetwo2, "Renamed back to NodeTwo" );
    is( $asnode2{"content"}, $nodetwo2{content}, "node was changed" );
    is( $asnode2{"version"}, 3, "new node version" );

    # Check the other two nodes
    %anode1 = $wiki->retrieve_node("NodeOne");
    is( $anode1{'content'},
        "This is the second version of the first node, which links to NodeTwo, NodeThree, [NodeTwo], [NodeFour|Node Four] and [NodeThree | Node Three].",
        "explicit link was updated, implicit not" );
    is( $anode1{'version'}, 3, "new version" );
    %anode3 = $wiki->retrieve_node("NodeThree");
    is( $anode3{'content'},
        "This is the third node, which links to all 3 via NodeOne, NodeTwo and [NodeThree]",
        "no explicit link to update; implicit link untouched" );
    is( $anode3{'version'}, 1, "no new version" );

    # Now with implicit and explicit
}
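# --- Usage sketch (not part of the distribution) ---
# rename_node as tested above: positional or named arguments, with
# create_new_versions controlling whether pages whose links get rewritten
# receive a new version. Node names are hypothetical; this only works if
# the wiki's formatter can rename_links.
$wiki->rename_node( "Old Name", "New Name" );         # positional form
$wiki->rename_node( old_name => "New Name",
                    new_name => "Better Name",
                    create_new_versions => 1 );       # also bump linking pages
# --- end sketch ---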
Wiki-Toolkit-0.83/t/017_delete_version.t

use strict;
use Wiki::Toolkit::TestLib;
use Test::More;

if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) {
    plan skip_all => "no backends configured";
} else {
    plan tests => ( 39 * scalar @Wiki::Toolkit::TestLib::wiki_info );
}

my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;

while ( my $wiki = $iterator->new_wiki ) {
    print "# Store: " . (ref $wiki->store) . "\n";

    # Test deletion of the first version of a node.
    $wiki->write_node( "A Node", "Node content.", undef, { one => 1 } )
        or die "Can't write node";
    my %data = $wiki->retrieve_node( "A Node" );
    $wiki->write_node( "A Node", "foo", $data{checksum}, { one => 2 } )
        or die "Can't write node";
    %data = $wiki->retrieve_node( "A Node" );
    $wiki->write_node( "A Node", "bar", $data{checksum}, { one => 3 } )
        or die "Can't write node";

    eval { $wiki->delete_node( name => "A Node", version => 1 ); };
    is( $@, "", "delete_node doesn't die when deleting the first version" );
    ok( $wiki->node_exists( "A Node" ), "...and the node still exists" );
    is( $wiki->retrieve_node( "A Node" ), "bar",
        "...latest version returned by retrieve_node" );

    SKIP: {
        skip "No search configured for this combination", 1
            unless $wiki->search_obj;
        my %results = $wiki->search_nodes("bar");
        is_deeply( [ keys %results ], [ "A Node" ],
                   "...and returned in search too." );
    }

    my @nodes;
    my %nodehash;
    @nodes = $wiki->list_recent_changes( days => 7,
                                         metadata_was => { one => 1 } );
    is_deeply( \@nodes, [],
        "...deleted version doesn't show up in metadata_was search" );
    @nodes = $wiki->list_recent_changes( days => 7,
                                         metadata_wasnt => { one => 1 } );
    %nodehash = map { $_->{name} => 1 } @nodes;
    ok( $nodehash{"A Node"}, "...node does show up in metadata_wasnt search" );

    # Test deletion of the latest version of a node.
    $wiki->write_node( "Two Node", "Node content.", undef, { two => 1 } )
        or die "Can't write node";
    %data = $wiki->retrieve_node( "Two Node" );
    $wiki->write_node( "Two Node", "baz HmPg", $data{checksum}, { two => 2 } )
        or die "Can't write node";
    %data = $wiki->retrieve_node( "Two Node" );
    $wiki->write_node( "Two Node", "quux RcCh", $data{checksum}, { two => 3 } )
        or die "Can't write node";

    eval { $wiki->delete_node( name => "Two Node", version => 3 ); };
    is( $@, "", "delete_node doesn't die when deleting the latest version" );
    ok( $wiki->node_exists( "Two Node" ), "...and the node still exists" );
    is( $wiki->retrieve_node( "Two Node" ), "baz HmPg",
        "...latest but one version returned by retrieve_node" );

    SKIP: {
        skip "No search configured for this combination", 2
            unless $wiki->search_obj;
        my %results = $wiki->search_nodes("baz");
        is_deeply( [ keys %results ], [ "Two Node" ],
                   "...and returned in search too." );
        %results = $wiki->search_nodes("quux");
        is_deeply( \%results, {},
                   "...and deleted version removed from search indexes" );
    }

    @nodes = $wiki->list_backlinks( node => "RcCh" );
    is( scalar @nodes, 0, "...backlinks in deleted version ignored" );
    @nodes = $wiki->list_backlinks( node => "HmPg" );
    is_deeply( \@nodes, [ "Two Node" ],
               "...backlinks in previous version show up" );

    @nodes = $wiki->list_recent_changes( days => 7,
                                         metadata_was => { two => 3 } );
    is_deeply( \@nodes, [],
        "...deleted version doesn't show up in metadata_was search" );
    @nodes = $wiki->list_recent_changes( days => 7,
                                         metadata_wasnt => { two => 3 } );
    %nodehash = map { $_->{name} => 1 } @nodes;
    ok( $nodehash{"Two Node"}, "...node does show up in metadata_wasnt search" );
    @nodes = $wiki->list_recent_changes( days => 7,
                                         metadata_isnt => { two => 3 } );
    %nodehash = map { $_->{name} => 1 } @nodes;
    ok( $nodehash{"Two Node"}, "...node does show up in metadata_isnt search" );
    @nodes = $wiki->list_recent_changes( days => 7,
                                         metadata_is => { two => 2 } );
    %nodehash = map { $_->{name} => 1 } @nodes;
    ok( $nodehash{"Two Node"},
        "...previous version does show up in metadata_is search" );
    @nodes = $wiki->list_recent_changes( days => 7,
                                         metadata_is => { two => 3 } );
    is_deeply( \@nodes, [],
        "...deleted version doesn't show up in metadata_is search" );

    # Test deletion of an intermediate version of a node.
    $wiki->write_node( "Three Node", "plate", undef, { three => 1 } )
        or die "Can't write node";
    %data = $wiki->retrieve_node( "Three Node" );
    $wiki->write_node( "Three Node", "cup", $data{checksum}, { three => 2 } )
        or die "Can't write node.";
    %data = $wiki->retrieve_node( "Three Node" );
    $wiki->write_node( "Three Node", "saucer", $data{checksum}, { three => 3 } )
        or die "Can't write node";

    print "# Deleting version 2\n";
    eval { $wiki->delete_node( name => "Three Node", version => 2 ); };
    is( $@, "", "delete_node doesn't die when deleting intermediate version" );
    ok( $wiki->node_exists( "Three Node" ), "...and the node still exists" );
    is( $wiki->retrieve_node( "Three Node" ), "saucer",
        "...latest version returned by retrieve_node" );

    SKIP: {
        skip "No search configured for this combination", 2
            unless $wiki->search_obj;
        my %results = $wiki->search_nodes("saucer");
        is_deeply( [ keys %results ], [ "Three Node" ],
                   "...and returned in search too." );
        %results = $wiki->search_nodes("cup");
        is_deeply( \%results, {},
                   "...and deleted version removed from search indexes" );
    }

    @nodes = $wiki->list_recent_changes( days => 7,
                                         metadata_was => { three => 2 } );
    is_deeply( \@nodes, [], "...doesn't show up in metadata_was search" );
    @nodes = $wiki->list_recent_changes( days => 7,
                                         metadata_wasnt => { three => 2 } );
    %nodehash = map { $_->{name} => 1 } @nodes;
    ok( $nodehash{"Three Node"}, "...does show up in metadata_wasnt search" );

    print "# Deleting version 3\n";
    eval { $wiki->delete_node( name => "Three Node", version => 3 ); };
    is( $@, "",
        "delete_node doesn't die when we now try to delete the latest version" );
    %data = $wiki->retrieve_node( "Three Node" );
    is( $data{version}, 1, "...and the current version is 1" );
    is( $data{content}, "plate", "...and has correct content" );
    ok( $data{last_modified}, "...and has non-blank timestamp" );

    # Test deletion of the only version of a node.
$wiki->write_node( "Four Node", "television", undef, { four => 1 } ) or die "Can't write node"; eval { $wiki->delete_node( name => "Four Node", version => 1 ); }; is( $@, "", "delete_node doesn't die when deleting the only version of a node" ); is( $wiki->retrieve_node("Four Node"), "", "...and retrieving that deleted node returns the empty string" ); ok( ! $wiki->node_exists("Four Node"), "...and ->node_exists now returns false" ); SKIP: { skip "No search configured for this combination", 1 unless $wiki->search_obj; my %results = $wiki->search_nodes("television"); is_deeply( \%results, { }, "...and a search does not find the node" ); } @nodes = $wiki->list_recent_changes( days => 7, metadata_was => { four => 1 } ); is_deeply( \@nodes, [], "...doesn't show up in metadata_was search" ); @nodes = $wiki->list_recent_changes( days => 7, metadata_is => { four => 1 } ); is_deeply( \@nodes, [], "...doesn't show up in metadata_is search" ); # Test deletion of a nonexistent node. eval { $wiki->delete_node( name => "idonotexist", version => 2 ); }; is( $@, "", "delete_node doesn't die when deleting a nonexistent node" ); # Test deletion of a nonexistent version. There will be a warning, so # capture it and print it as a diagnostic. $wiki->write_node( "Five Node", "elephant", undef, { five => 1 } ) or die "Can't write node"; my @warnings; eval { local $SIG{__WARN__} = sub { push @warnings, $_[0]; }; $wiki->delete_node( name => "Five Node", version => 2 ); }; is( $@, "", "delete_node doesn't die when deleting a nonexistent version" ); print "# ...but it does warn: " . join( " ", @warnings ) if scalar @warnings; ok( $wiki->node_exists("Five Node"), "...and ->node_exists still returns true" ); is( $wiki->retrieve_node("Five Node"), "elephant", "...and retrieving the node returns the correct thing" ); } �����������������������������������������������������������������������������������������������������������������������������������������������������������������������Wiki-Toolkit-0.83/t/060_recent_changes_new_only.t���������������������������������������������������0000644�0001750�0001750�00000015170�12243757607�021600� 0����������������������������������������������������������������������������������������������������ustar �vagrant�������������������������vagrant����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������use strict; use Wiki::Toolkit::Store::Database; use Wiki::Toolkit::TestLib; use DBI; use Test::More; use Time::Piece; use Time::Seconds; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 10 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } # These tests are for the "new_only" parameter to ->list_recent_changes. # We set things up so that we have the following nodes: # - Ae Pub, in the Pubs category, added 8 days ago, edited 2 days ago and today # - Ae Bar, in the Bars category, added 5 days ago, edited today # - Ae Restaurant, in the Restaurants category, added today # - Ae Nother Pub, in the Pubs category, added today # Also, make it so the categories of these things were only added today. my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Store the time now, so we have a timestamp which precedes "today"'s # additions and edits. 
Wiki-Toolkit-0.83/t/060_recent_changes_new_only.t

use strict;
use Wiki::Toolkit::Store::Database;
use Wiki::Toolkit::TestLib;
use DBI;
use Test::More;
use Time::Piece;
use Time::Seconds;

if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) {
    plan skip_all => "no backends configured";
} else {
    plan tests => ( 10 * scalar @Wiki::Toolkit::TestLib::wiki_info );
}

# These tests are for the "new_only" parameter to ->list_recent_changes.
# We set things up so that we have the following nodes:
#  - Ae Pub, in the Pubs category, added 8 days ago, edited 2 days ago
#    and today
#  - Ae Bar, in the Bars category, added 5 days ago, edited today
#  - Ae Restaurant, in the Restaurants category, added today
#  - Ae Nother Pub, in the Pubs category, added today
# Also, make it so the categories of these things were only added today.

my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker;

while ( my $wiki = $iterator->new_wiki ) {
    # Store the time now, so we have a timestamp which precedes "today"'s
    # additions and edits.
    my $start_time = time;
    my $slept = sleep(2);
    warn "Slept for less than a second; test results may be unreliable"
        unless $slept >= 1;

    # Set up the data and run the tests.
    setup_nodes( wiki => $wiki );

    my @nodes = $wiki->list_recent_changes( days => 4 );
    my @names = sort map { $_->{name} } @nodes;
    is_deeply( \@names,
               [ "Ae Bar", "Ae Nother Pub", "Ae Pub", "Ae Restaurant" ],
               "all nodes returned when new_only omitted" );

    @nodes = $wiki->list_recent_changes( days => 4, new_only => 0 );
    @names = sort map { $_->{name} } @nodes;
    is_deeply( \@names,
               [ "Ae Bar", "Ae Nother Pub", "Ae Pub", "Ae Restaurant" ],
               "...and when it's set to false" );

    @nodes = $wiki->list_recent_changes( days => 4, new_only => 1 );
    @names = sort map { $_->{name} } @nodes;
    is_deeply( \@names, [ "Ae Nother Pub", "Ae Restaurant" ],
        "nodes edited but not added in last n days are omitted with new_only" );

    @nodes = $wiki->list_recent_changes(
        between_days => [ 1, 6 ],
        include_all_changes => 1,   # to make between_days work - bug?
        new_only => 1,
    );
    @names = sort map { $_->{name} } @nodes;
    is_deeply( \@names, [ "Ae Bar" ],
               "...and this works for between_days too" );

    @nodes = $wiki->list_recent_changes( since => $start_time, new_only => 1 );
    @names = sort map { $_->{name} } @nodes;
    is_deeply( \@names, [ "Ae Nother Pub", "Ae Restaurant" ],
               "...and for since" );

    @nodes = $wiki->list_recent_changes( last_n_changes => 3, new_only => 1 );
    @names = sort map { $_->{name} } @nodes;
    is_deeply( \@names, [ "Ae Bar", "Ae Nother Pub", "Ae Restaurant" ],
               "...and for last n" );

    @nodes = $wiki->list_recent_changes(
        days        => 2,
        new_only    => 1,
        metadata_is => { category => "Pubs" },
    );
    @names = sort map { $_->{name} } @nodes;
    is_deeply( \@names, [ "Ae Nother Pub" ],
        "combination of days and metadata_is omits things edited but "
        . "not added in recent days" );

    # Ae Bar wasn't in the Bars category when it was added, but it is now.
    @nodes = $wiki->list_recent_changes(
        days        => 6,
        new_only    => 1,
        metadata_is => { category => "Bars" },
    );
    @names = sort map { $_->{name} } @nodes;
    is_deeply( \@names, [ "Ae Bar" ],
               "...and includes the things it should include" );

    @nodes = $wiki->list_recent_changes(
        days           => 6,
        new_only       => 1,
        metadata_wasnt => { category => "Bars" },
    );
    @names = sort map { $_->{name} } @nodes;
    is_deeply( \@names, [ "Ae Bar", "Ae Nother Pub", "Ae Restaurant" ],
               "...and metadata_wasnt works too" );

    # Ae Nother Pub was the only pub added to the Pubs category on creation.
    @nodes = $wiki->list_recent_changes(
        days         => 10,
        new_only     => 1,
        metadata_was => { category => "Pubs" },
    );
    @names = sort map { $_->{name} } @nodes;
    is_deeply( \@names, [ "Ae Nother Pub" ],
        "combination of new_only and metadata_was omits things which "
        . "didn't have the relevant data when they were created" );
}

sub get_timestamp {
    my %args = @_;
    my $days = $args{days};
    my $now = localtime;    # overloaded by Time::Piece
    my $time = $now - ( $days * ONE_DAY );
    return Wiki::Toolkit::Store::Database->_get_timestamp( $time );
}

sub setup_nodes {
    my %args = @_;
    my $wiki = $args{wiki};

    # For "Ae Pub", write directly to the database so we can fake having
    # written something in the past (white box testing). It might be a good
    # idea at some point to factor this out into Wiki::Toolkit::TestLib, as
    # it's used in other tests too.
    my $dbh = $wiki->store->dbh;
    my $content_sth = $dbh->prepare(
        "INSERT INTO content (node_id,version,text,modified) VALUES (?,?,?,?)");
    my $node_sth = $dbh->prepare(
        "INSERT INTO node (id,name,version,text,modified) VALUES (?,?,?,?,?)");
    $node_sth->execute( 10, "Ae Pub", 2, "foo", get_timestamp( days => 2 ) )
        or die $dbh->errstr;
    $content_sth->execute( 10, 2, "foo", get_timestamp( days => 2 ) )
        or die $dbh->errstr;
    $content_sth->execute( 10, 1, "foo", get_timestamp( days => 8 ) )
        or die $dbh->errstr;

    # Now write it as per usual, to get the categories in.
    my %data = $wiki->retrieve_node( "Ae Pub" );
    $wiki->write_node( "Ae Pub", $data{content}, $data{checksum},
                       { category => [ "Pubs" ] } )
        or die "Couldn't write Ae Pub node";

    # Now do Ae Bar the same way.
    $node_sth->execute( 20, "Ae Bar", 1, "foo", get_timestamp( days => 5 ) )
        or die $dbh->errstr;
    $content_sth->execute( 20, 1, "foo", get_timestamp( days => 5 ) )
        or die $dbh->errstr;
    %data = $wiki->retrieve_node( "Ae Bar" );
    $wiki->write_node( "Ae Bar", $data{content}, $data{checksum},
                       { category => [ "Bars" ] } )
        or die "Couldn't write Ae Bar node";

    # The other nodes are simple.
    $wiki->write_node( "Ae Restaurant", "lalalalala", undef,
                       { category => [ "Restaurants" ] } )
        or die "Couldn't write Ae Restaurant node";
    $wiki->write_node( "Ae Nother Pub", "beer", undef,
                       { category => [ "Pubs" ] } )
        or die "Couldn't write Ae Nother Pub node";
}
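# --- Usage sketch (not part of the distribution) ---
# The list_recent_changes selectors exercised above, side by side. All of
# them return a list of hashrefs with at least name and version keys; the
# metadata filter values are hypothetical. $wiki is assumed as elsewhere.
my @changes = $wiki->list_recent_changes( days => 7 );            # window
@changes = $wiki->list_recent_changes( last_n_changes => 10 );    # count
@changes = $wiki->list_recent_changes( since => time() - 3600 );  # timestamp
@changes = $wiki->list_recent_changes(
    days        => 7,
    new_only    => 1,                          # only newly created nodes
    metadata_is => { category => "Pubs" },     # filter on current metadata
);
# --- end sketch ---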
15 ) { $wiki->write_node( "Temp Node $i", "foo" ); } $slept = sleep(2); warn "Slept for less than a second, test results may not be trustworthy" unless $slept >= 1; $wiki->write_node( "Test Node 1", "Just a plain test", undef, { username => "Kake", comment => "new node", category => [ 'TestCategory1', 'Meta' ] } ); $slept = sleep(2); warn "Slept for less than a second, 'items=n' test may fail" unless $slept >= 1; $wiki->write_node( "Calthorpe Arms", "CAMRA-approved pub near King's Cross", undef, { comment => "Stub page, please update!", username => "Kake", postcode => "WC1X 8JR", locale => [ "Bloomsbury" ] } ); $wiki->write_node( "Test Node 2", "Gosh, another test!", undef, { username => "nou", comment => "This is a minor edit.", major_change => 0, } ); # Now set up a feed object and test it. my %default_config = ( wiki => $wiki, site_name => "Wiki::Toolkit Test Site", make_node_url => sub { my $id = uri_escape($_[0]); my $version = $_[1] || ''; $version = uri_escape($version) if $version; "http://example.com/?id=$id;version=$version"; }, recent_changes_link => "http://example.com/recentchanges" ); my $rss = eval { Wiki::Toolkit::Feed::RSS->new( %default_config, site_url => "http://example.com/kakeswiki/", ); }; is( $@, "", "'new' doesn't croak if wiki object and mandatory parameters supplied" ); isa_ok( $rss, "Wiki::Toolkit::Feed::RSS" ); my $feed = eval { $rss->recent_changes; }; is( $@, "", "->recent_changes doesn't croak" ); # Check the things that are generated by the mandatory arguments. like( $feed, qr|<item rdf:about="http://example.com/\?id=Test%20Node%201;version=1">|, "make_node_url is used" ); like( $feed, qr|<modwiki:version>1</modwiki:version>|, "version numbers included in feed" ); like( $feed, qr|<modwiki:status>new</modwiki:status>|, "page status included in feed" ); like( $feed, qr|<modwiki:importance>major</modwiki:importance>|, "change importance included and defaults to 'major'" ); my $charset = $wiki->store->{_charset}; like( $feed, qr|<?xml version="1.0"|, "is xml" ); like( $feed, qr|<?xml version="1.0" encoding="$charset"|, "is xml" ); # Check stuff that comes from the metadata. like( $feed, qr|<dc:contributor>Kake</dc:contributor>|, "username picked up as contributor" ); like( $feed, qr|<description>.*\[nou]</description>|, "username included in description" ); like( $feed, qr|<dc:subject>TestCategory1</dc:subject>|, "dublin core subject contains category" ); # Check that interwiki things are passed through right. $rss = Wiki::Toolkit::Feed::RSS->new( %default_config, interwiki_identifier => "KakesWiki", site_url => "http://example.com/kakeswiki/", ); $feed = $rss->recent_changes; like( $feed, qr|<modwiki:interwiki>KakesWiki</modwiki:interwiki>|, "interwiki identifier passed through OK" ); # Check that diff url comes through. $rss = Wiki::Toolkit::Feed::RSS->new( %default_config, make_diff_url => sub { my $node_name = shift; return "http://example.com/?action=show_diff;id=" . uri_escape($node_name) }, site_url => "http://example.com/kakeswiki/", ); $feed = $rss->recent_changes; like( $feed, qr|<modwiki:diff>http://example.com/\?action=show_diff;id=Calthorpe%20Arms</modwiki:diff>|, "make_diff_url used" ); # Check that history url comes through. # Will use a different character set $rss = Wiki::Toolkit::Feed::RSS->new( %default_config, make_history_url => sub { my $node_name = shift; return "http://example.com/?action=history;id=" . 
uri_escape($node_name) }, site_url => "http://example.com/kakeswiki/", encoding => "UTF-16" ); $feed = $rss->recent_changes; like( $feed, qr|<modwiki:history>http://example.com/\?action=history;id=Calthorpe%20Arms</modwiki:history>|, "make_history_url used" ); # Test the 'items' parameter. $feed = $rss->recent_changes( items => 2 ); unlike( $feed, qr|<title>Test Node 1|, "items param works" ); # Test the 'days' parameter. $feed = $rss->recent_changes( days => 2 ); like( $feed, qr|Old Node|, "days param works" ); # Test ignoring minor changes. $feed = $rss->recent_changes( ignore_minor_edits => 1 ); unlike( $feed, qr|This is a minor edit.|, "ignore_minor_edits works" ); # Test personalised feeds. $feed = $rss->recent_changes( filter_on_metadata => { username => "Kake", }, ); unlike( $feed, qr|nou|, "can filter on a single metadata criterion" ); $feed = $rss->recent_changes( filter_on_metadata => { username => "Kake", locale => "Bloomsbury", }, ); unlike( $feed, qr|Test Node 1|, "can filter on two criteria" ); # Test the xml headers again, now we have given a character set like( $feed, qr|<?xml version="1.0" encoding="UTF-16"|, "is xml" ); } Wiki-Toolkit-0.83/t/302_feed_atom_recentchanges.t use strict; use Test::More; use URI::Escape; use Wiki::Toolkit; use Wiki::Toolkit::TestConfig::Utilities; plan tests => (3 + 16 * $Wiki::Toolkit::TestConfig::Utilities::num_stores); use_ok( "Wiki::Toolkit::Feed::Atom" ); eval { my $atom = Wiki::Toolkit::Feed::Atom->new; }; ok( $@, "new croaks if no wiki object supplied" ); eval { my $atom = Wiki::Toolkit::Feed::Atom->new( wiki => "foo" ); }; ok( $@, "new croaks if something that isn't a wiki object supplied" ); my %stores = Wiki::Toolkit::TestConfig::Utilities->stores; my ($store_name, $store); while ( ($store_name, $store) = each %stores ) { SKIP: { skip "$store_name storage backend not configured for testing", 16 unless $store; print "#\n##### TEST CONFIG: Store: $store_name\n#\n"; my $wiki = Wiki::Toolkit->new( store => $store ); my %default_config = ( wiki => $wiki, site_name => "Wiki::Toolkit Test Site", make_node_url => sub { my $id = uri_escape($_[0]); my $version = $_[1] || ''; $version = uri_escape($version) if $version; "http://example.com/?id=$id;version=$version"; }, recent_changes_link => "http://example.com/?RecentChanges", atom_link => "http://example.com/?action=rc;format=atom", ); my $atom = eval { Wiki::Toolkit::Feed::Atom->new( %default_config, site_url => "http://example.com/kakeswiki/" ); }; is( $@, "", "'new' doesn't croak if wiki object and mandatory parameters supplied" ); isa_ok( $atom, "Wiki::Toolkit::Feed::Atom" ); my $feed = eval { $atom->recent_changes; }; is( $@, "", "->recent_changes doesn't croak" ); # Check the things that are generated by the mandatory arguments. like( $feed, qr|http://example.com/\?id=Test%20Node%201;version=1|, "make_node_url is used" ); # Check stuff that comes from the metadata. like( $feed, qr|<name>Kake</name>|, "username picked up as author" ); like( $feed, qr|<summary>.*\[nou]</summary>|, "username included in summary" ); # Check we also have some categories like( $feed, qr|TestCategory1|, "contains categories" ); my $charset = $wiki->store->{_charset}; like( $feed, qr|<?xml version="1.0" encoding="$charset"|, "is xml" ); # Test the 'items' parameter. $feed = $atom->recent_changes( items => 2 ); unlike( $feed, qr|<title>Test Node 1|, "items param works" ); # Test the 'days' parameter. $feed = $atom->recent_changes( days => 2 ); like( $feed, qr|Old Node|, "days param works" ); # Test ignoring minor changes. $feed = $atom->recent_changes( ignore_minor_edits => 1 ); unlike( $feed, qr|This is a minor edit.|, "ignore_minor_edits works" ); # Get a new Atom feed maker, with a different encoding $atom = eval { Wiki::Toolkit::Feed::Atom->new( %default_config, site_url => "http://example.com/kakeswiki/", encoding => "UTF-16" ); }; # Test personalised feeds.
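# (The username and locale values filtered on below are the ones attached
# as metadata when the test nodes were written earlier; multiple criteria
# must all match, so the two-key filter is strictly narrower than the
# one-key filter.)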
$feed = $atom->recent_changes( filter_on_metadata => { username => "Kake", }, ); unlike( $feed, qr|nou|, "can filter on a single metadata criterion" ); $feed = $atom->recent_changes( filter_on_metadata => { username => "Kake", locale => "Bloomsbury", }, ); unlike( $feed, qr|Test Node 1|, "can filter on two criteria" ); # Test the xml headers again, now we have given a character set like( $feed, qr|<?xml version="1.0" encoding="UTF-16"|, "is xml" ); } } use strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 7 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { eval { $wiki->write_node( "Test 1", undef, undef ); }; ok( $@, "->write_node dies if undef content and metadata supplied" ); eval { $wiki->write_node( "Test 2", "", undef ); }; is( $@, "", "...but not if blank content and undef metadata supplied" ); eval { $wiki->write_node( "Test 3", "foo", undef ); }; is( $@, "", "...and not if just content defined" ); eval { $wiki->write_node( "Test 4", "", undef, { category => "Foo" }); }; is( $@, "", "...and not if just metadata defined" ); # Test deleting nodes with blank data. eval { $wiki->delete_node( "Test 2"); }; is( $@, "", "->delete_node doesn't die when called on node with blank content and undef metadata" ); eval { $wiki->delete_node( "Test 3"); }; is( $@, "", "...nor on node with only content defined" ); eval { $wiki->delete_node( "Test 4"); }; is( $@, "", "...nor on node with only metadata defined" ); } Wiki-Toolkit-0.83/t/026_schema_current.t0000644000175000017500000000227712243757607017716 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 3 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { my $store = $wiki->store; my ($cur_ver, $db_ver); ($cur_ver, $db_ver) = $store->schema_current; cmp_ok( $cur_ver, '==', $db_ver, "schema_current returns matching versions when schema is current" ); # Now we munge the database to simulate an old schema my $dbh = $store->dbh; my $sth = $dbh->prepare("UPDATE schema_info SET version = 1"); $sth->execute; ($cur_ver, $db_ver) = $store->schema_current; cmp_ok( $cur_ver, '>', $db_ver, "schema_current returns \$cur_ver > \$db_ver when schema is older" ); # Now we get rid of the schema table to simulate a really old DB $sth = $dbh->prepare("DROP TABLE schema_info"); $sth->execute; ($cur_ver, $db_ver) = $store->schema_current; cmp_ok( $cur_ver, '>', $db_ver, "schema_current returns \$cur_ver > \$db_ver when schema is missing" ); } Wiki-Toolkit-0.83/t/005_delete.t0000644000175000017500000000225312243757607016155 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 5 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { $wiki->write_node("A Node", "Node content.") or die "Can't write node"; # Test deletion of an existing node. eval { $wiki->delete_node("A Node") }; is( $@, "", "delete_node doesn't die when deleting an existing node" ); is( $wiki->retrieve_node("A Node"), "", "...and retrieving a deleted node returns the empty string" ); ok( !
$wiki->node_exists("A Node"), "...and ->node_exists now returns false" ); SKIP: { skip "No search configured for this combination", 1 unless $wiki->search_obj; my %results = $wiki->search_nodes("content"); is_deeply( \%results, { }, "...and a search does not find the node" ); } # Test deletion of a nonexistent node. eval { $wiki->delete_node("idonotexist") }; is( $@, "", "delete_node doesn't die when deleting a non-existent node" ); } Wiki-Toolkit-0.83/t/052_sqlite_store.t0000644000175000017500000000660312243757607017435 0ustar vagrantvagrantuse strict; use Wiki::Toolkit; use Wiki::Toolkit::Setup::SQLite; use Wiki::Toolkit::Store::SQLite; use Wiki::Toolkit::TestConfig; use Test::More tests => 9; my $class = "Wiki::Toolkit::Store::SQLite"; eval { $class->new; }; ok( $@, "Failed creation dies" ); my $dbname = $Wiki::Toolkit::TestConfig::config{SQLite}{dbname}; SKIP: { skip "No SQLite database configured for testing", 8 unless $dbname; Wiki::Toolkit::Setup::SQLite::cleardb( $dbname ); Wiki::Toolkit::Setup::SQLite::setup( $dbname ); my $store = eval { $class->new( dbname => $dbname ) }; is( $@, "", "Creation doesn't die when given connection parameters" ); isa_ok( $store, $class ); ok( $store->dbh, "...and has set up a database handle" ); my $dbh = DBI->connect( "dbi:SQLite:dbname=$dbname" ); my $store2 = eval { $class->new( dbh => $dbh ) }; is( $@, "", "Creation doesn't die when given dbh" ); isa_ok( $store2, $class ); ok( $store2->dbh, "...and we can access the database handle" ); SKIP: { eval { require Hook::LexWrap; require Test::MockObject; }; skip "either Hook::LexWrap or Test::MockObject not installed", 2 if $@; my $wiki = Wiki::Toolkit->new( store => $store ); # Write some test data. $wiki->write_node( "Home", "This is the home node." ) or die "Couldn't setup"; # White box testing - override verify_node_checksum to first verify the # checksum and then if it's OK set up a new wiki object that sneakily # writes to the node before letting us have control back. my $temp; $temp = Hook::LexWrap::wrap( # fully qualify since we're requiring 'Wiki::Toolkit::Store::Database::verify_checksum', post => sub { undef $temp; # Don't want to wrap our sneaking-in my $node = $_[1]; my $evil_store = $class->new( dbname => $dbname ); my $evil_wiki = Wiki::Toolkit->new( store => $evil_store ); my %node_data = $evil_wiki->retrieve_node($node); $evil_wiki->write_node($node, "foo", $node_data{checksum}) or die "Evil wiki got conflict on writing"; } ); # Now try to write to a node -- it should fail. my %node_data = $wiki->retrieve_node("Home"); ok( ! $wiki->write_node("Home", "bar", $node_data{checksum}), "write_node handles overlapping write attempts correctly" ); # Check actual real database errors croak rather than flagging conflict %node_data = $wiki->retrieve_node("Home"); my $dbh = $store->dbh; $dbh->disconnect; # Mock a database handle. Need to mock rollback() and disconnect() # as well to avoid warnings that an unmocked method has been called # (we don't actually care). 
my $fake_dbh = Test::MockObject->new(); $fake_dbh->mock("quote", sub { die "Dave told us to"; }); $fake_dbh->set_true("begin_work"); $fake_dbh->set_true("rollback"); $fake_dbh->set_true("disconnect"); $store->{_dbh} = $fake_dbh; eval { $store->check_and_write_node( node => "Home", content => "This is a node.", checksum => $node_data{checksum} ); }; like( $@, qr/Dave told us to/, "...and croaks on database error" ); } } Wiki-Toolkit-0.83/t/061_list_metadata_by_type.t0000644000175000017500000000631012243757607021261 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 9 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Put some test data in. $wiki->write_node( "Reun Thai", "A restaurant", undef, { postcode => "W6 9PL", category => [ "Thai Food", "Restaurant", "Hammersmith" ], latitude => "51.911", longitude => "" } ); $wiki->write_node( "GBK", "A burger restaurant", undef, { postcode => "OX1 2AY", category => [ "Burgers", "Restaurant", "Oxford" ] }); # And one with an un-moderated version $wiki->write_node( "Cafe Roma", "A cafe", undef, { category => [ "Cafe", "Oxford" ], latitude => "51.759", longitude => "-1.270" }, 1 ); $wiki->moderate_node("Cafe Roma", 1); my %node = $wiki->retrieve_node( "Cafe Roma" ); $wiki->write_node( "Cafe Roma", "A cafe unmod", $node{"checksum"}, { category => [ "Cafe", "Oxford", "Unmoderated", "NotSeen" ], latitude => "51.759", longitude => "-1.270", locale => [ "Oxford" ] }, ); my @md; # With nothing, get back undef is($wiki->store->list_metadata_by_type(), undef, "Needs a type given"); # Postcode should be easy @md = $wiki->store->list_metadata_by_type("postcode"); is_deeply( [sort @md], [ "OX1 2AY", "W6 9PL" ], "Correct metadata listing" ); # Latitude also @md = $wiki->store->list_metadata_by_type("latitude"); is_deeply( [sort @md], [ "51.759", "51.911" ], "Correct metadata listing" ); # For category, will not see unmoderated version @md = $wiki->store->list_metadata_by_type("category"); is_deeply( [sort @md], [ "Burgers", "Cafe", "Hammersmith", "Oxford", "Restaurant", "Thai Food" ], "Correct metadata listing" ); @md = $wiki->store->list_metadata_names(); is_deeply( [sort @md], [ "category", "latitude", "longitude", "postcode" ], "Correct metadata names" ); # Now moderate that one, see it come in $wiki->moderate_node("Cafe Roma", 2); @md = $wiki->store->list_metadata_by_type("category"); is_deeply( [sort @md], [ "Burgers", "Cafe", "Hammersmith", "NotSeen", "Oxford", "Restaurant", "Thai Food", "Unmoderated" ], "Correct metadata listing" ); @md = $wiki->store->list_metadata_names(); is_deeply( [sort @md], [ "category", "latitude", "locale", "longitude", "postcode" ], "Correct metadata names" ); # And un-moderate it again, see it go away $wiki->store->dbh->do("UPDATE content SET moderated = 'f' WHERE version = 2"); @md = $wiki->store->list_metadata_by_type("category"); is_deeply( [sort @md], [ "Burgers", "Cafe", "Hammersmith", "Oxford", "Restaurant", "Thai Food" ], "Correct metadata listing" ); @md = $wiki->store->list_metadata_names(); is_deeply( [sort @md], [ "category", "latitude", "longitude", "postcode" ], "Correct metadata names" ); } Wiki-Toolkit-0.83/t/009_backlinks.t0000644000175000017500000000532212243757607016660 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar 
@Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 7 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Put some test data in. All these nodes have some node linking # to them except "HomePage". $wiki->write_node("HomePage", "the home node") or die "Can't write node"; $wiki->write_node("BacklinkTestOne", "This is some text. It contains a link to BacklinkTestTwo.") or die "Can't write node"; $wiki->write_node("BacklinkTestTwo", # don't break this line to pretty-indent it or the formatter will # think the second line is code and not pick up the link. "This is some text. It contains a link to BacklinkTestThree and one to BacklinkTestOne.") or die "Can't write node"; $wiki->write_node("BacklinkTestThree", "This is some text. It contains a link to BacklinkTestOne.") or die "Can't write node"; my @links = $wiki->list_backlinks( node => "BacklinkTestTwo" ); is_deeply( \@links, [ "BacklinkTestOne" ], "backlinks work on nodes linked to once" ); @links = $wiki->list_backlinks( node => "BacklinkTestOne" ); is_deeply( [ sort @links], [ "BacklinkTestThree", "BacklinkTestTwo" ], "...and nodes linked to twice" ); @links = $wiki->list_backlinks( node => "NonexistentNode" ); is_deeply( \@links, [], "...returns empty list for nonexistent node not linked to" ); @links = $wiki->list_backlinks( node => "HomePage" ); is_deeply( \@links, [], "...returns empty list for existing node not linked to" ); $wiki->delete_node("BacklinkTestOne") or die "Couldn't delete node"; @links = $wiki->list_backlinks( node => "BacklinkTestTwo" ); is_deeply( \@links, [], "...returns empty list when the only node linking to this one has been deleted" ); eval { $wiki->write_node("MultipleBacklinkTest", "This links to NodeOne and again to NodeOne"); }; is( $@, "", "doesn't die when writing a node that links to the same place twice" ); # Now test that we don't get tripped up by case-sensitivity. my $content = "CleanupNode CleanUpNode"; my @warnings; eval { local $SIG{__WARN__} = sub { push @warnings, $_[0]; }; $wiki->write_node( "TestNode", $content ); }; is( $@, "", "->write_node doesn't die when content links to nodes differing only in case" ); print "# ...but it does warn: " . join(" ", @warnings ) . "\n" if scalar @warnings; } Wiki-Toolkit-0.83/t/015_dangling_links.t0000644000175000017500000000200012243757607017665 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 3 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Put some test data in. 
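# The link graph written below is: NodeOne -> NonExistentNode,
# NodeTwo -> NodeOne, NodeThree -> NonExistentNode. So NonExistentNode is
# the only dangling link, and it is linked to from two places, which lets
# the final test check that it is reported exactly once.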
$wiki->write_node( "NodeOne", "NonExistentNode" ) or die "Couldn't write node"; $wiki->write_node( "NodeTwo", "NodeOne" ) or die "Couldn't write node"; $wiki->write_node( "NodeThree", "NonExistentNode" ) or die "Couldn't write node"; my @links = $wiki->list_dangling_links; my %dangling; foreach my $link (@links) { $dangling{$link}++; } ok( $dangling{"NonExistentNode"}, "dangling links returned by ->list_dangling_links" ); ok( !$dangling{"NodeOne"}, "...but not existing ones" ); is( $dangling{"NonExistentNode"}, 1, "...and each dangling link only returned once" ); } Wiki-Toolkit-0.83/t/008_write_and_rewrite.t0000644000175000017500000000553412243757607020440 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; use Time::Piece; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 12 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Put some test data in. $wiki->write_node( "Home", "This is the home node." ) or die "Couldn't write node"; # Test writing to existing nodes. my %node_data = $wiki->retrieve_node("Home"); my $slept = sleep(2); warn "Slept for less than a second, 'lastmod' test may fail" unless $slept >= 1; ok( $wiki->write_node("Home", "xx", $node_data{checksum}), "write_node succeeds when node matches checksum" ); ok( ! $wiki->write_node("Home", "foo", $node_data{checksum}), "...and flags when it doesn't" ); my %new_node_data = $wiki->retrieve_node("Home"); print "# version now: [$new_node_data{version}]\n"; is( $new_node_data{version}, $node_data{version} + 1, "...and the version number is updated on successful writing" ); my $lastmod = Time::Piece->strptime($new_node_data{last_modified}, $Wiki::Toolkit::Store::Database::timestamp_fmt); my $prev_lastmod = Time::Piece->strptime($node_data{last_modified}, $Wiki::Toolkit::Store::Database::timestamp_fmt); print "# [$lastmod] [$prev_lastmod]\n"; ok( $lastmod > $prev_lastmod, "...as is last_modified" ); my %old_content = $wiki->retrieve_node( name => "Home", version => 2 ); is( $old_content{'version'}, "2", "...and old versions are still available" ); is( $old_content{'content'}, "xx", "...and old versions have right content" ); # Ensure it's the same whichever way we fetch the new version is_deeply(\%old_content, \%new_node_data, "New version the same however fetched"); # Test retrieving with checksums. %node_data = $wiki->retrieve_node("Home"); ok( $node_data{checksum}, "retrieve_node does return a checksum" ); is( $node_data{content}, $wiki->retrieve_node("Home"), "...and the same content as when called in scalar context" ); ok( $wiki->verify_checksum("Home", $node_data{checksum}), "...and verify_checksum is happy with the checksum" ); $wiki->write_node( "Home", $node_data{content}, $node_data{checksum} ) or die "Couldn't write node"; ok( $wiki->verify_checksum("Home", $node_data{checksum}), "...still happy when we write node again with exact same content" ); $wiki->write_node("Home", "foo bar wibble", $node_data{checksum} ) or die "Couldn't write node"; ok( ! $wiki->verify_checksum("Home", $node_data{checksum}), "...but not once we've changed the node content" ); } Wiki-Toolkit-0.83/t/300_feed_atom_setup.t0000644000175000017500000000036212243757607020053 0ustar vagrantvagrantuse strict; use Test::More tests => 1; use Wiki::Toolkit; use Wiki::Toolkit::TestConfig::Utilities; # Reinitialise every configured storage backend. 
Wiki::Toolkit::TestConfig::Utilities->reinitialise_stores; pass( "Reinitialised stores" ); Wiki-Toolkit-0.83/t/lib/0000755000175000017500000000000012243760523014675 5ustar vagrantvagrantWiki-Toolkit-0.83/t/lib/Wiki/0000755000175000017500000000000012243760523015600 5ustar vagrantvagrantWiki-Toolkit-0.83/t/lib/Wiki/Toolkit/0000755000175000017500000000000012243760523017225 5ustar vagrantvagrantWiki-Toolkit-0.83/t/lib/Wiki/Toolkit/Plugin/0000755000175000017500000000000012243760523020463 5ustar vagrantvagrantWiki-Toolkit-0.83/t/lib/Wiki/Toolkit/Plugin/Bar.pm0000644000175000017500000000023212243757607021533 0ustar vagrantvagrantpackage Wiki::Toolkit::Plugin::Bar; use base qw( Wiki::Toolkit::Plugin ); sub on_register { my $self = shift; die unless $self->datastore; } 1; Wiki-Toolkit-0.83/t/lib/Wiki/Toolkit/Plugin/Foo.pm0000644000175000017500000000011612243757607021553 0ustar vagrantvagrantpackage Wiki::Toolkit::Plugin::Foo; use base qw( Wiki::Toolkit::Plugin ); 1; Wiki-Toolkit-0.83/t/001_load.t0000644000175000017500000000217712243757607015633 0ustar vagrantvagrantuse Test::More tests => 17; use_ok( "Wiki::Toolkit" ); use_ok( "Wiki::Toolkit::Formatter::Default" ); use_ok( "Wiki::Toolkit::Plugin" ); use_ok( "Wiki::Toolkit::Search::Base" ); eval { require DBIx::FullTextSearch; }; SKIP: { skip "DBIx::FullTextSearch not installed", 1 if $@; use_ok( "Wiki::Toolkit::Search::DBIxFTS" ); } eval { require Search::InvertedIndex; }; SKIP: { skip "Search::InvertedIndex not installed", 2 if $@; use_ok( "Wiki::Toolkit::Search::SII" ); use_ok( "Wiki::Toolkit::Setup::SII" ); } eval { require Plucene; }; SKIP: { skip "Plucene not installed", 1 if $@; use_ok( "Wiki::Toolkit::Search::Plucene" ); } eval { require Lucy; }; SKIP: { skip "Lucy not installed", 1 if $@; use_ok( "Wiki::Toolkit::Search::Lucy" ); } use_ok( "Wiki::Toolkit::Setup::MySQL" ); use_ok( "Wiki::Toolkit::Setup::Pg" ); use_ok( "Wiki::Toolkit::Setup::SQLite" ); use_ok( "Wiki::Toolkit::Store::Database" ); use_ok( "Wiki::Toolkit::Store::MySQL" ); use_ok( "Wiki::Toolkit::Store::Pg" ); use_ok( "Wiki::Toolkit::Store::SQLite" ); use_ok( "Wiki::Toolkit::Formatter::Multiple" ); Wiki-Toolkit-0.83/t/027_list_last_version_before.t0000644000175000017500000000563012243757607022006 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 21 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Add three base nodes foreach my $name ( qw( Carrots Handbags Cheese ) ) { $wiki->write_node( $name, "content" ) or die "Can't write node"; } # Add three more versions of Cheese my %node = $wiki->retrieve_node("Cheese"); $wiki->write_node("Cheese", "Content v2", $node{checksum}, { "foo" => "bar" } ) or die "Can't write node"; %node = $wiki->retrieve_node("Cheese"); $wiki->write_node("Cheese", "Content v3", $node{checksum}, { "foo" => "bar", "bar" => "foo" } ) or die "Can't write node"; %node = $wiki->retrieve_node("Cheese"); $wiki->write_node("Cheese", "Content v4", $node{checksum} ) or die "Can't write node"; # Nobble the dates on these $wiki->store->dbh->do("UPDATE content SET modified='2006-10-01' WHERE version = 1"); $wiki->store->dbh->do("UPDATE content SET modified='2006-09-04' WHERE version = 1 and node_id = 2"); $wiki->store->dbh->do("UPDATE content SET modified='2006-10-02' WHERE version = 2"); $wiki->store->dbh->do("UPDATE content 
SET modified='2006-10-03' WHERE version = 3"); $wiki->store->dbh->do("UPDATE content SET modified='2006-11-01' WHERE version = 4"); # Fetch everything before 2007 my @all = $wiki->list_last_version_before('2007-01-01'); is( scalar @all, 3, "list_last_version_before gives the right number back" ); # Check them is( $all[0]->{'version'}, 1, "right ordering" ); is( $all[1]->{'version'}, 1, "right ordering" ); is( $all[2]->{'version'}, 4, "right ordering" ); is( $all[0]->{'name'}, 'Carrots', "right ordering" ); is( $all[1]->{'name'}, 'Handbags', "right ordering" ); is( $all[2]->{'name'}, 'Cheese', "right ordering" ); # Now before 2006-10-02 @all = $wiki->list_last_version_before('2006-10-02'); is( scalar @all, 3, "list_last_version_before gives the right number back" ); is( $all[0]->{'version'}, 1, "right ordering" ); is( $all[1]->{'version'}, 1, "right ordering" ); is( $all[2]->{'version'}, 2, "right ordering" ); is( $all[0]->{'name'}, 'Carrots', "right ordering" ); is( $all[1]->{'name'}, 'Handbags', "right ordering" ); is( $all[2]->{'name'}, 'Cheese', "right ordering" ); # Now before 2006-09-10 @all = $wiki->store->list_last_version_before('2006-09-10'); is( scalar @all, 3, "list_last_version_before gives the right number back" ); is( $all[0]->{'version'}, undef, "right ordering" ); is( $all[1]->{'version'}, 1, "right ordering" ); is( $all[2]->{'version'}, undef, "right ordering" ); is( $all[0]->{'name'}, 'Carrots', "right ordering" ); is( $all[1]->{'name'}, 'Handbags', "right ordering" ); is( $all[2]->{'name'}, 'Cheese', "right ordering" ); } Wiki-Toolkit-0.83/t/011_recent_changes.t0000644000175000017500000002001512243757607017654 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 27 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Put some test data in. ##### Test recent_changes. We'll write these nodes, in this order, ##### with this metadata, sleeping for at least a second in between ##### writes, and then run the tests. ##### ##### Node1 (Kake, minor tidying) ##### Everyone's Favourite Hobby (nou) ##### Another Node (nou) my $start_time = time; do_sleep(); my $node = "Node1"; my %node_data = $wiki->retrieve_node( $node ); $wiki->write_node( $node, "data", $node_data{checksum} , { username => "Kake", edit_type => "Minor tidying", comment => "Test", } ); do_sleep(); $node = "Everyone's Favourite Hobby"; %node_data = $wiki->retrieve_node( $node ); $wiki->write_node( $node, "hobby data", $node_data{checksum}, { comment => "Test", edit_type => "Normal edit", } ); do_sleep(); $node = "Another Node"; %node_data = $wiki->retrieve_node( $node ); $wiki->write_node( $node, "another node 1", $node_data{checksum}, { username => "nou", comment => "Test", edit_type => "Normal edit", } ); ##### ##### Ready to run the tests now. ##### # Test by "in last n days". my @nodes = $wiki->list_recent_changes( days => 1 ); my @nodenames = map { $_->{name} } @nodes; my %unique = map { $_ => 1 } @nodenames; is_deeply( [sort keys %unique], ["Another Node", "Everyone's Favourite Hobby", "Node1"], "recent_changes for last 1 day gets the right results" ); is_deeply( \@nodenames, ["Another Node", "Everyone's Favourite Hobby", "Node1"], "...in the right order" ); # returns in reverse chron. 
order foreach my $node ( @nodes ) { is( ref $node->{metadata}{comment}, "ARRAY", "...metadata is returned as a hash of array refs" ); my @comments = @{$node->{metadata}{comment}}; is( $comments[0], "Test", "...correct metadata is returned" ); } # Test by "last n nodes changed". @nodes = $wiki->list_recent_changes( last_n_changes => 2 ); @nodenames = map { $_->{name} } @nodes; print "# Found nodes: " . join(" ", @nodenames) . "\n"; is_deeply( \@nodenames, ["Another Node", "Everyone's Favourite Hobby"], "recent_changes 'last_n_changes' works" ); eval { $wiki->list_recent_changes( last_n_changes => "foo" ); }; ok( $@, "...and croaks on bad input" ); # Test by "since time T". @nodes = $wiki->list_recent_changes( since => $start_time ); @nodenames = sort map { $_->{name} } @nodes; is_deeply( \@nodenames, ["Another Node", "Everyone's Favourite Hobby", "Node1"], "recent_changes 'since' returns the right results" ); ok( $nodes[0]{last_modified}, "...and a plausible (not undef or empty) last_modified timestamp"); # Test metadata_is. (We only actually expect a single result.) @nodes = $wiki->list_recent_changes( last_n_changes => 2, metadata_is => { username => "Kake" } ); @nodenames = map { $_->{name} } @nodes; print "# Found nodes: " . join(" ", @nodenames) . "\n"; is( scalar @nodes, 1, "metadata_is does constrain the search" ); is( $nodes[0]{name}, "Node1", "...correctly" ); # Test metadata_isnt. @nodes = $wiki->list_recent_changes( last_n_changes => 1, metadata_isnt => { username => "Kake" } ); is( scalar @nodes, 1, "metadata_isnt, too" ); is( $nodes[0]{name}, "Another Node", "...correctly" ); print "# " . join(" ", map { $_->{name} } @nodes) . "\n"; @nodes = $wiki->list_recent_changes( last_n_changes => 1, metadata_isnt => { edit_type => "Minor tidying" } ); @nodenames = map { $_->{name} } @nodes; print "# Found nodes: " . join(" ", @nodenames) . "\n"; is( scalar @nodes, 1, "metadata_isnt includes nodes where this metadata type isn't set" ); is( $nodes[0]{name}, "Another Node", "...correctly" ); eval { @nodes = $wiki->list_recent_changes( last_n_changes => 1, metadata_isnt => { arthropod => "millipede" } ); }; is( $@, "", "list_recent_changes doesn't die when metadata_isnt doesn't omit anything" ); ##### ##### Write to Another Node again for testing metadata_was and the ##### effect of the presence and absence of include_all_changes ##### do_sleep(); $node = "Another Node"; %node_data = $wiki->retrieve_node( $node ); $wiki->write_node( $node, "another node 2", $node_data{checksum}, { username => "Kake", comment => "Kake writes the node that nou wrote", edit_type => "Minor tidying", } ); @nodes = $wiki->list_recent_changes( days => 1 ); @nodenames = map { $_->{name} } @nodes; print "# Found nodes: " . join(" ", @nodenames) . "\n"; is( scalar @nodes, 3, "By default each node returned only once however many times changed" ); @nodes = $wiki->list_recent_changes( days => 1, include_all_changes => 1 ); is( scalar @nodes, 4, "...returned more than once when 'include_all_changes' set" ); @nodes = $wiki->list_recent_changes( last_n_changes => 5, metadata_was => { username => "nou" } ); @nodenames = map { $_->{name} } @nodes; print "# Found nodes: " . join(" ", @nodenames) . 
"\n"; is( scalar @nodes, 1, "metadata_was returns nodes whose current version doesn't match" ); is( $nodes[0]{name}, "Another Node", "...correctly" ); is( $nodes[0]{version}, 1, "...and the correct version" ); ##### Testing metadata_wasnt - Everyone's Favourite Hobby and ##### Another Node were both written as *not* minor edits, but ##### then a minor edit was made on Another Node. We expect ##### metadata_wasnt to still return Another Node though. @nodes = $wiki->list_recent_changes( last_n_changes => 5, metadata_wasnt => { edit_type => "Minor tidying", }, ); @nodenames = map { $_->{name} } @nodes; print "# Found nodes: " . join(" ", @nodenames) . "\n"; is( scalar @nodes, 2, "metadata_wasnt returns nodes whose current version matches" ); # lets add yet another normal edit to check proper display of multple normal edits do_sleep(); $node = "Another Node"; %node_data = $wiki->retrieve_node( $node ); $wiki->write_node( $node, "another node 3", $node_data{checksum}, { username => "bob", comment => "bob writes the node that nou and Kake aready wrote", edit_type => "Normal edit", } ); @nodenames = map { $_->{name} } @nodes; print "# Found nodes: " . join(" ", @nodenames) . "\n"; @nodes = $wiki->list_recent_changes( days => 1 ); is( scalar @nodes, 3, "By default each node returned only once however many times changed" ); @nodes = $wiki->list_recent_changes( days => 1, include_all_changes => 1 ); is( scalar @nodes, 5, "...returned more than once when 'include_all_changes' set" ); } sub do_sleep { my $slept = sleep(2); warn "Slept for less than a second, test results may be unreliable" unless $slept >= 1; } Wiki-Toolkit-0.83/t/700_lucy_bug.t0000644000175000017500000000235412243757607016530 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; # Test for bug discovered in version 0.02 of Wiki::Toolkit::Search::Lucy # - if you wrote a node called Foo Bar and then another one called Foo, # it would delete Foo Bar from the index. if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 2 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { SKIP: { print "# Store is: " . ( ref $wiki->store ) . "\n"; print "# Search is: " . ( ref $wiki->search_obj ) . "\n"; skip "No search configured for this backend", 2 unless ref $wiki->search_obj; # Put some test data in. $wiki->write_node( "Foo Bar", "baz" ) or die "Couldn't write node"; $wiki->write_node( "Foo", "quux" ) or die "Couldn't write node"; my %results = $wiki->search_nodes( "baz" ); ok( defined $results{"Foo Bar"}, "Search doesn't forget about Foo Bar when we write Foo" ); %results = $wiki->search_nodes( "quux" ); ok( defined $results{Foo}, "...and it remembers Foo too" ); } } Wiki-Toolkit-0.83/t/016_recent_changes_between.t0000644000175000017500000000361212243757607021376 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::Store::Database; use Wiki::Toolkit::TestLib; use DBI; use Test::More; use Time::Piece; use Time::Seconds; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 1 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Write directly to the database so we can fake having written something # a week ago (white box testing). 
my $dbh = $wiki->store->dbh; my $content_sth = $dbh->prepare("INSERT INTO content (node_id,version,text,modified) VALUES (?,?,?,?)"); my $node_sth = $dbh->prepare("INSERT INTO node (id,name,version,text,modified) VALUES (?,?,?,?,?)"); my $now = localtime; # overloaded by Time::Piece my $two_days_ago = $now - (2 * ONE_DAY ); my $week_ago = $now - ONE_WEEK; my $ts_now = Wiki::Toolkit::Store::Database->_get_timestamp; my $ts_two_days_ago = Wiki::Toolkit::Store::Database->_get_timestamp( $two_days_ago ); my $ts_week_ago = Wiki::Toolkit::Store::Database->_get_timestamp( $week_ago ); $node_sth->execute( 10, "Home", 3, "foo", $ts_now ) or die $dbh->errstr; $content_sth->execute( 10, 1, "foo", $ts_week_ago ) or die $dbh->errstr; $content_sth->execute( 10, 2, "foo", $ts_two_days_ago ) or die $dbh->errstr; $content_sth->execute( 10, 3, "foo", $ts_now ) or die $dbh->errstr; my @nodes = $wiki->list_recent_changes( include_all_changes => 1, between_days => [ 1, 3 ], ); is (scalar @nodes, 1, "between_days flag to list_recent_changes works" ); } Wiki-Toolkit-0.83/t/019_recent_changes_case.t0000644000175000017500000000235512243757607020666 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 2 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { print "# Store type: " . ref($wiki->store) . "\n"; $wiki->write_node( "Test 1", "test", undef, { username => "Earle", } ); my @nodes = $wiki->list_recent_changes( days => 7, metadata_was => { username => "earle", }, ignore_case => 1, ); my @names = sort map { $_->{name} } @nodes; is_deeply( \@names, [ "Test 1" ], "ignore_case => 1 ignores case of metadata value" ); @nodes = $wiki->list_recent_changes( days => 7, metadata_was => { Username => "Earle", }, ignore_case => 1, ); @names = sort map { $_->{name} } @nodes; is_deeply( \@names, [ "Test 1" ], "ignore_case => 1 ignores case of metadata type" ); } Wiki-Toolkit-0.83/t/301_feed_atom_add_test_data.t0000644000175000017500000000454012243757607021476 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestConfig::Utilities; use Wiki::Toolkit; use Test::More tests => $Wiki::Toolkit::TestConfig::Utilities::num_stores; # Add test data to the stores. my %stores = Wiki::Toolkit::TestConfig::Utilities->stores; my ($store_name, $store); while ( ($store_name, $store) = each %stores ) { SKIP: { skip "$store_name storage backend not configured for testing", 1 unless $store; print "#\n##### TEST CONFIG: Store: $store_name\n#\n"; my $wiki = Wiki::Toolkit->new( store => $store ); # Write two versions of one node # The recent changes should only show it once though $wiki->write_node( "Old Node", "First version of Old Node" ); my %old_node = $wiki->retrieve_node("Old Node"); $wiki->write_node( "Old Node", "We will write at least 15 nodes after this one", $old_node{'checksum'} ); my $slept = sleep(2); warn "Slept for less than a second, 'days=n' test may pass even if buggy" unless $slept >= 1; for my $i ( 1 .. 
15 ) { $wiki->write_node( "Temp Node $i", "foo" ); } $slept = sleep(2); warn "Slept for less than a second, test results may not be trustworthy" unless $slept >= 1; $wiki->write_node( "Test Node 1", "Just a plain test", undef, { username => "Kake", comment => "new node", category => [ 'TestCategory1', 'Meta' ] } ); $slept = sleep(2); warn "Slept for less than a second, 'items=n' test may fail" unless $slept >= 1; $wiki->write_node( "Calthorpe Arms", "CAMRA-approved pub near King's Cross", undef, { comment => "Stub page, please update!", username => "Kake", postcode => "WC1X 8JR", locale => [ "Bloomsbury" ] } ); $wiki->write_node( "Test Node 2", "Gosh, another test!", undef, { username => "nou", comment => "This is a minor edit.", major_change => 0, } ); pass "$store_name test backend primed with test data"; } } Wiki-Toolkit-0.83/t/004_write_and_retrieve.t0000644000175000017500000000331212243757607020570 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 12 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Test a simple write and retrieve. ok( $wiki->write_node("A Node", "Node content."), "write_node can create a node" ); is( $wiki->retrieve_node("A Node"), "Node content.", "retrieve_node can retrieve it" ); # Test calling syntax of ->retrieve_node. eval { $wiki->retrieve_node; }; ok( $@, "retrieve_node dies if we don't tell it a node parameter" ); is( $wiki->retrieve_node(name => "A Node"), "Node content.", "retrieve_node still works if we supply params as a hash" ); is( $wiki->retrieve_node(name => "A Node", version => 1), "Node content.", "...still works if we supply a version param" ); my %node_data = $wiki->retrieve_node("A Node"); is( $node_data{content}, "Node content.", "...still works when called in list context" ); foreach (qw( last_modified version checksum )) { ok( defined $node_data{$_}, "...and $_ is defined" ); } # Test ->node_exists. ok( $wiki->node_exists("A Node"), "node_exists returns true for an existing node" ); ok( ! 
$wiki->node_exists("This Is A Nonexistent Node"), "...and false for a nonexistent one" ); # Test -> node_name_for_id my $id = $wiki->store->{_dbh}->selectrow_array("SELECT id FROM node WHERE name = 'A Node'"); is( "A Node", $wiki->store->node_name_for_id($id), "Can fetch the name of a node with a id" ); } Wiki-Toolkit-0.83/t/050_mysql_store.t0000644000175000017500000000337512243757607017302 0ustar vagrantvagrant#!/usr/bin/perl -w use strict; use Test::More tests => 13; use Wiki::Toolkit::TestConfig; my $class; BEGIN { $class = "Wiki::Toolkit::Store::MySQL"; use_ok($class); } eval { $class->new; }; ok( $@, "Failed creation dies" ); my %config = %{$Wiki::Toolkit::TestConfig::config{MySQL}}; my ($dbname, $dbuser, $dbpass, $dbhost) = @config{qw(dbname dbuser dbpass dbhost)}; SKIP: { skip "No MySQL database configured for testing", 11 unless $dbname; my $store = eval { $class->new( dbname => $dbname, dbuser => $dbuser, dbpass => $dbpass, dbhost => $dbhost ); }; is( $@, "", "Creation succeeds with connection parameters" ); isa_ok( $store, $class ); ok( $store->dbh, "...and has set up a database handle" ); my $dsn = "dbi:mysql:$dbname"; $dsn .= ";host=$dbhost" if $dbhost; my $dbh = DBI->connect( $dsn, $dbuser, $dbpass ); my $evil_store = eval { $class->new( dbh => $dbh ); }; is( $@, "", "Creation succeeds with dbh" ); isa_ok( $evil_store, $class ); ok( $evil_store->dbh, "...and we can retrieve the database handle" ); # White box test - do internal locking functions work the way we expect? ok( $store->_lock_node("Home"), "Can lock a node" ); ok( ! $evil_store->_lock_node("Home"), "...and now other people can't get a lock on it" ); ok( ! $evil_store->_unlock_node("Home"), "...or unlock it" ); ok( $store->_unlock_node("Home"), "...but I can unlock it" ); ok( $evil_store->_lock_node("Home"), "...and now other people can lock it" ); # Cleanup (not necessary, since this thread is about to die, but here # in case I forget and add some more tests at the end). 
$evil_store->_unlock_node("Home"); } Wiki-Toolkit-0.83/t/025_list_node_all_versions.t0000644000175000017500000001306712243757607021462 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 59 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Add three base nodes foreach my $name ( qw( Carrots Handbags Cheese ) ) { $wiki->write_node( $name, "content" ) or die "Can't write node"; } # Add three more versions of Cheese my %node = $wiki->retrieve_node("Cheese"); $wiki->write_node("Cheese", "Content v2", $node{checksum}, { "foo" => "bar" } ) or die "Can't write node"; %node = $wiki->retrieve_node("Cheese"); $wiki->write_node("Cheese", "Content v3", $node{checksum}, { "foo" => "bar", "bar" => "foo" } ) or die "Can't write node"; %node = $wiki->retrieve_node("Cheese"); $wiki->write_node("Cheese", "Content v4", $node{checksum} ) or die "Can't write node"; # Fetch all the versions my @all_versions = $wiki->list_node_all_versions("Cheese"); is( scalar @all_versions, 4, "list_node_all_versions gives the right number back" ); # Check them is( $all_versions[0]->{'version'}, 4, "right ordering" ); is( $all_versions[1]->{'version'}, 3, "right ordering" ); is( $all_versions[2]->{'version'}, 2, "right ordering" ); is( $all_versions[3]->{'version'}, 1, "right ordering" ); is( $all_versions[0]->{'name'}, "Cheese", "right node" ); is( $all_versions[1]->{'name'}, "Cheese", "right node" ); is( $all_versions[2]->{'name'}, "Cheese", "right node" ); is( $all_versions[3]->{'name'}, "Cheese", "right node" ); # Fetch with content too @all_versions = $wiki->list_node_all_versions( name => "Cheese", with_content => 1 ); is( scalar @all_versions, 4, "list_node_all_versions gives the right number back" ); # Check them is( $all_versions[0]->{'version'}, 4, "right ordering" ); is( $all_versions[1]->{'version'}, 3, "right ordering" ); is( $all_versions[2]->{'version'}, 2, "right ordering" ); is( $all_versions[3]->{'version'}, 1, "right ordering" ); is( $all_versions[0]->{'name'}, "Cheese", "right node" ); is( $all_versions[1]->{'name'}, "Cheese", "right node" ); is( $all_versions[2]->{'name'}, "Cheese", "right node" ); is( $all_versions[3]->{'name'}, "Cheese", "right node" ); is( $all_versions[0]->{'content'}, "Content v4", "right node" ); is( $all_versions[1]->{'content'}, "Content v3", "right node" ); is( $all_versions[2]->{'content'}, "Content v2", "right node" ); is( $all_versions[3]->{'content'}, "content", "right node" ); # With metadata, but not content @all_versions = $wiki->list_node_all_versions( name => "Cheese", with_content => 0, with_metadata => 1 ); is( scalar @all_versions, 4, "list_node_all_versions gives the right number back" ); # Check them is( $all_versions[0]->{'version'}, 4, "right ordering" ); is( $all_versions[1]->{'version'}, 3, "right ordering" ); is( $all_versions[2]->{'version'}, 2, "right ordering" ); is( $all_versions[3]->{'version'}, 1, "right ordering" ); is( $all_versions[0]->{'name'}, "Cheese", "right node" ); is( $all_versions[1]->{'name'}, "Cheese", "right node" ); is( $all_versions[2]->{'name'}, "Cheese", "right node" ); is( $all_versions[3]->{'name'}, "Cheese", "right node" ); is( $all_versions[0]->{'content'}, undef, "right node" ); is( $all_versions[1]->{'content'}, undef, "right node" ); is( $all_versions[2]->{'content'}, undef, "right node" 
); is( $all_versions[3]->{'content'}, undef, "right node" ); my %md_1 = (); my %md_2 = (foo=>'bar'); my %md_3 = (foo=>'bar',bar=>'foo'); my %md_4 = (); is_deeply( $all_versions[0]->{'metadata'}, \%md_4, "right metadata" ); is_deeply( $all_versions[1]->{'metadata'}, \%md_3, "right metadata" ); is_deeply( $all_versions[2]->{'metadata'}, \%md_2, "right metadata" ); is_deeply( $all_versions[3]->{'metadata'}, \%md_1, "right metadata" ); # With both @all_versions = $wiki->list_node_all_versions( name => "Cheese", with_content => 1, with_metadata => 1 ); is( scalar @all_versions, 4, "list_node_all_versions gives the right number back" ); # Check them is( $all_versions[0]->{'version'}, 4, "right ordering" ); is( $all_versions[1]->{'version'}, 3, "right ordering" ); is( $all_versions[2]->{'version'}, 2, "right ordering" ); is( $all_versions[3]->{'version'}, 1, "right ordering" ); is( $all_versions[0]->{'name'}, "Cheese", "right node" ); is( $all_versions[1]->{'name'}, "Cheese", "right node" ); is( $all_versions[2]->{'name'}, "Cheese", "right node" ); is( $all_versions[3]->{'name'}, "Cheese", "right node" ); is( $all_versions[0]->{'content'}, "Content v4", "right node" ); is( $all_versions[1]->{'content'}, "Content v3", "right node" ); is( $all_versions[2]->{'content'}, "Content v2", "right node" ); is( $all_versions[3]->{'content'}, "content", "right node" ); is_deeply( $all_versions[0]->{'metadata'}, \%md_4, "right metadata" ); is_deeply( $all_versions[1]->{'metadata'}, \%md_3, "right metadata" ); is_deeply( $all_versions[2]->{'metadata'}, \%md_2, "right metadata" ); is_deeply( $all_versions[3]->{'metadata'}, \%md_1, "right metadata" ); # Finally, check that we still only have 1 version of the carrots node my @carrots_versions = $wiki->list_node_all_versions( name => "Carrots", with_content => 1, with_metadata => 1 ); is( scalar @carrots_versions, 1, "list_node_all_versions gives the right number back" ); is( $carrots_versions[0]->{'version'}, 1, "right ordering" ); is( $carrots_versions[0]->{'name'}, "Carrots", "right node" ); } Wiki-Toolkit-0.83/t/150_plugins.t0000644000175000017500000002463012243757607016400 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 28 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { SKIP: { eval { require Test::MockObject; }; skip "Test::MockObject not installed", 28 if $@; my $null_plugin = Test::MockObject->new; my $plugin = Test::MockObject->new; $plugin->mock( "on_register", sub { my $self = shift; $self->{__registered} = 1; $self->{__seen_nodes} = [ ]; $self->{__deleted_nodes} = [ ]; $self->{__moderated_nodes} = [ ]; $self->{__pre_moderated_nodes} = [ ]; $self->{__pre_write_nodes} = [ ]; $self->{__pre_retrieve_nodes} = [ ]; } ); eval { $wiki->register_plugin; }; ok( $@, "->register_plugin dies if no plugin supplied" ); eval { $wiki->register_plugin( plugin => $null_plugin ); }; is( $@, "", "->register_plugin doesn't die if plugin which can't on_register supplied" ); eval { $wiki->register_plugin( plugin => $plugin ); }; is( $@, "", "->register_plugin doesn't die if plugin which can on_register supplied" ); ok( $plugin->{__registered}, "->on_register method called" ); my @registered = $wiki->get_registered_plugins; is( scalar @registered, 2, "->get_registered_plugins returns right number" ); ok( ref $registered[0], 
"...and they're objects" ); my $regref = $wiki->get_registered_plugins; is( ref $regref, "ARRAY", "...returns arrayref in scalar context" ); # =========================================================================== # Test the post_write (adding/updating a node) plugin call # (Writes a node, and ensures the post_write plugin was called # with the appropriate options) $plugin->mock( "post_write", sub { my ($self, %args) = @_; push @{ $self->{__seen_nodes} }, { node => $args{node}, node_id => $args{node_id}, version => $args{version}, content => $args{content}, metadata => $args{metadata} }; } ); $wiki->write_node( "Test Node", "foo", undef, {bar => "baz"} ) or die "Can't write node"; ok( $plugin->called("post_write"), "->post_write method called" ); my @seen = @{ $plugin->{__seen_nodes} }; is_deeply( $seen[0], { node => "Test Node", node_id => 1, version => 1, content => "foo", metadata => { bar => "baz" } }, "...with the right arguments" ); # =========================================================================== # Test the post_delete (deletion) plugin call # (Deletes nodes with and without versions, and ensured that # post_delete was called with the appropriate options) $plugin->mock( "post_delete", sub { my ($self, %args) = @_; push @{ $self->{__deleted_nodes} }, { node => $args{node}, node_id => $args{node_id}, version => $args{version}, }; } ); # Delete with a version $wiki->delete_node( name=>"Test Node", version=>1 ) or die "Can't delete node"; ok( $plugin->called("post_delete"), "->post_delete method called" ); my @deleted = @{ $plugin->{__deleted_nodes} }; is_deeply( $deleted[0], { node => "Test Node", node_id => 1, version => undef }, "...with the right arguments" ); $plugin->{__deleted_nodes} = []; # Now add a two new versions my %node = $wiki->retrieve_node("Test Node 2"); $wiki->write_node( "Test Node 2", "bar", $node{checksum} ) or die "Can't write second version node"; %node = $wiki->retrieve_node("Test Node 2"); $wiki->write_node( "Test Node 2", "foofoo", $node{checksum} ) or die "Can't write second version node"; # Delete newest with a version $wiki->delete_node( name=>"Test Node 2", version=>2 ) or die "Can't delete node"; ok( $plugin->called("post_delete"), "->post_delete method called" ); @deleted = @{ $plugin->{__deleted_nodes} }; is_deeply( $deleted[0], { node => "Test Node 2", node_id => 2, version => 2 }, "...with the right arguments" ); # And delete without a version $wiki->delete_node( name=>"Test Node 2" ) or die "Can't delete node"; ok( $plugin->called("post_delete"), "->post_delete method called" ); @deleted = @{ $plugin->{__deleted_nodes} }; is_deeply( $deleted[1], { node => "Test Node 2", node_id => 2, version => undef }, "...with the right arguments" ); # =========================================================================== # Test the moderation plugins # (Adds nodes that require moderation and moderates them, # ensuring pre_moderate and post_moderate are called with # the appropriate options) $plugin->mock( "pre_moderate", sub { my ($self, %args) = @_; push @{ $self->{__pre_moderated_nodes} }, { node => ${$args{node}}, version => ${$args{version}} }; } ); $plugin->mock( "post_moderate", sub { my ($self, %args) = @_; push @{ $self->{__moderated_nodes} }, { node => $args{node}, node_id => $args{node_id}, version => $args{version}, }; } ); # Add $wiki->write_node( "Test Node 3", "bar" ) or die "Can't write first version node"; # Moderate $wiki->moderate_node( name=>"Test Node 3", version=>1 ) or die "Can't moderate node"; ok( 
$plugin->called("pre_moderate"), "->pre_moderate method called" ); ok( $plugin->called("post_moderate"), "->post_moderate method called" ); my @pre_moderated = @{ $plugin->{__pre_moderated_nodes} }; is_deeply( $pre_moderated[0], { node => "Test Node 3", version => 1 }, "...with the right arguments" ); my @moderated = @{ $plugin->{__moderated_nodes} }; is_deeply( $moderated[0], { node => "Test Node 3", node_id => 3, version => 1 }, "...with the right arguments" ); # =========================================================================== # Test using pre_write to alter things # (Adds a pre_write plugin that alters the settings, writes, and # ensure that pre_write gets the unaltered stuff, and post_write # the altered) $plugin->mock( "pre_write", sub { my ($self, %args) = @_; # Tweak ${$args{node}} = "CHANGED_NAME"; ${$args{content}} = "Changed: ".${$args{content}}; ${$args{metadata}}->{foo} = "bar"; # Save push @{ $self->{__pre_write_nodes} }, { node => ${$args{node}}, content => ${$args{content}}, metadata => ${$args{metadata}}, }; } ); $wiki->write_node( "Test Node", "foo", undef, {bar => "baz"} ) or die "Can't write node with pre_write"; ok( $plugin->called("pre_write"), "->pre_write method called" ); my @changed = @{ $plugin->{__pre_write_nodes} }; is_deeply( $changed[0], { node => "CHANGED_NAME", content => "Changed: foo", metadata => { bar=>"baz", foo=>"bar" } }, "...with the right (changed) arguments" ); @seen = @{ $plugin->{__seen_nodes} }; is_deeply( $seen[4], { node => "CHANGED_NAME", node_id => 4, version => 1, content => "Changed: foo", metadata => { bar=>"baz", foo=>"bar" } }, "...with the right (changed) arguments" ); # =========================================================================== # Test using pre_retrieve to alter things # (Adds a pre_retrieve plugin that alters the settings, and # ensure that pre_retrieve gets the unaltered stuff, and the read # gets the altered) # Do a normal fetch my %nv = $wiki->retrieve_node(name=>"CHANGED_NAME",version=>1); # Register the plugin $plugin->mock( "pre_retrieve", sub { my ($self, %args) = @_; my $orig_node = ${$args{node}}; my $orig_ver = ${$args{version}}; # Tweak ${$args{node}} = "CHANGED_NAME"; ${$args{version}} = 1; # Save push @{ $self->{__pre_retrieve_nodes} }, { node => ${$args{node}}, version => ${$args{version}}, orig_node => $orig_node, orig_ver => $orig_ver, }; } ); # Do a fetch with no version my %dnv = $wiki->retrieve_node("foo"); my @ret = @{ $plugin->{__pre_retrieve_nodes} }; is_deeply( $ret[0], { node => "CHANGED_NAME", version => 1, orig_node => "foo", orig_ver => undef }, "...with the right (changed) arguments" ); is($dnv{'content'}, "Changed: foo", "Retrieve was altered" ); is_deeply( \%dnv, \%nv, "Retrieve was altered" ); # And with too high a version my %dv = $wiki->retrieve_node(name=>"foo", version=>22); @ret = @{ $plugin->{__pre_retrieve_nodes} }; is_deeply( $ret[1], { node => "CHANGED_NAME", version => 1, orig_node => "foo", orig_ver => 22 }, "...with the right (changed) arguments" ); is($dv{'content'}, "Changed: foo", "Retrieve was altered" ); is_deeply( \%dv, \%nv, "Retrieve was altered" ); } } Wiki-Toolkit-0.83/t/006_list_all.t0000644000175000017500000000250212243757607016514 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestLib; use Test::More; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 6 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = 
$iterator->new_wiki ) { foreach my $name ( qw( Carrots Handbags Cheese ) ) { $wiki->write_node( $name, "content" ) or die "Can't write node"; } my @all_nodes = $wiki->list_all_nodes; is( scalar @all_nodes, 3, "list_all_nodes returns the right number of nodes" ); is_deeply( [sort @all_nodes], [ qw( Carrots Cheese Handbags ) ], "...and the right ones, too" ); @all_nodes = $wiki->list_all_nodes(with_details=>1); is( scalar @all_nodes, 3, "list_all_nodes returns the right number of nodes" ); @all_nodes = sort { $a->{'name'} cmp $b->{'name'} } @all_nodes; is_deeply( $all_nodes[0], { name=>'Carrots', version=>'1', node_id=>'1', moderate=>'0' }, "...and the right ones, too" ); is_deeply( $all_nodes[1], { name=>'Cheese', version=>'1', node_id=>'3', moderate=>'0' }, "...and the right ones, too" ); is_deeply( $all_nodes[2], { name=>'Handbags', version=>'1', node_id=>'2', moderate=>'0' }, "...and the right ones, too" ); } Wiki-Toolkit-0.83/t/062_recent_changes_metadata_was.t0000644000175000017500000001151312243757607022377 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::Store::Database; use Wiki::Toolkit::TestLib; use DBI; use Test::More; use Time::Piece; use Time::Seconds; if ( scalar @Wiki::Toolkit::TestLib::wiki_info == 0 ) { plan skip_all => "no backends configured"; } else { plan tests => ( 4 * scalar @Wiki::Toolkit::TestLib::wiki_info ); } # These tests are related to ticket #41: http://www.wiki-toolkit.org/ticket/41 # "If you list the Recent Changes with minor edits excluded, then it # returns not only the most recent changes, but also some more ancient # changes to nodes that have been edited recently." # # We set things up so that we have the following nodes and edit types. All # nodes added "9 days ago". # - Red Lion, edited 3 days ago (normal) and today (minor) # - Blue Lion, edited 7 days ago (minor), 5 days ago (normal) & today (minor) my $iterator = Wiki::Toolkit::TestLib->new_wiki_maker; while ( my $wiki = $iterator->new_wiki ) { # Store the time now, so we have a timestamp which precedes "today"'s # additions and edits. my $start_time = time; my $slept = sleep(2); warn "Slept for less than a second; test results may be unreliable" unless $slept >= 1; # Set up the data and run the tests. setup_nodes( wiki => $wiki ); my @nodes = $wiki->list_recent_changes( days => 4, ); my @names = sort map { $_->{name} } @nodes; is_deeply( \@names, [ "Blue Lion", "Red Lion" ], "All nodes returned when no criteria given except time" ); @nodes = $wiki->list_recent_changes( days => 4, metadata_was => { edit_type => "Normal edit" }, ); my @names_vers = sort map { "$_->{name} (version $_->{version})" } @nodes; my %namehash = map { $_->{name} => 1 } @nodes; ok( !$namehash{"Blue Lion"}, "Normal edits only: nodes not returned if not edited recently enough" ); ok( $namehash{"Red Lion"}, "...those edited recently enough are returned"); is_deeply( \@names_vers, [ "Red Lion (version 2)" ], "...but only their versions within the stated time period" ); print "# Found nodes: " . join(", ", @names_vers) . "\n"; } sub get_timestamp { my %args = @_; my $days = $args{days}; my $now = localtime; # overloaded by Time::Piece my $time = $now - ( $days * ONE_DAY ); return Wiki::Toolkit::Store::Database->_get_timestamp( $time ); } sub setup_nodes { my %args = @_; my $wiki = $args{wiki}; # Write directly to the database so we can fake having # written something in the past (white box testing). It might be a good # idea at some point to factor this out into Wiki::Toolkit::TestLib, as # it's used in other tests too. 
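# (Summary added for clarity, not in the original distribution; the
# authoritative set-up is the SQL and write_node calls below.)
#   Red Lion:  v1 nine days ago (Normal edit), v2 three days ago (Normal
#              edit), then a v3 "Minor tidying" edit written today.
#   Blue Lion: v1 nine days ago (Normal edit), v2 seven days ago (Minor
#              tidying), v3 five days ago (Normal edit), then a v4
#              "Minor tidying" edit written today.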
my $dbh = $wiki->store->dbh; my $content_sth = $dbh->prepare( "INSERT INTO content (node_id,version,text,modified) VALUES (?,?,?,?)"); my $node_sth = $dbh->prepare( "INSERT INTO node (id,name,version,text,modified) VALUES (?,?,?,?,?)"); my $md_sth = $dbh->prepare( "INSERT INTO metadata (node_id,version,metadata_type,metadata_value) VALUES (?,?,?,?)"); # Red Lion first. $node_sth->execute( 10, "Red Lion", 2, "red 2", get_timestamp( days => 3 ) ) or die $dbh->errstr; $content_sth->execute( 10, 2, "red 2", get_timestamp( days => 3 ) ) or die $dbh->errstr; $content_sth->execute( 10, 1, "red 1", get_timestamp( days => 9 ) ) or die $dbh->errstr; $md_sth->execute( 10, 2, "edit_type", "Normal edit" ) or die $dbh->errstr; $md_sth->execute( 10, 1, "edit_type", "Normal edit" ) or die $dbh->errstr; # Now write it as per usual. my %data = $wiki->retrieve_node( "Red Lion" ); $wiki->write_node( "Red Lion", "red 3", $data{checksum}, { edit_type => [ "Minor tidying" ] } ) or die "Couldn't write Red Lion node"; # Now Blue Lion. $node_sth->execute( 20, "Blue Lion", 3, "blue 3", get_timestamp( days => 5 ) ) or die $dbh->errstr; $content_sth->execute( 20, 3, "blue 3", get_timestamp( days => 5 ) ) or die $dbh->errstr; $content_sth->execute( 20, 2, "blue 2", get_timestamp( days => 7 ) ) or die $dbh->errstr; $content_sth->execute( 20, 1, "blue 1", get_timestamp( days => 9 ) ) or die $dbh->errstr; $md_sth->execute( 20, 3, "edit_type", "Normal edit" ) or die $dbh->errstr; $md_sth->execute( 20, 2, "edit_type", "Minor tidying" ) or die $dbh->errstr; $md_sth->execute( 20, 1, "edit_type", "Normal edit" ) or die $dbh->errstr; # Now write it as per usual. %data = $wiki->retrieve_node( "Blue Lion" ); $wiki->write_node( "Blue Lion", "blue 4", $data{checksum}, { edit_type => [ "Minor tidying" ] } ) or die "Couldn't write Blue Lion node"; } Wiki-Toolkit-0.83/t/003_instantiate.t0000644000175000017500000000610112243757607017230 0ustar vagrantvagrantuse strict; use Wiki::Toolkit; use Wiki::Toolkit::TestLib; use Test::More tests => ( 1 + 3 * scalar @Wiki::Toolkit::TestLib::wiki_info ); # Test failed creation. Note that this has a few tests missing. eval { Wiki::Toolkit->new; }; ok( $@, "Creation dies if no store supplied" ); # Test successful creation, for each configured store/search combination. my @wiki_info = @Wiki::Toolkit::TestLib::wiki_info; foreach my $infoid ( @wiki_info ) { my %wiki_config; # Test store instantiation. my %datastore_info = %{ $infoid->{datastore_info} }; my $class = $datastore_info{class}; eval "require $class"; my $store = $class->new( %{ $datastore_info{params} } ); isa_ok( $store, $class ); $wiki_config{store} = $store; # Test search instantiation. SKIP: { skip "No search configured for this combination", 1 unless ( $infoid->{dbixfts_info} or $infoid->{sii_info} or $infoid->{plucene_path} or $infoid->{lucy_path} ); if ( $infoid->{dbixfts_info} ) { my %fts_info = %{ $infoid->{dbixfts_info} }; require Wiki::Toolkit::Store::MySQL; my %dbconfig = %{ $fts_info{db_params} }; my $dsn = Wiki::Toolkit::Store::MySQL->_dsn( $dbconfig{dbname}, $dbconfig{dbhost} ); my $dbh = DBI->connect( $dsn, $dbconfig{dbuser}, $dbconfig{dbpass}, { PrintError => 0, RaiseError => 1, AutoCommit => 1 } ) or die "Can't connect to $dbconfig{dbname} using $dsn: " .
DBI->errstr; require Wiki::Toolkit::Search::DBIxFTS; my $search = Wiki::Toolkit::Search::DBIxFTS->new( dbh => $dbh ); isa_ok( $search, "Wiki::Toolkit::Search::DBIxFTS" ); $wiki_config{search} = $search; } elsif ( $infoid->{sii_info} ) { my %sii_info = %{ $infoid->{sii_info} }; my $db_class = $sii_info{db_class}; my %db_params = %{ $sii_info{db_params} }; eval "require $db_class"; my $indexdb = $db_class->new( %db_params ); require Wiki::Toolkit::Search::SII; my $search = Wiki::Toolkit::Search::SII->new( indexdb => $indexdb ); isa_ok( $search, "Wiki::Toolkit::Search::SII" ); $wiki_config{search} = $search; } elsif ( $infoid->{plucene_path} ) { require Wiki::Toolkit::Search::Plucene; my $search = Wiki::Toolkit::Search::Plucene->new( path => $infoid->{plucene_path} ); isa_ok( $search, "Wiki::Toolkit::Search::Plucene" ); $wiki_config{search} = $search; } elsif ( $infoid->{lucy_path} ) { require Wiki::Toolkit::Search::Lucy; my $search = Wiki::Toolkit::Search::Lucy->new( path => $infoid->{lucy_path} ); isa_ok( $search, "Wiki::Toolkit::Search::Lucy" ); $wiki_config{search} = $search; } } # end of SKIP for no search # Test wiki instantiation. my $wiki = Wiki::Toolkit->new( %wiki_config ); isa_ok( $wiki, "Wiki::Toolkit" ); } Wiki-Toolkit-0.83/t/298_feed_rss_node_all_versions.t0000644000175000017500000000655712243757607022313 0ustar vagrantvagrantuse strict; use Wiki::Toolkit::TestConfig::Utilities; use Wiki::Toolkit; use URI::Escape; # Note - update the count in the skip block to match the number here. # We would put the number in a variable, but that doesn't seem to work. use Test::More tests => (3 + 14 * $Wiki::Toolkit::TestConfig::Utilities::num_stores); use_ok( "Wiki::Toolkit::Feed::RSS" ); eval { my $rss = Wiki::Toolkit::Feed::RSS->new; }; ok( $@, "new croaks if no wiki object supplied" ); eval { my $rss = Wiki::Toolkit::Feed::RSS->new( wiki => "foo" ); }; ok( $@, "new croaks if something that isn't a wiki object is supplied" ); my %stores = Wiki::Toolkit::TestConfig::Utilities->stores; my ($store_name, $store); while ( ($store_name, $store) = each %stores ) { SKIP: { skip "$store_name storage backend not configured for testing", 14 unless $store; print "#\n##### TEST CONFIG: Store: $store_name\n#\n"; my $wiki = Wiki::Toolkit->new( store => $store ); my %default_config = ( wiki => $wiki, site_name => "Wiki::Toolkit Test Site", make_node_url => sub { my $id = uri_escape($_[0]); my $version = $_[1] || ''; $version = uri_escape($version) if $version; "http://example.com/?id=$id;version=$version"; }, recent_changes_link => "http://example.com/recentchanges" ); my $rss = eval { Wiki::Toolkit::Feed::RSS->new( %default_config, site_url => "http://example.com/kakeswiki/" ); }; is( $@, "", "'new' doesn't croak if wiki object and mandatory parameters supplied" ); isa_ok( $rss, "Wiki::Toolkit::Feed::RSS" ); my $feed = eval { $rss->node_all_versions; }; is( $@, "", "->node_all_versions doesn't croak" ); # Should be empty to start with is( 1, $feed =~ /\<items\>\s+\<rdf:Seq\>\s*\<\/rdf:Seq\>\s*\<\/items\>/s, "empty list with no node name" ); # Now retry with a node name $feed = eval { $rss->node_all_versions(name=>'Test Node 1'); }; is( $@, "", "->node_all_versions doesn't croak with a name" ); # Should only have it once my @items = $feed =~ /(\<\/item\>)/g; is( scalar @items, 1, "Only found it once" ); # And should have the name like( $feed, qr|<title>Test Node 1</title>|, "Found right node" ); # And it should be the first version like( $feed, qr|<modwiki:version>1</modwiki:version>|, "And right version" ); # Now try again, with a 2 version node $feed = eval {
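# ('Old Node' is assumed, per this test suite's set-up, to exist with two
# stored versions; the assertions below then expect one feed item per
# version. Comment added for clarity, not in the original distribution.)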
$rss->node_all_versions(name=>'Old Node'); }; is( $@, "", "->node_all_versions doesn't croak with a name" ); # Check we found two versions @items = $feed =~ /(\<\/item\>)/g; is( scalar @items, 2, "Found it twice" ); # Both with the right name @items = $feed =~ /(<title>Old Node<\/title>)/g; is( scalar @items, 2, "Had the right name" ); # And the right version like( $feed, qr|<modwiki:version>2</modwiki:version>|, "And right version" ); like( $feed, qr|<modwiki:version>1</modwiki:version>|, "And right version" ); # And in the right order like( $feed, '/<modwiki:version>2<\/modwiki:version>.*<modwiki:version>1<\/modwiki:version>/s', "Right order" ); } } Wiki-Toolkit-0.83/t/100_formatting.t0000644000175000017500000000513512243757607017063 0ustar vagrantvagrantuse strict; use Wiki::Toolkit; use Wiki::Toolkit::TestConfig::Utilities; use Test::More tests => (9 * $Wiki::Toolkit::TestConfig::Utilities::num_stores); my %stores = Wiki::Toolkit::TestConfig::Utilities->stores; my ($store_name, $store); while ( ($store_name, $store) = each %stores ) { SKIP: { skip "$store_name storage backend not configured for testing", 9 unless $store; my ($wiki, $cooked); # Test that a Wiki object created without an explicit formatter sets # defaults sensibly in its default formatter. $wiki = Wiki::Toolkit->new( store => $store ); isa_ok( $wiki->formatter, "Wiki::Toolkit::Formatter::Default", "default formatter used if not specified" ); # White box testing. foreach my $want_defined ( qw( extended_links implicit_links allowed_tags macros node_prefix ) ) { ok( defined $wiki->{_formatter}{"_".$want_defined}, "...default set for $want_defined" ); } # Test that the implicit_links flag gets passed through right. my $raw = "This paragraph has StudlyCaps in."; $wiki = Wiki::Toolkit->new( store => $store, implicit_links => 1, node_prefix => "wiki.cgi?node=" ); $cooked = $wiki->format($raw); like( $cooked, qr!StudlyCaps</a>!, "StudlyCaps turned into link when we specify implicit_links=1" ); $wiki = Wiki::Toolkit->new( store => $store, implicit_links => 0, node_prefix => "wiki.cgi?node=" ); $cooked = $wiki->format($raw); unlike( $cooked, qr!StudlyCaps</a>!, "...but not when we specify implicit_links=0" ); # Test that we can use an alternative formatter.
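# (Illustrative sketch, added for clarity and not part of the original
# distribution: any object offering a suitable ->format method should be
# usable as a formatter here, so a minimal hand-rolled equivalent of the
# mock below might look like
#     package My::Shouty::Formatter;
#     sub new    { my $class = shift; return bless {}, $class; }
#     sub format { my ($self, $raw) = @_; return uc $raw; }
# The test that follows uses Test::MockObject to the same effect.)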
SKIP: { eval { require Test::MockObject; }; skip "Test::MockObject not installed", 1 if $@; my $mock = Test::MockObject->new(); $mock->mock( 'format', sub { my ($self, $raw) = @_; return uc( $raw ); } ); $wiki = Wiki::Toolkit->new( store => $store, formatter => $mock ); $cooked = $wiki->format( "in the [future] there will be <b>robots</b>"); is( $cooked, "IN THE [FUTURE] THERE WILL BE <B>ROBOTS</B>", "can use an alternative formatter" ); } } }