MediaWiki-Bot-5.006003/0000775000175000017500000000000012737341477013011 5ustar mikemikeMediaWiki-Bot-5.006003/Makefile.PL0000644000175000017500000000456012737341477014766 0ustar mikemike# This file was automatically generated by Dist::Zilla::Plugin::MakeMaker v6.005. use strict; use warnings; use 5.008; use ExtUtils::MakeMaker; my %WriteMakefileArgs = ( "ABSTRACT" => "a high-level bot framework for interacting with MediaWiki wikis", "AUTHOR" => "Dan Collins , Mike.lifeguard , Alex Rowe , Oleg Alexandrov , jmax.code , Stefan Petrea , kc2aei , bosborne\@alum.mit.edu, Brian Obio , patch and bug report contributors", "CONFIGURE_REQUIRES" => { "ExtUtils::MakeMaker" => 0 }, "DISTNAME" => "MediaWiki-Bot", "LICENSE" => "gpl", "MIN_PERL_VERSION" => "5.008", "NAME" => "MediaWiki::Bot", "PREREQ_PM" => { "Carp" => 0, "Constant::Generate" => 0, "Digest::MD5" => "2.39", "Encode" => 0, "Exporter" => 0, "File::Basename" => 0, "HTML::Entities" => "3.28", "LWP::Protocol::https" => "6.06", "List::Util" => 0, "MediaWiki::API" => "0.36", "Module::Pluggable" => 0, "strict" => 0, "warnings" => 0 }, "TEST_REQUIRES" => { "File::Spec" => 0, "IO::Handle" => 0, "IPC::Open3" => 0, "List::MoreUtils" => 0, "Test::Is" => 0, "Test::More" => "0.96", "Test::RequiresInternet" => 0, "Test::Warn" => 0, "blib" => "1.01", "utf8" => 0 }, "VERSION" => "5.006003", "test" => { "TESTS" => "t/*.t" } ); my %FallbackPrereqs = ( "Carp" => 0, "Constant::Generate" => 0, "Digest::MD5" => "2.39", "Encode" => 0, "Exporter" => 0, "File::Basename" => 0, "File::Spec" => 0, "HTML::Entities" => "3.28", "IO::Handle" => 0, "IPC::Open3" => 0, "LWP::Protocol::https" => "6.06", "List::MoreUtils" => 0, "List::Util" => 0, "MediaWiki::API" => "0.36", "Module::Pluggable" => 0, "Test::Is" => 0, "Test::More" => "0.96", "Test::RequiresInternet" => 0, "Test::Warn" => 0, "blib" => "1.01", "strict" => 0, "utf8" => 0, "warnings" => 0 ); unless ( eval { ExtUtils::MakeMaker->VERSION(6.63_03) } ) { delete $WriteMakefileArgs{TEST_REQUIRES}; delete $WriteMakefileArgs{BUILD_REQUIRES}; $WriteMakefileArgs{PREREQ_PM} = \%FallbackPrereqs; } delete $WriteMakefileArgs{CONFIGURE_REQUIRES} unless eval { ExtUtils::MakeMaker->VERSION(6.52) }; WriteMakefile(%WriteMakefileArgs); MediaWiki-Bot-5.006003/INSTALL0000644000175000017500000000220612737341477014040 0ustar mikemikeThis is the Perl distribution MediaWiki-Bot. Installing MediaWiki-Bot is straightforward. ## Installation with cpanm If you have cpanm, you only need one line: % cpanm MediaWiki::Bot If it does not have permission to install modules to the current perl, cpanm will automatically set up and install to a local::lib in your home directory. See the local::lib documentation (https://metacpan.org/pod/local::lib) for details on enabling it in your environment. ## Installing with the CPAN shell Alternatively, if your CPAN shell is set up, you should just be able to do: % cpan MediaWiki::Bot ## Manual installation As a last resort, you can manually install it. Download the tarball, untar it, then build it: % perl Makefile.PL % make && make test Then install it: % make install If your perl is system-managed, you can create a local::lib in your home directory to install modules to. For details, see the local::lib documentation: https://metacpan.org/pod/local::lib ## Documentation MediaWiki-Bot documentation is available as POD. 
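Once the module is installed, a short script is enough to talk to a wiki. The following is only a minimal sketch: the agent string is a placeholder to replace with something identifying your bot, and test.wikipedia.org is simply the wiki this distribution's own test suite talks to; the new() and get_text() calls mirror those tests.

    use strict;
    use warnings;
    use MediaWiki::Bot;

    # Construct a bot object pointed at a wiki (placeholder agent string).
    my $bot = MediaWiki::Bot->new({
        agent => 'MyBot/0.1 (contact URL here)',
        host  => 'test.wikipedia.org',
    });

    # Fetch the wikitext of a page and print it; get_text returns undef
    # if the page does not exist.
    my $wikitext = $bot->get_text('Main Page');
    print "$wikitext\n" if defined $wikitext;
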
You can run perldoc from a shell to read the documentation: % perldoc MediaWiki::Bot MediaWiki-Bot-5.006003/t/0000775000175000017500000000000012737341477013254 5ustar mikemikeMediaWiki-Bot-5.006003/t/01-api_error.t0000644000175000017500000000156712737341477015650 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 5; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $revid = $bot->get_last(q{User:Mike.lifeguard/doesn't exist}); # Leaves out the username, a required param ok(defined($bot->{error}), 'The error data is there'); is(ref $bot->{error}, 'HASH', 'The error data is a hash'); is($bot->{error}->{code}, 3, 'The right error code is there'); like($bot->{error}->{stacktrace}, qr/MediaWiki::Bot/, 'The stacktrace includes "MediaWiki::Bot"'); like($bot->{error}->{details}, qr/^rvbaduser_rvexcludeuser:.*rvexcludeuser/, 'The API error text was returned'); MediaWiki-Bot-5.006003/t/08-get_last.t0000644000175000017500000000107512737341477015471 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 2; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); cmp_ok($bot->get_last('User:Mike.lifeguard/06-get history.t', 'Not a real editor'), '>', 0, 'Find the last revision'); is($bot->get_last('User:Mike.lifeguard/06-get history.t', 'Mike.lifeguard'), undef, 'There are no revisions not by Mike.lifeguard'); MediaWiki-Bot-5.006003/t/20-assertion.t0000644000175000017500000000156612737341477015675 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 1; use MediaWiki::Bot qw(:constants); my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $rand = rand(); my $status = $bot->edit({ page => 'User:Mike.lifeguard/19-assert_edit.t', text => $rand, assert => 'bot', # was 'false', but AssertEdit isn't a standard extension }); SKIP: { skip q{Unexpected error: } . 
$bot->{error}->{details}, 1 if defined $bot->{error}->{code} and $bot->{error}->{code} == ERR_API and $bot->{error}->{details} !~ m{^assert\w+failed:}; is $status->{edit}->{result} => undef, 'Intentionally bad assertion' or diag explain { edit => $status, error => $bot->{error} }; } MediaWiki-Bot-5.006003/t/06-get_history.t0000644000175000017500000000263312737341477016226 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 4; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $title = 'User:Mike.lifeguard/06-get_history.t'; my @history = $bot->get_history($title, 2); is_deeply(\@history, [ { 'timestamp_time' => '00:17:05', 'revid' => 132956, 'comment' => qq{Protected "[[User:Mike.lifeguard/06-get history.t]]": history must be static (\x{200e}[edit=sysop] (indefinite) \x{200e}[move=sysop] (indefinite))}, 'timestamp_date' => '2012-05-09', 'minor' => 1, 'user' => 'Mike.lifeguard' }, { 'timestamp_time' => '00:16:54', 'revid' => 132955, 'comment' => 'Created page with "."', 'timestamp_date' => '2012-05-09', 'minor' => '', 'user' => 'Mike.lifeguard' } ], 'Loaded page history OK') or diag explain \@history; my $time = $history[0]->{'timestamp_time'}; my $date = $history[0]->{'timestamp_date'}; my ($timestamp, $user) = $bot->recent_edit_to_page($title); like($timestamp, qr/^\d{4}-\d{1,2}-\d{1,2}T\d\d:\d\d:\d\dZ$/, 'Timestamp formed properly'); is($timestamp, "${date}T${time}Z", 'Timestamp found OK'); is($user, 'Mike.lifeguard', 'User returned!'); # Unreported bug MediaWiki-Bot-5.006003/t/43-recentchanges.t0000644000175000017500000000513212737341477016475 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'en.wikipedia.org' => 80; use Test::More 0.96 tests => 2; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'en.wikipedia.org', }); subtest 'basic' => sub { my $tests_run = 0; { # General structure my @rc = $bot->recentchanges(); my @keys = qw(comment ns old_revid pageid rcid revid timestamp title type user); ok exists $rc[0]->{$_}, "$_ present in hashref" for @keys; $tests_run += @keys; foreach (@rc) { is( $_->{ns}, 0, 'ns 0 used by default'); $tests_run++; } } { # Test some constraints my $rows = 10; my $ns = [0, 1, 4]; my @rc = $bot->recentchanges($ns, $rows); is( scalar @rc, $rows, 'Returned the right number of rows'); $tests_run++; for my $i (0..$rows-1) { ok(grep($rc[$i]->{ns} == $_, @$ns), 'Right namespaces'); $tests_run++; like($rc[$i]->{timestamp}, qr/^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\dZ$/, 'Timestamp validates'); $tests_run++; like($rc[$i]->{type}, qr/^\w+$/, 'Type looks vaguely OK'); $tests_run++; cmp_ok(length $rc[$i]->{title}, '>', 0, 'Title looks vaguely OK'); $tests_run++; } } { # Test using an arrayref of namespace numbers, and the $options_hashref my $rows = 10; my $ns = 4; my @rc = $bot->recentchanges($ns, $rows, { hook => sub { my ($res) = @_; foreach my $hashref (@$res) { is($hashref->{ns}, $ns, 'Right namespace returned'); $tests_run++; } } }); } done_testing($tests_run); }; subtest 'new method signature' => sub { my @rc = $bot->recentchanges({ ns => 4, limit => 100 }); foreach my $hashref (@rc) { ok exists $hashref->{title} && length $hashref->{title}; } # Or, use a callback for incremental processing: $bot->recentchanges( { ns => [0,1], limit => 200 }, { hook => sub 
{ my ($res) = @_; foreach my $hashref (@$res) { ok exists $hashref->{title} && length $hashref->{title}, 'title is there'; ok exists $hashref->{ns} && ($hashref->{ns} == 0 || $hashref->{ns} == 1), 'ns 1/2'; } }}); }; MediaWiki-Bot-5.006003/t/31-is_g_blocked.t0000644000175000017500000000067212737341477016271 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 1; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); # 127.0.4.4 is almost certainly not blocked right now my $result = $bot->is_g_blocked('127.0.4.4'); ok(!$result, 'current global blocks'); MediaWiki-Bot-5.006003/t/48-get_image.t0000644000175000017500000000357012737341477015616 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More 0.96; use MediaWiki::Bot; my $t = __FILE__; plan eval q{ use Imager; use Imager::File::JPEG; 1 } ? (tests => 3) : (skip_all => q{Imager & Imager::File::JPEG required}); my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $image_name = 'File:Albert_Einstein_Head.jpg'; subtest 'no width, no height' => sub { plan tests => 4; my $data = $bot->get_image($image_name); ok $data, 'nonscaled image retrieved'; my $img = Imager->new; my $did_read = $img->read(data => $data); ok $did_read, 'retrieved nonscaled data is an image' or diag $img->errstr; is $img->getwidth(), 3250, 'nonscaled img has w 3,250'; is $img->getheight(), 4333, 'nonscaled img has h 4,333'; }; subtest 'supply a width' => sub { plan tests => 3; my $data = $bot->get_image($image_name, {width => 12}); ok $data, 'wscaled image retrieved'; my $img = Imager->new; my $did_read = $img->read(data => $data); ok $did_read, 'retrieved wscaled data is an image.' or diag $img->errstr; is $img->getwidth(), 12, 'wscaled img has w 12'; }; #supply a width & a not-to-scale height. These # should both be considered maximum dimensions, # and scale should be proportional. subtest 'supply a width and a not-to-scale height' => sub { plan tests => 4; my $data = $bot->get_image($image_name, {width => 200, height => 200}); ok $data, 'whscaled image retrieved'; my $img = Imager->new; my $did_read = $img->read(data => $data); ok $did_read, 'retrieved whscaled data is an image.' 
or diag $img->errstr; cmp_ok $img->getwidth(), '<=', 200, '200 height is max'; cmp_ok $img->getheight(), '<=', 200, '200 width is max'; }; MediaWiki-Bot-5.006003/t/47-global_image_usage.t0000644000175000017500000000232512737341477017457 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'commons.wikimedia.org' => 80; use Test::More tests => 3; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'commons.wikimedia.org', }); my $file = 'File:Example.jpg'; subtest 'default' => sub { plan tests => 5; my @pages = $bot->global_image_usage($file); ok( @pages, 'No error'); cmp_ok( scalar @pages, '>', 1, 'More than one result'); ok( defined($pages[0]), 'Something was returned'); isa_ok( $pages[0], 'HASH', 'Results are hashref'); is_deeply( [sort keys %{ $pages[0] }], [sort qw(title url wiki)], 'Has the right keys'); }; subtest 'limit' => sub { my $limit = 20; my @pages = $bot->global_image_usage($file, $limit); is scalar @pages, $limit, "$limit results returned"; }; subtest 'more' => sub { my $limit = 10000000; my @pages = $bot->global_image_usage('SadSmiley.svg', $limit, 1); cmp_ok scalar @pages, '<', $limit, "<$limit results returned"; }; MediaWiki-Bot-5.006003/t/00-compile.t0000644000175000017500000000237412737341477015312 0ustar mikemikeuse 5.006; use strict; use warnings; # this test was generated with Dist::Zilla::Plugin::Test::Compile 2.054 use Test::More; plan tests => 2 + ($ENV{AUTHOR_TESTING} ? 1 : 0); my @module_files = ( 'MediaWiki/Bot.pm', 'MediaWiki/Bot/Constants.pm' ); # no fake home requested my $inc_switch = -d 'blib' ? '-Mblib' : '-Ilib'; use File::Spec; use IPC::Open3; use IO::Handle; open my $stdin, '<', File::Spec->devnull or die "can't open devnull: $!"; my @warnings; for my $lib (@module_files) { # see L my $stderr = IO::Handle->new; my $pid = open3($stdin, '>&STDERR', $stderr, $^X, $inc_switch, '-e', "require q[$lib]"); binmode $stderr, ':crlf' if $^O eq 'MSWin32'; my @_warnings = <$stderr>; waitpid($pid, 0); is($?, 0, "$lib loaded ok"); shift @_warnings if @_warnings and $_warnings[0] =~ /^Using .*\bblib/ and not eval { require blib; blib->VERSION('1.01') }; if (@_warnings) { warn @_warnings; push @warnings, @_warnings; } } is(scalar(@warnings), 0, 'no warnings found') or diag 'got warnings: ', ( Test::More->can('explain') ? 
Test::More::explain(\@warnings) : join("\n", '', @warnings) ) if $ENV{AUTHOR_TESTING}; MediaWiki-Bot-5.006003/t/27-prefixindex.t0000644000175000017500000000126012737341477016211 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 4; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my @pages = $bot->prefixindex('User:Mike.lifeguard/27-prefixindex.t'); is(scalar @pages, 3, 'Correct number of pages'); is($pages[0]->{'title'}, 'User:Mike.lifeguard/27-prefixindex.t', 'Page 0 correct'); is($pages[1]->{'title'}, 'User:Mike.lifeguard/27-prefixindex.t/one', 'Page 1 correct'); is($pages[2]->{'title'}, 'User:Mike.lifeguard/27-prefixindex.t/two', 'Page 2 correct'); MediaWiki-Bot-5.006003/t/49-get_all_categories.t0000644000175000017500000000105112737341477017502 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 3; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my @categories = $bot->get_all_categories; ok(@categories, "Retrieved categories"); is(scalar @categories, 10, "Got right default number"); @categories = $bot->get_all_categories({max => 0}); is(scalar @categories, 500, "Got right maximum number"); MediaWiki-Bot-5.006003/t/25-sitematrix.t0000644000175000017500000000274712737341477016066 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 4; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); { # db->domain my @wikis = ('enwiktionary', 'bat-smgwiki', 'nonexistentwiki', 'meta', 'otrs-wiki', 'aawiki'); my $ought = [ 'en.wiktionary.org', # ok 'bat-smg.wikipedia.org', # ok undef, # doesn't exist 'meta.wikimedia.org', # ok undef, # private undef # closed ]; my $domains = $bot->db_to_domain(\@wikis); ok( @$domains, 'Something was returned'); is_deeply($domains, $ought, 'db->domain OK'); } { # domain->db my @domains = ('en.wiktionary.org', 'bat-smg.wikipedia.org', 'this.dont.exist', 'meta.wikimedia.org', 'otrs-wiki.wikimedia.org', 'aa.wikipedia.org'); my $wikis = $bot->domain_to_db(\@domains); my $ought = [ 'enwiktionary', # ok 'bat-smgwiki', # ok undef, # doesn't exist 'meta', # ok undef, # private undef # closed ]; ok( @$wikis, 'Something was returned'); is_deeply($wikis, $ought, 'domain->db OK'); } MediaWiki-Bot-5.006003/t/22-get_id.t0000644000175000017500000000071312737341477015114 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 2; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $result = $bot->get_id('Main Page'); is($result, 11791, 'Main Page found'); $result = $bot->get_text('egaP niaM'); is($result, undef, 'No page found'); MediaWiki-Bot-5.006003/t/16-last_active.t0000644000175000017500000000100312737341477016153 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 2; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => 
"MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $result = $bot->last_active('Mike.lifeguard'); like($result, qr/20\d{2}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/, 'last active'); is($bot->last_active('User:Mike.lifeguard'), $result, 'Same result with User: prefix'); MediaWiki-Bot-5.006003/t/34-secure.t0000644000175000017500000000211712737341477015152 0ustar mikemikeuse strict; use warnings; use Test::Is qw(extended); use Test::RequiresInternet 'test.wikipedia.org' => 443; use Test::More tests => 1; use MediaWiki::Bot qw(:constants); my $t = __FILE__; my $username = $ENV{'PWPUsername'}; my $password = $ENV{'PWPPassword'}; my $login_data; if (defined($username) and defined($password)) { $login_data = { username => $username, password => $password }; } my $agent = "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)"; my $bot = MediaWiki::Bot->new({ agent => $agent, host => 'test.wikipedia.org', protocol => 'https', login_data => $login_data, }); my $rand = rand(); my $page = 'User:Mike.lifeguard/34-secure.t'; my $status = $bot->edit({ page => $page, text => $rand, summary => $agent, }); SKIP: { skip 'You are blocked, cannot use editing tests', 1 if defined $bot->{error}->{code} and ($bot->{error}->{code} == ERR_API or $bot->{error}->{code} == ERR_CAPTCHA); my $is = $bot->get_text($page); is($is, $rand, 'Edited via secure server successfully'); } MediaWiki-Bot-5.006003/t/38-test_image_exists.t0000644000175000017500000000237312737341477017414 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 3; use MediaWiki::Bot qw(:constants); my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my @images = ( 'File:D2c6ac30964d4348d1a2b3ff7e97fa08.png', 'File:Test image 13.png', 'File:Albert Einstein Head.jpg', ); subtest 'numeric codes' => sub { plan tests => 3; ok($bot->test_image_exists($images[0]) == 0, 'Nonexistent image not found'); ok($bot->test_image_exists($images[1]) == 1, 'Image is local'); ok($bot->test_image_exists($images[2]) == 2, 'Image is on Commons'); }; subtest 'constant codes' => sub { plan tests => 3; is($bot->test_image_exists($images[0]), FILE_NONEXISTENT, 'Nonexistent image not found'); is($bot->test_image_exists($images[1]), FILE_LOCAL, 'Image is local'); is($bot->test_image_exists($images[2]), FILE_SHARED, 'Image is on Commons'); }; my $is = $bot->test_image_exists(\@images); my $ought = [FILE_NONEXISTENT, FILE_LOCAL, FILE_SHARED]; is_deeply($is, $ought, 'Multiple images checked OK') or diag explain { is => $is, ought => $ought }; MediaWiki-Bot-5.006003/t/37-move.t0000644000175000017500000000321512737341477014635 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 3; use MediaWiki::Bot; my $t = __FILE__; SKIP: { skip('No account credentials provided in %ENV', 3) unless $ENV{PWPUsername} and $ENV{PWPPassword}; my $agent = "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)"; my $bot = MediaWiki::Bot->new({ agent => $agent, host => 'test.wikipedia.org', login_data => { username => $ENV{PWPUsername}, password => $ENV{PWPPassword} }, protocol => 'https', }); my $res = $bot->{api}->api({ action => 'query', meta => 'userinfo', uiprop => 'rights', }); my @rights = @{ $res->{'query'}->{'userinfo'}->{'rights'} }; # grep is slow; might be worth using List::Util if the main module gains 
that as a dependency if (! grep $_ eq 'suppressredirect', @rights) { skip( qq{The account doesn't have the 'suppressredirect' right}, 3); } my $rand = rand(); my $status = $bot->move('User:Mike.lifeguard/37-move.t', "User:Mike.lifeguard/$rand", $agent); if ((defined($bot->{'error'}->{'code'})) and ($bot->{'error'}->{'code'} == 3)) { skip('You are blocked, cannot use editing tests', 3); } ok($status, 'Page moved successfully'); $status = $bot->move("User:Mike.lifeguard/$rand", 'User:Mike.lifeguard/37-move.t', $agent, { noredirect => 1 }); ok($status, 'Page moved back successfully'); my $text = $bot->get_text("User:Mike.lifeguard/$rand"); is($text, undef, 'Redirect creation successfully suppressed'); } MediaWiki-Bot-5.006003/t/19-get_pages.t0000644000175000017500000000303612737341477015626 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 9; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my @pages = ('Main Page', 'Wikipedia:What Test Wiki is not', 'This page had better not exist..........', 'WP:SAND'); # Do test once with arrayref my $result = $bot->get_pages(\@pages); is( keys %{$result}, 4, 'Got the right number of pages returned'); isnt( $result->{'Wikipedia:What Test Wiki is not'}, undef, 'Check that page exists'); is( $result->{'This page had better not exist..........'}, undef, 'Check that page does not exist'); ok( defined($result->{'Wikipedia:What Test Wiki is not'}), 'Check for something not horribly wrong'); ok(! defined($result->{'Wikipedia:SAND'}), 'Should not return expanded names where an alias was requested'); ok( defined($result->{'WP:SAND'}), 'Namespace aliases work as expected'); like( $result->{'Main Page'}, qr/MediaWiki/, 'Got Main Page on multi-page get'); like( $result->{'Wikipedia:What Test Wiki is not'}, qr/Wikipedia/, '[[Wikipedia:What Test Wiki is not]] contains the string "Wikipedia"'); # Do tests again with array my $repeat = $bot->get_pages(@pages); is_deeply($repeat, $result, 'Array and Arrayref return the same data'); MediaWiki-Bot-5.006003/t/33-is_locked.t0000644000175000017500000000153212737341477015617 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 2; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); TODO: { todo_skip 'is_locked not implemented yet', 2 unless $bot->can('is_locked'); # Jimbo is almost certainly not locked right now my $result = $bot->is_locked('Jimbo Wales'); is($result, 0, 'current locks'); # A random old account I chose - it will probably be locked forever # 23:44, 4 March 2009 Mike.lifeguard (talk | contribs) locked global account "User:PLEASE STOP BLOCKING@global" ‎ (inappropriate username) $result = $bot->is_locked('User:PLEASE STOP BLOCKING'); is($result, 1, 'current locks'); } MediaWiki-Bot-5.006003/t/36-email.t0000644000175000017500000000222212737341477014752 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More; use MediaWiki::Bot; my $t = __FILE__; # Need to figure out a new testing strategy here. [[User:Perlwikibot testing]] # was created with a confirmed email so you could send emails to it. 
The # account was then locked (in CentralAuth), but this still permitted emails # to be sent. MediaWiki no longer allows this. We need to figure out another # plan. plan skip_all => "Can't email locked accounts"; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', login_data => { username => $ENV{PWPUsername}, password => $ENV{PWPPassword} }, protocol => 'https', }); my $rand = rand(); my $res = $bot->email('User:Perlwikibot testing', "MediaWiki::Bot test $rand", $rand); ok($res, 'Sending an email succeeded') or diag explain $bot->{error}; note 'This test sent an email to [[User:Perlwikibot testing]].'; note 'The email registered for this account is perlwikibot@mailinator.com'; note 'You can find the inbox at https://mailinator.com/inbox2.jsp?public_to=perlwikibot'; MediaWiki-Bot-5.006003/t/40-upload.t0000644000175000017500000000566212737341477015155 0ustar mikemikeuse strict; use warnings; use Test::Is qw(extended); use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More 0.88; use File::Spec; use MediaWiki::Bot; my $t = __FILE__; my $username = $ENV{'PWPUsername'}; my $password = $ENV{'PWPPassword'}; plan $username && $password ? (tests => 2) : (skip_all => 'upload test requires login with upload permission'); my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', login_data => { username => $username, password => $password }, protocol => 'https', }); my $tiny_png_filename = File::Spec->catfile(qw/t tiny.png/); END { unlink $tiny_png_filename if $tiny_png_filename && -e $tiny_png_filename } subtest 'file upload' => sub { plan skip_all => q{Data::Random and GD needed for generating images} unless (eval q{use Data::Random qw(rand_image); use GD; 1 }); open my $png_out, '>:raw', $tiny_png_filename or die "Couldn't open $tiny_png_filename for writing: $!"; print $png_out rand_image(); close $png_out or die "Couldn't close $tiny_png_filename: $!"; { my $status = $bot->upload({ data => do { local $/; open my $in, '<:raw', $tiny_png_filename or die $!; <$in> }, }); is $status, undef or diag explain $status; is_deeply $bot->{error}, { code => 6, details => q{You must specify a title to upload to.} } or diag explain $bot; } { my $status = $bot->upload({ title => rand() }); is $status, undef or diag explain $status; is_deeply $bot->{error}, { code => 6, details => q{You must provide either file contents or a filename.} } or diag explain $bot; } { my $filename = rand() . '.png'; my $status = $bot->upload({ title => $filename, file => $tiny_png_filename, }); ok $status and diag "Uploaded to $filename"; like $status->{upload}->{result}, qr/Success|Warning/, 'Success or Warning' or diag explain $status; is $status->{upload}->{filename}, $filename or diag explain $status if $status->{upload}->{result} eq 'Success'; } { my $filename = rand() . '.png'; my $status = $bot->upload({ title => $filename, data => rand_image(), }); ok $status and diag "Uploaded to $filename"; like $status->{upload}->{result}, qr/Success|Warning/ or diag explain $status; is $status->{upload}->{filename}, $filename or diag explain $status if $status->{upload}->{result} eq 'Success'; } }; subtest 'url' => sub { plan skip_all => 'Unsupported by testwiki'; diag explain $bot->upload_from_url({ url => 'http://farm9.staticflickr.com/8282/7874109806_756828bf0e_b_d.jpg', title => rand() . 
'.png', summary => "Testing $t", text => "testing $t", }); diag explain $bot->{error}; }; MediaWiki-Bot-5.006003/t/09-update_rc.t0000644000175000017500000000150312737341477015632 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 5; use Test::Warn; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $num = 2; my @rc; warning_is( sub { @rc = $bot->update_rc($num); }, 'update_rc is deprecated, and may be removed in a future release. Please use recentchanges(), which provides more data, including rcid', 'update_rc is deprecated' ); is(scalar(@rc), $num, 'Right number of results returned'); isa_ok($rc[0], 'HASH', 'Right kind of data structure'); ok(defined $rc[0]->{title}, 'Has a title'); ok(defined $rc[0]->{timestamp}, 'Has a timestamp'); MediaWiki-Bot-5.006003/t/28-search.t0000644000175000017500000000113212737341477015130 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 3; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my @pages = $bot->search('Main Page'); isa_ok(\@pages, 'ARRAY', 'Right return type'); is($pages[0], 'Main Page', 'Found [[Main Page]]'); @pages = $bot->search('62c77d65adf258464e0f0820696b871251c21eb4'); is scalar @pages, 0, 'No results found for a nonsensical search' or diag explain \@pages; MediaWiki-Bot-5.006003/t/10-what_links_here.t0000644000175000017500000000233112737341477017022 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 7; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my @pages = $bot->what_links_here('Main Page', 'redirects', undef, {max=>1}); ok( defined $pages[0], 'Something was returned'); isa_ok( $pages[0], 'HASH', 'A hash was returned'); ok( defined $pages[0]->{'title'}, 'The hash contains a title'); like( $pages[0]->{'title'}, qr/\w+/, 'The title looks valid'); ok( defined $pages[0]->{'redirect'}, 'Redirect status is defined'); ok( defined($pages[0]->{'redirect'}), 'We got a redirect when we asked for it'); $bot->what_links_here('Project:Sandbox', 'nonredirects', 0, {max => 1, hook => \&mysub}); my $is_redir; sub mysub { my ($res) = @_; my $hash = $res->[0]; $is_redir = $hash->{'redirect'}; } isnt( $is_redir, 'We got a normal link when we asked for no redirects'); MediaWiki-Bot-5.006003/t/04-edit.t0000644000175000017500000000461512737341477014613 0ustar mikemikeuse strict; use warnings; use Test::Is qw(extended); use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More; use MediaWiki::Bot qw(:constants); my $t = __FILE__; plan tests => ($ENV{PWPUsername} && $ENV{PWPPassword} ? 3 : 2); my $agent = "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)"; my $bot = MediaWiki::Bot->new({ agent => $agent, host => 'test.wikipedia.org', protocol => 'https', ( $ENV{PWPUsername} && $ENV{PWPPassword} ? 
( login_data => { username => $ENV{PWPUsername}, password => $ENV{PWPPassword} } ) : () ), }); my $rand = rand(); my $rand2 = rand(); my $title = 'User:Mike.lifeguard/04-edit.t'; my $status = $bot->edit({ page => $title, text => $rand, summary => $agent . ' (should be a minor edit)', minor => 1, }); SKIP: { skip 'Cannot use editing tests: ' . $bot->{error}->{details}, 2 if defined $bot->{error}->{code} and ($bot->{error}->{code} == ERR_API or $bot->{error}->{code} == ERR_CAPTCHA); is $bot->get_text($title, $status->{newrevid}) => $rand, 'Did whole-page editing successfully'; $status = $bot->edit({ page => $title, text => $rand2, section => 'new', summary => $agent, }); skip 'Cannot use editing tests: ' . $bot->{error}->{details}, 1 if defined $bot->{error}->{code} and ($bot->{error}->{code} == ERR_API or $bot->{error}->{code} == ERR_CAPTCHA); diag explain $bot->{error} unless $status; like $bot->get_text($title, $status->{edit}->{newrevid}) => qr{== \Q$agent\E ==\n\n\Q$rand2\E}, 'Did section editing successfully' or diag explain { status => $status, error => $bot->{error} }; } subtest 'check history' => sub { my $do_history_test = $ENV{PWPUsername} && $ENV{PWPPassword} && !($bot->{error}->{code} == ERR_API or $bot->{error}->{code} == ERR_CAPTCHA); plan ($do_history_test ? (tests => 2) : (skip_all => "previous test didn't run")); my @hist = $bot->get_history($title, 2); ok $hist[1]->{minor}, 'Minor edit' or diag explain \@hist; $status = $bot->edit({ page => $title, text => $rand2.$rand, summary => $agent . ' (major)', minor => 0, }); @hist = $bot->get_history($title, 1); ok !$hist[0]->{minor}, 'Not a minor edit' or diag explain { hist => \@hist, status => $status }; } MediaWiki-Bot-5.006003/t/32-was_locked.t0000644000175000017500000000076612737341477016005 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'meta.wikimedia.org' => 80; use Test::More tests => 2; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'meta.wikimedia.org', }); # Hasn't been locked (yet) my $result = $bot->was_locked('Jimbo Wales'); ok(!$result, 'lock history'); # I was once locked $result = $bot->was_locked('Mike.lifeguard'); ok($result, 'lock history'); MediaWiki-Bot-5.006003/t/11-get_pages_in_category.t0000644000175000017500000000307112737341477020200 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More 0.96 tests => 3; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); subtest 'category loop' => sub { plan tests => 1; my @pages = $bot->get_all_pages_in_category('Category:Category loop', { max => 5 }); is(scalar @pages, 1, 'Category loop protection works'); }; subtest 'big' => sub { plan tests => 2; my @pages = $bot->get_all_pages_in_category('Category:Really big category', { max => 51 }); cmp_ok( scalar(@pages), '>', 500, 'Get big category, enough elements'); ok(defined $pages[0], 'Get big category'); }; subtest 'callback' => sub { plan tests => 6; my $title; my $ns; my $pageid; $bot->get_all_pages_in_category('Category:Wikipedia', { hook => sub { my ($res) = @_; $title = $res->[0]->{title}; $ns = $res->[0]->{ns}; $pageid = $res->[0]->{pageid}; } }); ok( defined($title), 'Title returned via callback'); like( $title, qr/\w+/, 'Title looks valid'); ok( defined($ns), 'Namespace returned via callback'); like( $ns, 
qr/\d/, 'Namespace is a number'); ok( defined($pageid), 'Pageid returned via callback'); like( $pageid, qr/\d/, 'Pageid is a number'); }; MediaWiki-Bot-5.006003/t/05-revert.t0000644000175000017500000000332712737341477015175 0ustar mikemikeuse strict; use warnings; use Test::Is qw(extended); use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 2; use MediaWiki::Bot qw(:constants); my $t = __FILE__; my $agent = "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)"; my $bot = MediaWiki::Bot->new({ agent => $agent, host => 'test.wikipedia.org', protocol => 'https', ( $ENV{PWPUsername} && $ENV{PWPPassword} ? ( login_data => { username => $ENV{PWPUsername}, password => $ENV{PWPPassword} } ) : () ), }); my $title = 'User:Mike.lifeguard/05-revert.t'; subtest revert => sub { my @history = $bot->get_history($title, 20); my $oldrevid = $history[ int( rand() * 20 ) ]->{revid}; my $res = $bot->revert($title, $oldrevid, $agent); plan defined $bot->{error}->{code} && ($bot->{error}->{code} == ERR_API or $bot->{error}->{code} == ERR_CAPTCHA) ? (skip_all => q{Can't use editing tests: } . $bot->{error}->{details}) : (tests => 1); is $bot->get_text($title, $res->{edit}->{newrevid}) => $bot->get_text($title, $oldrevid), 'Reverted successfully'; }; subtest undo => sub { my @history = $bot->get_history($title, 2); my $res = $bot->undo($title, $history[0]->{revid}); plan defined $bot->{error}->{code} && ($bot->{error}->{code} == ERR_API or $bot->{error}->{code} == ERR_CAPTCHA) ? (skip_all => q{Can't use editing tests: } . $bot->{error}->{details}) : (tests => 1); my $is = $bot->get_text($title, $res->{edit}->{newrevid}); my $ought = $bot->get_text($title, $history[1]->{revid}); is $is => $ought, 'Undo was successful' or diag explain { is => $is, ought => $ought, history => \@history }; }; MediaWiki-Bot-5.006003/t/42-expandtemplates.t0000644000175000017500000000113212737341477017055 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 2; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); is $bot->expandtemplates(undef, '{{tlxtest|tlxtest}}') => '{{[[Template:tlxtest|tlxtest]]}}', '[[Template:Tlxtest]] expanded OK'; isnt $bot->get_text('Main Page') => $bot->expandtemplates('Main Page'), 'Wikitext != expanded text'; MediaWiki-Bot-5.006003/t/46-usergroups.t0000644000175000017500000000077412737341477016114 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 1; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my @usergroups = $bot->usergroups('Mike.lifeguard'); is_deeply [ sort @usergroups ], [ sort qw(* user autoconfirmed patroller editor reviewer sysop ipblock-exempt) ], 'Right usergroups were returned'; MediaWiki-Bot-5.006003/t/13-get_namespace_names.t0000644000175000017500000000173212737341477017641 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More 0.96 tests => 2; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); subtest 'normal namespaces' => sub { plan tests => 7; my %ns_names = 
$bot->get_namespace_names(); is($ns_names{7}, 'File talk', 'File talk OK'); is($ns_names{2}, 'User', 'User OK'); is($ns_names{1}, 'Talk', 'Talk OK'); is($ns_names{14}, 'Category', 'Category OK'); is($ns_names{0}, '', 'Main OK'); is($ns_names{-2}, 'Media', 'Media OK'); is($ns_names{-1}, 'Special', 'Special OK'); }; subtest 'namespace aliases' => sub { plan tests => 2; my $ns_aliases = $bot->_get_ns_alias_data(); isa_ok $ns_aliases => 'HASH'; is $ns_aliases->{Image} => 'File', 'Image alias OK'; }; MediaWiki-Bot-5.006003/t/18-is_blocked.t0000644000175000017500000000302712737341477015765 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 6; use Test::Warn; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); { # Jimbo is almost certainly not blocked right now my $result = $bot->is_blocked('Jimbo Wales'); my $bc; warning_is( sub { $bc = $bot->test_blocked('User:Jimbo Wales'); }, 'test_blocked is an alias of is_blocked; please use the new name. This alias might be removed in a future release', 'test_blocked is deprecated' ); ok(!$result, 'current blocks'); is($result, $bc, 'BC method returned the same as the current method'); } { # A random old account I chose - it will probably be blocked forever # (del/undel) 21:48, July 26, 2008 Cometstyles (talk | contribs | block) blocked Hiwhispees (talk | contribs) with an expiry time of infinite (account creation disabled, e-mail blocked) ‎ (bye grawp) (unblock | change block) my $result = $bot->is_blocked('User:Hiwhispees~testwiki'); my $bc; warning_is( sub { $bc = $bot->test_blocked('Hiwhispees~testwiki'); }, 'test_blocked is an alias of is_blocked; please use the new name. This alias might be removed in a future release', 'test_blocked is deprecated' ); ok($result, 'current blocks'); is($result, $bc, 'BC method returned the same as the current method'); } MediaWiki-Bot-5.006003/t/00-init.t0000644000175000017500000000604412737341477014623 0ustar mikemikeuse strict; use warnings; use Test::More 0.96 tests => 5; BEGIN { my $bail_diagnostic = <<'END'; There was a problem loading the module. Typically, this means you have installed MediaWiki::Bot without the prerequisites. Please check the documentation for installation instructions, or ask for help from the members of perlwikibot@googlegroups.com. The test suite will bail out now; doing more testing is pointless since everything will fail. END use_ok('MediaWiki::Bot') or do { diag($bail_diagnostic); BAIL_OUT("Couldn't load the module"); }; }; # Provide some info to the tester unless ($ENV{AUTOMATED_TESTING}) { diag <<'END'; Thanks for using MediaWiki::Bot. If any of these tests fail, or you need any other assistance with the module, please email our support mailing list at perlwikibot@googlegroups.com, or submit a bug to our tracker on github: http://goo.gl/5Ns48 END if (!defined($ENV{'PWPUsername'}) and !defined($ENV{'PWPPassword'})) { diag <<'END'; If you want, you can log in for editing tests. To log in for those tests, stop the test suite now, set the environment variables PWPUsername and PWPPassword, and run the test suite. 
END sleep(2); } } # Some deeper diagnostics my $useragent = 'MediaWiki::Bot tests (00-init.t)'; my $host = '127.0.0.1'; my $assert = 'bot'; my $operator = 'MediaWiki::Bot tester'; my $bot = new_ok('MediaWiki::Bot'=> [{ # agent => $useragent, operator => $operator }]); # outside subtest b/c reused later subtest 'diag-one' => sub { plan tests => 5; my $test_one = MediaWiki::Bot->new({ agent => $useragent, host => $host, path => '', assert => $assert, operator => $operator, }); is($test_one->{api}->{ua}->agent(), $useragent, 'Specified useragent set correctly'); is($test_one->{assert}, $assert, 'Specified assert set orrectly'); is($test_one->{operator}, $operator, 'Specified operator set correctly'); is($test_one->{api}->{config}->{api_url}, "https://$host/api.php",'api.php with null path is OK'); # Issue 111: Null $path value returns "w" like($bot->{api}->{ua}->agent(), qr{^Perl MediaWiki::Bot/(v?[[:digit:]._]+|dev) \Q(https://metacpan.org/MediaWiki::Bot; [[User:$operator]]}, 'Useragent built correctly'); }; subtest 'diag-two' => sub { plan tests => 2; my $test_two = MediaWiki::Bot->new({ host => $host, path => undef, operator => $operator, }); is( $test_two->{api}->{config}->{api_url}, 'https://127.0.0.1/w/api.php', 'api.php with undef path is OK'); like($test_two->{api}->{ua}->agent(), qr/\Q$operator\E/, 'operator appears in the useragent'); }; subtest 'no assert' => sub { plan tests => 1; my $no_assert_bot = MediaWiki::Bot->new({ host => $host, operator => $operator, }); ok( not exists $bot->{assert} ) or diag explain $bot; }; MediaWiki-Bot-5.006003/t/30-was_g_blocked.t0000644000175000017500000000101512737341477016437 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'meta.wikimedia.org' => 80; use Test::More tests => 2; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'meta.wikimedia.org', }); # 127.0.0.1 has been blocked before ok $bot->was_g_blocked('127.0.0.1'), q{127.0.0.1 has been globalblocked}; # 127.0.4.4 probably hasn't been ok !$bot->was_g_blocked('127.0.4.4'), q{127.0.4.4 hasn't been globalblocked}; MediaWiki-Bot-5.006003/t/02-login.t0000644000175000017500000001150612737341477014771 0ustar mikemikeuse strict; use warnings; use Test::Is qw(extended); use Test::RequiresInternet 'test.wikipedia.org' => 80, 'test.wikipedia.org' => 443; use Test::More 0.96; use Test::Warn; use MediaWiki::Bot; my $t = __FILE__; my $username = $ENV{PWPUsername}; my $password = $ENV{PWPPassword}; plan $username && $password ? 
(tests => 7) : (skip_all => q{I can't log in without credentials}); unlink ".mediawiki-bot-$username-cookies" if $username and -e ".mediawiki-bot-$username-cookies"; my $useragent = "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)"; my $host = 'test.wikipedia.org'; subtest 'warn on HTTP' => sub { plan tests => 2; my $insecure = MediaWiki::Bot->new({ agent => $useragent, protocol => 'http', host => 'test.wikipedia.org' }); warning_like( sub { is($insecure->login($username, $password), 1, 'Warning logging in w/ HTTP'); }, [ { carped => qr/^\QPlease pass a hashref/ }, { carped => qr/^\QLogging in over plain HTTP is a bad idea/ }, ], 'Got expected warnings' ); }; subtest 'one wiki' => sub { plan tests => 3; my $bot = MediaWiki::Bot->new({ agent => $useragent, host => $host, protocol => 'https' }); warning_is( sub {is($bot->login($username, $password), 1, 'Login OK'); }, 'Please pass a hashref; this method of calling login is deprecated and will be removed in a future release', 'old login call style warns' ); ok($bot->_is_loggedin(), q{Double-check we're logged in}); }; subtest 'cookies' => sub { plan tests => 3; my $cookiemonster = MediaWiki::Bot->new({ agent => $useragent, host => $host, protocol => 'https' }); is($cookiemonster->login({username => $username}), 1, 'Cookie log in'); ok($cookiemonster->_is_loggedin(), q{Double-check we're logged in with only cookies}); ok($cookiemonster->logout(), 'Logged out'); }; subtest 'SUL' => sub { plan tests => 9; my $bot = MediaWiki::Bot->new({ agent => $useragent, host => $host, protocol => 'https' }); is($bot->login({ username => $username, password => $password, do_sul => 1 }), 1, q{SUL login}); is($bot->{host}, $host, q{We're still on the wiki we started on}); ok($bot->_is_loggedin(), q{Double-check we're logged in}); is($bot->set_wiki({host=>'meta.wikimedia.org'}), 1, q{Switched wikis OK}); ok($bot->_is_loggedin(), q{Double-check we're logged in via SUL}); is($bot->logout(), 1, q{logout returned true}); ok(!$bot->_is_loggedin(), q{Double-check we're actually logged out}); is($bot->set_wiki({host=>'en.wikipedia.org'}), 1, q{Switched wikis OK}); TODO: { local $TODO = "Possible regression: logging out on one wiki doesn't affect others any more"; ok(!$bot->_is_loggedin(), q{Double-check we're logged out for SUL}); } }; subtest 'fail' => sub { plan tests => 1; my $failbot = MediaWiki::Bot->new({ agent => $useragent, login_data => { username => q{Mike's test account}, password => q{} }, }); is($failbot, undef, 'Auto-login failed'); }; subtest 'secure' => sub { plan tests => 1; my $secure = MediaWiki::Bot->new({ agent => $useragent, protocol => 'https', host => 'secure.wikimedia.org', path => 'wikipedia/en/w', }); warning_like( sub { $secure->login({ username => $username, password => $password, do_sul => 1 }) }, qr{^\QSSL is now supported on the main Wikimedia Foundation sites.} ); }; subtest 'new-secure' => sub { plan tests => 5; my $secure = MediaWiki::Bot->new({ agent => $useragent, protocol => 'https', host => 'en.wikipedia.org', }); is($secure->login({ username => $username, password => $password, do_sul => 1, }), 1, q{Secure login}); ok($secure->_is_loggedin(), q{Double-check we're actually logged in}); is($secure->set_wiki({host => 'fr.wikipedia.org'}), 1, q{Switched wikis OK}); # Don't specify path or protocol is($secure->{api}->{config}->{api_url}, 'https://fr.wikipedia.org/w/api.php', q{Protocol and path retained properly}); TODO: { local $TODO = "Possible regression: SUL doesn't apply to all language subdomains of a project"; 
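    # The login above was made on en.wikipedia.org; with working SUL the session
    # should carry over to fr.wikipedia.org, so this check is expected to pass
    # once the regression described in $TODO is resolved.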
ok($secure->_is_loggedin(), q{Check we're logged in on new wiki}); } }; END { unlink ".mediawiki-bot-$username-cookies" if $username and -e ".mediawiki-bot-$username-cookies"; } MediaWiki-Bot-5.006003/t/15-count_contributions.t0000644000175000017500000000102312737341477017770 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 2; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); cmp_ok($bot->count_contributions('Mike.lifeguard'), '>', 10, q{Count Mike's contribs}); is($bot->count_contributions('Non-existent username!! (hopefully)'), undef, q{Count a nonexistent user's contribs}); MediaWiki-Bot-5.006003/t/24-purge_page.t0000644000175000017500000000150512737341477016001 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 3; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $logged_in = $bot->login({username => 'Perlwikibot testing', password => 'test'}); SKIP: { skip q{Couldn't log in}, 3 unless $logged_in; my $result = $bot->purge_page('Main Page'); is($result, 1, 'Purge a single page'); $result = $bot->purge_page('tsixe reven lliw'); is($result, 0, 'Fail to purge a non-existent page'); my @purges = ('Main Page', 'Main Page', 'tsixe reven lliw', 'User:Mike.lifeguard'); $result = $bot->purge_page(\@purges); is($result, 2, 'Purge some of an array of pages'); } MediaWiki-Bot-5.006003/t/35-get_protection.t0000644000175000017500000000242112737341477016710 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 6; use Test::Warn; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); { # [[Main Page]] is probably protected my @pages = ('Main Page', 'SyntaxHighlight GeSHi'); my $result = $bot->get_protection(\@pages); isa_ok($result, 'HASH', 'Return value of get_protection()'); isa_ok($result->{'Main Page'}, 'ARRAY', '[[Main Page]] protection'); is($result->{'SyntaxHighlight GeSHi'}, undef, '[[SyntaxHighlight GeSHi]] protection'); } { # [[User talk:Mike.lifeguard]] is probably not protected my $result = $bot->get_protection('User talk:Mike.lifeguard'); my $bc; warning_is( sub { $bc = $bot->is_protected('User talk:Mike.lifeguard'); }, 'is_protected is deprecated, and might be removed in a future release; please use get_protection instead', 'is_protected is deprecated' ); is($result, undef, '[[User talk:Mike.lifeguard]] protection'); is($result, $bc, 'Agreement between new and old methods'); } MediaWiki-Bot-5.006003/t/41-get_users.t0000644000175000017500000000111612737341477015660 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 1; use List::MoreUtils qw/uniq/; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $title = 'User:Mike.lifeguard/03-get text.t'; my @history = uniq map { $_->{user} } $bot->get_history($title, 5); my @users = uniq $bot->get_users($title, 5); is_deeply(\@users, \@history, 'Concordance between two 
methods of getting the same data'); MediaWiki-Bot-5.006003/t/07-unicode.t0000644000175000017500000000505312737341477015314 0ustar mikemikeuse strict; use warnings; use utf8; use Test::Is qw(extended); use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More 0.94 tests => 2; BEGIN { # Fix "Wide character in print" warning on failure my $builder = Test::More->builder; binmode $builder->output, ':encoding(UTF-8)'; binmode $builder->failure_output, ':encoding(UTF-8)'; binmode $builder->todo_output, ':encoding(UTF-8)'; binmode STDOUT, ':encoding(UTF-8)'; binmode STDERR, ':encoding(UTF-8)'; } use MediaWiki::Bot qw(:constants); my $t = __FILE__; my $agent = "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)"; my $bot = MediaWiki::Bot->new({ agent => $agent, host => 'test.wikipedia.org', protocol => 'https', ( $ENV{PWPUsername} && $ENV{PWPPassword} ? (login_data => { username => $ENV{PWPUsername}, password => $ENV{PWPPassword} }) : () ), }); my $base = 'User:Mike.lifeguard/07-unicode.t'; my $string = 'éółŽć'; subtest 'read' => sub { plan tests => 1; is $bot->get_text("$base/1") => $string, 'Is our string the same as what we load?'; }; subtest 'write' => sub { plan tests => 4; my $old = $bot->get_text("$base/2"); my $rand = rand(); my $status = $bot->edit({ page => "$base/2", text => "$rand\n$string\n", summary => $agent }); SKIP: { skip 'Cannot use editing tests: ' . $bot->{error}->{details}, 4 if defined $bot->{error}->{code} and ($bot->{error}->{code} == ERR_API or $bot->{error}->{code} == ERR_CAPTCHA); is $bot->get_text("$base/2", $status->{edit}->{newrevid}) => "$rand\n$string", "Successfully edited $base/2"; my $rand2 = rand(); $status = $bot->edit({ page => "$base/3", text => "$rand2\n$string\n", summary => "$agent ($string)" }); is $bot->get_text("$base/3", $status->{edit}->{newrevid}) => "$rand2\n$string", "Edited $base/3 OK"; my @history = $bot->get_history("$base/3", 1); is $history[0]->{comment} => "$agent ($string)", "Edited $base/3 with unicode in an edit summary"; my $rand3 = rand(); $status = $bot->edit({ page => "$base/$string", text => "$rand3\n$string\n", summary => $agent }); is $bot->get_text("$base/$string", $status->{edit}->{newrevid}) => "$rand3\n$string", "Edited $base/$string OK"; } # end SKIP }; MediaWiki-Bot-5.006003/t/29-get_log.t0000644000175000017500000000265312737341477015315 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 1; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $is = $bot->get_log({ type => 'delete', user => 'Mark', target => 'Main Page', limit => 1, }); my $ought = [ { 'ns' => 0, 'timestamp' => '2007-05-07T17:06:47Z', 'comment' => '24 revisions restored', 'pageid' => 11791, 'action' => 'restore', 'user' => 'Mark', 'title' => 'Main Page', 'type' => 'delete', 'logid' => 3672, 'logpage' => 0, 'params' => {}, }, { 'ns' => 0, 'timestamp' => '2007-05-07T16:58:39Z', 'comment' => 'content was: \'This is a test wiki that runs from the current NFS copy of MediaWiki. 
Changes to the code will generally appear here a few minutes before they appear ...\'', 'pageid' => 11791, 'action' => 'delete', 'user' => 'Mark', 'title' => 'Main Page', 'type' => 'delete', 'logid' => 3671, 'logpage' => 0, 'params' => {}, } ]; is_deeply($is, $ought, 'The same - all the way down'); MediaWiki-Bot-5.006003/t/26-diff.t0000644000175000017500000000434512737341477014602 0ustar mikemikeuse strict; use warnings; use utf8; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 1; BEGIN { unless (eval q{ use Test::Differences; 1 }) { # If Test::Differences isn't available... no warnings 'redefine'; note 'Test::Differences unavailable - use Test::More::is_deeply to approximate'; *eq_or_diff_text = \&is_deeply; # make Test::Differences::eq_or_diff an alias to Test::More::is_deeply *unified_diff = sub { 1 }; # shim } } # Fix "Wide character in print" warning on failure my $builder = Test::More->builder; binmode $builder->output, ':encoding(UTF-8)'; binmode $builder->failure_output, ':encoding(UTF-8)'; binmode $builder->todo_output, ':encoding(UTF-8)'; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $is = $bot->diff({ revid => 92376, oldid => 92373, }); $is =~ s{}{}; # This cache key will change, so strip it out my $ought = do { local $/; }; 1 while (chomp $is); 1 while (chomp $ought); unified_diff; eq_or_diff_text($is, $ought, 'Diff retrieved correctly'); __DATA__ Line 24: Line 24:  
;21-get_allusers.t:gets a list of users from [[Special:ListUsers]] and [[Special:ListUsers/sysop]]
 
;21-get_allusers.t:gets a list of users from [[Special:ListUsers]] and [[Special:ListUsers/sysop]]
 
;22-get_id.t:gets the pageid for [[Main Page]]
 
;22-get_id.t:gets the pageid for [[Main Page]]
;23-list_transclusions.t:requires [[Template:Perlwikibot-test]] and for [[Template:Tlx]] to be used
  MediaWiki-Bot-5.006003/t/39-image_usage.t0000644000175000017500000000255012737341477016140 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 7; use Test::Warn; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $file = 'File:Wiki.png'; my @pages = $bot->image_usage($file, undef, undef, { max => 1 }); my @pages_bc; warning_like( sub { @pages_bc = $bot->links_to_image($file, undef, undef, { max => 1 }); }, qr/links_to_image is an alias of image_usage; please use the new name/, 'links_to_image is deprecated' ); ok( @pages, 'No error'); cmp_ok( scalar @pages, '>', 1, 'More than one result'); ok( defined($pages[0]), 'Something was returned'); like( $pages[0], qr/\w+/, 'The title looks valid'); is_deeply(\@pages, \@pages_bc, 'The BC method returned the same as the current method'); $bot->image_usage($file, undef, 'nonredirects', { hook => \&mysub, max => 5 }); my $is_redir = 1; sub mysub { my $res = shift; $is_redir = exists $res->[0]->{redirect}; } isnt( $is_redir, 'We got a normal link when we asked for no redirects'); MediaWiki-Bot-5.006003/t/23-list_transclusions.t0000644000175000017500000000231112737341477017620 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 7; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my @pages = $bot->list_transclusions('Template:Tlx', 'nonredirects', undef, {max=>1}); ok( defined($pages[0]), 'Something was returned'); isa_ok( $pages[0], 'HASH', 'A hash was returned'); ok( defined($pages[0]->{'title'}), 'The hash contains a title'); like( $pages[0]->{'title'}, qr/\w+/, 'The title looks valid'); ok( defined($pages[0]->{'redirect'}), 'Redirect status is defined'); is( $pages[0]->{'redirect'}, '', 'We got a redirect when we asked for it'); $bot->list_transclusions('Template:Tlx', 'redirects', undef, { max => 1, hook => \&test_hook}); my $is_redir; sub test_hook { my ($res) = @_; $is_redir = $res->[0]->{'redirect'}; } isnt( $is_redir, 'We got a redirect when we asked for it'); MediaWiki-Bot-5.006003/t/12-linksearch.t0000644000175000017500000000274312737341477016010 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 10; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my @pages = $bot->linksearch('*.example.com', undef, undef, { max => 1 }); ok( defined $pages[0], 'Something was returned'); isa_ok( $pages[0], 'HASH', 'A hash was returned'); ok( defined $pages[0]->{'url'}, 'The hash contains a URL'); like( $pages[0]->{'url'}, qr/example\.com/, 'The URL is one we requested'); ok( defined $pages[0]->{'title'}, 'The has contains a page title'); like( $pages[0]->{'title'}, qr/\w+/, 'The title looks valid'); $bot->linksearch('*.example.com', undef, undef, { max=> 1, hook => \&test_hook }); my $url; my $title; sub test_hook { my ($res) = @_; my $hashref = $res->[0]; $url = $hashref->{'url'}; $title = $hashref->{'title'}; } ok( defined($url), 'A URL was returned via callback'); like( $url, qr/example\.com/, 'The URL is right'); ok( defined($title), 'A title was returned via 
callback'); like( $title, qr/\w+/, 'The title looks valid'); MediaWiki-Bot-5.006003/t/45-contributions.t0000644000175000017500000000250412737341477016570 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More 0.96 tests => 3; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); subtest 'patrolled' => sub { # issue 151 plan tests => 2; my @contribs = $bot->contributions('Mike.lifeguard', 0); ok(!$bot->{error}->{code}, 'No error in bot') or diag explain $bot->{error}; ok(!$bot->{api}->{error}->{code}, 'No error in api') or diag explain $bot->{api}->{error}; }; subtest 'contribs' => sub { plan tests => 1; my @contribs = $bot->contributions('Mike.lifeguard'); isa_ok $contribs[0], 'HASH', 'array of hashes' or diag explain \@contribs; }; subtest 'multiple users' => sub { plan tests => 3; my @contribs = $bot->contributions(['User:Mike.lifeguard', 'User:Reedy']); isa_ok $contribs[0], 'HASH', 'array of hashes' or diag explain \@contribs; my %users = map { $_->{user} => 1 } @contribs; ok exists $users{'Mike.lifeguard'}, 'Mike.lifeguard is represented in the results' or diag explain { users => [keys %users] }; ok exists $users{'Reedy'}, 'Reedy is represented in the results' or diag explain { users => [keys %users] }; }; MediaWiki-Bot-5.006003/t/17-was_blocked.t0000644000175000017500000000243612737341477016146 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 6; use Test::Warn; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); { my $user = 'Bad Username'; # has been blocked before my $result = $bot->was_blocked($user); my $bc; warning_is( sub { $bc = $bot->test_block_hist($user); }, 'test_block_hist is an alias of was_blocked; please use the new method name. This alias might be removed in a future release', 'test_block_hist is deprecated' ); ok($result, 'block history - has been blocked'); is($result, $bc, 'BC method agrees with current method'); } { my $user = 'Mike.lifeguard'; # I haven't ever been blocked my $result = $bot->was_blocked($user); my $bc; warning_is( sub { $bc = $bot->test_block_hist($user); }, 'test_block_hist is an alias of was_blocked; please use the new method name. 
This alias might be removed in a future release', 'test_block_hist is deprecated' ); ok(!$result, 'block history - never blocked'); is($result, $bc, 'BC method agrees with current method'); } MediaWiki-Bot-5.006003/t/14-get_pages_in_namespace.t0000644000175000017500000000164012737341477020322 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 5; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $template_ns = 10; my @pages = $bot->get_pages_in_namespace($template_ns); like $pages[0] => qr/^Template:/, 'Template namespace found'; my $page_limit = 1; @pages = $bot->get_pages_in_namespace($template_ns, $page_limit); is scalar @pages, $page_limit, 'Correct number of pages retrieved'; @pages = $bot->get_pages_in_namespace('non-existent'); is $pages[0], undef, 'Error code received'; is $bot->{error}->{code}, 3, 'Error code in MediaWiki::Bot object'; @pages = $bot->get_pages_in_namespace(2, 'max', { max => 0 }); cmp_ok scalar @pages, '>', 500, 'Got more than 500 pages' or diag explain \@pages; # RT 66790 MediaWiki-Bot-5.006003/t/21-get_allusers.t0000644000175000017500000000075212737341477016354 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 2; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); { my @array = $bot->get_allusers(10); is(scalar(@array), 10, 'Got 10 users'); } { my @array = $bot->get_allusers(10, 'sysop'); is(scalar(@array), 10, 'Got 10 sysops'); } MediaWiki-Bot-5.006003/t/03-get_text.t0000644000175000017500000000172012737341477015502 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More tests => 6; use MediaWiki::Bot; my $t = __FILE__; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", host => 'test.wikipedia.org', }); my $wikitext = $bot->get_text('Main Page'); like($wikitext, qr/MediaWiki/, 'Main Page found'); $wikitext = $bot->get_text('User:Mike.lifeguard/03-get text.t'); is($wikitext, q{I know for a ''fact'' that this page contains 60 characters.}, 'Known text retrieved'); my $page = 'Main Page'; $wikitext = $bot->get_text($page); my $section_wikitext = $bot->get_text($page, undef, 3); isnt $section_wikitext => undef, 'Section load pass/fail'; isnt $wikitext => $section_wikitext, 'Section loaded content correctly'; like $wikitext => qr/\Q$section_wikitext\E/, 'Section loaded content correctly'; is $bot->get_text('egaP niaM') => undef, 'No page found'; MediaWiki-Bot-5.006003/t/44-patrol.t0000644000175000017500000000402012737341477015161 0ustar mikemikeuse strict; use warnings; use Test::RequiresInternet 'test.wikipedia.org' => 80; use Test::More 0.88; use MediaWiki::Bot; my $t = __FILE__; my $host = 'test.wikipedia.org'; my $username = $ENV{'PWPUsername'}; my $password = $ENV{'PWPPassword'}; plan skip_all => 'Login with patrol rights required' unless $host and $username and defined $password; my $bot = MediaWiki::Bot->new({ agent => "MediaWiki::Bot tests (https://metacpan.org/MediaWiki::Bot; $t)", login_data => { username => $username, password => $password, do_sul => 0, }, host => $host, protocol => 'https', }); my $tests_run = 0; { my @rc = grep { defined $_->{rcid} and 
$_->{type} eq 'edit' } $bot->recentchanges(0, 5); foreach my $change (@rc) { my $success = $bot->patrol($change->{rcid}); if ($bot->{error}->{details} and $bot->{error}->{details} =~ m/^(?:permissiondenied|badtoken)/) { pass q{Account isn't permitted to patrol}; note explain $bot->{error}; $tests_run++; last; } else { ok $success, 'Patrolled OK' or diag explain { res => $success, err => $bot->{error} }; $tests_run++; } } } { my @rc = $bot->recentchanges(0, 5, { hook => \&mysub }); sub mysub { my ($res) = @_; foreach my $hashref (@$res) { next unless defined $hashref->{rcid} and $hashref->{type} eq 'edit'; my $success = $bot->patrol($hashref->{rcid}); if ($bot->{error}->{details} and $bot->{error}->{details} =~ m/^(?:permissiondenied|badtoken)/) { pass q{Account isn't permitted to patrol}; note explain $bot->{error}; $tests_run++; last; } else { ok $success, 'Patrolled the page OK' or diag explain { res => $res, err => $bot->{error} }; $tests_run++; } } } } done_testing($tests_run); MediaWiki-Bot-5.006003/README.mkdn0000644000175000017500000013114312737341477014622 0ustar mikemike# NAME MediaWiki::Bot - a high-level bot framework for interacting with MediaWiki wikis # VERSION version 5.006003 # SYNOPSIS use MediaWiki::Bot qw(:constants); my $bot = MediaWiki::Bot->new({ assert => 'bot', host => 'de.wikimedia.org', login_data => { username => "Mike's bot account", password => "password" }, }); my $revid = $bot->get_last("User:Mike.lifeguard/sandbox", "Mike.lifeguard"); print "Reverting to $revid\n" if defined($revid); $bot->revert('User:Mike.lifeguard', $revid, 'rvv'); # DESCRIPTION **MediaWiki::Bot** is a framework that can be used to write bots which interface with the MediaWiki API ([http://en.wikipedia.org/w/api.php](http://en.wikipedia.org/w/api.php)). # METHODS ## new my $bot = MediaWiki::Bot->new({ host => 'en.wikipedia.org', operator => 'Mike.lifeguard', }); Calling `MediaWiki::Bot->new()` will create a new MediaWiki::Bot object. The only parameter is a hashref with keys: - _agent_ sets a custom useragent. It is recommended to use `operator` instead, which is all we need to do the right thing for you. If you really want to do it yourself, see [https://meta.wikimedia.org/wiki/User-agent\_policy](https://meta.wikimedia.org/wiki/User-agent_policy) for guidance on what information must be included. - _assert_ sets a parameter for the AssertEdit extension (commonly 'bot'). Refer to [http://mediawiki.org/wiki/Extension:AssertEdit](http://mediawiki.org/wiki/Extension:AssertEdit). - _operator_ allows the bot to send you a message when it fails an assert. This is also the recommended way to customize the user agent string, which is required by the Wikimedia Foundation. A warning will be emitted if you omit this. - _maxlag_ allows you to set the maxlag parameter (default is the recommended 5s). Please refer to the MediaWiki documentation prior to changing this from the default. - _protocol_ allows you to specify 'http' or 'https' (default is 'http') - _host_ sets the domain name of the wiki to connect to - _path_ sets the path to api.php (with no leading or trailing slash) - _login\_data_ is a hashref of credentials to pass to ["login"](#login). - _debug_ - whether to provide debug output. 1 provides only error messages; 2 provides further detail on internal operations. 
For example: my $bot = MediaWiki::Bot->new({ assert => 'bot', protocol => 'https', host => 'en.wikimedia.org', agent => sprintf( 'PerlWikiBot/%s (https://metacpan.org/MediaWiki::Bot; User:Mike.lifeguard)', MediaWiki::Bot->VERSION ), login_data => { username => "Mike's bot account", password => "password" }, }); For backward compatibility, you can specify up to three parameters: my $bot = MediaWiki::Bot->new('My custom useragent string', $assert, $operator); **This form is deprecated**, will never do auto-login or autoconfiguration, and emits deprecation warnings. For further reading: - [MediaWiki::Bot wiki](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki) - [Creating a new bot](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Creating-a-new-bot) - [Setting the wiki](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Setting-the-wiki) - [Where is api.php](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Where-is-api.php) ## set\_wiki Set what wiki to use. The parameter is a hashref with keys: - _host_ - the domain name - _path_ - the part of the path before api.php (usually 'w') - _protocol_ is either 'http' or 'https'. If you don't set any parameter, its previous value is used. If it has never been set, the default settings are 'http', 'en.wikipedia.org' and 'w'. For example: $bot->set_wiki({ protocol => 'https', host => 'secure.wikimedia.org', path => 'wikipedia/meta/w', }); For backward compatibility, you can specify up to two parameters: $bot->set_wiki($host, $path); **This form is deprecated**, and will emit deprecation warnings. ## login This method takes a hashref with keys _username_ and _password_ at a minimum. See ["Single User Login"](#single-user-login) and ["Basic authentication"](#basic-authentication) for additional options. Logs the user $username in, optionally using $password. First, an attempt will be made to use cookies to log in. If this fails, an attempt will be made to use the password provided to log in, if any. If the login was successful, returns true; false otherwise. $bot->login({ username => $username, password => $password, }) or die "Login failed"; Once logged in, attempt to do some simple auto-configuration. At present, this consists of: - Warning if the account doesn't have the bot flag, and isn't a sysop account. - Setting an appropriate default assert. You can skip this autoconfiguration by passing `autoconfig => 0`. For backward compatibility, you can call this as $bot->login($username, $password); **This form is deprecated**, and will emit deprecation warnings. It will never do autoconfiguration or SUL login. ### Single User Login On WMF wikis, `do_sul` specifies whether to log in on all projects. The default is false. But even when false, you still get a CentralAuth cookie for, and are thus logged in on, all languages of a given domain (`*.wikipedia.org`, for example). When set, a login is done on each WMF domain so you are logged in on all ~800 content wikis. Since `*.wikimedia.org` is not possible, we explicitly include meta, commons, incubator, and wikispecies. 
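For example, a minimal sketch of a login that also performs SUL (the credentials shown are placeholders, not real values):

    $bot->login({
        username => "Mike's bot account",
        password => "password",
        do_sul   => 1,    # also log in across the other WMF projects
    }) or die "Login failed";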
### Basic authentication If you need to supply basic auth credentials, pass a hashref of data as described by [LWP::UserAgent](https://metacpan.org/pod/LWP::UserAgent): $bot->login({ username => $username, password => $password, basic_auth => { netloc => "private.wiki.com:80", realm => "Authentication Realm", uname => "Basic auth username", pass => "password", } }) or die "Couldn't log in"; ### Bot passwords `MediaWiki::Bot` doesn't yet support the more complicated (but more secure) oAuth login flow for bots. Instead, we support a simpler "bot password", which is a generated password connected to a (possibly-reduced) set of on-wiki privileges, and IP ranges from which it can be used. To create one, visit `Special:BotPasswords` on the wiki. Enter a label for the password, then select the privileges you want to use with that password. This set should be as restricted as possible; most bots only edit existing pages. Keeping the set of privileges as restricted as possible limits the possible damage if the password were ever compromised. Submit the form, and you'll be given a new "username" that looks like "AccountUsername@bot\_password\_label", and a generated bot password. To log in, provide those to `MediaWiki::Bot` verbatim. **References:** [API:Login](https://www.mediawiki.org/wiki/API:Login), [Logging in](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Logging-in) ## logout $bot->logout(); The logout method logs the bot out of the wiki. This invalidates all login cookies. **References:** [API:Logging out](https://www.mediawiki.org/wiki/API:Logout) ## edit my $text = $bot->get_text('My page'); $text .= "\n\n* More text\n"; $bot->edit({ page => 'My page', text => $text, summary => 'Adding new content', section => 'new', }); This method edits a wiki page, and takes a hashref of data with keys: - _page_ - the page title to edit - _text_ - the page text to write - _summary_ - an edit summary - _minor_ - whether to mark the edit as minor or not (boolean) - _bot_ - whether to mark the edit as a bot edit (boolean) - _assertion_ - usually 'bot', but see [http://mediawiki.org/wiki/Extension:AssertEdit](http://mediawiki.org/wiki/Extension:AssertEdit). - _section_ - edit a single section (identified by number) instead of the whole page An MD5 hash is sent to guard against data corruption while in transit. You can also call this as: $bot->edit($page, $text, $summary, $is_minor, $assert, $markasbot); **This form is deprecated**, and will emit deprecation warnings. ### CAPTCHAs If a [CAPTCHA](https://en.wikipedia.org/wiki/CAPTCHA) is encountered, the call to `edit` will return false, with the error code set to `ERR_CAPTCHA` and the details informing you that solving a CAPTCHA is required for this action. The information you need to actually solve the captcha (for example the URL for the image) is given in `$bot->{error}->{captcha}` as a hash reference. You will want to grab the keys 'url' (a relative URL to the image) and 'id' (the ID of the CAPTCHA). 
Once you have solved the CAPTCHA (presumably by interacting with a human), retry the edit, adding `captcha_id` and `captcha_solution` parameters: my $edit = {page => 'Main Page', text => 'got your nose'}; my $edit_status = $bot->edit($edit); if (not $edit_status) { if ($bot->{error}->{code} == ERR_CAPTCHA) { my @captcha_uri = split /\Q?/, $bot->{error}{captcha}{url}, 2; my $image = URI->new(sprintf '%s://%s%s?%s' => $bot->{protocol}, $bot->{host}, $captcha_uri[0], $captcha_uri[1], ); require Term::ReadLine; my $term = Term::ReadLine->new('Solve the captcha'); $term->ornaments(0); my $answer = $term->readline("Please solve $image and type the answer: "); # Add new CAPTCHA params to the edit we're attempting $edit->{captcha_id} = $bot->{error}->{captcha}->{id}; $edit->{captcha_solution} = $answer; $edit_status = $bot->edit($edit); } } **References:** [Editing pages](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Editing-pages), [API:Edit](https://www.mediawiki.org/wiki/API:Edit), [API:Tokens](https://www.mediawiki.org/wiki/API:Tokens) ## move $bot->move($from_title, $to_title, $reason, $options_hashref); This moves a wiki page. If you wish to specify more options (like whether to suppress creation of a redirect), use $options\_hashref, which has keys: - _movetalk_ specifies whether to attempt to move the talk page. - _noredirect_ specifies whether to suppress creation of a redirect. - _movesubpages_ specifies whether to move subpages, if applicable. - _watch_ and _unwatch_ add or remove the page and the redirect from your watchlist. - _ignorewarnings_ ignores warnings. my @pages = ("Humor", "Rumor"); foreach my $page (@pages) { my $to = $page; $to =~ s/or$/our/; $bot->move($page, $to, "silly 'merricans"); } **References:** [API:Move](https://www.mediawiki.org/wiki/API:Move) ## get\_history my @hist = $bot->get_history($title, $limit, $revid, $direction); Returns an array containing the history of the specified $page\_title, with $limit number of revisions (default is as many as possible). The array returned contains hashrefs with keys: revid, user, comment, minor, timestamp\_date, and timestamp\_time. **References:** [Getting page history](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Getting-page-history), [API:Properties#revisions](https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv) ## get\_text Returns the wikitext of the specified $page\_title. The second parameter is $revid - if defined, returns the text of that revision; the third is $section\_number - if defined, returns the text of that section. A blank page will return wikitext of "" (which evaluates to false in Perl, but is defined); a nonexistent page will return undef (which also evaluates to false in Perl, but is obviously undefined). You can distinguish between blank and nonexistent pages by using [defined](https://metacpan.org/pod/perlfunc#defined): my $wikitext = $bot->get_text('Page title'); print "Wikitext: $wikitext\n" if defined $wikitext; **References:** [Fetching page text](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Fetching-page-text), [API:Properties#revisions](https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv) ## get\_id Returns the id of the specified $page\_title. Returns undef if page does not exist. my $pageid = $bot->get_id("Main Page"); die "Page doesn't exist\n" if !defined($pageid); **References:** [API:Properties#info](https://www.mediawiki.org/wiki/API:Properties#info_.2F_in) ## get\_pages Returns the text of the specified pages in a hashref. 
Content of undef means page does not exist. Also handles redirects or article names that use namespace aliases. my @pages = ('Page 1', 'Page 2', 'Page 3'); my $thing = $bot->get_pages(\@pages); foreach my $page (keys %$thing) { my $text = $thing->{$page}; print "$text\n" if defined($text); } **References:** [Fetching page text](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Fetching-page-text), [API:Properties#revisions](https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv) ## get\_image $buffer = $bot->get_image('File:Foo.jpg', { width=>256, height=>256 }); Download an image from a wiki. This is derived from a similar function in [MediaWiki::API](https://metacpan.org/pod/MediaWiki::API). This one allows the image to be scaled down by passing a hashref with height & width parameters. It returns raw data in the original format. You may simply spew it to a file, or process it directly with a library such as [Imager](https://metacpan.org/pod/Imager). use File::Slurp qw(write_file); my $img_data = $bot->get_image('File:Foo.jpg'); write_file( 'Foo.jpg', {binmode => ':raw'}, \$img_data ); Images are scaled proportionally. (height/width) will remain constant, except for rounding errors. Height and width parameters describe the **maximum** dimensions. A 400x200 image will never be scaled to greater dimensions. You can scale it yourself; having the wiki do it is just lazy & selfish. **References:** [API:Properties#imageinfo](https://www.mediawiki.org/wiki/API:Properties#imageinfo_.2F_ii) ## revert Reverts the specified $page\_title to $revid, with an edit summary of $summary. A default edit summary will be used if $summary is omitted. my $revid = $bot->get_last("User:Mike.lifeguard/sandbox", "Mike.lifeguard"); print "Reverting to $revid\n" if defined($revid); $bot->revert('User:Mike.lifeguard', $revid, 'rvv'); **References:** [API:Edit](https://www.mediawiki.org/wiki/API:Edit) ## undo $bot->undo($title, $revid, $summary, $after); Reverts the specified $revid, with an edit summary of $summary, using the undo function. To undo all revisions from $revid up to but not including this one, set $after to another revid. If not set, just undo the one revision ($revid). **References:** [API:Edit](https://www.mediawiki.org/wiki/API:Edit) ## get\_last Returns the revid of the last revision to $page not made by $user. undef is returned if no result was found, as would be the case if the page is deleted. my $revid = $bot->get_last('User:Mike.lifeguard/sandbox', 'Mike.lifeguard'); if (defined($revid)) { print "Reverting to $revid\n"; $bot->revert('User:Mike.lifeguard', $revid, 'rvv'); } **References:** [API:Properties#revisions](https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv) ## update\_rc **This method is deprecated**, and will emit deprecation warnings. Replace calls to `update_rc()` with calls to the newer `recentchanges()`, which returns all available data, including rcid. Returns an array containing the $limit most recent changes to the wiki's _main namespace_. The array contains hashrefs with keys title, revid, old\_revid, and timestamp. 
my @rc = $bot->update_rc(5); foreach my $hashref (@rc) { my $title = $hashref->{'title'}; print "$title\n"; } The ["Options hashref"](#options-hashref) is also available: # Use a callback for incremental processing: my $options = { hook => \&mysub, }; $bot->update_rc($options); sub mysub { my ($res) = @_; foreach my $hashref (@$res) { my $page = $hashref->{'title'}; print "$page\n"; } } ## recentchanges($wiki\_hashref, $options\_hashref) Returns an array of hashrefs containing recentchanges data. The first parameter is a hashref with the following keys: - _ns_ - the namespace number, or an arrayref of numbers to specify several; default is the main namespace - _limit_ - the number of rows to fetch; default is 50 - _user_ - only list changes by this user - _show_ - itself a hashref where the key is a category and the value is a boolean. If true, the category will be included; if false, excluded. The categories are kinds of edits: minor, bot, anon, redirect, patrolled. See "rcshow" at [http://www.mediawiki.org/wiki/API:Recentchanges#Parameters](http://www.mediawiki.org/wiki/API:Recentchanges#Parameters). An ["Options hashref"](#options-hashref) can be used as the second parameter: my @rc = $bot->recentchanges({ ns => 4, limit => 100 }); foreach my $hashref (@rc) { print $hashref->{title} . "\n"; } # Or, use a callback for incremental processing: $bot->recentchanges({ ns => [0,1], limit => 500 }, { hook => \&mysub }); sub mysub { my ($res) = @_; foreach my $hashref (@$res) { my $page = $hashref->{title}; print "$page\n"; } } The hashref returned might contain the following keys: - _ns_ - the namespace number - _revid_ - _old\_revid_ - _timestamp_ - _rcid_ - can be used with ["patrol"](#patrol) - _pageid_ - _type_ - one of edit, new, log (there may be others) - _title_ For backwards compatibility, the previous method signature is still supported: $bot->recentchanges($ns, $limit, $options_hashref); **References:** [API:Recentchanges](https://www.mediawiki.org/wiki/API:Recentchanges) ## what\_links\_here Returns an array containing a list of all pages linking to $page. Additional optional parameters are: - One of: all (default), redirects, or nonredirects. - A namespace number to search (pass an arrayref to search in multiple namespaces) - An ["Options hashref"](#options-hashref). A typical query: my @links = $bot->what_links_here("Meta:Sandbox", undef, 1, { hook=>\&mysub } ); sub mysub{ my ($res) = @_; foreach my $hash (@$res) { my $title = $hash->{'title'}; my $is_redir = $hash->{'redirect'}; print "Redirect: $title\n" if $is_redir; print "Page: $title\n" unless $is_redir; } } Transclusions are no longer handled by what\_links\_here() - use ["list\_transclusions"](#list_transclusions) instead. **References:** [Listing incoming links](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Listing-incoming-links), [API:Backlinks](https://www.mediawiki.org/wiki/API:Backlinks) ## list\_transclusions Returns an array containing a list of all pages transcluding $page. Other parameters are: - One of: all (default), redirects, or nonredirects - A namespace number to search (pass an arrayref to search in multiple namespaces). - $options\_hashref as described by [MediaWiki::API](https://metacpan.org/pod/MediaWiki::API): Set max to limit the number of queries performed. Set hook to a subroutine reference to use a callback hook for incremental processing. Refer to the section on ["linksearch"](#linksearch) for examples. 
A typical query: $bot->list_transclusions("Template:Tlx", undef, 4, {hook => \&mysub}); sub mysub{ my ($res) = @_; foreach my $hash (@$res) { my $title = $hash->{'title'}; my $is_redir = $hash->{'redirect'}; print "Redirect: $title\n" if $is_redir; print "Page: $title\n" unless $is_redir; } } **References:** [Listing transclusions](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Listing-transclusions) [API:Embeddedin](https://www.mediawiki.org/wiki/API:Embeddedin) ## get\_pages\_in\_category Returns an array containing the names of all pages in the specified category (include the Category: prefix). Does not recurse into sub-categories. my @pages = $bot->get_pages_in_category('Category:People on stamps of Gabon'); print "The pages in Category:People on stamps of Gabon are:\n@pages\n"; The options hashref is as described in ["Options hashref"](#options-hashref). Use `{ max => 0 }` to get all results. **References:** [Listing category contents](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Listing-category-contents), [API:Categorymembers](https://www.mediawiki.org/wiki/API:Categorymembers) ## get\_all\_pages\_in\_category my @pages = $bot->get_all_pages_in_category($category, $options_hashref); Returns an array containing the names of **all** pages in the specified category (include the Category: prefix), including sub-categories. The $options\_hashref is described fully in ["Options hashref"](#options-hashref). **References:** [Listing category contents](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Listing-category-contents), [API:Categorymembers](https://www.mediawiki.org/wiki/API:Categorymembers) ## get\_all\_categories Returns an array containing the names of all categories. my @categories = $bot->get_all_categories(); print "The categories are:\n@categories\n"; Use `{ max => 0 }` to get all results. The default number of categories returned is 10, the maximum allowed is 500. **References:** [API:Allcategories](https://www.mediawiki.org/wiki/API:Allcategories) ## linksearch Runs a linksearch on the specified $link and returns an array containing anonymous hashes with keys 'url' for the outbound URL, and 'title' for the page the link is on. Additional parameters are: - A namespace number to search (pass an arrayref to search in multiple namespaces). - You can search by $protocol (http is default). - $options\_hashref is fully documented in ["Options hashref"](#options-hashref): Set _max_ in $options to get more than one query's worth of results: my $options = { max => 10, }; # I only want some results my @links = $bot->linksearch("slashdot.org", 1, undef, $options); foreach my $hash (@links) { my $url = $hash->{'url'}; my $page = $hash->{'title'}; print "$page: $url\n"; } Set _hook_ to a subroutine reference to use a callback hook for incremental processing: my $options = { hook => \&mysub, }; # I want to do incremental processing $bot->linksearch("slashdot.org", 1, undef, $options); sub mysub { my ($res) = @_; foreach my $hashref (@$res) { my $url = $hashref->{'url'}; my $page = $hashref->{'title'}; print "$page: $url\n"; } } **References:** [Finding external links](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Finding-external-links), [API:Exturlusage](https://www.mediawiki.org/wiki/API:Exturlusage) ## purge\_page Purges the server cache of the specified $page. Returns true on success; false on failure. Pass an array reference to purge multiple pages. If you really care, a true return value is the number of pages successfully purged. 
You could check that it is the same as the number you wanted to purge - maybe some pages don't exist, or you passed invalid titles, or you aren't allowed to purge the cache: my @to_purge = ('Main Page', 'A', 'B', 'C', 'Very unlikely to exist'); my $size = scalar @to_purge; print "all-at-once:\n"; my $success = $bot->purge_page(\@to_purge); if ($success == $size) { print "@to_purge: OK ($success/$size)\n"; } else { my $missed = @to_purge - $success; print "We couldn't purge $missed pages (list was: " . join(', ', @to_purge) . ")\n"; } # OR print "\n\none-at-a-time:\n"; foreach my $page (@to_purge) { my $ok = $bot->purge_page($page); print "$page: $ok\n"; } **References:** [Purging the server cache](https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Purging-the-server-cache), [API:Purge](https://www.mediawiki.org/wiki/API:Purge) ## get\_namespace\_names my %namespace_names = $bot->get_namespace_names(); Returns a hash linking the namespace id, such as 1, to its named equivalent, such as "Talk". **References:** [API:Meta#siteinfo](https://www.mediawiki.org/wiki/API:Meta#siteinfo_.2F_si) ## image\_usage Gets a list of pages which include a certain $image. Include the `File:` namespace prefix to avoid incurring an extra round-trip (which will also emit a deprecation warnings). Additional parameters are: - A namespace number to fetch results from (or an arrayref of multiple namespace numbers) - One of all, redirect, or nonredirects. - $options is a hashref as described in the section for ["linksearch"](#linksearch). my @pages = $bot->image_usage("File:Albert Einstein Head.jpg"); Or, make use of the ["Options hashref"](#options-hashref) to do incremental processing: $bot->image_usage("File:Albert Einstein Head.jpg", undef, undef, { hook=>\&mysub, max=>5 } ); sub mysub { my $res = shift; foreach my $page (@$res) { my $title = $page->{'title'}; print "$title\n"; } } **References:** [API:Imageusage](https://www.mediawiki.org/wiki/API:Imageusage) ## global\_image\_usage($image, $results, $filterlocal) Returns an array of hashrefs of data about pages which use the given image. my @data = $bot->global_image_usage('File:Albert Einstein Head.jpg'); The keys in each hashref are title, url, and wiki. `$results` is the maximum number of results that will be returned (not the maximum number of requests that will be sent, like `max` in the ["Options hashref"](#options-hashref)); the default is to attempt to fetch 500 (set to 0 to get all results). `$filterlocal` will filter out local uses of the image. **References:** [Extension:GlobalUsage#API](https://www.mediawiki.org/wiki/Extension:GlobalUsage#API) ## links\_to\_image A backward-compatible call to ["image\_usage"](#image_usage). You can provide only the image title. **This method is deprecated**, and will emit deprecation warnings. ## is\_blocked my $blocked = $bot->is_blocked('User:Mike.lifeguard'); Checks if a user is currently blocked. **References:** [API:Blocks](https://www.mediawiki.org/wiki/API:Blocks) ## test\_blocked Retained for backwards compatibility. Use ["is\_blocked"](#is_blocked) for clarity. **This method is deprecated**, and will emit deprecation warnings. ## test\_image\_exists Checks if an image exists at $page. 
- `FILE_NONEXISTENT` (0) means "Nothing there" - `FILE_LOCAL` (1) means "Yes, an image exists locally" - `FILE_SHARED` (2) means "Yes, an image exists on [Commons](http://commons.wikimedia.org)" - `FILE_PAGE_TEXT_ONLY` (3) means "No image exists, but there is text on the page" If you pass in an arrayref of images, you'll get out an arrayref of results. use MediaWiki::Bot::Constants; my $exists = $bot->test_image_exists('File:Albert Einstein Head.jpg'); if ($exists == FILE_NONEXISTENT) { print "Doesn't exist\n"; } elsif ($exists == FILE_LOCAL) { print "Exists locally\n"; } elsif ($exists == FILE_SHARED) { print "Exists on Commons\n"; } elsif ($exists == FILE_PAGE_TEXT_ONLY) { print "Page exists, but no image\n"; } **References:** [API:Properties#imageinfo](https://www.mediawiki.org/wiki/API:Properties#imageinfo_.2F_ii) ## get\_pages\_in\_namespace $bot->get_pages_in_namespace($namespace, $limit, $options_hashref); Returns an array containing the names of all pages in the specified namespace. The $namespace\_id must be a number, not a namespace name. Setting $page\_limit is optional, and specifies how many items to retrieve at once. Setting this to 'max' is recommended, and this is the default if omitted. If $page\_limit is over 500, it will be rounded up to the next multiple of 500. If $page\_limit is set higher than you are allowed to use, it will silently be reduced. Consider setting key 'max' in the ["Options hashref"](#options-hashref) to retrieve multiple sets of results: # Gotta get 'em all! my @pages = $bot->get_pages_in_namespace(6, 'max', { max => 0 }); **References:** [API:Allpages](https://www.mediawiki.org/wiki/API:Allpages) ## count\_contributions my $count = $bot->count_contributions($user); Uses the API to count $user's contributions. **References:** [API:Users](https://www.mediawiki.org/wiki/API:Users) ## timed\_count\_contributions ($timed_edits_count, $total_count) = $bot->timed_count_contributions($user, $days); Uses the API to count $user's contributions in last number of $days and total number of user's contributions (if needed). Example: If you want to get user contribs for last 30 and 365 days, and total number of edits you would write something like this: my ($last30days, $total) = $bot->timed_count_contributions($user, 30); my $last365days = $bot->timed_count_contributions($user, 365); You could get total number of edits also by separately calling count\_contributions like this: my $total = $bot->count_contributions($user); and use timed\_count\_contributions only in scalar context, but that would mean one more call to server (meaning more server load) of which you are excused as timed\_count\_contributions returns array with two parameters. **References:** [Extension:UserDailyContribs](https://www.mediawiki.org/wiki/Extension:UserDailyContribs) ## last\_active my $latest_timestamp = $bot->last_active($user); Returns the last active time of $user in `YYYY-MM-DDTHH:MM:SSZ`. **References:** [API:Usercontribs](https://www.mediawiki.org/wiki/API:Usercontribs) ## recent\_edit\_to\_page my ($timestamp, $user) = $bot->recent_edit_to_page($title); Returns timestamp and username for most recent (top) edit to $page. **References:** [API:Properties#revisions](https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv) ## get\_users my @recent_editors = $bot->get_users($title, $limit, $revid, $direction); Gets the most recent editors to $page, up to $limit, starting from $revision and going in $direction. 
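For example, a minimal sketch (the page title and limit are illustrative values only, and it assumes the return is a plain list of usernames, as the call above suggests):

    my @editors = $bot->get_users('Project:Sandbox', 5);
    print "Recently edited by: @editors\n" if @editors;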
**References:** [API:Properties#revisions](https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv) ## was\_blocked for ("Mike.lifeguard", "Jimbo Wales") { print "$_ was blocked\n" if $bot->was_blocked($_); } Returns whether $user has ever been blocked. **References:** [API:Logevents](https://www.mediawiki.org/wiki/API:Logevents) ## test\_block\_hist Retained for backwards compatibility. Use ["was\_blocked"](#was_blocked) for clarity. **This method is deprecated**, and will emit deprecation warnings. ## expandtemplates my $expanded = $bot->expandtemplates($title, $wikitext); Expands templates on $page, using $text if provided, otherwise loading the page text automatically. **References:** [API:Parsing wikitext](https://www.mediawiki.org/wiki/API:Parsing_wikitext) ## get\_allusers my @users = $bot->get_allusers($limit, $user_group, $options_hashref); Returns an array of all users. Default $limit is 500. Optionally specify a $group (like 'sysop') to list that group only. The last optional parameter is an ["Options hashref"](#options-hashref). **References:** [API:Allusers](https://www.mediawiki.org/wiki/API:Allusers) ## db\_to\_domain Converts a wiki/database name (enwiki) to the domain name (en.wikipedia.org). my @wikis = ("enwiki", "kowiki", "bat-smgwiki", "nonexistent"); foreach my $wiki (@wikis) { my $domain = $bot->db_to_domain($wiki); next if !defined($domain); print "$wiki: $domain\n"; } You can pass an arrayref to do bulk lookup: my @wikis = ("enwiki", "kowiki", "bat-smgwiki", "nonexistent"); my $domains = $bot->db_to_domain(\@wikis); foreach my $domain (@$domains) { next if !defined($domain); print "$domain\n"; } **References:** [Extension:SiteMatrix](https://www.mediawiki.org/wiki/Extension:SiteMatrix) ## domain\_to\_db my $db = $bot->domain_to_db($domain_name); As you might expect, does the opposite of ["db\_to\_domain"](#db_to_domain): Converts a domain name (meta.wikimedia.org) into a database/wiki name (metawiki). **References:** [Extension:SiteMatrix](https://www.mediawiki.org/wiki/Extension:SiteMatrix) ## diff This allows retrieval of a diff from the API. The return is a scalar containing the _HTML table_ of the diff. Options are passed as a hashref with keys: - _title_ is the title to use. Provide _either_ this or revid. - _revid_ is any revid to diff from. If you also specified title, only title will be honoured. - _oldid_ is an identifier to diff to. This can be a revid, or the special values 'cur', 'prev' or 'next' **References:** [API:Properties#revisions](https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv) ## prefixindex This returns an array of hashrefs containing page titles that start with the given $prefix. The hashref has keys 'title' and 'redirect' (present if the page is a redirect, not present otherwise). Additional parameters are: - One of all, redirects, or nonredirects - A single namespace number (unlike linksearch etc, which can accept an arrayref of numbers). - $options\_hashref as described in ["Options hashref"](#options-hashref). my @prefix_pages = $bot->prefixindex("User:Mike.lifeguard"); # Or, the more efficient equivalent my @prefix_pages = $bot->prefixindex("Mike.lifeguard", 2); foreach my $hashref (@prefix_pages) { my $title = $hashref->{'title'}; if ($hashref->{'redirect'}) { print "$title is a redirect\n"; } else { print "$title is not a redirect\n"; } } **References:** [API:Allpages](https://www.mediawiki.org/wiki/API:Allpages) ## search This is a simple search for your $search\_term in page text. 
It returns an array of page titles matching. Additional optional parameters are: - A namespace number to search in, or an arrayref of numbers (default is the main namespace) - $options\_hashref is a hashref as described in ["Options hashref"](#options-hashref): my @pages = $bot->search("Mike.lifeguard", 2); print "@pages\n"; Or, use a callback for incremental processing: my @pages = $bot->search("Mike.lifeguard", 2, { hook => \&mysub }); sub mysub { my ($res) = @_; foreach my $hashref (@$res) { my $page = $hashref->{'title'}; print "$page\n"; } } **References:** [API:Search](https://www.mediawiki.org/wiki/API:Search) ## get\_log This fetches log entries, and returns results as an array of hashes. The first parameter is a hashref with keys: - _type_ is the log type (block, delete...) - _user_ is the user who _performed_ the action. Do not include the User: prefix - _target_ is the target of the action. Where an action was performed to a page, it is the page title. Where an action was performed to a user, it is User:$username. The second is the familiar ["Options hashref"](#options-hashref). my $log = $bot->get_log({ type => 'block', user => 'User:Mike.lifeguard', }); foreach my $entry (@$log) { my $user = $entry->{'title'}; print "$user\n"; } $bot->get_log({ type => 'block', user => 'User:Mike.lifeguard', }, { hook => \&mysub, max => 10 } ); sub mysub { my ($res) = @_; foreach my $hashref (@$res) { my $title = $hashref->{'title'}; print "$title\n"; } } **References:** [API:Logevents](https://www.mediawiki.org/wiki/API:Logevents) ## is\_g\_blocked my $is_globally_blocked = $bot->is_g_blocked('127.0.0.1'); Returns what IP/range block _currently in place_ affects the IP/range. The return is a scalar of an IP/range if found (evaluates to true in boolean context); undef otherwise (evaluates false in boolean context). Pass in a single IP or CIDR range. **References:** [Extension:GlobalBlocking](https://www.mediawiki.org/wiki/Extension:GlobalBlocking/API) ## was\_g\_blocked print "127.0.0.1 was globally blocked\n" if $bot->was_g_blocked('127.0.0.1'); Returns whether an IP/range was ever globally blocked. You should probably call this method only when your bot is operating on Meta - this method will warn if not. **References:** [API:Logevents](https://www.mediawiki.org/wiki/API:Logevents) ## was\_locked my $was_locked = $bot->was_locked('Mike.lifeguard'); Returns whether a user was ever locked. You should probably call this method only when your bot is operating on Meta - this method will warn if not. **References:** [API:Logevents](https://www.mediawiki.org/wiki/API:Logevents) ## get\_protection Returns data on page protection as a array of up to two hashrefs. Each hashref has a type, level, and expiry. Levels are 'sysop' and 'autoconfirmed'; types are 'move' and 'edit'; expiry is a timestamp. Additionally, the key 'cascade' will exist if cascading protection is used. 
my $page = 'Main Page'; $bot->edit({ page => $page, text => rand(), summary => 'test', }) unless $bot->get_protection($page); You can also pass an arrayref of page titles to do bulk queries: my @pages = ('Main Page', 'User:Mike.lifeguard', 'Project:Sandbox'); my $answer = $bot->get_protection(\@pages); foreach my $title (keys %$answer) { my $protected = $answer->{$title}; print "$title is protected\n" if $protected; print "$title is unprotected\n" unless $protected; } **References:** [API:Properties#info](https://www.mediawiki.org/wiki/API:Properties#info_.2F_in) ## is\_protected This is a synonym for ["get\_protection"](#get_protection), which should be used in preference. **This method is deprecated**, and will emit deprecation warnings. ## patrol $bot->patrol($rcid); Marks a page or revision identified by the $rcid as patrolled. To mark several RCIDs as patrolled, you may pass an arrayref of them. Returns false and sets `$bot->{error}` if the account cannot patrol. **References:** [API:Patrol](https://www.mediawiki.org/wiki/API:Patrol) ## email $bot->email($user, $subject, $body); This allows you to send emails through the wiki. All 3 of $user (without the User: prefix), $subject and $body are required. If $user is an arrayref, this will send the same email (subject and body) to all users. **References:** [API:Email](https://www.mediawiki.org/wiki/API:Email) ## top\_edits Returns an array of the page titles where the $user is the latest editor. The second parameter is the familiar [$options\_hashref](#linksearch). my @pages = $bot->top_edits("Mike.lifeguard", {max => 5}); foreach my $page (@pages) { $bot->rollback($page, "Mike.lifeguard"); } Note that accessing the data with a callback happens **before** filtering the top edits is done. For that reason, you should use ["contributions"](#contributions) if you need to use a callback. If you use a callback with top\_edits(), you **will not** necessarily get top edits returned. It is only safe to use a callback if you _check_ that it is a top edit: $bot->top_edits("Mike.lifeguard", { hook => \&rv }); sub rv { my $data = shift; foreach my $page (@$data) { if (exists($page->{'top'})) { $bot->rollback($page->{'title'}, "Mike.lifeguard"); } } } **References:** [API:Usercontribs](https://www.mediawiki.org/wiki/API:Usercontribs) ## contributions my @contribs = $bot->contributions($user, $namespace, $options); Returns an array of hashrefs of data for the user's contributions. $ns can be an arrayref of namespace numbers. $options can be specified as in ["linksearch"](#linksearch). Specify an arrayref of users to get results for multiple users. **References:** [API:Usercontribs](https://www.mediawiki.org/wiki/API:Usercontribs) ## upload $bot->upload({ data => $file_contents, summary => 'uploading file' }); $bot->upload({ file => $file_name, title => 'Target filename.png' }); Upload a file to the wiki. Specify the file by either giving the filename, which will be read in, or by giving the data directly. **References:** [API:Upload](https://www.mediawiki.org/wiki/API:Upload) ## upload\_from\_url Upload file directly from URL to the wiki. Specify URL, the new filename and summary. Summary and new filename are optional. 
$bot->upload_from_url({ url => 'http://some.domain.ext/pic.png', title => 'Target_filename.png', summary => 'uploading new pic', }); If uploading from URL is enabled on your target wiki (that is, `$wgAllowCopyUploads` is set to true in LocalSettings.php) and you have the appropriate user rights, you can use this function to upload files to your wiki directly from a remote server. **References:** [API:Upload#Uploading\_from\_URL](https://www.mediawiki.org/wiki/API:Upload#Uploading_from_URL) ## usergroups Returns a list of the usergroups a user is in: my @usergroups = $bot->usergroups('Mike.lifeguard'); **References:** [API:Users](https://www.mediawiki.org/wiki/API:Users) ## Options hashref This is passed through to the lower-level interface [MediaWiki::API](https://metacpan.org/pod/MediaWiki::API), and is fully documented there. The hashref can have 3 keys: - max Specifies the maximum number of queries to retrieve data from the wiki. This is independent of the _size_ of each query (how many items each query returns). Set to 0 to retrieve all the results. - hook Specifies a coderef to a hook function that can be used to process large lists as they come in. When this is used, your subroutine will get the raw data. This is noted in cases where it is known to be significant. For example, when using a hook with `top_edits()`, you need to check whether the edit is the top edit yourself - your subroutine gets results as they come in, and before they're filtered. - skip\_encoding MediaWiki's API uses UTF-8 and any 8 bit character string parameters are encoded automatically by the API call. If your parameters are already in UTF-8 this will be detected and the encoding will be skipped. If your parameters for some reason contain UTF-8 data but no UTF-8 flag is set (i.e. you did not use the `use [utf8](https://metacpan.org/pod/utf8);` pragma) you should prevent re-encoding by passing an option `skip_encoding => 1`. For example: $category ="Cat\x{e9}gorie:moyen_fran\x{e7}ais"; # latin1 string $bot->get_all_pages_in_category($category); # OK $category = "Cat". pack("U", 0xe9)."gorie:moyen_fran".pack("U",0xe7)."ais"; # unicode string $bot->get_all_pages_in_category($category); # OK $category ="Cat\x{c3}\x{a9}gorie:moyen_fran\x{c3}\x{a7}ais"; # unicode data without utf-8 flag # $bot->get_all_pages_in_category($category); # NOT OK $bot->get_all_pages_in_category($category, { skip_encoding => 1 }); # OK If you need this, it probably means you're doing something wrong. Feel free to ask for help. # ERROR HANDLING All functions will return undef in any handled error situation. Further error data is stored in `$bot->{error}->{code}` and `$bot->{error}->{details}`. Error codes are provided as constants in [MediaWiki::Bot::Constants](https://metacpan.org/pod/MediaWiki::Bot::Constants), and can also be imported through this module: use MediaWiki::Bot qw(:constants); # AVAILABILITY The project homepage is [https://metacpan.org/module/MediaWiki::Bot](https://metacpan.org/module/MediaWiki::Bot). The latest version of this module is available from the Comprehensive Perl Archive Network (CPAN). Visit [http://www.perl.com/CPAN/](http://www.perl.com/CPAN/) to find a CPAN site near you, or see [https://metacpan.org/module/MediaWiki::Bot/](https://metacpan.org/module/MediaWiki::Bot/). 
# SOURCE The development version is on github at [http://github.com/MediaWiki-Bot/MediaWiki-Bot](http://github.com/MediaWiki-Bot/MediaWiki-Bot) and may be cloned from [git://github.com/MediaWiki-Bot/MediaWiki-Bot.git](git://github.com/MediaWiki-Bot/MediaWiki-Bot.git) # BUGS AND LIMITATIONS You can make new bug reports, and view existing ones, through the web interface at [https://github.com/MediaWiki-Bot/MediaWiki-Bot/issues](https://github.com/MediaWiki-Bot/MediaWiki-Bot/issues). # AUTHORS - Dan Collins - Mike.lifeguard - Alex Rowe - Oleg Alexandrov - jmax.code - Stefan Petrea - kc2aei - bosborne@alum.mit.edu - Brian Obio - patch and bug report contributors # COPYRIGHT AND LICENSE This software is Copyright (c) 2016 by the MediaWiki::Bot team . This is free software, licensed under: The GNU General Public License, Version 3, June 2007 MediaWiki-Bot-5.006003/LICENSE0000644000175000017500000010501312737341477014014 0ustar mikemikeThis software is Copyright (c) 2016 by the MediaWiki::Bot team . This is free software, licensed under: The GNU General Public License, Version 3, June 2007 GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. 
Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. 
The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. 
When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. 
b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. 
If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. 
Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. 
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. <one line to give the program's name and a brief idea of what it does.> Copyright (C) <year> <name of author> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: <program> Copyright (C) <year> <name of author> This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <http://www.gnu.org/licenses/>. The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <http://www.gnu.org/philosophy/why-not-lgpl.html>. MediaWiki-Bot-5.006003/MANIFEST0000644000175000017500000000306112737341477014140 0ustar mikemike# This file was automatically generated by Dist::Zilla::Plugin::Manifest v6.005.
Changes INSTALL LICENSE MANIFEST MANIFEST.SKIP META.json META.yml Makefile.PL README.mkdn dist.ini lib/MediaWiki/Bot.pm lib/MediaWiki/Bot/Constants.pm perlcritic.rc t/00-compile.t t/00-init.t t/01-api_error.t t/02-login.t t/03-get_text.t t/04-edit.t t/05-revert.t t/06-get_history.t t/07-unicode.t t/08-get_last.t t/09-update_rc.t t/10-what_links_here.t t/11-get_pages_in_category.t t/12-linksearch.t t/13-get_namespace_names.t t/14-get_pages_in_namespace.t t/15-count_contributions.t t/16-last_active.t t/17-was_blocked.t t/18-is_blocked.t t/19-get_pages.t t/20-assertion.t t/21-get_allusers.t t/22-get_id.t t/23-list_transclusions.t t/24-purge_page.t t/25-sitematrix.t t/26-diff.t t/27-prefixindex.t t/28-search.t t/29-get_log.t t/30-was_g_blocked.t t/31-is_g_blocked.t t/32-was_locked.t t/33-is_locked.t t/34-secure.t t/35-get_protection.t t/36-email.t t/37-move.t t/38-test_image_exists.t t/39-image_usage.t t/40-upload.t t/41-get_users.t t/42-expandtemplates.t t/43-recentchanges.t t/44-patrol.t t/45-contributions.t t/46-usergroups.t t/47-global_image_usage.t t/48-get_image.t t/49-get_all_categories.t xt/author/critic.t xt/author/eol.t xt/author/mojibake.t xt/author/no-tabs.t xt/author/pod-coverage.t xt/author/pod-syntax.t xt/author/portability.t xt/author/synopsis.t xt/author/test-version.t xt/release/cpan-changes.t xt/release/dist-manifest.t xt/release/distmeta.t xt/release/kwalitee.t xt/release/meta-json.t xt/release/minimum-version.t xt/release/pod-linkcheck.t xt/release/unused-vars.t MediaWiki-Bot-5.006003/META.yml0000644000175000017500000000302512737341477014260 0ustar mikemike--- abstract: 'a high-level bot framework for interacting with MediaWiki wikis' author: - 'Dan Collins ' - 'Mike.lifeguard ' - 'Alex Rowe ' - 'Oleg Alexandrov ' - 'jmax.code ' - 'Stefan Petrea ' - 'kc2aei ' - bosborne@alum.mit.edu - 'Brian Obio ' - 'patch and bug report contributors' build_requires: File::Spec: '0' IO::Handle: '0' IPC::Open3: '0' List::MoreUtils: '0' Test::Is: '0' Test::More: '0.96' Test::RequiresInternet: '0' Test::Warn: '0' blib: '1.01' perl: '5.008' utf8: '0' configure_requires: ExtUtils::MakeMaker: '0' perl: '5.006' dynamic_config: 0 generated_by: 'Dist::Zilla version 6.005, CPAN::Meta::Converter version 2.150005' license: gpl meta-spec: url: http://module-build.sourceforge.net/META-spec-v1.4.html version: '1.4' name: MediaWiki-Bot no_index: directory: - corpus - examples - inc requires: Carp: '0' Constant::Generate: '0' Digest::MD5: '2.39' Encode: '0' Exporter: '0' File::Basename: '0' HTML::Entities: '3.28' LWP::Protocol::https: '6.06' List::Util: '0' MediaWiki::API: '0.36' Module::Pluggable: '0' perl: '5.008' strict: '0' warnings: '0' resources: bugtracker: https://github.com/MediaWiki-Bot/MediaWiki-Bot/issues homepage: https://metacpan.org/module/MediaWiki::Bot repository: git://github.com/MediaWiki-Bot/MediaWiki-Bot.git version: '5.006003' MediaWiki-Bot-5.006003/Changes0000644000175000017500000004615412737341477014314 0ustar mikemikeChangelog for MediaWiki-Bot 5.006003 2016-07-06 - Changed the file the test suite used to test fetching images - Documentation tweaks 5.006002 2014-09-08 - Re-release 5.006001 to fix a packaging error 5.006001 2014-09-08 - Get github metadata from the MediaWiki-Bot organization's repo [gh-69] - Use Test::RequiresInternet to make sure we can reach the wikis we test against - Mark flaky tests as extended - Use HTTPS by default and warn if ever trying to log in over HTTP - Added pointers to API documentation in POD [gh-63] 5.006000 2014-09-01 - Move subpages correctly 
when requested [GH#55 - labster] - Use new prop paramter in expandtemplates [GH#58, thanks fhocutt] - Warn if neither operator nor a UA string were given [gh-61] - Show a good example of customizing the UA string [gh-66] - Use symbolic constants instead of magic numbers, and add MediaWiki::Bot::Constants so developers can use them too [gh-59] - Support solving CAPTCHAs [gh-2] - Remove automatic editing of the bot operator's talk page [gh-68] - Use new method for getting tokens 5.005007 2013-11-27 - Don't fail due to new Module::Pluggable warnings - Ensure t/48-get_image.t has JPEG support - Corrected documentation for get_image (GH#54, thanks sanxiago!) - Correct a precedence issue uncovered by new warnings in perl v5.19.4 5.005006 2013-04-07 - Plan t/40-upload.t properly [GH#50] 5.005005 2013-03-29 - Added upload_from_url function (GH#48; stemd) - Added two parameters to recentchanges functions (GH#48; stemd) - Added timed_count_contributions function (GH#48; stemd) 5.005004 2012-10-27 - More robust error-checking in some cases (GH#45, James DuPont) 5.005003 2012-09-14 - Be more liberal with matching whitespace in the diff test - Correctly handle namespace #0 (GH#43, thanks Seth!) - Improved error handling for uploads (GH#33) - Fix an uninitialized warning in get_log (GH#42) 3.005002 2012-05-08 - Updated test suite for on-wiki changes 3.005001 2012-04-08 - Fixed erroneous documentation on the edit method - Fixed edit failures due to changes in HTTP::Message 6.03 (GH#39) 3.005000 2012-03-03 - Expandtemplates will now behave correctly when the page title is omitted - Better detection of login failure 3.004003 2012-01-14 - Edits may be marked as major once again - Don't use an invalid undefined assertion by default - Fix tests for logging in (now requires credentials provided in PWPUsername and PWPPassword environment variables) - Fix page diff test - Fix the expandtemplates test - Fix the page history test - Make fetching a patrol token fail-fast. If the account cannot get a patrol token, instead of attempting to patrol (and getting a badtoken error), return false and set $bot->{error} with a handcrafted error. - Fix test for emailing - Fix test for uploading (optional test requirements: GD, Data::Random) v3.4.2 2011-08-26 - Silence spurious warnings in the test suite - Use HTTP GET so some requests are cacheable (Thanks to Jools Wills) - Add support for file uploads via API (Thanks to Jools Wills) - Patrolling edits now fetches the patrol token manually, and should work - Add support for querying a user's usergroups, and infrastructure to support changing usergroups with the Admin plugin - get_pages now handles namespace aliases dynamically, and should work on all wikis 3.4.0 2011-06-19 - Packaging updates & changes from last 2 alphas 3.3.1_2 2011-06-04 - set_highlimits, which had no effect and was deprecated, has been removed. - Deprecated aliases for MediaWiki::Bot were removed. - global_image_usage() now supports queries of http://www.mediawiki.org/wiki/GlobalUsage 3.3.1_1 2011-05-08 - Dependencies for HTTPS fixed - Old, unnecessary dependencies removed - Calling test_image_exists() with an arrayref of images now returns results in the correct order. 
3.3.1 2011-04-23 - Deprecation warnings now respect whether deprecation warnings are turned on 3.3.0 2011-04-22 - NOTE: PWP and perlwikipedia are officially deprecated, and *will* be removed in a future release - Added deprecation warnings to PWP/perlwikipedia aliases, as well as methods/call styles retained only for backwards compatibility, and which might be removed in future releases. - General cleanup of documentation - get_pages_in_namespace() can now properly retrieve all results - recent_edit_to_page() now returns what it was documented to return (both timestamp and username) - Require a more recent version of MediaWiki::API - Resolve permissions error in contributions() - Make contributions() return data as documented 3.2.7 2011-01-24 - get_allusers() can now filter by usergroup, and accepts an options hashref for greater versatility. - Test suite now uses test.wikipedia.org - Don't wait forever if the wiki doesn't respond 3.2.6 2010-10-28 - recentchanges() now filters by namespace and returns all available data, including rcid; see Pod for details. - get_pages_in_category() now requests the maximum number of results; this resolves a major performance issue when getting the contents of large categories. - The same performance issue (or potential issue) was resolved in many other methods. 3.2.4 2010-09-06 - Removed dependency on WWW::Mechanize - revert() was broken by refactoring; this error has been fixed 3.2.0 2010-08-17 - BREAKING CHANGE: The constructor now returns false when auto-login fails - Some cached data is now invalidated when set_wiki() is called - Setting a useragent now works - is_g_blocked() checks if an IP/range is currently blocked, as well as what the address is - was_g_blocked() checks if an IP/range was ever globally blocked - "Loaded Plugin X" messages were removed - INSTALL has slightly better instructions - is_protected() checks whether a page is protected or not - It is now possible to get all results from get_pages_in_category() by setting max to zero in the options hashref - get_all_pages_in_category() now has protection against infinite recursion due to category loops - get_pages() now actually accepts an arrayref of pages, as documented. - get_(all_)pages_in_category now have consistent input parsing - misc/Login.pl was removed - Handling of null path to api.php was fixed - Useragent format now follows the RFC - Performance of login() was improved by about 30% - Calls to list-based methods which use a callback hook now return true for error-checking. Calls which don't use a callback are unchanged. - links_to_image() was renamed to image_usage() and now uses the API and has new features. The old name is retained for backward compatibility. - login() can now do SUL; see POD for details - test_image_exists() sometimes returned undef when it shouldn't have; this was fixed. - The test suite was made more robust - SUL on secure.wikimedia.org is useless, and is now disabled - test_image_exists() now accepts an arrayref of images - Two levels of debug output are now available. 1 is failure notification; 2 is internal operations. - top_edits() and contributions() offer two ways to get user contributions. See Pod for details. - Support for LDAP login was added thanks to a patch by philippe.cade - Protocol, host, and path are now retained when a call to set_wiki() doesn't specify one of them. 3.1.0 2010-06-05 - BREAKING CHANGE: update_rc() now accepts a hashref of options, and the hash structure returned is changed. See POD for details. 
- get_pages_in_category() and get_all_pages_in_category() now work properly for non-English wikis (issue 42). - Moving pages is now supported (issue 43). - diff() now supports basic diffing. - prefixindex() now supports listing pages with a common prefix - edit() can now do section editing. - Another alias was added: perlwikipedia - Searching is now supported by search() - Methods like linksearch(), get_pages_in_category(), and list_transcludes() now perform only one query by default. Pass max => 5 (or whatever) to do more queries. Note that this controls the number of queries, not the number of results fetched per query. See POD for details. - get_log() was added to support the basics of searching the log. - Support for basic auth was added (issue 68). - Support for GlobalBlocking queries was added. 3.0.0 2010-05-30 - BREAKING CHANGE: get_text() now returns the page's wikitext. Blank pages return ""; nonexistent pages return undef. These both evaluate to false; you can differentiate between them with defined(). get_id() now returns undef instead of 2 when there is no pageid (page is nonexistent or title is invalid). - BREAKING CHANGE: All subs return undef when they encounter an error that is passed off to _handle_api_error(). Error details are still available in $bot->{'error'}. - BREAKING CHANGE: linksearch() now uses keys 'url' and 'title' like the API does. This is to avoid confusion when using a callback hook, which is now exposed to callers. linksearch() now uses the API. - BREAKING CHANGE: login()'s return values are reversed, and properly documented. Returns true for success; false for failure: $bot->login($user, $pass) or die "Login failed\n"; # This is in accord with Perl standards - BREAKING CHANGE: what_links_here() no longer handles transclusions. To get transclusions, use list_transclusions(). Also, key names are now 'title' and 'redirect' (defined when the page is a redirect; undefined otherwise). - Codebase underwent spring cleaning. - Default useragent now includes the version number. - Functionality which still uses index.php no longer fails on non-English wikis. Patch by piotr.kubowicz. - Crypt::SSLeay is now correctly listed as a prerequisite, - is_blocked() replaces test_blocked(), and now uses the API. test_blocked() remains for backwards compatibility - was_blocked() replaces test_block_hist(), and now uses the API. test_block_hist() remains for backwards compatibility. - $summary is now optional for undelete(); a generic reason will be used if ommitted. undelete() also uses API now. - edit() now uses an md5 hash to ensure data integrity. - set_highlimits() can now be used to *not* use highlimits. - revert() now uses the API. - get_last() now uses the API. - undo() now uses the API, and is documented accurately. - logout() was added. - purge_page() now uses the API and returns the number of pages successfully purged, which may be useful for the caller's error-checking. - New methods were added to query WMF's sitematrix, and convert between db name (enwikibooks) to the domain name (en.wikibooks.org): db_to_domain() and domain_to_db(). - Admin functions were moved into the plugin MediaWiki::Bot::Plugin::Admin, which is auto-loaded if present. - The constructor now does more for script authors. It sets the wiki, logs in, and does some autoconfiguration. See updated POD for details. - edit() now accepts a hashref of options. See updated POD for details. - expandtemplates() now uses the API. 2.3.0 2009-05-23 - Get_pages now makes sense when following namespace aliases. 
The title you give is the title you get back. Aliases are not expanded. Patch by Stefan Petrea. - Unicode test will now take a bit longer but hopefully avoid any errors that shouldn't be there 2.2.3 2009-05-05 - RELEASE-NOTES renamed to Changes - Test suite now outputs a message when there is an expected error to be printed to terminal - Some debugging code removed - Simplify Unicode tests, now only run editing tests if not blocked - More specific error detection in test 11 (get_pages_in_namespace) - More verbose error handling in all code - Add unblock capability 2.1.1 2009-02-24 - Additions of sub unblock, some test changes - Error checking is about to be updated. If an error occurs, any subs will return -1*the integer error number. See the updated documentation in 2.2.0. 2.1.0 2009-02-03 - Some more unicode changes - If unicode doesn't work, TELL ME 2.0.1 2009-01-26 - Removed some redundant error messages 2.0.0 2008-12-30 - I was going to make this 1.5.3, but then I realized I shouldn't hide something this big behind a sub-minor release. For reasons of having the module included in the module list on CPAN, making everything be a bit neater, and for all-around happiness, I've moved Perlwikipedia into the preexisting MediaWiki:: namespace. The module is now called MediaWiki::Bot. - You can also now use "PWP" instead of "MediaWiki::Bot" in your code. The distribution on CPAN and in the svn repo includes an alias. - Plugins are now loaded automatically if installed. This means you don't have to do as much arcane magic. The plugins will be listed under MediaWiki::Bot::Plugin:: in CPAN and are not currently in svn. If you have a task that you would like to make into a plugin and publish on CPAN, please get in touch with me at DCOLLINS@cpan.org - Added the ImageTester plugin to CPAN. Purpose is to analyze and tag copyrighted images which do not follow en.wikipedia's policies - Added the CUP plugin to CPAN as a basic example of what a plugin does. This is used by the bot that will run the WikiCup stats, and these methods can be called just like any other methods in the main module 1.5.2 2008-12-27 - Add support for 'plugins' which add additional features to MediaWiki::Bot. See MediaWiki::Bot::Plugin::ImageTester on CPAN for an example. - Loosened requirements for test_image_exists to determine that an image is on commons: the missing key is not always present. 1.5.1 2008-12-21 - Fixed the bug where logins wouldn't transfer from API to wiki again, now part of test 01. - Added tests for AssertEdit, maxlag, get_allusers, get_pages - Added sub set_highlimits for api queries, since PWP can't tell if you're a bot - Sub test_image_exists can now tell if an image page exists but has no image. Also uses API. - Added defaults for protection, now uses API. In-house tests exist for protection and deletion. - Some variable normalizing in tests. - Added testing environment variables: PWPMakeTestSetWikiHost, PWPMakeTestSetWikiDir - Reenabled tests for get_namespace_names, get_pages_in_namespace - Some fixes for potential race conditions on database replication for tests, should not be necessary in real production code 1.5.0 2008-12-08 - Added a sub get_pages(), which accepts a list of pages and gets each page's text in one go and returns a hash. Much faster than get_text() for more than one page. - Switched sub protect over to API, cascading works now - Add a few more tests 1.4.2 2008-12-07 - Apparently I accidentally removed get_allusers? Fixed that. 
- Some CPAN testers have been reporting a failure to make the test edit in t/02_edit.t so I added a call to Data::Dumper. 1.4.1 2008-11-27 - Remove my tsting code from sub login, now using an updated version of MediaWiki::API to handle cookies properly. MediaWiki::API 0.20 or higher now required. 1.4.0 2008-11-18 - Fix a bug in get_allusers() by bosborne - Fix a bug which made some methods, such as delete_old_image, attempt to execute logged-out because the method used to transfer cookies from the API useragent, which does the logging in, to the MediaWiki::Bot useragent, which does the other stuff, was not functional. This bug only affected non-api methods and has been fixed through a nasty hack, which I hope can be made cleaner in the coming weeks. 1.3.8 2008-11-16 - New method: get_allusers($limit) by bosborne - That bug I fixed in 1.3.7? I fixed it again. For real this time. - Using 'retries' instead of 'max retries' config variable for MediaWiki::API HTTP errors 1.3.7 2008-11-11 - Minor bugfix in one of the tests. 1.3.6 2008-10-27 - Added maxlag compatibility. (Thanks to the developers of MediaWiki::API for their implementation) The settings are as follows: - Maxlag parameter: Dafault 5 seconds, can be configured when creating the object - Delay: 1 second - Retries: Until success - Make test 98 fail gracefully on platforms without the test suite - Fix some really weird spacing in the Makefile.PL - The other maxlag options, along with lots of other defaults, will probably be modifiable in 1.4 with a sort of config sub. This will allow the custom configuration of options for WWW::Mechanize and MediaWiki::API, so you don't need to go sourcediving or remember the order for the parameters to the constructor. - Since wiki.xyrael.net is down, tests 1, 8, 10, 11 1.3.5 2008-10-04 - Allowing cascade protection now. This may or may not work, since there are some changes being made to the protection API. - Anonymous editing should work. If you still can not use this framework to edit, I need to be contacted. To this end, I've enabled an editing test, hopefully cpan-testers sends me good results :) - Fix a stupid mistake I made regarding the names of certain parameters to get_history 1.3.4 2008-09-11 - Some triage after the first CPAN test results came in: - Appearantly, get_pages_in_category was running test file 08, test 2, very slowly because I was loading a 1000+ page category in sets of 10 pages at a time. This has been remedied, the limit is now 500. - Cannot reproduce a bug in get_namespace_names and another in count_contributions - so instead, if we get weird results, we run the sub again. - There is a bug in get_history which was causing weird errors on some test machines. I've switched it to use MediaWiki::API. 1.3.3 2008-09-10 - get_pages_in_category now uses the API - Test 8 has been modified to test nested categories - Test 7 is now faster, but still effective - Bug checking added on edit for anon edit bug - Bots now use the bot flag when editing, as was done before switching to API editing - Regarding anonymous editing: MediaWiki bug 15420 has been marked as FIXED by Roan Kattouw. Waiting for that to be scapped so I can test it and enable the editing tests. - Before I updated get_pages_in_category, it seemed to have some encoding errors: returning escape sequences where it shouldn't. I'll be looking into its status on that front. 1.3.2 2008-08-30 - Now logging in via API if password is supplied - Fixed an encoding issue in page saving - Anonymous editing is currently broken. 
This is an upstream issue which I am working to resolve 1.3.1 2008-08-28 - Maintenance: update manifest, readme - More MediaWiki::API: blocking, deleting, get_text - Add localization for MediaWiki::API. This is important... - Update tests to test for loading of sections of pages - AssertEdit assertaions can now be passed as 'bot' OR as '&assert=bot' - MediaWiki::API didn't localise with set_wiki() - Unicode issues with MediaWiki::API editing fixed - Fix one warning in edit() - undefined value in string eq near line 252 1.3 2008-08-27 - Convert over to use MediaWiki::API 1.01 2007 - Standardized all functions to return 1 and set $object->{errstr} in the event of an error - get_history and other _get_api-based functions will now properly escape utf8 characters in the query - Fixed memory leak issues with WWW::Mechanize 0.1 2007-08-25 - original version MediaWiki-Bot-5.006003/MANIFEST.SKIP0000644000175000017500000000157512737341477014715 0ustar mikemike# Avoid version control files. \bRCS\b \bCVS\b \bSCCS\b ,v$ \B\.svn\b \B\.git\b \B\.gitignore\b \b_darcs\b \B\.cvsignore$ # Avoid VMS specific MakeMaker generated files \bDescrip.MMS$ \bDESCRIP.MMS$ \bdescrip.mms$ # Avoid Makemaker generated and utility files. \bMANIFEST\.bak \bMakefile$ \bblib/ \bMakeMaker-\d \bpm_to_blib\.ts$ \bpm_to_blib$ \bblibdirs\.ts$ # 6.18 through 6.25 generated this # Avoid Module::Build generated and utility files. \bBuild$ \b_build/ \bBuild.bat$ \bBuild.COM$ \bBUILD.COM$ \bbuild.com$ # Avoid temp and backup files. ~$ \.old$ \#$ \b\.# \.bak$ \.tmp$ \.# \.rej$ # Avoid OS-specific files/dirs # Mac OSX metadata \B\.DS_Store # Mac OSX SMB mount metadata files \B\._ # Avoid Devel::Cover and Devel::CoverX::Covered files. \bcover_db\b \bcovered\b # Avoid MYMETA files ^MYMETA\. # Specific to this project ^script\.pl$ ^\.mediawiki-bot-.*-cookies$ MediaWiki-Bot-5.006003/perlcritic.rc0000644000175000017500000000011012737341477015465 0ustar mikemike# until we start using exceptions exclude = ProhibitExplicitReturnUndef MediaWiki-Bot-5.006003/META.json0000644000175000017500000000620312737341477014431 0ustar mikemike{ "abstract" : "a high-level bot framework for interacting with MediaWiki wikis", "author" : [ "Dan Collins ", "Mike.lifeguard ", "Alex Rowe ", "Oleg Alexandrov ", "jmax.code ", "Stefan Petrea ", "kc2aei ", "bosborne@alum.mit.edu", "Brian Obio ", "patch and bug report contributors" ], "dynamic_config" : 0, "generated_by" : "Dist::Zilla version 6.005, CPAN::Meta::Converter version 2.150005", "license" : [ "gpl_3" ], "meta-spec" : { "url" : "http://search.cpan.org/perldoc?CPAN::Meta::Spec", "version" : 2 }, "name" : "MediaWiki-Bot", "no_index" : { "directory" : [ "corpus", "examples", "inc" ] }, "prereqs" : { "configure" : { "requires" : { "ExtUtils::MakeMaker" : "0", "perl" : "5.006" } }, "develop" : { "requires" : { "English" : "0", "Pod::Coverage::TrustPod" : "0", "Test::CPAN::Changes" : "0.19", "Test::CPAN::Meta" : "0", "Test::CPAN::Meta::JSON" : "0.16", "Test::EOL" : "0", "Test::Kwalitee" : "1.21", "Test::Mojibake" : "0", "Test::More" : "0.96", "Test::NoTabs" : "0", "Test::Pod" : "1.41", "Test::Pod::Coverage" : "1.08", "Test::Pod::LinkCheck" : "0", "Test::Portability::Files" : "0", "Test::Synopsis" : "0", "Test::Vars" : "0", "Test::Version" : "1" } }, "runtime" : { "requires" : { "Carp" : "0", "Constant::Generate" : "0", "Digest::MD5" : "2.39", "Encode" : "0", "Exporter" : "0", "File::Basename" : "0", "HTML::Entities" : "3.28", "LWP::Protocol::https" : "6.06", "List::Util" : "0", "MediaWiki::API" : "0.36", 
"Module::Pluggable" : "0", "perl" : "5.008", "strict" : "0", "warnings" : "0" } }, "test" : { "requires" : { "File::Spec" : "0", "IO::Handle" : "0", "IPC::Open3" : "0", "List::MoreUtils" : "0", "Test::Is" : "0", "Test::More" : "0.96", "Test::RequiresInternet" : "0", "Test::Warn" : "0", "blib" : "1.01", "perl" : "5.008", "utf8" : "0" } } }, "release_status" : "stable", "resources" : { "bugtracker" : { "web" : "https://github.com/MediaWiki-Bot/MediaWiki-Bot/issues" }, "homepage" : "https://metacpan.org/module/MediaWiki::Bot", "repository" : { "type" : "git", "url" : "git://github.com/MediaWiki-Bot/MediaWiki-Bot.git", "web" : "https://github.com/MediaWiki-Bot/MediaWiki-Bot" } }, "version" : "5.006003" } MediaWiki-Bot-5.006003/xt/0000775000175000017500000000000012737341477013444 5ustar mikemikeMediaWiki-Bot-5.006003/xt/release/0000775000175000017500000000000012737341477015064 5ustar mikemikeMediaWiki-Bot-5.006003/xt/release/distmeta.t0000644000175000017500000000017212737341477017061 0ustar mikemike#!perl # This file was automatically generated by Dist::Zilla::Plugin::MetaTests. use Test::CPAN::Meta; meta_yaml_ok(); MediaWiki-Bot-5.006003/xt/release/kwalitee.t0000644000175000017500000000027512737341477017060 0ustar mikemike# this test was generated with Dist::Zilla::Plugin::Test::Kwalitee 2.12 use strict; use warnings; use Test::More 0.88; use Test::Kwalitee 1.21 'kwalitee_ok'; kwalitee_ok(); done_testing; MediaWiki-Bot-5.006003/xt/release/minimum-version.t0000644000175000017500000000026712737341477020412 0ustar mikemike#!perl use Test::More; eval "use Test::MinimumVersion"; plan skip_all => "Test::MinimumVersion required for testing minimum versions" if $@; all_minimum_version_ok( qq{5.10.1} ); MediaWiki-Bot-5.006003/xt/release/dist-manifest.t0000644000175000017500000000023012737341477020011 0ustar mikemike#!perl use Test::More; eval "use Test::DistManifest"; plan skip_all => "Test::DistManifest required for testing the manifest" if $@; manifest_ok(); MediaWiki-Bot-5.006003/xt/release/pod-linkcheck.t0000644000175000017500000000053712737341477017767 0ustar mikemike#!perl use strict; use warnings; use Test::More; foreach my $env_skip ( qw( SKIP_POD_LINKCHECK ) ){ plan skip_all => "\$ENV{$env_skip} is set, skipping" if $ENV{$env_skip}; } eval "use Test::Pod::LinkCheck"; if ( $@ ) { plan skip_all => 'Test::Pod::LinkCheck required for testing POD'; } else { Test::Pod::LinkCheck->new->all_pod_ok; } MediaWiki-Bot-5.006003/xt/release/unused-vars.t0000644000175000017500000000036212737341477017524 0ustar mikemike#!perl use Test::More 0.96 tests => 1; eval { require Test::Vars }; SKIP: { skip 1 => 'Test::Vars required for testing for unused vars' if $@; Test::Vars->import; subtest 'unused vars' => sub { all_vars_ok(); }; }; MediaWiki-Bot-5.006003/xt/release/meta-json.t0000644000175000017500000000006412737341477017144 0ustar mikemike#!perl use Test::CPAN::Meta::JSON; meta_json_ok(); MediaWiki-Bot-5.006003/xt/release/cpan-changes.t0000644000175000017500000000034412737341477017577 0ustar mikemikeuse strict; use warnings; # this test was generated with Dist::Zilla::Plugin::Test::CPAN::Changes 0.012 use Test::More 0.96 tests => 1; use Test::CPAN::Changes; subtest 'changes_ok' => sub { changes_file_ok('Changes'); }; MediaWiki-Bot-5.006003/xt/author/0000775000175000017500000000000012737341477014746 5ustar mikemikeMediaWiki-Bot-5.006003/xt/author/synopsis.t0000644000175000017500000000006012737341477017014 0ustar mikemike#!perl use Test::Synopsis; all_synopsis_ok(); 
MediaWiki-Bot-5.006003/xt/author/eol.t0000644000175000017500000000307012737341477015710 0ustar mikemikeuse strict; use warnings; # this test was generated with Dist::Zilla::Plugin::Test::EOL 0.18 use Test::More 0.88; use Test::EOL; my @files = ( 'lib/MediaWiki/Bot.pm', 'lib/MediaWiki/Bot/Constants.pm', 't/00-compile.t', 't/00-init.t', 't/01-api_error.t', 't/02-login.t', 't/03-get_text.t', 't/04-edit.t', 't/05-revert.t', 't/06-get_history.t', 't/07-unicode.t', 't/08-get_last.t', 't/09-update_rc.t', 't/10-what_links_here.t', 't/11-get_pages_in_category.t', 't/12-linksearch.t', 't/13-get_namespace_names.t', 't/14-get_pages_in_namespace.t', 't/15-count_contributions.t', 't/16-last_active.t', 't/17-was_blocked.t', 't/18-is_blocked.t', 't/19-get_pages.t', 't/20-assertion.t', 't/21-get_allusers.t', 't/22-get_id.t', 't/23-list_transclusions.t', 't/24-purge_page.t', 't/25-sitematrix.t', 't/26-diff.t', 't/27-prefixindex.t', 't/28-search.t', 't/29-get_log.t', 't/30-was_g_blocked.t', 't/31-is_g_blocked.t', 't/32-was_locked.t', 't/33-is_locked.t', 't/34-secure.t', 't/35-get_protection.t', 't/36-email.t', 't/37-move.t', 't/38-test_image_exists.t', 't/39-image_usage.t', 't/40-upload.t', 't/41-get_users.t', 't/42-expandtemplates.t', 't/43-recentchanges.t', 't/44-patrol.t', 't/45-contributions.t', 't/46-usergroups.t', 't/47-global_image_usage.t', 't/48-get_image.t', 't/49-get_all_categories.t' ); eol_unix_ok($_, { trailing_whitespace => 1 }) foreach @files; done_testing; MediaWiki-Bot-5.006003/xt/author/test-version.t0000644000175000017500000000063712737341477017601 0ustar mikemikeuse strict; use warnings; use Test::More; # generated by Dist::Zilla::Plugin::Test::Version 1.09 use Test::Version; my @imports = qw( version_all_ok ); my $params = { is_strict => 0, has_version => 1, multiple => 0, }; push @imports, $params if version->parse( $Test::Version::VERSION ) >= version->parse('1.002'); Test::Version->import(@imports); version_all_ok; done_testing; MediaWiki-Bot-5.006003/xt/author/critic.t0000644000175000017500000000043512737341477016410 0ustar mikemike#!perl use strict; use warnings; use Test::More; use English qw(-no_match_vars); eval "use Test::Perl::Critic"; plan skip_all => 'Test::Perl::Critic required to criticise code' if $@; Test::Perl::Critic->import( -profile => "perlcritic.rc" ) if -e "perlcritic.rc"; all_critic_ok(); MediaWiki-Bot-5.006003/xt/author/pod-coverage.t0000644000175000017500000000033412737341477017504 0ustar mikemike#!perl # This file was automatically generated by Dist::Zilla::Plugin::PodCoverageTests. 
use Test::Pod::Coverage 1.08; use Pod::Coverage::TrustPod; all_pod_coverage_ok({ coverage_class => 'Pod::Coverage::TrustPod' }); MediaWiki-Bot-5.006003/xt/author/no-tabs.t0000644000175000017500000000303612737341477016476 0ustar mikemikeuse strict; use warnings; # this test was generated with Dist::Zilla::Plugin::Test::NoTabs 0.15 use Test::More 0.88; use Test::NoTabs; my @files = ( 'lib/MediaWiki/Bot.pm', 'lib/MediaWiki/Bot/Constants.pm', 't/00-compile.t', 't/00-init.t', 't/01-api_error.t', 't/02-login.t', 't/03-get_text.t', 't/04-edit.t', 't/05-revert.t', 't/06-get_history.t', 't/07-unicode.t', 't/08-get_last.t', 't/09-update_rc.t', 't/10-what_links_here.t', 't/11-get_pages_in_category.t', 't/12-linksearch.t', 't/13-get_namespace_names.t', 't/14-get_pages_in_namespace.t', 't/15-count_contributions.t', 't/16-last_active.t', 't/17-was_blocked.t', 't/18-is_blocked.t', 't/19-get_pages.t', 't/20-assertion.t', 't/21-get_allusers.t', 't/22-get_id.t', 't/23-list_transclusions.t', 't/24-purge_page.t', 't/25-sitematrix.t', 't/26-diff.t', 't/27-prefixindex.t', 't/28-search.t', 't/29-get_log.t', 't/30-was_g_blocked.t', 't/31-is_g_blocked.t', 't/32-was_locked.t', 't/33-is_locked.t', 't/34-secure.t', 't/35-get_protection.t', 't/36-email.t', 't/37-move.t', 't/38-test_image_exists.t', 't/39-image_usage.t', 't/40-upload.t', 't/41-get_users.t', 't/42-expandtemplates.t', 't/43-recentchanges.t', 't/44-patrol.t', 't/45-contributions.t', 't/46-usergroups.t', 't/47-global_image_usage.t', 't/48-get_image.t', 't/49-get_all_categories.t' ); notabs_ok($_) foreach @files; done_testing; MediaWiki-Bot-5.006003/xt/author/portability.t0000644000175000017500000000027712737341477017501 0ustar mikemike#!perl use strict; use warnings; use Test::More; eval 'use Test::Portability::Files'; plan skip_all => 'Test::Portability::Files required for testing portability' if $@; run_tests(); MediaWiki-Bot-5.006003/xt/author/mojibake.t0000644000175000017500000000015112737341477016707 0ustar mikemike#!perl use strict; use warnings qw(all); use Test::More; use Test::Mojibake; all_files_encoding_ok(); MediaWiki-Bot-5.006003/xt/author/pod-syntax.t0000644000175000017500000000025212737341477017236 0ustar mikemike#!perl # This file was automatically generated by Dist::Zilla::Plugin::PodSyntaxTests. use strict; use warnings; use Test::More; use Test::Pod 1.41; all_pod_files_ok(); MediaWiki-Bot-5.006003/dist.ini0000644000175000017500000000227412737341477014460 0ustar mikemikemain_module = lib/MediaWiki/Bot.pm name = MediaWiki-Bot license = GPL_3 copyright_holder = the MediaWiki::Bot team author = Dan Collins author = Mike.lifeguard ; I hope I have divined names from emails properly author = Alex Rowe author = Oleg Alexandrov author = jmax.code author = Stefan Petrea author = kc2aei author = bosborne@alum.mit.edu author = Brian Obio ; Should we include people with no commits? 
author = patch and bug report contributors [Prereqs] LWP::Protocol::https = 6.06 ; Needed for secure login/edits/etc ; authordep Data::Random ; authordep GD ; authordep Imager ; authordep Imager::File::JPEG [@Author::DOHERTY] critic_config = perlcritic.rc tag_format = %v version_regexp = ^v?(.+)$ push_to = origin push_to = upstream ; MediaWiki-Bot organization's repo github_metadata_remote = upstream MediaWiki-Bot-5.006003/lib/0000775000175000017500000000000012737341477013557 5ustar mikemikeMediaWiki-Bot-5.006003/lib/MediaWiki/0000775000175000017500000000000012737341477015422 5ustar mikemikeMediaWiki-Bot-5.006003/lib/MediaWiki/Bot/0000775000175000017500000000000012737341477016146 5ustar mikemikeMediaWiki-Bot-5.006003/lib/MediaWiki/Bot/Constants.pm0000644000175000017500000001072112737341477020457 0ustar mikemikepackage MediaWiki::Bot::Constants; use strict; use warnings; # ABSTRACT: constants for MediaWiki::Bot our $VERSION = '5.006003'; # VERSION use MediaWiki::API; # How to grab these constants? use Constant::Generate { ERR_NO_ERROR => MediaWiki::API->ERR_NO_ERROR, ERR_CONFIG => MediaWiki::API->ERR_CONFIG, ERR_HTTP => MediaWiki::API->ERR_HTTP, ERR_API => MediaWiki::API->ERR_API, ERR_LOGIN => MediaWiki::API->ERR_LOGIN, ERR_EDIT => MediaWiki::API->ERR_EDIT, ERR_PARAMS => MediaWiki::API->ERR_PARAMS, ERR_UPLOAD => MediaWiki::API->ERR_UPLOAD, ERR_DOWNLOAD => MediaWiki::API->ERR_DOWNLOAD, ERR_CAPTCHA => 10, RET_TRUE => !!1, RET_FALSE => !!0, PAGE_NONEXISTENT => -1, FILE_NONEXISTENT => 0, FILE_LOCAL => 1, FILE_SHARED => 2, FILE_PAGE_TEXT_ONLY => 3, NS_USER => 2, NS_FILE => 6, NS_CATEGORY => 14, };#, dualvar => 1; use Exporter qw(import); our %EXPORT_TAGS = ( err => [qw( ERR_NO_ERROR ERR_CONFIG ERR_HTTP ERR_API ERR_LOGIN ERR_EDIT ERR_PARAMS ERR_UPLOAD ERR_DOWNLOAD ERR_CAPTCHA )], bool => [qw( RET_TRUE RET_FALSE )], page => [qw( PAGE_NONEXISTENT )], file => [qw( FILE_NONEXISTENT FILE_LOCAL FILE_SHARED FILE_PAGE_TEXT_ONLY )], ns => [qw( NS_USER NS_FILE NS_CATEGORY )], ); Exporter::export_tags(qw(err)); Exporter::export_ok_tags(qw(bool page file ns)); { my %seen; push @{$EXPORT_TAGS{all}}, grep {!$seen{$_}++} @{$EXPORT_TAGS{$_}} foreach keys %EXPORT_TAGS; } 1; __END__ =pod =encoding UTF-8 =head1 NAME MediaWiki::Bot::Constants - constants for MediaWiki::Bot =head1 VERSION version 5.006003 =head1 SYNOPSIS use MediaWiki::Bot; use MediaWiki::Bot::Constants qw(:file); my $bot = MediaWiki::Bot->new(); my $file_existence = $bot->test_image_exists("File:..."); # Make sense of MediaWiki::Bot's random numbers if ($file_existence == FILE_LOCAL) { # Get from local media repository } elsif ($file_existence == FILE_SHARED) { # Get from shared (remote) media repository } =head1 DESCRIPTION Exportable constants used by L. Use these constants in your code to avoid the use of magical numbers, and to ensure compatibility with future changes in C. You can also import C<:constants> or any constant name(s) from L: use MediaWiki::Bot qw(:constants); use MediaWiki::Bot qw(PAGE_NONEXISTENT); =head1 CONSTANTS The available constants are divided into 5 tags, which can be imported individually: =over 4 =item * err - the error constants, inherited from L =item * bool - boolean constants =item * page - page existence =item * file - file (image/media) existence status (which is not boolean) =item * ns - some namespace numbers. B Incomplete! Use L's functions for getting namespace information for your wiki. =back =head1 EXPORTS No symbols are exported by default. The available tags are err, bool, page, file, ns, and all. 
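As a minimal sketch (the host, page title and text below are placeholders),
the error-code constants can be compared against the code stored in
C<< $bot->{error} >> after a failed call:

    use MediaWiki::Bot;
    use MediaWiki::Bot::Constants qw(:err);

    my $bot = MediaWiki::Bot->new({ host => 'test.wikipedia.org' });
    unless ($bot->edit({ page => 'Project:Sandbox', text => 'Hello' })) {
        warn 'CAPTCHA required'    if $bot->{error}->{code} == ERR_CAPTCHA;
        warn 'HTTP-level failure'  if $bot->{error}->{code} == ERR_HTTP;
    }

Any other tag listed above can be imported the same way, for example
C<< use MediaWiki::Bot::Constants qw(:bool :page) >>.
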
=head1 AVAILABILITY The project homepage is L. The latest version of this module is available from the Comprehensive Perl Archive Network (CPAN). Visit L to find a CPAN site near you, or see L. =head1 SOURCE The development version is on github at L and may be cloned from L =head1 BUGS AND LIMITATIONS You can make new bug reports, and view existing ones, through the web interface at L. =head1 AUTHORS =over 4 =item * Dan Collins =item * Mike.lifeguard =item * Alex Rowe =item * Oleg Alexandrov =item * jmax.code =item * Stefan Petrea =item * kc2aei =item * bosborne@alum.mit.edu =item * Brian Obio =item * patch and bug report contributors =back =head1 COPYRIGHT AND LICENSE This software is Copyright (c) 2016 by the MediaWiki::Bot team . This is free software, licensed under: The GNU General Public License, Version 3, June 2007 =cut MediaWiki-Bot-5.006003/lib/MediaWiki/Bot.pm0000644000175000017500000033110212737341477016502 0ustar mikemikepackage MediaWiki::Bot; use strict; use warnings; # ABSTRACT: a high-level bot framework for interacting with MediaWiki wikis our $VERSION = '5.006003'; # VERSION use HTML::Entities 3.28; use Carp; use Digest::MD5 2.39 qw(md5_hex); use Encode qw(encode_utf8); use MediaWiki::API 0.36; use List::Util qw(sum); use MediaWiki::Bot::Constants qw(:all); use Exporter qw(import); our @EXPORT_OK = @{ $MediaWiki::Bot::Constants::EXPORT_TAGS{all} }; our %EXPORT_TAGS = ( constants => \@EXPORT_OK ); use Module::Pluggable search_path => [qw(MediaWiki::Bot::Plugin)], 'require' => 1; foreach my $plugin (__PACKAGE__->plugins) { #print "Found plugin $plugin\n"; $plugin->import(); } sub new { my $package = shift; my $agent; my $assert; my $operator; my $maxlag; my $protocol; my $host; my $path; my $login_data; my $debug; if (ref $_[0] eq 'HASH') { $agent = $_[0]->{agent}; $assert = $_[0]->{assert}; $operator = $_[0]->{operator}; $maxlag = $_[0]->{maxlag}; $protocol = $_[0]->{protocol}; $host = $_[0]->{host}; $path = $_[0]->{path}; $login_data = $_[0]->{login_data}; $debug = $_[0]->{debug}; } else { warnings::warnif('deprecated', 'Please pass a hashref; this method of calling ' . 'the constructor is deprecated and will be removed in a future release') if @_; $agent = shift; $assert = shift; $operator = shift; $maxlag = shift; $protocol = shift; $host = shift; $path = shift; $debug = shift; } $assert =~ s/[&?]assert=// if $assert; # Strip out param part, leaving just the value $operator =~ s/^User://i if $operator; if (not $agent and not $operator) { carp q{You should provide either a customized user agent string } . q{(see https://meta.wikimedia.org/wiki/User-agent_policy) } . q{or provide your username as `operator'.}; } elsif (not $agent and $operator) { $operator =~ s{^User:}{}; $agent = sprintf( 'Perl MediaWiki::Bot/%s (%s; [[User:%s]])', (defined __PACKAGE__->VERSION ? __PACKAGE__->VERSION : 'dev'), 'https://metacpan.org/MediaWiki::Bot', $operator ); } my $self = bless({}, $package); $self->{errstr} = ''; $self->{assert} = $assert if $assert; $self->{operator} = $operator; $self->{debug} = $debug || 0; $self->{api} = MediaWiki::API->new({ max_lag => (defined $maxlag ? 
$maxlag : 5), max_lag_delay => 5, max_lag_retries => 5, retries => 5, retry_delay => 10, # no infinite loops use_http_get => 1, # use HTTP GET to make certain requests cacheable }); $self->{api}->{ua}->agent($agent) if defined $agent; # Set wiki (handles setting $self->{host} etc) $self->set_wiki({ protocol => $protocol, host => $host, path => $path, }); # Log-in, and maybe autoconfigure if ($login_data) { my $success = $self->login($login_data); if ($success) { return $self; } else { carp "Couldn't log in with supplied settings" if $self->{debug}; return; } } return $self; } sub set_wiki { my $self = shift; my $host; my $path; my $protocol; if (ref $_[0] eq 'HASH') { $host = $_[0]->{host}; $path = $_[0]->{path}; $protocol = $_[0]->{protocol}; } else { warnings::warnif('deprecated', 'Please pass a hashref; this method of calling ' . 'set_wiki is deprecated, and will be removed in a future release'); $host = shift; $path = shift; } # Set defaults $protocol = $self->{protocol} || 'https' unless defined($protocol); $host = $self->{host} || 'en.wikipedia.org' unless defined($host); $path = $self->{path} || 'w' unless defined($path); # Clean up the parts we will build a URL with $protocol =~ s,://$,,; if ($host =~ m,^(http|https)(://)?, && !$protocol) { $protocol = $1; } $host =~ s,^https?://,,; $host =~ s,/$,,; $path =~ s,/$,,; # Invalidate wiki-specific cached data if ( ((defined($self->{host})) and ($self->{host} ne $host)) or ((defined($self->{path})) and ($self->{path} ne $path)) or ((defined($self->{protocol})) and ($self->{protocol} ne $protocol)) ) { delete $self->{ns_data} if $self->{ns_data}; delete $self->{ns_alias_data} if $self->{ns_alias_data}; } $self->{protocol} = $protocol; $self->{host} = $host; $self->{path} = $path; $self->{api}->{config}->{api_url} = $path ? "$protocol://$host/$path/api.php" : "$protocol://$host/api.php"; # $path is '', so don't use http://domain.com//api.php warn "Wiki set to " . $self->{api}->{config}{api_url} . "\n" if $self->{debug} > 1; return RET_TRUE; } sub login { my $self = shift; my $username; my $password; my $lgdomain; my $autoconfig; my $basic_auth; my $do_sul; if (ref $_[0] eq 'HASH') { $username = $_[0]->{username}; $password = $_[0]->{password}; $autoconfig = defined($_[0]->{autoconfig}) ? $_[0]->{autoconfig} : 1; $basic_auth = $_[0]->{basic_auth}; $do_sul = $_[0]->{do_sul} || 0; $lgdomain = $_[0]->{lgdomain}; } else { warnings::warnif('deprecated', 'Please pass a hashref; this method of calling ' . 'login is deprecated and will be removed in a future release'); $username = shift; $password = shift; $autoconfig = 0; $do_sul = 0; } # strip off the "@bot_password_label" suffix, if any $self->{username} = (split /@/, $username, 2)[0]; # normal human-readable username $self->{login_username} = $username; # to be used for login (includes "@bot_password_label") carp "Logging in over plain HTTP is a bad idea, we would be sending secrets" . " (passwords or cookies) in plaintext over an insecure connection." . " To protect against eavesdroppers, set protocol => 'https'" unless $self->{protocol} eq 'https'; # Handle basic auth first, if needed if ($basic_auth) { warn 'Applying basic auth credentials' if $self->{debug} > 1; $self->{api}->{ua}->credentials( $basic_auth->{netloc}, $basic_auth->{realm}, $basic_auth->{uname}, $basic_auth->{pass} ); } if ($self->{host} eq 'secure.wikimedia.org') { warnings::warnif('deprecated', 'SSL is now supported on the main Wikimedia Foundation sites. ' . 
'Use en.wikipedia.org (or whatever) instead of secure.wikimedia.org.'); return; } if($do_sul) { my $sul_success = $self->_do_sul($password); warn 'Some or all SUL logins failed' if $self->{debug} > 1 and !$sul_success; } my $cookies = ".mediawiki-bot-$username-cookies"; if (-r $cookies) { $self->{api}->{ua}->{cookie_jar}->load($cookies); $self->{api}->{ua}->{cookie_jar}->{ignore_discard} = 1; # $self->{api}->{ua}->add_handler("request_send", sub { shift->dump; return }); if ($self->_is_loggedin()) { $self->_do_autoconfig() if $autoconfig; warn 'Logged in successfully with cookies' if $self->{debug} > 1; return 1; # If we're already logged in, nothing more is needed } } unless ($password) { carp q{Cookies didn't get us logged in, and no password to continue with authentication} if $self->{debug}; return; } my $res; RETRY: for (1..2) { # Fetch a login token $res = $self->{api}->api({ action => 'query', meta => 'tokens', type => 'login', }) or return $self->_handle_api_error(); my $token = $res->{query}->{tokens}->{logintoken}; # Do the login $res = $self->{api}->api({ action => 'login', lgname => $self->{login_username}, lgpassword => $password, lgdomain => $lgdomain, lgtoken => $token, }) or return $self->_handle_api_error(); last RETRY if $res->{login}->{result} eq 'Success'; }; $self->{api}->{ua}->{cookie_jar}->extract_cookies($self->{api}->{response}); $self->{api}->{ua}->{cookie_jar}->save($cookies) if (-w($cookies) or -w('.')); if ($res->{login}->{result} eq 'Success') { if ($res->{login}->{lgusername} eq $self->{username}) { $self->_do_autoconfig() if $autoconfig; warn 'Logged in successfully with password' if $self->{debug} > 1; } } return ((defined($res->{login}->{lgusername})) and (defined($res->{login}->{result})) and ($res->{login}->{lgusername} eq $self->{username}) and ($res->{login}->{result} eq 'Success')); } sub _do_sul { my $self = shift; my $password = shift; my $debug = $self->{debug}; # Remember these for later my $host = $self->{host}; my $path = $self->{path}; my $protocol = $self->{protocol}; my $username = $self->{login_username}; $self->{debug} = 0; # Turn off debugging for these internal calls my @logins; # Keep track of our successes my @WMF_projects = qw( en.wikipedia.org en.wiktionary.org en.wikibooks.org en.wikinews.org en.wikiquote.org en.wikisource.org en.wikiversity.org meta.wikimedia.org commons.wikimedia.org species.wikimedia.org incubator.wikimedia.org ); SUL: foreach my $project (@WMF_projects) { # Could maybe be parallelized print STDERR "Logging in on $project..." if $debug > 1; $self->set_wiki({ host => $project, }); my $success = $self->login({ username => $username, password => $password, do_sul => 0, autoconfig => 0, }); warn ($success ? " OK\n" : " FAILED:\n") if $debug > 1; warn $self->{api}->{error}->{code} . ': ' . 
$self->{api}->{error}->{details} if $debug > 1 and !$success; push(@logins, $success); } $self->set_wiki({ # Switch back to original wiki protocol => $protocol, host => $host, path => $path, }); my $sum = sum 0, @logins; my $total = scalar @WMF_projects; warn "$sum/$total logins succeeded" if $debug > 1; $self->{debug} = $debug; # Reset debug to it's old value return $sum == $total; } sub logout { my $self = shift; $self->{api}->api({ action => 'logout' }); return RET_TRUE; } sub edit { my $self = shift; my $page; my $text; my $summary; my $is_minor; my $assert; my $markasbot; my $section; my $captcha_id; my $captcha_solution; if (ref $_[0] eq 'HASH') { $page = $_[0]->{page}; $text = $_[0]->{text}; $summary = $_[0]->{summary}; $is_minor = $_[0]->{minor}; $assert = $_[0]->{assert}; $markasbot = $_[0]->{markasbot}; $section = $_[0]->{section}; $captcha_id = $_[0]->{captcha_id}; $captcha_solution = $_[0]->{captcha_solution}; } else { warnings::warnif('deprecated', 'Please pass a hashref; this method of calling ' . 'edit is deprecated, and will be removed in a future release.'); $page = shift; $text = shift; $summary = shift; $is_minor = shift; $assert = shift; $markasbot = shift; $section = shift; } # Set defaults $summary = 'BOT: Changing page text' unless $summary; if ($assert) { $assert =~ s/^[&?]assert=//; } else { $assert = $self->{assert}; } $is_minor = 1 unless defined($is_minor); $markasbot = 1 unless defined($markasbot); # Clear any captcha data that might remain from a previous edit attempt delete $self->{error}->{captcha}; carp 'Need both captcha_id and captcha_solution when editing with a solved CAPTCHA' if (defined $captcha_id and not defined $captcha_solution) or (defined $captcha_solution and not defined $captcha_id); my ($edittoken, $lastedit, $tokentime) = $self->_get_edittoken($page); return $self->_handle_api_error() unless $edittoken; # HTTP::Message will do this eventually as of 6.03 (RT#75592), so we need # to do it here - otherwise, the md5 won't match what eventually is sent to # the server, and the edit will fail - GH#39. # If HTTP::Message becomes unbroken in the future, might have to keep this # workaround for people using 6.03 and other future broken versions. $text =~ s{(? 'edit', title => $page, token => $edittoken, text => $text, md5 => $md5, # Guard against data corruption summary => $summary, basetimestamp => $lastedit, # Guard against edit conflicts starttimestamp => $tokentime, # Guard against the page being deleted/moved bot => $markasbot, ( $section ? (section => $section) : ()), ( $assert ? (assert => $assert) : ()), ( $is_minor ? (minor => 1) : (notminor => 1)), ( $captcha_id ? (captchaid => $captcha_id) : ()), ( $captcha_solution ? (captchaword => $captcha_solution) : ()), }; ### Actually do the edit my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; if ($res->{edit}->{result} && $res->{edit}->{result} eq 'Failure') { # https://www.mediawiki.org/wiki/API:Edit#CAPTCHAs_and_extension_errors # You need to solve the CAPTCHA, then retry the request with the ID in # this error response and the solution. 
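        # Put differently: when the branch below fires, the CAPTCHA details end
        # up in $self->{error}->{captcha}; the caller can solve the challenge,
        # then call edit() again with the captcha_id and captcha_solution
        # parameters (unpacked at the top of this method) to finish the edit.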
if (exists $res->{edit}->{captcha}) { return $self->_handle_api_error({ code => ERR_CAPTCHA, details => 'captcharequired: This action requires that a CAPTCHA be solved', captcha => $res->{edit}->{captcha}, }); } return $self->_handle_api_error(); } return $res; } sub move { my $self = shift; my $from = shift; my $to = shift; my $reason = shift; my $opts = shift; my $hash = { action => 'move', from => $from, to => $to, reason => $reason, }; $hash->{movetalk} = $opts->{movetalk} if defined($opts->{movetalk}); $hash->{noredirect} = $opts->{noredirect} if defined($opts->{noredirect}); $hash->{movesubpages} = $opts->{movesubpages} if defined($opts->{movesubpages}); my $res = $self->{api}->edit($hash); return $self->_handle_api_error() unless $res; return $res; # should we return something more useful? } sub get_history { my $self = shift; my $pagename = shift; my $limit = shift || 'max'; my $rvstartid = shift; my $direction = shift; my $hash = { action => 'query', prop => 'revisions', titles => $pagename, rvprop => 'ids|timestamp|user|comment|flags', rvlimit => $limit }; $hash->{rvstartid} = $rvstartid if ($rvstartid); $hash->{direction} = $direction if ($direction); my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; my ($id) = keys %{ $res->{query}->{pages} }; my $array = $res->{query}->{pages}->{$id}->{revisions}; my @return; foreach my $hash (@{$array}) { my $revid = $hash->{revid}; my $user = $hash->{user}; my ($timestamp_date, $timestamp_time) = split(/T/, $hash->{timestamp}); $timestamp_time =~ s/Z$//; my $comment = $hash->{comment}; push( @return, { revid => $revid, user => $user, timestamp_date => $timestamp_date, timestamp_time => $timestamp_time, comment => $comment, minor => exists $hash->{minor}, }); } return @return; } sub get_text { my $self = shift; my $pagename = shift; my $revid = shift; my $section = shift; my $hash = { action => 'query', titles => $pagename, prop => 'revisions', rvprop => 'content', }; $hash->{rvstartid} = $revid if ($revid); $hash->{rvsection} = $section if ($section); my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; my ($id, $data) = %{ $res->{query}->{pages} }; return if $id == PAGE_NONEXISTENT; return $data->{revisions}[0]->{'*'}; # the wikitext } sub get_id { my $self = shift; my $pagename = shift; my $hash = { action => 'query', titles => $pagename, }; my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; my ($id) = %{ $res->{query}->{pages} }; return if $id == PAGE_NONEXISTENT; return $id; } sub get_pages { my $self = shift; my @pages = (ref $_[0] eq 'ARRAY') ? @{$_[0]} : @_; my %return; my $hash = { action => 'query', titles => join('|', @pages), prop => 'revisions', rvprop => 'content', }; my $diff; # Used to track problematic article names map { $diff->{$_} = 1; } @pages; my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; foreach my $id (keys %{ $res->{query}->{pages} }) { my $page = $res->{query}->{pages}->{$id}; if ($diff->{ $page->{title} }) { $diff->{ $page->{title} }++; } else { next; } if (defined($page->{missing})) { $return{ $page->{title} } = undef; next; } if (defined($page->{revisions})) { my $revisions = @{ $page->{revisions} }[0]->{'*'}; if (!defined $revisions) { $return{ $page->{title} } = $revisions; } elsif (length($revisions) < 150 && $revisions =~ m/\#REDIRECT\s\[\[([^\[\]]+)\]\]/) { # FRAGILE! 
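                # "Fragile" because only the literal, English "#REDIRECT" keyword
                # is matched (case-sensitively), and only on pages shorter than
                # 150 bytes; localized redirect magic words or longer redirect
                # pages are not followed, and their raw wikitext is returned as-is.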
my $redirect_to = $1; $return{ $page->{title} } = $self->get_text($redirect_to); } else { $return{ $page->{title} } = $revisions; } } } my $expand = $self->_get_ns_alias_data(); # Only for those article names that remained after the first part # If we're here we are dealing most likely with a WP:CSD type of article name for my $title (keys %$diff) { if ($diff->{$title} == 1) { my @pieces = split(/:/, $title); if (@pieces > 1) { $pieces[0] = ($expand->{ $pieces[0] } || $pieces[0]); my $v = $self->get_text(join ':', @pieces); warn "Detected article name that needed expanding $title\n" if $self->{debug} > 1; $return{$title} = $v; if (defined $v and $v =~ m/\#REDIRECT\s\[\[([^\[\]]+)\]\]/) { $v = $self->get_text($1); $return{$title} = $v; } } } } return \%return; } sub get_image{ my $self = shift; my $name = shift; my $options = shift; my %sizeparams; $sizeparams{iiurlwidth} = $options->{width} if $options->{width}; $sizeparams{iiurlheight} = $options->{height} if $options->{height}; my $ref = $self->{api}->api({ action => 'query', titles => $name, prop => 'imageinfo', iiprop => 'url|size', %sizeparams }); return $self->_handle_api_error() unless $ref; my ($pageref) = values %{ $ref->{query}->{pages} }; return unless defined $pageref->{imageinfo}; # if the image is missing my $url = @{ $pageref->{imageinfo} }[0]->{thumburl} || @{ $pageref->{imageinfo} }[0]->{url}; die "$url should be absolute or something." unless ( $url =~ m{^https?://} ); my $response = $self->{api}->{ua}->get($url); return $self->_handle_api_error() unless ( $response->code == 200 ); return $response->decoded_content; } sub revert { my $self = shift; my $pagename = shift; my $revid = shift; my $summary = shift || "Reverting to old revision $revid"; my $text = $self->get_text($pagename, $revid); my $res = $self->edit({ page => $pagename, text => $text, summary => $summary, }); return $res; } sub undo { my $self = shift; my $page = shift; my $revid = shift || croak "No revid given"; my $summary = shift || "Reverting revision #$revid"; my $after = shift; $summary = "Reverting edits between #$revid & #$after" if defined($after); # Is that clear? Correct? my ($edittoken, $basetimestamp, $starttimestamp) = $self->_get_edittoken($page); my $hash = { action => 'edit', title => $page, undo => $revid, (undoafter => $after)x!! defined $after, summary => $summary, token => $edittoken, starttimestamp => $starttimestamp, basetimestamp => $basetimestamp, }; my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; return $res; } sub get_last { my $self = shift; my $page = shift; my $user = shift; my $res = $self->{api}->api({ action => 'query', titles => $page, prop => 'revisions', rvlimit => 1, rvprop => 'ids|user', rvexcludeuser => $user || '', }); return $self->_handle_api_error() unless $res; my (undef, $data) = %{ $res->{query}->{pages} }; my $revid = $data->{revisions}[0]->{revid}; return $revid; } sub update_rc { warnings::warnif('deprecated', 'update_rc is deprecated, and may be removed ' . 'in a future release. Please use recentchanges(), which provides more ' . 
'data, including rcid'); my $self = shift; my $limit = shift || 'max'; my $options = shift; my $hash = { action => 'query', list => 'recentchanges', rcnamespace => 0, rclimit => $limit, }; $options->{max} = 1 unless $options->{max}; my $res = $self->{api}->list($hash, $options); return $self->_handle_api_error() unless $res; return RET_TRUE if not ref $res; # Not a ref when using callback my @rc_table; foreach my $hash (@{$res}) { push( @rc_table, { title => $hash->{title}, revid => $hash->{revid}, old_revid => $hash->{old_revid}, timestamp => $hash->{timestamp}, } ); } return @rc_table; } sub recentchanges { my $self = shift; my $ns; my $limit; my $options; my $user; my $show; if (ref $_[0] eq 'HASH') { # unpack for new args my %args = %{ +shift }; $ns = delete $args{ns}; $limit = delete $args{limit}; $user = delete $args{user}; if (ref $args{show} eq 'HASH') { my @show; while (my ($k, $v) = each %{ $args{show} }) { push @show, '!'x!$v . $k; } $show = join '|', @show; } else { $show = delete $args{show}; } $options = shift; } else { $ns = shift || 0; $limit = shift || 50; $options = shift; } $ns = join('|', @$ns) if ref $ns eq 'ARRAY'; my $hash = { action => 'query', list => 'recentchanges', rcnamespace => $ns, rclimit => $limit, rcprop => 'user|comment|timestamp|title|ids', }; $hash->{rcuser} = $user if defined $user; $hash->{rcshow} = $show if defined $show; $options->{max} = 1 unless $options->{max}; my $res = $self->{api}->list($hash, $options) or return $self->_handle_api_error(); return RET_TRUE unless ref $res; # Not a ref when using callback return @$res; } sub what_links_here { my $self = shift; my $page = shift; my $filter = shift; my $ns = shift; my $options = shift; $ns = join('|', @$ns) if (ref $ns eq 'ARRAY'); # Allow array of namespaces if (defined($filter) and $filter =~ m/(all|redirects|nonredirects)/) { # Verify $filter $filter = $1; } # http://en.wikipedia.org/w/api.php?action=query&list=backlinks&bltitle=template:tlx my $hash = { action => 'query', list => 'backlinks', bltitle => $page, bllimit => 'max', }; $hash->{blnamespace} = $ns if defined $ns; $hash->{blfilterredir} = $filter if $filter; $options->{max} = 1 unless $options->{max}; my $res = $self->{api}->list($hash, $options); return $self->_handle_api_error() unless $res; return RET_TRUE if not ref $res; # When using a callback hook, this won't be a reference my @links; foreach my $hashref (@$res) { my $title = $hashref->{title}; my $redirect = defined($hashref->{redirect}); push @links, { title => $title, redirect => $redirect }; } return @links; } sub list_transclusions { my $self = shift; my $page = shift; my $filter = shift; my $ns = shift; my $options = shift; $ns = join('|', @$ns) if (ref $ns eq 'ARRAY'); if (defined($filter) and $filter =~ m/(all|redirects|nonredirects)/) { # Verify $filter $filter = $1; } # http://en.wikipedia.org/w/api.php?action=query&list=embeddedin&eititle=Template:Stub my $hash = { action => 'query', list => 'embeddedin', eititle => $page, eilimit => 'max', }; $hash->{eifilterredir} = $filter if $filter; $hash->{einamespace} = $ns if defined $ns; $options->{max} = 1 unless $options->{max}; my $res = $self->{api}->list($hash, $options); return $self->_handle_api_error() unless $res; return RET_TRUE if not ref $res; # When using a callback hook, this won't be a reference my @links; foreach my $hashref (@$res) { my $title = $hashref->{title}; my $redirect = defined($hashref->{redirect}); push @links, { title => $title, redirect => $redirect }; } return @links; } sub 
get_pages_in_category { my $self = shift; my $category = shift; my $options = shift; if ($category =~ m/:/) { # It might have a namespace name my ($cat) = split(/:/, $category, 2); if ($cat ne 'Category') { # 'Category' is a canonical name for ns14 my $ns_data = $self->_get_ns_data(); my $cat_ns_name = $ns_data->{+NS_CATEGORY}; if ($cat ne $cat_ns_name) { $category = "$cat_ns_name:$category"; } } } else { # Definitely no namespace name, since there's no colon $category = "Category:$category"; } warn "Category to fetch is [[$category]]" if $self->{debug} > 1; my $hash = { action => 'query', list => 'categorymembers', cmtitle => $category, cmlimit => 'max', }; $options->{max} = 1 unless defined($options->{max}); delete($options->{max}) if $options->{max} == 0; my $res = $self->{api}->list($hash, $options); return RET_TRUE if not ref $res; # Not a hashref when using callback return $self->_handle_api_error() unless $res; return map { $_->{title} } @$res; } { # Instead of using the state pragma, use a bare block my %data; sub get_all_pages_in_category { my $self = shift; my $base_category = shift; my $options = shift; $options->{max} = 0 unless defined($options->{max}); my @first = $self->get_pages_in_category($base_category, $options); %data = () unless $_[0]; # This is a special flag for internal use. # It marks a call to this method as being # internal. Since %data is a fake state variable, # it needs to be cleared for every *external* # call, but not cleared when the call is recursive. my $ns_data = $self->_get_ns_data(); my $cat_ns_name = $ns_data->{+NS_CATEGORY}; foreach my $page (@first) { if ($page =~ m/^$cat_ns_name:/) { if (!exists($data{$page})) { $data{$page} = ''; my @pages = $self->get_all_pages_in_category($page, $options, 1); foreach (@pages) { $data{$_} = ''; } } else { $data{$page} = ''; } } else { $data{$page} = ''; } } return keys %data; } } # This ends the bare block around get_all_pages_in_category() sub get_all_categories { my $self = shift; my $options = shift; my $query = { action => 'query', list => 'allcategories', }; if ( defined $options && $options->{'max'} == '0' ) { $query->{'aclimit'} = 'max'; } my $res = $self->{api}->api($query); return $self->_handle_api_error() unless $res; return map { $_->{'*'} } @{ $res->{'query'}->{'allcategories'} }; } sub linksearch { my $self = shift; my $link = shift; my $ns = shift; my $prot = shift; my $options = shift; $ns = join('|', @$ns) if (ref $ns eq 'ARRAY'); my $hash = { action => 'query', list => 'exturlusage', euprop => 'url|title', euquery => $link, eulimit => 'max', }; $hash->{eunamespace} = $ns if defined $ns; $hash->{euprotocol} = $prot if $prot; $options->{max} = 1 unless $options->{max}; my $res = $self->{api}->list($hash, $options); return $self->_handle_api_error() unless $res; return RET_TRUE if not ref $res; # When using a callback hook, this won't be a reference return map {{ url => $_->{url}, title => $_->{title}, }} @$res; } sub purge_page { my $self = shift; my $page = shift; my $hash; if (ref $page eq 'ARRAY') { # If it is an array reference... 
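        # (The API accepts many titles in a single request when they are joined
        # with '|', so passing an arrayref costs only one HTTP round-trip.)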
$hash = { action => 'purge', titles => join('|', @$page), # dereference it and purge all those titles }; } else { # Just one page $hash = { action => 'purge', titles => $page, }; } my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; my $success = 0; foreach my $hashref (@{ $res->{purge} }) { $success++ if exists $hashref->{purged}; } return $success; } sub get_namespace_names { my $self = shift; my $res = $self->{api}->api({ action => 'query', meta => 'siteinfo', siprop => 'namespaces', }); return $self->_handle_api_error() unless $res; return map { $_ => $res->{query}->{namespaces}->{$_}->{'*'} } keys %{ $res->{query}->{namespaces} }; } sub image_usage { my $self = shift; my $image = shift; my $ns = shift; my $filter = shift; my $options = shift; if ($image !~ m/^File:|Image:/) { warnings::warnif('deprecated', q{Please include the canonical File: } . q{namespace in the image name. If you don't, MediaWiki::Bot might } . q{incur a network round-trip to get the localized namespace name}); my $ns_data = $self->_get_ns_data(); my $file_ns_name = $ns_data->{+NS_FILE}; if ($image !~ m/^\Q$file_ns_name\E:/) { $image = "$file_ns_name:$image"; } } $options->{max} = 1 unless defined($options->{max}); delete($options->{max}) if $options->{max} == 0; $ns = join('|', @$ns) if (ref $ns eq 'ARRAY'); my $hash = { action => 'query', list => 'imageusage', iutitle => $image, iulimit => 'max', }; $hash->{iunamespace} = $ns if defined $ns; if (defined($filter) and $filter =~ m/(all|redirects|nonredirects)/) { $hash->{'iufilterredir'} = $1; } my $res = $self->{api}->list($hash, $options); return $self->_handle_api_error() unless $res; return RET_TRUE if not ref $res; # When using a callback hook, this won't be a reference return map { $_->{title} } @$res; } sub global_image_usage { my $self = shift; my $image = shift; my $limit = shift; my $filterlocal = shift; $limit = defined $limit ? $limit : 500; if ($image !~ m/^File:|Image:/) { my $ns_data = $self->_get_ns_data(); my $image_ns_name = $ns_data->{+NS_FILE}; if ($image !~ m/^\Q$image_ns_name\E:/) { $image = "$image_ns_name:$image"; } } my @data; my $cont; while ($limit ? scalar @data < $limit : 1) { my $hash = { action => 'query', prop => 'globalusage', titles => $image, # gufilterlocal => $filterlocal, gulimit => 'max', }; $hash->{gufilterlocal} = $filterlocal if $filterlocal; $hash->{gucontinue} = $cont if $cont; my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; $cont = $res->{'query-continue'}->{globalusage}->{gucontinue}; warn "gucontinue: $cont\n" if $cont and $self->{debug} > 1; my $page_id = (keys %{ $res->{query}->{pages} })[0]; my $results = $res->{query}->{pages}->{$page_id}->{globalusage}; push @data, @$results; last unless $cont; } return @data > $limit ? @data[0 .. $limit-1] : @data; } sub links_to_image { warnings::warnif('deprecated', 'links_to_image is an alias of image_usage; ' . 
'please use the new name'); my $self = shift; return $self->image_usage($_[0]); } sub is_blocked { my $self = shift; my $user = shift; # http://en.wikipedia.org/w/api.php?action=query&meta=blocks&bkusers=$user&bklimit=1&bkprop=id my $hash = { action => 'query', list => 'blocks', bkusers => $user, bklimit => 1, bkprop => 'id', }; my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; my $number = scalar @{ $res->{query}->{blocks} }; # The number of blocks returned if ($number == 1) { return RET_TRUE; } elsif ($number == 0) { return RET_FALSE; } else { confess "This query should return at most one result, but the API returned more than that."; } } sub test_blocked { # For backwards-compatibility warnings::warnif('deprecated', 'test_blocked is an alias of is_blocked; ' . 'please use the new name. This alias might be removed in a future release'); return (is_blocked(@_)); } sub test_image_exists { my $self = shift; my $image = shift; my $multi; if (ref $image eq 'ARRAY') { $multi = $image; # so we know to return a hash/scalar & keep track of order $image = join('|', @$image); } my $res = $self->{api}->api({ action => 'query', titles => $image, iilimit => 1, prop => 'imageinfo' }); return $self->_handle_api_error() unless $res; my @sorted_ids; if ($multi) { my %mapped; $mapped{ $res->{query}->{pages}->{$_}->{title} } = $_ for (keys %{ $res->{query}->{pages} }); foreach my $file ( @$multi ) { unshift @sorted_ids, $mapped{$file}; } } else { push @sorted_ids, keys %{ $res->{query}->{pages} }; } my @return; foreach my $id (@sorted_ids) { if ($res->{query}->{pages}->{$id}->{imagerepository} eq 'shared') { if ($multi) { unshift @return, FILE_SHARED; } else { return FILE_SHARED; } } elsif (exists($res->{query}->{pages}->{$id}->{missing})) { if ($multi) { unshift @return, FILE_NONEXISTENT; } else { return FILE_NONEXISTENT; } } elsif ($res->{query}->{pages}->{$id}->{imagerepository} eq '') { if ($multi) { unshift @return, FILE_PAGE_TEXT_ONLY; } else { return FILE_PAGE_TEXT_ONLY; } } elsif ($res->{query}->{pages}->{$id}->{imagerepository} eq 'local') { if ($multi) { unshift @return, FILE_LOCAL; } else { return FILE_LOCAL; } } } return \@return; } sub get_pages_in_namespace { my $self = shift; my $namespace = shift; my $limit = shift || 'max'; my $options = shift; my $hash = { action => 'query', list => 'allpages', apnamespace => $namespace, aplimit => $limit, }; $options->{max} = 1 unless defined $options->{max}; delete $options->{max} if exists $options->{max} and $options->{max} == 0; my $res = $self->{api}->list($hash, $options); return $self->_handle_api_error() unless $res; return RET_TRUE if not ref $res; # Not a ref when using callback return map { $_->{title} } @$res; } sub count_contributions { my $self = shift; my $username = shift; $username =~ s/User://i; # Strip namespace my $res = $self->{api}->list({ action => 'query', list => 'users', ususers => $username, usprop => 'editcount' }, { max => 1 }); return $self->_handle_api_error() unless $res; return ${$res}[0]->{editcount}; } sub timed_count_contributions { my $self = shift; my $username = shift; my $days = shift; $username =~ s/User://i; # Strip namespace my $res = $self->{api}->api({ action => 'userdailycontribs', user => $username, daysago => $days, }, { max => 1 }); return $self->_handle_api_error() unless $res; return ($res->{userdailycontribs}->{timeFrameEdits}, $res->{userdailycontribs}->{totalEdits}); } sub last_active { my $self = shift; my $username = shift; $username = "User:$username" unless 
$username =~ /User:/i; my $res = $self->{api}->list({ action => 'query', list => 'usercontribs', ucuser => $username, uclimit => 1 }, { max => 1 }); return $self->_handle_api_error() unless $res; return ${$res}[0]->{timestamp}; } sub recent_edit_to_page { my $self = shift; my $page = shift; my $res = $self->{api}->api({ action => 'query', prop => 'revisions', titles => $page, rvlimit => 1 }, { max => 1 }); return $self->_handle_api_error() unless $res; my $data = ( %{ $res->{query}->{pages} } )[1]; return ($data->{revisions}[0]->{timestamp}, $data->{revisions}[0]->{user}); } sub get_users { my $self = shift; my $pagename = shift; my $limit = shift || 'max'; my $rvstartid = shift; my $direction = shift; if ($limit > 50) { $self->{errstr} = "Error requesting history for $pagename: Limit may not be set to values above 50"; carp $self->{errstr}; return; } my $hash = { action => 'query', prop => 'revisions', titles => $pagename, rvprop => 'ids|timestamp|user|comment', rvlimit => $limit, }; $hash->{rvstartid} = $rvstartid if ($rvstartid); $hash->{rvdir} = $direction if ($direction); my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; my ($id) = keys %{ $res->{query}->{pages} }; return map { $_->{user} } @{$res->{query}->{pages}->{$id}->{revisions}}; } sub was_blocked { my $self = shift; my $user = shift; $user =~ s/User://i; # Strip User: prefix, if present # http://en.wikipedia.org/w/api.php?action=query&list=logevents&letype=block&letitle=User:127.0.0.1&lelimit=1&leprop=ids my $hash = { action => 'query', list => 'logevents', letype => 'block', letitle => "User:$user", # Ensure the User: prefix is there! lelimit => 1, leprop => 'ids', }; my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; my $number = scalar @{ $res->{query}->{logevents} }; # The number of blocks returned if ($number == 1) { return RET_TRUE; } elsif ($number == 0) { return RET_FALSE; } else { confess "This query should return at most one result, but the API returned more than that."; } } sub test_block_hist { # Backwards compatibility warnings::warnif('deprecated', 'test_block_hist is an alias of was_blocked; ' . 'please use the new method name. This alias might be removed in a future release'); return (was_blocked(@_)); } sub expandtemplates { my $self = shift; my $page = shift; my $text = shift; unless ($text) { croak q{You must provide a page title} unless $page; $text = $self->get_text($page); } my $hash = { action => 'expandtemplates', prop => 'wikitext', ( $page ? (title => $page) : ()), text => $text, }; my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; return exists $res->{expandtemplates}->{'*'} ? 
$res->{expandtemplates}->{'*'} : $res->{expandtemplates}->{wikitext}; } sub get_allusers { my $self = shift; my $limit = shift || 'max'; my $group = shift; my $opts = shift; my $hash = { action => 'query', list => 'allusers', aulimit => $limit, }; $hash->{augroup} = $group if defined $group; $opts->{max} = 1 unless exists $opts->{max}; delete $opts->{max} if exists $opts->{max} and $opts->{max} == 0; my $res = $self->{api}->list($hash, $opts); return $self->_handle_api_error() unless $res; return RET_TRUE if not ref $res; # Not a ref when using callback return map { $_->{name} } @$res; } sub db_to_domain { my $self = shift; my $wiki = shift; if (!$self->{sitematrix}) { $self->_get_sitematrix(); } if (ref $wiki eq 'ARRAY') { my @return; foreach my $w (@$wiki) { $wiki =~ s/_p$//; # Strip off a _p suffix, if present my $domain = $self->{sitematrix}->{$w} || undef; $domain =~ s/^https\:\/\/// if (defined $domain); # Strip off a https:// prefix, if present push(@return, $domain); } return \@return; } else { $wiki =~ s/_p$//; # Strip off a _p suffix, if present my $domain = $self->{sitematrix}->{$wiki} || undef; $domain =~ s/^https\:\/\/// if (defined $domain); # Strip off a https:// prefix, if present return $domain; } } sub domain_to_db { my $self = shift; my $wiki = shift; if (!$self->{sitematrix}) { $self->_get_sitematrix(); } if (ref $wiki eq 'ARRAY') { my @return; foreach my $w (@$wiki) { $w = "https://".$w if ($w !~ /^https\:\//); # Prepend a https:// prefix, if not present my $db = $self->{sitematrix}->{$w} || undef; push(@return, $db); } return \@return; } else { $wiki = "https://".$wiki if ($wiki !~ /^https\:\//); # Prepend a https:// prefix, if not present my $db = $self->{sitematrix}->{$wiki} || undef; return $db; } } sub diff { my $self = shift; my $title; my $revid; my $oldid; if (ref $_[0] eq 'HASH') { $title = $_[0]->{title}; $revid = $_[0]->{revid}; $oldid = $_[0]->{oldid}; } else { $title = shift; $revid = shift; $oldid = shift; } my $hash = { action => 'query', prop => 'revisions', rvdiffto => $oldid, }; if ($title) { $hash->{titles} = $title; $hash->{rvlimit} = 1; } elsif ($revid) { $hash->{'revids'} = $revid; } my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; my @revids = keys %{ $res->{query}->{pages} }; my $diff = $res->{query}->{pages}->{ $revids[0] }->{revisions}->[0]->{diff}->{'*'}; return $diff; } sub prefixindex { my $self = shift; my $prefix = shift; my $ns = shift; my $filter = shift; my $options = shift; if (defined($filter) and $filter =~ m/(all|redirects|nonredirects)/) { # Verify $filter = $1; } if (!defined $ns && $prefix =~ m/:/) { print STDERR "Converted '$prefix' to..." 
if $self->{debug} > 1; my ($name) = split(/:/, $prefix, 2); my $ns_data = $self->_get_ns_data(); $ns = $ns_data->{$name}; $prefix =~ s/^$name://; warn "'$prefix' with a namespace filter $ns" if $self->{debug} > 1; } my $hash = { action => 'query', list => 'allpages', apprefix => $prefix, aplimit => 'max', }; $hash->{apnamespace} = $ns if defined $ns; $hash->{apfilterredir} = $filter if $filter; $options->{max} = 1 unless $options->{max}; my $res = $self->{api}->list($hash, $options); return $self->_handle_api_error() unless $res; return RET_TRUE if not ref $res; # Not a ref when using callback hook return map { { title => $_->{title}, redirect => defined $_->{redirect} } } @$res; } sub search { my $self = shift; my $term = shift; my $ns = shift || 0; my $options = shift; if (ref $ns eq 'ARRAY') { # Accept a hashref $ns = join('|', @$ns); } my $hash = { action => 'query', list => 'search', srsearch => $term, srwhat => 'text', srlimit => 'max', #srinfo => 'totalhits', srprop => 'size', srredirects => 0, }; $options->{max} = 1 unless $options->{max}; my $res = $self->{api}->list($hash, $options); return $self->_handle_api_error() unless $res; return RET_TRUE if not ref $res; # Not a ref when used with callback return map { $_->{title} } @$res; } sub get_log { my $self = shift; my $data = shift; my $options = shift; my $log_type = $data->{type}; my $user = $data->{user}; my $target = $data->{target}; if ($user) { my $ns_data = $self->_get_ns_data(); my $user_ns_name = $ns_data->{+NS_USER}; $user =~ s/^$user_ns_name://; } my $hash = { action => 'query', list => 'logevents', lelimit => 'max', }; $hash->{letype} = $log_type if $log_type; $hash->{leuser} = $user if $user; $hash->{letitle} = $target if $target; $options->{max} = 1 unless $options->{max}; my $res = $self->{api}->list($hash, $options); return $self->_handle_api_error() unless $res; return RET_TRUE if not ref $res; # Not a ref when using callback return $res; } sub is_g_blocked { my $self = shift; my $ip = shift; # http://en.wikipedia.org/w/api.php?action=query&list=globalblocks&bglimit=1&bgprop=address&bgip=127.0.0.1 my $res = $self->{api}->api({ action => 'query', list => 'globalblocks', bglimit => 1, bgprop => 'address', # So handy! It searches for blocks affecting this IP or IP range, # including rangeblocks! Can't get that from UI. bgip => $ip, }); return $self->_handle_api_error() unless $res; return RET_FALSE unless ($res->{query}->{globalblocks}->[0]); return $res->{query}->{globalblocks}->[0]->{address}; } sub was_g_blocked { my $self = shift; my $ip = shift; $ip =~ s/User://i; # Strip User: prefix, if present # This query should always go to Meta unless ( $self->{host} eq 'meta.wikimedia.org' ) { carp "GlobalBlocking queries should probably be sent to Meta; it doesn't look like you're doing so" if $self->{debug}; } # http://meta.wikimedia.org/w/api.php?action=query&list=logevents&letype=gblblock&letitle=User:127.0.0.1&lelimit=1&leprop=ids my $res = $self->{api}->api({ action => 'query', list => 'logevents', letype => 'gblblock', letitle => "User:$ip", # Ensure the User: prefix is there! 
lelimit => 1, leprop => 'ids', }); return $self->_handle_api_error() unless $res; my $number = scalar @{ $res->{query}->{logevents} }; # The number of blocks returned if ($number == 1) { return RET_TRUE; } elsif ($number == 0) { return RET_FALSE; } else { confess "This query should return at most one result, but the API gave more than that."; } } sub was_locked { my $self = shift; my $user = shift; # This query should always go to Meta unless ( $self->{api}->{config}->{api_url} =~ m, \Qhttp://meta.wikimedia.org/w/api.php\E | \Qhttps://secure.wikimedia.org/wikipedia/meta/w/api.php\E ,x # /x flag is pretty awesome :) ) { carp "CentralAuth queries should probably be sent to Meta; it doesn't look like you're doing so" if $self->{debug}; } $user =~ s/^User://i; $user =~ s/\@global$//i; my $res = $self->{api}->api({ action => 'query', list => 'logevents', letype => 'globalauth', letitle => "User:$user\@global", lelimit => 1, leprop => 'ids', }); return $self->_handle_api_error() unless $res; my $number = scalar @{ $res->{query}->{logevents} }; if ($number == 1) { return RET_TRUE; } elsif ($number == 0) { return RET_FALSE; } else { confess "This query should return at most one result, but the API returned more than that."; } } sub get_protection { my $self = shift; my $page = shift; if (ref $page eq 'ARRAY') { $page = join('|', @$page); } my $hash = { action => 'query', titles => $page, prop => 'info', inprop => 'protection', }; my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; my $data = $res->{query}->{pages}; my $out_data; foreach my $item (keys %$data) { my $title = $data->{$item}->{title}; my $protection = $data->{$item}->{protection}; if (@$protection == 0) { $protection = undef; } $out_data->{$title} = $protection; } if (scalar keys %$out_data == 1) { return $out_data->{$page}; } else { return $out_data; } } sub is_protected { warnings::warnif('deprecated', 'is_protected is deprecated, and might be ' . 
'removed in a future release; please use get_protection instead'); my $self = shift; return $self->get_protection(@_); } sub patrol { my $self = shift; my $rcid = shift; if (ref $rcid eq 'ARRAY') { my @return; foreach my $id (@$rcid) { my $res = $self->patrol($id); push(@return, $res); } return @return; } else { my ($token) = $self->_get_edittoken('patrol'); my $res = $self->{api}->api({ action => 'patrol', rcid => $rcid, token => $token, }); return $self->_handle_api_error() if !$res or $self->{error}->{details} && $self->{error}->{details} =~ m/^(?:permissiondenied|badtoken)/; return $res; } } sub email { my $self = shift; my $user = shift; my $subject = shift; my $body = shift; if (ref $user eq 'ARRAY') { my @return; foreach my $target (@$user) { my $res = $self->email($target, $subject, $body); push(@return, $res); } return @return; } $user =~ s/^User://; if ($user =~ m/:/) { my $user_ns_name = $self->_get_ns_data()->{+NS_USER}; $user =~ s/^$user_ns_name://; } my ($token) = $self->_get_edittoken; my $res = $self->{api}->api({ action => 'emailuser', target => $user, subject => $subject, text => $body, token => $token, }); return $self->_handle_api_error() unless $res; return $res; } sub top_edits { my $self = shift; my $user = shift; my $options = shift; $user =~ s/^User://; $options->{max} = 1 unless defined($options->{max}); delete($options->{max}) if $options->{max} == 0; my $res = $self->{'api'}->list({ action => 'query', list => 'usercontribs', ucuser => $user, ucprop => 'title|flags', uclimit => 'max', }, $options); return $self->_handle_api_error() unless $res; return RET_TRUE if not ref $res; # Not a ref when using callback return map { $_->{title} } grep { exists $_->{top} } @$res; } sub contributions { my $self = shift; my $user = shift; my $ns = shift; my $opts = shift; if (ref $user eq 'ARRAY') { $user = join '|', map { my $u = $_; $u =~ s{^User:}{}; $u } @$user; } else { $user =~ s{^User:}{}; } $ns = join '|', @$ns if ref $ns eq 'ARRAY'; $opts->{max} = 1 unless defined($opts->{max}); delete($opts->{max}) if $opts->{max} == 0; my $query = { action => 'query', list => 'usercontribs', ucuser => $user, ( defined $ns ? 
(ucnamespace => $ns) : ()), ucprop => 'ids|title|timestamp|comment|flags', uclimit => 'max', }; my $res = $self->{api}->list($query, $opts); return $self->_handle_api_error() unless $res->[0]; return RET_TRUE if not ref $res; # Not a ref when using callback return @$res; } sub upload { my $self = shift; my $args = shift; my $data = delete $args->{data}; if (!defined $data and defined $args->{file}) { $data = do { local $/; open my $in, '<:raw', $args->{file} or die $!; <$in> }; } unless (defined $data) { $self->{error}->{code} = ERR_PARAMS; $self->{error}->{details} = q{You must provide either file contents or a filename.}; return undef; } unless (defined $args->{file} or defined $args->{title}) { $self->{error}->{code} = ERR_PARAMS; $self->{error}->{details} = q{You must specify a title to upload to.}; return undef; } my $filename = $args->{title} || do { require File::Basename; File::Basename::basename($args->{file}) }; my $success = $self->{api}->edit({ action => 'upload', filename => $filename, comment => $args->{summary}, file => [ undef, $filename, Content => $data ], }) || return $self->_handle_api_error(); return $success; } sub upload_from_url { my $self = shift; my $args = shift; my $url = delete $args->{url}; unless (defined $url) { $self->{error}->{code} = ERR_PARAMS; $self->{error}->{details} = q{You must provide URL of file to upload.}; return undef; } my $filename = $args->{title} || do { require File::Basename; File::Basename::basename($url) }; my $success = $self->{api}->edit({ action => 'upload', filename => $filename, comment => $args->{summary}, url => $url, ignorewarnings => 1, }) || return $self->_handle_api_error(); return $success; } sub usergroups { my $self = shift; my $user = shift; $user =~ s/^User://; my $res = $self->{api}->api({ action => 'query', list => 'users', ususers => $user, usprop => 'groups', ustoken => 'userrights', }); return $self->_handle_api_error() unless $res; foreach my $res_user (@{ $res->{query}->{users} }) { next unless $res_user->{name} eq $user; # Cache the userrights token on the assumption that we'll use it shortly to change the rights $self->{userrightscache} = { user => $user, token => $res_user->{userrightstoken}, groups => $res_user->{groups}, }; return @{ $res_user->{groups} }; # SUCCESS } return $self->_handle_api_error({ code => ERR_API, details => qq{Results for $user weren't returned by the API} }); } ################ # Internal use # ################ sub _get_edittoken { # Actually returns ($token, $base_timestamp, $start_timestamp) my $self = shift; my $page = shift || 'Main Page'; my $type = shift || 'csrf'; my $res = $self->{api}->api({ action => 'query', meta => 'siteinfo|tokens', titles => $page, prop => 'revisions', rvprop => 'timestamp', type => $type, }) or return $self->_handle_api_error(); my $data = ( %{ $res->{query}->{pages} })[1]; my $base_timestamp = $data->{revisions}[0]->{timestamp}; my $start_timestamp = $res->{query}->{general}->{time}; my $token = $res->{query}->{tokens}->{"${type}token"}; return ($token, $base_timestamp, $start_timestamp); } sub _handle_api_error { my $self = shift; my $error = shift; $self->{error} = {}; carp 'Error code ' . $self->{api}->{error}->{code} . ': ' . $self->{api}->{error}->{details} if $self->{debug}; $self->{error} = (defined $error and ref $error eq 'HASH' and exists $error->{code} and exists $error->{details}) ? 
$error : $self->{api}->{error}; return undef; } sub _is_loggedin { my $self = shift; my $is = $self->_whoami() || return $self->_handle_api_error(); my $ought = $self->{username}; warn "Testing if logged in: we are $is, and we should be $ought" if $self->{debug} > 1; return ($is eq $ought); } sub _whoami { my $self = shift; my $res = $self->{api}->api({ action => 'query', meta => 'userinfo', }) or return $self->_handle_api_error(); return $res->{query}->{userinfo}->{name}; } sub _do_autoconfig { my $self = shift; # http://en.wikipedia.org/w/api.php?action=query&meta=userinfo&uiprop=rights|groups my $hash = { action => 'query', meta => 'userinfo', uiprop => 'rights|groups', }; my $res = $self->{api}->api($hash); return $self->_handle_api_error() unless $res; return $self->_handle_api_error() unless $res->{query}; return $self->_handle_api_error() unless $res->{query}->{userinfo}; return $self->_handle_api_error() unless $res->{query}->{userinfo}->{name}; my $is = $res->{query}->{userinfo}->{name}; my $ought = $self->{username}; # Should we try to recover by logging in again? croak? carp "We're logged in as $is but we should be logged in as $ought" if ($is ne $ought); my @rights = @{ $res->{query}->{userinfo}->{rights} || [] }; my $has_bot = 0; my $default_assert = 'user'; # At a *minimum*, the bot should be logged in. foreach my $right (@rights) { if ($right eq 'bot') { $has_bot = 1; $default_assert = 'bot'; } } my @groups = @{ $res->{query}->{userinfo}->{groups} || [] }; # there may be no groups my $is_sysop = 0; foreach my $group (@groups) { if ($group eq 'sysop') { $is_sysop = 1; } } unless ($has_bot && !$is_sysop) { warn "$is doesn't have a bot flag; edits will be visible in RecentChanges" if $self->{debug} > 1; } $self->{assert} = $default_assert unless $self->{assert}; return RET_TRUE; } sub _get_sitematrix { my $self = shift; my $res = $self->{api}->api({ action => 'sitematrix' }); return $self->_handle_api_error() unless $res; my %sitematrix = %{ $res->{sitematrix} }; # This hash is a monstrosity (see http://sprunge.us/dfBD?pl), and needs # lots of post-processing to have a sane data structure :\ my %by_db; SECTION: foreach my $hashref (%sitematrix) { if (ref $hashref ne 'HASH') { # Yes, there are non-hashrefs in here, wtf?! if ($hashref eq 'specials') { SPECIAL: foreach my $special (@{ $sitematrix{specials} }) { next SPECIAL if (exists($special->{private}) or exists($special->{fishbowl})); my $db = $special->{code}; my $domain = $special->{url}; $domain =~ s,^http://,,; $by_db{$db} = $domain; } } next SECTION; } my $lang = $hashref->{code}; WIKI: foreach my $wiki_ref ($hashref->{site}) { WIKI2: foreach my $wiki_ref2 (@$wiki_ref) { my $family = $wiki_ref2->{code}; my $domain = $wiki_ref2->{url}; $domain =~ s,^http://,,; my $db = $lang . $family; # Is simple concatenation /always/ correct? $by_db{$db} = $domain; } } } # Now filter out closed wikis my $response = $self->{api}->{ua}->get('http://noc.wikimedia.org/conf/closed.dblist'); if ($response->is_success()) { my @closed_list = split(/\n/, $response->decoded_content); CLOSED: foreach my $closed (@closed_list) { delete($by_db{$closed}); } } # Now merge in the reverse, so you can look up by domain as well as db my %by_domain; while (my ($key, $value) = each %by_db) { $by_domain{$value} = $key; } %by_db = (%by_db, %by_domain); # This could be saved to disk with Storable. Next time you call this # method, if mtime is less than, say, 14d, you could load it from # disk instead of over network. 
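    # A rough sketch of that idea, left as a comment (the cache filename and the
    # 14-day window are arbitrary choices, and error handling is omitted):
    #
    #   use Storable qw(nstore retrieve);
    #   my $cache_file = '.mediawiki-bot-sitematrix.storable';
    #
    #   # On a later call, before hitting the network:
    #   if (-e $cache_file and time() - (stat $cache_file)[9] < 14 * 24 * 60 * 60) {
    #       return $self->{sitematrix} = retrieve($cache_file);
    #   }
    #
    #   # ...and after building %by_db below:
    #   nstore(\%by_db, $cache_file);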
$self->{sitematrix} = \%by_db; return $self->{sitematrix}; } sub _get_ns_data { my $self = shift; # If we have it already, return the cached data return $self->{ns_data} if exists $self->{ns_data}; # If we haven't returned by now, we have to ask the API my %ns_data = $self->get_namespace_names(); my %reverse = reverse %ns_data; %ns_data = (%ns_data, %reverse); $self->{ns_data} = \%ns_data; # Save for later use return $self->{ns_data}; } sub _get_ns_alias_data { my $self = shift; return $self->{ns_alias_data} if exists $self->{ns_alias_data}; my $ns_res = $self->{api}->api({ action => 'query', meta => 'siteinfo', siprop => 'namespacealiases|namespaces', }); my %ns_alias_data = map { # Map namespace alias names like "WP" to the canonical namespace name # from the "namespaces" part of the response $_->{ns_alias} => $ns_res->{query}->{namespaces}->{ $_->{ns_number} }->{canonical} } map { # Map namespace alias names (from the "namespacealiases" part of the response) # like "WP" to the namespace number (usd to look up canonical data in the # "namespaces" part of the response) { ns_alias => $_->{'*'}, ns_number => $_->{id} } } @{ $ns_res->{query}->{namespacealiases} }; $self->{ns_alias_data} = \%ns_alias_data; return $self->{ns_alias_data}; } 1; __END__ =pod =encoding UTF-8 =head1 NAME MediaWiki::Bot - a high-level bot framework for interacting with MediaWiki wikis =head1 VERSION version 5.006003 =head1 SYNOPSIS use MediaWiki::Bot qw(:constants); my $bot = MediaWiki::Bot->new({ assert => 'bot', host => 'de.wikimedia.org', login_data => { username => "Mike's bot account", password => "password" }, }); my $revid = $bot->get_last("User:Mike.lifeguard/sandbox", "Mike.lifeguard"); print "Reverting to $revid\n" if defined($revid); $bot->revert('User:Mike.lifeguard', $revid, 'rvv'); =head1 DESCRIPTION B is a framework that can be used to write bots which interface with the MediaWiki API (L). =head1 METHODS =head2 new my $bot = MediaWiki::Bot({ host => 'en.wikipedia.org', operator => 'Mike.lifeguard', }); Calling C<< MediaWiki::Bot->new() >> will create a new MediaWiki::Bot object. The only parameter is a hashref with keys: =over 4 =item * I sets a custom useragent. It is recommended to use C instead, which is all we need to do the right thing for you. If you really want to do it yourself, see L for guidance on what information must be included. =item * I sets a parameter for the AssertEdit extension (commonly 'bot') Refer to L. =item * I allows the bot to send you a message when it fails an assert. This is also the recommended way to customize the user agent string, which is required by the Wikimedia Foundation. A warning will be emitted if you omit this. =item * I allows you to set the maxlag parameter (default is the recommended 5s). Please refer to the MediaWiki documentation prior to changing this from the default. =item * I allows you to specify 'http' or 'https' (default is 'http') =item * I sets the domain name of the wiki to connect to =item * I sets the path to api.php (with no leading or trailing slash) =item * I is a hashref of credentials to pass to L. =item * I - whether to provide debug output. 1 provides only error messages; 2 provides further detail on internal operations. 
=back

For example:

    my $bot = MediaWiki::Bot->new({
        assert     => 'bot',
        protocol   => 'https',
        host       => 'en.wikimedia.org',
        agent      => sprintf(
            'PerlWikiBot/%s (https://metacpan.org/MediaWiki::Bot; User:Mike.lifeguard)',
            MediaWiki::Bot->VERSION
        ),
        login_data => { username => "Mike's bot account", password => "password" },
    });

For backward compatibility, you can specify up to three parameters:

    my $bot = MediaWiki::Bot->new('My custom useragent string', $assert, $operator);

B<This form is deprecated>, will never do auto-login or autoconfiguration,
and emits deprecation warnings.

For further reading:

=over 4

=item *

L<The MediaWiki::Bot wiki|https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki>

=item *

L<Installing MediaWiki::Bot|https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Install>

=back

=head2 set_wiki

Set what wiki to use. The parameter is a hashref with keys:

=over 4

=item *

I<host> - the domain name

=item *

I<path> - the part of the path before api.php (usually 'w')

=item *

I<protocol> is either 'http' or 'https'.

=back

If you don't set any parameter, its previous value is used. If it has never
been set, the default settings are 'http', 'en.wikipedia.org' and 'w'.

For example:

    $bot->set_wiki({
        protocol => 'https',
        host     => 'secure.wikimedia.org',
        path     => 'wikipedia/meta/w',
    });

For backward compatibility, you can specify up to two parameters:

    $bot->set_wiki($host, $path);

B<This form is deprecated>, and will emit deprecation warnings.

=head2 login

This method takes a hashref with keys I<username> and I<password> at a
minimum. See L</"Single User Login"> and L</"Basic authentication"> for
additional options.

Logs the user $username in, optionally using $password. First, an attempt
will be made to use cookies to log in. If this fails, an attempt will be made
to use the password provided to log in, if any. If the login was successful,
returns true; false otherwise.

    $bot->login({
        username => $username,
        password => $password,
    }) or die "Login failed";

Once logged in, attempt to do some simple auto-configuration. At present, this
consists of:

=over 4

=item *

Warning if the account doesn't have the bot flag, and isn't a sysop account.

=item *

Setting an appropriate default assert.

=back

You can skip this autoconfiguration by passing C<< autoconfig => 0 >>

For backward compatibility, you can call this as

    $bot->login($username, $password);

B<This form is deprecated>, and will emit deprecation warnings. It will never
do autoconfiguration or SUL login.

=head3 Single User Login

On WMF wikis, C<do_sul> specifies whether to log in on all projects. The
default is false. But even when false, you still get a CentralAuth cookie
for, and are thus logged in on, all languages of a given domain
(C<*.wikipedia.org>, for example). When set, a login is done on each WMF
domain so you are logged in on all ~800 content wikis. Since
C<*.wikimedia.org> is not possible, we explicitly include meta, commons,
incubator, and wikispecies.

=head3 Basic authentication

If you need to supply basic auth credentials, pass a hashref of data as
described by L<LWP::UserAgent>:

    $bot->login({
        username   => $username,
        password   => $password,
        basic_auth => {
            netloc => "private.wiki.com:80",
            realm  => "Authentication Realm",
            uname  => "Basic auth username",
            pass   => "password",
        }
    }) or die "Couldn't log in";

=head3 Bot passwords

C<MediaWiki::Bot> doesn't yet support the more complicated (but more secure)
oAuth login flow for bots. Instead, we support a simpler "bot password", which
is a generated password connected to a (possibly-reduced) set of on-wiki
privileges, and IP ranges from which it can be used.

To create one, visit C<Special:BotPasswords> on the wiki. Enter a label for
the password, then select the privileges you want to use with that password.
This set should be as restricted as possible; most bots only edit existing
pages. Keeping the set of privileges as restricted as possible limits the
possible damage if the password were ever compromised.

Submit the form, and you'll be given a new "username" that looks like
"AccountUsername@bot_password_label", and a generated bot password. To log
in, provide those to C<login> verbatim.
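For example (a sketch only; the account name, label, and password below are
placeholders, not real credentials):

    $bot->login({
        # the "username" is your account name plus the bot password label
        username => 'MyAccount@nightly-maintenance',
        # the generated bot password, exactly as the wiki displayed it
        password => 'abcdefghij1234567890abcdefghij12',
    }) or die "Bot-password login failed";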
B<References:> L<API:Login|https://www.mediawiki.org/wiki/API:Login>

=head2 logout

    $bot->logout();

The logout method logs the bot out of the wiki. This invalidates all login
cookies.

B<References:> L<API:Logout|https://www.mediawiki.org/wiki/API:Logout>

=head2 edit

    my $text = $bot->get_text('My page');
    $text .= "\n\n* More text\n";
    $bot->edit({
        page    => 'My page',
        text    => $text,
        summary => 'Adding new content',
        section => 'new',
    });

This method edits a wiki page, and takes a hashref of data with keys:

=over 4

=item *

I<page> - the page title to edit

=item *

I<text> - the page text to write

=item *

I<summary> - an edit summary

=item *

I<minor> - whether to mark the edit as minor or not (boolean)

=item *

I<bot> - whether to mark the edit as a bot edit (boolean)

=item *

I<assert> - usually 'bot', but see L</new>.

=item *

I<section> - edit a single section (identified by number) instead of the
whole page

=back

An MD5 hash is sent to guard against data corruption while in transit.

You can also call this as:

    $bot->edit($page, $text, $summary, $is_minor, $assert, $markasbot);

B<This form is deprecated>, and will emit deprecation warnings.

=head3 CAPTCHAs

If a L<CAPTCHA|https://en.wikipedia.org/wiki/CAPTCHA> is encountered, the
call to C<edit> will return false, with the error code set to C<ERR_CAPTCHA>
and the details informing you that solving a CAPTCHA is required for this
action. The information you need to actually solve the captcha (for example
the URL for the image) is given in C<< $bot->{error}->{captcha} >> as a hash
reference.

You will want to grab the keys 'url' (a relative URL to the image) and 'id'
(the ID of the CAPTCHA). Once you have solved the CAPTCHA (presumably by
interacting with a human), retry the edit, adding C<captcha_id> and
C<captcha_solution> parameters:

    my $edit = {page => 'Main Page', text => 'got your nose'};
    my $edit_status = $bot->edit($edit);
    if (not $edit_status) {
        if ($bot->{error}->{code} == ERR_CAPTCHA) {
            my @captcha_uri = split /\Q?/, $bot->{error}{captcha}{url}, 2;
            my $image = URI->new(sprintf '%s://%s%s?%s' =>
                $bot->{protocol}, $bot->{host}, $captcha_uri[0], $captcha_uri[1],
            );

            require Term::ReadLine;
            my $term = Term::ReadLine->new('Solve the captcha');
            $term->ornaments(0);
            my $answer = $term->readline("Please solve $image and type the answer: ");

            # Add new CAPTCHA params to the edit we're attempting
            $edit->{captcha_id}       = $bot->{error}->{captcha}->{id};
            $edit->{captcha_solution} = $answer;
            $edit_status = $bot->edit($edit);
        }
    }

B<References:> L<API:Edit|https://www.mediawiki.org/wiki/API:Edit>

=head2 move

    $bot->move($from_title, $to_title, $reason, $options_hashref);

This moves a wiki page.

If you wish to specify more options (like whether to suppress creation of a
redirect), use $options_hashref, which has keys:

=over 4

=item *

I<movetalk> specifies whether to attempt to move the talk page.

=item *

I<noredirect> specifies whether to suppress creation of a redirect.

=item *

I<movesubpages> specifies whether to move subpages, if applicable.

=item *

I<watch> and I<unwatch> add or remove the page and the redirect from your
watchlist.

=item *

I<ignorewarnings> ignores warnings.

=back

    my @pages = ("Humor", "Rumor");
    foreach my $page (@pages) {
        my $to = $page;
        $to =~ s/or$/our/;
        $bot->move($page, $to, "silly 'merricans");
    }

B<References:> L<API:Move|https://www.mediawiki.org/wiki/API:Move>

=head2 get_history

    my @hist = $bot->get_history($title, $limit, $revid, $direction);

Returns an array containing the history of the specified $page_title, with
$limit number of revisions (default is as many as possible).

The array returned contains hashrefs with keys: revid, user, comment, minor,
timestamp_date, and timestamp_time.

B<References>: L<API:Properties#revisions|https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv>

=head2 get_text

Returns the wikitext of the specified $page_title. The second parameter is
$revid - if defined, returns the text of that revision; the third is
$section_number - if defined, returns the text of that section.

A blank page will return wikitext of "" (which evaluates to false in Perl,
but is defined); a nonexistent page will return undef (which also evaluates
to false in Perl, but is obviously undefined). You can distinguish between
blank and nonexistent pages by using L</get_id>:

    my $wikitext = $bot->get_text('Page title');
    print "Wikitext: $wikitext\n" if defined $wikitext;

B<References:> L<API:Properties#revisions|https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv>

=head2 get_id

Returns the id of the specified $page_title. Returns undef if page does not
exist.

    my $pageid = $bot->get_id("Main Page");
    die "Page doesn't exist\n" if !defined($pageid);

B<References:> L<API:Properties#info|https://www.mediawiki.org/wiki/API:Properties#info_.2F_in>

=head2 get_pages

Returns the text of the specified pages in a hashref. Content of undef means
page does not exist. Also handles redirects or article names that use
namespace aliases.
    my @pages = ('Page 1', 'Page 2', 'Page 3');
    my $thing = $bot->get_pages(\@pages);
    foreach my $page (keys %$thing) {
        my $text = $thing->{$page};
        print "$text\n" if defined($text);
    }

B<References:> L<API:Properties#revisions|https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv>

=head2 get_image

    $buffer = $bot->get_image('File:Foo.jpg', { width=>256, height=>256 });

Download an image from a wiki. This is derived from a similar function in
L<MediaWiki::API>. This one allows the image to be scaled down by passing a
hashref with height & width parameters.

It returns raw data in the original format. You may simply spew it to a file,
or process it directly with a library such as L<Imager>.

    use File::Slurp qw(write_file);
    my $img_data = $bot->get_image('File:Foo.jpg');
    write_file( 'Foo.jpg', {binmode => ':raw'}, \$img_data );

Images are scaled proportionally. (height/width) will remain constant, except
for rounding errors.

Height and width parameters describe the B<maximum> dimensions. A 400x200
image will never be scaled to greater dimensions. You can scale it yourself;
having the wiki do it is just lazy & selfish.

B<References:> L<API:Properties#imageinfo|https://www.mediawiki.org/wiki/API:Properties#imageinfo_.2F_ii>

=head2 revert

Reverts the specified $page_title to $revid, with an edit summary of
$summary. A default edit summary will be used if $summary is omitted.

    my $revid = $bot->get_last("User:Mike.lifeguard/sandbox", "Mike.lifeguard");
    print "Reverting to $revid\n" if defined($revid);
    $bot->revert('User:Mike.lifeguard', $revid, 'rvv');

B<References:> L<API:Edit|https://www.mediawiki.org/wiki/API:Edit>

=head2 undo

    $bot->undo($title, $revid, $summary, $after);

Reverts the specified $revid, with an edit summary of $summary, using the
undo function. To undo all revisions from $revid up to but not including
this one, set $after to another revid. If not set, just undo the one
revision ($revid).
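For example (a sketch; the page title and revision IDs are placeholders):

    # Undo a single revision
    $bot->undo('Project:Sandbox', 123456, 'Undoing test edit');

    # Undo a range of revisions by also passing $after (see above)
    $bot->undo('Project:Sandbox', 123456, 'Undoing test edits', 123400);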
B<References:> L<API:Edit|https://www.mediawiki.org/wiki/API:Edit>

=head2 get_last

Returns the revid of the last revision to $page not made by $user. undef is
returned if no result was found, as would be the case if the page is deleted.

    my $revid = $bot->get_last('User:Mike.lifeguard/sandbox', 'Mike.lifeguard');
    if (defined $revid) {
        print "Reverting to $revid\n";
        $bot->revert('User:Mike.lifeguard', $revid, 'rvv');
    }

B<References:> L<API:Properties#revisions|https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv>

=head2 update_rc

B<This method is deprecated>, and will emit deprecation warnings. Replace
calls to C<update_rc> with calls to the newer C<recentchanges>, which returns
all available data, including rcid.

Returns an array containing the $limit most recent changes to the wiki's
I<main namespace>. The array contains hashrefs with keys title, revid,
old_revid, and timestamp.

    my @rc = $bot->update_rc(5);
    foreach my $hashref (@rc) {
        my $title = $hashref->{'title'};
        print "$title\n";
    }

The L</"Options hashref"> is also available:

    # Use a callback for incremental processing:
    my $options = { hook => \&mysub, };
    $bot->update_rc($options);
    sub mysub {
        my ($res) = @_;
        foreach my $hashref (@$res) {
            my $page = $hashref->{'title'};
            print "$page\n";
        }
    }

=head2 recentchanges($wiki_hashref, $options_hashref)

Returns an array of hashrefs containing recentchanges data.

The first parameter is a hashref with the following keys:

=over 4

=item *

I<ns> - the namespace number, or an arrayref of numbers to specify several;
default is the main namespace

=item *

I<limit> - the number of rows to fetch; default is 50

=item *

I<user> - only list changes by this user

=item *

I<show> - itself a hashref where the key is a category and the value is a
boolean. If true, the category will be included; if false, excluded. The
categories are kinds of edits: minor, bot, anon, redirect, patrolled. See
"rcshow" at L<API:Recentchanges|https://www.mediawiki.org/wiki/API:Recentchanges>.

=back

An L</"Options hashref"> can be used as the second parameter:

    my @rc = $bot->recentchanges({ ns => 4, limit => 100 });
    foreach my $hashref (@rc) {
        print $hashref->{title} . "\n";
    }

    # Or, use a callback for incremental processing:
    $bot->recentchanges({ ns => [0,1], limit => 500 }, { hook => \&mysub });
    sub mysub {
        my ($res) = @_;
        foreach my $hashref (@$res) {
            my $page = $hashref->{title};
            print "$page\n";
        }
    }

The hashref returned might contain the following keys:

=over 4

=item *

I<ns> - the namespace number

=item *

I<revid>

=item *

I<old_revid>

=item *

I<timestamp>

=item *

I<rcid> - can be used with L</patrol>

=item *

I<pageid>

=item *

I<type> - one of edit, new, log (there may be others)

=item *

I<title>

=back

For backwards compatibility, the previous method signature is still
supported:

    $bot->recentchanges($ns, $limit, $options_hashref);

B<References:> L<API:Recentchanges|https://www.mediawiki.org/wiki/API:Recentchanges>

=head2 what_links_here

Returns an array containing a list of all pages linking to $page.

Additional optional parameters are:

=over 4

=item *

One of: all (default), redirects, or nonredirects.

=item *

A namespace number to search (pass an arrayref to search in multiple
namespaces)

=item *

An L</"Options hashref">.

=back

A typical query:

    my @links = $bot->what_links_here("Meta:Sandbox",
        undef, 1,
        { hook=>\&mysub }
    );
    sub mysub{
        my ($res) = @_;
        foreach my $hash (@$res) {
            my $title    = $hash->{'title'};
            my $is_redir = $hash->{'redirect'};
            print "Redirect: $title\n" if $is_redir;
            print "Page: $title\n" unless $is_redir;
        }
    }

Transclusions are no longer handled by what_links_here() - use
L</list_transclusions> instead.

B<References:> L<Listing incoming links|https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Listing-incoming-links>,
L<API:Backlinks|https://www.mediawiki.org/wiki/API:Backlinks>

=head2 list_transclusions

Returns an array containing a list of all pages transcluding $page.

Other parameters are:

=over 4

=item *

One of: all (default), redirects, or nonredirects

=item *

A namespace number to search (pass an arrayref to search in multiple
namespaces).

=item *

$options_hashref as described by L<MediaWiki::API>:

Set max to limit the number of queries performed.

Set hook to a subroutine reference to use a callback hook for incremental
processing.

Refer to the section on L</linksearch> for examples.

=back A typical query: $bot->list_transclusions("Template:Tlx", undef, 4, {hook => \&mysub}); sub mysub{ my ($res) = @_; foreach my $hash (@$res) { my $title = $hash->{'title'}; my $is_redir = $hash->{'redirect'}; print "Redirect: $title\n" if $is_redir; print "Page: $title\n" unless $is_redir; } } B<References:> L<Listing transclusions|https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Listing-transclusions> L<API:Embeddedin|https://www.mediawiki.org/wiki/API:Embeddedin> =head2 get_pages_in_category Returns an array containing the names of all pages in the specified category (include the Category: prefix). Does not recurse into sub-categories. my @pages = $bot->get_pages_in_category('Category:People on stamps of Gabon'); print "The pages in Category:People on stamps of Gabon are:\n@pages\n"; The options hashref is as described in L</"Options hashref">. Use C<< { max => 0 } >> to get all results. B<References:> L<Listing category contents|https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Listing-category-contents>, L<API:Categorymembers|https://www.mediawiki.org/wiki/API:Categorymembers> =head2 get_all_pages_in_category my @pages = $bot->get_all_pages_in_category($category, $options_hashref); Returns an array containing the names of B<all> pages in the specified category (include the Category: prefix), including sub-categories. The $options_hashref is described fully in L</"Options hashref">. B<References:> L<Listing category contents|https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Listing-category-contents>, L<API:Categorymembers|https://www.mediawiki.org/wiki/API:Categorymembers> =head2 get_all_categories Returns an array containing the names of all categories. my @categories = $bot->get_all_categories(); print "The categories are:\n@categories\n"; Use C<< { max => 0 } >> to get all results. The default number of categories returned is 10, the maximum allowed is 500. B<References:> L<API:Allcategories|https://www.mediawiki.org/wiki/API:Allcategories> =head2 linksearch Runs a linksearch on the specified $link and returns an array containing anonymous hashes with keys 'url' for the outbound URL, and 'title' for the page the link is on. Additional parameters are: =over 4 =item * A namespace number to search (pass an arrayref to search in multiple namespaces). =item * You can search by $protocol (http is default). =item * $options_hashref is fully documented in L</"Options hashref">: Set I<max> in $options to get more than one query's worth of results: my $options = { max => 10, }; # I only want some results my @links = $bot->linksearch("slashdot.org", 1, undef, $options); foreach my $hash (@links) { my $url = $hash->{'url'}; my $page = $hash->{'title'}; print "$page: $url\n"; } Set I<hook> to a subroutine reference to use a callback hook for incremental processing: my $options = { hook => \&mysub, }; # I want to do incremental processing $bot->linksearch("slashdot.org", 1, undef, $options); sub mysub { my ($res) = @_; foreach my $hashref (@$res) { my $url = $hashref->{'url'}; my $page = $hashref->{'title'}; print "$page: $url\n"; } } =back B<References:> L<Finding external links|https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Finding-external-links>, L<API:Exturlusage|https://www.mediawiki.org/wiki/API:Exturlusage> =head2 purge_page Purges the server cache of the specified $page. Returns true on success; false on failure. Pass an array reference to purge multiple pages. If you really care, a true return value is the number of pages successfully purged. 
You could check that it is the same as the number you wanted to purge - maybe
some pages don't exist, or you passed invalid titles, or you aren't allowed
to purge the cache:

    my @to_purge = ('Main Page', 'A', 'B', 'C', 'Very unlikely to exist');
    my $size = scalar @to_purge;

    print "all-at-once:\n";
    my $success = $bot->purge_page(\@to_purge);

    if ($success == $size) {
        print "@to_purge: OK ($success/$size)\n";
    }
    else {
        my $missed = @to_purge - $success;
        print "We couldn't purge $missed pages (list was: "
            . join(', ', @to_purge)
            . ")\n";
    }

    # OR
    print "\n\none-at-a-time:\n";
    foreach my $page (@to_purge) {
        my $ok = $bot->purge_page($page);
        print "$page: $ok\n";
    }

B<References:> L<Purging the server cache|https://github.com/MediaWiki-Bot/MediaWiki-Bot/wiki/Purging-the-server-cache>,
L<API:Purge|https://www.mediawiki.org/wiki/API:Purge>

=head2 get_namespace_names

    my %namespace_names = $bot->get_namespace_names();

Returns a hash linking the namespace id, such as 1, to its named equivalent,
such as "Talk".

B<References:> L<API:Meta#siteinfo|https://www.mediawiki.org/wiki/API:Meta#siteinfo_.2F_si>

=head2 image_usage

Gets a list of pages which include a certain $image. Include the C<File:>
namespace prefix to avoid incurring an extra round-trip (which will also emit
a deprecation warning).

Additional parameters are:

=over 4

=item *

A namespace number to fetch results from (or an arrayref of multiple
namespace numbers)

=item *

One of all, redirect, or nonredirects.

=item *

$options is a hashref as described in the section for L</linksearch>.

=back

    my @pages = $bot->image_usage("File:Albert Einstein Head.jpg");

Or, make use of the L</"Options hashref"> to do incremental processing:

    $bot->image_usage("File:Albert Einstein Head.jpg",
        undef, undef,
        { hook=>\&mysub, max=>5 }
    );
    sub mysub {
        my $res = shift;
        foreach my $page (@$res) {
            my $title = $page->{'title'};
            print "$title\n";
        }
    }

B<References:> L<API:Imageusage|https://www.mediawiki.org/wiki/API:Imageusage>

=head2 global_image_usage($image, $results, $filterlocal)

Returns an array of hashrefs of data about pages which use the given image.

    my @data = $bot->global_image_usage('File:Albert Einstein Head.jpg');

The keys in each hashref are title, url, and wiki. C<$results> is the maximum
number of results that will be returned (not the maximum number of requests
that will be sent, like C<max> in the L</"Options hashref">); the default is
to attempt to fetch 500 (set to 0 to get all results). C<$filterlocal> will
filter out local uses of the image.

B<References:> L<Extension:GlobalUsage#API|https://www.mediawiki.org/wiki/Extension:GlobalUsage#API>

=head2 links_to_image

A backward-compatible call to L</image_usage>. You can provide only the image
title.

B<This method is deprecated>, and will emit deprecation warnings.

=head2 is_blocked

    my $blocked = $bot->is_blocked('User:Mike.lifeguard');

Checks if a user is currently blocked.

B<References:> L<API:Blocks|https://www.mediawiki.org/wiki/API:Blocks>

=head2 test_blocked

Retained for backwards compatibility. Use L</is_blocked> for clarity.

B<This method is deprecated>, and will emit deprecation warnings.

=head2 test_image_exists

Checks if an image exists at $page.
=over 4

=item *

C<FILE_NONEXISTENT> (0) means "Nothing there"

=item *

C<FILE_LOCAL> (1) means "Yes, an image exists locally"

=item *

C<FILE_SHARED> (2) means "Yes, an image exists on L<Commons|http://commons.wikimedia.org>"

=item *

C<FILE_PAGE_TEXT_ONLY> (3) means "No image exists, but there is text on the page"

=back

If you pass in an arrayref of images, you'll get out an arrayref of results.

    use MediaWiki::Bot::Constants;
    my $exists = $bot->test_image_exists('File:Albert Einstein Head.jpg');
    if ($exists == FILE_NONEXISTENT) {
        print "Doesn't exist\n";
    }
    elsif ($exists == FILE_LOCAL) {
        print "Exists locally\n";
    }
    elsif ($exists == FILE_SHARED) {
        print "Exists on Commons\n";
    }
    elsif ($exists == FILE_PAGE_TEXT_ONLY) {
        print "Page exists, but no image\n";
    }

B<References:> L<API:Properties#imageinfo|https://www.mediawiki.org/wiki/API:Properties#imageinfo_.2F_ii>

=head2 get_pages_in_namespace

    $bot->get_pages_in_namespace($namespace, $limit, $options_hashref);

Returns an array containing the names of all pages in the specified
namespace. The $namespace_id must be a number, not a namespace name.

Setting $page_limit is optional, and specifies how many items to retrieve at
once. Setting this to 'max' is recommended, and this is the default if
omitted. If $page_limit is over 500, it will be rounded up to the next
multiple of 500. If $page_limit is set higher than you are allowed to use, it
will silently be reduced. Consider setting key 'max' in the
L</"Options hashref"> to retrieve multiple sets of results:

    # Gotta get 'em all!
    my @pages = $bot->get_pages_in_namespace(6, 'max', { max => 0 });

B<References:> L<API:Allpages|https://www.mediawiki.org/wiki/API:Allpages>

=head2 count_contributions

    my $count = $bot->count_contributions($user);

Uses the API to count $user's contributions.

B<References:> L<API:Users|https://www.mediawiki.org/wiki/API:Users>

=head2 timed_count_contributions

    ($timed_edits_count, $total_count) = $bot->timed_count_contributions($user, $days);

Uses the API to count $user's contributions over the last $days days, and the
total number of the user's contributions (if needed).

Example: If you want to get user contribs for the last 30 and 365 days, plus
the total number of edits, you would write something like this:

    my ($last30days, $total) = $bot->timed_count_contributions($user, 30);
    my $last365days = $bot->timed_count_contributions($user, 365);

You could also get the total number of edits by calling count_contributions
separately, like this:

    my $total = $bot->count_contributions($user);

and using timed_count_contributions only in scalar context, but that would
mean one more call to the server (meaning more server load), which you can
avoid because timed_count_contributions already returns both values.

B<References:> L<Extension:UserDailyContribs|https://www.mediawiki.org/wiki/Extension:UserDailyContribs>

=head2 last_active

    my $latest_timestamp = $bot->last_active($user);

Returns the last active time of $user in C<YYYY-MM-DDTHH:MM:SSZ>.

B<References:> L<API:Usercontribs|https://www.mediawiki.org/wiki/API:Usercontribs>

=head2 recent_edit_to_page

    my ($timestamp, $user) = $bot->recent_edit_to_page($title);

Returns timestamp and username for most recent (top) edit to $page.

B<References:> L<API:Properties#revisions|https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv>

=head2 get_users

    my @recent_editors = $bot->get_users($title, $limit, $revid, $direction);

Gets the most recent editors to $page, up to $limit, starting from $revision
and going in $direction.
B<References:> L<API:Properties#revisions|https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv>

=head2 was_blocked

    for ("Mike.lifeguard", "Jimbo Wales") {
        print "$_ was blocked\n" if $bot->was_blocked($_);
    }

Returns whether $user has ever been blocked.

B<References:> L<API:Logevents|https://www.mediawiki.org/wiki/API:Logevents>

=head2 test_block_hist

Retained for backwards compatibility. Use L</was_blocked> for clarity.

B<This method is deprecated>, and will emit deprecation warnings.

=head2 expandtemplates

    my $expanded = $bot->expandtemplates($title, $wikitext);

Expands templates on $page, using $text if provided, otherwise loading the
page text automatically.

B<References:> L<API:Parsing wikitext|https://www.mediawiki.org/wiki/API:Parsing_wikitext>

=head2 get_allusers

    my @users = $bot->get_allusers($limit, $user_group, $options_hashref);

Returns an array of all users. Default $limit is 500. Optionally specify a
$group (like 'sysop') to list that group only. The last optional parameter is
an L</"Options hashref">.

B<References:> L<API:Allusers|https://www.mediawiki.org/wiki/API:Allusers>

=head2 db_to_domain

Converts a wiki/database name (enwiki) to the domain name (en.wikipedia.org).

    my @wikis = ("enwiki", "kowiki", "bat-smgwiki", "nonexistent");
    foreach my $wiki (@wikis) {
        my $domain = $bot->db_to_domain($wiki);
        next if !defined($domain);
        print "$wiki: $domain\n";
    }

You can pass an arrayref to do bulk lookup:

    my @wikis = ("enwiki", "kowiki", "bat-smgwiki", "nonexistent");
    my $domains = $bot->db_to_domain(\@wikis);
    foreach my $domain (@$domains) {
        next if !defined($domain);
        print "$domain\n";
    }

B<References:> L<Extension:SiteMatrix|https://www.mediawiki.org/wiki/Extension:SiteMatrix>

=head2 domain_to_db

    my $db = $bot->domain_to_db($domain_name);

As you might expect, does the opposite of L</db_to_domain>: Converts a domain
name (meta.wikimedia.org) into a database/wiki name (metawiki).

B<References:> L<Extension:SiteMatrix|https://www.mediawiki.org/wiki/Extension:SiteMatrix>

=head2 diff

This allows retrieval of a diff from the API. The return is a scalar
containing the I<HTML table> of the diff. Options are passed as a hashref
with keys:

=over 4

=item *

I<title> is the title to use. Provide I<either> this or revid.

=item *

I<revid> is any revid to diff from. If you also specified title, only title
will be honoured.

=item *

I<oldid> is an identifier to diff to. This can be a revid, or the special
values 'cur', 'prev' or 'next'

=back
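For example (a sketch; the page title and revision ID are placeholders):

    # Diff a specific revision against the revision before it
    my $html_table = $bot->diff({ revid => 123456, oldid => 'prev' });
    print $html_table if defined $html_table;

    # Or diff by title instead of by revid
    my $html = $bot->diff({ title => 'Project:Sandbox', oldid => 'prev' });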
B<References:> L<API:Properties#revisions|https://www.mediawiki.org/wiki/API:Properties#revisions_.2F_rv>

=head2 prefixindex

This returns an array of hashrefs containing page titles that start with the
given $prefix. The hashref has keys 'title' and 'redirect' (present if the
page is a redirect, not present otherwise).

Additional parameters are:

=over 4

=item *

One of all, redirects, or nonredirects

=item *

A single namespace number (unlike linksearch etc, which can accept an
arrayref of numbers).

=item *

$options_hashref as described in L</"Options hashref">.

=back

    my @prefix_pages = $bot->prefixindex("User:Mike.lifeguard");
    # Or, the more efficient equivalent
    my @prefix_pages = $bot->prefixindex("Mike.lifeguard", 2);
    foreach my $hashref (@prefix_pages) {
        my $title = $hashref->{'title'};
        if ($hashref->{'redirect'}) {
            print "$title is a redirect\n";
        }
        else {
            print "$title is not a redirect\n";
        }
    }

B<References:> L<API:Allpages|https://www.mediawiki.org/wiki/API:Allpages>

=head2 search

This is a simple search for your $search_term in page text. It returns an
array of page titles matching.

Additional optional parameters are:

=over 4

=item *

A namespace number to search in, or an arrayref of numbers (default is the
main namespace)

=item *

$options_hashref is a hashref as described in L</"Options hashref">:

=back

    my @pages = $bot->search("Mike.lifeguard", 2);
    print "@pages\n";

Or, use a callback for incremental processing:

    my @pages = $bot->search("Mike.lifeguard", 2, { hook => \&mysub });
    sub mysub {
        my ($res) = @_;
        foreach my $hashref (@$res) {
            my $page = $hashref->{'title'};
            print "$page\n";
        }
    }

B<References:> L<API:Search|https://www.mediawiki.org/wiki/API:Search>

=head2 get_log

This fetches log entries, and returns results as an array of hashes. The
first parameter is a hashref with keys:

=over 4

=item *

I<type> is the log type (block, delete...)

=item *

I<user> is the user who I<performed> the action. Do not include the User:
prefix

=item *

I<target> is the target of the action. Where an action was performed to a
page, it is the page title. Where an action was performed to a user, it is
User:$username.

=back

The second is the familiar L</"Options hashref">.

    my $log = $bot->get_log({
        type => 'block',
        user => 'User:Mike.lifeguard',
    });
    foreach my $entry (@$log) {
        my $user = $entry->{'title'};
        print "$user\n";
    }

    $bot->get_log({
            type => 'block',
            user => 'User:Mike.lifeguard',
        },
        { hook => \&mysub, max => 10 }
    );
    sub mysub {
        my ($res) = @_;
        foreach my $hashref (@$res) {
            my $title = $hashref->{'title'};
            print "$title\n";
        }
    }

B<References:> L<API:Logevents|https://www.mediawiki.org/wiki/API:Logevents>

=head2 is_g_blocked

    my $is_globally_blocked = $bot->is_g_blocked('127.0.0.1');

Returns what IP/range block I<currently in place> affects the IP/range. The
return is a scalar of an IP/range if found (evaluates to true in boolean
context); undef otherwise (evaluates false in boolean context). Pass in a
single IP or CIDR range.

B<References:> L<Extension:GlobalBlocking|https://www.mediawiki.org/wiki/Extension:GlobalBlocking/API>

=head2 was_g_blocked

    print "127.0.0.1 was globally blocked\n" if $bot->was_g_blocked('127.0.0.1');

Returns whether an IP/range was ever globally blocked. You should probably
call this method only when your bot is operating on Meta - this method will
warn if not.

B<References:> L<API:Logevents|https://www.mediawiki.org/wiki/API:Logevents>

=head2 was_locked

    my $was_locked = $bot->was_locked('Mike.lifeguard');

Returns whether a user was ever locked. You should probably call this method
only when your bot is operating on Meta - this method will warn if not.

B<References:> L<API:Logevents|https://www.mediawiki.org/wiki/API:Logevents>

=head2 get_protection

Returns data on page protection as an array of up to two hashrefs. Each
hashref has a type, level, and expiry. Levels are 'sysop' and
'autoconfirmed'; types are 'move' and 'edit'; expiry is a timestamp.
Additionally, the key 'cascade' will exist if cascading protection is used.

my $page = 'Main Page'; $bot->edit({ page => $page, text => rand(), summary => 'test', }) unless $bot->get_protection($page); You can also pass an arrayref of page titles to do bulk queries: my @pages = ('Main Page', 'User:Mike.lifeguard', 'Project:Sandbox'); my $answer = $bot->get_protection(\@pages); foreach my $title (keys %$answer) { my $protected = $answer->{$title}; print "$title is protected\n" if $protected; print "$title is unprotected\n" unless $protected; } B<References:> L<API:Properties#info|https://www.mediawiki.org/wiki/API:Properties#info_.2F_in> =head2 is_protected This is a synonym for L</get_protection>, which should be used in preference. B<This method is deprecated>, and will emit deprecation warnings. =head2 patrol $bot->patrol($rcid); Marks a page or revision identified by the $rcid as patrolled. To mark several RCIDs as patrolled, you may pass an arrayref of them. Returns false and sets C<< $bot->{error} >> if the account cannot patrol. B<References:> L<API:Patrol|https://www.mediawiki.org/wiki/API:Patrol> =head2 email $bot->email($user, $subject, $body); This allows you to send emails through the wiki. All 3 of $user (without the User: prefix), $subject and $body are required. If $user is an arrayref, this will send the same email (subject and body) to all users. B<References:> L<API:Email|https://www.mediawiki.org/wiki/API:Email> =head2 top_edits Returns an array of the page titles where the $user is the latest editor. The second parameter is the familiar L<$options_hashref|/linksearch>. my @pages = $bot->top_edits("Mike.lifeguard", {max => 5}); foreach my $page (@pages) { $bot->rollback($page, "Mike.lifeguard"); } Note that accessing the data with a callback happens B<before> filtering the top edits is done. For that reason, you should use L</contributions> if you need to use a callback. If you use a callback with top_edits(), you B<will not> necessarily get top edits returned. It is only safe to use a callback if you I<check> that it is a top edit: $bot->top_edits("Mike.lifeguard", { hook => \&rv }); sub rv { my $data = shift; foreach my $page (@$data) { if (exists($page->{'top'})) { $bot->rollback($page->{'title'}, "Mike.lifeguard"); } } } B<References:> L<API:Usercontribs|https://www.mediawiki.org/wiki/API:Usercontribs> =head2 contributions my @contribs = $bot->contributions($user, $namespace, $options); Returns an array of hashrefs of data for the user's contributions. $ns can be an arrayref of namespace numbers. $options can be specified as in L</linksearch>. Specify an arrayref of users to get results for multiple users. B<References:> L<API:Usercontribs|https://www.mediawiki.org/wiki/API:Usercontribs> =head2 upload $bot->upload({ data => $file_contents, summary => 'uploading file' }); $bot->upload({ file => $file_name, title => 'Target filename.png' }); Upload a file to the wiki. Specify the file by either giving the filename, which will be read in, or by giving the data directly. B<References:> L<API:Upload|https://www.mediawiki.org/wiki/API:Upload> =head2 upload_from_url Upload file directly from URL to the wiki. Specify URL, the new filename and summary. Summary and new filename are optional. 
    $bot->upload_from_url({
        url => 'http://some.domain.ext/pic.png',
        title => 'Target_filename.png',
        summary => 'uploading new pic',
    });

If uploading from URL is enabled on your target wiki, meaning
C<$wgAllowCopyUploads> is set to true in LocalSettings.php, and you have the
appropriate user rights, you can use this function to upload files to your
wiki directly from a remote server.

B<References:> L<API:Upload#Uploading_from_URL|https://www.mediawiki.org/wiki/API:Upload#Uploading_from_URL>

=head2 usergroups

Returns a list of the usergroups a user is in:

    my @usergroups = $bot->usergroups('Mike.lifeguard');

B<References:> L<API:Users|https://www.mediawiki.org/wiki/API:Users>

=head2 Options hashref

This is passed through to the lower-level interface L<MediaWiki::API>, and is
fully documented there.

The hashref can have 3 keys:

=over 4

=item max

Specifies the maximum number of queries to retrieve data from the wiki. This
is independent of the I<size> of each query (how many items each query
returns). Set to 0 to retrieve all the results.

=item hook

Specifies a coderef to a hook function that can be used to process large
lists as they come in. When this is used, your subroutine will get the raw
data. This is noted in cases where it is known to be significant. For
example, when using a hook with C<top_edits()>, you need to check whether the
edit is the top edit yourself - your subroutine gets results as they come in,
and before they're filtered.

=item skip_encoding

MediaWiki's API uses UTF-8 and any 8 bit character string parameters are
encoded automatically by the API call. If your parameters are already in
UTF-8 this will be detected and the encoding will be skipped. If your
parameters for some reason contain UTF-8 data but no UTF-8 flag is set (i.e.
you did not use the C<< use L<utf8>; >> pragma) you should prevent
re-encoding by passing an option C<< skip_encoding => 1 >>. For example:

    $category = "Cat\x{e9}gorie:moyen_fran\x{e7}ais"; # latin1 string
    $bot->get_all_pages_in_category($category); # OK

    $category = "Cat". pack("U", 0xe9)."gorie:moyen_fran".pack("U",0xe7)."ais"; # unicode string
    $bot->get_all_pages_in_category($category); # OK

    $category = "Cat\x{c3}\x{a9}gorie:moyen_fran\x{c3}\x{a7}ais"; # UTF-8 bytes without the utf8 flag
    # $bot->get_all_pages_in_category($category); # NOT OK
    $bot->get_all_pages_in_category($category, { skip_encoding => 1 }); # OK

If you need this, it probably means you're doing something wrong. Feel free
to ask for help.

=back

=head1 ERROR HANDLING

All functions will return undef in any handled error situation. Further error
data is stored in C<< $bot->{error}->{code} >> and
C<< $bot->{error}->{details} >>.

Error codes are provided as constants in L<MediaWiki::Bot::Constants>, and can
also be imported through this module:

    use MediaWiki::Bot qw(:constants);
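Having imported the constants as above, a minimal sketch of checking these
fields after a failed call might look like this (the page title is a
placeholder):

    my $text = $bot->get_text('Some page');
    if (not defined $text and defined $bot->{error}) {
        # $bot->{error}->{code} matches one of the exported error constants
        warn sprintf "Call failed with code %s: %s\n",
            $bot->{error}->{code}, $bot->{error}->{details};
    }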
=head1 AVAILABILITY

The project homepage is L<https://metacpan.org/module/MediaWiki::Bot>.

The latest version of this module is available from the Comprehensive Perl
Archive Network (CPAN). Visit L<http://www.perl.com/CPAN/> to find a CPAN
site near you, or see L<https://metacpan.org/module/MediaWiki::Bot/>.

=head1 SOURCE

The development version is on github at
L<http://github.com/MediaWiki-Bot/MediaWiki-Bot> and may be cloned from
L<git://github.com/MediaWiki-Bot/MediaWiki-Bot.git>

=head1 BUGS AND LIMITATIONS

You can make new bug reports, and view existing ones, through the web
interface at L<https://github.com/MediaWiki-Bot/MediaWiki-Bot/issues>.

=head1 AUTHORS

=over 4

=item *

Dan Collins <dcollins@cpan.org>

=item *

Mike.lifeguard <lifeguard@cpan.org>

=item *

Alex Rowe <alex.d.rowe@gmail.com>

=item *

Oleg Alexandrov <oleg.alexandrov@gmail.com>

=item *

jmax.code <jmax.code@gmail.com>

=item *

Stefan Petrea <stefan.petrea@gmail.com>

=item *

kc2aei <kc2aei@gmail.com>

=item *

bosborne@alum.mit.edu

=item *

Brian Obio <brianobio@gmail.com>

=item *

patch and bug report contributors

=back

=head1 COPYRIGHT AND LICENSE

This software is Copyright (c) 2016 by the MediaWiki::Bot team <perlwikibot@googlegroups.com>.

This is free software, licensed under:

  The GNU General Public License, Version 3, June 2007

=cut