# forked from github/kensanata.oddmuse
#!/usr/bin/perl -w
#
# Copyright (C) 2004, 2005 Alex Schroeder <alex@emacswiki.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.
# 59 Temple Place, Suite 330
# Boston, MA 02111-1307 USA
use strict;
use warnings;
use Getopt::Std;
use LWP::UserAgent;
# Usage text shown (via die) when the command line is incomplete.
my $usage = join('',
  "$0 [-i URL] [-t SECONDS] SOURCE TARGET\n",
  "Where SOURCE and TARGET are the base URLs for the two wikis.\n",
  "SECONDS is the number of seconds to wait between requests.\n",
  "URL is the raw URL returning page names if you want to copy it all.\n",
  "Example:\n\n",
  "wikicopy -i 'http://www.emacswiki.org/cgi-bin/alex?action=index;raw=1' \\\n",
  "          http://www.emacswiki.org/cgi-bin/alex \\\n",
  "          http://localhost/cgi-bin/wiki.pl\n");
# Percent-encode a string for use inside a URL (page names, mostly).
# Characters in the unreserved set below — plus '#', which Oddmuse
# allows in page links — pass through unchanged; every other byte
# becomes %xx with lowercase hex digits, matching the original output.
#
# Fix: the old guard `return '' unless $str;` treated the page name
# "0" as false and returned '' for it; test length() instead.
# Also: membership is now a hash lookup instead of running a regex
# grep over the whole safe set for every single character.
sub UrlEncode {
  my $str = shift;
  return '' unless defined $str and length $str;
  my %safe = map { $_ => 1 }
    ('a' .. 'z', 'A' .. 'Z', '0' .. '9',
     '-', '_', '.', '!', '~', '*', "'", '(', ')', '#');
  my $result = '';
  foreach my $letter (split //, $str) {
    $result .= $safe{$letter} ? $letter : sprintf('%%%02x', ord $letter);
  }
  return $result;
}
# Download $uri and return the raw response body.
# Returns nothing (undef in scalar context) when the request fails.
sub GetRaw {
  my ($uri) = @_;
  my $response = LWP::UserAgent->new->get($uri);
  return unless $response->is_success;
  return $response->content;
}
# Save $data as the new raw text of page $id on the wiki at $uri.
# Warns (but carries on) when the POST is rejected.
sub PostRaw {
  my ($uri, $id, $data) = @_;
  # These form fields are what Oddmuse expects; other wiki engines
  # will need different parameter names.
  my %form = (title => $id, text => $data, raw => 1);
  my $response = LWP::UserAgent->new->post($uri, \%form);
  warn "POST $id failed.\n" unless $response->is_success;
}
# Copy each named page from the source wiki to the target wiki,
# sleeping $interval seconds between pages so we don't hammer either
# server. Pages whose GET yields no data are warned about and skipped.
#
# Fix: the old `if (not $data)` skipped a page whose entire content
# was the single character "0" (false in Perl); check defined/length.
sub copy {
  my ($source, $target, $interval, @pages) = @_;
  foreach my $page (@pages) {
    print $page, "\n";
    $page = UrlEncode($page);
    # Oddmuse raw-browse URL; adjust for other wiki engines.
    my $data = GetRaw("$source?action=browse;id=$page;raw=1");
    unless (defined $data and length $data) {
      warn "GET $page returned no data.\n";
      next;
    }
    PostRaw($target, $page, $data);
    sleep($interval);
  }
}
# Parse the command line, assemble the list of page names (from the
# -i index URL or from stdin), and hand everything to copy().
#
# Fixes: stdin page names are now chomped — previously each name kept
# its trailing newline, which UrlEncode turned into %0a and broke the
# GET for every page; and a failed index fetch now dies with a message
# instead of letting split() warn on undef.
sub main {
  our ($opt_i, $opt_t);
  getopts('i:t:');
  my $interval = $opt_t ? $opt_t : 5;    # default: 5 seconds between pages
  my ($source, $target) = @ARGV;
  die $usage unless $target;
  my @pages = ();
  if ($opt_i) {
    my $data = GetRaw($opt_i);
    die "Unable to fetch the page index from $opt_i\n"
      unless defined $data;
    @pages = split(/\n/, $data);
  } else {
    print "List of pages:\n";
    while (my $line = <STDIN>) {
      chomp $line;                       # keep newlines out of page names
      push(@pages, $line) if length $line;
    }
  }
  die "The list of pages is missing. Did you use -i?\n" unless @pages;
  copy($source, $target, $interval, @pages);
}
# Entry point.
main();