# Files
# oddmuse/wiki.pl
#
# 4087 lines
# 157 KiB
# Perl
# Raw Normal View History
#! /usr/bin/perl
# 2015-03-06 10:08:14 +01:00
# Copyright (C) 2001-2015
# Alex Schroeder <alex@gnu.org>
# 2015-03-06 10:08:14 +01:00
# Copyright (C) 2014-2015
# Alex Jakimenko <alex.jakimenko@gmail.com>
# Copyleft 2008 Brian Curry <http://www.raiazome.com>
# ... including lots of patches from the UseModWiki site
# Copyright (C) 2001, 2002 various authors
# ... which was based on UseModWiki version 0.92 (April 21, 2001)
# Copyright (C) 2000, 2001 Clifford A. Adams
# <caadams@frontiernet.net> or <usemod@usemod.com>
# ... which was based on the GPLed AtisWiki 0.3
# Copyright (C) 1998 Markus Denker <marcus@ira.uka.de>
# ... which was based on the LGPLed CVWiki CVS-patches
# Copyright (C) 1997 Peter Merel
# ... and The Original WikiWikiWeb
# Copyright (C) 1996, 1997 Ward Cunningham <ward@c2.com>
# (code reused with permission)
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# 2003-03-21 13:50:35 +00:00
package OddMuse;
use strict;
use warnings;
no warnings 'numeric';
no warnings 'uninitialized';
use utf8; # in case anybody ever adds UTF8 characters to the source
use CGI qw/-utf8/;
use CGI::Carp qw(fatalsToBrowser);
use File::Glob ':glob';
use Encode qw(encode_utf8 decode_utf8);
use sigtrap 'handler' => \&HandleSignals, 'normal-signals', 'error-signals';
local $| = 1; # Do not buffer output (localized for mod_perl)

# Options:
our ($ScriptName, $FullUrl, $PageDir, $TempDir, $LockDir, $KeepDir, $RssDir,
$RcFile, $RcOldFile, $IndexFile, $NoEditFile, $VisitorFile, $DeleteFile, $RssLicense,
$FreeLinkPattern, $LinkPattern, $FreeInterLinkPattern, $InterLinkPattern,
$UrlPattern, $FullUrlPattern, $InterSitePattern,
$UrlProtocols, $ImageExtensions, $LastUpdate,
%LockOnCreation, %PlainTextPages, %AdminPages,
@MyAdminCode, @MyFormChanges, @MyInitVariables, @MyMacros, @MyMaintenance,
$DocumentHeader, %HtmlEnvironmentContainers, $FS, $Counter, @Debugging);

# Internal variables:
our ($q, $bol, $OpenPageName, %Page, %Translate, %IndexHash, @IndexList,
@HtmlStack, @HtmlAttrStack, @Blocks, @Flags,
%Includes, $FootnoteNumber, $CollectingJournal, $HeaderIsPrinted,
%Locks, $Fragment, $Today, $ModulesDescription, %RssInterwikiTranslate,
$Message, $Now, %RecentVisitors, %MyInc, $WikiDescription, %InterSite, %OldCookie);

# Can be set outside the script: $DataDir, $UseConfig, $ConfigFile, $ModuleDir,
# $ConfigPage, $AdminPass, $EditPass, $ScriptName, $FullUrl, $RunCGI.
# 1 = load config file in the data directory
our $UseConfig //= 1;
# Main wiki directory
our $DataDir;
$DataDir ||= decode_utf8($ENV{WikiDataDir}) if $UseConfig;
$DataDir ||= '/tmp/oddmuse'; # FIXME: /var/opt/oddmuse/wiki ?
our $ConfigFile;
$ConfigFile ||= $ENV{WikiConfigFile} if $UseConfig;
our $ModuleDir;
$ModuleDir ||= $ENV{WikiModuleDir} if $UseConfig;
our $ConfigPage ||= '';
# 1 = Run script as CGI instead of loading as module
our $RunCGI //= 1;
# 1 = allow page views using wiki.pl/PageName
our $UsePathInfo = 1;
# -1 = disabled, 0 = 10s; 1 = partial HTML cache; 2 = HTTP/1.1 caching
our $UseCache = 2;
our $SiteName = 'Wiki'; # Name of site (used for titles)
our $HomePage = 'HomePage'; # Home page
our $CookieName = 'Wiki'; # Name for this wiki (for multi-wiki sites)
our $MaxPost = 1024 * 210; # Maximum 210K posts (about 200K for pages)
our $StyleSheet = ''; # URL for CSS stylesheet (like '/wiki.css')
our $StyleSheetPage = ''; # Page for CSS sheet
our $LogoUrl = ''; # URL for site logo ('' for no logo)
our $NotFoundPg = ''; # Page for not-found links ('' for blank pg)
our $EditAllowed = 1; # 0 = no, 1 = yes, 2 = comments pages only, 3 = comments only
our $AdminPass //= ''; # Whitespace separated passwords.
our $EditPass //= ''; # Whitespace separated passwords.
our $PassHashFunction //= ''; # Name of the function to create hashes
our $PassSalt //= ''; # Salt will be added to any password before hashing
our $BannedHosts = 'BannedHosts'; # Page for banned hosts
our $BannedCanRead = 1; # 1 = banned cannot edit, 0 = banned cannot read
our $BannedContent = 'BannedContent'; # Page for banned content (usually for link-ban)
our $WikiLinks = ''; # 1 = LinkPattern is a link
our $FreeLinks = 1; # 1 = [[some text]] is a link
our $UseQuestionmark = 1; # 1 = append questionmark to links to nonexisting pages
our $BracketText = 1; # 1 = [URL desc] uses a description for the URL
our $BracketWiki = 1; # 1 = [WikiLink desc] uses a desc for the local link
our $NetworkFile = 1; # 1 = file: is a valid protocol for URLs
our $AllNetworkFiles = 0; # 1 = file:///foo is allowed -- the default allows only file://foo
our $InterMap = 'InterMap'; # name of the intermap page, '' = disable
our $RssInterwikiTranslate = 'RssInterwikiTranslate'; # name of RSS interwiki translation page, '' = disable
$ENV{PATH} = '/bin:/usr/bin'; # Path used to find 'diff' and 'grep'
our $UseDiff = 1; # 1 = use diff
our $SurgeProtection = 1; # 1 = protect against leeches
our $SurgeProtectionTime = 20; # Size of the protected window in seconds
our $SurgeProtectionViews = 20; # How many page views to allow in this window
our $DeletedPage = 'DeletedPage'; # Pages starting with this can be deleted
our $RCName = 'RecentChanges'; # Name of changes page
our @RcDays = qw(1 3 7 30 90); # Days for links on RecentChanges
our $RcDefault = 30; # Default number of RecentChanges days
our $KeepDays = 0; # Days to keep old revisions (0 means keep forever)
our $KeepMajor = 1; # 1 = keep at least one major rev when expiring pages
our $SummaryHours = 4; # Hours to offer the old subject when editing a page
our $SummaryDefaultLength = 150; # Length of default text for summary (0 to disable)
our $ShowEdits = 0; # 1 = major and show minor edits in recent changes
our $ShowAll = 0; # 1 = show multiple edits per page in recent changes
our $ShowRollbacks = 0; # 1 = show rollbacks in recent changes
our $RecentLink = 1; # 1 = link to usernames
our $PageCluster = ''; # name of cluster page, eg. 'Cluster' to enable
our $InterWikiMoniker = ''; # InterWiki prefix for this wiki for RSS
our $SiteDescription = ''; # RSS Description of this wiki
our $RssStrip = '^\d\d\d\d-\d\d-\d\d_'; # Regexp to strip from feed item titles
our $RssImageUrl = $LogoUrl; # URL to image to associate with your RSS feed
our $RssRights = ''; # Copyright notice for RSS, usually an URL to the appropriate text
our $RssExclude = 'RssExclude'; # name of the page that lists pages to be excluded from the feed
our $RssCacheHours = 1; # How many hours to cache remote RSS files
our $RssStyleSheet = ''; # External style sheet for RSS files
our $UploadAllowed = 0; # 1 = yes, 0 = administrators only
our @UploadTypes = ('image/jpeg', 'image/png'); # MIME types allowed, all allowed if empty list
our $EmbedWiki = 0; # 1 = no headers/footers
our $FooterNote = ''; # HTML for bottom of every page
our $EditNote = ''; # HTML notice above buttons on edit page
our $TopLinkBar = 1; # 0 = goto bar both at the top and bottom; 1 = top, 2 = bottom
our $TopSearchForm = 1; # 0 = search form both at the top and bottom; 1 = top, 2 = bottom
our $MatchingPages = 0; # 1 = search page content and page titles
our @UserGotoBarPages = (); # List of pagenames
our $UserGotoBar = ''; # HTML added to end of goto bar
our $CommentsPrefix = ''; # prefix for comment pages, eg. 'Comments_on_' to enable
our $CommentsPattern = undef; # regex used to match comment pages
our $HtmlHeaders = ''; # Additional stuff to put in the HTML <head> section
our $IndentLimit = 20; # Maximum depth of nested lists
our $LanguageLimit = 3; # Number of matches req. for each language
our $JournalLimit = 200; # how many pages can be collected in one go?
our $PageNameLimit = 120; # max length of page name in bytes
$DocumentHeader = qq(<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN")
. qq( "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n)
. qq(<html xmlns="http://www.w3.org/1999/xhtml">);
our @MyFooters = (\&GetCommentForm, \&WrapperEnd, \&DefaultFooter);
# Checkboxes at the end of the index.
our @IndexOptions = ();

# Display short comments below the GotoBar for special days
# Example: %SpecialDays = ('1-1' => 'New Year', '1-2' => 'Next Day');
our %SpecialDays = ();

# Replace regular expressions with inlined images
# Example: %Smilies = (":-?D(?=\\W)" => '/pics/grin.png');
our %Smilies = ();

# Detect page languages when saving edits
# Example: %Languages = ('de' => '\b(der|die|das|und|oder)\b');
our %Languages = ();
our @KnownLocks = qw(main diff index merge visitors); # locks to remove
our $LockExpiration = 60; # How long before expirable locks are expired
our %LockExpires = (diff=>1, index=>1, merge=>1, visitors=>1); # locks to expire after some time
our %LockCleaners = (); # What to do if a job under a lock gets a signal like SIGINT. e.g. 'diff' => \&CleanDiff
our %CookieParameters = (username=>'', pwd=>'', homepage=>'', theme=>'', css=>'', msg=>'', lang=>'', embed=>$EmbedWiki,
toplinkbar=>$TopLinkBar, topsearchform=>$TopSearchForm, matchingpages=>$MatchingPages, );
our %Action = (rc => \&BrowseRc, rollback => \&DoRollback,
browse => \&BrowseResolvedPage, maintain => \&DoMaintain,
random => \&DoRandom, pagelock => \&DoPageLock,
history => \&DoHistory, editlock => \&DoEditLock,
edit => \&DoEdit, version => \&DoShowVersion,
download => \&DoDownload, rss => \&DoRss,
unlock => \&DoUnlock, password => \&DoPassword,
index => \&DoIndex, admin => \&DoAdminPage,
clear => \&DoClearCache, debug => \&DoDebug,
contrib => \&DoContributors, more => \&DoJournal);
our @MyRules = (\&LinkRules, \&ListRule); # don't set this variable, add to it!
our %RuleOrder = (\&LinkRules => 0, \&ListRule => 0);
# The 'main' program, called at the end of this script file (aka. as handler)
# Entry point for a single wiki request: initialize state, apply surge
# protection, enforce read bans (unless the visitor is an editor), then
# dispatch to the requested action.
sub DoWikiRequest {
  Init();
  DoSurgeProtection();
  if (not $BannedCanRead and UserIsBanned() and not UserIsEditor()) {
    # ReportError prints the error page and exits.
    ReportError(T('Reading not allowed: user, ip, or network is blocked.'), '403 FORBIDDEN',
		0, $q->p(ScriptLink('action=password', T('Login'), 'password')));
  }
  DoBrowseRequest();
}
sub ReportError { # fatal!
  # Print an HTML error page with the given HTTP status line, optionally
  # dump the request to $TempDir/error for debugging, release every lock
  # this process holds, and terminate the request.
  my ($errmsg, $status, $log, @html) = @_;
  InitRequest(); # make sure we can report errors before InitRequest
  print GetHttpHeader('text/html', 'nocache', $status), GetHtmlHeader(T('Error')),
    $q->start_div({class=>"error"}), $q->h1(QuoteHtml($errmsg)), @html, $q->end_div,
    $q->end_html, "\n\n"; # newlines for FCGI because of exit()
  WriteStringToFile("$TempDir/error", '<body>' . $q->h1("$status $errmsg") . $q->Dump) if $log;
  # Release all held locks; use a for loop instead of map in void context.
  ReleaseLockDir($_) for keys %Locks;
  exit 2;
}
sub Init {
  # Set up one request from scratch. The call order below is significant:
  # modules load before the config so the config can override module
  # variables, and cookies are read after $q exists.
  binmode(STDOUT, ':encoding(UTF-8)'); # this is where the HTML gets printed
  binmode(STDERR, ':encoding(UTF-8)'); # just in case somebody prints debug info to stderr
  InitDirConfig();
  $FS = "\x1e"; # The FS character is the RECORD SEPARATOR control char in ASCII
  $Message = ''; # Warnings and non-fatal errors.
  InitLinkPatterns(); # Link pattern can be changed in config files
  InitModules(); # Modules come first so that users can change module variables in config
  InitConfig(); # Config comes as early as possible; remember $q is not available here
  InitRequest(); # get $q with $MaxPost; set these in the config file
  InitCookie(); # After InitRequest, because $q is used
  InitVariables(); # After config, to change variables, after InitCookie for GetParam
}
sub InitModules {
  # Load every *.pm / *.pl extension from $ModuleDir exactly once per
  # process; load errors are collected in $Message, not fatal.
  if ($UseConfig and $ModuleDir and IsDir($ModuleDir)) {
    foreach my $lib (Glob("$ModuleDir/*.p[ml]")) {
      if (not $MyInc{$lib}) {
	$MyInc{$lib} = 1; # Cannot use %INC in mod_perl settings
	my $file = encode_utf8($lib); # do() wants bytes, not decoded text
	do $file;
	$Message .= CGI::p("$lib: $@") if $@; # no $q exists, yet
      }
    }
  }
}
sub InitConfig {
  # Run the config file (arbitrary Perl), then optionally eval the text
  # of a config page stored inside the wiki itself. Errors from either
  # are appended to $Message rather than aborting the request.
  if ($UseConfig and $ConfigFile and not $INC{$ConfigFile} and IsFile($ConfigFile)) {
    do $ConfigFile; # these options must be set in a wrapper script or via the environment
    $Message .= CGI::p("$ConfigFile: $@") if $@; # remember, no $q exists, yet
  }
  if ($ConfigPage) { # $FS and $MaxPost must be set in config file!
    my ($status, $data) = ReadFile(GetPageFile(FreeToNormal($ConfigPage)));
    my $page = ParseData($data); # before InitVariables so GetPageContent won't work
    eval $page->{text} if $page->{text}; # perlcritic dislikes the use of eval here but we really mean it
    $Message .= CGI::p("$ConfigPage: $@") if $@;
  }
}
sub InitDirConfig {
  # Derive all file and directory paths from $DataDir. $ConfigFile and
  # $ModuleDir keep any value set earlier (wrapper script or environment).
  $PageDir = "$DataDir/page"; # Stores page data
  $KeepDir = "$DataDir/keep"; # Stores kept (old) page data
  $TempDir = "$DataDir/temp"; # Temporary files and locks
  $LockDir = "$TempDir/lock"; # DB is locked if this exists
  $NoEditFile = "$DataDir/noedit"; # Indicates that the site is read-only
  $RcFile = "$DataDir/rc.log"; # New RecentChanges logfile
  $RcOldFile = "$DataDir/oldrc.log"; # Old RecentChanges logfile
  $IndexFile = "$DataDir/pageidx"; # List of all pages
  $VisitorFile = "$DataDir/visitors.log"; # List of recent visitors
  $DeleteFile = "$DataDir/delete.log"; # Deletion logfile
  $RssDir = "$DataDir/rss"; # For rss feed cache
  $ConfigFile ||= "$DataDir/config"; # Config file with Perl code to execute
  $ModuleDir ||= "$DataDir/modules"; # For extensions (ending in .pm or .pl)
}
sub InitRequest { # set up $q
  # Cap request body size before constructing the CGI object, then create
  # $q unless a persistent environment already did.
  $CGI::POST_MAX = $MaxPost;
  $q ||= CGI->new; # direct method call, not indirect object syntax
}
sub InitVariables { # Init global session variables for mod_perl!
  # Reset all per-request globals; a persistent process reuses this sub
  # on every request, so nothing here may survive from the previous one.
  $WikiDescription = $q->p($q->a({-href=>'https://www.oddmuse.org/'}, 'Oddmuse'),
			   $Counter++ > 0 ? Ts('%s calls', $Counter) : '');
  $WikiDescription .= $ModulesDescription if $ModulesDescription;
  $HeaderIsPrinted = 0; # print HTTP headers only once
  $ScriptName //= $q->url(); # URL used in links
  $FullUrl ||= $ScriptName; # URL used in forms
  %Locks = ();
  @Blocks = ();
  @Flags = ();
  $Fragment = '';
  %RecentVisitors = ();
  $OpenPageName = ''; # Currently open page
  my $add_space = $CommentsPrefix =~ /[ \t_]$/;
  $$_ = FreeToNormal($$_) for # convert spaces to underscores on all configurable pagenames
    (\$HomePage, \$RCName, \$BannedHosts, \$InterMap, \$StyleSheetPage, \$CommentsPrefix,
     \$ConfigPage, \$NotFoundPg, \$RssInterwikiTranslate, \$BannedContent, \$RssExclude, );
  $CommentsPrefix .= '_' if $add_space; # FreeToNormal stripped the trailing separator
  $CommentsPattern = "^$CommentsPrefix(.*)" unless defined $CommentsPattern or not $CommentsPrefix;
  @UserGotoBarPages = ($HomePage, $RCName) unless @UserGotoBarPages;
  my @pages = sort($BannedHosts, $StyleSheetPage, $ConfigPage, $InterMap,
		   $RssInterwikiTranslate, $BannedContent);
  %AdminPages = map { $_ => 1} @pages, $RssExclude unless %AdminPages;
  %LockOnCreation = map { $_ => 1} @pages unless %LockOnCreation;
  %PlainTextPages = ($BannedHosts => 1, $BannedContent => 1,
		     $StyleSheetPage => 1, $ConfigPage => 1) unless %PlainTextPages;
  delete $PlainTextPages{''}; # $ConfigPage and others might be empty.
  CreateDir($DataDir); # Create directory if it doesn't exist
  $Now = time; # Reset in case script is persistent
  my $ts = Modified($IndexFile); # always stat for multiple server processes
  ReInit() if not $ts or $LastUpdate != $ts; # reinit if another process changed files (requires $DataDir)
  $LastUpdate = $ts;
  # Coderef comparison is numeric on purpose ('no warnings numeric' above).
  unshift(@MyRules, \&MyRules) if defined(&MyRules) && (not @MyRules or $MyRules[0] != \&MyRules);
  @MyRules = sort {$RuleOrder{$a} <=> $RuleOrder{$b}} @MyRules; # default is 0
  ReportError(Ts('Cannot create %s', $DataDir) . ": $!", '500 INTERNAL SERVER ERROR') unless IsDir($DataDir);
  @IndexOptions = (['pages', T('Include normal pages'), 1, \&AllPagesList]);
  foreach my $sub (@MyInitVariables) {
    $sub->(); # return value unused; hooks report problems via $@
    $Message .= $q->p($@) if $@;
  }
}
sub ReInit { # init everything we need if we want to link to stuff
  # Refresh cached data after another process changed the data directory.
  my $id = shift; # when saving a page, what to do depends on the page being saved
  AllPagesList() unless $id;
  InterInit() if $InterMap and (not $id or $id eq $InterMap);
  %RssInterwikiTranslate = () if not $id or $id eq $RssInterwikiTranslate; # special since rarely used
}
sub InitCookie {
  # Read the wiki cookie and copy its values into CGI parameters, but
  # only for keys the request itself did not already provide.
  undef $q->{'.cookies'}; # Clear cache if it exists (for SpeedyCGI)
  my $cookie = $q->cookie($CookieName);
  %OldCookie = split(/$FS/, UrlDecode($cookie));
  my %provided = map { $_ => 1 } $q->param;
  for my $key (keys %OldCookie) {
    SetParam($key, $OldCookie{$key}) unless $provided{$key};
  }
  CookieUsernameFix();
  CookieRollbackFix();
}
sub CookieUsernameFix {
  # Only valid usernames get stored in the new cookie.
  # A username must look like a page name and be at most 50 characters.
  my $name = GetParam('username', '');
  $q->delete('username');
  if (not $name) {
    # do nothing
  } elsif ($WikiLinks and not $FreeLinks and $name !~ /^$LinkPattern$/) {
    $Message .= $q->p(Ts('Invalid UserName %s: not saved.', $name));
  } elsif ($FreeLinks and $name !~ /^$FreeLinkPattern$/) {
    $Message .= $q->p(Ts('Invalid UserName %s: not saved.', $name));
  } elsif (length($name) > 50) { # Too long
    $Message .= $q->p(T('UserName must be 50 characters or less: not saved'));
  } else {
    SetParam('username', $name);
  }
}
sub CookieRollbackFix {
  # A submit button named rollback-<ts> means the user asked to roll back
  # to timestamp <ts>; translate that into action=rollback with to=<ts>.
  my @rollback = grep(/rollback-(\d+)/, $q->param);
  if (@rollback and $rollback[0] =~ /(\d+)/) {
    SetParam('to', $1);
    $q->delete('action');
    SetParam('action', 'rollback');
  }
}
sub GetParam {
  # Fetch a CGI parameter (falling back to $default when absent) and
  # return it HTML-quoted; callers must unquote anything that may
  # legitimately contain markup.
  my ($name, $default) = @_;
  my $result = $q->param(encode_utf8($name)); # CGI.pm wants byte strings for names
  $result //= $default;
  return QuoteHtml($result); # you need to unquote anything that can have <tags>
}
sub SetParam {
  # Store a value as a CGI parameter so later GetParam calls see it.
  my ($name, $val) = @_;
  $q->param($name, $val);
}
sub InitLinkPatterns {
  # Build the global regex strings used to recognize WikiWords, free
  # links, interwiki links, and URLs. Config files may override these.
  my ($WikiWord, $QDelim);
  $QDelim = '(?:"")?'; # Optional quote delimiter (removed from the output)
  $WikiWord = '\p{Uppercase}+\p{Lowercase}+\p{Uppercase}\p{Alphabetic}*';
  $LinkPattern = "($WikiWord)$QDelim";
  $FreeLinkPattern = "([-,.()'%&?;<> _1-9A-Za-z\x{0080}-\x{fffd}]|[-,.()'%&?;<> _0-9A-Za-z\x{0080}-\x{fffd}][-,.()'%&?;<> _0-9A-Za-z\x{0080}-\x{fffd}]+)"; # disallow "0" and must match HTML and plain text (ie. > and &gt;)
  # Intersites must start with uppercase letter to avoid confusion with URLs.
  $InterSitePattern = '[A-Z\x{0080}-\x{fffd}]+[A-Za-z\x{0080}-\x{fffd}]+';
  $InterLinkPattern = "($InterSitePattern:[-a-zA-Z0-9\x{0080}-\x{fffd}_=!?#\$\@~`\%&*+\\/:;.,]*[-a-zA-Z0-9\x{0080}-\x{fffd}_=#\$\@~`\%&*+\\/])$QDelim";
  $FreeInterLinkPattern = "($InterSitePattern:[-a-zA-Z0-9\x{0080}-\x{fffd}_=!?#\$\@~`\%&*+\\/:;.,()' ]+)"; # plus space and other characters, and no restrictions on the end of the pattern
  $UrlProtocols = 'http|https|ftp|afs|news|nntp|mid|cid|mailto|wais|prospero|telnet|gopher|irc|feed';
  $UrlProtocols .= '|file' if $NetworkFile;
  my $UrlChars = '[-a-zA-Z0-9/@=+$_~*.,;:?!\'"()&#%]'; # see RFC 2396
  my $EndChars = '[-a-zA-Z0-9/@=+$_~*]'; # no punctuation at the end of the url.
  $UrlPattern = "((?:$UrlProtocols):$UrlChars+$EndChars)";
  $FullUrlPattern = "((?:$UrlProtocols):$UrlChars+)"; # when used in square brackets
  $ImageExtensions = '(gif|jpg|jpeg|png|bmp|svg)';
}
sub Clean {
  # Append a rendered ("clean") HTML snippet to the current $Fragment.
  # Returns true if the argument was handled, false when undef.
  my $block = shift;
  return 0 unless defined($block); # "0" must print
  return 1 if $block eq ''; # '' is the result of a dirty rule
  $Fragment .= $block;
  return 1;
}
sub Dirty { # arg 1 is the raw text; the real output must be printed instead
  # Flush the pending clean fragment (print it and cache it with flag 0),
  # then record the raw text as a "dirty" block (flag 1) that must be
  # re-rendered on every request instead of served from the cache.
  if ($Fragment ne '') {
    $Fragment =~ s|<p>\s*</p>||g; # clean up extra paragraphs (see end of ApplyRules)
    print $Fragment;
    push(@Blocks, $Fragment);
    push(@Flags, 0);
  }
  push(@Blocks, shift);
  push(@Flags, 1);
  $Fragment = '';
}
sub ApplyRules {
  # Translate wiki markup to HTML, printing as it goes and returning the
  # ($FS-joined) blocks and dirty/clean flags for the page cache.
  # locallinks: apply rules that create links depending on local config (incl. interlink!)
  my ($text, $locallinks, $withanchors, $revision, @tags) = @_; # $revision is used for images
  $text =~ s/\r\n/\n/g; # DOS to Unix
  $text =~ s/\n+$//g; # No trailing paragraphs
  return if $text eq ''; # allow the text '0'
  local $Fragment = ''; # the clean HTML fragment not yet on @Blocks
  local @Blocks = (); # the list of cached HTML blocks
  local @Flags = (); # a list for each block, 1 = dirty, 0 = clean
  Clean(join('', map { AddHtmlEnvironment($_) } @tags));
  if ($OpenPageName and $PlainTextPages{$OpenPageName}) { # there should be no $PlainTextPages{''}
    Clean(CloseHtmlEnvironments() . $q->pre($text));
  } elsif (my ($type) = TextIsFile($text)) { # TODO? $type defined here??
    Clean(CloseHtmlEnvironments() . $q->p(T('This page contains an uploaded file:'))
	  . $q->p(GetDownloadLink($OpenPageName, (substr($type, 0, 6) eq 'image/'), $revision))
	  . (length $Page{summary} > 0 ? $q->blockquote(QuoteHtml($Page{summary})) : $q->p(T('No summary was provided for this file.'))));
  } else {
    my $smileyregex = join "|", keys %Smilies;
    $smileyregex = qr/(?=$smileyregex)/;
    local $_ = $text;
    local $bol = 1;
    while (1) {
      # Block level elements should eat trailing empty lines to prevent empty p elements.
      if ($bol and m/\G(\s*\n)+/cg) {
	Clean(CloseHtmlEnvironments() . AddHtmlEnvironment('p'));
      } elsif ($bol and m/\G(\&lt;include(\s+(text|with-anchors))?\s+"(.*)"\&gt;[ \t]*\n?)/cgi) {
	# <include "uri..."> includes the text of the given URI verbatim
	Clean(CloseHtmlEnvironments());
	Dirty($1);
	my ($oldpos, $old_, $type, $uri) = ((pos), $_, $3, UnquoteHtml($4)); # remember, page content is quoted!
	if ($uri =~ /^($UrlProtocols):/) {
	  if ($type eq 'text') {
	    print $q->pre({class=>"include $uri"}, QuoteHtml(GetRaw($uri)));
	  } else { # never use local links for remote pages, with a starting tag
	    print $q->start_div({class=>"include"});
	    ApplyRules(QuoteHtml(GetRaw($uri)), 0, ($type eq 'with-anchors'), undef, 'p');
	    print $q->end_div();
	  }
	} else {
	  $Includes{$OpenPageName} = 1;
	  local $OpenPageName = FreeToNormal($uri);
	  if ($type eq 'text') {
	    print $q->pre({class=>"include $OpenPageName"}, QuoteHtml(GetPageContent($OpenPageName)));
	  } elsif (not $Includes{$OpenPageName}) { # with a starting tag, watch out for recursion
	    print $q->start_div({class=>"include $OpenPageName"});
	    ApplyRules(QuoteHtml(GetPageContent($OpenPageName)), $locallinks, $withanchors, undef, 'p');
	    print $q->end_div();
	    delete $Includes{$OpenPageName};
	  } else {
	    print $q->p({-class=>'error'}, $q->strong(Ts('Recursive include of %s!', $OpenPageName)));
	  }
	}
	Clean(AddHtmlEnvironment('p')); # if dirty block is looked at later, this will disappear
	($_, pos) = ($old_, $oldpos); # restore \G (assignment order matters!)
      } elsif ($bol and m/\G(\&lt;(journal|titles):?(\d*)((\s+|:)(\d*),?(\d*))?(\s+"(.*?)")?(\s+(reverse|past|future))?(\s+search\s+(.*))?\&gt;[ \t]*\n?)/cgi) {
	# <journal 10 "regexp"> includes 10 pages matching regexp
	Clean(CloseHtmlEnvironments());
	Dirty($1);
	my ($oldpos, $old_) = (pos, $_); # remember these because of the call to PrintJournal()
	PrintJournal($6, $7, $9, $11, $3, $13, $2);
	Clean(AddHtmlEnvironment('p')); # if dirty block is looked at later, this will disappear
	($_, pos) = ($old_, $oldpos); # restore \G (assignment order matters!)
      } elsif ($bol and m/\G(\&lt;rss(\s+(\d*))?\s+(.*?)\&gt;[ \t]*\n?)/cgis) {
	# <rss "uri..."> stores the parsed RSS of the given URI
	Clean(CloseHtmlEnvironments());
	Dirty($1);
	my ($oldpos, $old_) = (pos, $_); # remember these because of the call to RSS()
	print RSS($3 || 15, split(/\s+/, UnquoteHtml($4)));
	Clean(AddHtmlEnvironment('p')); # if dirty block is looked at later, this will disappear
	($_, pos) = ($old_, $oldpos); # restore \G (assignment order matters!)
      } elsif (/\G(&lt;search (.*?)&gt;)/cgis) {
	# <search regexp>
	Clean(CloseHtmlEnvironments());
	Dirty($1);
	my ($oldpos, $old_) = (pos, $_);
	print $q->start_div({-class=>'search'});
	SearchTitleAndBody($2, \&PrintSearchResult, SearchRegexp($2));
	print $q->end_div;
	Clean(AddHtmlEnvironment('p')); # if dirty block is looked at later, this will disappear
	($_, pos) = ($old_, $oldpos); # restore \G (assignment order matters!)
      } elsif ($bol and m/\G(&lt;&lt;&lt;&lt;&lt;&lt;&lt; )/cg) {
	# Unresolved merge conflict markers: render the whole conflict in a pre block.
	my ($str, $count, $limit, $oldpos) = ($1, 0, 100, pos);
	while (m/\G(.*\n)/cg and $count++ < $limit) {
	  $str .= $1;
	  last if (substr($1, 0, 29) eq '&gt;&gt;&gt;&gt;&gt;&gt;&gt; ');
	}
	if ($count >= $limit) {
	  pos = $oldpos; # reset because we did not find a match
	  Clean('&lt;&lt;&lt;&lt;&lt;&lt;&lt; ');
	} else {
	  Clean(CloseHtmlEnvironments() . $q->pre({-class=>'conflict'}, $str) . AddHtmlEnvironment('p'));
	}
      } elsif ($bol and m/\G#REDIRECT/cg) {
	Clean('#REDIRECT');
      } elsif (%Smilies and m/\G$smileyregex/cg and Clean(SmileyReplace())) {
	# smiley matched and replacement cleaned -- nothing else to do
      } elsif (Clean(RunMyRules($locallinks, $withanchors))) {
	# a module rule matched and its output was cleaned
      } elsif (m/\G\s*\n(\s*\n)+/cg) { # paragraphs: at least two newlines
	Clean(CloseHtmlEnvironments() . AddHtmlEnvironment('p')); # another one like this further up
      } elsif (m/\G&amp;([A-Za-z]+|#[0-9]+|#x[A-Fa-f0-9]+);/cg) { # entity references
	# hex class fixed from [A-Za-f0-9] (typo that accepted G-Z) to [A-Fa-f0-9]
	Clean("&$1;");
      } elsif (m/\G\s+/cg) {
	Clean(' ');
      } elsif (m/\G([A-Za-z\x{0080}-\x{fffd}]+([ \t]+[a-z\x{0080}-\x{fffd}]+)*[ \t]+)/cg
	       or m/\G([A-Za-z\x{0080}-\x{fffd}]+)/cg or m/\G(\S)/cg) {
	Clean($1); # multiple words but do not match http://foo
      } else {
	last;
      }
      $bol = (substr($_, pos() - 1, 1) eq "\n");
    }
  }
  pos = length $_; # notify module functions we've completed rule handling
  Clean(CloseHtmlEnvironments()); # last block -- close it, cache it
  if ($Fragment ne '') {
    $Fragment =~ s|<p>\s*</p>||g; # clean up extra paragraphs (see end Dirty())
    print $Fragment;
    push(@Blocks, $Fragment);
    push(@Flags, 0);
  }
  # this can be stored in the page cache -- see PrintCache
  return (join($FS, @Blocks), join($FS, @Flags));
}
sub ListRule {
  # Bullet-list rule: one or more "*" at the beginning of a line (or
  # directly following a list item) opens <ul>/<li>; the number of
  # stars ($2) gives the nesting depth.  Returns the HTML emitted, or
  # undef when the text at the current match position is not a bullet.
  # NOTE: the original pasted text had git-blame residue lines embedded
  # in this sub; they have been removed.
  if ($bol && m/\G(\s*\n)*(\*+)[ \t]+/cg
      or InElement('li') && m/\G(\s*\n)+(\*+)[ \t]+/cg) {
    return CloseHtmlEnvironmentUntil('li')
      . OpenHtmlEnvironment('ul', length($2)) . AddHtmlEnvironment('li');
  }
  return;
}
sub LinkRules {
  # Emit HTML for one link found at the current match position in $_.
  # $locallinks enables rules that depend on local configuration (the
  # intermap, local pages); $withanchors is passed through by callers
  # for anchor-aware variants.  Returns undef when nothing matched and
  # '' when a rule matched (all these rules are "dirty" or print
  # directly).  Blame/changelog residue lines from the pasted text have
  # been removed; the code is otherwise unchanged.
  my ($locallinks, $withanchors) = @_;
  if ($locallinks
      and ($BracketText && m/\G(\[$InterLinkPattern\s+([^\]]+?)\])/cg
	   or $BracketText && m/\G(\[\[$FreeInterLinkPattern\|([^\]]+?)\]\])/cg
	   or m/\G(\[$InterLinkPattern\])/cg or m/\G(\[\[\[$FreeInterLinkPattern\]\]\])/cg
	   or m/\G($InterLinkPattern)/cg or m/\G(\[\[$FreeInterLinkPattern\]\])/cg)) {
    # [InterWiki:FooBar text] or [InterWiki:FooBar] or
    # InterWiki:FooBar or [[InterWiki:foo bar|text]] or
    # [[InterWiki:foo bar]] or [[[InterWiki:foo bar]]] -- Interlinks
    # can change when the intermap changes (local config, therefore
    # depend on $locallinks).  The intermap is only read if
    # necessary, so if this is not an interlink, we have to backtrack a
    # bit.
    my $bracket = (substr($1, 0, 1) eq '[') # but [[FreeInterLink]] is not a bracket link!
      && !((substr($1, 0, 2) eq '[[') && (substr($1, 2, 1) ne '[') && index($1, '|') < 0);
    my $quote = (substr($1, 0, 2) eq '[[');
    my ($oldmatch, $output) = ($1, GetInterLink($2, $3, $bracket, $quote)); # $3 may be empty
    if ($oldmatch eq $output) { # no interlink
      my ($site, $rest) = split(/:/, $oldmatch, 2);
      Clean($site);
      pos = (pos) - length($rest) - 1; # skip site, but reparse rest
    } else {
      Dirty($oldmatch);
      print $output; # this is an interlink
    }
  } elsif ($BracketText && m/\G(\[$FullUrlPattern[|[:space:]]([^\]]+?)\])/cg
	   or $BracketText && m/\G(\[\[$FullUrlPattern[|[:space:]]([^\]]+?)\]\])/cg
	   or m/\G(\[$FullUrlPattern\])/cg or m/\G($UrlPattern)/cg) {
    # [URL text] makes [text] link to URL, [URL] makes footnotes [1]
    my ($str, $url, $text, $bracket, $rest) = ($1, $2, $3, (substr($1, 0, 1) eq '['), '');
    if ($url =~ /(&lt|&gt|&amp)$/) { # remove trailing partial named entities and add them
      $rest = $1;                    # back again at the end as trailing text
      $url =~ s/&(lt|gt|amp)$//;
    }
    if ($bracket and not defined $text) { # [URL] is dirty because the number may change
      Dirty($str);
      print GetUrl($url, $text, $bracket), $rest;
    } else {
      Clean(GetUrl($url, $text, $bracket, not $bracket) . $rest); # $text may be empty, no images in brackets
    }
  } elsif ($WikiLinks && m/\G!$LinkPattern/cg) {
    Clean($1); # ! gets eaten
  } elsif ($WikiLinks && $locallinks
	   && ($BracketWiki && m/\G(\[$LinkPattern\s+([^\]]+?)\])/cg
	       or m/\G(\[$LinkPattern\])/cg or m/\G($LinkPattern)/cg)) {
    # [LocalPage text], [LocalPage], LocalPage
    Dirty($1);
    my $bracket = (substr($1, 0, 1) eq '[' and not $3);
    print GetPageOrEditLink($2, $3, $bracket);
  } elsif ($locallinks && $FreeLinks && (m/\G(\[\[image:$FreeLinkPattern\]\])/cg
					 or m/\G(\[\[image:$FreeLinkPattern\|([^]|]+)\]\])/cg)) {
    # [[image:Free Link]], [[image:Free Link|alt text]]
    Dirty($1);
    print GetDownloadLink(FreeToNormal($2), 1, undef, UnquoteHtml($3));
  } elsif ($FreeLinks && $locallinks
	   && ($BracketWiki && m/\G(\[\[$FreeLinkPattern\|([^\]]+)\]\])/cg
	       or m/\G(\[\[\[$FreeLinkPattern\]\]\])/cg
	       or m/\G(\[\[$FreeLinkPattern\]\])/cg)) {
    # [[Free Link|text]], [[[Free Link]]], [[Free Link]]
    Dirty($1);
    my $bracket = (substr($1, 0, 3) eq '[[[');
    print GetPageOrEditLink($2, $3, $bracket, 1); # $3 may be empty
  } else {
    return; # nothing matched
  }
  return ''; # one of the dirty rules matched (and they all are)
}
sub SetHtmlEnvironmentContainer {
  # Register $html_tag as an HTML environment container.  An optional
  # $html_tag_attr (a regex fragment matching tag attributes) is
  # appended to any previously registered alternatives for the same
  # tag; an undefined attribute stores '' (match any attributes).
  # Blame/changelog residue lines from the pasted text have been
  # removed; the expression itself is unchanged.
  # NOTE(review): the '|' separator is prepended to the OLD value and
  # the new attribute is concatenated without a separator, yielding
  # e.g. "|oldnew" rather than "old|new" -- looks suspicious; verify
  # against upstream before changing.
  my ($html_tag, $html_tag_attr) = @_;
  $HtmlEnvironmentContainers{$html_tag} = defined $html_tag_attr ? (
    $HtmlEnvironmentContainers{$html_tag} ? '|' . $HtmlEnvironmentContainers{$html_tag} : '')
    . $html_tag_attr : '';
}
sub InElement {
  # Report whether $tag is anywhere on @HtmlStack; when $attr_regex is
  # given, the corresponding @HtmlAttrStack entry must match it too.
  # Returns 1 on success, '' otherwise.
  my ($tag, $attr_regex) = @_;
  for my $pos (0 .. $#HtmlStack) {
    next unless $HtmlStack[$pos] eq $tag;
    return 1 unless $attr_regex;                    # tag alone is enough
    return 1 if $HtmlAttrStack[$pos] =~ m/$attr_regex/;
  }
  return '';
}
sub AddOrCloseHtmlEnvironment {
  # Toggle an HTML environment: if $html_tag (with exactly matching
  # $html_tag_attr) is already open, close it; otherwise open it.
  # Returns the HTML produced by the close/open helper.  Blame residue
  # lines from the pasted text have been removed; code is unchanged.
  my ($html_tag, $html_tag_attr) = @_;
  return InElement ($html_tag, '^' . $html_tag_attr . '$')
    ? CloseHtmlEnvironment($html_tag, '^' . $html_tag_attr . '$')
    : AddHtmlEnvironment ($html_tag, $html_tag_attr);
}
sub AddHtmlEnvironment { # add a new $html_tag
  # Push $html_tag (with optional $html_tag_attr) onto @HtmlStack and
  # return its opening tag -- unless the same tag/attr is already on
  # top of the stack, in which case nothing is pushed and '' is
  # returned.  Blame residue lines from the pasted text have been
  # removed; code is unchanged.
  my ($html_tag, $html_tag_attr) = @_;
  $html_tag_attr //= '';
  if ($html_tag and not (@HtmlStack and $HtmlStack[0] eq $html_tag and
			 ($html_tag_attr ? $HtmlAttrStack[0] =~ m/$html_tag_attr/ : 1))) {
    unshift(@HtmlStack, $html_tag);
    unshift(@HtmlAttrStack, $html_tag_attr);
    return '<' . $html_tag . ($html_tag_attr ? ' ' . $html_tag_attr : '') . '>';
  }
  return ''; # always return something
}
sub OpenHtmlEnvironment { # close the previous $html_tag and open a new one
  # Rebuild the environment stack so that $depth nested $html_tag
  # environments are open, closing and reopening intermediate
  # environments as needed (used for nested lists).  $html_tag_attr may
  # be either a bare CSS class or a full attribute string; $tag_regex,
  # when given, matches tags to count instead of string equality.
  # Returns the accumulated closing/opening HTML.
  # Fixes applied: removed blame residue lines embedded in the pasted
  # text, and repaired the attribute-detection regex, which had diff
  # residue ("@@") injected into its character class
  # ("[[:alpha:]]@@+" -> "[[:alpha:]]+").
  my ($html_tag, $depth, $html_tag_attr, $tag_regex) = @_;
  my ($html, $found, @stack) = ('', 0); # always return something
  while (@HtmlStack and $found < $depth) { # determine new stack
    my $tag = pop(@HtmlStack);
    $found++ if ($tag_regex ? $tag =~ $tag_regex : $tag eq $html_tag);
    unshift(@stack, $tag);
  }
  unshift(@stack, pop(@HtmlStack)) if @HtmlStack and $found < $depth; # nested sublist coming up, keep list item
  @HtmlStack = @stack unless $found; # if starting a new list
  $html .= CloseHtmlEnvironments(); # close remaining elements (or all elements if a new list)
  @HtmlStack = @stack if $found; # if not starting a new list
  $depth = $IndentLimit if $depth > $IndentLimit; # requested depth 0 makes no sense
  # Backwards-compatibility hack: classically the third argument was a
  # single CSS class rather than a full attribute string; wrap it in
  # 'class="..."' unless it already looks like attr="value".
  $html_tag_attr = qq/class="$html_tag_attr"/
    if $html_tag_attr and $html_tag_attr !~ m/^\s*[[:alpha:]]+\s*=\s*('|").+\1/;
  splice(@HtmlAttrStack, 0, @HtmlAttrStack - @HtmlStack); # truncate to size of @HtmlStack
  foreach ($found .. $depth - 1) {
    unshift(@HtmlStack, $html_tag);
    unshift(@HtmlAttrStack, $html_tag_attr);
    $html .= $html_tag_attr ? "<$html_tag $html_tag_attr>" : "<$html_tag>";
  }
  return $html;
}
sub CloseHtmlEnvironments { # close all -- remember to use AddHtmlEnvironment('p') if required!
  # At the very end of the page everything is closed, containers included.
  return CloseHtmlEnvironmentUntil() if pos($_) == length($_);
  my $closed = '';
  while (@HtmlStack) {
    # Stop (without closing) at a registered container element whose
    # attributes match the registered pattern, if one was given.
    if (defined $HtmlEnvironmentContainers{$HtmlStack[0]}
        and ($HtmlEnvironmentContainers{$HtmlStack[0]}
             ? $HtmlAttrStack[0] =~ m/$HtmlEnvironmentContainers{$HtmlStack[0]}/
             : 1)) {
      return $closed;
    }
    shift(@HtmlAttrStack);
    $closed .= '</' . shift(@HtmlStack) . '>';
  }
  return $closed;
}
sub CloseHtmlEnvironment { # close environments up to and including $html_tag
  # With arguments: close everything above the matching entry, then the
  # entry itself.  Without arguments: close just the innermost environment.
  my $html = (@_ and InElement(@_)) ? CloseHtmlEnvironmentUntil(@_) : '';
  return $html || '' unless @HtmlStack and (not(@_) or defined $html); # avoid returning undef
  shift(@HtmlAttrStack);
  return $html . '</' . shift(@HtmlStack) . '>';
}
sub CloseHtmlEnvironmentUntil { # close environments up to but not including $html_tag
  # $html_tag_attr, if given, is a regex the stored attributes must match.
  # At the end of the page ($_) everything is closed unconditionally.
  my ($tag, $attr_regex) = @_;
  my $closed = '';
  while (@HtmlStack) {
    unless (pos($_) == length($_)) { # not at end of page: stop at the target
      my $top_matches = $tag
          && $HtmlStack[0] eq $tag
          && ($attr_regex ? $HtmlAttrStack[0] =~ m/$attr_regex/ : 1);
      last if $top_matches;
    }
    shift(@HtmlAttrStack);                  # drop the top entry and
    $closed .= '</' . shift(@HtmlStack) . '>'; # append its closing tag
  }
  return $closed;
}
sub SmileyReplace { # emit an <img> for the smiley at the current match position in $_
  foreach my $smiley_regexp (keys %Smilies) {
    next unless m/\G($smiley_regexp)/cg;
    return $q->img({-src => $Smilies{$smiley_regexp},
                    -alt => UnquoteHtml($1),
                    -class => 'smiley'});
  }
  return; # no smiley at this position
}
sub RunMyRules { # try each rule in @MyRules until one produces output
  my ($locallinks, $withanchors) = @_;
  foreach my $rule (@MyRules) {
    my $result = $rule->($locallinks, $withanchors);
    SetParam('msg', $@) if $@; # surface errors raised by a broken rule
    return $result if defined $result;
  }
  return; # no rule applied
}
sub RunMyMacros { # apply every registered macro to the given text
  $_ = shift; # macros edit $_ in place
  foreach my $macro (@MyMacros) {
    $macro->();
  }
  return $_;
}
2003-03-21 13:50:35 +00:00
sub PrintWikiToHTML {
* wiki.pl (@MyAfterApplyRules): New global list. This is a list of subroutine references, much like @MyRules. Unlike @MyRules, whose subroutines are called by ApplyRules() while iterating through wiki page markup, @MyAfterApplyRules subroutines are called by PrintWikiToHTML(), iteratively, after all calls to ApplyRules(). This may not sound like much -- certainly, no deep invention. Actually, however, it permits post-processing of all emitted HTML. This, in turn, allows us to reimplement the Table of Contents module ("toc.pl"), so as to remove the hacks upon which that module formerly depended. This is a good thing. Perhaps, even a great thing! As Alex can attest, "toc.pl" (and its retinue of oddball hacks) has given Oddmuse some dire trouble, over time. That's all past now...thanks be to the Nordic Gods. (@MyBeforeApplyRules): New global list. This is a list of subroutine references, much like @MyRules. Like @MyAfterApplyRules, above, @MyBeforeApplyRules subroutines are not called by ApplyRules() while iterating through wiki page markup but rather by PrintWikiToHTML(), iteratively, before all calls to ApplyRules(). This, also, may not sound like much. However, it permits pre- processing of all emitted HTML. This, in turn, allows us to reimplement the Sidebar module ("sidebar.pl"), so as to remove the hacks upon which that module formerly depended. (Praise be to Odin.) (%BlockLevelElements): New global hash. This hash should not be assigned to directly, but through the new RegisterBlockLevelElement() function. Its keys are HTML tags: 'table', 'blockquote', 'div', and so. Its values are regular expressions matching HTML tag attributes: 'class="poem"', 'align="right"', and so on. That's the structure. Here's how it works: this hash has one key for each HTML tag to be considered a "block-level element." Block-level elements are not closed when closing HTML environments with CloseHtmlEnvironments(). 
Instead, they must be explicitly closed with, say, CloseHtmlEnvironment('table'). CloseHtmlEnvironments() now closes all environments up to but not including those whose HTML tag matches a key in %BlockLevelElements and whose HTML tag attributes match a value in %BlockLevelElements. Thus, if Oddmuse is currently in a "<table>...</table>" environment, calling CloseHtmlEnvironments() now closes all environments up to but not including the table. This is a good thing. This allows modules to define block-level markup that can, itself, contain block-level markup -- say, markup for a "<blockquotes>...</blockquotes>" environment containing markup for a "<pre class='poem'>...</pre>" environment containing markup for a "<ol>...</ol>" environment. Prior to the introduction of this hash, block-level markup could not contain other block-level markup -- since most modules "open" a new HTML environment for such markup by first calling CloseHtmlEnvironments(), which, of course, closes the HTML environments of all "parent" block-level markup of this markup. Slightly mind-bending, isn't it? (RegisterBlockLevelElement): New function. (I'm not entirely happy with the verb 'register', here, but could think of nothing better.) (CloseHtmlEnvironments): Use the %BlockLevelElements hash to ensure block-level element HTML environments are not closed. (Of course, this is an amortized constant-time lookup. No efficiency concern, here!) (PrintWikiToHTML): Use the @MyBeforeApplyRules and @MyAfterApplyRules lists, to perform HTML pre- and post-processing.
2008-11-15 12:40:49 +00:00
my ($markup, $is_saving_cache, $revision, $is_locked) = @_;
my ($blocks, $flags);
2008-03-07 23:27:51 +00:00
$FootnoteNumber = 0;
$markup =~ s/$FS//g if $markup; # Remove separators (paranoia)
* wiki.pl (@MyAfterApplyRules): New global list. This is a list of subroutine references, much like @MyRules. Unlike @MyRules, whose subroutines are called by ApplyRules() while iterating through wiki page markup, @MyAfterApplyRules subroutines are called by PrintWikiToHTML(), iteratively, after all calls to ApplyRules(). This may not sound like much -- certainly, no deep invention. Actually, however, it permits post-processing of all emitted HTML. This, in turn, allows us to reimplement the Table of Contents module ("toc.pl"), so as to remove the hacks upon which that module formerly depended. This is a good thing. Perhaps, even a great thing! As Alex can attest, "toc.pl" (and its retinue of oddball hacks) has given Oddmuse some dire trouble, over time. That's all past now...thanks be to the Nordic Gods. (@MyBeforeApplyRules): New global list. This is a list of subroutine references, much like @MyRules. Like @MyAfterApplyRules, above, @MyBeforeApplyRules subroutines are not called by ApplyRules() while iterating through wiki page markup but rather by PrintWikiToHTML(), iteratively, before all calls to ApplyRules(). This, also, may not sound like much. However, it permits pre- processing of all emitted HTML. This, in turn, allows us to reimplement the Sidebar module ("sidebar.pl"), so as to remove the hacks upon which that module formerly depended. (Praise be to Odin.) (%BlockLevelElements): New global hash. This hash should not be assigned to directly, but through the new RegisterBlockLevelElement() function. Its keys are HTML tags: 'table', 'blockquote', 'div', and so. Its values are regular expressions matching HTML tag attributes: 'class="poem"', 'align="right"', and so on. That's the structure. Here's how it works: this hash has one key for each HTML tag to be considered a "block-level element." Block-level elements are not closed when closing HTML environments with CloseHtmlEnvironments(). 
Instead, they must be explicitly closed with, say, CloseHtmlEnvironment('table'). CloseHtmlEnvironments() now closes all environments up to but not including those whose HTML tag matches a key in %BlockLevelElements and whose HTML tag attributes match a value in %BlockLevelElements. Thus, if Oddmuse is currently in a "<table>...</table>" environment, calling CloseHtmlEnvironments() now closes all environments up to but not including the table. This is a good thing. This allows modules to define block-level markup that can, itself, contain block-level markup -- say, markup for a "<blockquotes>...</blockquotes>" environment containing markup for a "<pre class='poem'>...</pre>" environment containing markup for a "<ol>...</ol>" environment. Prior to the introduction of this hash, block-level markup could not contain other block-level markup -- since most modules "open" a new HTML environment for such markup by first calling CloseHtmlEnvironments(), which, of course, closes the HTML environments of all "parent" block-level markup of this markup. Slightly mind-bending, isn't it? (RegisterBlockLevelElement): New function. (I'm not entirely happy with the verb 'register', here, but could think of nothing better.) (CloseHtmlEnvironments): Use the %BlockLevelElements hash to ensure block-level element HTML environments are not closed. (Of course, this is an amortized constant-time lookup. No efficiency concern, here!) (PrintWikiToHTML): Use the @MyBeforeApplyRules and @MyAfterApplyRules lists, to perform HTML pre- and post-processing.
2008-11-15 12:40:49 +00:00
$markup = QuoteHtml($markup);
($blocks, $flags) = ApplyRules($markup, 1, $is_saving_cache, $revision, 'p');
* wiki.pl (@MyAfterApplyRules): New global list. This is a list of subroutine references, much like @MyRules. Unlike @MyRules, whose subroutines are called by ApplyRules() while iterating through wiki page markup, @MyAfterApplyRules subroutines are called by PrintWikiToHTML(), iteratively, after all calls to ApplyRules(). This may not sound like much -- certainly, no deep invention. Actually, however, it permits post-processing of all emitted HTML. This, in turn, allows us to reimplement the Table of Contents module ("toc.pl"), so as to remove the hacks upon which that module formerly depended. This is a good thing. Perhaps, even a great thing! As Alex can attest, "toc.pl" (and its retinue of oddball hacks) has given Oddmuse some dire trouble, over time. That's all past now...thanks be to the Nordic Gods. (@MyBeforeApplyRules): New global list. This is a list of subroutine references, much like @MyRules. Like @MyAfterApplyRules, above, @MyBeforeApplyRules subroutines are not called by ApplyRules() while iterating through wiki page markup but rather by PrintWikiToHTML(), iteratively, before all calls to ApplyRules(). This, also, may not sound like much. However, it permits pre- processing of all emitted HTML. This, in turn, allows us to reimplement the Sidebar module ("sidebar.pl"), so as to remove the hacks upon which that module formerly depended. (Praise be to Odin.) (%BlockLevelElements): New global hash. This hash should not be assigned to directly, but through the new RegisterBlockLevelElement() function. Its keys are HTML tags: 'table', 'blockquote', 'div', and so. Its values are regular expressions matching HTML tag attributes: 'class="poem"', 'align="right"', and so on. That's the structure. Here's how it works: this hash has one key for each HTML tag to be considered a "block-level element." Block-level elements are not closed when closing HTML environments with CloseHtmlEnvironments(). 
Instead, they must be explicitly closed with, say, CloseHtmlEnvironment('table'). CloseHtmlEnvironments() now closes all environments up to but not including those whose HTML tag matches a key in %BlockLevelElements and whose HTML tag attributes match a value in %BlockLevelElements. Thus, if Oddmuse is currently in a "<table>...</table>" environment, calling CloseHtmlEnvironments() now closes all environments up to but not including the table. This is a good thing. This allows modules to define block-level markup that can, itself, contain block-level markup -- say, markup for a "<blockquotes>...</blockquotes>" environment containing markup for a "<pre class='poem'>...</pre>" environment containing markup for a "<ol>...</ol>" environment. Prior to the introduction of this hash, block-level markup could not contain other block-level markup -- since most modules "open" a new HTML environment for such markup by first calling CloseHtmlEnvironments(), which, of course, closes the HTML environments of all "parent" block-level markup of this markup. Slightly mind-bending, isn't it? (RegisterBlockLevelElement): New function. (I'm not entirely happy with the verb 'register', here, but could think of nothing better.) (CloseHtmlEnvironments): Use the %BlockLevelElements hash to ensure block-level element HTML environments are not closed. (Of course, this is an amortized constant-time lookup. No efficiency concern, here!) (PrintWikiToHTML): Use the @MyBeforeApplyRules and @MyAfterApplyRules lists, to perform HTML pre- and post-processing.
2008-11-15 12:40:49 +00:00
if ($is_saving_cache and not $revision and $Page{revision} # don't save revision 0 pages
2008-03-07 23:27:51 +00:00
and $Page{blocks} ne $blocks and $Page{flags} ne $flags) {
$Page{blocks} = $blocks;
2014-07-21 20:48:47 +02:00
$Page{flags} = $flags;
* wiki.pl (@MyAfterApplyRules): New global list. This is a list of subroutine references, much like @MyRules. Unlike @MyRules, whose subroutines are called by ApplyRules() while iterating through wiki page markup, @MyAfterApplyRules subroutines are called by PrintWikiToHTML(), iteratively, after all calls to ApplyRules(). This may not sound like much -- certainly, no deep invention. Actually, however, it permits post-processing of all emitted HTML. This, in turn, allows us to reimplement the Table of Contents module ("toc.pl"), so as to remove the hacks upon which that module formerly depended. This is a good thing. Perhaps, even a great thing! As Alex can attest, "toc.pl" (and its retinue of oddball hacks) has given Oddmuse some dire trouble, over time. That's all past now...thanks be to the Nordic Gods. (@MyBeforeApplyRules): New global list. This is a list of subroutine references, much like @MyRules. Like @MyAfterApplyRules, above, @MyBeforeApplyRules subroutines are not called by ApplyRules() while iterating through wiki page markup but rather by PrintWikiToHTML(), iteratively, before all calls to ApplyRules(). This, also, may not sound like much. However, it permits pre- processing of all emitted HTML. This, in turn, allows us to reimplement the Sidebar module ("sidebar.pl"), so as to remove the hacks upon which that module formerly depended. (Praise be to Odin.) (%BlockLevelElements): New global hash. This hash should not be assigned to directly, but through the new RegisterBlockLevelElement() function. Its keys are HTML tags: 'table', 'blockquote', 'div', and so. Its values are regular expressions matching HTML tag attributes: 'class="poem"', 'align="right"', and so on. That's the structure. Here's how it works: this hash has one key for each HTML tag to be considered a "block-level element." Block-level elements are not closed when closing HTML environments with CloseHtmlEnvironments(). 
Instead, they must be explicitly closed with, say, CloseHtmlEnvironment('table'). CloseHtmlEnvironments() now closes all environments up to but not including those whose HTML tag matches a key in %BlockLevelElements and whose HTML tag attributes match a value in %BlockLevelElements. Thus, if Oddmuse is currently in a "<table>...</table>" environment, calling CloseHtmlEnvironments() now closes all environments up to but not including the table. This is a good thing. This allows modules to define block-level markup that can, itself, contain block-level markup -- say, markup for a "<blockquotes>...</blockquotes>" environment containing markup for a "<pre class='poem'>...</pre>" environment containing markup for a "<ol>...</ol>" environment. Prior to the introduction of this hash, block-level markup could not contain other block-level markup -- since most modules "open" a new HTML environment for such markup by first calling CloseHtmlEnvironments(), which, of course, closes the HTML environments of all "parent" block-level markup of this markup. Slightly mind-bending, isn't it? (RegisterBlockLevelElement): New function. (I'm not entirely happy with the verb 'register', here, but could think of nothing better.) (CloseHtmlEnvironments): Use the %BlockLevelElements hash to ensure block-level element HTML environments are not closed. (Of course, this is an amortized constant-time lookup. No efficiency concern, here!) (PrintWikiToHTML): Use the @MyBeforeApplyRules and @MyAfterApplyRules lists, to perform HTML pre- and post-processing.
2008-11-15 12:40:49 +00:00
if ($is_locked or RequestLockDir('main')) { # not fatal!
2008-03-07 23:27:51 +00:00
SavePage();
* wiki.pl (@MyAfterApplyRules): New global list. This is a list of subroutine references, much like @MyRules. Unlike @MyRules, whose subroutines are called by ApplyRules() while iterating through wiki page markup, @MyAfterApplyRules subroutines are called by PrintWikiToHTML(), iteratively, after all calls to ApplyRules(). This may not sound like much -- certainly, no deep invention. Actually, however, it permits post-processing of all emitted HTML. This, in turn, allows us to reimplement the Table of Contents module ("toc.pl"), so as to remove the hacks upon which that module formerly depended. This is a good thing. Perhaps, even a great thing! As Alex can attest, "toc.pl" (and its retinue of oddball hacks) has given Oddmuse some dire trouble, over time. That's all past now...thanks be to the Nordic Gods. (@MyBeforeApplyRules): New global list. This is a list of subroutine references, much like @MyRules. Like @MyAfterApplyRules, above, @MyBeforeApplyRules subroutines are not called by ApplyRules() while iterating through wiki page markup but rather by PrintWikiToHTML(), iteratively, before all calls to ApplyRules(). This, also, may not sound like much. However, it permits pre- processing of all emitted HTML. This, in turn, allows us to reimplement the Sidebar module ("sidebar.pl"), so as to remove the hacks upon which that module formerly depended. (Praise be to Odin.) (%BlockLevelElements): New global hash. This hash should not be assigned to directly, but through the new RegisterBlockLevelElement() function. Its keys are HTML tags: 'table', 'blockquote', 'div', and so. Its values are regular expressions matching HTML tag attributes: 'class="poem"', 'align="right"', and so on. That's the structure. Here's how it works: this hash has one key for each HTML tag to be considered a "block-level element." Block-level elements are not closed when closing HTML environments with CloseHtmlEnvironments(). 
Instead, they must be explicitly closed with, say, CloseHtmlEnvironment('table'). CloseHtmlEnvironments() now closes all environments up to but not including those whose HTML tag matches a key in %BlockLevelElements and whose HTML tag attributes match a value in %BlockLevelElements. Thus, if Oddmuse is currently in a "<table>...</table>" environment, calling CloseHtmlEnvironments() now closes all environments up to but not including the table. This is a good thing. This allows modules to define block-level markup that can, itself, contain block-level markup -- say, markup for a "<blockquotes>...</blockquotes>" environment containing markup for a "<pre class='poem'>...</pre>" environment containing markup for a "<ol>...</ol>" environment. Prior to the introduction of this hash, block-level markup could not contain other block-level markup -- since most modules "open" a new HTML environment for such markup by first calling CloseHtmlEnvironments(), which, of course, closes the HTML environments of all "parent" block-level markup of this markup. Slightly mind-bending, isn't it? (RegisterBlockLevelElement): New function. (I'm not entirely happy with the verb 'register', here, but could think of nothing better.) (CloseHtmlEnvironments): Use the %BlockLevelElements hash to ensure block-level element HTML environments are not closed. (Of course, this is an amortized constant-time lookup. No efficiency concern, here!) (PrintWikiToHTML): Use the @MyBeforeApplyRules and @MyAfterApplyRules lists, to perform HTML pre- and post-processing.
2008-11-15 12:40:49 +00:00
ReleaseLock() unless $is_locked;
2008-03-07 23:27:51 +00:00
}
}
2003-03-21 13:50:35 +00:00
}
sub DoClearCache {
  # Admin action: rebuild the per-page HTML cache and language guess for
  # every page.  Requires admin rights and the main lock; touches the
  # index file so its timestamp reflects the rebuild.
  return unless UserIsAdminOrError();
  RequestLockOrError();
  print GetHeader('', T('Clear Cache')), $q->start_div({-class=>'content clear'}),
    $q->p(T('Main lock obtained.')), '<p>';
  foreach my $id (AllPagesList()) {
    OpenPage($id);
    # Drop the cached HTML blocks, their dirty flags, and the language guess.
    delete @Page{qw(blocks flags languages)};
    # Recompute languages from the page *text*: the cached blocks were just
    # deleted above, so reading $Page{blocks} here would always see undef.
    $Page{languages} = GetLanguages($Page{text}) unless TextIsFile($Page{text});
    SavePage();
    print $q->br(), GetPageLink($id);
  }
  print '</p>', $q->p(T('Main lock released.')), $q->end_div();
  utime time, time, $IndexFile; # touch index file
  ReleaseLock();
  PrintFooter();
}
2003-03-21 13:50:35 +00:00
sub QuoteHtml {
  # Escape a string for safe inclusion in HTML/XML output.
  my ($text) = @_;
  # Ampersand must be handled first so the entities added below survive.
  $text =~ s/&/&amp;/g;
  $text =~ s/</&lt;/g;
  $text =~ s/>/&gt;/g;
  # Blank out control characters that are illegal in XML
  # (legal: #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]).
  $text =~ tr/\x00-\x08\x0b\x0c\x0e-\x1f/ /;
  return $text;
}
sub UnquoteHtml {
  # Reverse QuoteHtml: turn the basic entities (and %26) back into
  # characters.
  my ($text) = @_;
  my %entity = ('&lt;' => '<', '&gt;' => '>', '&amp;' => '&', '%26' => '&');
  $text =~ s/(&lt;|&gt;|&amp;|%26)/$entity{$1}/g;
  return $text;
}
sub UrlEncode {
  # Percent-encode a string (as UTF-8 octets) for use in URLs.
  # Alphanumerics, the RFC 2396 "mark" characters and '#' pass through.
  my ($text) = @_;
  return '' unless $text;
  my $result = '';
  foreach my $octet (split(//, encode_utf8($text))) {
    $result .= $octet =~ /[A-Za-z0-9\-_.!~*'()#]/
        ? $octet
        : sprintf('%%%02x', ord $octet);
  }
  return $result;
}
sub UrlDecode {
  # Decode %xx escapes (lowercase hex, as produced by UrlEncode) and
  # interpret the resulting octets as UTF-8.  A string without any
  # escapes is returned untouched and not passed through decode_utf8.
  my ($text) = @_;
  if ($text =~ s/%([0-9a-f]{2})/chr(hex($1))/eg) {
    return decode_utf8($text);
  }
  return $text;
}
sub QuoteRegexp {
  # Backslash-escape regular-expression metacharacters so the argument
  # matches itself literally when interpolated into a pattern.  The old
  # character class missed the quantifiers and alternation (* + ? { } |),
  # so input such as "a+" or "x|y" could change the meaning of -- or
  # break -- the surrounding pattern; they are escaped as well now.
  my $re = shift;
  $re =~ s/([\\\[\]\$()^.*+?{}|])/\\$1/g;
  return $re;
}
sub GetRaw {
  # Fetch a URI and return its decoded body; returns nothing when
  # LWP::UserAgent is unavailable or the request fails.
  my ($uri) = @_;
  eval { require LWP::UserAgent; } or return;
  my $response = LWP::UserAgent->new->get($uri);
  return unless $response->is_success;
  return $response->decoded_content;
}
sub DoJournal {
  # Action handler: render the journal (the collection of date pages).
  print GetHeader(undef, T('Journal')),
    $q->start_div({-class=>'content journal'});
  # 'num' is passed twice on purpose: once as the page count and once as
  # the count for the "More..." link.
  PrintJournal(map { GetParam($_, '') } qw(num num regexp mode offset search variation));
  print $q->end_div();
  PrintFooter();
}
# Sort comparator for journal page names: descending string order, so the
# newest date page (e.g. 2015-12-31) sorts first.  Uses the package
# globals $a/$b like sort(); PrintJournal also calls it directly after
# setting $a and $b by hand.
sub JournalSort { $b cmp $a }
sub PrintJournal {
  # Print a sequence of date pages matching $regexp, starting at $offset,
  # followed by a "More..." link.  $mode is one of '' (descending),
  # 'reverse' (ascending), 'future' or 'past' (relative to today).
  return if $CollectingJournal; # avoid infinite loops
  local $CollectingJournal = 1;
  my ($num, $numMore, $regexp, $mode, $offset, $search, $variation) = @_;
  $variation ||= 'journal';
  $regexp ||= '^\d\d\d\d-\d\d-\d\d';
  $num ||= 10;
  $numMore = $num unless $numMore ne '';
  $offset ||= 0;
  # FIXME: Should pass filtered list of pages to SearchTitleAndBody to save time?
  my @pages = sort JournalSort (grep(/$regexp/, $search ? SearchTitleAndBody($search) : AllPagesList()));
  @pages = reverse @pages if $mode eq 'reverse' or $mode eq 'future';
  # JournalSort compares the package globals $a and $b; set $b to today's
  # date page and compare each page against it by hand below.
  $b = $Today // CalcDay($Now);
  if ($mode eq 'future' || $mode eq 'past') {
    my $compare = $mode eq 'future' ? -1 : 1;
    # skip ahead to the first page on the requested side of today
    for (my $i = 0; $i < @pages; $i++) {
      $a = $pages[$i];
      if (JournalSort() == $compare) {
	@pages = @pages[$i .. $#pages];
	last;
      }
    }
  }
  return unless $pages[$offset];
  print $q->start_div({-class=>'journal'});
  # PrintAllPages returns the number of pages *examined*, which gives the
  # offset of the next batch.
  my $next = $offset + PrintAllPages(1, 1, $num, $variation, @pages[$offset .. $#pages]);
  print $q->end_div();
  $regexp = UrlEncode($regexp);
  $search = UrlEncode($search);
  if ($pages[$next] and $numMore != 0) {
    print $q->p({-class=>'more'}, ScriptLink("action=more;num=$numMore;regexp=$regexp;search=$search;mode=$mode;offset=$next;variation=$variation", T('More...'), 'more'));
  }
}
sub PrintAllPages {
  # Print up to $num of the given pages (subject to the hard $JournalLimit
  # for non-admins).  $links: page titles become links; $comments: add a
  # comment-page link; $variation 'titles' prints headings only.  Returns
  # the number of pages *examined* (not printed) so the caller can compute
  # the offset of the next batch.
  my ($links, $comments, $num, $variation, @pages) = @_;
  my $lang = GetParam('lang', 0);
  my ($i, $n) = 0;
  for my $id (@pages) {
    # precedence: (hard limit hit and not admin) or (requested count hit)
    last if $n >= $JournalLimit and not UserIsAdmin() or $num and $n >= $num;
    $i++; # pages looked at
    local ($OpenPageName, %Page); # this is local!
    OpenPage($id);
    my @languages = split(/,/, $Page{languages});
    # skip pages in the wrong language, deleted pages, and redirects
    next if $lang and @languages and not grep(/$lang/, @languages);
    next if PageMarkedForDeletion();
    next if substr($Page{text}, 0, 10) eq '#REDIRECT ';
    print $q->start_div({-class=>'page'}),
      $q->h1($links ? GetPageLink($id)
	     : $q->a({-name=>$id}, UrlEncode(FreeToNormal($id))));
    if ($variation ne 'titles') {
      PrintPageHtml();
      PrintPageCommentsLink($id, $comments);
    }
    print $q->end_div();
    $n++; # pages actually printed
  }
  return $i;
}
sub PrintPageCommentsLink {
  # Print a link to the comment page for $id, unless comments are
  # disabled or $id is itself a comment page.
  my ($id, $comments) = @_;
  return unless $comments and $CommentsPattern;
  return if $id =~ /$CommentsPattern/;
  print $q->p({-class=>'comment'},
	      GetPageLink($CommentsPrefix . $id, T('Comments on this page')));
}
2003-03-21 13:50:35 +00:00
sub RSS {
  # Render one or more remote RSS feeds as a single HTML list, newest
  # entries first, at most $maxitems entries.  Returns the HTML string.
  return if $CollectingJournal; # avoid infinite loops when using full=1
  local $CollectingJournal = 1;
  my $maxitems = shift;
  my @uris = @_;
  my %lines; # date key (made unique with trailing spaces) => rendered HTML
  if (not eval { require XML::RSS; }) {
    my $err = $@;
    return $q->div({-class=>'rss'}, $q->p({-class=>'error'}, $q->strong(T('XML::RSS is not available on this system.')), $err));
  }
  # All strings that are concatenated with strings returned by the RSS
  # feed must be decoded. Without this decoding, 'diff' and 'history'
  # translations will be double encoded when printing the result.
  my $tDiff = T('diff');
  my $tHistory = T('history');
  my $wikins = 'http://purl.org/rss/1.0/modules/wiki/';
  my $rdfns = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#';
  @uris = map { my $x = $_; $x =~ s/^"?(.*?)"?$/$1/; $x; } @uris; # strip quotes of uris
  my ($str, %data) = GetRss(@uris);
  foreach my $uri (keys %data) {
    my $data = $data{$uri};
    if (not $data) {
      $str .= $q->p({-class=>'error'}, $q->strong(Ts('%s returned no data, or LWP::UserAgent is not available.',
						     $q->a({-href=>$uri}, $uri))));
    } else {
      my $rss = new XML::RSS;
      eval { local $SIG{__DIE__}; $rss->parse($data); };
      if ($@) {
	$str .= $q->p({-class=>'error'}, $q->strong(Ts('RSS parsing failed for %s', $q->a({-href=>$uri}, $uri)) . ': ' . $@));
      } else {
	my $interwiki;
	if (@uris > 1) {
	  # with several feeds, prefix every title with its interwiki name
	  RssInterwikiTranslateInit(); # not needed anywhere else thus init only now and not in ReInit
	  $interwiki = $rss->{channel}->{$wikins}->{interwiki};
	  $interwiki =~ s/^\s+//; # when RDF is used, sometimes whitespace remains,
	  $interwiki =~ s/\s+$//; # which breaks the test for an existing $interwiki below
	  $interwiki ||= $rss->{channel}->{$rdfns}->{value};
	  $interwiki = $RssInterwikiTranslate{$interwiki} if $RssInterwikiTranslate{$interwiki};
	  $interwiki ||= $RssInterwikiTranslate{$uri};
	}
	my $num = 999;
	$str .= $q->p({-class=>'error'}, $q->strong(Ts('No items found in %s.', $q->a({-href=>$uri}, $uri))))
	  unless @{$rss->{items}};
	foreach my $i (@{$rss->{items}}) {
	  my $line;
	  my $date = $i->{dc}->{date};
	  if (not $date and $i->{pubDate}) {
	    # normalize an RFC 822 pubDate to ISO-ish YYYY-MM-DD
	    $date = $i->{pubDate};
	    my %mon = (Jan=>1, Feb=>2, Mar=>3, Apr=>4, May=>5, Jun=>6,
		       Jul=>7, Aug=>8, Sep=>9, Oct=>10, Nov=>11, Dec=>12);
	    $date =~ s/^(?:[A-Z][a-z][a-z], )?(\d\d?) ([A-Z][a-z][a-z]) (\d\d(?:\d\d)?)/ # pubDate uses RFC 822
	      sprintf('%04d-%02d-%02d', ($3 < 100 ? 1900 + $3 : $3), $mon{$2}, $1)/e;
	  }
	  $date ||= sprintf("%03d", $num--); # for RSS 0.91 feeds without date, descending
	  my $title = $i->{title};
	  my $description = $i->{description};
	  if (not $title and $description) { # title may be missing in RSS 2.00
	    $title = $description;
	    $description = '';
	  }
	  $title = $i->{link} if not $title and $i->{link}; # if description and title are missing
	  $line .= ' (' . $q->a({-href=>$i->{$wikins}->{diff}}, $tDiff) . ')' if $i->{$wikins}->{diff};
	  $line .= ' (' . $q->a({-href=>$i->{$wikins}->{history}}, $tHistory) . ')' if $i->{$wikins}->{history};
	  if ($title) {
	    if ($i->{link}) {
	      $line .= ' ' . $q->a({-href=>$i->{link}, -title=>$date},
				   ($interwiki ? $interwiki . ':' : '') . $title);
	    } else {
	      $line .= ' ' . $title;
	    }
	  }
	  my $contributor = $i->{dc}->{contributor};
	  $contributor ||= $i->{$wikins}->{username};
	  $contributor =~ s/^\s+//;
	  $contributor =~ s/\s+$//;
	  $contributor ||= $i->{$rdfns}->{value};
	  $line .= $q->span({-class=>'contributor'}, $q->span(T(' . . . .') . ' ') . $contributor) if $contributor;
	  if ($description) {
	    # HTML descriptions get their own block, plain text stays inline
	    if ($description =~ /</) {
	      $line .= $q->div({-class=>'description'}, $description);
	    } else {
	      $line .= $q->span({class=>'dash'}, ' &#8211; ') . $q->strong({-class=>'description'}, $description);
	    }
	  }
	  $date .= ' ' while ($lines{$date}); # make sure this is unique
	  $lines{$date} = $line;
	}
      }
    }
  }
  # sort collected lines newest first and group them into one <ul> per day
  my @lines = sort { $b cmp $a } keys %lines;
  @lines = @lines[0 .. $maxitems-1] if $maxitems and $#lines > $maxitems;
  my $date = '';
  foreach my $key (@lines) {
    my $line = $lines{$key};
    if ($key =~ /(\d\d\d\d(?:-\d?\d)?(?:-\d?\d)?)(?:[T ](\d?\d:\d\d))?/) {
      my ($day, $time) = ($1, $2);
      if ($day ne $date) {
	$str .= '</ul>' if $date; # close ul except for the first time where no open ul exists
	$date = $day;
	$str .= $q->p($q->strong($day)) . '<ul>';
      }
      $line = $q->span({-class=>'time'}, $time . ' UTC ') . $line if $time;
    } elsif (not $date) {
      $str .= '<ul>'; # if the feed doesn't have any dates we need to start the list anyhow
      $date = $Now; # to ensure the list starts only once
    }
    $str .= $q->li($line);
  }
  $str .= '</ul>' if $date;
  return $q->div({-class=>'rss'}, $str);
}
sub GetRss {
  # Fetch the given feed URIs, using the file cache in $RssDir when it is
  # younger than $RssCacheHours.  Returns an error-HTML string (possibly
  # empty) followed by a hash mapping URI => raw feed data.
  my %todo = map {$_, GetRssFile($_)} @_;
  my %data = ();
  my $str = '';
  if (GetParam('cache', $UseCache) > 0) {
    foreach my $uri (keys %todo) { # read cached rss files if possible
      if ($Now - Modified($todo{$uri}) < $RssCacheHours * 3600) {
	$data{$uri} = ReadFile($todo{$uri});
	delete($todo{$uri}); # no need to fetch them below
      }
    }
  }
  my @need_cache = keys %todo;
  if (keys %todo > 1) { # try parallel access if available
    eval { # see code example in LWP::Parallel, not LWP::Parallel::UserAgent (no callbacks here)
      require LWP::Parallel::UserAgent;
      my $pua = LWP::Parallel::UserAgent->new();
      foreach my $uri (keys %todo) {
	if (my $res = $pua->register(HTTP::Request->new('GET', $uri))) {
	  $str .= $res->error_as_HTML;
	}
      }
      %todo = (); # because the uris in the response may have changed due to redirects
      my $entries = $pua->wait();
      foreach (keys %$entries) {
	my $uri = $entries->{$_}->request->uri;
	$data{$uri} = $entries->{$_}->response->decoded_content;
      }
    }
  }
  foreach my $uri (keys %todo) { # default operation: synchronous fetching
    $data{$uri} = GetRaw($uri);
  }
  if (GetParam('cache', $UseCache) > 0) {
    # write freshly fetched feeds back into the cache
    CreateDir($RssDir);
    foreach my $uri (@need_cache) {
      my $data = $data{$uri};
      # possibly a Latin-1 file without encoding attribute will cause a problem?
      $data =~ s/encoding="[^"]*"/encoding="UTF-8"/; # content was converted
      WriteStringToFile(GetRssFile($uri), $data) if $data;
    }
  }
  return $str, %data;
}
sub GetRssFile {
  # Map a feed URI to its cache file name inside $RssDir.
  my ($uri) = @_;
  return join('/', $RssDir, UrlEncode($uri));
}
sub RssInterwikiTranslateInit {
  # Build %RssInterwikiTranslate from the page named by the
  # $RssInterwikiTranslate setting: every " from<TAB>to" line defines one
  # mapping.  A false setting disables the feature.
  return unless $RssInterwikiTranslate;
  %RssInterwikiTranslate = ();
  for my $line (split(/\n/, GetPageContent($RssInterwikiTranslate))) {
    $RssInterwikiTranslate{$1} = $2 if $line =~ /^ ([^ ]+)[ \t]+([^ ]+)$/;
  }
}
2003-12-24 04:05:18 +00:00
sub GetInterSiteUrl {
  # Expand an InterMap entry: substitute the (optionally URL-encoded)
  # page name for %s in the site's URL template, or append it when the
  # template has no %s.  Returns nothing for unknown sites or empty pages.
  my ($site, $page, $quote) = @_;
  return unless $page;
  # Foo:bar+baz arrives already quoted, [[Foo:bar baz]] must be quoted here.
  $page = join('/', map { UrlEncode($_) } split(/\//, $page)) if $quote;
  my $url = $InterSite{$site} or return;
  unless ($url =~ s/\%s/$page/g) {
    $url .= $page;
  }
  return $url;
}
sub BracketLink { # brackets can be removed via CSS
  my ($body) = @_;
  my $open  = $q->span({class=>'bracket'}, '[');
  my $close = $q->span({class=>'bracket'}, ']');
  return $q->span($open . $body . $close);
}
2003-03-21 13:50:35 +00:00
sub GetInterLink {
  # Render an interwiki link "Site:Page" as an <a> element.  When the
  # site is not in the InterMap, the original markup is reproduced.
  my ($id, $text, $bracket, $quote) = @_;
  my ($site, $page) = split(/:/, $id, 2);
  $page =~ s/&amp;/&/g; # Unquote common URL HTML
  my $url = GetInterSiteUrl($site, $page, $quote);
  my $class = 'inter ' . $site;
  # unknown site: hand the markup back unchanged
  return "[$id $text]" if $text and $bracket and not $url;
  return "[$id]" if $bracket and not $url;
  return $id if not $url;
  if ($bracket and not $text) {
    # [Site:Page] without text renders as a numbered footnote link
    $text = BracketLink(++$FootnoteNumber);
    $class .= ' number';
  } elsif (not $text) {
    # bare Site:Page gets styled spans for site, separator and page
    $text = $q->span({-class=>'site'}, $site)
      . $q->span({-class=>'separator'}, ':')
      . $q->span({-class=>'interpage'}, $page);
  } elsif ($bracket) { # and $text is set
    $class .= ' outside';
  }
  return $q->a({-href=>$url, -class=>$class}, $text);
}
2003-12-24 04:05:18 +00:00
sub InterInit {
  # Build the %InterSite map from the InterMap page: every line of the
  # form " SiteName URL" defines one interwiki prefix.
  %InterSite = ();
  for my $line (split(/\n/, GetPageContent($InterMap))) {
    $InterSite{$1} = $2 if $line =~ /^ ($InterSitePattern)[ \t]+([^ ]+)$/;
  }
}
sub GetUrl {
  # Render a raw URL as a link, or as an inline image when $images is set
  # and the target has an image extension.
  my ($url, $text, $bracket, $images) = @_;
  # NOTE(review): if this match fails, $1 retains its previous value and
  # leaks into $class below — presumably the callers guarantee a match.
  $url =~ /^($UrlProtocols)/;
  my $class = "url $1"; # the CSS class carries the protocol name
  if ($NetworkFile && $url =~ m|^file:///| && !$AllNetworkFiles
      or !$NetworkFile && $url =~ m|^file:|) {
    # Only do remote file:// links. No file:///c|/windows.
    return $url;
  } elsif ($bracket and not defined $text) {
    # [url] without text renders as a numbered footnote link
    $text = BracketLink(++$FootnoteNumber);
    $class .= ' number';
  } elsif (not defined $text) {
    $text = $url;
  } elsif ($bracket) { # and $text is set
    $class .= ' outside';
  }
  $url = UnquoteHtml($url); # links should be unquoted again
  if ($images and $url =~ /^(http:|https:|ftp:).+\.$ImageExtensions$/i) {
    return $q->img({-src=>$url, -alt=>$url, -class=>$class});
  } else {
    return $q->a({-href=>$url, -class=>$class}, $text);
  }
}
sub GetPageOrEditLink { # use GetPageLink and GetEditLink if you know the result!
  # Link to a local page if it resolves; otherwise emit edit markup:
  # either a plain edit link, or (with $UseQuestionmark) the original
  # markup followed by a '?' edit link.
  my ($id, $text, $bracket, $free) = @_;
  $id = FreeToNormal($id);
  my ($class, $resolved, $title, $exists) = ResolveId($id);
  if (not $text and $resolved and $bracket) {
    # bracketed link without text: numbered footnote style
    $text = BracketLink(++$FootnoteNumber);
    $class .= ' number';
    $title = NormalToFree($id);
  }
  my $link = $text || NormalToFree($id);
  if ($resolved) { # anchors don't exist as pages, therefore do not use $exists
    return ScriptLink(UrlEncode($resolved), $link, $class, undef, $title);
  } else { # reproduce markup if $UseQuestionmark
    return GetEditLink($id, UnquoteHtml($bracket ? "[$link]" : $link)) unless $UseQuestionmark;
    # rebuild the original markup ([...], [[...]], with optional |text)
    # and append a '?' edit link
    $link = QuoteHtml($id) . GetEditLink($id, '?');
    $link .= ($free ? '|' : ' ') . $text if $text and FreeToNormal($text) ne $id;
    $link = "[[$link]]" if $free;
    $link = "[$link]" if $bracket or not $free and $text;
    return $link;
  }
}
2004-06-04 17:43:20 +00:00
sub GetPageLink { # use if you want to force a link to local pages, whether it exists or not
  my ($id, $name, $class, $accesskey) = @_;
  $id = FreeToNormal($id);
  $name = $id unless $name;
  my $css = $class ? "$class local" : 'local';
  return ScriptLink(UrlEncode($id), NormalToFree($name), $css,
		    undef, undef, $accesskey);
}
sub GetEditLink { # shortcut
  # Link to the edit (or upload) form for page $id.
  my ($id, $name, $upload, $accesskey) = @_;
  $id = FreeToNormal($id);
  my @query = ('action=edit', 'id=' . UrlEncode($id));
  push @query, 'upload=1' if $upload;
  return ScriptLink(join(';', @query), NormalToFree($name), 'edit', undef,
		    T('Click to edit this page'), $accesskey);
}
sub ScriptUrl {
  # Turn an action string into a full URL.  Nearlinks and raw URLs arrive
  # percent-encoded and are merely decoded; everything else is appended
  # to $ScriptName.  In list context, additionally report whether the URL
  # carries parameters (contains '=').
  my ($action) = @_;
  if ($action =~ /^($UrlProtocols)\%3a/ or $action =~ /^\%2f/) { # nearlinks and other URLs
    $action =~ s/%([0-9a-f][0-9a-f])/chr(hex($1))/eg; # undo urlencode
  } else {
    my $separator = ($UsePathInfo and index($action, '=') == -1) ? '/' : '?';
    $action = $ScriptName . $separator . $action;
  }
  return wantarray ? ($action, index($action, '=') != -1) : $action;
}
2003-03-21 13:50:35 +00:00
sub ScriptLink {
  # Build an <a> element for an action; parameterized links get
  # rel="nofollow".
  my ($action, $text, $class, $name, $title, $accesskey) = @_;
  my ($url, $nofollow) = ScriptUrl($action);
  my %attr = (-href => $url);
  $attr{'-rel'}       = 'nofollow' if $nofollow;
  $attr{'-class'}     = $class     if $class;
  $attr{'-name'}      = $name      if $name;
  $attr{'-title'}     = $title     if $title;
  $attr{'-accesskey'} = $accesskey if $accesskey;
  return $q->a(\%attr, $text);
}
sub GetDownloadLink {
  # Link to (or inline) an uploaded file page.  $image: 0 = plain
  # download link, 1 = <img> tag, 2 = bare image URL.  $revision selects
  # an old revision; $alt overrides the alt/title text.
  my ($id, $image, $revision, $alt) = @_;
  $alt ||= NormalToFree($id);
  # if the page does not exist
  return '[[' . ($image ? 'image' : 'download') . ':'
    . ($UseQuestionmark ? QuoteHtml($id) . GetEditLink($id, '?', 1)
       : GetEditLink($id, $id, 1)) . ']]'
    unless $IndexHash{$id};
  my $action;
  if ($revision) {
    $action = "action=download;id=" . UrlEncode($id) . ";revision=$revision";
  } elsif ($UsePathInfo) {
    $action = "download/" . UrlEncode($id);
  } else {
    $action = "action=download;id=" . UrlEncode($id);
  }
  if ($image) {
    $action = $ScriptName . (($UsePathInfo and not $revision) ? '/' : '?') . $action;
    return $action if $image == 2; # caller only wants the URL
    my $result = $q->img({-src=>$action, -alt=>UnquoteHtml($alt), -title=>UnquoteHtml($alt), -class=>'upload'});
    # wrap the image in a link to its page, unless we are on that page
    $result = ScriptLink(UrlEncode($id), $result, 'image') unless $id eq $OpenPageName;
    return $result;
  } else {
    return ScriptLink($action, $alt, 'upload');
  }
}
sub PrintCache { # Use after OpenPage!
  # Emit the cached HTML of the open page: blocks whose flag is set are
  # re-rendered through ApplyRules, clean blocks are printed verbatim.
  my @blocks = split($FS, $Page{blocks});
  my @flags  = split($FS, $Page{flags});
  $FootnoteNumber = 0;
  while (@blocks) {
    my $block = shift @blocks;
    if (shift @flags) {
      ApplyRules($block, 1, 1); # local links, anchors, current revision, no start tag
    } else {
      print $block;
    }
  }
}
sub PrintPageHtml { # print an open page
  return unless GetParam('page', 1);
  # use the block cache when present and caching is enabled
  my $use_cache = $Page{blocks} && defined $Page{flags}
      && GetParam('cache', $UseCache) > 0;
  if ($use_cache) {
    PrintCache();
  } else {
    PrintWikiToHTML($Page{text}, 1); # save cache, current revision, no main lock
  }
}
sub PrintPageDiff { # print diff for open page
  my $diff = GetParam('diff', 0);
  return unless $UseDiff and $diff;
  PrintHtmlDiff($diff);
  print $q->hr() if GetParam('page', 1);
}
2015-05-17 03:35:35 +03:00
sub ToString {
  # Run a sub that prints to the selected filehandle and capture its
  # output as a decoded string.  Extra arguments are passed through.
  my ($code, @args) = @_;
  my $captured;
  open(my $memory, '>:encoding(UTF-8)', \$captured)
    or die "Can't open memory file: $!";
  my $previous = select $memory;
  $code->(@args);
  select $previous;
  close $memory;
  return decode_utf8($captured);
}
sub PageHtml {
  # Render page $id (diff plus body) to HTML, but bail out with $error
  # once the output would exceed $limit characters.
  my ($id, $limit, $error) = @_;
  OpenPage($id);
  my $html = ToString(\&PrintPageDiff);
  if ($limit and length($html) > $limit) {
    return $error;
  }
  my $body = ToString(\&PrintPageHtml);
  if ($limit and length($html . $body) > $limit) {
    return $html . $q->p($error);
  }
  return $html . $body;
}
2003-03-21 13:50:35 +00:00
sub T {
  # Translate a string via %Translate, falling back to the original.
  my ($string) = @_;
  return $Translate{$string} || $string;
}
sub Ts {
  # Translate $text and substitute $string for its first %s placeholder.
  my ($text, $string) = @_;
  my $result = T($text);
  $result =~ s/\%s/$string/ if defined $string;
  return $result;
}
sub Tss {
  # Translate the first argument and replace %1 .. %9 with the
  # corresponding positional arguments.
  my @args = @_;
  my $result = T($args[0]);
  $result =~ s/\%([1-9])/$args[$1]/eg;
  return $result;
}
sub GetId {
  # Determine the page id of the current request from, in this order:
  # the id/title parameter, CGI keywords (script?p+q), or path info
  # (script/p/q).  Leading path components become boolean parameters.
  my $id = UnquoteHtml(GetParam('id', GetParam('title', ''))); # id=x or title=x -> x
  if (not $id and $q->keywords) {
    $id = decode_utf8(join('_', $q->keywords)); # script?p+q -> p_q
  }
  if ($UsePathInfo and $q->path_info) {
    my @path = map { decode_utf8($_) } split(/\//, $q->path_info);
    $id ||= pop(@path); # script/p/q -> q
    foreach my $p (@path) {
      SetParam($p, 1); # script/p/q -> p=1
    }
  }
  return $id;
}
sub DoBrowseRequest {
  # Main request dispatcher: map the request (action parameter, match,
  # search, posted title, or plain page id) to its handler.
  # We can use the error message as the HTTP error code
  ReportError(Ts('CGI Internal error: %s', $q->cgi_error), $q->cgi_error) if $q->cgi_error;
  print $q->header(-status=>'304 NOT MODIFIED') and return if PageFresh(); # return value is ignored
  my $id = GetId();
  my $action = lc(GetParam('action', '')); # script?action=foo;id=bar
  $action = 'download' if GetParam('download', '') and not $action; # script/download/id
  if ($Action{$action}) {
    &{$Action{$action}}($id);
  } elsif ($action and defined &MyActions) {
    # unknown action: give the config file's MyActions a chance
    eval { local $SIG{__DIE__}; MyActions(); };
  } elsif ($action) {
    ReportError(Ts('Invalid action parameter %s', $action), '501 NOT IMPLEMENTED');
  } elsif (GetParam('match', '') ne '') {
    SetParam('action', 'index'); # make sure this gets a NOINDEX
    DoIndex();
  } elsif (GetParam('search', '') ne '') { # allow search for "0"
    SetParam('action', 'search'); # make sure this gets a NOINDEX
    DoSearch();
  } elsif (GetParam('title', '') and not GetParam('Cancel', '')) {
    # a posted edit form
    DoPost(GetParam('title', ''));
  } else {
    BrowseResolvedPage($id || $HomePage); # default action!
  }
}
sub ValidId { # hack alert: returns error message if invalid, and unfortunately the empty string if valid!
  my $id = FreeToNormal(shift);
  return T('Page name is missing') unless $id;
  require bytes; # $PageNameLimit is a byte limit, not a character limit
  return Ts('Page name is too long: %s', $id) if bytes::length($id) > $PageNameLimit;
  # .db and .lck suffixes would collide with the page database and lock files
  return Ts('Invalid Page %s (must not end with .db)', $id) if $id =~ m|\.db$|;
  return Ts('Invalid Page %s (must not end with .lck)', $id) if $id =~ m|\.lck$|;
  return Ts('Invalid Page %s', $id) if $FreeLinks ? $id !~ m|^$FreeLinkPattern$| : $id !~ m|^$LinkPattern$|;
}
sub ValidIdOrDie {
  # Like ValidId, but abort the request with 400 BAD REQUEST on failure.
  my ($id) = @_;
  if (my $error = ValidId($id)) {
    ReportError($error, '400 BAD REQUEST');
  }
  return 1;
}
sub ResolveId { # return css class, resolved id, title (eg. for popups), exist-or-not
  my ($id) = @_;
  return $IndexHash{$id}
      ? ('local', $id, '', 1)
      : ('', '', '', '');
}
sub BrowseResolvedPage {
  # Browse $id after resolving it: follow nearlinks and anchor aliases,
  # show the custom not-found page, or fall through to BrowsePage.
  my $id = FreeToNormal(shift);
  my ($class, $resolved, $title, $exists) = ResolveId($id);
  if ($class and $class eq 'near' and not GetParam('rcclusteronly', 0)) { # nearlink (is url)
    print $q->redirect({-uri=>$resolved});
  } elsif ($class and $class eq 'alias') { # an anchor was found instead of a page
    ReBrowsePage($resolved);
  } elsif (not $resolved and $NotFoundPg and $id !~ /$CommentsPattern/) { # custom page-not-found message
    BrowsePage($NotFoundPg);
  } elsif ($resolved) { # an existing page was found
    BrowsePage($resolved, GetParam('raw', 0));
  } else { # new page!
    BrowsePage($id, GetParam('raw', 0), undef, '404 NOT FOUND') if ValidIdOrDie($id);
  }
}
sub NewText { # only if no revision is available
  # Default text shown for a page that does not exist yet.
  my ($id) = @_;
  if ($CommentsPrefix and $id =~ /^($CommentsPrefix)/) {
    return T('There are no comments, yet. Be the first to leave a comment!');
  }
  return T('Welcome!') if $id eq $HomePage;
  my $link = '[' . ScriptUrl('action=edit;id=' . UrlEncode($id)) . ' '
      . T('create it now') . ']';
  return Ts('This page does not exist, but you can %s.', $link);
}
2003-03-21 13:50:35 +00:00
sub BrowsePage {
  # Show page $id: handles #REDIRECT, raw output, old revisions, diffs,
  # and the surrounding header/footer.  $status becomes the HTTP status.
  my ($id, $raw, $comment, $status) = @_;
  OpenPage($id);
  my ($revisionPage, $revision) = GetTextRevision(GetParam('revision', ''));
  my $text = $revisionPage->{text};
  $text = NewText($id) unless $revision or $Page{revision} or $comment; # new text for new pages
  # handle a single-level redirect
  my $oldId = GetParam('oldid', '');
  if ((substr($text, 0, 10) eq '#REDIRECT ')) {
    if ($oldId) {
      # we already followed one redirect to get here
      $Message .= $q->p(T('Too many redirections'));
    } elsif ($revision) {
      $Message .= $q->p(T('No redirection for old revisions'));
    } elsif (($FreeLinks and $text =~ /^\#REDIRECT\s+\[\[$FreeLinkPattern\]\]/)
	     or ($WikiLinks and $text =~ /^\#REDIRECT\s+$LinkPattern/)) {
      return ReBrowsePage(FreeToNormal($1), $id);
    } else {
      $Message .= $q->p(T('Invalid link pattern for #REDIRECT'));
    }
  }
  # shortcut if we only need the raw text: no caching, no diffs, no html.
  if ($raw) {
    print GetHttpHeader('text/plain', $Page{ts}, $IndexHash{$id} ? undef : '404 NOT FOUND');
    print $Page{ts} . " # Do not delete this line when editing!\n" if $raw == 2;
    print $text;
    return;
  }
  # normal page view
  my $msg = GetParam('msg', '');
  $Message .= $q->p($msg) if $msg; # show message if the page is shown
  SetParam('msg', '');
  print GetHeader($id, NormalToFree($id), $oldId, undef, $status);
  my $showDiff = GetParam('diff', 0);
  if ($UseDiff and $showDiff) {
    PrintHtmlDiff($showDiff, GetParam('diffrevision'), $revisionPage, $Page{revision});
    print $q->hr();
  }
  PrintPageContent($text, $revision, $comment);
  SetParam('rcclusteronly', $id) if FreeToNormal(GetCluster($text)) eq $id; # automatically filter by cluster
  PrintRcHtml($id);
  PrintFooter($id, $revision, $comment, $revisionPage);
}
sub ReBrowsePage {
  # Redirect the client to page $id; $oldId marks a followed #REDIRECT so
  # the target can break redirection loops.
  my ($id, $oldId) = map { UrlEncode($_) } @_; # encode before printing URL
  my $target = $oldId ? "action=browse;oldid=$oldId;id=$id" : $id;
  print GetRedirectPage($target, $id);
}
sub GetRedirectPage {
  # Produce an HTTP redirect to $action — or, for raw requests (which
  # get no redirect), a plain-text pointer.  $name is the human-readable
  # name of the target page (kept for interface compatibility).
  my ($action, $name) = @_;
  if (GetParam('raw', 0)) {
    # no redirect for raw output, just tell the client where to go
    return GetHttpHeader('text/plain') . Ts('Please go on to %s.', $action);
  }
  my $url = $ScriptName . (($UsePathInfo and $action !~ /=/) ? '/' : '?') . $action;
  # (removed: an unused $nameLink = $q->a(...) was computed here and
  # never referenced)
  my %headers = (-uri=>$url);
  my $cookie = Cookie();
  $headers{-cookie} = $cookie if $cookie;
  return $q->redirect(%headers);
}
sub DoRandom {
  # Redirect to a randomly chosen existing page.
  my @pages = AllPagesList();
  my $pick = int(rand(scalar @pages));
  ReBrowsePage($pages[$pick]);
}
sub PageFresh { # pages can depend on other pages (ie. last update), admin status, and css
  # True when the client's If-None-Match matches our current ETag and
  # aggressive caching (cache >= 2) is enabled.
  my $etag = $q->http('HTTP_IF_NONE_MATCH');
  return 1 if $etag and GetParam('cache', $UseCache) >= 2 and $etag eq PageEtag();
}
sub PageEtag {
  # ETag derived from the last update time plus the cookie data values;
  # URL-encoded because header field values may not contain CTLs.
  my ($changed, %params) = CookieData();
  my @values = sort values %params;
  return UrlEncode(join($FS, $LastUpdate || $Now, @values));
}
sub FileFresh { # old files are never stale, current files are stale when the page was modified
  my $etag = $q->http('HTTP_IF_NONE_MATCH');
  return unless $etag and GetParam('cache', $UseCache) >= 2;
  return 1 if GetParam('revision', 0) or $etag eq $Page{ts};
}
sub BrowseRc {
  # Show RecentChanges, as plain text with raw=1, as HTML otherwise.
  my $id = shift;
  if (GetParam('raw', 0)) {
    print GetHttpHeader('text/plain');
    PrintRcText();
  } else {
    PrintRcHtml($id || $RCName, 1);
  }
}
sub GetRcLines { # starttime, hash of seen pages to use as a second return value
  # Collect the log entries since $starttime, reading the old rc file
  # first if the current one does not reach back far enough.
  my $starttime = shift || GetParam('from', 0) ||
    $Now - GetParam('days', $RcDefault) * 86400; # 24*60*60
  my $filterOnly = GetParam('rcfilteronly', '');
  # these variables apply across logfiles
  my %match = $filterOnly ? map { $_ => 1 } SearchTitleAndBody($filterOnly) : ();
  my %following = ();
  my @result = ();
  my $ts;
  # check the first timestamp in the default file, maybe read old log file
  if (open(my $F, '<:encoding(UTF-8)', encode_utf8($RcFile))) {
    my $line = <$F>;
    ($ts) = split(/$FS/, $line); # the first timestamp in the regular rc file
  }
  if (not $ts or $ts > $starttime) { # we need to read the old rc file, too
    push(@result, GetRcLinesFor($RcOldFile, $starttime, \%match, \%following));
  }
  push(@result, GetRcLinesFor($RcFile, $starttime, \%match, \%following));
  # GetRcLinesFor is trying to save memory space, but some operations
  # can only happen once we have all the data.
  return LatestChanges(StripRollbacks(@result));
}
sub LatestChanges {
  # With all=1, mark the latest edit of every page; otherwise drop all
  # but the latest edit per page.  Then apply the 'upto' cutoff and
  # return the entries newest first.
  my $all = GetParam('all', $ShowAll);
  my @result = @_;
  my %seen = ();
  for (my $i = $#result; $i >= 0; $i--) {
    my $id = $result[$i][1];
    if ($all) {
      $result[$i][9] = 1 unless $seen{$id}; # mark latest edit
    } else {
      splice(@result, $i, 1) if $seen{$id}; # remove older edits
    }
    $seen{$id} = 1;
  }
  my $to = GetParam('upto', 0);
  if ($to) {
    # NOTE(review): `$i < $#result` never examines the very last entry;
    # confirm whether that is intentional before changing.
    for (my $i = 0; $i < $#result; $i++) {
      if ($result[$i][0] > $to) {
        splice(@result, $i);
        last;
      }
    }
  }
  return reverse @result;
}
sub StripRollbacks {
  # Handle the [[rollback]] markers written by DoRollback(): unless
  # rollbacks are to be shown, drop the marker plus the edits it undid;
  # otherwise just drop the marker itself.
  my @result = @_;
  if (not (GetParam('all', $ShowAll) or GetParam('rollback', $ShowRollbacks))) { # strip rollbacks
    my (%rollback);
    for (my $i = $#result; $i >= 0; $i--) {
      # some fields have a different meaning if looking at rollbacks
      my ($ts, $id, $target_ts, $target_id) = @{$result[$i]};
      if ($id eq '[[rollback]]') {
        if ($target_id) {
          $rollback{$target_id} = $target_ts; # single page rollback
          splice(@result, $i, 1); # strip marker
        } else {
          # site-wide rollback: skip back to the target timestamp and
          # remove everything in between, marker included
          my $end = $i;
          while ($ts > $target_ts and $i > 0) {
            $i--; # quickly skip all these lines
            $ts = $result[$i][0];
          }
          splice(@result, $i + 1, $end - $i);
          $i++; # compensate $i-- in for loop
        }
      } elsif ($rollback{$id} and $ts > $rollback{$id}) {
        splice(@result, $i, 1); # strip rolled back single pages
      }
    }
  } else { # just strip the marker left by DoRollback()
    for (my $i = $#result; $i >= 0; $i--) {
      splice(@result, $i, 1) if $result[$i][1] eq '[[rollback]]'; # id
    }
  }
  return @result;
}
sub GetRcLinesFor {
  # Read one rc logfile and return the entries at or after $starttime
  # that pass the current filter parameters, as a list of array refs.
  my $file = shift;
  my $starttime = shift;
  my %match = %{$_[0]}; # deref
  my %following = %{$_[1]}; # deref
  # parameters
  my $showminoredit = GetParam('showedit', $ShowEdits); # show minor edits
  my $all = GetParam('all', $ShowAll);
  my ($idOnly, $userOnly, $hostOnly, $clusterOnly, $filterOnly, $match, $lang,
      $followup) = map { UnquoteHtml(GetParam($_, '')); }
        qw(rcidonly rcuseronly rchostonly
           rcclusteronly rcfilteronly match lang followup);
  # parsing and filtering
  my @result = ();
  open(my $F, '<:encoding(UTF-8)', encode_utf8($file)) or return ();
  while (my $line = <$F>) {
    chomp($line);
    my ($ts, $id, $minor, $summary, $host, $username, $revision,
        $languages, $cluster) = split(/$FS/, $line);
    next if $ts < $starttime;
    $following{$id} = $ts if $followup and $followup eq $username;
    next if $followup and (not $following{$id} or $ts <= $following{$id});
    next if $idOnly and $idOnly ne $id;
    next if $filterOnly and not $match{$id};
    next if ($userOnly and $userOnly ne $username);
    next if $minor == 1 and not $showminoredit; # skip minor edits (if [[rollback]] this is bogus)
    next if not $minor and $showminoredit == 2; # skip major edits
    next if $match and $id !~ /$match/i;
    next if $hostOnly and $host !~ /$hostOnly/i;
    my @languages = split(/,/, $languages);
    next if $lang and @languages and not grep(/$lang/, @languages);
    if ($PageCluster) {
      # a summary starting with a link names the cluster of the page
      ($cluster, $summary) = ($1, $2) if $summary =~ /^\[\[$FreeLinkPattern\]\] ?: *(.*)/
        or $summary =~ /^$LinkPattern ?: *(.*)/;
      next if ($clusterOnly and $clusterOnly ne $cluster);
      $cluster = '' if $clusterOnly; # don't show cluster if $clusterOnly eq $cluster
      if ($all < 2 and not $clusterOnly and $cluster) {
        $summary = "$id: $summary"; # print the cluster instead of the page
        $id = $cluster;
        $revision = '';
      }
    } else {
      $cluster = '';
    }
    # $id may have been replaced by the cluster above, so record it again
    $following{$id} = $ts if $followup and $followup eq $username;
    push(@result, [$ts, $id, $minor, $summary, $host, $username, $revision,
                   \@languages, $cluster]);
  }
  return @result;
}
sub ProcessRcLines {
  # Walk the filtered log entries, calling $printDailyTear whenever a
  # new day begins and $printRCLine for every single change.
  my ($printDailyTear, $printRCLine) = @_; # code references
  my $day = ''; # day of the previous entry, used to detect day changes
  foreach my $entry (GetRcLines()) {
    my ($ts, $id, $minor, $summary, $host, $username, $revision, $languageref,
        $cluster, $last) = @$entry;
    my $current = CalcDay($ts);
    if ($current ne $day) {
      $printDailyTear->($current);
      $day = $current;
    }
    $printRCLine->($id, $ts, $host, $username, $summary, $minor, $revision,
                   $languageref, $cluster, $last);
  }
}
sub RcHeader {
  # Render the heading and link menu at the top of RecentChanges: the
  # period covered, active filters, toggles for all/minor/rollback
  # views, day ranges, and the RSS links.
  my ($from, $upto, $html) = (GetParam('from', 0), GetParam('upto', 0), '');
  my $days = GetParam('days') + 0 || $RcDefault; # force numeric $days
  my $all = GetParam('all', $ShowAll);
  my $edits = GetParam('showedit', $ShowEdits);
  my $rollback = GetParam('rollback', $ShowRollbacks);
  if ($from) {
    $html .= $q->h2(Ts('Updates since %s', TimeToText(GetParam('from', 0))) . ' '
                    . ($upto ? Ts('up to %s', TimeToText($upto)) : ''));
  } else {
    $html .= $q->h2((GetParam('days', $RcDefault) != 1)
                    ? Ts('Updates in the last %s days', $days)
                    : Ts('Updates in the last day'));
  }
  my $action = '';
  # announce each active filter and carry it over into all the links below
  my ($idOnly, $userOnly, $hostOnly, $clusterOnly, $filterOnly,
      $match, $lang, $followup) =
        map {
          my $val = GetParam($_, '');
          $html .= $q->p($q->b('(' . Ts('for %s only', $val) . ')')) if $val;
          $action .= ";$_=$val" if $val; # remember these parameters later!
          $val;
        } qw(rcidonly rcuseronly rchostonly rcclusteronly rcfilteronly
             match lang followup);
  my $rss = "action=rss$action;days=$days;all=$all;showedit=$edits";
  if ($clusterOnly) {
    $action = GetPageParameters('browse', $clusterOnly) . $action;
  } else {
    $action = "action=rc$action";
  }
  my @menu;
  if ($all) {
    push(@menu, ScriptLink("$action;days=$days;all=0;showedit=$edits",
                           T('List latest change per page only')));
  } else {
    push(@menu, ScriptLink("$action;days=$days;all=1;showedit=$edits",
                           T('List all changes')));
    if ($rollback) {
      push(@menu, ScriptLink("$action;days=$days;all=0;rollback=0;"
                             . "showedit=$edits", T('Skip rollbacks')));
    } else {
      push(@menu, ScriptLink("$action;days=$days;all=0;rollback=1;"
                             . "showedit=$edits", T('Include rollbacks')));
    }
  }
  if ($edits) {
    push(@menu, ScriptLink("$action;days=$days;all=$all;showedit=0",
                           T('List only major changes')));
  } else {
    push(@menu, ScriptLink("$action;days=$days;all=$all;showedit=1",
                           T('Include minor changes')));
  }
  $html .= $q->p(join(' | ', (map { ScriptLink("$action;days=$_;all=$all;showedit=$edits", $_); } @RcDays)),
                 T('days'), $q->br(), @menu, $q->br(),
                 ScriptLink($action . ';from=' . ($LastUpdate + 1)
                            . ";all=$all;showedit=$edits", T('List later changes')),
                 ScriptLink($rss, T('RSS'), 'rss nopages nodiff'),
                 ScriptLink("$rss;full=1", T('RSS with pages'), 'rss pages nodiff'),
                 ScriptLink("$rss;full=1;diff=1", T('RSS with pages and diff'),
                            'rss pages diff'));
  $html .= $q->p({-class => 'documentation'}, T('Using the 「rollback」 button on this page will reset the wiki to that particular point in time, undoing any later changes to all of the pages.')) if UserIsAdmin() and GetParam('all', $ShowAll);
  return $html;
}
sub GetScriptUrlWithRcParameters {
  # Self-referencing URL for the RSS feed, carrying every rc parameter
  # currently set (used for the atom:link rel="self" element).
  my $url = "$ScriptName?action=rss";
  foreach my $param (qw(from upto days all showedit rollback rcidonly rcuseronly
                        rchostonly rcclusteronly rcfilteronly match lang
                        followup page diff full)) {
    my $val = GetParam($param, undef);
    $url .= ";$param=$val" if defined $val;
  }
  return $url;
}
sub GetFilterForm {
  # Build the filter form shown below RecentChanges: text fields for
  # title, body, username, host, follow-up, and (if configured) language.
  my $form = $q->strong(T('Filters'));
  $form .= $q->input({-type=>'hidden', -name=>'action', -value=>'rc'});
  # preserve the current view settings across a filter submission
  $form .= $q->input({-type=>'hidden', -name=>'all', -value=>1}) if (GetParam('all', $ShowAll));
  $form .= $q->input({-type=>'hidden', -name=>'showedit', -value=>1}) if (GetParam('showedit', $ShowEdits));
  if (GetParam('days', $RcDefault) != $RcDefault) {
    $form .= $q->input({-type=>'hidden', -name=>'days', -value=>GetParam('days', $RcDefault)});
  }
  my $table = '';
  foreach my $h (['match' => T('Title:')],
                 ['rcfilteronly' => T('Title and Body:')],
                 ['rcuseronly' => T('Username:')], ['rchostonly' => T('Host:')],
                 ['followup' => T('Follow up to:')]) {
    $table .= $q->Tr($q->td($q->label({-for=>$h->[0]}, $h->[1])),
                     $q->td($q->textfield(-name=>$h->[0], -id=>$h->[0], -size=>20)));
  }
  if (%Languages) {
    $table .= $q->Tr($q->td($q->label({-for=>'rclang'}, T('Language:')))
                     . $q->td($q->textfield(-name=>'lang', -id=>'rclang', -size=>10,
                                            -default=>GetParam('lang', ''))));
  }
  return GetFormStart(undef, 'get', 'filter') . $q->p($form) . $q->table($table)
    . $q->p($q->submit('dofilter', T('Go!'))) . $q->end_form;
}
sub RcHtml {
  # Render the list of changes as one <ul> per day, each change with
  # time, diff/history links, optional rollback button, page link,
  # author, summary, languages, and minor marker; ends with "More...".
  my ($html, $inlist) = ('', 0);
  # Optimize param fetches and translations out of main loop
  my $all = GetParam('all', $ShowAll);
  my $admin = UserIsAdmin();
  my $rollback_was_possible = 0;
  my $printDailyTear = sub {
    my $date = shift;
    if ($inlist) {
      $html .= '</ul>';
      $inlist = 0;
    }
    $html .= $q->p($q->strong($date));
    if (not $inlist) {
      $html .= '<ul>';
      $inlist = 1;
    }
  };
  my $printRCLine = sub {
    my ($id, $ts, $host, $username, $summary, $minor, $revision,
        $languages, $cluster, $last) = @_;
    my $all_revision = $last ? undef : $revision; # no revision for the last one
    $host = QuoteHtml($host);
    my $author = GetAuthorLink($host, $username);
    my $sum = $summary ? $q->span({class=>'dash'}, ' &#8211; ')
      . $q->strong(QuoteHtml($summary)) : '';
    my $edit = $minor ? $q->em({class=>'type'}, T('(minor)')) : '';
    my $lang = @{$languages}
      ? $q->span({class=>'lang'}, '[' . join(', ', @{$languages}) . ']') : '';
    my ($pagelink, $history, $diff, $rollback) = ('', '', '', '');
    if ($all) {
      $pagelink = GetOldPageLink('browse', $id, $all_revision, $id, $cluster);
      # a rollback button also appears on the first line after a
      # possible target, hence the $rollback_was_possible carry-over
      my $rollback_is_possible = RollbackPossible($ts);
      if ($admin and ($rollback_is_possible or $rollback_was_possible)) {
        $rollback = $q->submit("rollback-$ts", T('rollback'));
        $rollback_was_possible = $rollback_is_possible;
      } else {
        $rollback_was_possible = 0;
      }
    } elsif ($cluster) {
      $pagelink = GetOldPageLink('browse', $id, $revision, $id, $cluster);
    } else {
      $pagelink = GetPageLink($id, $cluster);
      $history = '(' . GetHistoryLink($id, T('history')) . ')';
    }
    if ($cluster and $PageCluster) {
      $diff .= GetPageLink($PageCluster) . ':';
    } elsif ($UseDiff and GetParam('diffrclink', 1)) {
      if ($revision == 1) {
        $diff .= '(' . $q->span({-class=>'new'}, T('new')) . ')';
      } elsif ($all) {
        $diff .= '(' . ScriptLinkDiff(2, $id, T('diff'), $all_revision) .')';
      } else {
        $diff .= '(' . ScriptLinkDiff($minor ? 2 : 1, $id, T('diff')) . ')';
      }
    }
    $html .= $q->li($q->span({-class=>'time'}, CalcTime($ts)), $diff, $history,
                    $rollback, $pagelink, T(' . . . .'), $author, $sum, $lang,
                    $edit);
  };
  ProcessRcLines($printDailyTear, $printRCLine);
  $html .= '</ul>' if $inlist;
  # use delta between from and upto, or use days, whichever is available
  my $to = GetParam('from', GetParam('upto', $Now - GetParam('days', $RcDefault) * 86400));
  my $from = $to - (GetParam('upto') ? GetParam('upto') - GetParam('from') : GetParam('days', $RcDefault) * 86400);
  my $more = "action=rc;from=$from;upto=$to";
  foreach (qw(all showedit rollback rcidonly rcuseronly rchostonly
              rcclusteronly rcfilteronly match lang followup)) {
    my $val = GetParam($_, '');
    $more .= ";$_=$val" if $val;
  }
  $html .= $q->p({-class=>'more'}, ScriptLink($more, T('More...'), 'more'));
  return GetFormStart(undef, 'get', 'rc') . $html . $q->end_form;
}
sub PrintRcHtml { # to append RC to existing page, or action=rc directly
  # Print RecentChanges; with $standalone it is a page of its own with
  # header and footer, otherwise it is appended to page $id.
  my ($id, $standalone) = @_;
  my $rc = ($id eq $RCName or $id eq T($RCName) or T($id) eq $RCName);
  if ($standalone) {
    print GetHeader('', $rc ? NormalToFree($id) : Ts('All changes for %s', NormalToFree($id)));
  }
  if ($standalone or $rc or GetParam('rcclusteronly', '')) {
    print $q->start_div({-class=>'rc'});
    print $q->hr() unless $standalone or GetParam('embed', $EmbedWiki);
    print RcHeader() . RcHtml() . GetFilterForm() . $q->end_div();
  }
  PrintFooter($id) if $standalone;
}
sub RcTextItem {
  # Format one "name: value" line for the plain text feed; continuation
  # lines are indented with a space.  Returns '' for empty values.
  my ($name, $value) = @_;
  $value = UnquoteHtml($value);
  $value =~ s/\n+$//;
  $value =~ s/\n+/\n /g; # /g so that *every* continuation line is indented, not just the first
  return $value ? $name . ': ' . $value . "\n" : '';
}
sub RcTextRevision {
  # Print one change of the plain text feed as a block of name: value items.
  my ($id, $ts, $host, $username, $summary, $minor, $revision,
      $languages, $cluster, $last) = @_;
  # link to the exact revision unless this is the latest change of the page
  my $target = GetParam('all', $ShowAll) && !$last
    ? '?' . GetPageParameters('browse', $id, $revision, $cluster, $last)
    : ($UsePathInfo ? '/' : '?') . UrlEncode($id);
  my $link = $ScriptName . $target;
  print "\n",
    RcTextItem('title', NormalToFree($id)),
    RcTextItem('description', $summary),
    RcTextItem('generator', GetAuthor($host, $username)),
    RcTextItem('language', join(', ', @{$languages})),
    RcTextItem('link', $link),
    RcTextItem('last-modified', TimeToW3($ts)),
    RcTextItem('revision', $revision),
    RcTextItem('minor', $minor);
}
sub PrintRcText { # print text rss header and call ProcessRcLines
  # Plain text variant of the feed; author links make no sense here,
  # hence $RecentLink is disabled for the duration of this call.
  local $RecentLink = 0;
  print RcTextItem('title', $SiteName),
    RcTextItem('description', $SiteDescription), RcTextItem('link', $ScriptName),
    RcTextItem('generator', 'Oddmuse'), RcTextItem('rights', $RssRights);
  ProcessRcLines(sub {}, \&RcTextRevision);
}
sub GetRcRss {
  # Build the RSS 2.0 feed for RecentChanges, honouring the exclusion
  # list on the $RssExclude page and the rsslimit parameter.
  my $date = TimeToRFC822($LastUpdate);
  my %excluded = ();
  if (GetParam("exclude", 1)) {
    foreach (split(/\n/, GetPageContent($RssExclude))) {
      if (/^ ([^ ]+)[ \t]*$/) { # only read lines with one word after one space
        $excluded{$1} = 1;
      }
    }
  }
  my $rss = qq{<?xml version="1.0" encoding="UTF-8"?>\n};
  if ($RssStyleSheet =~ /\.(xslt?|xml)$/) {
    $rss .= qq{<?xml-stylesheet type="text/xml" href="$RssStyleSheet" ?>\n};
  } elsif ($RssStyleSheet) {
    $rss .= qq{<?xml-stylesheet type="text/css" href="$RssStyleSheet" ?>\n};
  }
  $rss .= qq{<rss version="2.0"
    xmlns:wiki="http://purl.org/rss/1.0/modules/wiki/"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:cc="http://web.resource.org/cc/"
    xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<docs>http://blogs.law.harvard.edu/tech/rss</docs>
};
  my $title = QuoteHtml($SiteName) . ': ' . GetParam('title', QuoteHtml(NormalToFree($HomePage)));
  $rss .= "<title>$title</title>\n";
  $rss .= "<link>" . ScriptUrl($HomePage) . "</link>\n";
  $rss .= qq{<atom:link href="} . GetScriptUrlWithRcParameters()
    . qq{" rel="self" type="application/rss+xml" />\n};
  if ($SiteDescription) {
    $rss .= "<description>" . QuoteHtml($SiteDescription) . "</description>\n"
  }
  $rss .= "<pubDate>$date</pubDate>\n";
  $rss .= "<lastBuildDate>$date</lastBuildDate>\n";
  $rss .= "<generator>Oddmuse</generator>\n";
  $rss .= "<copyright>$RssRights</copyright>\n" if $RssRights;
  if ($RssLicense) {
    $rss .= join('', map {"<cc:license>" . QuoteHtml($_) . "</cc:license>\n"}
                 (ref $RssLicense eq 'ARRAY' ? @$RssLicense : $RssLicense))
  }
  $rss .= "<wiki:interwiki>$InterWikiMoniker</wiki:interwiki>\n" if $InterWikiMoniker;
  if ($RssImageUrl) {
    $rss .= "<image>\n";
    $rss .= "<url>$RssImageUrl</url>\n";
    $rss .= "<title>$title</title>\n"; # the same as the channel
    $rss .= "<link>$ScriptName</link>\n"; # the same as the channel
    $rss .= "</image>\n";
  }
  my $limit = GetParam("rsslimit", 15); # Only take the first 15 entries
  my $count = 0;
  ProcessRcLines(sub {}, sub {
                   my $id = shift;
                   return if $excluded{$id} or ($limit ne 'all' and $count++ >= $limit);
                   $rss .= "\n" . RssItem($id, @_);
                 });
  $rss .= "</channel>\n</rss>\n";
  return $rss;
}
sub RssItem {
  # Build a single <item> element of the RSS feed.
  my ($id, $ts, $host, $username, $summary, $minor, $revision,
      $languages, $cluster, $last) = @_;
  my $name = ItemName($id);
  if (GetParam('full', 0)) { # full page means summary is not shown
    $summary = PageHtml($id, 50 * 1024, T('This page is too big to send over RSS.'));
  } else {
    $summary = QuoteHtml($summary); # page summary must be quoted
  }
  my $date = TimeToRFC822($ts);
  $username = QuoteHtml($username);
  $username ||= $host; # fall back to the host when no username was given
  my $rss = "<item>\n";
  $rss .= "<title>$name</title>\n";
  my $link = ScriptUrl(GetParam('all', $cluster)
                       ? GetPageParameters('browse', $id, $revision, $cluster, $last)
                       : UrlEncode($id));
  $rss .= "<link>$link</link>\n<guid>$link</guid>\n";
  # NOTE(review): plain summaries were already quoted above, so they get
  # escaped twice here -- presumably intentional so the markup survives
  # XML transport; confirm before changing.
  $rss .= "<description>" . QuoteHtml($summary) . "</description>\n" if $summary;
  $rss .= "<pubDate>" . $date . "</pubDate>\n";
  $rss .= "<comments>" . ScriptUrl($CommentsPrefix . UrlEncode($id))
    . "</comments>\n" if $CommentsPattern and $id !~ /$CommentsPattern/;
  $rss .= "<dc:contributor>" . $username . "</dc:contributor>\n" if $username;
  $rss .= "<wiki:status>" . (1 == $revision ? 'new' : 'updated') . "</wiki:status>\n";
  $rss .= "<wiki:importance>" . ($minor ? 'minor' : 'major') . "</wiki:importance>\n";
  $rss .= "<wiki:version>" . $revision . "</wiki:version>\n";
  $rss .= "<wiki:history>" . ScriptUrl("action=history;id=" . UrlEncode($id))
    . "</wiki:history>\n";
  $rss .= "<wiki:diff>" . ScriptUrl("action=browse;diff=1;id=" . UrlEncode($id))
    . "</wiki:diff>\n" if $UseDiff and GetParam('diffrclink', 1);
  return $rss . "</item>\n";
}
sub DoRss {
  # Serve RecentChanges as an RSS 2.0 feed.
  print GetHttpHeader('application/xml');
  print GetRcRss();
}
sub DoHistory {
  # Show the revision history of a page, raw text or HTML.
  my $id = shift;
  ValidIdOrDie($id);
  OpenPage($id);
  if (GetParam('raw', 0)) {
    DoRawHistory($id);
  } else {
    DoHtmlHistory($id);
  }
}
sub DoRawHistory {
  # Emit the history of a page as plain text: feed header items first,
  # then one block per revision, the current page leading.
  my ($id) = @_;
  print GetHttpHeader('text/plain'),
    RcTextItem('title', Ts('History of %s', NormalToFree($OpenPageName))),
    RcTextItem('date', TimeToText($Now)),
    RcTextItem('link', ScriptUrl("action=history;id=$OpenPageName;raw=1")),
    RcTextItem('generator', 'Oddmuse');
  SetParam('all', 1); # make RcTextRevision link to specific revisions
  my @languages = split(/,/, $Page{languages});
  RcTextRevision($id, $Page{ts}, $Page{host}, $Page{username}, $Page{summary},
                 $Page{minor}, $Page{revision}, \@languages, undef, 1);
  foreach my $rev (GetKeepRevisions($OpenPageName)) {
    my $keep = GetKeptRevision($rev);
    @languages = split(/,/, $keep->{languages});
    RcTextRevision($id, $keep->{ts}, $keep->{host}, $keep->{username},
                   $keep->{summary}, $keep->{minor}, $keep->{revision}, \@languages);
  }
}
sub DoHtmlHistory {
  # Print the revision history of a page as HTML.  With $UseDiff the
  # revisions are wrapped in a form with radio buttons so any two
  # revisions can be compared; rollback buttons appear for editors.
  my ($id) = @_;
  print GetHeader('', Ts('History of %s', NormalToFree($id)));
  my $row = 0;
  # rollback is offered to users who may edit and either set a username or are editors
  my $rollback = UserCanEdit($id, 0) && (GetParam('username', '') or UserIsEditor());
  my $date = CalcDay($Page{ts});
  my @html = (GetFormStart(undef, 'get', 'history'));
  push(@html, $q->p({-class => 'documentation'}, T('Using the 「rollback」 button on this page will reset the page to that particular point in time, undoing any later changes to this page.'))) if $rollback;
  push(@html, $q->p(# don't use $q->hidden here!
		    $q->input({-type=>'hidden', -name=>'action', -value=>'browse'}),
		    $q->input({-type=>'hidden', -name=>'diff', -value=>'1'}),
		    $q->input({-type=>'hidden', -name=>'id', -value=>$id})));
  # list of rows with revisions, starting with current revision
  push(@html, $q->p($q->submit({-name=>T('Compare')}))) if $UseDiff;
  my @rows = (GetHistoryLine($id, \%Page, $row++, $rollback, $date, 1));
  foreach my $revision (GetKeepRevisions($OpenPageName)) {
    my $keep = GetKeptRevision($revision);
    my $new = CalcDay($keep->{ts});
    # the last flag tells GetHistoryLine to print a date heading
    push(@rows, GetHistoryLine($id, $keep, $row++, $rollback, $new, $new ne $date));
    $date = $new;
  }
  # if we can use diff, add radio-buttons and compare buttons if $UseDiff
  if ($UseDiff) {
    push(@html, $q->table({-class=>'history'}, @rows),
	 $q->p($q->submit({-name=>T('Compare')})), $q->end_form());
  } else {
    push(@html, @rows);
  }
  # offer deletion when kept revisions expire and the user may roll back
  if ($KeepDays and $rollback and $Page{revision}) {
    push(@html, $q->p(ScriptLink('title=' . UrlEncode($id) . ';text='
				 . UrlEncode($DeletedPage) . ';summary='
				 . UrlEncode(T('Deleted')),
				 T('Mark this page for deletion'))));
  }
  print $q->div({-class=>'content history'}, @html);
  PrintFooter($id, 'history');
}
sub GetHistoryLine {
  # Render one revision of the history list: time, page/revision link
  # (or rollback button), author, summary, minor marker and -- with
  # $UseDiff -- the radio buttons used to pick revisions for comparison.
  my ($id, $dataref, $row, $rollback, $date, $newday) = @_;
  my %data = %$dataref;
  my $revision = $data{revision};
  return $q->p(T('No other revisions available')) unless $revision;
  my $html = CalcTime($data{ts});
  if ($row == 0) { # current revision
    $html .= ' (' . T('current') . ')' if $rollback;
    $html .= ' ' . GetPageLink($id, Ts('Revision %s', $revision));
  } else {
    $html .= ' ' . $q->submit("rollback-$data{ts}", T('rollback')) if $rollback;
    $html .= ' ' . GetOldPageLink('browse', $id, $revision,
                                  Ts('Revision %s', $revision));
  }
  my $host = $data{host} || $data{ip};
  $html .= T(' . . . .') . ' ' . GetAuthorLink($host, $data{username});
  $html .= $q->span({class=>'dash'}, ' &#8211; ')
    . $q->strong(QuoteHtml($data{summary})) if $data{summary};
  $html .= ' ' . $q->em({class=>'type'}, T('(minor)')) . ' ' if $data{minor};
  if ($UseDiff) {
    my %attr1 = (-type=>'radio', -name=>'diffrevision', -value=>$revision);
    $attr1{-checked} = 'checked' if $row == 1;
    my %attr2 = (-type=>'radio', -name=>'revision', -value=> $row ? $revision : '');
    $attr2{-checked} = 'checked' if $row == 0; # first row is special
    $html = $q->Tr($q->td($q->input(\%attr1)), $q->td($q->input(\%attr2)), $q->td($html));
    $html = $q->Tr($q->td({-colspan=>3}, $q->strong($date))) . $html if $newday;
  } else {
    $html .= $q->br();
    $html = $q->strong($date) . $q->br() . $html if $newday;
  }
  return $html;
}
sub DoContributors {
  # List everybody who ever edited page $id (or any page, if $id is empty).
  my $id = shift;
  SetParam('rcidonly', $id);
  SetParam('all', 1);
  print GetHeader('', Ts('Contributors to %s', NormalToFree($id || $SiteName)));
  my %contrib = ();
  for my $line (GetRcLines(1)) {
    my ($ts, $pagename, $minor, $summary, $host, $username) = @$line;
    $contrib{$username}++ if $username;
  }
  print $q->div({-class=>'content contrib'},
                $q->p(map { GetPageLink($_) } sort(keys %contrib)));
  PrintFooter();
}
sub RollbackPossible {
  # A rollback to $ts is possible unless $ts is the most recent change
  # or lies beyond the expiry horizon of kept revisions.
  my $ts = shift; # there can be no rollback to the most recent change(s) made (1s resolution!)
  return $ts != $LastUpdate && (!$KeepDays || ($Now - $ts) < $KeepDays * 86400); # 24*60*60
}
sub DoRollback {
  # Roll back a single page ($page) -- or, for admins, every page
  # changed since the 'to' timestamp -- to its state at that time.
  my $page = shift;
  my $to = GetParam('to', 0);
  ReportError(T('Missing target for rollback.'), '400 BAD REQUEST') unless $to;
  ReportError(T('Target for rollback is too far back.'), '400 BAD REQUEST') unless $page or RollbackPossible($to);
  ReportError(T('A username is required for ordinary users.'), '403 FORBIDDEN') unless GetParam('username', '') or UserIsEditor();
  my @ids = ();
  if (not $page) { # cannot just use list length because of ('')
    return unless UserIsAdminOrError(); # only admins can do mass changes
    SetParam('showedit', 1); # make GetRcLines return minor edits as well
    SetParam('all', 1); # prevent LatestChanges from interfering
    SetParam('rollback', 1); # prevent StripRollbacks from interfering
    my %ids = map { my ($ts, $id) = @$_; $id => 1; } # make unique via hash
      GetRcLines($to); # list all the pages edited since $to
    @ids = keys %ids;
  } else {
    @ids = ($page);
  }
  RequestLockOrError();
  print GetHeader('', T('Rolling back changes')),
    $q->start_div({-class=>'content rollback'}), $q->start_p();
  foreach my $id (@ids) {
    OpenPage($id);
    my ($text, $minor, $ts) = GetTextAtTime($to);
    if ($Page{text} eq $text) {
      print T("The two revisions are the same."), $q->br() if $page; # no message when doing mass revert
    } elsif (not UserCanEdit($id, 1)) {
      print Ts('Editing not allowed: %s is read-only.', $id), $q->br();
    } elsif (not UserIsEditor() and my $rule = BannedContent($text)) {
      print Ts('Rollback of %s would restore banned content.', $id), $rule, $q->br();
    } else {
      Save($id, $text, Ts('Rollback to %s', TimeToText($to)), $minor, ($Page{host} ne $q->remote_addr()));
      print Ts('%s rolled back', GetPageLink($id)), ($ts ? ' ' . Ts('to %s', TimeToText($to)) : ''), $q->br();
    }
  }
  WriteRcLog('[[rollback]]', $page, $to); # leave marker
  print $q->end_p() . $q->end_div();
  ReleaseLock();
  PrintFooter($page, 'edit');
}
sub DoAdminPage {
  # Show the administration menu: standard actions, lock management,
  # per-page locks, and whatever @MyAdminCode modules contribute.
  my ($id, @rest) = @_;
  my @menu = ();
  push(@menu, ScriptLink('action=index', T('Index of all pages'), 'index')) if $Action{index};
  push(@menu, ScriptLink('action=version', T('Wiki Version'), 'version')) if $Action{version};
  push(@menu, ScriptLink('action=password', T('Password'), 'password')) if $Action{password};
  push(@menu, ScriptLink('action=maintain', T('Run maintenance'), 'maintain')) if $Action{maintain};
  # report any lock directories that are currently held
  my @locks;
  for my $pattern (@KnownLocks) {
    for my $name (Glob($pattern)) {
      if (IsDir($LockDir . $name)) {
        push(@locks, $name);
      }
    }
  }
  if (@locks and $Action{unlock}) {
    push(@menu, ScriptLink('action=unlock', T('Unlock Wiki'), 'unlock') . ' (' . join(', ', @locks) . ')');
  };
  if (UserIsAdmin()) {
    if ($Action{editlock}) {
      if (IsFile("$DataDir/noedit")) {
        push(@menu, ScriptLink('action=editlock;set=0', T('Unlock site'), 'editlock 0'));
      } else {
        push(@menu, ScriptLink('action=editlock;set=1', T('Lock site'), 'editlock 1'));
      }
    }
    if ($id and $Action{pagelock}) {
      my $title = NormalToFree($id);
      if (IsFile(GetLockedPageFile($id))) {
        push(@menu, ScriptLink('action=pagelock;set=0;id=' . UrlEncode($id),
                               Ts('Unlock %s', $title), 'pagelock 0'));
      } else {
        push(@menu, ScriptLink('action=pagelock;set=1;id=' . UrlEncode($id),
                               Ts('Lock %s', $title), 'pagelock 1'));
      }
    }
    push(@menu, ScriptLink('action=clear', T('Clear Cache'), 'clear')) if $Action{clear};
  }
  foreach my $sub (@MyAdminCode) {
    $sub->($id, \@menu, \@rest);
    $Message .= $q->p($@) if $@; # since this happens before GetHeader is called, the message will be shown
  }
  print GetHeader('', T('Administration')),
    $q->div({-class=>'content admin'}, $q->p(T('Actions:')), $q->ul($q->li(\@menu)),
            $q->p(T('Important pages:')) . $q->ul(map { $q->li(GetPageOrEditLink($_, NormalToFree($_))) if $_;
                                                      } sort keys %AdminPages),
            $q->p(Ts('To mark a page for deletion, put <strong>%s</strong> on the first line.',
                     $DeletedPage)), @rest);
  PrintFooter();
}
sub GetPageParameters {
  # Assemble the query fragment (action, id, revision, cluster) used to
  # link to a particular revision of a page.
  my ($action, $id, $revision, $cluster, $last) = @_;
  $id = FreeToNormal($id);
  my $link = "action=$action;id=" . UrlEncode($id);
  $link .= ";revision=$revision" if $revision and not $last;
  $link .= ';rcclusteronly=' . UrlEncode($cluster) if $cluster;
  return $link;
}
sub GetOldPageLink {
  # Link to an old revision of a page, labelled $name.
  my ($action, $id, $revision, $name, $cluster, $last) = @_;
  return ScriptLink(GetPageParameters($action, $id, $revision, $cluster, $last),
                    NormalToFree($name), 'revision');
}
sub GetSearchLink {
  # Link that searches for the exact phrase $text.
  my ($text, $class, $name, $title) = @_;
  my $id = UrlEncode(QuoteRegexp('"' . $text . '"'));
  $name = UrlEncode($name);
  $text = NormalToFree($text);
  $id =~ s/_/+/g; # Search for url-escaped spaces
  return ScriptLink('search=' . $id, $text, $class, $name, $title);
}
sub ScriptLinkDiff {
  # Link to a diff for page $id; $diff selects the kind of diff, $new
  # and $old select the revisions compared.
  my ($diff, $id, $text, $new, $old) = @_;
  my $action = 'action=browse;diff=' . $diff . ';id=' . UrlEncode($id);
  $action .= ";diffrevision=$old" if $old;
  $action .= ";revision=$new" if $new;
  return ScriptLink($action, $text, 'diff');
}
sub GetAuthor {
  # Describe the author of a change, eg. "Alex from example.org".
  my ($host, $username) = @_;
  if ($username) {
    return $host ? $username . ' ' . Ts('from %s', $host) : $username;
  }
  return T($host); # could be 'Anonymous'
}
sub GetAuthorLink {
  # Link to the author's wiki page; usernames that are not valid page
  # ids are suppressed so that only the host text remains.
  my ($host, $username) = @_;
  $username = FreeToNormal($username);
  my $name = NormalToFree($username);
  if (ValidId($username) ne '') { # ValidId() returns error string
    $username = ''; # Just pretend it isn't there.
  }
  if ($username and $RecentLink) {
    return ScriptLink(UrlEncode($username), $name, 'author', undef, $host);
  } elsif ($username) {
    return $q->span({-class=>'author'}, $name);
  }
  return T($host); # could be 'Anonymous'
}
sub GetHistoryLink {
  # Link to the history of page $id, labelled $text.
  my ($id, $text) = @_;
  my $action = 'action=history;id=' . UrlEncode(FreeToNormal($id));
  return ScriptLink($action, $text, 'history');
}
sub GetRCLink {
  # Link to all changes ever made to page $id, minor edits included.
  my ($id, $text) = @_;
  return ScriptLink('action=rc;all=1;from=1;showedit=1;rcidonly='
                    . UrlEncode(FreeToNormal($id)), $text, 'rc');
}
sub GetHeader {
  # Return the HTTP header plus the top of the HTML page (title, menu,
  # messages); embedded pages only get the message div.
  my ($id, $title, $oldId, $nocache, $status) = @_;
  my $embed = GetParam('embed', $EmbedWiki);
  my $result = GetHttpHeader('text/html', $nocache, $status);
  if ($oldId) {
    $Message .= $q->p('(' . Ts('redirected from %s', GetEditLink($oldId, $oldId)) . ')');
  }
  $result .= GetHtmlHeader(Ts('%s:', $SiteName) . ' ' . UnWiki($title), $id);
  if ($embed) {
    $result .= $q->div({-class=>'header'}, $q->div({-class=>'message'}, $Message)) if $Message;
    return $result;
  }
  $result .= GetHeaderDiv($id, $title, $oldId, $embed);
  return $result . $q->start_div({-class=>'wrapper'});
}
sub GetHeaderDiv {
  # Build the page header div: logo, go-to bar with an optional
  # special-day notice, search form, pending message, and page title.
  my ($id, $title, $oldId, $embed) = @_;
  # Bug fix: this was "my $result .= ..." — concatenating onto a freshly
  # declared (undefined) variable, silently tolerated only because this
  # file disables 'uninitialized' warnings. Plain assignment is intended.
  my $result = $q->start_div({-class=>'header'});
  if (not $embed and $LogoUrl) {
    # If the logo is a file uploaded to this wiki, link to the download.
    my $url = $IndexHash{$LogoUrl} ? GetDownloadLink($LogoUrl, 2) : $LogoUrl;
    $result .= ScriptLink(UrlEncode($HomePage), $q->img({-src=>$url, -alt=>T('[Home]'), -class=>'logo'}), 'logo');
  }
  $result .= $q->start_div({-class=>'menu'});
  if (GetParam('toplinkbar', $TopLinkBar) != 2) {
    $result .= GetGotoBar($id);
    if (%SpecialDays) {
      my ($sec, $min, $hour, $mday, $mon, $year) = gmtime($Now);
      my $today = ($mon + 1) . '-' . $mday;  # keys are "month-day"
      if ($SpecialDays{$today}) {
        $result .= $q->br() . $q->span({-class=>'specialdays'},
                                       $SpecialDays{$today});
      }
    }
  }
  $result .= GetSearchForm() if GetParam('topsearchform', $TopSearchForm) != 2;
  $result .= $q->end_div();
  $result .= $q->div({-class=>'message'}, $Message) if $Message;
  $result .= GetHeaderTitle($id, $title, $oldId);
  $result .= $q->end_div();
  return $result;
}
sub GetHeaderTitle {
  # The page heading: a plain title for generated pages (empty $id),
  # otherwise a link searching for references to this page.
  my ($id, $title, $oldId) = @_;
  return $q->h1($title) unless $id ne '';
  return $q->h1(GetSearchLink($id, '', '', T('Click to search for references to this page')));
}
2003-03-21 13:50:35 +00:00
sub GetHttpHeader {
# Return (and remember having printed) the HTTP header, at most once
# per request. $type is the MIME type; $ts is a timestamp for the
# last-modified header, or 'nocache', or undef; $status is an optional
# HTTP status line; $encoding an optional Content-Encoding. For HEAD
# requests the header is printed and the script exits immediately.
return if $HeaderIsPrinted; # When calling ReportError, we don't know whether HTTP headers have
$HeaderIsPrinted = 1; # already been printed. We want them printed just once.
my ($type, $ts, $status, $encoding) = @_;
$q->charset($type =~ m!^(text/|application/xml)! ? 'utf-8' : ''); # text/plain, text/html, application/xml: UTF-8
2008-03-07 23:27:51 +00:00
my %headers = (-cache_control=>($UseCache < 0 ? 'no-cache' : 'max-age=10'));
# Set $ts when serving raw content that cannot be modified by cookie
# parameters; or 'nocache'; or undef. If you provide a $ts, the last-modified
# header generated will be used by HTTP/1.0 clients. If you provide no $ts,
# the etag header generated will be used by HTTP/1.1 clients. In this
# situation, cookie parameters can influence the look of the page and we
# cannot rely on $LastUpdate. HTTP/1.0 clients will ignore etags. See RFC 2616
# section 13.3.4.
if (GetParam('cache', $UseCache) >= 2 and $ts ne 'nocache') {
$headers{'-last-modified'} = TimeToRFC822($ts) if $ts;
$headers{-etag} = PageEtag();
}
2008-03-07 23:27:51 +00:00
$headers{-type} = GetParam('mime-type', $type);
$headers{-status} = $status if $status;
$headers{-Content_Encoding} = $encoding if $encoding;
2008-03-07 23:27:51 +00:00
# Only send a cookie when the parameters actually changed (see Cookie()).
my $cookie = Cookie();
2014-07-18 01:46:59 +03:00
$headers{-cookie} = $cookie if $cookie;
2008-03-07 23:27:51 +00:00
if ($q->request_method() eq 'HEAD') {
print $q->header(%headers), "\n\n"; # add newlines for FCGI because of exit()
exit; # total shortcut -- HEAD never expects anything other than the header!
}
return $q->header(%headers);
2003-03-21 13:50:35 +00:00
}
sub CookieData {
  # Compare the current cookie parameters against their defaults and
  # against the cookie the browser sent. Returns a flag saying whether
  # the cookie must be rewritten, followed by the hash of non-default
  # parameters (values are URL encoded).
  my ($changed, %params);
  foreach my $key (keys %CookieParameters) {
    my $default = $CookieParameters{$key};
    my $value = GetParam($key, $default);
    $params{$key} = $value if $value ne $default;
    # The cookie changed if an existing value was modified, or if there
    # was no old value and the new value is not the default.
    my $modified = defined $OldCookie{$key}
        ? $value ne $OldCookie{$key}
        : $value ne $default;
    $changed = 1 if $modified;
  }
  return $changed, %params;
}

sub Cookie {
  # Return a fresh session cookie if any parameter changed, else ''.
  my ($changed, %params) = CookieData(); # params are URL encoded
  return '' unless $changed;
  # Join key/value pairs with the encoded field separator: cookie field
  # values must not contain CTL characters.
  my $data = join(UrlEncode($FS), %params);
  return $q->cookie(-name=>$CookieName, -value=>$data, -expires=>'+2y',
                    secure=>$ENV{'HTTPS'}, httponly=>1);
}
sub GetHtmlHeader { # always HTML!
  # Everything up to and including the opening <body> tag: document
  # header, <head> with title, edit link, CSS, robots, feeds, and any
  # extra $HtmlHeaders.
  my ($title, $id) = @_;
  my $edit_link = '';
  $edit_link = '<link rel="alternate" type="application/wiki" title="'
      . T('Edit this page') . '" href="'
      . ScriptUrl('action=edit;id=' . UrlEncode($id)) . '" />' if $id;
  my $head = $q->head($q->title($title) . $edit_link
                      . GetCss() . GetRobots() . GetFeeds() . $HtmlHeaders
                      . '<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />');
  return $DocumentHeader . $head
      . '<body class="' . GetParam('theme', 'default') . '">';
}

sub GetRobots { # NOINDEX for non-browse pages.
  # Only ordinary browse views of the current revision should be indexed.
  my $indexable = GetParam('action', 'browse') eq 'browse'
      && !GetParam('revision', '');
  return $indexable
      ? '<meta name="robots" content="INDEX,FOLLOW" />'
      : '<meta name="robots" content="NOINDEX,FOLLOW" />';
}
sub GetFeeds { # default for $HtmlHeaders
  # Advertise the site-wide RSS feed, a per-page feed when a page is
  # being viewed, and a follow-up feed for the current username.
  my $html = '<link rel="alternate" type="application/rss+xml" title="'
      . QuoteHtml($SiteName) . '" href="' . $ScriptName . '?action=rss" />';
  my $id = GetId(); # runs during Init, not during DoBrowseRequest
  $html .= '<link rel="alternate" type="application/rss+xml" title="'
      . QuoteHtml("$SiteName: $id") . '" href="' . $ScriptName
      . '?action=rss;rcidonly=' . UrlEncode(FreeToNormal($id)) . '" />' if $id;
  my $username = GetParam('username', '');
  $html .= '<link rel="alternate" type="application/rss+xml" '
      . 'title="Follow-ups for ' . NormalToFree($username) . '" '
      . 'href="' . ScriptUrl('action=rss;followup=' . UrlEncode($username))
      . '" />' if $username;
  return $html;
}

sub GetCss { # prevent javascript injection
  # Stylesheets, first match wins: the user's css parameter (with
  # everything after a double quote stripped, to foil injection), the
  # configured $StyleSheet, the wiki's stylesheet page, or the default.
  my @css = map { my $url = $_; $url =~ s/\".*//; $url } split(/\s+/, GetParam('css', ''));
  push(@css, ref $StyleSheet ? @$StyleSheet : $StyleSheet)
      if $StyleSheet and not @css;
  push(@css, "$ScriptName?action=browse;id=" . UrlEncode($StyleSheetPage) . ";raw=1;mime-type=text/css")
      if $IndexHash{$StyleSheetPage} and not @css;
  push(@css, 'https://oddmuse.org/default.css') unless @css;
  return join('', map { qq(<link type="text/css" rel="stylesheet" href="$_" />) } @css);
}
sub PrintPageContent {
# Print the rendered page body. The cached HTML blocks are used when
# the current revision is shown and caching is enabled; otherwise the
# text is rendered (and cached for existing pages). If $comment is
# set, a rendered preview of that comment is appended.
my ($text, $revision, $comment) = @_;
print $q->start_div({-class=>'content browse'});
if ($revision eq '' and $Page{blocks} and GetParam('cache', $UseCache) > 0) {
PrintCache();
} else {
my $savecache = ($Page{revision} > 0 and $revision eq ''); # new page not cached
PrintWikiToHTML($text, $savecache, $revision); # unlocked, with anchors, unlocked
}
if ($comment) {
print $q->start_div({-class=>'preview'}), $q->hr();
print $q->h2(T('Preview:'));
# no caching, current revision, unlocked
PrintWikiToHTML(AddComment('', $comment));
print $q->hr(), $q->h2(T('Preview only, not yet saved')), $q->end_div();
2014-07-18 01:46:59 +03:00
}
print $q->end_div();
}
sub PrintFooter {
  # Print the page footer: optional custom content, every registered
  # footer from @MyFooters, and the closing html. Embedded pages get
  # only the closing html.
  my ($id, $rev, $comment, $page) = @_;
  if (GetParam('embed', $EmbedWiki)) {
    print $q->end_html, "\n";
    return;
  }
  PrintMyContent($id) if defined(&PrintMyContent);
  print $_->(@_) foreach @MyFooters;
  print $q->end_html, "\n";
}

sub WrapperEnd { # called via @MyFooters
  # Close the content div, plus an immediately closed final wrapper.
  return $q->start_div({-class=>'wrapper close'}) . $q->end_div() . $q->end_div(); # closes content
}

sub DefaultFooter { # called via @MyFooters
  # The standard footer: goto bar, edit/history links, edit timestamp,
  # search form, admin warnings, footer note, and timing info.
  my ($id, $rev, $comment, $page) = @_;
  my $html = $q->start_div({-class=>'footer'}) . $q->hr();
  $html .= GetGotoBar($id) if GetParam('toplinkbar', $TopLinkBar) != 1;
  $html .= GetFooterLinks($id, $rev);
  $html .= GetFooterTimestamp($id, $rev, $page);
  $html .= GetSearchForm() if GetParam('topsearchform', $TopSearchForm) != 1;
  # Warn the admin when the wiki database lives below a /tmp/ directory.
  $html .= $q->p($q->strong(T('Warning') . ': ')
                 . Ts('Database is stored in temporary directory %s', $DataDir))
      if $DataDir =~ m|/tmp/|;
  $html .= T($FooterNote) if $FooterNote;
  $html .= $q->p(Ts('%s seconds', (time - $Now))) if GetParam('timing', 0);
  return $html . $q->end_div();
}
sub GetFooterTimestamp {
  # The "Last edited ... by ..." line, with a diff link when diffs are
  # enabled and an earlier revision exists. Empty for generated pages
  # and for the history/edit views.
  my ($id, $rev, $page) = @_;
  $page //= \%Page;
  return '' unless $id and $rev ne 'history' and $rev ne 'edit'
      and $page->{revision};
  my @parts = (($rev eq '' ? T('Last edited') : T('Edited')),
               TimeToText($page->{ts}),
               Ts('by %s', GetAuthorLink($page->{host}, $page->{username})));
  push(@parts, ScriptLinkDiff(2, $id, T('(diff)'), $rev))
      if $UseDiff and $page->{revision} > 1;
  return $q->div({-class=>'time'}, @parts);
}
sub GetFooterLinks {
# Build the footer's "edit bar": comment-page link, edit link (or a
# read-only notice), history, current-revision, contributors, and
# administration links, depending on $id, $rev, and permissions.
2008-03-07 23:27:51 +00:00
my ($id, $rev) = @_;
my @elements;
if ($id and $rev ne 'history' and $rev ne 'edit') {
if ($CommentsPattern) {
if ($id =~ /$CommentsPattern/) {
2014-08-06 12:25:54 +02:00
# On a comment page, $1 is set by the match above — presumably the
# pattern captures the original page's name; verify against the
# $CommentsPattern configuration.
push(@elements, GetPageLink($1, undef, 'original', T('a'))) if $1;
2008-03-07 23:27:51 +00:00
} else {
push(@elements, GetPageLink($CommentsPrefix . $id, undef, 'comment', T('c')));
2008-03-07 23:27:51 +00:00
}
}
if (UserCanEdit($id, 0)) {
if ($rev) { # showing old revision
push(@elements, GetOldPageLink('edit', $id, $rev, Ts('Edit revision %s of this page', $rev)));
} else { # showing current revision
push(@elements, GetEditLink($id, T('Edit this page'), undef, T('e')));
2008-03-07 23:27:51 +00:00
}
} else { # no permission or generated page
2008-03-07 23:27:51 +00:00
push(@elements, ScriptLink('action=password', T('This page is read-only'), 'password'));
}
}
push(@elements, GetHistoryLink($id, T('View other revisions'))) if $Action{history} and $id and $rev ne 'history';
push(@elements, GetPageLink($id, T('View current revision')),
GetRCLink($id, T('View all changes'))) if $Action{history} and $rev ne '';
2014-07-18 01:46:59 +03:00
if ($Action{contrib} and $id and $rev eq 'history') {
push(@elements, ScriptLink("action=contrib;id=" . UrlEncode($id), T('View contributors'), 'contrib'));
}
2008-03-07 23:27:51 +00:00
if ($Action{admin} and GetParam('action', '') ne 'admin') {
my $action = 'action=admin';
$action .= ';id=' . UrlEncode($id) if $id;
push(@elements, ScriptLink($action, T('Administration'), 'admin'));
}
return @elements ? $q->div({-class=>'edit bar'}, @elements) : '';
}
sub GetCommentForm {
# The comment form shown at the bottom of comment pages: textarea,
# username and homepage fields, save and preview buttons. Returns ''
# unless $id is a comment page the user may edit. Every hook in
# @MyFormChanges gets a chance to rewrite the generated HTML.
2008-03-07 23:27:51 +00:00
my ($id, $rev, $comment) = @_;
if ($CommentsPattern ne '' and $id and $rev ne 'history' and $rev ne 'edit'
and $id =~ /$CommentsPattern/ and UserCanEdit($id, 0, 1)) {
my $html = $q->div({-class=>'comment'},
GetFormStart(undef, undef, 'comment'),
$q->p(GetHiddenValue('title', $id),
$q->label({-for=>'aftertext', -accesskey=>T('c')},
T('Add your comment here:')), $q->br(),
GetTextArea('aftertext', $comment, 10)),
$EditNote,
$q->p($q->span({-class=>'username'},
$q->label({-for=>'username'}, T('Username:')), ' ',
$q->textfield(-name=>'username', -id=>'username',
-default=>GetParam('username', ''),
-override=>1, -size=>20, -maxlength=>50)),
$q->span({-class=>'homepage'},
$q->label({-for=>'homepage'}, T('Homepage URL:')), ' ',
$q->textfield(-name=>'homepage', -id=>'homepage',
-default=>GetParam('homepage', ''),
-override=>1, -size=>40, -maxlength=>100))),
$q->p($q->submit(-name=>'Save', -accesskey=>T('s'), -value=>T('Save')), ' ',
$q->submit(-name=>'Preview', -accesskey=>T('p'), -value=>T('Preview'))),
$q->end_form());
# Let registered hooks modify the form before it is returned.
foreach my $sub (@MyFormChanges) {
$html = $sub->($html, 'comment');
}
return $html;
2008-03-07 23:27:51 +00:00
}
return '';
2003-03-21 13:50:35 +00:00
}
sub GetFormStart {
  # Open a multipart form posting back to the wiki script. The first
  # argument is ignored (kept for historical call sites); the method
  # defaults to 'post' and the CSS class to 'form'.
  my ($ignore, $method, $class) = @_;
  return $q->start_multipart_form(-method=>($method || 'post'),
                                  -action=>$FullUrl,
                                  -accept_charset=>'utf-8',
                                  -class=>($class || 'form'));
}
sub GetSearchForm {
# The search form: the search field, a replace/delete block for admins
# who already searched, an optional page-name filter, and an optional
# language filter.
2014-11-23 21:22:29 +01:00
my $html = GetFormStart(undef, 'get', 'search') . $q->start_p;
$html .= $q->label({-for=>'search'}, T('Search:')) . ' '
. $q->textfield(-name=>'search', -id=>'search', -size=>20, -accesskey=>T('f')) . ' ';
2015-03-06 13:17:40 +01:00
if (GetParam('search') ne '' and UserIsAdmin()) { # see DoBrowseRequest
$html .= $q->label({-for=>'replace'}, T('Replace:')) . ' '
. $q->textfield(-name=>'replace', -id=>'replace', -size=>20) . ' '
. $q->label({-for=>'delete', -title=>'If you want to replace matches with the empty string'}, T('Delete')) . ' '
. $q->input({-type=>'checkbox', -name=>'delete'})
. $q->submit('preview', T('Preview'));
2008-03-07 23:27:51 +00:00
}
if (GetParam('matchingpages', $MatchingPages)) {
$html .= $q->label({-for=>'matchingpage'}, T('Filter:')) . ' '
. $q->textfield(-name=>'match', -id=>'matchingpage', -size=>20) . ' ';
}
2008-03-07 23:27:51 +00:00
if (%Languages) {
$html .= $q->label({-for=>'searchlang'}, T('Language:')) . ' '
. $q->textfield(-name=>'lang', -id=>'searchlang', -size=>10, -default=>GetParam('lang', '')) . ' ';
2008-03-07 23:27:51 +00:00
}
2014-11-23 21:22:29 +01:00
$html .= $q->submit('dosearch', T('Go!')) . $q->end_p . $q->end_form;
return $html;
}
sub GetGotoBar { # ignore $id parameter
  # The navigation bar: links to the user's goto-bar pages plus any
  # extra markup in $UserGotoBar.
  my @links = map { GetPageLink($_) } @UserGotoBarPages;
  return $q->span({-class=>'gotobar bar'}, @links, $UserGotoBar);
}
sub PrintHtmlDiff {
# Print an HTML diff for the open page. $type is 1 for "major" and 2
# for "minor" diffs; $old is an optional explicit old revision; $page
# defaults to the global %Page; $current to the page's revision. A
# cached diff is used when possible, otherwise it is computed from the
# kept revisions.
my ($type, $old, $page, $current) = @_;
$page //= \%Page;
$current //= $page->{revision};
$type = 2 if $old or $page->{revision} != $current; # explicit revisions means minor diffs!
my $summary = $page->{$type == 1 ? 'lastmajorsummary' : 'summary'};
2008-03-07 23:27:51 +00:00
my $intro = T('Last edit');
my $diff;
# use the cached diff if possible
if (not $old or $old == $page->{$type == 1 ? 'lastmajor' : 'revision'} - 1) {
$diff = GetCacheDiff($type == 1 ? 'major' : 'minor', $page);
$old = $page->{$type == 1 ? 'lastmajor' : 'revision'} - 1 if not $old;
}
# if there was no cached diff: compute it, and new intro
if (not $diff and $old > 0) {
($diff, my $keptPage) = GetKeptDiff($page->{text}, $old);
my $to = $page->{revision} != $current ? Ts('revision %s', $page->{revision}) : T('current revision');
$intro = Tss('Difference between revision %1 and %2', $old, $to);
}
# if this is the last major diff and there are minor diffs to look at, and we
# didn't request a particular old revision
if ($type == 1 and $page->{lastmajor} and $page->{lastmajor} != $current) {
$intro = Ts('Last major edit (%s)', ScriptLinkDiff(2, $OpenPageName, T('later minor edits'),
undef, $page->{lastmajor} || 1));
2008-03-07 23:27:51 +00:00
}
# Translate the "Changed:"/"Deleted:"/"Added:" headers produced by
# ImproveDiff (cached diffs store them untranslated).
$diff =~ s!<p><strong>(.*?)</strong></p>!'<p><strong>' . T($1) . '</strong></p>'!eg;
2014-07-18 01:46:59 +03:00
$diff ||= T('No diff available.');
print $q->div({-class=>'diff'}, $q->p($q->b($intro)),
$summary ? $q->p({-class=>'summary'}, T('Summary:') . ' ' . QuoteHtml($summary)) : '',
$diff);
2003-03-21 13:50:35 +00:00
}
sub GetCacheDiff {
  # Return the cached diff of the given $type ('major' or 'minor') for
  # $page. A cached major diff of '1' means it equals the minor diff.
  my ($type, $page) = @_;
  my $cached = $page->{"diff-$type"};
  return $cached eq '1' ? $page->{"diff-minor"} : $cached;
}

sub GetKeptDiff {
  # Diff the text of kept revision $revision (default 1) against $new.
  # Returns the HTML diff (or an explanatory message, or '' when the
  # revision is unavailable) plus the kept revision's page data.
  my ($new, $revision) = @_;
  $revision ||= 1;
  my ($revisionPage, $rev) = GetTextRevision($revision, 1); # quiet
  return ('', $revisionPage) unless $rev;
  return (T("The two revisions are the same."), $revisionPage)
      if $revisionPage->{text} eq $new;
  return (GetDiff($revisionPage->{text}, $new, $rev), $revisionPage);
}
sub DoDiff { # Actually call the diff program
# Write both texts to temporary files and run the external diff(1) on
# them under the 'diff' lock; returns diff's output ('' if the lock
# could not be taken). Arguments: ($old_text, $new_text).
2008-03-07 23:27:51 +00:00
CreateDir($TempDir);
my $oldName = "$TempDir/old";
my $newName = "$TempDir/new";
RequestLockDir('diff') or return '';
WriteStringToFile($oldName, $_[0]);
WriteStringToFile($newName, $_[1]);
2016-07-27 17:35:10 +02:00
# "--" stops option parsing and \Q...\E quotes shell metacharacters.
my $command = encode_utf8("diff -- \Q$oldName\E \Q$newName\E");
my $diff_out = decode_utf8(qx($command));
2008-03-07 23:27:51 +00:00
ReleaseLockDir('diff');
$diff_out =~ s/\n\K\\ No newline.*\n//g; # Get rid of common complaint.
2008-03-07 23:27:51 +00:00
# No need to unlink temp files--next diff will just overwrite.
return $diff_out;
}
2003-03-21 13:50:35 +00:00
sub GetDiff {
# HTML diff between $old and $new (the text of revision $revision).
# If either side is an uploaded file, only the old revision is shown
# (as a download link when it is a file).
2008-03-07 23:27:51 +00:00
my ($old, $new, $revision) = @_;
my $old_is_file = (TextIsFile($old))[0] || '';
my $old_is_image = ($old_is_file =~ /^image\//);
my $new_is_file = TextIsFile($new);
if ($old_is_file or $new_is_file) {
return $q->p($q->strong(T('Old revision:')))
. $q->div({-class=>'old'}, # don't print new revision, because that's the one that gets shown!
$q->p($old_is_file ? GetDownloadLink($OpenPageName, $old_is_image, $revision) : $old))
2008-03-07 23:27:51 +00:00
}
# Normalize line endings before diffing.
$old =~ s/[\r\n]+/\n/g;
$new =~ s/[\r\n]+/\n/g;
return ImproveDiff(DoDiff($old, $new));
}
sub ImproveDiff { # NO NEED TO BE called within a diff lock
# Turn plain diff(1) output into HTML: hunk headers become
# "Changed:"/"Deleted:"/"Added:" paragraphs, changed hunks get
# word-level highlighting, and added/deleted hunks are wrapped in
# 'new'/'old' divs.
2008-03-07 23:27:51 +00:00
my $diff = QuoteHtml(shift);
$diff =~ tr/\r//d;
# Split on hunk headers such as "5,7c5" (kept as separate elements).
my @hunks = split (/^(\d+,?\d*[adc]\d+,?\d*\n)/m, $diff);
my $result = shift (@hunks); # intro
2014-07-10 15:29:50 +03:00
while ($#hunks > 0) { # at least one header and a real hunk
my $header = shift (@hunks);
$header =~ s|^(\d+.*c.*)|<p><strong>Changed:</strong></p>| # T('Changed:')
or $header =~ s|^(\d+.*d.*)|<p><strong>Deleted:</strong></p>| # T('Deleted:')
2009-03-27 11:13:12 +00:00
or $header =~ s|^(\d+.*a.*)|<p><strong>Added:</strong></p>|; # T('Added:')
2014-07-10 15:29:50 +03:00
$result .= $header;
my $chunk = shift (@hunks);
# A changed hunk has old and new text separated by a "---" line.
my ($old, $new) = split (/\n---\n/, $chunk, 2);
if ($old and $new) {
($old, $new) = DiffMarkWords($old, $new);
$result .= "$old<p><strong>to</strong></p>\n$new"; # T('to')
} else {
2014-07-18 01:46:59 +03:00
# Pure addition ("&gt; ") or pure deletion ("&lt; ").
if (substr($chunk, 0, 2) eq '&g') {
2014-07-10 15:29:50 +03:00
$result .= DiffAddPrefix(DiffStripPrefix($chunk), '&gt; ', 'new');
2008-03-07 23:27:51 +00:00
} else {
2014-07-10 15:29:50 +03:00
$result .= DiffAddPrefix(DiffStripPrefix($chunk), '&lt; ', 'old');
2008-03-07 23:27:51 +00:00
}
}
2014-07-10 15:29:50 +03:00
}
2008-03-07 23:27:51 +00:00
return $result;
2003-03-21 13:50:35 +00:00
}
sub DiffMarkWords {
# Highlight the words that differ between the old and new text of a
# changed hunk: diff the word lists of both sides and wrap changed
# word ranges in <strong class="changes">. Returns the two marked-up
# sides wrapped in 'old'/'new' divs.
2008-03-07 23:27:51 +00:00
my ($old, $new) = map { DiffStripPrefix($_) } @_;
2014-07-18 01:46:59 +03:00
# Diff one word per line; keep only the hunk header lines (\d...).
my @diffs = grep(/^\d/, split(/\n/, DoDiff(join("\n", split(/\s+|\b/, $old)) . "\n",
join("\n", split(/\s+|\b/, $new)) . "\n")));
2008-03-07 23:27:51 +00:00
foreach my $diff (reverse @diffs) { # so that new html tags don't confuse word counts
my ($start1, $end1, $type, $start2, $end2) = $diff =~ /^(\d+),?(\d*)([adc])(\d+),?(\d*)$/gm;
2008-03-07 23:27:51 +00:00
if ($type eq 'd' or $type eq 'c') {
2014-07-18 01:46:59 +03:00
$end1 ||= $start1;
$old = DiffHtmlMarkWords($old, $start1, $end1);
2008-03-07 23:27:51 +00:00
}
if ($type eq 'a' or $type eq 'c') {
2014-07-18 01:46:59 +03:00
$end2 ||= $start2;
$new = DiffHtmlMarkWords($new, $start2, $end2);
2008-03-07 23:27:51 +00:00
}
}
return (DiffAddPrefix($old, '&lt; ', 'old'),
2014-07-18 01:46:59 +03:00
DiffAddPrefix($new, '&gt; ', 'new'));
2003-03-21 13:50:35 +00:00
}
sub DiffHtmlMarkWords {
# Wrap words $start..$end (1-based, counting word/separator pairs) of
# $text in <strong class="changes">...</strong>.
2014-07-18 01:46:59 +03:00
my ($text, $start, $end) = @_;
2008-03-07 23:27:51 +00:00
my @fragments = split(/(\s+|\b)/, $text);
splice(@fragments, 2 * ($start - 1), 0, '<strong class="changes">');
splice(@fragments, 2 * $end, 0, '</strong>');
my $result = join('', @fragments);
# The word split can cut HTML entities like &amp; in half around the
# inserted tags; move the tags out of the entity again.
$result =~ s!&<(/?)strong([^>]*)>(amp|[gl]t);!<$1strong$2>&$3;!g;
$result =~ s!&(amp|[gl]t)<(/?)strong([^>]*)>;!&$1;<$2strong$3>!g;
return $result;
}
2003-03-21 13:50:35 +00:00
sub DiffStripPrefix {
  # Remove the HTML-quoted "< " / "> " diff markers at line starts.
  my $text = shift;
  $text =~ s/^&[lg]t; //gm;
  return $text;
}

sub DiffAddPrefix {
  # Prefix every line of $str with $prefix and wrap the whole thing in
  # a div of the given CSS class, joining the lines with <br>.
  my ($str, $prefix, $class) = @_;
  my @lines = map { $prefix . $_ } split(/\n/, $str);
  return $q->div({-class=>$class}, $q->p(join($q->br(), @lines)));
}
sub ParseData {
  # Parse a page or keep file: "key: value" pairs where value
  # continuation lines are indented with a single tab. Returns a hash
  # in list context and a hash reference in scalar context (the list
  # form exists for compatibility).
  my $data = shift;
  my %field;
  while ($data =~ /(\S+?): (.*?)(?=\n[^ \t]|\Z)/gs) {
    my ($key, $value) = ($1, $2);
    $value =~ s/\n\t/\n/g; # undo continuation-line escaping
    $field{$key} = $value;
  }
  # return unless %field; # undef instead of empty hash # TODO should we do that?
  return wantarray ? %field : \%field;
}
sub OpenPage { # Sets global variables
# Load page $id into the globals %Page and $OpenPageName. A page that
# does not exist yields an empty %Page with revision 0 and the current
# timestamp. Re-opening the already open page is a no-op.
2008-03-07 23:27:51 +00:00
my $id = shift;
2014-07-10 15:29:50 +03:00
return if $OpenPageName eq $id;
2008-03-07 23:27:51 +00:00
if ($IndexHash{$id}) {
%Page = %{ParseData(ReadFileOrDie(GetPageFile($id)))};
2008-03-07 23:27:51 +00:00
} else {
%Page = ();
$Page{ts} = $Now;
$Page{revision} = 0;
}
$OpenPageName = $id;
2003-03-21 13:50:35 +00:00
}
sub GetTextAtTime { # call with opened page, return $minor if all pages between now and $ts are minor!
# Find the text the open page had at time $ts. Returns (text, minor,
# oldest_ts): $minor is true only if every edit since $ts was minor;
# the third value is 0, or the timestamp of the oldest kept revision
# when even that revision is newer than $ts.
2008-03-07 23:27:51 +00:00
my $ts = shift;
my $minor = $Page{minor};
return ($Page{text}, $minor, 0) if $Page{ts} <= $ts; # current page is old enough
return ($DeletedPage, $minor, 0) if $Page{revision} == 1 and $Page{ts} > $ts; # created after $ts
my $keep = {}; # info may be needed after the loop
2008-03-07 23:27:51 +00:00
# Walk the kept revisions from newest to oldest.
foreach my $revision (GetKeepRevisions($OpenPageName)) {
$keep = GetKeptRevision($revision);
# $minor = 0 unless defined $keep; # TODO?
$minor = 0 if not $keep->{minor} and $keep->{ts} >= $ts; # ignore keep{minor} if keep{ts} is too old
return ($keep->{text}, $minor, 0) if $keep->{ts} <= $ts;
2008-03-07 23:27:51 +00:00
}
return ($DeletedPage, $minor, 0) if $keep->{revision} == 1; # then the page was created after $ts!
return ($keep->{text}, $minor, $keep->{ts}); # the oldest revision available is not old enough
2003-03-21 13:50:35 +00:00
}
sub GetTextRevision {
# Return the page data for the requested revision of the open page.
# Falls back to the current %Page (with an explanatory $Message unless
# $quiet) when the revision is missing. In list context also returns
# the effective revision number ('' when falling back).
2008-03-07 23:27:51 +00:00
my ($revision, $quiet) = @_;
$revision =~ s/\D//g; # Remove non-numeric chars
return wantarray ? (\%Page, $revision) : \%Page unless $revision and $revision ne $Page{revision};
my $keep = GetKeptRevision($revision);
if (not defined $keep) {
2008-03-07 23:27:51 +00:00
$Message .= $q->p(Ts('Revision %s not available', $revision)
2014-07-18 01:46:59 +03:00
. ' (' . T('showing current revision instead') . ')') unless $quiet;
return wantarray ? (\%Page, '') : \%Page;
2008-03-07 23:27:51 +00:00
}
$Message .= $q->p(Ts('Showing revision %s', $revision)) unless $quiet;
return wantarray ? ($keep, $revision) : $keep;
}
sub GetPageContent {
  # The raw text of page $id, or '' when the page does not exist.
  my $id = shift;
  return '' unless $IndexHash{$id};
  return ParseData(ReadFileOrDie(GetPageFile($id)))->{text};
}

sub GetKeptRevision { # Call after OpenPage
  # Parsed data of the given kept revision of the open page, or undef
  # when the keep file cannot be read.
  my ($status, $data) = ReadFile(GetKeepFile($OpenPageName, (shift)));
  return ParseData($data) if $status;
  return;
}
sub GetPageFile {
  # Filename of the current revision of page $id.
  my ($id) = @_;
  return $PageDir . '/' . $id . '.pg';
}

sub GetKeepFile {
  # Filename of kept revision $revision of page $id.
  my ($id, $revision) = @_;
  die "No revision for $id" unless $revision; #FIXME
  return GetKeepDir($id) . "/$revision.kp";
}

sub GetKeepDir {
  # Directory holding the kept revisions of page $id.
  my $id = shift;
  die 'No id' unless $id; #FIXME
  return "$KeepDir/$id";
}

sub GetKeepFiles {
  # All keep files of a page: files such as 1.kp, 2.kp, etc.
  return Glob(GetKeepDir(shift) . '/*.kp');
}

sub GetKeepRevisions {
  # The kept revision numbers of a page, newest first.
  my @revisions = sort { $b <=> $a }
      map { m/([0-9]+)\.kp$/; $1 } GetKeepFiles(shift);
  return @revisions;
}
2003-03-21 13:50:35 +00:00
# Always call SavePage within a lock.
sub SavePage { # updating the cache will not change timestamp and revision!
# Write the global %Page to its page file. Refuses to run without an
# open page name or without a revision.
2014-07-18 01:46:59 +03:00
ReportError(T('Cannot save a nameless page.'), '400 BAD REQUEST', 1) unless $OpenPageName;
2008-03-07 23:27:51 +00:00
ReportError(T('Cannot save a page without revision.'), '400 BAD REQUEST', 1) unless $Page{revision};
CreateDir($PageDir);
2008-03-07 23:27:51 +00:00
WriteStringToFile(GetPageFile($OpenPageName), EncodePage(%Page));
2003-03-21 13:50:35 +00:00
}
sub SaveKeepFile {
# Archive the current revision of the open page as a keep file,
# stripping data that only makes sense for the current revision.
2008-03-07 23:27:51 +00:00
return if ($Page{revision} < 1); # Don't keep 'empty' revision
delete $Page{blocks}; # delete some info from the page
2008-03-07 23:27:51 +00:00
delete $Page{flags};
delete $Page{'diff-major'};
delete $Page{'diff-minor'};
$Page{'keep-ts'} = $Now; # expire only $KeepDays from $Now!
CreateDir($KeepDir);
CreateDir(GetKeepDir($OpenPageName));
2008-03-07 23:27:51 +00:00
WriteStringToFile(GetKeepFile($OpenPageName, $Page{revision}), EncodePage(%Page));
2003-03-21 13:50:35 +00:00
}
sub EncodePage {
  # Serialize key/value pairs into the page file format: one
  # "key: value" line per pair, with newlines in values escaped.
  my @data = @_;
  my $result = '';
  while (@data) {
    my $key = shift @data;
    my $value = EscapeNewlines(shift @data);
    $result .= $key . ': ' . $value . "\n";
  }
  return $result;
}

sub EscapeNewlines {
  # Mark continuation lines with a leading tab. Modifies the caller's
  # variable in place ($_[0] is an alias) and also returns it.
  $_[0] =~ s/\n/\n\t/g;
  return $_[0];
}
sub ExpireAllKeepFiles {
# Walk every page: delete pages that qualify for deletion, expire old
# keep files for the rest, printing a progress line per page.
foreach my $name (AllPagesList()) {
2015-05-01 13:32:51 +03:00
print $q->br(), GetPageLink($name);
OpenPage($name);
my $delete = PageDeletable();
if ($delete) {
my $status = DeletePage($OpenPageName);
print ' ', ($status ? T('not deleted:') . ' ' . $status : T('deleted'));
} else {
ExpireKeepFiles();
}
}
}
sub ExpireKeepFiles { # call with opened page
# Remove keep files older than $KeepDays, but (when $KeepMajor is set)
# never the last major revision.
2008-03-07 23:27:51 +00:00
return unless $KeepDays;
my $expirets = $Now - ($KeepDays * 86400); # 24*60*60
foreach my $revision (GetKeepRevisions($OpenPageName)) {
my $keep = GetKeptRevision($revision);
next if $keep->{'keep-ts'} >= $expirets;
next if $KeepMajor and $keep->{revision} == $Page{lastmajor};
Unlink(GetKeepFile($OpenPageName, $revision));
2008-03-07 23:27:51 +00:00
}
2003-03-21 13:50:35 +00:00
}
sub ReadFile {
  # Slurp a UTF-8 encoded file. Returns (1, $content) on success and
  # (0, '') when the file cannot be opened ($! is left for the caller).
  my $file = encode_utf8(shift);
  open(my $IN, '<:encoding(UTF-8)', $file) or return (0, '');
  my $data = do { local $/; <$IN> }; # read the complete file
  close $IN;
  return (1, $data);
}

sub ReadFileOrDie {
  # Slurp a file or abort the request with a 500 error.
  my ($file) = @_;
  my ($status, $data) = ReadFile($file);
  ReportError(Ts('Cannot open %s', $file) . ": $!", '500 INTERNAL SERVER ERROR')
      unless $status;
  return $data;
}

sub WriteStringToFile {
  # Write $string to $file as UTF-8, replacing previous content; abort
  # the request on failure.
  my ($file, $string) = @_;
  open(my $OUT, '>:encoding(UTF-8)', encode_utf8($file))
    or ReportError(Ts('Cannot write %s', $file) . ": $!", '500 INTERNAL SERVER ERROR');
  print $OUT $string;
  close($OUT);
}

sub AppendStringToFile {
  # Append $string to $file as UTF-8, creating it if necessary; abort
  # the request on failure.
  my ($file, $string) = @_;
  open(my $OUT, '>>:encoding(UTF-8)', encode_utf8($file))
    or ReportError(Ts('Cannot write %s', $file) . ": $!", '500 INTERNAL SERVER ERROR');
  print $OUT $string;
  close($OUT);
}
# Thin wrappers around the file built-ins. File names inside this wiki
# are Perl character strings, so they are encoded to UTF-8 bytes before
# reaching the filesystem (and glob results are decoded on the way back).
sub IsFile { return -f encode_utf8(shift); }
sub IsDir { return -d encode_utf8(shift); }
sub ZeroSize { return -z encode_utf8(shift); }
sub Unlink { return unlink(map { encode_utf8($_) } @_); }
sub Modified { return (stat(encode_utf8(shift)))[9]; }
sub Glob { return map { decode_utf8($_) } bsd_glob(encode_utf8(shift)); }
sub ChangeMod { return chmod(shift, map { encode_utf8($_) } @_); }
sub Rename { return rename(encode_utf8($_[0]), encode_utf8($_[1])); }
sub RemoveDir { return rmdir(encode_utf8(shift)); }
sub ChangeDir { return chdir(encode_utf8(shift)); }
sub CreateDir {
  # Ensure the directory $newdir exists (mode 0775); abort the request
  # when it cannot be created.
  my ($newdir) = @_;
  return if IsDir($newdir);
  mkdir(encode_utf8($newdir), 0775)
    or ReportError(Ts('Cannot create %s', $newdir) . ": $!", '500 INTERNAL SERVER ERROR');
}

sub GetLockedPageFile {
  # Name of the edit-lock file for page $id.
  my $id = shift;
  return $PageDir . '/' . $id . '.lck';
}
sub RequestLockDir {
# Try to acquire the lock directory $name: $tries attempts (default 4)
# with $wait seconds between them (default 2). When $error is set, a
# failure aborts the request with a 503 instead of returning 0. An
# expired lock is broken once and the request retried ($retried guards
# against infinite recursion). Returns 1 on success.
my ($name, $tries, $wait, $error, $retried) = @_;
2014-07-18 01:46:59 +03:00
$tries ||= 4;
$wait ||= 2;
2008-03-07 23:27:51 +00:00
CreateDir($TempDir);
my $lock = $LockDir . $name;
my $n = 0;
# Cannot use CreateDir because we don't want to skip mkdir if the directory
# already exists.
while (mkdir(encode_utf8($lock), 0555) == 0) {
2008-03-07 23:27:51 +00:00
if ($n++ >= $tries) {
my $ts = Modified($lock);
if ($Now - $ts > $LockExpiration and $LockExpires{$name} and not $retried) { # XXX should we remove this now?
ReleaseLockDir($name); # try to expire lock (no checking)
return 1 if RequestLockDir($name, undef, undef, undef, 1);
}
2008-03-07 23:27:51 +00:00
return 0 unless $error;
ReportError(Ts('Could not get %s lock', $name) . ": $!. ",
'503 SERVICE UNAVAILABLE', undef,
Ts('The lock was created %s.', CalcTimeSince($Now - $ts))
. ($retried && ' ' . T('Maybe the user running this script is no longer allowed to remove the lock directory?'))
. ' ' . T('Sometimes locks are left behind if a job crashes.') . ' '
. ($Now - $ts < 600 ? T('After ten minutes, you could try to unlock the wiki.')
: ScriptLink('action=unlock', T('Unlock Wiki'), 'unlock')));
2008-03-07 23:27:51 +00:00
}
sleep($wait);
}
# Remember the lock so HandleSignals can clean it up on a fatal signal.
$Locks{$name} = 1;
return 1;
2003-03-21 13:50:35 +00:00
}
sub HandleSignals {
  # On a fatal signal, clean up every lock we hold and exit.
  my ($signal) = @_; # TODO should we pass it to CleanLock?
  CleanLock($_) foreach keys %Locks;
  exit; # let's count it as graceful exit
}

sub CleanLock {
  # Run the lock's registered cleanup hook (if any), then drop the lock.
  my ($name) = @_;
  $LockCleaners{$name}->() if exists $LockCleaners{$name};
  ReleaseLockDir($name); # TODO should we log this?
}

sub ReleaseLockDir {
  # Remove a lock directory without checking for success. Before fixing
  # this, make sure we only call it when we know the lock exists.
  my $name = shift;
  RemoveDir($LockDir . $name);
  delete $Locks{$name};
}

sub RequestLockOrError {
  # Take the main lock: 10 tries, 3 second wait, die on error.
  return RequestLockDir('main', 10, 3, 1);
}

sub ReleaseLock {
  # Drop the main lock.
  ReleaseLockDir('main');
}
sub ForceReleaseLock {
  # Release every lock matching $pattern. Returns true when any lock
  # had to be broken, i.e. we could not acquire it normally first.
  my $pattern = shift;
  my $forced;
  foreach my $name (Glob($pattern)) {
    # First try to obtain the lock (in case of a normal edit lock).
    $forced = 1 unless RequestLockDir($name, 5, 3, 0);
    # Release it even if we didn't get it. This should not happen.
    ReleaseLockDir($name);
  }
  return $forced;
}

sub DoUnlock {
  # The 'unlock' action: force-release all known locks and report which
  # ones actually had to be broken.
  print GetHeader('', T('Unlock Wiki'), undef, 'nocache');
  print $q->p(T('This operation may take several seconds...'));
  my $message = '';
  foreach my $lock (@KnownLocks) {
    $message .= $q->p(Ts('Forced unlock of %s lock.', $lock))
        if ForceReleaseLock($lock);
  }
  print $message || $q->p(T('No unlock required.'));
  PrintFooter();
}
sub CalcDay {
  # Format an epoch timestamp as YYYY-MM-DD (UTC).
  my ($mday, $mon, $year) = (gmtime(shift))[3, 4, 5];
  return sprintf('%4d-%02d-%02d', $year + 1900, $mon + 1, $mday);
}

sub CalcTime {
  # Format an epoch timestamp as HH:MM UTC.
  my ($min, $hour) = (gmtime(shift))[1, 2];
  return sprintf('%02d:%02d UTC', $hour, $min);
}

sub CalcTimeSince {
  # Human-readable age for a time span given in seconds.
  my $total = shift;
  return Ts('%s hours ago', int($total / 3600)) if $total >= 7200;
  return T('1 hour ago')                        if $total >= 3600;
  return Ts('%s minutes ago', int($total / 60)) if $total >= 120;
  return T('1 minute ago')                      if $total >= 60;
  return Ts('%s seconds ago', int($total))      if $total >= 2;
  return T('1 second ago')                      if $total == 1;
  return T('just now');
}
# Format an epoch timestamp as "YYYY-MM-DD HH:MM UTC".
sub TimeToText {
  my $t = shift;
  return CalcDay($t) . ' ' . CalcTime($t);
}
sub TimeToW3 { # Complete date plus hours and minutes: YYYY-MM-DDThh:mmTZD (eg 1997-07-16T19:20+01:00)
  my ($sec, $min, $hour, $mday, $mon, $year) = gmtime(shift); # use special UTC designator ("Z")
  return sprintf('%4d-%02d-%02dT%02d:%02dZ', $year + 1900, $mon + 1, $mday, $hour, $min);
}
# Format an epoch timestamp per RFC 822, e.g. "Sat, 07 Sep 2002 00:00:01 GMT".
sub TimeToRFC822 {
  my ($sec, $min, $hour, $mday, $mon, $year, $wday) = gmtime(shift);
  return sprintf("%s, %02d %s %04d %02d:%02d:%02d GMT", qw(Sun Mon Tue Wed Thu Fri Sat)[$wday], $mday,
		 qw(Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec)[$mon], $year + 1900, $hour, $min, $sec);
}
# Render a hidden form input with the given name and value.
sub GetHiddenValue {
  my ($name, $value) = @_;
  return $q->input({-type=>"hidden", -name=>$name, -value=>$value});
}
sub FreeToNormal { # trim all spaces and convert them to underlines
  my $id = shift;
  return '' unless $id;
  $id =~ s/ /_/g;
  $id =~ s/__+/_/g;   # collapse runs of underscores
  $id =~ s/^_//;      # no leading underscore
  $id =~ s/_$//;      # no trailing underscore
  return UnquoteHtml($id);
}
# Derive the display title for an RSS item; when short titles are requested
# and $RssStrip is set, strip that prefix from the id but keep any leading
# comments prefix intact.
sub ItemName {
  my $name = shift;
  my $short = GetParam('short', 1);
  return NormalToFree($name) if not $short or not $RssStrip;
  my $wasComment = ($name =~ s/^($CommentsPrefix)//); # strip first so that ^ anchors correctly
  $name =~ s/^$RssStrip//;
  $name = $CommentsPrefix . $name if $wasComment;
  return NormalToFree($name);
}
sub NormalToFree { # returns HTML quoted title with spaces
  my $title = shift;
  $title =~ s/_/ /g;
  return QuoteHtml($title);
}
# Split a CamelCase wiki word into space-separated words ("FooBar" -> "Foo Bar"),
# but only when WikiLinks are enabled and the string is a link pattern.
sub UnWiki {
  my $str = shift;
  return $str unless $WikiLinks and $str =~ /^$LinkPattern$/;
  $str =~ s/([[:lower:]])([[:upper:]])/$1 $2/g;
  return $str;
}
# Action handler: show the edit form for page $id. $newText is the text to
# show when $preview is true; otherwise the requested revision's text is used.
sub DoEdit {
  my ($id, $newText, $preview) = @_;
  UserCanEditOrDie($id);
  my $upload = GetParam('upload', undef);
  if ($upload and not $UploadAllowed and not UserIsAdmin()) {
    ReportError(T('Only administrators can upload files.'), '403 FORBIDDEN');
  }
  OpenPage($id);
  my ($revisionPage, $revision) = GetTextRevision(GetParam('revision', ''), 1); # maybe revision reset!
  my $oldText = $preview ? $newText : $revisionPage->{text};
  my $isFile = TextIsFile($oldText);
  $upload //= $isFile;
  # re-check: $upload may have been derived from the stored page just above
  if ($upload and not $UploadAllowed and not UserIsAdmin()) {
    ReportError(T('Only administrators can upload files.'), '403 FORBIDDEN');
  }
  if ($upload) { # shortcut lots of code
    $revision = '';
    $preview = 0;
  } elsif ($isFile) {
    $oldText = '';
  }
  my $header;
  if ($revision and not $upload) {
    $header = Ts('Editing revision %s of', $revision) . ' ' . NormalToFree($id);
  } else {
    $header = Ts('Editing %s', NormalToFree($id));
  }
  print GetHeader('', $header), $q->start_div({-class=>'content edit'});
  if ($preview and not $upload) {
    print $q->start_div({-class=>'preview'});
    print $q->h2(T('Preview:'));
    PrintWikiToHTML($oldText); # no caching, current revision, unlocked
    print $q->hr(), $q->h2(T('Preview only, not yet saved')), $q->end_div();
  }
  if ($revision) {
    print $q->strong(Ts('Editing old revision %s.', $revision) . ' '
		     . T('Saving this page will replace the latest revision with this text.'))
  }
  print GetEditForm($id, $upload, $oldText, $revision), $q->end_div();
  PrintFooter($id, 'edit');
}
# Build the HTML edit form (text area or file upload) for $page_name.
# Returns the form HTML after letting @MyFormChanges hooks rewrite it.
sub GetEditForm {
  my ($page_name, $upload, $oldText, $revision) = @_;
  my $html = GetFormStart(undef, undef, $upload ? 'edit upload' : 'edit text') # protected by questionasker
    .$q->p(GetHiddenValue("title", $page_name),
	   ($revision ? GetHiddenValue('revision', $revision) : ''),
	   GetHiddenValue('oldtime', GetParam('oldtime', $Page{ts})), # prefer parameter over actual timestamp
	   ($upload ? GetUpload() : GetTextArea('text', $oldText)));
  # reuse the previous summary if the last edit was recent enough
  my $summary = UnquoteHtml(GetParam('summary', ''))
    || ($Now - $Page{ts} < ($SummaryHours * 3600) ? $Page{summary} : '');
  $html .= $q->p(T('Summary:').$q->br().GetTextArea('summary', $summary, 2))
    .$q->p($q->checkbox(-name=>'recent_edit', -checked=>(GetParam('recent_edit', '') eq 'on'),
			-label=>T('This change is a minor edit.')));
  $html .= T($EditNote) if $EditNote; # Allow translation
  my $username = GetParam('username', '');
  $html .= $q->p($q->label({-for=>'username'}, T('Username:')).' '
		 .$q->textfield(-name=>'username', -id=>'username', -default=>$username,
				-override=>1, -size=>20, -maxlength=>50))
    .$q->p($q->submit(-name=>'Save', -accesskey=>T('s'), -value=>T('Save')),
	   ($upload ? '' : ' ' . $q->submit(-name=>'Preview', -accesskey=>T('p'), -value=>T('Preview'))).
	   ' '.$q->submit(-name=>'Cancel', -value=>T('Cancel')));
  if ($upload) {
    $html .= $q->p(ScriptLink('action=edit;upload=0;id=' . UrlEncode($page_name), T('Replace this file with text'), 'upload'));
  } elsif ($UploadAllowed or UserIsAdmin()) {
    $html .= $q->p(ScriptLink('action=edit;upload=1;id=' . UrlEncode($page_name), T('Replace this text with a file'), 'upload'));
  }
  $html .= $q->end_form();
  foreach my $sub (@MyFormChanges) {
    $html = $sub->($html, 'edit', $upload);
  }
  return $html;
}
# Render a textarea form field; defaults to 25 rows if $rows is not given.
sub GetTextArea {
  my ($name, $text, $rows) = @_;
  return $q->textarea(-id=>$name, -name=>$name, -default=>$text, -rows=>$rows || 25, -columns=>78, -override=>1);
}
# Render the file-upload form field with its label.
sub GetUpload {
  my $field = $q->filefield(-name=>'file', -size=>50, -maxlength=>100);
  return T('File to upload:') . ' ' . $field;
}
# Action handler: send the raw content of page $id. Uploaded files are
# base64-decoded and served with their stored MIME type; plain pages are
# served as text/plain.
sub DoDownload {
  my $id = shift;
  OpenPage($id) if ValidIdOrDie($id);
  print $q->header(-status=>'304 NOT MODIFIED') and return if FileFresh(); # FileFresh needs an OpenPage!
  my ($revisionPage, $revision) = GetTextRevision(GetParam('revision', '')); # maybe revision reset!
  my $text = $revisionPage->{text};
  if (my ($type, $encoding) = TextIsFile($text)) {
    my ($data) = $text =~ /^[^\n]*\n(.*)/s; # drop the "#FILE type encoding" header line
    my %allowed = map {$_ => 1} @UploadTypes;
    if (@UploadTypes and not $allowed{$type}) {
      ReportError(Ts('Files of type %s are not allowed.', $type), '415 UNSUPPORTED MEDIA TYPE');
    }
    print GetHttpHeader($type, $Page{ts}, undef, $encoding);
    require MIME::Base64;
    binmode(STDOUT, ":pop:raw"); # need to pop utf8 for Windows users!?
    print MIME::Base64::decode($data);
  } else {
    print GetHttpHeader('text/plain', $Page{ts});
    print $text;
  }
}
# Action handler: show the password page, report the user's current role,
# and offer the password form (unless the site uses no passwords at all).
sub DoPassword {
  my $id = shift;
  print GetHeader('', T('Password')), $q->start_div({-class=>'content password'});
  print $q->p(T('Your password is saved in a cookie, if you have cookies enabled. Cookies may get lost if you connect from another machine, from another account, or using another software.'));
  if (not $AdminPass and not $EditPass) {
    print $q->p(T('This site does not use admin or editor passwords.'));
  } else {
    if (UserIsAdmin()) {
      print $q->p(T('You are currently an administrator on this site.'));
    } elsif (UserIsEditor()) {
      print $q->p(T('You are currently an editor on this site.'));
    } else {
      print $q->p(T('You are a normal user on this site.'));
      if (not GetParam('pwd')) {
	print $q->p(T('You do not have a password set.'));
      } else {
	print $q->p(T('Your password does not match any of the administrator or editor passwords.'));
      }
    }
    print GetFormStart(undef, undef, 'password'),
      $q->p(GetHiddenValue('action', 'password'), T('Password:'), ' ',
	    $q->password_field(-name=>'pwd', -size=>20, -maxlength=>64),
	    $q->hidden(-name=>'id', -value=>$id),
	    $q->submit(-name=>'Save', -accesskey=>T('s'), -value=>T('Save'))),
      $q->end_form;
  }
  if ($id) {
    print $q->p(ScriptLink('action=browse;id=' . UrlEncode($id) . ';time=' . time,
			   Ts('Return to %s', NormalToFree($id))));
  }
  print $q->end_div();
  PrintFooter();
}
# Die with 403 unless the current user is a site editor; returns 1 otherwise.
sub UserIsEditorOrError {
  UserIsEditor()
    or ReportError(T('This operation is restricted to site editors only...'), '403 FORBIDDEN');
  return 1;
}
# Die with 403 unless the current user is an administrator; returns 1 otherwise.
sub UserIsAdminOrError {
  UserIsAdmin()
    or ReportError(T('This operation is restricted to administrators only...'), '403 FORBIDDEN');
  return 1;
}
# Abort with 403 FORBIDDEN unless the current user may edit page $id,
# distinguishing banned users from read-only pages in the error message.
sub UserCanEditOrDie {
  my $id = shift;
  ValidIdOrDie($id);
  return if UserCanEdit($id, 1);
  my $rule = UserIsBanned();
  if ($rule) {
    ReportError(T('Edit Denied'), '403 FORBIDDEN', undef,
		$q->p(T('Editing not allowed: user, ip, or network is blocked.')),
		$q->p(T('Contact the wiki administrator for more information.')),
		$q->p(Ts('The rule %s matched for you.', $rule) . ' '
		      . Ts('See %s for more information.', GetPageLink($BannedHosts))));
  } else {
    ReportError(T('Edit Denied'), '403 FORBIDDEN', undef,
		$q->p(Ts('Editing not allowed: %s is read-only.', NormalToFree($id))));
  }
}
# Decide whether the current user may edit page $id. $editing triggers the
# (more expensive) ban check; $comment marks a comment submission, relevant
# when $EditAllowed >= 3 (comments only).
sub UserCanEdit {
  my ($id, $editing, $comment) = @_;
  return 0 if $id eq 'SampleUndefinedPage' or $id eq T('SampleUndefinedPage')
    or $id eq 'Sample_Undefined_Page' or $id eq T('Sample_Undefined_Page');
  return 1 if UserIsAdmin();
  return 0 if $id ne '' and IsFile(GetLockedPageFile($id));
  return 0 if $LockOnCreation{$id} and not IsFile(GetPageFile($id)); # new page
  return 1 if UserIsEditor();
  return 0 if not $EditAllowed or IsFile($NoEditFile);
  return 0 if $editing and UserIsBanned(); # this call is more expensive
  return 0 if $EditAllowed >= 2 and (not $CommentsPattern or $id !~ /$CommentsPattern/);
  return 1 if $EditAllowed >= 3 and GetParam('recent_edit', '') ne 'on' # disallow minor comments
    and ($comment or (GetParam('aftertext', '') and not GetParam('text', '')));
  return 0 if $EditAllowed >= 3;
  return 1;
}
# Check the visitor's address against the $BannedHosts page.
# Returns the matching regexp (true) when banned, 0 otherwise.
sub UserIsBanned {
  return 0 if GetParam('action', '') eq 'password'; # login is always ok
  my $host = $q->remote_addr();
  foreach (split(/\n/, GetPageContent($BannedHosts))) {
    if (/^\s*([^#]\S+)/) { # all lines except empty lines and comments, trim whitespace
      my $regexp = $1;
      return $regexp if ($host =~ /$regexp/i);
    }
  }
  return 0;
}
# True if the password cookie/parameter matches an administrator password.
sub UserIsAdmin {
  return UserHasPassword(GetParam('pwd', ''), $AdminPass);
}
# True if the user is an editor; administrators implicitly qualify.
sub UserIsEditor {
  return 1 if UserIsAdmin();
  my $pwd = GetParam('pwd', '');
  return UserHasPassword($pwd, $EditPass);
}
# Check $pwd against the whitespace-separated list of passwords in $pass,
# hashing $pwd (with $PassSalt) first when $PassHashFunction is configured.
sub UserHasPassword {
  my ($pwd, $pass) = @_;
  return 0 unless $pass;
  if ($PassHashFunction ne '') {
    no strict 'refs'; # TODO this is kept for compatibility. Feel free to remove it later (comment written on 2015-07-14)
    $pwd = $PassHashFunction->($pwd . $PassSalt);
  }
  foreach (split(/\s+/, $pass)) {
    return 1 if $pwd eq $_;
  }
  return 0;
}
# Check every URL in $str against the regexps on the $BannedContent page.
# Returns a human-readable explanation when a rule matches, 0 otherwise.
sub BannedContent {
  my $str = shift;
  my @urls = $str =~ /$FullUrlPattern/g;
  foreach (split(/\n/, GetPageContent($BannedContent))) {
    # line format: REGEXP # YYYY-MM-DD comment   (date and comment optional)
    next unless m/^\s*([^#]+?)\s*(#\s*(\d\d\d\d-\d\d-\d\d\s*)?(.*))?$/;
    my ($regexp, $comment, $re) = ($1, $4, undef);
    foreach my $url (@urls) {
      eval { $re = qr/$regexp/i; }; # skip invalid regexps silently
      if (defined($re) and $url =~ $re) {
	return Tss('Rule "%1" matched "%2" on this page.', $regexp, $url) . ' '
	  . ($comment ? Ts('Reason: %s.', $comment) : T('Reason unknown.')) . ' '
	  . Ts('See %s for more information.', GetPageLink($BannedContent));
      }
    }
  }
  return 0;
}
# sort() comparator: order page ids ignoring the comments prefix; when a page
# and its comment page would tie, the comment page sorts after the page.
sub SortIndex {
  my ($x, $y) = ($a, $b);
  my $xWasComment = ($x =~ s/^$CommentsPrefix//);
  $y =~ s/^$CommentsPrefix//;
  return ($xWasComment ? 1 : -1) if $x eq $y;
  return $x cmp $y;
}
# Action handler: list all pages, optionally filtered by a regexp and by the
# configured @IndexOptions; raw mode prints plain text without the menu.
sub DoIndex {
  my $raw = GetParam('raw', 0);
  my $match = GetParam('match', '');
  my @pages = ();
  my @menu = ($q->label({-for=>'indexmatch'}, T('Filter:')) . ' '
	      . $q->textfield(-name=>'match', -id=>'indexmatch', -size=>20));
  foreach my $data (@IndexOptions) {
    my ($option, $text, $default, $sub) = @$data;
    my $value = GetParam($option, $default); # HTML checkbox warning!
    $value = 0 if GetParam('manual', 0) and $value ne 'on';
    push(@pages, $sub->()) if $value;
    push(@menu, $q->checkbox(-name=>$option, -checked=>$value, -label=>$text));
  }
  @pages = grep /$match/i, @pages if $match;
  @pages = sort SortIndex @pages;
  if ($raw) {
    print GetHttpHeader('text/plain'); # and ignore @menu
  } else {
    print GetHeader('', T('Index of all pages'));
    push(@menu, GetHiddenValue('manual', 1) . $q->submit(-value=>T('Go!')));
    push(@menu, $q->b(Ts('(for %s)', GetParam('lang', '')))) if GetParam('lang', '');
    print $q->start_div({-class=>'content index'}),
      GetFormStart(undef, 'get', 'index'), GetHiddenValue('action', 'index'),
      $q->p(join($q->br(), @menu)), $q->end_form(),
      $q->h2(Ts('%s pages found.', ($#pages + 1))), $q->start_p();
  }
  PrintPage($_) foreach (@pages);
  print $q->end_p(), $q->end_div() unless $raw;
  PrintFooter() unless $raw;
}
# Print one index entry for page $id: a link normally, or the id/title in raw
# mode; skips pages not matching the requested language.
sub PrintPage {
  my $id = shift;
  my $lang = GetParam('lang', 0);
  if ($lang) {
    OpenPage($id);
    my @languages = split(/,/, $Page{languages});
    # was "next", which exits a sub via the caller's loop and warns; "return"
    # is equivalent here since all callers invoke PrintPage inside a foreach
    return if (@languages and not grep(/$lang/, @languages));
  }
  if (GetParam('raw', 0)) {
    if (GetParam('search', '') and GetParam('context', 1)) {
      print "title: $id\n\n"; # for near links without full search
    } else {
      print $id, "\n";
    }
  } else {
    print GetPageOrEditLink($id, NormalToFree($id)), $q->br();
  }
}
# Return the list of all page ids, using the in-memory cache, then the index
# file, and finally a full directory scan (RefreshIndex) as fallbacks.
sub AllPagesList {
  my $refresh = GetParam('refresh', 0);
  return @IndexList if @IndexList and not $refresh;
  SetParam('refresh', 0) if $refresh;
  return @IndexList if not $refresh and IsFile($IndexFile) and ReadIndex();
  # If open fails just refresh the index
  RefreshIndex();
  return @IndexList;
}
# Load @IndexList / %IndexHash from the index file; returns the list on
# success and the empty list on failure.
sub ReadIndex {
  my ($status, $rawIndex) = ReadFile($IndexFile); # not fatal
  if ($status) {
    @IndexList = split(/ /, $rawIndex);
    %IndexHash = map {$_ => 1} @IndexList;
    return @IndexList;
  }
  return;
}
# Persist the page index as one space-separated line.
sub WriteIndex {
  my $index = join(' ', @IndexList);
  WriteStringToFile($IndexFile, $index);
}
# Rebuild @IndexList / %IndexHash by scanning the page directory, and write
# the index file if we could obtain the index lock.
sub RefreshIndex {
  @IndexList = ();
  %IndexHash = ();
  # If file exists and cannot be changed, error!
  my $locked = RequestLockDir('index', undef, undef, IsFile($IndexFile));
  foreach (Glob("$PageDir/*.pg"), Glob("$PageDir/.*.pg")) {
    next unless m|/.*/(.+)\.pg$|;
    my $id = $1;
    push(@IndexList, $id);
    $IndexHash{$id} = 1;
  }
  WriteIndex() if $locked;
  ReleaseLockDir('index') if $locked;
}
# Register a newly created page id in the index and persist it.
sub AddToIndex {
  my $id = shift;
  $IndexHash{$id} = 1;
  @IndexList = sort keys %IndexHash;
  WriteIndex();
}
# Action handler: search pages, or (for admins) preview/perform a
# search-and-replace when the 'replace' or 'delete' parameters are given.
sub DoSearch {
  my $string = shift || GetParam('search', '');
  my $re = UnquoteHtml($string);
  return DoIndex() if $string eq '';
  eval { qr/$re/ } or $re = quotemeta($re); # fall back to literal search
  my $replacement = GetParam('replace', undef);
  my $raw = GetParam('raw', '');
  my @results;
  if ($replacement or GetParam('delete', 0)) {
    return unless UserIsAdminOrError();
    if (GetParam('preview', '')) { # Preview button was used
      print GetHeader('', Ts('Preview: %s', $string . " &#x2192; " . $replacement));
      print $q->start_div({-class=>'content replacement'});
      print GetFormStart(undef, 'post', 'replace');
      print GetHiddenValue('search', $string);
      print GetHiddenValue('replace', $replacement);
      print GetHiddenValue('delete', GetParam('delete', 0));
      print $q->submit(-value=>T('Go!')) . $q->end_form();
      @results = ReplaceAndDiff($re, UnquoteHtml($replacement));
    } else {
      print GetHeader('', Ts('Replaced: %s', $string . " &#x2192; " . $replacement));
      print $q->start_div({-class=>'content replacement'});
      @results = ReplaceAndSave($re, UnquoteHtml($replacement));
      foreach (@results) {
	PrintSearchResult($_, quotemeta($replacement || $re)); # the replacement is not a valid regex
      }
    }
  } else {
    if ($raw) {
      print GetHttpHeader('text/plain');
      print RcTextItem('title', Ts('Search for: %s', $string)), RcTextItem('date', TimeToText($Now)),
	RcTextItem('link', $q->url(-path_info=>1, -query=>1)), "\n" if GetParam('context', 1);
    } else {
      print GetHeader('', Ts('Search for: %s', $string)), $q->start_div({-class=>'content search'});
      print $q->p({-class=>'links'}, SearchMenu($string));
    }
    @results = SearchTitleAndBody($re, \&PrintSearchResult, SearchRegexp($re));
  }
  print SearchResultCount($#results + 1), $q->end_div() unless $raw;
  PrintFooter() unless $raw;
}
# Link from the search results page to Recent Changes filtered by this search.
sub SearchMenu {
  return ScriptLink('action=rc;rcfilteronly=' . UrlEncode(shift),
		    T('View changes for these pages'));
}
# Render the "N pages found." paragraph for search results.
sub SearchResultCount {
  my $count = shift;
  return $q->p({-class=>'result'}, Ts('%s pages found.', $count));
}
# Without opening the whole page, determine whether page $id stores an
# uploaded file; returns the TextIsFile() result (MIME type) if so.
sub PageIsUploadedFile {
  my $id = shift;
  return if $OpenPageName eq $id; # already open: caller can inspect %Page
  if ($IndexHash{$id}) {
    my $file = GetPageFile($id);
    open(my $FILE, '<:encoding(UTF-8)', encode_utf8($file))
      or ReportError(Ts('Cannot open %s', GetPageFile($id))
		     . ": $!", '500 INTERNAL SERVER ERROR');
    while (defined($_ = <$FILE>) and $_ !~ /^text: /) {
    } # read lines until we get to the text key
    close $FILE;
    return unless length($_) > 6;
    return TextIsFile(substr($_, 6)); # pass "#FILE image/png\n" to the test
  }
}
# Search page titles and bodies for $regex; calls $func->($id, @args) for
# every match and returns the list of matching ids. Uploaded files are only
# matched on title and MIME type, never on their (base64) body.
sub SearchTitleAndBody {
  my ($regex, $func, @args) = @_;
  my @found;
  my $lang = GetParam('lang', '');
  foreach my $id (Filtered($regex, AllPagesList())) {
    my $name = NormalToFree($id);
    my ($text) = PageIsUploadedFile($id); # set to mime-type if this is an uploaded file
    if (not $text) { # not uploaded file, therefore allow searching of page body
      local ($OpenPageName, %Page); # this is local!
      OpenPage($id); # this opens a page twice if it is not uploaded, but that's ok
      if ($lang) {
	my @languages = split(/,/, $Page{languages});
	next if (@languages and not grep(/$lang/, @languages));
      }
      $text = $Page{text};
    }
    if (SearchString($regex, $name . "\n" . $text)) { # the real search code
      push(@found, $id);
      $func->($id, @args) if $func;
    }
  }
  return @found;
}
# Hook point, overridden by extensions such as tags.pl: the default
# implementation ignores the regex and passes every page id through.
sub Filtered {
  my ($regex, @pages) = @_;
  return @pages;
}
# AND-search: split $string into words and "quoted phrases" and require every
# term to match $data case-insensitively. Returns 1 or 0.
sub SearchString {
  my ($string, $data) = @_;
  my @strings = grep /./, $string =~ /\"([^\"]+)\"|(\S+)/g; # skip null entries
  foreach my $str (@strings) {
    return 0 unless ($data =~ /$str/i);
  }
  return 1;
}
# Build a single OR-regexp from search words and "quoted phrases"; literal
# \s sequences are widened to [[:space:]].
sub SearchRegexp {
  my $regexp = join '|', map { index($_, '|') == -1 ? $_ : "($_)" }
    grep /./, shift =~ /\"([^\"]+)\"|(\S+)/g; # this acts as OR
  $regexp =~ s/\\s/[[:space:]]/g;
  return $regexp;
}
# Print one search hit for page $name: title, highlighted snippet (or MIME
# type for uploads), size, timestamp and author.
sub PrintSearchResult {
  my ($name, $regex) = @_;
  return PrintPage($name) if not GetParam('context', 1);
  OpenPage($name); # should be open already, just making sure!
  my $text = $Page{text};
  my ($type) = TextIsFile($text); # MIME type if an uploaded file
  my %entry;
  # get the page, filter it, remove all tags
  $text =~ s/$FS//g; # Remove separators (paranoia)
  $text =~ s/[\s]+/ /g; # Shrink whitespace
  $text =~ s/([-_=\\*\\.]){10,}/$1$1$1$1$1/g ; # e.g. shrink "----------"
  $entry{title} = $name;
  $entry{description} = $type || SearchHighlight(QuoteHtml(SearchExtract($text, $regex)), QuoteHtml($regex));
  $entry{size} = int((length($text) / 1024) + 1) . 'K';
  $entry{'last-modified'} = TimeToText($Page{ts});
  $entry{username} = $Page{username};
  $entry{host} = $Page{host};
  PrintSearchResultEntry(\%entry);
}
# Render a single search result entry (hashref with title, description, size,
# last-modified, username, host) as plain text in raw mode or as HTML.
sub PrintSearchResultEntry {
  my %entry = %{(shift)}; # get value from reference
  if (GetParam('raw', 0)) {
    $entry{generator} = GetAuthor($entry{host}, $entry{username});
    foreach my $key (qw(title description size last-modified generator username host)) {
      print RcTextItem($key, $entry{$key});
    }
    print RcTextItem('link', "$ScriptName?$entry{title}"), "\n";
  } else {
    my $author = GetAuthorLink($entry{host}, $entry{username});
    $author ||= $entry{generator};
    my $id = $entry{title};
    my ($class, $resolved, $title, $exists) = ResolveId($id);
    my $text = NormalToFree($id);
    my $result = $q->span({-class=>'result'}, ScriptLink(UrlEncode($resolved), $text, $class, undef, $title));
    my $description = $entry{description};
    $description = $q->br() . $description if $description;
    my $info = $entry{size};
    $info .= ' - ' if $info;
    $info .= T('last updated') . ' ' . $entry{'last-modified'} if $entry{'last-modified'};
    $info .= ' ' . T('by') . ' ' . $author if $author;
    $info = $q->br() . $q->span({-class=>'info'}, $info) if $info;
    print $q->p($result, $description, $info);
  }
}
# Wrap every match of $regex in <strong> tags (skipped in raw mode).
sub SearchHighlight {
  my ($data, $regex) = @_;
  $data =~ s/($regex)/<strong>$1<\/strong>/gi unless GetParam('raw');
  return $data;
}
# Extract snippets from $data for display in search results: one snippet from
# the start of the document plus up to $maxsnippets around matches of $regex,
# each cut on word boundaries and joined with " . . .".
sub SearchExtract {
  my ($data, $regex) = @_;
  my ($snippetlen, $maxsnippets) = (100, 4); # these seem nice.
  # show a snippet from the beginning of the document
  my $j = index($data, ' ', $snippetlen); # end on word boundary
  my $t = substr($data, 0, $j);
  my $result = $t . ' . . .';
  $data = substr($data, $j); # to avoid rematching
  my $jsnippet = 0 ;
  while ($jsnippet < $maxsnippets and $data =~ m/($regex)/i) {
    $jsnippet++;
    if (($j = index($data, $1)) > -1 ) {
      # get substr containing (start of) match, ending on word boundaries
      my $start = index($data, ' ', $j - $snippetlen / 2);
      $start = 0 if $start == -1;
      my $end = index($data, ' ', $j + $snippetlen / 2);
      $end = length($data) if $end == -1;
      $t = substr($data, $start, $end - $start);
      $result .= $t . ' . . .';
      # truncate text to avoid rematching the same string.
      $data = substr($data, $end);
    }
  }
  return $result;
}
# Perform a search-and-replace over all pages and save every changed page,
# holding the main lock for the duration. Returns the changed page ids.
sub ReplaceAndSave {
  my ($from, $to) = @_;
  RequestLockOrError(); # fatal
  my @result = Replace($from, $to, 1, sub {
    my ($id, $new) = @_;
    Save($id, $new, $from . ' → ' . $to, 1, ($Page{host} ne $q->remote_addr()));
  });
  ReleaseLock();
  return @result;
}
# Preview a search-and-replace: print a diff per affected page without saving,
# plus a "More..." link when the result set exceeds the current window.
sub ReplaceAndDiff {
  my ($from, $to) = @_;
  my $show = sub {
    my ($id, $new) = @_;
    print $q->h2(GetPageLink($id)), $q->div({-class=>'diff'}, ImproveDiff(DoDiff($Page{text}, $new)));
  };
  my @found = Replace($from, $to, 0, $show);
  my $offset = GetParam('offset', 0);
  my $num = GetParam('num', 10);
  if (@found > $offset + $num) {
    my @params = ("search=" . UrlEncode($from), "preview=1",
		  "offset=" . ($num + $offset), "num=" . $num);
    push @params, ($to ? "replace=" . UrlEncode($to) : "delete=1");
    print $q->p({-class=>'more'}, ScriptLink(join(';', @params), T('More...'), 'more'));
  }
  return @found;
}
# Core of search-and-replace: substitute /$from/ with $to (supporting $1..$9
# backreferences) on every matching page. $func->($id, $newtext) is called for
# every hit when $all is true, otherwise only for the current offset/num
# window. Returns the list of matching page ids.
sub Replace {
  my ($from, $to, $all, $func) = @_; # $func takes $id and $new text
  my $lang = GetParam('lang', '');
  my $num = GetParam('num', 10);
  my $offset = GetParam('offset', 0);
  my @result;
  foreach my $id (AllPagesList()) {
    OpenPage($id);
    if ($lang) {
      my @languages = split(/,/, $Page{languages});
      next if (@languages and not grep(/$lang/, @languages));
    }
    $_ = $Page{text};
    my $replacement = sub {
      # expand $1..$9 in the replacement text from the current match
      my ($o1, $o2, $o3, $o4, $o5, $o6, $o7, $o8, $o9) = ($1, $2, $3, $4, $5, $6, $7, $8, $9);
      my $str = $to;
      $str =~ s/\$([1-9])/'$o' . $1/eeg;
      $str
    };
    if (s/$from/$replacement->()/egi) { # allows use of backreferences
      push (@result, $id);
      $func->($id, $_) if $all or @result > $offset and @result <= $offset + $num;
    }
  }
  return @result;
}
# Action handler for saving an edit, comment, or file upload to page $id.
# Handles preview, no-op detection, merge of concurrent edits via diff3, and
# conflict/overwrite warnings, all under the main lock.
sub DoPost {
  my $id = FreeToNormal(shift);
  UserCanEditOrDie($id);
  # Lock before getting old page to prevent races
  RequestLockOrError(); # fatal
  OpenPage($id);
  my $old = $Page{text};
  my $string = UnquoteHtml(GetParam('text', undef));
  $string =~ s/(\r|$FS)//g;
  my ($type) = TextIsFile($string); # MIME type if an uploaded file
  my $filename = GetParam('file', undef);
  if (($filename or $type) and not $UploadAllowed and not UserIsAdmin()) {
    ReportError(T('Only administrators can upload files.'), '403 FORBIDDEN');
  }
  my $comment = UnquoteHtml(GetParam('aftertext', undef));
  $comment =~ s/(\r|$FS)//g;
  if (defined $comment and $comment eq '') {
    ReleaseLock();
    return ReBrowsePage($id);
  }
  if ($filename) { # upload file
    my $file = $q->upload('file');
    if (not $file and $q->cgi_error) {
      ReportError(Ts('Transfer Error: %s', $q->cgi_error), '500 INTERNAL SERVER ERROR');
    }
    ReportError(T('Browser reports no file info.'), '500 INTERNAL SERVER ERROR') unless $q->uploadInfo($filename);
    $type = $q->uploadInfo($filename)->{'Content-Type'};
    ReportError(T('Browser reports no file type.'), '415 UNSUPPORTED MEDIA TYPE') unless $type;
    local $/ = undef; # Read complete files
    my $content = <$file>; # Apparently we cannot count on <$file> to always work within the eval!?
    my $encoding = substr($content, 0, 2) eq "\x1f\x8b" ? 'gzip' : '';
    eval { require MIME::Base64; $_ = MIME::Base64::encode($content) };
    $string = "#FILE $type $encoding\n" . $_;
  } else { # ordinary text edit
    $string = AddComment($old, $comment) if defined $comment;
    if ($comment and substr($string, 0, length($DeletedPage)) eq $DeletedPage) { # look ma, no regexp!
      $string = substr($string, length($DeletedPage)); # undelete pages when adding a comment
    }
    $string .= "\n" if ($string !~ /\n$/); # add trailing newline
    $string = RunMyMacros($string); # run macros on text pages only
  }
  my %allowed = map {$_ => 1} @UploadTypes;
  if (@UploadTypes and $type and not $allowed{$type}) {
    ReportError(Ts('Files of type %s are not allowed.', $type), '415 UNSUPPORTED MEDIA TYPE');
  }
  # Banned Content
  my $summary = GetSummary();
  if (not UserIsEditor()) {
    my $rule = BannedContent(NormalToFree($id)) || BannedContent($string) || BannedContent($summary);
    ReportError(T('Edit Denied'), '403 FORBIDDEN', undef, $q->p(T('The page contains banned text.')),
		$q->p(T('Contact the wiki administrator for more information.')), $q->p($rule)) if $rule;
  }
  # rebrowse if no changes
  my $oldrev = $Page{revision};
  if (GetParam('Preview', '')) { # Preview button was used
    ReleaseLock();
    if (defined $comment) {
      BrowsePage($id, 0, RunMyMacros($comment)); # show macros in preview
    } else {
      DoEdit($id, $string, 1);
    }
    return;
  } elsif ($old eq $string) {
    ReleaseLock(); # No changes -- just show the same page again
    return ReBrowsePage($id);
  } elsif ($oldrev == 0 and $string eq "\n") {
    ReportError(T('No changes to be saved.'), '400 BAD REQUEST'); # don't fake page creation because of webdav
  }
  my $newAuthor = 0;
  if ($oldrev) { # the first author (no old revision) is not considered to be "new"
    # prefer usernames for potential new author detection
    $newAuthor = 1 if not $Page{username} or $Page{username} ne GetParam('username', '');
    $newAuthor = 1 if not $q->remote_addr() or not $Page{host} or $q->remote_addr() ne $Page{host};
  }
  my $oldtime = $Page{ts};
  my $myoldtime = GetParam('oldtime', ''); # maybe empty!
  # Handle raw edits with the meta info on the first line
  if (GetParam('raw', 0) == 2 and $string =~ /^([0-9]+).*\n((.*\n)*.*)/) {
    $myoldtime = $1;
    $string = $2;
  }
  my $generalwarning = 0;
  if ($newAuthor and $oldtime ne $myoldtime and not defined $comment) {
    if ($myoldtime) {
      my ($ancestor) = GetTextAtTime($myoldtime);
      if ($ancestor and $old ne $ancestor) {
	my $new = MergeRevisions($string, $ancestor, $old);
	if ($new) {
	  $string = $new;
	  if ($new =~ /^<<<<<<</m and $new =~ /^>>>>>>>/m) {
	    SetParam('msg', Ts('This page was changed by somebody else %s.',
			       CalcTimeSince($Now - $Page{ts}))
		     . ' ' . T('The changes conflict. Please check the page again.'));
	  } # else no conflict
	} else {
	  $generalwarning = 1;
	} # else merge revision didn't work
      } # else nobody changed the page in the mean time (same text)
    } else {
      $generalwarning = 1;
    } # no way to be sure since myoldtime is missing
  } # same author or nobody changed the page in the mean time (same timestamp)
  if ($generalwarning and ($Now - $Page{ts}) < 600) {
    SetParam('msg', Ts('This page was changed by somebody else %s.',
		       CalcTimeSince($Now - $Page{ts}))
	     . ' ' . T('Please check whether you overwrote those changes.'));
  }
  Save($id, $string, $summary, (GetParam('recent_edit', '') eq 'on'), $filename);
  ReleaseLock();
  ReBrowsePage($id);
}
# Derive the change summary: explicit 'summary' parameter, or a truncated
# copy of the submitted text with link markup stripped.
sub GetSummary {
  my $text = GetParam('aftertext', '') || ($Page{revision} > 0 ? '' : GetParam('text', ''));
  return '' if $text =~ /^#FILE /; # never summarize uploads
  if ($SummaryDefaultLength and length($text) > $SummaryDefaultLength) {
    $text = substr($text, 0, $SummaryDefaultLength);
    $text =~ s/\s*\S*$/ . . ./; # cut on a word boundary
  }
  my $summary = GetParam('summary', '') || $text; # not GetParam('summary', $text) work because '' is defined
  $summary =~ s/$FS|[\r\n]+/ /g; # remove linebreaks and separator characters
  $summary =~ s/\[$FullUrlPattern\s+(.*?)\]/$2/g; # fix common annoyance when copying text to summary
  $summary =~ s/\[$FullUrlPattern\]//g;
  $summary =~ s/\[\[$FreeLinkPattern\]\]/$1/g;
  return UnquoteHtml($summary);
}
# Append $comment to $string as a signed comment (author name, optional
# homepage link, and timestamp), separated by a horizontal rule.
sub AddComment {
  my ($string, $comment) = @_;
  $comment =~ s/\r//g; # Remove "\r"-s (0x0d) from the string
  $comment =~ s/\s+$//g; # Remove whitespace at the end
  if ($comment ne '') {
    my $author = GetParam('username', T('Anonymous'));
    my $homepage = GetParam('homepage', '');
    $homepage = 'http://' . $homepage if $homepage and $homepage !~ /^($UrlProtocols):/;
    $author = "[$homepage $author]" if $homepage;
    $string .= "\n----\n\n" if $string and $string ne "\n";
    $string .= $comment . "\n\n"
      . '-- ' . $author . ' ' . TimeToText($Now) . "\n\n";
  }
  return $string;
}
sub Save { # call within lock, with opened page
  # Persist a new revision of page $id: keep the old revision, update %Page
  # metadata, recompute diffs and languages, log to recent changes, and keep
  # the page index up to date.
  my ($id, $new, $summary, $minor, $upload) = @_;
  my $user = GetParam('username', '');
  my $host = $q->remote_addr();
  my $revision = $Page{revision} + 1;
  my $old = $Page{text};
  my $olddiff = $Page{'diff-major'} == '1' ? $Page{'diff-minor'} : $Page{'diff-major'};
  if ($revision == 1 and IsFile($IndexFile) and not Unlink($IndexFile)) { # regenerate index on next request
    SetParam('msg', Ts('Cannot delete the index file %s.', $IndexFile)
	     . ' ' . T('Please check the directory permissions.')
	     . ' ' . T('Your changes were not saved.'));
    return 0;
  }
  ReInit($id);
  TouchIndexFile();
  SaveKeepFile(); # deletes blocks, flags, diff-major, and diff-minor, and sets keep-ts
  ExpireKeepFiles();
  $Page{lastmajor} = $revision unless $minor;
  $Page{lastmajorsummary} = $summary unless $minor;
  @Page{qw(ts revision summary username host minor text)} =
    ($Now, $revision, $summary, $user, $host, $minor, $new);
  if ($UseDiff and $UseCache > 1 and $revision > 1 and not $upload and not TextIsFile($old)) {
    UpdateDiffs($old, $new, $olddiff); # sets diff-major and diff-minor
  }
  my $languages;
  $languages = GetLanguages($new) unless $upload;
  $Page{languages} = $languages;
  SavePage();
  if ($revision == 1 and $LockOnCreation{$id}) {
    WriteStringToFile(GetLockedPageFile($id), 'LockOnCreation');
  }
  WriteRcLog($id, $summary, $minor, $revision, $user, $host, $languages, GetCluster($new));
  AddToIndex($id) if ($revision == 1);
}
sub TouchIndexFile {
  # Bump the index file's access and modification times to the current
  # moment, and record that moment in both $LastUpdate and $Now so the
  # rest of the request sees the page list as freshly updated.
  my $now = time;
  utime $now, $now, $IndexFile;
  $Now = $now;
  $LastUpdate = $Now;
}
2003-03-21 13:50:35 +00:00
sub GetLanguages {
  # Guess the languages of $text: for every configured language whose
  # regular expression from %Languages matches more than $LanguageLimit
  # times, include its code.  Returns a comma-separated string, sorted
  # by language code.
  my $text = shift;
  my @found;
  foreach my $lang (sort keys %Languages) {
    # count matches (and captures, if the pattern has groups) in list
    # context without keeping the list around
    my $count = () = $text =~ /$Languages{$lang}/gi;
    push(@found, $lang) if $count > $LanguageLimit;
  }
  return join(',', @found);
}
sub GetCluster {
  # Return the cluster a page belongs to: the page name on the first
  # line of the text, either as a WikiWord link or as a [[free link]],
  # when clustering is enabled via $PageCluster.  Returns the empty
  # string when clustering is off or no cluster link starts the text.
  #
  # Bug fix: the original assigned to the global $_ without localizing
  # it, clobbering the caller's $_ (dangerous when called from inside a
  # foreach over $_); use a lexical instead.  Also return '' explicitly
  # instead of falling off the end with the value of a failed condition.
  my $text = shift;
  return '' unless $PageCluster;
  return $1 if ($WikiLinks and $text =~ /^$LinkPattern\n/)
    or ($FreeLinks and $text =~ /^\[\[$FreeLinkPattern\]\]\n/);
  return '';
}
sub MergeRevisions { # merge change from file2 to file3 into file1
  # Three-way merge via diff3(1): $file1 is the user's text, $file2 the
  # common ancestor, $file3 the other saved text.  Returns the merged
  # text (possibly containing conflict markers) or an error message if
  # the merge lock cannot be obtained.
  my ($file1, $file2, $file3) = @_;
  my ($name1, $name2, $name3) = ("$TempDir/file1", "$TempDir/file2", "$TempDir/file3");
  CreateDir($TempDir);
  RequestLockDir('merge') or return T('Could not get a lock to merge!');
  # materialize the three texts so diff3 can read them
  my %content = ($name1 => $file1, $name2 => $file2, $name3 => $file3);
  WriteStringToFile($_, $content{$_}) for ($name1, $name2, $name3);
  my ($you, $ancestor, $other) = (T('you'), T('ancestor'), T('other'));
  # \Q...\E quotes shell metacharacters in the labels and file names
  my $output = decode_utf8(`diff3 -m -L \Q$you\E -L \Q$ancestor\E -L \Q$other\E -- \Q$name1\E \Q$name2\E \Q$name3\E`);
  ReleaseLockDir('merge'); # don't unlink temp files--next merge will just overwrite.
  return $output;
}
2003-03-21 13:50:35 +00:00
# Note: all diff and recent-list operations should be done within locks.
sub WriteRcLog {
  # Append one $FS-separated record for this edit to the recent-changes
  # log file.
  my ($id, $summary, $minor, $revision, $username, $host, $languages, $cluster) = @_;
  my @record = ($Now, $id, $minor, $summary, $host,
                $username, $revision, $languages, $cluster);
  AppendStringToFile($RcFile, join($FS, @record) . "\n");
}
sub UpdateDiffs { # this could be optimized, but isn't frequent enough
  # Refresh the cached diffs in %Page after a save: diff-minor always
  # gets the diff between the old and new text; diff-major is either
  # the special marker 1 (telling GetCacheDiff to reuse diff-minor,
  # because this revision is itself a major one) or the previous major
  # diff carried over.
  my ($old, $new, $olddiff) = @_;
  $Page{'diff-minor'} = GetDiff($old, $new); # create new diff-minor
  if ($Page{lastmajor} == $Page{revision}) {
    # 1 is a special value for GetCacheDiff telling it to use diff-minor
    $Page{'diff-major'} = 1;
  } else {
    $Page{'diff-major'} = $olddiff;
  }
}
sub DoMaintain {
  # Periodic maintenance: expire keep files and delete marked pages,
  # rotate old entries out of the recent-changes log into $RcOldFile,
  # prune the RSS cache, and run all @MyMaintenance hooks.  Non-admins
  # may only trigger this once every 12 hours, tracked by the mtime of
  # the "maintain" file.
  #
  # Bug fix: the RSS cache cleanup used Modified($_) on the bare file
  # name returned by readdir, which is relative to the current working
  # directory, not $RssDir — so the age check looked at the wrong (and
  # usually nonexistent) file.  Check "$RssDir/$_" instead and skip the
  # "." and ".." entries.
  print GetHeader('', T('Run Maintenance')), $q->start_div({-class=>'content maintain'});
  my $fname = "$DataDir/maintain";
  if (not UserIsAdmin()) {
    # NOTE(review): 0.5 here with a 12-hour message suggests Modified
    # returns an age in days at this call site — confirm against the
    # Modified helper, since the RSS check below uses seconds
    if (IsFile($fname) and $Now - Modified($fname) < 0.5) {
      print $q->p(T('Maintenance not done.') . ' ' . T('(Maintenance can only be done once every 12 hours.)')
                  . ' ', T('Remove the "maintain" file or wait.')), $q->end_div();
      PrintFooter();
      return;
    }
  }
  print '<p>', T('Expiring keep files and deleting pages marked for deletion');
  ExpireAllKeepFiles();
  print '</p>';
  RequestLockOrError();
  print $q->p(T('Main lock obtained.'));
  print $q->p(Ts('Moving part of the %s log file.', $RCName));
  # Determine the number of days to go back: keep whatever the longest
  # configured recent-changes view needs
  my $days = 0;
  foreach (@RcDays) {
    $days = $_ if $_ > $days;
  }
  my $starttime = $Now - $days * 86400; # 24*60*60
  # Read the current file
  my ($status, $data) = ReadFile($RcFile);
  if (not $status) {
    print $q->p($q->strong(Ts('Could not open %s log file', $RCName) . ':') . ' ' . $RcFile),
      $q->p(T('Error was') . ':'), $q->pre($!), $q->p(T('Note: This error is normal if no changes have been made.'));
  }
  # Move the old stuff from rc to temp; the log is ordered by timestamp
  # so we can stop at the first entry that is still recent enough
  my @rc = split(/\n/, $data);
  my @tmp = ();
  for my $line (@rc) {
    my ($ts, $id, $minor, $summary, $host, @rest) = split(/$FS/, $line);
    last if $ts >= $starttime;
    # anonymize the host in the archived copy
    push(@tmp, join($FS, $ts, $id, $minor, $summary, 'Anonymous', @rest));
  }
  print $q->p(Ts('Moving %s log entries.', scalar(@tmp)));
  if (@tmp) {
    # Write new files, and backups
    AppendStringToFile($RcOldFile, join("\n", @tmp) . "\n");
    WriteStringToFile($RcFile . '.old', $data);
    splice(@rc, 0, scalar(@tmp)); # strip
    WriteStringToFile($RcFile, @rc ? join("\n", @rc) . "\n" : '');
  }
  if (opendir(DIR, $RssDir)) { # cleanup if they should expire anyway
    foreach (readdir(DIR)) {
      next if /^\.\.?$/; # skip . and ..
      # readdir returns bare names: prepend the directory (bug fix)
      Unlink("$RssDir/$_") if $Now - Modified("$RssDir/$_") > $RssCacheHours * 3600;
    }
    closedir DIR;
  }
  foreach my $func (@MyMaintenance) {
    $func->();
  }
  WriteStringToFile($fname, 'Maintenance done at ' . TimeToText($Now));
  ReleaseLock();
  print $q->p(T('Main lock released.')), $q->end_div();
  PrintFooter();
}
sub PageDeletable {
  # A page may be deleted when expiry is enabled ($KeepDays), the page
  # has not been edited within the keep period, and it is marked for
  # deletion (or consists only of whitespace).
  return unless $KeepDays;
  my $cutoff = $Now - ($KeepDays * 86400); # 24*60*60
  return 0 unless $Page{ts} < $cutoff;     # edited too recently
  return PageMarkedForDeletion();
}
sub PageMarkedForDeletion {
  # Only pages explicitly marked for deletion (text starting with
  # $DeletedPage) or whitespace-only pages are considered deleted;
  # taking into account the very rare possibility of a read error and
  # the page text being undefined.
  my $text = $Page{text};
  return 1 if defined $text and $text =~ /^\s*$/;
  return $DeletedPage && substr($text, 0, length($DeletedPage)) eq $DeletedPage;
}
sub DeletePage { # Delete must be done inside locks.
  # Remove every trace of page $id: record it in the delete log, remove
  # its page file, keep files and directory, page lock, and the index
  # file (forcing an index rebuild), then drop it from the in-memory
  # index.  Returns the empty string (no error).
  my $id = shift;
  ValidIdOrDie($id);
  AppendStringToFile($DeleteFile, "$id\n");
  my @targets = (GetPageFile($id), GetKeepFiles($id), GetKeepDir($id),
                 GetLockedPageFile($id), $IndexFile);
  for my $target (@targets) {
    if (IsFile($target)) {
      Unlink($target);
    }
    if (IsDir($target)) {
      RemoveDir($target);
    }
  }
  ReInit($id);
  delete $IndexHash{$id};
  @IndexList = sort(keys %IndexHash);
  return ''; # no error
}
sub DoEditLock {
  # Create or remove the site-wide edit lock (admins only).  The "set"
  # parameter (default 1) decides whether the lock file is written or
  # deleted; the index file is touched so cached pages notice.
  return unless UserIsAdminOrError();
  print GetHeader('', T('Set or Remove global edit lock'));
  my $lockfile = "$NoEditFile";
  if (GetParam('set', 1)) {
    WriteStringToFile($lockfile, 'editing locked.');
  } else {
    Unlink($lockfile);
  }
  utime time, time, $IndexFile; # touch index file
  # report the actual resulting state, not the requested one
  my $message = IsFile($lockfile) ? T('Edit lock created.') : T('Edit lock removed.');
  print $q->p($message);
  PrintFooter();
}
sub DoPageLock {
  # Create or remove the edit lock for a single page (admins only).
  # The page comes from the "id" parameter; the "set" parameter
  # (default 1) decides between locking and unlocking.
  return unless UserIsAdminOrError();
  print GetHeader('', T('Set or Remove page edit lock'));
  my $id = GetParam('id', '');
  ValidIdOrDie($id);
  my $lockfile = GetLockedPageFile($id);
  if (GetParam('set', 1)) {
    WriteStringToFile($lockfile, 'editing locked.');
  } else {
    Unlink($lockfile);
  }
  utime time, time, $IndexFile; # touch index file
  # report the actual resulting state, not the requested one
  my $message = IsFile($lockfile)
    ? Ts('Lock for %s created.', GetPageLink($id))
    : Ts('Lock for %s removed.', GetPageLink($id));
  print $q->p($message);
  PrintFooter();
}
sub DoShowVersion {
  # Print a page showing the wiki description plus version information
  # for the server, Perl, CGI, and the optional modules and external
  # diff tools this wiki can use.
  print GetHeader('', T('Displaying Wiki Version')), $q->start_div({-class=>'content version'});
  print $WikiDescription, $q->p($q->server_software()),
    $q->p(sprintf('Perl v%vd', $^V)),
    $q->p($ENV{MOD_PERL} ? $ENV{MOD_PERL} : "no mod_perl"), $q->p('CGI: ', $CGI::VERSION),
    # each optional module is probed inside eval so a missing module
    # just yields an empty version instead of killing the request
    $q->p('LWP::UserAgent ', eval { local $SIG{__DIE__}; require LWP::UserAgent; $LWP::UserAgent::VERSION; }),
    $q->p('XML::RSS: ', eval { local $SIG{__DIE__}; require XML::RSS; $XML::RSS::VERSION; }),
    # NOTE(review): no require here — this relies on the XML::RSS line
    # above having loaded XML::Parser; otherwise the version is undef
    $q->p('XML::Parser: ', eval { local $SIG{__DIE__}; $XML::Parser::VERSION; });
  # backticks: show the tools' version output, or the error on failure
  print $q->p('diff: ' . (`diff --version` || $!)), $q->p('diff3: ' . (`diff3 --version` || $!)) if $UseDiff;
  print $q->end_div();
  PrintFooter();
}
sub DoDebug {
  # Print a debugging page: run every hook registered in @Debugging
  # inside a "content debug" div.
  print GetHeader('', T('Debugging Information')),
    $q->start_div({-class=>'content debug'});
  for my $hook (@Debugging) {
    $hook->();
  }
  print $q->end_div();
  PrintFooter();
}
sub DoSurgeProtection {
  # Record the current visitor and abort the request with a 503 when
  # they exceed $SurgeProtectionViews requests per $SurgeProtectionTime
  # seconds.  Does nothing when $SurgeProtection is off or neither a
  # username nor a remote address is available.
  return unless $SurgeProtection;
  my $name = GetParam('username', $q->remote_addr());
  return unless $name;
  ReadRecentVisitors();
  AddRecentVisitor($name);
  if (RequestLockDir('visitors')) { # not fatal
    WriteRecentVisitors();
    ReleaseLockDir('visitors');
    if (DelayRequired($name)) {
      ReportError(Ts('Too many connections by %s', $name)
		  . ': ' . Tss('Please do not fetch more than %1 pages in %2 seconds.',
			       $SurgeProtectionViews, $SurgeProtectionTime),
		  '503 SERVICE UNAVAILABLE');
    }
  } elsif (GetParam('action', '') ne 'unlock') {
    # the error is suppressed for action=unlock, presumably so a stale
    # visitors lock can still be cleaned up
    ReportError(Ts('Could not get %s lock', 'visitors') . ': ' . Ts('Check whether the web server can create the directory %s and whether it can create files in it.', $TempDir), '503 SERVICE UNAVAILABLE');
  }
}
sub DelayRequired {
  # True when the named visitor made more than $SurgeProtectionViews
  # requests within the last $SurgeProtectionTime seconds.  Timestamps
  # are stored most-recent-first, so the entry at index
  # $SurgeProtectionViews is the (N+1)-th most recent request.
  my $name = shift;
  my $oldest = $RecentVisitors{$name}->[$SurgeProtectionViews];
  return ($Now - $oldest) < $SurgeProtectionTime;
}
sub AddRecentVisitor {
  # Prepend the current timestamp to the named visitor's list of
  # request times (most recent first) in %RecentVisitors.
  my $name = shift;
  my $previous = $RecentVisitors{$name};
  $RecentVisitors{$name} = $previous ? [$Now, @$previous] : [$Now];
}
sub ReadRecentVisitors {
  # Load %RecentVisitors from $VisitorFile: one line per visitor, the
  # name followed by $FS-separated timestamps.  A missing or unreadable
  # file leaves the hash empty.
  my ($status, $data) = ReadFile($VisitorFile);
  %RecentVisitors = ();
  return unless $status;
  for my $line (split(/\n/, $data)) {
    my ($name, @timestamps) = split(/$FS/, $line);
    $RecentVisitors{$name} = \@timestamps if $name;
  }
}
sub WriteRecentVisitors {
  # Persist %RecentVisitors to $VisitorFile, dropping any visitor whose
  # most recent request is older than the surge protection window and
  # keeping at most $SurgeProtectionViews timestamps per visitor.
  my $cutoff = $Now - $SurgeProtectionTime;
  my $output = '';
  while (my ($name, $value) = each %RecentVisitors) {
    my @timestamps = @$value;
    next if $timestamps[0] < $cutoff; # most recent request too old
    $output .= join($FS, $name, @timestamps[0 .. $SurgeProtectionViews - 1]) . "\n";
  }
  WriteStringToFile($VisitorFile, $output);
}
# In scalar context: true when the page text represents an uploaded
# file, i.e. begins with a "#FILE <type> [<name>]" header line.  In
# list context: the captured MIME type and optional file name.
sub TextIsFile { return $_[0] =~ /^#FILE (\S+) ?(\S+)?\n/; }
sub AddModuleDescription { # cannot use $q here because this is module init time
  # Append a paragraph to $ModulesDescription linking the module's
  # source file in the repository (optionally pinned to $tag) and, when
  # $page is given, its documentation page on oddmuse.org.
  my ($filename, $page, $dir, $tag) = @_;
  my $src = "http://git.savannah.gnu.org/cgit/oddmuse.git/tree/modules/$dir"
    . UrlEncode($filename) . ($tag ? '?' . $tag : '');
  my $doc = 'https://www.oddmuse.org/cgi-bin/oddmuse/' . UrlEncode(FreeToNormal($page));
  my $html = "<p><a href=\"$src\">" . QuoteHtml($filename) . "</a>" . ($tag ? " ($tag)" : '');
  $html .= T(', see') . " <a href=\"$doc\">" . QuoteHtml($page) . "</a>" if $page;
  $ModulesDescription .= $html . "</p>";
}
2008-03-07 23:27:51 +00:00
# Entry point: handle the request immediately when running as a plain
# CGI script; skipped under mod_perl and when a wrapper script disables
# $RunCGI (e.g. when this file is loaded as a library).
DoWikiRequest() if $RunCGI and not exists $ENV{MOD_PERL}; # Do everything.
1; # In case we are loaded from elsewhere