Compare commits

..

22 Commits

Author SHA1 Message Date
Alex Schroeder
0974b7bbd8 wordcount: fix test 2023-10-25 18:42:25 +02:00
Alex Schroeder
f73d420957 markdown-rule: be more lenient
Don't just limit to full URL: accept any non-whitespace characters as
a link.
2023-10-25 18:40:43 +02:00
Alex Schroeder
17ef2aaf88 CapnDan tells me this line is missing at the end 2023-08-12 21:22:49 +02:00
Alex Schroeder
b70c8e8def Add rollback marker stripping back in 2023-08-10 11:23:18 +02:00
Alex Schroeder
f8752e69bc Update years 2023-08-10 11:20:46 +02:00
Alex Schroeder
9d48f875a2 Fix rollback code 2023-08-08 16:31:09 +02:00
Alex Schroeder
39e9cea7b0 Add Matched sub
This allows the use of whitespace to separate terms when using page
matching. This seems more natural than requiring underscores to match
the space between words in a page title. So now, a page with id
"one_two" is matched by terms such as "one_two", "two_one", "one two"
and "two one" (notice the word order).
2023-08-08 14:59:53 +02:00
Alex Schroeder
e7b718f610 [network-blocker] Ignore missing file 2023-07-15 21:06:04 +02:00
Alex Schroeder
261aeccb3f [network-blocker] New module 2023-07-15 21:00:58 +02:00
Alex Schroeder
a09c846700 Fix a rollback issue
Without this fix, Oddmuse would enter an infinite loop if the list of
items to show began with a rollback tag.
2023-06-30 13:02:03 +02:00
Alex Schroeder
8dbede3813 Tarballs doesn't link to a latest.tar.gz 2023-06-21 22:05:16 +02:00
Alex Schroeder
89d9f27b2a [rename-pages] Close form 2023-05-29 20:24:55 +02:00
Alex Schroeder
f21f257c1b Fix parenthesis 2023-03-24 21:59:52 +01:00
Alex Schroeder
48916943a1 More spans for the search bar 2023-03-24 21:16:47 +01:00
Alex Schroeder
3b185e5521 Add some spans to the gotobar for better styling 2023-03-24 15:38:50 +01:00
Alex Schroeder
612af8f7fb Make feed link more flexible
The result is that feeds generated by journal-rss.pl contain a link to
the Recent Changes page instead of linking twice to the feed.
2023-02-27 14:12:15 +01:00
Alex Schroeder
dc9131e600 Fix translation-link.t 2023-02-27 14:12:04 +01:00
Alex Schroeder
99af4d984d Handle [an example](#foo "Title") 2023-02-17 17:16:23 +01:00
Alex Schroeder
88f4fe3b89 Whitespace 2023-02-17 17:16:02 +01:00
Sandra Snan
851f2f77e8 Handle image/right
Everyone loves hacky regexes♥
2023-02-17 17:14:24 +01:00
Alex Schroeder
975e15c9f8 Don't turn all whitespace into a space
We want to honor NO-BREAK SPACE and the like!
2022-08-26 13:42:35 +02:00
Alex Schroeder
d235d6ac47 GetId returns the normal form of $id.
This means, "2022-07-15 The Joy of Exploration", which arrives as
"2022-07-15%20The%20Joy%20of%20Exploration", gets turned into
"2022-07-15_The_Joy_of_Exploration". The problem is that when posting,
$id = FreeToNormal(shift), so pages are always written to the page
with underscores. If you then request the raw history of a page,
however, no such call was happening and so no keep files were found by
DoHistory.
2022-07-18 17:51:22 +02:00
17 changed files with 403 additions and 137 deletions

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2004, 2005, 2006 Alex Schroeder <alex@emacswiki.org>
# Copyright (C) 2004–2023 Alex Schroeder <alex@gnu.org>
# Copyright (C) 2006 Ingo Belka
#
# This program is free software; you can redistribute it and/or modify
@@ -112,9 +112,7 @@ sub DoCollect {
my $search = GetParam('search', '');
ReportError(T('The match parameter is missing.')) unless $match or $search;
print GetHeader('', Ts('Page Collection for %s', $match||$search), '');
my @pages = (grep(/$match/, $search
? SearchTitleAndBody($search)
: AllPagesList()));
my @pages = Matched($match, $search ? SearchTitleAndBody($search) : AllPagesList());
if (!$CollectingJournal) {
$CollectingJournal = 1;
# Now save information required for saving the cache of the current page.

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2004, 2007 Alex Schroeder <alex@emacswiki.org>
# Copyright (C) 2004–2023 Alex Schroeder <alex@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -47,8 +47,5 @@ sub PrintableIndexPages {
push(@pages, AllPagesList()) if GetParam('pages', 1);
push(@pages, keys %PermanentAnchors) if GetParam('permanentanchors', 1);
push(@pages, keys %NearSource) if GetParam('near', 0);
my $match = GetParam('match', '');
@pages = grep /$match/i, @pages if $match;
@pages = sort @pages;
return @pages;
return sort Matched(GetParam('match'), @pages);
}

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2004–2021 Alex Schroeder <alex@gnu.org>
# Copyright (C) 2004–2023 Alex Schroeder <alex@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -31,6 +31,7 @@ sub DoJournalRss {
local $CollectingJournal = 1;
# Fake the result of GetRcLines()
local *GetRcLines = \&JournalRssGetRcLines;
local *RcSelfWebsite = \&JournalRssSelfWebsite;
local *RcSelfAction = \&JournalRssSelfAction;
local *RcPreviousAction = \&JournalRssPreviousAction;
local *RcLastAction = \&JournalRssLastAction;
@@ -55,6 +56,15 @@ sub JournalRssParameters {
return $more;
}
sub JournalRssSelfWebsite {
my $more = '';
my $search = GetParam('rcfilteronly', '');
$more .= ";search=" . UrlEncode($search) if $search;
my $match = GetParam('match', '');
$more .= ";match=" . UrlEncode($match) if $match;
return $more;
}
sub JournalRssSelfAction {
return "action=journal" . JournalRssParameters(qw(offset));
}
@@ -76,7 +86,7 @@ sub JournalRssGetRcLines {
my $reverse = GetParam('reverse', 0);
my $monthly = GetParam('monthly', 0);
my $offset = GetParam('offset', 0);
my @pages = sort JournalSort (grep(/$match/, $search ? SearchTitleAndBody($search) : AllPagesList()));
my @pages = sort JournalSort (Matched($match, $search ? SearchTitleAndBody($search) : AllPagesList()));
if ($monthly and not $match) {
my ($sec, $min, $hour, $mday, $mon, $year, $wday, $yday) = gmtime();
$match = '^' . sprintf("%04d-%02d", $year+1900, $mon+1) . '-\d\d';

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2009–2020 Alex Schroeder <alex@gnu.org>
# Copyright (C) 2009–2022 Alex Schroeder <alex@gnu.org>
# Copyright (C) 2015 Aleks-Daniel Jakimenko <alex.jakimenko@gmail.com>
#
# This program is free software; you can redistribute it and/or modify it under
@@ -505,9 +505,9 @@ sub MailUnsubscribe {
=head1 Migrate
The mailmigrate action will migrate your subscription list from the
old format to the new format. This is necessary because these days
because the keys and values of the DB_File are URL encoded.
The mailmigrate action will migrate your subscription list from the old format
to the new format. This is necessary because these days the keys and values of
the DB_File are URL encoded.
=cut

View File

@@ -1,5 +1,5 @@
#! /usr/bin/perl
# Copyright (C) 2014–2019 Alex Schroeder <alex@gnu.org>
# Copyright (C) 2014–2022 Alex Schroeder <alex@gnu.org>
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
@@ -194,12 +194,20 @@ sub MarkdownRule {
return OpenHtmlEnvironment('pre',1) . $str; # always level 1
}
# link: [an example](http://example.com/ "Title")
elsif (m/\G\[((?:[^]\n]+\n?)+)\]\($FullUrlPattern(\s+"(.+?)")?\)/cg) {
elsif (m/\G\[((?:[^]\n]+\n?)+)\]\((\S+)(\s+"(.+?)")?\)/cg) {
my ($text, $url, $title) = ($1, $2, $4);
$url =~ /^($UrlProtocols)/;
my %params;
$params{-href} = $url;
$params{-class} = "url $1";
$params{-class} = "url";
$params{-title} = $title if $title;
return $q->a(\%params, $text);
}
# link: [an example](#foo "Title")
elsif (m/\G\[((?:[^]\n]+\n?)+)\]\((#\S)+(\s+"(.+?)")?\)/cg) {
my ($text, $url, $title) = ($1, $2, $4);
my %params;
$params{-href} = $url;
$params{-class} = "named-anchor";
$params{-title} = $title if $title;
return $q->a(\%params, $text);
}

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2012 Alex Schroeder <alex@gnu.org>
# Copyright (C) 2004–2022 Alex Schroeder <alex@gnu.org>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
@@ -55,8 +55,6 @@ our ($NamespacesMain, $NamespacesSelf, $NamespaceCurrent,
$NamespaceRoot, $NamespaceSlashing, @NamespaceParameters,
%Namespaces, $NamespacesRootDataDir);
our ($OriginalSiteName, $OriginalInterWikiMoniker, $OriginalDataDir, $OriginalScriptName, $OriginalFullUrl, $OriginalStaticDir, $OriginalStaticUrl, $OriginalWikiDescription);
$NamespacesMain = 'Main'; # to get back to the main namespace
$NamespacesSelf = 'Self'; # for your own namespace
$NamespaceCurrent = ''; # the current namespace, if any
@@ -106,23 +104,6 @@ sub GetNamespace {
}
sub NamespacesInitVariables {
$OriginalSiteName //= $SiteName;
$SiteName = $OriginalSiteName;
$OriginalInterWikiMoniker //= $InterWikiMoniker;
$InterWikiMoniker = $OriginalInterWikiMoniker;
$OriginalDataDir //= $DataDir;
$DataDir = $OriginalDataDir;
$OriginalScriptName //= $ScriptName;
$ScriptName = $OriginalScriptName;
$OriginalFullUrl //= $FullUrl;
$FullUrl = $OriginalFullUrl;
$OriginalStaticDir //= $StaticDir;
$StaticDir = $OriginalStaticDir;
$OriginalStaticUrl //= $StaticUrl;
$StaticUrl = $OriginalStaticUrl;
$OriginalWikiDescription //= $WikiDescription;
$WikiDescription = $OriginalWikiDescription;
%Namespaces = ();
# Do this before changing the $DataDir and $ScriptName
if ($UsePathInfo) {
@@ -148,24 +129,20 @@ sub NamespacesInitVariables {
$SiteName .= ' ' . NormalToFree($NamespaceCurrent);
$InterWikiMoniker = $NamespaceCurrent;
$DataDir .= '/' . $NamespaceCurrent;
}
$PageDir = "$DataDir/page";
$KeepDir = "$DataDir/keep";
$RefererDir = "$DataDir/referer";
$TempDir = "$DataDir/temp";
$LockDir = "$TempDir/lock";
$NoEditFile = "$DataDir/noedit";
$RcFile = "$DataDir/rc.log";
$RcOldFile = "$DataDir/oldrc.log";
$IndexFile = "$DataDir/pageidx";
$VisitorFile = "$DataDir/visitors.log";
$PermanentAnchorsFile = "$DataDir/permanentanchors";
# $ConfigFile -- shared
# $ModuleDir -- shared
# $NearDir -- shared
if ($ns
and $ns ne $NamespacesMain
and $ns ne $NamespacesSelf) {
$PageDir = "$DataDir/page";
$KeepDir = "$DataDir/keep";
$RefererDir = "$DataDir/referer";
$TempDir = "$DataDir/temp";
$LockDir = "$TempDir/lock";
$NoEditFile = "$DataDir/noedit";
$RcFile = "$DataDir/rc.log";
$RcOldFile = "$DataDir/oldrc.log";
$IndexFile = "$DataDir/pageidx";
$VisitorFile = "$DataDir/visitors.log";
$PermanentAnchorsFile = "$DataDir/permanentanchors";
# $ConfigFile -- shared
# $ModuleDir -- shared
# $NearDir -- shared
$ScriptName .= '/' . UrlEncode($NamespaceCurrent);
$FullUrl .= '/' . UrlEncode($NamespaceCurrent);
$StaticDir .= '/' . $NamespaceCurrent; # from static-copy.pl

198
modules/network-blocker.pl Normal file
View File

@@ -0,0 +1,198 @@
# -*- mode: perl -*-
# Copyright (C) 2023 Alex Schroeder <alex@gnu.org>
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <https://www.gnu.org/licenses/>.
=encoding utf8
=head1 NAME
Oddmuse Network Blocker
=head1 DESCRIPTION
This module hooks into regular Oddmuse Surge Protection. It adds the following
features:
Repeated offenders are blocked for increasingly longer times.
For every offender, we record the CIDR their IP number belongs to. Every time an
IP number is blocked, all the CIDRs of the other blocked IPs are checked: if
there are three or more blocked IP numbers sharing the same CIDRs, the CIDR
itself is blocked.
CIDR blocking works the same way: Repeated offenders are blocked for
increasingly longer times.
=head2 Behind a reverse proxy
Make sure your config file copies the IP number to the correct environment
variable:
$ENV{REMOTE_ADDR} = $ENV{HTTP_X_FORWARDED_FOR};
=head1 SEE ALSO
<Oddmuse Surge Protection|https://oddmuse.org/wiki/Surge_Protection>
=cut
use strict;
use v5.10;
use Net::IP qw(:PROC);
use Net::DNS qw(rr);
AddModuleDescription('network-blocker.pl', 'Network Blocker Extension');
our ($Now, $DataDir, $SurgeProtectionViews, $SurgeProtectionTime);
{
no warnings 'redefine';
*OldNetworkBlockerDelayRequired = \&DelayRequired;
*DelayRequired = \&NewNetworkBlockerDelayRequired;
}
# Block for at least this many seconds.
my $NetworkBlockerMinimumPeriod = 30;
# Every violation doubles the current period until this maximum is reached (four weeks).
my $NetworkBlockerMaximumPeriod = 60 * 60 * 24 * 7 * 4;
# All the blocked networks. Maps CIDR to an array [expiry timestamp, expiry
# period].
my %NetworkBlockerList;
# Candidates are remembered for this many seconds.
my $NetworkBlockerCachePeriod = 600;
# All the candidate networks for a block. Maps IP to an array [ts, cidr, ...].
# Candidates are removed after $NetworkBlockerCachePeriod.
my %NetworkBlockerCandidates;
sub NetworkBlockerRead {
my ($status, $data) = ReadFile("$DataDir/network-blocks");
return unless $status;
my @lines = split(/\n/, $data);
while ($_ = shift(@lines)) {
my @items = split(/,/);
$NetworkBlockerList{shift(@items)} = \@items;
}
# an empty line separates the two sections
while ($_ = shift(@lines)) {
my @items = split(/,/);
$NetworkBlockerCandidates{shift(@items)} = \@items;
}
return 1;
}
sub NetworkBlockerWrite {
RequestLockDir('network-blocks') or return '';
WriteStringToFile(
"$DataDir/network-blocks",
join("\n\n",
join("\n", map {
join(",", $_, @{$NetworkBlockerList{$_}})
} keys %NetworkBlockerList),
join("\n", map {
join(",", $_, @{$NetworkBlockerCandidates{$_}})
} keys %NetworkBlockerCandidates)));
ReleaseLockDir('network-blocks');
}
sub NewNetworkBlockerDelayRequired {
my $ip = shift;
# If $ip is a name and not an IP number, parsing fails. In this case, run the
# regular code.
my $ob = new Net::IP($ip);
return OldNetworkBlockerDelayRequired($ip) unless $ob;
# Read the file. If the file does not exist, no problem.
NetworkBlockerRead();
# See if the current IP number is one of the blocked CIDR ranges.
for my $cidr (keys %NetworkBlockerList) {
# Perhaps this CIDR block can be expired.
if ($NetworkBlockerList{$cidr}->[0] < $Now) {
delete $NetworkBlockerList{$cidr};
next;
}
# Forget the CIDR if it cannot be turned into a range.
my $range = new Net::IP($cidr);
if (not $range) {
warn "CIDR $cidr is blocked but has no range: " . Net::IP::Error();
delete $NetworkBlockerList{$cidr};
next;
}
# If the CIDR overlaps with the remote IP number, it's a block.
warn "Checking whether $ip is in $cidr\n";
my $overlap = $range->overlaps($ob);
# $IP_PARTIAL_OVERLAP (ranges overlap) $IP_NO_OVERLAP (no overlap)
# $IP_A_IN_B_OVERLAP (range2 contains range1) $IP_B_IN_A_OVERLAP (range1
# contains range2) $IP_IDENTICAL (ranges are identical) undef (problem)
if (defined $overlap and $overlap != $IP_NO_OVERLAP) {
# Double the block period unless it has reached $NetworkBlockerMaximumPeriod.
if ($NetworkBlockerList{$cidr}->[1] < $NetworkBlockerMaximumPeriod / 2) {
$NetworkBlockerList{$cidr}->[1] *= 2;
} else {
$NetworkBlockerList{$cidr}->[1] = $NetworkBlockerMaximumPeriod;
}
$NetworkBlockerList{$cidr}->[0] = $Now + $NetworkBlockerList{$cidr}->[1];
# And we're done!
NetworkBlockerWrite();
ReportError(Ts('Too many connections by %s', $cidr)
. ': ' . Tss('Please do not fetch more than %1 pages in %2 seconds.',
$SurgeProtectionViews, $SurgeProtectionTime),
'503 SERVICE UNAVAILABLE');
}
}
# If the CIDR isn't blocked, let's see if Surge Protection wants to block it.
my $result = OldNetworkBlockerDelayRequired($ip);
warn "$ip was blocked\n" if $result;
# If the IP is to be blocked, determine its CIDRs and put them on a list. Sadly,
# routeviews does not support IPv6 at the moment!
if ($result and not ip_is_ipv6($ip) and not $NetworkBlockerCandidates{$ip}) {
my $reverse = $ob->reverse_ip();
$reverse =~ s/in-addr\.arpa\.$/asn.routeviews.org/;
my @candidates;
for my $rr (rr($reverse, "TXT")) {
next unless $rr->type eq "TXT";
my @data = $rr->txtdata;
push(@candidates, join("/", @data[1..2]));
}
warn "$ip is in @candidates\n";
$NetworkBlockerCandidates{$ip} = [$Now, @candidates];
# Expire any of the other candidates
for my $other_ip (keys %NetworkBlockerCandidates) {
if ($NetworkBlockerCandidates{$other_ip}->[0] < $Now - $NetworkBlockerCachePeriod) {
delete $NetworkBlockerCandidates{$other_ip};
}
}
# Determine if any of the CIDRs is to be blocked.
my $save;
for my $cidr (@candidates) {
# Count how often the candidate CIDRs show up for other IP numbers.
my $count = 0;
for my $other_ip (keys %NetworkBlockerCandidates) {
my @data = $NetworkBlockerCandidates{$other_ip};
for my $other_cidr (@data[1 .. $#data]) {
$count++ if $cidr eq $other_cidr;
}
}
if ($count >= 3) {
$NetworkBlockerList{$cidr} = [$Now + $NetworkBlockerMinimumPeriod, $NetworkBlockerMinimumPeriod];
$save = 1;
}
}
NetworkBlockerWrite() if $save;
}
return $result;
}

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2019 Alex Schroeder <alex@gnu.org>
# Copyright (C) 2019–2023 Alex Schroeder <alex@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -65,6 +65,7 @@ sub RenamePageMenu {
. GetHiddenValue('id', $id)
. $q->textfield(-name=>'to', -size=>20)
. ' '
. $q->submit('Do it'));
. $q->submit('Do it')
. $q->end_form());
}
}

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2006, 2007, 2008 Alex Schroeder <alex@emacswiki.org>
# Copyright (C) 2006–2023 Alex Schroeder <alex@gnu.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -36,18 +36,12 @@ sub SearchListRule {
$term = GetId();
}
local ($OpenPageName, %Page);
my %hash = ();
my @found;
if ($variation eq 'list') {
foreach my $id (SearchTitleAndBody($term)) {
$hash{$id} = 1 unless $id eq $original; # skip the page with the query
}
@found = grep { $_ ne $original } SearchTitleAndBody($term);
} elsif ($variation eq 'titlelist') {
@found = grep { $_ ne $original } Matched($term, AllPagesList());
}
if ($variation eq 'titlelist') {
foreach my $id (grep(/$term/, AllPagesList())) {
$hash{$id} = 1 unless $id eq $original; # skip the page with the query
}
}
my @found = keys %hash;
if (defined &PageSort) {
@found = sort PageSort @found;
} else {
@@ -63,32 +57,24 @@ sub SearchListRule {
return;
}
# Add a new action list
$Action{list} = \&DoList;
sub DoList {
my $id = shift;
my $match = GetParam('match', '');
my $search = GetParam('search', '');
my $id = shift;
my $match = GetParam('match', '');
my $search = GetParam('search', '');
ReportError(T('The search parameter is missing.')) unless $match or $search;
print GetHeader('', Ts('Page list for %s', $match||$search), '');
local (%Page, $OpenPageName);
my %hash = ();
foreach my $id (grep(/$match/, $search
? SearchTitleAndBody($search)
: AllPagesList())) {
$hash{$id} = 1;
}
my @found = keys %hash;
if (defined &PageSort) {
@found = sort PageSort @found;
} else {
@found = sort(@found);
}
@found = map { $q->li(GetPageLink($_)) } @found;
print $q->start_div({-class=>'search list'}),
$q->ul(@found), $q->end_div;
my @found = Matched($match, $search ? SearchTitleAndBody($search) : AllPagesList());
if (defined &PageSort) {
@found = sort PageSort @found;
} else {
@found = sort(@found);
}
@found = map { $q->li(GetPageLink($_)) } @found;
print $q->start_div({-class=>'search list'}), $q->ul(@found), $q->end_div;
PrintFooter();
}

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2007–2014 Alex Schroeder <alex@gnu.org>
# Copyright (C) 2007–2023 Alex Schroeder <alex@gnu.org>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
@@ -36,8 +36,7 @@ sub SisterPages {
push(@pages, AllPagesList()) if GetParam('pages', 1);
push(@pages, keys %PermanentAnchors) if GetParam('permanentanchors', 1);
push(@pages, keys %NearSource) if GetParam('near', 0);
my $match = GetParam('match', '');
@pages = grep /$match/i, @pages if $match;
@pages = Matched(GetParam('match', ''), @pages);
@pages = sort @pages;
return @pages;
}

View File

@@ -23,7 +23,7 @@
use strict;
use v5.10;
our (@MyInitVariables, $HtmlHeaders);
our (@MyInitVariables, $HtmlHeaders, $EditNote);
AddModuleDescription('wordcount.pl', 'Word Count Extension');
@@ -57,3 +57,5 @@ sub WordcountAddScript {
}
</script>";
}
$EditNote = "Words: <span id='textWordCount'></span>" . $EditNote;

View File

@@ -85,7 +85,9 @@ versions of Oddmuse.</p>
<ul>
% for my $tarball (@$tarballs) {
<li>
% if ($tarball ne 'latest') {
<a href="https://oddmuse.org/releases/<%= $tarball %>.tar.gz"><%= $tarball %>.tar.gz</a>
% }
(files for <%= link_to release => {tarball => $tarball} => begin %>\
<%= $tarball =%><%= end %>)
</li>

View File

@@ -675,10 +675,10 @@ sub gemini_text {
$block =~ s/\[\[tag:([^]|]+)\]\]/push(@links, $self->gemini_link("tag\/$1", $1)); $1/ge;
$block =~ s/\[\[tag:([^]|]+)\|([^\]|]+)\]\]/push(@links, $self->gemini_link("tag\/$1", $2)); $2/ge;
$block =~ s/<journal search tag:(\S+)>\n*/push(@links, $self->gemini_link("tag\/$1", "Explore the $1 tag")); ""/ge;
$block =~ s/\[\[image:([^]|]+)\]\]/push(@links, $self->gemini_link($1, "$1 (image)")); "$1"/ge;
$block =~ s/\[\[image:([^]|]+)\|([^\]|]+)\]\]/push(@links, $self->gemini_link($1, "$2 (image)")); "$2"/ge;
$block =~ s/\[\[image:([^]|]+)\|([^\]|]*)\|([^\]|]+)\]\]/push(@links, $self->gemini_link($1, "$2 (image)"), $self->gemini_link($3, "$2 (follow-up)")); "$2"/ge;
$block =~ s/\[\[image:([^]|]+)\|([^\]|]*)\|([^\]|]*)\|([^\]|]+)\]\]/push(@links, $self->gemini_link($1, "$2 (image)"), $self->gemini_link($3, "$4 (follow-up)")); "$2"/ge;
$block =~ s/\[\[image(?:\/right)?:([^]|]+)\]\]/push(@links, $self->gemini_link($1, "$1 (image)")); "$1"/ge;
$block =~ s/\[\[image(?:\/right)?:([^]|]+)\|([^\]|]+)\]\]/push(@links, $self->gemini_link($1, "$2 (image)")); "$2"/ge;
$block =~ s/\[\[image(?:\/right)?:([^]|]+)\|([^\]|]*)\|([^\]|]+)\]\]/push(@links, $self->gemini_link($1, "$2 (image)"), $self->gemini_link($3, "$2 (follow-up)")); "$2"/ge;
$block =~ s/\[\[image(?:\/right)?:([^]|]+)\|([^\]|]*)\|([^\]|]*)\|([^\]|]+)\]\]/push(@links, $self->gemini_link($1, "$2 (image)"), $self->gemini_link($3, "$4 (follow-up)")); "$2"/ge;
$block =~ s/\[\[$FreeLinkPattern\|([^\]|]+)\]\]/push(@links, $self->gemini_link($1, $2)); $2/ge;
$block =~ s/\[\[$FreeLinkPattern\]\]/push(@links, $self->gemini_link($1)); $1/ge;
$block =~ s/\[color=([^]]+)\]/color($1)/ge;

View File

@@ -137,13 +137,13 @@ EOT
xpath_run_tests(split(/\n/,<<'EOT'));
[example](http://example.com/)
//a[@class="url http"][@href="http://example.com/"][text()="example"]
//a[@class="url"][@href="http://example.com/"][text()="example"]
[an example](http://example.com/)
//a[@class="url http"][@href="http://example.com/"][text()="an example"]
//a[@class="url"][@href="http://example.com/"][text()="an example"]
[an example](http://example.com/ "Title")
//a[@class="url http"][@href="http://example.com/"][@title="Title"][text()="an example"]
//a[@class="url"][@href="http://example.com/"][@title="Title"][text()="an example"]
[an\nexample](http://example.com/)
//a[@class="url http"][@href="http://example.com/"][text()="an\nexample"]
//a[@class="url"][@href="http://example.com/"][text()="an\nexample"]
\n[an\n\nexample](http://example.com/)
//p[text()="[an"]/following-sibling::p//text()[contains(string(),"example](")]
EOT

58
t/rollback-hang.t Normal file
View File

@@ -0,0 +1,58 @@
# Copyright (C) 2006–2023 Alex Schroeder <alex@gnu.org>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
require './t/test.pl';
package OddMuse;
use Test::More tests => 4;
use utf8;
# Reproduce a particular bug from alexschroeder.ch with the rc.log provided.
WriteStringToFile($RcFile, <<'EOT');
16853910992023-05-29_Net_newsHow to IRCAnonymousAlex2en
16854004152023-05-29_Net_newsHow to IRCAnonymousAlex3en
1685430599[[rollback]]1685400415Anonymous
16855185032023-05-29_Net_newsAnonymousAlex4en
EOT
local $SIG{ALRM} = sub { fail "timeout!"; kill 'KILL', $$; };
alarm 3;
# this is recent changes from between the rollback and the page before it, so there are no pages to roll back
my $page = get_page("action=rss full=1 short=0 from=1685413682");
alarm 0;
test_page($page, '2023-05-29 Net news');
test_page_negative($page, 'rollback');
# Reproduce a follow-up bug. First, rolling back just Test works as intended.
WriteStringToFile($RcFile, <<'EOT');
1691499987Testham127.0.0.1Berta1
1691499988Mustuff127.0.0.1Chris1
1691499989Testspam127.0.0.1Spammer2
1691499990Test0Rollback to 2023-08-08 13:06 UTC127.0.0.1Alex3
1691499990[[rollback]]1691499987Test
EOT
my $feed = get_page('action=rc raw=1');
test_page($feed, 'title: Test');
# Rolling back all of the wiki doesn't work.
WriteStringToFile($RcFile, <<'EOT');
1691499987Testham127.0.0.1Berta1
1691499988Mustuff127.0.0.1Chris1
1691499989Testspam127.0.0.1Spammer2
1691499990Test0Rollback to 2023-08-08 13:06 UTC127.0.0.1Alex3
1691499990[[rollback]]1691499987
EOT
$feed = get_page('action=rc raw=1');
test_page($feed, 'title: Test');

View File

@@ -129,8 +129,8 @@ test_page(update_page('Testing', 'This is spam.'), 'This page does not exist');
test_page(update_page('Spam', 'Trying again.'), 'This page does not exist');
test_page(get_page('action=translate id=Spam target=Harmless translation=en'),
'Edit Denied',
'Regular expression "spam" matched on this page');
'Regular expression "spam" matched "Spam" on this page');
test_page(get_page('Spam'), 'This page does not exist');
test_page(get_page('action=translate id=Harmless target=Spam translation=en'),
'Edit Denied',
'Regular expression "spam" matched on this page');
'Regular expression "spam" matched "Spam" on this page');

98
wiki.pl
View File

@@ -1,5 +1,5 @@
#! /usr/bin/env perl
# Copyright (C) 2001-2020
# Copyright (C) 2001-2023
# Alex Schroeder <alex@gnu.org>
# Copyright (C) 2014-2015
# Alex Jakimenko <alex.jakimenko@gmail.com>
@@ -39,6 +39,7 @@ use B;
use CGI qw/-utf8/;
use CGI::Carp qw(fatalsToBrowser);
use File::Glob ':glob';
use List::Util qw(all max);
use Encode qw(encode_utf8 decode_utf8);
use sigtrap 'handler' => \&HandleSignals, 'normal-signals', 'error-signals';
local $| = 1; # Do not buffer output (localized for mod_perl)
@@ -514,7 +515,7 @@ sub ApplyRules {
Clean(CloseHtmlEnvironments() . AddHtmlEnvironment('p')); # another one like this further up
} elsif (m/\G&amp;([A-Za-z]+|#[0-9]+|#x[A-Za-f0-9]+);/cg) { # entity references
Clean("&$1;");
} elsif (m/\G\s+/cg) {
} elsif (m/\G[ \t\r\n]+/cg) { # don't use \s because we want to honor NO-BREAK SPACE etc
Clean(' ');
} elsif (m/\G([A-Za-z\x{0080}-\x{fffd}]+([ \t]+[a-z\x{0080}-\x{fffd}]+)*[ \t]+)/cg
or m/\G([A-Za-z\x{0080}-\x{fffd}]+)/cg or m/\G(\S)/cg) {
@@ -1313,7 +1314,7 @@ sub GetId {
SetParam($p, 1); # script/p/q -> p=1
}
}
return $id;
return FreeToNormal($id);
}
sub DoBrowseRequest {
@@ -1543,28 +1544,34 @@ sub LatestChanges {
sub StripRollbacks {
my @result = @_;
if (not (GetParam('all', $ShowAll) or GetParam('rollback', $ShowRollbacks))) { # strip rollbacks
my (%rollback);
my (%rollback); # used for single-page rollbacks
for (my $i = $#result; $i >= 0; $i--) {
# some fields have a different meaning if looking at rollbacks
my ($ts, $id, $target_ts, $target_id) = @{$result[$i]};
# if this is a rollback marker
if ($id eq '[[rollback]]') {
# if this is a single page rollback marker, strip it
if ($target_id) {
$rollback{$target_id} = $target_ts; # single page rollback
splice(@result, $i, 1); # strip marker
# if this page is not already being rolled back, remember the target
# id and target ts so that those lines can be stripped below
if (not $rollback{$target_id} or $target_ts < $rollback{$target_id}) {
$rollback{$target_id} = $target_ts;
}
# the marker is always stripped
splice(@result, $i, 1);
} else {
# if this is a global rollback, things are different: we're going to
# find the correct timestamp and strip all of those lines immediately
my $end = $i;
while ($ts > $target_ts and $i > 0) {
$i--; # quickly skip all these lines
$ts = $result[$i][0];
}
splice(@result, $i + 1, $end - $i);
$i++; # compensate $i-- in for loop
$i-- while $i > 0 and $target_ts < $result[$i-1][0];
# splice the lines found
splice(@result, $i, $end - $i + 1);
}
} elsif ($rollback{$id} and $ts > $rollback{$id}) {
splice(@result, $i, 1); # strip rolled back single pages
}
}
} else { # just strip the marker left by DoRollback()
} else { # if rollbacks are not not shown, just strip the markers
for (my $i = $#result; $i >= 0; $i--) {
splice(@result, $i, 1) if $result[$i][1] eq '[[rollback]]'; # id
}
@@ -1712,6 +1719,11 @@ sub RcOtherParameters {
return $more;
}
sub RcSelfWebsite {
my $action = 'rc';
return "action=$action" . RcOtherParameters(qw(from upto days));
}
sub RcSelfAction {
my $action = GetParam('action', 'rc');
return "action=$action" . RcOtherParameters(qw(from upto days));
@@ -1898,7 +1910,7 @@ sub GetRcRss {
};
my $title = QuoteHtml($SiteName) . ': ' . GetParam('title', QuoteHtml(NormalToFree($HomePage)));
$rss .= "<title>$title</title>\n";
$rss .= "<link>$ScriptName?" . RcSelfAction() . "</link>\n";
$rss .= "<link>$ScriptName?" . RcSelfWebsite() . "</link>\n";
$rss .= qq{<atom:link href="$ScriptName?} . RcSelfAction() . qq{" rel="self" type="application/rss+xml" />\n};
$rss .= qq{<atom:link href="$ScriptName?} . RcPreviousAction() . qq{" rel="previous" type="application/rss+xml" />\n};
$rss .= qq{<atom:link href="$ScriptName?} . RcLastAction() . qq{" rel="last" type="application/rss+xml" />\n};
@@ -1918,7 +1930,7 @@ sub GetRcRss {
$rss .= "<image>\n";
$rss .= "<url>$RssImageUrl</url>\n";
$rss .= "<title>$title</title>\n"; # the same as the channel
$rss .= "<link>$ScriptName?" . RcSelfAction() . "</link>\n"; # the same as the channel
$rss .= "<link>$ScriptName?" . RcSelfWebsite() . "</link>\n"; # the same as the channel
$rss .= "</image>\n";
}
my $limit = GetParam("rsslimit", 15); # Only take the first 15 entries
@@ -2552,23 +2564,30 @@ sub GetFormStart {
}
sub GetSearchForm {
my $html = GetFormStart(undef, 'get', 'search') . $q->start_p;
$html .= $q->label({-for=>'search'}, T('Search:')) . ' '
. $q->textfield(-name=>'search', -id=>'search', -size=>15, -accesskey=>T('f')) . ' ';
if (GetParam('search') ne '' and UserIsAdmin()) { # see DoBrowseRequest
$html .= $q->label({-for=>'replace'}, T('Replace:')) . ' '
. $q->textfield(-name=>'replace', -id=>'replace', -size=>20) . ' '
. $q->label({-for=>'delete', -title=>'If you want to replace matches with the empty string'}, T('Delete')) . ' '
. $q->input({-type=>'checkbox', -name=>'delete'})
. $q->submit('preview', T('Preview'));
my $html = GetFormStart(undef, 'get', 'search');
my $replacing = (GetParam('search') ne '' and UserIsAdmin());
$html .= $q->start_p({-class => ($replacing ? 'replace' : 'search')});
$html .= $q->span({-class=>'search'},
$q->label({-for=>'search'}, T('Search:')) . ' '
. $q->textfield(-name=>'search', -id=>'search', -size=>15, -accesskey=>T('f'))) . ' ';
if ($replacing) { # see DoBrowseRequest
$html .= $q->span({-class=>'replace'},
$q->label({-for=>'replace'}, T('Replace:')) . ' '
. $q->textfield(-name=>'replace', -id=>'replace', -size=>20)) . ' '
. $q->span({-class=>'delete'},
$q->label({-for=>'delete', -title=>'If you want to replace matches with the empty string'}, T('Delete')) . ' '
. $q->input({-type=>'checkbox', -name=>'delete'})) . ' '
. $q->submit('preview', T('Preview')) . ' ';
}
if (GetParam('matchingpages', $MatchingPages)) {
$html .= $q->label({-for=>'matchingpage'}, T('Filter:')) . ' '
. $q->textfield(-name=>'match', -id=>'matchingpage', -size=>15) . ' ';
$html .= $q->span({-class=>'match'},
$q->label({-for=>'matchingpage'}, T('Filter:')) . ' '
. $q->textfield(-name=>'match', -id=>'matchingpage', -size=>15)) . ' ';
}
if (%Languages) {
$html .= $q->label({-for=>'searchlang'}, T('Language:')) . ' '
. $q->textfield(-name=>'lang', -id=>'searchlang', -size=>5, -default=>GetParam('lang', '')) . ' ';
$html .= $q->span({-class=>'lang'},
$q->label({-for=>'searchlang'}, T('Language:')) . ' '
. $q->textfield(-name=>'lang', -id=>'searchlang', -size=>5, -default=>GetParam('lang', ''))) . ' ';
}
$html .= $q->submit('dosearch', T('Go!')) . $q->end_p . $q->end_form;
return $html;
@@ -3356,7 +3375,6 @@ sub SortIndex {
sub DoIndex {
my $raw = GetParam('raw', 0);
my $match = GetParam('match', '');
my $limit = GetParam('n', '');
my @pages = ();
my @menu = ($q->label({-for=>'indexmatch'}, T('Filter:')) . ' '
@@ -3368,7 +3386,7 @@ sub DoIndex {
push(@pages, $sub->()) if $value;
push(@menu, $q->checkbox(-name=>$option, -checked=>$value, -label=>$text));
}
@pages = grep /$match/i, @pages if $match;
@pages = Matched(GetParam('match', ''), @pages);
@pages = sort SortIndex @pages;
@pages = @pages[0 .. $limit - 1] if $limit;
if ($raw) {
@@ -3541,11 +3559,23 @@ sub SearchTitleAndBody {
return @found;
}
sub Filtered { # this is overwritten in extensions such as tags.pl
# Filter the pages to be searched for $string. The default implementation
# ignores $string and uses $match instead, just in case the user used both
# search and match parameters. This is overwritten in extensions such as tags.pl
# which extract tags from $string and use that to filter the pages.
sub Filtered {
my ($string, @pages) = @_;
my $match = GetParam('match', '');
@pages = grep /$match/i, @pages if $match;
return @pages;
return Matched(GetParam('match', ''), @pages);
}
sub Matched { # strictly for page titles
my ($string, @pages) = @_;
return @pages unless $string;
my @terms = grep { $_ } split(/[ _]+/, $string);
return grep {
my $id = $_;
all { $id =~ /$_/i } @terms;
} @pages;
}
sub SearchString {