2007-04-02 16:10:19 +00:00
|
|
|
# Copyright (C) 2005-2007 Fletcher T. Penney <fletcher@fletcherpenney.net>
|
2005-07-28 21:39:46 +00:00
|
|
|
# Copyright (C) 2005 Alex Schroeder <alex@emacswiki.org>
|
|
|
|
|
#
|
2005-08-06 16:08:42 +00:00
|
|
|
# Portions of Markdown code Copyright (C) 2004 John Gruber
|
|
|
|
|
# <http://daringfireball.net/projects/markdown/>
|
|
|
|
|
#
|
2005-07-28 21:39:46 +00:00
|
|
|
# This program is free software; you can redistribute it and/or modify
|
|
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
|
# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
|
# (at your option) any later version.
|
|
|
|
|
#
|
|
|
|
|
# This program is distributed in the hope that it will be useful,
|
|
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
|
# GNU General Public License for more details.
|
2005-08-01 23:34:52 +00:00
|
|
|
#
|
2005-07-28 21:39:46 +00:00
|
|
|
# You should have received a copy of the GNU General Public License
|
|
|
|
|
# along with this program; if not, write to the
|
|
|
|
|
# Free Software Foundation, Inc.
|
|
|
|
|
# 59 Temple Place, Suite 330
|
|
|
|
|
# Boston, MA 02111-1307 USA
|
|
|
|
|
|
|
|
|
|
|
2005-08-01 23:34:52 +00:00
|
|
|
# Original MarkdownRule by Alex Schroeder
|
|
|
|
|
# Remainder by Fletcher Penney
|
|
|
|
|
|
|
|
|
|
# To enable other features, I suggest you also check out:
|
2007-04-02 16:10:19 +00:00
|
|
|
# MultiMarkdown <http://fletcherpenney.net/MultiMarkdown>
|
2005-08-01 23:34:52 +00:00
|
|
|
|
2007-04-02 15:00:56 +00:00
|
|
|
# Requires MultiMarkdown 2.0.a2 or higher
|
2005-08-01 23:34:52 +00:00
|
|
|
|
2007-09-09 21:26:57 +00:00
|
|
|
# TODO: auto links in codespans should not be interpreted (e.g. `<http://somelink/>`)
|
2007-04-02 15:00:56 +00:00
|
|
|
|
2014-08-21 22:23:23 +02:00
|
|
|
AddModuleDescription('markdown.pl', 'Markdown Extension');

use vars qw!%MarkdownRuleOrder @MyMarkdownRules $MarkdownEnabled $SmartyPantsEnabled!;

# Feature toggles; site config may turn either off.
$MarkdownEnabled = 1;
$SmartyPantsEnabled = 1;

# Markdown replaces the standard wiki rule set entirely: MarkdownRule
# is the only rule, and it runs early (negative order).
@MyRules = (\&MarkdownRule);
$RuleOrder{\&MarkdownRule} = -10;

# Set while headers are being rendered so WikiWord linking is skipped
# (see NewDoHeaders / NewRunSpanGamut).
$TempNoWikiWords = 0;
|
2005-07-28 21:39:46 +00:00
|
|
|
|
|
|
|
|
# Main wiki rule: handles <journal ...> inclusion tags, then hands the
# entire page to (Multi)Markdown in a single pass.  Returns the rendered
# HTML, or undef when the rule does not apply.
sub MarkdownRule {
  # Allow journal pages
  # NOTE(review): the page text has been through QuoteHtml, so a literal
  # '<' arrives as '&lt;' while '>' stays raw (see MarkdownQuoteHtml) —
  # confirm against the upstream module, this span was entity-garbled.
  if (m/\G(&lt;journal(\s+(\d*))?(\s+"(.*)")?(\s+(reverse))?\>[ \t]*\n?)/cgi) {
    # <journal 10 "regexp"> includes 10 pages matching regexp
    Clean(CloseHtmlEnvironments());
    Dirty($1);
    my $oldpos = pos;
    PrintJournal($3, $5, $7); # count, page regexp, reverse flag
    Clean(AddHtmlEnvironment('p')); # if dirty block is looked at later, this will disappear
    pos = $oldpos; # restore \G after call to ApplyRules
    return;
  }
  if (pos == 0) { # only fires once, at the very start of the page
    my $pos = length($_); # fake matching entire file
    my $source = $_;
    # fake that we're blosxom!
    $blosxom::version = 1;
    require "$ModuleDir/Markdown/MultiMarkdown.pl";
    # Override selected Markdown internals for wiki compatibility.
    *Markdown::_RunSpanGamut = *NewRunSpanGamut;
    *Markdown::_DoHeaders = *NewDoHeaders;
    *Markdown::_EncodeCode = *NewEncodeCode;
    *Markdown::_DoAutoLinks = *NewDoAutoLinks;
    *Markdown::_ParseMetaData = *NewParseMetaData;
    # Do not allow raw HTML
    $source = SanitizeSource($source);
    # Allow other Modules to process raw text before Markdown
    # This allows other modules to be "Markdown Compatible"
    @MyMarkdownRules = sort {$MarkdownRuleOrder{$a} <=> $MarkdownRuleOrder{$b}} @MyMarkdownRules; # default is 0
    foreach my $sub (@MyMarkdownRules) {
      $source = $sub->($source);
    }
    my $result = Markdown::Markdown($source);
    if ($SmartyPantsEnabled) {
      require "$ModuleDir/Markdown/SmartyPants.pl";
      $result = SmartyPants::SmartyPants($result, "2", undef);
    }
    $result = UnescapeWikiWords($result);
    $result = AntiSpam($result);
    pos = $pos; # pretend we consumed the whole page
    # Encode '<' and '>' for RSS feeds
    # Otherwise, "full" does not work
    if (GetParam("action",'') =~ /^(rss|journal)$/) {
      $result =~ s/\</&lt;/g;
      $result =~ s/\>/&gt;/g;
    }
    return $result;
  }
  return undef; # rule did not apply
}
|
2005-08-01 23:34:52 +00:00
|
|
|
|
|
|
|
|
# Escape every literal '<' in the raw page text so contributors cannot
# inject arbitrary HTML tags through Markdown's inline-HTML feature.
sub SanitizeSource {
  my $text = shift; # was an undeclared package global; lexical is equivalent here

  # We don't want to allow insertion of raw html into Wikis
  # for security reasons.
  # By converting all '<', we preclude inclusion of HTML tags.
  # We don't have to do the same for '>', which would screw up blockquotes.
  # Remember, on a wiki, we don't want to allow arbitrary HTML...
  # (in other words, this is not a bug)
  $text =~ s/</&lt;/g;

  return $text;
}
|
|
|
|
|
|
2005-08-02 03:13:00 +00:00
|
|
|
|
2005-08-01 23:34:52 +00:00
|
|
|
# Replace certain core OddMuse routines for compatibility
*GetCluster = *MarkdownGetCluster;

# Return the cluster name when the page starts with a WikiWord or a
# free link on a line of its own; '' when clustering is disabled.
sub MarkdownGetCluster {
  $_ = shift; # NOTE(review): clobbers the caller's $_ — presumably intentional, verify
  return '' unless $PageCluster;
  if (( /^$LinkPattern\n/)
      or (/^\[\[$FreeLinkPattern\]\]\n/)) {
    return $1
  };
  # NOTE(review): no explicit return on fallthrough (yields undef / empty list)
}
|
|
|
|
|
|
|
|
|
|
|
2007-03-10 22:20:47 +00:00
|
|
|
# Let Markdown handle special characters, rather than OddMuse
#
# This opened up a security flaw whereby a user's input (e.g. search string)
# would be displayed as raw HTML).
#
# It also appears that Alex has changed the way Oddmuse works, so I can't seem
# to provide a workaround at this time. I don't remember why I had to add this
# routine, so I am disabling it for now, and will have to "re-fix things" when
# I figure out if anything is now broken....

*oldQuoteHtml = *QuoteHtml;
*QuoteHtml = *MarkdownQuoteHtml;

# HTML-quote '&' and '<' — deliberately NOT '>', so Markdown blockquotes
# and the <journal ...> rule keep working — and blank out control
# characters that are illegal in XML output.
sub MarkdownQuoteHtml {
  my $html = shift;

  $html =~ s/&/&amp;/g;
  $html =~ s/</&lt;/g;
  # $html =~ s/>/&gt;/g;
  $html =~ s/[\x00-\x08\x0b\x0c\x0e-\x1f]/ /g; # legal xml: #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFD]

  return $html;
}
|
|
|
|
|
|
2005-08-01 23:34:52 +00:00
|
|
|
|
|
|
|
|
# Change InterMap/NearLink to match >1 space, rather than exactly one
# So that Markdown can display as codeblock

*InterInit = *MarkdownInterInit;
*NearInit = *MarkdownNearInit;

# Parse the InterMap page into %InterSite, accepting one or more
# leading spaces before each entry (core OddMuse requires exactly one).
sub MarkdownInterInit {
  $InterSiteInit = 1;
  foreach (split(/\n/, GetPageContent($InterMap))) {
    if (/^ +($InterSitePattern)[ \t]+([^ ]+)$/) {
      $InterSite{$1} = $2;
    }
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Parse the NearMap page into %NearSite/%NearSearch/%NearSource,
# accepting one or more leading spaces (see MarkdownInterInit).
sub MarkdownNearInit {
  InterInit() unless $InterSiteInit;
  $NearSiteInit = 1;
  foreach (split(/\n/, GetPageContent($NearMap))) {
    if (/^ +($InterSitePattern)[ \t]+([^ ]+)(?:[ \t]+([^ ]+))?$/) {
      my ($site, $url, $search) = ($1, $2, $3);
      next unless $InterSite{$site}; # near sites must also be inter sites
      $NearSite{$site} = $url;
      $NearSearch{$site} = $search if $search;
      my ($status, $data) = ReadFile("$NearDir/$site");
      next unless $status; # skip sites whose page list is unreadable
      foreach my $page (split(/\n/, $data)) {
        push(@{$NearSource{$page}}, $site);
      }
    }
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Modify the Markdown source to work with OddMuse

# Turn [[free links]], [[image:...]], [[download:...]] and bare
# WikiWords into Markdown links before Markdown renders the text.
sub DoWikiWords {
  my $text = shift;
  my $WikiWord = '[A-Z]+[a-z\x{0080}-\x{fffd}]+[A-Z][A-Za-z\x{0080}-\x{fffd}]*';
  my $FreeLinkPattern = "([-,.()' _0-9A-Za-z\x{0080}-\x{fffd}]+)";

  if ($FreeLinks) {
    # FreeLinks
    $text =~ s{
      \[\[($FreeLinkPattern)\]\]
    }{
      my $label = $1;
      # backslash-escape WikiWords inside the label so they are not linked twice
      $label =~ s{
        ([\s\>])($WikiWord)
      }{
        $1 ."\\" . $2
      }xsge;
      CreateWikiLink($label)
    }xsge;

    # Images - this is too convenient not to support...
    # Though it doesn't fit with Markdown syntax
    $text =~ s{
      (\[\[image:$FreeLinkPattern\]\])
    }{
      my $link = GetDownloadLink($2, 1, undef, $3);
      $link =~ s/_/&#95;/g; # NOTE(review): keep '_' away from Markdown emphasis — confirm entity against upstream
      $link
    }xsge;

    $text =~ s{
      (\[\[image:$FreeLinkPattern\|([^]|]+)\]\])
    }{
      my $link = GetDownloadLink($2, 1, undef, $3);
      $link =~ s/_/&#95;/g;
      $link
    }xsge;

    # And Same thing for downloads
    $text =~ s{
      (\[\[download:$FreeLinkPattern\|?(.*)\]\])
    }{
      my $link = GetDownloadLink($2, undef, undef, $3);
      $link =~ s/_/&#95;/g;
      $link
    }xsge;
  }

  # WikiWords
  if ($WikiLinks) {
    $text =~ s{
      ([\s\>])($WikiWord\b)
    }{
      $1 . CreateWikiLink($2)
    }xsge;

    # Catch WikiWords at beginning of page (ie PageCluster)
    $text =~ s{^($WikiWord)
    }{
      CreateWikiLink($1)
    }xse;
  }

  return $text;
}
|
|
|
|
|
|
|
|
|
|
# Build a Markdown-style link for a wiki page title.  Resolvable pages
# become ordinary links; missing pages get a trailing [?] edit link.
sub CreateWikiLink {
  my $title = shift;

  # Derive the page id from the title the way OddMuse does:
  # spaces become single underscores, stripped at both ends.
  my $id = $title;
  $id =~ s/ /_/g;
  $id =~ s/__+/_/g;
  $id =~ s/^_//g;
  $id =~ s/_$//;

  #AllPagesList();
  #my $exists = $IndexHash{$id};

  my $resolvable = $id;
  $resolvable =~ s/\\//g; # drop backslash escapes added by DoWikiWords

  my ($class, $resolved, $linktitle, $exists) = ResolveId($resolvable);

  if ($resolved) {
    if ($class eq 'near') {
      return "[$title]($ScriptName/$resolved)";
    }
    return "[$title]($ScriptName/" . UrlEncode($resolved) . ")";
  } else {
    # Page does not exist: link the [?] marker to the edit action.
    if ($title =~ / /) {
      return "[$title]\[?]($ScriptName/?action=edit;id=$id)";
    } else {
      return "$title\[?]($ScriptName/?action=edit;id=$id)";
    }
  }
}
|
|
|
|
|
|
|
|
|
|
# Remove the backslash escapes protecting WikiWords (added by
# DoWikiWords and NewEncodeCode) once Markdown processing is done.
sub UnescapeWikiWords {
  my $text = shift;
  my $WikiWord = '[A-Z]+[a-z\x{0080}-\x{fffd}]+[A-Z][A-Za-z\x{0080}-\x{fffd}]*';

  # Unescape escaped WikiWords
  $text =~ s/\\($WikiWord)/$1/g;

  return $text;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Replacement for Markdown::_RunSpanGamut that interleaves WikiWord
# processing with Markdown's span-level transformations.
sub NewRunSpanGamut {
  #
  # These are all the transformations that occur *within* block-level
  # tags like paragraphs, headers, and list items.
  #
  my $text = shift;

  $text = Markdown::_DoCodeSpans($text);

  $text = Markdown::_EscapeSpecialCharsWithinTagAttributes($text);

  # Process anchor and image tags. Images must come first,
  # because ![foo][f] looks like an anchor.
  $text = Markdown::_DoImages($text);
  $text = NewDoAnchors($text);

  # Process WikiWords (skipped inside headers, see NewDoHeaders)
  if (!$TempNoWikiWords) {
    $text = DoWikiWords($text);

    # And then reprocess anchors and images
    $text = Markdown::_DoImages($text);
    $text = NewDoAnchors($text);
  }

  # Make links out of things like `<http://example.com/>`
  # Must come after _DoAnchors(), because you can use < and >
  # delimiters in inline links like [this](<url>).
  $text = Markdown::_DoAutoLinks($text);

  $text = Markdown::_EncodeAmpsAndAngles($text);

  $text = Markdown::_DoItalicsAndBold($text);

  # Do hard breaks:
  $text =~ s/ {2,}\n/$Markdown::g_hardbreak/g;

  return $text;
}
|
|
|
|
|
|
|
|
|
|
# Don't do wiki words in headers

*OldDoHeaders = *Markdown::_DoHeaders;

# Wrap Markdown's header pass, temporarily disabling WikiWord linking
# (consumed by NewRunSpanGamut) while headers are generated.
sub NewDoHeaders {
  my $text = shift;

  $TempNoWikiWords = 1;

  $text = OldDoHeaders($text);

  $TempNoWikiWords = 0;

  return $text;
}
|
|
|
|
|
|
|
|
|
|
# Protect WikiWords in Code Blocks

*OldEncodeCode = *Markdown::_EncodeCode;

# Wrap Markdown's code encoder: first undo the wiki's HTML quoting so
# code spans show their literal characters, then backslash-escape any
# WikiWords so they are not turned into links.
sub NewEncodeCode {
  my $text = shift;

  # Undo sanitization of '<, >, and &' (necessary due to a change in how Oddmuse works)
  $text =~ s/&lt;/</g;
  # $text =~ s/&gt;/>/g;
  $text =~ s/&amp;/&/g;

  $text = OldEncodeCode($text);

  # Protect Wiki Words
  my $WikiWord = '[A-Z]+[a-z\x{0080}-\x{fffd}]+[A-Z][A-Za-z\x{0080}-\x{fffd}]*';
  $text =~ s!($WikiWord)!\\$1!gx;

  return $text;
}
|
|
|
|
|
|
2005-08-03 04:03:10 +00:00
|
|
|
|
|
|
|
|
# Obfuscate email addresses by rewriting every character as a decimal
# HTML entity, making harvesting by simple spam bots harder.
sub AntiSpam {
  my $text = shift;
  my $EmailRegExp = '[\w\.\-]+@([\w\-]+\.)+[\w]+';

  $text =~ s {
    ($EmailRegExp)
  }{
    my $masked = "";
    my @decimal = unpack('C*', $1); # byte values of the matched address
    foreach my $i (@decimal) {
      $masked .= "&#" . $i . ";";
    }
    $masked
  }xsge;

  return $text;
}
|
|
|
|
|
|
|
|
|
|
# Replacement for Markdown::_DoAutoLinks that copes with the wiki's
# HTML quoting.  NOTE(review): per MarkdownQuoteHtml a literal '<'
# arrives as '&lt;' while '>' stays raw — this span was entity-garbled,
# confirm the delimiters against the upstream module.
sub NewDoAutoLinks {
  my $text = shift;

  # Added > to the excluded characters list for Oddmuse compatibility
  $text =~ s{&lt;((https?|ftp|dict):[^'"<>\s]+)\>}{<a href="$1">$1</a>}gi;

  # Email addresses: <address@domain.foo>
  $text =~ s{
    &lt;
    (?:mailto:)?
    (
      [-.\w]+
      \@
      [-a-z0-9]+(\.[-a-z0-9]+)*\.[a-z]+
    )
    >
  }{
    Markdown::_EncodeEmailAddress( Markdown::_UnescapeSpecialChars($1) );
  }egix;

  return $text;
}
|
2005-08-06 15:41:36 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
# Fix problem with validity - Oddmuse forced a page to start with <p>,
# which screws up Markdown

*PrintWikiToHTML = *MarkdownPrintWikiToHTML;

# Render a page without OddMuse's forced leading <p>, updating the
# page's cached blocks/flags when caching is permitted.
sub MarkdownPrintWikiToHTML {
  my ($pageText, $savecache, $revision, $islocked) = @_;
  $FootnoteNumber = 0;
  $pageText =~ s/$FS//g; # Remove separators (paranoia)
  $pageText = QuoteHtml($pageText);
  my ($blocks, $flags) = ApplyRules($pageText, 1, $savecache, $revision); # p is start tag!
  # local links, anchors if cache ok
  # NOTE(review): core OddMuse tests the two ne-comparisons with 'or';
  # 'and' here skips the cache update when only one of blocks/flags
  # changed — confirm whether that is intentional.
  if ($savecache and not $revision and $Page{revision} # don't save revision 0 pages
      and $Page{blocks} ne $blocks and $Page{flags} ne $flags) {
    $Page{blocks} = $blocks;
    $Page{flags} = $flags;
    if ($islocked or RequestLockDir('main')) { # not fatal!
      SavePage();
      ReleaseLock() unless $islocked;
    }
  }
}
|
2005-08-17 13:06:07 +00:00
|
|
|
|
|
|
|
|
*AddComment = *MarkdownAddComment;

# Append a signed comment (in Markdown syntax) to $old and return the
# combined string; empty or placeholder comments are ignored.
sub MarkdownAddComment {
  my ($old, $comment) = @_;
  my $string = $old;
  $comment =~ s/\r//g; # Remove "\r"-s (0x0d) from the string
  $comment =~ s/\s+$//g; # Remove whitespace at the end
  if ($comment ne '' and $comment ne $NewComment) {
    my $author = GetParam('username', T('Anonymous'));
    my $homepage = GetParam('homepage', '');
    $homepage = 'http://' . $homepage if $homepage and not substr($homepage,0,7) eq 'http://';
    $author = "[$author]($homepage)" if $homepage; # Markdown link to the author's homepage
    $string .= "\n----\n\n" if $string and $string ne "\n"; # horizontal rule between comments
    $string .= $comment . "\n\n-- " . $author . ' ' . TimeToText($Now) . "\n\n";
  }
  return $string;
}
|
|
|
|
|
|
2007-03-10 22:20:47 +00:00
|
|
|
# Thin wrapper around Markdown's anchor pass; kept as a separate hook
# so NewRunSpanGamut can call it before and after WikiWord handling.
# (An unused local $WikiWord pattern was removed.)
sub NewDoAnchors {
  my $text = shift;
  return Markdown::_DoAnchors($text);
}
|
|
|
|
|
|
|
|
|
|
# Disable MultiMarkdown's metadata parsing: a colon (or a link) in the
# first line of a wiki page would otherwise be eaten as metadata.
sub NewParseMetaData {
  # Attempting to parse metadata screws up Oddmuse
  # if there is a colon in the first line (or a link)
  my $text = shift;
  return $text; # pass the text through untouched
}
|