use warnings;
use strict;
-use IkiWiki 2.00;
+use IkiWiki 3.00;
# This regexp matches urls that are in a known safe scheme.
# Feel free to use it from other plugins.
our $safe_url_regexp;
-sub import { #{{{
+sub import {
+	hook(type => "getsetup", id => "htmlscrubber", call => \&getsetup);
	hook(type => "sanitize", id => "htmlscrubber", call => \&sanitize);
	# Only known uri schemes are allowed to avoid all the ways of
	# embedding javascript.
"msnim", "notes", "rsync", "secondlife", "skype", "ssh",
"sftp", "smb", "sms", "snews", "webcal", "ymsgr",
);
-	# data is a special case. Allow data:image/*, but
-	# disallow data:text/javascript and everything else.
-	$safe_url_regexp=qr/^(?:(?:$uri_schemes):|data:image\/|[^:]+(?:$|\/))/i;
-} # }}}
+	# data is a special case. Allow a few data:image/ types,
+	# but disallow data:text/javascript and everything else.
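+	# Also allow urls with no scheme, ie. no ":" before the first
+	# "/", "?" or end of string.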
+	$safe_url_regexp=qr/^(?:(?:$uri_schemes):|data:image\/(?:png|jpeg|gif)|[^:]+(?:$|[\/\?]))/i;
+}
-sub sanitize (@) { #{{{
+sub getsetup () {
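+	# Describe the plugin and its htmlscrubber_skip option to
+	# websetup and ikiwiki.setup.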
+	return
+		plugin => {
+			safe => 1,
+			rebuild => undef,
+			section => "core",
+		},
+		htmlscrubber_skip => {
+			type => "pagespec",
+			example => "!*/Discussion",
+			description => "PageSpec specifying pages not to scrub",
+			link => "ikiwiki/PageSpec",
+			safe => 1,
+			rebuild => undef,
+		},
+}
+
+sub sanitize (@) {
	my %params=@_;
+
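+	# Pages matching the htmlscrubber_skip pagespec are returned
+	# unscrubbed.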
+	if (exists $config{htmlscrubber_skip} &&
+	    length $config{htmlscrubber_skip} &&
+	    exists $params{destpage} &&
+	    pagespec_match($params{destpage}, $config{htmlscrubber_skip})) {
+		return $params{content};
+	}
+
	return scrubber()->scrub($params{content});
-} # }}}
+}
my $_scrubber;
-sub scrubber { #{{{
+sub scrubber {
	return $_scrubber if defined $_scrubber;
	eval q{use HTML::Scrubber};
		}],
	);
	return $_scrubber;
-} # }}}
+}
1
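
The comment near the top invites other plugins to reuse $safe_url_regexp. As
a minimal sketch (the safeurl() helper name and its permissive fallback are
assumptions, not part of this patch), another plugin could check a url like
this:

	# Hypothetical helper in another plugin: only trust a url if
	# htmlscrubber is loaded and its regexp accepts it.
	sub safeurl ($) {
		my $url=shift;
		if (defined $IkiWiki::Plugin::htmlscrubber::safe_url_regexp) {
			return $url =~ $IkiWiki::Plugin::htmlscrubber::safe_url_regexp;
		}
		return 1; # htmlscrubber disabled; no scheme filtering to apply
	}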