use warnings;
use strict;
use IkiWiki 1.00;
-use IkiWiki::Render; # for displaytime
use URI;
sub import { #{{{
+ hook(type => "getopt", id => "inline", call => \&getopt);
+ hook(type => "checkconfig", id => "inline", call => \&checkconfig);
hook(type => "preprocess", id => "inline",
call => \&IkiWiki::preprocess_inline);
hook(type => "pagetemplate", id => "inline",
call => \&IkiWiki::pingurl);
} # }}}
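+# Handle the --rss/--no-rss and --atom/--no-atom command-line switches,
+# which toggle feed generation globally via %config.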
+sub getopt () { #{{{
+ eval q{use Getopt::Long};
+ error($@) if $@;
+ Getopt::Long::Configure('pass_through');
+ GetOptions(
+ "rss!" => \$config{rss},
+ "atom!" => \$config{atom},
+ );
+} #}}}
+
+sub checkconfig () { #{{{
+ if (($config{rss} || $config{atom}) && ! length $config{url}) {
+ error(gettext("Must specify url to wiki with --url when using --rss or --atom"));
+ }
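+ # Feed files are generated output, not source pages; keep ikiwiki
+ # from treating any .rss/.atom files it finds as part of the wiki.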
+ if ($config{rss}) {
+ push @{$config{wiki_file_prune_regexps}}, qr/\.rss$/;
+ }
+ if ($config{atom}) {
+ push @{$config{wiki_file_prune_regexps}}, qr/\.atom$/;
+ }
+} #}}}
+
# Back to ikiwiki namespace for the rest, this code is very much
# internal to ikiwiki even though it's separated into a plugin.
package IkiWiki;
my $archive=yesno($params{archive});
my $rss=($config{rss} && exists $params{rss}) ? yesno($params{rss}) : $config{rss};
my $atom=($config{atom} && exists $params{atom}) ? yesno($params{atom}) : $config{atom};
- my $feeds=exists $params{feeds} ? yesno($params{feeds}) : 1;
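+ # quick=yes defaults feeds off and, together with archive=yes, skips
+ # rendering the content of each inlined page.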
+ my $quick=exists $params{quick} ? yesno($params{quick}) : 0;
+ my $feeds=exists $params{feeds} ? yesno($params{feeds}) : !$quick;
if (! exists $params{show} && ! $archive) {
$params{show}=10;
}
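+ # An illustrative (hypothetical) directive exercising these parameters:
+ #   [[inline pages="blog/*" show="10" feedshow="5" reverse="yes"]]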
my @list;
foreach my $page (keys %pagesources) {
next if $page eq $params{page};
- if (pagespec_match($page, $params{pages})) {
+ if (pagespec_match($page, $params{pages}, $params{page})) {
push @list, $page;
}
}
@list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
}
else {
- return "unknown sort type $params{sort}";
+ return sprintf(gettext("unknown sort type %s"), $params{sort});
+ }
+
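+ # reverse=yes flips the sort order, e.g. to list oldest pages first.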
+ if (yesno($params{reverse})) {
+ @list=reverse(@list);
}
if (exists $params{skip}) {
my $file = $pagesources{$page};
my $type = pagetype($file);
if (! $raw || ($raw && ! defined $type)) {
- # Get the content before populating the template,
- # since getting the content uses the same template
- # if inlines are nested.
- # TODO: if $archive=1, the only reason to do this
- # is to let the meta plugin get page title info; so stop
- # calling this next line then once the meta plugin can
- # store that accross runs (also tags plugin).
- my $content=get_inline_content($page, $params{destpage});
- # Don't use htmllink because this way the title is separate
- # and can be overridden by other plugins.
+ unless ($archive && $quick) {
+ # Get the content before populating the
+ # template, since getting the content uses
+ # the same template if inlines are nested.
+ my $content=get_inline_content($page, $params{destpage});
+ $template->param(content => $content);
+ }
+ # Don't use htmllink because this way the
+ # title is separate and can be overridden by
+ # other plugins.
my $link=bestlink($params{page}, $page);
$link=htmlpage($link) if defined $type;
$link=abs2rel($link, dirname($params{destpage}));
$template->param(pageurl => $link);
$template->param(title => pagetitle(basename($page)));
- $template->param(content => $content);
$template->param(ctime => displaytime($pagectime{$page}));
if ($actions) {
my $file = $pagesources{$page};
my $type = pagetype($file);
if ($config{discussion}) {
- $template->param(have_actions => 1);
- $template->param(discussionlink => htmllink($page, $page, "Discussion", 1, 1));
+ my $discussionlink=gettext("discussion");
+ if ($page !~ /.*\/\Q$discussionlink\E$/ &&
+ (length $config{cgiurl} ||
+ exists $links{$page."/".$discussionlink})) {
+ $template->param(have_actions => 1);
+ $template->param(discussionlink => htmllink($page, $params{page}, gettext("Discussion"), 1, 1));
+ }
}
if (length $config{cgiurl} && defined $type) {
$template->param(have_actions => 1);
}
}
- if ($feeds && $rss) {
- will_render($params{page}, rsspage($params{page}));
- writefile(rsspage($params{page}), $config{destdir},
- genfeed("rss", $rssurl, $desc, $params{page}, @list));
- $toping{$params{page}}=1 unless $config{rebuild};
- $feedlinks{$params{destpage}}=qq{<link rel="alternate" type="application/rss+xml" title="RSS" href="$rssurl" />};
- }
- if ($feeds && $atom) {
- will_render($params{page}, atompage($params{page}));
- writefile(atompage($params{page}), $config{destdir},
- genfeed("atom", $atomurl, $desc, $params{page}, @list));
- $toping{$params{page}}=1 unless $config{rebuild};
- $feedlinks{$params{destpage}}=qq{<link rel="alternate" type="application/atom+xml" title="Atom" href="$atomurl" />};
+ if ($feeds) {
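+ # feedshow can cap the number of items in the rss/atom feeds
+ # independently of how many items are shown inline on the page.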
+ if (exists $params{feedshow} && @list > $params{feedshow}) {
+ @list=@list[0..$params{feedshow} - 1];
+ }
+
+ if ($rss) {
+ will_render($params{page}, rsspage($params{page}));
+ writefile(rsspage($params{page}), $config{destdir},
+ genfeed("rss", $rssurl, $desc, $params{page}, @list));
+ $toping{$params{page}}=1 unless $config{rebuild};
+ $feedlinks{$params{destpage}}=qq{<link rel="alternate" type="application/rss+xml" title="RSS" href="$rssurl" />};
+ }
+ if ($atom) {
+ will_render($params{page}, atompage($params{page}));
+ writefile(atompage($params{page}), $config{destdir},
+ genfeed("atom", $atomurl, $desc, $params{page}, @list));
+ $toping{$params{page}}=1 unless $config{rebuild};
+ $feedlinks{$params{destpage}}=qq{<link rel="alternate" type="application/atom+xml" title="Atom" href="$atomurl" />};
+ }
}
return $ret;
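+ # Feed dates must use English names regardless of locale, so LC_TIME
+ # is forced to "C" around strftime and then restored.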
eval q{use POSIX};
error($@) if $@;
- my $lc_time= POSIX::setlocale(&POSIX::LC_TIME);
+ my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
POSIX::setlocale(&POSIX::LC_TIME, "C");
my $ret=POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time));
POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
eval q{use POSIX};
error($@) if $@;
- my $lc_time= POSIX::setlocale(&POSIX::LC_TIME);
+ my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
POSIX::setlocale(&POSIX::LC_TIME, "C");
my $ret=POSIX::strftime("%Y-%m-%dT%H:%M:%SZ", localtime($time));
POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
sub absolute_urls ($$) { #{{{
# sucky sub because rss sucks
my $content=shift;
- my $url=shift;
+ my $baseurl=shift;
+ my $url=$baseurl;
$url=~s/[^\/]+$//;
- $content=~s/<a\s+href="(?![^:]+:\/\/)([^"]+)"/<a href="$url$1"/ig;
- $content=~s/<img\s+src="(?![^:]+:\/\/)([^"]+)"/<img src="$url$1"/ig;
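+ # Anchor-only links (#foo) get the full page url prepended; other
+ # relative hrefs and img srcs get the base directory prepended.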
+ $content=~s/(<a(?:\s+(?:class|id)="?\w+"?)?)\s+href="(#[^"]+)"/$1 href="$baseurl$2"/ig;
+ $content=~s/(<a(?:\s+(?:class|id)="?\w+"?)?)\s+href="(?!\w+:\/\/)([^"]+)"/$1 href="$url$2"/ig;
+ $content=~s/(<img(?:\s+(?:class|id)="?\w+"?)?)\s+src="(?!\w+:\/\/)([^"]+)"/$1 src="$url$2"/ig;
return $content;
} #}}}
my $lasttime = 0;
foreach my $p (@pages) {
my $u=URI->new(encode_utf8($config{url}."/".htmlpage($p)));
+
+ my $pcontent = absolute_urls(get_inline_content($p, $page), $url);
$itemtemplate->param(
- title => pagetitle(basename($p)),
+ title => pagetitle(basename($p), 1),
url => $u,
permalink => $u,
date_822 => date_822($pagectime{$p}),
date_3339 => date_3339($pagectime{$p}),
);
- my $pcontent = absolute_urls(get_inline_content($p, $page), $url);
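+ # Only bother looking up the page's source file and type if the
+ # feed template actually has an enclosure field.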
if ($itemtemplate->query(name => "enclosure")) {
my $file=$pagesources{$p};
my $type=pagetype($file);
my $template=template($feedtype."page.tmpl", blind_cache => 1);
$template->param(
- title => $page ne "index" ? pagetitle($page) : $config{wikiname},
+ title => $page ne "index" ? pagetitle($page, 1) : $config{wikiname},
wikiname => $config{wikiname},
pageurl => $url,
content => $content,
} #}}}
sub pingurl (@) { #{{{
- return unless $config{pingurl} && %toping;
+ return unless @{$config{pingurl}} && %toping;
eval q{require RPC::XML::Client};
if ($@) {
- debug("RPC::XML::Client not found, not pinging");
+ debug(gettext("RPC::XML::Client not found, not pinging"));
return;
}
# daemonize here so slow pings don't slow down wiki updates
- eval q{use POSIX ’setsid’};
- chdir '/';
- open STDIN, '/dev/null';
- open STDOUT, '>/dev/null';
defined(my $pid = fork) or error("Can't fork: $!");
return if $pid;
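+ # The child detaches into its own session and redirects stdio to
+ # /dev/null before pinging, so slow pings cannot tie up the build.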
+ chdir '/';
+ eval q{use POSIX 'setsid'};
setsid() or error("Can't start a new session: $!");
- open STDERR, '>&STDOUT' or error("Can’t dup stdout: $!");
+ open STDIN, '/dev/null';
+ open STDOUT, '>/dev/null';
+ open STDERR, '>&STDOUT' or error("Can't dup stdout: $!");
+
+ # Don't need to keep a lock on the wiki as a daemon.
+ IkiWiki::unlockwiki();
foreach my $page (keys %toping) {
- my $title=pagetitle(basename($page));
+ my $title=pagetitle(basename($page), 0);
my $url="$config{url}/".htmlpage($page);
foreach my $pingurl (@{$config{pingurl}}) {
debug("Pinging $pingurl for $page");
eval {
my $client = RPC::XML::Client->new($pingurl);
my $req = RPC::XML::request->new('weblogUpdates.ping',
- $title, $url);
+ $title, $url);
my $res = $client->send_request($req);
if (! ref $res) {
debug("Did not receive response to ping");
}
}
}
+
+ exit 0; # daemon done
} #}}}
1