X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/485a017a88949b92b59edf49197c4c09be910665..8ed94c0a18435f3a1934e19949153c7ccf8ec78a:/IkiWiki/Plugin/inline.pm

diff --git a/IkiWiki/Plugin/inline.pm b/IkiWiki/Plugin/inline.pm
index 56adaf509..8490b455f 100644
--- a/IkiWiki/Plugin/inline.pm
+++ b/IkiWiki/Plugin/inline.pm
@@ -4,186 +4,550 @@ package IkiWiki::Plugin::inline;
 use warnings;
 use strict;
-use IkiWiki;
+use Encode;
+use IkiWiki 2.00;
 use URI;
 
-sub import { #{{{
-	IkiWiki::hook(type => "preprocess", id => "inline",
+my %knownfeeds;
+my %page_numfeeds;
+my @inline;
+my $nested=0;
+
+sub import {
+	hook(type => "getopt", id => "inline", call => \&getopt);
+	hook(type => "getsetup", id => "inline", call => \&getsetup);
+	hook(type => "checkconfig", id => "inline", call => \&checkconfig);
+	hook(type => "sessioncgi", id => "inline", call => \&sessioncgi);
+	hook(type => "preprocess", id => "inline",
 		call => \&IkiWiki::preprocess_inline);
+	hook(type => "pagetemplate", id => "inline",
+		call => \&IkiWiki::pagetemplate_inline);
+	hook(type => "format", id => "inline", call => \&format);
 	# Hook to change to do pinging since it's called late.
 	# This ensures each page only pings once and prevents slow
 	# pings interrupting page builds.
-	IkiWiki::hook(type => "change", id => "inline",
-		call => \&IkiWiki::pingurl);
-} # }}}
+	hook(type => "change", id => "inline", call => \&IkiWiki::pingurl);
+}
+
+sub getopt () {
+	eval q{use Getopt::Long};
+	error($@) if $@;
+	Getopt::Long::Configure('pass_through');
+	GetOptions(
+		"rss!" => \$config{rss},
+		"atom!" => \$config{atom},
+		"allowrss!" => \$config{allowrss},
+		"allowatom!" => \$config{allowatom},
+		"pingurl=s" => sub {
+			push @{$config{pingurl}}, $_[1];
+		},
+	);
+}
+
+sub getsetup () {
+	return
+		plugin => {
+			safe => 1,
+			rebuild => undef,
+		},
+		rss => {
+			type => "boolean",
+			example => 0,
+			description => "enable rss feeds by default?",
+			safe => 1,
+			rebuild => 1,
+		},
+		atom => {
+			type => "boolean",
+			example => 0,
+			description => "enable atom feeds by default?",
+			safe => 1,
+			rebuild => 1,
+		},
+		allowrss => {
+			type => "boolean",
+			example => 0,
+			description => "allow rss feeds to be used?",
+			safe => 1,
+			rebuild => 1,
+		},
+		allowatom => {
+			type => "boolean",
+			example => 0,
+			description => "allow atom feeds to be used?",
+			safe => 1,
+			rebuild => 1,
+		},
+		pingurl => {
+			type => "string",
+			example => "http://rpc.technorati.com/rpc/ping",
+			description => "urls to ping (using XML-RPC) on feed update",
+			safe => 1,
+			rebuild => 0,
+		},
+}
+
+sub checkconfig () {
+	if (($config{rss} || $config{atom}) && ! length $config{url}) {
+		error(gettext("Must specify url to wiki with --url when using --rss or --atom"));
+	}
+	if ($config{rss}) {
+		push @{$config{wiki_file_prune_regexps}}, qr/\.rss$/;
+	}
+	if ($config{atom}) {
+		push @{$config{wiki_file_prune_regexps}}, qr/\.atom$/;
+	}
+	if (! exists $config{pingurl}) {
+		$config{pingurl}=[];
+	}
+}
+
+sub format (@) {
+	my %params=@_;
+
+	# Fill in the inline content generated earlier. This is actually an
+	# optimisation.
+	$params{content}=~s{<div class="inline" id="(\d+)"></div>}{
+		delete @inline[$1,]
+	}eg;
+	return $params{content};
+}
+
+sub sessioncgi ($$) {
+	my $q=shift;
+	my $session=shift;
+
+	if ($q->param('do') eq 'blog') {
+		my $page=titlepage(decode_utf8($q->param('title')));
+		$page=~s/(\/)/"__".ord($1)."__"/eg; # don't create subdirs
+		# if the page already exists, munge it to be unique
+		my $from=$q->param('from');
+		my $add="";
+		while (exists $IkiWiki::pagecase{lc($from."/".$page.$add)}) {
+			$add=1 unless length $add;
+			$add++;
+		}
+		$q->param('page', $page.$add);
+		# now go create the page
+		$q->param('do', 'create');
+		# make sure the editpage plugin in loaded
+		if (IkiWiki->can("cgi_editpage")) {
+			IkiWiki::cgi_editpage($q, $session);
+		}
+		else {
+			error(gettext("page editing not allowed"));
+		}
+		exit;
+	}
+}
 
 # Back to ikiwiki namespace for the rest, this code is very much
 # internal to ikiwiki even though it's separated into a plugin.
 package IkiWiki;
 
 my %toping;
-my $processing_inline=0;
+my %feedlinks;
 
-sub preprocess_inline (@) { #{{{
+sub preprocess_inline (@) {
 	my %params=@_;
-
+
 	if (! exists $params{pages}) {
-		return "";
+		error gettext("missing pages parameter");
 	}
-	if (! exists $params{archive}) {
-		$params{archive}="no";
-	}
-	if (! exists $params{show} && $params{archive} eq "no") {
+	my $raw=yesno($params{raw});
+	my $archive=yesno($params{archive});
+	my $rss=(($config{rss} || $config{allowrss}) && exists $params{rss}) ? yesno($params{rss}) : $config{rss};
+	my $atom=(($config{atom} || $config{allowatom}) && exists $params{atom}) ? yesno($params{atom}) : $config{atom};
+	my $quick=exists $params{quick} ? yesno($params{quick}) : 0;
+	my $feeds=exists $params{feeds} ? yesno($params{feeds}) : !$quick;
+	my $emptyfeeds=exists $params{emptyfeeds} ? yesno($params{emptyfeeds}) : 1;
+	my $feedonly=yesno($params{feedonly});
+	if (! exists $params{show} && ! $archive) {
 		$params{show}=10;
 	}
-	if (! exists $params{rss}) {
-		$params{rss}="yes";
+	if (! exists $params{feedshow} && exists $params{show}) {
+		$params{feedshow}=$params{show};
 	}
-
-	# Avoid nested inlines, to avoid loops etc.
-	if ($processing_inline) {
-		return "";
+	my $desc;
+	if (exists $params{description}) {
+		$desc = $params{description} 
+	}
+	else {
+		$desc = $config{wikiname};
+	}
+	my $actions=yesno($params{actions});
+	if (exists $params{template}) {
+		$params{template}=~s/[^-_a-zA-Z0-9]+//g;
+	}
+	else {
+		$params{template} = $archive ? "archivepage" : "inlinepage";
 	}
-	$processing_inline=1;
 
 	my @list;
 	foreach my $page (keys %pagesources) {
 		next if $page eq $params{page};
-		if (pagespec_match($page, $params{pages})) {
+		if (pagespec_match($page, $params{pages}, location => $params{page})) {
 			push @list, $page;
 		}
 	}
-	@list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
+
+	if (exists $params{sort} && $params{sort} eq 'title') {
+		@list=sort { pagetitle(basename($a)) cmp pagetitle(basename($b)) } @list;
+	}
+	elsif (exists $params{sort} && $params{sort} eq 'mtime') {
+		@list=sort { $pagemtime{$b} <=> $pagemtime{$a} } @list;
+	}
+	elsif (! exists $params{sort} || $params{sort} eq 'age') {
+		@list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
+	}
+	else {
+		error sprintf(gettext("unknown sort type %s"), $params{sort});
+	}
+
+	if (yesno($params{reverse})) {
+		@list=reverse(@list);
+	}
+
+	if (exists $params{skip}) {
+		@list=@list[$params{skip} .. scalar @list - 1];
+	}
+
+	my @feedlist;
+	if ($feeds) {
+		if (exists $params{feedshow} &&
+		    $params{feedshow} && @list > $params{feedshow}) {
+			@feedlist=@list[0..$params{feedshow} - 1];
+		}
+		else {
+			@feedlist=@list;
+		}
+	}
+
 	if ($params{show} && @list > $params{show}) {
 		@list=@list[0..$params{show} - 1];
 	}
 
 	add_depends($params{page}, $params{pages});
+	# Explicitly add all currently displayed pages as dependencies, so
+	# that if they are removed or otherwise changed, the inline will be
+	# sure to be updated.
+	add_depends($params{page}, join(" or ", $#list >= $#feedlist ? @list : @feedlist));
+
+	if ($feeds && exists $params{feedpages}) {
+		@feedlist=grep { pagespec_match($_, $params{feedpages}, location => $params{page}) } @feedlist;
+	}
+
+	my ($feedbase, $feednum);
+	if ($feeds) {
+		# Ensure that multiple feeds on a page go to unique files.
+
+		# Feedfile can lead to conflicts if usedirs is not enabled,
+		# so avoid supporting it in that case.
+		delete $params{feedfile} if ! $config{usedirs};
+		# Tight limits on legal feedfiles, to avoid security issues
+		# and conflicts.
+		if (defined $params{feedfile}) {
+			if ($params{feedfile} =~ /\// ||
+			    $params{feedfile} !~ /$config{wiki_file_regexp}/) {
+				error("illegal feedfile");
+			}
+			$params{feedfile}=possibly_foolish_untaint($params{feedfile});
+		}
+		$feedbase=targetpage($params{destpage}, "", $params{feedfile});
+
+		my $feedid=join("\0", $feedbase, map { $_."\0".$params{$_} } sort keys %params);
+		if (exists $knownfeeds{$feedid}) {
+			$feednum=$knownfeeds{$feedid};
+		}
+		else {
+			if (exists $page_numfeeds{$params{destpage}}{$feedbase}) {
+				if ($feeds) {
+					$feednum=$knownfeeds{$feedid}=++$page_numfeeds{$params{destpage}}{$feedbase};
+				}
+			}
+			else {
+				$feednum=$knownfeeds{$feedid}="";
+				if ($feeds) {
+					$page_numfeeds{$params{destpage}}{$feedbase}=1;
+				}
+			}
+		}
+	}
+
+	my $rssurl=basename($feedbase."rss".$feednum) if $feeds && $rss;
+	my $atomurl=basename($feedbase."atom".$feednum) if $feeds && $atom;
 
 	my $ret="";
-
-	if (exists $params{rootpage} && $config{cgiurl}) {
-		# Add a blog post form, with a rss link button.
+
+	if (length $config{cgiurl} && ! $params{preview} && (exists $params{rootpage} ||
+	    (exists $params{postform} && yesno($params{postform}))) &&
+	    IkiWiki->can("cgi_editpage")) {
+		# Add a blog post form, with feed buttons.
 		my $formtemplate=template("blogpost.tmpl", blind_cache => 1);
 		$formtemplate->param(cgiurl => $config{cgiurl});
-		$formtemplate->param(rootpage => $params{rootpage});
-		if ($config{rss}) {
-			$formtemplate->param(rssurl => rsspage(basename($params{page})));
+		my $rootpage;
+		if (exists $params{rootpage}) {
+			$rootpage=bestlink($params{page}, $params{rootpage});
+			if (!length $rootpage) {
+				$rootpage=$params{rootpage};
+			}
+		}
+		else {
+			$rootpage=$params{page};
+		}
+		$formtemplate->param(rootpage => $rootpage);
+		$formtemplate->param(rssurl => $rssurl) if $feeds && $rss;
+		$formtemplate->param(atomurl => $atomurl) if $feeds && $atom;
+		if (exists $params{postformtext}) {
+			$formtemplate->param(postformtext =>
+				$params{postformtext});
+		}
+		else {
+			$formtemplate->param(postformtext =>
+				gettext("Add a new post titled:"));
 		}
 		$ret.=$formtemplate->output;
+
+		# The post form includes the feed buttons, so
+		# emptyfeeds cannot be hidden.
+		$emptyfeeds=1;
 	}
-	elsif ($config{rss} && $params{rss} eq "yes") {
-		# Add a rss link button.
-		my $linktemplate=template("rsslink.tmpl", blind_cache => 1);
-		$linktemplate->param(rssurl => rsspage(basename($params{page})));
+	elsif ($feeds && !$params{preview} && ($emptyfeeds || @feedlist)) {
+		# Add feed buttons.
+		my $linktemplate=template("feedlink.tmpl", blind_cache => 1);
+		$linktemplate->param(rssurl => $rssurl) if $rss;
+		$linktemplate->param(atomurl => $atomurl) if $atom;
 		$ret.=$linktemplate->output;
 	}
 
-	my $template=template(
-		(($params{archive} eq "no")
-			? "inlinepage.tmpl"
-			: "inlinepagetitle.tmpl"),
-		blind_cache => 1,
-	);
+	if (! $feedonly) {
+		require HTML::Template;
+		my @params=IkiWiki::template_params($params{template}.".tmpl", blind_cache => 1);
+		if (! @params) {
+			error sprintf(gettext("nonexistant template %s"), $params{template});
+		}
+		my $template=HTML::Template->new(@params) unless $raw;
 
-	foreach my $page (@list) {
-		# Don't use htmllink because this way the title is separate
-		# and can be overridden by other plugins.
-		my $link=htmlpage(bestlink($params{page}, $page));
-		$link=abs2rel($link, dirname($params{page}));
-		$template->param(pageurl => $link);
-		$template->param(title => pagetitle(basename($page)));
-		$template->param(content => get_inline_content($page, $params{page}))
-			if $params{archive} eq "no";
-		$template->param(ctime => displaytime($pagectime{$page}));
-
-		run_hooks(pagetemplate => sub {
-			shift->(page => $page, destpage => $params{page},
-				template => $template,);
-		});
-
-		$ret.=$template->output;
-		$template->clear_params;
+		foreach my $page (@list) {
+			my $file = $pagesources{$page};
+			my $type = pagetype($file);
+			if (! $raw || ($raw && ! defined $type)) {
+				unless ($archive && $quick) {
+					# Get the content before populating the
+					# template, since getting the content uses
+					# the same template if inlines are nested.
+					my $content=get_inline_content($page, $params{destpage});
+					$template->param(content => $content);
+				}
+				$template->param(pageurl => urlto(bestlink($params{page}, $page), $params{destpage}));
+				$template->param(inlinepage => $page);
+				$template->param(title => pagetitle(basename($page)));
+				$template->param(ctime => displaytime($pagectime{$page}, $params{timeformat}));
+				$template->param(mtime => displaytime($pagemtime{$page}, $params{timeformat}));
+				$template->param(first => 1) if $page eq $list[0];
+				$template->param(last => 1) if $page eq $list[$#list];
+
+				if ($actions) {
+					my $file = $pagesources{$page};
+					my $type = pagetype($file);
+					if ($config{discussion}) {
+						my $discussionlink=gettext("discussion");
+						if ($page !~ /.*\/\Q$discussionlink\E$/ &&
+						    (length $config{cgiurl} ||
+						     exists $links{$page."/".$discussionlink})) {
+							$template->param(have_actions => 1);
+							$template->param(discussionlink =>
+								htmllink($page,
+									$params{destpage},
+									gettext("Discussion"),
+									noimageinline => 1,
+									forcesubpage => 1));
+						}
+					}
+					if (length $config{cgiurl} && defined $type) {
+						$template->param(have_actions => 1);
+						$template->param(editurl => cgiurl(do => "edit", page => $page));
+					}
+				}
+
+				run_hooks(pagetemplate => sub {
+					shift->(page => $page, destpage => $params{destpage},
+						template => $template,);
+				});
+
+				$ret.=$template->output;
+				$template->clear_params;
+			}
+			else {
+				if (defined $type) {
+					$ret.="\n".
+						linkify($page, $params{destpage},
+						preprocess($page, $params{destpage},
+						filter($page, $params{destpage},
+						readfile(srcfile($file)))));
+				}
+			}
+		}
 	}
 
-	# TODO: should really add this to renderedfiles and call
-	# check_overwrite, but currently renderedfiles
-	# only supports listing one file per page.
-	if ($config{rss} && $params{rss} eq "yes") {
-		writefile(rsspage($params{page}), $config{destdir},
-			genrss($params{page}, @list));
-		$toping{$params{page}}=1 unless $config{rebuild};
+	if ($feeds && ($emptyfeeds || @feedlist)) {
+		if ($rss) {
+			my $rssp=$feedbase."rss".$feednum;
+			will_render($params{destpage}, $rssp);
+			if (! $params{preview}) {
+				writefile($rssp, $config{destdir},
+					genfeed("rss",
+						$config{url}."/".$rssp, $desc, $params{guid}, $params{destpage}, @feedlist));
+				$toping{$params{destpage}}=1 unless $config{rebuild};
+				$feedlinks{$params{destpage}}.=qq{<link rel="alternate" type="application/rss+xml" title="RSS" href="$rssurl" />};
+			}
+		}
+		if ($atom) {
+			my $atomp=$feedbase."atom".$feednum;
+			will_render($params{destpage}, $atomp);
+			if (! $params{preview}) {
+				writefile($atomp, $config{destdir},
+					genfeed("atom", $config{url}."/".$atomp, $desc, $params{guid}, $params{destpage}, @feedlist));
+				$toping{$params{destpage}}=1 unless $config{rebuild};
+				$feedlinks{$params{destpage}}.=qq{<link rel="alternate" type="application/atom+xml" title="Atom" href="$atomurl" />};
+			}
+		}
 	}
-	$processing_inline=0;
+	return $ret if $raw || $nested;
+	push @inline, $ret;
+	return "<div class=\"inline\" id=\"$#inline\"></div>\n\n";
\n\n"; +} - return $ret; -} #}}} +sub pagetemplate_inline (@) { + my %params=@_; + my $page=$params{page}; + my $template=$params{template}; + + $template->param(feedlinks => $feedlinks{$page}) + if exists $feedlinks{$page} && $template->query(name => "feedlinks"); +} -sub get_inline_content ($$) { #{{{ +sub get_inline_content ($$) { my $page=shift; my $destpage=shift; my $file=$pagesources{$page}; my $type=pagetype($file); if (defined $type) { - return htmlize($type, preprocess($page, $destpage, linkify($page, $destpage, readfile(srcfile($file))))); + $nested++; + my $ret=htmlize($page, $destpage, $type, + linkify($page, $destpage, + preprocess($page, $destpage, + filter($page, $destpage, + readfile(srcfile($file)))))); + $nested--; + return $ret; } else { return ""; } -} #}}} +} -sub date_822 ($) { #{{{ +sub date_822 ($) { my $time=shift; - eval q{use POSIX}; - my $lc_time= POSIX::setlocale(&POSIX::LC_TIME); + my $lc_time=POSIX::setlocale(&POSIX::LC_TIME); POSIX::setlocale(&POSIX::LC_TIME, "C"); my $ret=POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time)); POSIX::setlocale(&POSIX::LC_TIME, $lc_time); return $ret; -} #}}} +} + +sub date_3339 ($) { + my $time=shift; -sub absolute_urls ($$) { #{{{ + my $lc_time=POSIX::setlocale(&POSIX::LC_TIME); + POSIX::setlocale(&POSIX::LC_TIME, "C"); + my $ret=POSIX::strftime("%Y-%m-%dT%H:%M:%SZ", gmtime($time)); + POSIX::setlocale(&POSIX::LC_TIME, $lc_time); + return $ret; +} + +sub absolute_urls ($$) { # sucky sub because rss sucks my $content=shift; - my $url=shift; + my $baseurl=shift; + my $url=$baseurl; $url=~s/[^\/]+$//; - - $content=~s/new($url); + $top_uri->path_query(""); # reset the path + my $urltop = $top_uri->as_string; - return $page.".rss"; -} #}}} + $content=~s/(new(encode_utf8("$config{url}/".htmlpage($page))); + my $url=URI->new(encode_utf8(urlto($page,"",1))); - my $itemtemplate=template("rssitem.tmpl", blind_cache => 1, - die_on_bad_params => 0); + my $itemtemplate=template($feedtype."item.tmpl", blind_cache => 1); my $content=""; + my $lasttime = 0; foreach my $p (@pages) { - next unless exists $renderedfiles{$p}; - - my $u=URI->new(encode_utf8("$config{url}/$renderedfiles{$p}")); + my $u=URI->new(encode_utf8(urlto($p, "", 1))); + my $pcontent = absolute_urls(get_inline_content($p, $page), $url); $itemtemplate->param( title => pagetitle(basename($p)), url => $u, permalink => $u, - pubdate => date_822($pagectime{$p}), - content => absolute_urls(get_inline_content($p, $page), $url), + cdate_822 => date_822($pagectime{$p}), + mdate_822 => date_822($pagemtime{$p}), + cdate_3339 => date_3339($pagectime{$p}), + mdate_3339 => date_3339($pagemtime{$p}), ); + + if (exists $pagestate{$p} && + exists $pagestate{$p}{meta}{guid}) { + $itemtemplate->param(guid => $pagestate{$p}{meta}{guid}); + } + + if ($itemtemplate->query(name => "enclosure")) { + my $file=$pagesources{$p}; + my $type=pagetype($file); + if (defined $type) { + $itemtemplate->param(content => $pcontent); + } + else { + my $size=(srcfile_stat($file))[8]; + my $mime="unknown"; + eval q{use File::MimeInfo}; + if (! 
+					$mime = mimetype($file);
+				}
+				$itemtemplate->param(
+					enclosure => $u,
+					type => $mime,
+					length => $size,
+				);
+			}
+		}
+		else {
+			$itemtemplate->param(content => $pcontent);
+		}
+
 		run_hooks(pagetemplate => sub {
 			shift->(page => $p, destpage => $page,
 				template => $itemtemplate);
@@ -191,14 +555,21 @@ sub genrss ($@) { #{{{
 		$content.=$itemtemplate->output;
 		$itemtemplate->clear_params;
+
+		$lasttime = $pagemtime{$p} if $pagemtime{$p} > $lasttime;
 	}
 
-	my $template=template("rsspage.tmpl", blind_cache => 1);
+	my $template=template($feedtype."page.tmpl", blind_cache => 1);
 	$template->param(
-		title => $config{wikiname},
+		title => $page ne "index" ? pagetitle($page) : $config{wikiname},
 		wikiname => $config{wikiname},
 		pageurl => $url,
 		content => $content,
+		feeddesc => $feeddesc,
+		guid => $guid,
+		feeddate => date_3339($lasttime),
+		feedurl => $feedurl,
+		version => $IkiWiki::version,
 	);
 	run_hooks(pagetemplate => sub {
 		shift->(page => $page, destpage => $page,
@@ -206,35 +577,54 @@ sub genrss ($@) { #{{{
 	});
 
 	return $template->output;
-} #}}}
+}
 
-sub pingurl (@) { #{{{
-	return unless $config{pingurl} && %toping;
+sub pingurl (@) {
+	return unless @{$config{pingurl}} && %toping;
 
 	eval q{require RPC::XML::Client};
 	if ($@) {
-		debug("RPC::XML::Client not found, not pinging");
+		debug(gettext("RPC::XML::Client not found, not pinging"));
 		return;
 	}
 
+	# daemonize here so slow pings don't slow down wiki updates
+	defined(my $pid = fork) or error("Can't fork: $!");
+	return if $pid;
+	chdir '/';
+	POSIX::setsid() or error("Can't start a new session: $!");
+	open STDIN, '/dev/null';
+	open STDOUT, '>/dev/null';
+	open STDERR, '>&STDOUT' or error("Can't dup stdout: $!");
+
+	# Don't need to keep a lock on the wiki as a daemon.
+	IkiWiki::unlockwiki();
+
 	foreach my $page (keys %toping) {
-		my $title=pagetitle(basename($page));
-		my $url="$config{url}/".htmlpage($page);
+		my $title=pagetitle(basename($page), 0);
+		my $url=urlto($page, "", 1);
 		foreach my $pingurl (@{$config{pingurl}}) {
-			my $client = RPC::XML::Client->new($pingurl);
-			my $req = RPC::XML::request->new('weblogUpdates.ping',
-				$title, $url);
 			debug("Pinging $pingurl for $page");
-			my $res = $client->send_request($req);
-			if (! ref $res) {
-				debug("Did not receive response to ping");
-			}
-			my $r=$res->value;
-			if (! exists $r->{flerror} || $r->{flerror}) {
-				debug("Ping rejected: ".$r->{message});
+			eval {
+				my $client = RPC::XML::Client->new($pingurl);
+				my $req = RPC::XML::request->new('weblogUpdates.ping',
+					$title, $url);
+				my $res = $client->send_request($req);
+				if (! ref $res) {
+					error("Did not receive response to ping");
+				}
+				my $r=$res->value;
+				if (! exists $r->{flerror} || $r->{flerror}) {
+					error("Ping rejected: ".(exists $r->{message} ? $r->{message} : "[unknown reason]"));
+				}
+			};
+			if ($@) {
+				error "Ping failed: $@";
+			}
 		}
 	}
-} #}}}
+
+	exit 0; # daemon done
+}
 
 1
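
The change above turns the old rss-only inline into a general feed generator driven by directive parameters (feeds, feedshow, feedfile, feedpages, feedonly, emptyfeeds, sort, reverse, skip, actions, postform, postformtext, template, description, guid, timeformat). As a rough illustration only -- the page names and values below are made up, and wikis without the directive prefix enabled write [[inline ...]] instead -- a blog page might now use:

	[[!inline pages="blog/* and !*/Discussion" show="10" feedshow="20"
	sort="mtime" reverse="no" actions="yes" postform="yes"
	postformtext="Add a new post titled:" emptyfeeds="no"
	template="inlinepage" description="Example blog" feedfile="feed"]]

Per the checks in preprocess_inline and checkconfig, feedfile is honoured only when usedirs is enabled, rss/atom output also depends on the rss/atom or allowrss/allowatom settings, and generating feeds requires the wiki url to be configured.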