X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/f92505d78b82c6ac146e43054ebd12441575a602..ed75653e4d1ebc9827601ab3649e1422f45ef338:/IkiWiki/Plugin/aggregate.pm

diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index ba40ee6bc..a9c7447fa 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -1,13 +1,13 @@
 #!/usr/bin/perl
-# Blog aggregation plugin.
+# Feed aggregation plugin.
 package IkiWiki::Plugin::aggregate;
 
 use warnings;
 use strict;
 use IkiWiki 2.00;
-use HTML::Entities;
 use HTML::Parser;
 use HTML::Tagset;
+use HTML::Entities;
 use URI;
 use open qw{:utf8 :std};
 
@@ -21,6 +21,9 @@ sub import { #{{{
 	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
 	hook(type => "delete", id => "aggregate", call => \&delete);
 	hook(type => "savestate", id => "aggregate", call => \&savestate);
+	if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
+		hook(type => "cgi", id => "aggregate", call => \&cgi);
+	}
 } # }}}
 
 sub getopt () { #{{{
@@ -33,48 +36,78 @@ sub getopt () { #{{{
 sub checkconfig () { #{{{
 	if ($config{aggregate} && ! ($config{post_commit} &&
 	                             IkiWiki::commit_hook_enabled())) {
-		# See if any feeds need aggregation.
-		loadstate();
-		my @feeds=needsaggregate();
-		return unless @feeds;
-		if (! lockaggregate()) {
-			debug("an aggregation process is already running");
-			return;
-		}
-		# force a later rebuild of source pages
-		$IkiWiki::forcerebuild{$_->{sourcepage}}=1
-			foreach @feeds;
-
-		# Fork a child process to handle the aggregation.
-		# The parent process will then handle building the
-		# result. This avoids messy code to clear state
-		# accumulated while aggregating.
-		defined(my $pid = fork) or error("Can't fork: $!");
-		if (! $pid) {
-			IkiWiki::loadindex();
+		launchaggregation();
+	}
+} #}}}
 
-			# Aggregation happens without the main wiki lock
-			# being held. This allows editing pages etc while
-			# aggregation is running.
-			aggregate(@feeds);
-
-			IkiWiki::lockwiki;
-			# Merge changes, since aggregation state may have
-			# changed on disk while the aggregation was happening.
-			mergestate();
-			expire();
-			savestate();
-			IkiWiki::unlockwiki;
-			exit 0;
+sub cgi ($) { #{{{
+	my $cgi=shift;
+
+	if (defined $cgi->param('do') &&
+	    $cgi->param("do") eq "aggregate_webtrigger") {
+		$|=1;
+		print "Content-Type: text/plain\n\n";
+		$config{cgi}=0;
+		$config{verbose}=1;
+		$config{syslog}=0;
+		print gettext("Aggregation triggered via web.")."\n\n";
+		if (launchaggregation()) {
+			IkiWiki::lockwiki();
+			IkiWiki::loadindex();
+			require IkiWiki::Render;
+			IkiWiki::refresh();
+			IkiWiki::saveindex();
 		}
-		waitpid($pid,0);
-		if ($?) {
-			error "aggregation failed with code $?";
+		else {
+			print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
 		}
+		exit 0;
+	}
+} #}}}
 
-		clearstate();
-		unlockaggregate();
+sub launchaggregation () { #{{{
+	# See if any feeds need aggregation.
+	loadstate();
+	my @feeds=needsaggregate();
+	return unless @feeds;
+	if (! lockaggregate()) {
+		debug("an aggregation process is already running");
+		return;
+	}
+	# force a later rebuild of source pages
+	$IkiWiki::forcerebuild{$_->{sourcepage}}=1
+		foreach @feeds;
+
+	# Fork a child process to handle the aggregation.
+	# The parent process will then handle building the
+	# result. This avoids messy code to clear state
+	# accumulated while aggregating.
+	defined(my $pid = fork) or error("Can't fork: $!");
+	if (! $pid) {
+		IkiWiki::loadindex();
+		# Aggregation happens without the main wiki lock
+		# being held. This allows editing pages etc while
+		# aggregation is running.
+		aggregate(@feeds);
+
+		IkiWiki::lockwiki;
+		# Merge changes, since aggregation state may have
+		# changed on disk while the aggregation was happening.
+		mergestate();
+		expire();
+		savestate();
+		IkiWiki::unlockwiki;
+		exit 0;
+	}
+	waitpid($pid,0);
+	if ($?) {
+		error "aggregation failed with code $?";
 	}
+
+	clearstate();
+	unlockaggregate();
+
+	return 1;
 } #}}}
 
 sub needsbuild (@) { #{{{
@@ -206,8 +239,6 @@ sub loadstate () { #{{{
 sub savestate () { #{{{
 	return unless $state_loaded;
 	garbage_collect();
-	eval q{use HTML::Entities};
-	error($@) if $@;
 	my $newfile="$config{wikistatedir}/aggregate.new";
 	my $cleanup = sub { unlink($newfile) };
 	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
@@ -336,8 +367,6 @@ sub aggregate (@) { #{{{
 	error($@) if $@;
 	eval q{use URI::Fetch};
 	error($@) if $@;
-	eval q{use HTML::Entities};
-	error($@) if $@;
 
 	foreach my $feed (@_) {
 		$feed->{lastupdate}=time;
@@ -408,7 +437,7 @@ sub aggregate (@) { #{{{
 				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
 				link => $entry->link,
 				content => defined $entry->content->body ? $entry->content->body : "",
-				guid => defined $entry->id ? $entry->id : time."_".$feed->name,
+				guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
 				ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
 			);
 		}
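
Usage note (an addition for context, not part of the upstream diff): with this change applied, aggregation can be triggered over the web whenever the new aggregate_webtrigger option is set. The option name matches the $config{aggregate_webtrigger} test in the import hook above, and the trigger URL matches the do=aggregate_webtrigger parameter checked in the new cgi sub. A minimal sketch of enabling it in an ikiwiki setup file follows; the wikiname, url, and cgiurl values are placeholders:

	# Setup file sketch (Perl); placeholder values throughout.
	use IkiWiki::Setup::Standard {
		wikiname => "mywiki",
		url => "http://example.com/wiki",
		cgiurl => "http://example.com/ikiwiki.cgi",
		add_plugins => [qw{aggregate}],
		# Enables the cgi hook registered in sub import above.
		aggregate_webtrigger => 1,
	};

Aggregation is then kicked off by a plain GET of the CGI, for example from cron (the wget invocation is an assumed deployment choice, not something the diff mandates):

	# Poll every 15 minutes; the cgi sub prints a text/plain progress report.
	*/15 * * * * wget -q -O /dev/null 'http://example.com/ikiwiki.cgi?do=aggregate_webtrigger'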