[git.ikiwiki.info.git] / IkiWiki / Plugin / aggregate.pm (blobdiff)
er, this was implemented over 1 year ago..
index 964ef4b4b282b829f5231f224e6ebfb9160e95eb..6f9c78075f88509524314feea170e48e50377398 100644
@@ -4,7 +4,7 @@ package IkiWiki::Plugin::aggregate;
 
 use warnings;
 use strict;
-use IkiWiki;
+use IkiWiki 2.00;
 use HTML::Entities;
 use HTML::Parser;
 use HTML::Tagset;
@@ -31,21 +31,29 @@ sub getopt () { #{{{
 } #}}}
 
 sub checkconfig () { #{{{
-       IkiWiki::lockwiki();
-       loadstate();
-       if ($config{aggregate}) {
+       if ($config{aggregate} && ! ($config{post_commit} && 
+                                    IkiWiki::commit_hook_enabled())) {
+               if (! IkiWiki::lockwiki(0)) {
+                       debug("wiki is locked by another process, not aggregating");
+                       exit 1;
+               }
+       
+               loadstate();
                IkiWiki::loadindex();
                aggregate();
                expire();
                savestate();
+               clearstate();
+
+               IkiWiki::unlockwiki();
        }
-       IkiWiki::unlockwiki();
 } #}}}
 
 sub filter (@) { #{{{
        my %params=@_;
        my $page=$params{page};
 
+       loadstate(); # if not already loaded
        # Mark all feeds originating on this page as removable;
        # preprocess will unmark those that still exist.
        remove_feeds($page);
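
The rewritten checkconfig() only aggregates when it can take the wiki
lock: IkiWiki::lockwiki(0) is called in nonblocking mode, so a run that
finds the wiki locked exits instead of queuing up behind another
process. A minimal sketch of that nonblocking-lock pattern, with a
hypothetical lock file path (IkiWiki::lockwiki() wraps flock in much
the same way):

    use Fcntl qw(:flock);

    # Try to take an exclusive lock without blocking; returns false
    # when another process already holds it.
    sub try_lock {
        my $lockfile=shift;  # hypothetical path, e.g. "$dir/.lock"
        open(my $fh, '>', $lockfile) || die "open $lockfile: $!";
        return 0 unless flock($fh, LOCK_EX | LOCK_NB);
        return $fh;  # hold the handle open to keep the lock
    }
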
@@ -58,7 +66,7 @@ sub preprocess (@) { #{{{
 
        foreach my $required (qw{name url}) {
                if (! exists $params{$required}) {
-                       return "[[".sprintf(gettext("aggregate plugin missing %s parameter"), $required)."]]";
+                       return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
                }
        }
 
@@ -116,10 +124,12 @@ sub delete (@) { #{{{
        }
 } #}}}
 
+my $state_loaded=0;
 sub loadstate () { #{{{
+       return if $state_loaded;
        if (-e "$config{wikistatedir}/aggregate") {
-               open (IN, "$config{wikistatedir}/aggregate" ||
-                       die "$config{wikistatedir}/aggregate: $!");
+               open(IN, "$config{wikistatedir}/aggregate") ||
+                       die "$config{wikistatedir}/aggregate: $!";
                while (<IN>) {
                        $_=IkiWiki::possibly_foolish_untaint($_);
                        chomp;
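
The open() change in this hunk fixes a Perl precedence bug: the old
code placed || inside open()'s argument list, where it bound to the
(always true) filename string, so the die branch could never run and
open failures went unnoticed. Spelled out:

    # Broken: parses as open(IN, ($file || die ...)); a true $file
    # means die is unreachable.
    open (IN, $file || die "$file: $!");

    # Fixed: test open()'s own return value.
    open(IN, $file) || die "$file: $!";
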
@@ -147,14 +157,19 @@ sub loadstate () { #{{{
                }
 
                close IN;
+               
+               $state_loaded=1;
        }
 } #}}}
 
 sub savestate () { #{{{
        eval q{use HTML::Entities};
        error($@) if $@;
-       open (OUT, ">$config{wikistatedir}/aggregate" ||
-               die "$config{wikistatedir}/aggregate: $!");
+       my $newfile="$config{wikistatedir}/aggregate.new";
+       # TODO: This cleanup function could use improvement. Any newly
+       # aggregated files are left behind unrecorded, and should be deleted.
+       my $cleanup = sub { unlink($newfile) };
+       open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
        foreach my $data (values %feeds, values %guids) {
                if ($data->{remove}) {
                        if ($data->{name}) {
@@ -188,9 +203,17 @@ sub savestate () { #{{{
                                push @line, "$field=".$data->{$field};
                        }
                }
-               print OUT join(" ", @line)."\n";
+               (print OUT join(" ", @line)."\n") || error("write $newfile: $!", $cleanup);
        }
-       close OUT;
+       close OUT || error("save $newfile: $!", $cleanup);
+       rename($newfile, "$config{wikistatedir}/aggregate") ||
+               error("rename $newfile: $!", $cleanup);
+} #}}}
+
+sub clearstate () { #{{{
+       %feeds=();
+       %guids=();
+       $state_loaded=0;
 } #}}}
 
 sub expire () { #{{{
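
savestate() now writes to aggregate.new and rename()s it over the old
state file, so an interrupted run can no longer leave a truncated
state file behind. The same write-to-temp-then-rename pattern in
isolation (a sketch; names are illustrative):

    # Atomically replace $file with $contents.
    sub save_atomically {
        my ($file, $contents)=@_;
        my $newfile="$file.new";
        open(my $out, '>', $newfile) || die "open $newfile: $!";
        print $out $contents or die "write $newfile: $!";
        close($out) || die "close $newfile: $!";
        # rename() is atomic within one filesystem, so readers see
        # either the complete old state or the complete new one.
        rename($newfile, $file) || die "rename $newfile: $!";
    }
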
@@ -223,6 +246,8 @@ sub expire () { #{{{
 sub aggregate () { #{{{
        eval q{use XML::Feed};
        error($@) if $@;
+       eval q{use URI::Fetch};
+       error($@) if $@;
        eval q{use HTML::Entities};
        error($@) if $@;
 
@@ -231,6 +256,9 @@ sub aggregate () { #{{{
                        time - $feed->{lastupdate} >= $feed->{updateinterval};
                $feed->{lastupdate}=time;
                $feed->{newposts}=0;
+               $feed->{message}=sprintf(gettext("processed ok at %s"),
+                       displaytime($feed->{lastupdate}));
+               $feed->{error}=0;
                $IkiWiki::forcerebuild{$feed->{sourcepage}}=1;
 
                debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
@@ -238,14 +266,36 @@ sub aggregate () { #{{{
                if (! length $feed->{feedurl}) {
                        my @urls=XML::Feed->find_feeds($feed->{url});
                        if (! @urls) {
-                               $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{feedurl});
+                               $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
                                $feed->{error}=1;
                                debug($feed->{message});
                                next;
                        }
                        $feed->{feedurl}=pop @urls;
                }
-               my $f=eval{XML::Feed->parse(URI->new($feed->{feedurl}))};
+               my $res=URI::Fetch->fetch($feed->{feedurl});
+               if (! $res) {
+                       $feed->{message}=URI::Fetch->errstr;
+                       $feed->{error}=1;
+                       debug($feed->{message});
+                       next;
+               }
+               if ($res->status == URI::Fetch::URI_GONE()) {
+                       $feed->{message}=gettext("feed not found");
+                       $feed->{error}=1;
+                       debug($feed->{message});
+                       next;
+               }
+               my $content=$res->content;
+               my $f=eval{XML::Feed->parse(\$content)};
+               if ($@) {
+                       # One common cause of XML::Feed crashing is a feed
+                       # that contains invalid UTF-8 sequences. Decoding
+                       # the content as UTF-8 can work around that.
+                       $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
+                       $content=Encode::decode_utf8($content);
+                       $f=eval{XML::Feed->parse(\$content)};
+               }
                if ($@) {
                        $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
                        $feed->{error}=1;
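
Fetching now goes through URI::Fetch instead of handing the URL
straight to XML::Feed, which lets the plugin tell transport failures
apart from a permanent HTTP 410 Gone. The flow as a standalone sketch
(error handling condensed to warn):

    use URI::Fetch;

    sub fetch_feed_content {
        my $feedurl=shift;
        my $res=URI::Fetch->fetch($feedurl);
        if (! $res) {
            warn "fetch failed: ".URI::Fetch->errstr."\n";
            return;
        }
        if ($res->status == URI::Fetch::URI_GONE()) {
            warn "feed gone (HTTP 410); stop polling it\n";
            return;
        }
        # Pass a reference on to XML::Feed->parse(\$content).
        return $res->content;
    }
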
@@ -264,15 +314,11 @@ sub aggregate () { #{{{
                                feed => $feed,
                                title => defined $entry->title ? decode_entities($entry->title) : "untitled",
                                link => $entry->link,
-                               content => $entry->content->body,
+                               content => defined $entry->content->body ? $entry->content->body : "",
                                guid => defined $entry->id ? $entry->id : time."_".$feed->name,
                                ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
                        );
                }
-
-               $feed->{message}=sprintf(gettext("processed ok at "),
-                       displaytime($feed->{lastupdate}));
-               $feed->{error}=0;
        }
 } #}}}
 
@@ -310,6 +356,19 @@ sub add_page (@) { #{{{
                       -e pagefile($page.$c)) {
                        $c++
                }
+
+               # Make sure that the file name isn't too long. 
+               # NB: This doesn't check for path length limits.
+               my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
+               if (defined $max && length(htmlfn($page)) >= $max) {
+                       $c="";
+                       $page=$feed->{dir}."/item";
+                       while (exists $IkiWiki::pagecase{lc $page.$c} ||
+                              -e pagefile($page.$c)) {
+                               $c++
+                       }
+               }
+
                $guid->{page}=$page;
                debug(sprintf(gettext("creating new page %s"), $page));
        }
@@ -337,7 +396,7 @@ sub add_page (@) { #{{{
        if (ref $feed->{tags}) {
                $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
        }
-       writefile(htmlpage($guid->{page}), $config{srcdir},
+       writefile(htmlfn($guid->{page}), $config{srcdir},
                $template->output);
 
        # Set the mtime, this lets the build process get the right creation
@@ -412,7 +471,11 @@ sub remove_feeds () { #{{{
 sub pagefile ($) { #{{{
        my $page=shift;
 
-       return "$config{srcdir}/".htmlpage($page);
+       return "$config{srcdir}/".htmlfn($page);
+} #}}}
+
+sub htmlfn ($) { #{{{
+       return shift().".".$config{htmlext};
 } #}}}
 
 1
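
The new htmlfn() helper appends $config{htmlext} directly instead of
going through htmlpage(), so the aggregated page written into srcdir
always gets a plain page.extension file name. For example, with
$config{htmlext} set to "html":

    htmlfn("news/item");   # returns "news/item.html"
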