diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 8ef5ca541ae108e3087b15027c1115d9403e57d4..83bd670cb449f017ce9c35b81262af29ccdb6883 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -4,20 +4,20 @@ package IkiWiki::Plugin::aggregate;
 
 use warnings;
 use strict;
-use IkiWiki 2.00;
+use IkiWiki 3.00;
 use HTML::Parser;
 use HTML::Tagset;
 use HTML::Entities;
-use URI;
 use open qw{:utf8 :std};
 
 my %feeds;
 my %guids;
 
-sub import { #{{{
+sub import {
        hook(type => "getopt", id => "aggregate", call => \&getopt);
        hook(type => "getsetup", id => "aggregate", call => \&getsetup);
        hook(type => "getopt", id => "aggregate", call => \&getopt);
        hook(type => "getsetup", id => "aggregate", call => \&getsetup);
-       hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
+       hook(type => "checkconfig", id => "aggregate", call => \&checkconfig,
+               last => 1);
        hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
        hook(type => "preprocess", id => "aggregate", call => \&preprocess);
         hook(type => "delete", id => "aggregate", call => \&delete);
        hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
        hook(type => "preprocess", id => "aggregate", call => \&preprocess);
         hook(type => "delete", id => "aggregate", call => \&delete);
@@ -26,9 +26,9 @@ sub import { #{{{
        if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
                hook(type => "cgi", id => "aggregate", call => \&cgi);
        }
-} # }}}
+}
 
-sub getopt () { #{{{
+sub getopt () {
         eval q{use Getopt::Long};
        error($@) if $@;
         Getopt::Long::Configure('pass_through');
@@ -36,9 +36,9 @@ sub getopt () { #{{{
                "aggregate" => \$config{aggregate},
                "aggregateinternal!" => \$config{aggregateinternal},
        );
                "aggregate" => \$config{aggregate},
                "aggregateinternal!" => \$config{aggregateinternal},
        );
-} #}}}
+}
 
-sub getsetup () { #{{{
+sub getsetup () {
        return
                plugin => {
                        safe => 1,
@@ -46,7 +46,7 @@ sub getsetup () { #{{{
                },
                aggregateinternal => {
                        type => "boolean",
-                       example => 0,
+                       example => 1,
                        description => "enable aggregation to internal pages?",
                        safe => 0, # enabling needs manual transition
                        rebuild => 0,
                        description => "enable aggregation to internal pages?",
                        safe => 0, # enabling needs manual transition
                        rebuild => 0,
@@ -58,16 +58,31 @@ sub getsetup () { #{{{
                        safe => 1,
                        rebuild => 0,
                },
-} #}}}
+               cookiejar => {
+                       type => "string",
+                       example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
+                       safe => 0, # hooks into perl module internals
+                       description => "cookie control",
+               },
+}
+
+sub checkconfig () {
+       if (! defined $config{aggregateinternal}) {
+               $config{aggregateinternal}=1;
+       }
+       if (! defined $config{cookiejar}) {
+               $config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
+       }
 
-sub checkconfig () { #{{{
+       # This is done here rather than in a refresh hook because it
+       # needs to run before the wiki is locked.
        if ($config{aggregate} && ! ($config{post_commit} && 
                                     IkiWiki::commit_hook_enabled())) {
                launchaggregation();
        }
-} #}}}
+}
 
-sub cgi ($) { #{{{
+sub cgi ($) {
        my $cgi=shift;
 
        if (defined $cgi->param('do') &&
@@ -90,9 +105,9 @@ sub cgi ($) { #{{{
                }
                exit 0;
        }
-} #}}}
+}
 
-sub launchaggregation () { #{{{
+sub launchaggregation () {
        # See if any feeds need aggregation.
        loadstate();
        my @feeds=needsaggregate();
@@ -135,16 +150,16 @@ sub launchaggregation () { #{{{
        unlockaggregate();
 
        return 1;
-} #}}}
+}
 
 #  Pages with extension _aggregated have plain html markup, pass through.
-sub htmlize (@) { #{{{
+sub htmlize (@) {
        my %params=@_;
        return $params{content};
-} #}}}
+}
 
 # Used by ikiwiki-transition aggregateinternal.
-sub migrate_to_internal { #{{{
+sub migrate_to_internal {
        if (! lockaggregate()) {
                error("an aggregation process is currently running");
        }
@@ -159,10 +174,14 @@ sub migrate_to_internal { #{{{
                
                $config{aggregateinternal} = 0;
                my $oldname = "$config{srcdir}/".htmlfn($data->{page});
+               if (! -e $oldname) {
+                       $oldname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
+               }
+
                my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});
                
                $config{aggregateinternal} = 1;
-               my $newname = "$config{srcdir}/".htmlfn($data->{page});
+               my $newname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
                
                debug "moving $oldname -> $newname";
                if (-e $newname) {
                
                debug "moving $oldname -> $newname";
                if (-e $newname) {
@@ -182,7 +201,7 @@ sub migrate_to_internal { #{{{
                if (-e $oldoutput) {
                        require IkiWiki::Render;
                        debug("removing output file $oldoutput");
-                       IkiWiki::prune($oldoutput);
+                       IkiWiki::prune($oldoutput, $config{destdir});
                }
        }
        
@@ -190,9 +209,9 @@ sub migrate_to_internal { #{{{
        IkiWiki::unlockwiki;
        
        unlockaggregate();
-} #}}}
+}
 
-sub needsbuild (@) { #{{{
+sub needsbuild (@) {
        my $needsbuild=shift;
        
        loadstate();
@@ -206,9 +225,11 @@ sub needsbuild (@) { #{{{
                        markunseen($feed->{sourcepage});
                }
        }
-} # }}}
 
-sub preprocess (@) { #{{{
+       return $needsbuild;
+}
+
+sub preprocess (@) {
        my %params=@_;
 
        foreach my $required (qw{name url}) {
@@ -245,6 +266,7 @@ sub preprocess (@) { #{{{
        $feed->{template}=$params{template} . ".tmpl";
        delete $feed->{unseen};
        $feed->{lastupdate}=0 unless defined $feed->{lastupdate};
+       $feed->{lasttry}=$feed->{lastupdate} unless defined $feed->{lasttry};
        $feed->{numposts}=0 unless defined $feed->{numposts};
        $feed->{newposts}=0 unless defined $feed->{newposts};
        $feed->{message}=gettext("new feed") unless defined $feed->{message};
@@ -265,9 +287,9 @@ sub preprocess (@) { #{{{
               ($feed->{newposts} ? "; ".$feed->{newposts}.
                                    " ".gettext("new") : "").
               ")";
               ($feed->{newposts} ? "; ".$feed->{newposts}.
                                    " ".gettext("new") : "").
               ")";
-} # }}}
+}
 
-sub delete (@) { #{{{
+sub delete (@) {
        my @files=@_;
 
        # Remove feed data for removed pages.
@@ -275,9 +297,9 @@ sub delete (@) { #{{{
                my $page=pagename($file);
                markunseen($page);
        }
-} #}}}
+}
 
-sub markunseen ($) { #{{{
+sub markunseen ($) {
        my $page=shift;
 
        foreach my $id (keys %feeds) {
@@ -285,15 +307,15 @@ sub markunseen ($) { #{{{
                        $feeds{$id}->{unseen}=1;
                }
        }
-} #}}}
+}
 
 my $state_loaded=0;
 
-sub loadstate () { #{{{
+sub loadstate () {
        return if $state_loaded;
        $state_loaded=1;
        if (-e "$config{wikistatedir}/aggregate") {
-               open(IN, "$config{wikistatedir}/aggregate") ||
+               open(IN, "<", "$config{wikistatedir}/aggregate") ||
                        die "$config{wikistatedir}/aggregate: $!";
                while (<IN>) {
                        $_=IkiWiki::possibly_foolish_untaint($_);
                        die "$config{wikistatedir}/aggregate: $!";
                while (<IN>) {
                        $_=IkiWiki::possibly_foolish_untaint($_);
@@ -323,14 +345,14 @@ sub loadstate () { #{{{
 
                close IN;
        }
-} #}}}
+}
 
-sub savestate () { #{{{
+sub savestate () {
        return unless $state_loaded;
        garbage_collect();
        my $newfile="$config{wikistatedir}/aggregate.new";
        my $cleanup = sub { unlink($newfile) };
-       open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
+       open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
        foreach my $data (values %feeds, values %guids) {
                my @line;
                foreach my $field (keys %$data) {
@@ -342,7 +364,8 @@ sub savestate () { #{{{
                                push @line, "tag=$_" foreach @{$data->{tags}};
                        }
                        else {
                                push @line, "tag=$_" foreach @{$data->{tags}};
                        }
                        else {
-                               push @line, "$field=".$data->{$field};
+                               push @line, "$field=".$data->{$field}
+                                       if defined $data->{$field};
                        }
                }
                print OUT join(" ", @line)."\n" || error("write $newfile: $!", $cleanup);
@@ -350,9 +373,23 @@ sub savestate () { #{{{
        close OUT || error("save $newfile: $!", $cleanup);
        rename($newfile, "$config{wikistatedir}/aggregate") ||
                error("rename $newfile: $!", $cleanup);
-} #}}}
 
-sub garbage_collect () { #{{{
+       my $timestamp=undef;
+       foreach my $feed (keys %feeds) {
+               my $t=$feeds{$feed}->{lastupdate}+$feeds{$feed}->{updateinterval};
+               if (! defined $timestamp || $timestamp > $t) {
+                       $timestamp=$t;
+               }
+       }
+       $newfile=~s/\.new$/time/;
+       open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
+       if (defined $timestamp) {
+               print OUT $timestamp."\n";
+       }
+       close OUT || error("save $newfile: $!", $cleanup);
+}
+
+sub garbage_collect () {
        foreach my $name (keys %feeds) {
                # remove any feeds that were not seen while building the pages
                # that used to contain them
@@ -364,20 +401,23 @@ sub garbage_collect () { #{{{
        foreach my $guid (values %guids) {
                # any guid whose feed is gone should be removed
                if (! exists $feeds{$guid->{feed}}) {
-                       unlink "$config{srcdir}/".htmlfn($guid->{page})
-                               if exists $guid->{page};
+                       if (exists $guid->{page}) {
+                               unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page})
+                                       || unlink "$config{srcdir}/".htmlfn($guid->{page});
+                       }
                        delete $guids{$guid->{guid}};
                }
                # handle expired guids
                elsif ($guid->{expired} && exists $guid->{page}) {
                        unlink "$config{srcdir}/".htmlfn($guid->{page});
+                       unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
                        delete $guid->{page};
                        delete $guid->{md5};
                }
        }
-} #}}}
+}
 
-sub mergestate () { #{{{
+sub mergestate () {
        # Load the current state in from disk, and merge into it
        # values from the state in memory that might have changed
        # during aggregation.
@@ -390,14 +430,15 @@ sub mergestate () { #{{{
        # fields.
        foreach my $name (keys %myfeeds) {
                if (exists $feeds{$name}) {
-                       foreach my $field (qw{message lastupdate numposts
-                                             newposts error}) {
+                       foreach my $field (qw{message lastupdate lasttry
+                                             numposts newposts error}) {
                                $feeds{$name}->{$field}=$myfeeds{$name}->{$field};
                        }
                }
        }
 
        # New guids can be created during aggregation.
+       # Guids have a few fields that may be updated during aggregation.
        # It's also possible that guids were removed from the on-disk state
        # while the aggregation was in process. That would only happen if
        # their feed was also removed, so any removed guids added back here
@@ -406,16 +447,21 @@ sub mergestate () { #{{{
                if (! exists $guids{$guid}) {
                        $guids{$guid}=$myguids{$guid};
                }
+               else {
+                       foreach my $field (qw{md5}) {
+                               $guids{$guid}->{$field}=$myguids{$guid}->{$field};
+                       }
+               }
        }
-} #}}}
+}
 
-sub clearstate () { #{{{
+sub clearstate () {
        %feeds=();
        %guids=();
        $state_loaded=0;
-} #}}}
+}
 
-sub expire () { #{{{
+sub expire () {
        foreach my $feed (values %feeds) {
                next unless $feed->{expireage} || $feed->{expirecount};
                my $count=0;
@@ -444,26 +490,25 @@ sub expire () { #{{{
                        }
                }
        }
-} #}}}
+}
 
-sub needsaggregate () { #{{{
+sub needsaggregate () {
        return values %feeds if $config{rebuild};
        return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
-} #}}}
+}
 
-sub aggregate (@) { #{{{
+sub aggregate (@) {
+       eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
        eval q{use XML::Feed};
        error($@) if $@;
        eval q{use URI::Fetch};
        error($@) if $@;
 
        foreach my $feed (@_) {
-               $feed->{lastupdate}=time;
+               $feed->{lasttry}=time;
                $feed->{newposts}=0;
                $feed->{message}=sprintf(gettext("last checked %s"),
-                       '<span class="date" title="'.
-                       localtime($feed->{lastupdate}).'">'.
-                       displaytime($feed->{lastupdate}).'</span>');
+                       displaytime($feed->{lasttry}));
                $feed->{error}=0;
 
                debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
@@ -478,13 +523,21 @@ sub aggregate (@) { #{{{
                        }
                        $feed->{feedurl}=pop @urls;
                }
-               my $res=URI::Fetch->fetch($feed->{feedurl});
+               my $res=URI::Fetch->fetch($feed->{feedurl},
+                       UserAgent => LWP::UserAgent->new(
+                               cookie_jar => $config{cookiejar},
+                       ),
+               );
                if (! $res) {
                        $feed->{message}=URI::Fetch->errstr;
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
+
+               # lastupdate is only set if we were able to contact the server
+               $feed->{lastupdate}=$feed->{lasttry};
+
                if ($res->status == URI::Fetch::URI_GONE()) {
                        $feed->{message}=gettext("feed not found");
                        $feed->{error}=1;
@@ -498,15 +551,19 @@ sub aggregate (@) { #{{{
                        # that contains invalid UTF-8 sequences. Convert
                        # feed to ascii to try to work around.
                        $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
-                       $content=Encode::decode_utf8($content, 0);
-                       $f=eval{XML::Feed->parse(\$content)};
+                       $f=eval {
+                               $content=Encode::decode_utf8($content, 0);
+                               XML::Feed->parse(\$content)
+                       };
                }
                if ($@) {
                        # Another possibility is badly escaped entities.
                        $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
                        $content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
-                       $content=Encode::decode_utf8($content, 0);
-                       $f=eval{XML::Feed->parse(\$content)};
+                       $f=eval {
+                               $content=Encode::decode_utf8($content, 0);
+                               XML::Feed->parse(\$content)
+                       };
                }
                if ($@) {
                        $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
@@ -522,10 +579,15 @@ sub aggregate (@) { #{{{
                }
 
                foreach my $entry ($f->entries) {
-                       my $content=$content=$entry->content->body;
+                       # XML::Feed doesn't work around XML::Atom's bizarre
+                       # API, so we will. Real unicode strings? Yes please.
+                       # See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
+                       local $XML::Atom::ForceUnicode = 1;
+
+                       my $c=$entry->content;
                        # atom feeds may have no content, only a summary
-                       if (! defined $content && ref $entry->summary) {
-                               $content=$entry->summary->body;
+                       if (! defined $c && ref $entry->summary) {
+                               $c=$entry->summary;
                        }
 
                        add_page(
@@ -533,15 +595,16 @@ sub aggregate (@) { #{{{
                                copyright => $f->copyright,
                                title => defined $entry->title ? decode_entities($entry->title) : "untitled",
                                link => $entry->link,
-                               content => defined $content ? $content : "",
+                               content => (defined $c && defined $c->body) ? $c->body : "",
                                guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
                                ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
+                               base => (defined $c && $c->can("base")) ? $c->base : undef,
                        );
                }
        }
-} #}}}
+}
 
-sub add_page (@) { #{{{
+sub add_page (@) {
        my %params=@_;
        
        my $feed=$params{feed};
@@ -551,6 +614,7 @@ sub add_page (@) { #{{{
                # updating an existing post
                $guid=$guids{$params{guid}};
                return if $guid->{expired};
+               write_page($feed, $guid, $mtime, \%params);
        }
        else {
                # new post
@@ -572,25 +636,38 @@ sub add_page (@) { #{{{
                }
                my $c="";
                while (exists $IkiWiki::pagecase{lc $page.$c} ||
+                      -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
                       -e "$config{srcdir}/".htmlfn($page.$c)) {
                        $c++
                }
 
                       -e "$config{srcdir}/".htmlfn($page.$c)) {
                        $c++
                }
 
-               # Make sure that the file name isn't too long. 
-               # NB: This doesn't check for path length limits.
-               my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
-               if (defined $max && length(htmlfn($page)) >= $max) {
+               $guid->{page}=$page;
+               eval { write_page($feed, $guid, $mtime, \%params) };
+               if ($@) {
+                       # assume failure was due to a too long filename
+                       # (or o
                        $c="";
                        $page=$feed->{dir}."/item";
                        while (exists $IkiWiki::pagecase{lc $page.$c} ||
                        $c="";
                        $page=$feed->{dir}."/item";
                        while (exists $IkiWiki::pagecase{lc $page.$c} ||
-                              -e "$config{srcdir}/".htmlfn($page.$c)) {
+                             -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
+                             -e "$config{srcdir}/".htmlfn($page.$c)) {
                                $c++
                        }
+
+                       $guid->{page}=$page;
+                       write_page($feed, $guid, $mtime, \%params);
                }
 
-               $guid->{page}=$page;
                debug(sprintf(gettext("creating new page %s"), $page));
        }
                debug(sprintf(gettext("creating new page %s"), $page));
        }
+}
+
+sub write_page ($$$$$) {
+       my $feed=shift;
+       my $guid=shift;
+       my $mtime=shift;
+       my %params=%{shift()};
+
        $guid->{feed}=$feed->{name};
        
        # To write or not to write? Need to avoid writing unchanged pages
@@ -604,50 +681,52 @@ sub add_page (@) { #{{{
        $guid->{md5}=$digest;
 
        # Create the page.
-       my $template=template($feed->{template}, blind_cache => 1);
+       my $template;
+       eval {
+               $template=template($feed->{template}, blind_cache => 1);
+       };
+       if ($@) {
+               print STDERR gettext("failed to process template:")." $@";
+               return;
+       }
        $template->param(title => $params{title})
                if defined $params{title} && length($params{title});
-       $template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
+       $template->param(content => wikiescape(htmlabs($params{content},
+               defined $params{base} ? $params{base} : $feed->{feedurl})));
        $template->param(name => $feed->{name});
        $template->param(url => $feed->{url});
        $template->param(copyright => $params{copyright})
                if defined $params{copyright} && length $params{copyright};
-       $template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
+       $template->param(permalink => IkiWiki::urlabs($params{link}, $feed->{feedurl}))
                if defined $params{link};
        if (ref $feed->{tags}) {
                $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
        }
-       writefile(htmlfn($guid->{page}), $config{srcdir},
-               $template->output);
+       writefile(htmlfn($guid->{page}),
+               $IkiWiki::Plugin::transient::transientdir, $template->output);
 
        if (defined $mtime && $mtime <= time) {
                # Set the mtime, this lets the build process get the right
                # creation time on record for the new page.
-               utime $mtime, $mtime, "$config{srcdir}/".htmlfn($guid->{page});
+               utime $mtime, $mtime,
+                       $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
                # Store it in pagectime for expiry code to use also.
-               $IkiWiki::pagectime{$guid->{page}}=$mtime;
+               $IkiWiki::pagectime{$guid->{page}}=$mtime
+                       unless exists $IkiWiki::pagectime{$guid->{page}};
        }
        else {
                # Dummy value for expiry code.
-               $IkiWiki::pagectime{$guid->{page}}=time;
+               $IkiWiki::pagectime{$guid->{page}}=time
+                       unless exists $IkiWiki::pagectime{$guid->{page}};
        }
-} #}}}
+}
 
-sub htmlescape ($) { #{{{
+sub wikiescape ($) {
        # escape accidental wikilinks and preprocessor stuff
-       my $html=shift;
-       $html=~s/(?<!\\)\[\[/\\\[\[/g;
-       return $html;
-} #}}}
-
-sub urlabs ($$) { #{{{
-       my $url=shift;
-       my $urlbase=shift;
-
-       URI->new_abs($url, $urlbase)->as_string;
-} #}}}
+       return encode_entities(shift, '\[\]');
+}
 
-sub htmlabs ($$) { #{{{
+sub htmlabs ($$) {
        # Convert links in html from relative to absolute.
        # Note that this is a heuristic, which is not specified by the rss
        # spec and may not be right for all feeds. Also, see Debian
@@ -672,7 +751,7 @@ sub htmlabs ($$) { #{{{
                                next unless $v_offset; # 0 v_offset means no value
                                my $v = substr($text, $v_offset, $v_len);
                                $v =~ s/^([\'\"])(.*)\1$/$2/;
-                               my $new_v=urlabs($v, $urlbase);
+                               my $new_v=IkiWiki::urlabs($v, $urlbase);
                                $new_v =~ s/\"/&quot;/g; # since we quote with ""
                                substr($text, $v_offset, $v_len) = qq("$new_v");
                        }
                                $new_v =~ s/\"/&quot;/g; # since we quote with ""
                                substr($text, $v_offset, $v_len) = qq("$new_v");
                        }
@@ -683,15 +762,15 @@ sub htmlabs ($$) { #{{{
        $p->eof;
 
        return $ret;
-} #}}}
+}
 
-sub htmlfn ($) { #{{{
+sub htmlfn ($) {
        return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
        return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
-} #}}}
+}
 
 my $aggregatelock;
 
-sub lockaggregate () { #{{{
+sub lockaggregate () {
        # Take an exclusive lock to prevent multiple concurrent aggregators.
        # Returns true if the lock was aquired.
        if (! -d $config{wikistatedir}) {
@@ -704,11 +783,11 @@ sub lockaggregate () { #{{{
                return 0;
        }
        return 1;
-} #}}}
+}
 
-sub unlockaggregate () { #{{{
+sub unlockaggregate () {
        return close($aggregatelock) if $aggregatelock;
        return;
-} #}}}
+}
 
 1