hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
hook(type => "preprocess", id => "aggregate", call => \&preprocess);
hook(type => "delete", id => "aggregate", call => \&delete);
hook(type => "savestate", id => "aggregate", call => \&savestate);
hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
hook(type => "preprocess", id => "aggregate", call => \&preprocess);
hook(type => "delete", id => "aggregate", call => \&delete);
hook(type => "savestate", id => "aggregate", call => \&savestate);
if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
hook(type => "cgi", id => "aggregate", call => \&cgi);
}
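# Illustrative only: a minimal sketch (not the plugin's actual handler) of
# what the cgi hook registered above could dispatch on, assuming the trigger
# is requested as ikiwiki.cgi?do=aggregate_webtrigger.
sub cgi_webtrigger_sketch ($) {
	my $cgi=shift;
	return unless defined $cgi->param('do') &&
		$cgi->param('do') eq 'aggregate_webtrigger';
	print "Content-Type: text/plain\n\n";
	# ... aggregate any feeds that are due, reporting progress as plain text ...
}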
"aggregate" => \$config{aggregate},
"aggregateinternal!" => \$config{aggregateinternal},
);
"aggregate" => \$config{aggregate},
"aggregateinternal!" => \$config{aggregateinternal},
);
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 1,
+ rebuild => undef,
+ },
+ aggregateinternal => {
+ type => "boolean",
+ example => 1,
+ description => "enable aggregation to internal pages?",
+ safe => 0, # enabling needs manual transition
+ rebuild => 0,
+ },
+ aggregate_webtrigger => {
+ type => "boolean",
+ example => 0,
+ description => "allow aggregation to be triggered via the web?",
+ safe => 1,
+ rebuild => 0,
+ },
+}
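# For illustration, the matching entries in an ikiwiki setup file would look
# roughly like this (values are examples only):
#
#	aggregateinternal => 1,
#	aggregate_webtrigger => 0,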
+
+sub checkconfig () {
+ if (! defined $config{aggregateinternal}) {
+ $config{aggregateinternal}=1;
+ }
- print "W: $oldname not found\n";
+ debug("$oldname not found");
+ }
+ if (-e $oldoutput) {
+ require IkiWiki::Render;
+ debug("removing output file $oldoutput");
+ IkiWiki::prune($oldoutput);
$feed->{template}=$params{template} . ".tmpl";
delete $feed->{unseen};
$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
$feed->{numposts}=0 unless defined $feed->{numposts};
$feed->{newposts}=0 unless defined $feed->{newposts};
$feed->{message}=gettext("new feed") unless defined $feed->{message};
close OUT || error("save $newfile: $!", $cleanup);
rename($newfile, "$config{wikistatedir}/aggregate") ||
error("rename $newfile: $!", $cleanup);
foreach my $name (keys %feeds) {
# remove any feeds that were not seen while building the pages
# that used to contain them
foreach my $guid (values %guids) {
# any guid whose feed is gone should be removed
if (! exists $feeds{$guid->{feed}}) {
# Load the current state in from disk, and merge into it
# values from the state in memory that might have changed
# during aggregation.
- foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
- grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
+ foreach my $item (sort { ($IkiWiki::pagectime{$b->{page}} || 0) <=> ($IkiWiki::pagectime{$a->{page}} || 0) }
+ grep { exists $_->{page} && $_->{feed} eq $feed->{name} }
if ($days_old > $feed->{expireage}) {
debug(sprintf(gettext("expiring %s (%s days old)"),
$item->{page}, int($days_old)));
return values %feeds if $config{rebuild};
return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
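# Worked example: with updateinterval => 900 (15 minutes), a feed whose
# lastupdate was 1000 seconds ago is returned for aggregation, while one
# checked 200 seconds ago is skipped until its interval has elapsed.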
- $feed->{message}=sprintf(gettext("processed ok at %s"),
- displaytime($feed->{lastupdate}));
+ $feed->{message}=sprintf(gettext("last checked %s"),
+ displaytime($feed->{lasttry}));
# that contains invalid UTF-8 sequences. Convert
# feed to ascii to try to work around.
$feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
- $content=Encode::decode_utf8($content, 0);
- $f=eval{XML::Feed->parse(\$content)};
+ $f=eval {
+ $content=Encode::decode_utf8($content, 0);
+ XML::Feed->parse(\$content)
+ };
}
if ($@) {
# Another possibility is badly escaped entities.
$feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
$content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
- $content=Encode::decode_utf8($content, 0);
- $f=eval{XML::Feed->parse(\$content)};
+ $f=eval {
+ $content=Encode::decode_utf8($content, 0);
+ XML::Feed->parse(\$content)
+ };
add_page(
feed => $feed,
copyright => $f->copyright,
title => defined $entry->title ? decode_entities($entry->title) : "untitled",
link => $entry->link,
guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
# escape slashes and periods in title so it doesn't specify
# directory name or trigger ".." disallowing code.
$page=~s!([/.])!"__".ord($1)."__"!eg;
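# Worked example: a title that yields the page name "news/item.1" is stored
# as "news__47__item__46__1", since ord("/") is 47 and ord(".") is 46, so the
# result can no longer name a subdirectory or a ".." component.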
my $template=template($feed->{template}, blind_cache => 1);
$template->param(title => $params{title})
if defined $params{title} && length($params{title});
- $template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
+ $template->param(content => wikiescape(htmlabs($params{content},
+ defined $params{base} ? $params{base} : $feed->{feedurl})));
$template->param(name => $feed->{name});
$template->param(url => $feed->{url});
$template->param(copyright => $params{copyright})
- # Set the mtime, this lets the build process get the right creation
- # time on record for the new page.
- utime $mtime, $mtime, pagefile($guid->{page})
- if defined $mtime && $mtime <= time;
-} #}}}
+ if (defined $mtime && $mtime <= time) {
+ # Set the mtime, this lets the build process get the right
+ # creation time on record for the new page.
+ utime $mtime, $mtime, "$config{srcdir}/".htmlfn($guid->{page});
+ # Store it in pagectime for expiry code to use also.
+ $IkiWiki::pagectime{$guid->{page}}=$mtime;
+ }
+ else {
+ # Dummy value for expiry code.
+ $IkiWiki::pagectime{$guid->{page}}=time;
+ }
+}
# Convert links in html from relative to absolute.
# Note that this is a heuristic, which is not specified by the rss
# spec and may not be right for all feeds. Also, see Debian
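# A standalone sketch of the same relative-to-absolute idea (not the plugin's
# own htmlabs implementation), assuming the URI module is available:
use URI;
my $abs=URI->new_abs("images/photo.png", "http://example.com/blog/")->as_string;
# $abs is now "http://example.com/blog/images/photo.png"; htmlabs performs
# this sort of resolution on URLs found in the feed's html.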
# Take an exclusive lock to prevent multiple concurrent aggregators.
# Returns true if the lock was acquired.
if (! -d $config{wikistatedir}) {