sub import { #{{{
hook(type => "getopt", id => "aggregate", call => \&getopt);
+ hook(type => "getsetup", id => "aggregate", call => \&getsetup);
hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
hook(type => "preprocess", id => "aggregate", call => \&preprocess);
} #}}}
+sub getsetup () { #{{{
+ return
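+		# safe: may the option be changed via the websetup interface?
+		# rebuild: does changing it require rebuilding the wiki?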
+ plugin => {
+ safe => 1,
+ rebuild => undef,
+ },
+ aggregateinternal => {
+ type => "boolean",
+ example => 0,
+ description => "enable aggregation to internal pages?",
+ safe => 0, # enabling needs manual transition
+ rebuild => 0,
+ },
+ aggregate_webtrigger => {
+ type => "boolean",
+ example => 0,
+ description => "allow aggregation to be triggered via the web?",
+ safe => 1,
+ rebuild => 0,
+ },
+} #}}}
+
sub checkconfig () { #{{{
	if ($config{aggregate} && ! ($config{post_commit} &&
	                             IkiWiki::commit_hook_enabled())) {
		launchaggregation();
	}
} #}}}
+# Used by ikiwiki-transition aggregateinternal.
sub migrate_to_internal { #{{{
-
if (! lockaggregate()) {
- error("an aggregation process is already running");
- return;
+ error("an aggregation process is currently running");
}
IkiWiki::lockwiki();
loadstate();
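+	# Turn on verbose mode so the debug() messages below report
+	# the transition's progress.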
+ $config{verbose}=1;
foreach my $data (values %guids) {
next unless $data->{page};
-
+ next if $data->{expired};
+
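+		# pagefile() depends on $config{aggregateinternal}, so flip it
+		# to get the old (non-internal) and new (internal) source filenames.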
$config{aggregateinternal} = 0;
my $oldname = pagefile($data->{page});
-
+ my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});
+
$config{aggregateinternal} = 1;
my $newname = pagefile($data->{page});
-
- print "I: $oldname -> $newname\n";
+
+ debug "moving $oldname -> $newname";
if (-e $newname) {
if (-e $oldname) {
error("$newname already exists");
}
else {
- print STDERR
- "W: already renamed to $newname?\n";
+ debug("already renamed to $newname?");
}
}
elsif (-e $oldname) {
rename($oldname, $newname) || error("$!");
}
else {
- print "W: $oldname not found\n";
+ debug("$oldname not found");
+ }
+ if (-e $oldoutput) {
+ require IkiWiki::Render;
+ debug("removing output file $oldoutput");
+ IkiWiki::prune($oldoutput);
}
}
-
+
savestate();
IkiWiki::unlockwiki;
-
+
unlockaggregate();
} #}}}
my $count=0;
my %seen;
foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
- grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
+ grep { exists $_->{page} && $_->{feed} eq $feed->{name} }
values %guids) {
if ($feed->{expireage}) {
my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
}
foreach my $entry ($f->entries) {
+		my $content=$entry->content->body;
+ # atom feeds may have no content, only a summary
+ if (! defined $content && ref $entry->summary) {
+ $content=$entry->summary->body;
+ }
+
add_page(
feed => $feed,
copyright => $f->copyright,
title => defined $entry->title ? decode_entities($entry->title) : "untitled",
link => $entry->link,
- content => defined $entry->content->body ? $entry->content->body : "",
+ content => defined $content ? $content : "",
guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
);
writefile(htmlfn($guid->{page}), $config{srcdir},
$template->output);
- # Set the mtime, this lets the build process get the right creation
- # time on record for the new page.
- utime $mtime, $mtime, pagefile($guid->{page})
- if defined $mtime && $mtime <= time;
+ if (defined $mtime && $mtime <= time) {
+ # Set the mtime, this lets the build process get the right
+ # creation time on record for the new page.
+ utime $mtime, $mtime, pagefile($guid->{page});
+ # Store it in pagectime for expiry code to use also.
+ $IkiWiki::pagectime{$guid->{page}}=$mtime;
+ }
} #}}}
sub htmlescape ($) { #{{{