# Blog aggregation plugin.
package IkiWiki::Plugin::aggregate;

use warnings;
use strict;
use IkiWiki;
use HTML::Entities;
use HTML::Parser;
use HTML::Tagset;
use URI;
use open qw{:utf8 :std};

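# All plugin state lives in two hashes: %feeds, keyed by feed name,
# and %guids, keyed by the globally unique id of each aggregated item.
# loadstate() and savestate() below serialize both to a single file
# under the wiki's state directory.
my %feeds;
my %guids;
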
sub import { #{{{
	hook(type => "getopt", id => "aggregate", call => \&getopt);
	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
	hook(type => "filter", id => "aggregate", call => \&filter);
	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
	hook(type => "delete", id => "aggregate", call => \&delete);
	hook(type => "savestate", id => "aggregate", call => \&savestate);
} # }}}

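# getopt adds an --aggregate switch to the ikiwiki command line;
# pass_through leaves every other option for ikiwiki itself to parse.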
sub getopt () { #{{{
	eval q{use Getopt::Long};
	error($@) if $@;
	Getopt::Long::Configure('pass_through');
	GetOptions("aggregate" => \$config{aggregate});
} # }}}

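# With --aggregate set, checkconfig performs the whole feed-polling
# pass up front, before the normal wiki refresh runs.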
sub checkconfig () { #{{{
	IkiWiki::lockwiki();
	loadstate();
	if ($config{aggregate}) {
		IkiWiki::loadindex();
		aggregate();
		expire();
		savestate();
	}
	IkiWiki::unlockwiki();
} # }}}

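# The filter hook sees every page as it is rendered. It is used here
# to notice edits to pages that define feeds: all of the page's feeds
# are first marked removable, and any aggregate directive still
# present unmarks its feed in preprocess below.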
sub filter (@) { #{{{
	my %params=@_;
	my $page=$params{page};

	# Mark all feeds originating on this page as removable;
	# preprocess will unmark those that still exist.
	remove_feeds($page);

	return $params{content};
} # }}}

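# A feed is defined with an aggregate directive; name and url are
# required, everything else is optional. A hypothetical example
# (all values are illustrative only):
#   [[aggregate name="example blog" url="http://example.com/"
#     feedurl="http://example.com/index.rss" updateinterval="60"
#     expireage="30" tag="example"]]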
sub preprocess (@) { #{{{
	my %params=@_;

	foreach my $required (qw{name url}) {
		if (! exists $params{$required}) {
			return "[[".sprintf(gettext("aggregate plugin missing %s parameter"), $required)."]]";
		}
	}
	my $feed={};
	my $name=$params{name};
	if (exists $feeds{$name}) {
		$feed=$feeds{$name};
	}
	else {
		$feeds{$name}=$feed;
	}
	$feed->{name}=$name;
	$feed->{sourcepage}=$params{page};
	$feed->{url}=$params{url};
	my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
	$dir=~s/^\/+//;
	($dir)=$dir=~/$config{wiki_file_regexp}/;
	$feed->{dir}=$dir;
	$feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
	delete $feed->{remove};
	delete $feed->{expired};
	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
	$feed->{numposts}=0 unless defined $feed->{numposts};
	$feed->{newposts}=0 unless defined $feed->{newposts};
	$feed->{message}=gettext("new feed") unless defined $feed->{message};
	$feed->{error}=0 unless defined $feed->{error};
	$feed->{tags}=[];
	while (@_) {
		my $key=shift;
		my $value=shift;
		if ($key eq 'tag') {
			push @{$feed->{tags}}, $value;
		}
	}
	return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
		($feed->{error} ? "<em>" : "").$feed->{message}.
		($feed->{error} ? "</em>" : "").
		" (".$feed->{numposts}." ".gettext("posts").
		($feed->{newposts} ? "; ".$feed->{newposts}.
			" ".gettext("new") : "").
		")";
} # }}}

sub delete (@) { #{{{
	my @files=@_;

	# Remove feed data for removed pages.
	foreach my $file (@files) {
		my $page=pagename($file);
		remove_feeds($page);
	}
} # }}}

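# On-disk state format: one record per line, as space-separated
# field=value pairs. The name, feed, guid and message fields may
# contain whitespace, so their spaces, tabs and newlines are
# entity-encoded. An illustrative (made-up) item record:
#   guid=http://example.com/posts/1 feed=example&#32;blog page=example/posts_1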
sub loadstate () { #{{{
	if (-e "$config{wikistatedir}/aggregate") {
		open (IN, "$config{wikistatedir}/aggregate") ||
			die "$config{wikistatedir}/aggregate: $!";
		while (<IN>) {
			$_=IkiWiki::possibly_foolish_untaint($_);
			chomp;
			my $data={};
			foreach my $i (split(/ /, $_)) {
				my ($field, $val)=split(/=/, $i, 2);
				if ($field eq "name" || $field eq "feed" ||
				    $field eq "guid" || $field eq "message") {
					$data->{$field}=decode_entities($val, " \t\n");
				}
				elsif ($field eq "tag") {
					push @{$data->{tags}}, $val;
				}
				else {
					$data->{$field}=$val;
				}
			}

			if (exists $data->{name}) {
				$feeds{$data->{name}}=$data;
			}
			elsif (exists $data->{guid}) {
				$guids{$data->{guid}}=$data;
			}
		}

		close IN;
	}
} # }}}

sub savestate () { #{{{
	eval q{use HTML::Entities};
	error($@) if $@;
	my $newfile="$config{wikistatedir}/aggregate.new";
	# TODO: This cleanup function could use improvement. Any newly
	# aggregated files are left behind unrecorded, and should be deleted.
	my $cleanup = sub { unlink($newfile) };
	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
	foreach my $data (values %feeds, values %guids) {
		if ($data->{remove}) {
			if ($data->{name}) {
				foreach my $guid (values %guids) {
					if ($guid->{feed} eq $data->{name}) {
						$guid->{remove}=1;
					}
				}
			}
			else {
				unlink pagefile($data->{page});
			}
			next;
		}
		elsif ($data->{expired} && exists $data->{page}) {
			unlink pagefile($data->{page});
			delete $data->{page};
			delete $data->{md5};
		}

		my @line;
		foreach my $field (keys %$data) {
			if ($field eq "name" || $field eq "feed" ||
			    $field eq "guid" || $field eq "message") {
				push @line, "$field=".encode_entities($data->{$field}, " \t\n");
			}
			elsif ($field eq "tags") {
				push @line, "tag=$_" foreach @{$data->{tags}};
			}
			else {
				push @line, "$field=".$data->{$field};
			}
		}
		print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
	}
	close OUT or error("save $newfile: $!", $cleanup);
	rename($newfile, "$config{wikistatedir}/aggregate") ||
		error("rename $newfile: $!", $cleanup);
} # }}}

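# Expiry is controlled per feed by expireage (days to keep an item)
# and expirecount (number of items to keep). Expired items keep their
# guid record so they are not aggregated again, but savestate above
# unlinks their pages.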
sub expire () { #{{{
	foreach my $feed (values %feeds) {
		next unless $feed->{expireage} || $feed->{expirecount};
		my $count=0;
		foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
		                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
		                  values %guids) {
			if ($feed->{expireage}) {
				my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
				if ($days_old > $feed->{expireage}) {
					debug(sprintf(gettext("expiring %s (%s days old)"),
						$item->{page}, $days_old));
					$item->{expired}=1;
				}
			}
			elsif ($feed->{expirecount} &&
			       $count >= $feed->{expirecount}) {
				debug(sprintf(gettext("expiring %s"), $item->{page}));
				$item->{expired}=1;
			}
			else {
				$count++;
			}
		}
	}
} # }}}

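# Each feed is polled at most once per updateinterval. On failure the
# error flag and message are recorded so preprocess renders the
# problem on the page that defined the feed.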
sub aggregate () { #{{{
	eval q{use XML::Feed};
	error($@) if $@;
	eval q{use HTML::Entities};
	error($@) if $@;

	foreach my $feed (values %feeds) {
		next unless $config{rebuild} ||
			time - $feed->{lastupdate} >= $feed->{updateinterval};
		$feed->{lastupdate}=time;
		$feed->{newposts}=0;
		$IkiWiki::forcerebuild{$feed->{sourcepage}}=1;

		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

		if (! length $feed->{feedurl}) {
			my @urls=XML::Feed->find_feeds($feed->{url});
			if (! @urls) {
				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
				$feed->{error}=1;
				debug($feed->{message});
				next;
			}
			$feed->{feedurl}=pop @urls;
		}
		my $f=eval{XML::Feed->parse(URI->new($feed->{feedurl}))};
		if ($@) {
			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		if (! $f) {
			$feed->{message}=XML::Feed->errstr;
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}

		foreach my $entry ($f->entries) {
			add_page(
				feed => $feed,
				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
				link => $entry->link,
				content => $entry->content->body,
				guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
				ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
			);
		}

		$feed->{message}=sprintf(gettext("processed ok at %s"),
			displaytime($feed->{lastupdate}));
		$feed->{error}=0;
	}
} # }}}

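# add_page creates or updates the page for a single aggregated item.
# New items get a page name derived from their title, uniquified with
# a numeric suffix if needed; an md5 digest of the content is kept so
# unchanged items don't trigger needless rebuilds.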
sub add_page (@) { #{{{
	my %params=@_;

	my $feed=$params{feed};
	my $guid={};
	my $mtime;
	if (exists $guids{$params{guid}}) {
		# updating an existing post
		$guid=$guids{$params{guid}};
		return if $guid->{expired};
	}
	else {
		# new post
		$guid->{guid}=$params{guid};
		$guids{$params{guid}}=$guid;
		$mtime=$params{ctime};
		$feed->{numposts}++;
		$feed->{newposts}++;

		# assign it an unused page
		my $page=IkiWiki::titlepage($params{title});
		# escape slashes and periods in title so it doesn't specify
		# directory name or trigger ".." disallowing code.
		$page=~s!([/.])!"__".ord($1)."__"!eg;
		$page=$feed->{dir}."/".$page;
		($page)=$page=~/$config{wiki_file_regexp}/;
		if (! defined $page || ! length $page) {
			$page=$feed->{dir}."/item";
		}
		my $c="";
		while (exists $IkiWiki::pagecase{lc $page.$c} ||
		       -e pagefile($page.$c)) {
			$c++;
		}

		# Make sure that the file name isn't too long.
		# NB: This doesn't check for path length limits.
		eval q{use POSIX};
		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
		if (defined $max && length(htmlpage($page)) >= $max) {
			$c="";
			$page=$feed->{dir}."/item";
			while (exists $IkiWiki::pagecase{lc $page.$c} ||
			       -e pagefile($page.$c)) {
				$c++;
			}
		}

		$guid->{page}=$page;
		debug(sprintf(gettext("creating new page %s"), $page));
	}
	$guid->{feed}=$feed->{name};
	# To write or not to write? Need to avoid writing unchanged pages
	# to avoid unnecessary rebuilding. The mtime from rss cannot be
	# trusted; let's use a digest.
	eval q{use Digest::MD5 'md5_hex'};
	error($@) if $@;
	require Encode;
	my $digest=md5_hex(Encode::encode_utf8($params{content}));
	return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
	$guid->{md5}=$digest;

	# Create the page.
	my $template=template("aggregatepost.tmpl", blind_cache => 1);
	$template->param(title => $params{title})
		if defined $params{title} && length($params{title});
	$template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
	$template->param(name => $feed->{name});
	$template->param(url => $feed->{url});
	$template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
		if defined $params{link};
	if (ref $feed->{tags}) {
		$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
	}
	writefile(htmlpage($guid->{page}), $config{srcdir},
		$template->output);

	# Set the mtime, this lets the build process get the right creation
	# time on record for the new page.
	utime $mtime, $mtime, pagefile($guid->{page}) if defined $mtime;
} # }}}

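# htmlescape keeps aggregated content inert: a literal "[[link]]"
# arriving in a feed would otherwise be treated as a wikilink, so it
# is escaped to "\[[link]]".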
sub htmlescape ($) { #{{{
	# escape accidental wikilinks and preprocessor stuff
	my $html=shift;
	$html=~s/(?<!\\)\[\[/\\\[\[/g;
	return $html;
} # }}}

sub urlabs ($$) { #{{{
	my $url=shift;
	my $urlbase=shift;

	URI->new_abs($url, $urlbase)->as_string;
} # }}}

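# For example (illustrative values), urlabs("../art/pic.png",
# "http://example.com/blog/feed") yields "http://example.com/art/pic.png".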
sub htmlabs ($$) { #{{{
	# Convert links in html from relative to absolute.
	# Note that this is a heuristic, which is not specified by the rss
	# spec and may not be right for all feeds. Also, see the related
	# Debian bug report about this heuristic.
	my $html=shift;
	my $urlbase=shift;

	my $ret="";
	my $p = HTML::Parser->new(api_version => 3);
	$p->handler(default => sub { $ret.=join("", @_) }, "text");
	$p->handler(start => sub {
		my ($tagname, $pos, $text) = @_;
		if (ref $HTML::Tagset::linkElements{$tagname}) {
			while (4 <= @$pos) {
				# use attribute sets from right to left
				# to avoid invalidating the offsets
				# when replacing the values
				my($k_offset, $k_len, $v_offset, $v_len) =
					splice(@$pos, -4);
				my $attrname = lc(substr($text, $k_offset, $k_len));
				next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
				next unless $v_offset; # 0 v_offset means no value
				my $v = substr($text, $v_offset, $v_len);
				$v =~ s/^([\'\"])(.*)\1$/$2/;
				my $new_v=urlabs($v, $urlbase);
				$new_v =~ s/\"/&quot;/g; # since we quote with ""
				substr($text, $v_offset, $v_len) = qq("$new_v");
			}
		}
		$ret.=$text;
	}, "tagname, tokenpos, text");
	$p->parse($html);
	$p->eof;
	return $ret;
} # }}}

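# remove_feeds only flags feeds for removal; the actual cleanup of
# their guids and pages happens in savestate.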
sub remove_feeds () { #{{{
	my $page=shift;

	foreach my $id (keys %feeds) {
		if ($feeds{$id}->{sourcepage} eq $page) {
			$feeds{$id}->{remove}=1;
		}
	}
} # }}}

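# Aggregated items are stored as .html source pages directly under
# srcdir, which is why pagefile goes through htmlpage to get the
# file name.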
sub pagefile ($) { #{{{
	my $page=shift;

	return "$config{srcdir}/".htmlpage($page);
} # }}}

1