sub preprocess_inline (@) {
my %params=@_;
- if (! exists $params{pages}) {
+ if (! exists $params{pages} && ! exists $params{pagenames}) {
error gettext("missing pages parameter");
}
my $raw=yesno($params{raw});
my $archive=yesno($params{archive});
if (! exists $params{template}) {
$params{template} = $archive ? "archivepage" : "inlinepage";
}
- my @list=pagespec_match_list(
- [ grep { $_ ne $params{page} } keys %pagesources ],
- $params{pages}, location => $params{page});
+ my @list;
- if (exists $params{sort} && $params{sort} eq 'title') {
- @list=sort { pagetitle(basename($a)) cmp pagetitle(basename($b)) } @list;
- }
- elsif (exists $params{sort} && $params{sort} eq 'title_natural') {
- eval q{use Sort::Naturally};
- if ($@) {
- error(gettext("Sort::Naturally needed for title_natural sort"));
+ if (exists $params{pagenames}) {
+
+ foreach my $p (qw(sort pages)) {
+ if (exists $params{$p}) {
+ error sprintf(gettext("the %s and %s parameters cannot be used together"),
+ "pagenames", $p);
+ }
}
- @list=sort { Sort::Naturally::ncmp(pagetitle(basename($a)), pagetitle(basename($b))) } @list;
- }
- elsif (exists $params{sort} && $params{sort} eq 'mtime') {
- @list=sort { $pagemtime{$b} <=> $pagemtime{$a} } @list;
- }
- elsif (! exists $params{sort} || $params{sort} eq 'age') {
- @list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
+
+ @list = split ' ', $params{pagenames};
+ # resolve each name relative to the inlining page, keeping the given order
+ @list = map { bestlink($params{page}, $_) } @list;
+
+ $params{pages} = join(" or ", @list);
}
else {
- error sprintf(gettext("unknown sort type %s"), $params{sort});
+ @list = pagespec_match_list(
+ [ grep { $_ ne $params{page} } keys %pagesources ],
+ $params{pages}, location => $params{page});
+
+ if (exists $params{sort} && $params{sort} eq 'title') {
+ @list=sort { pagetitle(basename($a)) cmp pagetitle(basename($b)) } @list;
+ }
+ elsif (exists $params{sort} && $params{sort} eq 'title_natural') {
+ eval q{use Sort::Naturally};
+ if ($@) {
+ error(gettext("Sort::Naturally needed for title_natural sort"));
+ }
+ @list=sort { Sort::Naturally::ncmp(pagetitle(basename($a)), pagetitle(basename($b))) } @list;
+ }
+ elsif (exists $params{sort} && $params{sort} eq 'mtime') {
+ @list=sort { $pagemtime{$b} <=> $pagemtime{$a} } @list;
+ }
+ elsif (! exists $params{sort} || $params{sort} eq 'age') {
+ @list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
+ }
+ else {
+ error sprintf(gettext("unknown sort type %s"), $params{sort});
+ }
}
if (yesno($params{reverse})) {
@list=reverse(@list);
}
my $indent=0;
my $openli=0;
my $addparent="";
- my $map = "<div class='map'>\n<ul>\n";
+ my $map = "<div class='map'>\n";
+
+ # Return empty div if %mapitems is empty
+ if (!scalar(keys %mapitems)) {
+ $map .= "</div>\n";
+ return $map;
+ }
+ else { # continue populating $map
+ $map .= "<ul>\n";
+ }
+
foreach my $item (sort keys %mapitems) {
my @linktext = (length $mapitems{$item} ? (linktext => $mapitems{$item}) : ());
$item=~s/^\Q$common_prefix\E\///
if defined $common_prefix && length $common_prefix;
# Needs to update whenever a page is added or removed, so
# register a dependency.
add_depends($params{page}, $params{pages});
+ add_depends($params{page}, $params{among}) if exists $params{among};
my %counts;
my $max = 0;
foreach my $page (pagespec_match_list([keys %links],
$params{pages}, location => $params{page})) {
use IkiWiki::Render;
- $counts{$page} = scalar(IkiWiki::backlinks($page));
+
+ my @backlinks = IkiWiki::backlink_pages($page);
+
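+ # if "among" was given, only count backlinks from pages matching it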
+ if (exists $params{among}) {
+ @backlinks = pagespec_match_list(\@backlinks,
+ $params{among}, location => $params{page});
+ }
+
+ $counts{$page} = scalar(@backlinks);
$max = $counts{$page} if $counts{$page} > $max;
}
my $res = "<div class='pagecloud'>\n";
foreach my $page (sort keys %counts) {
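+ # skip tags with no matching links, so unused tags aren't shown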
+ next unless $counts{$page} > 0;
+
my $class = $classes[$counts{$page} * scalar(@classes) / ($max + 1)];
$res .= "<span class=\"$class\">".
htmllink($params{page}, $params{destpage}, $page).
"</span>\n";
$backlinks_calculated=1;
}
-sub backlinks ($) {
+sub backlink_pages ($) {
my $page=shift;
calculate_backlinks();
+ return keys %{$backlinks{$page}};
+}
+
+sub backlinks ($) {
+ my $page=shift;
+
my @links;
- foreach my $p (keys %{$backlinks{$page}}) {
+ foreach my $p (backlink_pages($page)) {
my $href=urlto($p, $page);
# Trim common dir prefixes from both pages.
the po file, for use by wikis whose primary language is not English.
* Add Danish basewiki translation by Jonas Smedegaard.
* img: Fix adding of dependency from page to the image.
+ * pagestats: add `among` parameter, which only counts links from specified
+ pages (smcv)
+ * pagestats: when making a tag cloud, don't emit links where the tag is
+ unused (smcv)
+ * map: Avoid emitting an unclosed ul element if the map is empty. (harishcm)
+ * inline: Add pagenames parameter that can be used to list a set of
+ pages to inline, in a specific order, without using a PageSpec. (smcv)
-- Joey Hess <joeyh@debian.org> Tue, 02 Jun 2009 17:03:41 -0400
[[!template id=gitbranch author="[[harishcm]]" branch=smcv/ready/harishcm-map-fix]]
+> [[merged|done]] --[[Joey]]
+
Patch:
--- /usr/local/share/perl/5.8.8/IkiWiki/Plugin/map.pm
For example, set "feedfile=feed" to cause it to generate `page/feed.atom`
and/or `page/feed.rss`. This option is not supported if the wiki is
configured not to use `usedirs`.
+* `pagenames` - If given instead of `pages`, this is interpreted as a
+ space-separated list of links to pages (with the same
+ [[SubPage/LinkingRules]] as in a [[ikiwiki/WikiLink]]), and they are inlined
+ in exactly the order given: the `sort` and `pages` parameters cannot be used
+ in conjunction with this one.
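+
+  For example, to inline three pages in exactly this order (the page names
+  here are only placeholders):
+
+	\[[!inline pagenames="news/second news/first about"]]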
[[!meta robots="noindex, follow"]]
\[[!pagestats style="table"]]
+The optional `among` parameter restricts the count to links from pages
+matching a [[ikiwiki/PageSpec]]. For instance, to display a cloud of tags
+used on blog entries, you could use:
+
+ \[[!pagestats pages="tags/*" among="blog/posts/*"]]
+
+or to display a cloud of tags related to Linux, you could use:
+
+ \[[!pagestats pages="tags/* and not tags/linux" among="tagged(linux)"]]
+
[[!meta robots="noindex, follow"]]
-----
-I'm working on an [[rcs]] plugin for CVS, adapted from `svn.pm`, in order to integrate ikiwiki at sites where that's all they've got. What's working so far: web commit (post-commit hook and all), diff, add (under certain conditions), and remove. What's not working: with rcs_add(), iff any of the new page's parent dirs aren't already under CVS control and the post-commit hook is enabled, the browser and ikiwiki stall for several seconds trying to add it, then time out. (If I kill ikiwiki when this is happening, it cvs adds the topmost parent that needed adding; if I wait for timeout, it doesn't. I think.) If I disable the post-commit hook and do the same kind of thing, the page is created and saved.
-
-In case you're lucky enough not to know, cvs adds on directories are weird -- they operate immediately against the repository, unlike file adds:
+I'm working on an [[rcs]] plugin for CVS, adapted from `svn.pm`, in order
+to integrate ikiwiki at sites where that's all they've got. What's working
+so far: web commit (post-commit hook and all), diff, add (under certain
+conditions), and remove. What's not working: with rcs_add(), iff any of the
+new page's parent dirs aren't already under CVS control and the post-commit
+hook is enabled, the browser and ikiwiki stall for several seconds trying
+to add it, then time out. (If I kill ikiwiki when this is happening, it cvs
+adds the topmost parent that needed adding; if I wait for timeout, it
+doesn't. I think.) If I disable the post-commit hook and do the same kind
+of thing, the page is created and saved.
+
+In case you're lucky enough not to know, cvs adds on directories are weird
+-- they operate immediately against the repository, unlike file adds:
$ cvs add randomdir
Directory /Users/schmonz/Documents/cvswiki/repository/ikiwiki/randomdir added to the repository
-I was able to work out that when I'm seeing this page save misbehavior, my plugin is somewhere inside `system("cvs", "-Q", "add", "$file")`, which was never returning. If I changed it to anything other than cvs it iterated correctly over all the parent dirs which needed to be added to CVS, in the proper order. (cvs add isn't recursive, sadly.)
+I was able to work out that when I'm seeing this page save misbehavior, my
+plugin is somewhere inside `system("cvs", "-Q", "add", "$file")`, which was
+never returning. If I changed it to anything other than cvs it iterated
+correctly over all the parent dirs which needed to be added to CVS, in the
+proper order. (cvs add isn't recursive, sadly.)
Can you offer an educated guess what's going wrong here? --[[Schmonz]]
> Got `rcs_recentchanges` working, believe it or not, thanks to [cvsps](http://www.cobite.com/cvsps/). If I can figure out this interaction between the post-commit hook and `cvs add` on directories, the CVS plugin is mostly done. Could it be a locking issue? Where should I be looking? Any suggestions appreciated. --[[Schmonz]]
->> Okay, it is definitely a locking issue. First, on the conjecture that `cvs add <directory>` was triggering the post-commit hook and confusing ikiwiki, I wrapped the ikiwiki post-commit binary with a shell script that exited 0 if the triggering file was a directory. The first half of the conjecture was correct -- my wrapper got triggered -- but the web add of `one/two/three.mdwn` (where `one` and `two` weren't existing CVS-controlled dirs) remained hung as before. There were two ikiwiki processes running. On a whim, I killed the one with the higher PID; `cvs add one` immediately completed successfully, then back to a hang and two ikiwiki processes. I killed the newer one again and then `cvs add one/two` and `cvs add one/two/three.mdwn` completed and the web add was successful. --[[Schmonz]]
+>> Okay, it is definitely a locking issue. First, on the conjecture that
+>> `cvs add <directory>` was triggering the post-commit hook and confusing
+>> ikiwiki, I wrapped the ikiwiki post-commit binary with a shell script
+>> that exited 0 if the triggering file was a directory. The first half of
+>> the conjecture was correct -- my wrapper got triggered -- but the web
+>> add of `one/two/three.mdwn` (where `one` and `two` weren't existing
+>> CVS-controlled dirs) remained hung as before. There were two ikiwiki
+>> processes running. On a whim, I killed the one with the higher PID; `cvs
+>> add one` immediately completed successfully, then back to a hang and two
+>> ikiwiki processes. I killed the newer one again and then `cvs add
+>> one/two` and `cvs add one/two/three.mdwn` completed and the web add was
+>> successful. --[[Schmonz]]
+
+>>> Aaaaaand I was wrong about the second half of the conjecture being
+>>> wrong. The wrapper script wasn't correctly identifying directories;
+>>> with that fixed, everything works. I've created a
+>>> [[plugins/contrib/cvs]] plugin page. Thanks for listening. :-)
+>>> --[[Schmonz]]
+
+>> Here is a comment I committed to my laptop from Madrid Airport before
+>> your most recent updates, in case it's still useful:
+>>
+>> Locking certainly seems likely to be a problem. ikiwiki calls `rcs_add`
+>> *before* disabling the post-commit plugin, since all other VCSes allow
+>> adding something in a staged manner. You can see this in, for example,
+>> `editpage.pm` lines 391+.
+>>
+>> So I guess what happens is that ikiwiki has taken the wiki lock, calls
+>> `rcs_add`, which does a `cvs add`, which runs the post commit hook,
+>> since it is not disabled -- which blocks waiting for the wiki lock.
+>>
+>> I guess you can fix this in one of three ways: modify lots of places
+>> in ikiwiki to disable the post-commit hook before calling `rcs_add`,
+>> or make cvs's `rcs_add` temporarily disable the commit hook and
+>> re-enable it (but only if it was not already disabled, somehow),
+>> or make cvs's `rcs_add` only make note that it needs to call `cvs add`
+>> later, and do so at `rcs_commit`. The last of these seems easiest,
+>> especially since ikiwiki always commits after an add, in the same
+>> process, so you could just use a temporary list of things to add.
+>> --[[Joey]]
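+>>
+>> A minimal sketch of that last approach (the `@deferred` list and the
+>> details around it are illustrative, not from an actual plugin):
+>>
+>> 	# in the CVS plugin: queue adds, replay them at commit time
+>> 	my @deferred;
+>>
+>> 	sub rcs_add ($) {
+>> 		my $file=shift;
+>> 		# remember the file (and any new parent dirs) for later
+>> 		push @deferred, $file;
+>> 	}
+>>
+>> 	sub rcs_commit ($$$;$$) {
+>> 		my ($file, $message, $rcstoken, $user, $ipaddr)=@_;
+>> 		# by now ikiwiki has disabled the post-commit hook, so
+>> 		# cvs add can no longer deadlock on the wiki lock
+>> 		system("cvs", "-Q", "add", $_) foreach @deferred;
+>> 		@deferred=();
+>> 		# ... then do the usual cvs commit of $file ...
+>> 	}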
->>> Aaaaaand I was wrong about the second half of the conjecture being wrong. The wrapper script wasn't correctly identifying directories; with that fixed, everything works. I've created a [[plugins/contrib/cvs]] plugin page. Thanks for listening. :-) --[[Schmonz]]
(which returns the keys of $backlinks{$page}, and might be suitable for
exporting) and IkiWiki::backlinks (which calls backlink_pages, then performs
the same lossy transformation as before on the result).
+
+[[done]] --[[Joey]]
be useful for other things, like [[todo/wikitrails]]. --[[smcv]]
[[!tag plugins/inline]]
+
+> It's sort of a pity that a pagespec like "a or b or c" doesn't somehow
+> match to (a, b, c) in that order, but I don't see how that would be
+> generally possible. While this feels a bit like bloat and inline already
+> has far too many parameters, I have [[merged|done]] it. --[[Joey]]
archive="yes" quick="yes" reverse="yes" timeformat="%x"]]
--[[smcv]]
+
+> [[merged|done]] thanks --[[Joey]]
>>
>> --[[smcv]]
+>>> I need a copyright & license statement, so debian/copyright can be updated
+>>> for the plugin, before I can merge this. Otherwise ready. --[[Joey]]
+
>>> That looks like a nice set of fixes. One more that might be worthwhile: instead of reading the page source into a var, and then writing it out later, it might be nice to just
>>> `print readfile(srcfile($pagesources{$page}));` at the appropriate point. -- [[Will]]