return $page."style.css";
} #}}}
-sub htmllink ($$;$$$) { #{{{
- my $page=shift;
+sub htmllink ($$$;$$$) { #{{{
+ my $lpage=shift; # the page doing the linking
+ my $page=shift; # the page that will contain the link (different for inline)
my $link=shift;
my $noimageinline=shift; # don't turn links into inline html images
my $forcesubpage=shift; # force a link to a subpage
my $bestlink;
if (! $forcesubpage) {
- $bestlink=bestlink($page, $link);
+ $bestlink=bestlink($lpage, $link);
}
else {
- $bestlink="$page/".lc($link);
+ $bestlink="$lpage/".lc($link);
}
$linktext=pagetitle(basename($link)) unless defined $linktext;
# TODO BUG: %renderedfiles may not have it, if the linked to page
# was also added and isn't yet rendered! Note that this bug is
- # masked by the bug mentioned below that makes all new files
- # be rendered twice.
+ # masked by the bug that makes all new files be rendered twice.
if (! grep { $_ eq $bestlink } values %renderedfiles) {
$bestlink=htmlpage($bestlink);
}
if (! grep { $_ eq $bestlink } values %renderedfiles) {
return "<span><a href=\"".
- cgiurl(do => "create", page => $link, from =>$page).
+ cgiurl(do => "create", page => $link, from => $page).
"\">?</a>$linktext</span>"
}
my $locked_pages=userinfo_get($admin, "locked_pages");
if (globlist_match($page, userinfo_get($admin, "locked_pages"))) {
return 1 if $nonfatal;
- error(htmllink("", $page, 1)." is locked by ".
- htmllink("", $admin, 1)." and cannot be edited.");
+ error(htmllink("", "", $page, 1)." is locked by ".
+ htmllink("", "", $admin, 1)." and cannot be edited.");
}
}
$form->field(name => "password", type => "password");
$form->field(name => "confirm_password", type => "password");
$form->field(name => "subscriptions", size => 50,
- comment => "(".htmllink("", "GlobList", 1).")");
+ comment => "(".htmllink("", "", "GlobList", 1).")");
$form->field(name => "locked_pages", size => 50,
- comment => "(".htmllink("", "GlobList", 1).")");
+ comment => "(".htmllink("", "", "GlobList", 1).")");
if (! is_admin($user_name)) {
$form->field(name => "locked_pages", type => "hidden");
$form->tmpl_param("can_commit", $config{rcs});
$form->tmpl_param("indexlink", indexlink());
$form->tmpl_param("helponformattinglink",
- htmllink("", "HelpOnFormatting", 1));
+ htmllink("", "", "HelpOnFormatting", 1));
$form->tmpl_param("styleurl", styleurl());
$form->tmpl_param("baseurl", "$config{url}/");
if (! $form->submitted) {
require IkiWiki::Render;
$form->tmpl_param("page_preview",
htmlize($config{default_pageext},
- linkify($page, $form->field('content'))));
+ linkify($page, $page, $form->field('content'))));
}
else {
$form->tmpl_param("page_preview", "");
my $bestlink=IkiWiki::bestlink($page, $link);
next if length $bestlink;
push @broken,
- IkiWiki::htmllink($page, $link, 1).
+ IkiWiki::htmllink($page, $page, $link, 1).
" in ".
- IkiWiki::htmllink($params{page}, $page, 1);
+ IkiWiki::htmllink($params{page}, $params{page}, $page, 1);
}
}
}
foreach my $page (blog_list($params{pages}, $params{show})) {
next if $page eq $params{page};
push @pages, $page;
- $template->param(pagelink => htmllink($params{page}, $page));
+ $template->param(pagelink => htmllink($params{page}, $params{page}, $page));
$template->param(content => get_inline_content($params{page}, $page))
if $params{archive} eq "no";
$template->param(ctime => scalar(gmtime($pagectime{$page})));
my $file=$pagesources{$page};
my $type=pagetype($file);
if ($type ne 'unknown') {
- return htmlize($type, linkify($parentpage, readfile(srcfile($file))));
+ return htmlize($type, linkify($page, $parentpage, readfile(srcfile($file))));
}
else {
return "";
}
return "All pages are linked to by other pages." unless @orphans;
- return "<ul>\n".join("\n", map { "<li>".IkiWiki::htmllink($params{page}, $_, 1)."</li>" } sort @orphans)."</ul>\n";
+ return "<ul>\n".join("\n", map { "<li>".IkiWiki::htmllink($params{page}, $params{page}, $_, 1)."</li>" } sort @orphans)."</ul>\n";
} # }}}
1
my %params=@_;
$params{content} =~ s{(?<=\s)(\\?)$smiley_regexp(?=\s)}{
- $1 ? $2 : IkiWiki::htmllink($params{page}, $smileys{$2}, 0, 0, $2)
+ $1 ? $2 : IkiWiki::htmllink($params{page}, $params{page}, $smileys{$2}, 0, 0, $2)
}egs;
return $params{content};
use File::Spec;
use IkiWiki;
-sub linkify ($$) { #{{{
+sub linkify ($$$) { #{{{
+ my $lpage=shift;
my $page=shift;
my $content=shift;
$content =~ s{(\\?)$config{wiki_link_regexp}}{
- $2 ? ( $1 ? "[[$2|$3]]" : htmllink($page, titlepage($3), 0, 0, pagetitle($2)))
- : ( $1 ? "[[$3]]" : htmllink($page, titlepage($3)))
+ $2 ? ( $1 ? "[[$2|$3]]" : htmllink($lpage, $page, titlepage($3), 0, 0, pagetitle($2)))
+ : ( $1 ? "[[$3]]" : htmllink($lpage, $page, titlepage($3)))
}eg;
return $content;
$actions++;
}
if ($config{discussion}) {
- $template->param(discussionlink => htmllink($page, "Discussion", 1, 1));
+ $template->param(discussionlink => htmllink($page, $page, "Discussion", 1, 1));
$actions++;
}
$links{$page}=[findlinks($page, $content)];
- $content=linkify($page, $content);
+ $content=linkify($page, $page, $content);
$content=preprocess($page, $content);
$content=htmlize($type, $content);
* Enable full utf-8 support for page input and output.
* Add a workaround for markdown, which does not work well with utf-8
strings.
- * --getctime had bitrotted (well I only ever used it the once so far..),
+ * --getctime had bitrotted (well I only ever used it the once so far..),
fix and make it a bit more flexible
* rcs_getctime is changed, now rather than needing to loop over all pages,
it should just use the rcs to get the ctime of the passed file.
+ * When inlining a page in another one, links from the inlined page are now
+ expanded the same as they are when rendering the inlined page as a
+ standalone page. So rather than being expanded from the POV of the
+ inlining page, they are expanded from the POV of the inlined page.
- -- Joey Hess <joeyh@debian.org> Fri, 26 May 2006 04:49:49 -0400
+ For example, a link from blog/foo to "bar" will now link to blog/bar
+ if it exists. Previously this needed to be a link explicitly to
+ "blog/bar"; such links will also continue to work.
+
+ (This was slightly complex to do as the link still has to be constructed
+ relative to the inlining page.)
+
+ -- Joey Hess <joeyh@debian.org> Fri, 26 May 2006 11:43:08 -0400
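
The changelog entry above corresponds to the new three-argument forms of
linkify() and htmllink() in the code above. As a minimal sketch (not part of
the patch; the page names are hypothetical and the IkiWiki rendering code is
assumed to be loaded), the inline case now looks roughly like this:

    # The inlined page resolves its own links; the inlining page is still
    # the one the link will physically appear on, so hrefs get built
    # relative to it.
    my $inlined_page="blog/foo";   # page being inlined (does the linking)
    my $inlining_page="blog";      # page the inlined content is shown on
    my $content=readfile(srcfile($pagesources{$inlined_page}));
    # [[bar]] in $content now finds blog/bar if it exists, instead of
    # resolving from the point of view of the inlining page.
    $content=linkify($inlined_page, $inlining_page, $content);

In the ordinary, non-inline case a page simply links for itself, which is why
most callers in the diff pass the same page twice.
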
ikiwiki (1.3) unstable; urgency=low
-Ikiwiki also supports style sheets now. I've not done too much with the default style sheet, but you can customise [[style.css]] to do whatever you like with the look of your wiki.
\ No newline at end of file
+Ikiwiki also supports style sheets now. I've not done too much with the
+default style sheet, but you can customise [[style.css]] to do whatever you
+like with the look of your wiki.
page name. Since the vocabulary it knows is very small, many hints won't
affect the result at all.
-This plugin is included in ikiwiki, but is not enabled by default.
+This plugin is included in ikiwiki, but is not enabled by default. As a
+special bonus, enabling this plugin causes any error messages ikiwiki needs
+to display to be written in haiku.
You need to have the Coy module installed for this plugin to do anything
interesting. That does all the heavy lifting.
* Unit test suite (with tests of at least core stuff like
[[GlobList]]).
-* [[todo/Plugin]] mechanism.
-* Should have fully working [[todo/utf8]] support.
+* [[Plugins]]
+* Should have fully working [[todo/done/utf8]] support.
* [[Optimised_rendering|todo/optimisations]] if possible. Deal with other scalability issues.
* improved [[todo/html]] stylesheets and templates
* A version of the logo in a different font, possibly with the dots on the i's highlighted in some other color.
-This is a [[SubPage]] of the [[SandBox]].
\ No newline at end of file
+This page, [[test]], is a [[SubPage]] of the [[SandBox]].
--- /dev/null
+ikiwiki should support utf-8 pages, both input and output. To test, here's a
+utf-8 smiley:
+
+# ☺
+
+Currently ikiwiki is believed to be utf-8 clean itself; it tells perl to use
+binmode when reading possibly binary files (such as images) and it uses
+utf-8 compatible regexps etc.
+
+Notes:
+
+* Apache "AddDefaultCharset on" settings will not play well with utf-8
+ pages. Turn it off.
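
As a sketch only (this is not ikiwiki's actual readfile; the function name is
made up), the distinction described above between binary files and utf-8 page
sources could look like:

    use Encode;

    sub readfile_sketch {
        my ($file, $binary)=@_;
        open(my $in, "<", $file) || die "cannot read $file: $!";
        binmode($in);           # read raw bytes in all cases
        local $/=undef;
        my $data=<$in>;
        close $in;
        # Page sources get decoded into perl's internal utf-8 form; images
        # and other binary files are returned untouched.
        return $binary ? $data : Encode::decode_utf8($data);
    }

The markdown workaround mentioned in the changelog goes the other way,
re-encoding the string with Encode::encode_utf8 before handing it to
markdown.
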
Well, that would probably be fairly easy to add if it used globlists to
specify which pages use the non-default template.
+
+Hmm, I think the pagetemplate hook should allow one to get close enough to
+this in a plugin now.
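
For example (a hypothetical sketch only: the hook registration style,
parameter names, the globlist_match() call, and the plugin and template
parameter names are all assumptions, not a tested design), such a plugin
might be structured like this:

    package IkiWiki::Plugin::alttemplate;
    use warnings;
    use strict;
    use IkiWiki;

    my $alt_pages="blog/* news/*";  # globlist of pages to flag (made up)

    sub import { #{{{
        IkiWiki::hook(type => "pagetemplate", id => "alttemplate",
            call => \&pagetemplate);
    } #}}}

    sub pagetemplate { #{{{
        my %params=@_;
        # Set a template parameter on pages matching the globlist, so the
        # template itself can vary how such pages are rendered. Assumes the
        # template declares an "alt_style" variable.
        if (IkiWiki::globlist_match($params{page}, $alt_pages)) {
            $params{template}->param(alt_style => 1);
        }
    } #}}}

    1

Whether that is close enough depends on how much of the non-default look can
be expressed within the one template; swapping in an entirely different
template file would still need support in ikiwiki itself.
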
Might be nice to support automatically generating an index based on headers
-in a page, for long pages. The question is, how to turn on such an index? Well, make it a [[plugin]] enabled by a [[preprocessordirective]].
+in a page, for long pages. The question is, how to turn on such an index?
+Well, make it a [[plugin]] enabled by a [[preprocessordirective]].
Suggestions of ideas for plugins:
* list of registered users - tricky because it sorta calls for a way to rebuild the page when a new user is registered. Might be better as a cgi?
-* a [[todo/link_map]]
-* [[todo/sigs]] ?
+* a [[link_map]]
+* [[sigs]] ?
* [[pageindexes]]
* Wiki stats, such as total number of links, most linked to pages
+++ /dev/null
-ikiwiki should support utf-8 pages, both input and output. To test, here's a
-utf-8 smiley:
-
-# ☺
-
-Currently ikiwiki is belived to be utf-8 clean itself; it tells perl to use
-binmode when reading possibly binary files (such as images) and it uses
-utf-8 compatable regexps etc.
-
-utf-8 IO is not enabled by default though. While you can probably embed
-utf-8 in pages anyway, ikiwiki will not treat it right in the cases where
-it deals with things on a per-character basis (mostly when escaping and
-de-escaping special characters in filenames).
-
-To enable utf-8, edit ikiwiki and add -CSD to the perl hashbang line.
-(This should probably be configurable via a --utf8 or better --encoding=
-switch.)
-
-The following problems have been observed when running ikiwiki this way:
-
-* If invalid utf-8 creeps into a file, ikiwiki will crash rendering it as
- follows:
-
- Malformed UTF-8 character (unexpected continuation byte 0x97, with no preceding start byte) in substitution iterator at /usr/bin/markdown line 1317.
- Malformed UTF-8 character (fatal) at /usr/bin/markdown line 1317.
-
- In this example, a literal 0x97 character had gotten into a markdown
- file.
-
- Running this before markdown can avoid it:
-
- $content = Encode::encode_utf8($content);
-
- I'm not sure how, or what should be done after markdown to get the string
- back into a form that perl can treat as utf-8.
-
-* Apache "AddDefaultCharset on" settings will not play well with utf-8
- pages.
-
-* CGI::FormBuilder needs to be told to set `charset => "utf-8"` so that
- utf-8 is used in the edit form. (done)
#!/usr/bin/perl
use warnings;
use strict;
-use Test::More tests => 11;
+use Test::More tests => 13;
-sub linkify ($$$) {
+sub linkify ($$$$) {
+ my $lpage=shift;
my $page=shift;
+
my $content=shift;
my @existing_pages=@{shift()};
}
%IkiWiki::config=IkiWiki::defaultconfig();
- return IkiWiki::linkify($page, $content);
+ return IkiWiki::linkify($lpage, $page, $content);
}
sub links_to ($$) {
BEGIN { use_ok("IkiWiki::Render"); }
-ok(links_to("bar", linkify("foo", "link to [[bar]] ok", ["foo", "bar"])), "ok link");
-ok(not_links_to("bar", linkify("foo", "link to \\[[bar]] ok", ["foo", "bar"])), "escaped link");
-ok(links_to("page=bar", linkify("foo", "link to [[bar]] ok", ["foo"])), "broken link");
-ok(links_to("bar", linkify("foo", "link to [[baz]] and [[bar]] ok", ["foo", "baz", "bar"])), "dual links");
-ok(links_to("baz", linkify("foo", "link to [[baz]] and [[bar]] ok", ["foo", "baz", "bar"])), "dual links");
-ok(links_to("bar", linkify("foo", "link to [[some_page|bar]] ok", ["foo", "bar"])), "named link");
-ok(links_text("some page", linkify("foo", "link to [[some_page|bar]] ok", ["foo", "bar"])), "named link text");
-ok(links_to("bar", linkify("foo", "link to [[some page|bar]] ok", ["foo", "bar"])), "named link, with whitespace");
-ok(links_text("some page", linkify("foo", "link to [[some page|bar]] ok", ["foo", "bar"])), "named link text, with whitespace");
-ok(links_text("Some long, & complex page name.", linkify("foo", "link to [[Some long, & complex page name.|bar]] ok, and this is not a link]] here", ["foo", "bar"])), "complex named link text");
+ok(links_to("bar", linkify("foo", "foo", "link to [[bar]] ok", ["foo", "bar"])), "ok link");
+ok(not_links_to("bar", linkify("foo", "foo", "link to \\[[bar]] ok", ["foo", "bar"])), "escaped link");
+ok(links_to("page=bar", linkify("foo", "foo", "link to [[bar]] ok", ["foo"])), "broken link");
+ok(links_to("bar", linkify("foo", "foo", "link to [[baz]] and [[bar]] ok", ["foo", "baz", "bar"])), "dual links");
+ok(links_to("baz", linkify("foo", "foo", "link to [[baz]] and [[bar]] ok", ["foo", "baz", "bar"])), "dual links");
+ok(links_to("bar", linkify("foo", "foo", "link to [[some_page|bar]] ok", ["foo", "bar"])), "named link");
+ok(links_text("some page", linkify("foo", "foo", "link to [[some_page|bar]] ok", ["foo", "bar"])), "named link text");
+ok(links_to("bar", linkify("foo", "foo", "link to [[some page|bar]] ok", ["foo", "bar"])), "named link, with whitespace");
+ok(links_text("some page", linkify("foo", "foo", "link to [[some page|bar]] ok", ["foo", "bar"])), "named link text, with whitespace");
+ok(links_text("Some long, & complex page name.", linkify("foo", "foo", "link to [[Some long, & complex page name.|bar]] ok, and this is not a link]] here", ["foo", "bar"])), "complex named link text");
+ok(links_to("foo/bar", linkify("foo/item", "foo", "link to [[bar]] ok", ["foo", "foo/item", "foo/bar"])), "inline page link");
+ok(links_to("bar", linkify("foo", "foo", "link to [[bar]] ok", ["foo", "foo/item", "foo/bar"])), "same except not inline");
+